repo_name stringlengths 5 108 | path stringlengths 6 333 | size stringlengths 1 6 | content stringlengths 4 977k | license stringclasses 15 values |
|---|---|---|---|---|
meiercaleb/incubator-rya | extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/BindingSetRow.java | 2810 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.rya.indexing.pcj.fluo.app;
import static com.google.common.base.Preconditions.checkNotNull;
import static org.apache.rya.indexing.pcj.fluo.app.IncrementalUpdateConstants.NODEID_BS_DELIM;
import org.apache.fluo.api.data.Bytes;
import edu.umd.cs.findbugs.annotations.DefaultAnnotation;
import edu.umd.cs.findbugs.annotations.NonNull;
import net.jcip.annotations.Immutable;
/**
* The values of an Accumulo Row ID for a row that stores a Binding set for
* a specific Node ID of a query.
*/
@Immutable
@DefaultAnnotation(NonNull.class)
public class BindingSetRow {
    private final String nodeId;
    private final String bindingSetString;

    /**
     * Constructs an instance of {@link BindingSetRow}.
     *
     * @param nodeId - The Node ID of a query node. (not null)
     * @param bindingSetString - A Binding Set that is part of the node's results. (not null)
     */
    public BindingSetRow(final String nodeId, final String bindingSetString) {
        this.nodeId = checkNotNull(nodeId);
        this.bindingSetString = checkNotNull(bindingSetString);
    }

    /**
     * @return The Node ID of a query node.
     */
    public String getNodeId() {
        return nodeId;
    }

    /**
     * @return A Binding Set that is part of the node's results. May be empty if the
     *         row contained only a Node ID.
     */
    public String getBindingSetString() {
        return bindingSetString;
    }

    /**
     * Parses the {@link Bytes} of an Accumulo Row ID into a {@link BindingSetRow}.
     *
     * @param row - The Row ID to parse. (not null).
     * @return A {@link BindingSetRow} holding the parsed values.
     */
    public static BindingSetRow make(final Bytes row) {
        checkNotNull(row);

        // Read the Node ID from the row's bytes. Split on the FIRST delimiter only so
        // that a Binding Set whose serialized form happens to contain the delimiter is
        // kept intact instead of being silently dropped (an unlimited split would
        // produce more than two parts and the old length == 2 check would yield "").
        final String[] rowArray = row.toString().split(NODEID_BS_DELIM, 2);
        final String nodeId = rowArray[0];
        // A row may hold only a Node ID with no Binding Set portion at all.
        final String bindingSetString = rowArray.length == 2 ? rowArray[1] : "";
        return new BindingSetRow(nodeId, bindingSetString);
    }
} | apache-2.0 |
lsmall/flowable-engine | modules/flowable-cmmn-api/src/main/java/org/flowable/cmmn/api/runtime/CaseInstance.java | 1171 | /* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.flowable.cmmn.api.runtime;
import java.util.Date;
import java.util.Map;
/**
* @author Joram Barrez
*/
public interface CaseInstance {

    /** @return the unique identifier of this case instance. */
    String getId();

    /** @return the id of this case instance's parent, if any. */
    String getParentId();

    /** @return the business key that was set on this case instance, if any. */
    String getBusinessKey();

    /** @return the name of this case instance, if one was set. */
    String getName();

    /** @return the id of the case definition from which this instance was started. */
    String getCaseDefinitionId();

    /** @return the current state of this case instance. */
    String getState();

    /** @return the time at which this case instance was started. */
    Date getStartTime();

    /** @return the id of the user that started this case instance, if known. */
    String getStartUserId();

    /** @return the callback id, if this case instance was started with one. */
    String getCallbackId();

    /** @return the callback type, if this case instance was started with one. */
    String getCallbackType();

    // NOTE(review): exact completion semantics are defined by the implementation —
    // presumably true when the instance can be completed; confirm against the engine.
    /** @return whether this case instance is currently completeable. */
    boolean isCompleteable();

    /** @return the tenant identifier of this case instance. */
    String getTenantId();

    /**
     * Returns the case variables if requested in the case instance query
     */
    Map<String, Object> getCaseVariables();
}
| apache-2.0 |
MikeThomsen/nifi | nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/swap/SimpleSwapDeserializer.java | 13651 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.nifi.controller.swap;
import java.io.DataInputStream;
import java.io.EOFException;
import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import org.apache.nifi.controller.queue.FlowFileQueue;
import org.apache.nifi.controller.queue.QueueSize;
import org.apache.nifi.controller.repository.FlowFileRecord;
import org.apache.nifi.controller.repository.IncompleteSwapFileException;
import org.apache.nifi.controller.repository.StandardFlowFileRecord;
import org.apache.nifi.controller.repository.SwapContents;
import org.apache.nifi.controller.repository.SwapSummary;
import org.apache.nifi.controller.repository.claim.ResourceClaim;
import org.apache.nifi.controller.repository.claim.ResourceClaimManager;
import org.apache.nifi.controller.repository.claim.StandardContentClaim;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class SimpleSwapDeserializer implements SwapDeserializer {
    /** Most recent version of the legacy "simple" swap file encoding that this class can read. */
    public static final int SWAP_ENCODING_VERSION = 10;
    private static final Logger logger = LoggerFactory.getLogger(SimpleSwapDeserializer.class);

    /**
     * Reads the header of a swap file and produces a summary of its contents. Note that
     * this implementation obtains the summary by fully deserializing every FlowFile
     * record via {@link #deserializeFlowFiles}.
     *
     * @param in stream positioned at the start of the swap file payload
     * @param swapLocation location of the swap file, used for error reporting
     * @param claimManager used to resolve Resource Claims referenced by the records
     * @return a summary of the swap file's contents
     * @throws IOException if the encoding version is newer than supported or the stream is corrupt
     */
    @Override
    public SwapSummary getSwapSummary(final DataInputStream in, final String swapLocation, final ResourceClaimManager claimManager) throws IOException {
        final int swapEncodingVersion = in.readInt();
        if (swapEncodingVersion > SWAP_ENCODING_VERSION) {
            final String errMsg = "Cannot swap FlowFiles in from " + swapLocation + " because the encoding version is "
                + swapEncodingVersion + ", which is too new (expecting " + SWAP_ENCODING_VERSION + " or less)";
            throw new IOException(errMsg);
        }

        final int numRecords;
        final long contentSize;
        Long maxRecordId = null;
        try {
            in.readUTF(); // ignore Connection ID
            numRecords = in.readInt();
            contentSize = in.readLong();

            if (numRecords == 0) {
                return StandardSwapSummary.EMPTY_SUMMARY;
            }

            // The max record id was added to the header in encoding version 8.
            if (swapEncodingVersion > 7) {
                maxRecordId = in.readLong();
            }
        } catch (final EOFException eof) {
            // A truncated header means no FlowFiles could have been written; treat as empty.
            logger.warn("Found premature End-of-File when reading Swap File {}. EOF occurred before any FlowFiles were encountered", swapLocation);
            return StandardSwapSummary.EMPTY_SUMMARY;
        }

        final QueueSize queueSize = new QueueSize(numRecords, contentSize);
        final SwapContents swapContents = deserializeFlowFiles(in, queueSize, maxRecordId, swapEncodingVersion, claimManager, swapLocation);
        return swapContents.getSummary();
    }

    /**
     * Deserializes all FlowFile records from a swap file, verifying first that the
     * records belong to the given queue.
     *
     * @param in stream positioned at the start of the swap file payload
     * @param swapLocation location of the swap file, used for error reporting
     * @param queue the queue the FlowFiles are being swapped back into; its identifier
     *        must match the Connection ID recorded in the swap file
     * @param claimManager used to resolve Resource Claims referenced by the records
     * @return the deserialized swap contents
     * @throws IOException if the encoding version is too new or the stream is corrupt
     * @throws IllegalArgumentException if the swap file belongs to a different connection
     * @throws IncompleteSwapFileException if the stream ends before all records are read;
     *         the exception carries whatever partial contents were recovered
     */
    @Override
    public SwapContents deserializeFlowFiles(final DataInputStream in, final String swapLocation, final FlowFileQueue queue, final ResourceClaimManager claimManager) throws IOException {
        final int swapEncodingVersion = in.readInt();
        if (swapEncodingVersion > SWAP_ENCODING_VERSION) {
            throw new IOException("Cannot swap FlowFiles in from SwapFile because the encoding version is "
                + swapEncodingVersion + ", which is too new (expecting " + SWAP_ENCODING_VERSION + " or less)");
        }

        final String connectionId = in.readUTF(); // Connection ID
        if (!connectionId.equals(queue.getIdentifier())) {
            throw new IllegalArgumentException("Cannot deserialize FlowFiles from Swap File at location " + swapLocation
                + " because those FlowFiles belong to Connection with ID " + connectionId + " and an attempt was made to swap them into a Connection with ID " + queue.getIdentifier());
        }

        int numRecords = 0;
        long contentSize = 0L;
        Long maxRecordId = null;
        try {
            numRecords = in.readInt();
            contentSize = in.readLong(); // Content Size
            if (swapEncodingVersion > 7) {
                maxRecordId = in.readLong(); // Max Record ID
            }
        } catch (final EOFException eof) {
            // Header was truncated: surface whatever we know via an IncompleteSwapFileException
            // so the caller can decide how to handle the partial file.
            final QueueSize queueSize = new QueueSize(numRecords, contentSize);
            final SwapSummary summary = new StandardSwapSummary(queueSize, maxRecordId, Collections.emptyList(), 0L, 0L);
            final SwapContents partialContents = new StandardSwapContents(summary, Collections.emptyList());
            throw new IncompleteSwapFileException(swapLocation, partialContents);
        }

        final QueueSize queueSize = new QueueSize(numRecords, contentSize);
        return deserializeFlowFiles(in, queueSize, maxRecordId, swapEncodingVersion, claimManager, swapLocation);
    }

    /**
     * Reads {@code queueSize.getObjectCount()} FlowFile records from the stream. The
     * record layout varies by {@code serializationVersion}; each version gate below
     * documents the field that the corresponding version added or removed.
     *
     * @param in stream positioned at the first FlowFile record
     * @param queueSize number of records (and their total content size) expected
     * @param maxRecordId max record id from the header, or null for encodings older than v8
     * @param serializationVersion encoding version of the swap file being read
     * @param claimManager used to resolve (or recreate) Resource Claims
     * @param location swap file location, used for error reporting
     * @return the deserialized swap contents
     * @throws IncompleteSwapFileException if EOF is hit mid-record; carries the records
     *         recovered so far
     */
    private static SwapContents deserializeFlowFiles(final DataInputStream in, final QueueSize queueSize, final Long maxRecordId,
        final int serializationVersion, final ResourceClaimManager claimManager, final String location) throws IOException {
        final List<FlowFileRecord> flowFiles = new ArrayList<>(queueSize.getObjectCount());
        final List<ResourceClaim> resourceClaims = new ArrayList<>(queueSize.getObjectCount());
        Long maxId = maxRecordId;

        for (int i = 0; i < queueSize.getObjectCount(); i++) {
            try {
                // legacy encoding had an "action" because it used to be couple with FlowFile Repository code
                if (serializationVersion < 3) {
                    final int action = in.read();
                    if (action != 1) {
                        throw new IOException("Swap File is version " + serializationVersion + " but did not contain a 'UPDATE' record type");
                    }
                }

                final StandardFlowFileRecord.Builder ffBuilder = new StandardFlowFileRecord.Builder();
                final long recordId = in.readLong();
                // Track the largest record id actually seen, in case the header value was absent.
                if (maxId == null || recordId > maxId) {
                    maxId = recordId;
                }

                ffBuilder.id(recordId);
                ffBuilder.entryDate(in.readLong());

                if (serializationVersion > 1) {
                    // Lineage information was added in version 2
                    if (serializationVersion < 10) {
                        // Versions 2-9 stored a list of lineage identifiers; version 10 dropped it.
                        final int numLineageIdentifiers = in.readInt();
                        for (int lineageIdIdx = 0; lineageIdIdx < numLineageIdentifiers; lineageIdIdx++) {
                            in.readUTF(); //skip each identifier
                        }
                    }

                    // version 9 adds in a 'lineage start index'
                    final long lineageStartDate = in.readLong();
                    final long lineageStartIndex;
                    if (serializationVersion > 8) {
                        lineageStartIndex = in.readLong();
                    } else {
                        lineageStartIndex = 0L;
                    }
                    ffBuilder.lineageStart(lineageStartDate, lineageStartIndex);

                    if (serializationVersion > 5) {
                        // Version 9 adds in a 'queue date index'
                        final long lastQueueDate = in.readLong();
                        final long queueDateIndex;
                        if (serializationVersion > 8) {
                            queueDateIndex = in.readLong();
                        } else {
                            queueDateIndex = 0L;
                        }
                        ffBuilder.lastQueued(lastQueueDate, queueDateIndex);
                    }
                }

                ffBuilder.size(in.readLong());

                if (serializationVersion < 3) {
                    readString(in); // connection Id
                }

                final boolean hasClaim = in.readBoolean();
                ResourceClaim resourceClaim = null;
                if (hasClaim) {
                    // Claim ids were numeric longs before version 5, strings afterwards.
                    final String claimId;
                    if (serializationVersion < 5) {
                        claimId = String.valueOf(in.readLong());
                    } else {
                        claimId = in.readUTF();
                    }

                    final String container = in.readUTF();
                    final String section = in.readUTF();

                    // Resource offset/length were added in version 6; -1 length means "unknown".
                    final long resourceOffset;
                    final long resourceLength;
                    if (serializationVersion < 6) {
                        resourceOffset = 0L;
                        resourceLength = -1L;
                    } else {
                        resourceOffset = in.readLong();
                        resourceLength = in.readLong();
                    }

                    final long claimOffset = in.readLong();

                    final boolean lossTolerant;
                    if (serializationVersion >= 4) {
                        lossTolerant = in.readBoolean();
                    } else {
                        lossTolerant = false;
                    }

                    resourceClaim = claimManager.getResourceClaim(container, section, claimId);
                    if (resourceClaim == null) {
                        logger.error("Swap file indicates that FlowFile was referencing Resource Claim at container={}, section={}, claimId={}, "
                            + "but this Resource Claim cannot be found! Will create a temporary Resource Claim, but this may affect the framework's "
                            + "ability to properly clean up this resource", container, section, claimId);
                        resourceClaim = claimManager.newResourceClaim(container, section, claimId, lossTolerant, true);
                    }

                    final StandardContentClaim claim = new StandardContentClaim(resourceClaim, resourceOffset);
                    claim.setLength(resourceLength);

                    ffBuilder.contentClaim(claim);
                    ffBuilder.contentClaimOffset(claimOffset);
                }

                // Before version 3 an "attributes changed" flag gated the attribute map;
                // later versions always write the attributes.
                boolean attributesChanged = true;
                if (serializationVersion < 3) {
                    attributesChanged = in.readBoolean();
                }

                if (attributesChanged) {
                    final int numAttributes = in.readInt();
                    for (int j = 0; j < numAttributes; j++) {
                        final String key = readString(in);
                        final String value = readString(in);

                        ffBuilder.addAttribute(key, value);
                    }
                }

                final FlowFileRecord record = ffBuilder.build();
                if (resourceClaim != null) {
                    resourceClaims.add(resourceClaim);
                }

                flowFiles.add(record);
            } catch (final EOFException eof) {
                // Mid-record EOF: report the records recovered so far as partial contents.
                final SwapSummary swapSummary = new StandardSwapSummary(queueSize, maxId, resourceClaims, 0L, 0L);
                final SwapContents partialContents = new StandardSwapContents(swapSummary, flowFiles);
                throw new IncompleteSwapFileException(location, partialContents);
            }
        }

        final SwapSummary swapSummary = new StandardSwapSummary(queueSize, maxId, resourceClaims, 0L, 0L);
        return new StandardSwapContents(swapSummary, flowFiles);
    }

    /**
     * Reads a length-prefixed UTF-8 string (see {@link #readFieldLength}).
     *
     * @throws EOFException if the stream ends at the start of the length prefix
     */
    private static String readString(final InputStream in) throws IOException {
        final Integer numBytes = readFieldLength(in);
        if (numBytes == null) {
            throw new EOFException();
        }
        final byte[] bytes = new byte[numBytes];
        fillBuffer(in, bytes, numBytes);
        return new String(bytes, StandardCharsets.UTF_8);
    }

    /**
     * Reads a field length: a 2-byte big-endian value, where the escape value 0xFFFF
     * indicates that the real length follows as a 4-byte big-endian value.
     *
     * @return the field length, or null if the stream ended cleanly before the prefix
     * @throws EOFException if the stream ends partway through the prefix
     */
    private static Integer readFieldLength(final InputStream in) throws IOException {
        final int firstValue = in.read();
        final int secondValue = in.read();
        if (firstValue < 0) {
            return null;
        }
        if (secondValue < 0) {
            throw new EOFException();
        }
        if (firstValue == 0xff && secondValue == 0xff) {
            final int ch1 = in.read();
            final int ch2 = in.read();
            final int ch3 = in.read();
            final int ch4 = in.read();
            if ((ch1 | ch2 | ch3 | ch4) < 0) {
                throw new EOFException();
            }
            return (ch1 << 24) + (ch2 << 16) + (ch3 << 8) + ch4;
        } else {
            return (firstValue << 8) + secondValue;
        }
    }

    /**
     * Reads exactly {@code length} bytes into {@code buffer}.
     *
     * @throws EOFException if the stream ends before {@code length} bytes are read
     */
    private static void fillBuffer(final InputStream in, final byte[] buffer, final int length) throws IOException {
        int bytesRead;
        int totalBytesRead = 0;
        while ((bytesRead = in.read(buffer, totalBytesRead, length - totalBytesRead)) > 0) {
            totalBytesRead += bytesRead;
        }
        if (totalBytesRead != length) {
            throw new EOFException();
        }
    }
}
| apache-2.0 |
nmldiegues/stibt | infinispan/query/src/test/java/org/infinispan/query/blackbox/TopologyAwareClusteredQueryTest.java | 2443 | /*
* JBoss, Home of Professional Open Source
* Copyright 2013 Red Hat Inc. and/or its affiliates and other
* contributors as indicated by the @author tags. All rights reserved.
* See the copyright.txt in the distribution for a full listing of
* individual contributors.
*
* This is free software; you can redistribute it and/or modify it
* under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation; either version 2.1 of
* the License, or (at your option) any later version.
*
* This software is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this software; if not, write to the Free
* Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
* 02110-1301 USA, or see the FSF site: http://www.fsf.org.
*/
package org.infinispan.query.blackbox;
import org.infinispan.Cache;
import org.infinispan.configuration.cache.CacheMode;
import org.infinispan.query.helper.TestQueryHelperFactory;
import org.infinispan.query.test.Person;
import org.testng.annotations.Test;
import java.util.List;
/**
* Tests for testing clustered queries functionality on topology aware nodes.
*
* @author Anna Manukyan
*/
@Test(groups = "functional", testName = "query.distributed.TopologyAwareClusteredQueryTest")
public class TopologyAwareClusteredQueryTest extends ClusteredQueryTest {

    /**
     * Creates two topology-aware cache nodes configured by the getters below,
     * registers their cache managers for cleanup, and waits for the cluster to form.
     */
    @Override
    protected void createCacheManagers() throws Throwable {
        final List nodes = TestQueryHelperFactory.createTopologyAwareCacheNodes(
            2, getCacheMode(), transactionEnabled(), isIndexLocalOnly(), isRamDirectory());

        // Track every node's manager so the test framework can tear it down.
        for (int i = 0; i < nodes.size(); i++) {
            cacheManagers.add(((Cache) nodes.get(i)).getCacheManager());
        }

        cache1 = (Cache<String, Person>) nodes.get(0);
        cache2 = (Cache<String, Person>) nodes.get(1);

        waitForClusterToForm();
    }

    /** @return the cache mode used by the test nodes (synchronous replication). */
    public CacheMode getCacheMode() {
        return CacheMode.REPL_SYNC;
    }

    /** @return whether indexing is local-only on each node. */
    public boolean isIndexLocalOnly() {
        return true;
    }

    /** @return whether the index is kept in a RAM directory. */
    public boolean isRamDirectory() {
        return true;
    }

    /** @return whether the caches are transactional. */
    public boolean transactionEnabled() {
        return false;
    }
}
| apache-2.0 |
fogbeam/cas_mirror | core/cas-server-core-webflow-api/src/test/java/org/apereo/cas/web/flow/executor/ClientFlowExecutionRepositoryTests.java | 6868 | package org.apereo.cas.web.flow.executor;
import lombok.val;
import org.cryptacular.bean.AEADBlockCipherBean;
import org.cryptacular.bean.KeyStoreFactoryBean;
import org.cryptacular.generator.sp80038d.RBGNonce;
import org.cryptacular.io.ClassPathResource;
import org.cryptacular.spec.AEADBlockCipherSpec;
import org.junit.jupiter.api.Tag;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.boot.test.context.TestConfiguration;
import org.springframework.cloud.autoconfigure.RefreshAutoConfiguration;
import org.springframework.context.ConfigurableApplicationContext;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Lazy;
import org.springframework.expression.spel.standard.SpelExpressionParser;
import org.springframework.webflow.config.FlowBuilderServicesBuilder;
import org.springframework.webflow.config.FlowDefinitionRegistryBuilder;
import org.springframework.webflow.core.collection.LocalAttributeMap;
import org.springframework.webflow.definition.registry.FlowDefinitionLocator;
import org.springframework.webflow.definition.registry.FlowDefinitionRegistry;
import org.springframework.webflow.engine.builder.support.FlowBuilderServices;
import org.springframework.webflow.engine.impl.FlowExecutionImplFactory;
import org.springframework.webflow.execution.FlowExecution;
import org.springframework.webflow.execution.FlowExecutionFactory;
import org.springframework.webflow.execution.FlowExecutionKey;
import org.springframework.webflow.execution.repository.BadlyFormattedFlowExecutionKeyException;
import org.springframework.webflow.executor.FlowExecutor;
import org.springframework.webflow.executor.FlowExecutorImpl;
import org.springframework.webflow.expression.spel.WebFlowSpringELExpressionParser;
import org.springframework.webflow.test.CasMockViewFactoryCreator;
import org.springframework.webflow.test.MockExternalContext;
import static org.junit.jupiter.api.Assertions.*;
import static org.mockito.Mockito.*;
/**
* Test cases for {@link ClientFlowExecutionRepository}.
* @author Misagh Moayyed
* @since 6.1
*/
@SpringBootTest(classes = {
ClientFlowExecutionRepositoryTests.WebflowTestConfiguration.class,
RefreshAutoConfiguration.class
})
@Tag("Webflow")
public class ClientFlowExecutionRepositoryTests {
    @Autowired
    @Qualifier("flowExecutor")
    private FlowExecutor flowExecutor;

    /**
     * Exercises the repository's failure modes with mocked collaborators: snapshot
     * removal calls are invoked for coverage (no exception expected), key generation
     * and execution lookup reject mocked/foreign inputs, and a key whose data access
     * throws is wrapped in a {@link ClientFlowExecutionRepositoryException}.
     */
    @Test
    public void verifyBadKey() {
        val factory = new ClientFlowExecutionRepository(mock(FlowExecutionFactory.class), mock(FlowDefinitionLocator.class), mock(Transcoder.class));
        factory.removeFlowExecutionSnapshot(mock(FlowExecution.class));
        factory.removeAllFlowExecutionSnapshots(mock(FlowExecution.class));
        assertThrows(ClientFlowExecutionRepositoryException.class, () -> factory.getKey(mock(FlowExecution.class)));
        assertThrows(IllegalArgumentException.class, () -> factory.getFlowExecution(mock(FlowExecutionKey.class)));
        val key = mock(ClientFlowExecutionKey.class);
        when(key.getData()).thenThrow(IllegalArgumentException.class);
        assertThrows(ClientFlowExecutionRepositoryException.class, () -> factory.getFlowExecution(key));
    }

    /**
     * End-to-end check: launches "test-flow", verifies the paused key round-trips
     * through {@link ClientFlowExecutionKey#parse}, then resumes the flow with a
     * "submit" event and expects the "lasagnaDinner" outcome.
     */
    @Test
    public void verifyLaunchAndResumeFlow() {
        assertNotNull(flowExecutor);
        val launchResult = flowExecutor.launchExecution("test-flow", new LocalAttributeMap<>(), new MockExternalContext());
        assertNotNull(launchResult.getPausedKey());
        try {
            // The serialized key must parse back to an equal string representation.
            val key = ClientFlowExecutionKey.parse(launchResult.getPausedKey());
            assertEquals(key.toString(), launchResult.getPausedKey());
        } catch (final BadlyFormattedFlowExecutionKeyException e) {
            fail(() -> "Error parsing flow execution key: " + e.getMessage());
        }
        val context = new MockExternalContext();
        context.setEventId("submit");
        context.getRequestMap().put("vegan", "0");
        val resumeResult = flowExecutor.resumeExecution(launchResult.getPausedKey(), context);
        assertNotNull(resumeResult.getOutcome());
        assertEquals("lasagnaDinner", resumeResult.getOutcome().getId());
    }

    /**
     * Minimal Spring Web Flow wiring for the tests above: a flow registry loaded from
     * classpath test flows, a client-side (stateless) execution repository, and an
     * encrypted transcoder backed by a test keystore.
     */
    @TestConfiguration("WebflowTestConfiguration")
    @Lazy(false)
    public static class WebflowTestConfiguration {
        @Autowired
        private ConfigurableApplicationContext applicationContext;

        /** Flow executor wired to the client-side execution repository. */
        @Bean
        public FlowExecutor flowExecutor() {
            val impl = new FlowExecutionImplFactory();
            val repo = getFlowExecutionRepository(impl);
            // The factory and repository reference each other, so wire the key factory back in.
            impl.setExecutionKeyFactory(repo);
            return new FlowExecutorImpl(flowRegistry(), flowExecutionFactory(), repo);
        }

        /** Registry of flow definitions loaded from classpath:/test/*-flow.xml. */
        @Bean
        public FlowDefinitionRegistry flowRegistry() {
            val builder = new FlowDefinitionRegistryBuilder(this.applicationContext, flowBuilder());
            builder.setBasePath("classpath:");
            builder.addFlowLocationPattern("/test/*-flow.xml");
            return builder.build();
        }

        /** Flow builder services using a mock view factory and SpEL expressions. */
        @Bean
        public FlowBuilderServices flowBuilder() {
            val builder = new FlowBuilderServicesBuilder();
            builder.setViewFactoryCreator(new CasMockViewFactoryCreator());
            builder.setExpressionParser(new WebFlowSpringELExpressionParser(new SpelExpressionParser()));
            return builder.build();
        }

        /** Flow execution factory whose key factory is the client-side repository. */
        @Bean
        public FlowExecutionFactory flowExecutionFactory() {
            val impl = new FlowExecutionImplFactory();
            val repo = getFlowExecutionRepository(impl);
            impl.setExecutionKeyFactory(repo);
            return impl;
        }

        /** Builds a repository that serializes flow executions via the encrypted transcoder. */
        private ClientFlowExecutionRepository getFlowExecutionRepository(final FlowExecutionFactory impl) {
            val repo = new ClientFlowExecutionRepository();
            repo.setFlowExecutionFactory(impl);
            repo.setFlowDefinitionLocator(flowRegistry());
            repo.setTranscoder(transcoder());
            return repo;
        }

        /**
         * Encrypted transcoder using an AES-128 key from a test JCEKS keystore with
         * AES/GCM and a random nonce.
         */
        @Bean
        public Transcoder transcoder() {
            val keystoreFactory = new KeyStoreFactoryBean();
            keystoreFactory.setType("JCEKS");
            keystoreFactory.setPassword("changeit");
            keystoreFactory.setResource(new ClassPathResource("test-keystore.jceks"));
            val cipher = new AEADBlockCipherBean();
            cipher.setKeyAlias("aes128");
            cipher.setKeyPassword("changeit");
            cipher.setKeyStore(keystoreFactory.newInstance());
            cipher.setBlockCipherSpec(new AEADBlockCipherSpec("AES", "GCM"));
            cipher.setNonce(new RBGNonce());
            return new EncryptedTranscoder(cipher);
        }
    }
}
| apache-2.0 |
vzhukovskyi/kaa | server/common/dto/src/main/java/org/kaaproject/kaa/common/dto/AbstractSchemaDto.java | 4172 | /*
* Copyright 2014 CyberVision, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kaaproject.kaa.common.dto;
import java.util.Objects;

import org.kaaproject.avro.ui.shared.RecordField;

import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
@JsonIgnoreProperties({"schemaForm"})
public abstract class AbstractSchemaDto extends SchemaDto {

    private static final long serialVersionUID = 6821310997907855007L;

    protected String applicationId;
    protected String schema;
    // Excluded from JSON serialization via the class-level @JsonIgnoreProperties.
    protected RecordField schemaForm;
    protected String name;
    protected String description;
    protected String createdUsername;
    protected long createdTime;
    protected long endpointCount;

    public String getApplicationId() {
        return applicationId;
    }

    public void setApplicationId(String applicationId) {
        this.applicationId = applicationId;
    }

    public String getSchema() {
        return schema;
    }

    public void setSchema(String schema) {
        this.schema = schema;
    }

    public RecordField getSchemaForm() {
        return schemaForm;
    }

    public void setSchemaForm(RecordField schemaForm) {
        this.schemaForm = schemaForm;
    }

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public String getDescription() {
        return description;
    }

    public void setDescription(String description) {
        this.description = description;
    }

    public String getCreatedUsername() {
        return createdUsername;
    }

    public void setCreatedUsername(String createdUsername) {
        this.createdUsername = createdUsername;
    }

    public long getCreatedTime() {
        return createdTime;
    }

    public void setCreatedTime(long createdTime) {
        this.createdTime = createdTime;
    }

    public long getEndpointCount() {
        return endpointCount;
    }

    public void setEndpointCount(long endpointCount) {
        this.endpointCount = endpointCount;
    }

    /**
     * Copies the user-editable fields (name and description) from another schema DTO.
     */
    public void editFields(AbstractSchemaDto other) {
        this.name = other.name;
        this.description = other.description;
    }

    /**
     * Equality is based on application id, major/minor version, and schema text;
     * null-safe comparisons use {@link Objects#equals(Object, Object)}.
     */
    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (!(o instanceof AbstractSchemaDto)) {
            return false;
        }
        AbstractSchemaDto that = (AbstractSchemaDto) o;
        return majorVersion == that.majorVersion
                && minorVersion == that.minorVersion
                && Objects.equals(applicationId, that.applicationId)
                && Objects.equals(schema, that.schema);
    }

    /** Hash over the same fields as {@link #equals(Object)}. */
    @Override
    public int hashCode() {
        return Objects.hash(applicationId, majorVersion, minorVersion, schema);
    }

    @Override
    public String toString() {
        return "AbstractSchemaDto [id=" + id + ", applicationId="
            + applicationId + ", majorVersion=" + majorVersion
            + ", minorVersion=" + minorVersion
            + ", name=" + name + ", description=" + description
            + ", createdUsername=" + createdUsername + ", createdTime="
            + createdTime + ", endpointCount=" + endpointCount + "]";
    }
}
| apache-2.0 |
samaitra/ignite | modules/core/src/main/java/org/apache/ignite/internal/binary/streams/BinaryHeapOutputStream.java | 5760 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.internal.binary.streams;
import org.apache.ignite.internal.util.GridUnsafe;
import static org.apache.ignite.internal.util.GridUnsafe.BIG_ENDIAN;
/**
* Binary heap output stream.
*/
public final class BinaryHeapOutputStream extends BinaryAbstractOutputStream {
    /** Allocator chunk used to obtain, grow, and release the underlying byte array. */
    private final BinaryMemoryAllocatorChunk chunk;

    /** Backing heap buffer; {@code pos} (inherited) is the current write position. */
    private byte[] data;

    /**
     * Constructor.
     *
     * @param cap Initial capacity.
     */
    public BinaryHeapOutputStream(int cap) {
        this(cap, BinaryMemoryAllocator.THREAD_LOCAL.chunk());
    }

    /**
     * Constructor.
     *
     * @param cap Capacity.
     * @param chunk Chunk.
     */
    public BinaryHeapOutputStream(int cap, BinaryMemoryAllocatorChunk chunk) {
        this.chunk = chunk;

        data = chunk.allocate(cap);
    }

    /** {@inheritDoc} */
    @Override public void close() {
        // Return the buffer to the allocator; 'pos' tells the chunk how much was used.
        chunk.release(data, pos);
    }

    /** {@inheritDoc} */
    @Override public void ensureCapacity(int cnt) {
        if (cnt > data.length) {
            int newCap = capacity(data.length, cnt);

            data = chunk.reallocate(data, newCap);
        }
    }

    /** {@inheritDoc} */
    @Override public byte[] array() {
        return data;
    }

    /** {@inheritDoc} */
    @Override public byte[] arrayCopy() {
        // Copy only the written prefix [0, pos), not the whole allocated buffer.
        byte[] res = new byte[pos];

        System.arraycopy(data, 0, res, 0, pos);

        return res;
    }

    /** {@inheritDoc} */
    @Override public boolean hasArray() {
        return true;
    }

    /** {@inheritDoc} */
    @Override protected void writeByteAndShift(byte val) {
        data[pos++] = val;
    }

    /** {@inheritDoc} */
    @Override protected void copyAndShift(Object src, long off, int len) {
        GridUnsafe.copyMemory(src, off, data, GridUnsafe.BYTE_ARR_OFF + pos, len);

        shift(len);
    }

    // NOTE: in each multi-byte write below, the *LE (byte-swapping) variant is used on
    // big-endian platforms and the native put on little-endian ones, which keeps the
    // serialized form in a single byte order across platforms.

    /** {@inheritDoc} */
    @Override protected void writeShortFast(short val) {
        long off = GridUnsafe.BYTE_ARR_OFF + pos;

        if (BIG_ENDIAN)
            GridUnsafe.putShortLE(data, off, val);
        else
            GridUnsafe.putShort(data, off, val);
    }

    /** {@inheritDoc} */
    @Override protected void writeCharFast(char val) {
        long off = GridUnsafe.BYTE_ARR_OFF + pos;

        if (BIG_ENDIAN)
            GridUnsafe.putCharLE(data, off, val);
        else
            GridUnsafe.putChar(data, off, val);
    }

    /** {@inheritDoc} */
    @Override protected void writeIntFast(int val) {
        long off = GridUnsafe.BYTE_ARR_OFF + pos;

        if (BIG_ENDIAN)
            GridUnsafe.putIntLE(data, off, val);
        else
            GridUnsafe.putInt(data, off, val);
    }

    /** {@inheritDoc} */
    @Override protected void writeLongFast(long val) {
        long off = GridUnsafe.BYTE_ARR_OFF + pos;

        if (BIG_ENDIAN)
            GridUnsafe.putLongLE(data, off, val);
        else
            GridUnsafe.putLong(data, off, val);
    }

    /** {@inheritDoc} */
    @Override public void unsafeWriteByte(byte val) {
        GridUnsafe.putByte(data, GridUnsafe.BYTE_ARR_OFF + pos++, val);
    }

    /** {@inheritDoc} */
    @Override public void unsafeWriteShort(short val) {
        long off = GridUnsafe.BYTE_ARR_OFF + pos;

        if (BIG_ENDIAN)
            GridUnsafe.putShortLE(data, off, val);
        else
            GridUnsafe.putShort(data, off, val);

        shift(2);
    }

    /** {@inheritDoc} */
    @Override public void unsafeWriteShort(int pos, short val) {
        // Positional overload: writes at the given offset without advancing the stream position.
        long off = GridUnsafe.BYTE_ARR_OFF + pos;

        if (BIG_ENDIAN)
            GridUnsafe.putShortLE(data, off, val);
        else
            GridUnsafe.putShort(data, off, val);
    }

    /** {@inheritDoc} */
    @Override public void unsafeWriteChar(char val) {
        long off = GridUnsafe.BYTE_ARR_OFF + pos;

        if (BIG_ENDIAN)
            GridUnsafe.putCharLE(data, off, val);
        else
            GridUnsafe.putChar(data, off, val);

        shift(2);
    }

    /** {@inheritDoc} */
    @Override public void unsafeWriteInt(int val) {
        long off = GridUnsafe.BYTE_ARR_OFF + pos;

        if (BIG_ENDIAN)
            GridUnsafe.putIntLE(data, off, val);
        else
            GridUnsafe.putInt(data, off, val);

        shift(4);
    }

    /** {@inheritDoc} */
    @Override public void unsafeWriteInt(int pos, int val) {
        // Positional overload: writes at the given offset without advancing the stream position.
        long off = GridUnsafe.BYTE_ARR_OFF + pos;

        if (BIG_ENDIAN)
            GridUnsafe.putIntLE(data, off, val);
        else
            GridUnsafe.putInt(data, off, val);
    }

    /** {@inheritDoc} */
    @Override public void unsafeWriteLong(long val) {
        long off = GridUnsafe.BYTE_ARR_OFF + pos;

        if (BIG_ENDIAN)
            GridUnsafe.putLongLE(data, off, val);
        else
            GridUnsafe.putLong(data, off, val);

        shift(8);
    }

    /** {@inheritDoc} */
    @Override public int capacity() {
        return data.length;
    }
}
| apache-2.0 |
qixiaobo/glu | orchestration/org.linkedin.glu.orchestration-engine/src/main/java/org/linkedin/glu/orchestration/engine/delta/ValueDelta.java | 768 | /*
* Copyright (c) 2011 Yan Pujante
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package org.linkedin.glu.orchestration.engine.delta;
/**
* @author yan@pongasoft.com
*/
public interface ValueDelta<T>
{
  /**
   * @return the expected (desired) value of this delta
   */
  T getExpectedValue();
  /**
   * @return the value currently observed for this delta
   */
  T getCurrentValue();
} | apache-2.0 |
ankitsinghal/phoenix | phoenix-core/src/test/java/org/apache/phoenix/memory/MemoryManagerTest.java | 7482 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.phoenix.memory;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import static org.mockito.Mockito.spy;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.atomic.AtomicInteger;
import org.apache.phoenix.SystemExitRule;
import org.apache.phoenix.coprocessor.GroupedAggregateRegionObserver;
import org.apache.phoenix.memory.MemoryManager.MemoryChunk;
import org.junit.ClassRule;
import org.junit.Test;
/**
 * Tests for {@code GlobalMemoryManager} and {@code ChildMemoryManager}: global and
 * per-child allocation limits, trimming of flexible allocations, and release of
 * memory when chunks are closed.
 *
 * TODO: use our own time keeper so these tests don't flap
 *
 * @since 0.1
 */
public class MemoryManagerTest {
    @ClassRule
    public static final SystemExitRule SYSTEM_EXIT_RULE = new SystemExitRule();
    /**
     * A single allocation larger than the global limit must fail outright, and once
     * two children have exhausted the global pool, further allocations must fail
     * until chunks are closed.
     */
    @Test
    public void testOverGlobalMemoryLimit() throws Exception {
        GlobalMemoryManager gmm = new GlobalMemoryManager(250);
        try {
            // 300 > 250 global limit: must be rejected.
            gmm.allocate(300);
            fail();
        } catch (InsufficientMemoryException e) { // expected
        }
        ChildMemoryManager rmm1 = new ChildMemoryManager(gmm,100);
        ChildMemoryManager rmm2 = new ChildMemoryManager(gmm,100);
        MemoryChunk c1 = rmm1.allocate(100);
        MemoryChunk c2 = rmm2.allocate(100);
        try {
            // rmm2 is already at its 100-byte cap (and only 50 remain globally): must fail.
            rmm2.allocate(100);
            fail();
        } catch (InsufficientMemoryException e) { // expected
        }
        c1.close();
        c2.close();
        // Closing the chunks must return the memory to the child manager.
        assertTrue(rmm1.getAvailableMemory() == rmm1.getMaxMemory());
    }
    /**
     * A flexible (min, preferred) allocation is trimmed down to the child's cap, and
     * closing chunks restores both child and global accounting to their maximums.
     */
    @Test
    public void testChildDecreaseAllocation() throws Exception {
        MemoryManager gmm = spy(new GlobalMemoryManager(100));
        ChildMemoryManager rmm1 = new ChildMemoryManager(gmm,100);
        ChildMemoryManager rmm2 = new ChildMemoryManager(gmm,10);
        MemoryChunk c1 = rmm1.allocate(50);
        // Flexible request (presumably min=5, preferred=50 — see ChildMemoryManager):
        // granted size is clamped to rmm2's 10-byte cap, as asserted below.
        MemoryChunk c2 = rmm2.allocate(5,50);
        assertTrue(c2.getSize() == 10);
        c1.close();
        assertTrue(rmm1.getAvailableMemory() == rmm1.getMaxMemory());
        c2.close();
        assertTrue(rmm2.getAvailableMemory() == rmm2.getMaxMemory());
        assertTrue(gmm.getAvailableMemory() == gmm.getMaxMemory());
    }
    /**
     * Per-child caps are enforced independently of the global pool, failed
     * allocations must not leak accounted memory, and each child can reach its cap
     * again after an earlier failure.
     */
    @Test
    public void testOverChildMemoryLimit() throws Exception {
        MemoryManager gmm = new GlobalMemoryManager(100);
        ChildMemoryManager rmm1 = new ChildMemoryManager(gmm,25);
        ChildMemoryManager rmm2 = new ChildMemoryManager(gmm,25);
        ChildMemoryManager rmm3 = new ChildMemoryManager(gmm,25);
        ChildMemoryManager rmm4 = new ChildMemoryManager(gmm,35);
        MemoryChunk c1 = rmm1.allocate(20);
        MemoryChunk c2 = rmm2.allocate(20);
        try {
            // 20 + 10 would exceed rmm1's 25-byte cap.
            rmm1.allocate(10);
            fail();
        } catch (InsufficientMemoryException e) { // expected
        }
        MemoryChunk c3 = rmm3.allocate(25);
        c1.close();
        // Ensure that you can get back to max for rmn1 after failure
        MemoryChunk c4 = rmm1.allocate(10);
        MemoryChunk c5 = rmm1.allocate(15);
        MemoryChunk c6 = rmm4.allocate(25);
        try {
            // This passes % test, but fails the next total memory usage test
            rmm4.allocate(10);
            fail();
        } catch (InsufficientMemoryException e) { // expected
        }
        c2.close();
        // Tests that % test passes (confirming that the 10 above was subtracted back from request memory usage,
        // since we'd be at the max of 35% now
        MemoryChunk c7 = rmm4.allocate(10);
        try {
            rmm4.allocate(1);
            fail();
        } catch (InsufficientMemoryException e) { // expected
        }
        try {
            rmm2.allocate(25);
            fail();
        } catch (InsufficientMemoryException e) { // expected
        }
        c3.close();
        c4.close();
        c5.close();
        c6.close();
        c7.close();
        // After all chunks are closed, every child must be back at full capacity.
        assertTrue(rmm1.getAvailableMemory() == rmm1.getMaxMemory());
        assertTrue(rmm2.getAvailableMemory() == rmm2.getMaxMemory());
        assertTrue(rmm3.getAvailableMemory() == rmm3.getMaxMemory());
        assertTrue(rmm4.getAvailableMemory() == rmm4.getMaxMemory());
    }
    /**
     * Many threads allocate concurrently until the global pool is exhausted; the
     * pool must then be fully used, and fully recovered once every thread releases
     * its chunks. Latches coordinate the phases: {@code barrier} = all threads hit
     * an allocation failure, {@code signal} = go ahead and free, {@code barrier2} =
     * all threads finished freeing.
     */
    @Test
    public void testConcurrentAllocation() throws Exception {
        int THREADS = 100;
        // each thread will attempt up to 100 allocations on average.
        final GlobalMemoryManager gmm = new GlobalMemoryManager(THREADS * 1000);
        final AtomicInteger count = new AtomicInteger(0);
        final CountDownLatch barrier = new CountDownLatch(THREADS);
        final CountDownLatch barrier2 = new CountDownLatch(THREADS);
        final CountDownLatch signal = new CountDownLatch(1);
        /*
         * each thread will allocate chunks of 10 bytes, until no more memory is available.
         */
        for (int i = 0; i < THREADS; i++) {
            new Thread(new Runnable() {
                List<MemoryChunk> chunks = new ArrayList<>();
                @Override
                public void run() {
                    try {
                        while(true) {
                            Thread.sleep(1);
                            chunks.add(gmm.allocate(10));
                            count.incrementAndGet();
                        }
                    } catch (InsufficientMemoryException e) {
                        barrier.countDown();
                        // wait for the signal to go ahead
                        try {signal.await();} catch (InterruptedException ix) {}
                        for (MemoryChunk chunk : chunks) {
                            chunk.close();
                        }
                        barrier2.countDown();
                    } catch (InterruptedException ix) {} // interruption simply ends the thread
                }
            }).start();
        }
        // wait until all threads failed an allocation
        barrier.await();
        // make sure all memory was used
        assertTrue(gmm.getAvailableMemory() == 0);
        // let the threads end, and free their memory
        signal.countDown(); barrier2.await();
        // make sure all memory is freed
        assertTrue(gmm.getAvailableMemory() == gmm.getMaxMemory());
    }
    /**
     * Test for SpillableGroupByCache which is using MemoryManager to allocate chunks for GroupBy execution.
     * Verifies the size computation never goes negative (overflow) for growing inputs.
     * The loop terminates because the float product is cast back to int, which
     * saturates at Integer.MAX_VALUE (JLS 5.1.3), making the loop condition false.
     * @throws Exception
     */
    @Test
    public void testCorrectnessOfChunkAllocation() throws Exception {
        for(int i = 1000;i < Integer.MAX_VALUE;) {
            i *=1.5f;
            long result = GroupedAggregateRegionObserver.sizeOfUnorderedGroupByMap(i, 100);
            assertTrue("Size for GroupByMap is negative" , result > 0);
        }
    }
}
| apache-2.0 |
jackygurui/redisson | redisson-spring-data/redisson-spring-data-20/src/main/java/org/redisson/spring/data/connection/DistanceConvertor.java | 1164 | /**
* Copyright 2018 Nikita Koksharov
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.redisson.spring.data.connection;
import org.redisson.client.protocol.convertor.SingleConvertor;
import org.springframework.data.geo.Distance;
import org.springframework.data.geo.Metric;
/**
*
* @author Nikita Koksharov
*
*/
public class DistanceConvertor extends SingleConvertor<Distance> {
    // Metric (unit) attached to every converted distance value.
    private final Metric metric;
    /**
     * @param metric the metric to attach to converted {@link Distance} instances
     */
    public DistanceConvertor(Metric metric) {
        super();
        this.metric = metric;
    }
    /**
     * Converts a raw Redis reply into a {@link Distance} using the configured metric.
     *
     * @param obj the raw value; expected to be a {@link Double} (the cast throws
     *            {@code ClassCastException} otherwise, and unboxing a {@code null}
     *            would throw {@code NullPointerException})
     * @return the distance in the configured metric
     */
    @Override
    public Distance convert(Object obj) {
        return new Distance((Double)obj, metric);
    }
}
| apache-2.0 |
kidaa/jena | jena-arq/src/test/java/org/apache/jena/sparql/algebra/TestClassify.java | 8890 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jena.sparql.algebra;
import org.apache.jena.atlas.junit.BaseTest ;
import org.apache.jena.query.Query ;
import org.apache.jena.query.QueryFactory ;
import org.apache.jena.query.Syntax ;
import org.apache.jena.sparql.algebra.op.OpJoin ;
import org.apache.jena.sparql.algebra.op.OpLeftJoin ;
import org.apache.jena.sparql.engine.main.JoinClassifier ;
import org.apache.jena.sparql.engine.main.LeftJoinClassifier ;
import org.junit.Test ;
/**
 * Tests for {@code JoinClassifier} and {@code LeftJoinClassifier}: whether a
 * (left) join in the compiled algebra of a query pattern can be evaluated
 * linearly (streamed left-to-right) or requires a full join.
 */
public class TestClassify extends BaseTest
{
    @Test public void testClassify_Join_01()
    { classifyJ("{?s :p :o . { ?s :p :o FILTER(true) } }", true) ; }
    @Test public void testClassify_Join_02()
    { classifyJ("{?s :p :o . { ?s :p :o FILTER(?s) } }", true) ; }
    @Test public void testClassify_Join_03()
    { classifyJ("{?s :p :o . { ?s :p ?o FILTER(?o) } }", true) ; }
    @Test public void testClassify_Join_04()
    { classifyJ("{?s :p :o . { ?s :p :o FILTER(?o) } }", true) ; }
    @Test public void testClassify_Join_05()
    { classifyJ("{?s :p :o . { ?x :p :o FILTER(?s) } }", false) ; }
    @Test public void testClassify_Join_06()
    { classifyJ("{ { ?s :p :o FILTER(true) } ?s :p :o }", true) ; }
    @Test public void testClassify_Join_07()
    { classifyJ("{ { ?s :p :o FILTER(?s) } ?s :p :o }", true) ; }
    @Test public void testClassify_Join_08()
    { classifyJ("{ { ?s :p ?o FILTER(?o) } ?s :p :o }", true) ; }
    @Test public void testClassify_Join_09()
    { classifyJ("{ { ?s :p :o FILTER(?o) } ?s :p :o }", true) ; }
    // Actually, this is safe IF executed left, then streamed to right.
    @Test public void testClassify_Join_10()
    { classifyJ("{ { ?x :p :o FILTER(?s) } ?s :p :o }", true) ; }
    // Not safe: ?s
    // Other parts of RHS may restrict ?s to things that can't match the LHS.
    @Test public void testClassify_Join_11()
    { classifyJ("{?s :p :o . { OPTIONAL { ?s :p :o } } }", false) ; }
    // Not safe: ?s
    @Test public void testClassify_Join_12()
    { classifyJ("{?s :p :o . { OPTIONAL { ?s :p :o FILTER(?s) } } }", false) ; }
    @Test public void testClassify_Join_13()
    { classifyJ("{?s :p :o . { ?x :p :o OPTIONAL { :s :p :o FILTER(?x) } } }", true) ; }
    @Test public void testClassify_Join_14()
    { classifyJ("{?s :p :o . { OPTIONAL { :s :p :o FILTER(?o) } } }", true) ; }
    @Test public void testClassify_Join_15()
    { classifyJ("{?s :p :o . { OPTIONAL { ?x :p :o FILTER(?s) } } }", false) ; }
    @Test public void testClassify_Join_20()
    { classifyJ("{ {?s :p ?x } . { {} OPTIONAL { :s :p ?x } } }", false) ; }
    // Assuming left-right execution, this is safe.
    @Test public void testClassify_Join_21()
    { classifyJ("{ { {} OPTIONAL { :s :p ?x } } {?s :p ?x } }", true) ; }
    @Test public void testClassify_Join_31()
    { classifyJ("{ ?x ?y ?z {SELECT ?s { ?s ?p ?o} } }", true) ; }
    // Use of a filter variable not in from the LHS
    @Test public void testClassify_Join_32()
    { classifyJ("{ GRAPH ?g { ?x ?y ?z } { FILTER (?a) } }", true) ; }
    // Use of a filter variable from the LHS
    @Test public void testClassify_Join_33()
    { classifyJ("{ GRAPH ?g { ?x ?y ?z } { FILTER (?z) } }", false) ; }
    // Use of a filter variable from the LHS but grounded in RHS
    @Test public void testClassify_Join_34()
    { classifyJ("{ GRAPH ?g { ?x ?y ?z } { ?a ?b ?z FILTER (?z) } }", true) ; }
    // Use of a filter variable from the LHS but optional in RHS
    @Test public void testClassify_Join_35()
    { classifyJ("{ GRAPH ?g { ?x ?y ?z } { OPTIONAL{?a ?b ?z} FILTER (?z) } }", false) ; }
    @Test public void testClassify_Join_40()
    { classifyJ("{ ?x ?y ?z { ?x ?y ?z } UNION { ?x1 ?y1 ?z1 }}", true) ; }
    @Test public void testClassify_Join_41()
    { classifyJ("{ ?x ?y ?z { ?x1 ?y1 ?z1 BIND(?z+2 AS ?A) } UNION { ?x1 ?y1 ?z1 }}", false) ; }
    @Test public void testClassify_Join_42()
    { classifyJ("{ ?x ?y ?z { BIND(?z+2 AS ?A) } UNION { BIND(?z+2 AS ?B) }}", false) ; }
    @Test public void testClassify_Join_43()
    { classifyJ("{ ?x ?y ?z { LET(?A := ?z+2) } UNION { }}", false) ; }
    @Test public void testClassify_Join_44()
    { classifyJ("{ BIND(<x> AS ?typeX) { BIND(?typeX AS ?type) ?s ?p ?o FILTER(?o=?type) } }", false) ; }
    // Unsafe - deep MINUS
    // JENA-1021
    @Test public void testClassify_Join_50()
    { classifyJ("{ ?x ?y ?z { ?x1 ?y1 ?z1 MINUS { ?a ?b ?c } } UNION {} }", false) ; }
    /**
     * Test join classification.
     * @param pattern WHERE clause for the query as a string; must compile to an {@code OpJoin}
     * @param expected Whether the join should be classified as linear
     */
    private void classifyJ(String pattern, boolean expected)
    {
        String qs1 = "PREFIX : <http://example/>\n" ;
        String qs = qs1+"SELECT * "+pattern;
        Query query = QueryFactory.create(qs, Syntax.syntaxARQ) ;
        Op op = Algebra.compile(query.getQueryPattern()) ;
        if ( ! ( op instanceof OpJoin ) )
            fail("Not a join: "+pattern) ;
        // Renamed from the misleading 'nonLinear': this holds the isLinear() result.
        boolean linear = JoinClassifier.isLinear((OpJoin)op) ;
        assertEquals("Join: "+pattern, expected, linear) ;
    }
    @Test public void testClassify_LeftJoin_01()
    { classifyLJ("{ ?s ?p ?o OPTIONAL { ?s1 ?p2 ?x} }", true) ; }
    @Test public void testClassify_LeftJoin_02()
    { classifyLJ("{ ?s ?p ?o OPTIONAL { ?s1 ?p2 ?o3 OPTIONAL { ?s1 ?p2 ?x} } }", true) ; }
    @Test public void testClassify_LeftJoin_03()
    { classifyLJ("{ ?s ?p ?x OPTIONAL { ?s1 ?p2 ?o3 OPTIONAL { ?s1 :p ?o3} } }", true) ; }
    @Test public void testClassify_LeftJoin_04()
    { classifyLJ("{ ?s ?p ?x OPTIONAL { ?s1 ?p2 ?o3 OPTIONAL { ?s1 :p ?x} } }", false) ; }
    @Test public void testClassify_LeftJoin_05()
    { classifyLJ("{ ?s ?p ?x OPTIONAL { ?s ?p ?x OPTIONAL { ?s ?p ?x } } }", true) ; }
    @Test public void testClassify_LeftJoin_06() // Note use of {{ }}
    { classifyLJ("{ ?s ?p ?x OPTIONAL { { ?s ?p ?o FILTER(?x) } } }", false) ; }
    @Test public void testClassify_LeftJoin_07()
    { classifyLJ("{ ?s ?p ?x OPTIONAL { ?s ?p ?x1 OPTIONAL { ?s ?p ?x2 FILTER(?x) } } }", false) ; }
    // Can't linearize into a projection.
    @Test public void testClassify_LeftJoin_10()
    { classifyLJ("{ ?s ?p ?x OPTIONAL { SELECT ?s { ?s ?p ?o } } }", false) ; }
    /**
     * Can linearize with BIND present provided mentioned vars are also on RHS
     */
    @Test public void testClassify_LeftJoin_11()
    { classifyLJ("{ ?s ?p ?x OPTIONAL { ?s1 ?p2 ?x . BIND(?x AS ?test) } }", true) ; }
    /**
     * Can't linearize with BIND present if any mentioned vars are not on RHS
     */
    @Test public void testClassify_LeftJoin_12()
    { classifyLJ("{ ?s ?p ?x OPTIONAL { ?s1 ?p2 ?x . BIND(?s AS ?test) } }", false) ; }
    /**
     * Can't linearize with BIND present if any mentioned vars are not on RHS
     */
    @Test public void testClassify_LeftJoin_13()
    { classifyLJ("{ ?s ?p ?x OPTIONAL { ?s1 ?p2 ?x . BIND(CONCAT(?s, ?x) AS ?test) } }", false) ; }
    /**
     * Can't linearize with BIND present if any mentioned vars are not on RHS
     */
    @Test public void testClassify_LeftJoin_14()
    { classifyLJ("{ ?s ?p ?x OPTIONAL { ?s1 ?p2 ?x . BIND(CONCAT(?s1, ?p1, ?p2, ?x) AS ?test) } }", false) ; }
    /**
     * Can't linearize with BIND present if any mentioned vars are not fixed on RHS
     */
    @Test public void testClassify_LeftJoin_15()
    { classifyLJ("{ ?s ?p ?x OPTIONAL { BIND(?x AS ?test) OPTIONAL { ?x ?p1 ?o1 } } }", false) ; }
    /**
     * Test left join classification
     * @param pattern WHERE clause for the query as a string; must compile to an {@code OpLeftJoin}
     * @param expected Whether the join should be classified as linear
     */
    private void classifyLJ(String pattern, boolean expected)
    {
        String qs1 = "PREFIX : <http://example/>\n" ;
        String qs = qs1+"SELECT * "+pattern;
        Query query = QueryFactory.create(qs, Syntax.syntaxARQ) ;
        Op op = Algebra.compile(query.getQueryPattern()) ;
        if ( ! ( op instanceof OpLeftJoin ) )
            fail("Not a leftjoin: "+pattern) ;
        // Renamed from the misleading 'nonLinear': this holds the isLinear() result.
        boolean linear = LeftJoinClassifier.isLinear((OpLeftJoin)op) ;
        assertEquals("LeftJoin: "+pattern, expected, linear) ;
    }
}
| apache-2.0 |
khuxtable/seaglass | src/main/java/com/seaglasslookandfeel/painter/ContentPanePainter.java | 3267 | /*
* Copyright (c) 2009 Kathryn Huxtable and Kenneth Orr.
*
* This file is part of the SeaGlass Pluggable Look and Feel.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* $Id$
*/
package com.seaglasslookandfeel.painter;
import java.awt.Graphics2D;
import java.awt.Paint;
import java.awt.Shape;
import javax.swing.JComponent;
import com.seaglasslookandfeel.painter.AbstractRegionPainter.PaintContext.CacheMode;
/**
 * ContentPanePainter implementation. Fills the content pane background with a
 * vertical gradient whose colors depend on whether the owning window has focus.
 */
public class ContentPanePainter extends AbstractRegionPainter {
    /**
     * The states this painter can render: the enabled background of an unfocused
     * window, and the enabled background of a window that has focus.
     */
    public static enum Which {
        BACKGROUND_ENABLED, BACKGROUND_ENABLED_WINDOWFOCUSED,
    }
    // Gradient endpoints used when the owning window has focus.
    private TwoColors rootPaneActive = new TwoColors(decodeColor("seaGlassToolBarActiveTopT"),
                                                         decodeColor("seaGlassToolBarActiveBottomB"));
    // Gradient endpoints used when the owning window does not have focus.
    private TwoColors rootPaneInactive = new TwoColors(decodeColor("seaGlassToolBarInactiveTopT"),
                                                           decodeColor("seaGlassToolBarInactiveBottomB"));
    // The state this painter instance renders.
    private Which state;
    // Paint context; NO_CACHING so the gradient is recomputed for each paint.
    private PaintContext ctx;
    /**
     * Creates a new ContentPanePainter object.
     *
     * @param state the background state this painter renders
     */
    public ContentPanePainter(Which state) {
        super();
        this.state = state;
        this.ctx = new PaintContext(CacheMode.NO_CACHING);
    }
    /**
     * Fills the whole component area with the gradient for the configured state.
     *
     * @see com.seaglasslookandfeel.painter.AbstractRegionPainter#doPaint(java.awt.Graphics2D,
     *      javax.swing.JComponent, int, int, java.lang.Object[])
     */
    protected void doPaint(Graphics2D g, JComponent c, int width, int height, Object[] extendedCacheKeys) {
        Shape s = shapeGenerator.createRectangle(0, 0, width, height);
        g.setPaint(getRootPaneInteriorPaint(s, state));
        g.fill(s);
    }
    /**
     * @see com.seaglasslookandfeel.painter.AbstractRegionPainter#getPaintContext()
     */
    protected PaintContext getPaintContext() {
        return ctx;
    }
    /**
     * Selects the gradient color pair for the given state.
     *
     * @param type the background state being painted
     *
     * @return the active colors for a focused window, the inactive colors
     *         otherwise; {@code null} is unreachable for the two defined states
     */
    private TwoColors getRootPaneInteriorColors(Which type) {
        switch (type) {
        case BACKGROUND_ENABLED_WINDOWFOCUSED:
            return rootPaneActive;
        case BACKGROUND_ENABLED:
            return rootPaneInactive;
        }
        return null;
    }
    /**
     * Creates the vertical gradient paint used to fill the content pane interior.
     *
     * @param s    the shape being filled (defines the gradient extent)
     * @param type the background state being painted
     *
     * @return a vertical gradient between the state's two colors
     */
    public Paint getRootPaneInteriorPaint(Shape s, Which type) {
        return createVerticalGradient(s, getRootPaneInteriorColors(type));
    }
}
| apache-2.0 |
fogbeam/cas_mirror | support/cas-server-support-saml-idp/src/test/java/org/apereo/cas/support/saml/web/idp/profile/sso/SSOSamlIdPProfileCallbackHandlerControllerWithBrowserStorageTests.java | 6429 | package org.apereo.cas.support.saml.web.idp.profile.sso;
import org.apereo.cas.CasProtocolConstants;
import org.apereo.cas.support.saml.BaseSamlIdPConfigurationTests;
import org.apereo.cas.support.saml.SamlProtocolConstants;
import org.apereo.cas.support.saml.SamlUtils;
import org.apereo.cas.support.saml.authentication.SamlIdPAuthenticationContext;
import org.apereo.cas.support.saml.services.SamlRegisteredService;
import org.apereo.cas.util.CollectionUtils;
import org.apereo.cas.util.EncodingUtils;
import org.apereo.cas.web.BrowserSessionStorage;
import org.apereo.cas.web.flow.CasWebflowConstants;
import lombok.val;
import org.apache.http.HttpStatus;
import org.jasig.cas.client.authentication.AttributePrincipalImpl;
import org.jasig.cas.client.validation.AssertionImpl;
import org.jasig.cas.client.validation.TicketValidator;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.MethodOrderer;
import org.junit.jupiter.api.Tag;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.TestMethodOrder;
import org.opensaml.messaging.context.MessageContext;
import org.opensaml.saml.common.SAMLObjectBuilder;
import org.opensaml.saml.common.xml.SAMLConstants;
import org.opensaml.saml.saml2.core.AuthnRequest;
import org.opensaml.saml.saml2.core.Issuer;
import org.pac4j.core.context.JEEContext;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.boot.test.context.TestConfiguration;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Import;
import org.springframework.context.annotation.Lazy;
import org.springframework.mock.web.MockHttpServletRequest;
import org.springframework.mock.web.MockHttpServletResponse;
import org.springframework.test.context.TestPropertySource;
import java.util.UUID;
import static org.junit.jupiter.api.Assertions.*;
import static org.mockito.Mockito.*;
/**
* This is {@link SSOSamlIdPProfileCallbackHandlerControllerWithBrowserStorageTests}.
*
* @author Misagh Moayyed
* @since 6.2.0
*/
@Import(SSOSamlIdPProfileCallbackHandlerControllerWithBrowserStorageTests.SamlIdPTestConfiguration.class)
@Tag("SAML")
@TestMethodOrder(MethodOrderer.OrderAnnotation.class)
@TestPropertySource(properties = {
"cas.authn.saml-idp.core.session-storage-type=BROWSER_SESSION_STORAGE",
"cas.authn.saml-idp.metadata.file-system.location=file:src/test/resources/metadata"
})
public class SSOSamlIdPProfileCallbackHandlerControllerWithBrowserStorageTests extends BaseSamlIdPConfigurationTests {
    @Autowired
    @Qualifier("ssoPostProfileCallbackHandlerController")
    private SSOSamlIdPProfileCallbackHandlerController controller;
    // SAML service registered before each test and referenced when building AuthnRequests.
    private SamlRegisteredService samlRegisteredService;
    @BeforeEach
    public void beforeEach() {
        // Register a plain (no signing/encryption flags) SAML SP the controller can resolve.
        samlRegisteredService = getSamlRegisteredServiceFor(false, false,
            false, "https://cassp.example.org");
        servicesManager.save(samlRegisteredService);
    }
    /**
     * With BROWSER_SESSION_STORAGE enabled, the GET callback has no server-side
     * session to read the SAML context from, so it must render the
     * session-storage "read" view that fetches the payload from the browser.
     */
    @Test
    public void verifyReadFromStorage() throws Exception {
        val request = new MockHttpServletRequest();
        val response = new MockHttpServletResponse();
        val authn = getAuthnRequest();
        authn.setProtocolBinding(SAMLConstants.SAML2_POST_BINDING_URI);
        val xml = SamlUtils.transformSamlObject(openSamlConfigBean, getAuthnRequest()).toString();
        request.getSession().setAttribute(SamlProtocolConstants.PARAMETER_SAML_REQUEST, EncodingUtils.encodeBase64(xml));
        request.getSession().setAttribute(SamlProtocolConstants.PARAMETER_SAML_RELAY_STATE, UUID.randomUUID().toString());
        request.addParameter(CasProtocolConstants.PARAMETER_TICKET, "ST-1234567890");
        val mv = controller.handleCallbackProfileRequestGet(response, request);
        assertEquals(CasWebflowConstants.VIEW_ID_SESSION_STORAGE_READ, mv.getViewName());
    }
    /**
     * Posting the browser-storage payload back to the callback endpoint must let
     * the controller restore the SAML authentication context, validate the service
     * ticket (mocked to always succeed below), and complete with HTTP 200.
     */
    @Test
    public void verifyResumeFromStorage() throws Exception {
        val request = new MockHttpServletRequest();
        val response = new MockHttpServletResponse();
        val authn = getAuthnRequest();
        authn.setProtocolBinding(SAMLConstants.SAML2_POST_BINDING_URI);
        val xml = SamlUtils.transformSamlObject(openSamlConfigBean, getAuthnRequest()).toString();
        request.getSession().setAttribute(SamlProtocolConstants.PARAMETER_SAML_REQUEST, EncodingUtils.encodeBase64(xml));
        request.getSession().setAttribute(SamlProtocolConstants.PARAMETER_SAML_RELAY_STATE, UUID.randomUUID().toString());
        val context = new MessageContext();
        context.setMessage(getAuthnRequest());
        request.getSession().setAttribute(MessageContext.class.getName(), SamlIdPAuthenticationContext.from(context).encode());
        request.addParameter(CasProtocolConstants.PARAMETER_TICKET, "ST-1234567890");
        // Extract the payload that would normally live in the browser's session storage.
        val payload = samlIdPDistributedSessionStore.getTrackableSession(new JEEContext(request, response))
            .map(BrowserSessionStorage.class::cast)
            .map(BrowserSessionStorage::getPayload)
            .orElseThrow();
        request.addParameter(BrowserSessionStorage.KEY_SESSION_STORAGE, payload);
        val mv = controller.handleCallbackProfileRequestPost(response, request);
        assertNull(mv);
        assertEquals(HttpStatus.SC_OK, response.getStatus());
    }
    @TestConfiguration
    @Lazy(false)
    public static class SamlIdPTestConfiguration {
        // Stub out CAS ticket validation so the callback always authenticates "casuser".
        @Bean
        public TicketValidator samlIdPTicketValidator() throws Exception {
            val validator = mock(TicketValidator.class);
            val principal = new AttributePrincipalImpl("casuser", CollectionUtils.wrap("cn", "cas"));
            when(validator.validate(anyString(), anyString())).thenReturn(new AssertionImpl(principal));
            return validator;
        }
    }
    // Builds a minimal AuthnRequest whose issuer is the registered service id.
    private AuthnRequest getAuthnRequest() {
        var builder = (SAMLObjectBuilder) openSamlConfigBean.getBuilderFactory()
            .getBuilder(AuthnRequest.DEFAULT_ELEMENT_NAME);
        var authnRequest = (AuthnRequest) builder.buildObject();
        builder = (SAMLObjectBuilder) openSamlConfigBean.getBuilderFactory()
            .getBuilder(Issuer.DEFAULT_ELEMENT_NAME);
        val issuer = (Issuer) builder.buildObject();
        issuer.setValue(samlRegisteredService.getServiceId());
        authnRequest.setIssuer(issuer);
        return authnRequest;
    }
}
| apache-2.0 |
ecarm002/incubator-asterixdb | asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/job/listener/JobEventListenerFactory.java | 4428 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.asterix.runtime.job.listener;
import java.nio.charset.StandardCharsets;

import org.apache.asterix.common.api.IJobEventListenerFactory;
import org.apache.asterix.common.api.INcApplicationContext;
import org.apache.asterix.common.exceptions.ACIDException;
import org.apache.asterix.common.transactions.ITransactionContext;
import org.apache.asterix.common.transactions.ITransactionManager;
import org.apache.asterix.common.transactions.ITransactionManager.AtomicityLevel;
import org.apache.asterix.common.transactions.TransactionOptions;
import org.apache.asterix.common.transactions.TxnId;
import org.apache.hyracks.api.context.IHyracksJobletContext;
import org.apache.hyracks.api.job.IJobletEventListener;
import org.apache.hyracks.api.job.IJobletEventListenerFactory;
import org.apache.hyracks.api.job.JobParameterByteStore;
import org.apache.hyracks.api.job.JobStatus;
/**
 * An {@link IJobletEventListenerFactory} that brackets joblet execution in an
 * Asterix transaction: the transaction is begun when the joblet starts and is
 * committed on success or aborted on failure when the joblet finishes.
 */
public class JobEventListenerFactory implements IJobEventListenerFactory {
    private static final long serialVersionUID = 1L;
    // Transaction id used by the created listeners; may be overridden per invocation
    // through the job parameter below.
    private TxnId txnId;
    // Whether the transaction performs writes (propagated to the transaction context).
    private final boolean transactionalWrite;
    // To enable new Asterix TxnId for separate deployed job spec invocations.
    // Encoded with an explicit charset so the key bytes never depend on the
    // platform default encoding.
    private static final byte[] TRANSACTION_ID_PARAMETER_NAME =
            "TxnIdParameter".getBytes(StandardCharsets.UTF_8);

    /**
     * @param txnId the default transaction id for listeners created by this factory
     * @param transactionalWrite whether the transaction performs writes
     */
    public JobEventListenerFactory(TxnId txnId, boolean transactionalWrite) {
        this.txnId = txnId;
        this.transactionalWrite = transactionalWrite;
    }

    @Override
    public TxnId getTxnId(int datasetId) {
        return txnId;
    }

    @Override
    public IJobletEventListenerFactory copyFactory() {
        return new JobEventListenerFactory(txnId, transactionalWrite);
    }

    @Override
    public void updateListenerJobParameters(JobParameterByteStore jobParameterByteStore) {
        // A non-empty "TxnIdParameter" job parameter overrides the factory's transaction id.
        String txnIdParameter = new String(
                jobParameterByteStore.getParameterValue(TRANSACTION_ID_PARAMETER_NAME, 0,
                        TRANSACTION_ID_PARAMETER_NAME.length),
                StandardCharsets.UTF_8);
        if (!txnIdParameter.isEmpty()) {
            this.txnId = new TxnId(Integer.parseInt(txnIdParameter));
        }
    }

    @Override
    public IJobletEventListener createListener(final IHyracksJobletContext jobletContext) {
        return new IJobletEventListener() {
            /**
             * Commits the transaction on success, aborts it on failure.
             */
            @Override
            public void jobletFinish(JobStatus jobStatus) {
                try {
                    ITransactionManager txnManager =
                            ((INcApplicationContext) jobletContext.getServiceContext().getApplicationContext())
                                    .getTransactionSubsystem().getTransactionManager();
                    ITransactionContext txnContext = txnManager.getTransactionContext(txnId);
                    txnContext.setWriteTxn(transactionalWrite);
                    if (jobStatus != JobStatus.FAILURE) {
                        txnManager.commitTransaction(txnId);
                    } else {
                        txnManager.abortTransaction(txnId);
                    }
                } catch (ACIDException e) {
                    // NOTE(review): preserved for compatibility, but wrapping a recoverable
                    // ACIDException in java.lang.Error is questionable — consider RuntimeException.
                    throw new Error(e);
                }
            }

            /**
             * Begins an entity-level transaction for this joblet.
             */
            @Override
            public void jobletStart() {
                try {
                    TransactionOptions options = new TransactionOptions(AtomicityLevel.ENTITY_LEVEL);
                    ((INcApplicationContext) jobletContext.getServiceContext().getApplicationContext())
                            .getTransactionSubsystem().getTransactionManager().beginTransaction(txnId, options);
                } catch (ACIDException e) {
                    throw new Error(e);
                }
            }
        };
    }
}
| apache-2.0 |
krosenvold/AxonFramework | axon-server-connector/src/test/java/org/axonframework/axonserver/connector/heartbeat/HeartbeatMonitorTest.java | 2054 | package org.axonframework.axonserver.connector.heartbeat;
import org.axonframework.axonserver.connector.utils.FakeScheduler;
import org.junit.jupiter.api.*;
import java.util.concurrent.atomic.AtomicBoolean;
import static org.junit.jupiter.api.Assertions.*;
/**
* Unit tests for {@link HeartbeatMonitor}
*
* @author Sara Pellegrini
*/
class HeartbeatMonitorTest {
    /**
     * While the connection checker reports the connection alive, the monitor must
     * never fire the reconnect callback, and shutting it down must stop further
     * scheduled executions.
     */
    @Test
    void testConnectionAlive() {
        FakeScheduler fakeScheduler = new FakeScheduler();
        AtomicBoolean reconnect = new AtomicBoolean(false);
        // Trailing args are timing values (1000, 100), presumably milliseconds;
        // NOTE(review): confirm against HeartbeatMonitor which is the timeout
        // and which is the check interval.
        HeartbeatMonitor monitor = new HeartbeatMonitor(() -> reconnect.set(true),
                                                        () -> true,
                                                        fakeScheduler,
                                                        1000,
                                                        100);
        monitor.start();
        fakeScheduler.timeElapses(1450);
        // Connection is alive, so no reconnect must have been requested.
        assertFalse(reconnect.get());
        monitor.shutdown();
        // After shutdown, more elapsed time must not trigger additional executions.
        fakeScheduler.timeElapses(500);
        assertEquals(5, fakeScheduler.performedExecutionsCount());
    }
    /**
     * When the connection checker reports the connection down, the monitor must
     * request a reconnect and keep executing until it is shut down.
     */
    @Test
    void testDisconnection() {
        FakeScheduler fakeScheduler = new FakeScheduler();
        AtomicBoolean reconnect = new AtomicBoolean(false);
        HeartbeatMonitor monitor = new HeartbeatMonitor(() -> reconnect.set(true),
                                                        () -> false,
                                                        fakeScheduler,
                                                        1000,
                                                        100);
        monitor.start();
        fakeScheduler.timeElapses(1450);
        // Connection is down, so a reconnect must have been requested.
        assertTrue(reconnect.get());
        assertEquals(5, fakeScheduler.performedExecutionsCount());
        fakeScheduler.timeElapses(500);
        assertEquals(10, fakeScheduler.performedExecutionsCount());
        monitor.shutdown();
        // No further executions once the monitor is shut down.
        fakeScheduler.timeElapses(500);
        assertEquals(10, fakeScheduler.performedExecutionsCount());
    }
} | apache-2.0 |
cjh1/gerrit | gerrit-sshd/src/main/java/com/google/gerrit/sshd/commands/ReviewCommand.java | 14120 | // Copyright (C) 2009 The Android Open Source Project
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.gerrit.sshd.commands;
import com.google.gerrit.common.ChangeHookRunner;
import com.google.gerrit.common.data.ApprovalType;
import com.google.gerrit.common.data.ApprovalTypes;
import com.google.gerrit.common.data.SubmitRecord;
import com.google.gerrit.reviewdb.ApprovalCategory;
import com.google.gerrit.reviewdb.ApprovalCategoryValue;
import com.google.gerrit.reviewdb.Branch;
import com.google.gerrit.reviewdb.Change;
import com.google.gerrit.reviewdb.PatchSet;
import com.google.gerrit.reviewdb.PatchSetApproval;
import com.google.gerrit.reviewdb.RevId;
import com.google.gerrit.reviewdb.ReviewDb;
import com.google.gerrit.server.ChangeUtil;
import com.google.gerrit.server.IdentifiedUser;
import com.google.gerrit.server.git.MergeOp;
import com.google.gerrit.server.git.MergeQueue;
import com.google.gerrit.server.mail.AbandonedSender;
import com.google.gerrit.server.mail.EmailException;
import com.google.gerrit.server.mail.RestoredSender;
import com.google.gerrit.server.patch.PublishComments;
import com.google.gerrit.server.project.ChangeControl;
import com.google.gerrit.server.project.InvalidChangeOperationException;
import com.google.gerrit.server.project.NoSuchChangeException;
import com.google.gerrit.server.project.ProjectControl;
import com.google.gerrit.server.workflow.FunctionState;
import com.google.gerrit.sshd.BaseCommand;
import com.google.gerrit.util.cli.CmdLineParser;
import com.google.gwtorm.client.OrmException;
import com.google.gwtorm.client.ResultSet;
import com.google.inject.Inject;
import org.apache.sshd.server.Environment;
import org.kohsuke.args4j.Argument;
import org.kohsuke.args4j.Option;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.concurrent.TimeUnit;
/**
 * SSH "review" command: publishes a cover message and approval scores on one
 * or more patch sets, and optionally abandons, restores or submits the
 * affected changes.
 */
public class ReviewCommand extends BaseCommand {
  private static final Logger log =
      LoggerFactory.getLogger(ReviewCommand.class);

  @Override
  protected final CmdLineParser newCmdLineParser() {
    // Register one dynamically generated score option per configured
    // approval category (e.g. --code-review) on top of the static options.
    final CmdLineParser parser = super.newCmdLineParser();
    for (ApproveOption c : optionList) {
      parser.addOption(c, c);
    }
    return parser;
  }

  /** Patch sets named on the command line; filled by {@link #addPatchSetId}. */
  private final Set<PatchSet.Id> patchSetIds = new HashSet<PatchSet.Id>();

  @Argument(index = 0, required = true, multiValued = true, metaVar = "{COMMIT | CHANGE,PATCHSET}", usage = "patch to review")
  void addPatchSetId(final String token) {
    try {
      patchSetIds.addAll(parsePatchSetId(token));
    } catch (UnloggedFailure e) {
      // args4j argument setters may only throw runtime exceptions.
      throw new IllegalArgumentException(e.getMessage(), e);
    } catch (OrmException e) {
      throw new IllegalArgumentException("database error", e);
    }
  }

  @Option(name = "--project", aliases = "-p", usage = "project containing the patch set")
  private ProjectControl projectControl;

  @Option(name = "--message", aliases = "-m", usage = "cover message to publish on change", metaVar = "MESSAGE")
  private String changeComment;

  @Option(name = "--abandon", usage = "abandon the patch set")
  private boolean abandonChange;

  @Option(name = "--restore", usage = "restore an abandoned patch set")
  private boolean restoreChange;

  @Option(name = "--submit", aliases = "-s", usage = "submit the patch set")
  private boolean submitChange;

  @Inject
  private ReviewDb db;

  @Inject
  private IdentifiedUser currentUser;

  @Inject
  private MergeQueue merger;

  @Inject
  private MergeOp.Factory opFactory;

  @Inject
  private ApprovalTypes approvalTypes;

  @Inject
  private ChangeControl.Factory changeControlFactory;

  @Inject
  private AbandonedSender.Factory abandonedSenderFactory;

  @Inject
  private FunctionState.Factory functionStateFactory;

  @Inject
  private PublishComments.Factory publishCommentsFactory;

  @Inject
  private RestoredSender.Factory restoredSenderFactory;

  @Inject
  private ChangeHookRunner hooks;

  /** Score options generated from the configured approval categories. */
  private List<ApproveOption> optionList;

  /** Patch sets that passed the submit checks and await merging. */
  private final Set<PatchSet.Id> toSubmit = new HashSet<PatchSet.Id>();

  @Override
  public final void start(final Environment env) {
    startThread(new CommandRunnable() {
      @Override
      public void run() throws Failure {
        initOptionList();
        parseCommandLine();

        if (abandonChange) {
          if (restoreChange) {
            throw error("abandon and restore actions are mutually exclusive");
          }
          if (submitChange) {
            throw error("abandon and submit actions are mutually exclusive");
          }
        }

        // Review each requested patch set independently so one failure does
        // not prevent the remaining patch sets from being processed.
        boolean ok = true;
        for (final PatchSet.Id patchSetId : patchSetIds) {
          try {
            approveOne(patchSetId);
          } catch (UnloggedFailure e) {
            ok = false;
            writeError("error: " + e.getMessage() + "\n");
          } catch (Exception e) {
            ok = false;
            writeError("fatal: internal server error while approving "
                + patchSetId + "\n");
            log.error("internal error while approving " + patchSetId, e);
          }
        }
        if (!ok) {
          throw new UnloggedFailure(1, "one or more approvals failed;"
              + " review output above");
        }

        if (!toSubmit.isEmpty()) {
          // Collect the destination branches first, then trigger a single
          // merge per branch instead of one merge per submitted patch set.
          final Set<Branch.NameKey> toMerge = new HashSet<Branch.NameKey>();
          try {
            for (PatchSet.Id patchSetId : toSubmit) {
              ChangeUtil.submit(patchSetId, currentUser, db, opFactory,
                  new MergeQueue() {
                    @Override
                    public void merge(MergeOp.Factory mof, Branch.NameKey branch) {
                      toMerge.add(branch);
                    }

                    @Override
                    public void schedule(Branch.NameKey branch) {
                      toMerge.add(branch);
                    }

                    @Override
                    public void recheckAfter(Branch.NameKey branch, long delay,
                        TimeUnit delayUnit) {
                      toMerge.add(branch);
                    }
                  });
            }
            for (Branch.NameKey branch : toMerge) {
              merger.merge(opFactory, branch);
            }
          } catch (OrmException updateError) {
            throw new Failure(1, "one or more submits failed", updateError);
          }
        }
      }
    });
  }

  /**
   * Publishes comment and scores on a single patch set and applies the
   * requested abandon/restore/submit action.
   *
   * @param patchSetId the patch set to review.
   * @throws UnloggedFailure (via {@link #error}) for user-correctable
   *         problems such as missing permissions or unsatisfied labels.
   */
  private void approveOne(final PatchSet.Id patchSetId) throws
      NoSuchChangeException, OrmException, EmailException, Failure {

    final Change.Id changeId = patchSetId.getParentKey();
    ChangeControl changeControl = changeControlFactory.validateFor(changeId);

    if (changeComment == null) {
      changeComment = "";
    }

    // Validate every requested score before publishing any of them.
    Set<ApprovalCategoryValue.Id> aps = new HashSet<ApprovalCategoryValue.Id>();
    for (ApproveOption ao : optionList) {
      Short v = ao.value();
      if (v != null) {
        assertScoreIsAllowed(patchSetId, changeControl, ao, v);
        aps.add(new ApprovalCategoryValue.Id(ao.getCategoryId(), v));
      }
    }

    try {
      publishCommentsFactory.create(patchSetId, changeComment, aps).call();

      if (abandonChange) {
        if (changeControl.canAbandon()) {
          ChangeUtil.abandon(patchSetId, currentUser, changeComment, db,
              abandonedSenderFactory, hooks);
        } else {
          throw error("Not permitted to abandon change");
        }
      }

      if (restoreChange) {
        if (changeControl.canRestore()) {
          ChangeUtil.restore(patchSetId, currentUser, changeComment, db,
              restoredSenderFactory, hooks);
        } else {
          throw error("Not permitted to restore change");
        }
        if (submitChange) {
          // Restoring altered the change state; reload before submitting.
          changeControl = changeControlFactory.validateFor(changeId);
        }
      }
    } catch (InvalidChangeOperationException e) {
      throw error(e.getMessage());
    }

    if (submitChange) {
      List<SubmitRecord> result = changeControl.canSubmit(db, patchSetId);
      if (result.isEmpty()) {
        throw new Failure(1, "ChangeControl.canSubmit returned empty list");
      }
      switch (result.get(0).status) {
        case OK:
          if (changeControl.getRefControl().canSubmit()) {
            toSubmit.add(patchSetId);
          } else {
            throw error("change " + changeId + ": you do not have submit permission");
          }
          break;

        case NOT_READY: {
          // Report every unsatisfied label, one per line.
          StringBuilder msg = new StringBuilder();
          for (SubmitRecord.Label lbl : result.get(0).labels) {
            switch (lbl.status) {
              case OK:
                break;
              case REJECT:
                if (msg.length() > 0) msg.append("\n");
                msg.append("change " + changeId + ": blocked by " + lbl.label);
                break;
              case NEED:
                if (msg.length() > 0) msg.append("\n");
                msg.append("change " + changeId + ": needs " + lbl.label);
                break;
              case IMPOSSIBLE:
                if (msg.length() > 0) msg.append("\n");
                msg.append("change " + changeId + ": needs " + lbl.label
                    + " (check project access)");
                break;
              default:
                throw new Failure(1, "Unsupported label status " + lbl.status);
            }
          }
          throw error(msg.toString());
        }

        case CLOSED:
          throw error("change " + changeId + " is closed");

        case RULE_ERROR:
          if (result.get(0).errorMessage != null) {
            throw error("change " + changeId + ": " + result.get(0).errorMessage);
          } else {
            throw error("change " + changeId + ": internal rule error");
          }

        default:
          throw new Failure(1, "Unsupported status " + result.get(0).status);
      }
    }
  }

  /**
   * Resolves one command line token to patch set ids. Accepts either an
   * (abbreviated) commit SHA-1 or the legacy "changeId,patchSetId" form.
   *
   * @return the single matching patch set id, as a set.
   * @throws UnloggedFailure if the token is malformed, unknown or ambiguous.
   */
  private Set<PatchSet.Id> parsePatchSetId(final String patchIdentity)
      throws UnloggedFailure, OrmException {
    // By commit?
    //
    if (patchIdentity.matches("^([0-9a-fA-F]{4," + RevId.LEN + "})$")) {
      final RevId id = new RevId(patchIdentity);
      final ResultSet<PatchSet> patches;
      if (id.isComplete()) {
        patches = db.patchSets().byRevision(id);
      } else {
        // Abbreviated SHA-1: scan the possible full revisions.
        patches = db.patchSets().byRevisionRange(id, id.max());
      }
      final Set<PatchSet.Id> matches = new HashSet<PatchSet.Id>();
      for (final PatchSet ps : patches) {
        final Change change = db.changes().get(ps.getId().getParentKey());
        if (inProject(change)) {
          matches.add(ps.getId());
        }
      }
      switch (matches.size()) {
        case 1:
          return matches;
        case 0:
          throw error("\"" + patchIdentity + "\" no such patch set");
        default:
          throw error("\"" + patchIdentity + "\" matches multiple patch sets");
      }
    }

    // By older style change,patchset?
    //
    if (patchIdentity.matches("^[1-9][0-9]*,[1-9][0-9]*$")) {
      final PatchSet.Id patchSetId;
      try {
        patchSetId = PatchSet.Id.parse(patchIdentity);
      } catch (IllegalArgumentException e) {
        throw error("\"" + patchIdentity + "\" is not a valid patch set");
      }
      if (db.patchSets().get(patchSetId) == null) {
        throw error("\"" + patchIdentity + "\" no such patch set");
      }
      if (projectControl != null) {
        final Change change = db.changes().get(patchSetId.getParentKey());
        if (!inProject(change)) {
          throw error("change " + change.getId() + " not in project "
              + projectControl.getProject().getName());
        }
      }
      return Collections.singleton(patchSetId);
    }

    throw error("\"" + patchIdentity + "\" is not a valid patch set");
  }

  /**
   * @return true if the change belongs to the project given with --project,
   *         or if no --project option was supplied at all.
   */
  private boolean inProject(final Change change) {
    if (projectControl == null) {
      // No --project option, so they want every project.
      return true;
    }
    return projectControl.getProject().getNameKey().equals(change.getProject());
  }

  /**
   * Verifies the caller may apply score {@code v} in category {@code ao} by
   * normalizing a prospective approval through the category's workflow
   * function and checking the value was not clamped.
   */
  private void assertScoreIsAllowed(final PatchSet.Id patchSetId,
      final ChangeControl changeControl, ApproveOption ao, Short v)
      throws UnloggedFailure {
    final PatchSetApproval psa =
        new PatchSetApproval(new PatchSetApproval.Key(patchSetId, currentUser
            .getAccountId(), ao.getCategoryId()), v);
    final FunctionState fs =
        functionStateFactory.create(changeControl, patchSetId,
            Collections.<PatchSetApproval> emptyList());
    psa.setValue(v);
    fs.normalize(approvalTypes.byId(psa.getCategoryId()), psa);
    if (v != psa.getValue()) {
      // The function changed the requested value, so the caller is not
      // allowed to apply it.
      throw error(ao.name() + "=" + ao.value() + " not permitted");
    }
  }

  /** Builds one command line score option per configured approval category. */
  private void initOptionList() {
    optionList = new ArrayList<ApproveOption>();

    for (ApprovalType type : approvalTypes.getApprovalTypes()) {
      final ApprovalCategory category = type.getCategory();
      // StringBuilder instead of repeated String concatenation in a loop.
      final StringBuilder usage = new StringBuilder();
      usage.append("score for ").append(category.getName()).append("\n");
      for (ApprovalCategoryValue v : type.getValues()) {
        usage.append(v.format()).append("\n");
      }

      final String name =
          "--" + category.getName().toLowerCase().replace(' ', '-');
      optionList.add(new ApproveOption(name, usage.toString(), type));
    }
  }

  /** Writes a message to the client's error stream. */
  private void writeError(final String msg) {
    try {
      err.write(msg.getBytes(ENC));
    } catch (IOException e) {
      // Previously swallowed silently; log so a broken client stream is
      // at least diagnosable on the server side.
      log.warn("Cannot write error message to client", e);
    }
  }

  /** @return a failure whose message is shown to the user but not logged. */
  private static UnloggedFailure error(final String msg) {
    return new UnloggedFailure(1, msg);
  }
}
| apache-2.0 |
paweld2/rest-assured | modules/spring-mock-mvc/src/test/java/io/restassured/module/mockmvc/http/HeaderController.java | 1422 | /*
* Copyright 2016 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.restassured.module.mockmvc.http;
import org.springframework.web.bind.annotation.RequestHeader;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.ResponseBody;
import static org.springframework.http.MediaType.APPLICATION_JSON_VALUE;
import static org.springframework.web.bind.annotation.RequestMethod.GET;
/**
 * Test controller that echoes the received {@code headerName} and
 * {@code User-Agent} request headers back as a small JSON object.
 */
public class HeaderController {

    @RequestMapping(value = "/header", method = GET, produces = APPLICATION_JSON_VALUE)
    public @ResponseBody String header(@RequestHeader("headerName") String headerValue,
            @RequestHeader(value = "User-Agent", required = false) String userAgent) {
        // Same JSON shape as before, built from a format template instead of
        // inline concatenation; a missing User-Agent renders as "null".
        return String.format("{\"headerName\" : \"%s\", \"user-agent\" : \"%s\"}", headerValue, userAgent);
    }
}
| apache-2.0 |
adessaigne/camel | components/camel-cmis/src/test/java/org/apache/camel/component/cmis/CMISConsumerTest.java | 3886 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.cmis;
import java.io.UnsupportedEncodingException;
import java.util.List;
import org.apache.camel.Consumer;
import org.apache.camel.Endpoint;
import org.apache.camel.EndpointInject;
import org.apache.camel.Exchange;
import org.apache.camel.Processor;
import org.apache.camel.component.mock.MockEndpoint;
import org.apache.chemistry.opencmis.client.api.Folder;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import static org.junit.jupiter.api.Assertions.assertTrue;
public class CMISConsumerTest extends CMISTestSupport {

    // Collects every exchange delivered by the CMIS consumers under test.
    @EndpointInject("mock:result")
    protected MockEndpoint resultEndpoint;

    /**
     * A tree-based consumer must deliver the repository nodes top-down:
     * root folder first, then each sub folder, then the two documents
     * stored in the deepest folder (see the fixture sketch below).
     */
    @Test
    void getAllContentFromServerOrderedFromRootToLeaves() throws Exception {
        resultEndpoint.expectedMessageCount(5);
        Consumer treeBasedConsumer = createConsumerFor(getUrl() + "?pageSize=50");
        treeBasedConsumer.start();
        resultEndpoint.assertIsSatisfied();
        treeBasedConsumer.stop();
        List<Exchange> exchanges = resultEndpoint.getExchanges();
        // Folders arrive in tree order; the two documents arrive last.
        assertTrue(getNodeNameForIndex(exchanges, 0).equals("RootFolder"));
        assertTrue(getNodeNameForIndex(exchanges, 1).equals("Folder1"));
        assertTrue(getNodeNameForIndex(exchanges, 2).equals("Folder2"));
        assertTrue(getNodeNameForIndex(exchanges, 3).contains(".txt"));
        assertTrue(getNodeNameForIndex(exchanges, 4).contains(".txt"));
    }

    /** A CMIS-query consumer must deliver exactly the two stored documents. */
    @Test
    void consumeDocumentsWithQuery() throws Exception {
        resultEndpoint.expectedMessageCount(2);
        Consumer queryBasedConsumer = createConsumerFor(
                getUrl() + "?query=SELECT * FROM cmis:document");
        queryBasedConsumer.start();
        resultEndpoint.assertIsSatisfied();
        queryBasedConsumer.stop();
    }

    // Builds a consumer for the given CMIS endpoint path that forwards each
    // received exchange to the mock result endpoint.
    private Consumer createConsumerFor(String path) throws Exception {
        Endpoint endpoint = context.getEndpoint("cmis://" + path);
        return endpoint.createConsumer(new Processor() {
            public void process(Exchange exchange) {
                template.send("mock:result", exchange);
            }
        });
    }

    // Reads the CMIS node name recorded in the "cmis:name" message header.
    private String getNodeNameForIndex(List<Exchange> exchanges, int index) {
        return exchanges.get(index).getIn().getHeader("cmis:name", String.class);
    }

    // Creates the fixture tree the tests above assert against.
    private void populateRepositoryRootFolderWithTwoFoldersAndTwoDocuments()
            throws UnsupportedEncodingException {
        Folder folder1 = createFolderWithName("Folder1");
        Folder folder2 = createChildFolderWithName(folder1, "Folder2");
        createTextDocument(folder2, "Document2.1", "2.1.txt");
        createTextDocument(folder2, "Document2.2", "2.2.txt");
        //L0 ROOT
        // |
        //L1 Folder1
        //L2 |_____Folder2
        // ||
        //L3 Doc2.1___||___Doc2.2
    }

    // Populates the repository before every test on top of the base setup.
    @Override
    @BeforeEach
    public void setUp() throws Exception {
        super.setUp();
        populateRepositoryRootFolderWithTwoFoldersAndTwoDocuments();
    }
}
| apache-2.0 |
krosenvold/AxonFramework | messaging/src/main/java/org/axonframework/messaging/annotation/PayloadParameterResolver.java | 1749 | /*
* Copyright (c) 2010-2018. Axon Framework
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.axonframework.messaging.annotation;
import org.axonframework.messaging.Message;
/**
 * {@link ParameterResolver} that injects the payload of the handled
 * {@link Message} into a handler method parameter.
 */
public class PayloadParameterResolver implements ParameterResolver {

    private final Class<?> payloadType;

    /**
     * Creates a resolver for a method parameter of the given
     * {@code payloadType}. The resolver matches a message when the message's
     * payload is assignable to that type.
     *
     * @param payloadType the declared type of the handler parameter
     */
    public PayloadParameterResolver(Class<?> payloadType) {
        this.payloadType = payloadType;
    }

    @Override
    public Object resolveParameterValue(Message message) {
        return message.getPayload();
    }

    @Override
    public boolean matches(Message message) {
        // Match only when the payload type is known and assignable to the
        // parameter type this resolver was built for.
        Class<?> actualPayloadType = message.getPayloadType();
        return actualPayloadType != null && payloadType.isAssignableFrom(actualPayloadType);
    }

    @Override
    public Class<?> supportedPayloadType() {
        return payloadType;
    }
}
| apache-2.0 |
hawtio/hawtio | platforms/hawtio-osgi-jmx/src/test/java/io/hawt/osgi/jmx/RBACDecoratorTest.java | 10264 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.hawt.osgi.jmx;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import javax.management.MalformedObjectNameException;
import javax.management.ObjectName;
import javax.management.openmbean.CompositeData;
import javax.management.openmbean.TabularData;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.karaf.management.JMXSecurityMBean;
import org.junit.Test;
import org.osgi.framework.BundleContext;
import org.osgi.framework.ServiceReference;
import org.osgi.service.cm.Configuration;
import org.osgi.service.cm.ConfigurationAdmin;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.CoreMatchers.not;
import static org.hamcrest.CoreMatchers.notNullValue;
import static org.hamcrest.CoreMatchers.sameInstance;
import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.junit.Assert.assertThat;
import static org.mockito.Matchers.anyMap;
import static org.mockito.Matchers.anyString;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
public class RBACDecoratorTest {

    private static Logger LOG = LoggerFactory.getLogger(RBACDecoratorTest.class);

    /** nameSegments() splits an ObjectName into domain/type/name values regardless of key order. */
    @Test
    public void objectNameSplitting() throws MalformedObjectNameException {
        assertThat(RBACDecorator.nameSegments(new ObjectName("a.b:type=a,name=b")).toArray(new String[3]),
                equalTo(new String[]{"a.b", "a", "b"}));
        // Swapped key order in the ObjectName must not change the result.
        assertThat(RBACDecorator.nameSegments(new ObjectName("a.b:name=b,type=a")).toArray(new String[3]),
                equalTo(new String[]{"a.b", "a", "b"}));
    }

    /** iterateDownPids() yields ACL PIDs from most specific down to the generic "jmx.acl". */
    @Test
    public void iteratingDownPids() {
        assertThat(RBACDecorator.iterateDownPids(Arrays.asList("a.b", "c", "d")).toArray(new String[4]),
                equalTo(new String[]{"jmx.acl.a.b.c.d", "jmx.acl.a.b.c", "jmx.acl.a.b", "jmx.acl"}));
    }

    /**
     * Two MBeans share a pidListKey only when the same subset of configured
     * ACL PIDs applies to both of them.
     */
    @Test
    public void objectNameKeys() throws Exception {
        // real order that is examined
        List<String> realJmxAclPids = Arrays.asList(
                "jmx.acl.org.apache.activemq.Broker.amq1.Queue.q2",
                "jmx.acl.org.apache.activemq.Broker.amq1.Queue._",
                "jmx.acl.org.apache.activemq.Broker._.Queue.q1",
                "jmx.acl.org.apache.activemq.Broker._.Queue",
                "jmx.acl.org.apache.activemq.Broker.amq2",
                "jmx.acl.org.apache.activemq.Broker._",
                "jmx.acl.org.apache.activemq._",
                "jmx.acl.org.apache.activemq",
                "jmx.acl");
        // ActiveMQ queues from broker amq1
        ObjectName o11 = new ObjectName("org.apache.activemq:type=Broker,brokerName=amq1,destinationType=Queue,destinationName=q1");
        ObjectName o12 = new ObjectName("org.apache.activemq:type=Broker,brokerName=amq1,destinationType=Queue,destinationName=q2");
        ObjectName o13 = new ObjectName("org.apache.activemq:type=Broker,brokerName=amq1,destinationType=Queue,destinationName=q3");
        String k1 = RBACDecorator.pidListKey(realJmxAclPids, o11);
        String k2 = RBACDecorator.pidListKey(realJmxAclPids, o12);
        String k3 = RBACDecorator.pidListKey(realJmxAclPids, o13);
        // q2 has its own dedicated ACL PID, so its key differs; q1 and q3
        // fall under the same PIDs and therefore share a key.
        assertThat(k1, not(equalTo(k2)));
        assertThat(k1, equalTo(k3));
    }

    // Loads the JSON fixture "<TestClassName>-input.json" next to this class.
    @SuppressWarnings("unchecked")
    private Map<String, Object> readInput() throws java.io.IOException {
        String inputJson = getClass().getSimpleName() + "-input.json";
        return new ObjectMapper().readValue(
                getClass().getResourceAsStream(inputJson), Map.class);
    }

    /**
     * decorate() must annotate the MBean tree with "canInvoke" flags: on the
     * MBean itself, per operation name ("op") and per full operation
     * signature ("opByString"), according to the mocked JMXSecurityMBean.
     */
    @Test
    @SuppressWarnings("unchecked")
    public void decorateCanInvoke() throws Exception {
        BundleContext bc = setUpMocksForDecorate();
        RBACDecorator decorator = new RBACDecorator(bc);
        decorator.setVerify(true);
        Map<String, Object> result = readInput();
        LOG.info("In: {}", result);
        decorator.decorate(result);
        LOG.info("Out: {}", result);
        Map<String, Map<String, Object>> domains = (Map<String, Map<String, Object>>) result.get("domains");
        Map<String, Object> domain = domains.get("org.apache.activemq");
        Map<String, Object> mbean = (Map<String, Object>) domain.get("type=Broker,brokerName=amq");
        assertThat(mbean.get("canInvoke"), equalTo(false));
        // op
        Map<String, Object> op = (Map<String, Object>) mbean.get("op");
        LOG.info("op = {}", op);
        assertThat(((Map<String, Object>) op.get("removeQueue")).get("canInvoke"), equalTo(false));
        assertThat(((Map<String, Object>) op.get("addQueue")).get("canInvoke"), equalTo(false));
        assertThat(((Map<String, Object>) op.get("stop")).get("canInvoke"), equalTo(true));
        assertThat(((Map<String, Object>) op.get("start")).get("canInvoke"), equalTo(true));
        // Overloaded operations keep one entry per signature, in order.
        assertThat(((List<Map<String, Object>>) op.get("overloadedMethod")).get(0).get("canInvoke"), equalTo(true));
        assertThat(((List<Map<String, Object>>) op.get("overloadedMethod")).get(1).get("canInvoke"), equalTo(false));
        assertThat(((List<Map<String, Object>>) op.get("overloadedMethod")).get(2).get("canInvoke"), equalTo(true));
        // opByString
        Map<String, Map<String, Boolean>> opByString = (Map<String, Map<String, Boolean>>) mbean.get("opByString");
        assertThat(opByString, notNullValue());
        LOG.info("opByString = {}", opByString);
        assertThat(opByString.keySet(), containsInAnyOrder(
                "removeQueue(java.lang.String)",
                "addQueue(java.lang.String)",
                "stop()",
                "start()",
                "overloadedMethod(java.lang.String)",
                "overloadedMethod(java.lang.String,java.lang.Object)",
                "overloadedMethod()"));
        assertThat(opByString.get("removeQueue(java.lang.String)").get("canInvoke"), equalTo(false));
        assertThat(opByString.get("addQueue(java.lang.String)").get("canInvoke"), equalTo(false));
        assertThat(opByString.get("stop()").get("canInvoke"), equalTo(true));
        assertThat(opByString.get("start()").get("canInvoke"), equalTo(true));
        assertThat(opByString.get("overloadedMethod(java.lang.String)").get("canInvoke"), equalTo(true));
        assertThat(opByString.get("overloadedMethod(java.lang.String,java.lang.Object)").get("canInvoke"), equalTo(false));
        assertThat(opByString.get("overloadedMethod()").get("canInvoke"), equalTo(true));
    }

    // Wires up the BundleContext/ConfigurationAdmin/JMXSecurityMBean mocks
    // used by decorateCanInvoke().
    // NOTE(review): the multi-value thenReturn(...) stubbings below are
    // strictly order-sensitive — they must match the exact call sequence
    // decorate() makes, so do not reorder them.
    @SuppressWarnings("unchecked")
    private BundleContext setUpMocksForDecorate() throws Exception {
        BundleContext bc = mock(BundleContext.class);
        // mocks for ConfigurationAdmin
        ServiceReference<ConfigurationAdmin> cmRef = mock(ServiceReference.class);
        when(bc.getServiceReference(ConfigurationAdmin.class)).thenReturn(cmRef);
        ConfigurationAdmin configAdmin = mock(ConfigurationAdmin.class);
        when(bc.getService(cmRef)).thenReturn(configAdmin);
        Configuration config = mock(Configuration.class);
        when(configAdmin.listConfigurations(anyString())).thenReturn(new Configuration[]{config});
        when(config.getPid()).thenReturn("pid-xxxxx");
        // mocks for JMXSecurityMBean
        ServiceReference<JMXSecurityMBean> jmxSecRef = mock(ServiceReference.class);
        when(bc.getServiceReference(JMXSecurityMBean.class)).thenReturn(jmxSecRef);
        JMXSecurityMBean jmxSec = mock(JMXSecurityMBean.class);
        when(bc.getService(jmxSecRef)).thenReturn(jmxSec);
        TabularData td = mock(TabularData.class);
        when(jmxSec.canInvoke(anyMap())).thenReturn(td);
        CompositeData cdForMBeans = mock(CompositeData.class);
        CompositeData cdForMBeanOps = mock(CompositeData.class);
        // First values() call returns the MBean row, the second one row per
        // operation (7 operations in the fixture).
        when((Collection<CompositeData>) td.values()).thenReturn(
                Arrays.asList(cdForMBeans),
                Arrays.asList(
                        cdForMBeanOps, cdForMBeanOps, cdForMBeanOps, cdForMBeanOps,
                        cdForMBeanOps, cdForMBeanOps, cdForMBeanOps));
        when(cdForMBeans.get("ObjectName")).thenReturn("org.apache.activemq:type=Broker,brokerName=amq");
        when(cdForMBeans.get("CanInvoke")).thenReturn(false);
        when(cdForMBeanOps.get("ObjectName")).thenReturn("org.apache.activemq:type=Broker,brokerName=amq");
        when(cdForMBeanOps.get("Method")).thenReturn(
                "removeQueue(java.lang.String)",
                "addQueue(java.lang.String)",
                "stop()",
                "start()",
                "overloadedMethod(java.lang.String)",
                "overloadedMethod(java.lang.String,java.lang.Object)",
                "overloadedMethod()");
        // invoked two times for each cd
        when(cdForMBeanOps.get("CanInvoke")).thenReturn(
                false, false,
                false, false,
                true, true,
                true, true,
                true, true,
                false, false,
                true, true);
        return bc;
    }

    /** deepCopy() must produce an equal but independent copy of each cached entry. */
    @Test
    @SuppressWarnings("unchecked")
    public void deepCopy() throws Exception {
        Map<String, Object> result = readInput();
        Map<String, Map<String, Object>> cache = (Map<String, Map<String, Object>>) result.get("cache");
        for (String key : cache.keySet()) {
            Map<String, Object> original = cache.get(key);
            Map<String, Object> copy = RBACDecorator.deepCopy(original);
            assertThat(copy, not(sameInstance(original)));
            assertThat(copy, equalTo(original));
        }
    }
}
| apache-2.0 |
iperdomo/keycloak | saml-core/src/main/java/org/keycloak/saml/SAML2AuthnRequestBuilder.java | 3364 | /*
* Copyright 2016 Red Hat, Inc. and/or its affiliates
* and other contributors as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.keycloak.saml;
import org.keycloak.dom.saml.v2.assertion.NameIDType;
import org.keycloak.dom.saml.v2.protocol.AuthnRequestType;
import org.keycloak.saml.common.exceptions.ConfigurationException;
import org.keycloak.saml.processing.api.saml.v2.request.SAML2Request;
import org.keycloak.saml.processing.core.saml.v2.common.IDGenerator;
import org.keycloak.saml.processing.core.saml.v2.util.XMLTimeUtil;
import org.w3c.dom.Document;
import java.net.URI;
/**
 * Fluent builder that assembles a SAML v2.0 {@code AuthnRequest} and renders
 * it as a DOM {@link Document}.
 *
 * @author pedroigor
 */
public class SAML2AuthnRequestBuilder {

    private final AuthnRequestType authnRequestType;
    protected String destination;
    protected String issuer;

    /** Creates the builder with a freshly generated request id and issue instant. */
    public SAML2AuthnRequestBuilder() {
        try {
            this.authnRequestType = new AuthnRequestType(IDGenerator.create("ID_"), XMLTimeUtil.getIssueInstant());
        } catch (ConfigurationException e) {
            throw new RuntimeException("Could not create SAML AuthnRequest builder.", e);
        }
    }

    /** Sets the endpoint URI the request will be sent to (applied in {@link #toDocument()}). */
    public SAML2AuthnRequestBuilder destination(String destination) {
        this.destination = destination;
        return this;
    }

    /** Sets the entity id of the request issuer (applied in {@link #toDocument()}). */
    public SAML2AuthnRequestBuilder issuer(String issuer) {
        this.issuer = issuer;
        return this;
    }

    /** Sets the URL where the assertion should be delivered. */
    public SAML2AuthnRequestBuilder assertionConsumerUrl(String assertionConsumerUrl) {
        this.authnRequestType.setAssertionConsumerServiceURL(URI.create(assertionConsumerUrl));
        return this;
    }

    /** Marks the request as requiring re-authentication. */
    public SAML2AuthnRequestBuilder forceAuthn(boolean forceAuthn) {
        this.authnRequestType.setForceAuthn(forceAuthn);
        return this;
    }

    /** Marks the request as passive (no visible IdP interaction). */
    public SAML2AuthnRequestBuilder isPassive(boolean isPassive) {
        this.authnRequestType.setIsPassive(isPassive);
        return this;
    }

    /** Sets the NameID policy built by the given sub-builder. */
    public SAML2AuthnRequestBuilder nameIdPolicy(SAML2NameIDPolicyBuilder nameIDPolicy) {
        this.authnRequestType.setNameIDPolicy(nameIDPolicy.build());
        return this;
    }

    /** Sets the protocol binding URI the response should use. */
    public SAML2AuthnRequestBuilder protocolBinding(String protocolBinding) {
        this.authnRequestType.setProtocolBinding(URI.create(protocolBinding));
        return this;
    }

    /**
     * Applies the configured issuer and destination to the request and
     * converts it into a DOM document.
     *
     * @throws RuntimeException if the conversion fails.
     */
    public Document toDocument() {
        try {
            NameIDType issuerNameId = new NameIDType();
            issuerNameId.setValue(this.issuer);
            this.authnRequestType.setIssuer(issuerNameId);
            this.authnRequestType.setDestination(URI.create(this.destination));
            return new SAML2Request().convert(this.authnRequestType);
        } catch (Exception e) {
            throw new RuntimeException("Could not convert " + this.authnRequestType + " to a document.", e);
        }
    }
} | apache-2.0 |
goodwinnk/intellij-community | plugins/junit/src/com/intellij/execution/junit/JUnitExternalLibraryDescriptor.java | 1908 | /*
* Copyright 2000-2015 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.execution.junit;
import com.intellij.openapi.roots.ExternalLibraryDescriptor;
/**
 * External library descriptors for the JUnit 3/4/5 libraries, used when the
 * IDE offers to add the matching JUnit artifact to a module's dependencies.
 *
 * @author nik
 */
public class JUnitExternalLibraryDescriptor extends ExternalLibraryDescriptor {
  // Classic junit:junit coordinates for JUnit 3 and 4.
  public static final ExternalLibraryDescriptor JUNIT3 = new JUnitExternalLibraryDescriptor("3", "3.8.2");
  public static final ExternalLibraryDescriptor JUNIT4 = new JUnitExternalLibraryDescriptor("4", "4.12");
  // JUnit 5 lives under different Maven coordinates (org.junit.jupiter) and
  // pins no preferred version.
  public static final ExternalLibraryDescriptor JUNIT5 = new JUnitExternalLibraryDescriptor("org.junit.jupiter", "junit-jupiter-api", "5.2",
                                                                                            null);
  // Base version string, kept only for getPresentableName() below.
  private final String myVersion;

  private JUnitExternalLibraryDescriptor(String baseVersion, String preferredVersion) {
    this("junit", "junit", baseVersion, preferredVersion);
  }

  private JUnitExternalLibraryDescriptor(final String groupId,
                                         final String artifactId,
                                         final String version,
                                         String preferredVersion) {
    // Accept any <version>.0 .. <version>.999 release of the base version.
    super(groupId, artifactId, version + ".0", version + ".999", preferredVersion);
    myVersion = version;
  }

  @Override
  public String getPresentableName() {
    return "JUnit" + myVersion;
  }
}
| apache-2.0 |
pioryan/smslib | src/java/org/smslib/http/HTTPGateway.java | 4949 | // SMSLib for Java v3
// A Java API library for sending and receiving SMS via a GSM modem
// or other supported gateways.
// Web Site: http://www.smslib.org
//
// Copyright (C) 2002-2012, Thanasis Delenikas, Athens/GREECE.
// SMSLib is distributed under the terms of the Apache License version 2.0
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package org.smslib.http;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.OutputStreamWriter;
import java.io.UnsupportedEncodingException;
import java.net.URL;
import java.net.URLConnection;
import java.net.URLEncoder;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.util.ArrayList;
import java.util.List;
import org.smslib.AGateway;
import org.smslib.helper.Logger;
/**
 * Base class for gateways that talk to an SMS provider over plain HTTP.
 * Provides shared helpers: form-encoded POST, GET, header expansion and
 * MD5 signing.
 */
class HTTPGateway extends AGateway
{
    public HTTPGateway(String id)
    {
        super(id);
    }

    /**
     * POSTs the given key/value fields to {@code url} as an
     * {@code application/x-www-form-urlencoded} body.
     *
     * @param url the endpoint to POST to
     * @param requestList the form fields to send
     * @return the response body, one list element per line
     * @throws IOException on connection or transfer failure
     */
    List<String> HttpPost(URL url, List<HttpHeader> requestList) throws IOException
    {
        List<String> responseList = new ArrayList<String>();
        Logger.getInstance().logInfo("HTTP POST: " + url, null, getGatewayId());
        URLConnection con = url.openConnection();
        con.setConnectTimeout(20000);
        con.setDoInput(true);
        con.setDoOutput(true);
        con.setUseCaches(false);
        con.setRequestProperty("Content-Type", "application/x-www-form-urlencoded");
        // Close the writer in a finally block so the stream is not leaked
        // when write() or flush() throws (the original leaked on failure).
        OutputStreamWriter out = new OutputStreamWriter(con.getOutputStream());
        try
        {
            out.write(encodeRequest(requestList));
            out.flush();
        }
        finally
        {
            out.close();
        }
        BufferedReader in = new BufferedReader(new InputStreamReader(con.getInputStream()));
        try
        {
            String line;
            while ((line = in.readLine()) != null)
                responseList.add(line);
        }
        finally
        {
            in.close();
        }
        return responseList;
    }

    /**
     * Builds the urlencoded request body for {@link #HttpPost}. Fields
     * flagged {@code unicode} are sent as the lowercase hex dump of their
     * UTF-16BE bytes ("UnicodeBigUnmarked" has no BOM); all other values
     * are standard UTF-8 urlencoded.
     */
    private static String encodeRequest(List<HttpHeader> requestList) throws UnsupportedEncodingException
    {
        StringBuilder req = new StringBuilder();
        for (int i = 0, n = requestList.size(); i < n; i++)
        {
            HttpHeader field = requestList.get(i);
            if (i != 0) req.append("&");
            req.append(field.key);
            req.append("=");
            if (field.unicode)
            {
                // Encode each byte as exactly two hex digits. The previous
                // implementation hex-dumped the *signed* byte value, so any
                // byte >= 0x80 expanded to 8 sign-extended digits, and then
                // relied on replaceAll("ff", "") to strip the extension -
                // which also corrupted bytes legitimately equal to 0xFF and
                // any "ff" digit pair formed across byte boundaries.
                byte[] uniBytes = field.value.getBytes("UnicodeBigUnmarked");
                StringBuilder tmp = new StringBuilder(uniBytes.length * 2);
                for (int j = 0; j < uniBytes.length; j++)
                {
                    int b = uniBytes[j] & 0xFF;
                    if (b < 0x10) tmp.append('0');
                    tmp.append(Integer.toHexString(b));
                }
                req.append(tmp);
            }
            else req.append(URLEncoder.encode(field.value, "utf-8"));
        }
        return req.toString();
    }

    /**
     * GETs {@code url} and returns the response body, one list element per
     * line.
     *
     * @throws IOException on connection or transfer failure
     */
    List<String> HttpGet(URL url) throws IOException
    {
        List<String> responseList = new ArrayList<String>();
        Logger.getInstance().logInfo("HTTP GET: " + url, null, getGatewayId());
        URLConnection con = url.openConnection();
        con.setConnectTimeout(20000);
        con.setAllowUserInteraction(false);
        BufferedReader in = new BufferedReader(new InputStreamReader(con.getInputStream()));
        try
        {
            String inputLine;
            while ((inputLine = in.readLine()) != null)
                responseList.add(inputLine);
        }
        finally
        {
            // Close even when readLine() throws, so the connection's input
            // stream is not leaked.
            in.close();
        }
        return responseList;
    }

    /**
     * Renders the headers as {@code key=value&key=value&...}. Note the
     * trailing '&' is deliberately kept for backward compatibility with
     * existing callers.
     */
    String ExpandHttpHeaders(List<HttpHeader> httpHeaderList)
    {
        StringBuilder buffer = new StringBuilder();
        for (HttpHeader h : httpHeaderList)
        {
            buffer.append(h.key);
            buffer.append("=");
            buffer.append(h.value);
            buffer.append("&");
        }
        return buffer.toString();
    }

    /**
     * Simple mutable key/value pair describing one HTTP form field.
     */
    class HttpHeader
    {
        public String key;

        public String value;

        // When true, HttpPost() sends the value as a hex dump of its
        // UTF-16BE bytes instead of urlencoding it.
        public boolean unicode;

        public HttpHeader()
        {
            this.key = "";
            this.value = "";
            this.unicode = false;
        }

        public HttpHeader(String myKey, String myValue, boolean myUnicode)
        {
            this.key = myKey;
            this.value = myValue;
            this.unicode = myUnicode;
        }
    }

    /**
     * Returns the lowercase hex MD5 digest of {@code in}, interpreting the
     * string as LATIN1 bytes; returns "" (after logging) if the charset or
     * the MD5 algorithm is unavailable.
     */
    String calculateMD5(String in)
    {
        try
        {
            MessageDigest md = MessageDigest.getInstance("MD5");
            byte[] digest = md.digest(in.getBytes("LATIN1"));
            StringBuilder md5 = new StringBuilder(32);
            for (int i = 0; i < digest.length; i++)
            {
                // Mask to the unsigned value and zero-pad to two digits;
                // equivalent to the previous three-way branch.
                int b = digest[i] & 0xFF;
                if (b < 0x10) md5.append('0');
                md5.append(Integer.toHexString(b));
            }
            return md5.toString();
        }
        catch (UnsupportedEncodingException ex)
        {
            Logger.getInstance().logError("Unsupported encoding.", ex, getGatewayId());
            return "";
        }
        catch (NoSuchAlgorithmException ex)
        {
            Logger.getInstance().logError("No such algorithm.", ex, getGatewayId());
            return "";
        }
    }

    @Override
    public int getQueueSchedulingInterval()
    {
        // Poll the outbound message queue every 500 ms.
        return 500;
    }
}
| apache-2.0 |
raja15792/googleads-java-lib | modules/dfp_appengine/src/main/java/com/google/api/ads/dfp/jaxws/v201508/ActivatePlacements.java | 904 |
package com.google.api.ads.dfp.jaxws.v201508;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlType;
/**
*
* The action used for activating {@link Placement} objects.
*
*
* <p>Java class for ActivatePlacements complex type.
*
* <p>The following schema fragment specifies the expected content contained within this class.
*
* <pre>
* <complexType name="ActivatePlacements">
* <complexContent>
* <extension base="{https://www.google.com/apis/ads/publisher/v201508}PlacementAction">
* <sequence>
* </sequence>
* </extension>
* </complexContent>
* </complexType>
* </pre>
*
*
*/
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "ActivatePlacements")
public class ActivatePlacements
    extends PlacementAction
{
    // Intentionally empty: this JAXB-generated marker type adds no fields of
    // its own (the schema fragment above declares an empty sequence). Its
    // presence in a request is what selects the "activate placements" action.
    // Generated code - do not edit by hand.
}
| apache-2.0 |
leandrosilvaferreira/camel-cookbook-examples | camel-cookbook-routing/src/test/java/org/camelcookbook/routing/filtering/FilteringTest.java | 5225 | /*
* Copyright (C) Scott Cranton and Jakub Korab
* https://github.com/CamelCookbook
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.camelcookbook.routing.filtering;
import org.apache.camel.Exchange;
import org.apache.camel.builder.RouteBuilder;
import org.apache.camel.component.mock.MockEndpoint;
import org.apache.camel.test.junit4.CamelTestSupport;
import org.junit.Test;
public class FilteringTest extends CamelTestSupport {

    @Override
    protected RouteBuilder createRouteBuilder() throws Exception {
        // Route under test is defined externally; judging by the mock
        // endpoints below it chains two message filters, each followed by a
        // mock, with mock:other at the end -- see FilteringRouteBuilder.
        return new FilteringRouteBuilder();
    }

    @Test
    public void testFirstFilter() throws Exception {
        // "Cooks Rocks" is expected to satisfy only the first filter.
        final MockEndpoint mockEndpointC = getMockEndpoint("mock:C");
        mockEndpointC.expectedMessageCount(1);
        mockEndpointC.expectedPropertyReceived(Exchange.FILTER_MATCHED, true);
        final MockEndpoint mockEndpointAfterC = getMockEndpoint("mock:afterC");
        mockEndpointAfterC.expectedMessageCount(1);
        // FILTER_MATCHED reflects the outcome of the most recent filter:
        // still true here, because the first filter matched.
        mockEndpointAfterC.expectedPropertyReceived(Exchange.FILTER_MATCHED, true);
        getMockEndpoint("mock:amel").expectedMessageCount(0);
        final MockEndpoint mockEndpointOther = getMockEndpoint("mock:other");
        mockEndpointOther.expectedMessageCount(1);
        // By the end of the route the property is false: the second filter's
        // predicate did not match this message body.
        mockEndpointOther.expectedPropertyReceived(Exchange.FILTER_MATCHED, false);
        template.sendBody("direct:start", "Cooks Rocks");
        assertMockEndpointsSatisfied();
    }

    @Test
    public void testSecondFilter() throws Exception {
        // "amel is in Belgium" is expected to satisfy only the second filter.
        getMockEndpoint("mock:C").expectedMessageCount(0);
        final MockEndpoint mockEndpointAfterC = getMockEndpoint("mock:afterC");
        mockEndpointAfterC.expectedMessageCount(1);
        // The first filter did not match, so FILTER_MATCHED is false after it.
        mockEndpointAfterC.expectedPropertyReceived(Exchange.FILTER_MATCHED, false);
        final MockEndpoint mockEndpointAmel = getMockEndpoint("mock:amel");
        mockEndpointAmel.expectedMessageCount(1);
        mockEndpointAmel.expectedPropertyReceived(Exchange.FILTER_MATCHED, true);
        final MockEndpoint mockEndpointOther = getMockEndpoint("mock:other");
        mockEndpointOther.expectedMessageCount(1);
        // The second (most recent) filter matched, so the property is true
        // at the end of the route.
        mockEndpointOther.expectedPropertyReceived(Exchange.FILTER_MATCHED, true);
        template.sendBody("direct:start", "amel is in Belgium");
        assertMockEndpointsSatisfied();
    }

    @Test
    public void testBothFilter() throws Exception {
        // "Camel Rocks!" is expected to satisfy both filters.
        final MockEndpoint mockEndpointC = getMockEndpoint("mock:C");
        mockEndpointC.expectedMessageCount(1);
        mockEndpointC.expectedPropertyReceived(Exchange.FILTER_MATCHED, true);
        final MockEndpoint mockEndpointAfterC = getMockEndpoint("mock:afterC");
        mockEndpointAfterC.expectedMessageCount(1);
        // First filter matched, so FILTER_MATCHED is true after it.
        mockEndpointAfterC.expectedPropertyReceived(Exchange.FILTER_MATCHED, true);
        final MockEndpoint mockEndpointAmel = getMockEndpoint("mock:amel");
        mockEndpointAmel.expectedMessageCount(1);
        mockEndpointAmel.expectedPropertyReceived(Exchange.FILTER_MATCHED, true);
        final MockEndpoint mockEndpointOther = getMockEndpoint("mock:other");
        mockEndpointOther.expectedMessageCount(1);
        // Second filter matched too, so the property stays true at the end.
        mockEndpointOther.expectedPropertyReceived(Exchange.FILTER_MATCHED, true);
        template.sendBody("direct:start", "Camel Rocks!");
        assertMockEndpointsSatisfied();
    }

    @Test
    public void testOther() throws Exception {
        // "Hello World" is expected to satisfy neither filter.
        getMockEndpoint("mock:C").expectedMessageCount(0);
        final MockEndpoint mockEndpointAfterC = getMockEndpoint("mock:afterC");
        mockEndpointAfterC.expectedMessageCount(1);
        // Neither filter matches, so FILTER_MATCHED is false after the first
        // filter...
        mockEndpointAfterC.expectedPropertyReceived(Exchange.FILTER_MATCHED, false);
        getMockEndpoint("mock:amel").expectedMessageCount(0);
        final MockEndpoint mockEndpointOther = getMockEndpoint("mock:other");
        mockEndpointOther.expectedMessageCount(1);
        // ...and still false after the second.
        mockEndpointOther.expectedPropertyReceived(Exchange.FILTER_MATCHED, false);
        template.sendBody("direct:start", "Hello World");
        assertMockEndpointsSatisfied();
    }
}
| apache-2.0 |
zhangdian/solr4.6.0 | solr/core/src/java/org/apache/solr/highlight/ScoreOrderFragmentsBuilder.java | 2051 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.highlight;
import org.apache.lucene.search.vectorhighlight.BoundaryScanner;
import org.apache.lucene.search.vectorhighlight.FragmentsBuilder;
import org.apache.solr.common.params.SolrParams;
/**
 * Solr fragments-builder plugin that delegates to Lucene's
 * {@code vectorhighlight.ScoreOrderFragmentsBuilder}, which (per its name)
 * orders the produced highlight fragments by score rather than by their
 * position in the field.
 */
public class ScoreOrderFragmentsBuilder extends SolrFragmentsBuilder {

  @Override
  protected FragmentsBuilder getFragmentsBuilder( SolrParams params,
      String[] preTags, String[] postTags, BoundaryScanner bs ) {
    // Fully-qualified so the Lucene class does not clash with this class's
    // own (identical) simple name.
    org.apache.lucene.search.vectorhighlight.ScoreOrderFragmentsBuilder delegate =
        new org.apache.lucene.search.vectorhighlight.ScoreOrderFragmentsBuilder( preTags, postTags, bs );
    delegate.setMultiValuedSeparator( getMultiValuedSeparatorChar( params ) );
    return delegate;
  }

  // -------------------- SolrInfoMBean metadata --------------------

  @Override
  public String getDescription() {
    return "ScoreOrderFragmentsBuilder";
  }

  @Override
  public String getSource() {
    return "$URL: https://svn.apache.org/repos/asf/lucene/dev/branches/lucene_solr_4_6/solr/core/src/java/org/apache/solr/highlight/ScoreOrderFragmentsBuilder.java $";
  }
}
| apache-2.0 |
medicayun/medicayundicom | dcm4jboss-all/tags/dcm4jboss_0_8_5/dcm4jboss-ejb/src/java/org/dcm4chex/archive/ejb/entity/InstanceBean.java | 10940 | /* $Id: InstanceBean.java 1133 2004-06-15 11:13:47Z gunterze $
* Copyright (c) 2002,2003 by TIANI MEDGRAPH AG
*
* This file is part of dcm4che.
*
* This library is free software; you can redistribute it and/or modify it
* under the terms of the GNU Lesser General Public License as published
* by the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This library is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
package org.dcm4chex.archive.ejb.entity;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
import java.util.Set;
import javax.ejb.CreateException;
import javax.ejb.EJBException;
import javax.ejb.EntityBean;
import javax.ejb.EntityContext;
import javax.ejb.FinderException;
import javax.ejb.RemoveException;
import javax.naming.Context;
import javax.naming.InitialContext;
import javax.naming.NamingException;
import org.apache.log4j.Logger;
import org.dcm4che.data.Dataset;
import org.dcm4che.data.DcmDecodeParam;
import org.dcm4che.dict.Tags;
import org.dcm4cheri.util.DatasetUtils;
import org.dcm4cheri.util.StringUtils;
import org.dcm4chex.archive.ejb.interfaces.CodeLocal;
import org.dcm4chex.archive.ejb.interfaces.CodeLocalHome;
import org.dcm4chex.archive.ejb.interfaces.SeriesLocal;
/**
* Instance Bean
*
* @author <a href="mailto:gunter@tiani.com">Gunter Zeilinger</a>
* @version $Revision: 1133 $ $Date: 2004-06-15 19:13:47 +0800 (周二, 15 6月 2004) $
*
* @ejb.bean
* name="Instance"
* type="CMP"
* view-type="local"
* primkey-field="pk"
* local-jndi-name="ejb/Instance"
*
* @ejb.transaction
* type="Required"
*
* @ejb.persistence
* table-name="instance"
*
* @jboss.entity-command
* name="hsqldb-fetch-key"
*
* @ejb.finder
* signature="java.util.Collection findAll()"
* query="SELECT OBJECT(a) FROM Instance AS a"
* transaction-type="Supports"
*
* @ejb.finder
* signature="org.dcm4chex.archive.ejb.interfaces.InstanceLocal findBySopIuid(java.lang.String uid)"
* query="SELECT OBJECT(a) FROM Instance AS a WHERE a.sopIuid = ?1"
* transaction-type="Supports"
*
* @ejb.ejb-ref ejb-name="Code" view-type="local" ref-name="ejb/Code"
*
*/
public abstract class InstanceBean implements EntityBean {

    private static final Logger log = Logger.getLogger(InstanceBean.class);

    // Local home of the Code EJB, looked up once per entity context; used in
    // ejbPostCreate() to map the SR Concept Name Code item to a Code entity.
    private CodeLocalHome codeHome;

    // Lazily-parsed cache of the backslash-separated retrieve_aets column;
    // reset to null in ejbLoad() so it is re-parsed after each load.
    private Set retrieveAETSet;

    public void setEntityContext(EntityContext ctx) {
        Context jndiCtx = null;
        try {
            jndiCtx = new InitialContext();
            codeHome = (CodeLocalHome) jndiCtx.lookup("java:comp/env/ejb/Code");
        } catch (NamingException e) {
            throw new EJBException(e);
        } finally {
            if (jndiCtx != null) {
                try {
                    jndiCtx.close();
                } catch (NamingException ignore) {
                    // best effort: a failed close must not mask the lookup result
                }
            }
        }
    }

    public void unsetEntityContext() {
        codeHome = null;
    }

    /**
     * Auto-generated Primary Key
     *
     * @ejb.interface-method
     * @ejb.pk-field
     * @ejb.persistence
     *  column-name="pk"
     * @jboss.persistence
     *  auto-increment="true"
     *
     */
    public abstract Integer getPk();

    public abstract void setPk(Integer pk);

    /**
     * SOP Instance UID
     *
     * @ejb.persistence
     *  column-name="sop_iuid"
     *
     * @ejb.interface-method
     *
     */
    public abstract String getSopIuid();

    public abstract void setSopIuid(String iuid);

    /**
     * SOP Class UID
     *
     * @ejb.persistence
     *  column-name="sop_cuid"
     *
     * @ejb.interface-method
     *
     */
    public abstract String getSopCuid();

    public abstract void setSopCuid(String cuid);

    /**
     * Instance Number
     *
     * @ejb.persistence
     *  column-name="inst_no"
     *
     * @ejb.interface-method
     *
     */
    public abstract String getInstanceNumber();

    public abstract void setInstanceNumber(String no);

    /**
     * SR Completion Flag
     *
     * @ejb.persistence
     *  column-name="sr_complete"
     *
     * @ejb.interface-method
     *
     */
    public abstract String getSrCompletionFlag();

    public abstract void setSrCompletionFlag(String flag);

    /**
     * SR Verification Flag
     *
     * @ejb.persistence
     *  column-name="sr_verified"
     *
     * @ejb.interface-method
     *
     */
    public abstract String getSrVerificationFlag();

    public abstract void setSrVerificationFlag(String flag);

    /**
     * Instance DICOM Attributes
     *
     * @ejb.persistence
     *  column-name="inst_attrs"
     *
     */
    public abstract byte[] getEncodedAttributes();

    public abstract void setEncodedAttributes(byte[] bytes);

    /**
     * Retrieve AETs
     *
     * @ejb.interface-method
     * @ejb.persistence
     *  column-name="retrieve_aets"
     */
    public abstract String getRetrieveAETs();

    public abstract void setRetrieveAETs(String aets);

    /**
     * Instance Availability
     *
     * @ejb.persistence
     *  column-name="availability"
     */
    public abstract int getAvailability();

    /**
     * NPE-safe accessor: returns 0 when the CMP field is unset
     * (e.g. a NULL column value), instead of propagating the NPE.
     *
     * @ejb.interface-method
     */
    public int getAvailabilitySafe() {
        try {
            return getAvailability();
        } catch (NullPointerException npe) {
            return 0;
        }
    }

    public abstract void setAvailability(int availability);

    /**
     * Storage Commitment
     *
     * @ejb.persistence
     *  column-name="commitment"
     */
    public abstract boolean getCommitment();

    /**
     * NPE-safe accessor: returns false when the CMP field is unset
     * (e.g. a NULL column value), instead of propagating the NPE.
     *
     * @ejb.interface-method
     */
    public boolean getCommitmentSafe() {
        try {
            return getCommitment();
        } catch (NullPointerException npe) {
            return false;
        }
    }

    /**
     * @ejb.interface-method
     */
    public abstract void setCommitment(boolean commitment);

    /**
     * @ejb.relation
     *  name="series-instance"
     *  role-name="instance-of-series"
     *  cascade-delete="yes"
     *
     * @jboss:relation
     *  fk-column="series_fk"
     *  related-pk-field="pk"
     *
     * @param series series of this instance
     */
    public abstract void setSeries(SeriesLocal series);

    /**
     * @ejb.interface-method view-type="local"
     *
     * @return series of this instance
     */
    public abstract SeriesLocal getSeries();

    /**
     * @ejb.relation
     *  name="instance-files"
     *  role-name="instance-in-files"
     *
     * @ejb.interface-method view-type="local"
     *
     * @return all files of this instance
     */
    public abstract java.util.Collection getFiles();

    public abstract void setFiles(java.util.Collection files);

    /**
     * @ejb.relation
     *  name="instance-srcode"
     *  role-name="sr-with-title"
     *  target-ejb="Code"
     *  target-role-name="title-of-sr"
     *  target-multiple="yes"
     *
     * @jboss:relation
     *  fk-column="srcode_fk"
     *  related-pk-field="pk"
     *
     * @param srCode code of SR title
     */
    public abstract void setSrCode(CodeLocal srCode);

    /**
     * @ejb.interface-method view-type="local"
     *
     * @return code of SR title
     */
    public abstract CodeLocal getSrCode();

    public void ejbLoad() {
        // Invalidate the cached AET set; it is re-parsed lazily from the
        // freshly loaded retrieve_aets column on next access.
        retrieveAETSet = null;
    }

    /**
     * Create Instance.
     *
     * @ejb.create-method
     */
    public Integer ejbCreate(Dataset ds, SeriesLocal series)
            throws CreateException {
        retrieveAETSet = null;
        setAttributes(ds);
        // CMP convention: return null, the container assigns the primary key.
        return null;
    }

    public void ejbPostCreate(Dataset ds, SeriesLocal series)
            throws CreateException {
        // Relations may only be set in ejbPostCreate: resolve the SR title
        // code (Concept Name Code Sequence item) and link to the series.
        try {
            setSrCode(CodeBean.valueOf(codeHome, ds
                    .getItem(Tags.ConceptNameCodeSeq)));
        } catch (CreateException e) {
            throw new CreateException(e.getMessage());
        } catch (FinderException e) {
            throw new CreateException(e.getMessage());
        }
        setSeries(series);
        // Keep the series' denormalized instance counter in sync.
        series.incNumberOfSeriesRelatedInstances(1);
        log.info("Created " + prompt());
    }

    public void ejbRemove() throws RemoveException {
        log.info("Deleting " + prompt());
        SeriesLocal series = getSeries();
        if (series != null) {
            // Decrement the series' denormalized instance counter.
            series.incNumberOfSeriesRelatedInstances(-1);
        }
    }

    /**
     * Updates the availability if it changed.
     *
     * @return true if the stored value was actually modified
     *
     * @ejb.interface-method
     */
    public boolean updateAvailability(int availability) {
        if (availability != getAvailabilitySafe()) {
            setAvailability(availability);
            return true;
        }
        return false;
    }

    /**
     * Decodes the persisted instance attributes (stored as an
     * explicit-VR little-endian byte stream) back into a Dataset.
     *
     * @ejb.interface-method
     */
    public Dataset getAttributes() {
        return DatasetUtils.fromByteArray(getEncodedAttributes(),
                DcmDecodeParam.EVR_LE);
    }

    /**
     * Populates the CMP fields from the given dataset and persists the
     * full dataset as an encoded byte array.
     *
     * @ejb.interface-method
     */
    public void setAttributes(Dataset ds) {
        setSopIuid(ds.getString(Tags.SOPInstanceUID));
        setSopCuid(ds.getString(Tags.SOPClassUID));
        setInstanceNumber(ds.getString(Tags.InstanceNumber));
        setSrCompletionFlag(ds.getString(Tags.CompletionFlag));
        setSrVerificationFlag(ds.getString(Tags.VerificationFlag));
        setEncodedAttributes(DatasetUtils
                .toByteArray(ds, DcmDecodeParam.EVR_LE));
    }

    /**
     * Returns a read-only view of the retrieve AETs.
     *
     * @ejb.interface-method
     */
    public Set getRetrieveAETSet() {
        return Collections.unmodifiableSet(retrieveAETSet());
    }

    // Lazily parses the backslash-separated retrieve_aets column into a Set;
    // the result is cached until the next ejbLoad().
    private Set retrieveAETSet() {
        if (retrieveAETSet == null) {
            retrieveAETSet = new HashSet();
            String aets = getRetrieveAETs();
            if (aets != null)
                retrieveAETSet.addAll(Arrays.asList(StringUtils.split(aets,
                        '\\')));
        }
        return retrieveAETSet;
    }

    /**
     * Adds the given AETs; persists the updated backslash-separated string
     * only when the set actually changed.
     *
     * @return true if at least one AET was newly added
     *
     * @ejb.interface-method
     */
    public boolean addRetrieveAETs(String[] aets) {
        if (!retrieveAETSet().addAll(Arrays.asList(aets))) return false;
        setRetrieveAETs(StringUtils.toString((String[]) retrieveAETSet()
                .toArray(new String[retrieveAETSet.size()]), '\\'));
        return true;
    }

    /**
     * String form of this entity, exposed to clients (same as the
     * internal log prompt).
     *
     * @ejb.interface-method
     */
    public String asString() {
        return prompt();
    }

    private String prompt() {
        return "Instance[pk=" + getPk() + ", iuid=" + getSopIuid() + ", cuid="
                + getSopCuid() + ", series->" + getSeries() + "]";
    }
}
| apache-2.0 |
apache/ant-ivy | src/java/org/apache/ivy/core/settings/IvySettings.java | 59798 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.ivy.core.settings;
import org.apache.ivy.Ivy;
import org.apache.ivy.core.IvyPatternHelper;
import org.apache.ivy.core.NormalRelativeUrlResolver;
import org.apache.ivy.core.RelativeUrlResolver;
import org.apache.ivy.core.cache.CacheUtil;
import org.apache.ivy.core.cache.DefaultRepositoryCacheManager;
import org.apache.ivy.core.cache.DefaultResolutionCacheManager;
import org.apache.ivy.core.cache.RepositoryCacheManager;
import org.apache.ivy.core.cache.ResolutionCacheManager;
import org.apache.ivy.core.check.CheckEngineSettings;
import org.apache.ivy.core.deliver.DeliverEngineSettings;
import org.apache.ivy.core.install.InstallEngineSettings;
import org.apache.ivy.core.module.id.ModuleId;
import org.apache.ivy.core.module.id.ModuleRevisionId;
import org.apache.ivy.core.module.id.ModuleRules;
import org.apache.ivy.core.module.status.StatusManager;
import org.apache.ivy.core.pack.ArchivePacking;
import org.apache.ivy.core.pack.PackingRegistry;
import org.apache.ivy.core.publish.PublishEngineSettings;
import org.apache.ivy.core.repository.RepositoryManagementEngineSettings;
import org.apache.ivy.core.resolve.ResolveEngineSettings;
import org.apache.ivy.core.resolve.ResolveOptions;
import org.apache.ivy.core.retrieve.RetrieveEngineSettings;
import org.apache.ivy.core.sort.SortEngineSettings;
import org.apache.ivy.osgi.core.OsgiLatestStrategy;
import org.apache.ivy.plugins.IvySettingsAware;
import org.apache.ivy.plugins.circular.CircularDependencyStrategy;
import org.apache.ivy.plugins.circular.ErrorCircularDependencyStrategy;
import org.apache.ivy.plugins.circular.IgnoreCircularDependencyStrategy;
import org.apache.ivy.plugins.circular.WarnCircularDependencyStrategy;
import org.apache.ivy.plugins.conflict.ConflictManager;
import org.apache.ivy.plugins.conflict.LatestCompatibleConflictManager;
import org.apache.ivy.plugins.conflict.LatestConflictManager;
import org.apache.ivy.plugins.conflict.NoConflictManager;
import org.apache.ivy.plugins.conflict.StrictConflictManager;
import org.apache.ivy.plugins.latest.LatestLexicographicStrategy;
import org.apache.ivy.plugins.latest.LatestRevisionStrategy;
import org.apache.ivy.plugins.latest.LatestStrategy;
import org.apache.ivy.plugins.latest.LatestTimeStrategy;
import org.apache.ivy.plugins.latest.WorkspaceLatestStrategy;
import org.apache.ivy.plugins.lock.CreateFileLockStrategy;
import org.apache.ivy.plugins.lock.LockStrategy;
import org.apache.ivy.plugins.lock.NIOFileLockStrategy;
import org.apache.ivy.plugins.lock.NoLockStrategy;
import org.apache.ivy.plugins.matcher.ExactOrRegexpPatternMatcher;
import org.apache.ivy.plugins.matcher.ExactPatternMatcher;
import org.apache.ivy.plugins.matcher.MapMatcher;
import org.apache.ivy.plugins.matcher.PatternMatcher;
import org.apache.ivy.plugins.matcher.RegexpPatternMatcher;
import org.apache.ivy.plugins.namespace.Namespace;
import org.apache.ivy.plugins.parser.ModuleDescriptorParser;
import org.apache.ivy.plugins.parser.ModuleDescriptorParserRegistry;
import org.apache.ivy.plugins.parser.ParserSettings;
import org.apache.ivy.plugins.report.LogReportOutputter;
import org.apache.ivy.plugins.report.ReportOutputter;
import org.apache.ivy.plugins.report.XmlReportOutputter;
import org.apache.ivy.plugins.resolver.AbstractWorkspaceResolver;
import org.apache.ivy.plugins.resolver.ChainResolver;
import org.apache.ivy.plugins.resolver.DependencyResolver;
import org.apache.ivy.plugins.resolver.DualResolver;
import org.apache.ivy.plugins.resolver.ResolverSettings;
import org.apache.ivy.plugins.resolver.WorkspaceChainResolver;
import org.apache.ivy.plugins.signer.SignatureGenerator;
import org.apache.ivy.plugins.trigger.Trigger;
import org.apache.ivy.plugins.version.ChainVersionMatcher;
import org.apache.ivy.plugins.version.ExactVersionMatcher;
import org.apache.ivy.plugins.version.LatestVersionMatcher;
import org.apache.ivy.plugins.version.SubVersionMatcher;
import org.apache.ivy.plugins.version.VersionMatcher;
import org.apache.ivy.plugins.version.VersionRangeMatcher;
import org.apache.ivy.util.Checks;
import org.apache.ivy.util.FileUtil;
import org.apache.ivy.util.Message;
import org.apache.ivy.util.StringUtils;
import org.apache.ivy.util.filter.Filter;
import org.apache.ivy.util.url.URLHandlerRegistry;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.lang.reflect.Field;
import java.net.MalformedURLException;
import java.net.URL;
import java.net.URLClassLoader;
import java.security.AccessControlException;
import java.text.ParseException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import static org.apache.ivy.util.StringUtils.splitToArray;
public class IvySettings implements SortEngineSettings, PublishEngineSettings, ParserSettings,
DeliverEngineSettings, CheckEngineSettings, InstallEngineSettings, ResolverSettings,
ResolveEngineSettings, RetrieveEngineSettings, RepositoryManagementEngineSettings {
private static final long INTERRUPT_TIMEOUT = 2000;
private Map<String, Class<?>> typeDefs = new HashMap<>();
private Map<String, DependencyResolver> resolversMap = new HashMap<>();
private DependencyResolver defaultResolver;
private DependencyResolver dictatorResolver = null;
private String defaultResolverName;
private File defaultCache;
private String defaultBranch = null;
private boolean checkUpToDate = true;
private ModuleRules<ModuleSettings> moduleSettings = new ModuleRules<>();
private Map<String, ConflictManager> conflictsManager = new HashMap<>();
private Map<String, LatestStrategy> latestStrategies = new HashMap<>();
private Map<String, LockStrategy> lockStrategies = new HashMap<>();
private Map<String, Namespace> namespaces = new HashMap<>();
private Map<String, PatternMatcher> matchers = new HashMap<>();
private Map<String, ReportOutputter> reportOutputters = new HashMap<>();
private Map<String, VersionMatcher> versionMatchers = new HashMap<>();
private Map<String, CircularDependencyStrategy> circularDependencyStrategies = new HashMap<>();
private Map<String, RepositoryCacheManager> repositoryCacheManagers = new HashMap<>();
private Map<String, SignatureGenerator> signatureGenerators = new HashMap<>();
private List<Trigger> triggers = new ArrayList<>();
private IvyVariableContainer variableContainer = new IvyVariableContainerImpl();
private boolean validate = true;
private LatestStrategy defaultLatestStrategy = null;
private LockStrategy defaultLockStrategy = null;
private ConflictManager defaultConflictManager = null;
private CircularDependencyStrategy circularDependencyStrategy = null;
private RepositoryCacheManager defaultRepositoryCacheManager = null;
private ResolutionCacheManager resolutionCacheManager = null;
private List<String> listingIgnore = new ArrayList<>();
private boolean repositoriesConfigured;
private boolean useRemoteConfig = false;
private File defaultUserDir;
private File baseDir = new File(".").getAbsoluteFile();
private List<URL> classpathURLs = new ArrayList<>();
private ClassLoader classloader;
private Boolean debugConflictResolution;
private boolean logNotConvertedExclusionRule;
private VersionMatcher versionMatcher;
private StatusManager statusManager;
private Boolean debugLocking;
private Boolean dumpMemoryUsage;
private String defaultCacheIvyPattern;
private String defaultCacheArtifactPattern;
private boolean defaultUseOrigin;
private String defaultResolveMode = ResolveOptions.RESOLVEMODE_DEFAULT;
private PackingRegistry packingRegistry = new PackingRegistry();
private AbstractWorkspaceResolver workspaceResolver;
private final Map<String, TimeoutConstraint> timeoutConstraints = new HashMap<>();
public IvySettings() {
this(new IvyVariableContainerImpl());
}
public IvySettings(IvyVariableContainer variableContainer) {
setVariableContainer(variableContainer);
setVariable("ivy.default.settings.dir", getDefaultSettingsDir(), true);
setVariable("ivy.basedir", getBaseDir().getAbsolutePath());
setDeprecatedVariable("ivy.default.conf.dir", "ivy.default.settings.dir");
String ivyTypeDefs = System.getProperty("ivy.typedef.files");
if (ivyTypeDefs != null) {
for (String file : splitToArray(ivyTypeDefs)) {
try {
typeDefs(new FileInputStream(Checks.checkAbsolute(file,
"ivy.typedef.files")), true);
} catch (FileNotFoundException e) {
Message.warn("typedefs file not found: " + file);
} catch (IOException e) {
Message.warn("problem with typedef file: " + file, e);
}
}
} else {
try {
typeDefs(getSettingsURL("typedef.properties").openStream(), true);
} catch (IOException e) {
Message.warn("impossible to load default type defs", e);
}
}
LatestLexicographicStrategy latestLexicographicStrategy = new LatestLexicographicStrategy();
LatestRevisionStrategy latestRevisionStrategy = new LatestRevisionStrategy();
LatestTimeStrategy latestTimeStrategy = new LatestTimeStrategy();
OsgiLatestStrategy osgiLatestStrategy = new OsgiLatestStrategy();
addLatestStrategy("latest-revision", latestRevisionStrategy);
addLatestStrategy("latest-lexico", latestLexicographicStrategy);
addLatestStrategy("latest-time", latestTimeStrategy);
addLatestStrategy("latest-osgi", osgiLatestStrategy);
addLockStrategy("no-lock", new NoLockStrategy());
addLockStrategy("artifact-lock", new CreateFileLockStrategy(debugLocking()));
addLockStrategy("artifact-lock-nio", new NIOFileLockStrategy(debugLocking()));
addConflictManager("latest-revision", new LatestConflictManager("latest-revision",
latestRevisionStrategy));
addConflictManager("latest-compatible", new LatestCompatibleConflictManager(
"latest-compatible", latestRevisionStrategy));
addConflictManager("latest-time", new LatestConflictManager("latest-time",
latestTimeStrategy));
addConflictManager("all", new NoConflictManager());
addConflictManager("strict", new StrictConflictManager());
addMatcher(ExactPatternMatcher.INSTANCE);
addMatcher(RegexpPatternMatcher.INSTANCE);
addMatcher(ExactOrRegexpPatternMatcher.INSTANCE);
try {
// GlobPatternMatcher is optional. Only add it when available.
@SuppressWarnings("unchecked")
Class<? extends PatternMatcher> globClazz = (Class<? extends PatternMatcher>) IvySettings.class
.getClassLoader()
.loadClass("org.apache.ivy.plugins.matcher.GlobPatternMatcher");
Field instanceField = globClazz.getField("INSTANCE");
addMatcher((PatternMatcher) instanceField.get(null));
} catch (Exception e) {
// ignore: the matcher isn't on the classpath
Message.info("impossible to define glob matcher: "
+ "org.apache.ivy.plugins.matcher.GlobPatternMatcher was not found", e);
}
addReportOutputter(new LogReportOutputter());
addReportOutputter(new XmlReportOutputter());
configureDefaultCircularDependencyStrategies();
listingIgnore.add(".cvsignore");
listingIgnore.add("CVS");
listingIgnore.add(".svn");
listingIgnore.add("maven-metadata.xml");
listingIgnore.add("maven-metadata.xml.md5");
listingIgnore.add("maven-metadata.xml.sha1");
addSystemProperties();
}
    /**
     * Imports all JVM system properties as Ivy variables.
     * <p>
     * If a security manager denies access to the full property set, the properties are simply
     * skipped (with a verbose message) rather than failing settings initialization.
     */
    private synchronized void addSystemProperties() {
        try {
            // clone() takes a snapshot so later System property mutations don't leak in
            addAllVariables((Map<?, ?>) System.getProperties().clone());
        } catch (AccessControlException ex) {
            Message.verbose("access denied to getting all system properties: they won't be available as Ivy variables."
                    + "\nset " + ex.getPermission() + " permission if you want to access them");
        }
    }
/**
* Call this method to ask ivy to configure some variables using either a remote or a local
* properties file
*
* @param remote boolean
*/
@SuppressWarnings("deprecation")
public synchronized void configureRepositories(boolean remote) {
if (!repositoriesConfigured) {
Properties props = new Properties();
boolean configured = false;
if (useRemoteConfig && remote) {
try {
URL url = new URL("https://ant.apache.org/ivy/repository.properties");
Message.verbose("configuring repositories with " + url);
props.load(URLHandlerRegistry.getDefault().openStream(url));
configured = true;
} catch (Exception ex) {
Message.verbose("unable to use remote repository configuration", ex);
props = new Properties();
}
}
if (!configured) {
InputStream repositoryPropsStream = null;
try {
repositoryPropsStream = getSettingsURL("repository.properties").openStream();
props.load(repositoryPropsStream);
} catch (IOException e) {
Message.error("unable to use internal repository configuration", e);
if (repositoryPropsStream != null) {
try {
repositoryPropsStream.close();
} catch (Exception ex) {
// nothing to do
}
}
}
}
addAllVariables(props, false);
repositoriesConfigured = true;
}
}
public synchronized void typeDefs(InputStream stream) throws IOException {
typeDefs(stream, false);
}
public synchronized void typeDefs(InputStream stream, boolean silentFail) throws IOException {
try {
Properties p = new Properties();
p.load(stream);
typeDefs(p, silentFail);
} finally {
stream.close();
}
}
public synchronized void typeDefs(Properties p) {
typeDefs(p, false);
}
public synchronized void typeDefs(Properties p, boolean silentFail) {
for (Map.Entry<Object, Object> entry : p.entrySet()) {
typeDef(entry.getKey().toString(), entry.getValue().toString(), silentFail);
}
}
    /**
     * Loads the settings from the given XML settings file.
     * <p>
     * Order matters here: settings variables are published first, then the default Ivy user dir
     * is resolved, then default properties are loaded, and only then is the XML parsed, so that
     * the parser can rely on those variables.
     *
     * @param settingsFile the settings file to parse (must be convertible to a URL)
     * @throws ParseException if the settings XML is invalid
     * @throws IOException if the file cannot be read
     * @throws IllegalArgumentException if the file cannot be converted to a URL
     */
    public synchronized void load(File settingsFile) throws ParseException, IOException {
        Message.info(":: loading settings :: file = " + settingsFile);
        long start = System.currentTimeMillis();
        setSettingsVariables(settingsFile);
        // honor an explicitly configured default user dir; otherwise trigger lazy resolution
        if (getVariable("ivy.default.ivy.user.dir") != null) {
            setDefaultIvyUserDir(Checks.checkAbsolute(getVariable("ivy.default.ivy.user.dir"),
                "ivy.default.ivy.user.dir"));
        } else {
            getDefaultIvyUserDir();
        }
        loadDefaultProperties();
        try {
            new XmlSettingsParser(this).parse(settingsFile.toURI().toURL());
        } catch (MalformedURLException e) {
            throw new IllegalArgumentException(
                    "given file cannot be transformed to url: " + settingsFile, e);
        }
        // publish the resolved user dir without overwriting a value set during parsing
        setVariable("ivy.default.ivy.user.dir", getDefaultIvyUserDir().getAbsolutePath(), false);
        Message.verbose("settings loaded (" + (System.currentTimeMillis() - start) + "ms)");
        dumpSettings();
    }
    /**
     * Loads the settings from the given XML settings URL.
     * <p>
     * Mirrors {@link #load(File)}: variables are published and defaults resolved before parsing
     * so the parser can use them.
     *
     * @param settingsURL the settings URL to parse
     * @throws ParseException if the settings XML is invalid
     * @throws IOException if the URL cannot be read
     */
    public synchronized void load(URL settingsURL) throws ParseException, IOException {
        Message.info(":: loading settings :: url = " + settingsURL);
        long start = System.currentTimeMillis();
        setSettingsVariables(settingsURL);
        // honor an explicitly configured default user dir; otherwise trigger lazy resolution
        if (getVariable("ivy.default.ivy.user.dir") != null) {
            setDefaultIvyUserDir(Checks.checkAbsolute(getVariable("ivy.default.ivy.user.dir"),
                "ivy.default.ivy.user.dir"));
        } else {
            getDefaultIvyUserDir();
        }
        loadDefaultProperties();
        new XmlSettingsParser(this).parse(settingsURL);
        // publish the resolved user dir without overwriting a value set during parsing
        setVariable("ivy.default.ivy.user.dir", getDefaultIvyUserDir().getAbsolutePath(), false);
        Message.verbose("settings loaded (" + (System.currentTimeMillis() - start) + "ms)");
        dumpSettings();
    }
    /**
     * Default initialization of settings, useful when you don't want to load your settings from a
     * settings file or URL, but prefer to set them manually. By calling this method you will still
     * have the basic initialization done when loading settings.
     *
     * @throws IOException if something goes wrong
     */
    public synchronized void defaultInit() throws IOException {
        // honor an explicitly configured default user dir; otherwise trigger lazy resolution
        if (getVariable("ivy.default.ivy.user.dir") != null) {
            setDefaultIvyUserDir(Checks.checkAbsolute(getVariable("ivy.default.ivy.user.dir"),
                "ivy.default.ivy.user.dir"));
        } else {
            getDefaultIvyUserDir();
        }
        // force lazy resolution of the default cache dir too
        getDefaultCache();
        loadDefaultProperties();
        setVariable("ivy.default.ivy.user.dir", getDefaultIvyUserDir().getAbsolutePath(), false);
        dumpSettings();
    }

    /** Loads the settings bundled with Ivy ({@code ivysettings.xml}). */
    public synchronized void loadDefault() throws ParseException, IOException {
        load(getDefaultSettingsURL());
    }

    /** Loads the legacy Ivy 1.4 compatible bundled settings. */
    public synchronized void loadDefault14() throws ParseException, IOException {
        load(getDefault14SettingsURL());
    }
    /** Loads the bundled default properties without overwriting existing variables. */
    private void loadDefaultProperties() throws IOException {
        loadProperties(getDefaultPropertiesURL(), false);
    }

    /** @return the URL of the bundled {@code ivy.properties} resource. */
    public static URL getDefaultPropertiesURL() {
        return getSettingsURL("ivy.properties");
    }

    /** @return the URL of the bundled default {@code ivysettings.xml} resource. */
    public static URL getDefaultSettingsURL() {
        return getSettingsURL("ivysettings.xml");
    }

    /** @return the URL of the bundled Ivy 1.4 compatible settings resource. */
    public static URL getDefault14SettingsURL() {
        return getSettingsURL("ivysettings-1.4.xml");
    }

    /**
     * @return the external form of the directory containing the bundled default settings,
     *         without a trailing slash.
     */
    private String getDefaultSettingsDir() {
        String ivysettingsLocation = getDefaultSettingsURL().toExternalForm();
        // strip "ivysettings.xml" plus the preceding slash
        return ivysettingsLocation.substring(0,
            ivysettingsLocation.length() - "ivysettings.xml".length() - 1);
    }

    /** Resolves a bundled settings resource relative to the XML settings parser's package. */
    private static URL getSettingsURL(String file) {
        return XmlSettingsParser.class.getResource(file);
    }
public synchronized void setSettingsVariables(File settingsFile) {
try {
setVariable("ivy.settings.dir", new File(settingsFile.getAbsolutePath()).getParent());
setDeprecatedVariable("ivy.conf.dir", "ivy.settings.dir");
setVariable("ivy.settings.file", settingsFile.getAbsolutePath());
setDeprecatedVariable("ivy.conf.file", "ivy.settings.file");
setVariable("ivy.settings.url", settingsFile.toURI().toURL().toExternalForm());
setDeprecatedVariable("ivy.conf.url", "ivy.settings.url");
setVariable("ivy.settings.dir.url", new File(settingsFile.getAbsolutePath())
.getParentFile().toURI().toURL().toExternalForm());
} catch (MalformedURLException e) {
throw new IllegalArgumentException(
"given file cannot be transformed to url: " + settingsFile, e);
}
}
/**
* Sets a deprecated variable with the value of the new variable
*
* @param deprecatedKey
* the deprecated variable name
* @param newKey
* the new variable name
*/
private void setDeprecatedVariable(String deprecatedKey, String newKey) {
setVariable(deprecatedKey, getVariable(newKey));
}
public synchronized void setSettingsVariables(URL settingsURL) {
String settingsURLStr = settingsURL.toExternalForm();
setVariable("ivy.settings.url", settingsURLStr);
setDeprecatedVariable("ivy.conf.url", "ivy.settings.url");
int slashIndex = settingsURLStr.lastIndexOf('/');
if (slashIndex != -1) {
String dirUrl = settingsURLStr.substring(0, slashIndex);
setVariable("ivy.settings.dir", dirUrl);
setVariable("ivy.settings.dir.url", dirUrl);
setDeprecatedVariable("ivy.conf.dir", "ivy.settings.dir");
} else {
Message.warn("settings url does not contain any slash (/): "
+ "ivy.settings.dir variable not set");
}
}
    /**
     * Logs a summary of the current settings (cache, resolvers, strategies, module rules) at
     * verbose/debug level. Purely informational; has no side effects beyond triggering lazy
     * initialization of the defaults it reads.
     */
    private void dumpSettings() {
        Message.verbose("\tdefault cache: " + getDefaultCache());
        Message.verbose("\tdefault resolver: " + getDefaultResolver());
        Message.debug("\tdefault latest strategy: " + getDefaultLatestStrategy());
        Message.debug("\tdefault conflict manager: " + getDefaultConflictManager());
        Message.debug("\tcircular dependency strategy: " + getCircularDependencyStrategy());
        Message.debug("\tvalidate: " + doValidate());
        Message.debug("\tcheck up2date: " + isCheckUpToDate());
        if (!classpathURLs.isEmpty()) {
            Message.verbose("\t-- " + classpathURLs.size() + " custom classpath urls:");
            for (URL url : classpathURLs) {
                Message.debug("\t\t" + url);
            }
        }
        Message.verbose("\t-- " + resolversMap.size() + " resolvers:");
        for (DependencyResolver resolver : resolversMap.values()) {
            resolver.dumpSettings();
        }
        Message.debug("\tmodule settings:");
        moduleSettings.dump("\t\t");
    }
public synchronized void loadProperties(URL url) throws IOException {
loadProperties(url, true);
}
public synchronized void loadProperties(URL url, boolean overwrite) throws IOException {
loadProperties(url.openStream(), overwrite);
}
public synchronized void loadProperties(File file) throws IOException {
loadProperties(file, true);
}
public synchronized void loadProperties(File file, boolean overwrite) throws IOException {
loadProperties(new FileInputStream(file), overwrite);
}
private void loadProperties(InputStream stream, boolean overwrite) throws IOException {
try {
Properties properties = new Properties();
properties.load(stream);
addAllVariables(properties, overwrite);
} finally {
if (stream != null) {
try {
stream.close();
} catch (IOException e) {
// nothing
}
}
}
}
public synchronized void setVariable(String varName, String value) {
setVariable(varName, value, true);
}
public synchronized void setVariable(String varName, String value, boolean overwrite) {
setVariable(varName, value, overwrite, null, null);
}
public synchronized void setVariable(String varName, String value, boolean overwrite,
String ifSetVar, String unlessSetVar) {
if (ifSetVar != null && variableContainer.getVariable(ifSetVar) == null) {
Message.verbose("Not setting '" + varName + "' to '" + value + "' since '" + ifSetVar
+ "' is not set.");
return;
}
if (unlessSetVar != null && variableContainer.getVariable(unlessSetVar) != null) {
Message.verbose("Not setting '" + varName + "' to '" + value + "' since '"
+ unlessSetVar + "' is set.");
return;
}
variableContainer.setVariable(varName, value, overwrite);
}
public synchronized void addAllVariables(Map<?, ?> variables) {
addAllVariables(variables, true);
}
public synchronized void addAllVariables(Map<?, ?> variables, boolean overwrite) {
for (Map.Entry<?, ?> entry : variables.entrySet()) {
Object val = entry.getValue();
if (val == null || val instanceof String) {
setVariable(entry.getKey().toString(), (String) val, overwrite);
}
}
}
/**
* Substitute variables in the given string by their value found in the current set of variables
*
* @param str
* the string in which substitution should be made
* @return the string where all current ivy variables have been substituted by their value If
* the input str doesn't use any variable, the same object is returned
*/
public synchronized String substitute(String str) {
return IvyPatternHelper.substituteVariables(str, variableContainer);
}
/**
* Substitute variables in the given map values by their value found in the current set of
* variables
*
* @param strings
* the map of strings in which substitution should be made
* @return a new map of strings in which all current ivy variables in values have been
* substituted by their value
*/
public synchronized Map<String, String> substitute(Map<String, String> strings) {
Map<String, String> substituted = new LinkedHashMap<>();
for (Map.Entry<String, String> entry : strings.entrySet()) {
substituted.put(entry.getKey(), substitute(entry.getValue()));
}
return substituted;
}
/**
* Returns the variables loaded in configuration file. Those variables may better be seen as ant
* properties
*
* @return IvyVariableContainer
*/
public synchronized IvyVariableContainer getVariables() {
return variableContainer;
}
public synchronized Class<?> typeDef(String name, String className) {
return typeDef(name, className, false);
}
public synchronized Class<?> typeDef(String name, String className, boolean silentFail) {
Class<?> clazz = classForName(className, silentFail);
if (clazz != null) {
typeDefs.put(name, clazz);
}
return clazz;
}
private Class<?> classForName(String className, boolean silentFail) {
try {
return getClassLoader().loadClass(className);
} catch (ClassNotFoundException e) {
if (silentFail) {
Message.info("impossible to define new type: class not found: " + className
+ " in " + classpathURLs + " nor Ivy classloader");
return null;
} else {
throw new RuntimeException("impossible to define new type: class not found: "
+ className + " in " + classpathURLs + " nor Ivy classloader");
}
}
}
private ClassLoader getClassLoader() {
if (classloader == null) {
if (classpathURLs.isEmpty()) {
classloader = Ivy.class.getClassLoader();
} else {
classloader = new URLClassLoader(
classpathURLs.toArray(new URL[classpathURLs.size()]),
Ivy.class.getClassLoader());
}
}
return classloader;
}
public synchronized void addClasspathURL(URL url) {
classpathURLs.add(url);
classloader = null;
}
public synchronized Map<String, Class<?>> getTypeDefs() {
return typeDefs;
}
public synchronized Class<?> getTypeDef(String name) {
return typeDefs.get(name);
}
    // methods which match ivy conf method signature specs
    /** Called by the settings parser for each configured resolver element. */
    public synchronized void addConfigured(DependencyResolver resolver) {
        addResolver(resolver);
    }

    /** Called by the settings parser for each configured module descriptor parser. */
    public synchronized void addConfigured(ModuleDescriptorParser parser) {
        ModuleDescriptorParserRegistry.getInstance().addParser(parser);
    }

    /** Called by the settings parser for each configured signature generator. */
    public synchronized void addConfigured(SignatureGenerator generator) {
        addSignatureGenerator(generator);
    }

    /** Initializes and registers a signature generator under its own name. */
    public synchronized void addSignatureGenerator(SignatureGenerator generator) {
        init(generator);
        signatureGenerators.put(generator.getName(), generator);
    }

    /** @return the signature generator registered under the given name, or <code>null</code>. */
    public synchronized SignatureGenerator getSignatureGenerator(String name) {
        return signatureGenerators.get(name);
    }
public synchronized void addResolver(DependencyResolver resolver) {
if (resolver == null) {
throw new NullPointerException("null resolver");
}
init(resolver);
resolversMap.put(resolver.getName(), resolver);
if (resolver instanceof ChainResolver) {
List<DependencyResolver> subresolvers = ((ChainResolver) resolver).getResolvers();
for (DependencyResolver dr : subresolvers) {
addResolver(dr);
}
} else if (resolver instanceof DualResolver) {
DependencyResolver ivyResolver = ((DualResolver) resolver).getIvyResolver();
if (ivyResolver != null) {
addResolver(ivyResolver);
}
DependencyResolver artifactResolver = ((DualResolver) resolver).getArtifactResolver();
if (artifactResolver != null) {
addResolver(artifactResolver);
}
}
}
public synchronized void setDefaultCache(File cacheDirectory) {
setVariable("ivy.cache.dir", cacheDirectory.getAbsolutePath(), false);
defaultCache = cacheDirectory;
if (defaultRepositoryCacheManager != null) {
if ("default-cache".equals(defaultRepositoryCacheManager.getName())
&& defaultRepositoryCacheManager instanceof DefaultRepositoryCacheManager) {
((DefaultRepositoryCacheManager) defaultRepositoryCacheManager)
.setBasedir(defaultCache);
}
}
}
public synchronized void setDefaultResolver(String resolverName) {
checkResolverName(resolverName);
if (resolverName != null && !resolverName.equals(defaultResolverName)) {
defaultResolver = null;
}
defaultResolverName = resolverName;
}
private void checkResolverName(String resolverName) {
if (resolverName != null && !resolversMap.containsKey(resolverName)) {
throw new IllegalArgumentException("no resolver found called " + resolverName
+ ": check your settings");
}
}
    /**
     * regular expressions as explained in Pattern class may be used in attributes
     *
     * @param attributes Map
     * @param matcher PatternMatcher
     * @param resolverName String
     * @param branch String
     * @param conflictManager String
     * @param resolveMode String
     */
    public synchronized void addModuleConfiguration(Map<String, String> attributes,
            PatternMatcher matcher, String resolverName, String branch, String conflictManager,
            String resolveMode) {
        // fail fast on an unknown resolver before recording the rule
        checkResolverName(resolverName);
        moduleSettings.defineRule(new MapMatcher(attributes, matcher), new ModuleSettings(
                resolverName, branch, conflictManager, resolveMode));
    }

    /**
     * Return the canonical form of a filename.
     * <p>
     * If the specified file name is relative it is resolved with respect to the settings's base
     * directory.
     *
     * @param fileName
     *            The name of the file to resolve. Must not be <code>null</code>.
     *
     * @return the resolved File.
     *
     */
    public synchronized File resolveFile(String fileName) {
        return FileUtil.resolveFile(baseDir, fileName);
    }

    /**
     * Sets the settings base directory (made absolute) and publishes it as {@code ivy.basedir};
     * {@code basedir} is also set but without overwriting an existing value.
     *
     * @param baseDir the new base directory
     */
    public synchronized void setBaseDir(File baseDir) {
        this.baseDir = baseDir.getAbsoluteFile();
        setVariable("ivy.basedir", this.baseDir.getAbsolutePath());
        setVariable("basedir", this.baseDir.getAbsolutePath(), false);
    }

    /** @return the settings base directory (always absolute once set). */
    public synchronized File getBaseDir() {
        return baseDir;
    }
public synchronized File getDefaultIvyUserDir() {
if (defaultUserDir == null) {
if (getVariable("ivy.home") != null) {
setDefaultIvyUserDir(Checks.checkAbsolute(getVariable("ivy.home"), "ivy.home"));
Message.verbose("using ivy.default.ivy.user.dir variable for default ivy user dir: "
+ defaultUserDir);
} else {
setDefaultIvyUserDir(new File(System.getProperty("user.home"), ".ivy2"));
Message.verbose("no default ivy user dir defined: set to " + defaultUserDir);
}
}
return defaultUserDir;
}
public synchronized void setDefaultIvyUserDir(File defaultUserDir) {
this.defaultUserDir = defaultUserDir;
setVariable("ivy.default.ivy.user.dir", this.defaultUserDir.getAbsolutePath());
setVariable("ivy.home", this.defaultUserDir.getAbsolutePath());
}
public synchronized File getDefaultCache() {
if (defaultCache == null) {
String cache = getVariable("ivy.cache.dir");
if (cache != null) {
defaultCache = Checks.checkAbsolute(cache, "ivy.cache.dir");
} else {
setDefaultCache(new File(getDefaultIvyUserDir(), "cache"));
Message.verbose("no default cache defined: set to " + defaultCache);
}
}
return defaultCache;
}
public synchronized void setDefaultRepositoryCacheBasedir(String repositoryCacheRoot) {
setVariable("ivy.cache.repository", repositoryCacheRoot, true);
if (defaultRepositoryCacheManager != null
&& "default-cache".equals(defaultRepositoryCacheManager.getName())
&& defaultRepositoryCacheManager instanceof DefaultRepositoryCacheManager) {
((DefaultRepositoryCacheManager) defaultRepositoryCacheManager)
.setBasedir(getDefaultRepositoryCacheBasedir());
}
}
public synchronized void setDefaultResolutionCacheBasedir(String resolutionCacheRoot) {
setVariable("ivy.cache.resolution", resolutionCacheRoot, true);
if (resolutionCacheManager != null
&& resolutionCacheManager instanceof DefaultResolutionCacheManager) {
((DefaultResolutionCacheManager) resolutionCacheManager)
.setBasedir(getDefaultResolutionCacheBasedir());
}
}
public synchronized File getDefaultRepositoryCacheBasedir() {
String repositoryCacheRoot = getVariable("ivy.cache.repository");
if (repositoryCacheRoot != null) {
return Checks.checkAbsolute(repositoryCacheRoot, "ivy.cache.repository");
} else {
return getDefaultCache();
}
}
public synchronized File getDefaultResolutionCacheBasedir() {
String resolutionCacheRoot = getVariable("ivy.cache.resolution");
if (resolutionCacheRoot != null) {
return Checks.checkAbsolute(resolutionCacheRoot, "ivy.cache.resolution");
} else {
return getDefaultCache();
}
}
    /**
     * Sets a resolver that overrides all module-to-resolver rules: when set, every resolution
     * uses this resolver regardless of module configuration.
     *
     * @param resolver the dictator resolver, or <code>null</code> to disable
     */
    public synchronized void setDictatorResolver(DependencyResolver resolver) {
        dictatorResolver = resolver;
    }

    /**
     * Returns the dictator resolver, wrapping it in a workspace chain on first access when a
     * workspace resolver is configured.
     */
    private DependencyResolver getDictatorResolver() {
        if (dictatorResolver == null) {
            return null;
        }
        // lazily wrap so workspace artifacts take precedence; the wrap happens at most once
        if (workspaceResolver != null && !(dictatorResolver instanceof WorkspaceChainResolver)) {
            dictatorResolver = new WorkspaceChainResolver(this, dictatorResolver, workspaceResolver);
        }
        return dictatorResolver;
    }

    /**
     * Returns the resolver to use for the given module revision: the dictator resolver when set,
     * otherwise the resolver selected by the module settings rules.
     *
     * @param mrid the module revision being resolved
     * @return the resolver to use
     */
    public synchronized DependencyResolver getResolver(ModuleRevisionId mrid) {
        DependencyResolver r = getDictatorResolver();
        if (r != null) {
            return r;
        }
        String resolverName = getResolverName(mrid);
        return getResolver(resolverName);
    }
    /** @return whether a resolver is registered under the given name. */
    public synchronized boolean hasResolver(String resolverName) {
        return resolversMap.containsKey(resolverName);
    }

    /**
     * Returns the resolver registered under the given name, honoring the dictator resolver and
     * wrapping in a workspace chain when a workspace resolver is configured.
     *
     * @param resolverName the name of the resolver to look up
     * @return the resolver, or <code>null</code> if unknown (an error is logged)
     */
    public synchronized DependencyResolver getResolver(String resolverName) {
        DependencyResolver r = getDictatorResolver();
        if (r != null) {
            return r;
        }
        DependencyResolver resolver = resolversMap.get(resolverName);
        if (resolver == null) {
            Message.error("unknown resolver " + resolverName);
        } else if (workspaceResolver != null && !(resolver instanceof WorkspaceChainResolver)) {
            resolver = new WorkspaceChainResolver(this, resolver, workspaceResolver);
            // NOTE(review): the wrapper is stored under both its own name and the requested
            // name — presumably these can differ; confirm whether both puts are intentional
            resolversMap.put(resolver.getName(), resolver);
            resolversMap.put(resolverName, resolver);
        }
        return resolver;
    }
    /**
     * Returns the default resolver: the dictator resolver when set, otherwise the resolver named
     * by {@link #setDefaultResolver(String)} (resolved lazily and wrapped in a workspace chain
     * when a workspace resolver is configured).
     *
     * @return the default resolver, or <code>null</code> when none is configured
     */
    public synchronized DependencyResolver getDefaultResolver() {
        DependencyResolver r = getDictatorResolver();
        if (r != null) {
            return r;
        }
        if (defaultResolver == null) {
            defaultResolver = resolversMap.get(defaultResolverName);
        }
        if (workspaceResolver != null && !(defaultResolver instanceof WorkspaceChainResolver)) {
            defaultResolver = new WorkspaceChainResolver(this, defaultResolver, workspaceResolver);
        }
        return defaultResolver;
    }

    /**
     * Returns the name of the resolver to use for the given module revision: the first module
     * settings rule with a resolver name that matches, otherwise the default resolver name.
     *
     * @param mrid the module revision being resolved
     * @return the resolver name to use
     */
    public synchronized String getResolverName(ModuleRevisionId mrid) {
        ModuleSettings ms = moduleSettings.getRule(mrid, new Filter<ModuleSettings>() {
            public boolean accept(ModuleSettings o) {
                return o.getResolverName() != null;
            }
        });
        return ms == null ? defaultResolverName : ms.getResolverName();
    }
public synchronized String getDefaultBranch(ModuleId moduleId) {
ModuleSettings ms = moduleSettings.getRule(moduleId, new Filter<ModuleSettings>() {
public boolean accept(ModuleSettings o) {
return o.getBranch() != null;
}
});
return ms == null ? getDefaultBranch() : ms.getBranch();
}
public synchronized String getDefaultBranch() {
return defaultBranch;
}
public synchronized void setDefaultBranch(String defaultBranch) {
this.defaultBranch = defaultBranch;
}
public synchronized ConflictManager getConflictManager(ModuleId moduleId) {
ModuleSettings ms = moduleSettings.getRule(moduleId, new Filter<ModuleSettings>() {
public boolean accept(ModuleSettings o) {
return o.getConflictManager() != null;
}
});
if (ms == null) {
return getDefaultConflictManager();
} else {
ConflictManager cm = getConflictManager(ms.getConflictManager());
if (cm == null) {
throw new IllegalStateException("ivy badly configured: unknown conflict manager "
+ ms.getConflictManager());
}
return cm;
}
}
public synchronized String getResolveMode(ModuleId moduleId) {
ModuleSettings ms = moduleSettings.getRule(moduleId, new Filter<ModuleSettings>() {
public boolean accept(ModuleSettings o) {
return o.getResolveMode() != null;
}
});
return ms == null ? getDefaultResolveMode() : ms.getResolveMode();
}
public synchronized String getDefaultResolveMode() {
return defaultResolveMode;
}
public synchronized void setDefaultResolveMode(String defaultResolveMode) {
this.defaultResolveMode = defaultResolveMode;
}
    /** Called by the settings parser for each configured conflict manager. */
    public synchronized void addConfigured(ConflictManager cm) {
        addConflictManager(cm.getName(), cm);
    }

    /**
     * Returns the conflict manager registered under the given name; "default" maps to the
     * default conflict manager.
     *
     * @param name the conflict manager name
     * @return the conflict manager, or <code>null</code> if unknown
     */
    public synchronized ConflictManager getConflictManager(String name) {
        if ("default".equals(name)) {
            return getDefaultConflictManager();
        }
        return conflictsManager.get(name);
    }

    /** Initializes and registers a conflict manager under the given name. */
    public synchronized void addConflictManager(String name, ConflictManager cm) {
        init(cm);
        conflictsManager.put(name, cm);
    }

    /** Called by the settings parser for each configured latest strategy. */
    public synchronized void addConfigured(LatestStrategy latest) {
        addLatestStrategy(latest.getName(), latest);
    }

    /**
     * Returns the latest strategy registered under the given name; "default" maps to the default
     * latest strategy. When a workspace resolver is configured, the strategy is wrapped in a
     * {@link WorkspaceLatestStrategy} on first access and the wrapper is cached back.
     * NOTE(review): an unknown name yields a wrapper around <code>null</code> here rather than
     * <code>null</code> itself when a workspace resolver is set — confirm this is intended.
     *
     * @param name the strategy name
     * @return the latest strategy, or <code>null</code> if unknown
     */
    public synchronized LatestStrategy getLatestStrategy(String name) {
        if ("default".equals(name)) {
            return getDefaultLatestStrategy();
        }
        LatestStrategy strategy = latestStrategies.get(name);
        if (workspaceResolver != null && !(strategy instanceof WorkspaceLatestStrategy)) {
            strategy = new WorkspaceLatestStrategy(strategy);
            latestStrategies.put(name, strategy);
        }
        return strategy;
    }

    /** Initializes and registers a latest strategy under the given name. */
    public synchronized void addLatestStrategy(String name, LatestStrategy latest) {
        init(latest);
        latestStrategies.put(name, latest);
    }

    /** Called by the settings parser for each configured lock strategy. */
    public synchronized void addConfigured(LockStrategy lockStrategy) {
        addLockStrategy(lockStrategy.getName(), lockStrategy);
    }

    /**
     * Returns the lock strategy registered under the given name; "default" maps to the default
     * lock strategy.
     *
     * @param name the strategy name
     * @return the lock strategy, or <code>null</code> if unknown
     */
    public synchronized LockStrategy getLockStrategy(String name) {
        if ("default".equals(name)) {
            return getDefaultLockStrategy();
        }
        return lockStrategies.get(name);
    }

    /** Initializes and registers a lock strategy under the given name. */
    public synchronized void addLockStrategy(String name, LockStrategy lockStrategy) {
        init(lockStrategy);
        lockStrategies.put(name, lockStrategy);
    }
    /** Called by the settings parser for each configured namespace. */
    public synchronized void addConfigured(Namespace ns) {
        addNamespace(ns);
    }

    /**
     * Returns the namespace registered under the given name; "system" maps to the built-in
     * system namespace.
     *
     * @param name the namespace name
     * @return the namespace, or <code>null</code> if unknown
     */
    public synchronized Namespace getNamespace(String name) {
        if ("system".equals(name)) {
            return getSystemNamespace();
        }
        return namespaces.get(name);
    }

    /** @return the built-in system namespace (a shared constant). */
    public final Namespace getSystemNamespace() {
        return Namespace.SYSTEM_NAMESPACE;
    }

    /** Initializes and registers a namespace under its own name. */
    public synchronized void addNamespace(Namespace ns) {
        init(ns);
        namespaces.put(ns.getName(), ns);
    }

    /**
     * Called by the settings parser for each configured named timeout constraint. A
     * <code>null</code> constraint is ignored; a constraint without a name is rejected.
     */
    public void addConfigured(final NamedTimeoutConstraint timeoutConstraint) {
        if (timeoutConstraint == null) {
            return;
        }
        final String name = timeoutConstraint.getName();
        StringUtils.assertNotNullNorEmpty(name, "Name of a timeout constraint cannot be null or empty string");
        this.timeoutConstraints.put(name, timeoutConstraint);
    }

    /** @return the timeout constraint registered under the given name, or <code>null</code>. */
    @Override
    public TimeoutConstraint getTimeoutConstraint(final String name) {
        return this.timeoutConstraints.get(name);
    }
public synchronized void addConfigured(PatternMatcher m) {
addMatcher(m);
}
public synchronized PatternMatcher getMatcher(String name) {
return matchers.get(name);
}
public synchronized void addMatcher(PatternMatcher m) {
init(m);
matchers.put(m.getName(), m);
}
public synchronized void addConfigured(RepositoryCacheManager c) {
addRepositoryCacheManager(c);
}
public synchronized RepositoryCacheManager getRepositoryCacheManager(String name) {
return repositoryCacheManagers.get(name);
}
public synchronized void addRepositoryCacheManager(RepositoryCacheManager c) {
init(c);
repositoryCacheManagers.put(c.getName(), c);
}
public synchronized RepositoryCacheManager[] getRepositoryCacheManagers() {
return repositoryCacheManagers.values().toArray(
new RepositoryCacheManager[repositoryCacheManagers.size()]);
}
public synchronized void addConfigured(ReportOutputter outputter) {
addReportOutputter(outputter);
}
public synchronized ReportOutputter getReportOutputter(String name) {
return reportOutputters.get(name);
}
public synchronized void addReportOutputter(ReportOutputter outputter) {
init(outputter);
reportOutputters.put(outputter.getName(), outputter);
}
public synchronized ReportOutputter[] getReportOutputters() {
return reportOutputters.values().toArray(new ReportOutputter[reportOutputters.size()]);
}
    /** Called by the settings parser for each configured version matcher. */
    public synchronized void addConfigured(VersionMatcher vmatcher) {
        addVersionMatcher(vmatcher);
    }

    /** @return the version matcher registered under the given name, or <code>null</code>. */
    public synchronized VersionMatcher getVersionMatcher(String name) {
        return versionMatchers.get(name);
    }

    /**
     * Initializes and registers a version matcher, also adding it to the chain version matcher
     * used for resolution.
     * <p>
     * Ordering here is deliberate: on the very first call the chain is created and an exact
     * matcher is added through a recursive call BEFORE the new matcher joins the chain, so the
     * exact matcher always sits first in the chain.
     */
    public synchronized void addVersionMatcher(VersionMatcher vmatcher) {
        init(vmatcher);
        versionMatchers.put(vmatcher.getName(), vmatcher);
        // bootstrap the chain lazily; the recursive call re-enters this method
        if (versionMatcher == null) {
            versionMatcher = new ChainVersionMatcher();
            addVersionMatcher(new ExactVersionMatcher());
        }
        if (versionMatcher instanceof ChainVersionMatcher) {
            ChainVersionMatcher chain = (ChainVersionMatcher) versionMatcher;
            chain.add(vmatcher);
        }
    }

    /** @return a snapshot array of all registered version matchers. */
    public synchronized VersionMatcher[] getVersionMatchers() {
        return versionMatchers.values().toArray(new VersionMatcher[versionMatchers.size()]);
    }

    /**
     * @return the chain version matcher used for resolution, configuring the default matchers on
     *         first access.
     */
    public synchronized VersionMatcher getVersionMatcher() {
        if (versionMatcher == null) {
            configureDefaultVersionMatcher();
        }
        return versionMatcher;
    }

    /** Registers the standard version matchers (latest, sub-version, version range). */
    public synchronized void configureDefaultVersionMatcher() {
        addVersionMatcher(new LatestVersionMatcher());
        addVersionMatcher(new SubVersionMatcher());
        addVersionMatcher(new VersionRangeMatcher());
    }
public synchronized CircularDependencyStrategy getCircularDependencyStrategy() {
if (circularDependencyStrategy == null) {
circularDependencyStrategy = getCircularDependencyStrategy("default");
}
return circularDependencyStrategy;
}
public synchronized CircularDependencyStrategy getCircularDependencyStrategy(String name) {
if ("default".equals(name)) {
name = "warn";
}
return circularDependencyStrategies.get(name);
}
public synchronized void setCircularDependencyStrategy(CircularDependencyStrategy strategy) {
circularDependencyStrategy = strategy;
}
public synchronized void addConfigured(CircularDependencyStrategy strategy) {
addCircularDependencyStrategy(strategy);
}
private void addCircularDependencyStrategy(CircularDependencyStrategy strategy) {
circularDependencyStrategies.put(strategy.getName(), strategy);
}
private void configureDefaultCircularDependencyStrategies() {
addCircularDependencyStrategy(WarnCircularDependencyStrategy.getInstance());
addCircularDependencyStrategy(ErrorCircularDependencyStrategy.getInstance());
addCircularDependencyStrategy(IgnoreCircularDependencyStrategy.getInstance());
}
    /** @return the status manager, creating the default instance on first access. */
    public synchronized StatusManager getStatusManager() {
        if (statusManager == null) {
            statusManager = StatusManager.newDefaultInstance();
        }
        return statusManager;
    }

    /** Sets the status manager. */
    public void setStatusManager(StatusManager statusManager) {
        this.statusManager = statusManager;
    }

    /**
     * Returns the file names of the files that should be ignored when creating a file listing.
     *
     * @return String[]
     */
    public synchronized String[] getIgnorableFilenames() {
        return listingIgnore.toArray(new String[listingIgnore.size()]);
    }

    /**
     * Filters the names list by removing all names that should be ignored as defined by the
     * listing ignore list
     *
     * @param names ditto
     */
    public synchronized void filterIgnore(Collection<String> names) {
        names.removeAll(listingIgnore);
    }

    /** @return whether up-to-date checking is enabled. */
    public synchronized boolean isCheckUpToDate() {
        return checkUpToDate;
    }

    /** Enables or disables up-to-date checking. */
    public synchronized void setCheckUpToDate(boolean checkUpToDate) {
        this.checkUpToDate = checkUpToDate;
    }

    /** @return whether Ivy file validation is enabled. */
    public synchronized boolean doValidate() {
        return validate;
    }

    /** Enables or disables Ivy file validation. */
    public synchronized void setValidate(boolean validate) {
        this.validate = validate;
    }
public synchronized String getVariable(String name) {
return variableContainer.getVariable(name);
}
/**
* Returns a variable as boolean value.
* @param name name of the variable
* @param valueIfUnset value if the variable is unset
* @return <tt>true</tt> if the variable is <tt>'true'</tt> (ignoring case)
* or the value of <i>valueIfUnset</i> if the variable is <tt>null</tt>
*/
public synchronized boolean getVariableAsBoolean(String name, boolean valueIfUnset) {
String var = getVariable(name);
return var == null ? valueIfUnset : Boolean.valueOf(var);
}
public synchronized ConflictManager getDefaultConflictManager() {
if (defaultConflictManager == null) {
defaultConflictManager = new LatestConflictManager(getDefaultLatestStrategy());
((LatestConflictManager) defaultConflictManager).setSettings(this);
}
return defaultConflictManager;
}
public synchronized void setDefaultConflictManager(ConflictManager defaultConflictManager) {
this.defaultConflictManager = defaultConflictManager;
}
public synchronized LatestStrategy getDefaultLatestStrategy() {
if (defaultLatestStrategy == null) {
defaultLatestStrategy = new LatestRevisionStrategy();
}
if (workspaceResolver != null
&& !(defaultLatestStrategy instanceof WorkspaceLatestStrategy)) {
defaultLatestStrategy = new WorkspaceLatestStrategy(defaultLatestStrategy);
}
return defaultLatestStrategy;
}
public synchronized void setDefaultLatestStrategy(LatestStrategy defaultLatestStrategy) {
this.defaultLatestStrategy = defaultLatestStrategy;
}
public synchronized LockStrategy getDefaultLockStrategy() {
if (defaultLockStrategy == null) {
defaultLockStrategy = new NoLockStrategy();
}
return defaultLockStrategy;
}
public synchronized void setDefaultLockStrategy(LockStrategy defaultLockStrategy) {
this.defaultLockStrategy = defaultLockStrategy;
}
/**
 * Returns the repository cache manager used when none is configured, lazily
 * creating one named "default-cache" in the default repository cache basedir
 * and registering it.
 */
public synchronized RepositoryCacheManager getDefaultRepositoryCacheManager() {
    if (defaultRepositoryCacheManager == null) {
        defaultRepositoryCacheManager = new DefaultRepositoryCacheManager("default-cache",
                this, getDefaultRepositoryCacheBasedir());
        addRepositoryCacheManager(defaultRepositoryCacheManager);
    }
    return defaultRepositoryCacheManager;
}

/** Overrides the default repository cache manager. */
public synchronized void setDefaultRepositoryCacheManager(RepositoryCacheManager cache) {
    this.defaultRepositoryCacheManager = cache;
}

/** Returns the resolution cache manager, lazily created in the default resolution cache basedir. */
public synchronized ResolutionCacheManager getResolutionCacheManager() {
    if (resolutionCacheManager == null) {
        resolutionCacheManager = new DefaultResolutionCacheManager(
                getDefaultResolutionCacheBasedir());
        init(resolutionCacheManager);
    }
    return resolutionCacheManager;
}

/** Overrides the resolution cache manager. */
public synchronized void setResolutionCacheManager(ResolutionCacheManager resolutionCacheManager) {
    this.resolutionCacheManager = resolutionCacheManager;
}

/** Registers a trigger, injecting these settings into it first. */
public synchronized void addTrigger(Trigger trigger) {
    init(trigger);
    triggers.add(trigger);
}

/** Returns the live list of registered triggers (not a copy). */
public synchronized List<Trigger> getTriggers() {
    return triggers;
}

/** Adds a trigger; same as {@link #addTrigger(Trigger)}. */
public synchronized void addConfigured(Trigger trigger) {
    addTrigger(trigger);
}

/** Returns whether remote configuration should be used. */
public synchronized boolean isUseRemoteConfig() {
    return useRemoteConfig;
}

/** Sets whether remote configuration should be used. */
public synchronized void setUseRemoteConfig(boolean useRemoteConfig) {
    this.useRemoteConfig = useRemoteConfig;
}
/** Returns the "ivy.log.modules.in.use" variable as a boolean (default true). */
public synchronized boolean logModulesInUse() {
    return getVariableAsBoolean("ivy.log.modules.in.use", true);
}

/** Returns the "ivy.log.module.when.found" variable as a boolean (default true). */
public synchronized boolean logModuleWhenFound() {
    return getVariableAsBoolean("ivy.log.module.when.found", true);
}

/** Returns the "ivy.log.resolved.revision" variable as a boolean (default true). */
public synchronized boolean logResolvedRevision() {
    return getVariableAsBoolean("ivy.log.resolved.revision", true);
}

/** Returns the "ivy.log.conflict.resolution" variable (default false); cached after the first read. */
public synchronized boolean debugConflictResolution() {
    if (debugConflictResolution == null) {
        debugConflictResolution = getVariableAsBoolean("ivy.log.conflict.resolution", false);
    }
    return debugConflictResolution;
}

/** Returns the "ivy.log.locking" variable (default false); cached after the first read. */
public synchronized boolean debugLocking() {
    if (debugLocking == null) {
        debugLocking = getVariableAsBoolean("ivy.log.locking", false);
    }
    return debugLocking;
}

/** Returns the "ivy.log.memory" variable (default false); cached after the first read. */
public synchronized boolean dumpMemoryUsage() {
    if (dumpMemoryUsage == null) {
        dumpMemoryUsage = getVariableAsBoolean("ivy.log.memory", false);
    }
    return dumpMemoryUsage;
}

/** Returns whether not-converted exclusion rules are logged. */
public synchronized boolean logNotConvertedExclusionRule() {
    return logNotConvertedExclusionRule;
}

/** Sets whether not-converted exclusion rules are logged. */
public synchronized void setLogNotConvertedExclusionRule(boolean logNotConvertedExclusionRule) {
    this.logNotConvertedExclusionRule = logNotConvertedExclusionRule;
}
/**
 * Injects these settings into the given object when it accepts them.
 * NOTE(review): DependencyResolver is checked separately, presumably because it
 * does not extend IvySettingsAware — confirm against the interface hierarchy.
 */
private void init(Object obj) {
    if (obj instanceof IvySettingsAware) {
        ((IvySettingsAware) obj).setSettings(this);
    } else if (obj instanceof DependencyResolver) {
        ((DependencyResolver) obj).setSettings(this);
    }
}
/**
 * Per-module override settings: which resolver, branch, conflict manager and
 * resolve mode apply to a matching module. Each value may be null, meaning
 * "no override" for that aspect.
 */
private static class ModuleSettings {
    /** Name of the resolver to use, or null. */
    private final String resolverName;
    /** Branch override, or null. */
    private final String branch;
    /** Conflict manager name override, or null. */
    private final String conflictManager;
    /** Resolve mode override, or null. */
    private final String resolveMode;

    public ModuleSettings(String resolver, String branchName, String conflictMgr,
            String resolveMode) {
        this.resolverName = resolver;
        this.branch = branchName;
        this.conflictManager = conflictMgr;
        this.resolveMode = resolveMode;
    }

    @Override
    public String toString() {
        // Fix: the previous implementation concatenated the segments with no
        // separator, yielding run-together diagnostics such as
        // "resolver: Xbranch: Y". Segments are now space-separated.
        StringBuilder text = new StringBuilder();
        append(text, "resolver", resolverName);
        append(text, "branch", branch);
        append(text, "conflictManager", conflictManager);
        append(text, "resolveMode", resolveMode);
        return text.toString();
    }

    /** Appends "label: value" when value is set, space-separating segments. */
    private static void append(StringBuilder text, String label, String value) {
        if (value != null) {
            if (text.length() > 0) {
                text.append(' ');
            }
            text.append(label).append(": ").append(value);
        }
    }

    public String getBranch() {
        return branch;
    }

    public String getResolverName() {
        return resolverName;
    }

    public String getConflictManager() {
        return conflictManager;
    }

    public String getResolveMode() {
        return resolveMode;
    }
}
/** Returns the INTERRUPT_TIMEOUT constant. */
public final long getInterruptTimeout() {
    return INTERRUPT_TIMEOUT;
}

/** Returns all registered dependency resolvers (live view of the internal map). */
public synchronized Collection<DependencyResolver> getResolvers() {
    return resolversMap.values();
}

/** Returns the names of all registered dependency resolvers (live view). */
public synchronized Collection<String> getResolverNames() {
    return resolversMap.keySet();
}

/** Returns the names of all registered matchers (live view). */
public synchronized Collection<String> getMatcherNames() {
    return matchers.keySet();
}

/** Returns the container holding all Ivy variables. */
public synchronized IvyVariableContainer getVariableContainer() {
    return variableContainer;
}

/**
 * Use a different variable container.
 *
 * @param variables IvyVariableContainer used for all subsequent variable lookups
 */
public synchronized void setVariableContainer(IvyVariableContainer variables) {
    variableContainer = variables;
}

/** Returns a resolver for relative URLs; a fresh NormalRelativeUrlResolver on every call. */
public synchronized RelativeUrlResolver getRelativeUrlResolver() {
    return new NormalRelativeUrlResolver();
}
/** Sets the cache pattern for Ivy files, checking it via CacheUtil.checkCachePattern first. */
public synchronized void setDefaultCacheIvyPattern(String defaultCacheIvyPattern) {
    CacheUtil.checkCachePattern(defaultCacheIvyPattern);
    this.defaultCacheIvyPattern = defaultCacheIvyPattern;
}

/** Returns the cache pattern used for Ivy files. */
public synchronized String getDefaultCacheIvyPattern() {
    return defaultCacheIvyPattern;
}

/** Sets the cache pattern for artifacts, checking it via CacheUtil.checkCachePattern first. */
public synchronized void setDefaultCacheArtifactPattern(String defaultCacheArtifactPattern) {
    CacheUtil.checkCachePattern(defaultCacheArtifactPattern);
    this.defaultCacheArtifactPattern = defaultCacheArtifactPattern;
}

/** Returns the cache pattern used for artifacts. */
public synchronized String getDefaultCacheArtifactPattern() {
    return defaultCacheArtifactPattern;
}

/** Sets the default value of the useOrigin option. */
public synchronized void setDefaultUseOrigin(boolean useOrigin) {
    defaultUseOrigin = useOrigin;
}

/** Returns the default value of the useOrigin option. */
public synchronized boolean isDefaultUseOrigin() {
    return defaultUseOrigin;
}

/** Logs a deprecation warning for the old resolve-time useOrigin option and enables it. */
public synchronized void useDeprecatedUseOrigin() {
    Message.deprecated("useOrigin option is deprecated when calling resolve, use useOrigin"
            + " setting on the cache implementation instead");
    setDefaultUseOrigin(true);
}
/**
 * Validates the settings, throwing an {@link IllegalStateException} if the current state is not
 * valid.
 *
 * @throws IllegalStateException
 *             if the settings is not valid.
 */
public synchronized void validate() {
    validateAll(resolversMap.values());
    validateAll(conflictsManager.values());
    validateAll(latestStrategies.values());
    validateAll(lockStrategies.values());
    validateAll(repositoryCacheManagers.values());
    validateAll(reportOutputters.values());
    validateAll(circularDependencyStrategies.values());
    validateAll(versionMatchers.values());
    validateAll(namespaces.values());
}

/**
 * Validates all {@link Validatable} objects in the collection.
 *
 * @param values
 *            the collection of objects to validate.
 * @throws IllegalStateException
 *             if any of the objects is not valid.
 */
private void validateAll(Collection<?> values) {
    // Entries that are not Validatable are silently accepted.
    for (Object object : values) {
        if (object instanceof Validatable) {
            ((Validatable) object).validate();
        }
    }
}
/** Returns the namespace used as context; always the system namespace here. */
public Namespace getContextNamespace() {
    return Namespace.SYSTEM_NAMESPACE;
}

/** Registers an archive packing type, injecting these settings into it first. */
public synchronized void addConfigured(ArchivePacking packing) {
    init(packing);
    packingRegistry.register(packing);
}

/** Returns the registry of known archive packing types. */
public PackingRegistry getPackingRegistry() {
    return packingRegistry;
}

/**
 * Installs a workspace resolver. A dedicated repository cache manager named
 * "workspace-resolver-cache-" plus the resolver name is created under the
 * default cache directory, with modification checking and origin use enabled.
 * NOTE(review): unlike most mutators in this class, this method is not
 * synchronized — confirm that is intended.
 */
public void addConfigured(AbstractWorkspaceResolver workspaceResolver) {
    this.workspaceResolver = workspaceResolver;
    if (workspaceResolver != null) {
        workspaceResolver.setSettings(this);
        DefaultRepositoryCacheManager cacheManager = new DefaultRepositoryCacheManager();
        String cacheName = "workspace-resolver-cache-" + workspaceResolver.getName();
        cacheManager.setBasedir(new File(getDefaultCache(), cacheName));
        cacheManager.setCheckmodified(true);
        cacheManager.setUseOrigin(true);
        cacheManager.setName(cacheName);
        addRepositoryCacheManager(cacheManager);
        workspaceResolver.setCache(cacheName);
    }
}
}
| apache-2.0 |
mykelalvis/springdm-in-action | ch03/springdm-sample/src/main/java/com/manning/sdmia/SpringDmSample.java | 206 | /**
*
*/
package com.manning.sdmia;
/**
* @author acogoluegnes
*
*/
/** Minimal sample bean that announces its construction on standard output. */
public class SpringDmSample {

    /** Message printed exactly once per constructed instance. */
    private static final String CREATION_MESSAGE = "Spring DM sample created";

    /** Creates the sample and prints the creation message. */
    public SpringDmSample() {
        System.out.println(CREATION_MESSAGE);
    }
}
| apache-2.0 |
fedgehog/Orient | commons/src/main/java/com/orientechnologies/common/console/TTYConsoleReader.java | 14391 | package com.orientechnologies.common.console;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.PrintStream;
import java.io.Reader;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import com.orientechnologies.common.log.OLogManager;
public class TTYConsoleReader implements OConsoleReader {
/** Name of the file (in the working directory) where command history is persisted. */
private static final String HISTORY_FILE_NAME = ".orientdb_history";
/** Maximum number of history entries persisted to disk. NOTE(review): never reassigned — could be final. */
private static int MAX_HISTORY_ENTRIES = 50;
// Byte values of the final character of the escape sequences / control codes
// handled by readLine(). NOTE(review): these are mutable public statics and
// should probably be final constants — confirm no external code reassigns them.
public static int END_CHAR = 70;
public static int BEGIN_CHAR = 72;
public static int DEL_CHAR = 126;
public static int DOWN_CHAR = 66;
public static int UP_CHAR = 65;
public static int RIGHT_CHAR = 67;
public static int LEFT_CHAR = 68;
public static int HORIZONTAL_TAB_CHAR = 9;
public static int VERTICAL_TAB_CHAR = 11;
public static int BACKSPACE_CHAR = 127;
public static int NEW_LINE_CHAR = 10;
public static int UNIT_SEPARATOR_CHAR = 31;
/** Cursor position within the current edit buffer. */
protected int currentPos = 0;
/** In-memory command history, oldest entry first. */
protected List<String> history = new ArrayList<String>();
/** Text typed before history navigation started; used to prefix-filter history entries. */
protected String historyBuffer;
/** Console input stream (wrapped with an explicit charset when "file.encoding" is set). */
protected Reader inStream;
/** Console output stream (wrapped with an explicit charset when "file.encoding" is set). */
protected PrintStream outStream;
/**
 * Loads the persisted command history (one command per line) and initializes
 * the console streams. Fixes over the previous version: the history reader is
 * now closed (it used to leak), and the streams are initialized even when
 * reading the history fails (they used to stay null after an IOException,
 * causing a NullPointerException on the first readLine()).
 */
public TTYConsoleReader() {
    File file = getHistoryFile(true);
    try {
        BufferedReader reader = new BufferedReader(new FileReader(file));
        try {
            String historyEntry = reader.readLine();
            while (historyEntry != null) {
                history.add(historyEntry);
                historyEntry = reader.readLine();
            }
        } finally {
            // The previous implementation never closed this reader.
            reader.close();
        }
    } catch (FileNotFoundException fnfe) {
        OLogManager.instance().error(this, "History file not found", fnfe, "");
    } catch (IOException ioe) {
        OLogManager.instance().error(this, "Error reading history file.", ioe, "");
    }
    try {
        if (System.getProperty("file.encoding") != null) {
            inStream = new InputStreamReader(System.in, System.getProperty("file.encoding"));
            outStream = new PrintStream(System.out, false, System.getProperty("file.encoding"));
        } else {
            inStream = new InputStreamReader(System.in);
            outStream = System.out;
        }
    } catch (IOException encodingError) {
        // Unsupported "file.encoding": fall back to the platform defaults.
        inStream = new InputStreamReader(System.in);
        outStream = System.out;
    }
}
/** The console application driven by this reader; used to compute TAB-completion hints. */
protected OConsoleApplication console;

/**
 * Reads one line of input from the TTY, handling escape sequences for cursor
 * movement, history navigation (arrow keys, optionally filtered by the text
 * already typed), TAB completion hints, backspace/delete editing and the
 * special "clear" command.
 *
 * @return the line entered by the user, or null on an I/O error
 */
public String readLine() {
    String consoleInput = "";
    try {
        StringBuffer buffer = new StringBuffer();
        currentPos = 0;
        historyBuffer = null;
        int historyNum = history.size();
        boolean hintedHistory = false;
        while (true) {
            boolean escape = false;
            boolean ctrl = false;
            int next = inStream.read();
            // 27 == ESC: an escape sequence follows.
            // NOTE(review): a well-formed 3+ byte sequence is assumed; the byte
            // after ESC (usually '[') is read and discarded without being checked.
            if (next == 27) {
                escape = true;
                inStream.read();
                next = inStream.read();
            }
            if (escape) {
                // "1" then ";" prefix emitted by some terminals before modifier codes.
                if (next == 49) {
                    inStream.read();
                    next = inStream.read();
                }
                // '5' designates the Ctrl modifier.
                if (next == 53) {
                    ctrl = true;
                    next = inStream.read();
                }
                if (ctrl) {
                    if (next == RIGHT_CHAR) {
                        // Ctrl+Right: jump past the next space (or to end of line).
                        currentPos = buffer.indexOf(" ", currentPos) + 1;
                        if (currentPos == 0)
                            currentPos = buffer.length();
                        StringBuffer cleaner = new StringBuffer();
                        for (int i = 0; i < buffer.length(); i++) {
                            cleaner.append(" ");
                        }
                        rewriteConsole(cleaner, true);
                        rewriteConsole(buffer, false);
                    } else if (next == LEFT_CHAR) {
                        // Ctrl+Left: jump back to the start of the previous word.
                        if (currentPos > 1 && currentPos < buffer.length() && buffer.charAt(currentPos - 1) == ' ') {
                            currentPos = buffer.lastIndexOf(" ", (currentPos - 2)) + 1;
                        } else {
                            currentPos = buffer.lastIndexOf(" ", currentPos) + 1;
                        }
                        if (currentPos < 0)
                            currentPos = 0;
                        StringBuffer cleaner = new StringBuffer();
                        for (int i = 0; i < buffer.length(); i++) {
                            cleaner.append(" ");
                        }
                        rewriteConsole(cleaner, true);
                        rewriteConsole(buffer, false);
                    } else {
                        // Other Ctrl sequences are ignored.
                    }
                } else {
                    if (next == UP_CHAR && !history.isEmpty()) {
                        if (history.size() > 0) { // UP
                            // (redundant with the isEmpty() check above)
                            StringBuffer cleaner = new StringBuffer();
                            for (int i = 0; i < buffer.length(); i++) {
                                cleaner.append(" ");
                            }
                            rewriteConsole(cleaner, true);
                            // Remember what was typed so far so that subsequent
                            // arrow keys filter history by that prefix.
                            if (!hintedHistory && (historyNum == history.size() || !buffer.toString().equals(history.get(historyNum)))) {
                                if (buffer.length() > 0) {
                                    hintedHistory = true;
                                    historyBuffer = buffer.toString();
                                } else {
                                    historyBuffer = null;
                                }
                            }
                            historyNum = getHintedHistoryIndexUp(historyNum);
                            if (historyNum > -1) {
                                buffer = new StringBuffer(history.get(historyNum));
                            } else {
                                buffer = new StringBuffer(historyBuffer);
                            }
                            currentPos = buffer.length();
                            rewriteConsole(buffer, false);
                            // writeHistory(historyNum);
                        }
                    } else if (next == DOWN_CHAR && !history.isEmpty()) { // DOWN
                        if (history.size() > 0) {
                            StringBuffer cleaner = new StringBuffer();
                            for (int i = 0; i < buffer.length(); i++) {
                                cleaner.append(" ");
                            }
                            rewriteConsole(cleaner, true);
                            historyNum = getHintedHistoryIndexDown(historyNum);
                            // Index == size means "past the newest entry": restore
                            // the text the user had typed before navigating.
                            if (historyNum == history.size()) {
                                if (historyBuffer != null) {
                                    buffer = new StringBuffer(historyBuffer);
                                } else {
                                    buffer = new StringBuffer("");
                                }
                            } else {
                                buffer = new StringBuffer(history.get(historyNum));
                            }
                            currentPos = buffer.length();
                            rewriteConsole(buffer, false);
                            // writeHistory(historyNum);
                        }
                    } else if (next == RIGHT_CHAR) {
                        if (currentPos < buffer.length()) {
                            currentPos++;
                            StringBuffer cleaner = new StringBuffer();
                            for (int i = 0; i < buffer.length(); i++) {
                                cleaner.append(" ");
                            }
                            rewriteConsole(cleaner, true);
                            rewriteConsole(buffer, false);
                        }
                    } else if (next == LEFT_CHAR) {
                        if (currentPos > 0) {
                            currentPos--;
                            StringBuffer cleaner = new StringBuffer();
                            for (int i = 0; i < buffer.length(); i++) {
                                cleaner.append(" ");
                            }
                            rewriteConsole(cleaner, true);
                            rewriteConsole(buffer, false);
                        }
                    } else if (next == END_CHAR) {
                        currentPos = buffer.length();
                        StringBuffer cleaner = new StringBuffer();
                        for (int i = 0; i < buffer.length(); i++) {
                            cleaner.append(" ");
                        }
                        rewriteConsole(cleaner, true);
                        rewriteConsole(buffer, false);
                    } else if (next == BEGIN_CHAR) {
                        currentPos = 0;
                        StringBuffer cleaner = new StringBuffer();
                        for (int i = 0; i < buffer.length(); i++) {
                            cleaner.append(" ");
                        }
                        rewriteConsole(cleaner, true);
                        rewriteConsole(buffer, false);
                    } else {
                        // Unhandled escape sequence: ignored.
                    }
                }
            } else {
                if (next == NEW_LINE_CHAR) {
                    // ENTER terminates the line.
                    outStream.println();
                    break;
                } else if (next == BACKSPACE_CHAR) {
                    if (buffer.length() > 0 && currentPos > 0) {
                        StringBuffer cleaner = new StringBuffer();
                        for (int i = 0; i < buffer.length(); i++) {
                            cleaner.append(" ");
                        }
                        buffer.deleteCharAt(currentPos - 1);
                        currentPos--;
                        rewriteConsole(cleaner, true);
                        rewriteConsole(buffer, false);
                    }
                } else if (next == DEL_CHAR) {
                    if (buffer.length() > 0 && currentPos >= 0 && currentPos < buffer.length()) {
                        StringBuffer cleaner = new StringBuffer();
                        for (int i = 0; i < buffer.length(); i++) {
                            cleaner.append(" ");
                        }
                        buffer.deleteCharAt(currentPos);
                        rewriteConsole(cleaner, true);
                        rewriteConsole(buffer, false);
                    }
                } else if (next == HORIZONTAL_TAB_CHAR) {
                    // TAB: print completion hints and possibly extend the buffer.
                    StringBuffer cleaner = new StringBuffer();
                    for (int i = 0; i < buffer.length(); i++) {
                        cleaner.append(" ");
                    }
                    buffer = writeHint(buffer);
                    rewriteConsole(cleaner, true);
                    rewriteConsole(buffer, false);
                    currentPos = buffer.length();
                } else {
                    // Printable range: everything above the control characters
                    // except DEL (127).
                    if ((next > UNIT_SEPARATOR_CHAR && next < BACKSPACE_CHAR) || next > BACKSPACE_CHAR) {
                        StringBuffer cleaner = new StringBuffer();
                        for (int i = 0; i < buffer.length(); i++) {
                            cleaner.append(" ");
                        }
                        if (currentPos == buffer.length()) {
                            buffer.append((char) next);
                        } else {
                            buffer.insert(currentPos, (char) next);
                        }
                        currentPos++;
                        rewriteConsole(cleaner, true);
                        rewriteConsole(buffer, false);
                    } else {
                        outStream.println();
                        outStream.print(buffer);
                    }
                }
                // Any non-escape key resets hinted-history navigation.
                historyNum = history.size();
                hintedHistory = false;
            }
        }
        consoleInput = buffer.toString();
        // Move the entered command to the end of the history (most recent) and persist.
        history.remove(consoleInput);
        history.add(consoleInput);
        historyNum = history.size();
        writeHistory(historyNum);
    } catch (IOException e) {
        return null;
    }
    if (consoleInput.equals("clear")) {
        // Simulate a screen clear by printing blank lines, then prompt again.
        outStream.flush();
        for (int i = 0; i < 150; i++) {
            outStream.println();
        }
        outStream.print("\r");
        outStream.print("> ");
        return readLine();
    } else {
        return consoleInput;
    }
}
/**
 * Persists the command history to disk, keeping at most MAX_HISTORY_ENTRIES
 * of the most recent entries. Fixes over the previous version: the two nearly
 * identical write branches are merged, and the trimming branch no longer drops
 * the newest entry (it used subList(historyNum - MAX - 1, historyNum - 1),
 * which excluded the command that was just entered).
 *
 * @param historyNum the current history size, as maintained by readLine()
 * @throws IOException if the history file cannot be written
 */
private void writeHistory(int historyNum) throws IOException {
    List<String> entriesToWrite = historyNum <= MAX_HISTORY_ENTRIES
            ? history
            : history.subList(historyNum - MAX_HISTORY_ENTRIES, historyNum);
    File historyFile = getHistoryFile(false);
    BufferedWriter writer = new BufferedWriter(new FileWriter(historyFile));
    try {
        for (String historyEntry : entriesToWrite) {
            writer.write(historyEntry);
            writer.newLine();
        }
    } finally {
        // close() flushes the buffered data; an explicit flush() is redundant.
        writer.close();
    }
}
/**
 * Computes TAB completion for the current buffer from the console's command
 * names. With exactly one matching command the buffer is replaced by that
 * command plus a trailing space; with several matches all candidates are
 * printed and the buffer is extended up to the common leading part.
 *
 * @param buffer the text typed so far
 * @return the (possibly replaced) buffer to continue editing with
 */
private StringBuffer writeHint(StringBuffer buffer) {
    List<String> suggestions = new ArrayList<String>();
    for (Method method : console.getConsoleMethods()) {
        String command = OConsoleApplication.getClearName(method.getName());
        if (command.startsWith(buffer.toString())) {
            suggestions.add(command);
        }
    }
    if (suggestions.size() > 1) {
        // Multiple candidates: list them all and work out the completion part.
        StringBuffer hintBuffer = new StringBuffer();
        String[] bufferComponents = buffer.toString().split(" ");
        String[] suggestionComponents;
        Set<String> bufferPart = new HashSet<String>();
        String suggestionPart = null;
        boolean appendSpace = true;
        for (String suggestion : suggestions) {
            suggestionComponents = suggestion.split(" ");
            hintBuffer.append("* " + suggestion + " ");
            hintBuffer.append("\n");
            suggestionPart = "";
            if (bufferComponents.length == 0 || buffer.length() == 0) {
                suggestionPart = null;
            } else if (bufferComponents.length == 1) {
                // Single word typed: bufferPart tracks whether the candidates'
                // first words diverge; if they do, keep the user's own word.
                bufferPart.add(suggestionComponents[0]);
                if (bufferPart.size() > 1) {
                    suggestionPart = bufferComponents[0];
                    appendSpace = false;
                } else {
                    suggestionPart = suggestionComponents[0];
                }
            } else {
                bufferPart.add(suggestionComponents[bufferComponents.length - 1]);
                if (bufferPart.size() > 1) {
                    // Candidates diverge at the current word: keep the typed text.
                    for (int i = 0; i < bufferComponents.length; i++) {
                        suggestionPart += bufferComponents[i];
                        if (i < (bufferComponents.length - 1)) {
                            suggestionPart += " ";
                        }
                        appendSpace = false;
                    }
                } else {
                    for (int i = 0; i < suggestionComponents.length; i++) {
                        suggestionPart += suggestionComponents[i] + " ";
                    }
                }
            }
        }
        if (suggestionPart != null) {
            buffer = new StringBuffer();
            buffer.append(suggestionPart);
            if (appendSpace) {
                buffer.append(" ");
            }
        }
        hintBuffer.append("-----------------------------\n");
        rewriteHintConsole(hintBuffer);
    } else if (suggestions.size() > 0) {
        // Single match: complete it fully.
        buffer = new StringBuffer();
        buffer.append(suggestions.get(0));
        buffer.append(" ");
    }
    return buffer;
}
/** Attaches the console application used to compute command hints. */
public void setConsole(OConsoleApplication iConsole) {
    console = iConsole;
}

/** Returns the attached console application, or null if none was set. */
public OConsoleApplication getConsole() {
    return console;
}
/**
 * Redraws the prompt ("> ") and the given buffer on the current line. When the
 * cursor is inside the buffer (and this is not an eraser pass) the character
 * under the cursor is highlighted with the escape code "\033[0;30;47m"
 * (black on white), resetting attributes with "\033[0m" around it.
 *
 * @param buffer  the text to draw
 * @param cleaner true when drawing a blank "eraser" buffer, which must not be highlighted
 */
private void rewriteConsole(StringBuffer buffer, boolean cleaner) {
    outStream.print("\r");
    outStream.print("> ");
    if (currentPos < buffer.length() && buffer.length() > 0 && !cleaner) {
        outStream.print("\033[0m" + buffer.substring(0, currentPos) + "\033[0;30;47m" + buffer.substring(currentPos, currentPos + 1)
                + "\033[0m" + buffer.substring(currentPos + 1) + "\033[0m");
    } else {
        outStream.print(buffer);
    }
}
/** Returns the cursor to column zero and prints the hint text verbatim. */
private void rewriteHintConsole(StringBuffer buffer) {
    outStream.print("\r" + buffer);
}
/**
 * Returns the history index to show when navigating UP (towards older entries).
 * With an active hint prefix only entries starting with that prefix are
 * considered; -1 means "no older matching entry".
 */
private int getHintedHistoryIndexUp(int historyNum) {
    // No hint prefix active: simply step one entry back, clamping at zero.
    if (historyBuffer == null || historyBuffer.length() == 0) {
        return Math.max(historyNum - 1, 0);
    }
    // Scan backwards for the previous entry matching the hint prefix.
    for (int index = historyNum - 1; index >= 0; index--) {
        if (history.get(index).startsWith(historyBuffer)) {
            return index;
        }
    }
    return -1;
}
/**
 * Returns the history index to show when navigating DOWN (towards newer
 * entries). With an active hint prefix only entries starting with that prefix
 * are considered; history.size() means "past the newest entry" (the caller
 * then restores the originally typed text).
 *
 * Fix: the previous version declared "throws IOException" although nothing in
 * the method can throw it; the spurious checked exception has been removed
 * (the method is private, so no external callers are affected).
 */
private int getHintedHistoryIndexDown(int historyNum) {
    // No hint prefix active: simply step one entry forward, clamping at size.
    if (historyBuffer == null || historyBuffer.length() == 0) {
        return historyNum < history.size() ? historyNum + 1 : history.size();
    }
    // Scan forward for the next entry matching the hint prefix.
    for (int i = historyNum + 1; i < history.size(); i++) {
        if (history.get(i).startsWith(historyBuffer)) {
            return i;
        }
    }
    return history.size();
}
/**
 * Returns the history file, creating it when absent. In write mode
 * (read == false) an existing file is deleted first so it can be rewritten
 * from scratch.
 *
 * Fixes over the previous version: the duplicated create-and-log logic is
 * merged into a single path, and the result of File.delete() is no longer
 * silently ignored.
 *
 * @param read true when the file is about to be read, false when it will be rewritten
 * @return the history file (existing, or freshly created when possible)
 */
private File getHistoryFile(boolean read) {
    File file = new File(HISTORY_FILE_NAME);
    if (!read && file.exists() && !file.delete()) {
        // Could not truncate the old history; fall through and reuse the file.
        OLogManager.instance().error(this, "Error deleting history file.", null, "");
    }
    if (!file.exists()) {
        try {
            file.createNewFile();
        } catch (IOException ioe) {
            OLogManager.instance().error(this, "Error creating history file.", ioe, "");
        }
    }
    return file;
}
}
| apache-2.0 |
camunda/camunda-bpm-platform | engine/src/test/java/org/camunda/bpm/engine/test/cmmn/CmmnDisabledTest.java | 7032 | /*
* Copyright Camunda Services GmbH and/or licensed to Camunda Services GmbH
* under one or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information regarding copyright
* ownership. Camunda licenses this file to you under the Apache License,
* Version 2.0; you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.camunda.bpm.engine.test.cmmn;
import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.fail;
import java.util.List;
import org.camunda.bpm.application.impl.EmbeddedProcessApplication;
import org.camunda.bpm.engine.AuthorizationService;
import org.camunda.bpm.engine.IdentityService;
import org.camunda.bpm.engine.RepositoryService;
import org.camunda.bpm.engine.RuntimeService;
import org.camunda.bpm.engine.TaskService;
import org.camunda.bpm.engine.authorization.ProcessDefinitionPermissions;
import org.camunda.bpm.engine.authorization.Resources;
import org.camunda.bpm.engine.authorization.TaskPermissions;
import org.camunda.bpm.engine.repository.ProcessApplicationDeployment;
import org.camunda.bpm.engine.repository.ProcessDefinition;
import org.camunda.bpm.engine.runtime.ProcessInstance;
import org.camunda.bpm.engine.runtime.VariableInstance;
import org.camunda.bpm.engine.task.Task;
import org.camunda.bpm.engine.test.util.ProcessEngineBootstrapRule;
import org.camunda.bpm.engine.test.util.ProcessEngineTestRule;
import org.camunda.bpm.engine.test.util.ProvidedProcessEngineRule;
import org.camunda.bpm.engine.variable.VariableMap;
import org.camunda.bpm.engine.variable.Variables;
import org.junit.After;
import org.junit.Before;
import org.junit.ClassRule;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.RuleChain;
/**
 * Verifies engine behavior when CMMN (case management) support is disabled via
 * the engine configuration: BPMN deployment, variable queries and task-query
 * authorization must keep working, while case-definition queries must fail.
 *
 * @author Roman Smirnov
 *
 */
public class CmmnDisabledTest {

    /** Boots the engine from a configuration file that disables CMMN. */
    @ClassRule
    public static ProcessEngineBootstrapRule bootstrapRule = new ProcessEngineBootstrapRule(
        "org/camunda/bpm/application/impl/deployment/cmmn.disabled.camunda.cfg.xml");

    public ProvidedProcessEngineRule engineRule = new ProvidedProcessEngineRule(bootstrapRule);
    public ProcessEngineTestRule engineTestRule = new ProcessEngineTestRule(engineRule);

    // engineRule is the outer rule so the engine exists before the test rule runs.
    @Rule
    public RuleChain ruleChain = RuleChain.outerRule(engineRule).around(engineTestRule);

    protected RuntimeService runtimeService;
    protected RepositoryService repositoryService;
    protected TaskService taskService;
    protected IdentityService identityService;
    protected AuthorizationService authorizationService;

    protected EmbeddedProcessApplication processApplication;

    @Before
    public void setUp() throws Exception {
        runtimeService = engineRule.getRuntimeService();
        repositoryService = engineRule.getRepositoryService();
        taskService = engineRule.getTaskService();
        identityService = engineRule.getIdentityService();
        authorizationService = engineRule.getAuthorizationService();
        processApplication = new EmbeddedProcessApplication();
    }

    /** Resets authentication/authorization state and removes data created by the tests. */
    @After
    public void tearDown() {
        identityService.clearAuthentication();
        engineRule.getProcessEngineConfiguration().setAuthorizationEnabled(false);
        engineTestRule.deleteAllAuthorizations();
        engineTestRule.deleteAllStandaloneTasks();
    }

    /** BPMN deployment must still work, but case-definition queries must throw. */
    @Test
    public void testCmmnDisabled() {
        ProcessApplicationDeployment deployment = repositoryService.createDeployment(processApplication.getReference())
            .addClasspathResource("org/camunda/bpm/engine/test/api/oneTaskProcess.bpmn20.xml")
            .deploy();

        // process is deployed:
        ProcessDefinition processDefinition = repositoryService.createProcessDefinitionQuery().singleResult();
        assertNotNull(processDefinition);
        assertEquals(1, processDefinition.getVersion());

        try {
            repositoryService.createCaseDefinitionQuery().singleResult();
            fail("Cmmn Disabled: It should not be possible to query for a case definition.");
        } catch (Exception e) {
            // expected
        }

        repositoryService.deleteDeployment(deployment.getId(), true);
    }

    /** Variable queries and getters must be unaffected by CMMN being disabled. */
    @Test
    public void testVariableInstanceQuery() {
        ProcessApplicationDeployment deployment = repositoryService.createDeployment(processApplication.getReference())
            .addClasspathResource("org/camunda/bpm/engine/test/api/oneTaskProcess.bpmn20.xml")
            .deploy();

        VariableMap variables = Variables.createVariables().putValue("my-variable", "a-value");
        ProcessInstance processInstance = runtimeService.startProcessInstanceByKey("oneTaskProcess", variables);

        // variable instance query
        List<VariableInstance> result = runtimeService.createVariableInstanceQuery().list();
        assertEquals(1, result.size());

        VariableInstance variableInstance = result.get(0);
        assertEquals("my-variable", variableInstance.getName());

        // get variable
        assertNotNull(runtimeService.getVariable(processInstance.getId(), "my-variable"));

        // get variable local
        assertNotNull(runtimeService.getVariableLocal(processInstance.getId(), "my-variable"));

        repositoryService.deleteDeployment(deployment.getId(), true);
    }

    /** Task queries must only return tasks the authenticated user may read. */
    @Test
    public void testTaskQueryAuthorization() {
        // given
        engineTestRule.deploy("org/camunda/bpm/engine/test/api/oneTaskProcess.bpmn20.xml");
        engineTestRule.deploy("org/camunda/bpm/engine/test/api/twoTasksProcess.bpmn20.xml");

        // a process instance task with read authorization
        ProcessInstance instance1 = runtimeService.startProcessInstanceByKey("oneTaskProcess");
        Task processInstanceTask = taskService.createTaskQuery().processInstanceId(instance1.getId()).singleResult();

        engineTestRule.createGrantAuthorization("user",
            Resources.PROCESS_DEFINITION,
            "oneTaskProcess",
            ProcessDefinitionPermissions.READ_TASK);

        // a standalone task with read authorization
        Task standaloneTask = taskService.newTask();
        taskService.saveTask(standaloneTask);

        engineTestRule.createGrantAuthorization("user",
            Resources.TASK,
            standaloneTask.getId(),
            TaskPermissions.READ);

        // a third task for which we have no authorization
        runtimeService.startProcessInstanceByKey("twoTasksProcess");

        identityService.setAuthenticatedUserId("user");
        engineRule.getProcessEngineConfiguration().setAuthorizationEnabled(true);

        // when
        List<Task> tasks = taskService.createTaskQuery().list();

        // then
        assertThat(tasks).extracting("id").containsExactlyInAnyOrder(standaloneTask.getId(), processInstanceTask.getId());
    }
}
| apache-2.0 |
sergeds/Gluewine | imp/src/java/org/gluewine/console/SyntaxResponse.java | 2122 | /**************************************************************************
*
* Gluewine Console Module
*
* Copyright (C) 2013 FKS bvba http://www.fks.be/
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
***************************************************************************/
package org.gluewine.console;
/**
* A reponse that failed due to a syntax exception.
*
* @author fks/Serge de Schaetzen
*
*/
public class SyntaxResponse extends Response
{
    // ===========================================================================
    /** The serial uid. */
    private static final long serialVersionUID = -7101539852928081970L;

    /** The command whose syntax check failed. */
    private CLICommand command;

    // ===========================================================================
    /**
     * Creates a new syntax-failure response.
     *
     * @param output The output of the command.
     * @param cmd The command that failed.
     * @param routed Whether the output is routed or not.
     * @param batch Whether the command was entered from batch mode.
     * @param interactive Whether the option values were entered interactively.
     */
    public SyntaxResponse(String output, CLICommand cmd, boolean routed, boolean batch, boolean interactive)
    {
        super(output, routed, batch, interactive);
        command = cmd;
    }

    // ===========================================================================
    /**
     * Returns the command that failed.
     *
     * @return The command that failed.
     */
    public CLICommand getCommand()
    {
        return command;
    }
}
| apache-2.0 |
waans11/incubator-asterixdb-hyracks | algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/AbstractScanPOperator.java | 1503 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.hyracks.algebricks.core.algebra.operators.physical;
import org.apache.hyracks.algebricks.core.algebra.base.ILogicalOperator;
import org.apache.hyracks.algebricks.core.algebra.properties.IPhysicalPropertiesVector;
import org.apache.hyracks.algebricks.core.algebra.properties.PhysicalRequirements;
/**
 * Base class for physical operators that scan a data source. Common behavior
 * defined here: a scan imposes no physical property requirements on its
 * children and is not considered more expensive than materialization.
 */
public abstract class AbstractScanPOperator extends AbstractPhysicalOperator {
    /** A scan requires nothing from its (unary) input. */
    @Override
    public PhysicalRequirements getRequiredPropertiesForChildren(ILogicalOperator op,
            IPhysicalPropertiesVector reqdByParent) {
        return emptyUnaryRequirements();
    }

    /** Scans are never treated as more expensive than materializing their input. */
    @Override
    public boolean expensiveThanMaterialization() {
        return false;
    }
}
| apache-2.0 |
joshua0pang/bazel | src/main/java/com/google/devtools/build/lib/ideinfo/AndroidStudioInfoAspect.java | 14348 | // Copyright 2014 Google Inc. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.ideinfo;
import static com.google.common.collect.Iterables.transform;
import com.google.common.base.Function;
import com.google.common.collect.ImmutableList;
import com.google.common.io.ByteSource;
import com.google.devtools.build.lib.actions.ActionOwner;
import com.google.devtools.build.lib.actions.Artifact;
import com.google.devtools.build.lib.actions.Root;
import com.google.devtools.build.lib.analysis.Aspect;
import com.google.devtools.build.lib.analysis.Aspect.Builder;
import com.google.devtools.build.lib.analysis.ConfiguredAspectFactory;
import com.google.devtools.build.lib.analysis.ConfiguredTarget;
import com.google.devtools.build.lib.analysis.RuleConfiguredTarget.Mode;
import com.google.devtools.build.lib.analysis.RuleContext;
import com.google.devtools.build.lib.analysis.TransitiveInfoCollection;
import com.google.devtools.build.lib.analysis.actions.BinaryFileWriteAction;
import com.google.devtools.build.lib.collect.nestedset.NestedSet;
import com.google.devtools.build.lib.collect.nestedset.NestedSetBuilder;
import com.google.devtools.build.lib.ideinfo.androidstudio.AndroidStudioIdeInfo.AndroidSdkRuleInfo;
import com.google.devtools.build.lib.ideinfo.androidstudio.AndroidStudioIdeInfo.ArtifactLocation;
import com.google.devtools.build.lib.ideinfo.androidstudio.AndroidStudioIdeInfo.JavaRuleIdeInfo;
import com.google.devtools.build.lib.ideinfo.androidstudio.AndroidStudioIdeInfo.LibraryArtifact;
import com.google.devtools.build.lib.ideinfo.androidstudio.AndroidStudioIdeInfo.RuleIdeInfo;
import com.google.devtools.build.lib.ideinfo.androidstudio.AndroidStudioIdeInfo.RuleIdeInfo.Kind;
import com.google.devtools.build.lib.packages.AspectDefinition;
import com.google.devtools.build.lib.packages.AspectParameters;
import com.google.devtools.build.lib.packages.Rule;
import com.google.devtools.build.lib.packages.Type;
import com.google.devtools.build.lib.rules.android.AndroidSdkProvider;
import com.google.devtools.build.lib.rules.java.JavaExportsProvider;
import com.google.devtools.build.lib.rules.java.JavaRuleOutputJarsProvider;
import com.google.devtools.build.lib.rules.java.JavaSourceInfoProvider;
import com.google.devtools.build.lib.syntax.Label;
import com.google.devtools.build.lib.vfs.Path;
import com.google.devtools.build.lib.vfs.PathFragment;
import com.google.protobuf.MessageLite;
import java.io.IOException;
import java.io.InputStream;
import java.util.Collection;
import java.util.List;
import javax.annotation.Nullable;
/**
 * Generates ide-build information for Android Studio.
 *
 * <p>The aspect travels along {@code deps} edges, writes one {@code .aswb-build}
 * proto file per recognized rule, and aggregates those files together with the
 * transitive dependency labels in an {@code AndroidStudioInfoFilesProvider}.
 */
public class AndroidStudioInfoAspect implements ConfiguredAspectFactory {
  /** Name under which this aspect is registered. */
  public static final String NAME = "AndroidStudioInfoAspect";

  // Output groups.
  public static final String IDE_RESOLVE = "ide-resolve";
  public static final String IDE_BUILD = "ide-build";

  // File suffixes.
  public static final String ASWB_BUILD_SUFFIX = ".aswb-build";

  /** Converts a {@link Label} to its canonical string representation. */
  public static final Function<Label, String> LABEL_TO_STRING = new Function<Label, String>() {
    @Nullable
    @Override
    public String apply(Label label) {
      return label.toString();
    }
  };

  @Override
  public AspectDefinition getDefinition() {
    // Attach only to rules exposing Java sources, and propagate the aspect
    // further down the "deps" attribute.
    return new AspectDefinition.Builder(NAME)
        .requireProvider(JavaSourceInfoProvider.class)
        .attributeAspect("deps", AndroidStudioInfoAspect.class)
        .build();
  }

  @Override
  public Aspect create(ConfiguredTarget base, RuleContext ruleContext,
      AspectParameters parameters) {
    Aspect.Builder builder = new Builder(NAME);

    // Collect ide build files and calculate dependencies.
    NestedSetBuilder<Label> transitiveDependenciesBuilder = NestedSetBuilder.stableOrder();
    NestedSetBuilder<Label> dependenciesBuilder = NestedSetBuilder.stableOrder();
    NestedSetBuilder<Artifact> ideBuildFilesBuilder = NestedSetBuilder.stableOrder();

    // todo(dslomov,tomlu): following current build info logic, this code enumerates dependencies
    // directly by iterating over deps attribute. The more robust way to do this might be
    // to iterate classpath as provided to build action.
    if (ruleContext.attributes().has("deps", Type.LABEL_LIST)) {
      // Merge the ide-build outputs and transitive labels this aspect produced
      // on each direct dependency.
      Iterable<AndroidStudioInfoFilesProvider> androidStudioInfoFilesProviders =
          ruleContext.getPrerequisites("deps", Mode.TARGET, AndroidStudioInfoFilesProvider.class);
      for (AndroidStudioInfoFilesProvider depProvider : androidStudioInfoFilesProviders) {
        ideBuildFilesBuilder.addTransitive(depProvider.getIdeBuildFiles());
        transitiveDependenciesBuilder.addTransitive(depProvider.getTransitiveDependencies());
      }
      List<? extends TransitiveInfoCollection> deps =
          ruleContext.getPrerequisites("deps", Mode.TARGET);
      for (TransitiveInfoCollection dep : deps) {
        dependenciesBuilder.add(dep.getLabel());
      }
      // Labels re-exported by dependencies also count as direct dependencies.
      Iterable<JavaExportsProvider> javaExportsProviders = ruleContext
          .getPrerequisites("deps", Mode.TARGET, JavaExportsProvider.class);
      for (JavaExportsProvider javaExportsProvider : javaExportsProviders) {
        dependenciesBuilder.addTransitive(javaExportsProvider.getTransitiveExports());
      }
    }
    NestedSet<Label> directDependencies = dependenciesBuilder.build();
    transitiveDependenciesBuilder.addTransitive(directDependencies);
    NestedSet<Label> transitiveDependencies = transitiveDependenciesBuilder.build();

    // Only emit an ide-build file for rule kinds the plugin understands.
    RuleIdeInfo.Kind ruleKind = getRuleKind(ruleContext.getRule(), base);
    if (ruleKind != RuleIdeInfo.Kind.UNRECOGNIZED) {
      Artifact ideBuildFile =
          createIdeBuildArtifact(base, ruleContext, ruleKind,
              directDependencies,
              transitiveDependencies);
      ideBuildFilesBuilder.add(ideBuildFile);
    }

    NestedSet<Artifact> ideBuildFiles = ideBuildFilesBuilder.build();
    builder
        .addOutputGroup(IDE_BUILD, ideBuildFiles)
        .addProvider(
            AndroidStudioInfoFilesProvider.class,
            new AndroidStudioInfoFilesProvider(ideBuildFiles, transitiveDependencies));

    return builder.build();
  }

  /** Builds the proto describing an android_sdk rule (SDK, genfiles and bin paths). */
  private static AndroidSdkRuleInfo makeAndroidSdkRuleInfo(RuleContext ruleContext,
      AndroidSdkProvider provider) {
    AndroidSdkRuleInfo.Builder sdkInfoBuilder = AndroidSdkRuleInfo.newBuilder();

    // The SDK root is reported as the directory containing android.jar.
    Path androidSdkDirectory = provider.getAndroidJar().getPath().getParentDirectory();
    sdkInfoBuilder.setAndroidSdkPath(androidSdkDirectory.toString());

    Root genfilesDirectory = ruleContext.getConfiguration().getGenfilesDirectory();
    sdkInfoBuilder.setGenfilesPath(genfilesDirectory.getPath().toString());

    Path binfilesPath = ruleContext.getConfiguration().getBinDirectory().getPath();
    sdkInfoBuilder.setBinPath(binfilesPath.toString());

    return sdkInfoBuilder.build();
  }

  /**
   * Declares the {@code <name>.aswb-build} artifact under genfiles, fills in the
   * {@link RuleIdeInfo} proto for {@code base} and registers the write action.
   *
   * @return the declared ide-build artifact
   */
  private Artifact createIdeBuildArtifact(
      ConfiguredTarget base,
      RuleContext ruleContext,
      Kind ruleKind,
      NestedSet<Label> directDependencies, NestedSet<Label> transitiveDependencies) {
    PathFragment ideBuildFilePath = getOutputFilePath(base, ruleContext);
    Root genfilesDirectory = ruleContext.getConfiguration().getGenfilesDirectory();
    Artifact ideBuildFile =
        ruleContext
            .getAnalysisEnvironment()
            .getDerivedArtifact(ideBuildFilePath, genfilesDirectory);

    RuleIdeInfo.Builder outputBuilder = RuleIdeInfo.newBuilder();

    outputBuilder.setLabel(base.getLabel().toString());

    outputBuilder.setBuildFile(
        ruleContext
            .getRule()
            .getPackage()
            .getBuildFile()
            .getPath()
            .toString());

    outputBuilder.setKind(ruleKind);
    outputBuilder.addAllDependencies(transform(directDependencies, LABEL_TO_STRING));
    outputBuilder.addAllTransitiveDependencies(transform(transitiveDependencies, LABEL_TO_STRING));

    // Kind-specific payload: Java rules get jar/source info, android_sdk gets paths.
    if (ruleKind == Kind.JAVA_LIBRARY
        || ruleKind == Kind.JAVA_IMPORT
        || ruleKind == Kind.JAVA_TEST
        || ruleKind == Kind.JAVA_BINARY) {
      outputBuilder.setJavaRuleIdeInfo(makeJavaRuleIdeInfo(base));
    } else if (ruleKind == Kind.ANDROID_SDK) {
      outputBuilder.setAndroidSdkRuleInfo(
          makeAndroidSdkRuleInfo(ruleContext, base.getProvider(AndroidSdkProvider.class)));
    }

    final RuleIdeInfo ruleIdeInfo = outputBuilder.build();
    ruleContext.registerAction(
        makeProtoWriteAction(ruleContext.getActionOwner(), ruleIdeInfo, ideBuildFile));

    return ideBuildFile;
  }

  /** Creates an action that serializes {@code message} into {@code artifact}. */
  private static BinaryFileWriteAction makeProtoWriteAction(
      ActionOwner actionOwner, final MessageLite message, Artifact artifact) {
    return new BinaryFileWriteAction(
        actionOwner,
        artifact,
        new ByteSource() {
          @Override
          public InputStream openStream() throws IOException {
            // Serialization happens lazily, at action execution time.
            return message.toByteString().newInput();
          }
        },
        /*makeExecutable =*/ false);
  }

  /** Splits an artifact path into its root and root-relative parts for the proto. */
  private static ArtifactLocation makeArtifactLocation(Artifact artifact) {
    return ArtifactLocation.newBuilder()
        .setRootPath(artifact.getRoot().getPath().toString())
        .setRelativePath(artifact.getRootRelativePathString())
        .build();
  }

  /** Collects output jars and source files for a Java rule into a {@link JavaRuleIdeInfo}. */
  private static JavaRuleIdeInfo makeJavaRuleIdeInfo(ConfiguredTarget base) {
    JavaRuleIdeInfo.Builder builder = JavaRuleIdeInfo.newBuilder();
    JavaRuleOutputJarsProvider outputJarsProvider =
        base.getProvider(JavaRuleOutputJarsProvider.class);
    if (outputJarsProvider != null) {
      // java_library
      collectJarsFromOutputJarsProvider(builder, outputJarsProvider);
    } else {
      JavaSourceInfoProvider provider = base.getProvider(JavaSourceInfoProvider.class);
      if (provider != null) {
        // java_import
        collectJarsFromSourceInfoProvider(builder, provider);
      }
    }

    Collection<Artifact> sourceFiles = getSources(base);
    for (Artifact sourceFile : sourceFiles) {
      builder.addSources(makeArtifactLocation(sourceFile));
    }

    return builder.build();
  }

  private static void collectJarsFromSourceInfoProvider(
      JavaRuleIdeInfo.Builder builder, JavaSourceInfoProvider provider) {
    Collection<Artifact> sourceJarsForJarFiles = provider.getSourceJarsForJarFiles();
    // For java_import rule, we always have only one source jar specified.
    // The intent is that that source jar provides sources for all imported jars,
    // so we reflect that intent, adding that jar to all LibraryArtifacts we produce
    // for java_import rule. We should consider supporting
    // library=<collection of jars>+<collection of srcjars>
    // mode in our AndroidStudio plugin (Android Studio itself supports that).
    Artifact sourceJar;
    if (sourceJarsForJarFiles.size() > 0) {
      sourceJar = sourceJarsForJarFiles.iterator().next();
    } else {
      sourceJar = null;
    }

    for (Artifact artifact : provider.getJarFiles()) {
      LibraryArtifact.Builder libraryBuilder = LibraryArtifact.newBuilder();
      libraryBuilder.setJar(makeArtifactLocation(artifact));
      if (sourceJar != null) {
        libraryBuilder.setSourceJar(makeArtifactLocation(sourceJar));
      }
      builder.addJars(libraryBuilder.build());
    }
  }

  private static void collectJarsFromOutputJarsProvider(
      JavaRuleIdeInfo.Builder builder, JavaRuleOutputJarsProvider outputJarsProvider) {
    // Compiled class jar plus its source jar, reported if either exists.
    LibraryArtifact.Builder jarsBuilder = LibraryArtifact.newBuilder();
    Artifact classJar = outputJarsProvider.getClassJar();
    if (classJar != null) {
      jarsBuilder.setJar(makeArtifactLocation(classJar));
    }
    Artifact srcJar = outputJarsProvider.getSrcJar();
    if (srcJar != null) {
      jarsBuilder.setSourceJar(makeArtifactLocation(srcJar));
    }
    if (jarsBuilder.hasJar() || jarsBuilder.hasSourceJar()) {
      builder.addJars(jarsBuilder.build());
    }

    // Generated (gen_class/gen_src) jars are reported separately from the main jars.
    LibraryArtifact.Builder genjarsBuilder = LibraryArtifact.newBuilder();
    Artifact genClassJar = outputJarsProvider.getGenClassJar();
    if (genClassJar != null) {
      genjarsBuilder.setJar(makeArtifactLocation(genClassJar));
    }
    Artifact gensrcJar = outputJarsProvider.getGensrcJar();
    if (gensrcJar != null) {
      genjarsBuilder.setSourceJar(makeArtifactLocation(gensrcJar));
    }
    if (genjarsBuilder.hasJar() || genjarsBuilder.hasSourceJar()) {
      builder.addGeneratedJars(genjarsBuilder.build());
    }
  }

  /** Returns the rule's source files, or an empty list when no source info is available. */
  private static Collection<Artifact> getSources(ConfiguredTarget base) {
    // Calculate source files.
    JavaSourceInfoProvider sourceInfoProvider = base.getProvider(JavaSourceInfoProvider.class);
    return sourceInfoProvider != null
        ? sourceInfoProvider.getSourceFiles()
        : ImmutableList.<Artifact>of();
  }

  /** Output path: {@code <package>/<rule name>.aswb-build}, relative to genfiles. */
  private PathFragment getOutputFilePath(ConfiguredTarget base, RuleContext ruleContext) {
    PathFragment packagePathFragment =
        ruleContext.getLabel().getPackageIdentifier().getPathFragment();
    String name = base.getLabel().getName();
    return new PathFragment(packagePathFragment, new PathFragment(name + ASWB_BUILD_SUFFIX));
  }

  /**
   * Maps a rule class name to the proto {@link RuleIdeInfo.Kind}. Rules that are
   * neither Java rules nor an android_sdk are reported as UNRECOGNIZED.
   */
  private RuleIdeInfo.Kind getRuleKind(Rule rule, ConfiguredTarget base) {
    RuleIdeInfo.Kind kind;
    String ruleClassName = rule.getRuleClassObject().getName();
    if ("java_library".equals(ruleClassName)) {
      kind = RuleIdeInfo.Kind.JAVA_LIBRARY;
    } else if ("java_import".equals(ruleClassName)) {
      kind = Kind.JAVA_IMPORT;
    } else if ("java_test".equals(ruleClassName)) {
      kind = Kind.JAVA_TEST;
    } else if ("java_binary".equals(ruleClassName)) {
      kind = Kind.JAVA_BINARY;
    } else if (base.getProvider(AndroidSdkProvider.class) != null) {
      // No dedicated rule-class check: any target carrying AndroidSdkProvider counts.
      kind = RuleIdeInfo.Kind.ANDROID_SDK;
    } else {
      kind = RuleIdeInfo.Kind.UNRECOGNIZED;
    }
    return kind;
  }
}
| apache-2.0 |
shrinkwrap/resolver | maven/impl-maven/src/test/java/org/jboss/shrinkwrap/resolver/impl/maven/integration/PomFilteringUnitTestCase.java | 3076 | /*
* JBoss, Home of Professional Open Source
* Copyright 2010, Red Hat Middleware LLC, and individual contributors
* by the @authors tag. See the copyright.txt in the distribution for a
* full listing of individual contributors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jboss.shrinkwrap.resolver.impl.maven.integration;
import java.io.File;
import org.jboss.shrinkwrap.resolver.api.maven.Maven;
import org.jboss.shrinkwrap.resolver.api.maven.strategy.RejectDependenciesStrategy;
import org.jboss.shrinkwrap.resolver.impl.maven.bootstrap.MavenSettingsBuilder;
import org.jboss.shrinkwrap.resolver.impl.maven.util.ValidationUtil;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
/**
 * Verifies that dependencies imported from a POM can be filtered out with
 * {@link RejectDependenciesStrategy}, including rejection of transitives.
 *
 * @author <a href="mailto:kpiwko@redhat.com">Karel Piwko</a>
 * @author <a href="http://community.jboss.org/people/silenius">Samuel Santos</a>
 */
public class PomFilteringUnitTestCase {

    /** Points the resolver at a scratch local repository so the tests stay isolated. */
    @BeforeClass
    public static void setRemoteRepository() {
        System.setProperty(MavenSettingsBuilder.ALT_LOCAL_REPOSITORY_LOCATION, "target/the-other-repository");
    }

    /** Restores the default local repository location after the test class runs. */
    @AfterClass
    public static void clearRemoteRepository() {
        System.clearProperty(MavenSettingsBuilder.ALT_LOCAL_REPOSITORY_LOCATION);
    }

    /** Rejecting a single coordinate also drops artifacts reachable only through it. */
    @Test
    public void testIncludeFromPomWithExclusionFilter() {
        final File[] jars = Maven.resolver().loadPomFromFile("target/poms/test-filter.xml")
                .importCompileAndRuntimeDependencies()
                .resolve()
                .using(new RejectDependenciesStrategy("org.jboss.shrinkwrap.test:test-deps-c"))
                .as(File.class);

        // We should not bring in b and c, as b is transitive from c, and we excluded c above.
        new ValidationUtil("test-deps-a", "test-deps-d", "test-deps-e").validate(jars);
    }

    /** Rejecting several coordinates at once leaves only the unrelated artifact. */
    @Test
    public void testIncludeFromPomWithExclusionsFilter() {
        final File jar = Maven.resolver().loadPomFromFile("target/poms/test-filter.xml")
                .importCompileAndRuntimeDependencies()
                .resolve()
                .using(
                        // because RejectDependenciesStrategy rejects transitives by default, we remove
                        // all mentioned dependencies and their possible ancestors in the dependency graph
                        new RejectDependenciesStrategy("org.jboss.shrinkwrap.test:test-deps-a",
                                "org.jboss.shrinkwrap.test:test-deps-c", "org.jboss.shrinkwrap.test:test-deps-d"))
                .asSingle(File.class);

        new ValidationUtil("test-deps-e").validate(jar);
    }
}
| apache-2.0 |
apache/manifoldcf | connectors/solr/connector/src/main/java/org/apache/manifoldcf/agents/output/solr/ModifiedLBHttpSolrClient.java | 2906 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.manifoldcf.agents.output.solr;
import org.apache.solr.client.solrj.impl.LBHttpSolrClient;
import org.apache.solr.client.solrj.impl.HttpSolrClient;
import org.apache.solr.client.solrj.impl.BinaryResponseParser;
import org.apache.solr.client.solrj.request.RequestWriter;
import org.apache.solr.client.solrj.*;
import java.net.MalformedURLException;
import org.apache.http.client.HttpClient;
import java.util.Set;
/**
 * Load-balancing Solr client that hands out {@link ModifiedHttpSolrClient}
 * instances for each endpoint instead of the stock {@link HttpSolrClient},
 * so that multipart form posts work through the load balancer.
 */
public class ModifiedLBHttpSolrClient extends LBHttpSolrClient
{
  // Shared HTTP client passed on to every per-endpoint client (may be null).
  private final HttpClient commonHttpClient;
  // Response parser used by every per-endpoint client.
  private final ResponseParser responseParser;
  // Whether per-endpoint clients should request/accept compressed responses.
  private final boolean compressionAllowed;

  public ModifiedLBHttpSolrClient(boolean allowCompression, String... solrServerUrls) throws MalformedURLException {
    this(null, allowCompression, solrServerUrls);
  }

  /** The provided httpClient should use a multi-threaded connection manager */
  public ModifiedLBHttpSolrClient(HttpClient httpClient, boolean allowCompression, String... solrServerUrl)
    throws MalformedURLException {
    this(httpClient, new BinaryResponseParser(), allowCompression, solrServerUrl);
  }

  /** The provided httpClient should use a multi-threaded connection manager */
  public ModifiedLBHttpSolrClient(HttpClient httpClient, ResponseParser parser, boolean allowCompression, String... solrServerUrl)
    throws MalformedURLException {
    super(httpClient, parser, solrServerUrl);
    this.commonHttpClient = httpClient;
    this.responseParser = parser;
    this.compressionAllowed = allowCompression;
  }

  /**
   * Creates the client for one endpoint, propagating any request writer and
   * query parameters configured on this load balancer.
   */
  @Override
  protected HttpSolrClient makeSolrClient(String server) {
    final HttpSolrClient endpointClient =
        new ModifiedHttpSolrClient(server, commonHttpClient, responseParser, compressionAllowed);
    final RequestWriter writer = getRequestWriter();
    if (writer != null) {
      endpointClient.setRequestWriter(writer);
    }
    final Set<String> queryParams = getQueryParams();
    if (queryParams != null) {
      endpointClient.setQueryParams(queryParams);
    }
    return endpointClient;
  }
}
| apache-2.0 |
riptano/xml-doclet | src/test/java/com/github/markusbernhardt/xmldoclet/simpledata/Class2.java | 149 | package com.github.markusbernhardt.xmldoclet.simpledata;
/**
 * Class2
 */
// NOTE(review): test fixture under .../xmldoclet/simpledata — the terse javadoc
// here and on the constructor appears intentional; presumably the XML doclet
// tests compare against this exact text, so leave it unchanged (TODO confirm).
public class Class2 {

	/**
	 * Constructor1
	 */
	public Class2() {
	}
}
flofreud/aws-sdk-java | aws-java-sdk-autoscaling/src/main/java/com/amazonaws/services/autoscaling/model/DescribeTagsResult.java | 6523 | /*
* Copyright 2010-2016 Amazon.com, Inc. or its affiliates. All Rights
* Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.autoscaling.model;
import java.io.Serializable;
/**
*
*/
public class DescribeTagsResult implements Serializable, Cloneable {
/**
* <p>
* One or more tags.
* </p>
*/
private com.amazonaws.internal.SdkInternalList<TagDescription> tags;
/**
* <p>
* The token to use when requesting the next set of items. If there are no
* additional items to return, the string is empty.
* </p>
*/
private String nextToken;
/**
* <p>
* One or more tags.
* </p>
*
* @return One or more tags.
*/
public java.util.List<TagDescription> getTags() {
if (tags == null) {
tags = new com.amazonaws.internal.SdkInternalList<TagDescription>();
}
return tags;
}
/**
* <p>
* One or more tags.
* </p>
*
* @param tags
* One or more tags.
*/
public void setTags(java.util.Collection<TagDescription> tags) {
if (tags == null) {
this.tags = null;
return;
}
this.tags = new com.amazonaws.internal.SdkInternalList<TagDescription>(
tags);
}
/**
* <p>
* One or more tags.
* </p>
* <p>
* <b>NOTE:</b> This method appends the values to the existing list (if
* any). Use {@link #setTags(java.util.Collection)} or
* {@link #withTags(java.util.Collection)} if you want to override the
* existing values.
* </p>
*
* @param tags
* One or more tags.
* @return Returns a reference to this object so that method calls can be
* chained together.
*/
public DescribeTagsResult withTags(TagDescription... tags) {
if (this.tags == null) {
setTags(new com.amazonaws.internal.SdkInternalList<TagDescription>(
tags.length));
}
for (TagDescription ele : tags) {
this.tags.add(ele);
}
return this;
}
/**
* <p>
* One or more tags.
* </p>
*
* @param tags
* One or more tags.
* @return Returns a reference to this object so that method calls can be
* chained together.
*/
public DescribeTagsResult withTags(java.util.Collection<TagDescription> tags) {
setTags(tags);
return this;
}
/**
* <p>
* The token to use when requesting the next set of items. If there are no
* additional items to return, the string is empty.
* </p>
*
* @param nextToken
* The token to use when requesting the next set of items. If there
* are no additional items to return, the string is empty.
*/
public void setNextToken(String nextToken) {
this.nextToken = nextToken;
}
/**
* <p>
* The token to use when requesting the next set of items. If there are no
* additional items to return, the string is empty.
* </p>
*
* @return The token to use when requesting the next set of items. If there
* are no additional items to return, the string is empty.
*/
public String getNextToken() {
return this.nextToken;
}
/**
* <p>
* The token to use when requesting the next set of items. If there are no
* additional items to return, the string is empty.
* </p>
*
* @param nextToken
* The token to use when requesting the next set of items. If there
* are no additional items to return, the string is empty.
* @return Returns a reference to this object so that method calls can be
* chained together.
*/
public DescribeTagsResult withNextToken(String nextToken) {
setNextToken(nextToken);
return this;
}
/**
* Returns a string representation of this object; useful for testing and
* debugging.
*
* @return A string representation of this object.
*
* @see java.lang.Object#toString()
*/
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("{");
if (getTags() != null)
sb.append("Tags: " + getTags() + ",");
if (getNextToken() != null)
sb.append("NextToken: " + getNextToken());
sb.append("}");
return sb.toString();
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (obj instanceof DescribeTagsResult == false)
return false;
DescribeTagsResult other = (DescribeTagsResult) obj;
if (other.getTags() == null ^ this.getTags() == null)
return false;
if (other.getTags() != null
&& other.getTags().equals(this.getTags()) == false)
return false;
if (other.getNextToken() == null ^ this.getNextToken() == null)
return false;
if (other.getNextToken() != null
&& other.getNextToken().equals(this.getNextToken()) == false)
return false;
return true;
}
@Override
public int hashCode() {
final int prime = 31;
int hashCode = 1;
hashCode = prime * hashCode
+ ((getTags() == null) ? 0 : getTags().hashCode());
hashCode = prime * hashCode
+ ((getNextToken() == null) ? 0 : getNextToken().hashCode());
return hashCode;
}
@Override
public DescribeTagsResult clone() {
try {
return (DescribeTagsResult) super.clone();
} catch (CloneNotSupportedException e) {
throw new IllegalStateException(
"Got a CloneNotSupportedException from Object.clone() "
+ "even though we're Cloneable!", e);
}
}
}
| apache-2.0 |
Subasinghe/ode | bpel-runtime/src/main/java/org/apache/ode/bpel/engine/IMAManager2.java | 13206 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.ode.bpel.engine;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import javax.wsdl.OperationType;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.ode.bpel.common.CorrelationKeySet;
import org.apache.ode.bpel.runtime.PartnerLinkInstance;
import org.apache.ode.bpel.runtime.Selector;
import org.apache.ode.utils.ObjectPrinter;
/**
 * <p>
 * This class handles behaviour of IMAs (Inbound Message Activities) as specified in WS BPEL.
 * This includes detecting conflictingReceive and conflictingRequest faults.
 * </p>
 *
 * <p>
 * Terminology used below: an IMA is <em>registered</em> while a receive/pick is waiting
 * for a message ({@link #_byRid}/{@link #_byChannel}); it is <em>open</em> once a
 * request-response message has arrived but no reply has been sent yet ({@link #_byOrid}).
 * </p>
 */
public class IMAManager2 implements Serializable {
    private static final long serialVersionUID = -5556374398943757951L;
    private static final Logger __log = LoggerFactory.getLogger(IMAManager2.class);

    // holds rid for registered IMAs
    public final Map<RequestIdTuple, Entry> _byRid = new HashMap<RequestIdTuple, Entry>();
    // holds outstanding rid that are now waiting to reply (Open IMAs); value is the mex reference
    public final Map<OutstandingRequestIdTuple, String> _byOrid = new HashMap<OutstandingRequestIdTuple, String>();
    // maps a pick response channel id to its registration entry
    public final Map<String, Entry> _byChannel = new HashMap<String, Entry>();

    /**
     * finds conflictingReceive
     *
     * @param selectors selectors of the receive/pick about to be registered
     * @return index of the first conflicting selector, or -1 when there is no conflict
     */
    int findConflict(Selector selectors[]) {
        if (__log.isTraceEnabled()) {
            __log.trace(ObjectPrinter.stringifyMethodEnter("findConflict", new Object[] { "selectors", selectors }));
        }
        // Seed the working set with already-registered tuples so both kinds of
        // conflict are detected: against existing IMAs and within this selector array.
        Set<RequestIdTuple> workingSet = new HashSet<RequestIdTuple>(_byRid.keySet());
        for (int i = 0; i < selectors.length; ++i) {
            final RequestIdTuple rid = new RequestIdTuple(selectors[i].plinkInstance, selectors[i].opName, selectors[i].correlationKeySet);
            if (workingSet.contains(rid)) {
                return i;
            }
            workingSet.add(rid);
        }
        return -1;
    }

    /**
     * Register IMA
     *
     * @param pickResponseChannel
     *            response channel associated with this receive/pick
     * @param selectors
     *            selectors for this receive/pick
     * @throws IllegalArgumentException if the channel is already registered
     * @throws IllegalStateException if any selector's request id tuple is already registered
     *             (callers are expected to have run {@link #findConflict(Selector[])} first)
     */
    void register(String pickResponseChannel, Selector selectors[]) {
        if (__log.isTraceEnabled()) {
            __log.trace(ObjectPrinter.stringifyMethodEnter("register", new Object[] { "pickResponseChannel", pickResponseChannel, "selectors", selectors }));
        }
        if (_byChannel.containsKey(pickResponseChannel)) {
            String errmsg = "INTERNAL ERROR: Duplicate ENTRY for RESPONSE CHANNEL " + pickResponseChannel;
            __log.error(errmsg);
            throw new IllegalArgumentException(errmsg);
        }
        Entry entry = new Entry(pickResponseChannel, selectors);
        for (int i = 0; i < selectors.length; ++i) {
            final RequestIdTuple rid = new RequestIdTuple(selectors[i].plinkInstance, selectors[i].opName, selectors[i].correlationKeySet);
            if (_byRid.containsKey(rid)) {
                String errmsg = "INTERNAL ERROR: Duplicate ENTRY for RID " + rid;
                __log.error(errmsg);
                throw new IllegalStateException(errmsg);
            }
            _byRid.put(rid, entry);
        }
        _byChannel.put(pickResponseChannel, entry);
    }

    /**
     * Registers Open IMA.
     * It doesn't open IMA for non two way operations.
     *
     * @param partnerLink partner link the message arrived on
     * @param opName operation name
     * @param mexId message exchange identifier (BPEL-level disambiguator)
     * @param mexRef message exchange reference
     * @return {@code mexRef} when a conflictingRequest is detected, {@code null} otherwise
     */
    String processOutstandingRequest(PartnerLinkInstance partnerLink, String opName, String mexId, String mexRef) {
        if (__log.isTraceEnabled()) {
            __log.trace(ObjectPrinter.stringifyMethodEnter("process", new Object[] { "partnerLinkInstance", partnerLink, "operationName", opName, "messageExchangeId", mexId, "mexRef", mexRef }));
        }
        final OutstandingRequestIdTuple orid = new OutstandingRequestIdTuple(partnerLink, opName, mexId);
        if (_byOrid.containsKey(orid)) {
            //conflictingRequest found
            return mexRef;
        }
        // We convert into outstanding request only for in-out operations (pending release operation)
        if (partnerLink.partnerLink.getMyRoleOperation(opName).getStyle().equals(OperationType.REQUEST_RESPONSE)) {
            _byOrid.put(orid, mexRef);
        }
        return null;
    }

    /**
     * This is used to remove IMA from registered state.
     *
     * @see #register(String, Selector[])
     * @param pickResponseChannel the channel whose registration is cancelled
     * @param isTimer when true, a missing registration is tolerated (the timer may
     *            fire after the pick already matched); otherwise it is an error
     */
    void cancel(String pickResponseChannel, boolean isTimer) {
        if (__log.isTraceEnabled())
            __log.trace(ObjectPrinter.stringifyMethodEnter("cancel", new Object[] { "pickResponseChannel", pickResponseChannel }));
        Entry entry = _byChannel.remove(pickResponseChannel);
        if (entry != null) {
            // Drop every rid mapping that points at this entry (one per selector).
            while (_byRid.values().remove(entry));
        } else if (!isTimer){
            String errmsg = "INTERNAL ERROR: No ENTRY for RESPONSE CHANNEL " + pickResponseChannel;
            __log.error(errmsg);
            throw new IllegalArgumentException(errmsg);
        }
    }

    /**
     * Release Open IMA.
     *
     * @param plinkInstnace
     *            partner link
     * @param opName
     *            operation
     * @param mexId
     *            message exchange identifier IN THE BPEL SENSE OF THE TERM (i.e. a receive/reply disambiguator).
     * @return message exchange identifier associated with the registration that matches the parameters
     */
    public String release(PartnerLinkInstance plinkInstnace, String opName, String mexId) {
        if (__log.isTraceEnabled())
            __log.trace(ObjectPrinter.stringifyMethodEnter("release", new Object[] { "plinkInstance", plinkInstnace, "opName", opName, "mexId", mexId }));
        final OutstandingRequestIdTuple orid = new OutstandingRequestIdTuple(plinkInstnace, opName, mexId);
        String mexRef = _byOrid.remove(orid);
        if (mexRef == null) {
            if (__log.isDebugEnabled()) {
                __log.debug("==release: ORID " + orid + " not found in " + _byOrid);
            }
            return null;
        }
        return mexRef;
    }

    /**
     * "Release" all Open IMAs
     *
     * @return a list of message exchange identifiers for message exchanges that were begun (receive/pick got a message) but not yet completed (reply not yet sent)
     */
    public String[] releaseAll() {
        if (__log.isTraceEnabled())
            __log.trace(ObjectPrinter.stringifyMethodEnter("releaseAll", null));
        ArrayList<String> mexRefs = new ArrayList<String>();
        while (!_byOrid.isEmpty()) {
            String mexRef = _byOrid.entrySet().iterator().next().getValue();
            mexRefs.add(mexRef);
            _byOrid.values().remove(mexRef);
        }
        return mexRefs.toArray(new String[mexRefs.size()]);
    }

    public String toString() {
        return ObjectPrinter.toString(this, new Object[] { "byRid", _byRid, "byOrid", _byOrid, "byChannel", _byChannel });
    }

    /** Key identifying a registered IMA: (partner link, operation, correlation key set). */
    public static class RequestIdTuple implements Serializable {
        private static final long serialVersionUID = -1059389611839777482L;
        /** On which partner link it was received. */
        PartnerLinkInstance partnerLink;
        /** Name of the operation. */
        String opName;
        /** cset */
        CorrelationKeySet ckeySet;

        /** Constructor. */
        RequestIdTuple(PartnerLinkInstance partnerLink, String opName, CorrelationKeySet ckeySet) {
            this.partnerLink = partnerLink;
            this.opName = opName;
            this.ckeySet = ckeySet;
        }

        public String toString() {
            return ObjectPrinter.toString(this, new Object[] { "partnerLink", partnerLink, "opName", opName, "cSet", ckeySet});
        }

        @Override
        public int hashCode() {
            final int prime = 31;
            int result = 1;
            result = prime * result
                    + ((ckeySet == null) ? 0 : ckeySet.hashCode());
            result = prime * result
                    + ((opName == null) ? 0 : opName.hashCode());
            result = prime * result
                    + ((partnerLink == null) ? 0 : partnerLink.hashCode());
            return result;
        }

        @Override
        public boolean equals(Object obj) {
            if (this == obj) {
                return true;
            }
            if (obj == null) {
                return false;
            }
            if (!(obj instanceof RequestIdTuple)) {
                return false;
            }
            RequestIdTuple other = (RequestIdTuple) obj;
            if (ckeySet == null) {
                if (other.ckeySet != null) {
                    return false;
                }
            } else if (!ckeySet.equals(other.ckeySet)) {
                return false;
            }
            if (opName == null) {
                if (other.opName != null) {
                    return false;
                }
            } else if (!opName.equals(other.opName)) {
                return false;
            }
            if (partnerLink == null) {
                if (other.partnerLink != null) {
                    return false;
                }
            } else if (!partnerLink.equals(other.partnerLink)) {
                return false;
            }
            return true;
        }
    }

    /** Key identifying an open IMA: (partner link, operation, BPEL message exchange id). */
    public static class OutstandingRequestIdTuple implements Serializable {
        private static final long serialVersionUID = -1059389611839777482L;
        /** On which partner link it was received. */
        PartnerLinkInstance partnerLink;
        /** Name of the operation. */
        String opName;
        /** Message exchange identifier. */
        String mexId;

        /** Constructor. */
        OutstandingRequestIdTuple(PartnerLinkInstance partnerLink, String opName, String mexId) {
            this.partnerLink = partnerLink;
            this.opName = opName;
            this.mexId = mexId == null ? "" : mexId;
        }

        public int hashCode() {
            return this.partnerLink.hashCode() ^ this.opName.hashCode() ^ this.mexId.hashCode();
        }

        public boolean equals(Object obj) {
            if (this == obj) {
                return true;
            }
            // FIX: guard against null and foreign types instead of blindly casting.
            // The previous implementation threw ClassCastException/NullPointerException
            // here, violating the Object.equals() contract (it must return false).
            if (!(obj instanceof OutstandingRequestIdTuple)) {
                return false;
            }
            OutstandingRequestIdTuple other = (OutstandingRequestIdTuple) obj;
            return other.partnerLink.equals(partnerLink) && other.opName.equals(opName) && other.mexId.equals(mexId);
        }

        public String toString() {
            return ObjectPrinter.toString(this, new Object[] { "partnerLink", partnerLink, "opName", opName, "mexId", mexId });
        }
    }

    /** A receive/pick registration: its response channel plus the selectors it waits on. */
    public static class Entry implements Serializable {
        private static final long serialVersionUID = -583743124656582887L;
        final String pickResponseChannel;
        public Selector[] selectors;

        Entry(String pickResponseChannel, Selector[] selectors) {
            this.pickResponseChannel = pickResponseChannel;
            this.selectors = selectors;
        }

        @Override
        public int hashCode() {
            final int prime = 31;
            int result = 1;
            result = prime
                    * result
                    + ((pickResponseChannel == null) ? 0 : pickResponseChannel
                            .hashCode());
            result = prime * result + Arrays.hashCode(selectors);
            return result;
        }

        @Override
        public boolean equals(Object obj) {
            if (this == obj)
                return true;
            if (obj == null)
                return false;
            if (getClass() != obj.getClass())
                return false;
            Entry other = (Entry) obj;
            if (pickResponseChannel == null) {
                if (other.pickResponseChannel != null)
                    return false;
            } else if (!pickResponseChannel.equals(other.pickResponseChannel))
                return false;
            if (!Arrays.equals(selectors, other.selectors))
                return false;
            return true;
        }

        public String toString() {
            return ObjectPrinter.toString(this, new Object[] { "pickResponseChannel", pickResponseChannel, "selectors", selectors });
        }
    }
}
| apache-2.0 |
Subasinghe/ode | utils/src/main/java/org/apache/ode/utils/fs/FileUtils.java | 6403 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.ode.utils.fs;
import java.io.File;
import java.io.FileFilter;
import java.util.ArrayList;
import java.util.List;
import java.util.TreeSet;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Various file system utilities.
*/
/**
 * Various file system utilities.
 */
public class FileUtils {
    private static final Logger __log = LoggerFactory.getLogger(FileUtils.class);

    /**
     * Test if the given path is absolute or not.
     * @param path path to test
     * @return true if absolute
     * @see java.io.File#isAbsolute()
     */
    public static boolean isAbsolute(String path) {
        return new File(path).isAbsolute();
    }

    /**
     * Test if the given path is relative or absolute.
     * @param path path to test
     * @return true if relative
     * @see java.io.File#isAbsolute()
     */
    public static boolean isRelative(String path) {
        return !isAbsolute(path);
    }

    /**
     * Delete a file/directory, recursively.
     *
     * @param file file/directory to delete
     * @return <code>true</code> if successful
     */
    public static boolean deepDelete(File file) {
        if (!file.exists()) {
            // file seems to be gone already?! anyway nothing to do for us.
            return true;
        }
        if (__log.isDebugEnabled()) {
            __log.debug("deleting: " + file.getAbsolutePath());
        }
        if (file.delete()) {
            return true;
        }
        if (file.isDirectory()) {
            // Plain delete failed because the directory is non-empty: remove
            // the children first, then retry deleting the directory itself.
            boolean success = true;
            File[] files = file.listFiles();
            if (files != null) { // listFiles() returns null on I/O error
                for (File child : files) {
                    success &= deepDelete(child);
                }
            }
            return success && file.delete();
        }
        __log.error("Unable to deepDelete file " + file.getAbsolutePath()
            + "; this may be caused by a descriptor leak and should be reported.");
        return false;
    }

    /**
     * Recursively collect all Files in the given directory and all its
     * subdirectories.
     *
     * @param rootDirectory the top level directory used for the search
     * @return a List of found Files
     */
    public static List<File> directoryEntriesInPath(File rootDirectory) {
        return FileUtils.directoryEntriesInPath(rootDirectory, null);
    }

    /**
     * Recursively collect all Files in the given directory and all its
     * subdirectories, applying the given FileFilter. The FileFilter is also
     * applied to the given rootDirectory, so the rootDirectory itself might be
     * in the returned list.
     * <p>
     * Returned files are ordered lexicographically but, for each directory,
     * files come before its subdirectories. For instance:<br/>
     * test<br/>
     * test/alpha.txt<br/>
     * test/zulu.txt<br/>
     * test/a<br/>
     * test/a/alpha.txt<br/>
     * test/z<br/>
     * test/z/zulu.txt<br/>
     *
     * @param rootDirectory the top level directory used for the search
     * @param filter a FileFilter used for accepting/rejecting individual
     *               entries; may be null to accept everything
     * @return a List of found Files
     */
    public static List<File> directoryEntriesInPath(File rootDirectory, FileFilter filter) {
        if (rootDirectory == null) {
            throw new IllegalArgumentException("File must not be null!");
        }
        if (!rootDirectory.exists()) {
            throw new IllegalArgumentException("File does not exist!");
        }
        ArrayList<File> collectedFiles = new ArrayList<File>(32);
        if (rootDirectory.isFile()) {
            if (filter == null || filter.accept(rootDirectory)) {
                collectedFiles.add(rootDirectory);
            }
            return collectedFiles;
        }
        FileUtils.directoryEntriesInPath(collectedFiles, rootDirectory, filter);
        return collectedFiles;
    }

    /**
     * Recursive worker for {@link #directoryEntriesInPath(File, FileFilter)}:
     * adds the directory itself, then its accepted files (sorted), then
     * descends into its subdirectories (sorted).
     */
    private static void directoryEntriesInPath(List<File> collectedFiles, File parentDir, FileFilter filter) {
        if (filter == null || filter.accept(parentDir)) {
            collectedFiles.add(parentDir);
        }
        File[] allFiles = parentDir.listFiles();
        if (allFiles != null) {
            // TreeSets give the lexicographic ordering documented above.
            TreeSet<File> dirs = new TreeSet<File>();
            TreeSet<File> acceptedFiles = new TreeSet<File>();
            for (File f : allFiles) {
                if (f.isDirectory()) {
                    dirs.add(f);
                } else if (filter == null || filter.accept(f)) {
                    acceptedFiles.add(f);
                }
            }
            collectedFiles.addAll(acceptedFiles);
            for (File currentFile : dirs) {
                FileUtils.directoryEntriesInPath(collectedFiles, currentFile, filter);
            }
        }
    }

    /**
     * Replaces every space in the path with the URL escape "%20".
     *
     * @param path path to encode
     * @return the encoded path
     */
    public static String encodePath(String path) {
        // Literal replacement; no need for the regex machinery of replaceAll.
        return path.replace(" ", "%20");
    }

    /** Ad-hoc manual test driver. */
    public static void main(String[] args) {
        List<File> l = directoryEntriesInPath(new File("/tmp/test"));
        for (File f : l) {
            System.out.println(f);
        }
        System.out.println("########");
        TreeSet<File> s = new TreeSet<File>(l); // was a raw TreeSet
        for (File f : s) {
            System.out.println(f);
        }
    }
}
| apache-2.0 |
apache/santuario-java | src/main/java/org/apache/xml/security/stax/impl/securityToken/DsaKeyValueSecurityToken.java | 3108 | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.xml.security.stax.impl.securityToken;
import org.apache.xml.security.binding.xmldsig.DSAKeyValueType;
import org.apache.xml.security.exceptions.XMLSecurityException;
import org.apache.xml.security.stax.ext.InboundSecurityContext;
import org.apache.xml.security.stax.impl.util.IDGenerator;
import org.apache.xml.security.stax.securityToken.SecurityTokenConstants;
import java.math.BigInteger;
import java.security.KeyFactory;
import java.security.NoSuchAlgorithmException;
import java.security.PublicKey;
import java.security.spec.DSAPublicKeySpec;
import java.security.spec.InvalidKeySpecException;
/**
*/
/**
 * Inbound security token backed by a ds:DSAKeyValue structure: lazily builds
 * a DSA {@link PublicKey} from the raw P, Q, G and Y values.
 */
public class DsaKeyValueSecurityToken extends AbstractInboundSecurityToken {

    /** Parsed DSAKeyValue structure the public key is built from. */
    private final DSAKeyValueType dsaKeyValueType;

    public DsaKeyValueSecurityToken(DSAKeyValueType dsaKeyValueType, InboundSecurityContext inboundSecurityContext) {
        super(inboundSecurityContext, IDGenerator.generateID(null), SecurityTokenConstants.KeyIdentifier_KeyValue, true);
        this.dsaKeyValueType = dsaKeyValueType;
    }

    /**
     * Builds a DSA public key from the key value's raw byte arrays.
     * The BigInteger(1, ...) constructor interprets each array as an
     * unsigned, big-endian magnitude.
     *
     * @throws InvalidKeySpecException if the values do not form a valid key
     * @throws NoSuchAlgorithmException if no DSA KeyFactory is available
     */
    private static PublicKey buildPublicKey(DSAKeyValueType dsaKeyValueType) throws InvalidKeySpecException, NoSuchAlgorithmException {
        DSAPublicKeySpec dsaPublicKeySpec = new DSAPublicKeySpec(
                new BigInteger(1, dsaKeyValueType.getY()),
                new BigInteger(1, dsaKeyValueType.getP()),
                new BigInteger(1, dsaKeyValueType.getQ()),
                new BigInteger(1, dsaKeyValueType.getG()));
        KeyFactory keyFactory = KeyFactory.getInstance("DSA");
        return keyFactory.generatePublic(dsaPublicKeySpec);
    }

    @Override
    public PublicKey getPublicKey() throws XMLSecurityException {
        // Lazily materialize the key on first access; the superclass caches it.
        if (super.getPublicKey() == null) {
            try {
                setPublicKey(buildPublicKey(this.dsaKeyValueType));
            } catch (InvalidKeySpecException | NoSuchAlgorithmException e) {
                // Both failure modes are wrapped identically; multi-catch
                // replaces the two duplicated catch blocks.
                throw new XMLSecurityException(e);
            }
        }
        return super.getPublicKey();
    }

    @Override
    public boolean isAsymmetric() {
        return true;
    }

    @Override
    public SecurityTokenConstants.TokenType getTokenType() {
        return SecurityTokenConstants.KeyValueToken;
    }
}
| apache-2.0 |
gradle/gradle | subprojects/dependency-management/src/main/java/org/gradle/api/internal/artifacts/dependencies/DefaultDependencyConstraint.java | 6416 | /*
* Copyright 2017 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.gradle.api.internal.artifacts.dependencies;
import com.google.common.base.Objects;
import com.google.common.base.Strings;
import org.gradle.api.Action;
import org.gradle.api.artifacts.DependencyConstraint;
import org.gradle.api.artifacts.ModuleIdentifier;
import org.gradle.api.artifacts.ModuleVersionIdentifier;
import org.gradle.api.artifacts.MutableVersionConstraint;
import org.gradle.api.artifacts.VersionConstraint;
import org.gradle.api.attributes.AttributeContainer;
import org.gradle.api.internal.artifacts.DefaultModuleIdentifier;
import org.gradle.api.internal.artifacts.ModuleVersionSelectorStrictSpec;
import org.gradle.api.internal.attributes.AttributeContainerInternal;
import org.gradle.api.internal.attributes.ImmutableAttributes;
import org.gradle.api.internal.attributes.ImmutableAttributesFactory;
import org.gradle.api.logging.Logger;
import org.gradle.api.logging.Logging;
import javax.annotation.Nullable;
public class DefaultDependencyConstraint implements DependencyConstraintInternal {
private final static Logger LOG = Logging.getLogger(DefaultDependencyConstraint.class);
private final ModuleIdentifier moduleIdentifier;
private final MutableVersionConstraint versionConstraint;
private String reason;
private ImmutableAttributesFactory attributesFactory;
private AttributeContainerInternal attributes;
private boolean force;
public DefaultDependencyConstraint(String group, String name, String version) {
this.moduleIdentifier = DefaultModuleIdentifier.newId(group, name);
this.versionConstraint = new DefaultMutableVersionConstraint(version);
}
public static DefaultDependencyConstraint strictly(String group, String name, String strictVersion) {
DefaultMutableVersionConstraint versionConstraint = new DefaultMutableVersionConstraint((String) null);
versionConstraint.strictly(strictVersion);
return new DefaultDependencyConstraint(DefaultModuleIdentifier.newId(group, name), versionConstraint);
}
public DefaultDependencyConstraint(ModuleIdentifier module, VersionConstraint versionConstraint) {
this(module, new DefaultMutableVersionConstraint(versionConstraint));
}
private DefaultDependencyConstraint(ModuleIdentifier module, MutableVersionConstraint versionConstraint) {
this.moduleIdentifier = module;
this.versionConstraint = versionConstraint;
}
@Nullable
@Override
public String getGroup() {
return moduleIdentifier.getGroup();
}
@Override
public String getName() {
return moduleIdentifier.getName();
}
@Override
public String getVersion() {
return Strings.emptyToNull(versionConstraint.getRequiredVersion());
}
@Override
public AttributeContainer getAttributes() {
return attributes == null ? ImmutableAttributes.EMPTY : attributes.asImmutable();
}
@Override
public DependencyConstraint attributes(Action<? super AttributeContainer> configureAction) {
if (attributesFactory == null) {
warnAboutInternalApiUse();
return this;
}
if (attributes == null) {
attributes = attributesFactory.mutable();
}
configureAction.execute(attributes);
return this;
}
private void warnAboutInternalApiUse() {
LOG.warn("Cannot set attributes for constraint \"" + this.getGroup() + ":" + this.getName() + ":" + this.getVersion() + "\": it was probably created by a plugin using internal APIs");
}
public void setAttributesFactory(ImmutableAttributesFactory attributesFactory) {
this.attributesFactory = attributesFactory;
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
DefaultDependencyConstraint that = (DefaultDependencyConstraint) o;
return Objects.equal(moduleIdentifier, that.moduleIdentifier) &&
Objects.equal(versionConstraint, that.versionConstraint) &&
Objects.equal(attributes, that.attributes) &&
force == that.force;
}
@Override
public int hashCode() {
return Objects.hashCode(moduleIdentifier, versionConstraint, attributes);
}
@Override
public void version(Action<? super MutableVersionConstraint> configureAction) {
configureAction.execute(versionConstraint);
}
@Override
public VersionConstraint getVersionConstraint() {
return versionConstraint;
}
@Override
public boolean matchesStrictly(ModuleVersionIdentifier identifier) {
return new ModuleVersionSelectorStrictSpec(this).isSatisfiedBy(identifier);
}
@Override
public ModuleIdentifier getModule() {
return moduleIdentifier;
}
@Override
public String getReason() {
return reason;
}
@Override
public void because(String reason) {
this.reason = reason;
}
@Override
public DependencyConstraint copy() {
DefaultDependencyConstraint constraint = new DefaultDependencyConstraint(moduleIdentifier, versionConstraint);
constraint.reason = reason;
constraint.attributes = attributes;
constraint.attributesFactory = attributesFactory;
constraint.force = force;
return constraint;
}
@Override
public String toString() {
return "constraint " +
moduleIdentifier + ":" + versionConstraint +
", attributes=" + attributes;
}
@Override
public void setForce(boolean force) {
this.force = force;
}
@Override
public boolean isForce() {
return force;
}
}
| apache-2.0 |
androidx/androidx | appcompat/appcompat/src/androidTest/java/androidx/appcompat/app/DrawerDynamicLayoutTest.java | 8426 | /*
* Copyright (C) 2015 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package androidx.appcompat.app;
import static androidx.test.espresso.Espresso.onView;
import static androidx.test.espresso.assertion.ViewAssertions.doesNotExist;
import static androidx.test.espresso.matcher.ViewMatchers.isAssignableFrom;
import static androidx.test.espresso.matcher.ViewMatchers.withId;
import static org.hamcrest.CoreMatchers.allOf;
import static org.mockito.Mockito.mock;
import android.util.Log;
import android.view.View;
import android.view.ViewStub;
import androidx.annotation.LayoutRes;
import androidx.appcompat.test.R;
import androidx.core.view.GravityCompat;
import androidx.core.view.ViewCompat;
import androidx.drawerlayout.widget.DrawerLayout;
import androidx.test.annotation.UiThreadTest;
import androidx.test.espresso.UiController;
import androidx.test.espresso.ViewAction;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import androidx.test.filters.LargeTest;
import androidx.test.rule.ActivityTestRule;
import org.hamcrest.Description;
import org.hamcrest.Matcher;
import org.hamcrest.TypeSafeMatcher;
import org.junit.After;
import org.junit.Rule;
import org.junit.Test;
import org.junit.runner.RunWith;
/**
* Test cases to verify that <code>DrawerLayout</code> only supports configurations
* with at most one drawer child along each vertical (left / right) edge.
*/
@LargeTest
@RunWith(AndroidJUnit4.class)
public class DrawerDynamicLayoutTest {
    // Launches the activity whose layout is a lone ViewStub; each test
    // inflates a different drawer configuration into that stub.
    @Rule
    public final ActivityTestRule<DrawerDynamicLayoutActivity> mActivityTestRule =
            new ActivityTestRule<>(DrawerDynamicLayoutActivity.class);

    @UiThreadTest
    @After
    public void tearDown() {
        // Now that the test is done, replace the activity content view with ViewStub so
        // that it's ready to be replaced for the next test.
        final DrawerDynamicLayoutActivity activity = mActivityTestRule.getActivity();
        activity.setContentView(R.layout.drawer_dynamic_layout);
    }

    /**
     * Matches views that have parents.
     */
    private Matcher<View> hasParent() {
        return new TypeSafeMatcher<View>() {
            @Override
            public void describeTo(Description description) {
                description.appendText("has parent");
            }

            @Override
            public boolean matchesSafely(View view) {
                return view.getParent() != null;
            }
        };
    }

    // Convenience overload: inflate without logging the resulting gravities.
    private ViewAction inflateViewStub(final @LayoutRes int layoutResId) {
        return inflateViewStub(layoutResId, false);
    }

    /**
     * Inflates the <code>ViewStub</code> with the passed layout resource.
     */
    private ViewAction inflateViewStub(final @LayoutRes int layoutResId, final boolean log) {
        return new ViewAction() {
            @Override
            public Matcher<View> getConstraints() {
                // Only applicable to a ViewStub that is still attached.
                return allOf(isAssignableFrom(ViewStub.class), hasParent());
            }

            @Override
            public String getDescription() {
                return "Inflates view stub";
            }

            @Override
            public void perform(UiController uiController, View view) {
                // Drain pending UI work before and after inflation so the
                // measure pass (where DrawerLayout validates its children)
                // has run by the time perform() returns.
                uiController.loopMainThreadUntilIdle();
                ViewStub viewStub = (ViewStub) view;
                viewStub.setLayoutResource(layoutResId);
                View drawer = viewStub.inflate();
                uiController.loopMainThreadUntilIdle();
                if (log) {
                    logGravity(drawer);
                }
            }
        };
    }

    // Debug helper: dumps the resolved absolute gravity of each DrawerLayout
    // child, both in the child's and in the parent's layout direction.
    public static void logGravity(View view) {
        DrawerLayout drawer = (DrawerLayout) view;
        for (int i = 0; i < drawer.getChildCount(); i++) {
            View child = drawer.getChildAt(i);
            final int gravity = ((DrawerLayout.LayoutParams) child.getLayoutParams()).gravity;
            final int absGravity = GravityCompat.getAbsoluteGravity(gravity,
                    ViewCompat.getLayoutDirection(child));
            final int gravityInParent = GravityCompat.getAbsoluteGravity(gravity,
                    ViewCompat.getLayoutDirection(drawer));
            Log.e("DrawerDynamicLayoutTest", "gravity of child[" + i + "] "
                    + " = " + absGravity + "; gravity in parent " + gravityInParent);
        }
    }

    @Test
    public void testSingleStartDrawer() {
        onView(withId(R.id.drawer_layout)).check(doesNotExist());
        onView(withId(R.id.drawer_stub)).perform(
                inflateViewStub(R.layout.drawer_dynamic_content_single_start));
    }

    @Test(expected=IllegalStateException.class)
    public void testDoubleStartDrawers() {
        onView(withId(R.id.drawer_layout)).check(doesNotExist());
        // Note the expected exception in the @Test annotation, as we expect the DrawerLayout
        // to throw exception during the measure pass as it detects two start drawers.
        onView(withId(R.id.drawer_stub)).perform(
                inflateViewStub(R.layout.drawer_dynamic_content_double_start));
    }

    @Test
    public void testSingleEndDrawer() {
        onView(withId(R.id.drawer_layout)).check(doesNotExist());
        onView(withId(R.id.drawer_stub)).perform(
                inflateViewStub(R.layout.drawer_dynamic_content_single_end));
    }

    @Test(expected=IllegalStateException.class)
    public void testDoubleEndDrawers() {
        onView(withId(R.id.drawer_layout)).check(doesNotExist());
        // Note the expected exception in the @Test annotation, as we expect the DrawerLayout
        // to throw exception during the measure pass as it detects two end drawers.
        onView(withId(R.id.drawer_stub)).perform(
                inflateViewStub(R.layout.drawer_dynamic_content_double_end, true));
    }

    @Test
    public void testSingleStartDrawerSingleEndDrawer() {
        onView(withId(R.id.drawer_layout)).check(doesNotExist());
        onView(withId(R.id.drawer_stub)).perform(
                inflateViewStub(R.layout.drawer_dynamic_content_start_end));
    }

    @Test(expected=IllegalStateException.class)
    public void testDoubleStartDrawersSingleEndDrawer() {
        onView(withId(R.id.drawer_layout)).check(doesNotExist());
        // Note the expected exception in the @Test annotation, as we expect the DrawerLayout
        // to throw exception during the measure pass as it detects two start drawers.
        onView(withId(R.id.drawer_stub)).perform(
                inflateViewStub(R.layout.drawer_dynamic_content_double_start_single_end));
    }

    @Test(expected=IllegalStateException.class)
    public void testDoubleEndDrawersSingleStartDrawer() {
        onView(withId(R.id.drawer_layout)).check(doesNotExist());
        // Note the expected exception in the @Test annotation, as we expect the DrawerLayout
        // to throw exception during the measure pass as it detects two start drawers.
        onView(withId(R.id.drawer_stub)).perform(
                inflateViewStub(R.layout.drawer_dynamic_content_double_end_single_start));
    }

    @Test
    public void testRemoveUnregisteredListener() {
        onView(withId(R.id.drawer_stub)).perform(
                inflateViewStub(R.layout.drawer_dynamic_content_single_start));
        // We do this test here and not in DrawerLayoutTest since we want to be sure that the
        // call to DrawerLayout.removeDrawerLayout() didn't have any calls to addDrawerLayout()
        // before it. DrawerLayoutTest and its DrawerLayoutActivity register listeners as part
        // of their initial setup flow.
        final DrawerLayout startDrawer =
                (DrawerLayout) mActivityTestRule.getActivity().findViewById(R.id.drawer_layout);
        DrawerLayout.DrawerListener mockedListener = mock(DrawerLayout.DrawerListener.class);
        startDrawer.removeDrawerListener(mockedListener);
    }
}
| apache-2.0 |
Soo000/SooChat | src/org/jivesoftware/smackx/xhtmlim/XHTMLManager.java | 5565 | /**
*
* Copyright 2003-2007 Jive Software.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jivesoftware.smackx.xhtmlim;
import org.jivesoftware.smack.ConnectionCreationListener;
import org.jivesoftware.smack.SmackException.NoResponseException;
import org.jivesoftware.smack.SmackException.NotConnectedException;
import org.jivesoftware.smack.XMPPConnection;
import org.jivesoftware.smack.XMPPConnectionRegistry;
import org.jivesoftware.smack.XMPPException.XMPPErrorException;
import org.jivesoftware.smack.packet.Message;
import org.jivesoftware.smackx.disco.ServiceDiscoveryManager;
import org.jivesoftware.smackx.xhtmlim.packet.XHTMLExtension;
import java.util.List;
/**
* Manages XHTML formatted texts within messages. A XHTMLManager provides a high level access to
* get and set XHTML bodies to messages, enable and disable XHTML support and check if remote XMPP
* clients support XHTML.
*
* @author Gaston Dombiak
*/
/**
 * High-level access to XHTML-IM formatted bodies in messages: reading and
 * adding XHTML bodies, toggling local XHTML support, and discovering whether
 * remote entities support it.
 */
public class XHTMLManager {

    static {
        // Turn on XHTML support automatically for every new connection.
        XMPPConnectionRegistry.addConnectionCreationListener(new ConnectionCreationListener() {
            public void connectionCreated(XMPPConnection connection) {
                XHTMLManager.setServiceEnabled(connection, true);
            }
        });
    }

    /**
     * Returns the XHTML bodies carried by the message, or {@code null} when
     * the message has no XHTML extension.
     *
     * @param message an XHTML message
     * @return the bodies in the message, or null if none
     */
    public static List<CharSequence> getBodies(Message message) {
        XHTMLExtension extension = XHTMLExtension.from(message);
        return (extension == null) ? null : extension.getBodies();
    }

    /**
     * Adds an XHTML body to the message, creating the XHTML extension on the
     * message first if it is not present yet.
     *
     * @param message the message that will receive the XHTML body
     * @param xhtmlText the string to add as an XHTML body to the message
     */
    public static void addBody(Message message, XHTMLText xhtmlText) {
        XHTMLExtension extension = XHTMLExtension.from(message);
        if (extension == null) {
            extension = new XHTMLExtension();
            message.addExtension(extension);
        }
        extension.addBody(xhtmlText.toXML());
    }

    /**
     * Tells whether the message carries an XHTML extension.
     *
     * @param message the message to inspect
     * @return true if the message is an XHTML message
     */
    public static boolean isXHTMLMessage(Message message) {
        return message.getExtension(XHTMLExtension.ELEMENT, XHTMLExtension.NAMESPACE) != null;
    }

    /**
     * Enables or disables XHTML support on a connection by advertising (or
     * withdrawing) the XHTML-IM feature via service discovery. A no-op when
     * the support is already in the requested state.
     *
     * @param connection the connection where the service will be enabled or disabled
     * @param enabled whether the service should be enabled
     */
    public synchronized static void setServiceEnabled(XMPPConnection connection, boolean enabled) {
        if (isServiceEnabled(connection) == enabled) {
            return;
        }
        ServiceDiscoveryManager manager = ServiceDiscoveryManager.getInstanceFor(connection);
        if (enabled) {
            manager.addFeature(XHTMLExtension.NAMESPACE);
        } else {
            manager.removeFeature(XHTMLExtension.NAMESPACE);
        }
    }

    /**
     * Tells whether XHTML support is currently enabled for the connection.
     *
     * @param connection the connection to look at
     * @return true if this connection advertises the XHTML-IM feature
     */
    public static boolean isServiceEnabled(XMPPConnection connection) {
        return ServiceDiscoveryManager.getInstanceFor(connection).includesFeature(XHTMLExtension.NAMESPACE);
    }

    /**
     * Tells whether the given remote user handles XHTML messages, as reported
     * by service discovery.
     *
     * @param connection the connection used to perform the discovery
     * @param userID the user to check, e.g. jdoe@example.com
     * @return true if the user advertises the XHTML-IM feature
     * @throws XMPPErrorException
     * @throws NoResponseException
     * @throws NotConnectedException
     */
    public static boolean isServiceEnabled(XMPPConnection connection, String userID)
            throws NoResponseException, XMPPErrorException, NotConnectedException {
        return ServiceDiscoveryManager.getInstanceFor(connection).supportsFeature(userID, XHTMLExtension.NAMESPACE);
    }
}
| apache-2.0 |
xjbhenry/ChallengeApp | src/main/java/com/maximos/mobile/challengeapp/feedpageproject/CreateChallengeActivity.java | 15867 | package com.maximos.mobile.challengeapp.feedpageproject;
import android.app.Activity;
import android.app.ProgressDialog;
import android.content.Intent;
import android.content.SharedPreferences;
import android.database.Cursor;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.net.Uri;
import android.os.AsyncTask;
import android.os.Bundle;
import android.os.Environment;
import android.provider.MediaStore;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.widget.Button;
import android.widget.EditText;
import android.widget.ImageView;
import android.widget.TextView;
import com.maximos.mobile.challengeapp.FetchFriends.FriendPickerSampleActivity;
import com.maximos.mobile.challengeapp.R;
import com.maximos.mobile.challengeapp.constants.App_Constants;
import com.maximos.mobile.challengeapp.dao.ChallengeDao;
import com.maximos.mobile.challengeapp.model.Challenge;
import com.maximos.mobile.challengeapp.util.RecordAudio;
import com.maximos.mobile.challengeapp.util.UploadFile;
import com.maximos.mobile.challengeapp.util.VideoCapture;
import java.io.File;
import java.io.IOException;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.logging.Level;
import java.util.logging.Logger;
public class CreateChallengeActivity extends Activity {
private static final int SELECT_AUDIO = 2;
private static final int SELECT_VIDEO = 3;
private static final int SELECT_IMAGE = 1;
String selectedPath = "";
static final int REQUEST_IMAGE_CAPTURE = 1;
static final int REQUEST_TAKE_PHOTO = 1;
String mCurrentPhotoPath;
ImageView mImageView;
private static int responseCode = 0;
String fileOnServer = "";
public Logger logger = Logger.getLogger(CreateChallengeActivity.class.getName());
private static final String TAG_NAME = CreateChallengeActivity.class.getName();
@Override
protected void onCreate(Bundle savedInstanceState) {
    // Wires up every button of the "create challenge" screen; each listener
    // either launches another activity or fires an ACTION_GET_CONTENT picker.
    super.onCreate(savedInstanceState);
    setContentView(R.layout.activity_create_challenge);

    // Record a new audio clip in a dedicated activity.
    ((Button) findViewById(R.id.recordAudio)).setOnClickListener(new View.OnClickListener() {
        @Override
        public void onClick(View view) {
            /*Intent intent = new Intent();
            intent.setType("audio/*");
            intent.setAction(Intent.ACTION_GET_CONTENT);
            startActivityForResult(Intent.createChooser(intent,"Select Audio "), SELECT_AUDIO);
            */
            Intent intent;
            intent = new Intent(CreateChallengeActivity.this, RecordAudio.class);
            startActivity(intent);
        }
    });

    // Capture a photo with the device camera into a freshly created file.
    ((Button) findViewById(R.id.takePicture)).setOnClickListener(new View.OnClickListener() {
        @Override
        public void onClick(View view) {
            Intent takePictureIntent = new Intent(MediaStore.ACTION_IMAGE_CAPTURE);
            // Ensure that there's a camera activity to handle the intent
            if (takePictureIntent.resolveActivity(getPackageManager()) != null) {
                // Create the File where the photo should go
                File photoFile = null;
                try {
                    photoFile = createImageFile();
                } catch (IOException ex) {
                    // Previously swallowed silently; log it so a missing
                    // capture file can be diagnosed.
                    logger.log(Level.WARNING, "Failed to create image file for camera capture", ex);
                }
                // Continue only if the File was successfully created
                if (photoFile != null) {
                    takePictureIntent.putExtra(MediaStore.EXTRA_OUTPUT,
                            Uri.fromFile(photoFile));
                    startActivityForResult(takePictureIntent, REQUEST_TAKE_PHOTO);
                }
            }
        }
    });

    // Record a new video in a dedicated activity.
    ((Button) findViewById(R.id.uploadVideo)).setOnClickListener(new View.OnClickListener() {
        @Override
        public void onClick(View view) {
            /*
            Intent intent = new Intent();
            intent.setType("video/*");
            intent.setAction(Intent.ACTION_GET_CONTENT);
            startActivityForResult(Intent.createChooser(intent,"Select Video "), SELECT_VIDEO);
            */
            Intent intent;
            intent = new Intent(CreateChallengeActivity.this, VideoCapture.class);
            startActivity(intent);
        }
    });

    // Pick the friend(s) to challenge.
    ((Button) findViewById(R.id.selectFriend)).setOnClickListener(new View.OnClickListener() {
        @Override
        public void onClick(View view) {
            Intent intent;
            intent = new Intent(CreateChallengeActivity.this, FriendPickerSampleActivity.class);
            startActivity(intent);
        }
    });

    // Pick an existing audio file from the device.
    ((Button) findViewById(R.id.upload_audio)).setOnClickListener(new View.OnClickListener() {
        @Override
        public void onClick(View view) {
            Intent intent = new Intent();
            intent.setType("audio/*");
            intent.setAction(Intent.ACTION_GET_CONTENT);
            startActivityForResult(Intent.createChooser(intent, "Select Audio "), SELECT_AUDIO);
        }
    });

    // Pick an existing video file from the device.
    ((Button) findViewById(R.id.upload_video)).setOnClickListener(new View.OnClickListener() {
        @Override
        public void onClick(View view) {
            Intent intent = new Intent();
            intent.setType("video/*");
            intent.setAction(Intent.ACTION_GET_CONTENT);
            startActivityForResult(Intent.createChooser(intent, "Select Video "), SELECT_VIDEO);
        }
    });

    // Pick an existing image file from the device.
    ((Button) findViewById(R.id.upload_image)).setOnClickListener(new View.OnClickListener() {
        @Override
        public void onClick(View view) {
            Intent intent = new Intent();
            intent.setType("image/*");
            intent.setAction(Intent.ACTION_GET_CONTENT);
            startActivityForResult(Intent.createChooser(intent, "Select Image "), SELECT_IMAGE);
        }
    });

    // NOTE(review): the location button intentionally does nothing yet.
    ((Button) findViewById(R.id.location)).setOnClickListener(new View.OnClickListener() {
        @Override
        public void onClick(View view) {
        }
    });

    // Kick off the background task that actually creates the challenge.
    ((Button) findViewById(R.id.create_challenge)).setOnClickListener(new View.OnClickListener() {
        @Override
        public void onClick(View view) {
            CreateChallengeTask createChallengeAsyncTask = new CreateChallengeTask();
            createChallengeAsyncTask.execute((Void) null);
        }
    });
}
/**
 * Creates an empty, uniquely named JPEG file in the public Pictures
 * directory and remembers its path in {@code mCurrentPhotoPath}.
 *
 * @return the newly created (empty) image file
 * @throws IOException if the file cannot be created
 */
private File createImageFile() throws IOException {
    // Collision-resistant name derived from the current timestamp.
    String timeStamp = new SimpleDateFormat("yyyyMMdd_HHmmss").format(new Date());
    String imageFileName = "JPEG_" + timeStamp + "_";
    File storageDir = Environment.getExternalStoragePublicDirectory(
            Environment.DIRECTORY_PICTURES);
    File image = File.createTempFile(
            imageFileName,  /* prefix */
            ".jpg",         /* suffix */
            storageDir      /* directory */
    );
    // Bug fix: store the plain filesystem path. The old "file:" prefix broke
    // galleryAddPic() (new File("file:...") is not a valid path) and setPic()
    // (BitmapFactory.decodeFile cannot resolve a URI-prefixed string).
    mCurrentPhotoPath = image.getAbsolutePath();
    return image;
}
/** Lifecycle trace: logs when the activity is paused. */
@Override
protected void onPause() {
    logger.log(Level.INFO, "Inside onPause of Create Challenge class Activity");
    super.onPause();
}
/** Lifecycle trace: logs when the activity is stopped. */
@Override
protected void onStop() {
    logger.log(Level.INFO, "Inside OnStop in Create Challenge Class Activity ");
    super.onStop();
}
/** Lifecycle trace: logs when the activity is started. */
@Override
protected void onStart() {
    logger.log(Level.INFO, "Inside Onstart in Create challenge class activity");
    super.onStart();
}
/** Lifecycle trace: logs when the activity is destroyed. */
@Override
protected void onDestroy() {
    logger.log(Level.INFO, "Inside OnDestroy in create challenge class activity");
    super.onDestroy();
}
/**
 * Dispatches results from the media choosers (video/audio/image), the
 * generic upload request (200) and the camera capture request.
 */
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
    // Always notify the base class (the original only did so on the
    // image-capture branch).
    super.onActivityResult(requestCode, resultCode, data);
    if (resultCode != RESULT_OK) {
        return;
    }
    if (requestCode == SELECT_VIDEO) {
        uploadSelectedMedia(data, "Upload Video with Uri");
    } else if (requestCode == SELECT_AUDIO) {
        uploadSelectedMedia(data, "Upload Audio with Uri");
    } else if (requestCode == SELECT_IMAGE) {
        uploadSelectedMedia(data, "Upload Image with Uri");
    }
    if (requestCode == 200) {
        TextView textView = (TextView) findViewById(R.id.uploadTextResult);
        textView.setText(App_Constants.DATA_UPLOADED);
    }
    if (requestCode == REQUEST_IMAGE_CAPTURE) {
        setPic();
        galleryAddPic();
        // data / extras may be null, e.g. when the capture intent wrote the
        // image straight to EXTRA_OUTPUT instead of returning a thumbnail.
        if (data != null && data.getExtras() != null) {
            Bitmap imageBitmap = (Bitmap) data.getExtras().get("data");
            mImageView.setImageBitmap(imageBitmap);
        }
    }
}

// Shared handling for the three ACTION_GET_CONTENT results: resolve the
// content Uri to a filesystem path and upload it in the background.
// Replaces three identical copy-pasted branches.
private void uploadSelectedMedia(Intent data, String logLabel) {
    if (data == null || data.getData() == null) {
        return;  // the chooser returned no media
    }
    logger.log(Level.INFO, TAG_NAME + ": " + logLabel + data.getData());
    selectedPath = getPath(data.getData());
    logger.log(Level.INFO, TAG_NAME + ": path selected" + selectedPath);
    FileUploadAsyncTask fileUploadAsyncTask = new FileUploadAsyncTask(selectedPath);
    fileUploadAsyncTask.execute((Void) null);
}
/**
 * Asks the system media scanner to index the captured photo so it appears
 * in the gallery apps.
 *
 * NOTE(review): this treats mCurrentPhotoPath as a plain filesystem path;
 * if that field carries a "file:" scheme prefix, the File/Uri round-trip
 * below produces an invalid Uri -- verify against createImageFile().
 */
private void galleryAddPic() {
    Intent mediaScanIntent = new Intent(Intent.ACTION_MEDIA_SCANNER_SCAN_FILE);
    File f = new File(mCurrentPhotoPath);
    Uri contentUri = Uri.fromFile(f);
    mediaScanIntent.setData(contentUri);
    this.sendBroadcast(mediaScanIntent);
}
/**
 * Decodes the captured photo scaled down to roughly fill {@code mImageView}
 * and displays it there.
 */
private void setPic() {
    // Target dimensions come from the view; they are 0 before layout.
    int targetW = mImageView.getWidth();
    int targetH = mImageView.getHeight();

    // First pass: read only the bitmap bounds (no pixel allocation).
    BitmapFactory.Options bmOptions = new BitmapFactory.Options();
    bmOptions.inJustDecodeBounds = true;
    BitmapFactory.decodeFile(mCurrentPhotoPath, bmOptions);
    int photoW = bmOptions.outWidth;
    int photoH = bmOptions.outHeight;

    // Bug fix: guard against division by zero when the view has not been
    // measured yet, and clamp the sample size to >= 1 (0 is invalid).
    int scaleFactor = 1;
    if (targetW > 0 && targetH > 0) {
        scaleFactor = Math.max(1, Math.min(photoW / targetW, photoH / targetH));
    }

    // Second pass: decode for real, sub-sampled to save memory.
    bmOptions.inJustDecodeBounds = false;
    bmOptions.inSampleSize = scaleFactor;
    bmOptions.inPurgeable = true;
    Bitmap bitmap = BitmapFactory.decodeFile(mCurrentPhotoPath, bmOptions);
    mImageView.setImageBitmap(bitmap);
}
/**
 * Resolves a chooser Uri to a local filesystem path.
 *
 * @param uri a "content" or "file" scheme Uri
 * @return the resolved path, or null if the Uri cannot be resolved
 */
public String getPath(Uri uri) {
    if ("content".equalsIgnoreCase(uri.getScheme())) {
        String[] projection = {MediaStore.Images.Media.DATA};
        Cursor cursor = getContentResolver().query(uri, projection, null, null, null);
        logger.log(Level.INFO, TAG_NAME + " : cursor" + cursor);
        // query() may return null when the provider is unavailable.
        if (cursor == null) {
            return null;
        }
        try {
            int columnIndex = cursor.getColumnIndexOrThrow(MediaStore.Images.Media.DATA);
            // moveToFirst() is false for an empty result set; the original
            // ignored that and would have thrown on getString().
            if (cursor.moveToFirst()) {
                return cursor.getString(columnIndex);
            }
            return null;
        } finally {
            cursor.close();  // bug fix: the cursor was previously leaked
        }
    } else if ("file".equalsIgnoreCase(uri.getScheme())) {
        return uri.getPath();
    }
    return null;
}
/** Inflates the action-bar menu for this activity. */
@Override
public boolean onCreateOptionsMenu(Menu menu) {
    // Inflate the menu; this adds items to the action bar if it is present.
    getMenuInflater().inflate(R.menu.create_challenge, menu);
    return true;
}
/**
 * Consumes the "settings" menu item; every other item is delegated to the
 * superclass.
 */
@Override
public boolean onOptionsItemSelected(MenuItem item) {
    // Handle action bar item clicks here. The action bar will
    // automatically handle clicks on the Home/Up button, so long
    // as you specify a parent activity in AndroidManifest.xml.
    int id = item.getItemId();
    if (id == R.id.action_settings) {
        return true;
    }
    return super.onOptionsItemSelected(item);
}
/**
 * Uploads a local file to the server in the background while showing a
 * progress dialog. The upload result is published through the enclosing
 * activity's {@code responseCode} / {@code fileOnServer} fields.
 */
public class FileUploadAsyncTask extends AsyncTask<Void, Void, Object> {

    // Path of the file to upload; fixed at construction time.
    private final String selectedPath;
    private ProgressDialog pd;

    FileUploadAsyncTask(String selectedPath) {
        this.selectedPath = selectedPath;
    }

    @Override
    protected void onPreExecute() {
        super.onPreExecute();
        pd = new ProgressDialog(CreateChallengeActivity.this);
        pd.setTitle("Uploading File..");
        pd.setMessage("Please wait,File is getting sent");
        pd.setCancelable(true);
        pd.setIndeterminate(true);
        pd.show();
    }

    @Override
    protected Object doInBackground(Void... params) {
        UploadFile uploadFile = new UploadFile();
        responseCode = uploadFile.uploadVideo(selectedPath);
        fileOnServer = uploadFile.fileOnServer;
        logger.log(Level.INFO, TAG_NAME + " : file on server " + fileOnServer);
        return null;
    }

    @Override
    protected void onPostExecute(Object o) {
        super.onPostExecute(o);
        // Guard against a window leak: only dismiss a dialog that is still
        // showing on a live activity.
        if (pd != null && pd.isShowing() && !isFinishing()) {
            pd.dismiss();
        }
    }
}
/**
 * Creates a challenge from the form fields in the background while showing
 * a progress dialog.
 */
public class CreateChallengeTask extends AsyncTask<Void, Void, Void> {

    private ProgressDialog pd;
    // Form values are captured on the UI thread in onPreExecute();
    // doInBackground() must not touch views.
    private String title;
    private String desc;

    @Override
    protected void onPreExecute() {
        super.onPreExecute();
        EditText titleEditText = (EditText) findViewById(R.id.title);
        title = titleEditText.getText().toString();
        EditText descEditText = (EditText) findViewById(R.id.challenge_desc);
        // Bug fix: the description used to be read from the title field.
        desc = descEditText.getText().toString();
        pd = new ProgressDialog(CreateChallengeActivity.this);
        pd.setTitle("Creating Challenge..");
        pd.setMessage("Please wait, Creating Challenge ");
        pd.setCancelable(true);
        pd.setIndeterminate(true);
        pd.show();
    }

    @Override
    protected Void doInBackground(Void... voids) {
        try {
            // Simulate network access.
            Thread.sleep(2000);
        } catch (InterruptedException e) {
            // Restore the interrupt status instead of swallowing it.
            Thread.currentThread().interrupt();
        }
        SharedPreferences prefs = getSharedPreferences(App_Constants.USER_PREFERENCE_FILE, MODE_PRIVATE);
        boolean isLoggedIn = prefs.getBoolean(App_Constants.IS_USER_LOGGED_IN, false);
        logger.log(Level.INFO, " : " + isLoggedIn);
        if (isLoggedIn) {
            String creatorId = prefs.getString(App_Constants.LOGGED_USER_ID, "none");
            Challenge challenge = new Challenge(title, desc, 1, fileOnServer, null, null, creatorId);
            ChallengeDao.createChallenge(challenge);
        }
        return null;
    }

    @Override
    protected void onPostExecute(Void aVoid) {
        super.onPostExecute(aVoid);
        // Bug fix: the dialog was dismissed from doInBackground(), i.e. off
        // the UI thread; dismiss it here instead, guarded against leaks.
        if (pd != null && pd.isShowing() && !isFinishing()) {
            pd.dismiss();
        }
    }
}
} | apache-2.0 |
Tycheo/coffeemud | com/planet_ink/coffee_mud/Locales/MountainsMaze.java | 1932 | package com.planet_ink.coffee_mud.Locales;
import com.planet_ink.coffee_mud.core.interfaces.*;
import com.planet_ink.coffee_mud.core.*;
import com.planet_ink.coffee_mud.core.collections.*;
import com.planet_ink.coffee_mud.Abilities.interfaces.*;
import com.planet_ink.coffee_mud.Areas.interfaces.*;
import com.planet_ink.coffee_mud.Behaviors.interfaces.*;
import com.planet_ink.coffee_mud.CharClasses.interfaces.*;
import com.planet_ink.coffee_mud.Commands.interfaces.*;
import com.planet_ink.coffee_mud.Common.interfaces.*;
import com.planet_ink.coffee_mud.Exits.interfaces.*;
import com.planet_ink.coffee_mud.Items.interfaces.*;
import com.planet_ink.coffee_mud.Libraries.interfaces.*;
import com.planet_ink.coffee_mud.Locales.interfaces.*;
import com.planet_ink.coffee_mud.MOBS.interfaces.*;
import com.planet_ink.coffee_mud.Races.interfaces.*;
import java.util.*;
/*
Copyright 2001-2015 Bo Zimmerman
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
/**
 * A mountainous maze locale: a grid maze whose child rooms are generated as
 * "Mountains" rooms and which offers the mountain resource set.
 */
public class MountainsMaze extends StdMaze
{
    @Override
    public String ID()
    {
        return "MountainsMaze";
    }

    public MountainsMaze()
    {
        super();
        basePhyStats.setWeight(5);
        recoverPhyStats();
    }

    @Override
    public int domainType()
    {
        return Room.DOMAIN_OUTDOORS_MOUNTAINS;
    }

    @Override
    public String getGridChildLocaleID()
    {
        return "Mountains";
    }

    @Override
    public List<Integer> resourceChoices()
    {
        return Mountains.roomResources;
    }
}
| apache-2.0 |
eclipse/gemini.managment | org.eclipse.gemini.management/src/main/java/org/osgi/jmx/framework/wiring/package-info.java | 1392 | /*
* Copyright (c) OSGi Alliance (2012). All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* OSGi JMX Framework Wiring Package Version 1.1.
*
* <p>
* Bundles wishing to use this package must list the package in the
* Import-Package header of the bundle's manifest. This package has two types of
* users: the consumers that use the API in this package and the providers that
* implement the API in this package.
*
* <p>
* Example import for consumers using the API in this package:
* <p>
* {@code Import-Package: org.osgi.jmx.framework.wiring; version="[1.1,2.0)"}
* <p>
* Example import for providers implementing the API in this package:
* <p>
* {@code Import-Package: org.osgi.jmx.framework.wiring; version="[1.1,1.2)"}
*
* @version $Id: 9710af79a8da06986298af0dede257cbcbb6c487 $
*/
package org.osgi.jmx.framework.wiring;
| apache-2.0 |
liveqmock/platform-tools-idea | java/idea-ui/src/com/intellij/ide/util/projectWizard/ProjectWizardStepFactoryImpl.java | 6321 | /*
* Copyright 2000-2011 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.ide.util.projectWizard;
import com.intellij.ide.util.frameworkSupport.FrameworkSupportUtil;
import com.intellij.ide.util.newProjectWizard.AddModuleWizard;
import com.intellij.ide.util.newProjectWizard.SourcePathsStep;
import com.intellij.ide.util.newProjectWizard.SupportForFrameworksStep;
import com.intellij.openapi.projectRoots.Sdk;
import com.intellij.openapi.projectRoots.SdkType;
import com.intellij.openapi.projectRoots.SdkTypeId;
import com.intellij.openapi.roots.ui.configuration.ModulesProvider;
import com.intellij.openapi.roots.ui.configuration.projectRoot.LibrariesContainer;
import com.intellij.openapi.roots.ui.configuration.projectRoot.LibrariesContainerFactory;
import com.intellij.openapi.util.Computable;
import com.intellij.openapi.util.Condition;
import com.intellij.openapi.util.Key;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import java.util.Map;
/**
 * Default implementation of {@link ProjectWizardStepFactory}: creates the
 * standard steps (name/location, source paths, project JDK selection and
 * framework support) used by the new-project / new-module wizards.
 *
 * @author Eugene Zhuravlev
 * Date: Oct 6, 2004
 */
public class ProjectWizardStepFactoryImpl extends ProjectWizardStepFactory {
  /** Caches the single project-JDK step per wizard context. */
  private static final Key<ProjectJdkStep> PROJECT_JDK_STEP_KEY = Key.create("ProjectJdkStep");

  /** Creates the combined name-and-location step for a Java module. */
  public ModuleWizardStep createNameAndLocationStep(WizardContext wizardContext, JavaModuleBuilder builder, ModulesProvider modulesProvider, Icon icon, String helpId) {
    return new NameLocationStep(wizardContext, builder, modulesProvider, icon, helpId);
  }

  /** Creates the project-name step for the given wizard context. */
  public ModuleWizardStep createNameAndLocationStep(final WizardContext wizardContext) {
    return new ProjectNameStep(wizardContext);
  }

  /**
   * @deprecated
   */
  @Deprecated // annotation added so deprecation is visible to the compiler, not only to javadoc
  public ModuleWizardStep createOutputPathPathsStep(ModuleWizardStep nameAndLocationStep, JavaModuleBuilder builder, Icon icon, String helpId) {
    return new OutputPathsStep((NameLocationStep)nameAndLocationStep, builder, icon, helpId);
  }

  /** Always returns null; retained for interface compatibility. */
  public ModuleWizardStep createSourcePathsStep(ModuleWizardStep nameAndLocationStep, SourcePathsBuilder builder, Icon icon, String helpId) {
    return null;
  }

  /** Creates the source-roots selection step. */
  public ModuleWizardStep createSourcePathsStep(final WizardContext context, final SourcePathsBuilder builder, final Icon icon, @NonNls final String helpId) {
    return new SourcePathsStep(builder, icon, helpId);
  }

  /**
   * @deprecated
   */
  @Deprecated // annotation added so deprecation is visible to the compiler, not only to javadoc
  public ModuleWizardStep createProjectJdkStep(WizardContext context,
                                               final JavaModuleBuilder builder,
                                               final Computable<Boolean> isVisible,
                                               final Icon icon,
                                               final String helpId) {
    return createProjectJdkStep(context, null, builder, isVisible, icon, helpId);
  }

  /**
   * Creates a JDK-selection step for a module; the chosen JDK is pushed into
   * the builder when the wizard commits its data model.
   */
  public ModuleWizardStep createProjectJdkStep(WizardContext context,
                                               SdkType type,
                                               final JavaModuleBuilder builder,
                                               final Computable<Boolean> isVisible,
                                               final Icon icon,
                                               @NonNls final String helpId) {
    return new ProjectJdkForModuleStep(context, type){
      @Override
      public void updateDataModel() {
        super.updateDataModel();
        // Propagate the chosen JDK to the module builder.
        builder.setModuleJdk(getJdk());
      }

      @Override
      public boolean isStepVisible() {
        return isVisible.compute().booleanValue();
      }

      @Override
      public Icon getIcon() {
        return icon;
      }

      @Override
      public String getName() {
        return "Specify JDK";
      }

      @Override
      public String getHelpId() {
        return helpId;
      }
    };
  }

  /**
   * Returns the project-JDK step for the context, creating and caching it on
   * first use. The step is visible only when no suitable default SDK exists.
   */
  public ModuleWizardStep createProjectJdkStep(final WizardContext wizardContext) {
    ProjectJdkStep projectSdkStep = wizardContext.getUserData(PROJECT_JDK_STEP_KEY);
    if (projectSdkStep != null) {
      return projectSdkStep;
    }
    projectSdkStep = new ProjectJdkStep(wizardContext) {
      @Override
      public boolean isStepVisible() {
        // Skip the step when a default project SDK exists and the builder accepts it.
        final Sdk newProjectJdk = AddModuleWizard.getProjectSdkByDefault(wizardContext);
        if (newProjectJdk == null) return true;
        final ProjectBuilder projectBuilder = wizardContext.getProjectBuilder();
        return projectBuilder != null && !projectBuilder.isSuitableSdk(newProjectJdk);
      }
    };
    wizardContext.putUserData(PROJECT_JDK_STEP_KEY, projectSdkStep);
    return projectSdkStep;
  }

  @Nullable
  @Override
  public Sdk getNewProjectSdk(WizardContext wizardContext) {
    return AddModuleWizard.getNewProjectJdk(wizardContext);
  }

  @Override
  public ModuleWizardStep createSupportForFrameworksStep(WizardContext wizardContext, ModuleBuilder moduleBuilder) {
    return createSupportForFrameworksStep(wizardContext, moduleBuilder, ModulesProvider.EMPTY_MODULES_PROVIDER);
  }

  @Override
  public ModuleWizardStep createSupportForFrameworksStep(@NotNull WizardContext context, @NotNull ModuleBuilder builder, @NotNull ModulesProvider modulesProvider) {
    // No step when the builder offers no framework providers at all.
    Map<String,Boolean> availableFrameworks = builder.getAvailableFrameworks();
    if (FrameworkSupportUtil.getProviders(builder).isEmpty() || availableFrameworks != null && availableFrameworks.isEmpty()) {
      return null;
    }
    final LibrariesContainer container = LibrariesContainerFactory.createContainer(context, modulesProvider);
    return new SupportForFrameworksStep(context, builder, container);
  }

  @Override
  public ModuleWizardStep createJavaSettingsStep(@NotNull SettingsStep settingsStep, @NotNull ModuleBuilder moduleBuilder, @NotNull Condition<SdkTypeId> sdkFilter) {
    return new JavaSettingsStep(settingsStep, moduleBuilder, sdkFilter);
  }
}
| apache-2.0 |
dbrimley/hazelcast | hazelcast/src/main/java/com/hazelcast/mapreduce/aggregation/impl/AbstractAggregationCombinerFactory.java | 1754 | /*
* Copyright (c) 2008-2017, Hazelcast, Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hazelcast.mapreduce.aggregation.impl;
import com.hazelcast.mapreduce.CombinerFactory;
import com.hazelcast.nio.ObjectDataInput;
import com.hazelcast.nio.ObjectDataOutput;
import com.hazelcast.nio.serialization.IdentifiedDataSerializable;
import com.hazelcast.nio.serialization.BinaryInterface;
import java.io.IOException;
/**
* Base class for all internal aggregation CombinerFactories to easy the implementation of
* {@link com.hazelcast.nio.serialization.IdentifiedDataSerializable}.
*
* @param <KeyIn> the input key type
* @param <ValueIn> the input value type
* @param <ValueOut> the output value type
*/
@BinaryInterface
abstract class AbstractAggregationCombinerFactory<KeyIn, ValueIn, ValueOut>
        implements CombinerFactory<KeyIn, ValueIn, ValueOut>, IdentifiedDataSerializable {

    // All aggregation combiner factories share the aggregations
    // serialization-factory id; subclasses only supply getId().
    @Override
    public int getFactoryId() {
        return AggregationsDataSerializerHook.F_ID;
    }

    // Intentionally empty: this base class is stateless. Subclasses that add
    // state must override to serialize it.
    @Override
    public void writeData(ObjectDataOutput out)
            throws IOException {
    }

    // Intentionally empty: counterpart of writeData (stateless base class).
    @Override
    public void readData(ObjectDataInput in)
            throws IOException {
    }
}
| apache-2.0 |
UIKit0/jsyn | src/com/jsyn/util/JavaTools.java | 1861 | /*
* Copyright 2009 Phil Burk, Mobileer Inc
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.jsyn.util;
/** Reflection helpers for loading classes by name. */
public class JavaTools {

    /**
     * Loads a class by fully qualified name. First tries
     * {@code Class.forName(String)}; if that fails, retries via the system
     * class loader.
     *
     * @param className fully qualified class name
     * @param verbose if true, print any loading failure to System.out
     * @return the loaded Class, or null if it could not be loaded
     */
    @SuppressWarnings("rawtypes")
    public static Class loadClass(String className, boolean verbose) {
        Class newClass = null;
        try {
            newClass = Class.forName(className);
        } catch (Throwable e) {
            if (verbose) {
                System.out.println("Caught " + e);
            }
        }
        if (newClass == null) {
            try {
                ClassLoader systemLoader = ClassLoader.getSystemClassLoader();
                newClass = Class.forName(className, true, systemLoader);
            } catch (Throwable e) {
                if (verbose) {
                    System.out.println("Caught " + e);
                }
            }
        }
        return newClass;
    }

    /**
     * Same as {@link #loadClass(String, boolean)} with verbose output
     * enabled. (A duplicated javadoc block that sat dead inside this method
     * body has been removed.)
     *
     * @param className fully qualified class name
     * @return the loaded Class, or null if it could not be loaded
     */
    @SuppressWarnings("rawtypes")
    public static Class loadClass(String className) {
        return loadClass(className, true);
    }
}
| apache-2.0 |
KevinLiLu/kafka | clients/src/main/java/org/apache/kafka/common/header/internals/RecordHeaders.java | 5878 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kafka.common.header.internals;
import org.apache.kafka.common.header.Header;
import org.apache.kafka.common.header.Headers;
import org.apache.kafka.common.record.Record;
import org.apache.kafka.common.utils.AbstractIterator;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Iterator;
import java.util.List;
import java.util.Objects;
/**
 * Default mutable {@link Headers} implementation backed by an ArrayList.
 * Duplicate keys are permitted and iteration preserves insertion order.
 * After {@link #setReadOnly()} every mutating operation throws
 * IllegalStateException. Not thread-safe: only the read-only flag is
 * volatile; the backing list itself is unsynchronized.
 */
public class RecordHeaders implements Headers {

    private final List<Header> headers;
    // volatile so a setReadOnly() performed on one thread is visible to
    // readers on another; list mutations are still not synchronized.
    private volatile boolean isReadOnly;

    public RecordHeaders() {
        this((Iterable<Header>) null);
    }

    public RecordHeaders(Header[] headers) {
        if (headers == null) {
            this.headers = new ArrayList<>();
        } else {
            // Copy: callers keep ownership of the array they passed in.
            this.headers = new ArrayList<>(Arrays.asList(headers));
        }
    }

    public RecordHeaders(Iterable<Header> headers) {
        //Use efficient copy constructor if possible, fallback to iteration otherwise
        if (headers == null) {
            this.headers = new ArrayList<>();
        } else if (headers instanceof RecordHeaders) {
            this.headers = new ArrayList<>(((RecordHeaders) headers).headers);
        } else if (headers instanceof Collection) {
            this.headers = new ArrayList<>((Collection<Header>) headers);
        } else {
            this.headers = new ArrayList<>();
            for (Header header : headers)
                this.headers.add(header);
        }
    }

    /**
     * Appends a header; duplicates of an existing key are allowed.
     *
     * @throws IllegalStateException if this instance is read-only
     */
    @Override
    public Headers add(Header header) throws IllegalStateException {
        Objects.requireNonNull(header, "Header cannot be null.");
        canWrite();
        headers.add(header);
        return this;
    }

    @Override
    public Headers add(String key, byte[] value) throws IllegalStateException {
        return add(new RecordHeader(key, value));
    }

    /**
     * Removes every header whose key equals {@code key}.
     *
     * @throws IllegalStateException if this instance is read-only
     */
    @Override
    public Headers remove(String key) throws IllegalStateException {
        canWrite();
        checkKey(key);
        Iterator<Header> iterator = iterator();
        while (iterator.hasNext()) {
            if (iterator.next().key().equals(key)) {
                iterator.remove();
            }
        }
        return this;
    }

    /**
     * Returns the most recently added header for {@code key}, or null if
     * none exists (hence the reverse scan).
     */
    @Override
    public Header lastHeader(String key) {
        checkKey(key);
        for (int i = headers.size() - 1; i >= 0; i--) {
            Header header = headers.get(i);
            if (header.key().equals(key)) {
                return header;
            }
        }
        return null;
    }

    /** Returns a lazy view over all headers whose key equals {@code key}. */
    @Override
    public Iterable<Header> headers(final String key) {
        checkKey(key);
        return () -> new FilterByKeyIterator(headers.iterator(), key);
    }

    @Override
    public Iterator<Header> iterator() {
        // Wrapped so that remove() respects the read-only flag.
        return closeAware(headers.iterator());
    }

    /** Marks this instance immutable; there is no way to undo this. */
    public void setReadOnly() {
        this.isReadOnly = true;
    }

    public Header[] toArray() {
        return headers.isEmpty() ? Record.EMPTY_HEADERS : headers.toArray(new Header[headers.size()]);
    }

    private void checkKey(String key) {
        if (key == null)
            throw new IllegalArgumentException("key cannot be null.");
    }

    // Throws if setReadOnly() has been called; invoked before every mutation.
    private void canWrite() {
        if (isReadOnly)
            throw new IllegalStateException("RecordHeaders has been closed.");
    }

    // Delegating iterator whose remove() honors the read-only flag.
    private Iterator<Header> closeAware(final Iterator<Header> original) {
        return new Iterator<Header>() {
            @Override
            public boolean hasNext() {
                return original.hasNext();
            }

            public Header next() {
                return original.next();
            }

            @Override
            public void remove() {
                canWrite();
                original.remove();
            }
        };
    }

    @Override
    public boolean equals(Object o) {
        // Note: equality ignores the read-only flag on purpose.
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }
        RecordHeaders headers1 = (RecordHeaders) o;
        return Objects.equals(headers, headers1.headers);
    }

    @Override
    public int hashCode() {
        return headers != null ? headers.hashCode() : 0;
    }

    @Override
    public String toString() {
        return "RecordHeaders(" +
            "headers = " + headers +
            ", isReadOnly = " + isReadOnly +
            ')';
    }

    // Iterator that skips headers whose key does not match.
    private static final class FilterByKeyIterator extends AbstractIterator<Header> {

        private final Iterator<Header> original;
        private final String key;

        private FilterByKeyIterator(Iterator<Header> original, String key) {
            this.original = original;
            this.key = key;
        }

        protected Header makeNext() {
            while (true) {
                if (original.hasNext()) {
                    Header header = original.next();
                    if (!header.key().equals(key))
                        continue;

                    return header;
                }
                return this.allDone();
            }
        }
    }
}
| apache-2.0 |
xin-cai/openwhisk | tests/src/test/scala/common/WhiskProperties.java | 12579 | /*
* Copyright 2015-2016 IBM Corporation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package common;
import static org.junit.Assert.assertTrue;

import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.util.Locale;
import java.util.Properties;
/**
* Properties that describe a whisk installation
*/
public class WhiskProperties {
/**
* The name of the properties file.
*/
protected static final String WHISK_PROPS_FILE = "whisk.properties";
/**
* Default concurrency level if otherwise unspecified
*/
private static final int DEFAULT_CONCURRENCY = 20;
/**
* The deployment target, e.g., local.
*/
public static final String deployTarget = System.getProperty("deploy.target");
/**
* If true, then tests will direct to the router rather than the edge
* components.
*/
public static final boolean testRouter = System.getProperty("test.router", "false").equals("true");
/**
* The number of tests to run concurrently.
*/
public static final int concurrentTestCount = getConcurrentTestCount(System.getProperty("testthreads", null));
/**
* The root of the whisk installation, used to retrieve files relative to
* home.
*/
private static final String whiskHome;
/**
* The properties read from the WHISK_PROPS_FILE.
*/
private static final Properties whiskProperties;
static {
/**
* Finds the whisk home directory. This is resolved to either (in
* order):
*
* 1. a system property openwhisk.dir
*
* 2. OPENWHISK_HOME from the environment
*
* 3. a path in the directory tree containing WHISK_PROPS_FILE.
*
* @return the path to whisk home as a string
* @throws assertion
* failure if whisk home cannot be determined
*/
String wskdir = System.getProperty("openwhisk.home", System.getenv("OPENWHISK_HOME"));
if (wskdir == null) {
String dir = System.getProperty("user.dir");
if (dir != null) {
File propfile = findFileRecursively(dir, WHISK_PROPS_FILE);
if (propfile != null) {
wskdir = propfile.getParent();
}
}
}
assertTrue("could not determine openwhisk home", wskdir != null);
File wskpropsFile = new File(wskdir, WHISK_PROPS_FILE);
assertTrue(String.format("'%s' does not exists but required", wskpropsFile), wskpropsFile.exists());
// loads properties from file
whiskProperties = loadProperties(wskpropsFile);
// set whisk home from read properties
whiskHome = whiskProperties.getProperty("openwhisk.home");
System.out.format("deploy target %s\n", deployTarget != null ? deployTarget : "not defined");
System.out.format("test router? %s\n", testRouter);
}
/**
* The path to the CLI directory.
*/
public static String getCLIDir() {
return whiskHome + "/bin";
}
/**
* The path to the Go CLI executable.
*/
public static String getCLIPath() {
return getCLIDir() + "/wsk";
}
public static File getFileRelativeToWhiskHome(String name) {
return new File(whiskHome, name);
}
public static String getProperty(String string) {
return whiskProperties.getProperty(string);
}
public static String getKafkaHost() {
return whiskProperties.getProperty("kafka.host");
}
public static int getKafkaPort() {
return Integer.parseInt(whiskProperties.getProperty("kafka.host.port"));
}
public static int getKafkaMonitorPort() {
return Integer.parseInt(whiskProperties.getProperty("kafkaras.host.port"));
}
public static String getConsulServerHost() {
return whiskProperties.getProperty("consulserver.host");
}
public static int getConsulKVPort() {
return Integer.parseInt(whiskProperties.getProperty("consul.host.port4"));
}
public static String getZookeeperHost() {
return whiskProperties.getProperty("zookeeper.host");
}
public static int getZookeeperPort() {
return Integer.parseInt(whiskProperties.getProperty("zookeeper.host.port"));
}
public static String getMainDockerEndpoint() {
return whiskProperties.getProperty("main.docker.endpoint");
}
public static String getKafkaDockerEndpoint() {
return whiskProperties.getProperty("kafka.docker.endpoint");
}
public static boolean useCLIDownload() {
return whiskProperties.getProperty("use.cli.download").equals("true");
}
public static String[] getInvokerHosts() {
// split of empty string is non-empty array
String hosts = whiskProperties.getProperty("invoker.hosts");
return (hosts == null || hosts.equals("")) ? new String[0] : hosts.split(",");
}
public static String[] getAdditionalHosts() {
// split of empty string is non-empty array
String hosts = whiskProperties.getProperty("additional.hosts");
return (hosts == null || hosts.equals("")) ? new String[0] : hosts.split(",");
}
public static int numberOfInvokers() {
return getInvokerHosts().length;
}
public static String getSslCertificateChallenge() {
return whiskProperties.getProperty("whisk.ssl.challenge");
}
/**
* Note that when testRouter == true, we pretend the router host is edge
* host.
*/
public static String getEdgeHost() {
return testRouter ? getRouterHost() : whiskProperties.getProperty("edge.host");
}
public static String getRealEdgeHost() {
return whiskProperties.getProperty("edge.host");
}
public static String getAuthForTesting() {
return whiskProperties.getProperty("testing.auth");
}
public static String getRouterHost() {
return whiskProperties.getProperty("router.host");
}
public static String getApiHostForAction() {
String proto = whiskProperties.getProperty("whisk.api.host.proto");
String port = whiskProperties.getProperty("whisk.api.host.port");
String host = whiskProperties.getProperty("whisk.api.host.name");
return proto + "://" + host + ":" + port;
}
public static String getApiHostForClient(String subdomain, boolean includeProtocol) {
String proto = whiskProperties.getProperty("whisk.api.host.proto");
String port = whiskProperties.getProperty("whisk.api.host.port");
String host = whiskProperties.getProperty("whisk.api.localhost.name");
if (includeProtocol) {
return proto + "://" + subdomain + "." + host + ":" + port;
} else {
return subdomain + "." + host + ":" + port;
}
}
public static int getPartsInVanitySubdomain() {
return Integer.parseInt(whiskProperties.getProperty("whisk.api.vanity.subdomain.parts"));
}
public static int getEdgeHostApiPort() {
return Integer.parseInt(whiskProperties.getProperty("edge.host.apiport"));
}
public static String getLoadbalancerHost() {
return whiskProperties.getProperty("loadbalancer.host");
}
public static int getLoadbalancerPort() {
return Integer.parseInt(whiskProperties.getProperty("loadbalancer.host.port"));
}
public static String getControllerHost() {
return whiskProperties.getProperty("controller.host");
}
public static int getControllerPort() {
return Integer.parseInt(whiskProperties.getProperty("controller.host.port"));
}
public static int getMaxActionInvokesPerMinute() {
String valStr = whiskProperties.getProperty("limits.actions.invokes.perMinute");
if (null == valStr) {
valStr = whiskProperties.getProperty("defaultLimits.actions.invokes.perMinute");
}
return Integer.parseInt(valStr);
}
/**
* read the contents of auth key file and return as a Pair
* <username,password>
*/
public static Pair<String, String> getBasicAuth() {
File f = getAuthFileForTesting();
String contents = readAuthKey(f);
String[] parts = contents.split(":");
assert parts.length == 2;
return Pair.make(parts[0], parts[1]);
}
/**
* @return the path to a file holding the auth key used during junit testing
*/
public static File getAuthFileForTesting() {
String testAuth = getAuthForTesting();
if (testAuth.startsWith(File.separator)) {
return new File(testAuth);
} else {
return WhiskProperties.getFileRelativeToWhiskHome(testAuth);
}
}
/**
* read the contents of a file which holds an auth key.
*/
public static String readAuthKey(File filename) {
// the following funny relative path works both from Eclipse and when
// running in bin/ directory from ant
try {
byte[] encoded = Files.readAllBytes(filename.toPath());
String authKey = new String(encoded, "UTF-8").trim();
return authKey;
} catch (IOException e) {
throw new IllegalStateException(e);
}
}
/**
* @return the path to a file holding the VCAP_SERVICES used during junit
* testing
*/
public static File getVCAPServicesFile() {
String vcapServices = whiskProperties.getProperty("vcap.services.file");
if (vcapServices.startsWith(File.separator)) {
return new File(vcapServices);
} else {
return WhiskProperties.getFileRelativeToWhiskHome(vcapServices);
}
}
/**
* are we running on Mac OS X?
*/
public static boolean onMacOSX() {
String osname = System.getProperty("os.name");
return osname.toLowerCase().contains("mac");
}
/**
* are we running on Linux?
*/
public static boolean onLinux() {
String osname = System.getProperty("os.name");
return osname.equalsIgnoreCase("linux");
}
    /**
     * Path of the python interpreter to use: prefers /usr/local/bin/python2.7
     * when it exists, otherwise falls back to plain "python" on the PATH.
     */
    public static final String python = findPython();
protected static File findFileRecursively(String dir, String needle) {
if (dir != null) {
File base = new File(dir);
File file = new File(base, needle);
if (file.exists()) {
return file;
} else {
return findFileRecursively(base.getParent(), needle);
}
} else {
return null;
}
}
/**
* Load properties from whisk.properties
*/
protected static Properties loadProperties(File propsFile) {
Properties props = new Properties();
InputStream input = null;
try {
input = new FileInputStream(propsFile);
// load a properties file
props.load(input);
} catch (IOException ex) {
ex.printStackTrace();
} finally {
if (input != null) {
try {
input.close();
} catch (IOException e) {
e.printStackTrace();
}
}
}
return props;
}
private static String findPython() {
File p27 = new File("/usr/local/bin/python2.7");
if (p27.exists()) {
return "/usr/local/bin/python2.7";
} else {
return "python";
}
}
private static int getConcurrentTestCount(String count) {
if (count != null && count.trim().isEmpty() == false) {
try {
int threads = Integer.parseInt(count);
if (threads > 0) {
return threads;
}
} catch (NumberFormatException e) {
}
}
return DEFAULT_CONCURRENCY;
}
}
| apache-2.0 |
liuyuanyuan/dbeaver | plugins/org.jkiss.dbeaver.ext.db2/src/org/jkiss/dbeaver/ext/db2/model/DB2Table.java | 16982 | /*
* DBeaver - Universal Database Manager
* Copyright (C) 2013-2016 Denis Forveille (titou10.titou10@gmail.com)
* Copyright (C) 2010-2019 Serge Rider (serge@jkiss.org)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jkiss.dbeaver.ext.db2.model;
import java.sql.ResultSet;
import java.sql.Timestamp;
import java.util.Collection;
import java.util.Map;
import org.jkiss.code.NotNull;
import org.jkiss.code.Nullable;
import org.jkiss.dbeaver.DBException;
import org.jkiss.dbeaver.ext.db2.DB2Constants;
import org.jkiss.dbeaver.ext.db2.DB2Utils;
import org.jkiss.dbeaver.ext.db2.editors.DB2SourceObject;
import org.jkiss.dbeaver.ext.db2.editors.DB2TableTablespaceListProvider;
import org.jkiss.dbeaver.ext.db2.model.cache.DB2TableTriggerCache;
import org.jkiss.dbeaver.ext.db2.model.dict.DB2TableAccessMode;
import org.jkiss.dbeaver.ext.db2.model.dict.DB2TableCompressionMode;
import org.jkiss.dbeaver.ext.db2.model.dict.DB2TableDropRule;
import org.jkiss.dbeaver.ext.db2.model.dict.DB2TableLockSize;
import org.jkiss.dbeaver.ext.db2.model.dict.DB2TablePartitionMode;
import org.jkiss.dbeaver.ext.db2.model.dict.DB2TableStatus;
import org.jkiss.dbeaver.ext.db2.model.dict.DB2TableTemporalType;
import org.jkiss.dbeaver.ext.db2.model.dict.DB2TableType;
import org.jkiss.dbeaver.ext.db2.model.dict.DB2YesNo;
import org.jkiss.dbeaver.model.DBPNamedObject2;
import org.jkiss.dbeaver.model.DBPRefreshableObject;
import org.jkiss.dbeaver.model.data.DBDPseudoAttribute;
import org.jkiss.dbeaver.model.data.DBDPseudoAttributeContainer;
import org.jkiss.dbeaver.model.exec.DBCException;
import org.jkiss.dbeaver.model.impl.DBObjectNameCaseTransformer;
import org.jkiss.dbeaver.model.impl.DBSObjectCache;
import org.jkiss.dbeaver.model.impl.jdbc.JDBCUtils;
import org.jkiss.dbeaver.model.impl.jdbc.cache.JDBCObjectSimpleCache;
import org.jkiss.dbeaver.model.impl.jdbc.cache.JDBCStructCache;
import org.jkiss.dbeaver.model.meta.Association;
import org.jkiss.dbeaver.model.meta.Property;
import org.jkiss.dbeaver.model.runtime.DBRProgressMonitor;
import org.jkiss.dbeaver.model.struct.DBSObject;
import org.jkiss.dbeaver.model.struct.DBSObjectState;
import org.jkiss.dbeaver.model.struct.rdb.DBSTableForeignKey;
import org.jkiss.dbeaver.utils.GeneralUtils;
import org.jkiss.utils.CommonUtils;
/**
* DB2 Table
*
* @author Denis Forveille
*/
public class DB2Table extends DB2TableBase
    implements DBPNamedObject2, DBPRefreshableObject, DB2SourceObject, DBDPseudoAttributeContainer {

    private static final String LINE_SEPARATOR = GeneralUtils.getDefaultLineSeparator();

    // Catalog queries listing the partitions/periods of one table, bound to (schema, table name)
    private static final String C_PT = "SELECT * FROM SYSCAT.DATAPARTITIONS WHERE TABSCHEMA = ? AND TABNAME = ? ORDER BY SEQNO WITH UR";
    private static final String C_PE = "SELECT * FROM SYSCAT.PERIODS WHERE TABSCHEMA = ? AND TABNAME = ? ORDER BY PERIODNAME WITH UR";

    private DB2TableTriggerCache tableTriggerCache = new DB2TableTriggerCache();

    // Dependent of DB2 Version. OK because the folder is hidden in plugin.xml
    // Note: only initialized by the ResultSet constructor; remains null for tables
    // created via DB2Table(DB2Schema, String) — the partition/period getters guard for this.
    private DBSObjectCache<DB2Table, DB2TablePartition> partitionCache;
    private DBSObjectCache<DB2Table, DB2TablePeriod> periodCache;

    private DB2TableStatus status;
    private DB2TableType type;

    // Tablespace slots hold either the catalog name (String, lazy) or the resolved
    // DB2Tablespace set through the setters; getters resolve via resolveTablespaceReference.
    private Object tablespace;
    private Object indexTablespace;
    private Object longTablespace;

    private String dataCapture;
    private String constChecked;
    private DB2TablePartitionMode partitionMode;
    private Boolean append;
    private DB2TableLockSize lockSize;
    private String volatileMode;
    private DB2TableCompressionMode compression;
    private DB2TableAccessMode accessMode;
    private Boolean mdcClustered;
    private DB2TableDropRule dropRule;
    private DB2TableTemporalType temporalType;

    private Timestamp alterTime;
    private Timestamp invalidateTime;
    private Timestamp lastRegenTime;
    private Timestamp statsTime;

    private Long card;
    private Long nPages;
    private Long fPages;
    private Long overFLow;

    // -----------------
    // Constructors
    // -----------------

    /**
     * Builds a table from a SYSCAT.TABLES row, reading version-dependent columns
     * (ALTER_TIME requires v9.5+, TEMPORALTYPE requires v10.1+) only when available.
     */
    public DB2Table(DBRProgressMonitor monitor, DB2Schema schema, ResultSet dbResult) throws DBException
    {
        super(monitor, schema, dbResult);
        this.status = CommonUtils.valueOf(DB2TableStatus.class, JDBCUtils.safeGetString(dbResult, "STATUS"));
        this.type = CommonUtils.valueOf(DB2TableType.class, JDBCUtils.safeGetString(dbResult, "TYPE"));
        this.statsTime = JDBCUtils.safeGetTimestamp(dbResult, "STATS_TIME");
        this.dataCapture = JDBCUtils.safeGetString(dbResult, "DATACAPTURE");
        this.constChecked = JDBCUtils.safeGetString(dbResult, "CONST_CHECKED");
        this.partitionMode = CommonUtils.valueOf(DB2TablePartitionMode.class, JDBCUtils.safeGetString(dbResult, "PARTITION_MODE"));
        this.append = JDBCUtils.safeGetBoolean(dbResult, "APPEND_MODE", DB2YesNo.Y.name());
        this.volatileMode = JDBCUtils.safeGetString(dbResult, "VOLATILE");
        this.compression = CommonUtils.valueOf(DB2TableCompressionMode.class, JDBCUtils.safeGetString(dbResult, "COMPRESSION"));
        this.accessMode = CommonUtils.valueOf(DB2TableAccessMode.class, JDBCUtils.safeGetString(dbResult, "ACCESS_MODE"));
        this.mdcClustered = JDBCUtils.safeGetBoolean(dbResult, "CLUSTERED", DB2YesNo.Y.name());
        this.dropRule = CommonUtils.valueOf(DB2TableDropRule.class, JDBCUtils.safeGetString(dbResult, "DROPRULE"));
        this.card = JDBCUtils.safeGetLongNullable(dbResult, "CARD");
        this.nPages = JDBCUtils.safeGetLongNullable(dbResult, "NPAGES");
        this.fPages = JDBCUtils.safeGetLongNullable(dbResult, "FPAGES");
        this.overFLow = JDBCUtils.safeGetLongNullable(dbResult, "OVERFLOW");
        this.invalidateTime = JDBCUtils.safeGetTimestamp(dbResult, "INVALIDATE_TIME");
        this.lastRegenTime = JDBCUtils.safeGetTimestamp(dbResult, "LAST_REGEN_TIME");
        if (getDataSource().isAtLeastV9_5()) {
            this.alterTime = JDBCUtils.safeGetTimestamp(dbResult, "ALTER_TIME");
        }
        if (getDataSource().isAtLeastV10_1()) {
            this.temporalType = CommonUtils.valueOf(DB2TableTemporalType.class, JDBCUtils.safeGetString(dbResult, "TEMPORALTYPE"));
        }
        String lockSizeString = JDBCUtils.safeGetString(dbResult, "LOCKSIZE");
        if (CommonUtils.isNotEmpty(lockSizeString)) {
            this.lockSize = CommonUtils.valueOf(DB2TableLockSize.class, lockSizeString);
        }
        this.tablespace = JDBCUtils.safeGetString(dbResult, "TBSPACE");
        this.indexTablespace = JDBCUtils.safeGetString(dbResult, "INDEX_TBSPACE");
        this.longTablespace = JDBCUtils.safeGetString(dbResult, "LONG_TBSPACE");
        this.partitionCache = new JDBCObjectSimpleCache<>(DB2TablePartition.class, C_PT, schema.getName(), getName());
        this.periodCache = new JDBCObjectSimpleCache<>(DB2TablePeriod.class, C_PE, schema.getName(), getName());
    }

    /**
     * Builds a new (not yet persisted) table with default type T and status N;
     * partition/period caches are intentionally left null here.
     */
    public DB2Table(DB2Schema schema, String name)
    {
        super(schema, name, false);
        this.type = DB2TableType.T;
        this.status = DB2TableStatus.N;
    }

    // -----------------
    // Business Contract
    // -----------------

    @Override
    public boolean isView()
    {
        return false;
    }

    @Override
    public JDBCStructCache<DB2Schema, DB2Table, DB2TableColumn> getCache()
    {
        return getContainer().getTableCache();
    }

    /**
     * Clears the schema-level constraint/association/reference caches for this table
     * before re-reading it from the schema's table cache.
     */
    @Override
    public DBSObject refreshObject(@NotNull DBRProgressMonitor monitor) throws DBException
    {
        getContainer().getConstraintCache().clearObjectCache(this);
        getContainer().getAssociationCache().clearObjectCache(this);
        getContainer().getReferenceCache().clearObjectCache(this);
        super.refreshObject(monitor);
        return getContainer().getTableCache().refreshObject(monitor, getContainer(), this);
    }

    @NotNull
    @Override
    public DBSObjectState getObjectState()
    {
        return status.getState();
    }

    @Override
    public void refreshObjectState(@NotNull DBRProgressMonitor monitor) throws DBCException
    {
        // no-op: state is derived directly from the cached status attribute
    }

    @Override
    public String getObjectDefinitionText(DBRProgressMonitor monitor, Map<String, Object> options) throws DBException
    {
        return DB2Utils.generateDDLforTable(monitor, LINE_SEPARATOR, getDataSource(), this);
    }

    // -----------------
    // Associations
    // -----------------

    @Association
    public Collection<DB2Trigger> getTriggers(DBRProgressMonitor monitor) throws DBException
    {
        return tableTriggerCache.getAllObjects(monitor, this);
    }

    @Association
    public Collection<DB2TablePartition> getPartitions(DBRProgressMonitor monitor) throws DBException
    {
        // TODO DF: beurk: Consequences of "Integrated cache" that can not be created in class def= NPE with managers
        if (partitionCache == null) {
            return null;
        } else {
            return partitionCache.getAllObjects(monitor, this);
        }
    }

    @Association
    public Collection<DB2TablePeriod> getPeriods(DBRProgressMonitor monitor) throws DBException
    {
        // TODO DF: beurk: Consequences of "Integrated cache" that can not be created in class def= NPE with managers
        if (periodCache == null) {
            return null;
        } else {
            return periodCache.getAllObjects(monitor, this);
        }
    }

    @Nullable
    @Override
    @Association
    public Collection<DB2TableUniqueKey> getConstraints(@NotNull DBRProgressMonitor monitor) throws DBException
    {
        return getContainer().getConstraintCache().getObjects(monitor, getContainer(), this);
    }

    public DB2TableUniqueKey getConstraint(DBRProgressMonitor monitor, String ukName) throws DBException
    {
        return getContainer().getConstraintCache().getObject(monitor, getContainer(), this, ukName);
    }

    @Override
    @Association
    public Collection<DB2TableForeignKey> getAssociations(@NotNull DBRProgressMonitor monitor) throws DBException
    {
        return getContainer().getAssociationCache().getObjects(monitor, getContainer(), this);
    }

    public DBSTableForeignKey getAssociation(DBRProgressMonitor monitor, String ukName) throws DBException
    {
        return getContainer().getAssociationCache().getObject(monitor, getContainer(), this, ukName);
    }

    @Override
    @Association
    public Collection<DB2TableReference> getReferences(@NotNull DBRProgressMonitor monitor) throws DBException
    {
        return getContainer().getReferenceCache().getObjects(monitor, getContainer(), this);
    }

    public DBSTableForeignKey getReference(DBRProgressMonitor monitor, String ukName) throws DBException
    {
        return getContainer().getReferenceCache().getObject(monitor, getContainer(), this, ukName);
    }

    @Association
    public Collection<DB2TableCheckConstraint> getCheckConstraints(DBRProgressMonitor monitor) throws DBException
    {
        return getContainer().getCheckCache().getObjects(monitor, getContainer(), this);
    }

    public DB2TableCheckConstraint getCheckConstraint(DBRProgressMonitor monitor, String ukName) throws DBException
    {
        return getContainer().getCheckCache().getObject(monitor, getContainer(), this, ukName);
    }

    // -----------------
    // Properties
    // -----------------
    // NOTE: the @Property annotations drive the UI via reflection; viewable/editable/order/category
    // values are part of the presentation contract and must be kept stable.

    @NotNull
    @Override
    @Property(viewable = true, editable = true, valueTransformer = DBObjectNameCaseTransformer.class, order = 1)
    public String getName()
    {
        return super.getName();
    }

    @Property(viewable = true, editable = false, order = 3, category = DB2Constants.CAT_STATS)
    public Long getCard()
    {
        return card;
    }

    @Property(viewable = true, editable = false, order = 4)
    public DB2TableStatus getStatus()
    {
        return status;
    }

    @Property(viewable = true, editable = false, order = 5)
    public DB2TableType getType()
    {
        return type;
    }

    @Property(viewable = true, editable = true, order = 10, category = DB2Constants.CAT_TABLESPACE, listProvider = DB2TableTablespaceListProvider.class)
    public DB2Tablespace getTablespace(DBRProgressMonitor monitor) throws DBException
    {
        return DB2Tablespace.resolveTablespaceReference(monitor, getDataSource(), tablespace);
    }

    public void setTablespace(DB2Tablespace tablespace)
    {
        this.tablespace = tablespace;
    }

    @Property(viewable = false, editable = true, order = 11, category = DB2Constants.CAT_TABLESPACE, listProvider = DB2TableTablespaceListProvider.class)
    public DB2Tablespace getIndexTablespace(DBRProgressMonitor monitor) throws DBException
    {
        return DB2Tablespace.resolveTablespaceReference(monitor, getDataSource(), indexTablespace);
    }

    public void setIndexTablespace(DB2Tablespace indexTablespace)
    {
        this.indexTablespace = indexTablespace;
    }

    @Property(viewable = false, editable = true, order = 12, category = DB2Constants.CAT_TABLESPACE, listProvider = DB2TableTablespaceListProvider.class)
    public DB2Tablespace getLongTablespace(DBRProgressMonitor monitor) throws DBException
    {
        return DB2Tablespace.resolveTablespaceReference(monitor, getDataSource(), longTablespace);
    }

    public void setLongTablespace(DB2Tablespace longTablespace)
    {
        this.longTablespace = longTablespace;
    }

    @Property(viewable = false, editable = false, category = DB2Constants.CAT_STATS)
    public Timestamp getStatsTime()
    {
        return statsTime;
    }

    @Property(viewable = false, editable = false, category = DB2Constants.CAT_STATS)
    public Long getnPages()
    {
        return nPages;
    }

    @Property(viewable = false, editable = false, category = DB2Constants.CAT_STATS)
    public Long getfPages()
    {
        return fPages;
    }

    @Property(viewable = false, editable = false, category = DB2Constants.CAT_STATS)
    public Long getOverFLow()
    {
        return overFLow;
    }

    @Property(viewable = false, editable = false, order = 100)
    public Boolean getAppend()
    {
        return append;
    }

    @Property(viewable = false, editable = false, order = 101)
    public String getVolatileMode()
    {
        return volatileMode;
    }

    @Property(viewable = false, editable = false, order = 104)
    public DB2TableLockSize getLockSize()
    {
        return lockSize;
    }

    @Property(viewable = false, editable = false, order = 105)
    public DB2TableCompressionMode getCompression()
    {
        return compression;
    }

    @Property(viewable = false, editable = false, order = 106)
    public DB2TableAccessMode getAccessMode()
    {
        return accessMode;
    }

    @Property(viewable = false, editable = false, order = 107)
    public Boolean getMdcClustered()
    {
        return mdcClustered;
    }

    @Property(viewable = false, editable = false, order = 108)
    public DB2TableDropRule getDropRule()
    {
        return dropRule;
    }

    @Property(viewable = false, editable = false, order = 109)
    public String getDataCapture()
    {
        return dataCapture;
    }

    @Property(viewable = false, editable = false, order = 110)
    public DB2TablePartitionMode getPartitionMode()
    {
        return partitionMode;
    }

    @Property(viewable = false, editable = false, order = 111)
    public String getConstChecked()
    {
        return constChecked;
    }

    @Property(viewable = false, editable = false, order = 120, category = DB2Constants.CAT_TEMPORAL)
    public DB2TableTemporalType getTemporalType()
    {
        return temporalType;
    }

    @Property(viewable = false, editable = false, order = 101, category = DB2Constants.CAT_DATETIME)
    public Timestamp getAlterTime()
    {
        return alterTime;
    }

    @Property(viewable = false, editable = false, order = 102, category = DB2Constants.CAT_DATETIME)
    public Timestamp getInvalidateTime()
    {
        return invalidateTime;
    }

    @Property(viewable = false, editable = false, order = 103, category = DB2Constants.CAT_DATETIME)
    public Timestamp getLastRegenTime()
    {
        return lastRegenTime;
    }

    /**
     * The RID_BIT pseudo attribute is only available starting with DB2 v9.5;
     * older servers expose no pseudo attributes (null).
     */
    @Override
    public DBDPseudoAttribute[] getPseudoAttributes() throws DBException
    {
        if (getDataSource().isAtLeastV9_5()) {
            return new DBDPseudoAttribute[] { DB2Constants.PSEUDO_ATTR_RID_BIT };
        } else {
            return null;
        }
    }
}
| apache-2.0 |
onders86/camel | components/camel-jms/src/main/java/org/apache/camel/component/jms/EndpointMessageListener.java | 17602 | /**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.jms;
import javax.jms.Destination;
import javax.jms.JMSException;
import javax.jms.Message;
import javax.jms.MessageListener;
import javax.jms.Session;
import org.apache.camel.AsyncCallback;
import org.apache.camel.AsyncProcessor;
import org.apache.camel.Exchange;
import org.apache.camel.ExchangePattern;
import org.apache.camel.Processor;
import org.apache.camel.RollbackExchangeException;
import org.apache.camel.RuntimeCamelException;
import org.apache.camel.util.AsyncProcessorConverterHelper;
import org.apache.camel.util.ObjectHelper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.jms.core.JmsOperations;
import org.springframework.jms.core.MessageCreator;
import org.springframework.jms.listener.SessionAwareMessageListener;
import static org.apache.camel.util.ObjectHelper.wrapRuntimeCamelException;
/**
* A JMS {@link MessageListener} which can be used to delegate processing to a
* Camel endpoint.
*
* Note that instance of this object has to be thread safe (reentrant)
*
* @version
*/
public class EndpointMessageListener implements SessionAwareMessageListener {
    private static final Logger LOG = LoggerFactory.getLogger(EndpointMessageListener.class);

    private final JmsEndpoint endpoint;
    private final AsyncProcessor processor;
    private JmsBinding binding;
    private boolean eagerLoadingOfProperties;
    private Object replyToDestination;
    private JmsOperations template;
    private boolean disableReplyTo;
    private boolean async;

    public EndpointMessageListener(JmsEndpoint endpoint, Processor processor) {
        this.endpoint = endpoint;
        this.processor = AsyncProcessorConverterHelper.convert(processor);
    }

    @Override
    public void onMessage(Message message, Session session) throws JMSException {
        LOG.trace("onMessage START");
        LOG.debug("{} consumer received JMS message: {}", endpoint, message);

        boolean sendReply;
        RuntimeCamelException rce;
        try {
            Object replyDestination = getReplyToDestination(message);
            // we can only send back a reply if there was a reply destination configured
            // and disableReplyTo hasn't been explicit enabled
            sendReply = replyDestination != null && !disableReplyTo;

            // we should also not send back reply to ourself if this destination and replyDestination is the same
            Destination destination = JmsMessageHelper.getJMSDestination(message);
            if (destination != null && sendReply && !endpoint.isReplyToSameDestinationAllowed() && destination.equals(replyDestination)) {
                LOG.debug("JMSDestination and JMSReplyTo is the same, will skip sending a reply message to itself: {}", destination);
                sendReply = false;
            }

            final Exchange exchange = createExchange(message, session, replyDestination);
            if (eagerLoadingOfProperties) {
                // force body and headers to be mapped now, so mapping errors surface here
                exchange.getIn().getBody();
                exchange.getIn().getHeaders();
            }

            String correlationId = message.getJMSCorrelationID();
            if (correlationId != null) {
                LOG.debug("Received Message has JMSCorrelationID [{}]", correlationId);
            }

            // process the exchange either asynchronously or synchronous
            LOG.trace("onMessage.process START");
            AsyncCallback callback = new EndpointMessageListenerAsyncCallback(message, exchange, endpoint, sendReply, replyDestination);

            // async is by default false, which mean we by default will process the exchange synchronously
            // to keep backwards compatible, as well ensure this consumer will pickup messages in order
            // (eg to not consume the next message before the previous has been fully processed)
            // but if end user explicit configure consumerAsync=true, then we can process the message
            // asynchronously (unless endpoint has been configured synchronous, or we use transaction)
            boolean forceSync = endpoint.isSynchronous() || endpoint.isTransacted();
            if (forceSync || !isAsync()) {
                // must process synchronous if transacted or configured to do so
                if (LOG.isTraceEnabled()) {
                    LOG.trace("Processing exchange {} synchronously", exchange.getExchangeId());
                }
                try {
                    processor.process(exchange);
                } catch (Exception e) {
                    exchange.setException(e);
                } finally {
                    callback.done(true);
                }
            } else {
                // process asynchronous using the async routing engine
                if (LOG.isTraceEnabled()) {
                    LOG.trace("Processing exchange {} asynchronously", exchange.getExchangeId());
                }
                boolean sync = processor.process(exchange, callback);
                if (!sync) {
                    // will be done async so return now
                    return;
                }
            }
            // if we failed processed the exchange from the async callback task, then grab the exception
            rce = exchange.getException(RuntimeCamelException.class);

        } catch (Exception e) {
            rce = wrapRuntimeCamelException(e);
        }

        // an exception occurred so rethrow to trigger rollback on JMS listener
        // the JMS listener will use the error handler to handle the uncaught exception
        if (rce != null) {
            LOG.trace("onMessage END throwing exception: {}", rce.getMessage());
            // Spring message listener container will handle uncaught exceptions
            // being thrown from this onMessage, and will us the ErrorHandler configured
            // on the JmsEndpoint to handle the exception
            throw rce;
        }

        LOG.trace("onMessage END");
    }

    /**
     * Callback task that is performed when the exchange has been processed
     */
    private final class EndpointMessageListenerAsyncCallback implements AsyncCallback {

        private final Message message;
        private final Exchange exchange;
        private final JmsEndpoint endpoint;
        private final boolean sendReply;
        private final Object replyDestination;

        private EndpointMessageListenerAsyncCallback(Message message, Exchange exchange, JmsEndpoint endpoint,
                                                     boolean sendReply, Object replyDestination) {
            this.message = message;
            this.exchange = exchange;
            this.endpoint = endpoint;
            this.sendReply = sendReply;
            this.replyDestination = replyDestination;
        }

        @Override
        public void done(boolean doneSync) {
            LOG.trace("onMessage.process END");

            // now we evaluate the processing of the exchange and determine if it was a success or failure
            // we also grab information from the exchange to be used for sending back a reply (if we are to do so)
            // so the following logic seems a bit complicated at first glance

            // if we send back a reply it can either be the message body or transferring a caused exception
            org.apache.camel.Message body = null;
            Exception cause = null;
            RuntimeCamelException rce = null;

            if (exchange.isFailed() || exchange.isRollbackOnly()) {
                if (exchange.isRollbackOnly()) {
                    // rollback only so wrap an exception so we can rethrow the exception to cause rollback
                    rce = wrapRuntimeCamelException(new RollbackExchangeException(exchange));
                } else if (exchange.getException() != null) {
                    // an exception occurred while processing
                    if (endpoint.isTransferException()) {
                        // send the exception as reply, so null body and set the exception as the cause
                        body = null;
                        cause = exchange.getException();
                    } else {
                        // only throw exception if endpoint is not configured to transfer exceptions back to caller
                        // do not send a reply but wrap and rethrow the exception
                        rce = wrapRuntimeCamelException(exchange.getException());
                    }
                } else {
                    org.apache.camel.Message msg = exchange.hasOut() ? exchange.getOut() : exchange.getIn();
                    if (msg.isFault()) {
                        // a fault occurred while processing
                        body = msg;
                        cause = null;
                    }
                }
            } else {
                // process OK so get the reply body if we are InOut and has a body
                // If the ppl don't want to send the message back, he should use the InOnly
                if (sendReply && exchange.getPattern().isOutCapable()) {
                    if (exchange.hasOut()) {
                        body = exchange.getOut();
                    } else {
                        body = exchange.getIn();
                    }
                    cause = null;
                }
            }

            // send back reply if there was no error and we are supposed to send back a reply
            if (rce == null && sendReply && (body != null || cause != null)) {
                LOG.trace("onMessage.sendReply START");
                if (replyDestination instanceof Destination) {
                    sendReply((Destination)replyDestination, message, exchange, body, cause);
                } else {
                    sendReply((String)replyDestination, message, exchange, body, cause);
                }
                LOG.trace("onMessage.sendReply END");
            }

            // if an exception occurred
            if (rce != null) {
                if (doneSync) {
                    // we were done sync, so put exception on exchange, so we can grab it in the onMessage
                    // method and rethrow it
                    exchange.setException(rce);
                } else {
                    // we were done async, so use the endpoint error handler
                    if (endpoint.getErrorHandler() != null) {
                        endpoint.getErrorHandler().handleError(rce);
                    }
                }
            }
        }
    }

    /**
     * Creates the Camel {@link Exchange} for an incoming JMS message; switches the
     * exchange to InOut when a reply destination exists (and replies are enabled).
     */
    public Exchange createExchange(Message message, Session session, Object replyDestination) {
        Exchange exchange = endpoint.createExchange();
        JmsBinding binding = getBinding();
        exchange.setProperty(Exchange.BINDING, binding);
        exchange.setIn(new JmsMessage(message, session, binding));

        // lets set to an InOut if we have some kind of reply-to destination
        if (replyDestination != null && !disableReplyTo) {
            // only change pattern if not already out capable
            if (!exchange.getPattern().isOutCapable()) {
                exchange.setPattern(ExchangePattern.InOut);
            }
        }
        return exchange;
    }

    // Properties
    // -------------------------------------------------------------------------

    public JmsBinding getBinding() {
        if (binding == null) {
            binding = endpoint.getBinding();
        }
        return binding;
    }

    /**
     * Sets the binding used to convert from a Camel message to and from a JMS
     * message
     *
     * @param binding the binding to use
     */
    public void setBinding(JmsBinding binding) {
        this.binding = binding;
    }

    public boolean isEagerLoadingOfProperties() {
        return eagerLoadingOfProperties;
    }

    public void setEagerLoadingOfProperties(boolean eagerLoadingOfProperties) {
        this.eagerLoadingOfProperties = eagerLoadingOfProperties;
    }

    public synchronized JmsOperations getTemplate() {
        if (template == null) {
            template = endpoint.createInOnlyTemplate();
        }
        return template;
    }

    public void setTemplate(JmsOperations template) {
        this.template = template;
    }

    public boolean isDisableReplyTo() {
        return disableReplyTo;
    }

    /**
     * Allows the reply-to behaviour to be disabled
     */
    public void setDisableReplyTo(boolean disableReplyTo) {
        this.disableReplyTo = disableReplyTo;
    }

    public Object getReplyToDestination() {
        return replyToDestination;
    }

    /**
     * Provides an explicit reply to destination which overrides
     * any incoming value of {@link Message#getJMSReplyTo()}
     *
     * @param replyToDestination the destination that should be used to send replies to
     *                           as either a String or {@link javax.jms.Destination} type.
     */
    public void setReplyToDestination(Object replyToDestination) {
        this.replyToDestination = replyToDestination;
    }

    public boolean isAsync() {
        return async;
    }

    /**
     * Sets whether asynchronous routing is enabled.
     * <p/>
     * By default this is <tt>false</tt>. If configured as <tt>true</tt> then
     * this listener will process the {@link org.apache.camel.Exchange} asynchronous.
     */
    public void setAsync(boolean async) {
        this.async = async;
    }

    // Implementation methods
    //-------------------------------------------------------------------------

    /**
     * Strategy to determine which correlation id to use among <tt>JMSMessageID</tt> and <tt>JMSCorrelationID</tt>.
     *
     * @param message the JMS message
     * @return the correlation id to use
     * @throws JMSException can be thrown
     */
    protected String determineCorrelationId(final Message message) throws JMSException {
        final String messageId = message.getJMSMessageID();
        final String correlationId = message.getJMSCorrelationID();

        if (endpoint.getConfiguration().isUseMessageIDAsCorrelationID()) {
            return messageId;
        } else if (ObjectHelper.isEmpty(correlationId)) {
            // correlation id is empty so fallback to message id
            return messageId;
        } else {
            return correlationId;
        }
    }

    /**
     * Builds the {@link MessageCreator} shared by both {@code sendReply} overloads,
     * so reply construction and correlation-id handling live in exactly one place
     * (previously this anonymous class was duplicated verbatim in each overload).
     */
    private MessageCreator createReplyMessageCreator(final Message message, final Exchange exchange,
                                                     final org.apache.camel.Message out, final Exception cause) {
        return new MessageCreator() {
            public Message createMessage(Session session) throws JMSException {
                Message reply = endpoint.getBinding().makeJmsMessage(exchange, out, session, cause);

                final String correlationID = determineCorrelationId(message);
                reply.setJMSCorrelationID(correlationID);

                if (LOG.isDebugEnabled()) {
                    LOG.debug("{} sending reply JMS message [correlationId:{}]: {}", endpoint, correlationID, reply);
                }
                return reply;
            }
        };
    }

    protected void sendReply(Destination replyDestination, final Message message, final Exchange exchange,
                             final org.apache.camel.Message out, final Exception cause) {
        if (replyDestination == null) {
            LOG.debug("Cannot send reply message as there is no replyDestination for: {}", out);
            return;
        }
        getTemplate().send(replyDestination, createReplyMessageCreator(message, exchange, out, cause));
    }

    protected void sendReply(String replyDestination, final Message message, final Exchange exchange,
                             final org.apache.camel.Message out, final Exception cause) {
        if (replyDestination == null) {
            LOG.debug("Cannot send reply message as there is no replyDestination for: {}", out);
            return;
        }
        getTemplate().send(replyDestination, createReplyMessageCreator(message, exchange, out, cause));
    }

    protected Object getReplyToDestination(Message message) throws JMSException {
        // lets send a response back if we can
        Object destination = getReplyToDestination();
        if (destination == null) {
            destination = JmsMessageHelper.getJMSReplyTo(message);
        }
        return destination;
    }

    @Override
    public String toString() {
        return "EndpointMessageListener[" + endpoint + "]";
    }
}
| apache-2.0 |
yafengguo/Apache-beam | runners/core-java/src/main/java/org/apache/beam/runners/core/SystemReduceFn.java | 5486 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.beam.runners.core;
import org.apache.beam.sdk.coders.Coder;
import org.apache.beam.sdk.transforms.Combine.CombineFn;
import org.apache.beam.sdk.transforms.Combine.KeyedCombineFn;
import org.apache.beam.sdk.transforms.CombineWithContext.KeyedCombineFnWithContext;
import org.apache.beam.sdk.transforms.GroupByKey;
import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
import org.apache.beam.sdk.util.AppliedCombineFn;
import org.apache.beam.sdk.util.state.AccumulatorCombiningState;
import org.apache.beam.sdk.util.state.BagState;
import org.apache.beam.sdk.util.state.CombiningState;
import org.apache.beam.sdk.util.state.MergingStateAccessor;
import org.apache.beam.sdk.util.state.ReadableState;
import org.apache.beam.sdk.util.state.StateAccessor;
import org.apache.beam.sdk.util.state.StateMerging;
import org.apache.beam.sdk.util.state.StateTag;
import org.apache.beam.sdk.util.state.StateTags;
/**
 * {@link ReduceFn} implementing the default reduction behaviors of {@link GroupByKey}.
 *
 * @param <K> The type of key being processed.
 * @param <InputT> The type of values associated with the key.
 * @param <OutputT> The output type that will be produced for each key.
 * @param <W> The type of windows this operates on.
 */
public abstract class SystemReduceFn<K, InputT, AccumT, OutputT, W extends BoundedWindow>
    extends ReduceFn<K, InputT, OutputT, W> {
  // Name of the system state tag holding the buffered/combined values.
  private static final String BUFFER_NAME = "buf";
  /**
   * Create a factory that produces {@link SystemReduceFn} instances that buffer all of the
   * input values in persistent state and produce an {@code Iterable<T>}.
   */
  public static <K, T, W extends BoundedWindow> SystemReduceFn<K, T, Iterable<T>, Iterable<T>, W>
      buffering(final Coder<T> inputCoder) {
    final StateTag<Object, BagState<T>> bufferTag =
        StateTags.makeSystemTagInternal(StateTags.bag(BUFFER_NAME, inputCoder));
    return new SystemReduceFn<K, T, Iterable<T>, Iterable<T>, W>(bufferTag) {
      @Override
      public void prefetchOnMerge(MergingStateAccessor<K, W> state) throws Exception {
        StateMerging.prefetchBags(state, bufferTag);
      }
      @Override
      public void onMerge(OnMergeContext c) throws Exception {
        // Merging windows simply concatenates their bags.
        StateMerging.mergeBags(c.state(), bufferTag);
      }
    };
  }
  /**
   * Create a factory that produces {@link SystemReduceFn} instances that combine all of the input
   * values using a {@link CombineFn}.
   */
  public static <K, InputT, AccumT, OutputT, W extends BoundedWindow> SystemReduceFn<K, InputT,
      AccumT, OutputT, W>
      combining(
          final Coder<K> keyCoder, final AppliedCombineFn<K, InputT, AccumT, OutputT> combineFn) {
    final StateTag<K, AccumulatorCombiningState<InputT, AccumT, OutputT>> bufferTag;
    // Choose the state tag flavor matching the combine fn's need for side-input context.
    if (combineFn.getFn() instanceof KeyedCombineFnWithContext) {
      bufferTag = StateTags.makeSystemTagInternal(
          StateTags.<K, InputT, AccumT, OutputT>keyedCombiningValueWithContext(
              BUFFER_NAME, combineFn.getAccumulatorCoder(),
              (KeyedCombineFnWithContext<K, InputT, AccumT, OutputT>) combineFn.getFn()));
    } else {
      bufferTag = StateTags.makeSystemTagInternal(
          StateTags.<K, InputT, AccumT, OutputT>keyedCombiningValue(
              BUFFER_NAME, combineFn.getAccumulatorCoder(),
              (KeyedCombineFn<K, InputT, AccumT, OutputT>) combineFn.getFn()));
    }
    return new SystemReduceFn<K, InputT, AccumT, OutputT, W>(bufferTag) {
      @Override
      public void prefetchOnMerge(MergingStateAccessor<K, W> state) throws Exception {
        StateMerging.prefetchCombiningValues(state, bufferTag);
      }
      @Override
      public void onMerge(OnMergeContext c) throws Exception {
        // Merging windows merges their partial accumulators.
        StateMerging.mergeCombiningValues(c.state(), bufferTag);
      }
    };
  }
  // State tag under which this fn buffers (or combines) per-key, per-window input.
  private StateTag<? super K, ? extends CombiningState<InputT, OutputT>> bufferTag;
  public SystemReduceFn(
      StateTag<? super K, ? extends CombiningState<InputT, OutputT>> bufferTag) {
    this.bufferTag = bufferTag;
  }
  @Override
  public void processValue(ProcessValueContext c) throws Exception {
    // Each input element is folded into the buffered state.
    c.state().access(bufferTag).add(c.value());
  }
  @Override
  public void prefetchOnTrigger(StateAccessor<K> state) {
    state.access(bufferTag).readLater();
  }
  @Override
  public void onTrigger(OnTriggerContext c) throws Exception {
    // Emitting a pane outputs the current buffered/combined value.
    c.output(c.state().access(bufferTag).read());
  }
  @Override
  public void clearState(Context c) throws Exception {
    c.state().access(bufferTag).clear();
  }
  @Override
  public ReadableState<Boolean> isEmpty(StateAccessor<K> state) {
    return state.access(bufferTag).isEmpty();
  }
}
| apache-2.0 |
salyh/geronimo-specs | geronimo-jms_1.1_spec/src/main/java/javax/jms/TopicConnection.java | 1656 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
//
// This source code implements specifications defined by the Java
// Community Process. In order to remain compliant with the specification
// DO NOT add / change / or delete method signatures!
//
package javax.jms;
/**
 * A JMS connection to a pub/sub (topic) provider. Signatures are defined by the JMS 1.1
 * specification and must not be changed (see the notice at the top of this file).
 *
 * @version $Rev$ $Date$
 */
public interface TopicConnection extends Connection {
    /**
     * Creates a {@code TopicSession}.
     *
     * @param transacted true if the session should be transacted
     * @param acknowledgeMode the acknowledgement mode (ignored when transacted)
     * @throws JMSException if the session cannot be created
     */
    TopicSession createTopicSession(boolean transacted, int acknowledgeMode)
        throws JMSException;
    /**
     * Creates a connection consumer for concurrent message delivery from the given topic.
     *
     * @throws JMSException if the consumer cannot be created
     */
    ConnectionConsumer createConnectionConsumer(
        Topic topic,
        String messageSelector,
        ServerSessionPool sessionPool,
        int maxMessages)
        throws JMSException;
    /**
     * Creates a connection consumer backed by a durable subscription on the given topic.
     *
     * @throws JMSException if the consumer cannot be created
     */
    ConnectionConsumer createDurableConnectionConsumer(
        Topic topic,
        String subscriptionName,
        String messageSelector,
        ServerSessionPool sessionPool,
        int maxMessages)
        throws JMSException;
}
| apache-2.0 |
Fokko/druid | indexing-service/src/test/java/org/apache/druid/indexing/common/task/batch/parallel/PartialHashSegmentMergeIngestionSpecTest.java | 2471 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.druid.indexing.common.task.batch.parallel;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.druid.indexer.partitions.HashedPartitionsSpec;
import org.apache.druid.segment.TestHelper;
import org.junit.Before;
import org.junit.Test;
import java.util.Collections;
/**
 * Verifies that {@code PartialHashSegmentMergeIngestionSpec} survives a Jackson
 * serialize/deserialize round trip.
 */
public class PartialHashSegmentMergeIngestionSpecTest
{
  private static final ObjectMapper OBJECT_MAPPER = ParallelIndexTestingFactory.createObjectMapper();
  // A fully populated partition location built from testing-factory constants.
  private static final HashPartitionLocation HASH_PARTITION_LOCATION = new HashPartitionLocation(
      ParallelIndexTestingFactory.HOST,
      ParallelIndexTestingFactory.PORT,
      ParallelIndexTestingFactory.USE_HTTPS,
      ParallelIndexTestingFactory.SUBTASK_ID,
      ParallelIndexTestingFactory.INTERVAL,
      ParallelIndexTestingFactory.PARTITION_ID
  );
  private static final PartialHashSegmentMergeIOConfig IO_CONFIG =
      new PartialHashSegmentMergeIOConfig(Collections.singletonList(HASH_PARTITION_LOCATION));
  private static final HashedPartitionsSpec PARTITIONS_SPEC = new HashedPartitionsSpec(
      null,
      1,
      Collections.emptyList()
  );
  // Object under test, rebuilt before each test case.
  private PartialHashSegmentMergeIngestionSpec target;
  @Before
  public void setup()
  {
    target = new PartialHashSegmentMergeIngestionSpec(
        ParallelIndexTestingFactory.createDataSchema(ParallelIndexTestingFactory.INPUT_INTERVALS),
        IO_CONFIG,
        new ParallelIndexTestingFactory.TuningConfigBuilder()
            .partitionsSpec(PARTITIONS_SPEC)
            .build()
    );
  }
  @Test
  public void serializesDeserializes()
  {
    TestHelper.testSerializesDeserializes(OBJECT_MAPPER, target);
  }
}
| apache-2.0 |
AndroidX/androidx | room/integration-tests/testapp/src/androidTest/java/androidx/room/integration/testapp/vo/PlaylistMultiSongXRefView.java | 1172 | /*
* Copyright 2019 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package androidx.room.integration.testapp.vo;
import androidx.room.DatabaseView;
/**
 * Database view over the playlist/song join table, restricted to playlists
 * containing more than one song.
 */
@DatabaseView("SELECT * FROM PlaylistSongXRef WHERE mPlaylistId IN (SELECT mPlaylistId FROM"
        + " PlaylistSongXRef GROUP BY mPlaylistId HAVING COUNT(mSongId) > 1)")
public class PlaylistMultiSongXRefView {
    // Id of a playlist that has at least two songs.
    public final int mPlaylistId;
    // Id of one of the songs in that playlist.
    public final int mSongId;
    public PlaylistMultiSongXRefView(int playlistId, int songId) {
        mPlaylistId = playlistId;
        mSongId = songId;
    }
}
| apache-2.0 |
mulesoft-consulting/sumtotal-connector | lib/apache-cxf-2.7.5/samples/js_browser_client_java_first/src/main/java/demo/hw/server/JavascriptExample.java | 1576 | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package demo.hw.server;
import java.util.List;
import javax.jws.WebMethod;
import javax.jws.WebParam;
import javax.jws.WebService;
import javax.jws.soap.SOAPBinding;
import demo.hw.server.data.Beverage;
import demo.hw.server.data.Category;
import demo.hw.server.data.Ingredient;
/**
 * Sample SOAP web service used by the CXF JavaScript client demo; looks up
 * beverages by ingredient or ingredient category.
 */
@WebService(targetNamespace = "urn:cxf.apache.org:jsjaxws")
@SOAPBinding(parameterStyle = SOAPBinding.ParameterStyle.BARE)
public interface JavascriptExample {
    /** Returns all beverages containing the given ingredient. */
    @WebMethod
    Beverage[] getBeveragesWithIngredient(@WebParam(name = "ingredient") Ingredient i);
    /** Returns all beverages containing any ingredient of the given category. */
    @WebMethod
    Beverage[] getBeverageWithIngredientCategory(@WebParam(name = "category") Category c);
    /** Returns all beverages containing any ingredient of any of the given categories. */
    @WebMethod
    Beverage[] getBeverageWithIngredientCategories(@WebParam(name = "categories") List<Category> c);
}
| apache-2.0 |
pubudu538/carbon-apimgt | components/apimgt/org.wso2.carbon.apimgt.rest.api.publisher.v1/src/main/java/org/wso2/carbon/apimgt/rest/api/publisher/v1/impl/ThrottlingPoliciesApiServiceImpl.java | 7570 | /*
*
* Copyright (c) 2019, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* /
*/
package org.wso2.carbon.apimgt.rest.api.publisher.v1.impl;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.cxf.jaxrs.ext.MessageContext;
import org.wso2.carbon.apimgt.api.APIManagementException;
import org.wso2.carbon.apimgt.api.model.Tier;
import org.wso2.carbon.apimgt.impl.APIConstants;
import org.wso2.carbon.apimgt.impl.utils.APIUtil;
import org.wso2.carbon.apimgt.rest.api.publisher.v1.ThrottlingPoliciesApiService;
import org.wso2.carbon.apimgt.rest.api.publisher.v1.dto.ThrottlingPolicyDTO;
import org.wso2.carbon.apimgt.rest.api.publisher.v1.dto.ThrottlingPolicyListDTO;
import org.wso2.carbon.apimgt.rest.api.publisher.v1.utils.mappings.ThrottlingPolicyMappingUtil;
import org.wso2.carbon.apimgt.rest.api.util.RestApiConstants;
import org.wso2.carbon.apimgt.rest.api.util.utils.RestApiUtil;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import javax.ws.rs.core.Response;
/**
 * This is the service implementation class for Publisher throttling policies related operations.
 */
public class ThrottlingPoliciesApiServiceImpl implements ThrottlingPoliciesApiService {

    private static final Log log = LogFactory.getLog(ThrottlingPoliciesApiServiceImpl.class);

    /**
     * Retrieves all the Tiers.
     *
     * @param policyLevel    tier level (api/application or resource)
     * @param limit          max number of objects returned
     * @param offset         starting index
     * @param ifNoneMatch    If-None-Match header value
     * @param messageContext CXF message context
     * @return Response object containing resulted tiers
     */
    @Override
    public Response getAllThrottlingPolicies(String policyLevel, Integer limit, Integer offset,
            String ifNoneMatch, MessageContext messageContext) {
        // Pre-processing: apply default pagination values when none were given.
        limit = limit != null ? limit : RestApiConstants.PAGINATION_LIMIT_DEFAULT;
        offset = offset != null ? offset : RestApiConstants.PAGINATION_OFFSET_DEFAULT;
        List<Tier> tierList = getThrottlingPolicyList(policyLevel);
        if (tierList == null) {
            // getThrottlingPolicyList returns null when tier retrieval fails or the policy
            // level is unknown; guard against an NPE on tierList.size() below.
            tierList = new ArrayList<>();
        }
        ThrottlingPolicyListDTO policyListDTO = ThrottlingPolicyMappingUtil
                .fromTierListToDTO(tierList, policyLevel, limit, offset);
        // TODO: set total counts properly
        ThrottlingPolicyMappingUtil.setPaginationParams(policyListDTO, policyLevel, limit, offset, tierList.size());
        return Response.ok().entity(policyListDTO).build();
    }

    /**
     * Returns the matched throttling policy to the given policy name.
     *
     * @param policyName     name of the throttling policy
     * @param policyLevel    throttling policy level (subscription or api)
     * @param ifNoneMatch    If-None-Match header value
     * @param messageContext CXF message context
     * @return ThrottlingPolicyDTO matched to the given throttling policy name
     */
    @Override
    public Response getThrottlingPolicyByName(String policyName, String policyLevel, String ifNoneMatch,
            MessageContext messageContext) {
        try {
            String tenantDomain = RestApiUtil.getLoggedInUserTenantDomain();
            ThrottlingPolicyDTO.PolicyLevelEnum policyLevelEnum;
            Tier foundTier = null;
            if (StringUtils.isBlank(policyLevel)) {
                RestApiUtil.handleBadRequest("policyLevel cannot be empty", log);
            }
            // Retrieve the tier based on the given tier level.
            if (ThrottlingPolicyDTO.PolicyLevelEnum.SUBSCRIPTION.toString().equals(policyLevel)) {
                foundTier = APIUtil.getTierFromCache(policyName, tenantDomain);
                policyLevelEnum = ThrottlingPolicyDTO.PolicyLevelEnum.SUBSCRIPTION;
            } else if (ThrottlingPolicyDTO.PolicyLevelEnum.API.toString().equals(policyLevel)) {
                Map<String, Tier> resourceTiersMap =
                        APIUtil.getTiers(APIConstants.TIER_RESOURCE_TYPE, tenantDomain);
                policyLevelEnum = ThrottlingPolicyDTO.PolicyLevelEnum.API;
                if (resourceTiersMap != null) {
                    foundTier = RestApiUtil.findTier(resourceTiersMap.values(), policyName);
                }
            } else {
                RestApiUtil.handleResourceNotFoundError(
                        "policyLevel should be one of " + Arrays.toString(ThrottlingPolicyDTO.PolicyLevelEnum.values()),
                        log);
                return null;
            }
            // Return the tier if found, otherwise send 404.
            if (foundTier != null) {
                return Response.ok()
                        .entity(ThrottlingPolicyMappingUtil.fromTierToDTO(foundTier, policyLevelEnum.toString()))
                        .build();
            } else {
                RestApiUtil.handleResourceNotFoundError(RestApiConstants.RESOURCE_THROTTLING_POLICY, policyName, log);
            }
        } catch (APIManagementException e) {
            String errorMessage = "Error while retrieving throttling policies";
            RestApiUtil.handleInternalServerError(errorMessage, e, log);
        }
        return null;
    }

    /**
     * Returns the throttling policies which belong to the given policy level.
     *
     * @param policyLevel throttling policy level (subscription or api)
     * @return list of throttling policies, or null when retrieval fails or the level is unknown
     */
    public List<Tier> getThrottlingPolicyList(String policyLevel) {
        try {
            List<Tier> tierList = new ArrayList<>();
            String tenantDomain = RestApiUtil.getLoggedInUserTenantDomain();
            if (StringUtils.isBlank(policyLevel)) {
                RestApiUtil.handleBadRequest("policyLevel cannot be empty", log);
            }
            // Retrieve the tiers based on the given tier level.
            if (ThrottlingPolicyDTO.PolicyLevelEnum.SUBSCRIPTION.toString().equals(policyLevel)) {
                Map<String, Tier> apiTiersMap = APIUtil.getTiers(APIConstants.TIER_API_TYPE, tenantDomain);
                if (apiTiersMap != null) {
                    tierList.addAll(apiTiersMap.values());
                }
            } else if (ThrottlingPolicyDTO.PolicyLevelEnum.API.toString().equals(policyLevel)) {
                Map<String, Tier> resourceTiersMap =
                        APIUtil.getTiers(APIConstants.TIER_RESOURCE_TYPE, tenantDomain);
                if (resourceTiersMap != null) {
                    tierList.addAll(resourceTiersMap.values());
                }
            } else {
                RestApiUtil.handleResourceNotFoundError(
                        "policyLevel should be one of " +
                                Arrays.toString(ThrottlingPolicyDTO.PolicyLevelEnum.values()), log);
            }
            return tierList;
        } catch (APIManagementException e) {
            String errorMessage = "Error while retrieving tiers";
            RestApiUtil.handleInternalServerError(errorMessage, e, log);
        }
        return null;
    }
}
| apache-2.0 |
twitter/bookkeeper | bookkeeper-server/src/main/java/org/apache/bookkeeper/proto/PerChannelBookieClientPool.java | 1732 | /*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.bookkeeper.proto;
import org.apache.bookkeeper.proto.BookkeeperInternalCallbacks.GenericCallback;
/**
 * An interface to manage channel pooling for bookie client.
 */
interface PerChannelBookieClientPool {
    /**
     * Initialize the pool. The implementation should not block.
     * NOTE(review): the method name "intialize" is a typo, but it is part of the
     * public API and cannot be renamed here without breaking implementors.
     */
    void intialize();
    /**
     * Obtain a channel from channel pool to execute operations.
     *
     * @param callback
     *          callback to return channel from channel pool.
     */
    void obtain(GenericCallback<PerChannelBookieClient> callback);
    /**
     * Disconnect the connections in the pool.
     *
     * @param wait
     *          whether to wait until the pool is disconnected.
     */
    void disconnect(boolean wait);
    /**
     * Close the pool.
     *
     * @param wait
     *          whether to wait until the pool is closed.
     */
    void close(boolean wait);
}
| apache-2.0 |
apache/commons-ognl | src/main/java/org/apache/commons/ognl/EnumerationPropertyAccessor.java | 2456 | package org.apache.commons.ognl;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import java.util.Enumeration;
import java.util.Map;
/**
 * A {@link PropertyAccessor} for {@link Enumeration} targets. The pseudo-properties
 * "next" and "nextElement" map to {@link Enumeration#nextElement()}, while "hasNext"
 * and "hasMoreElements" map to {@link Enumeration#hasMoreElements()}. Any other
 * property name is delegated to {@link ObjectPropertyAccessor}. Enumerations are
 * read-only, so setting a property always fails.
 */
public class EnumerationPropertyAccessor
    extends ObjectPropertyAccessor
    implements PropertyAccessor // This is here to make javadoc show this class as an implementor
{
    @Override
    public Object getProperty( Map<String, Object> context, Object target, Object name )
        throws OgnlException
    {
        Enumeration<?> enumeration = (Enumeration<?>) target; // check performed by the invoker
        if ( name instanceof String )
        {
            if ( "next".equals( name ) || "nextElement".equals( name ) )
            {
                return enumeration.nextElement();
            }
            if ( "hasNext".equals( name ) || "hasMoreElements".equals( name ) )
            {
                return enumeration.hasMoreElements() ? Boolean.TRUE : Boolean.FALSE;
            }
        }
        // Unknown name (or non-String name): fall back to regular object property access.
        return super.getProperty( context, target, name );
    }

    @Override
    public void setProperty( Map<String, Object> context, Object target, Object name, Object value )
        throws OgnlException
    {
        throw new IllegalArgumentException( "can't set property " + name + " on Enumeration" );
    }
}
| apache-2.0 |
iotivity/iotivity | service/simulator/java/eclipse-plugin/ClientControllerPlugin/src/oic/simulator/clientcontroller/view/AttributeEditingSupport.java | 17171 | /*
* Copyright 2015 Samsung Electronics All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package oic.simulator.clientcontroller.view;
import org.eclipse.jface.dialogs.TitleAreaDialog;
import org.eclipse.jface.viewers.CellEditor;
import org.eclipse.jface.viewers.CheckboxCellEditor;
import org.eclipse.jface.viewers.ComboBoxCellEditor;
import org.eclipse.jface.viewers.EditingSupport;
import org.eclipse.jface.viewers.TextCellEditor;
import org.eclipse.jface.viewers.TreeViewer;
import org.eclipse.jface.window.Window;
import org.eclipse.swt.SWT;
import org.eclipse.swt.custom.CCombo;
import org.eclipse.swt.events.ModifyEvent;
import org.eclipse.swt.events.ModifyListener;
import org.eclipse.swt.widgets.Display;
import org.eclipse.swt.widgets.MessageBox;
import org.eclipse.swt.widgets.Text;
import org.eclipse.swt.widgets.Tree;
import org.eclipse.swt.widgets.TreeItem;
import java.util.Date;
import java.util.List;
import org.oic.simulator.ArrayProperty;
import org.oic.simulator.AttributeProperty;
import org.oic.simulator.AttributeValue;
import org.oic.simulator.AttributeValue.TypeInfo;
import org.oic.simulator.AttributeValue.ValueType;
import org.oic.simulator.ILogger.Level;
import org.oic.simulator.InvalidArgsException;
import org.oic.simulator.SimulatorResourceAttribute;
import oic.simulator.clientcontroller.Activator;
import oic.simulator.clientcontroller.manager.ResourceManager;
import oic.simulator.clientcontroller.remoteresource.AttributeElement;
import oic.simulator.clientcontroller.remoteresource.RemoteResource;
import oic.simulator.clientcontroller.remoteresource.ResourceRepresentation;
import oic.simulator.clientcontroller.utils.AttributeValueBuilder;
import oic.simulator.clientcontroller.utils.Utility;
import oic.simulator.clientcontroller.view.dialogs.PostRequestDialog;
import oic.simulator.clientcontroller.view.dialogs.UpdatePrimitiveArrayAttributeDialog;
/**
* This class provides editing support to the resources attributes table in the
* attributes view.
*/
public class AttributeEditingSupport {
private AttributeValueEditor attValueEditor;
private PostSelectionEditor postSelectionEditor;
    /**
     * Creates (and remembers) the value-editing support for the given attribute tree.
     *
     * @param viewer tree viewer showing the resource attributes
     * @param dialog the dialog hosting the viewer (used to decide post-state refresh behavior)
     * @return the new editing support instance
     */
    public AttributeValueEditor createAttributeValueEditor(TreeViewer viewer,
                                                           TitleAreaDialog dialog) {
        attValueEditor = new AttributeValueEditor(viewer, dialog);
        return attValueEditor;
    }
    /**
     * Creates (and remembers) the post-selection editing support for the given attribute tree.
     *
     * @param viewer tree viewer showing the resource attributes
     * @return the new editing support instance
     */
    public PostSelectionEditor createPostSelectionEditor(TreeViewer viewer) {
        postSelectionEditor = new PostSelectionEditor(viewer);
        return postSelectionEditor;
    }
class AttributeValueEditor extends EditingSupport {
private final TreeViewer viewer;
private CCombo comboBox;
private TitleAreaDialog dialog;
public AttributeValueEditor(TreeViewer viewer, TitleAreaDialog dialog) {
super(viewer);
this.viewer = viewer;
this.dialog = dialog;
}
@Override
protected boolean canEdit(Object arg0) {
return true;
}
@Override
protected CellEditor getCellEditor(final Object element) {
ResourceManager resourceManager = Activator.getDefault()
.getResourceManager();
RemoteResource res = resourceManager
.getCurrentResourceInSelection();
if (null == res) {
return null;
}
final SimulatorResourceAttribute attribute;
if (!(element instanceof AttributeElement)) {
return null;
}
final AttributeElement attributeElement = ((AttributeElement) element);
attribute = attributeElement.getSimulatorResourceAttribute();
if (null == attribute) {
return null;
}
final AttributeValue val = attribute.value();
if (null == val) {
return null;
}
final TypeInfo type = val.typeInfo();
if (type.mBaseType == ValueType.RESOURCEMODEL) {
return null;
}
CellEditor editor;
if (type.mType == ValueType.ARRAY && res.isConfigUploaded()
&& isArrayAttributeValid(attribute)) {
editor = new TextCellEditor(viewer.getTree());
editor.setStyle(SWT.READ_ONLY);
final Text txt = (Text) editor.getControl();
txt.addModifyListener(new ModifyListener() {
@Override
public void modifyText(ModifyEvent e) {
UpdatePrimitiveArrayAttributeDialog dialog = new UpdatePrimitiveArrayAttributeDialog(
Display.getDefault().getActiveShell(),
attribute);
if (dialog.open() == Window.OK) {
updateAttributeValue(attributeElement, attribute,
dialog.getNewValueObj());
}
// Update the viewer in a separate UI thread.
Display.getDefault().asyncExec(new Runnable() {
@Override
public void run() {
// Set the post state of the top-level
// attribute.
AttributeElement rootElement = getRootElement(attributeElement);
rootElement.setPostState(true);
viewer.refresh(rootElement, true);
}
});
}
});
} else {
String values[] = null;
List<String> valueSet = resourceManager
.getAllValuesOfAttribute(attribute);
values = convertListToStringArray(valueSet);
editor = new ComboBoxCellEditor(viewer.getTree(), values);
comboBox = (CCombo) editor.getControl();
comboBox.addModifyListener(new ModifyListener() {
@Override
public void modifyText(ModifyEvent event) {
// Set the post state of the top-level attribute.
AttributeElement rootElement = getRootElement(attributeElement);
rootElement.setPostState(true);
if (AttributeValueEditor.this.dialog instanceof PostRequestDialog) {
viewer.update(rootElement, null);
}
}
});
}
return editor;
}
@Override
protected Object getValue(Object element) {
int indexOfItem = 0;
SimulatorResourceAttribute att = null;
if (element instanceof AttributeElement) {
att = ((AttributeElement) element)
.getSimulatorResourceAttribute();
}
if (att == null) {
return 0;
}
final AttributeValue val = att.value();
if (null == val) {
return null;
}
final TypeInfo type = val.typeInfo();
if (type.mBaseType == ValueType.RESOURCEMODEL) {
return null;
}
String valueString = Utility.getAttributeValueAsString(att.value());
if (null == valueString) {
valueString = "";
}
if (type.mType == ValueType.ARRAY) {
ResourceManager resourceManager = Activator.getDefault()
.getResourceManager();
RemoteResource res = resourceManager
.getCurrentResourceInSelection();
if (null != res && res.isConfigUploaded()
&& isArrayAttributeValid(att)) {
return valueString;
}
}
List<String> valueSet = Activator.getDefault().getResourceManager()
.getAllValuesOfAttribute(att);
if (null != valueSet) {
indexOfItem = valueSet.indexOf(valueString);
}
if (indexOfItem == -1) {
indexOfItem = 0;
}
return indexOfItem;
}
@Override
protected void setValue(Object element, Object value) {
SimulatorResourceAttribute att = null;
if (element instanceof AttributeElement) {
att = ((AttributeElement) element)
.getSimulatorResourceAttribute();
}
if (att == null) {
return;
}
AttributeValue val = att.value();
if (null == val) {
return;
}
TypeInfo type = val.typeInfo();
if (type.mBaseType == ValueType.RESOURCEMODEL) {
return;
}
if (type.mType == ValueType.ARRAY) {
ResourceManager resourceManager = Activator.getDefault()
.getResourceManager();
RemoteResource res = resourceManager
.getCurrentResourceInSelection();
if (null != res && res.isConfigUploaded()
&& isArrayAttributeValid(att)) {
return;
}
}
String oldValue = String.valueOf(Utility
.getAttributeValueAsString(val));
if (null == oldValue) {
oldValue = "";
}
String newValue = comboBox.getText();
if (type.mType == ValueType.ARRAY
&& type.mBaseType != ValueType.RESOURCEMODEL) {
newValue = Utility.removeWhiteSpacesInArrayValues(newValue);
}
if (!oldValue.equals(newValue)) {
boolean invalid = false;
// Get the AttriuteValue from the string
AttributeValue attValue = null;
try {
attValue = AttributeValueBuilder.build(newValue,
type.mBaseType);
} catch (Exception e) {
Activator
.getDefault()
.getLogManager()
.log(Level.ERROR.ordinal(),
new Date(),
"There is an error while creating the new attribute value.\n"
+ Utility.getSimulatorErrorString(
e, null));
}
if (null == attValue) {
invalid = true;
} else {
TypeInfo resTypeInfo = attValue.typeInfo();
if (type.mDepth != resTypeInfo.mDepth
|| type.mType != resTypeInfo.mType
|| type.mBaseType != resTypeInfo.mBaseType) {
invalid = true;
}
}
if (invalid) {
MessageBox dialog = new MessageBox(viewer.getTree()
.getShell(), SWT.ICON_ERROR | SWT.OK);
dialog.setText("Invalid Value");
dialog.setMessage("Given value is invalid");
dialog.open();
} else {
updateAttributeValue((AttributeElement) element, att,
attValue);
}
}
viewer.update(element, null);
}
/**
 * Tells whether the given array attribute carries a complete property
 * description (array property plus element property) and does not hold
 * resource-model elements.
 *
 * @param attribute attribute to inspect; may be null.
 * @return true only when the full property chain is present and the base
 *         type is not RESOURCEMODEL.
 */
private boolean isArrayAttributeValid(
        SimulatorResourceAttribute attribute) {
    if (null == attribute) {
        return false;
    }
    AttributeValue value = attribute.value();
    if (null == value) {
        return false;
    }
    AttributeProperty property = attribute.property();
    if (null == property || !property.isArray()) {
        return false;
    }
    ArrayProperty arrayProperty = property.asArray();
    if (null == arrayProperty
            || null == arrayProperty.getElementProperty()) {
        return false;
    }
    // Resource-model arrays are edited through their children instead.
    return value.typeInfo().mBaseType != ValueType.RESOURCEMODEL;
}
/**
 * Converts the given list into a string array.
 * <p>
 * NOTE(review): for a null or empty list this returns a one-element array
 * whose single entry is {@code null} — presumably a placeholder expected by
 * the widget code; confirm callers rely on it before changing.
 *
 * @param values list to convert; may be null.
 * @return array holding the list contents, or a single-null-element array.
 */
public String[] convertListToStringArray(List<String> values) {
    if (null != values && values.size() > 0) {
        // Idiomatic zero-length seed: toArray() allocates an array of
        // exactly the right size. The previous new String[1] seed only
        // happened to work because toArray grows the array when needed.
        return values.toArray(new String[0]);
    }
    return new String[1];
}
/**
 * Stores the new value on the edited attribute and keeps the surrounding
 * hierarchy consistent: the top-most ancestor is flagged for POST and the
 * parent's nested value is updated in place.
 *
 * @param attributeElement tree element being edited.
 * @param att              attribute whose value changed.
 * @param value            newly parsed value to apply.
 */
public void updateAttributeValue(AttributeElement attributeElement,
        SimulatorResourceAttribute att, AttributeValue value) {
    // Walk up to the top-most attribute element and mark it for POST so the
    // modified representation gets sent to the resource.
    Object parent = attributeElement.getParent();
    AttributeElement rootElement = attributeElement;
    while (parent instanceof AttributeElement) {
        rootElement = (AttributeElement) parent;
        parent = ((AttributeElement) parent).getParent();
    }
    rootElement.setPostState(true);
    // Set the attribute value on the edited element itself.
    attributeElement.getSimulatorResourceAttribute().setValue(value);
    // Propagate the change into the parent's nested value hierarchy.
    parent = attributeElement.getParent();
    if (parent instanceof AttributeElement) {
        try {
            ((AttributeElement) parent).deepSetChildValue(att);
        } catch (InvalidArgsException e) {
            // Logged through the plug-in log manager; the previous
            // printStackTrace() duplicated the report on stderr and
            // has been removed.
            Activator
                    .getDefault()
                    .getLogManager()
                    .log(Level.ERROR.ordinal(), new Date(),
                            Utility.getSimulatorErrorString(e, null));
        }
    }
}
}
/**
 * Editing support for the POST check-box column. Only top-level attributes
 * (direct children of the {@link ResourceRepresentation}) are editable; a
 * change on a nested attribute propagates to its top-most ancestor.
 */
private static class PostSelectionEditor extends EditingSupport {

    private final TreeViewer viewer;

    public PostSelectionEditor(TreeViewer viewer) {
        super(viewer);
        this.viewer = viewer;
    }

    @Override
    protected boolean canEdit(Object arg0) {
        return true;
    }

    @Override
    protected CellEditor getCellEditor(Object element) {
        // Only top-level attributes expose the read-only check-box editor.
        if (element instanceof AttributeElement
                && ((AttributeElement) element).getParent() instanceof ResourceRepresentation) {
            return new CheckboxCellEditor(null, SWT.CHECK | SWT.READ_ONLY);
        }
        return null;
    }

    @Override
    protected Object getValue(Object element) {
        if (element instanceof AttributeElement) {
            return ((AttributeElement) element).getPostState();
        }
        return false;
    }

    @Override
    protected void setValue(Object element, Object value) {
        if (!(element instanceof AttributeElement)) {
            return;
        }
        boolean status = (Boolean) value;
        ((AttributeElement) element).setPostState(status);
        viewer.update(element, null);
        // BUG FIX: the old code indexed getSelection()[0] before checking
        // for an empty selection, which throws
        // ArrayIndexOutOfBoundsException; its null check could never fire.
        Tree tree = viewer.getTree();
        TreeItem[] selection = tree.getSelection();
        if (selection.length == 0) {
            return;
        }
        TreeItem item = selection[0];
        // Update the post state of the top-most parent of this attribute.
        TreeItem parent = item.getParentItem();
        if (null != parent) {
            while (parent.getParentItem() != null) {
                parent = parent.getParentItem();
            }
            Object data = parent.getData();
            // Guarded cast: avoid a ClassCastException for non-attribute
            // root items.
            if (data instanceof AttributeElement) {
                ((AttributeElement) data).setPostState(status);
            }
        }
    }
}
/**
 * Returns the top-most {@link AttributeElement} ancestor of the given
 * element, i.e. the direct child of the {@link ResourceRepresentation}.
 * If the element itself sits directly below the representation it is
 * returned unchanged.
 *
 * @param element element whose root ancestor is wanted.
 * @return the root attribute element, or null when the parent chain does
 *         not contain any {@link AttributeElement}.
 */
private AttributeElement getRootElement(AttributeElement element) {
    Object parent = element.getParent();
    if (parent instanceof ResourceRepresentation) {
        return element;
    }
    AttributeElement root = null;
    // BUG FIX: loop on "is still an AttributeElement" instead of "is not a
    // ResourceRepresentation" — the old condition dereferenced a null (or
    // foreign-typed) parent and crashed when the chain did not end in a
    // ResourceRepresentation.
    while (parent instanceof AttributeElement) {
        root = (AttributeElement) parent;
        parent = root.getParent();
    }
    return root;
}
}
| apache-2.0 |
langfr/camunda-bpm-platform | engine/src/main/java/org/camunda/bpm/engine/impl/HistoricIncidentQueryProperty.java | 2517 | /*
* Copyright Camunda Services GmbH and/or licensed to Camunda Services GmbH
* under one or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information regarding copyright
* ownership. Camunda licenses this file to you under the Apache License,
* Version 2.0; you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.camunda.bpm.engine.impl;
import org.camunda.bpm.engine.query.QueryProperty;
/**
 * Query/sort properties available for historic incident queries. Each
 * property maps to a column of the historic incident database table.
 *
 * @author Roman Smirnov
 */
public interface HistoricIncidentQueryProperty {

  // Interface constants are implicitly public static final.

  // Incident identity and lifecycle.
  QueryProperty INCIDENT_ID = new QueryPropertyImpl("ID_");
  QueryProperty INCIDENT_MESSAGE = new QueryPropertyImpl("INCIDENT_MSG_");
  QueryProperty INCIDENT_CREATE_TIME = new QueryPropertyImpl("CREATE_TIME_");
  QueryProperty INCIDENT_END_TIME = new QueryPropertyImpl("END_TIME_");
  QueryProperty INCIDENT_TYPE = new QueryPropertyImpl("INCIDENT_TYPE_");

  // Execution / process context.
  QueryProperty EXECUTION_ID = new QueryPropertyImpl("EXECUTION_ID_");
  QueryProperty ACTIVITY_ID = new QueryPropertyImpl("ACTIVITY_ID_");
  QueryProperty PROCESS_INSTANCE_ID = new QueryPropertyImpl("PROC_INST_ID_");
  QueryProperty PROCESS_DEFINITION_ID = new QueryPropertyImpl("PROC_DEF_ID_");
  QueryProperty PROCESS_DEFINITION_KEY = new QueryPropertyImpl("PROC_DEF_KEY_");

  // Incident causality chain.
  QueryProperty CAUSE_INCIDENT_ID = new QueryPropertyImpl("CAUSE_INCIDENT_ID_");
  QueryProperty ROOT_CAUSE_INCIDENT_ID = new QueryPropertyImpl("ROOT_CAUSE_INCIDENT_ID_");

  // Configuration, tenancy and state.
  QueryProperty HISTORY_CONFIGURATION = new QueryPropertyImpl("HISTORY_CONFIGURATION_");
  QueryProperty CONFIGURATION = new QueryPropertyImpl("CONFIGURATION_");
  QueryProperty TENANT_ID = new QueryPropertyImpl("TENANT_ID_");
  QueryProperty INCIDENT_STATE = new QueryPropertyImpl("INCIDENT_STATE_");
}
| apache-2.0 |
kod3r/graphhopper | core/src/test/java/com/graphhopper/routing/util/FastestWeightingTest.java | 3923 | /*
* Licensed to GraphHopper and Peter Karich under one or more contributor
* license agreements. See the NOTICE file distributed with this work for
* additional information regarding copyright ownership.
*
* GraphHopper licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except in
* compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.graphhopper.routing.util;
import com.graphhopper.routing.VirtualEdgeIteratorState;
import com.graphhopper.util.EdgeIterator;
import com.graphhopper.util.GHUtility;
import com.graphhopper.util.Helper;
import com.graphhopper.util.PMap;
import org.junit.Test;
import static org.junit.Assert.assertEquals;
/**
 * Unit tests for {@link FastestWeighting}.
 *
 * @author Peter Karich
 */
public class FastestWeightingTest
{
    // A single shared car encoder is sufficient for every case below.
    private final FlagEncoder encoder = new EncodingManager("CAR").getEncoder("CAR");

    @Test
    public void testMinWeightHasSameUnitAs_getWeight()
    {
        FastestWeighting weighting = new FastestWeighting(encoder);
        long maxSpeedFlags = encoder.setProperties(encoder.getMaxSpeed(), true, true);
        // At maximum speed the minimum weight for a distance must match the
        // calculated weight of an edge of that distance.
        assertEquals(weighting.getMinWeight(10),
                weighting.calcWeight(createEdge(10, maxSpeedFlags), false, EdgeIterator.NO_EDGE), 1e-8);
    }

    @Test
    public void testWeightWrongHeading()
    {
        FastestWeighting weighting = new FastestWeighting(encoder, new PMap().put("heading_penalty", "100"));
        VirtualEdgeIteratorState edge = new VirtualEdgeIteratorState(0, 1, 1, 2, 10,
                encoder.setProperties(10, true, true), "test", Helper.createPointList(51, 0, 51, 1));
        double baseWeight = weighting.calcWeight(edge, false, 0);

        // The penalty applies once the edge is marked unfavored forwards...
        edge.setVirtualEdgePreference(true, false);
        assertEquals(baseWeight + 100, weighting.calcWeight(edge, false, 0), 1e-8);
        // ...the reverse direction stays unpenalized...
        assertEquals(baseWeight, weighting.calcWeight(edge, true, 0), 1e-8);
        // ...until it is marked as well...
        edge.setVirtualEdgePreference(true, true);
        assertEquals(baseWeight + 100, weighting.calcWeight(edge, true, 0), 1e-8);
        // ...and is lifted again when the reverse mark is cleared.
        edge.setVirtualEdgePreference(false, true);
        assertEquals(baseWeight, weighting.calcWeight(edge, true, 0), 1e-8);

        // Without explicit configuration the default heading penalty is used.
        weighting = new FastestWeighting(encoder);
        assertEquals(baseWeight + FastestWeighting.DEFAULT_HEADING_PENALTY,
                weighting.calcWeight(edge, false, 0), 1e-8);
    }

    @Test
    public void testSpeed0()
    {
        FastestWeighting weighting = new FastestWeighting(encoder);
        // A speed of zero must yield an infinite weight...
        assertEquals(Double.POSITIVE_INFINITY,
                weighting.calcWeight(createEdge(10, encoder.setProperties(0, true, true)), false, EdgeIterator.NO_EDGE), 1e-8);

        // ...even with distance 0, where a naive 0/0 would produce NaN:
        // calcWeight must not return NaN!
        assertEquals(Double.POSITIVE_INFINITY,
                weighting.calcWeight(createEdge(0, encoder.setProperties(0, true, true)), false, EdgeIterator.NO_EDGE), 1e-8);
    }

    /**
     * Builds a minimal edge stub; only distance, flags and the boolean lookup
     * are exercised by the weighting.
     */
    EdgeIterator createEdge( final double distance, final long flags )
    {
        return new GHUtility.DisabledEdgeIterator()
        {
            @Override
            public double getDistance()
            {
                return distance;
            }

            @Override
            public long getFlags()
            {
                return flags;
            }

            @Override
            public boolean getBoolean( int key, boolean reverse, boolean _default )
            {
                return _default;
            }
        };
    }
}
| apache-2.0 |
lukecwik/incubator-beam | runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/fn/control/RegisterAndProcessBundleOperationTest.java | 30668 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.beam.runners.dataflow.worker.fn.control;
import static org.apache.beam.runners.dataflow.worker.fn.control.RegisterAndProcessBundleOperation.encodeAndConcat;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.empty;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertSame;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.eq;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.concurrent.Callable;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.CompletionStage;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.atomic.AtomicReference;
import org.apache.beam.model.fnexecution.v1.BeamFnApi;
import org.apache.beam.model.fnexecution.v1.BeamFnApi.InstructionRequest;
import org.apache.beam.model.fnexecution.v1.BeamFnApi.InstructionRequest.RequestCase;
import org.apache.beam.model.fnexecution.v1.BeamFnApi.InstructionResponse;
import org.apache.beam.model.fnexecution.v1.BeamFnApi.ProcessBundleDescriptor;
import org.apache.beam.model.fnexecution.v1.BeamFnApi.ProcessBundleProgressResponse;
import org.apache.beam.model.fnexecution.v1.BeamFnApi.StateAppendRequest;
import org.apache.beam.model.fnexecution.v1.BeamFnApi.StateClearRequest;
import org.apache.beam.model.fnexecution.v1.BeamFnApi.StateGetRequest;
import org.apache.beam.model.fnexecution.v1.BeamFnApi.StateKey;
import org.apache.beam.model.fnexecution.v1.BeamFnApi.StateRequest;
import org.apache.beam.model.fnexecution.v1.BeamFnApi.StateResponse;
import org.apache.beam.runners.core.InMemoryMultimapSideInputView;
import org.apache.beam.runners.core.InMemoryStateInternals;
import org.apache.beam.runners.core.SideInputReader;
import org.apache.beam.runners.dataflow.worker.DataflowExecutionContext.DataflowStepContext;
import org.apache.beam.runners.dataflow.worker.DataflowPortabilityPCollectionView;
import org.apache.beam.runners.dataflow.worker.util.common.worker.OperationContext;
import org.apache.beam.runners.fnexecution.control.InstructionRequestHandler;
import org.apache.beam.runners.fnexecution.state.StateDelegator;
import org.apache.beam.runners.fnexecution.state.StateRequestHandler;
import org.apache.beam.sdk.coders.ByteArrayCoder;
import org.apache.beam.sdk.coders.KvCoder;
import org.apache.beam.sdk.coders.StringUtf8Coder;
import org.apache.beam.sdk.fn.IdGenerator;
import org.apache.beam.sdk.fn.IdGenerators;
import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
import org.apache.beam.sdk.transforms.windowing.GlobalWindow;
import org.apache.beam.sdk.util.CoderUtils;
import org.apache.beam.sdk.util.MoreFutures;
import org.apache.beam.sdk.util.ThrowingRunnable;
import org.apache.beam.sdk.util.WindowedValue.FullWindowedValueCoder;
import org.apache.beam.sdk.values.KV;
import org.apache.beam.sdk.values.PCollectionView;
import org.apache.beam.sdk.values.TupleTag;
import org.apache.beam.sdk.values.ValueInSingleWindow.Coder;
import org.apache.beam.vendor.grpc.v1p36p0.com.google.protobuf.ByteString;
import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.ImmutableList;
import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.ImmutableMap;
import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.ImmutableTable;
import org.checkerframework.checker.nullness.qual.Nullable;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
import org.mockito.ArgumentCaptor;
import org.mockito.Captor;
import org.mockito.Mock;
import org.mockito.MockitoAnnotations;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
/** Tests for {@link RegisterAndProcessBundleOperation}. */
@RunWith(JUnit4.class)
@SuppressWarnings({
"rawtypes", // TODO(https://issues.apache.org/jira/browse/BEAM-10556)
"FutureReturnValueIgnored",
})
public class RegisterAndProcessBundleOperationTest {
private static final BeamFnApi.RegisterRequest REGISTER_REQUEST =
BeamFnApi.RegisterRequest.newBuilder()
.addProcessBundleDescriptor(BeamFnApi.ProcessBundleDescriptor.newBuilder().setId("555"))
.build();
@Mock private OperationContext mockContext;
@Mock private StateDelegator mockBeamFnStateDelegator;
@Captor private ArgumentCaptor<StateRequestHandler> stateHandlerCaptor;
private AtomicInteger stateServiceRegisterCounter;
private AtomicInteger stateServiceDeregisterCounter;
private AtomicInteger stateServiceAbortCounter;
@Before
public void setUp() {
MockitoAnnotations.initMocks(this);
stateServiceRegisterCounter = new AtomicInteger();
stateServiceDeregisterCounter = new AtomicInteger();
stateServiceAbortCounter = new AtomicInteger();
when(mockBeamFnStateDelegator.registerForProcessBundleInstructionId(
any(String.class), any(StateRequestHandler.class)))
.thenAnswer(
new Answer<StateDelegator.Registration>() {
@Override
public StateDelegator.Registration answer(InvocationOnMock invocationOnMock)
throws Throwable {
stateServiceRegisterCounter.incrementAndGet();
return new StateDelegator.Registration() {
@Override
public void deregister() {
stateServiceDeregisterCounter.incrementAndGet();
}
@Override
public void abort() {
stateServiceAbortCounter.incrementAndGet();
}
};
}
});
}
private IdGenerator makeIdGeneratorStartingFrom(long initialValue) {
return new IdGenerator() {
AtomicLong longs = new AtomicLong(initialValue);
@Override
public String getId() {
return Long.toString(longs.getAndIncrement());
}
};
}
@Test
public void testSupportsRestart() {
new RegisterAndProcessBundleOperation(
IdGenerators.decrementingLongs(),
new TestInstructionRequestHandler() {
@Override
public CompletionStage<InstructionResponse> handle(InstructionRequest request) {
CompletableFuture<InstructionResponse> responseFuture = new CompletableFuture<>();
completeFuture(request, responseFuture);
return responseFuture;
}
},
mockBeamFnStateDelegator,
REGISTER_REQUEST,
ImmutableMap.of(),
ImmutableMap.of(),
ImmutableMap.of(),
ImmutableTable.of(),
ImmutableMap.of(),
mockContext)
.supportsRestart();
}
@Test
public void testRegisterOnlyOnFirstBundle() throws Exception {
List<BeamFnApi.InstructionRequest> requests = new ArrayList<>();
IdGenerator idGenerator = makeIdGeneratorStartingFrom(777L);
RegisterAndProcessBundleOperation operation =
new RegisterAndProcessBundleOperation(
idGenerator,
new TestInstructionRequestHandler() {
@Override
public CompletionStage<InstructionResponse> handle(InstructionRequest request) {
requests.add(request);
switch (request.getRequestCase()) {
case REGISTER:
case PROCESS_BUNDLE:
return CompletableFuture.completedFuture(responseFor(request).build());
default:
// block forever on other requests
return new CompletableFuture<>();
}
}
},
mockBeamFnStateDelegator,
REGISTER_REQUEST,
ImmutableMap.of(),
ImmutableMap.of(),
ImmutableMap.of(),
ImmutableTable.of(),
ImmutableMap.of(),
mockContext);
// Ensure that the first time we start we send the register and process bundle requests
assertThat(requests, empty());
operation.start();
assertEquals(
requests.get(0),
BeamFnApi.InstructionRequest.newBuilder()
.setInstructionId("777")
.setRegister(REGISTER_REQUEST)
.build());
assertEquals(
requests.get(1),
BeamFnApi.InstructionRequest.newBuilder()
.setInstructionId("778")
.setProcessBundle(
BeamFnApi.ProcessBundleRequest.newBuilder().setProcessBundleDescriptorId("555"))
.build());
operation.finish();
// Ensure on restart that we only send the process bundle request
operation.start();
assertEquals(
requests.get(2),
BeamFnApi.InstructionRequest.newBuilder()
.setInstructionId("779")
.setProcessBundle(
BeamFnApi.ProcessBundleRequest.newBuilder().setProcessBundleDescriptorId("555"))
.build());
operation.finish();
}
@Test
public void testProcessingBundleBlocksOnFinish() throws Exception {
List<BeamFnApi.InstructionRequest> requests = new ArrayList<>();
IdGenerator idGenerator = makeIdGeneratorStartingFrom(777L);
ExecutorService executorService = Executors.newCachedThreadPool();
RegisterAndProcessBundleOperation operation =
new RegisterAndProcessBundleOperation(
idGenerator,
new TestInstructionRequestHandler() {
@Override
public CompletionStage<InstructionResponse> handle(InstructionRequest request) {
requests.add(request);
switch (request.getRequestCase()) {
case REGISTER:
return CompletableFuture.completedFuture(responseFor(request).build());
case PROCESS_BUNDLE:
CompletableFuture<InstructionResponse> responseFuture =
new CompletableFuture<>();
executorService.submit(
() -> {
// Purposefully sleep simulating SDK harness doing work
Thread.sleep(100);
responseFuture.complete(responseFor(request).build());
completeFuture(request, responseFuture);
return null;
});
return responseFuture;
default:
// Anything else hangs; nothing else should be blocking
return new CompletableFuture<>();
}
}
},
mockBeamFnStateDelegator,
REGISTER_REQUEST,
ImmutableMap.of(),
ImmutableMap.of(),
ImmutableMap.of(),
ImmutableTable.of(),
ImmutableMap.of(),
mockContext);
operation.start();
// This method blocks till the requests are completed
operation.finish();
// Ensure that the messages were received
assertEquals(
requests.get(0),
BeamFnApi.InstructionRequest.newBuilder()
.setInstructionId("777")
.setRegister(REGISTER_REQUEST)
.build());
assertEquals(
requests.get(1),
BeamFnApi.InstructionRequest.newBuilder()
.setInstructionId("778")
.setProcessBundle(
BeamFnApi.ProcessBundleRequest.newBuilder().setProcessBundleDescriptorId("555"))
.build());
}
@Test
public void testProcessingBundleHandlesUserStateRequests() throws Exception {
IdGenerator idGenerator = makeIdGeneratorStartingFrom(777L);
ExecutorService executorService = Executors.newCachedThreadPool();
InMemoryStateInternals<ByteString> stateInternals =
InMemoryStateInternals.forKey(ByteString.EMPTY);
DataflowStepContext mockStepContext = mock(DataflowStepContext.class);
DataflowStepContext mockUserStepContext = mock(DataflowStepContext.class);
when(mockStepContext.namespacedToUser()).thenReturn(mockUserStepContext);
when(mockUserStepContext.stateInternals()).thenReturn(stateInternals);
InstructionRequestHandler instructionRequestHandler =
new TestInstructionRequestHandler() {
@Override
public CompletionStage<InstructionResponse> handle(InstructionRequest request) {
switch (request.getRequestCase()) {
case REGISTER:
return CompletableFuture.completedFuture(responseFor(request).build());
case PROCESS_BUNDLE:
return MoreFutures.supplyAsync(
() -> {
StateRequest partialRequest =
StateRequest.newBuilder()
.setStateKey(
StateKey.newBuilder()
.setBagUserState(
StateKey.BagUserState.newBuilder()
.setTransformId("testPTransformId")
.setWindow(ByteString.EMPTY)
.setUserStateId("testUserStateId")))
.buildPartial();
StateRequest get =
partialRequest
.toBuilder()
.setGet(StateGetRequest.getDefaultInstance())
.build();
StateRequest clear =
partialRequest
.toBuilder()
.setClear(StateClearRequest.getDefaultInstance())
.build();
StateRequest append =
partialRequest
.toBuilder()
.setAppend(
StateAppendRequest.newBuilder()
.setData(ByteString.copyFromUtf8("ABC")))
.build();
StateRequestHandler stateHandler = stateHandlerCaptor.getValue();
StateResponse.Builder getWhenEmptyResponse =
MoreFutures.get(stateHandler.handle(get));
assertEquals(ByteString.EMPTY, getWhenEmptyResponse.getGet().getData());
StateResponse.Builder appendWhenEmptyResponse =
MoreFutures.get(stateHandler.handle(append));
assertNotNull(appendWhenEmptyResponse);
StateResponse.Builder appendWhenEmptyResponse2 =
MoreFutures.get(stateHandler.handle(append));
assertNotNull(appendWhenEmptyResponse2);
StateResponse.Builder getWhenHasValueResponse =
MoreFutures.get(stateHandler.handle(get));
assertEquals(
ByteString.copyFromUtf8("ABC").concat(ByteString.copyFromUtf8("ABC")),
getWhenHasValueResponse.getGet().getData());
StateResponse.Builder clearResponse =
MoreFutures.get(stateHandler.handle(clear));
assertNotNull(clearResponse);
return responseFor(request).build();
});
default:
// block forever
return new CompletableFuture<>();
}
}
};
RegisterAndProcessBundleOperation operation =
new RegisterAndProcessBundleOperation(
idGenerator,
instructionRequestHandler,
mockBeamFnStateDelegator,
REGISTER_REQUEST,
ImmutableMap.of(),
ImmutableMap.of("testPTransformId", mockStepContext),
ImmutableMap.of(),
ImmutableTable.of(),
ImmutableMap.of(),
mockContext);
operation.start();
verify(mockBeamFnStateDelegator)
.registerForProcessBundleInstructionId(eq("778"), stateHandlerCaptor.capture());
// This method blocks till the requests are completed
operation.finish();
// Ensure that the number of reigstrations matches the number of deregistrations
assertEquals(stateServiceRegisterCounter.get(), stateServiceDeregisterCounter.get());
assertEquals(0, stateServiceAbortCounter.get());
}
@Test
public void testProcessingBundleHandlesMultimapSideInputRequests() throws Exception {
IdGenerator idGenerator = makeIdGeneratorStartingFrom(777L);
ExecutorService executorService = Executors.newCachedThreadPool();
DataflowStepContext mockStepContext = mock(DataflowStepContext.class);
DataflowStepContext mockUserStepContext = mock(DataflowStepContext.class);
when(mockStepContext.namespacedToUser()).thenReturn(mockUserStepContext);
CountDownLatch waitForStateHandler = new CountDownLatch(1);
// Issues state calls to the Runner after a process bundle request is sent.
InstructionRequestHandler fakeClient =
new TestInstructionRequestHandler() {
@Override
public CompletionStage<InstructionResponse> handle(InstructionRequest request) {
switch (request.getRequestCase()) {
case REGISTER:
return CompletableFuture.completedFuture(responseFor(request).build());
case PROCESS_BUNDLE:
return MoreFutures.supplyAsync(
() -> {
StateKey getKey =
StateKey.newBuilder()
.setMultimapSideInput(
StateKey.MultimapSideInput.newBuilder()
.setTransformId("testPTransformId")
.setSideInputId("testSideInputId")
.setWindow(
ByteString.copyFrom(
CoderUtils.encodeToByteArray(
GlobalWindow.Coder.INSTANCE,
GlobalWindow.INSTANCE)))
.setKey(
ByteString.copyFrom(
CoderUtils.encodeToByteArray(
ByteArrayCoder.of(),
"ABC".getBytes(StandardCharsets.UTF_8),
Coder.Context.NESTED))))
.build();
StateRequest getRequest =
StateRequest.newBuilder()
.setStateKey(getKey)
.setGet(StateGetRequest.getDefaultInstance())
.build();
waitForStateHandler.await();
StateRequestHandler stateHandler = stateHandlerCaptor.getValue();
StateResponse.Builder getResponse =
MoreFutures.get(stateHandler.handle(getRequest));
assertEquals(
encodeAndConcat(Arrays.asList("X", "Y", "Z"), StringUtf8Coder.of()),
getResponse.getGet().getData());
return responseFor(request).build();
});
default:
// block forever on other request types
return new CompletableFuture<>();
}
}
};
SideInputReader fakeSideInputReader =
new SideInputReader() {
@Override
public <T> @Nullable T get(PCollectionView<T> view, BoundedWindow window) {
assertEquals(GlobalWindow.INSTANCE, window);
assertEquals("testSideInputId", view.getTagInternal().getId());
return (T)
InMemoryMultimapSideInputView.fromIterable(
ByteArrayCoder.of(),
ImmutableList.of(
KV.of("ABC".getBytes(StandardCharsets.UTF_8), "X"),
KV.of("ABC".getBytes(StandardCharsets.UTF_8), "Y"),
KV.of("ABC".getBytes(StandardCharsets.UTF_8), "Z")));
}
@Override
public <T> boolean contains(PCollectionView<T> view) {
return "testSideInputId".equals(view.getTagInternal().getId());
}
@Override
public boolean isEmpty() {
return false;
}
};
RegisterAndProcessBundleOperation operation =
new RegisterAndProcessBundleOperation(
idGenerator,
fakeClient,
mockBeamFnStateDelegator,
REGISTER_REQUEST,
ImmutableMap.of(),
ImmutableMap.of("testPTransformId", mockStepContext),
ImmutableMap.of("testPTransformId", fakeSideInputReader),
ImmutableTable.of(
"testPTransformId",
"testSideInputId",
DataflowPortabilityPCollectionView.with(
new TupleTag<>("testSideInputId"),
FullWindowedValueCoder.of(
KvCoder.of(ByteArrayCoder.of(), StringUtf8Coder.of()),
GlobalWindow.Coder.INSTANCE))),
ImmutableMap.of(),
mockContext);
operation.start();
verify(mockBeamFnStateDelegator)
.registerForProcessBundleInstructionId(eq("778"), stateHandlerCaptor.capture());
waitForStateHandler.countDown();
// This method blocks till the requests are completed
operation.finish();
// Ensure that the number of reigstrations matches the number of deregistrations
assertEquals(stateServiceRegisterCounter.get(), stateServiceDeregisterCounter.get());
assertEquals(0, stateServiceAbortCounter.get());
}
@Test
public void testAbortCancelsAndCleansUpDuringRegister() throws Exception {
IdGenerator idGenerator = makeIdGeneratorStartingFrom(777L);
ExecutorService executorService = Executors.newCachedThreadPool();
CountDownLatch waitForAbortToComplete = new CountDownLatch(1);
AtomicReference<ThrowingRunnable> abortReference = new AtomicReference<>();
RegisterAndProcessBundleOperation operation =
new RegisterAndProcessBundleOperation(
idGenerator,
new TestInstructionRequestHandler() {
@Override
public CompletionStage<InstructionResponse> handle(InstructionRequest request) {
CompletableFuture<InstructionResponse> responseFuture = new CompletableFuture<>();
if (request.getRequestCase() == RequestCase.PROCESS_BUNDLE) {
executorService.submit(
(Callable<Void>)
() -> {
abortReference.get().run();
waitForAbortToComplete.countDown();
return null;
});
} else {
completeFuture(request, responseFuture);
}
return responseFuture;
}
},
mockBeamFnStateDelegator,
REGISTER_REQUEST,
ImmutableMap.of(),
ImmutableMap.of(),
ImmutableMap.of(),
ImmutableTable.of(),
ImmutableMap.of(),
mockContext);
abortReference.set(operation::abort);
operation.start();
waitForAbortToComplete.await();
// Ensure that the number of registrations matches the number of aborts
assertEquals(stateServiceRegisterCounter.get(), stateServiceAbortCounter.get());
assertEquals(0, stateServiceDeregisterCounter.get());
}
@Test
public void testAbortCancelsAndCleansUpDuringProcessBundle() throws Exception {
IdGenerator idGenerator = makeIdGeneratorStartingFrom(777L);
ExecutorService executorService = Executors.newCachedThreadPool();
CountDownLatch waitForAbortToComplete = new CountDownLatch(1);
AtomicReference<ThrowingRunnable> abortReference = new AtomicReference<>();
RegisterAndProcessBundleOperation operation =
new RegisterAndProcessBundleOperation(
idGenerator,
new TestInstructionRequestHandler() {
@Override
public CompletionStage<InstructionResponse> handle(InstructionRequest request) {
CompletableFuture<InstructionResponse> responseFuture = new CompletableFuture<>();
if (request.getRequestCase() == RequestCase.PROCESS_BUNDLE) {
executorService.submit(
(Callable<Void>)
() -> {
abortReference.get().run();
waitForAbortToComplete.countDown();
return null;
});
} else {
completeFuture(request, responseFuture);
}
return responseFuture;
}
},
mockBeamFnStateDelegator,
REGISTER_REQUEST,
ImmutableMap.of(),
ImmutableMap.of(),
ImmutableMap.of(),
ImmutableTable.of(),
ImmutableMap.of(),
mockContext);
abortReference.set(operation::abort);
operation.start();
waitForAbortToComplete.await();
// Ensure that the number of registrations matches the number of aborts
assertEquals(stateServiceRegisterCounter.get(), stateServiceAbortCounter.get());
assertEquals(0, stateServiceDeregisterCounter.get());
}
private InstructionResponse.Builder responseFor(BeamFnApi.InstructionRequest request) {
BeamFnApi.InstructionResponse.Builder response =
BeamFnApi.InstructionResponse.newBuilder().setInstructionId(request.getInstructionId());
if (request.hasRegister()) {
response.setRegister(BeamFnApi.RegisterResponse.getDefaultInstance());
} else if (request.hasProcessBundle()) {
response.setProcessBundle(BeamFnApi.ProcessBundleResponse.getDefaultInstance());
} else if (request.hasFinalizeBundle()) {
response.setFinalizeBundle(BeamFnApi.FinalizeBundleResponse.getDefaultInstance());
} else if (request.hasProcessBundleProgress()) {
response.setProcessBundleProgress(
BeamFnApi.ProcessBundleProgressResponse.getDefaultInstance());
} else if (request.hasProcessBundleSplit()) {
response.setProcessBundleSplit(BeamFnApi.ProcessBundleSplitResponse.getDefaultInstance());
}
return response;
}
private void completeFuture(
BeamFnApi.InstructionRequest request, CompletableFuture<InstructionResponse> response) {
response.complete(responseFor(request).build());
}
@Test
public void testGetProcessBundleProgressReturnsDefaultInstanceIfNoBundleIdCached()
throws Exception {
InstructionRequestHandler mockInstructionRequestHandler = mock(InstructionRequestHandler.class);
RegisterAndProcessBundleOperation operation =
new RegisterAndProcessBundleOperation(
IdGenerators.decrementingLongs(),
mockInstructionRequestHandler,
mockBeamFnStateDelegator,
REGISTER_REQUEST,
ImmutableMap.of(),
ImmutableMap.of(),
ImmutableMap.of(),
ImmutableTable.of(),
ImmutableMap.of(),
mockContext);
assertEquals(
ProcessBundleProgressResponse.getDefaultInstance(),
MoreFutures.get(operation.getProcessBundleProgress()));
}
@Test
public void testGetProcessBundleProgressFetchesProgressResponseWhenBundleIdCached()
    throws Exception {
  InstructionRequestHandler mockInstructionRequestHandler = mock(InstructionRequestHandler.class);
  RegisterAndProcessBundleOperation operation =
      new RegisterAndProcessBundleOperation(
          IdGenerators.decrementingLongs(),
          mockInstructionRequestHandler,
          mockBeamFnStateDelegator,
          REGISTER_REQUEST,
          ImmutableMap.of(),
          ImmutableMap.of(),
          ImmutableMap.of(),
          ImmutableTable.of(),
          ImmutableMap.of(),
          mockContext);
  operation.getProcessBundleInstructionId(); // this generates and caches bundleId
  // Stub the handler so that any progress request resolves to this exact response instance.
  ProcessBundleProgressResponse expectedResult =
      ProcessBundleProgressResponse.newBuilder().build();
  InstructionResponse instructionResponse =
      InstructionResponse.newBuilder().setProcessBundleProgress(expectedResult).build();
  CompletableFuture resultFuture = CompletableFuture.completedFuture(instructionResponse);
  when(mockInstructionRequestHandler.handle(any())).thenReturn(resultFuture);
  // Because a bundle id is cached, the operation must fetch progress through the handler.
  final ProcessBundleProgressResponse result =
      MoreFutures.get(operation.getProcessBundleProgress());
  assertSame("Return value from mockInstructionRequestHandler", expectedResult, result);
}
/**
 * No-op base implementation of {@link InstructionRequestHandler}; individual tests subclass it
 * and override only {@code handle}, leaving registration and close as empty stubs.
 */
private abstract static class TestInstructionRequestHandler implements InstructionRequestHandler {
  @Override
  public void registerProcessBundleDescriptor(ProcessBundleDescriptor descriptor) {}
  @Override
  public void close() {}
}
}
| apache-2.0 |
bsa01/qbit | qbit/servlet/src/test/java/io/advantageous/qbit/servlet/servletproto/PrototypeJetty.java | 1905 | /*
* Copyright (c) 2015. Rick Hightower, Geoff Chandler
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* QBit - The Microservice lib for Java : JSON, WebSocket, REST. Be The Web!
*/
package io.advantageous.qbit.servlet.servletproto;
import org.eclipse.jetty.server.Request;
import org.eclipse.jetty.server.Server;
import org.eclipse.jetty.server.handler.AbstractHandler;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.io.IOException;
/**
* @author rhightower on 2/12/15.
*/
public class PrototypeJetty {
public static void main(final String... args) throws Exception {
Server server = new Server(8080);
server.setHandler(new HelloWorld());
server.start();
server.join();
}
public static class HelloWorld extends AbstractHandler {
public void handle(String target,
Request baseRequest,
HttpServletRequest request,
HttpServletResponse response)
throws IOException, ServletException {
response.setContentType("text/html;charset=utf-8");
response.setStatus(HttpServletResponse.SC_OK);
baseRequest.setHandled(true);
response.getWriter().println("<h1>Hello World</h1>");
}
}
}
| apache-2.0 |
vaglucas/cafeUnoesc | twitter4j-core/src/main/java/twitter4j/GeoLocation.java | 2496 | /*
* Copyright 2007 Yusuke Yamamoto
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package twitter4j;
/**
* A data class representing geo location.
*
* @author Yusuke Yamamoto - yusuke at mac.com
*/
public class GeoLocation implements java.io.Serializable {
protected double latitude;
protected double longitude;
private static final long serialVersionUID = -4847567157651889935L;
/**
* Creates a GeoLocation instance
*
* @param latitude the latitude
* @param longitude the longitude
*/
public GeoLocation(double latitude, double longitude) {
this.latitude = latitude;
this.longitude = longitude;
}
/* For serialization purposes only. */
/* package */ GeoLocation() {
}
/**
* returns the latitude of the geo location
*
* @return the latitude
*/
public double getLatitude() {
return latitude;
}
/**
* returns the longitude of the geo location
*
* @return the longitude
*/
public double getLongitude() {
return longitude;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (!(o instanceof GeoLocation)) return false;
GeoLocation that = (GeoLocation) o;
if (Double.compare(that.getLatitude(), latitude) != 0) return false;
if (Double.compare(that.getLongitude(), longitude) != 0) return false;
return true;
}
@Override
public int hashCode() {
int result;
long temp;
temp = Double.doubleToLongBits(latitude);
result = (int) (temp ^ (temp >>> 32));
temp = Double.doubleToLongBits(longitude);
result = 31 * result + (int) (temp ^ (temp >>> 32));
return result;
}
@Override
public String toString() {
return "GeoLocation{" +
"latitude=" + latitude +
", longitude=" + longitude +
'}';
}
}
| apache-2.0 |
bharathkk/samza | samza-autoscaling/src/main/java/org/apache/samza/autoscaling/deployer/ConfigManager.java | 14812 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.samza.autoscaling.deployer;
import joptsimple.OptionSet;
import org.apache.hadoop.yarn.exceptions.YarnException;
import org.apache.samza.autoscaling.utils.YarnUtil;
import org.apache.samza.config.Config;
import org.apache.samza.config.JobConfig;
import org.apache.samza.container.SamzaContainer;
import org.apache.samza.coordinator.stream.messages.CoordinatorStreamMessage;
import org.apache.samza.coordinator.stream.CoordinatorStreamSystemConsumer;
import org.apache.samza.coordinator.stream.messages.SetConfig;
import org.apache.samza.job.JobRunner;
import org.apache.samza.job.model.ContainerModel;
import org.apache.samza.metrics.MetricsRegistryMap;
import org.apache.samza.system.SystemStreamPartitionIterator;
import org.apache.samza.util.CommandLine;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
/**
* This class is a separate module that runs along side with a job, and handles all config changes submitted to a job after the bootstrap of the job.
* All config changes are written to the coordinator stream using the @Link{CoordinatorStreamWriter}.
* The way this class works is that it reads all messages with type "set-config" written to the coordinator stream after
* the bootstrap of the job, and it handles the messages accordingly.
* The current configuration changes it handles are
* 1. changing the number of containers of a job
* 2. setting the server url for the first time (in order to get the JobModel).
* In order to use this class the run() method should be called to react to changes,
* or call the start(), processConfigMessages(), and stop() function instead.
* Additionally, you have to add the following configurations to the config file:
* yarn.rm.address=localhost //the ip of the resource manager in yarn
* yarn.rm.port=8088 //the port of the resource manager http server
* Additionally, the config manger will periodically poll the coordinator stream to see if there are any new messages.
* This period is set to 100 ms by default. However, it can be configured by adding the following property to the input config file.
* configManager.polling.interval=< polling interval >
*/
public class ConfigManager {
  // Consumer over the job's coordinator stream; all post-bootstrap config changes arrive here.
  private final CoordinatorStreamSystemConsumer coordinatorStreamConsumer;
  // Iterator positioned after the messages read so far; advanced by processConfigMessages.
  private SystemStreamPartitionIterator coordinatorStreamIterator;
  private static final Logger log = LoggerFactory.getLogger(ConfigManager.class);
  // Fallbacks used when the corresponding options are absent from the job config.
  private final long defaultPollingInterval = 100;
  private final int defaultReadJobModelDelayMs = 100;
  // Milliseconds between successive polls of the coordinator stream.
  private final long interval;
  // URL of the JobCoordinator's HTTP server; null until a set-config message provides it.
  private String coordinatorServerURL = null;
  private final String jobName;
  private final int jobID;
  private Config config;
  private YarnUtil yarnUtil;
  // Config keys read from the input config file.
  private final String rmAddressOpt = "yarn.rm.address";
  private final String rmPortOpt = "yarn.rm.port";
  private final String pollingIntervalOpt = "configManager.polling.interval";
  // Coordinator-stream keys this manager reacts to.
  private static final String SERVER_URL_OPT = "samza.autoscaling.server.url";
  private static final String YARN_CONTAINER_COUNT_OPT = "yarn.container.count";

  /**
   * Builds a ConfigManager from the job config. Requires {@code yarn.rm.address},
   * {@code yarn.rm.port} and the job name to be present; the job id defaults to 1 and the
   * polling interval to {@code defaultPollingInterval} when unset.
   *
   * @param config the job configuration
   * @throws IllegalArgumentException if a required option is missing or the polling interval
   *         is not positive
   */
  public ConfigManager(Config config) {
    //get rm address and port
    if (!config.containsKey(rmAddressOpt) || !config.containsKey(rmPortOpt)) {
      throw new IllegalArgumentException("Missing config: the config file does not contain the rm host or port.");
    }
    String rmAddress = config.get(rmAddressOpt);
    int rmPort = config.getInt(rmPortOpt);
    //get job name and id;
    if (!config.containsKey(JobConfig.JOB_NAME())) {
      throw new IllegalArgumentException("Missing config: the config does not contain the job name");
    }
    jobName = config.get(JobConfig.JOB_NAME());
    jobID = config.getInt(JobConfig.JOB_ID(), 1);
    //set polling interval
    if (config.containsKey(pollingIntervalOpt)) {
      long pollingInterval = config.getLong(pollingIntervalOpt);
      if (pollingInterval <= 0) {
        throw new IllegalArgumentException("polling interval cannot be a negative value");
      }
      this.interval = pollingInterval;
    } else {
      this.interval = defaultPollingInterval;
    }
    this.config = config;
    this.coordinatorStreamConsumer = new CoordinatorStreamSystemConsumer(config, new MetricsRegistryMap());
    this.yarnUtil = new YarnUtil(rmAddress, rmPort);
  }

  /**
   * This method is an infinite loop that periodically checks if there are any new messages in the job coordinator stream, and reads them if they exist.
   * Then it reacts accordingly based on the configuration that is being set.
   * The method calls the start() method to initialize the system, runs in an infinite loop, and calls the stop() method at the end to stop the consumer and the system.
   */
  public void run() {
    start();
    try {
      while (true) {
        Thread.sleep(interval);
        processConfigMessages();
      }
    } catch (InterruptedException e) {
      e.printStackTrace();
      log.warn("Got interrupt in config manager thread, so shutting down");
      // Restore the interrupt flag so callers further up the stack can observe it.
      Thread.currentThread().interrupt();
    } finally {
      log.info("Stopping the config manager");
      stop();
    }
  }

  /**
   * Starts the system by starting the consumer, then bootstraps (reads all unread messages)
   * to discover the coordinator server URL.
   */
  public void start() {
    register();
    coordinatorStreamConsumer.start();
    coordinatorStreamIterator = coordinatorStreamConsumer.getStartIterator();
    bootstrap();
  }

  /**
   * stops the consumer making the system ready to stop
   */
  public void stop() {
    coordinatorStreamConsumer.stop();
    coordinatorServerURL = null;
    yarnUtil.stop();
  }

  /**
   * registers the consumer
   */
  private void register() {
    coordinatorStreamConsumer.register();
  }

  /**
   * This function will bootstrap by reading all the unread messages until the moment of calling the function, and therefore find the server url.
   *
   * @throws IllegalStateException if no server URL message was found during bootstrap
   */
  private void bootstrap() {
    List<String> keysToProcess = new LinkedList<>();
    keysToProcess.add(SERVER_URL_OPT);
    processConfigMessages(keysToProcess);
    if (coordinatorServerURL == null) {
      throw new IllegalStateException("coordinator server url is null, while the bootstrap has finished ");
    }
    log.info("Config manager bootstrapped");
  }

  /**
   * Skips all the unread messages up to the time this function is called.
   * This method just reads the messages, and it does not react to them or change any configuration of the system.
   */
  private void skipUnreadMessages() {
    // An empty key list means every message is read but none is handled.
    processConfigMessages(new LinkedList<String>());
    log.info("Config manager skipped messages");
  }

  /**
   * This function reads all the messages with "set-config" type added to the coordinator stream since the last time the method was invoked
   */
  public void processConfigMessages() {
    List<String> keysToProcess = new LinkedList<>();
    keysToProcess.add(YARN_CONTAINER_COUNT_OPT);
    keysToProcess.add(SERVER_URL_OPT);
    processConfigMessages(keysToProcess);
  }

  /**
   * This function reads all the messages with "set-config" type added to the coordinator stream since the last time the method was invoked
   *
   * @param keysToProcess a list of keys to process. Only messages with these keys will call their handler function,
   *                      and other messages will be skipped. If the list is empty all messages will be skipped.
   */
  @SuppressWarnings("unchecked")
  private void processConfigMessages(List<String> keysToProcess) {
    if (!coordinatorStreamConsumer.hasNewMessages(coordinatorStreamIterator)) {
      return;
    }
    if (keysToProcess == null) {
      throw new IllegalArgumentException("The keys to process list is null");
    }
    for (CoordinatorStreamMessage message : coordinatorStreamConsumer.getUnreadMessages(coordinatorStreamIterator, SetConfig.TYPE)) {
      String key = null;
      try {
        SetConfig setConfigMessage = new SetConfig(message);
        key = setConfigMessage.getKey();
        // The message payload nests the actual value under "values" -> "value".
        Map<String, String> valuesMap = (Map<String, String>) setConfigMessage.getMessageMap().get("values");
        String value = null;
        if (valuesMap != null) {
          value = valuesMap.get("value");
        }
        log.debug("Received set-config message with key: " + key + " and value: " + value);
        if (keysToProcess.contains(key)) {
          if (key.equals(YARN_CONTAINER_COUNT_OPT)) {
            handleYarnContainerChange(value);
          } else if (key.equals(SERVER_URL_OPT)) {
            handleServerURLChange(value);
          } else {
            log.info("Setting the " + key + " configuration is currently not supported, skipping the message");
          }
        }
        //TODO: change the handlers to implement a common interface, to make them pluggable
      } catch (Exception e) {
        // Malformed or failing messages are skipped so one bad message cannot stall the loop.
        log.debug("Error in reading a message, skipping message with key " + key);
      }
    }
  }

  /**
   * This method handles setConfig messages that want to change the url of the server the JobCoordinator has brought up.
   *
   * @param newServerURL the new value of the server URL
   */
  private void handleServerURLChange(String newServerURL) {
    this.coordinatorServerURL = newServerURL;
    log.info("Server URL being set to " + newServerURL);
  }

  /**
   * This method handles setConfig messages that want to change the number of containers of a job.
   * It validates the requested count, kills the running YARN application, waits (up to ~10s)
   * for the KILLED state, and then resubmits the job so it restarts with the new container count.
   *
   * @param containerCountAsString the new number of containers in a String format
   * @throws IOException if communication with the YARN RM fails
   * @throws YarnException if the YARN RM rejects a request
   */
  private void handleYarnContainerChange(String containerCountAsString) throws IOException, YarnException {
    String applicationId = yarnUtil.getRunningAppId(jobName, jobID);
    int containerCount = Integer.valueOf(containerCountAsString);
    //checking the input is valid
    int currentNumTask = getCurrentNumTasks();
    int currentNumContainers = getCurrentNumContainers();
    if (containerCount == currentNumContainers) {
      log.error("The new number of containers is equal to the current number of containers, skipping this message");
      return;
    }
    if (containerCount <= 0) {
      log.error("The number of containers cannot be zero or less, skipping this message");
      return;
    }
    if (containerCount > currentNumTask) {
      // A container with no tasks would be idle, so more containers than tasks is rejected.
      log.error("The number of containers cannot be more than the number of task, skipping this message");
      return;
    }
    //killing the current job
    log.info("Killing the current job");
    yarnUtil.killApplication(applicationId);
    //reset the global variables
    coordinatorServerURL = null;
    try {
      //waiting for the job to be killed
      String state = yarnUtil.getApplicationState(applicationId);
      Thread.sleep(1000);
      int countSleep = 1;
      while (!state.equals("KILLED")) {
        state = yarnUtil.getApplicationState(applicationId);
        log.info("Job kill signal sent, but job not killed yet for " + applicationId + ". Sleeping for another 1000ms");
        Thread.sleep(1000);
        countSleep++;
        if (countSleep > 10) {
          throw new IllegalStateException("Job has not been killed after 10 attempts.");
        }
      }
    } catch (InterruptedException e) {
      e.printStackTrace();
      Thread.currentThread().interrupt();
    }
    log.info("Killed the current job successfully");
    //start the job again
    log.info("Staring the job again");
    // Drop messages produced while the job was down so they are not replayed after restart.
    skipUnreadMessages();
    JobRunner jobRunner = new JobRunner(config);
    jobRunner.run(false);
  }

  /**
   * This method returns the number of tasks in the job. It works by querying the server, and getting the job model.
   * Then it extracts the number of tasks from the job model
   *
   * @return current number of tasks in the job
   */
  public int getCurrentNumTasks() {
    int currentNumTasks = 0;
    for (ContainerModel containerModel : SamzaContainer.readJobModel(coordinatorServerURL, defaultReadJobModelDelayMs).getContainers().values()) {
      currentNumTasks += containerModel.getTasks().size();
    }
    return currentNumTasks;
  }

  /**
   * This method returns the number of containers in the job. It works by querying the server, and getting the job model.
   * Then it extracts the number of containers from the job model
   *
   * @return current number of containers in the job
   */
  public int getCurrentNumContainers() {
    return SamzaContainer.readJobModel(coordinatorServerURL, defaultReadJobModelDelayMs).getContainers().values().size();
  }

  /**
   * Gets the current value of the server URL that the job coordinator is serving the job model on.
   *
   * @return the current server URL. If null, it means the job has not set the server yet.
   */
  public String getCoordinatorServerURL() {
    return coordinatorServerURL;
  }

  /**
   * Main function for using the Config Manager. The main function starts a Config Manager, and reacts to all messages thereafter
   * In order for this module to run, you have to add the following configurations to the config file:
   * yarn.rm.address=localhost //the ip of the resource manager in yarn
   * yarn.rm.port=8088 //the port of the resource manager http server
   * Additionally, the config manger will periodically poll the coordinator stream to see if there are any new messages.
   * This period is set to 100 ms by default. However, it can be configured by adding the following property to the input config file.
   * configManager.polling.interval= &lt; polling interval &gt;
   * To run the code use the following command:
   * {path to samza deployment}/samza/bin/run-config-manager.sh --config-factory={config-factory} --config-path={path to config file of a job}
   *
   * @param args input arguments for running ConfigManager.
   */
  public static void main(String[] args) {
    CommandLine cmdline = new CommandLine();
    OptionSet options = cmdline.parser().parse(args);
    Config config = cmdline.loadConfig(options);
    ConfigManager configManager = new ConfigManager(config);
    configManager.run();
  }
}
thomcz/hrv-band | app/src/main/java/hrv/band/app/ui/view/adapter/ValueAdapter.java | 2601 | package hrv.band.app.ui.view.adapter;
import android.content.Context;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.BaseAdapter;
import android.widget.TextView;
import java.text.DecimalFormat;
import java.util.List;
import hrv.band.app.R;
import hrv.band.app.ui.view.fragment.MeasuredParameterFragment;
import hrv.calc.parameter.HRVParameter;
/**
* Copyright (c) 2017
* Created by Thomas Czogalik on 19.01.2017
* <p>
* This adapter holds the hrv parameters to show in the {@link MeasuredParameterFragment}.
*/
public class ValueAdapter extends BaseAdapter {
    /**
     * The context of activity holding the adapter.
     **/
    private final Context context;
    /**
     * The hrv parameter to display.
     **/
    private final List<HRVParameter> parameters;
    /**
     * Formatter for parameter values (max two decimal places). Created once instead of on
     * every getView() call; adapters are bound on the UI thread, so sharing it is safe.
     **/
    private final DecimalFormat valueFormat = new DecimalFormat("#.##");

    public ValueAdapter(Context context, List<HRVParameter> parameters) {
        this.context = context;
        this.parameters = parameters;
    }

    @Override
    public View getView(int position, View convertView, ViewGroup parent) {
        ViewHolder holder;
        if (convertView == null) {
            // First use of this row: inflate it and cache the child views in a ViewHolder.
            LayoutInflater inflater = (LayoutInflater) context
                    .getSystemService(Context.LAYOUT_INFLATER_SERVICE);
            convertView = inflater.inflate(R.layout.measure_list_item, parent, false);
            holder = new ViewHolder();
            holder.descText = (TextView) convertView.findViewById(R.id.measure_value_desc);
            holder.valueText = (TextView) convertView.findViewById(R.id.hrv_value);
            holder.unitText = (TextView) convertView.findViewById(R.id.measure_value_unit);
            convertView.setTag(holder);
        } else {
            // Recycled row: reuse the cached ViewHolder instead of findViewById lookups.
            holder = (ViewHolder) convertView.getTag();
        }
        if (parameters != null) {
            HRVParameter param = (HRVParameter) getItem(position);
            holder.descText.setText(param.getName());
            holder.valueText.setText(valueFormat.format(param.getValue()));
            holder.unitText.setText(param.getUnit());
        }
        return convertView;
    }

    @Override
    public Object getItem(int i) {
        return parameters.get(i);
    }

    @Override
    public int getCount() {
        // getView() already tolerates a null list; report zero rows instead of throwing an NPE.
        return parameters == null ? 0 : parameters.size();
    }

    @Override
    public long getItemId(int i) {
        return i;
    }

    /**
     * The ViewHolder of this adapter; caches row child views across recycling.
     */
    private static class ViewHolder {
        private TextView descText;
        private TextView valueText;
        private TextView unitText;
    }
}
| apache-2.0 |
mkotelba/sdcct | sdcct-core/src/main/java/gov/hhs/onc/sdcct/utils/SdcctDateUtils.java | 1901 | package gov.hhs.onc.sdcct.utils;
import java.util.TimeZone;
/**
 * Time-unit conversion constants and common time zones. Not instantiable.
 */
public final class SdcctDateUtils {
    // Calendar-unit counts.
    public static final long HOURS_IN_DAY = 24L;
    public static final long MIN_IN_HOUR = 60L;
    public static final long MIN_IN_DAY = MIN_IN_HOUR * HOURS_IN_DAY;

    // Seconds. SEC_IN_YEAR uses the mean Gregorian year (365.2425 days).
    public static final long SEC_IN_MIN = 60L;
    public static final long SEC_IN_HOUR = SEC_IN_MIN * MIN_IN_HOUR;
    public static final long SEC_IN_DAY = SEC_IN_MIN * MIN_IN_DAY;
    public static final long SEC_IN_YEAR = 31556952L;

    // Milliseconds.
    public static final long MS_IN_SEC = 1000L;
    public static final long MS_IN_MIN = MS_IN_SEC * SEC_IN_MIN;
    public static final long MS_IN_HOUR = MS_IN_SEC * SEC_IN_HOUR;
    public static final long MS_IN_DAY = MS_IN_SEC * SEC_IN_DAY;
    public static final long MS_IN_YEAR = MS_IN_SEC * SEC_IN_YEAR;

    // Microseconds.
    public static final long US_IN_MS = 1000L;
    public static final long US_IN_SEC = US_IN_MS * MS_IN_SEC;
    public static final long US_IN_MIN = US_IN_MS * MS_IN_MIN;
    public static final long US_IN_HOUR = US_IN_MS * MS_IN_HOUR;
    public static final long US_IN_DAY = US_IN_MS * MS_IN_DAY;
    public static final long US_IN_YEAR = US_IN_MS * MS_IN_YEAR;

    // Nanoseconds.
    public static final long NS_IN_US = 1000L;
    public static final long NS_IN_MS = NS_IN_US * US_IN_MS;
    public static final long NS_IN_SEC = NS_IN_US * US_IN_SEC;
    public static final long NS_IN_MIN = NS_IN_US * US_IN_MIN;
    public static final long NS_IN_HOUR = NS_IN_US * US_IN_HOUR;
    public static final long NS_IN_DAY = NS_IN_US * US_IN_DAY;
    public static final long NS_IN_YEAR = NS_IN_US * US_IN_YEAR;

    // Time zones. DEFAULT_TZ is captured at class-load time.
    public static final TimeZone DEFAULT_TZ = TimeZone.getDefault();
    public static final String UTC_TZ_ID = "UTC";
    public static final String UTC_ZULU_TZ_ID = "Z";
    public static final TimeZone UTC_TZ = TimeZone.getTimeZone(UTC_TZ_ID);

    // Utility class; never instantiated.
    private SdcctDateUtils() {
    }
}
| apache-2.0 |
10045125/spring-boot | spring-boot-autoconfigure/src/main/java/org/springframework/boot/autoconfigure/security/SecurityProperties.java | 6846 | /*
* Copyright 2012-2014 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.boot.autoconfigure.security;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.UUID;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.core.Ordered;
import org.springframework.security.config.http.SessionCreationPolicy;
import org.springframework.util.StringUtils;
/**
* Properties for the security aspects of an application.
*
* @author Dave Syer
*/
@ConfigurationProperties(prefix = "security", ignoreUnknownFields = false)
public class SecurityProperties implements SecurityPrerequisite {
/**
* Order before the basic authentication access control provided by Boot. This is a
* useful place to put user-defined access rules if you want to override the default
* access rules.
*/
public static final int ACCESS_OVERRIDE_ORDER = SecurityProperties.BASIC_AUTH_ORDER - 2;
/**
* Order applied to the WebSecurityConfigurerAdapter that is used to configure basic
* authentication for application endpoints. If you want to add your own
* authentication for all or some of those endpoints the best thing to do is add your
* own WebSecurityConfigurerAdapter with lower order.
*/
public static final int BASIC_AUTH_ORDER = Ordered.LOWEST_PRECEDENCE - 5;
/**
* Order applied to the WebSecurityConfigurer that ignores standard static resource
* paths.
*/
public static final int IGNORED_ORDER = Ordered.HIGHEST_PRECEDENCE;
/**
* The default order of Spring Security's Filter
*/
public static final int DEFAULT_FILTER_ORDER = 0;
/**
* Enable secure channel for all requests.
*/
private boolean requireSsl;
/**
* Enable Cross Site Request Forgery support.
*/
// Flip this when session creation is disabled by default
private boolean enableCsrf = false;
private Basic basic = new Basic();
private final Headers headers = new Headers();
/**
* Session creation policy (always, never, if_required, stateless).
*/
private SessionCreationPolicy sessions = SessionCreationPolicy.STATELESS;
/**
* Comma-separated list of paths to exclude from the default secured paths.
*/
private List<String> ignored = new ArrayList<String>();
private final User user = new User();
/**
* Security filter chain order.
*/
private int filterOrder = DEFAULT_FILTER_ORDER;
public Headers getHeaders() {
return this.headers;
}
public User getUser() {
return this.user;
}
public SessionCreationPolicy getSessions() {
return this.sessions;
}
public void setSessions(SessionCreationPolicy sessions) {
this.sessions = sessions;
}
public Basic getBasic() {
return this.basic;
}
public void setBasic(Basic basic) {
this.basic = basic;
}
public boolean isRequireSsl() {
return this.requireSsl;
}
public void setRequireSsl(boolean requireSsl) {
this.requireSsl = requireSsl;
}
public boolean isEnableCsrf() {
return this.enableCsrf;
}
public void setEnableCsrf(boolean enableCsrf) {
this.enableCsrf = enableCsrf;
}
public void setIgnored(List<String> ignored) {
this.ignored = new ArrayList<String>(ignored);
}
public List<String> getIgnored() {
return this.ignored;
}
public int getFilterOrder() {
return this.filterOrder;
}
public void setFilterOrder(int filterOrder) {
this.filterOrder = filterOrder;
}
public static class Headers {
public static enum HSTS {
NONE, DOMAIN, ALL
}
/**
* Enable cross site scripting (XSS) protection.
*/
private boolean xss;
/**
* Enable cache control HTTP headers.
*/
private boolean cache;
/**
* Enable "X-Frame-Options" header.
*/
private boolean frame;
/**
* Enable "X-Content-Type-Options" header.
*/
private boolean contentType;
/**
* HTTP Strict Transport Security (HSTS) mode (none, domain, all).
*/
private HSTS hsts = HSTS.ALL;
public boolean isXss() {
return this.xss;
}
public void setXss(boolean xss) {
this.xss = xss;
}
public boolean isCache() {
return this.cache;
}
public void setCache(boolean cache) {
this.cache = cache;
}
public boolean isFrame() {
return this.frame;
}
public void setFrame(boolean frame) {
this.frame = frame;
}
public boolean isContentType() {
return this.contentType;
}
public void setContentType(boolean contentType) {
this.contentType = contentType;
}
public HSTS getHsts() {
return this.hsts;
}
public void setHsts(HSTS hsts) {
this.hsts = hsts;
}
}
public static class Basic {
/**
* Enable basic authentication.
*/
private boolean enabled = true;
/**
* HTTP basic realm name.
*/
private String realm = "Spring";
/**
* Comma-separated list of paths to secure.
*/
private String[] path = new String[] { "/**" };
public boolean isEnabled() {
return this.enabled;
}
public void setEnabled(boolean enabled) {
this.enabled = enabled;
}
public String getRealm() {
return this.realm;
}
public void setRealm(String realm) {
this.realm = realm;
}
public String[] getPath() {
return this.path;
}
public void setPath(String... paths) {
this.path = paths;
}
}
public static class User {
/**
* Default user name.
*/
private String name = "user";
/**
* Password for the default user name.
*/
private String password = UUID.randomUUID().toString();
/**
* Granted roles for the default user name.
*/
private List<String> role = new ArrayList<String>(Arrays.asList("USER"));
private boolean defaultPassword = true;
public String getName() {
return this.name;
}
public void setName(String name) {
this.name = name;
}
public String getPassword() {
return this.password;
}
public void setPassword(String password) {
if (password.startsWith("${") && password.endsWith("}")
|| !StringUtils.hasLength(password)) {
return;
}
this.defaultPassword = false;
this.password = password;
}
public List<String> getRole() {
return this.role;
}
public void setRole(List<String> role) {
this.role = new ArrayList<String>(role);
}
public boolean isDefaultPassword() {
return this.defaultPassword;
}
}
}
| apache-2.0 |
shroman/ignite | modules/jms11/src/test/java/org/apache/ignite/stream/jms11/TestTransformers.java | 4279 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.stream.jms11;
import java.io.Serializable;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import javax.jms.JMSException;
import javax.jms.ObjectMessage;
import javax.jms.TextMessage;
/**
* Test transformers for JmsStreamer tests.
*
* @author Raul Kripalani
*/
/**
 * Test transformers for JmsStreamer tests.
 *
 * @author Raul Kripalani
 */
public class TestTransformers {
    /**
     * Returns a transformer for JMS {@link TextMessage}s, capable of extracting many tuples from a single message,
     * if pipe characters are encountered. Each pipe-separated segment is a "key,value" pair.
     *
     * @return
     */
    public static MessageTransformer<TextMessage, String, String> forTextMessage() {
        return new MessageTransformer<TextMessage, String, String>() {
            @Override
            public Map<String, String> apply(TextMessage message) {
                String payload;
                try {
                    payload = message.getText();
                }
                catch (JMSException e) {
                    e.printStackTrace();
                    return Collections.emptyMap();
                }
                Map<String, String> tuples = new HashMap<>();
                for (String segment : payload.split("\\|")) {
                    String[] pair = segment.split(",");
                    tuples.put(pair[0], pair[1]);
                }
                return tuples;
            }
        };
    }

    /**
     * Returns a transformer for JMS {@link ObjectMessage}s, capable of extracting many tuples from a single message,
     * if the payload is a {@link Collection}. A single {@link TestObject} payload yields one tuple.
     *
     * @return
     */
    public static MessageTransformer<ObjectMessage, String, String> forObjectMessage() {
        return new MessageTransformer<ObjectMessage, String, String>() {
            @Override @SuppressWarnings("unchecked")
            public Map<String, String> apply(ObjectMessage message) {
                Object payload;
                try {
                    payload = message.getObject();
                }
                catch (JMSException e) {
                    e.printStackTrace();
                    return Collections.emptyMap();
                }
                Map<String, String> tuples = new HashMap<>();
                if (payload instanceof Collection) {
                    for (TestObject item : (Collection<TestObject>) payload) {
                        tuples.put(item.getKey(), item.getValue());
                    }
                }
                else if (payload instanceof TestObject) {
                    TestObject item = (TestObject) payload;
                    tuples.put(item.getKey(), item.getValue());
                }
                return tuples;
            }
        };
    }

    /**
     * Returns a transformer that deliberately produces no entries (a null map) for every message,
     * used to exercise the streamer's handling of empty transformation results.
     */
    public static MessageTransformer<TextMessage, String, String> generateNoEntries() {
        return new MessageTransformer<TextMessage, String, String>() {
            @Override
            public Map<String, String> apply(TextMessage message) {
                return null;
            }
        };
    }

    /**
     * Simple serializable key/value payload used by the object-message tests.
     */
    public static class TestObject implements Serializable {
        private static final long serialVersionUID = -7332027566186690945L;
        private final String key;
        private final String value;

        public TestObject(String key, String value) {
            this.key = key;
            this.value = value;
        }

        public String getKey() {
            return key;
        }

        public String getValue() {
            return value;
        }
    }
}
| apache-2.0 |
nkabir/jBloomberg | src/test/java/com/assylias/jbloomberg/MockSession.java | 1787 | /*
* Copyright (C) 2012 - present by Yann Le Tallec.
* Please see distribution for license.
*/
package com.assylias.jbloomberg;
import com.bloomberglp.blpapi.Event;
import com.bloomberglp.blpapi.EventHandler;
import com.bloomberglp.blpapi.Message;
import com.bloomberglp.blpapi.Session;
import com.bloomberglp.blpapi.SessionOptions;
import java.io.IOException;
import java.util.List;
import mockit.Mock;
import mockit.MockUp;
public class MockSession extends MockUp<Session> {
    // Handler captured from the mocked Session constructor.
    private EventHandler handler;
    // Event delivered when startAsync() is invoked; null means startAsync() fails.
    private Event startAsyncEvent;
    // Whether openService(..) succeeds; defaults to false (throws IOException).
    private boolean openServiceOk;

    /** Queues a SESSION_STATUS event announcing a successful session start. */
    public MockSession simulateStartAsyncOk() {
        startAsyncEvent = sessionStatusEvent("SessionStarted");
        return this;
    }

    /** Queues a SESSION_STATUS event announcing a failed session start. */
    public MockSession simulateSessionStartupFailure() {
        startAsyncEvent = sessionStatusEvent("SessionStartupFailure");
        return this;
    }

    /** Makes subsequent openService(..) calls succeed instead of throwing. */
    public MockSession setOpenServiceOk() {
        openServiceOk = true;
        return this;
    }

    @Mock
    public void $init(SessionOptions ignore, EventHandler handler) {
        this.handler = handler;
    }

    @Mock
    public void startAsync() throws IOException {
        if (startAsyncEvent == null) {
            throw new IOException();
        }
        handler.processEvent(startAsyncEvent, this.getMockInstance());
    }

    @Mock
    public boolean openService(String serviceUri) throws IOException {
        if (openServiceOk) {
            return true;
        }
        throw new IOException();
    }

    /** Builds a SESSION_STATUS event wrapping a single message of the given type. */
    private Event sessionStatusEvent(String messageType) {
        List<Message> msgs = new MockMessageList(messageType).getList();
        return new MockEvent(Event.EventType.SESSION_STATUS, msgs);
    }
}
| apache-2.0 |
freeVM/freeVM | enhanced/java/classlib/modules/imageio/src/main/java/javax/imageio/event/IIOReadUpdateListener.java | 1989 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* @author Sergey I. Salishev
*/
package javax.imageio.event;
import java.awt.image.BufferedImage;
import java.util.EventListener;
import javax.imageio.ImageReader;
/**
* @author Sergey I. Salishev
*/
public interface IIOReadUpdateListener extends EventListener {

    /**
     * Notifies this listener that a rectangular region of the image has been
     * updated with new pixel data.
     *
     * @param source   the {@code ImageReader} performing the read.
     * @param theImage the {@code BufferedImage} being updated.
     * @param minX     the x coordinate of the upper-left updated pixel.
     * @param minY     the y coordinate of the upper-left updated pixel.
     * @param width    the width of the updated region, in pixels.
     * @param height   the height of the updated region, in pixels.
     * @param periodX  the horizontal spacing between updated pixels (1 = no gaps).
     * @param periodY  the vertical spacing between updated pixels (1 = no gaps).
     * @param bands    the indices of the bands that were updated.
     */
    void imageUpdate(ImageReader source, BufferedImage theImage, int minX,
            int minY, int width, int height, int periodX, int periodY,
            int[] bands);

    /**
     * Notifies this listener that the current read pass over the image has completed.
     *
     * @param source   the {@code ImageReader} performing the read.
     * @param theImage the {@code BufferedImage} being updated.
     */
    void passComplete(ImageReader source, BufferedImage theImage);

    /**
     * Notifies this listener that a read pass over the image has started.
     *
     * @param source   the {@code ImageReader} performing the read.
     * @param theImage the {@code BufferedImage} being updated.
     * @param pass     the index of the current pass.
     * @param minPass  the index of the first pass that will be decoded.
     * @param maxPass  the index of the last pass that will be decoded.
     * @param minX     the x coordinate of the first updated pixel.
     * @param minY     the y coordinate of the first updated pixel.
     * @param periodX  the horizontal spacing between updated pixels.
     * @param periodY  the vertical spacing between updated pixels.
     * @param bands    the indices of the bands that may be updated.
     */
    void passStarted(ImageReader source, BufferedImage theImage, int pass,
            int minPass, int maxPass, int minX, int minY, int periodX,
            int periodY, int[] bands);

    /**
     * Notifies this listener that the current read pass over a thumbnail has completed.
     *
     * @param source   the {@code ImageReader} performing the read.
     * @param theImage the thumbnail {@code BufferedImage} being updated.
     */
    void thumbnailPassComplete(ImageReader source, BufferedImage theImage);

    /**
     * Notifies this listener that a read pass over a thumbnail has started.
     *
     * @param source       the {@code ImageReader} performing the read.
     * @param theThumbnail the thumbnail {@code BufferedImage} being updated.
     * @param pass         the index of the current pass.
     * @param minPass      the index of the first pass that will be decoded.
     * @param maxPass      the index of the last pass that will be decoded.
     * @param minX         the x coordinate of the first updated pixel.
     * @param minY         the y coordinate of the first updated pixel.
     * @param periodX      the horizontal spacing between updated pixels.
     * @param periodY      the vertical spacing between updated pixels.
     * @param bands        the indices of the bands that may be updated.
     */
    void thumbnailPassStarted(ImageReader source, BufferedImage theThumbnail,
            int pass, int minPass, int maxPass, int minX, int minY,
            int periodX, int periodY, int[] bands);

    /**
     * Notifies this listener that a rectangular region of a thumbnail has been
     * updated with new pixel data.
     *
     * @param source       the {@code ImageReader} performing the read.
     * @param theThumbnail the thumbnail {@code BufferedImage} being updated.
     * @param minX         the x coordinate of the upper-left updated pixel.
     * @param minY         the y coordinate of the upper-left updated pixel.
     * @param width        the width of the updated region, in pixels.
     * @param height       the height of the updated region, in pixels.
     * @param periodX      the horizontal spacing between updated pixels.
     * @param periodY      the vertical spacing between updated pixels.
     * @param bands        the indices of the bands that were updated.
     */
    void thumbnailUpdate(ImageReader source, BufferedImage theThumbnail,
            int minX, int minY, int width, int height, int periodX,
            int periodY, int[] bands);
}
| apache-2.0 |
pperalta/ignite | modules/core/src/main/java/org/apache/ignite/internal/binary/BinaryObjectExImpl.java | 9818 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.internal.binary;
import java.math.BigDecimal;
import java.nio.ByteBuffer;
import java.util.Arrays;
import java.util.IdentityHashMap;
import java.util.Iterator;
import java.util.Map;
import org.apache.ignite.IgniteException;
import org.apache.ignite.binary.BinaryObject;
import org.apache.ignite.binary.BinaryObjectBuilder;
import org.apache.ignite.binary.BinaryObjectException;
import org.apache.ignite.binary.BinaryType;
import org.apache.ignite.internal.binary.builder.BinaryObjectBuilderImpl;
import org.apache.ignite.internal.util.typedef.internal.S;
import org.apache.ignite.internal.util.typedef.internal.SB;
import org.apache.ignite.lang.IgniteUuid;
import org.jetbrains.annotations.Nullable;
/**
* Internal binary object interface.
*/
public abstract class BinaryObjectExImpl implements BinaryObjectEx {
    /**
     * @return Length.
     */
    public abstract int length();

    /**
     * @return Object start.
     */
    public abstract int start();

    /**
     * @return {@code True} if object is array based.
     */
    public abstract boolean hasArray();

    /**
     * @return Object array if object is array based, otherwise {@code null}.
     */
    public abstract byte[] array();

    /**
     * @return Object offheap address is object is offheap based, otherwise 0.
     */
    public abstract long offheapAddress();

    /**
     * Gets field value.
     *
     * @param fieldId Field ID.
     * @return Field value.
     * @throws org.apache.ignite.binary.BinaryObjectException In case of any other error.
     */
    @Nullable public abstract <F> F field(int fieldId) throws BinaryObjectException;

    /** {@inheritDoc} */
    @Override public int enumOrdinal() throws BinaryObjectException {
        throw new BinaryObjectException("Object is not enum.");
    }

    /**
     * Get offset of data begin.
     *
     * @return Field value.
     */
    public abstract int dataStartOffset();

    /**
     * Get offset of the footer begin.
     *
     * @return Field value.
     */
    public abstract int footerStartOffset();

    /**
     * Get field by offset.
     *
     * @param order Field offset.
     * @return Field value.
     */
    @Nullable public abstract <F> F fieldByOrder(int order);

    /**
     * Create field comparator.
     *
     * @return Comparator.
     */
    public abstract BinarySerializedFieldComparator createFieldComparator();

    /**
     * Writes field value defined by the given field offset to the given byte buffer.
     *
     * @param fieldOffset Field offset.
     * @param buf Target byte buffer.
     * @return Boolean flag indicating whether the field was successfully written to the buffer, {@code false}
     *     if there is no enough space for the field in the buffer.
     */
    protected abstract boolean writeFieldByOrder(int fieldOffset, ByteBuffer buf);

    /**
     * @param ctx Reader context.
     * @param fieldName Field name.
     * @return Field value.
     */
    @Nullable protected abstract <F> F field(BinaryReaderHandles ctx, String fieldName);

    /**
     * @return {@code True} if object has schema.
     */
    public abstract boolean hasSchema();

    /**
     * Get schema ID.
     *
     * @return Schema ID.
     */
    public abstract int schemaId();

    /**
     * Create schema for object.
     *
     * @return Schema.
     */
    public abstract BinarySchema createSchema();

    /**
     * Get binary context.
     *
     * @return Binary context.
     */
    public abstract BinaryContext context();

    /** {@inheritDoc} */
    @Override public BinaryObjectBuilder toBuilder() throws BinaryObjectException {
        return BinaryObjectBuilderImpl.wrap(this);
    }

    /** {@inheritDoc} */
    @Override public BinaryObject clone() throws CloneNotSupportedException {
        return (BinaryObject)super.clone();
    }

    /**
     * {@inheritDoc}
     *
     * Equality is delegated to the type's configured {@link BinaryIdentityResolver}.
     * NOTE(review): no matching {@code hashCode()} override is visible here — it is
     * presumably provided by subclasses or the resolver; confirm the two stay consistent.
     */
    @Override public boolean equals(Object other) {
        if (other == this)
            return true;

        if (!(other instanceof BinaryObject))
            return false;

        BinaryIdentityResolver identity = context().identity(typeId());

        return identity.equals(this, (BinaryObject)other);
    }

    /** {@inheritDoc} */
    @Override public String toString() {
        try {
            BinaryReaderHandles ctx = new BinaryReaderHandles();

            ctx.put(start(), this);

            return toString(ctx, new IdentityHashMap<BinaryObject, Integer>());
        }
        catch (BinaryObjectException e) {
            throw new IgniteException("Failed to create string representation of binary object.", e);
        }
    }

    /**
     * @param ctx Reader context.
     * @param handles Handles for already traversed objects.
     * @return String representation.
     */
    private String toString(BinaryReaderHandles ctx, IdentityHashMap<BinaryObject, Integer> handles) {
        int idHash = System.identityHashCode(this);
        int hash = hashCode();

        BinaryType meta;

        try {
            meta = rawType();
        }
        catch (BinaryObjectException ignore) {
            // Metadata may be unavailable; fall back to the terse representation below.
            meta = null;
        }

        if (meta == null || !S.INCLUDE_SENSITIVE)
            return S.toString(S.INCLUDE_SENSITIVE ? BinaryObject.class.getSimpleName() : "BinaryObject",
                "idHash", idHash, false,
                "hash", hash, false,
                "typeId", typeId(), true);

        // Remember this object so cyclic references are printed once, not recursed into.
        handles.put(this, idHash);

        SB buf = new SB(meta.typeName());

        if (meta.fieldNames() != null) {
            buf.a(" [idHash=").a(idHash).a(", hash=").a(hash);

            for (String name : meta.fieldNames()) {
                Object val = field(ctx, name);

                buf.a(", ").a(name).a('=');

                appendValue(val, buf, ctx, handles);
            }

            buf.a(']');
        }

        return buf.toString();
    }

    /**
     * @param val Value to append.
     * @param buf Buffer to append to.
     * @param ctx Reader context.
     * @param handles Handles for already traversed objects.
     */
    @SuppressWarnings("unchecked")
    private void appendValue(Object val, SB buf, BinaryReaderHandles ctx,
        IdentityHashMap<BinaryObject, Integer> handles) {
        if (val instanceof byte[])
            buf.a(Arrays.toString((byte[]) val));
        else if (val instanceof short[])
            buf.a(Arrays.toString((short[])val));
        else if (val instanceof int[])
            buf.a(Arrays.toString((int[])val));
        else if (val instanceof long[])
            buf.a(Arrays.toString((long[])val));
        else if (val instanceof float[])
            buf.a(Arrays.toString((float[])val));
        else if (val instanceof double[])
            buf.a(Arrays.toString((double[])val));
        else if (val instanceof char[])
            buf.a(Arrays.toString((char[])val));
        else if (val instanceof boolean[])
            buf.a(Arrays.toString((boolean[]) val));
        else if (val instanceof BigDecimal[])
            buf.a(Arrays.toString((BigDecimal[])val));
        else if (val instanceof IgniteUuid)
            buf.a(val);
        else if (val instanceof BinaryObjectExImpl) {
            BinaryObjectExImpl po = (BinaryObjectExImpl)val;

            Integer idHash0 = handles.get(val);

            if (idHash0 != null) { // Circular reference: print a stub instead of recursing forever.
                BinaryType meta0 = po.rawType();

                assert meta0 != null;

                // NOTE(review): the label says "hash" but the value is the identity hash
                // recorded above — confirm this is the intended output.
                buf.a(meta0.typeName()).a(" [hash=").a(idHash0).a(", ...]");
            }
            else
                buf.a(po.toString(ctx, handles));
        }
        else if (val instanceof Object[]) {
            Object[] arr = (Object[])val;

            buf.a('[');

            for (int i = 0; i < arr.length; i++) {
                Object o = arr[i];

                appendValue(o, buf, ctx, handles);

                if (i < arr.length - 1)
                    buf.a(", ");
            }
        }
        else if (val instanceof Iterable) {
            Iterable<Object> col = (Iterable<Object>)val;

            buf.a(col.getClass().getSimpleName()).a(" {");

            Iterator it = col.iterator();

            while (it.hasNext()) {
                Object o = it.next();

                appendValue(o, buf, ctx, handles);

                if (it.hasNext())
                    buf.a(", ");
            }

            buf.a('}');
        }
        else if (val instanceof Map) {
            Map<Object, Object> map = (Map<Object, Object>)val;

            buf.a(map.getClass().getSimpleName()).a(" {");

            Iterator<Map.Entry<Object, Object>> it = map.entrySet().iterator();

            while (it.hasNext()) {
                Map.Entry<Object, Object> e = it.next();

                appendValue(e.getKey(), buf, ctx, handles);

                buf.a('=');

                appendValue(e.getValue(), buf, ctx, handles);

                if (it.hasNext())
                    buf.a(", ");
            }

            buf.a('}');
        }
        else
            buf.a(val);
    }
}
| apache-2.0 |
liurl3/product-private-paas | tools/ppaas-migration/4.0.0/src/main/java/org/wso2/ppaas/rest/endpoint/bean/autoscaler/policy/autoscale/AutoscalePolicy.java | 1652 | /*
* Copyright (c) 2005-2015, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.wso2.ppaas.rest.endpoint.bean.autoscaler.policy.autoscale;
import javax.xml.bind.annotation.XmlRootElement;
@XmlRootElement
public class AutoscalePolicy {

    // Unique identifier of the policy.
    private String id;
    // Display name of the policy.
    private String displayName;
    // Free-text description of the policy.
    private String description;
    // Thresholds that trigger autoscaling decisions.
    private LoadThresholds loadThresholds;

    public String getId() {
        return id;
    }

    public void setId(String id) {
        this.id = id;
    }

    public String getDisplayName() {
        return displayName;
    }

    public void setDisplayName(String displayName) {
        this.displayName = displayName;
    }

    public String getDescription() {
        return description;
    }

    public void setDescription(String description) {
        this.description = description;
    }

    public LoadThresholds getLoadThresholds() {
        return loadThresholds;
    }

    public void setLoadThresholds(LoadThresholds loadThresholds) {
        this.loadThresholds = loadThresholds;
    }
}
| apache-2.0 |
mdecourci/assertj-core | src/test/java/org/assertj/core/api/Assertions_assertThat_with_Iterator_Test.java | 2358 | /**
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*
* Copyright 2012-2015 the original author or authors.
*/
package org.assertj.core.api;
import static java.util.Arrays.asList;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.util.Sets.newLinkedHashSet;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verifyZeroInteractions;
import java.util.Iterator;
import org.junit.Test;
/**
* Tests for <code>{@link Assertions#assertThat(Iterator)}</code>.
*
* @author Julien Meddah
* @author Joel Costigliola
* @author Mikhail Mazursky
*/
public class Assertions_assertThat_with_Iterator_Test {

    // assertThat(..) must wrap the given collection in a non-null assertion object.
    @Test
    public void should_create_Assert() {
        AbstractIterableAssert<?, ? extends Iterable<? extends Object>, Object> assertions = Assertions.assertThat(newLinkedHashSet());
        assertThat(assertions).isNotNull();
    }

    // The iterator's elements become the assertion's 'actual' value.
    @Test
    public void should_initialise_actual() {
        Iterator<String> names = asList("Luke", "Leia").iterator();
        AbstractIterableAssert<?, ? extends Iterable<? extends String>, String> assertions = assertThat(names);
        assertThat(assertions.actual).containsOnly("Leia", "Luke");
    }

    // A null iterator is accepted and yields a null 'actual'.
    @Test
    public void should_allow_null() {
        AbstractIterableAssert<?, ? extends Iterable<? extends String>, String> assertions = assertThat((Iterator<String>) null);
        assertThat(assertions.actual).isNull();
    }

    // isNotNull() must not advance the iterator (verified via mock interactions).
    @Test
    public void should_not_consume_iterator_when_asserting_non_null() throws Exception {
        Iterator<?> iterator = mock(Iterator.class);
        assertThat(iterator).isNotNull();
        verifyZeroInteractions(iterator);
    }

    // Even though an Iterator is single-use, the assertion can be applied twice,
    // implying the elements are materialized up front.
    @Test
    public void iterator_can_be_asserted_twice_even_though_it_can_be_iterated_only_once() throws Exception {
        Iterator<String> names = asList("Luke", "Leia").iterator();
        assertThat(names).containsExactly("Luke", "Leia").containsExactly("Luke", "Leia");
    }
}
todotobe1/StormCV | stormcv/src/main/java/nl/tno/stormcv/model/Feature.java | 3179 | package nl.tno.stormcv.model;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import backtype.storm.tuple.Tuple;
/**
* This {@link CVParticle} implementation represents a single feature calculated for {@link Frame} and has the following fields:
* <ul>
* <li>name: the name of the feature like 'SIFT', 'SURF', 'HOG' etc</li>
* <li>duration: the duration of the feature in case it describes a temporal aspect of multiple frames</li>
* <li>sparseDescriptors: a list with {@link Descriptor} objects used to described sparse features like SIFT</li>
* <li>denseDescriptors: a three dimensional float array much like the OpenCV Mat object which can be used to represent
* dense features like dense Optical Flow</li>
* </ul>
* It is not always clear how a specific descriptor should be stored and it is typically up to the characteristics of the
* topology and context what is the best way to go.
*
* @author Corne Versloot
*
*/
public class Feature extends CVParticle{
private String name;
private long duration;
private List<Descriptor> sparseDescriptors = new ArrayList<Descriptor>();
private float[][][] denseDescriptors = new float[0][0][0];
public Feature(String streamId, long sequenceNr, String name, long duration, List<Descriptor> sparseDescriptors, float[][][] denseDescriptors) {
super(streamId, sequenceNr);
this.name = name;
this.duration = duration;
if(sparseDescriptors != null){
this.sparseDescriptors = sparseDescriptors;
}
if(denseDescriptors != null){
this.denseDescriptors = denseDescriptors;
}
}
public Feature(Tuple tuple, String name, long duration, List<Descriptor> sparseDescriptors, float[][][] denseDescriptors) {
super(tuple);
this.name = name;
this.duration = duration;
if(sparseDescriptors != null){
this.sparseDescriptors = sparseDescriptors;
}
if(denseDescriptors != null){
this.denseDescriptors = denseDescriptors;
}
}
public String getName() {
return name;
}
public List<Descriptor> getSparseDescriptors() {
return sparseDescriptors;
}
public float[][][] getDenseDescriptors(){
return denseDescriptors;
}
public long getDuration(){
return this.duration;
}
public Feature deepCopy(){
float[][][] denseCopy = new float[denseDescriptors.length][][];
for(int x=0; x<denseDescriptors.length; x++){
denseCopy[x] = new float[denseDescriptors[x].length][];
for(int y=0; y<denseDescriptors[x].length; y++){
denseCopy[x][y] = Arrays.copyOf(denseDescriptors[x][y], denseDescriptors[x][y].length);
}
}
List<Descriptor> sparseCopy = new ArrayList<Descriptor>(this.sparseDescriptors.size());
for(Descriptor d : sparseDescriptors){
sparseCopy.add(d.deepCopy());
}
Feature copyFeature = new Feature(new String(this.getStreamId()), this.getSequenceNr(), new String(this.getName()), this.getDuration(),
sparseCopy, denseCopy);
copyFeature.setRequestId(getRequestId());
copyFeature.setMetadata(this.getMetadata());
return copyFeature;
}
public String toString(){
return "Feature {stream:"+getStreamId()+", nr:"+getSequenceNr()+", name: "+name+", descriptors: "+sparseDescriptors+"}";
}
}
| apache-2.0 |
john-tornblom/bridgepoint | org.xtuml.bp.xtext.oal/src-gen/org/xtuml/bp/xtext/oal/xoal/unrelate_statement.java | 3389 | /**
* <copyright>
* </copyright>
*
*/
package org.xtuml.bp.xtext.oal.xoal;
/**
* <!-- begin-user-doc -->
* A representation of the model object '<em><b>unrelate statement</b></em>'.
* <!-- end-user-doc -->
*
* <p>
* The following features are supported:
* <ul>
* <li>{@link org.xtuml.bp.xtext.oal.xoal.unrelate_statement#getA1 <em>A1</em>}</li>
* <li>{@link org.xtuml.bp.xtext.oal.xoal.unrelate_statement#getA2 <em>A2</em>}</li>
* <li>{@link org.xtuml.bp.xtext.oal.xoal.unrelate_statement#getA3 <em>A3</em>}</li>
* </ul>
* </p>
*
* @see org.xtuml.bp.xtext.oal.xoal.XoalPackage#getunrelate_statement()
* @model
* @generated
*/
// NOTE: EMF-generated model code (see the @generated tags below) — prefer
// regenerating from the Xtext/Ecore model over hand-editing this interface.
public interface unrelate_statement extends statement
{
  /**
   * Returns the value of the '<em><b>A1</b></em>' containment reference.
   * <!-- begin-user-doc -->
   * <p>
   * If the meaning of the '<em>A1</em>' containment reference isn't clear,
   * there really should be more of a description here...
   * </p>
   * <!-- end-user-doc -->
   * @return the value of the '<em>A1</em>' containment reference.
   * @see #setA1(inst_ref_var)
   * @see org.xtuml.bp.xtext.oal.xoal.XoalPackage#getunrelate_statement_A1()
   * @model containment="true"
   * @generated
   */
  inst_ref_var getA1();

  /**
   * Sets the value of the '{@link org.xtuml.bp.xtext.oal.xoal.unrelate_statement#getA1 <em>A1</em>}' containment reference.
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @param value the new value of the '<em>A1</em>' containment reference.
   * @see #getA1()
   * @generated
   */
  void setA1(inst_ref_var value);

  /**
   * Returns the value of the '<em><b>A2</b></em>' containment reference.
   * <!-- begin-user-doc -->
   * <p>
   * If the meaning of the '<em>A2</em>' containment reference isn't clear,
   * there really should be more of a description here...
   * </p>
   * <!-- end-user-doc -->
   * @return the value of the '<em>A2</em>' containment reference.
   * @see #setA2(inst_ref_var)
   * @see org.xtuml.bp.xtext.oal.xoal.XoalPackage#getunrelate_statement_A2()
   * @model containment="true"
   * @generated
   */
  inst_ref_var getA2();

  /**
   * Sets the value of the '{@link org.xtuml.bp.xtext.oal.xoal.unrelate_statement#getA2 <em>A2</em>}' containment reference.
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @param value the new value of the '<em>A2</em>' containment reference.
   * @see #getA2()
   * @generated
   */
  void setA2(inst_ref_var value);

  /**
   * Returns the value of the '<em><b>A3</b></em>' attribute.
   * <!-- begin-user-doc -->
   * <p>
   * If the meaning of the '<em>A3</em>' attribute isn't clear,
   * there really should be more of a description here...
   * </p>
   * <!-- end-user-doc -->
   * @return the value of the '<em>A3</em>' attribute.
   * @see #setA3(String)
   * @see org.xtuml.bp.xtext.oal.xoal.XoalPackage#getunrelate_statement_A3()
   * @model
   * @generated
   */
  String getA3();

  /**
   * Sets the value of the '{@link org.xtuml.bp.xtext.oal.xoal.unrelate_statement#getA3 <em>A3</em>}' attribute.
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @param value the new value of the '<em>A3</em>' attribute.
   * @see #getA3()
   * @generated
   */
  void setA3(String value);
} // unrelate_statement
| apache-2.0 |
PasinduTennage/carbon-identity-framework | components/identity-mgt/org.wso2.carbon.identity.mgt.endpoint/src/main/java/org/wso2/carbon/identity/mgt/endpoint/serviceclient/beans/ConfirmSelfRegistrationRequest.java | 872 | package org.wso2.carbon.identity.mgt.endpoint.serviceclient.beans;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlRootElement;
import javax.xml.bind.annotation.XmlType;
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(propOrder = {
        "user",
        "code"
})
@XmlRootElement(name = "confirmSelfRegistrationRequest")
public class ConfirmSelfRegistrationRequest {

    // The user whose self-registration is being confirmed.
    @XmlElement(required = true)
    private User user;

    // The confirmation code supplied by the user.
    @XmlElement(required = true)
    private String code;

    public User getUser() {
        return user;
    }

    public void setUser(User user) {
        this.user = user;
    }

    public String getCode() {
        return code;
    }

    public void setCode(String code) {
        this.code = code;
    }
}
hurricup/intellij-community | platform/platform-impl/src/com/intellij/application/options/SaveSchemeDialog.java | 3369 | /*
* Copyright 2000-2016 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.application.options;
import com.intellij.CommonBundle;
import com.intellij.openapi.application.ApplicationBundle;
import com.intellij.openapi.ui.DialogWrapper;
import com.intellij.openapi.ui.Messages;
import com.intellij.util.text.UniqueNameGenerator;
import com.intellij.util.ui.JBUI;
import org.jetbrains.annotations.NotNull;
import javax.swing.*;
import java.awt.*;
import java.util.Collection;
public class SaveSchemeDialog extends DialogWrapper {
private final JTextField mySchemeName = new JTextField();
private final Collection<String> myExistingNames;
public SaveSchemeDialog(@NotNull Component parent, String title, @NotNull Collection<String> existingNames, @NotNull String selectedName) {
super(parent, false);
myExistingNames = existingNames;
setTitle(title);
mySchemeName.setText(UniqueNameGenerator.generateUniqueName(selectedName + " copy", existingNames));
init();
}
public String getSchemeName() {
return mySchemeName.getText();
}
@Override
protected JComponent createNorthPanel() {
JPanel panel = new JPanel(new GridBagLayout());
GridBagConstraints gc = new GridBagConstraints();
gc.gridx = 0;
gc.gridy = 0;
gc.weightx = 0;
gc.insets = new Insets(5, 0, 5, 5);
panel.add(new JLabel(ApplicationBundle.message("label.name")), gc);
gc = new GridBagConstraints();
gc.gridx = 1;
gc.gridy = 0;
gc.weightx = 1;
gc.fill = GridBagConstraints.HORIZONTAL;
gc.gridwidth = 2;
gc.insets = new Insets(0, 0, 5, 0);
panel.add(mySchemeName, gc);
panel.setPreferredSize(JBUI.size(220, 40));
return panel;
}
@Override
protected void doOKAction() {
if (getSchemeName().trim().isEmpty()) {
Messages.showMessageDialog(getContentPane(), ApplicationBundle.message("error.scheme.must.have.a.name"),
CommonBundle.getErrorTitle(), Messages.getErrorIcon());
return;
}
else if ("default".equals(getSchemeName())) {
Messages.showMessageDialog(getContentPane(), ApplicationBundle.message("error.illegal.scheme.name"),
CommonBundle.getErrorTitle(), Messages.getErrorIcon());
return;
}
else if (myExistingNames.contains(getSchemeName())) {
Messages.showMessageDialog(
getContentPane(),
ApplicationBundle.message("error.a.scheme.with.this.name.already.exists.or.was.deleted.without.applying.the.changes"),
CommonBundle.getErrorTitle(),
Messages.getErrorIcon()
);
return;
}
super.doOKAction();
}
@Override
protected JComponent createCenterPanel() {
return null;
}
@Override
public JComponent getPreferredFocusedComponent() {
return mySchemeName;
}
}
| apache-2.0 |
camilojd/elasticsearch | core/src/test/java/org/elasticsearch/script/ScriptModesTests.java | 12174 | /*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.script;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.script.ScriptService.ScriptType;
import org.elasticsearch.search.lookup.SearchLookup;
import org.elasticsearch.test.ESTestCase;
import org.junit.After;
import org.junit.Before;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import static java.util.Collections.unmodifiableMap;
import static java.util.Collections.unmodifiableSet;
import static org.elasticsearch.common.util.set.Sets.newHashSet;
import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.CoreMatchers.notNullValue;
import static org.hamcrest.Matchers.containsString;
// TODO: this needs to be a base test class, and all scripting engines extend it
public class ScriptModesTests extends ESTestCase {
// Settings wrapper built from the engine/context registries in setupScriptEngines().
ScriptSettings scriptSettings;
// Registry of built-in plus randomly generated custom script contexts.
ScriptContextRegistry scriptContextRegistry;
// Snapshot of all registered contexts, used for random selection in tests.
private ScriptContext[] scriptContexts;
// Engines by language name (native engine plus the custom test engine).
private Map<String, ScriptEngineService> scriptEngines;
// Object under test; rebuilt by each test method.
private ScriptModes scriptModes;
// Settings verified so far; compared against the expected total in the @After hook.
private Set<String> checkedSettings;
// When false, the @After hook skips the "all settings checked" count assertion.
private boolean assertAllSettingsWereChecked;
// When false, both @After hooks skip their assertions entirely.
private boolean assertScriptModesNonNull;
@Before
public void setupScriptEngines() {
    //randomly register custom script contexts
    int randomInt = randomIntBetween(0, 3);
    //prevent duplicates using map
    Map<String, ScriptContext.Plugin> contexts = new HashMap<>();
    for (int i = 0; i < randomInt; i++) {
        String plugin = randomAsciiOfLength(randomIntBetween(1, 10));
        String operation = randomAsciiOfLength(randomIntBetween(1, 30));
        String context = plugin + "-" + operation;
        contexts.put(context, new ScriptContext.Plugin(plugin, operation));
    }
    scriptContextRegistry = new ScriptContextRegistry(contexts.values());
    scriptContexts = scriptContextRegistry.scriptContexts().toArray(new ScriptContext[scriptContextRegistry.scriptContexts().size()]);
    scriptEngines = buildScriptEnginesByLangMap(newHashSet(
            //add the native engine just to make sure it gets filtered out
            new NativeScriptEngineService(Settings.EMPTY, Collections.<String, NativeScriptFactory>emptyMap()),
            new CustomScriptEngineService()));
    ScriptEngineRegistry scriptEngineRegistry = new ScriptEngineRegistry(Arrays.asList(
            new ScriptEngineRegistry.ScriptEngineRegistration(NativeScriptEngineService.class, NativeScriptEngineService.NAME),
            new ScriptEngineRegistry.ScriptEngineRegistration(CustomScriptEngineService.class, CustomScriptEngineService.NAME)));
    scriptSettings = new ScriptSettings(scriptEngineRegistry, scriptContextRegistry);
    checkedSettings = new HashSet<>();
    // Assertions in the @After hooks are on by default; individual tests flip these
    // flags off when they intentionally leave state unverified.
    assertAllSettingsWereChecked = true;
    assertScriptModesNonNull = true;
}
@After
public void assertNativeScriptsAreAlwaysAllowed() {
    if (!assertScriptModesNonNull) {
        return;
    }
    // Native scripts must be enabled for any script type / context combination.
    ScriptType type = randomFrom(ScriptType.values());
    ScriptContext context = randomFrom(scriptContexts);
    assertThat(scriptModes.getScriptEnabled(NativeScriptEngineService.NAME, type, context), equalTo(true));
}
@After
public void assertAllSettingsWereChecked() {
    if (!assertScriptModesNonNull) {
        return;
    }
    assertThat(scriptModes, notNullValue());
    // One setting per script type per context, plus the three top-level
    // inline/stored/file toggles.
    int expectedSettings = ScriptType.values().length * scriptContextRegistry.scriptContexts().size() + 3;
    assertThat(scriptModes.scriptEnabled.size(), equalTo(expectedSettings));
    if (assertAllSettingsWereChecked) {
        assertThat(checkedSettings.size(), equalTo(expectedSettings));
    }
}
public void testDefaultSettings() {
    scriptModes = new ScriptModes(scriptSettings, Settings.EMPTY);
    // Out of the box only file scripts are enabled; stored and inline are off.
    assertScriptModesAllOps(true, ScriptType.FILE);
    assertScriptModesAllOps(false, ScriptType.STORED, ScriptType.INLINE);
}
public void testMissingSetting() {
    // This test probes a single unknown language, so full setting coverage is not expected.
    assertAllSettingsWereChecked = false;
    this.scriptModes = new ScriptModes(scriptSettings, Settings.EMPTY);
    final ScriptType anyType = randomFrom(ScriptType.values());
    final ScriptContext anyContext = randomFrom(scriptContexts);
    try {
        scriptModes.getScriptEnabled("non_existing", anyType, anyContext);
        fail("Expected IllegalArgumentException");
    } catch (IllegalArgumentException expected) {
        // Looking up an unregistered language must fail with a descriptive message.
        assertThat(expected.getMessage(), containsString("not found for lang [non_existing]"));
    }
}
public void testScriptTypeGenericSettings() {
    // Pick a strict subset of the script types, each paired with a random enabled flag.
    final int typeCount = randomIntBetween(1, ScriptType.values().length - 1);
    final Set<ScriptType> chosenTypes = new HashSet<>();
    final boolean[] chosenModes = new boolean[typeCount];
    for (int i = 0; i < typeCount; i++) {
        boolean added;
        do {
            added = chosenTypes.add(randomFrom(ScriptType.values()));
        } while (added == false);
        chosenModes[i] = randomBoolean();
    }
    final ScriptType[] types = chosenTypes.toArray(new ScriptType[chosenTypes.size()]);

    // Configure a "script.<type>" generic setting for every selected type.
    final Settings.Builder settingsBuilder = Settings.builder();
    for (int i = 0; i < typeCount; i++) {
        settingsBuilder.put("script." + types[i].getScriptType(), chosenModes[i]);
    }
    this.scriptModes = new ScriptModes(scriptSettings, settingsBuilder.build());

    // Explicitly configured types must reflect the configured flag...
    for (int i = 0; i < typeCount; i++) {
        assertScriptModesAllOps(chosenModes[i], types[i]);
    }
    // ...while untouched types keep their defaults: file on, stored/inline off.
    if (chosenTypes.contains(ScriptType.FILE) == false) {
        assertScriptModesAllOps(true, ScriptType.FILE);
    }
    if (chosenTypes.contains(ScriptType.STORED) == false) {
        assertScriptModesAllOps(false, ScriptType.STORED);
    }
    if (chosenTypes.contains(ScriptType.INLINE) == false) {
        assertScriptModesAllOps(false, ScriptType.INLINE);
    }
}
public void testScriptContextGenericSettings() {
    // Pick a strict subset of the registered contexts, each paired with a random flag.
    final int contextCount = randomIntBetween(1, scriptContexts.length - 1);
    final Set<ScriptContext> chosenContexts = new HashSet<>();
    final boolean[] chosenModes = new boolean[contextCount];
    for (int i = 0; i < contextCount; i++) {
        boolean added;
        do {
            added = chosenContexts.add(randomFrom(scriptContexts));
        } while (added == false);
        chosenModes[i] = randomBoolean();
    }
    final ScriptContext[] contexts = chosenContexts.toArray(new ScriptContext[chosenContexts.size()]);

    // Configure a "script.<context>" generic setting for every selected context.
    final Settings.Builder settingsBuilder = Settings.builder();
    for (int i = 0; i < contextCount; i++) {
        settingsBuilder.put("script." + contexts[i].getKey(), chosenModes[i]);
    }
    this.scriptModes = new ScriptModes(scriptSettings, settingsBuilder.build());

    // Configured contexts must reflect their flag for every script type...
    for (int i = 0; i < contextCount; i++) {
        assertScriptModesAllTypes(chosenModes[i], contexts[i]);
    }
    // ...the remaining contexts keep the defaults: file on, stored/inline off.
    final ScriptContext[] remaining = complementOf(contexts);
    assertScriptModes(true, new ScriptType[]{ScriptType.FILE}, remaining);
    assertScriptModes(false, new ScriptType[]{ScriptType.STORED, ScriptType.INLINE}, remaining);
}
public void testConflictingScriptTypeAndOpGenericSettings() {
    final ScriptContext disabledContext = randomFrom(scriptContexts);
    // Context-level (operation) generic settings take precedence over script-type generic
    // settings: the disabled context must stay off even though stored/inline are globally on.
    final Settings conflicting = Settings.builder()
            .put("script." + disabledContext.getKey(), "false")
            .put("script.stored", "true")
            .put("script.inline", "true")
            .build();
    this.scriptModes = new ScriptModes(scriptSettings, conflicting);
    assertScriptModesAllTypes(false, disabledContext);
    final ScriptContext[] otherContexts = complementOf(disabledContext);
    assertScriptModes(true, new ScriptType[]{ScriptType.FILE, ScriptType.STORED}, otherContexts);
    assertScriptModes(true, new ScriptType[]{ScriptType.INLINE}, otherContexts);
}
// Asserts the expected enabled flag for the given script types across every known context.
private void assertScriptModesAllOps(boolean expectedScriptEnabled, ScriptType... scriptTypes) {
    assertScriptModes(expectedScriptEnabled, scriptTypes, scriptContexts);
}
// Asserts the expected enabled flag for the given contexts across every script type.
private void assertScriptModesAllTypes(boolean expectedScriptEnabled, ScriptContext... scriptContexts) {
    assertScriptModes(expectedScriptEnabled, ScriptType.values(), scriptContexts);
}
/**
 * Asserts that every (type, context) combination for the "custom" language has the expected
 * enabled flag, and records each checked setting so the @After hook can verify full coverage.
 */
private void assertScriptModes(boolean expectedScriptEnabled, ScriptType[] scriptTypes, ScriptContext... scriptContexts) {
    assert scriptTypes.length > 0;
    assert scriptContexts.length > 0;
    for (ScriptType scriptType : scriptTypes) {
        checkedSettings.add("script.engine.custom." + scriptType);
        for (ScriptContext scriptContext : scriptContexts) {
            assertThat("custom." + scriptType + "." + scriptContext.getKey() + " doesn't have the expected value",
                    scriptModes.getScriptEnabled("custom", scriptType, scriptContext), equalTo(expectedScriptEnabled));
            // Track via getKey() so the recorded name matches the assertion message above
            // instead of relying on ScriptContext's toString() representation.
            checkedSettings.add("custom." + scriptType + "." + scriptContext.getKey());
        }
    }
}
// Returns every registered context except the given ones, keyed (and de-duplicated) by context key.
private ScriptContext[] complementOf(ScriptContext... excluded) {
    Map<String, ScriptContext> remaining = new HashMap<>();
    for (ScriptContext registered : scriptContextRegistry.scriptContexts()) {
        remaining.put(registered.getKey(), registered);
    }
    for (ScriptContext context : excluded) {
        remaining.remove(context.getKey());
    }
    return remaining.values().toArray(new ScriptContext[remaining.size()]);
}
// Indexes the given engines by their type name, returning an unmodifiable view.
static Map<String, ScriptEngineService> buildScriptEnginesByLangMap(Set<ScriptEngineService> scriptEngines) {
    Map<String, ScriptEngineService> enginesByType = new HashMap<>();
    for (ScriptEngineService engine : scriptEngines) {
        enginesByType.put(engine.getType(), engine);
    }
    return unmodifiableMap(enginesByType);
}
/**
 * Minimal stub {@link ScriptEngineService} registered under the "custom" language.
 * Only its type/extension identity matters to these tests; the compile/execute
 * callbacks are never exercised and deliberately return null or do nothing.
 */
private static class CustomScriptEngineService implements ScriptEngineService {

    public static final String NAME = "custom";

    @Override
    public String getType() {
        return NAME;
    }

    @Override
    public String getExtension() {
        return NAME;
    }

    @Override
    public Object compile(String scriptName, String scriptSource, Map<String, String> params) {
        return null;
    }

    @Override
    public ExecutableScript executable(CompiledScript compiledScript, @Nullable Map<String, Object> vars) {
        return null;
    }

    @Override
    public SearchScript search(CompiledScript compiledScript, SearchLookup lookup, @Nullable Map<String, Object> vars) {
        return null;
    }

    @Override
    public void close() {
    }

    @Override
    public void scriptRemoved(@Nullable CompiledScript script) {
    }
}
}
| apache-2.0 |
gemmellr/qpid-proton-j | proton-j/src/test/java/org/apache/qpid/proton/systemtests/ProtonEngineExampleTest.java | 15144 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.qpid.proton.systemtests;
import static java.util.EnumSet.of;
import static org.apache.qpid.proton.engine.EndpointState.ACTIVE;
import static org.apache.qpid.proton.engine.EndpointState.CLOSED;
import static org.apache.qpid.proton.engine.EndpointState.UNINITIALIZED;
import static org.apache.qpid.proton.systemtests.TestLoggingHelper.bold;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertSame;
import static org.junit.Assert.assertTrue;
import java.util.Arrays;
import java.util.logging.Logger;
import org.apache.qpid.proton.Proton;
import org.apache.qpid.proton.amqp.messaging.Accepted;
import org.apache.qpid.proton.amqp.messaging.AmqpValue;
import org.apache.qpid.proton.amqp.messaging.Section;
import org.apache.qpid.proton.amqp.messaging.Source;
import org.apache.qpid.proton.amqp.messaging.Target;
import org.apache.qpid.proton.amqp.transport.ReceiverSettleMode;
import org.apache.qpid.proton.amqp.transport.SenderSettleMode;
import org.apache.qpid.proton.engine.Delivery;
import org.apache.qpid.proton.engine.Receiver;
import org.apache.qpid.proton.message.Message;
import org.junit.Ignore;
import org.junit.Test;
/**
* Simple example to illustrate the use of the Engine and Message APIs.
*
* Implemented as a JUnit test for convenience, although the main purpose is to educate the reader
* rather than test the code.
*
* To see the protocol trace, add the following line to test/resources/logging.properties:
*
* org.apache.qpid.proton.logging.LoggingProtocolTracer.sent.level = ALL
*
* and to see the byte level trace, add the following:
*
* org.apache.qpid.proton.systemtests.ProtonEngineExampleTest.level = ALL
*
* Does not illustrate use of the Messenger API.
*/
public class ProtonEngineExampleTest extends EngineTestBase
{
    private static final Logger LOGGER = Logger.getLogger(ProtonEngineExampleTest.class.getName());

    // Size of the scratch buffers used to encode/decode the message on both ends.
    private static final int BUFFER_SIZE = 4096;

    // Target address advertised by the client; derived from the server's container id.
    private final String _targetAddress = getServer().containerId + "-link1-target";

    @Test
    public void test() throws Exception
    {
        LOGGER.fine(bold("======== About to create transports"));

        getClient().transport = Proton.transport();
        ProtocolTracerEnabler.setProtocolTracer(getClient().transport, TestLoggingHelper.CLIENT_PREFIX);

        getServer().transport = Proton.transport();
        ProtocolTracerEnabler.setProtocolTracer(getServer().transport, " " + TestLoggingHelper.SERVER_PREFIX);

        doOutputInputCycle();

        getClient().connection = Proton.connection();
        getClient().transport.bind(getClient().connection);

        getServer().connection = Proton.connection();
        getServer().transport.bind(getServer().connection);

        LOGGER.fine(bold("======== About to open connections"));
        getClient().connection.open();
        getServer().connection.open();
        doOutputInputCycle();

        LOGGER.fine(bold("======== About to open sessions"));
        getClient().session = getClient().connection.session();
        getClient().session.open();
        pumpClientToServer();

        // The server discovers the client's new session by scanning for sessions that are
        // remotely ACTIVE but locally still UNINITIALIZED.
        getServer().session = getServer().connection.sessionHead(of(UNINITIALIZED), of(ACTIVE));
        assertEndpointState(getServer().session, UNINITIALIZED, ACTIVE);

        getServer().session.open();
        assertEndpointState(getServer().session, ACTIVE, ACTIVE);
        pumpServerToClient();
        assertEndpointState(getClient().session, ACTIVE, ACTIVE);

        LOGGER.fine(bold("======== About to create sender"));

        getClient().source = new Source();
        getClient().source.setAddress(null);

        getClient().target = new Target();
        getClient().target.setAddress(_targetAddress);

        getClient().sender = getClient().session.sender("link1");
        getClient().sender.setTarget(getClient().target);
        getClient().sender.setSource(getClient().source);
        // Exactly once delivery semantics
        getClient().sender.setSenderSettleMode(SenderSettleMode.UNSETTLED);
        getClient().sender.setReceiverSettleMode(ReceiverSettleMode.SECOND);

        assertEndpointState(getClient().sender, UNINITIALIZED, UNINITIALIZED);

        getClient().sender.open();
        assertEndpointState(getClient().sender, ACTIVE, UNINITIALIZED);

        pumpClientToServer();

        LOGGER.fine(bold("======== About to set up implicitly created receiver"));

        // A real application would be interested in more states than simply ACTIVE, as there
        // exists the possibility that the link could have moved to another state already e.g. CLOSED.
        // (See pipelining).
        getServer().receiver = (Receiver) getServer().connection.linkHead(of(UNINITIALIZED), of(ACTIVE));
        // Accept the settlement modes suggested by the client
        getServer().receiver.setSenderSettleMode(getServer().receiver.getRemoteSenderSettleMode());
        getServer().receiver.setReceiverSettleMode(getServer().receiver.getRemoteReceiverSettleMode());

        org.apache.qpid.proton.amqp.transport.Target serverRemoteTarget = getServer().receiver.getRemoteTarget();
        assertTerminusEquals(getClient().target, serverRemoteTarget);

        getServer().receiver.setTarget(applicationDeriveTarget(serverRemoteTarget));

        assertEndpointState(getServer().receiver, UNINITIALIZED, ACTIVE);
        getServer().receiver.open();

        assertEndpointState(getServer().receiver, ACTIVE, ACTIVE);
        pumpServerToClient();
        assertEndpointState(getClient().sender, ACTIVE, ACTIVE);

        // Grant one unit of link credit so the client's sender may transfer a message.
        getServer().receiver.flow(1);
        pumpServerToClient();

        LOGGER.fine(bold("======== About to create a message and send it to the server"));

        getClient().message = Proton.message();
        Section messageBody = new AmqpValue("Hello");
        getClient().message.setBody(messageBody);
        getClient().messageData = new byte[BUFFER_SIZE];
        int lengthOfEncodedMessage = getClient().message.encode(getClient().messageData, 0, BUFFER_SIZE);
        getTestLoggingHelper().prettyPrint(TestLoggingHelper.MESSAGE_PREFIX, Arrays.copyOf(getClient().messageData, lengthOfEncodedMessage));

        byte[] deliveryTag = "delivery1".getBytes();
        getClient().delivery = getClient().sender.delivery(deliveryTag);
        int numberOfBytesAcceptedBySender = getClient().sender.send(getClient().messageData, 0, lengthOfEncodedMessage);
        assertEquals("For simplicity, assume the sender can accept all the data",
                     lengthOfEncodedMessage, numberOfBytesAcceptedBySender);

        assertNull(getClient().delivery.getLocalState());

        boolean senderAdvanced = getClient().sender.advance();
        assertTrue("sender has not advanced", senderAdvanced);

        pumpClientToServer();

        LOGGER.fine(bold("======== About to process the message on the server"));

        getServer().delivery = getServer().connection.getWorkHead();
        assertEquals("The received delivery should be on our receiver",
                     getServer().receiver, getServer().delivery.getLink());

        assertNull(getServer().delivery.getLocalState());
        assertNull(getServer().delivery.getRemoteState());

        assertFalse(getServer().delivery.isPartial());
        assertTrue(getServer().delivery.isReadable());

        getServer().messageData = new byte[BUFFER_SIZE];
        int numberOfBytesProducedByReceiver = getServer().receiver.recv(getServer().messageData, 0, BUFFER_SIZE);
        assertEquals(numberOfBytesAcceptedBySender, numberOfBytesProducedByReceiver);

        getServer().message = Proton.message();
        getServer().message.decode(getServer().messageData, 0, numberOfBytesProducedByReceiver);

        boolean messageProcessed = applicationProcessMessage(getServer().message);
        assertTrue(messageProcessed);

        // Server accepts the delivery; the disposition travels back to the client.
        getServer().delivery.disposition(Accepted.getInstance());
        assertEquals(Accepted.getInstance(), getServer().delivery.getLocalState());

        pumpServerToClient();
        assertEquals(Accepted.getInstance(), getClient().delivery.getRemoteState());

        LOGGER.fine(bold("======== About to accept and settle the message on the client"));

        Delivery clientDelivery = getClient().connection.getWorkHead();
        assertEquals(getClient().delivery, clientDelivery);
        assertTrue(clientDelivery.isUpdated());
        assertEquals(getClient().sender, clientDelivery.getLink());
        clientDelivery.disposition(clientDelivery.getRemoteState());
        assertEquals(Accepted.getInstance(), getClient().delivery.getLocalState());
        clientDelivery.settle();
        assertNull("Now we've settled, the delivery should no longer be in the work list", getClient().connection.getWorkHead());

        pumpClientToServer();

        LOGGER.fine(bold("======== About to settle the message on the server"));

        assertEquals(Accepted.getInstance(), getServer().delivery.getRemoteState());
        Delivery serverDelivery = getServer().connection.getWorkHead();
        assertEquals(getServer().delivery, serverDelivery);
        assertTrue(serverDelivery.isUpdated());
        assertTrue("Client should have already settled", serverDelivery.remotelySettled());
        serverDelivery.settle();
        assertTrue(serverDelivery.isSettled());
        assertNull("Now we've settled, the delivery should no longer be in the work list", getServer().connection.getWorkHead());

        // Increment the receiver's credit so its ready for another message.
        // When using proton-c, this call is required in order to generate a Flow frame
        // (proton-j sends one even without it to eagerly restore the session incoming window).
        getServer().receiver.flow(1);
        pumpServerToClient();

        LOGGER.fine(bold("======== About to close client's sender"));
        getClient().sender.close();
        pumpClientToServer();

        LOGGER.fine(bold("======== Server about to process client's link closure"));
        assertSame(getServer().receiver, getServer().connection.linkHead(of(ACTIVE), of(CLOSED)));
        getServer().receiver.close();
        pumpServerToClient();

        LOGGER.fine(bold("======== About to close client's session"));
        getClient().session.close();
        pumpClientToServer();

        LOGGER.fine(bold("======== Server about to process client's session closure"));
        assertSame(getServer().session, getServer().connection.sessionHead(of(ACTIVE), of(CLOSED)));
        getServer().session.close();
        pumpServerToClient();

        LOGGER.fine(bold("======== About to close client's connection"));
        getClient().connection.close();
        pumpClientToServer();

        LOGGER.fine(bold("======== Server about to process client's connection closure"));
        assertEquals(CLOSED, getServer().connection.getRemoteState());
        getServer().connection.close();
        pumpServerToClient();

        LOGGER.fine(bold("======== Checking client has nothing more to pump"));

        assertClientHasNothingToOutput();

        LOGGER.fine(bold("======== Done!"));
    }

    @Ignore("This test does not have a fix yet")
    @Test
    public void testPROTON_1017() throws Exception
    {
        LOGGER.fine(bold("======== About to create transports"));

        getClient().transport = Proton.transport();
        ProtocolTracerEnabler.setProtocolTracer(getClient().transport, TestLoggingHelper.CLIENT_PREFIX);

        getServer().transport = Proton.transport();
        ProtocolTracerEnabler.setProtocolTracer(getServer().transport, " " + TestLoggingHelper.SERVER_PREFIX);

        doOutputInputCycle();

        getClient().connection = Proton.connection();
        getClient().transport.bind(getClient().connection);

        getServer().connection = Proton.connection();
        getServer().transport.bind(getServer().connection);

        LOGGER.fine(bold("======== About to open connections"));
        getClient().connection.open();
        getServer().connection.open();
        doOutputInputCycle();

        LOGGER.fine(bold("======== About to open and close client session"));
        // Open and close before pumping, so the server sees a pipelined begin/end
        // pair for the same session within a single exchange.
        getClient().session = getClient().connection.session();
        getClient().session.open();
        getClient().session.close();
        pumpClientToServer();

        getServer().session = getServer().connection.sessionHead(of(UNINITIALIZED), of(CLOSED));
        assertEndpointState(getServer().session, UNINITIALIZED, CLOSED);

        getServer().session.open();
        assertEndpointState(getServer().session, ACTIVE, CLOSED);

        getServer().session.close();
        assertEndpointState(getServer().session, CLOSED, CLOSED);

        pumpServerToClient();
        assertEndpointState(getClient().session, CLOSED, CLOSED);

        LOGGER.fine(bold("======== About to close client's connection"));
        getClient().connection.close();
        pumpClientToServer();

        LOGGER.fine(bold("======== Server about to process client's connection closure"));
        assertEquals(CLOSED, getServer().connection.getRemoteState());
        getServer().connection.close();
        pumpServerToClient();

        LOGGER.fine(bold("======== Checking client has nothing more to pump"));

        assertClientHasNothingToOutput();

        LOGGER.fine(bold("======== Done!"));
    }

    /**
     * Simulates creating a local terminus using the properties supplied by the remote link endpoint.
     *
     * In a broker you'd usually overlay serverRemoteTarget (eg its filter properties) onto
     * an existing object (which eg contains whether it's a queue or a topic), creating a new one from that
     * overlay. Also if this is link recovery then you'd fetch the unsettled map too.
     */
    private org.apache.qpid.proton.amqp.transport.Target applicationDeriveTarget(org.apache.qpid.proton.amqp.transport.Target serverRemoteTarget)
    {
        return serverRemoteTarget;
    }

    /**
     * Simulates processing a message.
     */
    private boolean applicationProcessMessage(Message message)
    {
        Object messageBody = ((AmqpValue)message.getBody()).getValue();
        return "Hello".equals(messageBody);
    }
}
| apache-2.0 |
Darsstar/framework | uitest/src/test/java/com/vaadin/tests/components/combobox/ComboBoxItemStyleGeneratorTest.java | 1778 | /*
* Copyright 2000-2016 Vaadin Ltd.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package com.vaadin.tests.components.combobox;
import static org.junit.Assert.assertEquals;
import java.util.List;
import org.junit.Test;
import org.openqa.selenium.By;
import org.openqa.selenium.WebElement;
import com.vaadin.testbench.elements.ComboBoxElement;
import com.vaadin.tests.tb3.SingleBrowserTest;
public class ComboBoxItemStyleGeneratorTest extends SingleBrowserTest {

    @Test
    public void testItemStyleGenerator() {
        openTestURL();
        ComboBoxElement combo = $(ComboBoxElement.class).first();

        // Enable the generator that applies the bold style to items containing a five.
        selectMenuPath("Component", "Features", "Item style generator",
                "Bold fives");
        combo.openPopup();

        By boldItemSelector = By.className("v-filterselect-item-bold");
        List<WebElement> styledItems = findElements(boldItemSelector);
        assertEquals(1, styledItems.size());
        assertEquals("Item 5", styledItems.get(0).getText());

        // Disabling the generator must remove the custom style from all items.
        selectMenuPath("Component", "Features", "Item style generator", "-");
        styledItems = findElements(boldItemSelector);
        assertEquals(0, styledItems.size());
    }

    @Override
    protected Class<?> getUIClass() {
        return ComboBoxes2.class;
    }
}
| apache-2.0 |
mcculls/maven | maven-core/src/main/java/org/apache/maven/lifecycle/internal/MojoDescriptorCreator.java | 10814 | package org.apache.maven.lifecycle.internal;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import java.util.ArrayList;
import java.util.Collection;
import java.util.StringTokenizer;
import javax.inject.Inject;
import javax.inject.Named;
import javax.inject.Singleton;
import org.apache.maven.execution.MavenSession;
import org.apache.maven.model.Plugin;
import org.apache.maven.plugin.BuildPluginManager;
import org.apache.maven.plugin.InvalidPluginDescriptorException;
import org.apache.maven.plugin.MojoNotFoundException;
import org.apache.maven.plugin.PluginDescriptorParsingException;
import org.apache.maven.plugin.PluginNotFoundException;
import org.apache.maven.plugin.PluginResolutionException;
import org.apache.maven.plugin.descriptor.MojoDescriptor;
import org.apache.maven.plugin.prefix.DefaultPluginPrefixRequest;
import org.apache.maven.plugin.prefix.NoPluginFoundForPrefixException;
import org.apache.maven.plugin.prefix.PluginPrefixRequest;
import org.apache.maven.plugin.prefix.PluginPrefixResolver;
import org.apache.maven.plugin.prefix.PluginPrefixResult;
import org.apache.maven.plugin.version.DefaultPluginVersionRequest;
import org.apache.maven.plugin.version.PluginVersionRequest;
import org.apache.maven.plugin.version.PluginVersionResolutionException;
import org.apache.maven.plugin.version.PluginVersionResolver;
import org.apache.maven.project.MavenProject;
import org.codehaus.plexus.configuration.PlexusConfiguration;
import org.codehaus.plexus.util.xml.Xpp3Dom;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* <p>
* Resolves dependencies for the artifacts in context of the lifecycle build
* </p>
* <strong>NOTE:</strong> This class is not part of any public api and can be changed or deleted without prior notice.
*
* @since 3.0
* @author Benjamin Bentmann
* @author Jason van Zyl
* @author jdcasey
* @author Kristian Rosenvold (extracted class only)
*/
@Named
@Singleton
public class MojoDescriptorCreator
{
    private final Logger logger = LoggerFactory.getLogger( getClass() );

    private final PluginVersionResolver pluginVersionResolver;

    private final BuildPluginManager pluginManager;

    private final PluginPrefixResolver pluginPrefixResolver;

    private final LifecyclePluginResolver lifecyclePluginResolver;

    @Inject
    public MojoDescriptorCreator(
            PluginVersionResolver pluginVersionResolver,
            BuildPluginManager pluginManager,
            PluginPrefixResolver pluginPrefixResolver,
            LifecyclePluginResolver lifecyclePluginResolver )
    {
        this.pluginVersionResolver = pluginVersionResolver;
        this.pluginManager = pluginManager;
        this.pluginPrefixResolver = pluginPrefixResolver;
        this.lifecyclePluginResolver = lifecyclePluginResolver;
    }

    /**
     * Finds the plugin with the given coordinates among the supplied plugins, or {@code null}
     * if no plugin matches both group id and artifact id.
     */
    private Plugin findPlugin( String groupId, String artifactId, Collection<Plugin> plugins )
    {
        for ( Plugin plugin : plugins )
        {
            if ( artifactId.equals( plugin.getArtifactId() ) && groupId.equals( plugin.getGroupId() ) )
            {
                return plugin;
            }
        }

        return null;
    }

    /**
     * Converts the mojo descriptor's default configuration into an Xpp3Dom
     * {@code <configuration>} element, carrying over each parameter's value and/or
     * default-value attribute.
     */
    public static Xpp3Dom convert( MojoDescriptor mojoDescriptor )
    {
        Xpp3Dom dom = new Xpp3Dom( "configuration" );

        PlexusConfiguration c = mojoDescriptor.getMojoConfiguration();

        PlexusConfiguration[] ces = c.getChildren();

        if ( ces != null )
        {
            for ( PlexusConfiguration ce : ces )
            {
                String value = ce.getValue( null );
                String defaultValue = ce.getAttribute( "default-value", null );

                // Only emit parameters that actually carry a value or a default
                if ( value != null || defaultValue != null )
                {
                    Xpp3Dom e = new Xpp3Dom( ce.getName() );

                    e.setValue( value );

                    if ( defaultValue != null )
                    {
                        e.setAttribute( "default-value", defaultValue );
                    }

                    dom.addChild( e );
                }
            }
        }

        return dom;
    }

    // org.apache.maven.plugins:maven-remote-resources-plugin:1.0:process@executionId
    /**
     * Resolves the mojo descriptor for a command line task of one of the forms
     * {@code groupId:artifactId:version:goal}, {@code groupId:artifactId:goal} or
     * {@code prefix:goal}, each optionally suffixed with {@code @executionId}.
     *
     * @param task the task string from the command line
     * @param session the current Maven session
     * @param project the project whose plugin declarations and repositories are consulted
     * @return the resolved mojo descriptor, never {@code null}
     */
    public MojoDescriptor getMojoDescriptor( String task, MavenSession session, MavenProject project )
        throws PluginNotFoundException, PluginResolutionException, PluginDescriptorParsingException,
        MojoNotFoundException, NoPluginFoundForPrefixException, InvalidPluginDescriptorException,
        PluginVersionResolutionException
    {
        String goal = null;

        Plugin plugin = null;

        StringTokenizer tok = new StringTokenizer( task, ":" );

        int numTokens = tok.countTokens();

        if ( numTokens >= 4 )
        {
            // We have everything that we need:
            //
            // org.apache.maven.plugins:maven-remote-resources-plugin:1.0:process
            //
            // groupId : artifactId : version : goal
            //
            plugin = new Plugin();
            plugin.setGroupId( tok.nextToken() );
            plugin.setArtifactId( tok.nextToken() );
            plugin.setVersion( tok.nextToken() );

            // More than one remaining token won't be a valid goal, but joining them back
            // together constructs something easy to read in the later error message.
            StringBuilder goalBuilder = new StringBuilder( tok.nextToken() );
            while ( tok.hasMoreTokens() )
            {
                goalBuilder.append( ':' ).append( tok.nextToken() );
            }
            goal = goalBuilder.toString();
        }
        else if ( numTokens == 3 )
        {
            // We have everything that we need except the version:
            //
            // groupId : artifactId : goal
            //
            plugin = new Plugin();
            plugin.setGroupId( tok.nextToken() );
            plugin.setArtifactId( tok.nextToken() );
            goal = tok.nextToken();
        }
        else
        {
            // We have a prefix and goal, e.g.:
            //
            // idea:idea
            //
            String prefix = tok.nextToken();

            if ( numTokens == 2 )
            {
                goal = tok.nextToken();
            }
            else
            {
                // goal was missing - pass through to MojoNotFoundException
                goal = "";
            }

            // This is the case where someone has executed a single goal from the command line
            // of the form:
            //
            // mvn remote-resources:process
            //
            // From the metadata stored on the server which has been created as part of a standard
            // Maven plugin deployment we will find the right PluginDescriptor from the remote
            // repository.
            plugin = findPluginForPrefix( prefix, session );
        }

        // Strip an optional @executionId suffix from the goal
        int executionIdx = goal.indexOf( '@' );
        if ( executionIdx > 0 )
        {
            goal = goal.substring( 0, executionIdx );
        }

        injectPluginDeclarationFromProject( plugin, project );

        // If there is no version to be found then we need to look in the repository metadata for
        // this plugin and see what's specified as the latest release.
        //
        if ( plugin.getVersion() == null )
        {
            resolvePluginVersion( plugin, session, project );
        }

        return pluginManager.getMojoDescriptor( plugin, goal, project.getRemotePluginRepositories(),
                                                session.getRepositorySession() );
    }

    // TODO take repo mans into account as one may be aggregating prefixes of many
    // TODO collect at the root of the repository, read the one at the root, and fetch remote if something is missing
    // or the user forces the issue

    /**
     * Resolves the plugin bound to the given goal prefix, e.g. maps {@code idea} to the
     * plugin providing the {@code idea:*} goals. The returned plugin carries group and
     * artifact id only; its version is resolved separately.
     */
    public Plugin findPluginForPrefix( String prefix, MavenSession session )
        throws NoPluginFoundForPrefixException
    {
        // [prefix]:[goal]

        if ( session.getCurrentProject() != null )
        {
            try
            {
                lifecyclePluginResolver.resolveMissingPluginVersions( session.getCurrentProject(), session );
            }
            catch ( PluginVersionResolutionException e )
            {
                // not critical here
                logger.debug( e.getMessage(), e );
            }
        }

        PluginPrefixRequest prefixRequest = new DefaultPluginPrefixRequest( prefix, session );
        PluginPrefixResult prefixResult = pluginPrefixResolver.resolve( prefixRequest );

        Plugin plugin = new Plugin();
        plugin.setGroupId( prefixResult.getGroupId() );
        plugin.setArtifactId( prefixResult.getArtifactId() );

        return plugin;
    }

    /**
     * Resolves and sets the version of the given plugin from the project's remote
     * plugin repositories.
     */
    private void resolvePluginVersion( Plugin plugin, MavenSession session, MavenProject project )
        throws PluginVersionResolutionException
    {
        PluginVersionRequest versionRequest =
            new DefaultPluginVersionRequest( plugin, session.getRepositorySession(),
                                             project.getRemotePluginRepositories() );
        plugin.setVersion( pluginVersionResolver.resolve( versionRequest ).getVersion() );
    }

    /**
     * Overlays the project's declaration of the plugin (version and dependencies) onto the
     * plugin parsed from the task, checking the build plugins first and falling back to
     * plugin management.
     */
    private void injectPluginDeclarationFromProject( Plugin plugin, MavenProject project )
    {
        Plugin pluginInPom = findPlugin( plugin, project.getBuildPlugins() );

        if ( pluginInPom == null && project.getPluginManagement() != null )
        {
            pluginInPom = findPlugin( plugin, project.getPluginManagement().getPlugins() );
        }

        if ( pluginInPom != null )
        {
            if ( plugin.getVersion() == null )
            {
                plugin.setVersion( pluginInPom.getVersion() );
            }

            plugin.setDependencies( new ArrayList<>( pluginInPom.getDependencies() ) );
        }
    }

    private Plugin findPlugin( Plugin plugin, Collection<Plugin> plugins )
    {
        return findPlugin( plugin.getGroupId(), plugin.getArtifactId(), plugins );
    }
}
| apache-2.0 |
trampi/stagemonitor | stagemonitor-requestmonitor/src/main/java/org/stagemonitor/requestmonitor/ejb/IsDeclaredInInterfaceHierarchyElementMatcher.java | 1446 | package org.stagemonitor.requestmonitor.ejb;
import static net.bytebuddy.matcher.ElementMatchers.named;
import static net.bytebuddy.matcher.ElementMatchers.returns;
import static net.bytebuddy.matcher.ElementMatchers.takesArguments;
import net.bytebuddy.description.method.MethodDescription;
import net.bytebuddy.description.type.TypeDescription;
import net.bytebuddy.matcher.ElementMatcher;
import net.bytebuddy.matcher.ElementMatchers;
/**
 * Matches a type if it, or any interface in its (transitive) interface hierarchy,
 * declares a method with the same name, return type and parameter types as the
 * given target method.
 */
class IsDeclaredInInterfaceHierarchyElementMatcher implements ElementMatcher<TypeDescription> {

    // Matcher for "declares a method with the target's name, return type and parameter
    // types". Built once in the constructor because it only depends on the target method,
    // not on the interface currently being inspected; the original rebuilt it on every
    // recursive matches() call.
    private final ElementMatcher<TypeDescription> declaresMatchingMethod;

    static ElementMatcher<TypeDescription> isDeclaredInInterfaceHierarchy(MethodDescription.InDefinedShape method) {
        return new IsDeclaredInInterfaceHierarchyElementMatcher(method);
    }

    public IsDeclaredInInterfaceHierarchyElementMatcher(MethodDescription.InDefinedShape targetMethod) {
        this.declaresMatchingMethod = ElementMatchers.<TypeDescription>declaresMethod(named(targetMethod.getName())
                .and(returns(targetMethod.getReturnType().asErasure()))
                .and(takesArguments(targetMethod.getParameters().asTypeList().asErasures())));
    }

    @Override
    public boolean matches(TypeDescription targetInterface) {
        if (declaresMatchingMethod.matches(targetInterface)) {
            return true;
        }
        // Not declared directly; recurse into the extended interfaces.
        for (TypeDescription parentInterface : targetInterface.getInterfaces().asErasures()) {
            if (matches(parentInterface)) {
                return true;
            }
        }
        return false;
    }
}
| apache-2.0 |
christophd/camel | dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/ClassEndpointBuilderFactory.java | 12027 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.builder.endpoint.dsl;
import java.util.*;
import java.util.Map;
import java.util.concurrent.*;
import java.util.function.*;
import java.util.stream.*;
import javax.annotation.Generated;
import org.apache.camel.builder.EndpointConsumerBuilder;
import org.apache.camel.builder.EndpointProducerBuilder;
import org.apache.camel.builder.endpoint.AbstractEndpointBuilder;
/**
* Invoke methods of Java beans specified by class name.
*
* Generated by camel build tools - do NOT edit this file!
*/
@Generated("org.apache.camel.maven.packaging.EndpointDslMojo")
// NOTE(review): this file is produced by the Camel EndpointDslMojo build tool (see the
// @Generated annotation above). Do not hand-edit code here — change the component metadata
// and regenerate instead. Comments below annotate the generated structure for readers only.
public interface ClassEndpointBuilderFactory {

    /**
     * Builder for endpoint for the Class component.
     */
    public interface ClassEndpointBuilder extends EndpointProducerBuilder {
        // Narrowing cast is safe: the only implementation (see endpointBuilder below)
        // implements both the basic and the advanced builder interface.
        default AdvancedClassEndpointBuilder advanced() {
            return (AdvancedClassEndpointBuilder) this;
        }
        /**
         * Use scope option instead.
         * 
         * The option is a: <code>java.lang.Boolean</code> type.
         * 
         * Group: common
         * 
         * @param cache the value to set
         * @return the dsl builder
         */
        @Deprecated
        default ClassEndpointBuilder cache(Boolean cache) {
            doSetProperty("cache", cache);
            return this;
        }
        /**
         * Use scope option instead.
         * 
         * The option will be converted to a
         * <code>java.lang.Boolean</code> type.
         * 
         * Group: common
         * 
         * @param cache the value to set
         * @return the dsl builder
         */
        @Deprecated
        default ClassEndpointBuilder cache(String cache) {
            doSetProperty("cache", cache);
            return this;
        }
        /**
         * Sets the name of the method to invoke on the bean.
         * 
         * The option is a: <code>java.lang.String</code> type.
         * 
         * Group: common
         * 
         * @param method the value to set
         * @return the dsl builder
         */
        default ClassEndpointBuilder method(String method) {
            doSetProperty("method", method);
            return this;
        }
        /**
         * Scope of bean. When using singleton scope (default) the bean is
         * created or looked up only once and reused for the lifetime of the
         * endpoint. The bean should be thread-safe in case concurrent threads
         * is calling the bean at the same time. When using request scope the
         * bean is created or looked up once per request (exchange). This can be
         * used if you want to store state on a bean while processing a request
         * and you want to call the same bean instance multiple times while
         * processing the request. The bean does not have to be thread-safe as
         * the instance is only called from the same request. When using
         * prototype scope, then the bean will be looked up or created per call.
         * However in case of lookup then this is delegated to the bean registry
         * such as Spring or CDI (if in use), which depends on their
         * configuration can act as either singleton or prototype scope. so when
         * using prototype then this depends on the delegated registry.
         * 
         * The option is a: <code>org.apache.camel.BeanScope</code>
         * type.
         * 
         * Default: Singleton
         * Group: common
         * 
         * @param scope the value to set
         * @return the dsl builder
         */
        default ClassEndpointBuilder scope(org.apache.camel.BeanScope scope) {
            doSetProperty("scope", scope);
            return this;
        }
        /**
         * Scope of bean. When using singleton scope (default) the bean is
         * created or looked up only once and reused for the lifetime of the
         * endpoint. The bean should be thread-safe in case concurrent threads
         * is calling the bean at the same time. When using request scope the
         * bean is created or looked up once per request (exchange). This can be
         * used if you want to store state on a bean while processing a request
         * and you want to call the same bean instance multiple times while
         * processing the request. The bean does not have to be thread-safe as
         * the instance is only called from the same request. When using
         * prototype scope, then the bean will be looked up or created per call.
         * However in case of lookup then this is delegated to the bean registry
         * such as Spring or CDI (if in use), which depends on their
         * configuration can act as either singleton or prototype scope. so when
         * using prototype then this depends on the delegated registry.
         * 
         * The option will be converted to a
         * <code>org.apache.camel.BeanScope</code> type.
         * 
         * Default: Singleton
         * Group: common
         * 
         * @param scope the value to set
         * @return the dsl builder
         */
        default ClassEndpointBuilder scope(String scope) {
            doSetProperty("scope", scope);
            return this;
        }
        /**
         * Whether the producer should be started lazy (on the first message).
         * By starting lazy you can use this to allow CamelContext and routes to
         * startup in situations where a producer may otherwise fail during
         * starting and cause the route to fail being started. By deferring this
         * startup to be lazy then the startup failure can be handled during
         * routing messages via Camel's routing error handlers. Beware that when
         * the first message is processed then creating and starting the
         * producer may take a little time and prolong the total processing time
         * of the processing.
         * 
         * The option is a: <code>boolean</code> type.
         * 
         * Default: false
         * Group: producer
         * 
         * @param lazyStartProducer the value to set
         * @return the dsl builder
         */
        default ClassEndpointBuilder lazyStartProducer(boolean lazyStartProducer) {
            doSetProperty("lazyStartProducer", lazyStartProducer);
            return this;
        }
        /**
         * Whether the producer should be started lazy (on the first message).
         * By starting lazy you can use this to allow CamelContext and routes to
         * startup in situations where a producer may otherwise fail during
         * starting and cause the route to fail being started. By deferring this
         * startup to be lazy then the startup failure can be handled during
         * routing messages via Camel's routing error handlers. Beware that when
         * the first message is processed then creating and starting the
         * producer may take a little time and prolong the total processing time
         * of the processing.
         * 
         * The option will be converted to a <code>boolean</code>
         * type.
         * 
         * Default: false
         * Group: producer
         * 
         * @param lazyStartProducer the value to set
         * @return the dsl builder
         */
        default ClassEndpointBuilder lazyStartProducer(String lazyStartProducer) {
            doSetProperty("lazyStartProducer", lazyStartProducer);
            return this;
        }
    }

    /**
     * Advanced builder for endpoint for the Class component.
     */
    public interface AdvancedClassEndpointBuilder
            extends
                EndpointProducerBuilder {
        default ClassEndpointBuilder basic() {
            return (ClassEndpointBuilder) this;
        }
        /**
         * Used for configuring additional properties on the bean.
         * 
         * The option is a: <code>java.util.Map&lt;java.lang.String,
         * java.lang.Object&gt;</code> type.
         * The option is multivalued, and you can use the parameters(String,
         * Object) method to add a value (call the method multiple times to set
         * more values).
         * 
         * Group: advanced
         * 
         * @param key the option key
         * @param value the option value
         * @return the dsl builder
         */
        default AdvancedClassEndpointBuilder parameters(String key, Object value) {
            doSetMultiValueProperty("parameters", "bean." + key, value);
            return this;
        }
        /**
         * Used for configuring additional properties on the bean.
         * 
         * The option is a: <code>java.util.Map&lt;java.lang.String,
         * java.lang.Object&gt;</code> type.
         * The option is multivalued, and you can use the parameters(String,
         * Object) method to add a value (call the method multiple times to set
         * more values).
         * 
         * Group: advanced
         * 
         * @param values the values
         * @return the dsl builder
         */
        // NOTE(review): raw Map is emitted by the generator here; per the javadoc the
        // intended type is Map<String, Object> — a generator-level fix, not a hand edit.
        default AdvancedClassEndpointBuilder parameters(Map values) {
            doSetMultiValueProperties("parameters", "bean.", values);
            return this;
        }
    }

    public interface ClassBuilders {
        /**
         * Class (camel-bean)
         * Invoke methods of Java beans specified by class name.
         * 
         * Category: core,java
         * Since: 2.4
         * Maven coordinates: org.apache.camel:camel-bean
         * 
         * Syntax: <code>class:beanName</code>
         * 
         * Path parameter: beanName (required)
         * Sets the name of the bean to invoke
         * 
         * @param path beanName
         * @return the dsl builder
         */
        // Named "clas" because "class" is a reserved word in Java.
        default ClassEndpointBuilder clas(String path) {
            return ClassEndpointBuilderFactory.endpointBuilder("class", path);
        }
        /**
         * Class (camel-bean)
         * Invoke methods of Java beans specified by class name.
         * 
         * Category: core,java
         * Since: 2.4
         * Maven coordinates: org.apache.camel:camel-bean
         * 
         * Syntax: <code>class:beanName</code>
         * 
         * Path parameter: beanName (required)
         * Sets the name of the bean to invoke
         * 
         * @param componentName to use a custom component name for the endpoint
         * instead of the default name
         * @param path beanName
         * @return the dsl builder
         */
        default ClassEndpointBuilder clas(String componentName, String path) {
            return ClassEndpointBuilderFactory.endpointBuilder(componentName, path);
        }
    }

    static ClassEndpointBuilder endpointBuilder(
            String componentName,
            String path) {
        // Local class captures componentName from the enclosing method; it implements both
        // builder interfaces so advanced()/basic() casts above always succeed.
        class ClassEndpointBuilderImpl extends AbstractEndpointBuilder implements ClassEndpointBuilder, AdvancedClassEndpointBuilder {
            public ClassEndpointBuilderImpl(String path) {
                super(componentName, path);
            }
        }
        return new ClassEndpointBuilderImpl(path);
    }
}
tjordanchat/rundeck | core/src/main/java/com/dtolabs/rundeck/core/resources/DirectoryResourceModelSourceFactory.java | 2236 | /*
* Copyright 2011 DTO Solutions, Inc. (http://dtosolutions.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* DirectoryFileResourceModelSourceFactory.java
*
* User: Greg Schueler <a href="mailto:greg@dtosolutions.com">greg@dtosolutions.com</a>
* Created: 7/21/11 11:12 AM
*
*/
package com.dtolabs.rundeck.core.resources;
import com.dtolabs.rundeck.core.common.Framework;
import com.dtolabs.rundeck.core.plugins.Plugin;
import com.dtolabs.rundeck.core.plugins.configuration.ConfigurationException;
import com.dtolabs.rundeck.core.plugins.configuration.Describable;
import com.dtolabs.rundeck.core.plugins.configuration.Description;
import java.util.Properties;
/**
* DirectoryFileResourceModelSourceFactory is ...
*
* @author Greg Schueler <a href="mailto:greg@dtosolutions.com">greg@dtosolutions.com</a>
*/
@Plugin (name = "directory", service = "ResourceModelSource")
public class DirectoryResourceModelSourceFactory implements ResourceModelSourceFactory, Describable {
public static final String SERVICE_PROVIDER_TYPE = "directory";
private Framework framework;
public DirectoryResourceModelSourceFactory(Framework framework) {
this.framework = framework;
}
public ResourceModelSource createResourceModelSource(final Properties configuration) throws ConfigurationException {
final DirectoryResourceModelSource fileResourceModelSource = new DirectoryResourceModelSource(framework);
fileResourceModelSource.configure(DirectoryResourceModelSource.Configuration.fromProperties(configuration));
return fileResourceModelSource;
}
public Description getDescription() {
return DirectoryResourceModelSource.DESCRIPTION;
}
}
| apache-2.0 |
android-ia/platform_tools_idea | java/java-tests/testSrc/com/intellij/codeInsight/daemon/JavadocResolveTest.java | 1547 | /*
* Copyright 2000-2011 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.codeInsight.daemon;
import com.intellij.codeInspection.LocalInspectionTool;
import com.intellij.codeInspection.javaDoc.JavaDocLocalInspection;
import com.intellij.codeInspection.javaDoc.JavaDocReferenceInspection;
public class JavadocResolveTest extends DaemonAnalyzerTestCase {
private static final String BASE_PATH = "/codeInsight/daemonCodeAnalyzer/javaDoc/resolve";
@Override
protected LocalInspectionTool[] configureLocalInspectionTools() {
return new LocalInspectionTool[]{new JavaDocLocalInspection(), new JavaDocReferenceInspection()};
}
public void testSee0() throws Exception { doTest(); }
public void testSee1() throws Exception { doTest(); }
public void testSee2() throws Exception { doTest(); }
public void testSee3() throws Exception { doTest(); }
private void doTest() throws Exception {
doTest(BASE_PATH + "/pkg/" + getTestName(false) + ".java", BASE_PATH, false, false);
}
}
| apache-2.0 |
paulnguyen/cmpe279 | modules/module10/web/jsf/hello1-rlc/src/main/java/javaeetutorial/hello1rlc/Hello.java | 574 | /**
* Copyright (c) 2014 Oracle and/or its affiliates. All rights reserved.
*
* You may not modify, use, reproduce, or distribute this software except in
* compliance with the terms of the License at:
* http://java.net/projects/javaeetutorial/pages/BerkeleyLicense
*/
package javaeetutorial.hello1rlc;
import javax.enterprise.inject.Model;
/**
 * CDI model bean backing the hello1-rlc JSF page; holds the user-supplied name.
 */
@Model
public class Hello {

    private String name;

    public Hello() {
    }

    /** @return the stored name, or {@code null} if none has been set */
    public String getName() {
        return name;
    }

    /** @param userName the name to store */
    public void setName(String userName) {
        // Renamed from snake_case "user_name" to Java camelCase; positional callers unaffected.
        this.name = userName;
    }
}
| apache-2.0 |
HanSolo/Enzo | src/main/java/eu.hansolo.enzo/onoffswitch/SelectionEvent.java | 665 | package eu.hansolo.enzo.onoffswitch;
import javafx.event.Event;
import javafx.event.EventTarget;
import javafx.event.EventType;
/**
 * Event fired by the on/off switch control when its selection state changes.
 * <p>
 * User: hansolo
 * Date: 10.10.13
 * Time: 09:48
 */
public class SelectionEvent extends Event {
    // Diamond operator fixes the raw-type EventType construction (unchecked warning):
    // the constructor takes the super type (Event.ANY) and the event name.
    /** Fired when the control becomes selected. */
    public static final EventType<SelectionEvent> SELECT = new EventType<>(ANY, "select");
    /** Fired when the control becomes deselected. */
    public static final EventType<SelectionEvent> DESELECT = new EventType<>(ANY, "deselect");


    // ******************** Constructors **********************************
    /**
     * Creates a selection event.
     *
     * @param SOURCE     the object that fired the event
     * @param TARGET     the event target
     * @param EVENT_TYPE one of {@link #SELECT} or {@link #DESELECT}
     */
    public SelectionEvent(final Object SOURCE, final EventTarget TARGET, final EventType<SelectionEvent> EVENT_TYPE) {
        super(SOURCE, TARGET, EVENT_TYPE);
    }
}
| apache-2.0 |
igniterealtime/Smack | smack-experimental/src/main/java/org/jivesoftware/smackx/iot/data/provider/IoTDataRequestProvider.java | 1443 | /**
*
* Copyright © 2016-2019 Florian Schmaus
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jivesoftware.smackx.iot.data.provider;
import java.io.IOException;
import org.jivesoftware.smack.packet.XmlEnvironment;
import org.jivesoftware.smack.provider.IQProvider;
import org.jivesoftware.smack.util.ParserUtils;
import org.jivesoftware.smack.xml.XmlPullParser;
import org.jivesoftware.smackx.iot.data.element.IoTDataRequest;
/**
 * Parses an IoT data request IQ from XML into an {@link IoTDataRequest}:
 * the mandatory 'seqnr' attribute plus the optional 'momentary' flag.
 */
public class IoTDataRequestProvider extends IQProvider<IoTDataRequest> {

    @Override
    public IoTDataRequest parse(XmlPullParser parser, int initialDepth, XmlEnvironment xmlEnvironment) throws IOException {
        // 'seqnr' is required (throws when absent); 'momentary' defaults to false.
        return new IoTDataRequest(
                ParserUtils.getIntegerAttributeOrThrow(parser, "seqnr", "IoT data request without sequence number"),
                ParserUtils.getBooleanAttribute(parser, "momentary", false));
    }
}
| apache-2.0 |
smgoller/geode | geode-core/src/main/java/org/apache/geode/internal/cache/locks/TXLockBatch.java | 5005 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.geode.internal.cache.locks;
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.util.Iterator;
import java.util.List;
import java.util.Set;
import org.apache.geode.distributed.internal.locks.DLockBatch;
import org.apache.geode.distributed.internal.locks.DLockBatchId;
import org.apache.geode.distributed.internal.locks.LockGrantorId;
import org.apache.geode.distributed.internal.membership.InternalDistributedMember;
import org.apache.geode.internal.InternalDataSerializer;
import org.apache.geode.internal.cache.IdentityArrayList;
import org.apache.geode.internal.cache.TXRegionLockRequestImpl;
import org.apache.geode.internal.serialization.DataSerializableFixedID;
import org.apache.geode.internal.serialization.DeserializationContext;
import org.apache.geode.internal.serialization.KnownVersion;
import org.apache.geode.internal.serialization.SerializationContext;
/**
 * Adapts multiple TXRegionLockRequests to one DLockBatch for DLock to use.
 * <p>
 * Instances are serialized with {@link DataSerializableFixedID}; the wire format written by
 * {@link #toData} and read by {@link #fromData} must stay in sync.
 */
public class TXLockBatch implements DLockBatch, DataSerializableFixedID {

  /** Identifies the batch as a single entity */
  private TXLockIdImpl txLockId;

  /** List of <code>TXRegionLockRequests</code> */
  private List reqs;

  /** Identifies the members participating in the transaction */
  private Set participants;

  /**
   * Constructs a <code>TXLockBatch</code> for the list of <code>TXRegionLockRequests</code>
   */
  public TXLockBatch(TXLockId txLockId, List reqs, Set participants) {
    this.txLockId = (TXLockIdImpl) txLockId;
    this.reqs = reqs;
    this.participants = participants;
  }

  @Override
  public InternalDistributedMember getOwner() {
    return txLockId.getMemberId();
  }

  public TXLockId getTXLockId() {
    return txLockId;
  }

  @Override
  public DLockBatchId getBatchId() {
    return txLockId;
  }

  public void setParticipants(Set participants) {
    this.participants = participants;
  }

  @Override
  public void grantedBy(LockGrantorId lockGrantorId) {
    txLockId.setLockGrantorId(lockGrantorId);
  }

  @Override
  public List getReqs() {
    // Lazily convert to an IdentityArrayList so lookups use identity, not equals().
    if (reqs != null && !(reqs instanceof IdentityArrayList)) {
      reqs = new IdentityArrayList(reqs);
    }
    return reqs;
  }

  @Override
  public String toString() {
    return "[TXLockBatch: txLockId=" + txLockId + "; reqs=" + reqs + "; participants="
        + participants + "]";
  }

  /**
   * Each lock batch contains a set of distributed system member ids that are participating in the
   * transaction. Public access for testing purposes.
   *
   * @return participants in the transaction
   */
  public Set getParticipants() {
    return participants;
  }

  // -------------------------------------------------------------------------
  // DataSerializable support
  // -------------------------------------------------------------------------

  /** No-arg constructor required for deserialization; state is populated by fromData. */
  public TXLockBatch() {}

  @Override
  public int getDSFID() {
    return TX_LOCK_BATCH;
  }

  @Override
  public void fromData(DataInput in,
      DeserializationContext context) throws IOException, ClassNotFoundException {
    txLockId = TXLockIdImpl.createFromData(in);
    participants = InternalDataSerializer.readSet(in);
    // A negative size is the marker toData writes for a null request list.
    int reqsSize = in.readInt();
    if (reqsSize >= 0) {
      reqs = new IdentityArrayList(reqsSize);
      for (int i = 0; i < reqsSize; i++) {
        reqs.add(TXRegionLockRequestImpl.createFromData(in));
      }
    }
  }

  @Override
  public void toData(DataOutput out,
      SerializationContext context) throws IOException {
    InternalDataSerializer.invokeToData(txLockId, out);
    InternalDataSerializer.writeSet(participants, out);
    if (reqs == null) {
      // -1 signals a null list to fromData.
      out.writeInt(-1);
    } else {
      out.writeInt(reqs.size());
      // Enhanced for-loop replaces the raw Iterator loop; same iteration order and casts.
      for (Object req : reqs) {
        InternalDataSerializer.invokeToData((TXRegionLockRequestImpl) req, out);
      }
    }
  }

  @Override
  public KnownVersion[] getSerializationVersions() {
    // No version-specific serialization changes exist for this class.
    return null;
  }
}
| apache-2.0 |
RCheungIT/phoenix | phoenix-core/src/it/java/org/apache/phoenix/end2end/AlterMultiTenantTableWithViews.java | 39739 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.phoenix.end2end;
import static org.apache.phoenix.query.QueryConstants.BASE_TABLE_BASE_COLUMN_COUNT;
import static org.apache.phoenix.query.QueryConstants.DIVERGED_VIEW_BASE_COLUMN_COUNT;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.List;
import java.util.Properties;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.phoenix.compile.QueryPlan;
import org.apache.phoenix.exception.SQLExceptionCode;
import org.apache.phoenix.jdbc.PhoenixConnection;
import org.apache.phoenix.jdbc.PhoenixDatabaseMetaData;
import org.apache.phoenix.jdbc.PhoenixStatement;
import org.apache.phoenix.schema.ColumnNotFoundException;
import org.apache.phoenix.schema.PColumn;
import org.apache.phoenix.schema.PTable;
import org.apache.phoenix.schema.PTableKey;
import org.apache.phoenix.schema.PTableType;
import org.apache.phoenix.util.IndexUtil;
import org.apache.phoenix.util.PhoenixRuntime;
import org.apache.phoenix.util.SchemaUtil;
import org.junit.Test;
import com.google.common.base.Objects;
public class AlterMultiTenantTableWithViews extends BaseHBaseManagedTimeIT {
private Connection getTenantConnection(String tenantId) throws Exception {
Properties tenantProps = new Properties();
tenantProps.setProperty(PhoenixRuntime.TENANT_ID_ATTRIB, tenantId);
return DriverManager.getConnection(getUrl(), tenantProps);
}
private static long getTableSequenceNumber(PhoenixConnection conn, String tableName) throws SQLException {
PTable table = conn.getTable(new PTableKey(conn.getTenantId(), SchemaUtil.normalizeIdentifier(tableName)));
return table.getSequenceNumber();
}
private static short getMaxKeySequenceNumber(PhoenixConnection conn, String tableName) throws SQLException {
PTable table = conn.getTable(new PTableKey(conn.getTenantId(), SchemaUtil.normalizeIdentifier(tableName)));
return SchemaUtil.getMaxKeySeq(table);
}
private static void verifyNewColumns(ResultSet rs, String ... values) throws SQLException {
assertTrue(rs.next());
int i = 1;
for (String value : values) {
assertEquals(value, rs.getString(i++));
}
assertFalse(rs.next());
assertEquals(values.length, i - 1);
}
    /**
     * Verifies that adding a column to, and later dropping a column from, a multi-tenant base
     * table propagates through the entire view hierarchy: a tenant-1 view chain (view1 -> view2),
     * a tenant-2 view (view3), and a global view (view4).
     */
    @Test
    public void testAddDropColumnToBaseTablePropagatesToEntireViewHierarchy() throws Exception {
        String baseTable = "testViewHierarchy";
        String view1 = "view1";
        String view2 = "view2";
        String view3 = "view3";
        String view4 = "view4";
        /*                    baseTable
                          /       |        \
            view1(tenant1)  view3(tenant2)  view4(global)
                 /
            view2(tenant1)
        */
        try (Connection conn = DriverManager.getConnection(getUrl())) {
            // MULTI_TENANT base table: tenant connections see only their own data/views.
            String baseTableDDL = "CREATE TABLE " + baseTable + " (TENANT_ID VARCHAR NOT NULL, PK1 VARCHAR NOT NULL, V1 VARCHAR, V2 VARCHAR CONSTRAINT NAME_PK PRIMARY KEY(TENANT_ID, PK1)) MULTI_TENANT = true ";
            conn.createStatement().execute(baseTableDDL);
            // tenant1: view1 on the base table, view2 chained on view1.
            try (Connection tenant1Conn = getTenantConnection("tenant1")) {
                String view1DDL = "CREATE VIEW " + view1 + " AS SELECT * FROM " + baseTable;
                tenant1Conn.createStatement().execute(view1DDL);
                String view2DDL = "CREATE VIEW " + view2 + " AS SELECT * FROM " + view1;
                tenant1Conn.createStatement().execute(view2DDL);
            }
            // tenant2: view3 directly on the base table.
            try (Connection tenant2Conn = getTenantConnection("tenant2")) {
                String view3DDL = "CREATE VIEW " + view3 + " AS SELECT * FROM " + baseTable;
                tenant2Conn.createStatement().execute(view3DDL);
            }
            // global: view4 directly on the base table.
            String view4DDL = "CREATE VIEW " + view4 + " AS SELECT * FROM " + baseTable;
            conn.createStatement().execute(view4DDL);
            // Add a column on the base table; it should become visible in every view.
            String alterBaseTable = "ALTER TABLE " + baseTable + " ADD V3 VARCHAR";
            conn.createStatement().execute(alterBaseTable);
            // verify that the column is visible to view4
            conn.createStatement().execute("SELECT V3 FROM " + view4);
            // verify that the column is visible to view1 and view2
            try (Connection tenant1Conn = getTenantConnection("tenant1")) {
                tenant1Conn.createStatement().execute("SELECT V3 from " + view1);
                tenant1Conn.createStatement().execute("SELECT V3 from " + view2);
            }
            // verify that the column is visible to view3
            try (Connection tenant2Conn = getTenantConnection("tenant2")) {
                tenant2Conn.createStatement().execute("SELECT V3 from " + view3);
            }
            // Drop a column on the base table; it should disappear from every view.
            alterBaseTable = "ALTER TABLE " + baseTable + " DROP COLUMN V1";
            conn.createStatement().execute(alterBaseTable);
            // verify that the column is not visible to view4
            try {
                conn.createStatement().execute("SELECT V1 FROM " + view4);
                fail();
            } catch (ColumnNotFoundException e) {
                // expected: V1 was dropped from the base table
            }
            // verify that the column is not visible to view1 and view2
            try (Connection tenant1Conn = getTenantConnection("tenant1")) {
                try {
                    tenant1Conn.createStatement().execute("SELECT V1 from " + view1);
                    fail();
                } catch (ColumnNotFoundException e) {
                    // expected
                }
                try {
                    tenant1Conn.createStatement().execute("SELECT V1 from " + view2);
                    fail();
                } catch (ColumnNotFoundException e) {
                    // expected
                }
            }
            // verify that the column is not visible to view3
            try (Connection tenant2Conn = getTenantConnection("tenant2")) {
                try {
                    tenant2Conn.createStatement().execute("SELECT V1 from " + view3);
                    fail();
                } catch (ColumnNotFoundException e) {
                    // expected
                }
            }
        }
    }
    /**
     * Verifies that adding a new PK column to the multi-tenant base table extends the primary
     * key of every view in the hierarchy (tenant-1 view chain, tenant-2 view, global view).
     */
    @Test
    public void testChangingPKOfBaseTableChangesPKForAllViews() throws Exception {
        String baseTable = "testChangePKOfBaseTable";
        String view1 = "view1";
        String view2 = "view2";
        String view3 = "view3";
        String view4 = "view4";
        /*                    baseTable
                          /       |        \
            view1(tenant1)  view3(tenant2)  view4(global)
                 /
            view2(tenant1)
        */
        // Tenant connections are closed in the finally block (not try-with-resources) because
        // they must stay open across the ALTER and the verification queries that follow it.
        Connection tenant1Conn = null, tenant2Conn = null;
        try (Connection globalConn = DriverManager.getConnection(getUrl())) {
            String baseTableDDL = "CREATE TABLE "
                    + baseTable
                    + " (TENANT_ID VARCHAR NOT NULL, PK1 VARCHAR NOT NULL, V1 VARCHAR, V2 VARCHAR CONSTRAINT NAME_PK PRIMARY KEY(TENANT_ID, PK1)) MULTI_TENANT = true ";
            globalConn.createStatement().execute(baseTableDDL);
            tenant1Conn = getTenantConnection("tenant1");
            String view1DDL = "CREATE VIEW " + view1 + " AS SELECT * FROM " + baseTable;
            tenant1Conn.createStatement().execute(view1DDL);
            String view2DDL = "CREATE VIEW " + view2 + " AS SELECT * FROM " + view1;
            tenant1Conn.createStatement().execute(view2DDL);
            tenant2Conn = getTenantConnection("tenant2");
            String view3DDL = "CREATE VIEW " + view3 + " AS SELECT * FROM " + baseTable;
            tenant2Conn.createStatement().execute(view3DDL);
            String view4DDL = "CREATE VIEW " + view4 + " AS SELECT * FROM " + baseTable;
            globalConn.createStatement().execute(view4DDL);
            // Add a new PK column on the base table.
            String alterBaseTable = "ALTER TABLE " + baseTable + " ADD NEW_PK varchar primary key ";
            globalConn.createStatement().execute(alterBaseTable);
            // verify that the new column new_pk is now part of the primary key for the entire hierarchy
            globalConn.createStatement().execute("SELECT * FROM " + baseTable);
            assertTrue(checkColumnPartOfPk(globalConn.unwrap(PhoenixConnection.class), "NEW_PK", baseTable));
            tenant1Conn.createStatement().execute("SELECT * FROM " + view1);
            assertTrue(checkColumnPartOfPk(tenant1Conn.unwrap(PhoenixConnection.class), "NEW_PK", view1));
            tenant1Conn.createStatement().execute("SELECT * FROM " + view2);
            assertTrue(checkColumnPartOfPk(tenant1Conn.unwrap(PhoenixConnection.class), "NEW_PK", view2));
            tenant2Conn.createStatement().execute("SELECT * FROM " + view3);
            assertTrue(checkColumnPartOfPk(tenant2Conn.unwrap(PhoenixConnection.class), "NEW_PK", view3));
            globalConn.createStatement().execute("SELECT * FROM " + view4);
            assertTrue(checkColumnPartOfPk(globalConn.unwrap(PhoenixConnection.class), "NEW_PK", view4));
        } finally {
            // Best-effort close; failures here must not mask a test failure above.
            if (tenant1Conn != null) {
                try {
                    tenant1Conn.close();
                } catch (Throwable ignore) {}
            }
            if (tenant2Conn != null) {
                try {
                    tenant2Conn.close();
                } catch (Throwable ignore) {}
            }
        }
    }
private boolean checkColumnPartOfPk(PhoenixConnection conn, String columnName, String tableName) throws SQLException {
String normalizedTableName = SchemaUtil.normalizeIdentifier(tableName);
PTable table = conn.getTable(new PTableKey(conn.getTenantId(), normalizedTableName));
List<PColumn> pkCols = table.getPKColumns();
String normalizedColumnName = SchemaUtil.normalizeIdentifier(columnName);
for (PColumn pkCol : pkCols) {
if (pkCol.getName().getString().equals(normalizedColumnName)) {
return true;
}
}
return false;
}
private int getIndexOfPkColumn(PhoenixConnection conn, String columnName, String tableName) throws SQLException {
String normalizedTableName = SchemaUtil.normalizeIdentifier(tableName);
PTable table = conn.getTable(new PTableKey(conn.getTenantId(), normalizedTableName));
List<PColumn> pkCols = table.getPKColumns();
String normalizedColumnName = SchemaUtil.normalizeIdentifier(columnName);
int i = 0;
for (PColumn pkCol : pkCols) {
if (pkCol.getName().getString().equals(normalizedColumnName)) {
return i;
}
i++;
}
return -1;
}
/**
 * Verifies that adding PK (and non-PK) columns to a multi-tenant base table propagates the new
 * key columns to tenant-specific views (including a view on a view), to the indexes on those
 * views, and bumps each entity's table sequence number / max key sequence number accordingly.
 * Finally checks that a query filtering on a newly added PK column is served by the view index.
 */
@Test
public void testAddPKColumnToBaseTableWhoseViewsHaveIndices() throws Exception {
    String baseTable = "testAddPKColumnToBaseTableWhoseViewsHaveIndices";
    String view1 = "view1";
    String view2 = "view2";
    String view3 = "view3";
    String tenant1 = "tenant1";
    String tenant2 = "tenant2";
    String view2Index = view2 + "_idx";
    String view3Index = view3 + "_idx";
    /* baseTable(multi-tenant)
           /              \
    view1(tenant1)   view3(tenant2, index)
        /
    view2(tenant1, index)
    */
    try (Connection globalConn = DriverManager.getConnection(getUrl())) {
        // make sure that the tables are empty, but reachable
        globalConn
                .createStatement()
                .execute(
                        "CREATE TABLE "
                                + baseTable
                                + " (TENANT_ID VARCHAR NOT NULL, K1 varchar not null, V1 VARCHAR, V2 VARCHAR CONSTRAINT NAME_PK PRIMARY KEY(TENANT_ID, K1)) MULTI_TENANT = true ");
    }
    try (Connection viewConn = getTenantConnection(tenant1)) {
        // create tenant specific view for tenant1 - view1
        viewConn.createStatement().execute("CREATE VIEW " + view1 + " AS SELECT * FROM " + baseTable);
        PhoenixConnection phxConn = viewConn.unwrap(PhoenixConnection.class);
        assertEquals(0, getTableSequenceNumber(phxConn, view1));
        assertEquals(2, getMaxKeySequenceNumber(phxConn, view1));
        // create a view - view2 on view - view1
        viewConn.createStatement().execute("CREATE VIEW " + view2 + " AS SELECT * FROM " + view1);
        assertEquals(0, getTableSequenceNumber(phxConn, view2));
        assertEquals(2, getMaxKeySequenceNumber(phxConn, view2));
        // create an index on view2
        viewConn.createStatement().execute("CREATE INDEX " + view2Index + " ON " + view2 + " (v1) include (v2)");
        assertEquals(0, getTableSequenceNumber(phxConn, view2Index));
        assertEquals(4, getMaxKeySequenceNumber(phxConn, view2Index));
    }
    try (Connection viewConn = getTenantConnection(tenant2)) {
        // create tenant specific view for tenant2 - view3
        viewConn.createStatement().execute("CREATE VIEW " + view3 + " AS SELECT * FROM " + baseTable);
        PhoenixConnection phxConn = viewConn.unwrap(PhoenixConnection.class);
        assertEquals(0, getTableSequenceNumber(phxConn, view3));
        assertEquals(2, getMaxKeySequenceNumber(phxConn, view3));
        // create an index on view3
        viewConn.createStatement().execute("CREATE INDEX " + view3Index + " ON " + view3 + " (v1) include (v2)");
        assertEquals(0, getTableSequenceNumber(phxConn, view3Index));
        assertEquals(4, getMaxKeySequenceNumber(phxConn, view3Index));
    }
    // alter the base table by adding 1 non-pk and 2 pk columns
    try (Connection globalConn = DriverManager.getConnection(getUrl())) {
        globalConn.createStatement().execute("ALTER TABLE " + baseTable + " ADD v3 VARCHAR, k2 VARCHAR PRIMARY KEY, k3 VARCHAR PRIMARY KEY");
        assertEquals(4, getMaxKeySequenceNumber(globalConn.unwrap(PhoenixConnection.class), baseTable));
        // Upsert records in the base table (one row per tenant)
        String upsert = "UPSERT INTO " + baseTable + " (TENANT_ID, K1, K2, K3, V1, V2, V3) VALUES (?, ?, ?, ?, ?, ?, ?)";
        PreparedStatement stmt = globalConn.prepareStatement(upsert);
        stmt.setString(1, tenant1);
        stmt.setString(2, "K1");
        stmt.setString(3, "K2");
        stmt.setString(4, "K3");
        stmt.setString(5, "V1");
        stmt.setString(6, "V2");
        stmt.setString(7, "V3");
        stmt.executeUpdate();
        stmt.setString(1, tenant2);
        stmt.setString(2, "K11");
        stmt.setString(3, "K22");
        stmt.setString(4, "K33");
        stmt.setString(5, "V11");
        stmt.setString(6, "V22");
        stmt.setString(7, "V33");
        stmt.executeUpdate();
        globalConn.commit();
    }
    // Verify now that the sequence number of data table, indexes and views have changed.
    // Also verify that the newly added pk columns show up as pk columns of data table, indexes and views.
    try (Connection viewConn = getTenantConnection(tenant1)) {
        ResultSet rs = viewConn.createStatement().executeQuery("SELECT K2, K3, V3 FROM " + view1);
        PhoenixConnection phxConn = viewConn.unwrap(PhoenixConnection.class);
        assertEquals(2, getIndexOfPkColumn(phxConn, "k2", view1));
        assertEquals(3, getIndexOfPkColumn(phxConn, "k3", view1));
        assertEquals(1, getTableSequenceNumber(phxConn, view1));
        assertEquals(4, getMaxKeySequenceNumber(phxConn, view1));
        verifyNewColumns(rs, "K2", "K3", "V3");
        rs = viewConn.createStatement().executeQuery("SELECT K2, K3, V3 FROM " + view2);
        assertEquals(2, getIndexOfPkColumn(phxConn, "k2", view2));
        assertEquals(3, getIndexOfPkColumn(phxConn, "k3", view2));
        assertEquals(1, getTableSequenceNumber(phxConn, view2));
        assertEquals(4, getMaxKeySequenceNumber(phxConn, view2));
        verifyNewColumns(rs, "K2", "K3", "V3");
        // In the index, the new pk columns appear under their index column names, after the indexed/row-key columns.
        assertEquals(4, getIndexOfPkColumn(phxConn, IndexUtil.getIndexColumnName(null, "k2"), view2Index));
        assertEquals(5, getIndexOfPkColumn(phxConn, IndexUtil.getIndexColumnName(null, "k3"), view2Index));
        assertEquals(1, getTableSequenceNumber(phxConn, view2Index));
        assertEquals(6, getMaxKeySequenceNumber(phxConn, view2Index));
    }
    try (Connection viewConn = getTenantConnection(tenant2)) {
        ResultSet rs = viewConn.createStatement().executeQuery("SELECT K2, K3, V3 FROM " + view3);
        PhoenixConnection phxConn = viewConn.unwrap(PhoenixConnection.class);
        assertEquals(2, getIndexOfPkColumn(phxConn, "k2", view3));
        assertEquals(3, getIndexOfPkColumn(phxConn, "k3", view3));
        assertEquals(1, getTableSequenceNumber(phxConn, view3));
        verifyNewColumns(rs, "K22", "K33", "V33");
        assertEquals(4, getIndexOfPkColumn(phxConn, IndexUtil.getIndexColumnName(null, "k2"), view3Index));
        assertEquals(5, getIndexOfPkColumn(phxConn, IndexUtil.getIndexColumnName(null, "k3"), view3Index));
        assertEquals(1, getTableSequenceNumber(phxConn, view3Index));
        assertEquals(6, getMaxKeySequenceNumber(phxConn, view3Index));
    }
    // Verify that the index is actually being used when using newly added pk col
    try (Connection viewConn = getTenantConnection(tenant1)) {
        String upsert = "UPSERT INTO " + view2 + " (K1, K2, K3, V1, V2, V3) VALUES ('key1', 'key2', 'key3', 'value1', 'value2', 'value3')";
        viewConn.createStatement().executeUpdate(upsert);
        viewConn.commit();
        Statement stmt = viewConn.createStatement();
        String sql = "SELECT V2 FROM " + view2 + " WHERE V1 = 'value1' AND K3 = 'key3'";
        QueryPlan plan = stmt.unwrap(PhoenixStatement.class).optimizeQuery(sql);
        // The optimizer should pick the view index for a query filtering on the indexed column + new pk column.
        assertTrue(plan.getTableRef().getTable().getName().getString().equals(SchemaUtil.normalizeIdentifier(view2Index)));
        ResultSet rs = viewConn.createStatement().executeQuery(sql);
        verifyNewColumns(rs, "value2");
    }
}
/**
 * Verifies ALTER TABLE behavior on a base table with both a regular view and a diverged view
 * (a view that dropped one of the base table's columns):
 * <ul>
 *   <li>new pk columns are propagated to both views (and to the diverged view's index);</li>
 *   <li>new key-value columns are propagated only to the non-diverged view;</li>
 *   <li>re-adding a column that exists in the views updates ordinal positions only for the
 *       non-diverged view.</li>
 * </ul>
 */
@Test
public void testAddingPkAndKeyValueColumnsToBaseTableWithDivergedView() throws Exception {
    String baseTable = "testAlteringPkOfBaseTableWithDivergedView".toUpperCase();
    String view1 = "view1".toUpperCase();
    String divergedView = "divergedView".toUpperCase();
    String divergedViewIndex = divergedView + "_IDX";
    /* baseTable
          /       |
    view1(tenant1)  divergedView(tenant2)
    */
    try (Connection conn = DriverManager.getConnection(getUrl())) {
        String baseTableDDL = "CREATE TABLE " + baseTable + " (TENANT_ID VARCHAR NOT NULL, PK1 VARCHAR NOT NULL, V1 VARCHAR, V2 VARCHAR, V3 VARCHAR CONSTRAINT NAME_PK PRIMARY KEY(TENANT_ID, PK1)) MULTI_TENANT = true ";
        conn.createStatement().execute(baseTableDDL);
        try (Connection tenant1Conn = getTenantConnection("tenant1")) {
            String view1DDL = "CREATE VIEW " + view1 + " ( VIEW_COL1 DECIMAL(10,2), VIEW_COL2 CHAR(256)) AS SELECT * FROM " + baseTable;
            tenant1Conn.createStatement().execute(view1DDL);
        }
        try (Connection tenant2Conn = getTenantConnection("tenant2")) {
            String divergedViewDDL = "CREATE VIEW " + divergedView + " ( VIEW_COL1 DECIMAL(10,2), VIEW_COL2 CHAR(256)) AS SELECT * FROM " + baseTable;
            tenant2Conn.createStatement().execute(divergedViewDDL);
            // Drop column V2 from the view to have it diverge from the base table
            tenant2Conn.createStatement().execute("ALTER VIEW " + divergedView + " DROP COLUMN V2");
            // create an index on the diverged view
            String indexDDL = "CREATE INDEX " + divergedViewIndex + " ON " + divergedView + " (V1) include (V3)";
            tenant2Conn.createStatement().execute(indexDDL);
        }
        String alterBaseTable = "ALTER TABLE " + baseTable + " ADD KV VARCHAR, PK2 VARCHAR PRIMARY KEY";
        conn.createStatement().execute(alterBaseTable);
        // verify that the both columns were added to view1
        try (Connection tenant1Conn = getTenantConnection("tenant1")) {
            tenant1Conn.createStatement().execute("SELECT KV from " + view1);
            tenant1Conn.createStatement().execute("SELECT PK2 from " + view1);
        }
        // verify that only the primary key column PK2 was added to diverged view
        try (Connection tenant2Conn = getTenantConnection("tenant2")) {
            tenant2Conn.createStatement().execute("SELECT PK2 from " + divergedView);
            try {
                tenant2Conn.createStatement().execute("SELECT KV FROM " + divergedView);
                // Bug fix: without this fail() the test silently passed even if KV *was*
                // (incorrectly) added to the diverged view, because no exception was thrown.
                fail();
            } catch (SQLException e) {
                assertEquals(SQLExceptionCode.COLUMN_NOT_FOUND.getErrorCode(), e.getErrorCode());
            }
        }
        // Upsert records in diverged view. Verify that the PK column was added to the index on it.
        String upsert = "UPSERT INTO " + divergedView + " (PK1, PK2, V1, V3) VALUES ('PK1', 'PK2', 'V1', 'V3')";
        try (Connection viewConn = getTenantConnection("tenant2")) {
            viewConn.createStatement().executeUpdate(upsert);
            viewConn.commit();
            Statement stmt = viewConn.createStatement();
            String sql = "SELECT V3 FROM " + divergedView + " WHERE V1 = 'V1' AND PK2 = 'PK2'";
            QueryPlan plan = stmt.unwrap(PhoenixStatement.class).optimizeQuery(sql);
            // The optimizer should serve the query from the diverged view's index.
            assertTrue(plan.getTableRef().getTable().getName().getString().equals(SchemaUtil.normalizeIdentifier(divergedViewIndex)));
            ResultSet rs = viewConn.createStatement().executeQuery(sql);
            verifyNewColumns(rs, "V3");
        }
        // For non-diverged view, base table columns will be added at the same position as base table
        assertTableDefinition(conn, view1, PTableType.VIEW, baseTable, 1, 9, 7, "TENANT_ID", "PK1", "V1", "V2", "V3", "KV", "PK2", "VIEW_COL1", "VIEW_COL2");
        // For a diverged view, only base table's pk column will be added and that too at the end.
        assertTableDefinition(conn, divergedView, PTableType.VIEW, baseTable, 2, 7, DIVERGED_VIEW_BASE_COLUMN_COUNT, "TENANT_ID", "PK1", "V1", "V3", "VIEW_COL1", "VIEW_COL2", "PK2");
        // Add existing column VIEW_COL2 to the base table
        alterBaseTable = "ALTER TABLE " + baseTable + " ADD VIEW_COL2 CHAR(256)";
        conn.createStatement().execute(alterBaseTable);
        // For the non-diverged view, adding the column VIEW_COL2 will end up changing its ordinal position in the view.
        assertTableDefinition(conn, view1, PTableType.VIEW, baseTable, 2, 9, 8, "TENANT_ID", "PK1", "V1", "V2", "V3", "KV", "PK2", "VIEW_COL2", "VIEW_COL1");
        // For the diverged view, adding the column VIEW_COL2 will not change its ordinal position in the view. It also won't change the base column count or the sequence number
        assertTableDefinition(conn, divergedView, PTableType.VIEW, baseTable, 2, 7, DIVERGED_VIEW_BASE_COLUMN_COUNT, "TENANT_ID", "PK1", "V1", "V3", "VIEW_COL1", "VIEW_COL2", "PK2");
    }
}
/**
 * Verifies that adding a key-value column and a pk column to a multi-tenant base table
 * propagates both to an existing tenant view and updates sequence numbers / column counts.
 */
// NOTE(review): despite the "Salted" in the test name, the CREATE TABLE below specifies no
// SALT_BUCKETS, so the base table is not actually salted — confirm whether that was intended.
@Test
public void testAddColumnsToSaltedBaseTableWithViews() throws Exception {
    String baseTable = "testAddColumnsToSaltedBaseTableWithViews".toUpperCase();
    String view1 = "view1".toUpperCase();
    try (Connection conn = DriverManager.getConnection(getUrl())) {
        String baseTableDDL = "CREATE TABLE " + baseTable + " (TENANT_ID VARCHAR NOT NULL, PK1 VARCHAR NOT NULL, V1 VARCHAR, V2 VARCHAR, V3 VARCHAR CONSTRAINT NAME_PK PRIMARY KEY(TENANT_ID, PK1)) MULTI_TENANT = true ";
        conn.createStatement().execute(baseTableDDL);
        try (Connection tenant1Conn = getTenantConnection("tenant1")) {
            String view1DDL = "CREATE VIEW " + view1 + " ( VIEW_COL1 DECIMAL(10,2), VIEW_COL2 CHAR(256)) AS SELECT * FROM " + baseTable;
            tenant1Conn.createStatement().execute(view1DDL);
        }
        // Baseline definitions before ALTER.
        assertTableDefinition(conn, baseTable, PTableType.TABLE, null, 0, 5, BASE_TABLE_BASE_COLUMN_COUNT, "TENANT_ID", "PK1", "V1", "V2", "V3");
        assertTableDefinition(conn, view1, PTableType.VIEW, baseTable, 0, 7, 5, "TENANT_ID", "PK1", "V1", "V2", "V3", "VIEW_COL1", "VIEW_COL2");
        String alterBaseTable = "ALTER TABLE " + baseTable + " ADD KV VARCHAR, PK2 VARCHAR PRIMARY KEY";
        conn.createStatement().execute(alterBaseTable);
        // Sequence numbers bumped, new columns appear before the view-only columns.
        assertTableDefinition(conn, baseTable, PTableType.TABLE, null, 1, 7, BASE_TABLE_BASE_COLUMN_COUNT, "TENANT_ID", "PK1", "V1", "V2", "V3", "KV", "PK2");
        assertTableDefinition(conn, view1, PTableType.VIEW, baseTable, 1, 9, 7, "TENANT_ID", "PK1", "V1", "V2", "V3", "KV", "PK2", "VIEW_COL1", "VIEW_COL2");
        // verify that the both columns were added to view1
        try (Connection tenant1Conn = getTenantConnection("tenant1")) {
            tenant1Conn.createStatement().execute("SELECT KV from " + view1);
            tenant1Conn.createStatement().execute("SELECT PK2 from " + view1);
        }
    }
}
/**
 * Verifies that dropping a column from a multi-tenant base table propagates the drop to an
 * existing tenant view (definition, sequence number and column count) and that the dropped
 * column is no longer queryable through the view.
 */
// NOTE(review): despite the "Salted" in the test name, the CREATE TABLE below specifies no
// SALT_BUCKETS, so the base table is not actually salted — confirm whether that was intended.
@Test
public void testDropColumnsFromSaltedBaseTableWithViews() throws Exception {
    String baseTable = "testDropColumnsFromSaltedBaseTableWithViews".toUpperCase();
    String view1 = "view1".toUpperCase();
    try (Connection conn = DriverManager.getConnection(getUrl())) {
        String baseTableDDL = "CREATE TABLE " + baseTable + " (TENANT_ID VARCHAR NOT NULL, PK1 VARCHAR NOT NULL, V1 VARCHAR, V2 VARCHAR, V3 VARCHAR CONSTRAINT NAME_PK PRIMARY KEY(TENANT_ID, PK1)) MULTI_TENANT = true ";
        conn.createStatement().execute(baseTableDDL);
        try (Connection tenant1Conn = getTenantConnection("tenant1")) {
            String view1DDL = "CREATE VIEW " + view1 + " ( VIEW_COL1 DECIMAL(10,2), VIEW_COL2 CHAR(256)) AS SELECT * FROM " + baseTable;
            tenant1Conn.createStatement().execute(view1DDL);
        }
        assertTableDefinition(conn, baseTable, PTableType.TABLE, null, 0, 5, BASE_TABLE_BASE_COLUMN_COUNT, "TENANT_ID", "PK1", "V1", "V2", "V3");
        assertTableDefinition(conn, view1, PTableType.VIEW, baseTable, 0, 7, 5, "TENANT_ID", "PK1", "V1", "V2", "V3", "VIEW_COL1", "VIEW_COL2");
        String alterBaseTable = "ALTER TABLE " + baseTable + " DROP COLUMN V2";
        conn.createStatement().execute(alterBaseTable);
        assertTableDefinition(conn, baseTable, PTableType.TABLE, null, 1, 4, BASE_TABLE_BASE_COLUMN_COUNT, "TENANT_ID", "PK1", "V1", "V3");
        assertTableDefinition(conn, view1, PTableType.VIEW, baseTable, 1, 6, 4, "TENANT_ID", "PK1", "V1", "V3", "VIEW_COL1", "VIEW_COL2");
        // verify that the dropped column isn't visible through the view.
        // Bug fix: this block previously selected KV and PK2 — columns that never existed in
        // this test's schema — so the COLUMN_NOT_FOUND assertions passed even if the drop of V2
        // had not propagated. The column actually dropped above is V2, so that is what we check.
        try (Connection tenant1Conn = getTenantConnection("tenant1")) {
            try {
                tenant1Conn.createStatement().execute("SELECT V2 from " + view1);
                fail();
            } catch (SQLException e) {
                assertEquals(SQLExceptionCode.COLUMN_NOT_FOUND.getErrorCode(), e.getErrorCode());
            }
        }
    }
}
/**
 * Verifies that ALTER VIEW only modifies the underlying HBase table descriptor when it has to:
 * adding a column in an existing column family leaves the descriptor untouched, while adding a
 * column in a brand-new column family causes that family to be created on the base HTable.
 */
@Test
public void testAlteringViewConditionallyModifiesHTableMetadata() throws Exception {
    String baseTable = "testAlteringViewConditionallyModifiesBaseTable".toUpperCase();
    String view1 = "view1".toUpperCase();
    try (Connection conn = DriverManager.getConnection(getUrl())) {
        String baseTableDDL = "CREATE TABLE " + baseTable + " (TENANT_ID VARCHAR NOT NULL, PK1 VARCHAR NOT NULL, V1 VARCHAR, V2 VARCHAR, V3 VARCHAR CONSTRAINT NAME_PK PRIMARY KEY(TENANT_ID, PK1)) MULTI_TENANT = true ";
        conn.createStatement().execute(baseTableDDL);
        // Snapshot of the HTable descriptor right after creation, used as the comparison baseline.
        HTableDescriptor tableDesc1 = conn.unwrap(PhoenixConnection.class).getQueryServices().getAdmin().getTableDescriptor(Bytes.toBytes(baseTable));
        try (Connection tenant1Conn = getTenantConnection("tenant1")) {
            String view1DDL = "CREATE VIEW " + view1 + " ( VIEW_COL1 DECIMAL(10,2), VIEW_COL2 CHAR(256)) AS SELECT * FROM " + baseTable;
            tenant1Conn.createStatement().execute(view1DDL);
            // This should not modify the base table
            String alterView = "ALTER VIEW " + view1 + " ADD NEWCOL1 VARCHAR";
            tenant1Conn.createStatement().execute(alterView);
            HTableDescriptor tableDesc2 = tenant1Conn.unwrap(PhoenixConnection.class).getQueryServices().getAdmin().getTableDescriptor(Bytes.toBytes(baseTable));
            assertEquals(tableDesc1, tableDesc2);
            // Add a new column family that doesn't already exist in the base table
            alterView = "ALTER VIEW " + view1 + " ADD CF.NEWCOL2 VARCHAR";
            tenant1Conn.createStatement().execute(alterView);
            // Verify that the column family now shows up in the base table descriptor
            tableDesc2 = tenant1Conn.unwrap(PhoenixConnection.class).getQueryServices().getAdmin().getTableDescriptor(Bytes.toBytes(baseTable));
            assertFalse(tableDesc2.equals(tableDesc1));
            assertNotNull(tableDesc2.getFamily(Bytes.toBytes("CF")));
            // Add a column with an existing column family. This shouldn't modify the base table.
            alterView = "ALTER VIEW " + view1 + " ADD CF.NEWCOL3 VARCHAR";
            tenant1Conn.createStatement().execute(alterView);
            HTableDescriptor tableDesc3 = tenant1Conn.unwrap(PhoenixConnection.class).getQueryServices().getAdmin().getTableDescriptor(Bytes.toBytes(baseTable));
            assertTrue(tableDesc3.equals(tableDesc2));
            assertNotNull(tableDesc3.getFamily(Bytes.toBytes("CF")));
        }
    }
}
/**
 * Verifies that a tenant connection's metadata cache is invalidated after a column is added to
 * the base table through a different (global) connection: the tenant connection must be able to
 * select the new column from both its view and the base table without reconnecting.
 */
@Test
public void testCacheInvalidatedAfterAddingColumnToBaseTableWithViews() throws Exception {
    String baseTable = "testCacheInvalidatedAfterAddingColumnToBaseTableWithViews";
    String viewName = baseTable + "_view";
    String tenantId = "tenantId";
    try (Connection globalConn = DriverManager.getConnection(getUrl())) {
        String tableDDL = "CREATE TABLE " + baseTable + " (TENANT_ID VARCHAR NOT NULL, PK1 VARCHAR NOT NULL, V1 VARCHAR CONSTRAINT NAME_PK PRIMARY KEY(TENANT_ID, PK1)) MULTI_TENANT = true" ;
        globalConn.createStatement().execute(tableDDL);
        Properties tenantProps = new Properties();
        tenantProps.setProperty(PhoenixRuntime.TENANT_ID_ATTRIB, tenantId);
        // create a tenant specific view
        try (Connection tenantConn = DriverManager.getConnection(getUrl(), tenantProps)) {
            String viewDDL = "CREATE VIEW " + viewName + " AS SELECT * FROM " + baseTable;
            tenantConn.createStatement().execute(viewDDL);
            // Add a column to the base table using global connection
            globalConn.createStatement().execute("ALTER TABLE " + baseTable + " ADD NEW_COL VARCHAR");
            // Check now whether the tenant connection can see the column that was added;
            // either statement would throw ColumnNotFoundException on a stale cache.
            tenantConn.createStatement().execute("SELECT NEW_COL FROM " + viewName);
            tenantConn.createStatement().execute("SELECT NEW_COL FROM " + baseTable);
        }
    }
}
/**
 * Verifies that a tenant connection's metadata cache is invalidated after a column is dropped
 * from the base table through a different (global) connection: selecting the dropped column via
 * the tenant connection must fail for both the view and the base table.
 */
@Test
public void testCacheInvalidatedAfterDroppingColumnFromBaseTableWithViews() throws Exception {
    String baseTable = "testCacheInvalidatedAfterDroppingColumnFromBaseTableWithViews";
    String viewName = baseTable + "_view";
    String tenantId = "tenantId";
    try (Connection globalConn = DriverManager.getConnection(getUrl())) {
        String tableDDL =
                "CREATE TABLE "
                        + baseTable
                        + " (TENANT_ID VARCHAR NOT NULL, PK1 VARCHAR NOT NULL, V1 VARCHAR CONSTRAINT NAME_PK PRIMARY KEY(TENANT_ID, PK1)) MULTI_TENANT = true" ;
        globalConn.createStatement().execute(tableDDL);
        Properties tenantProps = new Properties();
        tenantProps.setProperty(PhoenixRuntime.TENANT_ID_ATTRIB, tenantId);
        // create a tenant specific view
        try (Connection tenantConn = DriverManager.getConnection(getUrl(), tenantProps)) {
            String viewDDL = "CREATE VIEW " + viewName + " AS SELECT * FROM " + baseTable;
            tenantConn.createStatement().execute(viewDDL);
            // Drop column V1 from the base table using the global connection.
            // (The original comment here said "Add a column", which did not match the statement.)
            globalConn.createStatement()
                    .execute("ALTER TABLE " + baseTable + " DROP COLUMN V1");
            // Check now whether the tenant connection can see the column that was dropped
            try {
                tenantConn.createStatement().execute("SELECT V1 FROM " + viewName);
                fail();
            } catch (ColumnNotFoundException e) {
                // expected: the dropped column must not be visible through the view
            }
            try {
                tenantConn.createStatement().execute("SELECT V1 FROM " + baseTable);
                fail();
            } catch (ColumnNotFoundException e) {
                // expected: the dropped column must not be visible on the base table either
            }
        }
    }
}
/**
 * Asserts that SYSTEM.CATALOG and JDBC metadata agree with the expected definition of a table
 * or view.
 *
 * Checks, in order:
 * 1) the SYSTEM.CATALOG header row's BASE_COLUMN_COUNT, COLUMN_COUNT and TABLE_SEQ_NUM;
 * 2) each expected column's name and 1-based ordinal position via DatabaseMetaData.getColumns;
 * 3) when a parent table is given, that every base-table column is present in the view at the
 *    same ordinal position, and that the per-column metadata values match (except TABLE_NAME,
 *    which is expected to differ between parent and view).
 *
 * @param columnName expected column names in ordinal order; null entries are skipped
 */
public static void assertTableDefinition(Connection conn, String tableName, PTableType tableType, String parentTableName, int sequenceNumber, int columnCount, int baseColumnCount, String... columnName) throws Exception {
    PreparedStatement p = conn.prepareStatement("SELECT * FROM SYSTEM.CATALOG WHERE TABLE_NAME=? AND TABLE_TYPE=?");
    p.setString(1, tableName);
    p.setString(2, tableType.getSerializedValue());
    ResultSet rs = p.executeQuery();
    assertTrue(rs.next());
    assertEquals(AlterTableWithViewsIT.getSystemCatalogEntriesForTable(conn, tableName, "Mismatch in BaseColumnCount"), baseColumnCount, rs.getInt("BASE_COLUMN_COUNT"));
    assertEquals(AlterTableWithViewsIT.getSystemCatalogEntriesForTable(conn, tableName, "Mismatch in columnCount"), columnCount, rs.getInt("COLUMN_COUNT"));
    assertEquals(AlterTableWithViewsIT.getSystemCatalogEntriesForTable(conn, tableName, "Mismatch in sequenceNumber"), sequenceNumber, rs.getInt("TABLE_SEQ_NUM"));
    rs.close();
    ResultSet parentTableColumnsRs = null;
    if (parentTableName != null) {
        parentTableColumnsRs = conn.getMetaData().getColumns(null, null, parentTableName, null);
        // Position the parent cursor on its first column row.
        // NOTE(review): the return value of next() is ignored here — a parent with no column
        // rows would only surface later via the isAfterLast() guard below. Confirm intended.
        parentTableColumnsRs.next();
    }
    ResultSet viewColumnsRs = conn.getMetaData().getColumns(null, null, tableName, null);
    for (int i = 0; i < columnName.length; i++) {
        if (columnName[i] != null) {
            assertTrue(viewColumnsRs.next());
            assertEquals(AlterTableWithViewsIT.getSystemCatalogEntriesForTable(conn, tableName, "Mismatch in columnName: i=" + i), columnName[i], viewColumnsRs.getString(PhoenixDatabaseMetaData.COLUMN_NAME));
            int viewColOrdinalPos = viewColumnsRs.getInt(PhoenixDatabaseMetaData.ORDINAL_POSITION);
            // Ordinal positions reported by getColumns are 1-based.
            assertEquals(AlterTableWithViewsIT.getSystemCatalogEntriesForTable(conn, tableName, "Mismatch in ordinalPosition: i=" + i), i+1, viewColOrdinalPos);
            // validate that all the columns in the base table are present in the view
            if (parentTableColumnsRs != null && !parentTableColumnsRs.isAfterLast()) {
                ResultSetMetaData parentTableColumnsMetadata = parentTableColumnsRs.getMetaData();
                assertEquals(parentTableColumnsMetadata.getColumnCount(), viewColumnsRs.getMetaData().getColumnCount());
                int parentTableColOrdinalRs = parentTableColumnsRs.getInt(PhoenixDatabaseMetaData.ORDINAL_POSITION);
                assertEquals(AlterTableWithViewsIT.getSystemCatalogEntriesForTable(conn, tableName, "Mismatch in ordinalPosition of view and base table for i=" + i), parentTableColOrdinalRs, viewColOrdinalPos);
                for (int columnIndex = 1; columnIndex < parentTableColumnsMetadata.getColumnCount(); columnIndex++) {
                    String viewColumnValue = viewColumnsRs.getString(columnIndex);
                    String parentTableColumnValue = parentTableColumnsRs.getString(columnIndex);
                    if (!Objects.equal(viewColumnValue, parentTableColumnValue)) {
                        // The only metadata field allowed to differ is TABLE_NAME.
                        if (parentTableColumnsMetadata.getColumnName(columnIndex).equals(PhoenixDatabaseMetaData.TABLE_NAME)) {
                            assertEquals(parentTableName, parentTableColumnValue);
                            assertEquals(tableName, viewColumnValue);
                        }
                    }
                }
                parentTableColumnsRs.next();
            }
        }
    }
    // The view must not have more columns than expected.
    assertFalse(AlterTableWithViewsIT.getSystemCatalogEntriesForTable(conn, tableName, ""), viewColumnsRs.next());
}
}
| apache-2.0 |
android-ia/platform_tools_idea | platform/external-system-impl/src/com/intellij/openapi/externalSystem/service/project/ProjectStructureHelper.java | 9454 | package com.intellij.openapi.externalSystem.service.project;
import com.intellij.openapi.externalSystem.model.DataNode;
import com.intellij.openapi.externalSystem.model.ProjectKeys;
import com.intellij.openapi.externalSystem.model.project.ContentRootData;
import com.intellij.openapi.externalSystem.model.project.LibraryData;
import com.intellij.openapi.externalSystem.model.project.ModuleData;
import com.intellij.openapi.externalSystem.model.project.ModuleDependencyData;
import com.intellij.openapi.externalSystem.util.ArtifactInfo;
import com.intellij.openapi.externalSystem.util.ExternalSystemApiUtil;
import com.intellij.openapi.module.Module;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.roots.*;
import com.intellij.openapi.roots.impl.ModuleLibraryOrderEntryImpl;
import com.intellij.openapi.roots.libraries.Library;
import com.intellij.openapi.roots.libraries.LibraryTable;
import com.intellij.openapi.vfs.VirtualFile;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
/**
* Thread-safe.
*
* @author Denis Zhdanov
* @since 2/6/12 3:28 PM
*/
/**
 * Helper for mapping external-system (e.g. gradle) project entities to their IDE-side
 * counterparts - modules, content roots, libraries and order entries. All lookups return
 * {@code null} when no matching IDE entity exists.
 * <p/>
 * Thread-safe.
 *
 * @author Denis Zhdanov
 * @since 2/6/12 3:28 PM
 */
public class ProjectStructureHelper {

  @NotNull private final PlatformFacade myFacade;
  // NOTE(review): not referenced by any method in this class as written; kept because it is
  // part of the constructor contract — confirm whether it is still needed.
  @NotNull private final ExternalLibraryPathTypeMapper myLibraryPathTypeMapper;

  public ProjectStructureHelper(@NotNull PlatformFacade facade, @NotNull ExternalLibraryPathTypeMapper mapper) {
    myFacade = facade;
    myLibraryPathTypeMapper = mapper;
  }

  /**
   * @return ide module whose name matches the given external module data, if any
   */
  @Nullable
  public Module findIdeModule(@NotNull ModuleData module, @NotNull Project ideProject) {
    return findIdeModule(module.getName(), ideProject);
  }

  /**
   * @return ide module with the given name within the given project, if any
   */
  @Nullable
  public Module findIdeModule(@NotNull String ideModuleName, @NotNull Project ideProject) {
    for (Module module : myFacade.getModules(ideProject)) {
      if (ideModuleName.equals(module.getName())) {
        return module;
      }
    }
    return null;
  }

  /**
   * @return ide content root whose file path matches the root path of the given content root
   *         node (resolved through the node's owning module), if any
   */
  @Nullable
  public ModuleAwareContentRoot findIdeContentRoot(@NotNull DataNode<ContentRootData> node, @NotNull Project ideProject) {
    ModuleData moduleData = node.getData(ProjectKeys.MODULE);
    if (moduleData == null) {
      return null;
    }
    final Module module = findIdeModule(moduleData.getName(), ideProject);
    if (module == null) {
      return null;
    }
    for (ModuleAwareContentRoot contentRoot : myFacade.getContentRoots(module)) {
      final VirtualFile file = contentRoot.getFile();
      if (node.getData().getRootPath().equals(file.getPath())) {
        return contentRoot;
      }
    }
    return null;
  }

  /**
   * @return project-level ide library whose name matches the given external library data, if any
   */
  @Nullable
  public Library findIdeLibrary(@NotNull final LibraryData library, @NotNull Project ideProject) {
    return findIdeLibrary(library.getName(), ideProject);
  }

  /**
   * Gradle library names follow the following pattern: {@code '[base library name]-[library-version]'}.
   * <p/>
   * This method serves as an utility which tries to find a library by its given base name.
   *
   * @param baseName   base name of the target library
   * @param ideProject target ide project
   * @return target library for the given base name if there is one and only one library for it;
   *         <code>null</code> otherwise (if there are no libraries or more than one library for the given base name)
   */
  @Nullable
  public Library findIdeLibraryByBaseName(@NotNull String baseName, @NotNull Project ideProject) {
    final LibraryTable libraryTable = myFacade.getProjectLibraryTable(ideProject);
    Library result = null;
    for (Library library : libraryTable.getLibraries()) {
      ArtifactInfo info = ExternalSystemApiUtil.parseArtifactInfo(ExternalSystemApiUtil.getLibraryName(library));
      if (info == null || !baseName.equals(info.getName())) {
        continue;
      }
      if (result != null) {
        // More than one library matches the base name — the lookup is ambiguous.
        return null;
      }
      result = library;
    }
    return result;
  }

  /**
   * @return project-level ide library with exactly the given name, if any
   */
  @Nullable
  public Library findIdeLibrary(@NotNull String libraryName, @NotNull Project ideProject) {
    final LibraryTable libraryTable = myFacade.getProjectLibraryTable(ideProject);
    for (Library ideLibrary : libraryTable.getLibraries()) {
      if (libraryName.equals(ExternalSystemApiUtil.getLibraryName(ideLibrary))) {
        return ideLibrary;
      }
    }
    return null;
  }

  /**
   * @return project-level ide library with the given name which contains the given jar under
   *         the given root type; {@code null} if no such library/jar combination exists
   */
  @Nullable
  public Library findIdeLibrary(@NotNull String libraryName,
                                @NotNull OrderRootType jarType,
                                @NotNull String jarPath,
                                @NotNull Project ideProject)
  {
    Library library = findIdeLibrary(libraryName, ideProject);
    if (library == null) {
      return null;
    }
    for (VirtualFile file : library.getFiles(jarType)) {
      if (jarPath.equals(ExternalSystemApiUtil.getLocalFileSystemPath(file))) {
        return library;
      }
    }
    return null;
  }

  /**
   * @return library order entry with the given library name among the order entries of the
   *         named module, if any
   */
  @Nullable
  public LibraryOrderEntry findIdeLibraryDependency(@NotNull final String moduleName,
                                                    @NotNull final String libraryName,
                                                    @NotNull Project ideProject)
  {
    final Module ideModule = findIdeModule(moduleName, ideProject);
    if (ideModule == null) {
      return null;
    }
    return acceptOverOrderEntries(ideModule, new RootPolicy<LibraryOrderEntry>() {
      @Override
      public LibraryOrderEntry visitLibraryOrderEntry(LibraryOrderEntry ideDependency, LibraryOrderEntry value) {
        if (libraryName.equals(ideDependency.getLibraryName())) {
          return ideDependency;
        }
        return value;
      }
    });
  }

  /**
   * Same as {@link #findIdeLibraryDependency(String, String, Project)} but matches only
   * module-local library entries.
   */
  @Nullable
  public ModuleLibraryOrderEntryImpl findIdeModuleLocalLibraryDependency(@NotNull final String moduleName,
                                                                         @NotNull final String libraryName,
                                                                         @NotNull Project ideProject)
  {
    final Module ideModule = findIdeModule(moduleName, ideProject);
    if (ideModule == null) {
      return null;
    }
    return acceptOverOrderEntries(ideModule, new RootPolicy<ModuleLibraryOrderEntryImpl>() {
      @Override
      public ModuleLibraryOrderEntryImpl visitLibraryOrderEntry(LibraryOrderEntry ideDependency, ModuleLibraryOrderEntryImpl value) {
        Library library = ideDependency.getLibrary();
        if (library == null) {
          return value;
        }
        if (ideDependency instanceof ModuleLibraryOrderEntryImpl && libraryName.equals(ExternalSystemApiUtil.getLibraryName(library))) {
          return (ModuleLibraryOrderEntryImpl)ideDependency;
        }
        return value;
      }
    });
  }

  /**
   * @return library order entry with the given library name within the given modifiable model,
   *         if any
   */
  @SuppressWarnings("MethodMayBeStatic")
  @Nullable
  public LibraryOrderEntry findIdeLibraryDependency(@NotNull final String libraryName,
                                                    @NotNull ModifiableRootModel model)
  {
    for (OrderEntry entry : model.getOrderEntries()) {
      if (entry instanceof LibraryOrderEntry) {
        LibraryOrderEntry candidate = (LibraryOrderEntry)entry;
        if (libraryName.equals(candidate.getLibraryName())) {
          return candidate;
        }
      }
    }
    return null;
  }

  /**
   * @return module order entry matching the owner/target modules of the given dependency data,
   *         if any
   */
  @Nullable
  public ModuleOrderEntry findIdeModuleDependency(@NotNull final ModuleDependencyData gradleDependency, @NotNull Project ideProject) {
    return findIdeModuleDependency(gradleDependency.getOwnerModule().getName(), gradleDependency.getTarget().getName(), ideProject);
  }

  /**
   * @return module order entry of the owner module which points at the named dependency module,
   *         if any
   */
  @Nullable
  public ModuleOrderEntry findIdeModuleDependency(@NotNull final String ownerModuleName,
                                                  @NotNull final String dependencyModuleName,
                                                  @NotNull Project ideProject)
  {
    final Module ideOwnerModule = findIdeModule(ownerModuleName, ideProject);
    if (ideOwnerModule == null) {
      return null;
    }
    return acceptOverOrderEntries(ideOwnerModule, new RootPolicy<ModuleOrderEntry>() {
      @Override
      public ModuleOrderEntry visitModuleOrderEntry(ModuleOrderEntry ideDependency, ModuleOrderEntry value) {
        if (dependencyModuleName.equals(ideDependency.getModuleName())) {
          return ideDependency;
        }
        return value;
      }
    });
  }

  /**
   * @return module order entry matching the given dependency's name within the given modifiable
   *         model, if any
   */
  @SuppressWarnings("MethodMayBeStatic")
  @Nullable
  public ModuleOrderEntry findIdeModuleDependency(@NotNull ModuleDependencyData dependency, @NotNull ModifiableRootModel model) {
    for (OrderEntry entry : model.getOrderEntries()) {
      if (entry instanceof ModuleOrderEntry) {
        ModuleOrderEntry candidate = (ModuleOrderEntry)entry;
        if (dependency.getName().equals(candidate.getModuleName())) {
          return candidate;
        }
      }
    }
    return null;
  }

  /**
   * Runs the given {@link RootPolicy} visitor over the module's order entries and returns the
   * first non-null result, or {@code null} when no entry matched. Extracted to remove the loop
   * duplicated across the three order-entry lookup methods above.
   */
  @Nullable
  private <T> T acceptOverOrderEntries(@NotNull Module module, @NotNull RootPolicy<T> visitor) {
    for (OrderEntry entry : myFacade.getOrderEntries(module)) {
      final T result = entry.accept(visitor, null);
      if (result != null) {
        return result;
      }
    }
    return null;
  }
}
| apache-2.0 |
android-ia/platform_tools_idea | java/java-indexing-impl/src/com/intellij/psi/impl/search/JavaSourceFilterScope.java | 2412 | /*
* Copyright 2000-2009 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* @author max
*/
package com.intellij.psi.impl.search;
import com.intellij.ide.highlighter.JavaClassFileType;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.roots.JdkOrderEntry;
import com.intellij.openapi.roots.OrderEntry;
import com.intellij.openapi.roots.ProjectFileIndex;
import com.intellij.openapi.roots.ProjectRootManager;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.psi.SdkResolveScopeProvider;
import com.intellij.psi.search.DelegatingGlobalSearchScope;
import com.intellij.psi.search.GlobalSearchScope;
import org.jetbrains.annotations.NotNull;
public class JavaSourceFilterScope extends DelegatingGlobalSearchScope {
// Project file index used to classify files (library classes vs. source content vs. SDK roots).
private final ProjectFileIndex myIndex;

public JavaSourceFilterScope(@NotNull final GlobalSearchScope delegate) {
    super(delegate);
    // NOTE(review): getProject() may be null for scopes without an associated project, which
    // would NPE here — confirm that callers only construct this over project scopes.
    myIndex = ProjectRootManager.getInstance(getProject()).getFileIndex();
}
/**
 * Restricts the delegate scope to files relevant for Java source-level resolution:
 * compiled class files only when they belong to library classes, files under source content,
 * and otherwise files covered by an SDK-provided resolve scope for one of the file's
 * JDK order entries.
 */
@Override
public boolean contains(final VirtualFile file) {
    // Anything rejected by the underlying scope is rejected outright.
    if (!super.contains(file)) {
        return false;
    }

    // Compiled .class files count only when they live under library classes.
    if (file.getFileType() == JavaClassFileType.INSTANCE) {
        return myIndex.isInLibraryClasses(file);
    }

    if (myIndex.isInSourceContent(file)) {
        return true;
    }

    final Project project = getProject();
    if (project == null) {
        return false;
    }

    // Fall back to SDK-specific resolve scopes for files attached via JDK order entries.
    for (OrderEntry orderEntry : myIndex.getOrderEntriesForFile(file)) {
        if (!(orderEntry instanceof JdkOrderEntry)) {
            continue;
        }
        final JdkOrderEntry jdkEntry = (JdkOrderEntry)orderEntry;
        for (SdkResolveScopeProvider provider : SdkResolveScopeProvider.EP_NAME.getExtensions()) {
            final GlobalSearchScope sdkScope = provider.getScope(project, jdkEntry);
            if (sdkScope != null && sdkScope.contains(file)) {
                return true;
            }
        }
    }
    return false;
}
} | apache-2.0 |
palaniyappanBala/MobFox-Android-SDK | src/main/java/com/adsdk/sdk/nativeformats/NativeFormat.java | 6782 | package com.adsdk.sdk.nativeformats;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.lang.Thread.UncaughtExceptionHandler;
import java.nio.charset.Charset;
import org.apache.http.HttpEntity;
import org.apache.http.HttpResponse;
import org.apache.http.StatusLine;
import org.apache.http.client.methods.HttpGet;
import android.content.Context;
import android.net.http.AndroidHttpClient;
import android.os.Build;
import android.os.Handler;
import android.view.ViewGroup;
import com.adsdk.sdk.Log;
import com.adsdk.sdk.RequestException;
import com.adsdk.sdk.Util;
import com.adsdk.sdk.nativeformats.creative.Creative;
import com.adsdk.sdk.nativeformats.creative.CreativesManager;
/**
* Created by itamar on 16/03/15.
*/
public class NativeFormat {
// Endpoint the native-format ad request is sent to.
private static final String BASE_URL = "http://my.mobfox.com/request.php";
// NOTE(review): not assigned or read in the code visible here — confirm it is still needed.
private Handler handler;
// MobFox publication (publisher) id used for requests and creative lookup.
String publicationId;
// Shared creatives manager, obtained per publication id in the constructor.
CreativesManager creative_manager;
// Requested ad size in pixels; the width/height ratio selects the creative type below.
int width;
int height;
Context ctx;
// Creative type selected when height/width ratio >= 0.5.
final static String TYPE_BLOCK = "block";
// Creative type selected for wide, flat placements (ratio < 0.5).
final static String TYPE_STRIPE = "stripe";
// public void WriteTemp(String data) {
//
// FileOutputStream fop = null;
//
// try {
//
// File temp = File.createTempFile("creative", ".html");
// fop = new FileOutputStream(temp);
//
// fop.write(data.getBytes(Charset.forName("UTF-8")));
//
// android.util.Log.d("FilePath", temp.getAbsolutePath());
// android.util.Log.d("FileData", data);
//
// } catch(IOException e) {
//
// e.printStackTrace();
//
// }
// }
public interface Listener {
public void onSuccess(String template, String data);
public void onError(Exception e);
}
NativeFormat(Context ctx, int width, int height, String publicationId) {
this.ctx = ctx;
this.width = width;
this.height = height;
this.publicationId = publicationId;
this.creative_manager = CreativesManager.getInstance(this.ctx,publicationId);
}
// ---------------------------------------------------------
public void loadAd(String webviewUserAgent, final Listener listener) {
float ratio = height / width;
String type = NativeFormat.TYPE_BLOCK;
if ( ratio < 0.5 ) {
type = NativeFormat.TYPE_STRIPE;
}
if(Build.FINGERPRINT.startsWith("generic")){
webviewUserAgent = "";
}
final Creative creative = creative_manager.getCreative(type,webviewUserAgent);
final NativeFormatRequest request = new NativeFormatRequest();
request.setRequestUrl(BASE_URL);
request.setPublisherId(this.publicationId); // TODO: check if correctly set
String ipAddress = Utils.getIPAddress(); //TODO: can we remove it? Other requests don't send IP
if (ipAddress.indexOf("10.") == 0 || ipAddress.length() == 0) {
ipAddress = "2.122.29.194";
}
request.ip = ipAddress;
// request.add("o_androidid", Settings.Secure.getString(getContext().getContentResolver(), Settings.Secure.ANDROID_ID)); //TODO: we cannot use this ID anymore (only Google Advertising ID)
// params.add("o_andadvid", "c86f7529-33e2-4346-be0d-777ac53be320");//AdvertisingIdClient.getAdvertisingIdInfo(this.getContext()).getId());
request.setAndroidAdId(Util.getAndroidAdId());
request.setAdDoNotTrack(Util.hasAdDoNotTrack());
request.setUserAgent(Util.getDefaultUserAgentString(ctx));
request.setUserAgent2(Util.buildUserAgent());
request.setTemplateName(creative.getName());
Log.d("starting build");
Log.d("native req: "+request.toUri());
handler = new Handler();
Thread requestThread = new Thread(new Runnable() {
@Override
public void run() {
AndroidHttpClient client = null;
try {
client = AndroidHttpClient.newInstance(System.getProperty("http.agent"));
final String url = request.toString();
HttpGet request = new HttpGet(url);
request.setHeader("User-Agent", System.getProperty("http.agent"));
HttpResponse response = client.execute(request);
Log.v("sent request");
StatusLine statusLine = response.getStatusLine();
int statusCode = statusLine.getStatusCode();
if (statusCode == 200) {
Log.v("start build response");
StringBuilder builder = new StringBuilder();
HttpEntity entity = response.getEntity();
InputStream content = entity.getContent();
BufferedReader reader = new BufferedReader(new InputStreamReader(content));
String line;
while ((line = reader.readLine()) != null) {
builder.append(line + "\n");
}
final String data = builder.toString();
android.util.Log.d("builder.toString()", builder.toString());
Log.v("build got data");
if (data.length() == 0) {
handler.post(new Runnable() {
@Override
public void run() {
listener.onError(new RequestException("empty response from: " + url));
}
});
return;
}
Log.v("builder: "+data);
handler.post(new Runnable() {
@Override
public void run() {
listener.onSuccess(creative.getTemplate(), data);
}
});
} else {
handler.post(new Runnable() {
@Override
public void run() {
listener.onError(new RequestException("request failed: " + url));
}
});
return;
}
} catch (final Exception e) {
handler.post(new Runnable() {
@Override
public void run() {
listener.onError(e);
}
});
} finally {
if (client != null) {
client.close();
}
}
}
});
requestThread.setUncaughtExceptionHandler(new UncaughtExceptionHandler() {
@Override
public void uncaughtException(Thread thread, Throwable ex) {
listener.onError(new Exception(ex));
}
});
requestThread.start();
};
// ---------------------------------------------------------
}
| apache-2.0 |
papicella/snappy-store | gemfirexd/tools/src/testing/java/org/apache/derbyTesting/functionTests/tests/derbynet/sysinfo_withproperties.java | 1456 | /*
Derby - Class org.apache.derbyTesting.functionTests.tests.derbynet.sysinfo_withproperties
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package org.apache.derbyTesting.functionTests.tests.derbynet;
/**
 * Exercises the sysinfo command when the server has been started with drda
 * properties (the derby properties used by the harness are added via
 * sysinfo_withproperties_gemfirexd.properties). All of the actual sysinfo
 * invocation logic lives in {@link sysinfo}; this class merely drives it.
 *
 * @see sysinfo#test
 */
public class sysinfo_withproperties
{
    /**
     * Entry point: delegates straight to the shared sysinfo test driver.
     */
    public static void main (String argv[]) throws Exception
    {
        sysinfo.test(argv);
    }
}
| apache-2.0 |
kovalcikr/ali-idea-plugin | ali-plugin-main/src/test/java/com/hp/alm/ali/idea/services/AttachmentServiceTest.java | 9833 | /*
* Copyright 2013 Hewlett-Packard Development Company, L.P
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hp.alm.ali.idea.services;
import com.hp.alm.ali.ServerVersion;
import com.hp.alm.ali.idea.IntellijTest;
import com.hp.alm.ali.idea.entity.EntityAdapter;
import com.hp.alm.ali.idea.entity.EntityListener;
import com.hp.alm.ali.idea.entity.EntityRef;
import com.hp.alm.ali.idea.model.Entity;
import com.hp.alm.ali.idea.progress.IndicatingInputStream;
import com.hp.alm.ali.idea.rest.RestException;
import org.apache.commons.io.IOUtils;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileWriter;
import java.io.IOException;
import java.io.StringReader;
public class AttachmentServiceTest extends IntellijTest {
public AttachmentServiceTest() {
super(ServerVersion.AGM);
}
private AttachmentService attachmentService;
private File file;
@Before
public void preCleanup() throws IOException {
attachmentService = getComponent(AttachmentService.class);
file = createFile();
}
@Test
public void testCreateAttachment() throws IOException {
handler.addRequest("POST", "/qcbin/rest/domains/domain/projects/project/defects/1/attachments", 201)
.expectHeader("Content-Type", "application/octet-stream")
.expectHeader("Slug", "logfile.txt")
.content("attachmentServiceTest_attachment.xml");
handler.async();
addEntityListener(new EntityLoaded(handler, new EntityLoaded.Listener() {
@Override
public void evaluate(Entity entity, EntityListener.Event event) {
checkAttachment(entity);
Assert.assertEquals(EntityListener.Event.CREATE, event);
}
}));
String name = attachmentService.createAttachment("logfile.txt", new IndicatingInputStream(file, null), file.length(), new EntityRef("defect", 1));
Assert.assertEquals("logfile.txt", name);
}
@Test
public void testCreateAttachment_failure() throws IOException {
handler.addRequest("POST", "/qcbin/rest/domains/domain/projects/project/defects/1/attachments", 500)
.responseBody("Failed");
String name = attachmentService.createAttachment("logfile.txt", new IndicatingInputStream(file, null), file.length(), new EntityRef("defect", 1));
Assert.assertNull(name);
checkError("Failed");
}
@Test
public void testDeleteAttachment() {
handler.addRequest("DELETE", "/qcbin/rest/domains/domain/projects/project/defects/1/attachments/logfile.txt", 200)
.content("attachmentServiceTest_attachment.xml");
handler.async();
addEntityListener(new EntityNotFound(handler, "attachment", 653, true));
attachmentService.deleteAttachment("logfile.txt", new EntityRef("defect", 1));
}
@Test
public void testDeleteAttachment_failure() {
handler.addRequest("DELETE", "/qcbin/rest/domains/domain/projects/project/defects/1/attachments/logfile.txt", 500)
.responseBody("Failed");
attachmentService.deleteAttachment("logfile.txt", new EntityRef("defect", 1));
checkError("Failed");
}
@Test
public void testUpdateAttachmentProperty() {
handler.addRequest("PUT", "/qcbin/rest/domains/domain/projects/project/defects/1/attachments/logfile.txt", 200)
.content("attachmentServiceTest_attachment.xml");
handler.async();
addEntityListener(new EntityLoaded(handler, new EntityLoaded.Listener() {
@Override
public void evaluate(Entity entity, EntityListener.Event event) {
checkAttachment(entity);
Assert.assertEquals(EntityListener.Event.GET, event);
}
}));
Entity attachment = attachmentService.updateAttachmentProperty("logfile.txt", new EntityRef("defect", 1), "description", "newValue", false);
checkAttachment(attachment);
}
@Test
public void testUpdateAttachmentProperty_fail() {
handler.addRequest("PUT", "/qcbin/rest/domains/domain/projects/project/defects/1/attachments/logfile.txt", 500)
.responseBody("Failed");
Entity attachment = attachmentService.updateAttachmentProperty("logfile.txt", new EntityRef("defect", 1), "description", "newValue", false);
Assert.assertNull(attachment);
checkError("Failed");
}
@Test
public void testUpdateAttachmentProperty_failSilently() {
handler.addRequest("PUT", "/qcbin/rest/domains/domain/projects/project/defects/1/attachments/logfile.txt", 500)
.responseBody("Failed");
Entity attachment = attachmentService.updateAttachmentProperty("logfile.txt", new EntityRef("defect", 1), "description", "newValue", true);
Assert.assertNull(attachment);
}
@Test
public void testUpdateAttachmentContent() throws FileNotFoundException {
handler.addRequest("PUT", "/qcbin/rest/domains/domain/projects/project/defects/1/attachments/logfile.txt", 200)
.expectHeader("Content-Type", "application/octet-stream")
.content("attachmentServiceTest_attachment.xml");
handler.async();
addEntityListener(new EntityLoaded(handler, new EntityLoaded.Listener() {
@Override
public void evaluate(Entity entity, EntityListener.Event event) {
checkAttachment(entity);
Assert.assertEquals(EntityListener.Event.GET, event);
}
}));
boolean updated = attachmentService.updateAttachmentContent("logfile.txt", new EntityRef("defect", 1), new IndicatingInputStream(file, null), file.length(), false);
Assert.assertTrue(updated);
}
@Test
public void testUpdateAttachmentContent_fail() throws FileNotFoundException {
handler.addRequest("PUT", "/qcbin/rest/domains/domain/projects/project/defects/1/attachments/logfile.txt", 500)
.responseBody("Failed");
boolean updated = attachmentService.updateAttachmentContent("logfile.txt", new EntityRef("defect", 1), new IndicatingInputStream(file, null), file.length(), false);
Assert.assertFalse(updated);
checkError("Failed");
}
@Test
public void testUpdateAttachmentContent_failSilently() throws FileNotFoundException {
handler.addRequest("PUT", "/qcbin/rest/domains/domain/projects/project/defects/1/attachments/logfile.txt", 500)
.responseBody("Failed");
boolean updated = attachmentService.updateAttachmentContent("logfile.txt", new EntityRef("defect", 1), new IndicatingInputStream(file, null), file.length(), true);
Assert.assertFalse(updated);
}
@Test
public void testGetAttachmentEntity() {
handler.addRequest("GET", "/qcbin/rest/domains/domain/projects/project/defects/1/attachments/my%20name?alt=application/xml", 200)
.content("attachmentServiceTest_attachment.xml");
Entity attachment = new Entity("attachment", 653);
attachment.setProperty("name", "my name");
attachment.setProperty("parent-type", "defect");
attachment.setProperty("parent-id", "1");
Entity entity = attachmentService.getAttachmentEntity(attachment);
checkAttachment(entity);
}
@Test
public void testGetAttachmentEntity_alternative() {
handler.addRequest("GET", "/qcbin/rest/domains/domain/projects/project/defects/1/attachments/my%20name?alt=application/xml", 200)
.content("attachmentServiceTest_attachment.xml");
Entity entity = attachmentService.getAttachmentEntity("my name", new EntityRef("defect", 1));
checkAttachment(entity);
}
@Test
public void testGetAttachmentEntity_failure() {
handler.addRequest("GET", "/qcbin/rest/domains/domain/projects/project/defects/1/attachments/my%20name?alt=application/xml", 500)
.responseBody("Not this time");
Entity entity = attachmentService.getAttachmentEntity("my name", new EntityRef("defect", 1));
Assert.assertNull(entity);
checkError("Not this time");
}
@Test
public void testGetAttachmentEntity_illegal() {
Entity defect = new Entity("defect", 1);
try {
attachmentService.getAttachmentEntity(defect);
Assert.fail("should have failed");
} catch (IllegalArgumentException e) {
}
}
private void checkAttachment(Entity entity) {
Assert.assertEquals("attachment", entity.getType());
Assert.assertEquals(653, entity.getId());
Assert.assertEquals("1", entity.getPropertyValue("parent-id"));
Assert.assertEquals("defect", entity.getPropertyValue("parent-type"));
Assert.assertEquals("7", entity.getPropertyValue("file-size"));
}
private File createFile() throws IOException {
File tempFile = File.createTempFile("AttachmentServiceTest", null);
tempFile.deleteOnExit();
FileWriter fw = new FileWriter(tempFile);
IOUtils.copy(new StringReader("content"), fw);
fw.close();
return tempFile;
}
}
| apache-2.0 |