index int64 0 0 | repo_id stringlengths 26 205 | file_path stringlengths 51 246 | content stringlengths 8 433k | __index_level_0__ int64 0 10k |
|---|---|---|---|---|
0 | Create_ds/neptune-gremlin-client/gremlin-client/src/main/java/software/amazon/neptune | Create_ds/neptune-gremlin-client/gremlin-client/src/main/java/software/amazon/neptune/cluster/ClusterMetadataSupplier.java | /*
Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at
http://www.apache.org/licenses/LICENSE-2.0
or in the "license" file accompanying this file. This file is distributed
on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
express or implied. See the License for the specific language governing
permissions and limitations under the License.
*/
package software.amazon.neptune.cluster;
/**
 * Supplies {@link NeptuneClusterMetadata} describing the topology of a Neptune cluster.
 */
public interface ClusterMetadataSupplier {
/**
 * Returns cluster metadata without forcing a new fetch. Implementations may return a
 * previously cached copy (e.g. the Management API strategy caches the last successful fetch).
 *
 * @return the current cluster metadata
 */
NeptuneClusterMetadata getClusterMetadata();
/**
 * Fetches fresh cluster metadata from the backing source.
 *
 * @return the newly fetched cluster metadata
 */
NeptuneClusterMetadata refreshClusterMetadata();
}
| 7,500 |
0 | Create_ds/neptune-gremlin-client/gremlin-client/src/main/java/software/amazon/neptune | Create_ds/neptune-gremlin-client/gremlin-client/src/main/java/software/amazon/neptune/cluster/GetEndpointsFromNeptuneManagementApi.java | /*
Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at
http://www.apache.org/licenses/LICENSE-2.0
or in the "license" file accompanying this file. This file is distributed
on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
express or implied. See the License for the specific language governing
permissions and limitations under the License.
*/
package software.amazon.neptune.cluster;
import com.amazonaws.ClientConfiguration;
import com.amazonaws.auth.AWSCredentialsProvider;
import com.amazonaws.auth.profile.ProfileCredentialsProvider;
import com.amazonaws.services.neptune.AmazonNeptune;
import com.amazonaws.services.neptune.AmazonNeptuneClientBuilder;
import com.amazonaws.services.neptune.model.*;
import org.apache.commons.lang3.StringUtils;
import org.apache.tinkerpop.gremlin.driver.EndpointCollection;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import software.amazon.utils.RegionUtils;
import java.util.*;
import java.util.concurrent.atomic.AtomicReference;
import java.util.stream.Collectors;
/**
 * Fetches Neptune cluster topology (cluster endpoint, reader endpoint and per-instance
 * metadata) directly from the Neptune Management API, caching the last successful result
 * so it can be served when the API throttles subsequent requests.
 */
class GetEndpointsFromNeptuneManagementApi implements ClusterEndpointsFetchStrategy, ClusterMetadataSupplier {

    private static final Logger logger = LoggerFactory.getLogger(GetEndpointsFromNeptuneManagementApi.class);

    private final ClusterEndpointsFetchStrategy innerStrategy;
    private final String clusterId;
    private final String region;
    private final String iamProfile;
    private final AWSCredentialsProvider credentials;
    // Last successfully fetched metadata, served as a fallback when the API throttles us.
    private final AtomicReference<NeptuneClusterMetadata> cachedClusterMetadata = new AtomicReference<>();
    private final ClientConfiguration clientConfiguration;

    GetEndpointsFromNeptuneManagementApi(String clusterId) {
        this(clusterId, RegionUtils.getCurrentRegionName());
    }

    GetEndpointsFromNeptuneManagementApi(String clusterId, String region) {
        this(clusterId, region, IamAuthConfig.DEFAULT_PROFILE);
    }

    GetEndpointsFromNeptuneManagementApi(String clusterId, String region, String iamProfile) {
        this(clusterId, region, iamProfile, null, null);
    }

    GetEndpointsFromNeptuneManagementApi(String clusterId, String region, String iamProfile, ClientConfiguration clientConfiguration) {
        this(clusterId, region, iamProfile, null, clientConfiguration);
    }

    GetEndpointsFromNeptuneManagementApi(String clusterId, String region, AWSCredentialsProvider credentials) {
        this(clusterId, region, IamAuthConfig.DEFAULT_PROFILE, credentials, null);
    }

    GetEndpointsFromNeptuneManagementApi(String clusterId, String region, AWSCredentialsProvider credentials, ClientConfiguration clientConfiguration) {
        this(clusterId, region, IamAuthConfig.DEFAULT_PROFILE, credentials, clientConfiguration);
    }

    private GetEndpointsFromNeptuneManagementApi(String clusterId,
                                                 String region,
                                                 String iamProfile,
                                                 AWSCredentialsProvider credentials,
                                                 ClientConfiguration clientConfiguration) {
        this.innerStrategy = new CommonClusterEndpointsFetchStrategy(this);
        this.clusterId = clusterId;
        this.region = region;
        this.iamProfile = iamProfile;
        this.credentials = credentials;
        this.clientConfiguration = clientConfiguration;
    }

    /**
     * Fetches fresh cluster metadata from the Neptune Management API and caches it.
     * If the API throttles the request and a cached copy exists, the cached copy is returned.
     *
     * @return the newly fetched (or, under throttling, cached) cluster metadata
     * @throws IllegalStateException if the cluster cannot be found
     * @throws AmazonNeptuneException if the API call fails and no cached metadata is available
     */
    @Override
    public NeptuneClusterMetadata refreshClusterMetadata() {
        try {
            AmazonNeptune neptune = createNeptuneClient();
            try {
                NeptuneClusterMetadata clusterMetadata = fetchClusterMetadata(neptune);
                cachedClusterMetadata.set(clusterMetadata);
                return clusterMetadata;
            } finally {
                // Always release the client, even on failure; previously the client
                // (and its HTTP connection pool) leaked on every exception path.
                neptune.shutdown();
            }
        } catch (AmazonNeptuneException e) {
            // "Throttling".equals(...) is null-safe: client-side exceptions can carry a null error code.
            if ("Throttling".equals(e.getErrorCode())) {
                logger.warn("Calls to the Neptune Management API are being throttled. Reduce the refresh rate and stagger refresh agent requests, or use a NeptuneEndpointsInfoLambda proxy.");
                NeptuneClusterMetadata clusterMetadata = cachedClusterMetadata.get();
                if (clusterMetadata != null) {
                    return clusterMetadata;
                }
            }
            throw e;
        }
    }

    // Builds an AmazonNeptune client honouring any explicit configuration, region,
    // credentials provider or named credentials profile.
    private AmazonNeptune createNeptuneClient() {
        AmazonNeptuneClientBuilder builder = AmazonNeptuneClientBuilder.standard();
        if (clientConfiguration != null) {
            builder = builder.withClientConfiguration(clientConfiguration);
        }
        if (StringUtils.isNotEmpty(region)) {
            builder = builder.withRegion(region);
        }
        if (credentials != null) {
            builder = builder.withCredentials(credentials);
        } else if (!iamProfile.equals(IamAuthConfig.DEFAULT_PROFILE)) {
            builder = builder.withCredentials(new ProfileCredentialsProvider(iamProfile));
        }
        return builder.build();
    }

    // Describes the cluster and its instances, assigning each instance a
    // "writer"/"reader"/"unknown" role from the cluster membership list.
    private NeptuneClusterMetadata fetchClusterMetadata(AmazonNeptune neptune) {
        DescribeDBClustersResult describeDBClustersResult = neptune
                .describeDBClusters(new DescribeDBClustersRequest().withDBClusterIdentifier(clusterId));
        if (describeDBClustersResult.getDBClusters().isEmpty()) {
            throw new IllegalStateException(String.format("Unable to find cluster %s", clusterId));
        }
        DBCluster dbCluster = describeDBClustersResult.getDBClusters().get(0);
        String clusterEndpoint = dbCluster.getEndpoint();
        String readerEndpoint = dbCluster.getReaderEndpoint();
        List<DBClusterMember> dbClusterMembers = dbCluster.getDBClusterMembers();
        Optional<DBClusterMember> clusterWriter = dbClusterMembers.stream()
                .filter(DBClusterMember::isClusterWriter)
                .findFirst();
        String primary = clusterWriter.map(DBClusterMember::getDBInstanceIdentifier).orElse("");
        List<String> replicas = dbClusterMembers.stream()
                .filter(dbClusterMember -> !dbClusterMember.isClusterWriter())
                .map(DBClusterMember::getDBInstanceIdentifier)
                .collect(Collectors.toList());
        DescribeDBInstancesRequest describeDBInstancesRequest = new DescribeDBInstancesRequest()
                .withFilters(Collections.singletonList(
                        new Filter()
                                .withName("db-cluster-id")
                                .withValues(dbCluster.getDBClusterIdentifier())));
        DescribeDBInstancesResult describeDBInstancesResult = neptune
                .describeDBInstances(describeDBInstancesRequest);
        Collection<NeptuneInstanceMetadata> instances = new ArrayList<>();
        describeDBInstancesResult.getDBInstances()
                .forEach(c -> {
                    String role = "unknown";
                    if (primary.equals(c.getDBInstanceIdentifier())) {
                        role = "writer";
                    }
                    if (replicas.contains(c.getDBInstanceIdentifier())) {
                        role = "reader";
                    }
                    // An instance that is still being provisioned may not have an endpoint yet.
                    String address = c.getEndpoint() == null ? null : c.getEndpoint().getAddress();
                    instances.add(
                            new NeptuneInstanceMetadata()
                                    .withInstanceId(c.getDBInstanceIdentifier())
                                    .withRole(role)
                                    .withAddress(address)
                                    .withStatus(c.getDBInstanceStatus())
                                    .withAvailabilityZone(c.getAvailabilityZone())
                                    .withInstanceType(c.getDBInstanceClass())
                                    .withTags(getTags(c.getDBInstanceArn(), neptune)));
                });
        return new NeptuneClusterMetadata()
                .withInstances(instances)
                .withClusterEndpoint(clusterEndpoint)
                .withReaderEndpoint(readerEndpoint);
    }

    /**
     * Returns the cached cluster metadata, fetching it first if nothing is cached yet.
     */
    @Override
    public NeptuneClusterMetadata getClusterMetadata() {
        NeptuneClusterMetadata clusterMetadata = cachedClusterMetadata.get();
        if (clusterMetadata == null) {
            return refreshClusterMetadata();
        }
        return clusterMetadata;
    }

    @Override
    public ClusterMetadataSupplier clusterMetadataSupplier() {
        return this;
    }

    @Override
    public Map<? extends EndpointsSelector, EndpointCollection> getEndpoints(Collection<? extends EndpointsSelector> selectors, boolean refresh) {
        return innerStrategy.getEndpoints(selectors, refresh);
    }

    // Fetches the resource tags for a single instance ARN.
    private Map<String, String> getTags(String dbInstanceArn, AmazonNeptune neptune) {
        List<Tag> tagList = neptune.listTagsForResource(
                new ListTagsForResourceRequest()
                        .withResourceName(dbInstanceArn)).getTagList();
        Map<String, String> tags = new HashMap<>();
        tagList.forEach(t -> tags.put(t.getKey(), t.getValue()));
        return tags;
    }
}
| 7,501 |
0 | Create_ds/neptune-gremlin-client/gremlin-client/src/main/java/software/amazon/neptune | Create_ds/neptune-gremlin-client/gremlin-client/src/main/java/software/amazon/neptune/cluster/EndpointsSelector.java | /*
Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at
http://www.apache.org/licenses/LICENSE-2.0
or in the "license" file accompanying this file. This file is distributed
on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
express or implied. See the License for the specific language governing
permissions and limitations under the License.
*/
package software.amazon.neptune.cluster;
import org.apache.tinkerpop.gremlin.driver.EndpointCollection;
/**
 * Selects a set of endpoints (e.g. writer, readers, or the cluster endpoint) from
 * cluster metadata. {@link EndpointsType} provides the standard implementations.
 */
public interface EndpointsSelector {
/**
 * Returns the endpoints chosen by this selector from the supplied cluster metadata.
 *
 * @param clusterMetadata topology to select endpoints from
 * @return the selected endpoints
 */
EndpointCollection getEndpoints(NeptuneClusterMetadata clusterMetadata);
}
| 7,502 |
0 | Create_ds/neptune-gremlin-client/gremlin-client/src/main/java/software/amazon/neptune | Create_ds/neptune-gremlin-client/gremlin-client/src/main/java/software/amazon/neptune/cluster/EndpointsType.java | /*
Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at
http://www.apache.org/licenses/LICENSE-2.0
or in the "license" file accompanying this file. This file is distributed
on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
express or implied. See the License for the specific language governing
permissions and limitations under the License.
*/
package software.amazon.neptune.cluster;
import org.apache.tinkerpop.gremlin.driver.Endpoint;
import org.apache.tinkerpop.gremlin.driver.EndpointCollection;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Collections;
import java.util.List;
import java.util.stream.Collectors;
/**
 * Standard {@link EndpointsSelector} implementations for picking endpoints from
 * cluster metadata. Instance-based selectors fall back to the cluster-level
 * endpoints when no suitable instance is currently available.
 */
public enum EndpointsType implements EndpointsSelector {

    ClusterEndpoint {
        @Override
        public EndpointCollection getEndpoints(NeptuneClusterMetadata clusterMetadata) {
            return new EndpointCollection(
                    Collections.singletonList(clusterMetadata.getClusterEndpoint()));
        }
    },

    ReaderEndpoint {
        @Override
        public EndpointCollection getEndpoints(NeptuneClusterMetadata clusterMetadata) {
            return new EndpointCollection(
                    Collections.singletonList(clusterMetadata.getReaderEndpoint()));
        }
    },

    All {
        @Override
        public EndpointCollection getEndpoints(NeptuneClusterMetadata clusterMetadata) {
            List<Endpoint> available = clusterMetadata.getInstances().stream()
                    .filter(NeptuneInstanceMetadata::isAvailable)
                    .collect(Collectors.toList());
            return nonEmptyOrFallback(available, clusterMetadata, ReaderEndpoint,
                    "Unable to get any endpoints so getting ReaderEndpoint instead");
        }
    },

    Primary {
        @Override
        public EndpointCollection getEndpoints(NeptuneClusterMetadata clusterMetadata) {
            List<Endpoint> writers = clusterMetadata.getInstances().stream()
                    .filter(NeptuneInstanceMetadata::isPrimary)
                    .filter(NeptuneInstanceMetadata::isAvailable)
                    .collect(Collectors.toList());
            return nonEmptyOrFallback(writers, clusterMetadata, ClusterEndpoint,
                    "Unable to get Primary endpoint so getting ClusterEndpoint instead");
        }
    },

    ReadReplicas {
        @Override
        public EndpointCollection getEndpoints(NeptuneClusterMetadata clusterMetadata) {
            List<Endpoint> readers = clusterMetadata.getInstances().stream()
                    .filter(NeptuneInstanceMetadata::isReader)
                    .filter(NeptuneInstanceMetadata::isAvailable)
                    .collect(Collectors.toList());
            return nonEmptyOrFallback(readers, clusterMetadata, ReaderEndpoint,
                    "Unable to get ReadReplicas endpoints so getting ReaderEndpoint instead");
        }
    };

    private static final Logger logger = LoggerFactory.getLogger(EndpointsType.class);

    // Returns the filtered instance endpoints, or logs a warning and delegates to the
    // fallback selector when the filter matched nothing.
    private static EndpointCollection nonEmptyOrFallback(List<Endpoint> endpoints,
                                                         NeptuneClusterMetadata clusterMetadata,
                                                         EndpointsType fallback,
                                                         String warning) {
        if (endpoints.isEmpty()) {
            logger.warn(warning);
            return fallback.getEndpoints(clusterMetadata);
        }
        return new EndpointCollection(endpoints);
    }
}
| 7,503 |
0 | Create_ds/neptune-gremlin-client/gremlin-client/src/main/java/software/amazon/neptune | Create_ds/neptune-gremlin-client/gremlin-client/src/main/java/software/amazon/neptune/cluster/SuspendedEndpoints.java | /*
Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at
http://www.apache.org/licenses/LICENSE-2.0
or in the "license" file accompanying this file. This file is distributed
on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
express or implied. See the License for the specific language governing
permissions and limitations under the License.
*/
package software.amazon.neptune.cluster;
import org.apache.tinkerpop.gremlin.driver.ApprovalResult;
import org.apache.tinkerpop.gremlin.driver.EndpointFilter;
import org.apache.tinkerpop.gremlin.driver.Endpoint;
import java.util.Map;
/**
 * An {@link EndpointFilter} that rejects endpoints whose state annotation marks
 * them as suspended.
 */
public class SuspendedEndpoints implements EndpointFilter {

    /** Annotation key under which an endpoint's lifecycle state is published. */
    public static final String STATE_ANNOTATION = "AWS:endpoint_state";
    /** Annotation value (and rejection reason) for a suspended endpoint. */
    public static final String SUSPENDED = "suspended";

    /**
     * Rejects the endpoint if its {@value #STATE_ANNOTATION} annotation equals
     * {@value #SUSPENDED}; approves it otherwise.
     *
     * @param endpoint endpoint under consideration
     * @return a rejection carrying the {@value #SUSPENDED} reason, or {@link ApprovalResult#APPROVED}
     */
    @Override
    public ApprovalResult approveEndpoint(Endpoint endpoint) {
        // SUSPENDED.equals(...) is null-safe: the original containsKey/get/equals sequence
        // threw an NPE when the key was present but mapped to a null value.
        if (SUSPENDED.equals(endpoint.getAnnotations().get(STATE_ANNOTATION))) {
            return new ApprovalResult(false, SUSPENDED);
        }
        return ApprovalResult.APPROVED;
    }
}
| 7,504 |
0 | Create_ds/neptune-gremlin-client/gremlin-client/src/main/java/software/amazon/neptune | Create_ds/neptune-gremlin-client/gremlin-client/src/main/java/software/amazon/neptune/cluster/ClusterEndpointsRefreshAgent.java | /*
Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at
http://www.apache.org/licenses/LICENSE-2.0
or in the "license" file accompanying this file. This file is distributed
on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
express or implied. See the License for the specific language governing
permissions and limitations under the License.
*/
package software.amazon.neptune.cluster;
import com.amazonaws.ClientConfiguration;
import com.amazonaws.auth.AWSCredentialsProvider;
import org.apache.tinkerpop.gremlin.driver.EndpointCollection;
import org.apache.tinkerpop.gremlin.driver.GremlinClient;
import org.apache.tinkerpop.gremlin.driver.RefreshTask;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import software.amazon.utils.RegionUtils;
import java.util.*;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.stream.Collectors;
/**
 * Periodically fetches Neptune cluster topology (via a Lambda proxy, the Management API,
 * or the clients' own current view) and pushes refreshed endpoint lists to one or more
 * {@link GremlinClient} instances. Only one polling loop may be started per agent.
 */
public class ClusterEndpointsRefreshAgent implements AutoCloseable {

    /**
     * Creates and starts an agent that periodically re-publishes the client's current
     * endpoints back to it.
     */
    public static ClusterEndpointsRefreshAgent monitor(GremlinClient client,
                                                       long delay,
                                                       TimeUnit timeUnit) {
        return monitor(Collections.singletonList(client), delay, timeUnit);
    }

    /**
     * Creates and starts an agent that periodically re-publishes each client's current
     * endpoints back to it.
     */
    public static ClusterEndpointsRefreshAgent monitor(Collection<GremlinClient> clients,
                                                       long delay,
                                                       TimeUnit timeUnit) {
        ClusterEndpointsRefreshAgent refreshAgent =
                new ClusterEndpointsRefreshAgent(new GetCurrentEndpointsFromGremlinClient());
        // Each client needs its OWN selector instance: the polling loop keys its task map
        // by selector, so sharing one selector across clients collapsed the map to a single
        // entry and silently refreshed only the last client.
        refreshAgent.startPollingNeptuneAPI(
                clients.stream()
                        .map(c -> new RefreshTask(c, unusedSelector()))
                        .collect(Collectors.toList()),
                delay,
                timeUnit);
        return refreshAgent;
    }

    // Returns a fresh, identity-distinct selector that fails fast if ever consulted
    // (GetCurrentEndpointsFromGremlinClient never uses the selector to compute endpoints).
    private static EndpointsSelector unusedSelector() {
        return new EndpointsSelector() {
            @Override
            public EndpointCollection getEndpoints(NeptuneClusterMetadata clusterMetadata) {
                throw new UnsupportedOperationException();
            }
        };
    }

    public static ClusterEndpointsRefreshAgent lambdaProxy(String lambdaName) {
        return lambdaProxy(lambdaName, RegionUtils.getCurrentRegionName());
    }

    public static ClusterEndpointsRefreshAgent lambdaProxy(String lambdaName, String region) {
        return lambdaProxy(lambdaName, region, IamAuthConfig.DEFAULT_PROFILE);
    }

    public static ClusterEndpointsRefreshAgent lambdaProxy(String lambdaName, String region, AWSCredentialsProvider credentialsProvider) {
        return new ClusterEndpointsRefreshAgent(
                new GetEndpointsFromLambdaProxy(lambdaName, region, credentialsProvider));
    }

    public static ClusterEndpointsRefreshAgent lambdaProxy(String lambdaName, String region, AWSCredentialsProvider credentialsProvider, ClientConfiguration clientConfiguration) {
        return new ClusterEndpointsRefreshAgent(
                new GetEndpointsFromLambdaProxy(lambdaName, region, credentialsProvider, clientConfiguration));
    }

    public static ClusterEndpointsRefreshAgent lambdaProxy(String lambdaName, String region, String iamProfile) {
        return new ClusterEndpointsRefreshAgent(
                new GetEndpointsFromLambdaProxy(lambdaName, region, iamProfile));
    }

    public static ClusterEndpointsRefreshAgent lambdaProxy(String lambdaName, String region, String iamProfile, ClientConfiguration clientConfiguration) {
        return new ClusterEndpointsRefreshAgent(
                new GetEndpointsFromLambdaProxy(lambdaName, region, iamProfile, clientConfiguration));
    }

    public static ClusterEndpointsRefreshAgent managementApi(String clusterId) {
        return managementApi(clusterId, RegionUtils.getCurrentRegionName());
    }

    public static ClusterEndpointsRefreshAgent managementApi(String clusterId, String region) {
        return managementApi(clusterId, region, IamAuthConfig.DEFAULT_PROFILE);
    }

    public static ClusterEndpointsRefreshAgent managementApi(String clusterId, String region, AWSCredentialsProvider credentialsProvider) {
        return new ClusterEndpointsRefreshAgent(
                new GetEndpointsFromNeptuneManagementApi(clusterId, region, credentialsProvider));
    }

    public static ClusterEndpointsRefreshAgent managementApi(String clusterId, String region, AWSCredentialsProvider credentialsProvider, ClientConfiguration clientConfiguration) {
        return new ClusterEndpointsRefreshAgent(
                new GetEndpointsFromNeptuneManagementApi(clusterId, region, credentialsProvider, clientConfiguration));
    }

    public static ClusterEndpointsRefreshAgent managementApi(String clusterId, String region, String iamProfile) {
        return new ClusterEndpointsRefreshAgent(
                new GetEndpointsFromNeptuneManagementApi(clusterId, region, iamProfile));
    }

    public static ClusterEndpointsRefreshAgent managementApi(String clusterId, String region, String iamProfile, ClientConfiguration clientConfiguration) {
        return new ClusterEndpointsRefreshAgent(
                new GetEndpointsFromNeptuneManagementApi(clusterId, region, iamProfile, clientConfiguration));
    }

    private static final Logger logger = LoggerFactory.getLogger(ClusterEndpointsRefreshAgent.class);

    private final ClusterEndpointsFetchStrategy endpointsFetchStrategy;
    private final ScheduledExecutorService scheduledExecutorService = Executors.newSingleThreadScheduledExecutor();
    // Guards against scheduling more than one polling loop on the single-threaded executor.
    private final AtomicBoolean isRunning = new AtomicBoolean(false);

    public ClusterEndpointsRefreshAgent(ClusterEndpointsFetchStrategy endpointsFetchStrategy) {
        this.endpointsFetchStrategy = endpointsFetchStrategy;
    }

    /**
     * Starts polling, refreshing the given client with endpoints chosen by the selector.
     *
     * @throws IllegalStateException if this agent is already polling
     */
    public <T extends EndpointsSelector> void startPollingNeptuneAPI(GremlinClient client,
                                                                     T selector,
                                                                     long delay,
                                                                     TimeUnit timeUnit) {
        startPollingNeptuneAPI(RefreshTask.refresh(client, selector), delay, timeUnit);
    }

    /**
     * Starts polling with a single refresh task.
     *
     * @throws IllegalStateException if this agent is already polling
     */
    public <T extends EndpointsSelector> void startPollingNeptuneAPI(RefreshTask refreshTask,
                                                                     long delay,
                                                                     TimeUnit timeUnit) {
        startPollingNeptuneAPI(Collections.singletonList(refreshTask), delay, timeUnit);
    }

    /**
     * Starts polling with a collection of refresh tasks. Exceptions thrown during a poll
     * are logged and the schedule continues.
     *
     * @throws IllegalStateException if this agent is already polling
     */
    public <T extends EndpointsSelector> void startPollingNeptuneAPI(Collection<RefreshTask> tasks,
                                                                     long delay,
                                                                     TimeUnit timeUnit) {
        boolean isAlreadyRunning = !isRunning.compareAndSet(false, true);
        if (isAlreadyRunning) {
            throw new IllegalStateException("Refresh agent is already running");
        }
        scheduledExecutorService.scheduleWithFixedDelay(() -> {
            try {
                // NOTE(review): tasks are keyed by selector, so two tasks that share the same
                // selector instance collapse to one entry and only the last client registered
                // for that selector is refreshed — supply distinct selectors per client.
                Map<EndpointsSelector, GremlinClient> clientSelectors = new HashMap<>();
                for (RefreshTask task : tasks) {
                    clientSelectors.put(task.selector(), task.client());
                }
                Map<? extends EndpointsSelector, EndpointCollection> refreshResults = refreshEndpoints(clientSelectors);
                for (Map.Entry<? extends EndpointsSelector, EndpointCollection> entry : refreshResults.entrySet()) {
                    EndpointCollection endpoints = entry.getValue();
                    GremlinClient client = clientSelectors.get(entry.getKey());
                    logger.info("Refresh: [client: {}, endpoints: {}]", client.hashCode(), endpoints);
                    client.refreshEndpoints(endpoints);
                }
            } catch (Exception e) {
                logger.error("Error while getting cluster metadata", e);
            }
        }, delay, delay, timeUnit);
    }

    /**
     * Starts polling, passing each freshly fetched {@link NeptuneClusterMetadata} to the
     * supplied callback. Exceptions thrown during a poll are logged and the schedule continues.
     *
     * @throws IllegalStateException if this agent is already polling
     */
    public void startPollingNeptuneAPI(OnNewClusterMetadata onNewClusterMetadata,
                                       long delay,
                                       TimeUnit timeUnit) {
        // Consistency fix: this overload previously bypassed the isRunning guard, allowing
        // two polling loops to be scheduled on the same agent.
        boolean isAlreadyRunning = !isRunning.compareAndSet(false, true);
        if (isAlreadyRunning) {
            throw new IllegalStateException("Refresh agent is already running");
        }
        scheduledExecutorService.scheduleWithFixedDelay(() -> {
            try {
                NeptuneClusterMetadata clusterMetadata = refreshClusterMetadata();
                logger.info("New cluster metadata: {}", clusterMetadata);
                onNewClusterMetadata.apply(clusterMetadata);
            } catch (Exception e) {
                logger.error("Error while refreshing cluster metadata", e);
            }
        }, delay, delay, timeUnit);
    }

    /** Stops polling immediately. The agent cannot be restarted afterwards. */
    public void stop() {
        scheduledExecutorService.shutdownNow();
    }

    @Override
    public void close() throws Exception {
        stop();
    }

    /** Fetches (without forcing a refresh) the endpoints chosen by the given selector. */
    public <T extends EndpointsSelector> EndpointCollection getEndpoints(T selector) {
        return endpointsFetchStrategy.getEndpoints(Collections.singletonList(selector), false).get(selector);
    }

    public NeptuneClusterMetadata getClusterMetadata() {
        return endpointsFetchStrategy.clusterMetadataSupplier().getClusterMetadata();
    }

    /** Blocks until any in-flight scheduled task has completed. */
    public void awake() throws InterruptedException, ExecutionException {
        this.scheduledExecutorService.submit(() -> {}).get();
    }

    private Map<? extends EndpointsSelector, EndpointCollection> refreshEndpoints(Map<EndpointsSelector, GremlinClient> clientSelectors) {
        return endpointsFetchStrategy.getEndpoints(clientSelectors, true);
    }

    private NeptuneClusterMetadata refreshClusterMetadata() {
        return endpointsFetchStrategy.clusterMetadataSupplier().refreshClusterMetadata();
    }
}
| 7,505 |
0 | Create_ds/neptune-gremlin-client/gremlin-client/src/main/java/software/amazon/neptune | Create_ds/neptune-gremlin-client/gremlin-client/src/main/java/software/amazon/neptune/cluster/HandshakeInterceptorConfigurator.java | /*
Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
Permission is hereby granted, free of charge, to any person obtaining a copy of this
software and associated documentation files (the "Software"), to deal in the Software
without restriction, including without limitation the rights to use, copy, modify,
merge, publish, distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
package software.amazon.neptune.cluster;
import com.amazonaws.auth.AWSCredentialsProvider;
import org.apache.tinkerpop.gremlin.driver.*;
import java.util.stream.Collectors;
/**
 * Configures a {@link Cluster.Builder} with contact points and a websocket handshake
 * interceptor. A caller-supplied interceptor takes precedence; otherwise an IAM-auth,
 * load-balancer-aware interceptor is built from this configurator's settings.
 */
class HandshakeInterceptorConfigurator implements TopologyAwareBuilderConfigurator {

    private final boolean isDirectConnection;
    private final HandshakeInterceptor interceptor;
    private final boolean enableIamAuth;
    private final int port;
    private final int proxyPort;
    private final String proxyAddress;
    private final String serviceRegion;
    private final String iamProfile;
    private final AWSCredentialsProvider credentials;
    private final boolean removeHostHeader;

    HandshakeInterceptorConfigurator(boolean isDirectConnection,
                                     HandshakeInterceptor interceptor,
                                     boolean enableIamAuth,
                                     int port,
                                     int proxyPort,
                                     String proxyAddress,
                                     String serviceRegion,
                                     String iamProfile,
                                     AWSCredentialsProvider credentials,
                                     boolean removeHostHeader) {
        this.isDirectConnection = isDirectConnection;
        this.interceptor = interceptor;
        this.enableIamAuth = enableIamAuth;
        this.port = port;
        this.proxyPort = proxyPort;
        this.proxyAddress = proxyAddress;
        this.serviceRegion = serviceRegion;
        this.iamProfile = iamProfile;
        this.credentials = credentials;
        this.removeHostHeader = removeHostHeader;
    }

    /**
     * Applies contact points and a handshake interceptor to the builder.
     * Does nothing when there are no endpoints to configure.
     */
    @Override
    public void apply(Cluster.Builder builder, EndpointCollection endpoints) {
        boolean nothingToConfigure = endpoints == null || endpoints.isEmpty();
        if (nothingToConfigure) {
            return;
        }
        addContactPoints(builder, endpoints);
        if (interceptor != null) {
            // An explicitly supplied interceptor always wins.
            builder.handshakeInterceptor(interceptor);
            return;
        }
        builder.handshakeInterceptor(new LBAwareHandshakeInterceptor(buildIamAuthConfig(endpoints)));
    }

    // Direct connections dial every endpoint on the Neptune port; proxied connections
    // dial the (optional) proxy address on the proxy port.
    private void addContactPoints(Cluster.Builder builder, EndpointCollection endpoints) {
        if (isDirectConnection) {
            builder.port(port);
            for (Endpoint endpoint : endpoints) {
                builder.addContactPoint(endpoint.getAddress());
            }
        } else {
            builder.port(proxyPort);
            if (proxyAddress != null) {
                builder.addContactPoint(proxyAddress);
            }
        }
    }

    // Assembles the IAM auth configuration used when no explicit interceptor was supplied.
    private IamAuthConfig buildIamAuthConfig(EndpointCollection endpoints) {
        IamAuthConfig.IamAuthConfigBuilder configBuilder = IamAuthConfig.builder()
                .addNeptuneEndpoints(endpoints.stream().map(Endpoint::getAddress).collect(Collectors.toList()))
                .setNeptunePort(port)
                .setServiceRegion(serviceRegion)
                .setIamProfile(iamProfile)
                .setCredentials(credentials);
        if (enableIamAuth) {
            configBuilder.enableIamAuth();
        }
        if (!isDirectConnection) {
            configBuilder.connectViaLoadBalancer();
        }
        if (removeHostHeader) {
            configBuilder.removeHostHeaderAfterSigning();
        }
        return configBuilder.build();
    }
}
| 7,506 |
0 | Create_ds/neptune-gremlin-client/gremlin-client/src/main/java/software/amazon/neptune | Create_ds/neptune-gremlin-client/gremlin-client/src/main/java/software/amazon/neptune/cluster/NeptuneInstanceMetadata.java | /*
Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at
http://www.apache.org/licenses/LICENSE-2.0
or in the "license" file accompanying this file. This file is distributed
on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
express or implied. See the License for the specific language governing
permissions and limitations under the License.
*/
package software.amazon.neptune.cluster;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.tinkerpop.gremlin.driver.Endpoint;
import java.io.IOException;
import java.util.*;
/**
 * JSON-serializable metadata for a single Neptune instance: identity, role
 * (writer/reader), address, lifecycle status, placement, tags and free-form annotations.
 * Implements {@link Endpoint} so it can participate directly in endpoint collections.
 */
public class NeptuneInstanceMetadata implements Endpoint {

    /** Deserializes an instance from its JSON byte representation. */
    public static NeptuneInstanceMetadata fromByteArray(byte[] bytes) throws IOException {
        return new ObjectMapper().readerFor(NeptuneInstanceMetadata.class).readValue(bytes);
    }

    // Instance statuses (lower-cased) in which the endpoint is still usable for connections.
    private static final Collection<String> AVAILABLE_STATES = Arrays.asList("available", "backing-up", "modifying", "upgrading");

    private String instanceId;
    private String role;
    private String address;
    private String status;
    private String availabilityZone;
    private String instanceType;
    private final Map<String, String> annotations = new HashMap<>();
    private final Map<String, String> tags = new HashMap<>();

    public NeptuneInstanceMetadata() {
    }

    public void setInstanceId(String instanceId) {
        this.instanceId = instanceId;
    }

    public void setRole(String role) {
        this.role = role;
    }

    public void setAddress(String address) {
        this.address = address;
    }

    /** @deprecated use {@link #setAddress(String)} instead. */
    @Deprecated
    public void setEndpoint(String endpoint) {
        this.address = endpoint;
    }

    public void setStatus(String status) {
        this.status = status;
    }

    public void setAvailabilityZone(String availabilityZone) {
        this.availabilityZone = availabilityZone;
    }

    public void setInstanceType(String instanceType) {
        this.instanceType = instanceType;
    }

    /** Replaces all tags with the supplied map's entries. */
    public void setTags(Map<String, String> tags) {
        this.tags.clear();
        this.tags.putAll(tags);
    }

    /** Replaces all annotations with the supplied map's entries. */
    public void setAnnotations(Map<String, String> annotations) {
        this.annotations.clear();
        this.annotations.putAll(annotations);
    }

    @Override
    public void setAnnotation(String key, String value) {
        annotations.put(key, value);
    }

    public NeptuneInstanceMetadata withInstanceId(String instanceId) {
        setInstanceId(instanceId);
        return this;
    }

    public NeptuneInstanceMetadata withRole(String role) {
        setRole(role);
        return this;
    }

    public NeptuneInstanceMetadata withAddress(String address) {
        setAddress(address);
        return this;
    }

    public NeptuneInstanceMetadata withStatus(String status) {
        setStatus(status);
        return this;
    }

    public NeptuneInstanceMetadata withAvailabilityZone(String availabilityZone) {
        setAvailabilityZone(availabilityZone);
        return this;
    }

    public NeptuneInstanceMetadata withInstanceType(String instanceType) {
        setInstanceType(instanceType);
        return this;
    }

    public NeptuneInstanceMetadata withTags(Map<String, String> tags) {
        setTags(tags);
        return this;
    }

    public NeptuneInstanceMetadata withAnnotations(Map<String, String> annotations) {
        setAnnotations(annotations);
        return this;
    }

    public NeptuneInstanceMetadata withAnnotation(String key, String value) {
        annotations.put(key, value);
        return this;
    }

    public String getInstanceId() {
        return instanceId;
    }

    public String getRole() {
        return role;
    }

    @Override
    public String getAddress() {
        return address;
    }

    public String getStatus() {
        return status;
    }

    public String getAvailabilityZone() {
        return availabilityZone;
    }

    public String getInstanceType() {
        return instanceType;
    }

    public Map<String, String> getTags() {
        return tags;
    }

    @Override
    public Map<String, String> getAnnotations() {
        return annotations;
    }

    public boolean hasTag(String tag) {
        return tags.containsKey(tag);
    }

    public String getTag(String tag) {
        return tags.get(tag);
    }

    /** Returns the tag's value, or {@code defaultValue} if the tag is absent. */
    public String getTag(String tag, String defaultValue) {
        if (!tags.containsKey(tag)) {
            return defaultValue;
        }
        return tags.get(tag);
    }

    public boolean hasTag(String tag, String value) {
        return hasTag(tag) && getTag(tag).equals(value);
    }

    /**
     * True when the instance has an address and its status is one of the usable states.
     * Null-safe: a null status now yields {@code false} instead of an NPE, and
     * {@link Locale#ROOT} lower-casing avoids locale-sensitive comparisons
     * (e.g. the Turkish dotless i) breaking the state match.
     */
    @JsonIgnore
    public boolean isAvailable() {
        return address != null
                && status != null
                && AVAILABLE_STATES.contains(status.toLowerCase(Locale.ROOT));
    }

    /** True when this instance is the cluster writer. Null-safe for an absent role. */
    @JsonIgnore
    public boolean isPrimary() {
        // "writer".equalsIgnoreCase(role) is null-safe; getRole().equalsIgnoreCase(...) was not.
        return "writer".equalsIgnoreCase(role);
    }

    /** True when this instance is a read replica. Null-safe for an absent role. */
    @JsonIgnore
    public boolean isReader() {
        return "reader".equalsIgnoreCase(role);
    }

    @Override
    public String toString() {
        // Class name corrected from the stale "NeptuneEndpointMetadata".
        return "NeptuneInstanceMetadata{" +
                "instanceId='" + instanceId + '\'' +
                ", role='" + role + '\'' +
                ", address='" + address + '\'' +
                ", status='" + status + '\'' +
                ", availabilityZone='" + availabilityZone + '\'' +
                ", instanceType='" + instanceType + '\'' +
                ", annotations=" + annotations +
                ", tags=" + tags +
                '}';
    }

    /** Serializes this instance to its JSON string representation. */
    public String toJsonString() throws JsonProcessingException {
        return new ObjectMapper().writerFor(this.getClass()).writeValueAsString(this);
    }
}
| 7,507 |
0 | Create_ds/neptune-gremlin-client/gremlin-client/src/main/java/software/amazon/neptune | Create_ds/neptune-gremlin-client/gremlin-client/src/main/java/software/amazon/neptune/cluster/GetCurrentEndpointsFromGremlinClient.java | /*
Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at
http://www.apache.org/licenses/LICENSE-2.0
or in the "license" file accompanying this file. This file is distributed
on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
express or implied. See the License for the specific language governing
permissions and limitations under the License.
*/
package software.amazon.neptune.cluster;
import org.apache.tinkerpop.gremlin.driver.EndpointCollection;
import org.apache.tinkerpop.gremlin.driver.GremlinClient;
import java.util.Collection;
import java.util.HashMap;
import java.util.Map;
/**
 * A {@link ClusterEndpointsFetchStrategy} that answers endpoint queries from the
 * endpoints each {@link GremlinClient} is currently using, rather than by querying
 * the cluster. Only the client-keyed overload of getEndpoints() is supported.
 */
class GetCurrentEndpointsFromGremlinClient implements ClusterEndpointsFetchStrategy {

    @Override
    public ClusterMetadataSupplier clusterMetadataSupplier() {
        throw new UnsupportedOperationException("This operation is not supported for the GetCurrentEndpointsFromGremlinClient strategy.");
    }

    @Override
    public Map<? extends EndpointsSelector, EndpointCollection> getEndpoints(Collection<? extends EndpointsSelector> selectors, boolean refresh) {
        throw new UnsupportedOperationException("This operation is not supported for the GetCurrentEndpointsFromGremlinClient strategy.");
    }

    /**
     * Returns, for each selector, the endpoints its associated client is currently using.
     * The refresh flag is ignored: results always reflect current client state.
     */
    @Override
    public Map<? extends EndpointsSelector, EndpointCollection> getEndpoints(Map<? extends EndpointsSelector, GremlinClient> clientSelectors, boolean refresh) {
        Map<EndpointsSelector, EndpointCollection> currentEndpoints = new HashMap<>();
        clientSelectors.forEach((selector, client) -> currentEndpoints.put(selector, client.currentEndpoints()));
        return currentEndpoints;
    }
}
| 7,508 |
0 | Create_ds/neptune-gremlin-client/gremlin-client/src/main/java/software/amazon/neptune | Create_ds/neptune-gremlin-client/gremlin-client/src/main/java/software/amazon/neptune/cluster/GetEndpointsFromLambdaProxy.java | /*
Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at
http://www.apache.org/licenses/LICENSE-2.0
or in the "license" file accompanying this file. This file is distributed
on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
express or implied. See the License for the specific language governing
permissions and limitations under the License.
*/
package software.amazon.neptune.cluster;
import com.amazonaws.ClientConfiguration;
import com.amazonaws.auth.AWSCredentialsProvider;
import com.amazonaws.auth.DefaultAWSCredentialsProviderChain;
import com.amazonaws.auth.profile.ProfileCredentialsProvider;
import com.amazonaws.services.lambda.AWSLambda;
import com.amazonaws.services.lambda.AWSLambdaClientBuilder;
import com.amazonaws.services.lambda.model.InvokeRequest;
import com.amazonaws.services.lambda.model.InvokeResult;
import com.amazonaws.services.lambda.model.TooManyRequestsException;
import com.evanlennick.retry4j.CallExecutor;
import com.evanlennick.retry4j.CallExecutorBuilder;
import com.evanlennick.retry4j.Status;
import com.evanlennick.retry4j.config.RetryConfig;
import com.evanlennick.retry4j.config.RetryConfigBuilder;
import com.evanlennick.retry4j.exception.UnexpectedException;
import com.fasterxml.jackson.databind.exc.MismatchedInputException;
import org.apache.commons.lang3.StringUtils;
import org.apache.tinkerpop.gremlin.driver.EndpointCollection;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import software.amazon.utils.RegionUtils;
import java.time.temporal.ChronoUnit;
import java.util.Collection;
import java.util.Map;
import java.util.concurrent.Callable;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.atomic.AtomicReference;
/**
 * A {@link ClusterEndpointsFetchStrategy} and {@link ClusterMetadataSupplier} that fetches
 * Neptune cluster topology by invoking an AWS Lambda proxy function. The Lambda function is
 * expected to return a {@link NeptuneClusterMetadata} JSON document. Fetched metadata is
 * cached and refreshed at most once every 15 seconds via {@link #getClusterMetadata()};
 * {@link #refreshClusterMetadata()} always invokes the function.
 */
class GetEndpointsFromLambdaProxy implements ClusterEndpointsFetchStrategy, ClusterMetadataSupplier {

    private static final Logger logger = LoggerFactory.getLogger(GetEndpointsFromLambdaProxy.class);

    // Minimum interval, in milliseconds, between cache refreshes triggered by getClusterMetadata().
    private static final long FIFTEEN_SECONDS = 15000;

    // Delegate that maps selectors onto the cluster metadata supplied by this class.
    private final ClusterEndpointsFetchStrategy innerStrategy;
    private final String lambdaName;
    private final AWSLambda lambdaClient;
    private final RetryConfig retryConfig;
    // Most recently fetched metadata; null until the first successful fetch.
    private final AtomicReference<NeptuneClusterMetadata> cachedClusterMetadata = new AtomicReference<>();
    // Epoch millis of the last refresh; initialized to construction time.
    private final AtomicLong lastRefreshTime = new AtomicLong(System.currentTimeMillis());

    GetEndpointsFromLambdaProxy(String lambdaName) {
        this(lambdaName, RegionUtils.getCurrentRegionName());
    }

    GetEndpointsFromLambdaProxy(String lambdaName, String region) {
        this(lambdaName, region, IamAuthConfig.DEFAULT_PROFILE);
    }

    GetEndpointsFromLambdaProxy(String lambdaName, String region, String iamProfile) {
        this(lambdaName, region, iamProfile, null, null);
    }

    GetEndpointsFromLambdaProxy(String lambdaName, String region, String iamProfile, ClientConfiguration clientConfiguration) {
        this(lambdaName, region, iamProfile, null, clientConfiguration);
    }

    GetEndpointsFromLambdaProxy(String lambdaName, String region, AWSCredentialsProvider credentials) {
        this(lambdaName, region, IamAuthConfig.DEFAULT_PROFILE, credentials, null);
    }

    GetEndpointsFromLambdaProxy(String lambdaName, String region, AWSCredentialsProvider credentials, ClientConfiguration clientConfiguration) {
        this(lambdaName, region, IamAuthConfig.DEFAULT_PROFILE, credentials, clientConfiguration);
    }

    private GetEndpointsFromLambdaProxy(String lambdaName,
                                        String region,
                                        String iamProfile,
                                        AWSCredentialsProvider credentials,
                                        ClientConfiguration clientConfiguration) {
        this.innerStrategy = new CommonClusterEndpointsFetchStrategy(this);
        this.lambdaName = lambdaName;
        this.lambdaClient = createLambdaClient(region, iamProfile, credentials, clientConfiguration);
        // Retry throttled invocations (TooManyRequestsException) up to 5 times with
        // exponential backoff starting at 100 ms.
        this.retryConfig = new RetryConfigBuilder()
                .retryOnSpecificExceptions(TooManyRequestsException.class)
                .withMaxNumberOfTries(5)
                .withDelayBetweenTries(100, ChronoUnit.MILLIS)
                .withExponentialBackoff()
                .build();
    }

    @Override
    public ClusterMetadataSupplier clusterMetadataSupplier() {
        return this;
    }

    /**
     * Invokes the Lambda proxy, parses the returned NeptuneClusterMetadata JSON document,
     * and updates the cache. Always performs a remote call (no caching).
     *
     * @throws IllegalStateException if the Lambda function does not return a parseable
     *                               NeptuneClusterMetadata JSON document
     */
    @Override
    public NeptuneClusterMetadata refreshClusterMetadata() {
        Callable<NeptuneClusterMetadata> query = () -> {
            // The proxy takes no meaningful input, so send an empty JSON string payload.
            InvokeRequest invokeRequest = new InvokeRequest()
                    .withFunctionName(lambdaName)
                    .withPayload("\"\"");
            InvokeResult result = lambdaClient.invoke(invokeRequest);
            return NeptuneClusterMetadata.fromByteArray(result.getPayload().array());
        };

        @SuppressWarnings("unchecked")
        CallExecutor<NeptuneClusterMetadata> executor =
                new CallExecutorBuilder<NeptuneClusterMetadata>().config(retryConfig).build();

        Status<NeptuneClusterMetadata> status;

        try {
            status = executor.execute(query);
        } catch (UnexpectedException e) {
            // A MismatchedInputException means the function returned a payload that isn't a
            // NeptuneClusterMetadata document; anything else is reported as unexpected.
            if (e.getCause() instanceof MismatchedInputException) {
                throw new IllegalStateException(String.format("The AWS Lambda proxy (%s) isn't returning a NeptuneClusterMetadata JSON document. Check that the function supports returning a NeptuneClusterMetadata JSON document.", lambdaName), e.getCause());
            } else {
                throw new IllegalStateException(String.format("There was an unexpected error while attempting to get a NeptuneClusterMetadata JSON document from the AWS Lambda proxy (%s). Check that the function supports returning a NeptuneClusterMetadata JSON document.", lambdaName), e.getCause());
            }
        }

        NeptuneClusterMetadata clusterMetadata = status.getResult();
        cachedClusterMetadata.set(clusterMetadata);

        logger.debug("clusterMetadata: {}", clusterMetadata);

        return clusterMetadata;
    }

    /**
     * Returns cached cluster metadata, fetching it if no fetch has happened yet, or
     * refreshing it when the cache interval has elapsed.
     */
    @Override
    public NeptuneClusterMetadata getClusterMetadata() {
        NeptuneClusterMetadata clusterMetadata = cachedClusterMetadata.get();
        if (clusterMetadata == null) {
            return refreshClusterMetadata();
        }
        if (shouldRefresh()){
            return refreshClusterMetadata();
        }
        return clusterMetadata;
    }

    @Override
    public Map<? extends EndpointsSelector, EndpointCollection> getEndpoints(Collection<? extends EndpointsSelector> selectors, boolean refresh) {
        return innerStrategy.getEndpoints(selectors, refresh);
    }

    // Atomically decides whether a refresh is due. The updateAndGet advances the stored
    // timestamp to 'now' only when more than FIFTEEN_SECONDS have elapsed, so under
    // concurrent calls at most one caller observes refreshTime == now and triggers the
    // refresh. Ensures cached values are refreshed at most once every 15 seconds.
    // (NOTE(review): an earlier comment here said "every 5 seconds", which contradicted
    // FIFTEEN_SECONDS = 15000 ms — the constant is authoritative.)
    private boolean shouldRefresh() {
        final long now = System.currentTimeMillis();
        long refreshTime = lastRefreshTime.updateAndGet(currentValue -> now - currentValue > FIFTEEN_SECONDS ? now : currentValue);
        return (refreshTime == now);
    }

    // Builds the AWSLambda client: explicit credentials win, then a named profile (when not
    // "default"), then the default provider chain; region is applied only when non-empty.
    private AWSLambda createLambdaClient(String region, String iamProfile, AWSCredentialsProvider credentials, ClientConfiguration clientConfiguration) {
        AWSLambdaClientBuilder builder = AWSLambdaClientBuilder.standard();

        if (clientConfiguration != null){
            builder = builder.withClientConfiguration(clientConfiguration);
        }

        if (credentials != null) {
            builder = builder.withCredentials(credentials);
        } else {
            if (!iamProfile.equals(IamAuthConfig.DEFAULT_PROFILE)) {
                builder = builder.withCredentials(new ProfileCredentialsProvider(iamProfile));
            } else {
                builder = builder.withCredentials(DefaultAWSCredentialsProviderChain.getInstance());
            }
        }

        if (StringUtils.isNotEmpty(region)) {
            builder = builder.withRegion(region);
        }

        return builder.build();
    }
}
| 7,509 |
0 | Create_ds/neptune-gremlin-client/gremlin-client/src/main/java/software/amazon/neptune | Create_ds/neptune-gremlin-client/gremlin-client/src/main/java/software/amazon/neptune/cluster/LBAwareHandshakeInterceptor.java | /*
Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
Permission is hereby granted, free of charge, to any person obtaining a copy of this
software and associated documentation files (the "Software"), to deal in the Software
without restriction, including without limitation the rights to use, copy, modify,
merge, publish, distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
package software.amazon.neptune.cluster;
import com.amazonaws.neptune.auth.NeptuneNettyHttpSigV4Signer;
import com.amazonaws.neptune.auth.NeptuneSigV4SignerException;
import io.netty.handler.codec.http.FullHttpRequest;
import org.apache.commons.lang3.StringUtils;
import org.apache.tinkerpop.gremlin.driver.HandshakeInterceptor;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import software.amazon.utils.RegionUtils;
/**
 * A {@link HandshakeInterceptor} that rewrites the websocket handshake request so it can be
 * routed through a load balancer and/or SigV4-signed for IAM database authentication.
 * The Host header is replaced with a real Neptune endpoint BEFORE signing, so the signature
 * matches the backend target rather than the load balancer address.
 */
class LBAwareHandshakeInterceptor implements HandshakeInterceptor {

    private static final Logger logger = LoggerFactory.getLogger(LBAwareHandshakeInterceptor.class);

    private final IamAuthConfig iamAuthConfig;
    private final String serviceRegion;
    // Non-null iff IAM auth is enabled (see createSigV4Signer()).
    private final NeptuneNettyHttpSigV4Signer sigV4Signer;

    LBAwareHandshakeInterceptor(IamAuthConfig iamAuthConfig) {
        this.iamAuthConfig = iamAuthConfig;
        this.serviceRegion = getServiceRegion();
        this.sigV4Signer = createSigV4Signer();
    }

    // Creates a reusable SigV4 signer when IAM auth is enabled; returns null otherwise.
    private NeptuneNettyHttpSigV4Signer createSigV4Signer() {
        if (iamAuthConfig.enableIamAuth()) {
            try {
                return new NeptuneNettyHttpSigV4Signer(
                        serviceRegion,
                        iamAuthConfig.credentialsProviderChain());
            } catch (NeptuneSigV4SignerException e) {
                throw new RuntimeException("Exception occurred while creating NeptuneSigV4Signer", e);
            }
        } else {
            return null;
        }
    }

    /**
     * Rewrites the handshake request. Order matters: the Host header is normalized first,
     * then (for IAM auth) the request is signed, and only after signing may the Host header
     * be removed again for setups that require it.
     */
    @Override
    public FullHttpRequest apply(FullHttpRequest request) {
        logger.trace("iamAuthConfig: {}, serviceRegion: {}", iamAuthConfig, serviceRegion);

        if (iamAuthConfig.enableIamAuth() || iamAuthConfig.connectViaLoadBalancer()) {
            // Remove any pre-existing Host header in either casing before adding the
            // endpoint chosen from the config.
            request.headers().remove("Host");
            request.headers().remove("host");
            request.headers().add("Host", iamAuthConfig.chooseHostHeader());
        }

        if (iamAuthConfig.enableIamAuth()) {
            try {
                // NOTE(review): sigV4Signer is always non-null on this path (the constructor
                // creates it whenever enableIamAuth() is true), so the fallback construction
                // below looks purely defensive — confirm before simplifying.
                NeptuneNettyHttpSigV4Signer signer = sigV4Signer != null ?
                        sigV4Signer :
                        new NeptuneNettyHttpSigV4Signer(
                                serviceRegion,
                                iamAuthConfig.credentialsProviderChain());
                signer.signRequest(request);
                // Some load balancer configurations require the Host header to be dropped
                // after the signature has been computed.
                if (iamAuthConfig.removeHostHeaderAfterSigning()) {
                    request.headers().remove("Host");
                }
            } catch (NeptuneSigV4SignerException e) {
                throw new RuntimeException("Exception occurred while signing the request", e);
            }
        }

        return request;
    }

    // Resolves the Neptune service region with the following precedence:
    // config value > SERVICE_REGION env var > SERVICE_REGION system property >
    // current AWS region; throws if none of these yields a region.
    private String getServiceRegion() {
        if (StringUtils.isNotEmpty(iamAuthConfig.serviceRegion())) {
            logger.debug("Using service region supplied in config");
            return iamAuthConfig.serviceRegion();
        } else if (StringUtils.isNotEmpty(System.getenv("SERVICE_REGION"))) {
            logger.debug("Using SERVICE_REGION environment variable as service region");
            return StringUtils.trim(System.getenv("SERVICE_REGION"));
        } else if (StringUtils.isNotEmpty(System.getProperty("SERVICE_REGION"))) {
            logger.debug("Using SERVICE_REGION system property as service region");
            return StringUtils.trim(System.getProperty("SERVICE_REGION"));
        } else {
            String currentRegionName = RegionUtils.getCurrentRegionName();
            if (currentRegionName != null) {
                logger.debug("Using current region as service region");
                return currentRegionName;
            } else {
                throw new IllegalStateException("Unable to determine Neptune service region. Use the SERVICE_REGION environment variable or system property, or the NeptuneGremlinClusterBuilder.serviceRegion() method to specify the Neptune service region.");
            }
        }
    }
}
| 7,510 |
0 | Create_ds/neptune-gremlin-client/gremlin-client/src/main/java/software/amazon/neptune | Create_ds/neptune-gremlin-client/gremlin-client/src/main/java/software/amazon/neptune/cluster/NeptuneClusterMetadata.java | /*
Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at
http://www.apache.org/licenses/LICENSE-2.0
or in the "license" file accompanying this file. This file is distributed
on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
express or implied. See the License for the specific language governing
permissions and limitations under the License.
*/
package software.amazon.neptune.cluster;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.tinkerpop.gremlin.driver.DatabaseEndpoint;
import org.apache.tinkerpop.gremlin.driver.EndpointCollection;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
/**
 * Describes the topology of a Neptune cluster: its member instances plus the cluster
 * (writer) and reader endpoints. Serializable to/from JSON via Jackson; getter and
 * setter names define the JSON property names, so they must not change.
 */
public class NeptuneClusterMetadata {

    /**
     * Deserializes a NeptuneClusterMetadata instance from a JSON byte array.
     *
     * @throws IOException if the bytes are not a valid NeptuneClusterMetadata document
     */
    public static NeptuneClusterMetadata fromByteArray(byte[] bytes) throws IOException {
        return new ObjectMapper().readerFor(NeptuneClusterMetadata.class).readValue(bytes);
    }

    private final Collection<NeptuneInstanceMetadata> instances = new ArrayList<>();
    private DatabaseEndpoint clusterEndpoint;
    private DatabaseEndpoint readerEndpoint;

    public NeptuneClusterMetadata(){
    }

    public void setClusterEndpoint(DatabaseEndpoint clusterEndpoint) {
        this.clusterEndpoint = clusterEndpoint;
    }

    public void setReaderEndpoint(DatabaseEndpoint readerEndpoint) {
        this.readerEndpoint = readerEndpoint;
    }

    /** Replaces the current instance list with a copy of the supplied collection. */
    public void setInstances(Collection<NeptuneInstanceMetadata> instances) {
        this.instances.clear();
        this.instances.addAll(instances);
    }

    /** Fluent variant of {@link #setClusterEndpoint} taking a plain address string. */
    public NeptuneClusterMetadata withClusterEndpoint(String clusterEndpoint) {
        DatabaseEndpoint endpoint = new DatabaseEndpoint().withAddress(clusterEndpoint);
        setClusterEndpoint(endpoint);
        return this;
    }

    /** Fluent variant of {@link #setReaderEndpoint} taking a plain address string. */
    public NeptuneClusterMetadata withReaderEndpoint(String readerEndpoint) {
        DatabaseEndpoint endpoint = new DatabaseEndpoint().withAddress(readerEndpoint);
        setReaderEndpoint(endpoint);
        return this;
    }

    /** Fluent variant of {@link #setInstances}. */
    public NeptuneClusterMetadata withInstances(Collection<NeptuneInstanceMetadata> instances) {
        setInstances(instances);
        return this;
    }

    public Collection<NeptuneInstanceMetadata> getInstances() {
        return instances;
    }

    public DatabaseEndpoint getClusterEndpoint() {
        return clusterEndpoint;
    }

    public DatabaseEndpoint getReaderEndpoint() {
        return readerEndpoint;
    }

    /** Applies the supplied selector to this metadata and returns the matching endpoints. */
    public EndpointCollection select(EndpointsSelector selector){
        return selector.getEndpoints(this);
    }

    @Override
    public String toString() {
        StringBuilder text = new StringBuilder("NeptuneClusterMetadata{");
        text.append("instances=").append(instances);
        text.append(", clusterEndpoint='").append(clusterEndpoint).append('\'');
        text.append(", readerEndpoint='").append(readerEndpoint).append('\'');
        text.append('}');
        return text.toString();
    }

    /**
     * Serializes this metadata object to a JSON string.
     *
     * @throws JsonProcessingException if serialization fails
     */
    public String toJsonString() throws JsonProcessingException {
        return new ObjectMapper().writerFor(this.getClass()).writeValueAsString(this);
    }
}
| 7,511 |
0 | Create_ds/neptune-gremlin-client/gremlin-client/src/main/java/software/amazon/neptune | Create_ds/neptune-gremlin-client/gremlin-client/src/main/java/software/amazon/neptune/cluster/IamAuthConfig.java | /*
Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
Permission is hereby granted, free of charge, to any person obtaining a copy of this
software and associated documentation files (the "Software"), to deal in the Software
without restriction, including without limitation the rights to use, copy, modify,
merge, publish, distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
package software.amazon.neptune.cluster;
import com.amazonaws.auth.AWSCredentialsProvider;
import com.amazonaws.auth.AWSCredentialsProviderChain;
import com.amazonaws.auth.DefaultAWSCredentialsProviderChain;
import com.amazonaws.auth.profile.ProfileCredentialsProvider;
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.JsonNodeFactory;
import com.fasterxml.jackson.databind.node.ObjectNode;
import java.util.*;
/**
 * Immutable configuration describing how to connect to Neptune endpoints: whether IAM
 * database authentication is enabled, whether connections go via a load balancer, how the
 * Host header is chosen, and which AWS credentials to sign with. Build instances with
 * {@link #builder()}.
 */
class IamAuthConfig {

    public static final String DEFAULT_PROFILE = "default";

    public static IamAuthConfigBuilder builder() {
        return new IamAuthConfigBuilder();
    }

    private final List<String> endpoints;
    private final int port;
    private final boolean connectViaLoadBalancer;
    private final boolean enableIamAuth;
    private final boolean removeHostHeaderAfterSigning;
    private final String serviceRegion;
    private final String iamProfile;
    private final AWSCredentialsProvider credentials;
    // Used by chooseHostHeader() to spread Host headers across endpoints.
    // NOTE(review): seeded with currentTimeMillis — consider ThreadLocalRandom if this is
    // ever shared across threads; confirm before changing.
    private final Random random = new Random(System.currentTimeMillis());

    IamAuthConfig(Collection<String> endpoints,
                  int port,
                  boolean enableIamAuth,
                  boolean connectViaLoadBalancer,
                  boolean removeHostHeaderAfterSigning,
                  String serviceRegion,
                  String iamProfile,
                  AWSCredentialsProvider credentials) {
        this.endpoints = new ArrayList<>(endpoints);
        this.port = port;
        this.enableIamAuth = enableIamAuth;
        this.connectViaLoadBalancer = connectViaLoadBalancer;
        this.removeHostHeaderAfterSigning = removeHostHeaderAfterSigning;
        this.serviceRegion = serviceRegion;
        this.iamProfile = iamProfile;
        this.credentials = credentials;
    }

    public String serviceRegion() {
        return serviceRegion;
    }

    // Credentials precedence: explicit provider > named profile (when not "default") >
    // default AWS provider chain.
    public AWSCredentialsProviderChain credentialsProviderChain() {
        if (credentials != null) {
            return new AWSCredentialsProviderChain(Collections.singletonList(credentials));
        } else if (!iamProfile.equals(DEFAULT_PROFILE)) {
            return new AWSCredentialsProviderChain(Collections.singletonList(new ProfileCredentialsProvider(iamProfile)));
        } else {
            return new DefaultAWSCredentialsProviderChain();
        }
    }

    /**
     * Returns a "host:port" Host header value, picking an endpoint at random when more
     * than one is configured.
     */
    public String chooseHostHeader() {
        String address = endpoints.size() == 1 ? endpoints.get(0) : endpoints.get(random.nextInt(endpoints.size()));
        return String.format("%s:%s", address, port);
    }

    public boolean enableIamAuth() {
        return enableIamAuth;
    }

    public boolean connectViaLoadBalancer() {
        return connectViaLoadBalancer;
    }

    public boolean removeHostHeaderAfterSigning() {
        return removeHostHeaderAfterSigning;
    }

    // Renders this config as JSON for logging. Credentials are deliberately omitted;
    // field insertion order here determines the output order.
    public String asJsonString() {
        ObjectNode json = JsonNodeFactory.instance.objectNode();
        ArrayNode arrayNode = JsonNodeFactory.instance.arrayNode();
        for (String endpoint : endpoints) {
            arrayNode.add(endpoint);
        }
        json.set("endpoints", arrayNode);
        json.put("port", port);
        json.put("enableIamAuth", enableIamAuth);
        json.put("connectViaLoadBalancer", connectViaLoadBalancer);
        json.put("removeHostHeaderAfterSigning", removeHostHeaderAfterSigning);
        json.put("serviceRegion", serviceRegion);
        json.put("iamProfile", iamProfile);
        return json.toString();
    }

    @Override
    public String toString() {
        return asJsonString();
    }

    /**
     * Builder for {@link IamAuthConfig}. Defaults: port 8182, IAM auth and load balancer
     * disabled, empty service region, "default" IAM profile, no explicit credentials.
     */
    public static final class IamAuthConfigBuilder {

        private final List<String> endpoints = new ArrayList<>();
        private int port = 8182;
        private boolean enableIamAuth = false;
        private boolean connectViaLoadBalancer = false;
        private boolean removeHostHeaderAfterSigning = false;
        private String serviceRegion = "";
        private String iamProfile = DEFAULT_PROFILE;
        private AWSCredentialsProvider credentials = null;

        public IamAuthConfigBuilder addNeptuneEndpoints(String... endpoints) {
            this.endpoints.addAll(Arrays.asList(endpoints));
            return this;
        }

        public IamAuthConfigBuilder addNeptuneEndpoints(List<String> endpoints) {
            this.endpoints.addAll(endpoints);
            return this;
        }

        public IamAuthConfigBuilder setNeptunePort(int port) {
            this.port = port;
            return this;
        }

        public IamAuthConfigBuilder setServiceRegion(String serviceRegion) {
            this.serviceRegion = serviceRegion;
            return this;
        }

        public IamAuthConfigBuilder setIamProfile(String iamProfile) {
            this.iamProfile = iamProfile;
            return this;
        }

        public IamAuthConfigBuilder setCredentials(AWSCredentialsProvider credentials) {
            this.credentials = credentials;
            return this;
        }

        public IamAuthConfigBuilder enableIamAuth() {
            this.enableIamAuth = true;
            return this;
        }

        public IamAuthConfigBuilder removeHostHeaderAfterSigning() {
            this.removeHostHeaderAfterSigning = true;
            return this;
        }

        public IamAuthConfigBuilder connectViaLoadBalancer() {
            this.connectViaLoadBalancer = true;
            return this;
        }

        public IamAuthConfig build() {
            return new IamAuthConfig(
                    endpoints,
                    port,
                    enableIamAuth,
                    connectViaLoadBalancer,
                    removeHostHeaderAfterSigning,
                    serviceRegion,
                    iamProfile,
                    credentials);
        }
    }
}
| 7,512 |
0 | Create_ds/neptune-gremlin-client/gremlin-client/src/main/java/software/amazon | Create_ds/neptune-gremlin-client/gremlin-client/src/main/java/software/amazon/utils/GitProperties.java | /*
Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at
http://www.apache.org/licenses/LICENSE-2.0
or in the "license" file accompanying this file. This file is distributed
on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
express or implied. See the License for the specific language governing
permissions and limitations under the License.
*/
package software.amazon.utils;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.Properties;
/**
 * Exposes build/commit metadata loaded from a git.properties file — first from the
 * classpath, then from the location used by AWS Lambda deployments. Missing values
 * default to "unknown". Loading is best-effort and never throws.
 */
public class GitProperties {

    public static final GitProperties FromResource = GitProperties.fromResource();

    private final String commitId;
    private final String buildVersion;
    private final String commitTime;
    private final String buildTime;

    private GitProperties(String commitId, String buildVersion, String commitTime, String buildTime) {
        this.commitId = commitId;
        this.buildVersion = buildVersion;
        this.commitTime = commitTime;
        this.buildTime = buildTime;
    }

    // Loads git.properties from the classpath, falling back to the AWS Lambda task root.
    // Both streams are managed with try-with-resources: the original closed the classpath
    // stream manually, which leaked it if Properties.load() threw.
    private static GitProperties fromResource() {
        Properties properties = new Properties();
        try {
            try (InputStream stream = ClassLoader.getSystemResourceAsStream("git.properties")) {
                if (stream != null) {
                    properties.load(stream);
                } else {
                    // this is where we think the git properties are on AWS Lambda
                    File file = new File("/var/task/git.properties");
                    if (file.exists()) {
                        try (InputStream filestream = new FileInputStream(file)) {
                            properties.load(filestream);
                        }
                    }
                }
            }
        } catch (IOException e) {
            // Best-effort: fall through and use "unknown" defaults below.
        }

        return new GitProperties(
                properties.getProperty("git.commit.id", "unknown"),
                properties.getProperty("git.build.version", "unknown"),
                properties.getProperty("git.commit.time", "unknown"),
                properties.getProperty("git.build.time", "unknown"));
    }

    @Override
    public String toString() {
        return "[" +
                "buildVersion='" + buildVersion + '\'' +
                ", buildTime='" + buildTime + '\'' +
                ", commitId='" + commitId + '\'' +
                ", commitTime='" + commitTime + '\'' +
                "]";
    }

    public String getCommitId() {
        return commitId;
    }

    public String getBuildVersion() {
        return buildVersion;
    }

    public String getCommitTime() {
        return commitTime;
    }

    public String getBuildTime() {
        return buildTime;
    }
}
| 7,513 |
0 | Create_ds/neptune-gremlin-client/gremlin-client/src/main/java/software/amazon | Create_ds/neptune-gremlin-client/gremlin-client/src/main/java/software/amazon/utils/SoftwareVersion.java | /*
Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at
http://www.apache.org/licenses/LICENSE-2.0
or in the "license" file accompanying this file. This file is distributed
on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
express or implied. See the License for the specific language governing
permissions and limitations under the License.
*/
package software.amazon.utils;
import org.apache.commons.lang3.StringUtils;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.Properties;
/**
 * Resolves the name and version of this library: first from the jar manifest
 * (Implementation-Title / Implementation-Version), then from the Maven pom.properties
 * file at the location used by AWS Lambda deployments, finally defaulting to "unknown".
 * Resolution is best-effort and never throws.
 */
public class SoftwareVersion {

    public static final SoftwareVersion FromResource = SoftwareVersion.fromResource();

    private final String name;
    private final String version;

    private SoftwareVersion(String name, String version) {
        this.name = name;
        this.version = version;
    }

    private static SoftwareVersion fromResource() {
        // getPackage() can return null (e.g. classes loaded into the unnamed package);
        // the original dereferenced it unconditionally and could NPE.
        Package pkg = SoftwareVersion.class.getPackage();
        String name = pkg == null ? null : pkg.getImplementationTitle();
        String version = pkg == null ? null : pkg.getImplementationVersion();
        try {
            if (isEmpty(name) || isEmpty(version)) {
                // this is where we think the pom properties are on AWS Lambda
                File file = new File("/var/task/META-INF/maven/software.amazon.neptune/gremlin-client/pom.properties");
                if (file.exists()) {
                    try (InputStream filestream = new FileInputStream(file)) {
                        Properties properties = new Properties();
                        properties.load(filestream);
                        name = properties.getProperty("artifactId", "unknown");
                        version = properties.getProperty("version", "unknown");
                    }
                }
            }
        } catch (IOException e) {
            // Best-effort: fall through to the normalization below.
        }
        // Guarantee non-null results even when loading failed partway (the original could
        // return null name/version in that case).
        if (isEmpty(name)) {
            name = "unknown";
        }
        if (isEmpty(version)) {
            version = "unknown";
        }
        return new SoftwareVersion(name, version);
    }

    // Plain-JDK replacement for StringUtils.isEmpty, removing the commons-lang3 dependency
    // from this class.
    private static boolean isEmpty(String value) {
        return value == null || value.isEmpty();
    }

    @Override
    public String toString() {
        return name + ":" + version;
    }

    public String getName() {
        return name;
    }

    public String getVersion() {
        return version;
    }
}
| 7,514 |
0 | Create_ds/neptune-gremlin-client/gremlin-client/src/main/java/software/amazon | Create_ds/neptune-gremlin-client/gremlin-client/src/main/java/software/amazon/utils/RegionUtils.java | /*
Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at
http://www.apache.org/licenses/LICENSE-2.0
or in the "license" file accompanying this file. This file is distributed
on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
express or implied. See the License for the specific language governing
permissions and limitations under the License.
*/
package software.amazon.utils;
import com.amazonaws.regions.Region;
import com.amazonaws.regions.Regions;
import org.apache.commons.lang3.StringUtils;
/**
 * Resolves the current AWS Region name: the AWS_REGION environment variable takes
 * precedence, then the region reported by the runtime environment. Returns null when
 * neither source yields a region.
 */
public class RegionUtils {

    public static String getCurrentRegionName(){
        String regionName = EnvironmentVariableUtils.getOptionalEnv("AWS_REGION", null);
        if (StringUtils.isNotEmpty(regionName)) {
            return regionName;
        }
        Region currentRegion = Regions.getCurrentRegion();
        return currentRegion != null ? currentRegion.getName() : regionName;
    }
}
| 7,515 |
0 | Create_ds/neptune-gremlin-client/gremlin-client/src/main/java/software/amazon | Create_ds/neptune-gremlin-client/gremlin-client/src/main/java/software/amazon/utils/RetryUtils.java | /*
Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at
http://www.apache.org/licenses/LICENSE-2.0
or in the "license" file accompanying this file. This file is distributed
on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
express or implied. See the License for the specific language governing
permissions and limitations under the License.
*/
package software.amazon.utils;
import org.apache.commons.lang3.exception.ExceptionUtils;
import org.apache.tinkerpop.gremlin.driver.exception.NoHostAvailableException;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.net.ConnectException;
/**
 * Classifies exceptions thrown by Gremlin/Neptune operations as retryable or not.
 * An exception is retryable when its root cause is a connection-level failure, or when
 * its stack trace mentions a known transient Neptune condition.
 */
public class RetryUtils {

    // Stack-trace fragments that indicate a transient, retryable condition.
    private static final String[] RETRYABLE_MESSAGE_FRAGMENTS = {
            // Connection issues
            "Timed out while waiting for an available host",
            "waiting for connection",
            "Connection to server is no longer active",
            "Connection reset by peer",
            "Connection refused",
            "SSLEngine closed already",
            "Pool is shutdown",
            "ExtendedClosedChannelException",
            "Broken pipe",
            "StacklessClosedChannelException",
            // Concurrent writes can sometimes trigger a ConcurrentModificationException.
            // In these circumstances you may want to backoff and retry.
            "ConcurrentModificationException",
            // If the primary fails over to a new instance, existing connections to the old
            // primary will throw a ReadOnlyViolationException. You may want to back off and retry.
            "ReadOnlyViolationException",
            // CVEs can sometimes occur if a previous transaction is not yet visible to the
            // current transaction.
            "ConstraintViolationException"
    };

    /**
     * Determines whether the supplied exception represents a transient failure worth retrying.
     *
     * @param ex the exception to classify
     * @return a {@link Result} carrying the verdict, the root cause, and its full stack trace
     */
    public static Result isRetryableException(Exception ex) {
        Throwable e = ExceptionUtils.getRootCause(ex);
        if (e == null) {
            // ExceptionUtils.getRootCause() returns null when ex has no cause chain
            // (commons-lang3 < 3.13); fall back to the exception itself to avoid an NPE.
            e = ex;
        }
        Class<? extends Throwable> exceptionClass = e.getClass();
        String message = getMessage(e);

        if (NoHostAvailableException.class.isAssignableFrom(exceptionClass)) {
            return Result.RETRYABLE(e, message);
        }
        if (ConnectException.class.isAssignableFrom(exceptionClass)) {
            return Result.RETRYABLE(e, message);
        }
        for (String fragment : RETRYABLE_MESSAGE_FRAGMENTS) {
            if (message.contains(fragment)) {
                return Result.RETRYABLE(e, message);
            }
        }

        return Result.NOT_RETRYABLE(e, message);
    }

    // Returns the FULL stack trace of e as a string — not just e.getMessage() — so that
    // fragment matching above can see nested exception class names.
    private static String getMessage(Throwable e) {
        StringWriter stringWriter = new StringWriter();
        e.printStackTrace(new PrintWriter(stringWriter));
        return stringWriter.toString();
    }

    /**
     * Classification result: whether the exception is retryable, plus its root cause and
     * the stack trace used for matching.
     */
    public static class Result {

        private static Result RETRYABLE(Throwable e, String message){
            return new Result(true, e, message);
        }

        private static Result NOT_RETRYABLE(Throwable e, String message){
            return new Result(false, e, message);
        }

        private final boolean isRetryable;
        private final Throwable e;
        private final String message;

        public Result(boolean isRetryable, Throwable e, String message) {
            this.isRetryable = isRetryable;
            this.e = e;
            this.message = message;
        }

        public boolean isRetryable() {
            return isRetryable;
        }

        public Throwable rootCause() {
            return e;
        }

        public String message() {
            return message;
        }

        @Override
        public String toString() {
            return "Result{" +
                    "isRetryable=" + isRetryable +
                    ", e=" + e +
                    ", message='" + message + '\'' +
                    '}';
        }
    }
}
| 7,516 |
0 | Create_ds/neptune-gremlin-client/gremlin-client/src/main/java/software/amazon | Create_ds/neptune-gremlin-client/gremlin-client/src/main/java/software/amazon/utils/CollectionUtils.java | /*
Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at
http://www.apache.org/licenses/LICENSE-2.0
or in the "license" file accompanying this file. This file is distributed
on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
express or implied. See the License for the specific language governing
permissions and limitations under the License.
*/
package software.amazon.utils;
import java.util.ArrayList;
import java.util.List;
/**
 * Utility methods for working with collections.
 */
public class CollectionUtils {

    /**
     * Returns a new mutable list containing all elements of {@code l1}
     * followed by all elements of {@code l2}. Neither input list is modified.
     *
     * @param l1  first list (must not be null)
     * @param l2  second list (must not be null)
     * @param <T> element type
     * @return a new {@link ArrayList} with the concatenated contents
     */
    public static <T> List<T> join(List<T> l1, List<T> l2) {
        // Presize to avoid intermediate resizing while copying both lists.
        List<T> results = new ArrayList<>(l1.size() + l2.size());
        results.addAll(l1);
        results.addAll(l2);
        return results;
    }
}
| 7,517 |
0 | Create_ds/neptune-gremlin-client/gremlin-client/src/main/java/software/amazon | Create_ds/neptune-gremlin-client/gremlin-client/src/main/java/software/amazon/utils/EnvironmentVariableUtils.java | /*
Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at
http://www.apache.org/licenses/LICENSE-2.0
or in the "license" file accompanying this file. This file is distributed
on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
express or implied. See the License for the specific language governing
permissions and limitations under the License.
*/
package software.amazon.utils;
/**
 * Helpers for reading configuration from environment variables.
 * Both null and the empty string are treated as "not set".
 */
public class EnvironmentVariableUtils {

    /**
     * Returns the value of the named environment variable.
     *
     * @param name environment variable name
     * @return the variable's non-empty value
     * @throws IllegalStateException if the variable is unset or empty
     */
    public static String getMandatoryEnv(String name) {
        // Read once so the check and the returned value cannot diverge,
        // and to avoid a redundant environment lookup.
        String value = System.getenv(name);
        if (isNullOrEmpty(value)) {
            throw new IllegalStateException(String.format("Missing environment variable: %s", name));
        }
        return value;
    }

    /**
     * Returns the value of the named environment variable, or
     * {@code defaultValue} if the variable is unset or empty.
     *
     * @param name         environment variable name
     * @param defaultValue value to return when the variable is absent
     * @return the variable's value, or the default
     */
    public static String getOptionalEnv(String name, String defaultValue) {
        String value = System.getenv(name);
        if (isNullOrEmpty(value)) {
            return defaultValue;
        }
        return value;
    }

    // Treats both null and the empty string as "not set".
    private static boolean isNullOrEmpty(String value) {
        return value == null || value.isEmpty();
    }
}
| 7,518 |
0 | Create_ds/neptune-gremlin-client/gremlin-client/src/main/java/software/amazon | Create_ds/neptune-gremlin-client/gremlin-client/src/main/java/software/amazon/utils/Clock.java | /*
Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at
http://www.apache.org/licenses/LICENSE-2.0
or in the "license" file accompanying this file. This file is distributed
on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
express or implied. See the License for the specific language governing
permissions and limitations under the License.
*/
package software.amazon.utils;
/**
 * Abstraction over the system clock so that time-dependent logic can be
 * tested with a deterministic time source.
 */
@FunctionalInterface
public interface Clock {

    /**
     * Returns the current time in milliseconds since the Unix epoch.
     */
    long currentTimeMillis();
}
| 7,519 |
0 | Create_ds/neptune-gremlin-client/gremlin-client-demo/src/main/java/software/amazon | Create_ds/neptune-gremlin-client/gremlin-client-demo/src/main/java/software/amazon/neptune/RetryDemo.java | package software.amazon.neptune;
import com.evanlennick.retry4j.CallExecutor;
import com.evanlennick.retry4j.CallExecutorBuilder;
import com.evanlennick.retry4j.Status;
import com.evanlennick.retry4j.config.RetryConfig;
import com.evanlennick.retry4j.config.RetryConfigBuilder;
import com.evanlennick.retry4j.exception.RetriesExhaustedException;
import com.github.rvesse.airline.annotations.Command;
import com.github.rvesse.airline.annotations.Option;
import com.github.rvesse.airline.annotations.restrictions.Once;
import com.github.rvesse.airline.annotations.restrictions.Port;
import com.github.rvesse.airline.annotations.restrictions.PortType;
import com.github.rvesse.airline.annotations.restrictions.RequireOnlyOne;
import org.apache.commons.lang3.StringUtils;
import org.apache.tinkerpop.gremlin.driver.ClusterContext;
import org.apache.tinkerpop.gremlin.driver.GremlinClient;
import org.apache.tinkerpop.gremlin.driver.GremlinCluster;
import org.apache.tinkerpop.gremlin.driver.remote.DriverRemoteConnection;
import org.apache.tinkerpop.gremlin.process.traversal.AnonymousTraversalSource;
import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource;
import org.apache.tinkerpop.gremlin.structure.Edge;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import software.amazon.neptune.cluster.*;
import software.amazon.utils.RegionUtils;
import software.amazon.utils.RetryUtils;
import java.time.temporal.ChronoUnit;
import java.util.List;
import java.util.Map;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.function.Function;
@Command(name = "retry-demo", description = "Demonstrates backoff-and-retry strategies when creating connecting and submitting query")
public class RetryDemo implements Runnable {
private static final Logger logger = LoggerFactory.getLogger(RetryDemo.class);
@Option(name = {"--cluster-id"}, description = "Amazon Neptune cluster Id. You must supply either a cluster Id or the name of an AWS Lambda proxy function (using --lambda-proxy).")
@Once
@RequireOnlyOne(tag = "cluster-id-or-lambda-proxy")
private String clusterId;
@Option(name = {"--lambda-proxy"}, description = "Name of the AWS Lambda proxy function used to fetch cluster topology. You must supply either the name of an AWS Lambda proxy function or a cluster Id or (using --cluster-id). If you are using a Lambda proxy, amke sure you have installed it (using the AWS CloudFormation template) before running this demo.")
@Once
@RequireOnlyOne(tag = "cluster-id-or-lambda-proxy")
private String lambdaProxy;
@Option(name = {"--port"}, description = "Neptune port (optional, default 8182)")
@Port(acceptablePorts = {PortType.SYSTEM, PortType.USER})
@Once
private int neptunePort = 8182;
@Option(name = {"--disable-ssl"}, description = "Disables connectivity over SSL (optional, default false)")
@Once
private boolean disableSsl = false;
@Option(name = {"--enable-iam"}, description = "Enables IAM database authentication (optional, default false)")
@Once
private boolean enableIam = false;
@Option(name = {"--query-count"}, description = "Number of queries to execute")
@Once
private int queryCount = 1000000;
@Option(name = {"--log-level"}, description = "Log level")
@Once
private String logLevel = "info";
@Option(name = {"--profile"}, description = "Credentials profile")
@Once
private String profile = "default";
@Option(name = {"--service-region"}, description = "Neptune service region")
@Once
private String serviceRegion = null;
@Option(name = {"--interval"}, description = "Interval (in seconds) between refreshing addresses")
@Once
private int intervalSeconds = 15;
@Override
public void run() {
try {
ClusterEndpointsRefreshAgent refreshAgent = createRefreshAgent();
RetryConfig retryConfig = new RetryConfigBuilder()
.retryOnCustomExceptionLogic(new Function<Exception, Boolean>() {
@Override
public Boolean apply(Exception e) {
RetryUtils.Result result = RetryUtils.isRetryableException(e);
logger.info("isRetriableException: {}", result);
return result.isRetryable();
}
})
.withExponentialBackoff()
.withMaxNumberOfTries(5)
.withDelayBetweenTries(1, ChronoUnit.SECONDS)
.build();
ClusterContext readerContext = createClusterContext(retryConfig, refreshAgent, EndpointsType.ReadReplicas);
ClusterContext writerContext = createClusterContext(retryConfig, refreshAgent, EndpointsType.Primary);
// Use same GraphTraversalSources across threads
GraphTraversalSource gReader = readerContext.graphTraversalSource();
GraphTraversalSource gWriter = writerContext.graphTraversalSource();
logger.info("Starting queries...");
AtomicInteger currentQueryCount = new AtomicInteger(0);
ExecutorService taskExecutor = Executors.newFixedThreadPool(5);
for (int i = 0; i < 5; i++) {
taskExecutor.submit(new Runnable() {
@Override
public void run() {
try {
int count = 0;
int readCount = 0;
int writeCount = 0;
int tries = 0;
int failedReads = 0;
int failedWrites = 0;
CallExecutor executor = new CallExecutorBuilder()
.config(retryConfig)
.build();
while (count < queryCount) {
count = currentQueryCount.incrementAndGet();
if (count % 7 == 0) {
// write
writeCount++;
Callable<Edge> query = () ->
gWriter.addV("Thing").as("v1").
addV("Thing").as("v2").
addE("Connection").from("v1").to("v2").
next();
try {
Status<Edge> status = executor.execute(query);
tries += status.getTotalTries();
} catch (RetriesExhaustedException e) {
failedWrites++;
}
} else {
// read
readCount++;
Callable<List<Map<Object, Object>>> query = () ->
gReader.V().limit(10).valueMap(true).toList();
try {
Status<List<Map<Object, Object>>> status = executor.execute(query);
tries += status.getTotalTries();
List<Map<Object, Object>> results = status.getResult();
for (Map<Object, Object> result : results) {
//Do nothing
}
} catch (RetriesExhaustedException e) {
failedReads++;
}
}
logger.info("Progress: [queries: {}, tries: {}, reads: {}, writes: {}, failedReads: {}, failedWrites: {}]",
(readCount + writeCount),
tries,
readCount,
writeCount,
failedReads,
failedWrites);
}
} catch (Exception e) {
logger.error("Unexpected error", e);
}
}
});
}
taskExecutor.shutdown();
try {
if (!taskExecutor.awaitTermination(Long.MAX_VALUE, TimeUnit.NANOSECONDS)) {
logger.warn("Timeout expired with uncompleted tasks");
}
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
throw new RuntimeException(e);
}
logger.info("Closing...");
refreshAgent.close();
readerContext.close();
writerContext.close();
} catch (Exception e) {
e.printStackTrace();
System.exit(-1);
}
}
private ClusterContext createClusterContext(RetryConfig retryConfig,
ClusterEndpointsRefreshAgent refreshAgent,
EndpointsSelector selector) {
logger.info("Creating ClusterContext for {}", selector);
CallExecutor executor = new CallExecutorBuilder()
.config(retryConfig)
.build();
Status<ClusterContext> status = executor.execute((Callable<ClusterContext>) () -> {
NeptuneGremlinClusterBuilder builder = NeptuneGremlinClusterBuilder.build()
.enableSsl(!disableSsl)
.enableIamAuth(enableIam)
.iamProfile(profile)
.addContactPoints(refreshAgent.getEndpoints(selector))
.port(neptunePort);
if (StringUtils.isNotEmpty(serviceRegion)) {
builder = builder.serviceRegion(serviceRegion);
}
GremlinCluster cluster = builder.create();
GremlinClient client = cluster.connect();
refreshAgent.startPollingNeptuneAPI(
client,
selector,
intervalSeconds,
TimeUnit.SECONDS
);
DriverRemoteConnection connection = DriverRemoteConnection.using(client);
GraphTraversalSource g = AnonymousTraversalSource.traversal().withRemote(connection);
return new ClusterContext(cluster, client, g);
});
return status.getResult();
}
private ClusterEndpointsRefreshAgent createRefreshAgent() {
if (StringUtils.isNotEmpty(clusterId)) {
return ClusterEndpointsRefreshAgent.managementApi(clusterId, RegionUtils.getCurrentRegionName(), profile);
} else if (StringUtils.isNotEmpty(lambdaProxy)) {
return ClusterEndpointsRefreshAgent.lambdaProxy(lambdaProxy, RegionUtils.getCurrentRegionName(), profile);
} else {
throw new IllegalStateException("You must supply either a cluster Id or AWS Lambda proxy name");
}
}
}
| 7,520 |
0 | Create_ds/neptune-gremlin-client/gremlin-client-demo/src/main/java/software/amazon | Create_ds/neptune-gremlin-client/gremlin-client-demo/src/main/java/software/amazon/neptune/CustomSelectorsDemo.java | /*
Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at
http://www.apache.org/licenses/LICENSE-2.0
or in the "license" file accompanying this file. This file is distributed
on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
express or implied. See the License for the specific language governing
permissions and limitations under the License.
*/
package software.amazon.neptune;
import com.github.rvesse.airline.annotations.Command;
import com.github.rvesse.airline.annotations.Option;
import com.github.rvesse.airline.annotations.restrictions.Once;
import com.github.rvesse.airline.annotations.restrictions.Port;
import com.github.rvesse.airline.annotations.restrictions.PortType;
import com.github.rvesse.airline.annotations.restrictions.RequireOnlyOne;
import org.apache.commons.lang3.StringUtils;
import org.apache.tinkerpop.gremlin.driver.EndpointCollection;
import org.apache.tinkerpop.gremlin.driver.GremlinClient;
import org.apache.tinkerpop.gremlin.driver.GremlinCluster;
import org.apache.tinkerpop.gremlin.driver.RefreshTask;
import org.apache.tinkerpop.gremlin.driver.remote.DriverRemoteConnection;
import org.apache.tinkerpop.gremlin.process.traversal.AnonymousTraversalSource;
import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import software.amazon.neptune.cluster.*;
import software.amazon.utils.RegionUtils;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;
/**
 * Demonstrates writing custom {@code EndpointsSelector} implementations that
 * route writes to available primary instances (falling back to the cluster
 * endpoint) and reads to available read replicas, with a single refresh agent
 * keeping both clients' endpoints up to date.
 */
@Command(name = "custom-selectors-demo", description = "Demo using custom endpoint selectors")
public class CustomSelectorsDemo implements Runnable {

    private static final Logger logger = LoggerFactory.getLogger(CustomSelectorsDemo.class);

    @Option(name = {"--cluster-id"}, description = "Amazon Neptune cluster Id. You must supply either a cluster Id or the name of an AWS Lambda proxy function (using --lambda-proxy).")
    @Once
    @RequireOnlyOne(tag = "cluster-id-or-lambda-proxy")
    private String clusterId;

    @Option(name = {"--lambda-proxy"}, description = "Name of the AWS Lambda proxy function used to fetch cluster topology. You must supply either the name of an AWS Lambda proxy function or a cluster Id (using --cluster-id). If you are using a Lambda proxy, make sure you have installed it (using the AWS CloudFormation template) before running this demo.")
    @Once
    @RequireOnlyOne(tag = "cluster-id-or-lambda-proxy")
    private String lambdaProxy;

    @Option(name = {"--port"}, description = "Neptune port (optional, default 8182)")
    @Port(acceptablePorts = {PortType.SYSTEM, PortType.USER})
    @Once
    private int neptunePort = 8182;

    @Option(name = {"--disable-ssl"}, description = "Disables connectivity over SSL (optional, default false)")
    @Once
    private boolean disableSsl = false;

    @Option(name = {"--enable-iam"}, description = "Enables IAM database authentication (optional, default false)")
    @Once
    private boolean enableIam = false;

    @Option(name = {"--query-count"}, description = "Number of queries to execute")
    @Once
    private int queryCount = 1000000;

    @Option(name = {"--log-level"}, description = "Log level")
    @Once
    private String logLevel = "info";

    @Option(name = {"--profile"}, description = "Credentials profile")
    @Once
    private String profile = "default";

    @Option(name = {"--service-region"}, description = "Neptune service region")
    @Once
    private String serviceRegion = null;

    @Option(name = {"--interval"}, description = "Interval (in seconds) between refreshing addresses")
    @Once
    private int intervalSeconds = 15;

    @Override
    public void run() {
        try {
            // Writer selector: all available primary instances, falling back
            // to the cluster endpoint if none are reported available.
            EndpointsSelector writerSelector = (cluster) -> {
                List<NeptuneInstanceMetadata> endpoints = cluster.getInstances().stream()
                        .filter(NeptuneInstanceMetadata::isPrimary)
                        .filter(NeptuneInstanceMetadata::isAvailable)
                        .collect(Collectors.toList());
                return endpoints.isEmpty() ?
                        new EndpointCollection(Collections.singletonList(cluster.getClusterEndpoint())) :
                        new EndpointCollection(endpoints);
            };

            // Reader selector: all available read replicas.
            EndpointsSelector readerSelector = (cluster) ->
                    new EndpointCollection(
                            cluster.getInstances().stream()
                                    .filter(NeptuneInstanceMetadata::isReader)
                                    .filter(NeptuneInstanceMetadata::isAvailable)
                                    .collect(Collectors.toList()));

            ClusterEndpointsRefreshAgent refreshAgent = createRefreshAgent();

            GremlinCluster writerCluster = createCluster(writerSelector, refreshAgent);
            GremlinCluster readerCluster = createCluster(readerSelector, refreshAgent);

            GremlinClient writer = writerCluster.connect();
            GremlinClient reader = readerCluster.connect();

            // A single agent refreshes both clients at the configured
            // --interval (fixes the interval previously being hard-coded,
            // which silently ignored the option).
            refreshAgent.startPollingNeptuneAPI(
                    Arrays.asList(
                            RefreshTask.refresh(writer, writerSelector),
                            RefreshTask.refresh(reader, readerSelector)
                    ),
                    intervalSeconds,
                    TimeUnit.SECONDS);

            GraphTraversalSource gWriter = createGraphTraversalSource(writer);
            GraphTraversalSource gReader = createGraphTraversalSource(reader);

            for (int i = 0; i < queryCount; i++) {
                try {
                    // Alternate between reads (odd i) and writes (even i).
                    if (i % 2 == 1) {
                        List<Map<Object, Object>> results = gReader.V().limit(10).valueMap(true).toList();
                        for (Map<Object, Object> result : results) {
                            //Do nothing
                        }
                    } else {
                        gWriter.addV("TestNode").property("my-id", i).next();
                    }
                } catch (Exception e) {
                    logger.warn("Error processing query: {}", e.getMessage());
                }
                if (i % 10000 == 0) {
                    System.out.println();
                    System.out.println("Number of queries: " + i);
                }
            }

            refreshAgent.close();
            writer.close();
            reader.close();
            writerCluster.close();
            readerCluster.close();

        } catch (Exception e) {
            System.err.println("An error occurred while connecting to Neptune:");
            e.printStackTrace();
            System.exit(-1);
        }
    }

    // Creates a remote traversal source bound to the given client.
    private static GraphTraversalSource createGraphTraversalSource(GremlinClient writer) {
        DriverRemoteConnection connection = DriverRemoteConnection.using(writer);
        return AnonymousTraversalSource.traversal().withRemote(connection);
    }

    /**
     * Chooses how cluster topology is discovered: via the Neptune Management
     * API (--cluster-id) or via an AWS Lambda proxy function (--lambda-proxy).
     */
    private ClusterEndpointsRefreshAgent createRefreshAgent() {
        if (StringUtils.isNotEmpty(clusterId)) {
            return ClusterEndpointsRefreshAgent.managementApi(clusterId, RegionUtils.getCurrentRegionName(), profile);
        } else if (StringUtils.isNotEmpty(lambdaProxy)) {
            return ClusterEndpointsRefreshAgent.lambdaProxy(lambdaProxy, RegionUtils.getCurrentRegionName(), profile);
        } else {
            throw new IllegalStateException("You must supply either a cluster Id or AWS Lambda proxy name");
        }
    }

    // Builds a GremlinCluster whose initial contact points come from the
    // given selector applied to the current cluster topology.
    private GremlinCluster createCluster(EndpointsSelector selector, ClusterEndpointsRefreshAgent refreshAgent) {
        NeptuneGremlinClusterBuilder clusterBuilder = NeptuneGremlinClusterBuilder.build()
                .enableSsl(!disableSsl)
                .enableIamAuth(enableIam)
                .iamProfile(profile)
                .addContactPoints(refreshAgent.getEndpoints(selector))
                .minConnectionPoolSize(3)
                .maxConnectionPoolSize(3)
                .port(neptunePort);

        if (StringUtils.isNotEmpty(serviceRegion)) {
            clusterBuilder = clusterBuilder.serviceRegion(serviceRegion);
        }

        return clusterBuilder.create();
    }
}
| 7,521 |
0 | Create_ds/neptune-gremlin-client/gremlin-client-demo/src/main/java/software/amazon | Create_ds/neptune-gremlin-client/gremlin-client-demo/src/main/java/software/amazon/neptune/RefreshAgentDemo.java | /*
Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at
http://www.apache.org/licenses/LICENSE-2.0
or in the "license" file accompanying this file. This file is distributed
on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
express or implied. See the License for the specific language governing
permissions and limitations under the License.
*/
package software.amazon.neptune;
import com.github.rvesse.airline.annotations.Command;
import com.github.rvesse.airline.annotations.Option;
import com.github.rvesse.airline.annotations.restrictions.Once;
import com.github.rvesse.airline.annotations.restrictions.Port;
import com.github.rvesse.airline.annotations.restrictions.PortType;
import com.github.rvesse.airline.annotations.restrictions.RequireOnlyOne;
import org.apache.commons.lang3.StringUtils;
import org.apache.tinkerpop.gremlin.driver.GremlinClient;
import org.apache.tinkerpop.gremlin.driver.GremlinCluster;
import org.apache.tinkerpop.gremlin.driver.remote.DriverRemoteConnection;
import org.apache.tinkerpop.gremlin.process.traversal.AnonymousTraversalSource;
import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import software.amazon.neptune.cluster.ClusterEndpointsRefreshAgent;
import software.amazon.neptune.cluster.EndpointsType;
import software.amazon.neptune.cluster.NeptuneGremlinClusterBuilder;
import software.amazon.utils.RegionUtils;
import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeUnit;
@Command(name = "refresh-agent-demo", description = "Demo using refresh client with topology aware cluster and client")
public class RefreshAgentDemo implements Runnable {
private static final Logger logger = LoggerFactory.getLogger(RefreshAgentDemo.class);
@Option(name = {"--cluster-id"}, description = "Amazon Neptune cluster Id. You must supply either a cluster Id or the name of an AWS Lambda proxy function (using --lambda-proxy).")
@Once
@RequireOnlyOne(tag = "cluster-id-or-lambda-proxy")
private String clusterId;
@Option(name = {"--lambda-proxy"}, description = "Name of the AWS Lambda proxy function used to fetch cluster topology. You must supply either the name of an AWS Lambda proxy function or a cluster Id or (using --cluster-id). If you are using a Lambda proxy, amke sure you have installed it (using the AWS CloudFormation template) before running this demo.")
@Once
@RequireOnlyOne(tag = "cluster-id-or-lambda-proxy")
private String lambdaProxy;
@Option(name = {"--port"}, description = "Neptune port (optional, default 8182)")
@Port(acceptablePorts = {PortType.SYSTEM, PortType.USER})
@Once
private int neptunePort = 8182;
@Option(name = {"--disable-ssl"}, description = "Disables connectivity over SSL (optional, default false)")
@Once
private boolean disableSsl = false;
@Option(name = {"--enable-iam"}, description = "Enables IAM database authentication (optional, default false)")
@Once
private boolean enableIam = false;
@Option(name = {"--query-count"}, description = "Number of queries to execute")
@Once
private int queryCount = 1000000;
@Option(name = {"--log-level"}, description = "Log level")
@Once
private String logLevel = "info";
@Option(name = {"--profile"}, description = "Credentials profile")
@Once
private String profile = "default";
@Option(name = {"--service-region"}, description = "Neptune service region")
@Once
private String serviceRegion = null;
@Option(name = {"--interval"}, description = "Interval (in seconds) between refreshing addresses")
@Once
private int intervalSeconds = 15;
@Override
public void run() {
try {
EndpointsType selector = EndpointsType.ReadReplicas;
ClusterEndpointsRefreshAgent refreshAgent = createRefreshAgent();
NeptuneGremlinClusterBuilder builder = NeptuneGremlinClusterBuilder.build()
.enableSsl(!disableSsl)
.enableIamAuth(enableIam)
.iamProfile(profile)
.addContactPoints(refreshAgent.getEndpoints(selector))
.minConnectionPoolSize(3)
.maxConnectionPoolSize(3)
.port(neptunePort);
if (StringUtils.isNotEmpty(serviceRegion)) {
builder = builder.serviceRegion(serviceRegion);
}
GremlinCluster cluster = builder.create();
GremlinClient client = cluster.connect();
refreshAgent.startPollingNeptuneAPI(
client,
selector,
intervalSeconds,
TimeUnit.SECONDS
);
DriverRemoteConnection connection = DriverRemoteConnection.using(client);
GraphTraversalSource g = AnonymousTraversalSource.traversal().withRemote(connection);
for (int i = 0; i < queryCount; i++) {
try {
List<Map<Object, Object>> results = g.V().limit(10).valueMap(true).toList();
for (Map<Object, Object> result : results) {
//Do nothing
}
if (i % 10000 == 0) {
System.out.println();
System.out.println("Number of queries: " + i);
}
} catch (Exception e) {
logger.warn("Error processing query: {}", e.getMessage());
}
}
refreshAgent.close();
client.close();
cluster.close();
} catch (Exception e) {
System.err.println("An error occurred while connecting to Neptune:");
e.printStackTrace();
System.exit(-1);
}
}
private ClusterEndpointsRefreshAgent createRefreshAgent() {
if (StringUtils.isNotEmpty(clusterId)) {
return ClusterEndpointsRefreshAgent.managementApi(clusterId, RegionUtils.getCurrentRegionName(), profile);
} else if (StringUtils.isNotEmpty(lambdaProxy)) {
return ClusterEndpointsRefreshAgent.lambdaProxy(lambdaProxy, RegionUtils.getCurrentRegionName(), profile);
} else {
throw new IllegalStateException("You must supply either a cluster Id or AWS Lambda proxy name");
}
}
} | 7,522 |
0 | Create_ds/neptune-gremlin-client/gremlin-client-demo/src/main/java/software/amazon | Create_ds/neptune-gremlin-client/gremlin-client-demo/src/main/java/software/amazon/neptune/ApplicationRunner.java | /*
Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at
http://www.apache.org/licenses/LICENSE-2.0
or in the "license" file accompanying this file. This file is distributed
on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
express or implied. See the License for the specific language governing
permissions and limitations under the License.
*/
package software.amazon.neptune;
import com.github.rvesse.airline.annotations.Cli;
import com.github.rvesse.airline.help.Help;
/**
 * CLI entry point that dispatches to the individual demo commands.
 * Logging levels are configured from the command line before any command runs.
 */
@Cli(name = "java -jar neptune-topology-aware-client-demo.jar",
        description = "Demo of topology aware Gremlin cluster and client",
        defaultCommand = Help.class,
        commands = {
                RefreshAgentDemo.class,
                TxDemo.class,
                CustomSelectorsDemo.class,
                RetryDemo.class,
                Help.class})
public class ApplicationRunner {

    public static void main(String[] args) {

        com.github.rvesse.airline.Cli<Runnable> cli = new com.github.rvesse.airline.Cli<>(ApplicationRunner.class);

        // Peek at the command line for a --log-level value so logging can be
        // configured before the command is parsed and run. The bound i <
        // args.length - 1 guards against --log-level being the last argument
        // (the original args[i + 1] access could throw out of bounds).
        String logLevel = "info";
        for (int i = 0; i < args.length - 1; i++) {
            if (args[i].equalsIgnoreCase("--log-level")) {
                logLevel = args[i + 1];
                break;
            }
        }

        System.setProperty("org.slf4j.simpleLogger.defaultLogLevel", logLevel);
        System.setProperty("org.slf4j.simpleLogger.log.org.apache.tinkerpop.gremlin.driver.GremlinClient", logLevel);
        System.setProperty("org.slf4j.simpleLogger.log.org.apache.tinkerpop.gremlin.driver", logLevel);
        System.setProperty("org.slf4j.simpleLogger.log.software.amazon.awssdk", "info");
        System.setProperty("org.slf4j.simpleLogger.log.io.netty", "info");

        try {
            Runnable cmd = cli.parse(args);
            cmd.run();
        } catch (Exception e) {
            System.err.println(e.getMessage());
            // Show help for the attempted command, or general help if no
            // command was supplied (avoids args[0] on an empty array).
            Runnable cmd = args.length > 0 ? cli.parse("help", args[0]) : cli.parse("help");
            cmd.run();
            System.exit(-1);
        }
    }
}
| 7,523 |
0 | Create_ds/neptune-gremlin-client/gremlin-client-demo/src/main/java/software/amazon | Create_ds/neptune-gremlin-client/gremlin-client-demo/src/main/java/software/amazon/neptune/TxDemo.java | /*
Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at
http://www.apache.org/licenses/LICENSE-2.0
or in the "license" file accompanying this file. This file is distributed
on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
express or implied. See the License for the specific language governing
permissions and limitations under the License.
*/
package software.amazon.neptune;
import com.github.rvesse.airline.annotations.Command;
import com.github.rvesse.airline.annotations.Option;
import com.github.rvesse.airline.annotations.restrictions.Once;
import com.github.rvesse.airline.annotations.restrictions.Port;
import com.github.rvesse.airline.annotations.restrictions.PortType;
import com.github.rvesse.airline.annotations.restrictions.RequireOnlyOne;
import org.apache.commons.lang3.StringUtils;
import org.apache.tinkerpop.gremlin.driver.GremlinClient;
import org.apache.tinkerpop.gremlin.driver.GremlinCluster;
import org.apache.tinkerpop.gremlin.driver.remote.DriverRemoteConnection;
import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource;
import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.__;
import org.apache.tinkerpop.gremlin.structure.T;
import org.apache.tinkerpop.gremlin.structure.Transaction;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import software.amazon.neptune.cluster.ClusterEndpointsRefreshAgent;
import software.amazon.neptune.cluster.EndpointsSelector;
import software.amazon.neptune.cluster.EndpointsType;
import software.amazon.neptune.cluster.NeptuneGremlinClusterBuilder;
import software.amazon.utils.RegionUtils;
import java.util.UUID;
import java.util.concurrent.TimeUnit;
import static org.apache.tinkerpop.gremlin.process.traversal.AnonymousTraversalSource.traversal;
/**
 * Demonstrates transactional writes with the Neptune Gremlin Client: each
 * iteration creates two vertices and an edge between them inside a single
 * transaction, committing on success and rolling back on failure.
 */
@Command(name = "tx-demo", description = "Transactional writes demo using the Neptune Gremlin Client")
public class TxDemo implements Runnable {

    private static final Logger logger = LoggerFactory.getLogger(TxDemo.class);

    @Option(name = {"--cluster-id"}, description = "Amazon Neptune cluster Id. You must supply either a cluster Id or the name of an AWS Lambda proxy function (using --lambda-proxy).")
    @Once
    @RequireOnlyOne(tag = "cluster-id-or-lambda-proxy")
    private String clusterId;

    @Option(name = {"--lambda-proxy"}, description = "Name of the AWS Lambda proxy function used to fetch cluster topology. You must supply either the name of an AWS Lambda proxy function or a cluster Id (using --cluster-id). If you are using a Lambda proxy, make sure you have installed it (using the AWS CloudFormation template) before running this demo.")
    @Once
    @RequireOnlyOne(tag = "cluster-id-or-lambda-proxy")
    private String lambdaProxy;

    @Option(name = {"--port"}, description = "Neptune port (optional, default 8182)")
    @Port(acceptablePorts = {PortType.SYSTEM, PortType.USER})
    @Once
    private int neptunePort = 8182;

    @Option(name = {"--disable-ssl"}, description = "Disables connectivity over SSL (optional, default false)")
    @Once
    private boolean disableSsl = false;

    @Option(name = {"--enable-iam"}, description = "Enables IAM database authentication (optional, default false)")
    @Once
    private boolean enableIam = false;

    @Option(name = {"--tx-count"}, description = "Number of transactions to execute")
    @Once
    private int txCount = 10;

    @Option(name = {"--log-level"}, description = "Log level")
    @Once
    private String logLevel = "info";

    @Option(name = {"--profile"}, description = "Credentials profile")
    @Once
    private String profile = "default";

    @Option(name = {"--service-region"}, description = "Neptune service region")
    @Once
    private String serviceRegion = null;

    @Option(name = {"--interval"}, description = "Interval (in seconds) between refreshing addresses")
    @Once
    private int intervalSeconds = 15;

    @Override
    public void run() {
        try {
            // Transactions must go to the writer, so use the cluster endpoint.
            EndpointsSelector selector = EndpointsType.ClusterEndpoint;

            ClusterEndpointsRefreshAgent refreshAgent = createRefreshAgent();

            NeptuneGremlinClusterBuilder builder = NeptuneGremlinClusterBuilder.build()
                    .enableSsl(!disableSsl)
                    .enableIamAuth(enableIam)
                    .iamProfile(profile)
                    .addContactPoints(refreshAgent.getEndpoints(selector))
                    .minConnectionPoolSize(3)
                    .maxConnectionPoolSize(3)
                    .port(neptunePort);

            if (StringUtils.isNotEmpty(serviceRegion)) {
                builder = builder.serviceRegion(serviceRegion);
            }

            GremlinCluster cluster = builder.create();
            GremlinClient client = cluster.connect();

            refreshAgent.startPollingNeptuneAPI(
                    client, selector, intervalSeconds,
                    TimeUnit.SECONDS
            );

            DriverRemoteConnection connection = DriverRemoteConnection.using(client);

            for (int i = 0; i < txCount; i++) {
                // Each iteration runs in its own transaction: two vertices
                // plus a connecting edge, all committed (or rolled back) atomically.
                Transaction tx = traversal().withRemote(connection).tx();
                GraphTraversalSource g = tx.begin();

                try {
                    String id1 = UUID.randomUUID().toString();
                    String id2 = UUID.randomUUID().toString();

                    g.addV("testNode").property(T.id, id1).iterate();
                    g.addV("testNode").property(T.id, id2).iterate();
                    g.addE("testEdge").from(__.V(id1)).to(__.V(id2)).iterate();

                    tx.commit();

                    System.out.println("Tx complete: " + i);
                    System.out.println("id1 : " + id1);
                    System.out.println("id2 : " + id2);
                } catch (Exception e) {
                    logger.warn("Error processing query: {}", e.getMessage());
                    tx.rollback();
                }
            }

            refreshAgent.close();
            client.close();
            cluster.close();

        } catch (Exception e) {
            System.err.println("An error occurred while connecting to Neptune:");
            e.printStackTrace();
            System.exit(-1);
        }
    }

    /**
     * Chooses how cluster topology is discovered: via the Neptune Management
     * API (--cluster-id) or via an AWS Lambda proxy function (--lambda-proxy).
     */
    private ClusterEndpointsRefreshAgent createRefreshAgent() {
        if (StringUtils.isNotEmpty(clusterId)) {
            return ClusterEndpointsRefreshAgent.managementApi(clusterId, RegionUtils.getCurrentRegionName(), profile);
        } else if (StringUtils.isNotEmpty(lambdaProxy)) {
            return ClusterEndpointsRefreshAgent.lambdaProxy(lambdaProxy, RegionUtils.getCurrentRegionName(), profile);
        } else {
            throw new IllegalStateException("You must supply either a cluster Id or AWS Lambda proxy name");
        }
    }
}
| 7,524 |
0 | Create_ds/neptune-gremlin-client/gremlin-client-demo/src/main/java/software/amazon | Create_ds/neptune-gremlin-client/gremlin-client-demo/src/main/java/software/amazon/lambda/NeptuneGremlinClientExampleLambda.java | /*
Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at
http://www.apache.org/licenses/LICENSE-2.0
or in the "license" file accompanying this file. This file is distributed
on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
express or implied. See the License for the specific language governing
permissions and limitations under the License.
*/
package software.amazon.lambda;
import com.amazonaws.services.lambda.runtime.Context;
import com.amazonaws.services.lambda.runtime.RequestStreamHandler;
import com.evanlennick.retry4j.CallExecutor;
import com.evanlennick.retry4j.CallExecutorBuilder;
import com.evanlennick.retry4j.Status;
import com.evanlennick.retry4j.config.RetryConfig;
import com.evanlennick.retry4j.config.RetryConfigBuilder;
import org.apache.tinkerpop.gremlin.driver.GremlinClient;
import org.apache.tinkerpop.gremlin.driver.GremlinCluster;
import org.apache.tinkerpop.gremlin.driver.remote.DriverRemoteConnection;
import org.apache.tinkerpop.gremlin.driver.ser.Serializers;
import org.apache.tinkerpop.gremlin.process.remote.RemoteConnectionException;
import org.apache.tinkerpop.gremlin.process.traversal.AnonymousTraversalSource;
import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource;
import org.apache.tinkerpop.gremlin.structure.T;
import software.amazon.neptune.cluster.*;
import java.io.*;
import java.time.temporal.ChronoUnit;
import java.util.Random;
import java.util.concurrent.Callable;
import java.util.concurrent.TimeUnit;
import java.util.function.Function;
import static java.nio.charset.StandardCharsets.UTF_8;
import static org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.__.addV;
import static org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.__.unfold;
/**
 * AWS Lambda handler demonstrating the Neptune Gremlin client with endpoint
 * refresh and retry handling. On each invocation it upserts a single "Person"
 * vertex with a random id and writes the resulting id to the output stream.
 * All expensive setup (cluster, client, refresh agent, retry executor) happens
 * once in the constructor so warm invocations reuse the connections.
 */
public class NeptuneGremlinClientExampleLambda implements RequestStreamHandler {
    // All traffic is routed to the cluster (writer) endpoint.
    private static final EndpointsSelector ENDPOINT_TYPE = EndpointsType.ClusterEndpoint;
    private final ClusterEndpointsRefreshAgent refreshAgent;
    private final GremlinClient client;
    private final GraphTraversalSource g;
    private final CallExecutor<Object> executor;
    private final Random idGenerator = new Random();
    public NeptuneGremlinClientExampleLambda() {
        // Endpoint info is fetched via a proxy Lambda whose name comes from the
        // function's environment, rather than calling the management API directly.
        this.refreshAgent = ClusterEndpointsRefreshAgent.lambdaProxy(
                System.getenv("neptuneEndpointsInfoLambda"),
                System.getenv("AWS_REGION"));
        // Pool size of 1 per endpoint: a Lambda container handles one request at a time.
        GremlinCluster cluster = NeptuneGremlinClusterBuilder.build()
                .enableSsl(true)
                .addContactPoints(refreshAgent.getEndpoints(ENDPOINT_TYPE))
                .port(8182)
                .serializer(Serializers.GRAPHBINARY_V1D0)
                .minConnectionPoolSize(1)
                .maxConnectionPoolSize(1)
                .create();
        this.client = cluster.connect();
        // Background refresh of endpoint addresses every 5 seconds.
        refreshAgent.startPollingNeptuneAPI(
                client, ENDPOINT_TYPE, 5, TimeUnit.SECONDS
        );
        this.g = AnonymousTraversalSource.traversal().withRemote(DriverRemoteConnection.using(client));
        // Up to 10 attempts, 2s base delay with exponential backoff, retrying
        // only on the conditions recognised by retryLogic().
        RetryConfig retryConfig = new RetryConfigBuilder()
                .retryOnCustomExceptionLogic(retryLogic())
                .withDelayBetweenTries(2000, ChronoUnit.MILLIS)
                .withMaxNumberOfTries(10)
                .withExponentialBackoff()
                .build();
        this.executor = new CallExecutorBuilder<Object>()
                .config(retryConfig)
                .afterFailedTryListener(this::afterFailedTry)
                .build();
    }
    /**
     * Upserts a "Person" vertex keyed by a random int id (coalesce: return the
     * existing vertex if present, otherwise create it) and writes the vertex id
     * to {@code output}. The query is executed through the retrying executor.
     */
    @Override
    public void handleRequest(InputStream input,
                              OutputStream output,
                              Context context) throws IOException {
        try {
            String id = String.valueOf(idGenerator.nextInt());
            @SuppressWarnings("unchecked")
            Callable<Object> query = () -> g.V(id)
                    .fold()
                    .coalesce(
                            unfold(),
                            addV("Person").property(T.id, id))
                    .id().next();
            Status<Object> status = executor.execute(query);
            try (Writer writer = new BufferedWriter(new OutputStreamWriter(output, UTF_8))) {
                writer.write(status.getResult().toString());
            }
        } finally {
            input.close();
            output.close();
        }
    }
    /**
     * Decides whether a failed attempt should be retried. Matching is done on
     * the exception type and on substrings of the full stack trace, because
     * the driver often wraps the root cause several layers deep.
     */
    private Function<Exception, Boolean> retryLogic() {
        return e -> {
            StringWriter stringWriter = new StringWriter();
            e.printStackTrace(new PrintWriter(stringWriter));
            String message = stringWriter.toString();
            Class<? extends Exception> exceptionClass = e.getClass();
            if (RemoteConnectionException.class.isAssignableFrom(exceptionClass)) {
                System.out.println("Retrying because RemoteConnectionException");
                return true;
            }
            // Check for connection issues
            if (message.contains("Timed out while waiting for an available host") ||
                    message.contains("Timed-out waiting for connection on Host") ||
                    message.contains("Connection to server is no longer active") ||
                    message.contains("Connection reset by peer") ||
                    message.contains("SSLEngine closed already") ||
                    message.contains("Pool is shutdown") ||
                    message.contains("ExtendedClosedChannelException") ||
                    message.contains("Broken pipe")) {
                return true;
            }
            // Concurrent writes can sometimes trigger a ConcurrentModificationException.
            // In these circumstances you may want to backoff and retry.
            if (message.contains("ConcurrentModificationException")) {
                return true;
            }
            // If the primary fails over to a new instance, existing connections to the old primary will
            // throw a ReadOnlyViolationException. You may want to back off and retry.
            if (message.contains("ReadOnlyViolationException")) {
                return true;
            }
            return false;
        };
    }
    /**
     * Hook invoked after each failed try, before the backoff delay.
     */
    private void afterFailedTry(Status<?> status) {
        // If the primary fails over to a new instance, existing connections to the old primary will
        // throw a ReadOnlyViolationException. While the client is backing off, you may want to
        // refresh the endpoint addresses.
        if (status.getLastExceptionThatCausedRetry().getMessage().contains("ReadOnlyViolationException")) {
            client.refreshEndpoints(ENDPOINT_TYPE.getEndpoints(refreshAgent.getClusterMetadata()));
        }
    }
}
| 7,525 |
0 | Create_ds/astyanax/astyanax-examples/src/main/java/com/netflix/astyanax | Create_ds/astyanax/astyanax-examples/src/main/java/com/netflix/astyanax/examples/AstClient.java | /**
* Copyright 2013 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.astyanax.examples;
import static com.netflix.astyanax.examples.ModelConstants.*;
import java.util.Iterator;
import java.util.Properties;
import com.netflix.astyanax.ddl.KeyspaceDefinition;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.netflix.astyanax.AstyanaxContext;
import com.netflix.astyanax.Keyspace;
import com.netflix.astyanax.MutationBatch;
import com.netflix.astyanax.connectionpool.NodeDiscoveryType;
import com.netflix.astyanax.connectionpool.OperationResult;
import com.netflix.astyanax.connectionpool.exceptions.ConnectionException;
import com.netflix.astyanax.connectionpool.impl.ConnectionPoolConfigurationImpl;
import com.netflix.astyanax.connectionpool.impl.CountingConnectionPoolMonitor;
import com.netflix.astyanax.impl.AstyanaxConfigurationImpl;
import com.netflix.astyanax.model.Column;
import com.netflix.astyanax.model.ColumnFamily;
import com.netflix.astyanax.model.ColumnList;
import com.netflix.astyanax.serializers.IntegerSerializer;
import com.netflix.astyanax.serializers.StringSerializer;
import com.netflix.astyanax.thrift.ThriftFamilyFactory;
/**
* Example code for demonstrating how to access Cassandra using Astyanax.
*
* @author elandau
* @author Marko Asplund
*/
public class AstClient {
    private static final Logger logger = LoggerFactory.getLogger(AstClient.class);

    private static final String KEYSPACE_NAME = "test1";
    private static final String EMP_CF_NAME = "employees2";

    private AstyanaxContext<Keyspace> keyspaceContext;
    private Keyspace keyspace;
    private ColumnFamily<Integer, String> EMP_CF;

    /**
     * Initialises the Astyanax context and makes sure the demo keyspace and
     * column family exist. Must be called before {@link #insert} / {@link #read}.
     */
    public void init() {
        logger.debug("init()");
        keyspaceContext = new AstyanaxContext.Builder()
                .forCluster("Test Cluster")
                .forKeyspace(KEYSPACE_NAME)
                // Single AstyanaxConfiguration. The original code called
                // withAstyanaxConfiguration twice; the second call replaced the
                // first, silently discarding the RING_DESCRIBE discovery type.
                .withAstyanaxConfiguration(new AstyanaxConfigurationImpl()
                        .setDiscoveryType(NodeDiscoveryType.RING_DESCRIBE)
                        .setCqlVersion("3.0.0")
                        .setTargetCassandraVersion("1.2"))
                .withConnectionPoolConfiguration(new ConnectionPoolConfigurationImpl("MyConnectionPool")
                        .setPort(9160)
                        .setMaxConnsPerHost(1)
                        .setSeeds("127.0.0.1:9160"))
                .withConnectionPoolMonitor(new CountingConnectionPoolMonitor())
                .buildKeyspace(ThriftFamilyFactory.getInstance());
        keyspaceContext.start();
        // Create keyspace if it doesn't already exist.
        // Don't do in production; better to create from cqlsh to avoid parallel issues from eventual consistency.
        Keyspace ks = createKeyspaceIfNotExists();
        // getClient() is what the rest of this class uses; presumably the
        // non-deprecated equivalent of the getEntity() call used before.
        keyspace = keyspaceContext.getClient();
        EMP_CF = ColumnFamily.newColumnFamily(
                EMP_CF_NAME,
                IntegerSerializer.get(),
                StringSerializer.get());
        // Create column family if it doesn't already exist.
        // Don't do in production; better to create from cqlsh to avoid parallel issues from eventual consistency.
        createColumnFamilyIfNotExists(ks);
    }

    /**
     * Best-effort creation of the demo keyspace (SimpleStrategy, RF=1).
     * Failures are logged at INFO and swallowed so re-runs against an existing
     * keyspace still work.
     *
     * @return the keyspace client, or null if it could not be obtained
     */
    private Keyspace createKeyspaceIfNotExists() {
        // Don't do in production; better to create from cqlsh to avoid parallel issues from eventual consistency.
        Keyspace ks = null;
        try {
            ks = keyspaceContext.getClient();
            Properties props = new Properties();
            props.setProperty("name", KEYSPACE_NAME);
            props.setProperty("strategy_class", "SimpleStrategy");
            props.setProperty("strategy_options.replication_factor", "1");
            ks.createKeyspaceIfNotExists(props);
            // Round-trip to confirm the keyspace is visible; the returned
            // definition itself is not needed (the original bound it to an
            // unused local).
            ks.describeKeyspace();
        } catch (Exception e) {
            logger.info("Didn't (re)create keyspace, message={}", e.getMessage());
        }
        return ks;
    }

    /**
     * Best-effort creation of the employee column family; logs and ignores
     * failures (typically "already exists" on re-runs). The original swallowed
     * the exception silently, hiding genuine setup problems.
     */
    private void createColumnFamilyIfNotExists(Keyspace ks) {
        // Don't do in production; better to create from cqlsh to avoid parallel issues from eventual consistency.
        if (ks != null) {
            try {
                ks.createColumnFamily(EMP_CF, null);
            } catch (Exception e) {
                logger.info("Didn't (re)create column family, message={}", e.getMessage());
            }
        }
    }

    /**
     * Writes a single employee row keyed by {@code empId}.
     *
     * @throws RuntimeException wrapping the ConnectionException on write failure
     */
    public void insert(int empId, int deptId, String firstName, String lastName) {
        MutationBatch m = keyspace.prepareMutationBatch();
        m.withRow(EMP_CF, empId)
                .putColumn(COL_NAME_EMPID, empId, null)
                .putColumn(COL_NAME_DEPTID, deptId, null)
                .putColumn(COL_NAME_FIRST_NAME, firstName, null)
                .putColumn(COL_NAME_LAST_NAME, lastName, null);
        try {
            @SuppressWarnings("unused")
            OperationResult<Void> result = m.execute();
        } catch (ConnectionException e) {
            logger.error("failed to write data to C*", e);
            throw new RuntimeException("failed to write data to C*", e);
        }
        logger.debug("insert ok");
    }

    /**
     * Reads the employee row keyed by {@code empId} and logs its columns,
     * demonstrating both iteration and access by column name.
     *
     * @throws RuntimeException wrapping the ConnectionException on read failure
     */
    public void read(int empId) {
        OperationResult<ColumnList<String>> result;
        try {
            result = keyspace.prepareQuery(EMP_CF)
                    .getKey(empId)
                    .execute();
            ColumnList<String> cols = result.getResult();
            logger.debug("read: isEmpty: {}", cols.isEmpty());
            // process data
            // a) iterate over columns
            logger.debug("emp");
            for (Column<String> c : cols) {
                Object v = null;
                if (c.getName().endsWith("id")) // type induction hack
                    v = c.getIntegerValue();
                else
                    v = c.getStringValue();
                logger.debug("- col: '{}': {}", c.getName(), v);
            }
            // b) get columns by name
            logger.debug("emp");
            logger.debug("- emp id: {}", cols.getIntegerValue(COL_NAME_EMPID, null));
            logger.debug("- dept: {}", cols.getIntegerValue(COL_NAME_DEPTID, null));
            logger.debug("- firstName: {}", cols.getStringValue(COL_NAME_FIRST_NAME, null));
            logger.debug("- lastName: {}", cols.getStringValue(COL_NAME_LAST_NAME, null));
        } catch (ConnectionException e) {
            logger.error("failed to read from C*", e);
            throw new RuntimeException("failed to read from C*", e);
        }
    }

    public static void main(String[] args) {
        AstClient c = new AstClient();
        c.init();
        c.insert(222, 333, "Eric", "Cartman");
        c.read(222);
    }
}
| 7,526 |
0 | Create_ds/astyanax/astyanax-examples/src/main/java/com/netflix/astyanax | Create_ds/astyanax/astyanax-examples/src/main/java/com/netflix/astyanax/examples/AstCQLClient.java | /**
* Copyright 2013 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.astyanax.examples;
import static com.netflix.astyanax.examples.ModelConstants.*;
import com.netflix.astyanax.ddl.KeyspaceDefinition;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.netflix.astyanax.AstyanaxContext;
import com.netflix.astyanax.ColumnListMutation;
import com.netflix.astyanax.Keyspace;
import com.netflix.astyanax.MutationBatch;
import com.netflix.astyanax.connectionpool.NodeDiscoveryType;
import com.netflix.astyanax.connectionpool.OperationResult;
import com.netflix.astyanax.connectionpool.exceptions.ConnectionException;
import com.netflix.astyanax.connectionpool.impl.ConnectionPoolConfigurationImpl;
import com.netflix.astyanax.connectionpool.impl.CountingConnectionPoolMonitor;
import com.netflix.astyanax.impl.AstyanaxConfigurationImpl;
import com.netflix.astyanax.model.ColumnFamily;
import com.netflix.astyanax.model.ColumnList;
import com.netflix.astyanax.model.CqlResult;
import com.netflix.astyanax.model.Row;
import com.netflix.astyanax.serializers.IntegerSerializer;
import com.netflix.astyanax.serializers.StringSerializer;
import com.netflix.astyanax.thrift.ThriftFamilyFactory;
import java.util.Properties;
/**
* Example code for demonstrating how to access Cassandra using Astyanax and CQL3.
*
* @author elandau
* @author Marko Asplund
*/
public class AstCQLClient {
    private static final Logger logger = LoggerFactory.getLogger(AstCQLClient.class);

    private static final String KEYSPACE_NAME = "test1";
    private static final String EMP_CF_NAME = "employees1";

    /** Parameterised CQL for inserting a single employee row. */
    private static final String INSERT_STATEMENT =
            String.format("INSERT INTO %s (%s, %s, %s, %s) VALUES (?, ?, ?, ?);",
                    EMP_CF_NAME, COL_NAME_EMPID, COL_NAME_DEPTID, COL_NAME_FIRST_NAME, COL_NAME_LAST_NAME);
    /** CQL to create the employee table with a compound primary key. */
    private static final String CREATE_STATEMENT =
            String.format("CREATE TABLE %s (%s int, %s int, %s varchar, %s varchar, PRIMARY KEY (%s, %s))",
                    EMP_CF_NAME, COL_NAME_EMPID, COL_NAME_DEPTID, COL_NAME_FIRST_NAME, COL_NAME_LAST_NAME,
                    COL_NAME_EMPID, COL_NAME_DEPTID);

    private AstyanaxContext<Keyspace> keyspaceContext;
    private Keyspace keyspace;
    private ColumnFamily<Integer, String> EMP_CF;

    /**
     * Initialises the Astyanax context and makes sure the demo keyspace and
     * table exist. Must be called before {@link #insert} / {@link #read}.
     */
    public void init() {
        logger.debug("init()");
        keyspaceContext = new AstyanaxContext.Builder()
                .forCluster("Test Cluster")
                .forKeyspace(KEYSPACE_NAME)
                // Single AstyanaxConfiguration. The original code called
                // withAstyanaxConfiguration twice; the second call replaced the
                // first, silently discarding the RING_DESCRIBE discovery type.
                .withAstyanaxConfiguration(new AstyanaxConfigurationImpl()
                        .setDiscoveryType(NodeDiscoveryType.RING_DESCRIBE)
                        .setCqlVersion("3.0.0")
                        .setTargetCassandraVersion("1.2"))
                .withConnectionPoolConfiguration(new ConnectionPoolConfigurationImpl("MyConnectionPool")
                        .setPort(9160)
                        .setMaxConnsPerHost(1)
                        .setSeeds("127.0.0.1:9160"))
                .withConnectionPoolMonitor(new CountingConnectionPoolMonitor())
                .buildKeyspace(ThriftFamilyFactory.getInstance());
        keyspaceContext.start();
        // Create keyspace if it doesn't already exist.
        // Don't do in production; better to create from cqlsh to avoid parallel issues from eventual consistency.
        createKeyspaceIfNotExists();
        // getClient() is what createKeyspaceIfNotExists uses; presumably the
        // non-deprecated equivalent of the getEntity() call used before.
        keyspace = keyspaceContext.getClient();
        EMP_CF = ColumnFamily.newColumnFamily(
                EMP_CF_NAME,
                IntegerSerializer.get(),
                StringSerializer.get());
        // Create column family if it doesn't already exist.
        // Don't do in production; better to create from cqlsh to avoid parallel issues from eventual consistency.
        createColumnFamilyIfNotExists();
    }

    /**
     * Best-effort creation of the demo keyspace (SimpleStrategy, RF=1).
     * Failures are logged at INFO and swallowed so re-runs against an existing
     * keyspace still work.
     *
     * @return the keyspace client, or null if it could not be obtained
     */
    private Keyspace createKeyspaceIfNotExists() {
        // Don't do in production; better to create from cqlsh to avoid parallel issues from eventual consistency.
        Keyspace ks = null;
        try {
            ks = keyspaceContext.getClient();
            Properties props = new Properties();
            props.setProperty("name", KEYSPACE_NAME);
            props.setProperty("strategy_class", "SimpleStrategy");
            props.setProperty("strategy_options.replication_factor", "1");
            ks.createKeyspaceIfNotExists(props);
            // Round-trip to confirm the keyspace is visible; the returned
            // definition itself is not needed (the original bound it to an
            // unused local).
            ks.describeKeyspace();
        } catch (Exception e) {
            logger.info("Didn't (re)create keyspace, message={}", e.getMessage());
        }
        return ks;
    }

    /**
     * Inserts one employee row via the prepared INSERT statement.
     *
     * @throws RuntimeException wrapping the ConnectionException on write failure
     */
    public void insert(int empId, int deptId, String firstName, String lastName) {
        try {
            @SuppressWarnings("unused")
            OperationResult<CqlResult<Integer, String>> result = keyspace
                    .prepareQuery(EMP_CF)
                    .withCql(INSERT_STATEMENT)
                    .asPreparedStatement()
                    .withIntegerValue(empId)
                    .withIntegerValue(deptId)
                    .withStringValue(firstName)
                    .withStringValue(lastName)
                    .execute();
        } catch (ConnectionException e) {
            logger.error("failed to write data to C*", e);
            throw new RuntimeException("failed to write data to C*", e);
        }
        logger.debug("insert ok");
    }

    /**
     * Writes arbitrary name/value string columns to the row keyed by {@code id}
     * using the Thrift mutation API (dynamic columns rather than CQL).
     *
     * @param entries two-element arrays of {columnName, columnValue}
     * @throws RuntimeException wrapping the ConnectionException on write failure
     */
    public void insertDynamicProperties(int id, String[]... entries) {
        MutationBatch m = keyspace.prepareMutationBatch();
        ColumnListMutation<String> clm = m.withRow(EMP_CF, id);
        for (String[] kv : entries) {
            clm.putColumn(kv[0], kv[1], null);
        }
        try {
            @SuppressWarnings("unused")
            OperationResult<Void> result = m.execute();
        } catch (ConnectionException e) {
            logger.error("failed to write data to C*", e);
            throw new RuntimeException("failed to write data to C*", e);
        }
        logger.debug("insert ok");
    }

    /**
     * Best-effort creation of the employee table; logs and ignores failures
     * (typically "already exists" on re-runs).
     */
    public void createColumnFamilyIfNotExists() {
        // Don't do in production; better to create from cqlsh to avoid parallel issues from eventual consistency.
        logger.debug("CQL: {}", CREATE_STATEMENT);
        try {
            @SuppressWarnings("unused")
            OperationResult<CqlResult<Integer, String>> result = keyspace
                    .prepareQuery(EMP_CF)
                    .withCql(CREATE_STATEMENT)
                    .execute();
        } catch (Exception e) {
            logger.info("Didn't (re)create column family, message={}", e.getMessage());
        }
    }

    /**
     * Selects and logs all rows matching {@code empId}.
     *
     * @throws RuntimeException wrapping the ConnectionException on read failure
     */
    public void read(int empId) {
        logger.debug("read()");
        try {
            OperationResult<CqlResult<Integer, String>> result
                    = keyspace.prepareQuery(EMP_CF)
                    .withCql(String.format("SELECT * FROM %s WHERE %s=%d;", EMP_CF_NAME, COL_NAME_EMPID, empId))
                    .execute();
            for (Row<Integer, String> row : result.getResult().getRows()) {
                logger.debug("row: " + row.getKey() + "," + row); // why is rowKey null?
                ColumnList<String> cols = row.getColumns();
                logger.debug("emp");
                logger.debug("- emp id: {}", cols.getIntegerValue(COL_NAME_EMPID, null));
                logger.debug("- dept: {}", cols.getIntegerValue(COL_NAME_DEPTID, null));
                logger.debug("- firstName: {}", cols.getStringValue(COL_NAME_FIRST_NAME, null));
                logger.debug("- lastName: {}", cols.getStringValue(COL_NAME_LAST_NAME, null));
            }
        } catch (ConnectionException e) {
            logger.error("failed to read from C*", e);
            throw new RuntimeException("failed to read from C*", e);
        }
    }

    public static void main(String[] args) {
        logger.debug("main");
        AstCQLClient c = new AstCQLClient();
        c.init();
        c.insert(222, 333, "Eric", "Cartman");
        c.read(222);
    }
}
| 7,527 |
0 | Create_ds/astyanax/astyanax-examples/src/main/java/com/netflix/astyanax | Create_ds/astyanax/astyanax-examples/src/main/java/com/netflix/astyanax/examples/ModelConstants.java | /**
* Copyright 2013 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.astyanax.examples;
/**
 * Column-name constants shared by the employee example clients
 * ({@code AstClient}, {@code AstCQLClient}).
 */
public final class ModelConstants {

    public static final String COL_NAME_EMPID = "empid";
    public static final String COL_NAME_DEPTID = "deptid";
    public static final String COL_NAME_FIRST_NAME = "first_name";
    public static final String COL_NAME_LAST_NAME = "last_name";

    private ModelConstants() {
        // Utility holder of constants; never instantiated.
    }
}
| 7,528 |
0 | Create_ds/astyanax/astyanax-test/src/test/java/com/netflix/astyanax | Create_ds/astyanax/astyanax-test/src/test/java/com/netflix/astyanax/impl/RingDescribeHostSupplierTest.java | package com.netflix.astyanax.impl;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import java.math.BigInteger;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import org.junit.Before;
import org.junit.Test;
import com.netflix.astyanax.connectionpool.Host;
import com.netflix.astyanax.connectionpool.TokenRange;
import com.netflix.astyanax.test.TestKeyspace;
import com.netflix.astyanax.test.TestTokenRange;
/**
* User: mkoch
* Date: 5/23/12
*/
public class RingDescribeHostSupplierTest {
    // Three fake nodes, one per token range of the simulated ring.
    private static final String NODE1 = "127.0.0.1";
    private static final String NODE2 = "127.0.0.2";
    private static final String NODE3 = "127.0.0.3";
    // End tokens of the three contiguous ranges (range N ends at token N's value).
    private static final String RANGE_1_END_TOKEN = "0";
    private static final String RANGE_2_END_TOKEN = "2000";
    private static final String RANGE_3_END_TOKEN = "4000";
    // NOTE(review): hostSupplier is built in setUp but never exercised by any
    // active assertion (see testGet below).
    private RingDescribeHostSupplier hostSupplier;
    private TestKeyspace keyspace;
    @Before
    public void setUp() throws Exception {
        // Fake keyspace whose ring-describe returns the three ranges below.
        // 1234 is presumably the port assigned to discovered hosts — confirm
        // against the RingDescribeHostSupplier constructor.
        keyspace = new TestKeyspace("ringDescribeTestKeyspace");
        keyspace.setTokenRange(createTokenRange());
        hostSupplier = new RingDescribeHostSupplier(keyspace,1234);
    }
    @Test
    public void testGet() throws Exception {
        // NOTE(review): the entire body of this test is commented out, so it
        // asserts nothing and passes vacuously. Either restore the assertions
        // (the supplier's return type may have changed since they were written)
        // or mark the test @Ignore so the coverage gap is visible.
        // Map<BigInteger,List<Host>> hostMap = hostSupplier.get();
        // assertNotNull(hostMap);
        // assertEquals(3, hostMap.size());
        //
        // List<Host> endpoints = hostMap.get(new BigInteger(RANGE_1_END_TOKEN));
        // assertEquals(1,endpoints.size());
        // assertEquals(NODE1, endpoints.get(0).getIpAddress());
        //
        // endpoints = hostMap.get(new BigInteger(RANGE_2_END_TOKEN));
        // assertEquals(1,endpoints.size());
        // assertEquals(NODE2, endpoints.get(0).getIpAddress());
        //
        // endpoints = hostMap.get(new BigInteger(RANGE_3_END_TOKEN));
        // assertEquals(1,endpoints.size());
        // assertEquals(NODE3,endpoints.get(0).getIpAddress());
    }
    /**
     * Builds three contiguous token ranges covering the ring, each owned by a
     * single distinct node (the range wraps: NODE1 owns 4000..0).
     */
    private List<TokenRange> createTokenRange() {
        List<TokenRange> tokenRanges = new ArrayList<TokenRange>();
        TokenRange node1Range = new TestTokenRange(RANGE_3_END_TOKEN, RANGE_1_END_TOKEN, Arrays.asList(NODE1));
        TokenRange node2Range = new TestTokenRange(RANGE_1_END_TOKEN, RANGE_2_END_TOKEN, Arrays.asList(NODE2));
        TokenRange node3Range = new TestTokenRange(RANGE_2_END_TOKEN, RANGE_3_END_TOKEN, Arrays.asList(NODE3));
        tokenRanges.addAll(Arrays.asList(node1Range, node2Range, node3Range));
        return tokenRanges;
    }
}
| 7,529 |
0 | Create_ds/astyanax/astyanax-test/src/test/java/com/netflix/astyanax | Create_ds/astyanax/astyanax-test/src/test/java/com/netflix/astyanax/impl/FilteringHostSupplierTest.java | package com.netflix.astyanax.impl;
import java.util.Arrays;
import java.util.List;
import junit.framework.Assert;
import org.junit.Test;
import com.google.common.base.Supplier;
import com.google.common.collect.Lists;
import com.netflix.astyanax.connectionpool.Host;
/**
 * Exercises {@link FilteringHostSupplier} across every combination of the two
 * backing suppliers (discovery and ring) succeeding, failing, or returning
 * empty results.
 */
public class FilteringHostSupplierTest {

    /**
     * Scriptable {@link Supplier} fixture: returns a configured host list, or
     * throws a RuntimeException when told to simulate a failing source.
     */
    public static class TestHostSupplier implements Supplier<List<Host>> {
        private List<Host> hostList;
        private boolean bThrowException = false;

        @Override
        public List<Host> get() {
            if (bThrowException) {
                throw new RuntimeException("Unknown exception");
            }
            return hostList;
        }

        public List<Host> getHostList() {
            return hostList;
        }

        public void setHostList(List<Host> hostList) {
            this.hostList = hostList;
        }

        public boolean isThrowException() {
            return bThrowException;
        }

        public void setThrowException(boolean bThrowException) {
            this.bThrowException = bThrowException;
        }
    }

    @Test
    public void testFilter() {
        List<Host> list1 = Arrays.asList(
                new Host("127.0.1.1", 7102),
                new Host("127.0.1.2", 7102),
                new Host("127.0.1.3", 7102)
        );
        List<Host> list2 = Arrays.asList(
                new Host("127.0.2.1", 7102),
                new Host("127.0.2.2", 7102),
                new Host("127.0.2.3", 7102)
        );
        List<Host> emptyList = Lists.newArrayList();

        TestHostSupplier discoverySupplier = new TestHostSupplier();
        TestHostSupplier ringSupplier = new TestHostSupplier();
        FilteringHostSupplier filteringSupplier = new FilteringHostSupplier(ringSupplier, discoverySupplier);

        // Each scenario: (discovery hosts, discovery throws, ring hosts, ring throws) -> expected.
        verify(discoverySupplier, ringSupplier, filteringSupplier,
                list1, false, emptyList, false, list1, "Discovery only, no exception");
        verify(discoverySupplier, ringSupplier, filteringSupplier,
                emptyList, false, list1, false, emptyList, "Ring only, no exception");
        verify(discoverySupplier, ringSupplier, filteringSupplier,
                list1, false, list1, false, list1, "Discovery and ring match");
        verify(discoverySupplier, ringSupplier, filteringSupplier,
                list1, false, list2, false, list1, "Discovery and ring mismatch");
        verify(discoverySupplier, ringSupplier, filteringSupplier,
                list1, false, emptyList, true, list1, "Discovery OK, ring exception");
        verify(discoverySupplier, ringSupplier, filteringSupplier,
                list1, true, list2, false, emptyList, "Discovery exception, ring ok");
        verify(discoverySupplier, ringSupplier, filteringSupplier,
                emptyList, false, emptyList, false, emptyList, "Both empty");
        verify(discoverySupplier, ringSupplier, filteringSupplier,
                list1, true, list2, true, emptyList, "Both exception");
    }

    /**
     * Configures both fixture suppliers, invokes the filtering supplier once,
     * and asserts the result. Replaces eight copy-pasted blocks in the
     * original test (which also contained a "Bath exception" label typo).
     */
    private static void verify(TestHostSupplier discoverySupplier,
                               TestHostSupplier ringSupplier,
                               FilteringHostSupplier filteringSupplier,
                               List<Host> discoveryHosts, boolean discoveryThrows,
                               List<Host> ringHosts, boolean ringThrows,
                               List<Host> expected, String label) {
        discoverySupplier.setHostList(discoveryHosts);
        discoverySupplier.setThrowException(discoveryThrows);
        ringSupplier.setHostList(ringHosts);
        ringSupplier.setThrowException(ringThrows);
        List<Host> result = filteringSupplier.get();
        System.out.println(label + " : " + result);
        Assert.assertEquals(expected, result);
    }
}
| 7,530 |
0 | Create_ds/astyanax/astyanax-test/src/test/java/com/netflix/astyanax | Create_ds/astyanax/astyanax-test/src/test/java/com/netflix/astyanax/mapping/TestMapping.java | package com.netflix.astyanax.mapping;
import junit.framework.Assert;
import org.junit.Test;
public class TestMapping {

    /**
     * Verifies that {@code Mapping} reads annotated fields of
     * {@code FakeKeyspaceBean} by column name, and that copying every mapped
     * column into a fresh bean round-trips all values.
     */
    @Test
    public void testKeyspaceAnnotations() {
        FakeKeyspaceBean override = new FakeKeyspaceBean();
        override.setId("1");
        override.setCountry("USA");
        override.setCountryStatus(2);
        override.setCreateTS(12345678L);
        override.setExpirationTS(87654321L);
        override.setLastUpdateTS(24681357L);
        override.setType("thing");
        override.setUpdatedBy("John Galt");
        override.setByteArray("Some Bytes".getBytes());
        Mapping<FakeKeyspaceBean> mapping = Mapping
                .make(FakeKeyspaceBean.class);
        // Each mapped column must surface the corresponding bean property.
        Assert.assertEquals(mapping.getIdValue(override, String.class),
                override.getId());
        Assert.assertEquals(
                mapping.getColumnValue(override, "PK", String.class),
                override.getId());
        Assert.assertEquals(mapping.getColumnValue(override,
                "COUNTRY_OVERRIDE", String.class), override.getCountry());
        Assert.assertEquals(mapping.getColumnValue(override,
                "COUNTRY_STATUS_OVERRIDE", Integer.class), override
                .getCountryStatus());
        Assert.assertEquals(
                mapping.getColumnValue(override, "CREATE_TS", Long.class),
                override.getCreateTS());
        Assert.assertEquals(
                mapping.getColumnValue(override, "EXP_TS", Long.class),
                override.getExpirationTS());
        Assert.assertEquals(
                mapping.getColumnValue(override, "LAST_UPDATE_TS", Long.class),
                override.getLastUpdateTS());
        Assert.assertEquals(mapping.getColumnValue(override,
                "OVERRIDE_BY_TYPE", String.class), override.getType());
        Assert.assertEquals(
                mapping.getColumnValue(override, "UPDATED_BY", String.class),
                override.getUpdatedBy());
        // Compare array CONTENTS: assertEquals on byte[] only compares
        // references (Object.equals), which passes vacuously when the mapping
        // hands back the same array instance.
        Assert.assertTrue("BYTE_ARRAY column should expose the bean's bytes",
                Arrays.equals(
                        mapping.getColumnValue(override, "BYTE_ARRAY", byte[].class),
                        override.getByteArray()));
        // Copy every mapped column into a fresh bean and confirm the round-trip.
        FakeKeyspaceBean copy = new FakeKeyspaceBean();
        for (String fieldName : mapping.getNames()) {
            mapping.setColumnValue(copy, fieldName,
                    mapping.getColumnValue(override, fieldName, Object.class));
        }
        Assert.assertEquals(copy.getId(), override.getId());
        Assert.assertEquals(copy.getCountry(), override.getCountry());
        Assert.assertEquals(copy.getCountryStatus(),
                override.getCountryStatus());
        Assert.assertEquals(copy.getCreateTS(), override.getCreateTS());
        Assert.assertEquals(copy.getExpirationTS(), override.getExpirationTS());
        Assert.assertEquals(copy.getLastUpdateTS(), override.getLastUpdateTS());
        Assert.assertEquals(copy.getType(), override.getType());
        Assert.assertEquals(copy.getUpdatedBy(), override.getUpdatedBy());
        Assert.assertTrue("byte array should round-trip by content",
                Arrays.equals(copy.getByteArray(), override.getByteArray()));
    }

    /**
     * The cache must hand back the identical Mapping instance for repeated
     * lookups of the same class.
     */
    @Test
    public void testCache() {
        MappingCache cache = new MappingCache();
        Mapping<FakeKeyspaceBean> keyspaceBeanMapping1 = cache
                .getMapping(FakeKeyspaceBean.class);
        Mapping<FakeKeyspaceBean> keyspaceBeanMapping2 = cache
                .getMapping(FakeKeyspaceBean.class);
        Assert.assertSame(keyspaceBeanMapping1, keyspaceBeanMapping2);
    }
}
| 7,531 |
0 | Create_ds/astyanax/astyanax-test/src/test/java/com/netflix/astyanax | Create_ds/astyanax/astyanax-test/src/test/java/com/netflix/astyanax/mapping/FakeKeyspaceBean.java | package com.netflix.astyanax.mapping;
@SuppressWarnings({ "UnusedDeclaration", "SimplifiableIfStatement" })
public class FakeKeyspaceBean implements Comparable<FakeKeyspaceBean> {
    // Row key: persisted under column "PK".
    @Id("PK")
    private String id;
    // Remaining fields map 1:1 to named columns via @Column.
    @Column("OVERRIDE_BY_TYPE")
    private String type;
    @Column("COUNTRY_OVERRIDE")
    private String country;
    @Column("COUNTRY_STATUS_OVERRIDE")
    private Integer countryStatus;
    @Column("UPDATED_BY")
    private String updatedBy;
    // Timestamp fields hold epoch millis (per the accessor javadoc).
    @Column("EXP_TS")
    private Long expirationTS;
    @Column("CREATE_TS")
    private Long createTS;
    @Column("LAST_UPDATE_TS")
    private Long lastUpdateTS;
    // Raw binary payload; stored/returned by reference (no defensive copy).
    @Column("BYTE_ARRAY")
    private byte[] byteArray;
/**
* Public empty constructor needed
*/
public FakeKeyspaceBean() {
}
/**
* Unique identifying id
*
* @return value
*/
public String getId() {
return id;
}
/**
* Set unique override id.
*
* @param id
* value
*/
public void setId(String id) {
this.id = id;
}
/**
* Returns the type of override
*
* @return value
*/
public String getType() {
return type;
}
/**
* Sets the override type
*
* @param type
* value
*/
public void setType(String type) {
this.type = type;
}
/**
* Return the ISO 3166 country code to force on the current request context
*
* @return value
*/
public String getCountry() {
return country;
}
/**
* Sets the ISOCountry to force on the current request context
*
* @param country
* value
*/
public void setCountry(String country) {
this.country = country;
}
/**
* Return country status to force on the overriden country.
*
* @return value
*/
public Integer getCountryStatus() {
return countryStatus;
}
/**
* Sets country status to force on the overriden country
*
* @param countryStatus
* value
*/
public void setCountryStatus(Integer countryStatus) {
this.countryStatus = countryStatus;
}
/**
* Returns the ldap login that made the last update
*
* @return value
*/
public String getUpdatedBy() {
return updatedBy;
}
/**
* Sets who made the last update
*
* @param login
* value
*/
public void setUpdatedBy(String login) {
updatedBy = login;
}
/**
* Returns the creation timestamp in milis since epoch.
*
* @return value
*/
public Long getCreateTS() {
return createTS;
}
/**
* Sets the creation timestamp in millis since epoch
*
* @param createTimestamp
* value
*/
public void setCreateTS(Long createTimestamp) {
createTS = createTimestamp;
}
/**
* Returns the last updated timestamp in millis since epoch
*
* @return value
*/
public Long getLastUpdateTS() {
return lastUpdateTS;
}
/**
* Sets the last updated timestamp in millis since epoch
*
* @param updateTimestamp
* value
*/
public void setLastUpdateTS(Long updateTimestamp) {
lastUpdateTS = updateTimestamp;
}
public byte[] getByteArray()
{
return byteArray;
}
public void setByteArray(byte[] byteArray)
{
this.byteArray = byteArray;
}
/**
* Returns the expiration timestamp in millis since epoch
*
* @return value
*/
public Long getExpirationTS() {
return expirationTS;
}
/**
* Sets the expiration timestamp in millis since epoch
*
* @param expTS
* value
*/
public void setExpirationTS(Long expTS) {
expirationTS = expTS;
}
@Override
public boolean equals(Object o) {
if (o == this) {
return true;
}
if (o instanceof FakeKeyspaceBean) {
return ((FakeKeyspaceBean) o).getId().equals(getId());
} else {
return false;
}
}
@Override
public int hashCode() {
return getId().hashCode();
}
/**
* {@inheritDoc}
*/
public int compareTo(FakeKeyspaceBean o) {
if (o == null) {
return -1;
} else {
return getId().compareTo(o.getId());
}
}
} | 7,532 |
0 | Create_ds/astyanax/astyanax-test/src/test/java/com/netflix/astyanax | Create_ds/astyanax/astyanax-test/src/test/java/com/netflix/astyanax/entitystore/EntityMapperTest.java | package com.netflix.astyanax.entitystore;
import java.lang.reflect.Field;
import java.util.Collection;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.Id;
import junit.framework.Assert;
import org.junit.Test;
public class EntityMapperTest {

    @Test
    public void basic() {
        EntityMapper<SampleEntity, String> mapper =
                new EntityMapper<SampleEntity, String>(SampleEntity.class, null);

        // the @Id-annotated field is exposed as the id field
        Field idField = mapper.getId();
        Assert.assertEquals("id", idField.getName());

        // 19 simple columns + 4 map/set columns + 1 nested Bar = 24 mapped columns
        Collection<ColumnMapper> columns = mapper.getColumnList();
        System.out.println(columns);
        Assert.assertEquals(24, columns.size());

        // a field without an explicit @Column name falls back to the field
        // name itself ("uuid"), never an upper-cased variant
        boolean sawUpperCaseName = false;
        boolean sawFieldName = false;
        for (ColumnMapper column : columns) {
            String name = column.getColumnName();
            if (name.equals("UUID")) {
                sawUpperCaseName = true;
            }
            if (name.equals("uuid")) {
                sawFieldName = true;
            }
        }
        Assert.assertFalse(sawUpperCaseName);
        Assert.assertTrue(sawFieldName);
    }

    @Test(expected = IllegalArgumentException.class)
    public void missingEntityAnnotation() {
        // String carries no @Entity annotation, so the mapper must reject it
        new EntityMapper<String, String>(String.class, null);
    }

    @Entity
    private static class InvalidColumnNameEntity {
        @SuppressWarnings("unused")
        @Id
        private String id;

        // a '.' in the column name is rejected — presumably because '.' is
        // used for nested-column paths (see EntityMapper); verify if changed
        @SuppressWarnings("unused")
        @Column(name="LONG.PRIMITIVE")
        private long longPrimitive;
    }

    @Test(expected = IllegalArgumentException.class)
    public void invalidColumnName() {
        new EntityMapper<InvalidColumnNameEntity, String>(InvalidColumnNameEntity.class, null);
    }

    @Test
    public void doubleIdColumnAnnotation() {
        EntityMapper<DoubleIdColumnEntity, String> mapper =
                new EntityMapper<DoubleIdColumnEntity, String>(DoubleIdColumnEntity.class, null);

        // the @Id field is still recognized even when also annotated @Column
        Field idField = mapper.getId();
        Assert.assertEquals("id", idField.getName());

        // all three fields (id, num, str) are mapped as columns
        Collection<ColumnMapper> columns = mapper.getColumnList();
        System.out.println(columns);
        Assert.assertEquals(3, columns.size());
    }
}
| 7,533 |
0 | Create_ds/astyanax/astyanax-test/src/test/java/com/netflix/astyanax | Create_ds/astyanax/astyanax-test/src/test/java/com/netflix/astyanax/entitystore/SimpleEntity.java | package com.netflix.astyanax.entitystore;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.Id;
/**
 * Minimal test entity: a row key plus a single mapped column whose column
 * name defaults to the field name ("column").
 */
@Entity
public class SimpleEntity {
    /** Row key. */
    @Id
    private String id;

    /** Single payload column. */
    @Column
    private String column;

    public SimpleEntity() {
    }

    public SimpleEntity(String id, String column) {
        this.id = id;
        this.column = column;
    }

    public String getId() {
        return id;
    }

    public void setId(String id) {
        this.id = id;
    }

    public String getColumn() {
        return column;
    }

    public void setColumn(String column) {
        this.column = column;
    }

    /**
     * Value equality on both fields, null-safe. Added so round-tripped
     * instances can be compared directly in tests; previously the class
     * inherited identity equality from Object despite defining toString().
     */
    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (!(obj instanceof SimpleEntity))
            return false;
        SimpleEntity other = (SimpleEntity) obj;
        return (id == null ? other.id == null : id.equals(other.id))
                && (column == null ? other.column == null : column.equals(other.column));
    }

    /** Consistent with {@link #equals(Object)}. */
    @Override
    public int hashCode() {
        int result = (id == null ? 0 : id.hashCode());
        return 31 * result + (column == null ? 0 : column.hashCode());
    }

    @Override
    public String toString() {
        return "SimpleEntity [id=" + id + ", column=" + column + "]";
    }
}
| 7,534 |
0 | Create_ds/astyanax/astyanax-test/src/test/java/com/netflix/astyanax | Create_ds/astyanax/astyanax-test/src/test/java/com/netflix/astyanax/entitystore/SampleEntity.java | package com.netflix.astyanax.entitystore;
import java.nio.ByteBuffer;
import java.nio.charset.Charset;
import java.util.Date;
import java.util.Map;
import java.util.Set;
import java.util.UUID;

import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.Id;

import org.apache.commons.lang.builder.EqualsBuilder;
import org.apache.commons.lang.builder.HashCodeBuilder;
import org.apache.commons.lang.builder.ReflectionToStringBuilder;
import org.codehaus.jettison.json.JSONException;
import org.codehaus.jettison.json.JSONObject;

import com.netflix.astyanax.serializers.AbstractSerializer;
import com.netflix.astyanax.serializers.ComparatorType;
import com.netflix.astyanax.shaded.org.apache.cassandra.db.marshal.UTF8Type;
/**
 * Test entity exercising every column type supported by the entity mapper:
 * primitives and their wrappers, String, byte[], Date, UUID, a custom-serialized
 * field, a nested @Entity, and Map/Set containers.
 *
 * The id is not counted as a column. EntityMapperTest expects 24 mapped
 * columns in total: 19 simple fields + 4 map/set fields + 1 nested Bar.
 * (A previous version of this comment said "17 columns", which was stale.)
 */
@Entity
public class SampleEntity {
    ////////////////////////////////////////////////////////
    // custom serializer

    /** Simple value object persisted via the custom {@link FooSerializer}. */
    public static class Foo {
        public int i;
        public String s;

        public Foo(int i, String s) {
            this.i = i;
            this.s = s;
        }

        @Override
        public boolean equals(Object obj) {
            if (this == obj)
                return true;
            if (obj == null)
                return false;
            if (getClass() != obj.getClass())
                return false;
            Foo other = (Foo) obj;
            return i == other.i && s.equals(other.s);
        }

        /** Added: a class overriding equals() must also override hashCode(). */
        @Override
        public int hashCode() {
            return 31 * i + (s == null ? 0 : s.hashCode());
        }

        /** JSON form; this is also the wire format used by FooSerializer. */
        @Override
        public String toString() {
            try {
                JSONObject jsonObj = new JSONObject();
                jsonObj.put("i", i);
                jsonObj.put("s", s);
                return jsonObj.toString();
            } catch (JSONException e) {
                throw new RuntimeException("failed to construct JSONObject for toString", e);
            }
        }

        /** Inverse of {@link #toString()}. */
        public static Foo fromString(String str) {
            try {
                JSONObject jsonObj = new JSONObject(str);
                return new Foo(jsonObj.getInt("i"), jsonObj.getString("s"));
            } catch (JSONException e) {
                throw new RuntimeException("failed to construct JSONObject for toString", e);
            }
        }
    }

    /** Serializes {@link Foo} as its UTF-8 encoded JSON string form. */
    public static class FooSerializer extends AbstractSerializer<Foo> {
        private static final String UTF_8 = "UTF-8";
        private static final Charset charset = Charset.forName(UTF_8);
        private static final FooSerializer instance = new FooSerializer();

        public static FooSerializer get() {
            return instance;
        }

        @Override
        public ByteBuffer toByteBuffer(Foo obj) {
            if (obj == null) {
                return null;
            }
            return ByteBuffer.wrap(obj.toString().getBytes(charset));
        }

        @Override
        public Foo fromByteBuffer(ByteBuffer byteBuffer) {
            if (byteBuffer == null) {
                return null;
            }
            return Foo.fromString(charset.decode(byteBuffer).toString());
        }

        @Override
        public ComparatorType getComparatorType() {
            return ComparatorType.UTF8TYPE;
        }

        @Override
        public ByteBuffer fromString(String str) {
            return UTF8Type.instance.fromString(str);
        }

        @Override
        public String getString(ByteBuffer byteBuffer) {
            return UTF8Type.instance.getString(byteBuffer);
        }
    }

    ////////////////////////////////////////////////////////
    // nested entity

    /** Nested entity; its fields flatten into dotted column names (BAR.i, ...). */
    @Entity
    public static class Bar {
        /** Doubly-nested entity (BAR.barbar.i, BAR.barbar.s). */
        @Entity
        public static class BarBar {
            @Column(name="i")
            public int i;

            @Column(name="s")
            public String s;

            @Override
            public boolean equals(Object obj) {
                if (this == obj)
                    return true;
                if (obj == null)
                    return false;
                if (getClass() != obj.getClass())
                    return false;
                BarBar other = (BarBar) obj;
                return i == other.i && s.equals(other.s);
            }

            /** Added: keeps the equals()/hashCode() contract. */
            @Override
            public int hashCode() {
                return 31 * i + (s == null ? 0 : s.hashCode());
            }

            @Override
            public String toString() {
                try {
                    JSONObject jsonObj = new JSONObject();
                    jsonObj.put("i", i);
                    jsonObj.put("s", s);
                    return jsonObj.toString();
                } catch (JSONException e) {
                    throw new RuntimeException("failed to construct JSONObject for toString", e);
                }
            }
        }

        @Column(name="i")
        public int i;

        @Column(name="s")
        public String s;

        @Column(name="barbar")
        public BarBar barbar;

        @Override
        public boolean equals(Object obj) {
            if (this == obj)
                return true;
            if (obj == null)
                return false;
            if (getClass() != obj.getClass())
                return false;
            Bar other = (Bar) obj;
            return i == other.i && s.equals(other.s) && barbar.equals(other.barbar);
        }

        /** Added: keeps the equals()/hashCode() contract. */
        @Override
        public int hashCode() {
            int result = 31 * i + (s == null ? 0 : s.hashCode());
            return 31 * result + (barbar == null ? 0 : barbar.hashCode());
        }

        @Override
        public String toString() {
            try {
                JSONObject jsonObj = new JSONObject();
                jsonObj.put("i", i);
                jsonObj.put("s", s);
                JSONObject barbarObj = new JSONObject(barbar.toString());
                jsonObj.put("barbar", barbarObj);
                return jsonObj.toString();
            } catch (JSONException e) {
                throw new RuntimeException("failed to construct JSONObject for toString", e);
            }
        }
    }

    ////////////////////////////////////////////////////////
    // root fields

    @Id
    private String id;

    @Column(name="BOOLEAN_PRIMITIVE")
    private boolean booleanPrimitive;

    @Column(name="BOOLEAN_OBJECT")
    private Boolean booleanObject;

    @Column(name="BYTE_PRIMITIVE")
    private byte bytePrimitive;

    @Column(name="BYTE_OBJECT")
    private Byte byteObject;

    @Column(name="SHORT_PRIMITIVE")
    private short shortPrimitive;

    @Column(name="SHORT_OBJECT")
    private Short shortObject;

    @Column(name="INT_PRIMITIVE")
    private int intPrimitive;

    @Column(name="INT_OBJECT")
    private Integer intObject;

    @Column(name="LONG_PRIMITIVE")
    private long longPrimitive;

    @Column(name="LONG_OBJECT")
    private Long longObject;

    @Column(name="FLOAT_PRIMITIVE")
    private float floatPrimitive;

    @Column(name="FLOAT_OBJECT")
    private Float floatObject;

    @Column(name="DOUBLE_PRIMITIVE")
    private double doublePrimitive;

    @Column(name="DOUBLE_OBJECT")
    private Double doubleObject;

    @Column(name="STRING")
    private String string;

    @Column(name="BYTE_ARRAY")
    private byte[] byteArray;

    @Column(name="DATE")
    private Date date;

    // column name should default to the field name ("uuid")
    @Column()
    private UUID uuid;

    // persisted through the custom serializer above
    @Column(name="FOO")
    @Serializer(FooSerializer.class)
    private Foo foo;

    @Column(name="BAR")
    private Bar bar;

    @Column
    private Map<String, String> stringMap;

    @Column
    private Set<String> stringSet;

    @Column
    private Map<Long, Long> longMap;

    @Column
    private Set<Long> longSet;

    public String getId() {
        return id;
    }

    public void setId(String id) {
        this.id = id;
    }

    public boolean isBooleanPrimitive() {
        return booleanPrimitive;
    }

    public void setBooleanPrimitive(boolean booleanPrimitive) {
        this.booleanPrimitive = booleanPrimitive;
    }

    public Boolean getBooleanObject() {
        return booleanObject;
    }

    public void setBooleanObject(Boolean booleanObject) {
        this.booleanObject = booleanObject;
    }

    public byte getBytePrimitive() {
        return bytePrimitive;
    }

    public void setBytePrimitive(byte bytePrimitive) {
        this.bytePrimitive = bytePrimitive;
    }

    public Byte getByteObject() {
        return byteObject;
    }

    public void setByteObject(Byte byteObject) {
        this.byteObject = byteObject;
    }

    public short getShortPrimitive() {
        return shortPrimitive;
    }

    public void setShortPrimitive(short shortPrimitive) {
        this.shortPrimitive = shortPrimitive;
    }

    public Short getShortObject() {
        return shortObject;
    }

    public void setShortObject(Short shortObject) {
        this.shortObject = shortObject;
    }

    public int getIntPrimitive() {
        return intPrimitive;
    }

    public void setIntPrimitive(int intPrimitive) {
        this.intPrimitive = intPrimitive;
    }

    public Integer getIntObject() {
        return intObject;
    }

    public void setIntObject(Integer intObject) {
        this.intObject = intObject;
    }

    public long getLongPrimitive() {
        return longPrimitive;
    }

    public void setLongPrimitive(long longPrimitive) {
        this.longPrimitive = longPrimitive;
    }

    public Long getLongObject() {
        return longObject;
    }

    public void setLongObject(Long longObject) {
        this.longObject = longObject;
    }

    public float getFloatPrimitive() {
        return floatPrimitive;
    }

    public void setFloatPrimitive(float floatPrimitive) {
        this.floatPrimitive = floatPrimitive;
    }

    public Float getFloatObject() {
        return floatObject;
    }

    public void setFloatObject(Float floatObject) {
        this.floatObject = floatObject;
    }

    public double getDoublePrimitive() {
        return doublePrimitive;
    }

    public void setDoublePrimitive(double doublePrimitive) {
        this.doublePrimitive = doublePrimitive;
    }

    public Double getDoubleObject() {
        return doubleObject;
    }

    public void setDoubleObject(Double doubleObject) {
        this.doubleObject = doubleObject;
    }

    public String getString() {
        return string;
    }

    public void setString(String string) {
        this.string = string;
    }

    public byte[] getByteArray() {
        return byteArray;
    }

    public void setByteArray(byte[] byteArray) {
        this.byteArray = byteArray;
    }

    public Date getDate() {
        return date;
    }

    public void setDate(Date date) {
        this.date = date;
    }

    public UUID getUuid() {
        return uuid;
    }

    public void setUuid(UUID uuid) {
        this.uuid = uuid;
    }

    public Foo getFoo() {
        return foo;
    }

    public void setFoo(Foo foo) {
        this.foo = foo;
    }

    public Bar getBar() {
        return bar;
    }

    public void setBar(Bar bar) {
        this.bar = bar;
    }

    public Map<String, String> getStringMap() {
        return stringMap;
    }

    public void setStringMap(Map<String, String> stringMap) {
        this.stringMap = stringMap;
    }

    public Set<String> getStringSet() {
        return stringSet;
    }

    public void setStringSet(Set<String> stringSet) {
        this.stringSet = stringSet;
    }

    @Override
    public String toString() {
        return ReflectionToStringBuilder.toString(this);
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (obj == null)
            return false;
        if (getClass() != obj.getClass())
            return false;
        return EqualsBuilder.reflectionEquals(this, obj);
    }

    /**
     * Added to keep the equals()/hashCode() contract; mirrors the
     * reflection-based equals above.
     */
    @Override
    public int hashCode() {
        return HashCodeBuilder.reflectionHashCode(this);
    }

    public Map<Long, Long> getLongMap() {
        return longMap;
    }

    public void setLongMap(Map<Long, Long> longMap) {
        this.longMap = longMap;
    }

    public Set<Long> getLongSet() {
        return longSet;
    }

    public void setLongSet(Set<Long> longSet) {
        this.longSet = longSet;
    }
}
| 7,535 |
0 | Create_ds/astyanax/astyanax-test/src/test/java/com/netflix/astyanax | Create_ds/astyanax/astyanax-test/src/test/java/com/netflix/astyanax/entitystore/DefaultEntityManagerTtlTest.java | package com.netflix.astyanax.entitystore;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.Id;
import org.apache.commons.lang.RandomStringUtils;
import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.Test;
import com.google.common.collect.ImmutableMap;
import com.netflix.astyanax.AstyanaxContext;
import com.netflix.astyanax.Keyspace;
import com.netflix.astyanax.connectionpool.NodeDiscoveryType;
import com.netflix.astyanax.connectionpool.impl.ConnectionPoolConfigurationImpl;
import com.netflix.astyanax.connectionpool.impl.ConnectionPoolType;
import com.netflix.astyanax.connectionpool.impl.CountingConnectionPoolMonitor;
import com.netflix.astyanax.impl.AstyanaxConfigurationImpl;
import com.netflix.astyanax.model.ColumnFamily;
import com.netflix.astyanax.model.ColumnList;
import com.netflix.astyanax.serializers.StringSerializer;
import com.netflix.astyanax.thrift.ThriftFamilyFactory;
import com.netflix.astyanax.util.SingletonEmbeddedCassandra;
/**
 * Integration tests for TTL handling in DefaultEntityManager against an
 * embedded Cassandra: class-level @TTL, builder-level withTTL() override,
 * and method-level @TTL override.
 */
public class DefaultEntityManagerTtlTest {

    private static Keyspace keyspace;
    private static AstyanaxContext<Keyspace> keyspaceContext;

    private static String TEST_CLUSTER_NAME = "junit_cass_sandbox";
    private static String TEST_KEYSPACE_NAME = "EntityPersisterTestKeyspace";
    private static final String SEEDS = "localhost:9160";

    public static ColumnFamily<String, String> CF_SAMPLE_ENTITY = ColumnFamily.newColumnFamily(
            "SampleEntityColumnFamily",
            StringSerializer.get(),
            StringSerializer.get());

    public static ColumnFamily<String, String> CF_SIMPLE_ENTITY = ColumnFamily.newColumnFamily(
            "SimpleEntityColumnFamily",
            StringSerializer.get(),
            StringSerializer.get());

    @BeforeClass
    public static void setup() throws Exception {
        SingletonEmbeddedCassandra.getInstance();
        // give the embedded server time to come up before creating the schema
        Thread.sleep(1000 * 3);
        createKeyspace();
        // allow schema changes to settle before the tests run
        Thread.sleep(1000 * 3);
    }

    @AfterClass
    public static void teardown() throws Exception {
        if (keyspaceContext != null)
            keyspaceContext.shutdown();
        // let the embedded Cassandra quiesce before other suites start
        Thread.sleep(1000 * 10);
    }

    /** Drops any stale keyspace, then creates the keyspace and column families. */
    private static void createKeyspace() throws Exception {
        keyspaceContext = new AstyanaxContext.Builder()
                .forCluster(TEST_CLUSTER_NAME)
                .forKeyspace(TEST_KEYSPACE_NAME)
                .withAstyanaxConfiguration(
                        new AstyanaxConfigurationImpl()
                                .setDiscoveryType(NodeDiscoveryType.RING_DESCRIBE)
                                .setConnectionPoolType(ConnectionPoolType.ROUND_ROBIN))
                .withConnectionPoolConfiguration(
                        new ConnectionPoolConfigurationImpl(TEST_CLUSTER_NAME
                                + "_" + TEST_KEYSPACE_NAME)
                                .setSocketTimeout(30000)
                                .setMaxTimeoutWhenExhausted(2000)
                                .setMaxConnsPerHost(20)
                                .setInitConnsPerHost(10)
                                .setSeeds(SEEDS))
                .withConnectionPoolMonitor(new CountingConnectionPoolMonitor())
                .buildKeyspace(ThriftFamilyFactory.getInstance());

        keyspaceContext.start();

        keyspace = keyspaceContext.getEntity();

        // drop any keyspace left over from a previous run; ignore failures
        try {
            keyspace.dropKeyspace();
        }
        catch (Exception e) {
            e.printStackTrace();
        }

        keyspace.createKeyspace(ImmutableMap.<String, Object>builder()
                .put("strategy_options", ImmutableMap.<String, Object>builder()
                        .put("replication_factor", "1")
                        .build())
                .put("strategy_class", "SimpleStrategy")
                .build()
        );

        keyspace.createColumnFamily(CF_SAMPLE_ENTITY, null);
        keyspace.createColumnFamily(CF_SIMPLE_ENTITY, null);
    }

    //////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////

    /** Entity with a class-level TTL of 2 seconds. */
    @Entity
    @TTL(2)
    private static class TtlEntity {
        @Id
        private String id;

        @Column
        private String column;

        public TtlEntity() {
        }

        public String getId() {
            return id;
        }

        public void setId(String id) {
            this.id = id;
        }

        public String getColumn() {
            return column;
        }

        public void setColumn(String column) {
            this.column = column;
        }

        @Override
        public boolean equals(Object obj) {
            if (this == obj)
                return true;
            if (obj == null)
                return false;
            if (getClass() != obj.getClass())
                return false;
            TtlEntity other = (TtlEntity) obj;
            return id.equals(other.id) && column.equals(other.column);
        }

        /** Added: equals() is overridden, so hashCode() must agree with it. */
        @Override
        public int hashCode() {
            return 31 * id.hashCode() + column.hashCode();
        }

        @Override
        public String toString() {
            // fixed: previously mislabeled itself as "SimpleEntity"
            return "TtlEntity [id=" + id + ", column=" + column + "]";
        }
    }

    /** Builds a TtlEntity with the given id and a random column value. */
    private TtlEntity createTtlEntity(String id) {
        TtlEntity e = new TtlEntity();
        e.setId(id);
        e.setColumn(RandomStringUtils.randomAlphanumeric(4));
        return e;
    }

    @Test
    public void testTtlClassAnnotation() throws Exception {
        final String id = "testTtlClassAnnotation";
        EntityManager<TtlEntity, String> entityPersister = new DefaultEntityManager.Builder<TtlEntity, String>()
                .withEntityType(TtlEntity.class)
                .withKeyspace(keyspace)
                .withColumnFamily(CF_SAMPLE_ENTITY)
                .build();
        TtlEntity origEntity = createTtlEntity(id);

        entityPersister.put(origEntity);

        // use low-level astyanax API to confirm the write
        {
            ColumnList<String> cl = keyspace.prepareQuery(CF_SAMPLE_ENTITY).getKey(id).execute().getResult();
            // test column number
            Assert.assertEquals(1, cl.size());
            // test column value
            Assert.assertEquals(origEntity.getColumn(), cl.getColumnByName("column").getStringValue());
            // custom ttl from the class-level @TTL(2)
            Assert.assertEquals(2, cl.getColumnByName("column").getTtl());
        }

        TtlEntity getEntity = entityPersister.get(id);
        Assert.assertEquals(id, getEntity.getId());
        Assert.assertEquals(origEntity, getEntity);

        // entity should expire after 3s since TTL is 2s in annotation
        Thread.sleep(1000 * 3);

        // use low-level astyanax API to confirm the TTL expiration
        {
            ColumnList<String> cl = keyspace.prepareQuery(CF_SAMPLE_ENTITY).getKey(id).execute().getResult();
            Assert.assertEquals(0, cl.size());
        }
    }

    @Test
    public void testConstructorTtlOverride() throws Exception {
        final String id = "testConstructorTtlOverride";
        EntityManager<TtlEntity, String> entityPersister = new DefaultEntityManager.Builder<TtlEntity, String>()
                .withEntityType(TtlEntity.class)
                .withKeyspace(keyspace)
                .withColumnFamily(CF_SAMPLE_ENTITY)
                .withTTL(5)
                .build();
        TtlEntity origEntity = createTtlEntity(id);

        entityPersister.put(origEntity);

        // use low-level astyanax API to confirm the write
        {
            ColumnList<String> cl = keyspace.prepareQuery(CF_SAMPLE_ENTITY).getKey(id).execute().getResult();
            // test column number
            Assert.assertEquals(1, cl.size());
            // test column value
            Assert.assertEquals(origEntity.getColumn(), cl.getColumnByName("column").getStringValue());
            // builder-level withTTL(5) overrides the class-level @TTL(2)
            Assert.assertEquals(5, cl.getColumnByName("column").getTtl());
        }

        TtlEntity getEntity = entityPersister.get(id);
        Assert.assertEquals(origEntity, getEntity);

        // entity should still be alive after 3s since TTL is overriden to 5s
        Thread.sleep(1000 * 3);

        getEntity = entityPersister.get(id);
        Assert.assertEquals(origEntity, getEntity);

        // entity should expire after 3 more seconds since 6s have passed with 5s TTL
        Thread.sleep(1000 * 3);

        // use low-level astyanax API to confirm the TTL expiration
        {
            ColumnList<String> cl = keyspace.prepareQuery(CF_SAMPLE_ENTITY).getKey(id).execute().getResult();
            Assert.assertEquals(0, cl.size());
        }
    }

    //////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////

    /** Entity whose TTL (2s) comes from a @TTL-annotated method. */
    @Entity
    private static class MethodTtlEntity {
        @Id
        private String id;

        @Column
        private String column;

        public MethodTtlEntity() {
        }

        public String getId() {
            return id;
        }

        public void setId(String id) {
            this.id = id;
        }

        public String getColumn() {
            return column;
        }

        public void setColumn(String column) {
            this.column = column;
        }

        @SuppressWarnings("unused")
        @TTL
        public Integer getTTL() {
            return 2;
        }

        @Override
        public boolean equals(Object obj) {
            if (this == obj)
                return true;
            if (obj == null)
                return false;
            if (getClass() != obj.getClass())
                return false;
            MethodTtlEntity other = (MethodTtlEntity) obj;
            return id.equals(other.id) && column.equals(other.column);
        }

        /** Added: equals() is overridden, so hashCode() must agree with it. */
        @Override
        public int hashCode() {
            return 31 * id.hashCode() + column.hashCode();
        }

        @Override
        public String toString() {
            return "MethodTtlEntity [id=" + id + ", column=" + column + "]";
        }
    }

    /** Builds a MethodTtlEntity with the given id and a random column value. */
    private MethodTtlEntity createMethodTtlEntity(String id) {
        MethodTtlEntity e = new MethodTtlEntity();
        e.setId(id);
        e.setColumn(RandomStringUtils.randomAlphanumeric(4));
        return e;
    }

    @Test
    public void testMethodTtlOverride() throws Exception {
        final String id = "testMethodTtlOverride";
        EntityManager<MethodTtlEntity, String> entityPersister = new DefaultEntityManager.Builder<MethodTtlEntity, String>()
                .withEntityType(MethodTtlEntity.class)
                .withKeyspace(keyspace)
                .withColumnFamily(CF_SAMPLE_ENTITY)
                .withTTL(60) // constructor TTL value is 60s
                .build();
        MethodTtlEntity origEntity = createMethodTtlEntity(id);

        entityPersister.put(origEntity);

        // use low-level astyanax API to confirm the write
        {
            ColumnList<String> cl = keyspace.prepareQuery(CF_SAMPLE_ENTITY).getKey(id).execute().getResult();
            // test column number
            Assert.assertEquals(1, cl.size());
            // test column value
            Assert.assertEquals(origEntity.getColumn(), cl.getColumnByName("column").getStringValue());
            // the @TTL method (2s) overrides the constructor TTL (60s)
            Assert.assertEquals(2, cl.getColumnByName("column").getTtl());
        }

        MethodTtlEntity getEntity = entityPersister.get(id);
        Assert.assertEquals(id, getEntity.getId());
        Assert.assertEquals(origEntity, getEntity);

        // entity should have EXPIRED after 4s since the method-level TTL is 2s
        // (previous comment incorrectly said "should still be alive")
        Thread.sleep(1000 * 4);

        // use low-level astyanax API to confirm the TTL expiration
        {
            ColumnList<String> cl = keyspace.prepareQuery(CF_SAMPLE_ENTITY).getKey(id).execute().getResult();
            Assert.assertEquals(0, cl.size());
        }
    }
}
| 7,536 |
0 | Create_ds/astyanax/astyanax-test/src/test/java/com/netflix/astyanax | Create_ds/astyanax/astyanax-test/src/test/java/com/netflix/astyanax/entitystore/DefaultEntityManagerTest.java | package com.netflix.astyanax.entitystore;
import java.util.Collection;
import java.util.Date;
import java.util.Map;
import java.util.Random;
import org.apache.commons.lang.RandomStringUtils;
import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.Test;
import com.google.common.base.Charsets;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Maps;
import com.netflix.astyanax.AstyanaxContext;
import com.netflix.astyanax.Keyspace;
import com.netflix.astyanax.connectionpool.NodeDiscoveryType;
import com.netflix.astyanax.connectionpool.impl.ConnectionPoolConfigurationImpl;
import com.netflix.astyanax.connectionpool.impl.ConnectionPoolType;
import com.netflix.astyanax.connectionpool.impl.CountingConnectionPoolMonitor;
import com.netflix.astyanax.entitystore.SampleEntity.Bar;
import com.netflix.astyanax.entitystore.SampleEntity.Bar.BarBar;
import com.netflix.astyanax.entitystore.SampleEntity.Foo;
import com.netflix.astyanax.impl.AstyanaxConfigurationImpl;
import com.netflix.astyanax.model.Column;
import com.netflix.astyanax.model.ColumnFamily;
import com.netflix.astyanax.model.ColumnList;
import com.netflix.astyanax.serializers.StringSerializer;
import com.netflix.astyanax.thrift.ThriftFamilyFactory;
import com.netflix.astyanax.util.SingletonEmbeddedCassandra;
import com.netflix.astyanax.util.TimeUUIDUtils;
public class DefaultEntityManagerTest {
// Keyspace handle and its owning context; initialized by setup()/createKeyspace().
private static Keyspace keyspace;
private static AstyanaxContext<Keyspace> keyspaceContext;

private static String TEST_CLUSTER_NAME = "junit_cass_sandbox";
private static String TEST_KEYSPACE_NAME = "EntityPersisterTestKeyspace";
// host:port of the embedded Cassandra instance
private static final String SEEDS = "localhost:9160";

// Column family backing SampleEntity. NOTE(review): the name "sampleentity"
// presumably must match what EntityManager derives from the class name when
// createStorage() is used — confirm against DefaultEntityManager.
public static ColumnFamily<String, String> CF_SAMPLE_ENTITY = ColumnFamily.newColumnFamily(
        "sampleentity",
        StringSerializer.get(),
        StringSerializer.get());

// Column family backing SimpleEntity (same naming caveat as above).
public static ColumnFamily<String, String> CF_SIMPLE_ENTITY = ColumnFamily.newColumnFamily(
        "simpleentity",
        StringSerializer.get(),
        StringSerializer.get());
@BeforeClass
public static void setup() throws Exception {
    // Boot the shared embedded Cassandra used by all tests in this class.
    SingletonEmbeddedCassandra.getInstance();

    // Give the embedded server time to come up before creating the schema.
    Thread.sleep(1000 * 3);

    createKeyspace();

    // Allow schema changes to settle before the tests run.
    Thread.sleep(1000 * 3);
}
@AfterClass
public static void teardown() throws Exception {
    // Shut down the Astyanax context (connection pools) if setup created one.
    if (keyspaceContext != null)
        keyspaceContext.shutdown();

    // Pause so the embedded Cassandra can quiesce before other suites start.
    Thread.sleep(1000 * 10);
}
/**
 * Creates the test keyspace (dropping any copy left over from a previous
 * run) and lets each entity manager create its own column family via
 * createStorage().
 */
private static void createKeyspace() throws Exception {
    keyspaceContext = new AstyanaxContext.Builder()
            .forCluster(TEST_CLUSTER_NAME)
            .forKeyspace(TEST_KEYSPACE_NAME)
            .withAstyanaxConfiguration(
                    new AstyanaxConfigurationImpl()
                            .setDiscoveryType(NodeDiscoveryType.RING_DESCRIBE)
                            .setConnectionPoolType(ConnectionPoolType.TOKEN_AWARE))
            .withConnectionPoolConfiguration(
                    new ConnectionPoolConfigurationImpl(TEST_CLUSTER_NAME
                            + "_" + TEST_KEYSPACE_NAME)
                            .setSocketTimeout(30000)
                            .setMaxTimeoutWhenExhausted(2000)
                            .setMaxConnsPerHost(20)
                            .setInitConnsPerHost(10)
                            .setSeeds(SEEDS))
            .withConnectionPoolMonitor(new CountingConnectionPoolMonitor())
            .buildKeyspace(ThriftFamilyFactory.getInstance());

    keyspaceContext.start();

    keyspace = keyspaceContext.getEntity();

    // Drop any stale keyspace; failures (e.g. "does not exist") are benign.
    try {
        keyspace.dropKeyspace();
    }
    catch (Exception e) {
        e.printStackTrace();
    }

    keyspace.createKeyspace(ImmutableMap.<String, Object>builder()
            .put("strategy_options", ImmutableMap.<String, Object>builder()
                    .put("replication_factor", "1")
                    .build())
            .put("strategy_class", "SimpleStrategy")
            .build()
    );

    // Column families are created through EntityManager.createStorage()
    // below instead of keyspace.createColumnFamily().
    // keyspace.createColumnFamily(CF_SAMPLE_ENTITY, null);
    // keyspace.createColumnFamily(CF_SIMPLE_ENTITY, null);

    {
        EntityManager<SampleEntity, String> entityPersister = new DefaultEntityManager.Builder<SampleEntity, String>()
                .withEntityType(SampleEntity.class)
                .withKeyspace(keyspace)
                .build();
        entityPersister.createStorage(null);
    }

    {
        EntityManager<SimpleEntity, String> entityPersister = new DefaultEntityManager.Builder<SimpleEntity, String>()
                .withEntityType(SimpleEntity.class)
                .withKeyspace(keyspace)
                .build();
        entityPersister.createStorage(null);
    }
}
/**
 * Builds a SampleEntity with the given id and randomized values for every
 * mapped field, including the nested Bar/BarBar entities.
 */
private SampleEntity createSampleEntity(String id) {
    final Random rnd = new Random();

    final SampleEntity e = new SampleEntity();
    e.setId(id);
    e.setBooleanPrimitive(rnd.nextBoolean());
    e.setBooleanObject(rnd.nextBoolean());
    e.setBytePrimitive((byte) rnd.nextInt(Byte.MAX_VALUE));
    e.setByteObject((byte) rnd.nextInt(Byte.MAX_VALUE));
    e.setShortPrimitive((short) rnd.nextInt(Short.MAX_VALUE));
    e.setShortObject((short) rnd.nextInt(Short.MAX_VALUE));
    e.setIntPrimitive(rnd.nextInt());
    e.setIntObject(rnd.nextInt());
    e.setLongPrimitive(rnd.nextLong());
    e.setLongObject(rnd.nextLong());
    e.setFloatPrimitive(rnd.nextFloat());
    e.setFloatObject(rnd.nextFloat());
    e.setDoublePrimitive(rnd.nextDouble());
    e.setDoubleObject(rnd.nextDouble());
    e.setString(RandomStringUtils.randomAlphanumeric(16));
    e.setByteArray(RandomStringUtils.randomAlphanumeric(16).getBytes(Charsets.UTF_8));
    e.setDate(new Date());
    e.setUuid(TimeUUIDUtils.getUniqueTimeUUIDinMicros());

    // container columns
    e.setStringSet(ImmutableSet.of("A", "B"));
    e.setStringMap(ImmutableMap.of("KA", "VA", "KB", "VB"));
    e.setLongSet(ImmutableSet.of(123L, 456L));
    e.setLongMap(ImmutableMap.of(1L, 11L, 2L, 22L));

    // custom-serialized field
    e.setFoo(new Foo(rnd.nextInt(), RandomStringUtils.randomAlphanumeric(4)));

    // nested entities
    final BarBar barbar = new BarBar();
    barbar.i = rnd.nextInt();
    barbar.s = RandomStringUtils.randomAlphanumeric(4);

    final Bar bar = new Bar();
    bar.i = rnd.nextInt();
    bar.s = RandomStringUtils.randomAlphanumeric(4);
    bar.barbar = barbar;
    e.setBar(bar);

    return e;
}
/**
 * Full put / low-level verify / get / delete lifecycle for SampleEntity.
 */
@Test
public void basicLifecycle() throws Exception {
    final String id = "basicLifecycle";
    EntityManager<SampleEntity, String> entityPersister = new DefaultEntityManager.Builder<SampleEntity, String>()
            .withEntityType(SampleEntity.class)
            .withKeyspace(keyspace)
            .build();
    SampleEntity origEntity = createSampleEntity(id);

    entityPersister.put(origEntity);

    // use low-level astyanax API to confirm the write
    {
        ColumnList<String> cl = keyspace.prepareQuery(CF_SAMPLE_ENTITY).getKey(id).execute().getResult();
        // 19 simple columns
        // 2 one-level-deep nested columns from Bar
        // 2 two-level-deep nested columns from BarBar
        // Assert.assertEquals(31, cl.size());
        for (Column<String> c : cl) {
            System.out.println("Got column : " + c.getName());
        }

        // simple columns
        Assert.assertEquals(origEntity.getString(), cl.getColumnByName("STRING").getStringValue());
        Assert.assertArrayEquals(origEntity.getByteArray(), cl.getColumnByName("BYTE_ARRAY").getByteArrayValue());

        // nested fields are flattened into dotted column names (BAR.i, BAR.barbar.i, ...)
        Assert.assertEquals(origEntity.getBar().i, cl.getColumnByName("BAR.i").getIntegerValue());
        Assert.assertEquals(origEntity.getBar().s, cl.getColumnByName("BAR.s").getStringValue());
        Assert.assertEquals(origEntity.getBar().barbar.i, cl.getColumnByName("BAR.barbar.i").getIntegerValue());
        Assert.assertEquals(origEntity.getBar().barbar.s, cl.getColumnByName("BAR.barbar.s").getStringValue());
    }

    // round-trip: the entity read back must equal the one written
    SampleEntity getEntity = entityPersister.get(id);
    System.out.println(getEntity.toString());
    Assert.assertEquals(origEntity, getEntity);

    entityPersister.delete(id);

    // use low-level astyanax API to confirm the delete
    {
        ColumnList<String> cl = keyspace.prepareQuery(CF_SAMPLE_ENTITY).getKey(id).execute().getResult();
        Assert.assertEquals(0, cl.size());
    }
}
/**
 * Exercises the bulk operations: put(Collection), get(Collection of keys),
 * getAll(), and delete(Collection of keys).
 */
@Test
public void testMultiCalls() throws Exception {
    EntityManager<SimpleEntity, String> entityPersister = new DefaultEntityManager.Builder<SimpleEntity, String>()
            .withEntityType(SimpleEntity.class)
            .withKeyspace(keyspace)
            .build();

    // ten entities keyed "0".."9", with the key doubling as the column value
    final Map<String, SimpleEntity> entities = Maps.newHashMap();
    for (int i = 0; i < 10; i++) {
        String str = Integer.toString(i);
        entities.put(str, new SimpleEntity(str, str));
    }

    // Add multiple
    entityPersister.put(entities.values());

    // multi-get by key set must return exactly the written ids
    {
        final Map<String, SimpleEntity> entities2 = collectionToMap(entityPersister.get(entities.keySet()));
        Assert.assertEquals(entities.keySet(), entities2.keySet());
    }

    // Read all
    {
        final Map<String, SimpleEntity> entities2 = collectionToMap(entityPersister.getAll());
        Assert.assertEquals(entities.keySet(), entities2.keySet());
    }

    // Delete multiple, then verify nothing remains via both get() and getAll()
    {
        System.out.println(entities.keySet());
        entityPersister.delete(entities.keySet());

        final Map<String, SimpleEntity> entities3 = collectionToMap(entityPersister.get(entities.keySet()));
        System.out.println(entities3);
        Assert.assertTrue(entities3.isEmpty());

        final Map<String, SimpleEntity> entities4 = collectionToMap(entityPersister.getAll());
        System.out.println(entities4);
        Assert.assertTrue(entities4.isEmpty());
    }
}
@Test
public void testBuilder() {
    // Supplying only a ColumnFamily object is legal.
    new DefaultEntityManager.Builder<DoubleIdColumnEntity, String>()
            .withColumnFamily(CF_SAMPLE_ENTITY);
    // Configuring the column family by name and then again by object must be rejected.
    try {
        new DefaultEntityManager.Builder<DoubleIdColumnEntity, String>()
                .withColumnFamily("Test")
                .withColumnFamily(CF_SAMPLE_ENTITY);
        Assert.fail();
    }
    catch (Exception expected) {
        // expected: the column family may only be configured once
    }
    // The reverse order (object first, then name) must also be rejected.
    try {
        new DefaultEntityManager.Builder<DoubleIdColumnEntity, String>()
                .withColumnFamily(CF_SAMPLE_ENTITY)
                .withColumnFamily("Test");
        Assert.fail();
    }
    catch (Exception expected) {
        // expected: the column family may only be configured once
    }
    // Supplying only a name is legal.
    new DefaultEntityManager.Builder<DoubleIdColumnEntity, String>()
            .withColumnFamily("test");
}
/** Indexes the given entities by their id for easy lookup in assertions. */
private static Map<String, SimpleEntity> collectionToMap(Collection<SimpleEntity> entities) {
    final Map<String, SimpleEntity> byId = Maps.newHashMap();
    for (final SimpleEntity entity : entities) {
        byId.put(entity.getId(), entity);
    }
    return byId;
}
/** Builds a DoubleIdColumnEntity with the given id and a random num/str payload. */
private DoubleIdColumnEntity createDoubleIdColumnEntity(String id) {
    final Random prng = new Random();
    final DoubleIdColumnEntity entity = new DoubleIdColumnEntity();
    entity.setId(id);
    entity.setNum(prng.nextInt());
    entity.setStr(RandomStringUtils.randomAlphanumeric(4));
    return entity;
}
@Test
public void doubleIdColumnAnnotation() throws Exception {
    final String id = "doubleIdColumnAnnotation";
    EntityManager<DoubleIdColumnEntity, String> manager = new DefaultEntityManager.Builder<DoubleIdColumnEntity, String>()
            .withEntityType(DoubleIdColumnEntity.class)
            .withKeyspace(keyspace)
            .withColumnFamily(CF_SAMPLE_ENTITY)
            .build();
    final DoubleIdColumnEntity written = createDoubleIdColumnEntity(id);
    manager.put(written);
    // Confirm the write through the low-level astyanax API.
    {
        ColumnList<String> columns = keyspace.prepareQuery(CF_SAMPLE_ENTITY).getKey(id).execute().getResult();
        // The @Id field also carries @Column, so it is persisted alongside num and str.
        Assert.assertEquals(3, columns.size());
        Assert.assertEquals(written.getId(), columns.getColumnByName("id").getStringValue());
        Assert.assertEquals(written.getNum(), columns.getColumnByName("num").getIntegerValue());
        Assert.assertEquals(written.getStr(), columns.getColumnByName("str").getStringValue());
    }
    // Round-trip through the entity manager.
    DoubleIdColumnEntity read = manager.get(id);
    Assert.assertEquals(written, read);
    manager.delete(id);
    // Confirm the delete through the low-level astyanax API.
    {
        ColumnList<String> columns = keyspace.prepareQuery(CF_SAMPLE_ENTITY).getKey(id).execute().getResult();
        Assert.assertEquals(0, columns.size());
    }
}
}
| 7,537 |
0 | Create_ds/astyanax/astyanax-test/src/test/java/com/netflix/astyanax | Create_ds/astyanax/astyanax-test/src/test/java/com/netflix/astyanax/entitystore/NullableEntity.java | package com.netflix.astyanax.entitystore;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.Id;
@Entity
final class NullableEntity {

    /** Nested entity whose single column is optional (may be null). */
    @Entity
    static class AllOptionalNestedEntity {
        @Column()
        private String nullable;

        public String getNullable() {
            return nullable;
        }

        public void setNullable(String nullable) {
            this.nullable = nullable;
        }

        @Override
        public boolean equals(Object obj) {
            if (this == obj)
                return true;
            if (obj == null || getClass() != obj.getClass())
                return false;
            AllOptionalNestedEntity other = (AllOptionalNestedEntity) obj;
            return java.util.Objects.equals(nullable, other.nullable);
        }

        // equals() is overridden, so hashCode() must be kept consistent with it.
        @Override
        public int hashCode() {
            return java.util.Objects.hashCode(nullable);
        }

        @Override
        public String toString() {
            return "AllOptionalNestedEntity [nullable=" + nullable + "]";
        }
    }

    /** Nested entity whose single column is mandatory (nullable=false). */
    @Entity
    static class AllMandatoryNestedEntity {
        @Column(nullable=false)
        private String notnullable;

        public String getNotnullable() {
            return notnullable;
        }

        public void setNotnullable(String notnullable) {
            this.notnullable = notnullable;
        }

        @Override
        public boolean equals(Object obj) {
            if (this == obj)
                return true;
            if (obj == null || getClass() != obj.getClass())
                return false;
            AllMandatoryNestedEntity other = (AllMandatoryNestedEntity) obj;
            return java.util.Objects.equals(notnullable, other.notnullable);
        }

        // equals() is overridden, so hashCode() must be kept consistent with it.
        @Override
        public int hashCode() {
            return java.util.Objects.hashCode(notnullable);
        }

        @Override
        public String toString() {
            return "AllMandatoryNestedEntity [notnullable=" + notnullable + "]";
        }
    }

    @Id
    private String id;

    @Column(nullable=false)
    private String notnullable;

    @Column()
    private String nullable;

    @Column()
    private AllOptionalNestedEntity notnullableAllOptionalNestedEntity;

    @Column()
    private AllOptionalNestedEntity nullableAllOptionalNestedEntity;

    @Column(nullable=false)
    private AllMandatoryNestedEntity notnullableAllMandatoryNestedEntity;

    @Column()
    private AllMandatoryNestedEntity nullableAllMandatoryNestedEntity;

    public NullableEntity() {
    }

    public String getId() {
        return id;
    }

    public void setId(String id) {
        this.id = id;
    }

    public String getNotnullable() {
        return notnullable;
    }

    public void setNotnullable(String notnullable) {
        this.notnullable = notnullable;
    }

    public String getNullable() {
        return nullable;
    }

    public void setNullable(String nullable) {
        this.nullable = nullable;
    }

    public AllOptionalNestedEntity getNotnullableAllOptionalNestedEntity() {
        return notnullableAllOptionalNestedEntity;
    }

    public void setNotnullableAllOptionalNestedEntity(
            AllOptionalNestedEntity notnullableAllOptionalNestedEntity) {
        this.notnullableAllOptionalNestedEntity = notnullableAllOptionalNestedEntity;
    }

    public AllOptionalNestedEntity getNullableAllOptionalNestedEntity() {
        return nullableAllOptionalNestedEntity;
    }

    public void setNullableAllOptionalNestedEntity(
            AllOptionalNestedEntity nullableAllOptionalNestedEntity) {
        this.nullableAllOptionalNestedEntity = nullableAllOptionalNestedEntity;
    }

    public AllMandatoryNestedEntity getNotnullableAllMandatoryNestedEntity() {
        return notnullableAllMandatoryNestedEntity;
    }

    public void setNotnullableAllMandatoryNestedEntity(
            AllMandatoryNestedEntity notnullableAllMandatoryNestedEntity) {
        this.notnullableAllMandatoryNestedEntity = notnullableAllMandatoryNestedEntity;
    }

    public AllMandatoryNestedEntity getNullableAllMandatoryNestedEntity() {
        return nullableAllMandatoryNestedEntity;
    }

    public void setNullableAllMandatoryNestedEntity(
            AllMandatoryNestedEntity nullableAllMandatoryNestedEntity) {
        this.nullableAllMandatoryNestedEntity = nullableAllMandatoryNestedEntity;
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (obj == null || getClass() != obj.getClass())
            return false;
        NullableEntity other = (NullableEntity) obj;
        // BUG FIX: the original compared 'notnullable' where it meant to
        // compare 'nullable', so a difference in 'nullable' went undetected.
        // Objects.equals() is also null-safe for the id field.
        return java.util.Objects.equals(id, other.id)
                && java.util.Objects.equals(notnullable, other.notnullable)
                && java.util.Objects.equals(nullable, other.nullable)
                && java.util.Objects.equals(notnullableAllOptionalNestedEntity, other.notnullableAllOptionalNestedEntity)
                && java.util.Objects.equals(nullableAllOptionalNestedEntity, other.nullableAllOptionalNestedEntity)
                && java.util.Objects.equals(notnullableAllMandatoryNestedEntity, other.notnullableAllMandatoryNestedEntity)
                && java.util.Objects.equals(nullableAllMandatoryNestedEntity, other.nullableAllMandatoryNestedEntity);
    }

    // equals() is overridden, so hashCode() must be kept consistent with it.
    @Override
    public int hashCode() {
        return java.util.Objects.hash(id, notnullable, nullable,
                notnullableAllOptionalNestedEntity, nullableAllOptionalNestedEntity,
                notnullableAllMandatoryNestedEntity, nullableAllMandatoryNestedEntity);
    }

    @Override
    public String toString() {
        return "NullableEntity [id=" + id + ", notnullable=" + notnullable + ", nullable=" + nullable
                + ", notnullableAllOptionalNestedEntity=" + notnullableAllOptionalNestedEntity
                + ", nullableAllOptionalNestedEntity=" + nullableAllOptionalNestedEntity + ", notnullableAllMandatoryNestedEntity="
                + notnullableAllMandatoryNestedEntity + ", nullableAllMandatoryNestedEntity=" + nullableAllMandatoryNestedEntity
                + "]";
    }
}
| 7,538 |
0 | Create_ds/astyanax/astyanax-test/src/test/java/com/netflix/astyanax | Create_ds/astyanax/astyanax-test/src/test/java/com/netflix/astyanax/entitystore/DoubleIdColumnEntity.java | package com.netflix.astyanax.entitystore;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.Id;
/**
* same field annotated by both @Id and @Column
* @author stevenwu
*
*/
@Entity
public class DoubleIdColumnEntity {
    // The id field carries both @Id and @Column, so it is persisted as a
    // regular column named "id" in addition to serving as the row key.
    @Id
    @Column(name="id")
    private String id;

    @Column(name="num")
    private int num;

    @Column(name="str")
    private String str;

    public String getId() {
        return id;
    }

    public void setId(String id) {
        this.id = id;
    }

    public int getNum() {
        return num;
    }

    public void setNum(int num) {
        this.num = num;
    }

    public String getStr() {
        return str;
    }

    public void setStr(String str) {
        this.str = str;
    }

    @Override
    public String toString() {
        // BUG FIX: the format string was missing its closing parenthesis.
        return String.format("DoubleIdColumnEntity(id = %s, num = %d, str = %s)",
                id, num, str);
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (obj == null || getClass() != obj.getClass())
            return false;
        DoubleIdColumnEntity other = (DoubleIdColumnEntity) obj;
        // Objects.equals() is null-safe, unlike the original field.equals() calls.
        return java.util.Objects.equals(id, other.id)
                && num == other.num
                && java.util.Objects.equals(str, other.str);
    }

    // equals() is overridden, so hashCode() must be kept consistent with it.
    @Override
    public int hashCode() {
        return java.util.Objects.hash(id, num, str);
    }
}
| 7,539 |
0 | Create_ds/astyanax/astyanax-test/src/test/java/com/netflix/astyanax | Create_ds/astyanax/astyanax-test/src/test/java/com/netflix/astyanax/entitystore/DefaultEntityManagerNullableTest.java | package com.netflix.astyanax.entitystore;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;
import javax.persistence.PersistenceException;
import org.apache.commons.lang.exception.ExceptionUtils;
import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.Test;
import com.google.common.collect.ImmutableMap;
import com.netflix.astyanax.AstyanaxContext;
import com.netflix.astyanax.Keyspace;
import com.netflix.astyanax.connectionpool.NodeDiscoveryType;
import com.netflix.astyanax.connectionpool.impl.ConnectionPoolConfigurationImpl;
import com.netflix.astyanax.connectionpool.impl.ConnectionPoolType;
import com.netflix.astyanax.connectionpool.impl.CountingConnectionPoolMonitor;
import com.netflix.astyanax.entitystore.NullableEntity.AllMandatoryNestedEntity;
import com.netflix.astyanax.entitystore.NullableEntity.AllOptionalNestedEntity;
import com.netflix.astyanax.impl.AstyanaxConfigurationImpl;
import com.netflix.astyanax.model.ColumnFamily;
import com.netflix.astyanax.model.ColumnList;
import com.netflix.astyanax.serializers.StringSerializer;
import com.netflix.astyanax.thrift.ThriftFamilyFactory;
import com.netflix.astyanax.util.SingletonEmbeddedCassandra;
/**
 * Verifies how DefaultEntityManager handles nullable vs. non-nullable columns,
 * both at the top level and inside nested entities, against an embedded Cassandra.
 */
public class DefaultEntityManagerNullableTest {

    private static Keyspace keyspace;
    private static AstyanaxContext<Keyspace> keyspaceContext;

    private static String TEST_CLUSTER_NAME = "junit_cass_sandbox";
    private static String TEST_KEYSPACE_NAME = "EntityPersisterTestKeyspace";
    private static final String SEEDS = "localhost:9160";

    public static ColumnFamily<String, String> CF_SAMPLE_ENTITY = ColumnFamily.newColumnFamily(
            "SampleEntityColumnFamily",
            StringSerializer.get(),
            StringSerializer.get());

    public static ColumnFamily<String, String> CF_SIMPLE_ENTITY = ColumnFamily.newColumnFamily(
            "SimpleEntityColumnFamily",
            StringSerializer.get(),
            StringSerializer.get());

    @BeforeClass
    public static void setup() throws Exception {
        SingletonEmbeddedCassandra.getInstance();
        // Give the embedded Cassandra time to come up before creating schema.
        Thread.sleep(1000 * 3);
        createKeyspace();
        Thread.sleep(1000 * 3);
    }

    @AfterClass
    public static void teardown() throws Exception {
        if (keyspaceContext != null)
            keyspaceContext.shutdown();
        Thread.sleep(1000 * 10);
    }

    /** Creates a fresh keyspace and the two column families used by the tests. */
    private static void createKeyspace() throws Exception {
        keyspaceContext = new AstyanaxContext.Builder()
                .forCluster(TEST_CLUSTER_NAME)
                .forKeyspace(TEST_KEYSPACE_NAME)
                .withAstyanaxConfiguration(
                        new AstyanaxConfigurationImpl()
                                .setDiscoveryType(NodeDiscoveryType.RING_DESCRIBE)
                                .setConnectionPoolType(ConnectionPoolType.ROUND_ROBIN))
                .withConnectionPoolConfiguration(
                        new ConnectionPoolConfigurationImpl(TEST_CLUSTER_NAME
                                + "_" + TEST_KEYSPACE_NAME)
                                .setSocketTimeout(30000)
                                .setMaxTimeoutWhenExhausted(2000)
                                .setMaxConnsPerHost(20)
                                .setInitConnsPerHost(10)
                                .setSeeds(SEEDS))
                .withConnectionPoolMonitor(new CountingConnectionPoolMonitor())
                .buildKeyspace(ThriftFamilyFactory.getInstance());

        keyspaceContext.start();
        keyspace = keyspaceContext.getEntity();

        // Start from a clean keyspace; dropping may fail if it does not exist yet.
        try {
            keyspace.dropKeyspace();
        }
        catch (Exception e) {
            e.printStackTrace();
        }

        keyspace.createKeyspace(ImmutableMap.<String, Object>builder()
                .put("strategy_options", ImmutableMap.<String, Object>builder()
                        .put("replication_factor", "1")
                        .build())
                .put("strategy_class", "SimpleStrategy")
                .build()
        );

        keyspace.createColumnFamily(CF_SAMPLE_ENTITY, null);
        keyspace.createColumnFamily(CF_SIMPLE_ENTITY, null);
    }

    /**
     * Builds a NullableEntity with every field (including the four nested
     * entities) populated; individual tests null out fields as needed.
     */
    private NullableEntity createNullableEntity(final String id) {
        NullableEntity entity = new NullableEntity();
        entity.setId(id);
        entity.setNotnullable("notnullable");
        entity.setNullable("nullable");

        AllOptionalNestedEntity notnullableAllOptionalNestedEntity = new AllOptionalNestedEntity();
        notnullableAllOptionalNestedEntity.setNullable("notnullableAllOptionalNestedEntity");
        entity.setNotnullableAllOptionalNestedEntity(notnullableAllOptionalNestedEntity);

        AllOptionalNestedEntity nullableAllOptionalNestedEntity = new AllOptionalNestedEntity();
        nullableAllOptionalNestedEntity.setNullable("nullableAllOptionalNestedEntity");
        entity.setNullableAllOptionalNestedEntity(nullableAllOptionalNestedEntity);

        AllMandatoryNestedEntity notnullableAllMandatoryNestedEntity = new AllMandatoryNestedEntity();
        notnullableAllMandatoryNestedEntity.setNotnullable("notnullableAllMandatoryNestedEntity");
        entity.setNotnullableAllMandatoryNestedEntity(notnullableAllMandatoryNestedEntity);

        AllMandatoryNestedEntity nullableAllMandatoryNestedEntity = new AllMandatoryNestedEntity();
        nullableAllMandatoryNestedEntity.setNotnullable("nullableAllMandatoryNestedEntity");
        entity.setNullableAllMandatoryNestedEntity(nullableAllMandatoryNestedEntity);

        return entity;
    }

    @Test
    public void nullableColumn() throws Exception {
        final String id = "nullableColumn";
        EntityManager<NullableEntity, String> entityPersister = new DefaultEntityManager.Builder<NullableEntity, String>()
                .withEntityType(NullableEntity.class)
                .withKeyspace(keyspace)
                .withColumnFamily(CF_SAMPLE_ENTITY)
                .build();
        NullableEntity origEntity = createNullableEntity(id);
        origEntity.setNullable(null);
        entityPersister.put(origEntity);

        // use low-level astyanax API to confirm the null column
        // is not written as empty column
        {
            ColumnList<String> cl = keyspace.prepareQuery(CF_SAMPLE_ENTITY).getKey(id).execute().getResult();
            // test column number
            Assert.assertEquals(5, cl.size());
            // assert non-existent
            Assert.assertNull(cl.getColumnByName("nullable"));
            // test column value
            Assert.assertEquals(origEntity.getNotnullable(), cl.getColumnByName("notnullable").getStringValue());
        }

        NullableEntity getEntity = entityPersister.get(id);
        assertNull(getEntity.getNullable());
        assertEquals(origEntity, getEntity);

        entityPersister.delete(id);
        // use low-level astyanax API to confirm the delete
        {
            ColumnList<String> cl = keyspace.prepareQuery(CF_SAMPLE_ENTITY).getKey(id).execute().getResult();
            Assert.assertEquals(0, cl.size());
        }
    }

    @Test
    public void expectNullColumnException() throws Exception {
        final String id = "expectNullColumnException";
        try {
            EntityManager<NullableEntity, String> entityPersister = new DefaultEntityManager.Builder<NullableEntity, String>()
                    .withEntityType(NullableEntity.class)
                    .withKeyspace(keyspace)
                    .withColumnFamily(CF_SAMPLE_ENTITY)
                    .build();
            NullableEntity origEntity = createNullableEntity(id);
            origEntity.setNotnullable(null);
            entityPersister.put(origEntity);
            // BUG FIX: without this the test silently passed when no exception was thrown.
            Assert.fail("expected PersistenceException for null non-nullable column");
        } catch(PersistenceException e) {
            // catch expected exception and verify the cause
            Throwable rootCause = ExceptionUtils.getRootCause(e);
            assertEquals(IllegalArgumentException.class, rootCause.getClass());
            assertEquals("cannot write non-nullable column with null value: notnullable", rootCause.getMessage());
        }
    }

    @Test
    public void nullableNestedColumn() throws Exception {
        final String id = "nullableNestedColumn";
        EntityManager<NullableEntity, String> entityPersister = new DefaultEntityManager.Builder<NullableEntity, String>()
                .withEntityType(NullableEntity.class)
                .withKeyspace(keyspace)
                .withColumnFamily(CF_SAMPLE_ENTITY)
                .build();
        NullableEntity origEntity = createNullableEntity(id);
        origEntity.setNullableAllOptionalNestedEntity(null);
        origEntity.getNotnullableAllOptionalNestedEntity().setNullable(null);
        entityPersister.put(origEntity);

        // use low-level astyanax API to confirm the null column
        // is not written as empty column
        {
            ColumnList<String> cl = keyspace.prepareQuery(CF_SAMPLE_ENTITY).getKey(id).execute().getResult();
            // test column number
            Assert.assertEquals(4, cl.size());
            // assert non-existent
            Assert.assertNull(cl.getColumnByName("nullableAllOptionalNestedEntity.nullable"));
            Assert.assertNull(cl.getColumnByName("notnullableAllOptionalNestedEntity.nullable"));
            // test column value
            Assert.assertEquals(origEntity.getNotnullable(), cl.getColumnByName("notnullable").getStringValue());
            Assert.assertEquals(origEntity.getNullable(), cl.getColumnByName("nullable").getStringValue());
            Assert.assertEquals(origEntity.getNotnullableAllMandatoryNestedEntity().getNotnullable(), cl.getColumnByName("notnullableAllMandatoryNestedEntity.notnullable").getStringValue());
            Assert.assertEquals(origEntity.getNullableAllMandatoryNestedEntity().getNotnullable(), cl.getColumnByName("nullableAllMandatoryNestedEntity.notnullable").getStringValue());
        }

        NullableEntity getEntity = entityPersister.get(id);
        Assert.assertNull(getEntity.getNullableAllOptionalNestedEntity());
        // note this is special. it is NOT null
        // Assert.assertNotNull(getEntity.getNotnullableAllOptionalNestedEntity());
        // Assert.assertNull(getEntity.getNotnullableAllOptionalNestedEntity().getNullable());
        // Assert.assertEquals(origEntity, getEntity);

        entityPersister.delete(id);
        // use low-level astyanax API to confirm the delete
        {
            ColumnList<String> cl = keyspace.prepareQuery(CF_SAMPLE_ENTITY).getKey(id).execute().getResult();
            Assert.assertEquals(0, cl.size());
        }
    }

    @Test
    public void expectNullColumnExceptionNotnullableAllOptionalNestedEntity() throws Exception {
        final String id = "expectNullColumnExceptionNotnullableAllOptionalNestedEntity";
        try {
            EntityManager<NullableEntity, String> entityPersister = new DefaultEntityManager.Builder<NullableEntity, String>()
                    .withEntityType(NullableEntity.class)
                    .withKeyspace(keyspace)
                    .withColumnFamily(CF_SAMPLE_ENTITY)
                    .build();
            NullableEntity origEntity = createNullableEntity(id);
            origEntity.setNotnullableAllOptionalNestedEntity(null);
            entityPersister.put(origEntity);
            // BUG FIX: without this the test silently passed when no exception was thrown.
            Assert.fail("expected PersistenceException for null non-nullable nested entity");
        } catch(PersistenceException e) {
            // catch expected exception and verify the cause
            Throwable rootCause = ExceptionUtils.getRootCause(e);
            assertEquals(IllegalArgumentException.class, rootCause.getClass());
            assertEquals("cannot write non-nullable column with null value: notnullableAllOptionalNestedEntity", rootCause.getMessage());
        }
    }

    @Test
    public void expectNullColumnExceptionNotnullableAllMandatoryNestedEntity() throws Exception {
        final String id = "expectNullColumnExceptionNotnullableAllMandatoryNestedEntity";
        try {
            EntityManager<NullableEntity, String> entityPersister = new DefaultEntityManager.Builder<NullableEntity, String>()
                    .withEntityType(NullableEntity.class)
                    .withKeyspace(keyspace)
                    .withColumnFamily(CF_SAMPLE_ENTITY)
                    .build();
            NullableEntity origEntity = createNullableEntity(id);
            origEntity.setNotnullableAllMandatoryNestedEntity(null);
            entityPersister.put(origEntity);
            // BUG FIX: without this the test silently passed when no exception was thrown.
            Assert.fail("expected PersistenceException for null non-nullable nested entity");
        } catch(PersistenceException e) {
            // catch expected exception and verify the cause
            Throwable rootCause = ExceptionUtils.getRootCause(e);
            assertEquals(IllegalArgumentException.class, rootCause.getClass());
            assertEquals("cannot write non-nullable column with null value: notnullableAllMandatoryNestedEntity", rootCause.getMessage());
        }
    }

    @Test
    public void expectNestedNullColumnExceptionNullableAllMandatoryNestedEntityNullChild() throws Exception {
        final String id = "expectNestedNullColumnException";
        try {
            EntityManager<NullableEntity, String> entityPersister = new DefaultEntityManager.Builder<NullableEntity, String>()
                    .withEntityType(NullableEntity.class)
                    .withKeyspace(keyspace)
                    .withColumnFamily(CF_SAMPLE_ENTITY)
                    .build();
            NullableEntity origEntity = createNullableEntity(id);
            origEntity.getNullableAllMandatoryNestedEntity().setNotnullable(null);
            entityPersister.put(origEntity);
            // BUG FIX: without this the test silently passed when no exception was thrown.
            Assert.fail("expected PersistenceException for null non-nullable nested field");
        } catch(PersistenceException e) {
            // catch expected exception and verify the cause
            Throwable rootCause = ExceptionUtils.getRootCause(e);
            assertEquals(IllegalArgumentException.class, rootCause.getClass());
            assertEquals("cannot write non-nullable column with null value: notnullable", rootCause.getMessage());
        }
    }
}
| 7,540 |
0 | Create_ds/astyanax/astyanax-test/src/test/java/com/netflix/astyanax | Create_ds/astyanax/astyanax-test/src/test/java/com/netflix/astyanax/entitystore/CompositeEntityManagerTest.java | package com.netflix.astyanax.entitystore;
import java.util.Collection;
import java.util.List;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.Id;
import javax.persistence.OneToMany;
import junit.framework.Assert;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
import com.netflix.astyanax.AstyanaxContext;
import com.netflix.astyanax.Keyspace;
import com.netflix.astyanax.connectionpool.NodeDiscoveryType;
import com.netflix.astyanax.connectionpool.impl.ConnectionPoolConfigurationImpl;
import com.netflix.astyanax.connectionpool.impl.ConnectionPoolType;
import com.netflix.astyanax.connectionpool.impl.CountingConnectionPoolMonitor;
import com.netflix.astyanax.impl.AstyanaxConfigurationImpl;
import com.netflix.astyanax.thrift.ThriftFamilyFactory;
import com.netflix.astyanax.util.SingletonEmbeddedCassandra;
/**
 * Exercises CompositeEntityManager queries (native builder and CQL) against an
 * embedded Cassandra, using a composite column layout of (part1, part2) -> value.
 */
public class CompositeEntityManagerTest {
    private static Logger LOG = LoggerFactory.getLogger(CompositeEntityManagerTest.class);

    private static Keyspace keyspace;
    private static AstyanaxContext<Keyspace> keyspaceContext;

    private static String TEST_CLUSTER_NAME = "junit_cass_sandbox";
    private static String TEST_KEYSPACE_NAME = "CompositeEntityManagerTest";
    private static final String SEEDS = "localhost:9160";

    @Entity
    public static class TestEntity {
        public TestEntity() {
        }

        public TestEntity(String rowKey, String part1, Long part2, Long value) {
            super();
            this.part1 = part1;
            this.part2 = part2;
            this.value = value;
            this.rowKey = rowKey;
        }

        @Id     String rowKey;  // This will be the row key
        @Column String part1;   // This will be the first part of the composite
        @Column Long   part2;   // This will be the second part of the composite
        @Column Long   value;   // This will be the value of the composite

        @Override
        public String toString() {
            return "TestEntityChild ["
                    + "key=" + rowKey
                    + ", part1=" + part1
                    + ", part2=" + part2
                    + ", value=" + value + "]";
        }
    }

    @BeforeClass
    public static void setup() throws Exception {
        SingletonEmbeddedCassandra.getInstance();
        // Give the embedded Cassandra time to come up before creating schema.
        Thread.sleep(1000 * 3);
        createKeyspace();
        Thread.sleep(1000 * 3);
    }

    @AfterClass
    public static void teardown() throws Exception {
        if (keyspaceContext != null)
            keyspaceContext.shutdown();
        Thread.sleep(1000 * 10);
    }

    private static CompositeEntityManager<TestEntity, String> manager;

    /** Creates the keyspace, the entity storage, and seeds 40 test rows. */
    private static void createKeyspace() throws Exception {
        keyspaceContext = new AstyanaxContext.Builder()
                .forCluster(TEST_CLUSTER_NAME)
                .forKeyspace(TEST_KEYSPACE_NAME)
                .withAstyanaxConfiguration(
                        new AstyanaxConfigurationImpl()
                                .setCqlVersion("3.0.0")
                                .setTargetCassandraVersion("1.2")
                                .setDiscoveryType(NodeDiscoveryType.RING_DESCRIBE)
                                .setConnectionPoolType(ConnectionPoolType.TOKEN_AWARE))
                .withConnectionPoolConfiguration(
                        new ConnectionPoolConfigurationImpl(TEST_CLUSTER_NAME
                                + "_" + TEST_KEYSPACE_NAME)
                                .setSocketTimeout(30000)
                                .setMaxTimeoutWhenExhausted(2000)
                                .setMaxConnsPerHost(20)
                                .setInitConnsPerHost(10)
                                .setSeeds(SEEDS))
                .withConnectionPoolMonitor(new CountingConnectionPoolMonitor())
                .buildKeyspace(ThriftFamilyFactory.getInstance());

        keyspaceContext.start();
        keyspace = keyspaceContext.getClient();

        // Start from a clean keyspace; dropping may fail if it does not exist yet.
        try {
            keyspace.dropKeyspace();
        }
        catch (Exception e) {
            LOG.info(e.getMessage());
        }

        keyspace.createKeyspace(ImmutableMap.<String, Object>builder()
                .put("strategy_options", ImmutableMap.<String, Object>builder()
                        .put("replication_factor", "1")
                        .build())
                .put("strategy_class", "SimpleStrategy")
                .build()
        );

        manager = CompositeEntityManager.<TestEntity, String>builder()
                .withKeyspace(keyspace)
                .withColumnFamily("testentity")
                .withEntityType(TestEntity.class)
                .withVerboseTracing(true)
                .build();
        manager.createStorage(null);

        // Seed rows "A" and "B", each with composites (a,0..9) and (b,0..9).
        List<TestEntity> children = Lists.newArrayList();
        for (long i = 0; i < 10; i++) {
            children.add(new TestEntity("A", "a", i, i*i));
            children.add(new TestEntity("A", "b", i, i*i));
            children.add(new TestEntity("B", "a", i, i*i));
            children.add(new TestEntity("B", "b", i, i*i));
        }
        manager.put(children);

        // Read back all rows and log
        logResultSet(manager.getAll(), "ALL: ");
    }

    @Test
    public void test() throws Exception {
        List<TestEntity> cqlEntities;
        Collection<TestEntity> entitiesNative;

        // Simple row query
        entitiesNative = manager.createNativeQuery()
                .whereId().in("A")
                .getResultSet();
        Assert.assertEquals(20, entitiesNative.size());
        LOG.info("NATIVE: " + entitiesNative);

        // Multi row query
        cqlEntities = manager.find("SELECT * from TestEntity WHERE KEY IN ('A', 'B')");
        Assert.assertEquals(40, cqlEntities.size());

        entitiesNative = manager.createNativeQuery()
                .whereId().in("A", "B")
                .getResultSet();
        LOG.info("NATIVE: " + entitiesNative);
        Assert.assertEquals(40, entitiesNative.size());

        // Simple prefix
        entitiesNative = manager.createNativeQuery()
                .whereId().equal("A")
                .whereColumn("part1").equal("a")
                .getResultSet();
        LOG.info("NATIVE: " + entitiesNative);
        Assert.assertEquals(10, entitiesNative.size());

        cqlEntities = manager.find("SELECT * from TestEntity WHERE KEY = 'A' AND column1='b' AND column2>=5 AND column2<8");
        Assert.assertEquals(3, cqlEntities.size());
        LOG.info(cqlEntities.toString());

        // Removing one composite drops the result count by one.
        manager.remove(new TestEntity("A", "b", 5L, null));
        cqlEntities = manager.find("SELECT * from TestEntity WHERE KEY = 'A' AND column1='b' AND column2>=5 AND column2<8");
        Assert.assertEquals(2, cqlEntities.size());
        LOG.info(cqlEntities.toString());

        // Deleting the whole row removes every composite under it.
        manager.delete("A");
        cqlEntities = manager.find("SELECT * from TestEntity WHERE KEY = 'A' AND column1='b' AND column2>=5 AND column2<8");
        Assert.assertEquals(0, cqlEntities.size());
    }

    @Test
    public void testQuery() throws Exception {
        Collection<TestEntity> entitiesNative;

        entitiesNative = manager.createNativeQuery()
                .whereId().in("B")
                .whereColumn("part1").equal("b")
                .whereColumn("part2").greaterThanEqual(5L)
                .whereColumn("part2").lessThan(8L)
                .getResultSet();

        LOG.info("NATIVE: " + entitiesNative.toString());
        Assert.assertEquals(3, entitiesNative.size());
    }

    // ... Not sure this use case makes sense since cassandra will end up returning
    // columns with part2 greater than 8 but less than b
    // @Test
    // public void testQueryComplexRange() throws Exception {
    //     Collection<TestEntity> entitiesNative;
    //
    //     entitiesNative = manager.createNativeQuery()
    //             .whereId().in("B")
    //             .whereColumn("part1").lessThan("b")
    //             .whereColumn("part2").lessThan(8L)
    //             .getResultSet();
    //
    //     LOG.info("NATIVE: " + entitiesNative.toString());
    //     logResultSet(manager.getAll(), "COMPLEX RANGE: ");
    //     Assert.assertEquals(2, entitiesNative.size());
    // }

    @Test
    public void testBadFieldName() throws Exception {
        try {
            manager.createNativeQuery()
                    .whereId().in("A")
                    .whereColumn("badfield").equal("b")
                    .getResultSet();
            Assert.fail();
        }
        catch (Exception e) {
            // expected: "badfield" is not a mapped column
            LOG.info(e.getMessage(), e);
        }
    }

    /** Logs each entity of the given result set with the supplied prefix. */
    private static void logResultSet(List<TestEntity> result, String prefix) {
        // BUG FIX: the original ignored 'result' and re-read the whole table
        // via manager.getAll(); log what the caller actually passed in.
        for (TestEntity entity : result) {
            LOG.info(prefix + entity.toString());
        }
    }
}
| 7,541 |
0 | Create_ds/astyanax/astyanax-test/src/test/java/com/netflix/astyanax | Create_ds/astyanax/astyanax-test/src/test/java/com/netflix/astyanax/test/TokenTestOperation.java | /*******************************************************************************
* Copyright 2011 Netflix
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
******************************************************************************/
package com.netflix.astyanax.test;
import java.math.BigInteger;
import java.nio.ByteBuffer;
import com.netflix.astyanax.serializers.BigIntegerSerializer;
import com.netflix.astyanax.serializers.LongSerializer;
public class TokenTestOperation extends TestOperation {
private final ByteBuffer rowKey;
public TokenTestOperation(ByteBuffer rowKey) {
this.rowKey = rowKey;
}
public TokenTestOperation(BigInteger rowKey) {
this.rowKey = BigIntegerSerializer.get().toByteBuffer(rowKey);
}
public TokenTestOperation(Long rowKey) {
this.rowKey = LongSerializer.get().toByteBuffer(rowKey);
}
@Override
public ByteBuffer getRowKey() {
return rowKey.duplicate();
}
}
| 7,542 |
0 | Create_ds/astyanax/astyanax-test/src/test/java/com/netflix/astyanax | Create_ds/astyanax/astyanax-test/src/test/java/com/netflix/astyanax/test/TestKeyspace.java | /*******************************************************************************
* Copyright 2011 Netflix
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
******************************************************************************/
package com.netflix.astyanax.test;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import com.netflix.astyanax.AstyanaxConfiguration;
import com.netflix.astyanax.ColumnMutation;
import com.netflix.astyanax.Keyspace;
import com.netflix.astyanax.MutationBatch;
import com.netflix.astyanax.SerializerPackage;
import com.netflix.astyanax.connectionpool.ConnectionPool;
import com.netflix.astyanax.connectionpool.Operation;
import com.netflix.astyanax.connectionpool.OperationResult;
import com.netflix.astyanax.connectionpool.TokenRange;
import com.netflix.astyanax.connectionpool.exceptions.ConnectionException;
import com.netflix.astyanax.connectionpool.exceptions.OperationException;
import com.netflix.astyanax.cql.CqlStatement;
import com.netflix.astyanax.ddl.KeyspaceDefinition;
import com.netflix.astyanax.ddl.SchemaChangeResult;
import com.netflix.astyanax.model.ColumnFamily;
import com.netflix.astyanax.partitioner.Partitioner;
import com.netflix.astyanax.query.ColumnFamilyQuery;
import com.netflix.astyanax.retry.RetryPolicy;
import com.netflix.astyanax.serializers.SerializerPackageImpl;
/**
 * Minimal stub implementation of {@link Keyspace} for unit tests.
 * <p>
 * Only the keyspace name and the describeRing variants are backed by real
 * state (set via the constructor and {@link #setTokenRange(List)});
 * {@link #getSerializerPackage(String, boolean)} returns the default package,
 * and every other operation is a no-op returning {@code null}.
 */
public class TestKeyspace implements Keyspace {
    // Name reported by getKeyspaceName().
    private String keyspaceName;
    // Token ranges handed back by the describeRing variants.
    private List<TokenRange> tokenRange;

    public TestKeyspace(String name) {
        this.keyspaceName = name;
    }

    /** Sets the token ranges that the describeRing methods will report. */
    public void setTokenRange(List<TokenRange> tokens) {
        this.tokenRange = tokens;
    }

    @Override
    public String getKeyspaceName() {
        return this.keyspaceName;
    }

    @Override
    public List<TokenRange> describeRing() throws ConnectionException {
        return this.tokenRange;
    }

    @Override
    public MutationBatch prepareMutationBatch() {
        return null;
    }

    @Override
    public <K, C> ColumnFamilyQuery<K, C> prepareQuery(ColumnFamily<K, C> cf) {
        return null;
    }

    @Override
    public <K, C> ColumnMutation prepareColumnMutation(
            ColumnFamily<K, C> columnFamily, K rowKey, C column) {
        return null;
    }

    @Override
    public AstyanaxConfiguration getConfig() {
        return null;
    }

    @Override
    public KeyspaceDefinition describeKeyspace() throws ConnectionException {
        return null;
    }

    // Always answers with the library's default serializer package, so
    // serializer lookups in tests succeed regardless of the column family.
    @Override
    public SerializerPackage getSerializerPackage(String columnFamily,
            boolean ignoreErrors) {
        return SerializerPackageImpl.DEFAULT_SERIALIZER_PACKAGE;
    }

    @Override
    public OperationResult<Void> testOperation(Operation<?, ?> operation)
            throws ConnectionException {
        return null;
    }

    @Override
    public <K, C> OperationResult<Void> truncateColumnFamily(
            ColumnFamily<K, C> columnFamily) throws OperationException,
            ConnectionException {
        return null;
    }

    @Override
    public OperationResult<Void> testOperation(Operation<?, ?> operation,
            RetryPolicy retry) throws ConnectionException {
        return null;
    }

    // NOTE(review): unlike the other describeRing variants, the cached
    // overload returns null rather than the configured ranges — confirm
    // whether any test relies on that asymmetry.
    @Override
    public List<TokenRange> describeRing(boolean cached) throws ConnectionException {
        return null;
    }

    @Override
    public List<TokenRange> describeRing(String dc) throws ConnectionException {
        return this.tokenRange;
    }

    @Override
    public List<TokenRange> describeRing(String dc, String rack) throws ConnectionException {
        return this.tokenRange;
    }

    @Override
    public OperationResult<Void> truncateColumnFamily(String columnFamily) throws ConnectionException {
        return null;
    }

    @Override
    public String describePartitioner() throws ConnectionException {
        return null;
    }

    @Override
    public OperationResult<SchemaChangeResult> dropColumnFamily(String columnFamilyName) throws ConnectionException {
        return null;
    }

    @Override
    public <K, C> OperationResult<SchemaChangeResult> dropColumnFamily(ColumnFamily<K, C> columnFamily) throws ConnectionException {
        return null;
    }

    @Override
    public OperationResult<SchemaChangeResult> dropKeyspace() throws ConnectionException {
        return null;
    }

    @Override
    public <K, C> OperationResult<SchemaChangeResult> createColumnFamily(ColumnFamily<K, C> columnFamily, Map<String, Object> options)
            throws ConnectionException {
        return null;
    }

    @Override
    public <K, C> OperationResult<SchemaChangeResult> updateColumnFamily(ColumnFamily<K, C> columnFamily, Map<String, Object> options)
            throws ConnectionException {
        return null;
    }

    @Override
    public OperationResult<SchemaChangeResult> createKeyspace(Map<String, Object> options) throws ConnectionException {
        return null;
    }

    @Override
    public OperationResult<SchemaChangeResult> updateKeyspace(Map<String, Object> options) throws ConnectionException {
        return null;
    }

    @Override
    public Map<String, List<String>> describeSchemaVersions() throws ConnectionException {
        // TODO Auto-generated method stub
        return null;
    }

    @Override
    public ConnectionPool<?> getConnectionPool() throws ConnectionException {
        // TODO Auto-generated method stub
        return null;
    }

    @Override
    public CqlStatement prepareCqlStatement() {
        // TODO Auto-generated method stub
        return null;
    }

    @Override
    public Partitioner getPartitioner() throws ConnectionException {
        // TODO Auto-generated method stub
        return null;
    }

    @Override
    public OperationResult<SchemaChangeResult> createKeyspace(Map<String, Object> options,
            Map<ColumnFamily, Map<String, Object>> cfs) throws ConnectionException {
        // TODO Auto-generated method stub
        return null;
    }

    @Override
    public OperationResult<SchemaChangeResult> createColumnFamily(
            Properties props) {
        // TODO Auto-generated method stub
        return null;
    }

    @Override
    public OperationResult<SchemaChangeResult> createKeyspace(
            Properties properties) throws ConnectionException {
        // TODO Auto-generated method stub
        return null;
    }

    @Override
    public OperationResult<SchemaChangeResult> updateKeyspace(Properties props)
            throws ConnectionException {
        // TODO Auto-generated method stub
        return null;
    }

    @Override
    public Properties getKeyspaceProperties() throws ConnectionException {
        // TODO Auto-generated method stub
        return null;
    }

    @Override
    public Properties getColumnFamilyProperties(String columnFamily)
            throws ConnectionException {
        // TODO Auto-generated method stub
        return null;
    }

    @Override
    public OperationResult<SchemaChangeResult> updateColumnFamily(
            Properties props) throws ConnectionException {
        // TODO Auto-generated method stub
        return null;
    }

    @Override
    public OperationResult<SchemaChangeResult> updateColumnFamily(
            Map<String, Object> options) throws ConnectionException {
        // TODO Auto-generated method stub
        return null;
    }

    @Override
    public OperationResult<SchemaChangeResult> createColumnFamily(
            Map<String, Object> options) throws ConnectionException {
        // TODO Auto-generated method stub
        return null;
    }

    @Override
    public OperationResult<SchemaChangeResult> createKeyspaceIfNotExists(
            Map<String, Object> options) throws ConnectionException {
        // TODO Auto-generated method stub
        return null;
    }

    @Override
    public OperationResult<SchemaChangeResult> createKeyspaceIfNotExists(
            Properties properties) throws ConnectionException {
        // TODO Auto-generated method stub
        return null;
    }

    @Override
    public OperationResult<SchemaChangeResult> createKeyspaceIfNotExists(
            Map<String, Object> options,
            Map<ColumnFamily, Map<String, Object>> cfs)
            throws ConnectionException {
        // TODO Auto-generated method stub
        return null;
    }
}
| 7,543 |
0 | Create_ds/astyanax/astyanax-test/src/test/java/com/netflix/astyanax | Create_ds/astyanax/astyanax-test/src/test/java/com/netflix/astyanax/test/TestConstants.java | /*******************************************************************************
* Copyright 2011 Netflix
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
******************************************************************************/
package com.netflix.astyanax.test;
import java.nio.ByteBuffer;
import java.util.UUID;
import com.netflix.astyanax.model.ColumnFamily;
import com.netflix.astyanax.model.ColumnType;
import com.netflix.astyanax.serializers.AnnotatedCompositeSerializer;
import com.netflix.astyanax.serializers.ByteBufferSerializer;
import com.netflix.astyanax.serializers.LongSerializer;
import com.netflix.astyanax.serializers.StringSerializer;
import com.netflix.astyanax.serializers.TimeUUIDSerializer;
/**
 * Shared column-family and cluster constants used across the Astyanax test
 * suite. All column families are declared with the serializers the tests
 * expect for their key/column types.
 */
public class TestConstants {
    // Plain string-keyed, string-named column families.
    public static final ColumnFamily<String, String> CF_STANDARD1 = new ColumnFamily<String, String>(
            "Standard1", StringSerializer.get(), StringSerializer.get());
    public static final ColumnFamily<String, Long> CF_LONGCOLUMN = new ColumnFamily<String, Long>(
            "LongColumn1", StringSerializer.get(), LongSerializer.get());
    public static final ColumnFamily<String, String> CF_STANDARD2 = new ColumnFamily<String, String>(
            "Standard2", StringSerializer.get(), StringSerializer.get());
    // Super-column variants.
    public static final ColumnFamily<String, String> CF_SUPER1 = new ColumnFamily<String, String>(
            "Super1", StringSerializer.get(), StringSerializer.get(),
            ColumnType.SUPER);
    public static final ColumnFamily<String, String> CF_COUNTER1 = new ColumnFamily<String, String>(
            "Counter1", StringSerializer.get(), StringSerializer.get());
    public static final ColumnFamily<String, String> CF_COUNTER_SUPER1 = new ColumnFamily<String, String>(
            "CounterSuper1", StringSerializer.get(), StringSerializer.get(),
            ColumnType.SUPER);
    // NOTE(review): CF_NOT_DEFINED and CF_EMPTY both point at the same
    // "NotDefined" column family — presumably intentional (both name a CF
    // absent from the schema), but confirm it is not a copy-paste slip.
    public static final ColumnFamily<String, String> CF_NOT_DEFINED = new ColumnFamily<String, String>(
            "NotDefined", StringSerializer.get(), StringSerializer.get());
    public static final ColumnFamily<String, String> CF_EMPTY = new ColumnFamily<String, String>(
            "NotDefined", StringSerializer.get(), StringSerializer.get());
    // Composite-column and composite-key column families.
    public static final ColumnFamily<String, TestCompositeType> CF_COMPOSITE = new ColumnFamily<String, TestCompositeType>(
            "CompositeColumn", StringSerializer.get(),
            new AnnotatedCompositeSerializer<TestCompositeType>(
                    TestCompositeType.class));
    public static final ColumnFamily<ByteBuffer, ByteBuffer> CF_COMPOSITE_CSV = new ColumnFamily<ByteBuffer, ByteBuffer>(
            "CompositeCsv", ByteBufferSerializer.get(),
            ByteBufferSerializer.get());
    public static final ColumnFamily<TestCompositeType, String> CF_COMPOSITE_KEY = new ColumnFamily<TestCompositeType, String>(
            "CompositeKey",
            new AnnotatedCompositeSerializer<TestCompositeType>(
                    TestCompositeType.class), StringSerializer.get());
    public static final ColumnFamily<String, UUID> CF_TIME_UUID = new ColumnFamily<String, UUID>(
            "TimeUUID1", StringSerializer.get(), TimeUUIDSerializer.get());
    // Serializer reused both standalone and by CF_CLICK_STREAM below.
    public static final AnnotatedCompositeSerializer<SessionEvent> SE_SERIALIZER = new AnnotatedCompositeSerializer<SessionEvent>(
            SessionEvent.class);
    public static final ColumnFamily<String, SessionEvent> CF_CLICK_STREAM = new ColumnFamily<String, SessionEvent>(
            "ClickStream", StringSerializer.get(), SE_SERIALIZER);

    // Cluster/keyspace targeted by the integration tests.
    public static final String CLUSTER_NAME = "TestCluster1";
    public static final String KEYSPACE_NAME = "Keyspace1";
}
| 7,544 |
0 | Create_ds/astyanax/astyanax-test/src/test/java/com/netflix/astyanax | Create_ds/astyanax/astyanax-test/src/test/java/com/netflix/astyanax/retry/BoundedExponentialBackoffTest.java | package com.netflix.astyanax.retry;
import static com.netflix.astyanax.retry.ExponentialBackoffTest.setAttemptCount;
import static org.junit.Assert.assertTrue;
import org.junit.Test;
public final class BoundedExponentialBackoffTest {
@Test
public void testSleepTimeNeverNegative() throws NoSuchFieldException, IllegalAccessException {
BoundedExponentialBackoff backoff = new BoundedExponentialBackoff(500, 5000, -1);
for(int i = 0; i < 1000; i++) {
setAttemptCount(backoff, i);
assertTrue("Backoff at retry " + i + " was not positive", backoff.getSleepTimeMs() >= 0);
}
}
} | 7,545 |
0 | Create_ds/astyanax/astyanax-test/src/test/java/com/netflix/astyanax | Create_ds/astyanax/astyanax-test/src/test/java/com/netflix/astyanax/retry/ExponentialBackoffTest.java | package com.netflix.astyanax.retry;
import java.lang.reflect.Field;
import static org.junit.Assert.assertTrue;
import org.junit.Test;
public final class ExponentialBackoffTest {
@Test
public void testSleepTimeNeverNegative() throws NoSuchFieldException, IllegalAccessException {
ExponentialBackoff backoff = new ExponentialBackoff(500, -1);
for(int i = 22; i < 1000; i++) {
setAttemptCount(backoff, i);
assertTrue("Backoff at retry " + i + " was not positive", backoff.getSleepTimeMs() >= 0);
}
}
public static void setAttemptCount(SleepingRetryPolicy backoff, int attempt)
throws NoSuchFieldException, IllegalAccessException {
Field attempts = SleepingRetryPolicy.class.getDeclaredField("attempts");
attempts.setAccessible(true);
attempts.setInt(backoff, attempt);
}
} | 7,546 |
0 | Create_ds/astyanax/astyanax-test/src/test/java/com/netflix/astyanax | Create_ds/astyanax/astyanax-test/src/test/java/com/netflix/astyanax/util/ExecuteWithRetryTest.java | package com.netflix.astyanax.util;
import org.junit.Test;
import com.netflix.astyanax.Execution;
/**
 * Placeholder test class for an execute-with-retry wrapper.
 * NOTE(review): the nested wrapper below is never exercised and
 * {@link #testRetry()} is empty — nothing is actually verified here;
 * presumably left as a starting point for a future test.
 */
public class ExecuteWithRetryTest {
    /** Scaffolding wrapper capturing an Execution to retry; currently unused. */
    public static class ExecuteWithRetry<T> {
        // Execution held for a retry loop that was never written.
        private final Execution<T> execution;

        public ExecuteWithRetry(Execution<T> execution) {
            this.execution = execution;
        }
    };

    /** Empty stub — performs no assertions. */
    @Test
    public void testRetry() {
    }
}
| 7,547 |
0 | Create_ds/astyanax/astyanax-test/src/test/java/com/netflix/astyanax | Create_ds/astyanax/astyanax-test/src/test/java/com/netflix/astyanax/util/RateLimiterTest.java | package com.netflix.astyanax.util;
import junit.framework.Assert;
import org.junit.Test;
import com.netflix.astyanax.connectionpool.impl.ConnectionPoolConfigurationImpl;
import com.netflix.astyanax.connectionpool.impl.SimpleRateLimiterImpl;
/**
 * Exercises {@link SimpleRateLimiterImpl}: at most
 * connectionLimiterMaxPendingCount requests may be admitted inside a rolling
 * window of connectionLimiterWindowSize milliseconds. Timestamps are supplied
 * explicitly, so the tests are deterministic.
 */
public class RateLimiterTest {

    @Test
    public void testRateLimiter() {
        ConnectionPoolConfigurationImpl config = new ConnectionPoolConfigurationImpl(
                "cluster_keyspace");
        config.setConnectionLimiterMaxPendingCount(10);
        config.setConnectionLimiterWindowSize(1000);

        SimpleRateLimiterImpl rateLimiter = new SimpleRateLimiterImpl(config);

        // Ten closely spaced requests (10 ms apart) all fit inside the window.
        int clock = 0;
        for (int request = 0; request < 10; request++) {
            clock += 10;
            Assert.assertTrue(rateLimiter.check(clock));
        }
        // An eleventh request inside the same window is rejected...
        Assert.assertFalse(rateLimiter.check(clock + 10));
        // ...but is admitted once the window has rolled past the first request.
        Assert.assertTrue(rateLimiter.check(clock + 1000));
    }

    @Test
    public void testRateLimiter2() {
        ConnectionPoolConfigurationImpl config = new ConnectionPoolConfigurationImpl(
                "cluster_keyspace");
        config.setConnectionLimiterMaxPendingCount(10);
        config.setConnectionLimiterWindowSize(1000);

        SimpleRateLimiterImpl rateLimiter = new SimpleRateLimiterImpl(config);

        // At 100 ms spacing the window never fills, so every request —
        // including the eleventh — is admitted.
        int clock = 0;
        for (int request = 0; request < 10; request++) {
            clock += 100;
            Assert.assertTrue(rateLimiter.check(clock));
        }
        Assert.assertTrue(rateLimiter.check(clock + 100));
    }
}
| 7,548 |
0 | Create_ds/astyanax/astyanax-test/src/test/java/com/netflix/astyanax | Create_ds/astyanax/astyanax-test/src/test/java/com/netflix/astyanax/util/TimeUUIDTest.java | package com.netflix.astyanax.util;
import java.util.UUID;
import java.util.concurrent.TimeUnit;
import junit.framework.Assert;
import org.junit.Ignore;
import org.junit.Test;
import com.netflix.astyanax.Clock;
import com.netflix.astyanax.clock.MicrosecondsSyncClock;
/**
 * Tests for {@link TimeUUIDUtils} microsecond-resolution time-UUID helpers.
 */
public class TimeUUIDTest {
    // Disabled: compares wall-clock time against the UUID timestamp at 10 ms
    // granularity — presumably too flaky to run routinely; confirm before
    // re-enabling.
    @Test
    @Ignore
    public void testMicrosResolution() {
        Clock clock = new MicrosecondsSyncClock();
        long time = clock.getCurrentTime();
        UUID uuid = TimeUUIDUtils.getUniqueTimeUUIDinMicros();
        long uuidTime = TimeUUIDUtils.getMicrosTimeFromUUID(uuid);
        // Compare at coarse granularity to tolerate the time elapsed between
        // the two reads.
        Assert.assertEquals(time / 10000, uuidTime / 10000);
    }

    /**
     * Round-trip check: extracting the micros timestamp from a UUID, adding
     * one day, and building a new UUID must shift the extracted timestamp by
     * exactly one day of microseconds.
     */
    @Test
    public void testAddMicrosReslution() {
        UUID uuid = TimeUUIDUtils.getUniqueTimeUUIDinMicros();
        long uuidTime = TimeUUIDUtils.getMicrosTimeFromUUID(uuid);
        UUID uuidPlusOneDay = TimeUUIDUtils.getMicrosTimeUUID(uuidTime
                + TimeUnit.DAYS.toMicros(1));
        long uuidTimePlusOneDay = TimeUUIDUtils
                .getMicrosTimeFromUUID(uuidPlusOneDay);
        Assert.assertEquals(TimeUnit.DAYS.toMicros(1), uuidTimePlusOneDay
                - uuidTime);
    }
}
| 7,549 |
0 | Create_ds/astyanax/astyanax-test/src/test/java/com/netflix/astyanax | Create_ds/astyanax/astyanax-test/src/test/java/com/netflix/astyanax/util/SingletonEmbeddedCassandra.java | package com.netflix.astyanax.util;
import com.netflix.astyanax.test.EmbeddedCassandra;
/**
 * Process-wide singleton wrapping {@link EmbeddedCassandra}, using the
 * initialization-on-demand holder idiom so the embedded server is started
 * lazily and thread-safely on the first {@link #getInstance()} call.
 */
public class SingletonEmbeddedCassandra {

    /** Holder defers instantiation until getInstance() is first invoked. */
    private static class Holder {
        private static final SingletonEmbeddedCassandra instance = new SingletonEmbeddedCassandra();
    }

    private final EmbeddedCassandra cassandra;

    private SingletonEmbeddedCassandra() {
        try {
            cassandra = new EmbeddedCassandra();
            cassandra.start();
        } catch (Exception e) {
            // Preserve the original cause so startup failures are diagnosable.
            throw new RuntimeException("Failed to start embedded cassandra", e);
        }
    }

    /** Returns the shared instance, starting the embedded server on first use. */
    public static SingletonEmbeddedCassandra getInstance() {
        return Holder.instance;
    }

    /**
     * Best-effort shutdown of the embedded server. Stop failures are
     * deliberately ignored (typically the JVM is exiting anyway), but a
     * thread interrupt is no longer swallowed silently: the interrupt status
     * is restored so callers can observe it.
     */
    public void shutdown() {
        try {
            cassandra.stop();
        } catch (Exception e) {
            if (e instanceof InterruptedException) {
                // Re-assert the interrupt instead of swallowing it.
                Thread.currentThread().interrupt();
            }
            // Otherwise best-effort: a failed stop is not actionable here.
        }
    }
}
| 7,550 |
0 | Create_ds/astyanax/astyanax-test/src/test/java/com/netflix/astyanax | Create_ds/astyanax/astyanax-test/src/test/java/com/netflix/astyanax/partitioner/PartitionerTest.java | package com.netflix.astyanax.partitioner;
import java.util.List;
import org.junit.Test;
import com.netflix.astyanax.connectionpool.TokenRange;
/**
 * Smoke tests for {@link BigInteger127Partitioner} token-range splitting.
 * The resulting ranges are printed for manual inspection rather than
 * asserted against expected values.
 */
public class PartitionerTest {

    @Test
    public void testSplit() {
        printAll(new BigInteger127Partitioner().splitTokenRange(4));
    }

    @Test
    public void testSplitWithStartEnd() {
        BigInteger127Partitioner partitioner = new BigInteger127Partitioner();
        printAll(partitioner.splitTokenRange(
                BigInteger127Partitioner.MINIMUM.toString(),
                BigInteger127Partitioner.MAXIMUM.toString(), 4));
    }

    @Test
    public void testSplitWithZeros() {
        printAll(new BigInteger127Partitioner().splitTokenRange("0", "0", 4));
    }

    /** Dumps each token range to stdout, one per line. */
    private static void printAll(List<TokenRange> ranges) {
        for (TokenRange range : ranges) {
            System.out.println(range);
        }
    }
}
| 7,551 |
0 | Create_ds/astyanax/astyanax-test/src/test/java/com/netflix/astyanax/contrib | Create_ds/astyanax/astyanax-test/src/test/java/com/netflix/astyanax/contrib/valve/MultiThreadTestControl.java | package com.netflix.astyanax.contrib.valve;
import java.util.concurrent.Callable;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.CyclicBarrier;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
/**
 * Small harness for hammering a piece of code from several threads at once.
 * {@link #runTest(Callable)} starts the workers, each of which repeatedly
 * invokes the supplied callable until {@link #stopTest()} is called.
 */
public class MultiThreadTestControl {

    // threads and thread pool
    final int numThreads;
    ExecutorService threadPool;

    // multi-thread test control
    // barrier: lines all workers up so they begin iterating simultaneously
    final CyclicBarrier barrier;
    // stop: flipped to true to tell every worker to exit its loop
    final AtomicBoolean stop = new AtomicBoolean(false);
    // latch: counted down by each worker as it exits, so stopTest() can wait
    // until successCount-style accumulators have stopped moving
    final CountDownLatch latch;

    /** Defaults to 8 worker threads. */
    public MultiThreadTestControl() {
        this(8);
    }

    public MultiThreadTestControl(int nThreads) {
        numThreads = nThreads;
        barrier = new CyclicBarrier(numThreads);
        latch = new CountDownLatch(numThreads);
    }

    /**
     * Starts {@code numThreads} workers; each waits at the barrier, then
     * invokes {@code perThreadIterationCall} in a tight loop until stopped.
     */
    public void runTest(final Callable<Void> perThreadIterationCall) {
        threadPool = Executors.newFixedThreadPool(numThreads);
        for (int worker = 0; worker < numThreads; worker++) {
            threadPool.submit(new Callable<Void>() {
                @Override
                public Void call() throws Exception {
                    barrier.await(); // start in lock-step with the other workers
                    while (!stop.get()) {
                        perThreadIterationCall.call();
                    }
                    latch.countDown(); // signal this worker has wound down
                    return null;
                }
            });
        }
    }

    /**
     * Signals all workers to stop, interrupts any that are blocked, and waits
     * up to one second for them to finish their current iteration.
     */
    public void stopTest() {
        stop.set(true);
        threadPool.shutdownNow();
        try {
            latch.await(1, TimeUnit.SECONDS);
        } catch (InterruptedException e) {
            throw new RuntimeException(e);
        }
    }
}
| 7,552 |
0 | Create_ds/astyanax/astyanax-test/src/test/java/com/netflix/astyanax/contrib | Create_ds/astyanax/astyanax-test/src/test/java/com/netflix/astyanax/contrib/valve/RollingTimeWindowValveTest.java | package com.netflix.astyanax.contrib.valve;
import java.util.concurrent.Callable;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.atomic.AtomicReference;
import org.junit.Assert;
import org.junit.Test;
/**
 * Timing-based checks for {@link RollingTimeWindowValve}: a valve configured
 * for N permits per second, split across K sub-buckets, should admit roughly
 * N requests per second regardless of how many threads are hammering it.
 * <p>
 * NOTE(review): none of these methods carry an {@code @Test} annotation, so
 * JUnit never runs them — presumably disabled because they are wall-clock
 * sensitive; confirm before re-enabling.
 */
public class RollingTimeWindowValveTest {
    // Single thread, one bucket: exactly the per-second quota should pass.
    public void testSingleThreadSingleBucket() throws Exception {
        final RollingTimeWindowValve valve = new RollingTimeWindowValve(10000, 1);
        Long counter = new Long(0L);
        for (int i=0; i<100000; i++) {
            boolean success = valve.decrementAndCheckQuota();
            if (success) {
                counter++;
            }
        }
        Assert.assertTrue("Success: " + counter, counter == 10000);
    }

    public void testSingleThreadMultipleBucketsSingleSecond() throws Exception {
        final RollingTimeWindowValve valve = new RollingTimeWindowValve(10000, 10);
        runMultiThreadTest(valve, 1, 750, 8000);
    }

    public void testSingleThreadMultipleBucketsMultipleSeconds() throws Exception {
        final RollingTimeWindowValve valve = new RollingTimeWindowValve(10000, 10);
        runMultiThreadTest(valve, 1, 1750, 18000);
    }

    public void testMultipleThreadsSingleBucketSingleSecond() throws Exception {
        final RollingTimeWindowValve valve = new RollingTimeWindowValve(10000, 1);
        runMultiThreadTest(valve, 8, 800, 10000);
    }

    public void testMultipleThreadsSingleBucketMultipleSeconds() throws Exception {
        final RollingTimeWindowValve valve = new RollingTimeWindowValve(10000, 1);
        runMultiThreadTest(valve, 8, 1700, 20000);
    }

    public void testMultipleThreadsMultipleBucketsMultipleSeconds() throws Exception {
        final RollingTimeWindowValve valve = new RollingTimeWindowValve(10000, 10);
        runMultiThreadTest(valve, 8, 1750, 18000);
    }

    /**
     * Hammers the valve from {@code numThreads} workers for
     * {@code sleepMillis} ms and asserts the admitted count lands within 10%
     * of {@code expectedSuccesses}.
     */
    private void runMultiThreadTest(final RollingTimeWindowValve valve, int numThreads, int sleepMillis, long expectedSuccesses) throws Exception {
        final AtomicLong successCount = new AtomicLong(0L);
        final MultiThreadTestControl testControl = new MultiThreadTestControl(numThreads);
        testControl.runTest(new Callable<Void>() {
            @Override
            public Void call() throws Exception {
                boolean success = valve.decrementAndCheckQuota();
                if (success) {
                    successCount.incrementAndGet();
                }
                return null;
            }
        });
        Thread.sleep(sleepMillis);
        testControl.stopTest();
        // Allow up to 10% deviation from the theoretical admitted count.
        long delta = Math.abs(expectedSuccesses-successCount.get());
        int percentageDiff = (int) (delta*100/expectedSuccesses);
        Assert.assertTrue("Success: " + successCount.get() + ", expected: " + expectedSuccesses + ", percentageDiff: " + percentageDiff, percentageDiff < 10);
        //System.out.println("Success: " + successCount.get() + ", expected: " + expectedSuccesses + ", percentageDiff: " + percentageDiff);
    }

    // Changes the valve's rate mid-run (pausing the workers around each
    // change) and checks the total admitted count tracks the varying rate.
    // NOTE(review): the final assertion is commented out, so this only
    // exercises the code paths without verifying the count.
    public void testChangeInRate() throws Exception {
        final RollingTimeWindowValve valve = new RollingTimeWindowValve(10000, 10);
        final AtomicReference<PauseTest> pause = new AtomicReference<PauseTest>(new PauseTest(8));
        final AtomicLong successCount = new AtomicLong(0L);
        final MultiThreadTestControl testControl = new MultiThreadTestControl(8);
        testControl.runTest(new Callable<Void>() {
            @Override
            public Void call() throws Exception {
                if (pause.get().shouldPause()) {
                    pause.get().waitOnResume();
                }
                boolean success = valve.decrementAndCheckQuota();
                if (success) {
                    successCount.incrementAndGet();
                }
                return null;
            }
        });
        Thread.sleep(1450);
        pause.get().pauseTest();
        valve.setRatePerSecond(20000L);
        pause.get().resumeTest();
        Thread.sleep(970);
        pause.set(new PauseTest(8));
        pause.get().pauseTest();
        valve.setRatePerSecond(5000L);
        pause.get().resumeTest();
        Thread.sleep(1000);
        testControl.stopTest();
        long expectedSuccesses = 40000; // 10000 for the 1st 1000 ms. and then 20000 for the next 1000 ms and 5000 for the last 1000 ms
        long delta = Math.abs(expectedSuccesses-successCount.get());
        int percentageDiff = (int) (delta*100/expectedSuccesses);
        //Assert.assertTrue("Success: " + successCount.get() + ", expected: " + expectedSuccesses + ", percentageDiff: " + percentageDiff, percentageDiff < 10);
        //System.out.println("Success: " + successCount.get() + ", expected: " + expectedSuccesses + ", percentageDiff: " + percentageDiff);
    }

    /**
     * One-shot pause gate for worker threads: once paused, workers block in
     * waitOnResume() until resumeTest() opens the latch. A fresh instance is
     * required for each pause/resume cycle (the latch cannot be reset).
     */
    private class PauseTest {
        private final CountDownLatch waitLatch = new CountDownLatch(1);
        private final AtomicBoolean pauseEnabled = new AtomicBoolean(false);

        // numWorkersToPause is currently unused — kept for API symmetry.
        private PauseTest(int numWorkersToPause) {
        }

        private void pauseTest() {
            pauseEnabled.set(true);
        }

        private void resumeTest() {
            pauseEnabled.set(false);
            waitLatch.countDown();
        }

        private boolean shouldPause() {
            return pauseEnabled.get();
        }

        private void waitOnResume() {
            try {
                waitLatch.await();
            } catch (InterruptedException e) {
                throw new RuntimeException(e);
            }
        }
    }
}
| 7,553 |
0 | Create_ds/astyanax/astyanax-test/src/test/java/com/netflix/astyanax/contrib | Create_ds/astyanax/astyanax-test/src/test/java/com/netflix/astyanax/contrib/valve/TimeWindowValveTest.java | package com.netflix.astyanax.contrib.valve;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.Callable;
import java.util.concurrent.atomic.AtomicLong;
import junit.framework.Assert;
import org.junit.Test;
import com.netflix.astyanax.contrib.valve.TimeWindowValve.RequestStatus;
/**
 * Tests for {@link TimeWindowValve}: a fixed window admits at most its quota
 * of requests (Permitted), rejects the overflow, and reports PastWindow for
 * requests arriving after the window has closed.
 */
public class TimeWindowValveTest {
    @Test
    public void testSingleThread1SecWindow() throws Exception {
        TimeWindowValve window = new TimeWindowValve(1000L, System.currentTimeMillis(), 1000);
        testSingleThread(window, 100000, 1000);
    }

    @Test
    public void testSingleThread100MsWindow() throws Exception {
        TimeWindowValve window = new TimeWindowValve(1000L, System.currentTimeMillis(), 100);
        testSingleThread(window, 100000, 1000);
    }

    /**
     * Fires {@code numRequests} requests through the valve and asserts that
     * exactly {@code expectedSuccesses} of them were Permitted.
     */
    private void testSingleThread(final TimeWindowValve window, int numRequests, int expectedSuccesses) {
        // Tally of each RequestStatus observed.
        Map<RequestStatus, Long> status = new HashMap<RequestStatus, Long>();
        for (int i=0; i<numRequests; i++) {
            RequestStatus ret = window.decrementAndCheckQuota();
            Long count = status.get(ret);
            if (count == null) {
                status.put(ret, 1L);
            } else {
                status.put(ret, ++count);
            }
        }
        Assert.assertTrue(expectedSuccesses == status.get(RequestStatus.Permitted));
    }

    @Test
    public void testMultipleThreads1SecWindow() throws Exception {
        final TimeWindowValve window = new TimeWindowValve(100000L, System.currentTimeMillis(), 1000);
        testMultipleThreads(window, 100000L, 300);
    }

    @Test
    public void testMultipleThreads100MsWindow() throws Exception {
        final TimeWindowValve window = new TimeWindowValve(10000L, System.currentTimeMillis(), 500);
        testMultipleThreads(window, 10000L, 300);
    }

    /**
     * Hammers the valve from the default worker count for
     * {@code runTestMillis} ms and asserts the Permitted count lands within
     * 10% of {@code expectedSuccesses}.
     */
    private void testMultipleThreads(final TimeWindowValve window, long expectedSuccesses, int runTestMillis) throws Exception {
        final MultiThreadTestControl testControl = new MultiThreadTestControl();
        // track success rate of rps that is allowed to pass through
        final AtomicLong successCount = new AtomicLong(0L);
        testControl.runTest(new Callable<Void>() {
            @Override
            public Void call() throws Exception {
                RequestStatus status = window.decrementAndCheckQuota();
                if (status == RequestStatus.Permitted) {
                    successCount.incrementAndGet();
                }
                return null;
            }
        });
        Thread.sleep(runTestMillis);
        testControl.stopTest();
        // Allow up to 10% deviation from the theoretical admitted count.
        long delta = Math.abs(expectedSuccesses-successCount.get());
        int percentageDiff = (int) (delta*100/expectedSuccesses);
        Assert.assertTrue("Success: " + successCount.get() + ", expected: " + expectedSuccesses + ", percentageDiff: " + percentageDiff, percentageDiff < 10);
    }

    // Requests inside a 100 ms window are Permitted; after sleeping past the
    // window's end every further request must report PastWindow.
    @Test
    public void testPastWindow() throws Exception {
        final TimeWindowValve window = new TimeWindowValve(100000L, System.currentTimeMillis(), 100);
        for (int i=0; i<1000; i++) {
            RequestStatus status = window.decrementAndCheckQuota();
            Assert.assertEquals(RequestStatus.Permitted, status);
        }
        Thread.sleep(150);
        for (int i=0; i<1000; i++) {
            RequestStatus status = window.decrementAndCheckQuota();
            Assert.assertEquals(RequestStatus.PastWindow, status);
        }
    }
}
| 7,554 |
0 | Create_ds/astyanax/astyanax-test/src/test/java/com/netflix/astyanax | Create_ds/astyanax/astyanax-test/src/test/java/com/netflix/astyanax/thrift/CqlTest.java | package com.netflix.astyanax.thrift;
import java.util.Iterator;
import java.util.Map;
import java.util.UUID;
import java.util.Map.Entry;
import junit.framework.Assert;
import com.netflix.astyanax.shaded.org.apache.cassandra.db.marshal.UTF8Type;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.collect.ImmutableMap;
import com.netflix.astyanax.AstyanaxContext;
import com.netflix.astyanax.Keyspace;
import com.netflix.astyanax.connectionpool.NodeDiscoveryType;
import com.netflix.astyanax.connectionpool.OperationResult;
import com.netflix.astyanax.connectionpool.impl.ConnectionPoolConfigurationImpl;
import com.netflix.astyanax.connectionpool.impl.ConnectionPoolType;
import com.netflix.astyanax.connectionpool.impl.CountingConnectionPoolMonitor;
import com.netflix.astyanax.cql.CqlSchema;
import com.netflix.astyanax.cql.CqlStatementResult;
import com.netflix.astyanax.ddl.KeyspaceDefinition;
import com.netflix.astyanax.impl.AstyanaxConfigurationImpl;
import com.netflix.astyanax.model.Column;
import com.netflix.astyanax.model.ColumnFamily;
import com.netflix.astyanax.model.ColumnList;
import com.netflix.astyanax.model.CqlResult;
import com.netflix.astyanax.model.Row;
import com.netflix.astyanax.model.Rows;
import com.netflix.astyanax.serializers.IntegerSerializer;
import com.netflix.astyanax.serializers.MapSerializer;
import com.netflix.astyanax.serializers.StringSerializer;
import com.netflix.astyanax.serializers.UUIDSerializer;
import com.netflix.astyanax.util.SingletonEmbeddedCassandra;
public class CqlTest {
private static Logger Log = LoggerFactory.getLogger(CqlTest.class);
private static Keyspace keyspace;
private static AstyanaxContext<Keyspace> keyspaceContext;
private static String TEST_CLUSTER_NAME = "cass_sandbox";
private static String TEST_KEYSPACE_NAME = "CqlTest";
private static final String SEEDS = "localhost:9160";
private static final long CASSANDRA_WAIT_TIME = 1000;
static ColumnFamily<Integer, String> CQL3_CF = ColumnFamily
.newColumnFamily("Cql3CF", IntegerSerializer.get(),
StringSerializer.get());
static ColumnFamily<String, String> User_CF = ColumnFamily.newColumnFamily(
"UserCF", StringSerializer.get(), StringSerializer.get());
static ColumnFamily<UUID, String> UUID_CF = ColumnFamily.newColumnFamily(
"uuidtest", UUIDSerializer.get(), StringSerializer.get());
    /**
     * Boots the shared embedded Cassandra instance and creates the test
     * keyspace and tables. The sleep gives the embedded server time to come
     * up before it is used.
     */
    @BeforeClass
    public static void setup() throws Exception {
        SingletonEmbeddedCassandra.getInstance();
        Thread.sleep(CASSANDRA_WAIT_TIME);
        createKeyspace();
    }
@AfterClass
public static void teardown() throws Exception {
if (keyspaceContext != null)
keyspaceContext.shutdown();
Thread.sleep(CASSANDRA_WAIT_TIME);
}
    /**
     * Builds and starts the Astyanax context against the embedded cluster,
     * then (re)creates the test keyspace with the {@code employees} and
     * {@code users} tables used by the tests. Any pre-existing keyspace is
     * dropped first (best-effort). Sleeps give schema changes time to settle.
     */
    public static void createKeyspace() throws Exception {
        keyspaceContext = new AstyanaxContext.Builder()
                .forCluster(TEST_CLUSTER_NAME)
                .forKeyspace(TEST_KEYSPACE_NAME)
                .withAstyanaxConfiguration(
                        new AstyanaxConfigurationImpl()
                                .setDiscoveryType(
                                        NodeDiscoveryType.RING_DESCRIBE)
                                .setConnectionPoolType(
                                        ConnectionPoolType.TOKEN_AWARE)
                                .setDiscoveryDelayInSeconds(60000)
                                .setTargetCassandraVersion("1.2")
                                .setCqlVersion("3.0.0"))
                .withConnectionPoolConfiguration(
                        new ConnectionPoolConfigurationImpl(TEST_CLUSTER_NAME
                                + "_" + TEST_KEYSPACE_NAME)
                                .setSocketTimeout(30000)
                                .setMaxTimeoutWhenExhausted(2000)
                                .setMaxConnsPerHost(10).setInitConnsPerHost(10)
                                .setSeeds(SEEDS))
                .withConnectionPoolMonitor(new CountingConnectionPoolMonitor())
                .buildKeyspace(ThriftFamilyFactory.getInstance());
        keyspaceContext.start();
        keyspace = keyspaceContext.getEntity();
        // Best-effort drop of a leftover keyspace from a previous run.
        try {
            keyspace.dropKeyspace();
            Thread.sleep(CASSANDRA_WAIT_TIME);
        } catch (Exception e) {
            Log.info("Error dropping keyspace " + e.getMessage());
        }
        // Single-node embedded cluster, so replication factor 1.
        keyspace.createKeyspace(ImmutableMap
                .<String, Object> builder()
                .put("strategy_options",
                        ImmutableMap.<String, Object> builder()
                                .put("replication_factor", "1").build())
                .put("strategy_class", "SimpleStrategy").build());
        Thread.sleep(CASSANDRA_WAIT_TIME);
        // Create the CQL3 tables exercised by the tests below.
        OperationResult<CqlStatementResult> result;
        result = keyspace
                .prepareCqlStatement()
                .withCql(
                        "CREATE TABLE employees (empID int, deptID int, first_name varchar, last_name varchar, PRIMARY KEY (empID, deptID));")
                .execute();
        result = keyspace
                .prepareCqlStatement()
                .withCql(
                        "CREATE TABLE users (id text PRIMARY KEY, given text, surname text, favs map<text, text>);")
                .execute();
        Thread.sleep(CASSANDRA_WAIT_TIME);
        KeyspaceDefinition ki = keyspaceContext.getEntity().describeKeyspace();
        Log.info("Describe Keyspace: " + ki.getName());
    }
    /**
     * Inserts two rows sharing the same partition key (empID 111) but
     * different clustering keys (deptID), selects by empID, and verifies the
     * result is non-empty. Column values are logged for manual inspection.
     */
    @Test
    public void testCompoundKey() throws Exception {
        OperationResult<CqlStatementResult> result;
        result = keyspace
                .prepareCqlStatement()
                .withCql(
                        "INSERT INTO employees (empID, deptID, first_name, last_name) VALUES (111, 222, 'eran', 'landau');")
                .execute();
        result = keyspace
                .prepareCqlStatement()
                .withCql(
                        "INSERT INTO employees (empID, deptID, first_name, last_name) VALUES (111, 233, 'netta', 'landau');")
                .execute();
        result = keyspace.prepareCqlStatement()
                .withCql("SELECT * FROM employees WHERE empId=111;")
                .execute();
        Assert.assertTrue(!result.getResult().getRows(CQL3_CF).isEmpty());
        for (Row<Integer, String> row : result.getResult().getRows(CQL3_CF)) {
            Log.info("CQL Key: " + row.getKey());
            ColumnList<String> columns = row.getColumns();
            Log.info(" empid : "
                    + columns.getIntegerValue("empid", null));
            Log.info(" deptid : "
                    + columns.getIntegerValue("deptid", null));
            Log.info(" first_name : "
                    + columns.getStringValue("first_name", null));
            Log.info(" last_name : "
                    + columns.getStringValue("last_name", null));
        }
    }
//@Test
// Disabled test: exercises the prepared-statement API by binding the four
// INSERT parameters positionally, then reading the row back via plain CQL.
public void testPreparedCql() throws Exception {
    OperationResult<CqlResult<Integer, String>> result;
    // Positional placeholders are bound in declaration order below.
    final String INSERT_STATEMENT = "INSERT INTO employees (empID, deptID, first_name, last_name) VALUES (?, ?, ?, ?);";
    result = keyspace.prepareQuery(CQL3_CF)
            .withCql(INSERT_STATEMENT)
            .asPreparedStatement()
            .withIntegerValue(222)
            .withIntegerValue(333)
            .withStringValue("Netta")
            .withStringValue("Landau")
            .execute();
    // Read the inserted row back through the non-prepared query path.
    result = keyspace.prepareQuery(CQL3_CF)
            .withCql("SELECT * FROM employees WHERE empId=222;")
            .execute();
    Assert.assertTrue(!result.getResult().getRows().isEmpty());
    // Dump each column of each returned row for manual inspection.
    for (Row<Integer, String> row : result.getResult().getRows()) {
        Log.info("CQL Key: " + row.getKey());
        ColumnList<String> columns = row.getColumns();
        Log.info(" empid : "
                + columns.getIntegerValue("empid", null));
        Log.info(" deptid : "
                + columns.getIntegerValue("deptid", null));
        Log.info(" first_name : "
                + columns.getStringValue("first_name", null));
        Log.info(" last_name : "
                + columns.getStringValue("last_name", null));
    }
}
@Test
// Inserts one row through the keyspace CQL query path and verifies that a
// SELECT on its partition key returns exactly one row.
public void testKeyspaceCql() throws Exception {
    keyspace.prepareQuery(CQL3_CF)
            .withCql(
                    "INSERT INTO employees (empID, deptID, first_name, last_name) VALUES (999, 233, 'arielle', 'landau');")
            .execute();
    CqlStatementResult result = keyspace.prepareCqlStatement()
            .withCql("SELECT * FROM employees WHERE empID=999;")
            .execute().getResult();
    // NOTE(review): 'schema' is never read; presumably kept so the test
    // exercises getSchema() without throwing — confirm before removing.
    CqlSchema schema = result.getSchema();
    Rows<Integer, String> rows = result.getRows(CQL3_CF);
    Assert.assertEquals(1, rows.size());
    // Assert.assertTrue(999 == rows.getRowByIndex(0).getKey());
}
@Test
// Verifies that a CQL3 map collection column can be written and then
// deserialized via MapSerializer.
public void testCollections() throws Exception {
    OperationResult<CqlStatementResult> result;
    // Insert a row whose 'favs' column is a map literal.
    result = keyspace
            .prepareCqlStatement()
            .withCql(
                    "INSERT INTO users (id, given, surname, favs) VALUES ('jsmith', 'John', 'Smith', { 'fruit' : 'apple', 'band' : 'Beatles' })")
            .execute();
    Rows<String, String> rows = keyspace.prepareCqlStatement()
            .withCql("SELECT * FROM users;").execute().getResult()
            .getRows(User_CF);
    // Both map keys and values are UTF-8 strings.
    MapSerializer<String, String> mapSerializer = new MapSerializer<String, String>(
            UTF8Type.instance, UTF8Type.instance);
    for (Row<String, String> row : rows) {
        Log.info(row.getKey());
        for (Column<String> column : row.getColumns()) {
            Log.info(" " + column.getName());
        }
        // Decode the map column and log each entry.
        Column<String> favs = row.getColumns().getColumnByName("favs");
        Map<String, String> map = favs.getValue(mapSerializer);
        for (Entry<String, String> entry : map.entrySet()) {
            Log.info(" fav: " + entry.getKey() + " = " + entry.getValue());
        }
    }
}
@Test
// Creates a table with a UUID primary key, inserts one row, selects only the
// non-key columns, and asserts their values.
public void testUUIDPart() throws Exception {
    keyspace.prepareCqlStatement()
            .withCql(
                    "CREATE TABLE uuidtest (id UUID PRIMARY KEY, given text, surname text);")
            .execute();
    keyspace.prepareCqlStatement()
            .withCql(
                    "INSERT INTO uuidtest (id, given, surname) VALUES (00000000-0000-0000-0000-000000000000, 'x', 'arielle');")
            .execute();
    CqlStatementResult queryResult = keyspace.prepareCqlStatement()
            .withCql("SELECT given,surname FROM uuidtest ;").execute()
            .getResult();
    Rows<UUID, String> rows = queryResult.getRows(UUID_CF);
    // Walk every column of every row; assert the two expected text columns.
    for (Row<UUID, String> row : rows) {
        for (Column<String> column : row.getColumns()) {
            String columnName = column.getName();
            Log.info("*************************************");
            if (columnName.equals("given")) {
                String value = column.getValue(StringSerializer.get());
                Log.info("columnname= " + columnName + " columnvalue= " + value);
                Assert.assertEquals("x", value);
            }
            if (columnName.equals("surname")) {
                String value = column.getValue(StringSerializer.get());
                Log.info("columnname= " + columnName + " columnvalue= " + value);
                Assert.assertEquals("arielle", value);
            }
        }
        Log.info("*************************************");
    }
    Assert.assertEquals(1, rows.size());
}
@Test
// Same scenario as testUUIDPart but with SELECT *, so the UUID key column is
// returned as well and its round-tripped value can be asserted.
public void testUUID() throws Exception {
    keyspace.prepareCqlStatement()
            .withCql(
                    "CREATE TABLE uuidtest1 (id UUID PRIMARY KEY, given text, surname text);")
            .execute();
    keyspace.prepareCqlStatement()
            .withCql(
                    "INSERT INTO uuidtest1 (id, given, surname) VALUES (00000000-0000-0000-0000-000000000000, 'x', 'arielle');")
            .execute();
    CqlStatementResult queryResult = keyspace.prepareCqlStatement()
            .withCql("SELECT * FROM uuidtest1 ;").execute().getResult();
    Rows<UUID, String> rows = queryResult.getRows(UUID_CF);
    // Walk every column of every row and assert each expected value.
    for (Row<UUID, String> row : rows) {
        for (Column<String> column : row.getColumns()) {
            String columnName = column.getName();
            Log.info("*************************************");
            if (columnName.equals("id")) {
                UUID idValue = column.getValue(UUIDSerializer.get());
                Log.info("columnname= " + columnName + " columnvalue= " + idValue);
                Assert.assertEquals("00000000-0000-0000-0000-000000000000",
                        idValue.toString());
            }
            if (columnName.equals("given")) {
                String value = column.getValue(StringSerializer.get());
                Log.info("columnname= " + columnName + " columnvalue= "
                        + value.toString());
                Assert.assertEquals("x", value);
            }
            if (columnName.equals("surname")) {
                String value = column.getValue(StringSerializer.get());
                Log.info("columnname= " + columnName + " columnvalue= " + value);
                Assert.assertEquals("arielle", value);
            }
        }
        Log.info("*************************************");
    }
    Assert.assertEquals(1, rows.size());
}
}
| 7,555 |
0 | Create_ds/astyanax/astyanax-test/src/test/java/com/netflix/astyanax | Create_ds/astyanax/astyanax-test/src/test/java/com/netflix/astyanax/thrift/QueueTest.java | package com.netflix.astyanax.thrift;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicLong;
import junit.framework.Assert;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Ignore;
import org.junit.Test;
import org.junit.runners.Parameterized;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.base.Function;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.netflix.astyanax.AstyanaxContext;
import com.netflix.astyanax.Keyspace;
import com.netflix.astyanax.connectionpool.NodeDiscoveryType;
import com.netflix.astyanax.connectionpool.impl.ConnectionPoolConfigurationImpl;
import com.netflix.astyanax.connectionpool.impl.ConnectionPoolType;
import com.netflix.astyanax.connectionpool.impl.CountingConnectionPoolMonitor;
import com.netflix.astyanax.impl.AstyanaxConfigurationImpl;
import com.netflix.astyanax.model.ConsistencyLevel;
import com.netflix.astyanax.recipes.locks.BusyLockException;
import com.netflix.astyanax.recipes.queue.CountingQueueStats;
import com.netflix.astyanax.recipes.queue.KeyExistsException;
import com.netflix.astyanax.recipes.queue.Message;
import com.netflix.astyanax.recipes.queue.MessageConsumer;
import com.netflix.astyanax.recipes.queue.MessageContext;
import com.netflix.astyanax.recipes.queue.MessageProducer;
import com.netflix.astyanax.recipes.queue.MessageQueue;
import com.netflix.astyanax.recipes.queue.MessageQueueDispatcher;
import com.netflix.astyanax.recipes.queue.MessageQueueException;
import com.netflix.astyanax.recipes.queue.SendMessageResponse;
import com.netflix.astyanax.recipes.queue.ShardLock;
import com.netflix.astyanax.recipes.queue.ShardLockManager;
import com.netflix.astyanax.recipes.queue.ShardedDistributedMessageQueue;
import com.netflix.astyanax.recipes.queue.triggers.RepeatingTrigger;
import com.netflix.astyanax.recipes.queue.triggers.RunOnceTrigger;
import com.netflix.astyanax.util.SingletonEmbeddedCassandra;
import java.util.Arrays;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.locks.ReentrantLock;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized.Parameters;
import static org.junit.Assert.*;
@RunWith(value = Parameterized.class)
public class QueueTest {
private static Logger LOG = LoggerFactory.getLogger(QueueTest.class);
// Keyspace handle and its owning context; created once in setup()/createKeyspace().
private static Keyspace keyspace;
private static AstyanaxContext<Keyspace> keyspaceContext;
private static String TEST_CLUSTER_NAME = "cass_sandbox";
private static String TEST_KEYSPACE_NAME = "AstyanaxUnitTests";
// Column family backing the sharded distributed message queue under test.
private static String SCHEDULER_NAME_CF_NAME = "SchedulerQueue";
private static final String SEEDS = "localhost:9160";
// Settle time (ms) after embedded-Cassandra lifecycle operations.
private static final long CASSANDRA_WAIT_TIME = 3000;
private static final int TTL = 20;
private static final int TIMEOUT = 10;
private static final ConsistencyLevel CONSISTENCY_LEVEL = ConsistencyLevel.CL_ONE;
// Parameterized-run inputs: optional shard lock manager (null = none) and a
// queue-name suffix so the two runs don't collide on queue names.
private ReentrantLockManager slm = null;
private String qNameSfx = null;
@BeforeClass
// Starts the embedded Cassandra singleton, waits for it to settle, and
// creates the test keyspace plus the queue's backing storage.
public static void setup() throws Exception {
    LOG.info("TESTING THRIFT KEYSPACE");
    SingletonEmbeddedCassandra.getInstance();
    Thread.sleep(CASSANDRA_WAIT_TIME);
    createKeyspace();
}
/**
 * Parameterized constructor.
 *
 * @param s    shard lock manager to use, or null to run without locking
 * @param sfx  suffix appended to queue names so the two runs don't collide
 */
public QueueTest(ReentrantLockManager s, String sfx) {
    slm = s;
    qNameSfx = sfx;
    System.out.println((s == null? "Running without SLM":"Running WITH SLM") + " and suffix " + qNameSfx);
}
@Parameters
// Runs the whole suite twice: once with a ReentrantLockManager and once without.
public static Collection<Object[]> parameters() {
    Object[][] data = new Object[][]{{new ReentrantLockManager(), "WITHSLM"},{null, "NOSLM"}};
    // Object[][] data = new Object[][]{{new ReentrantLockManager(), "WITHSLM"}};
    return Arrays.asList(data);
}
// Builds the Astyanax context, (re)creates the test keyspace with RF=1
// SimpleStrategy, and provisions the column-family storage used by the queue.
public static void createKeyspace() throws Exception {
    keyspaceContext = new AstyanaxContext.Builder()
            .forCluster(TEST_CLUSTER_NAME)
            .forKeyspace(TEST_KEYSPACE_NAME)
            .withAstyanaxConfiguration(
                    new AstyanaxConfigurationImpl()
                            .setDiscoveryType(NodeDiscoveryType.RING_DESCRIBE)
                            .setConnectionPoolType(ConnectionPoolType.TOKEN_AWARE)
                            .setDiscoveryDelayInSeconds(60000))
            .withConnectionPoolConfiguration(
                    new ConnectionPoolConfigurationImpl(TEST_CLUSTER_NAME
                            + "_" + TEST_KEYSPACE_NAME)
                            .setSocketTimeout(30000)
                            .setMaxTimeoutWhenExhausted(2000)
                            .setMaxConnsPerHost(20)
                            .setInitConnsPerHost(10)
                            .setSeeds(SEEDS))
            .withConnectionPoolMonitor(new CountingConnectionPoolMonitor())
            .buildKeyspace(ThriftFamilyFactory.getInstance());
    keyspaceContext.start();
    keyspace = keyspaceContext.getEntity();
    // Drop any leftover keyspace from a previous run; ignore "doesn't exist".
    try {
        keyspace.dropKeyspace();
    } catch (Exception e) {
        LOG.info(e.getMessage());
    }
    keyspace.createKeyspace(ImmutableMap.<String, Object>builder()
            .put("strategy_options", ImmutableMap.<String, Object>builder()
                    .put("replication_factor", "1")
                    .build())
            .put("strategy_class", "SimpleStrategy")
            .build());
    // Create the queue's backing column families (shared by all tests).
    final CountingQueueStats stats = new CountingQueueStats();
    final ShardedDistributedMessageQueue queue = new ShardedDistributedMessageQueue.Builder()
            .withColumnFamily(SCHEDULER_NAME_CF_NAME)
            .withQueueName("TestQueue")
            .withKeyspace(keyspace)
            .withConsistencyLevel(CONSISTENCY_LEVEL)
            .withStats(stats)
            .withTimeBuckets(2, 30, TimeUnit.SECONDS)
            .withShardCount(2)
            .withPollInterval(100L, TimeUnit.MILLISECONDS)
            .build();
    queue.createStorage();
}
@AfterClass
// Shuts down the Astyanax context and waits for Cassandra to settle.
public static void teardown() throws Exception {
    if (keyspaceContext != null) {
        keyspaceContext.shutdown();
    }
    Thread.sleep(CASSANDRA_WAIT_TIME);
}
@Test
// This tests for a known bug that has yet to be fixed
// Exercises a unique repeating message with a 1s timeout: consume, let it
// time out, consume the timeout event, ack both, then keep draining the
// queue verifying exactly one message reappears each cycle.
public void testRepeatingMessage() throws Exception {
    final CountingQueueStats stats = new CountingQueueStats();
    // Create a simple queue
    final ShardedDistributedMessageQueue queue = new ShardedDistributedMessageQueue.Builder()
            .withColumnFamily(SCHEDULER_NAME_CF_NAME)
            .withQueueName("RepeatingMessageQueue" + qNameSfx)
            .withKeyspace(keyspace)
            .withConsistencyLevel(CONSISTENCY_LEVEL)
            .withStats(stats)
            .withShardCount(1)
            .withPollInterval(100L, TimeUnit.MILLISECONDS)
            .withShardLockManager(slm)
            .build();
    queue.createQueue();
    MessageProducer producer = queue.createProducer();
    MessageConsumer consumer = queue.createConsumer();
    // Enqueue a recurring message
    final String key = "RepeatingMessageWithTimeout";
    final Message message = new Message()
            .setUniqueKey(key)
            .setTimeout(1, TimeUnit.SECONDS)
            .setTrigger(new RepeatingTrigger.Builder().withInterval(1, TimeUnit.SECONDS).build());
    producer.sendMessage(message);
    // Make sure it's unique by trying to submit again
    try {
        producer.sendMessage(message);
        Assert.fail();
    } catch (KeyExistsException e) {
        LOG.info("Key already exists");
    }
    // Confirm that the message is there
    Assert.assertEquals(1, queue.getMessageCount());
    printMessages("Pending messages after insert ORIG message", queue.peekMessagesByKey(key));
    // Consume the message
    LOG.info("*** Reading first message ***");
    final List<MessageContext> m1 = consumer.readMessages(10);
    printMessages("Consuming the ORIG message", m1);
    Assert.assertEquals(1, m1.size());
    printMessages("Pending messages after consume ORIG " + key, queue.peekMessagesByKey(key));
    // Exceed the timeout
    Thread.sleep(2000);
    // Consume the timeout event
    LOG.info("*** Reading timeout message ***");
    final List<MessageContext> m2 = consumer.readMessages(10);
    printMessages("Consuming the TIMEOUT message", m2);
    Assert.assertEquals(1, m2.size());
    printMessages("Pending messages after consume TIMEOUT " + key, queue.peekMessagesByKey(key));
    // Assert.assertEquals(2, m2a.size());
    LOG.info("*** Acking both messages ***");
    consumer.ackMessages(m1);
    consumer.ackMessages(m2);
    printMessages("Pending messages after both acks " + key, queue.peekMessagesByKey(key));
    // Assert.assertEquals(2, m2a.size());
    // Consume anything that is in the queue
    final List<MessageContext> m3 = consumer.readMessages(10);
    printMessages("Consuming messages", m3);
    Assert.assertEquals(1, m3.size());
    printMessages("Pending messages after 2nd consume " + key, queue.peekMessagesByKey(key));
    consumer.ackMessages(m3);
    Thread.sleep(2000);
    final List<MessageContext> m4 = consumer.readMessages(10);
    printMessages("Consuming messages", m4);
    Assert.assertEquals(1, m4.size());
    // There should be only one message
    // Assert.assertEquals(1, queue.getMessageCount());
    // Repeated drain: each cycle should yield exactly one re-triggered message.
    for (int i = 0; i < 10; i++) {
        final List<MessageContext> m5 = consumer.readMessages(10);
        Assert.assertEquals(1, m5.size());
        long systemtime = System.currentTimeMillis();
        MessageContext m = Iterables.getFirst(m5, null);
        LOG.info("MessageTime: " + (systemtime - m.getMessage().getTrigger().getTriggerTime()));
        consumer.ackMessages(m5);
    }
}
/** Logs a caption with the element count, then each element on its own line. */
private <T> void printMessages(String caption, List<T> messages) {
    LOG.info(caption + "(" + messages.size() + ")");
    messages.forEach(item -> LOG.info(" " + item));
}
@Test
// Verifies that a message with no key can be enqueued successfully.
// Cleanup: removed unused locals 'key' and 'key2' and the redundant
// statement block around the send.
public void testNoKeyQueue() throws Exception {
    final CountingQueueStats stats = new CountingQueueStats();
    final ShardedDistributedMessageQueue scheduler = new ShardedDistributedMessageQueue.Builder()
            .withColumnFamily(SCHEDULER_NAME_CF_NAME)
            .withQueueName("TestNoKeyQueue" + qNameSfx)
            .withKeyspace(keyspace)
            .withConsistencyLevel(CONSISTENCY_LEVEL)
            .withStats(stats)
            .withShardCount(1)
            .withPollInterval(100L, TimeUnit.MILLISECONDS)
            .withShardLockManager(slm)
            .build();
    scheduler.createQueue();
    MessageProducer producer = scheduler.createProducer();
    MessageConsumer consumer = scheduler.createConsumer();
    // Send a single keyless message and log the generated message id.
    final Message m = new Message();
    LOG.info(m.toString());
    String messageId = producer.sendMessage(m);
    LOG.info("MessageId: " + messageId);
}
@Test
// End-to-end queue behaviors: peek/delete by id and by key, unique-key
// enforcement, shard counts, read/ack, repeating-trigger messages, and
// batch send + peek. Cleanup: deleted a large block of commented-out
// dispatcher code that was dead weight between the third and fourth stages.
public void testQueue() throws Exception {
    final CountingQueueStats stats = new CountingQueueStats();
    final ShardedDistributedMessageQueue scheduler = new ShardedDistributedMessageQueue.Builder()
            .withColumnFamily(SCHEDULER_NAME_CF_NAME)
            .withQueueName("TestQueue" + qNameSfx)
            .withKeyspace(keyspace)
            .withConsistencyLevel(CONSISTENCY_LEVEL)
            .withStats(stats)
            .withShardCount(1)
            .withPollInterval(100L, TimeUnit.MILLISECONDS)
            .withShardLockManager(slm)
            .build();
    scheduler.createQueue();
    String key = "MyEvent";
    String key2 = "MyEvent2";
    MessageProducer producer = scheduler.createProducer();
    MessageConsumer consumer = scheduler.createConsumer();
    // Stage 1: send one keyed message; peek by id and by key; delete by key
    // and confirm it is gone via both lookups.
    {
        final Message m = new Message().setKey(key);
        // Add a message
        LOG.info(m.toString());
        String messageId = producer.sendMessage(m);
        LOG.info("MessageId: " + messageId);
        Assert.assertEquals(1, scheduler.getMessageCount());
        // Read it by the messageId
        final Message m1rm = scheduler.peekMessage(messageId);
        LOG.info("m1rm: " + m1rm);
        Assert.assertNotNull(m1rm);
        // Read it by the key
        final Message m1rk = scheduler.peekMessageByKey(key);
        LOG.info("m1rk:" + m1rk);
        Assert.assertNotNull(m1rk);
        // Delete the message
        scheduler.deleteMessageByKey(key);
        // Read and verify that it is gone
        final Message m1rkd = scheduler.peekMessageByKey(key);
        Assert.assertNull(m1rkd);
        // Read and verify that it is gone
        final Message m1rmd = scheduler.peekMessage(messageId);
        Assert.assertNull(m1rmd);
    }
    // Stage 2: unique-key enforcement for single and batch sends, then
    // read/ack of the remaining message.
    {
        // Send another message
        final Message m = new Message().setUniqueKey(key);
        LOG.info("m2: " + m);
        final String messageId2 = producer.sendMessage(m);
        LOG.info("MessageId2: " + messageId2);
        try {
            final Message m2 = new Message().setUniqueKey(key);
            producer.sendMessage(m2);
            Assert.fail("Message should already exists");
        } catch (MessageQueueException e) {
            LOG.info("Failed to insert duplicate key", e);
        }
        try {
            // Batch with one duplicate key and one new key: exactly one
            // should send and one should be reported not-unique.
            List<Message> messages = Lists.newArrayList(
                    new Message().setUniqueKey(key),
                    new Message().setUniqueKey(key2));
            SendMessageResponse result = producer.sendMessages(messages);
            Assert.assertEquals(1, result.getMessages().size());
            Assert.assertEquals(1, result.getNotUnique().size());
        } catch (MessageQueueException e) {
            Assert.fail(e.getMessage());
        }
        Map<String, Integer> counts = scheduler.getShardCounts();
        LOG.info(counts.toString());
        Assert.assertEquals(2, scheduler.getMessageCount());
        // Delete the message
        scheduler.deleteMessageByKey(key2);
        // Read the message
        final Collection<MessageContext> lm2 = consumer.readMessages(10, 10, TimeUnit.SECONDS);
        LOG.info("Read message: " + lm2);
        Assert.assertEquals(1, lm2.size());
        LOG.info(lm2.toString());
        Assert.assertEquals(1, scheduler.getMessageCount());
        consumer.ackMessages(lm2);
        Assert.assertEquals(0, scheduler.getMessageCount());
    }
    // Stage 3: a repeating-trigger message counts as a single message and
    // can be deleted by id.
    {
        final Message m = new Message()
                .setKey("Key12345")
                .setTrigger(new RepeatingTrigger.Builder()
                        .withInterval(3, TimeUnit.SECONDS)
                        .withRepeatCount(10)
                        .build());
        final String messageId3 = producer.sendMessage(m);
        Assert.assertNotNull(messageId3);
        final Message m3rm = scheduler.peekMessage(messageId3);
        Assert.assertNotNull(m3rm);
        LOG.info(m3rm.toString());
        Assert.assertEquals(1, scheduler.getMessageCount());
        scheduler.deleteMessage(messageId3);
        Assert.assertEquals(0, scheduler.getMessageCount());
    }
    // Add a batch of messages and peek
    {
        List<Message> messages = Lists.newArrayList();
        for (int i = 0; i < 10; i++) {
            messages.add(new Message().addParameter("body", "" + i));
        }
        producer.sendMessages(messages);
        Collection<Message> all = consumer.peekMessages(Integer.MAX_VALUE);
        Assert.assertEquals(10, all.size());
        for (Message msg : all) {
            LOG.info(msg.getParameters().toString());
        }
    }
}
@Test
@Ignore
// Stress test (ignored by default: enqueues up to a million messages).
// One background thread produces batches at ~1 batch/second, a dispatcher
// consumes them, and a scheduled task logs throughput once per second.
// Fixes and cleanup in this revision:
//   * BUG FIX: the producer's pacing sleep was computed as
//     "1000 - System.currentTimeMillis() - tm" — operator precedence made it
//     always hugely negative, so the sleep never ran. Parenthesized to
//     "1000 - (System.currentTimeMillis() - tm)".
//   * Removed the unused 'lookup' map and large blocks of commented-out code.
//   * InterruptedException now restores the interrupt flag and stops the loop.
public void testStressQueue() throws Exception {
    ExecutorService executor = Executors.newFixedThreadPool(100);
    final AtomicLong counter = new AtomicLong(0);      // messages consumed
    final AtomicLong insertCount = new AtomicLong(0);  // messages produced
    final long max_count = 1000000;
    final CountingQueueStats stats = new CountingQueueStats();
    final ConsistencyLevel cl = ConsistencyLevel.CL_ONE;
    final MessageQueue scheduler = new ShardedDistributedMessageQueue.Builder()
            .withColumnFamily(SCHEDULER_NAME_CF_NAME)
            .withQueueName("StressQueue" + qNameSfx)
            .withKeyspace(keyspace)
            .withConsistencyLevel(cl)
            .withStats(stats)
            .withTimeBuckets(10, 30, TimeUnit.SECONDS)
            .withShardCount(100)
            .withPollInterval(100L, TimeUnit.MILLISECONDS)
            .withShardLockManager(slm)
            .build();
    scheduler.createStorage();
    Thread.sleep(1000);
    scheduler.createQueue();
    final int batchSize = 50;
    // Producer: send batches of 'batchSize' messages, pacing to roughly one
    // batch per second.
    Executors.newSingleThreadExecutor().execute(new Runnable() {
        @Override
        public void run() {
            MessageProducer producer = scheduler.createProducer();
            for (int i = 0; i < max_count / batchSize; i++) {
                long tm = System.currentTimeMillis();
                List<Message> messages = Lists.newArrayList();
                for (int j = 0; j < batchSize; j++) {
                    long id = insertCount.incrementAndGet();
                    messages.add(new Message()
                            .setKey("" + id)
                            .addParameter("data", "The quick brown fox jumped over the lazy cow " + id)
                            .setTimeout(0)
                            .setTrigger(new RunOnceTrigger.Builder()
                                    .withDelay(j, TimeUnit.SECONDS)
                                    .build()));
                }
                try {
                    producer.sendMessages(messages);
                } catch (MessageQueueException e) {
                    e.printStackTrace();
                }
                // Sleep for the remainder of the one-second send window.
                long sleep = 1000 - (System.currentTimeMillis() - tm);
                if (sleep > 0) {
                    try {
                        Thread.sleep(sleep);
                    } catch (InterruptedException e) {
                        Thread.currentThread().interrupt();
                        return;
                    }
                }
            }
        }
    });
    // Status reporter: log queue stats and the per-second production rate.
    final AtomicLong prevCount = new AtomicLong(0);
    Executors.newSingleThreadScheduledExecutor().scheduleAtFixedRate(new Runnable() {
        @Override
        public void run() {
            try {
                long newCount = insertCount.get();
                LOG.info(stats.toString());
                LOG.info("" + (newCount - prevCount.get()) + " /sec (" + newCount + ")");
                prevCount.set(newCount);
            } catch (Exception e) {
                // Keep the reporter alive; scheduleAtFixedRate cancels on throw.
                e.printStackTrace();
            }
        }
    }, 1, 1, TimeUnit.SECONDS);
    // Consumer
    MessageQueueDispatcher dispatcher = new MessageQueueDispatcher.Builder()
            .withBatchSize(500)
            .withCallback(new Function<MessageContext, Boolean>() {
                @Override
                public Boolean apply(MessageContext message) {
                    counter.incrementAndGet();
                    // Return true to 'ack' the message
                    // Return false to not 'ack' which will result in the message timing out
                    // Throw any exception to put the message into a poison queue
                    return true;
                }
            })
            .withMessageQueue(scheduler)
            .withConsumerCount(5)
            .withThreadCount(1 + 10)
            .build();
    dispatcher.start();
    // 'executor' is never given tasks or shut down, so awaitTermination simply
    // lets the producer/consumer run for up to 1000 seconds.
    executor.awaitTermination(1000, TimeUnit.SECONDS);
}
@Test
// Verifies busy-lock handling: with a shard lock manager, locking the only
// shard externally should cause the dispatcher to record BusyLockExceptions
// yet still consume every queued message once the lock is released.
public void testQueueBusyLock() throws Exception {
    final CountingQueueStats stats = new CountingQueueStats();
    final ShardedDistributedMessageQueue scheduler = new ShardedDistributedMessageQueue.Builder()
            .withColumnFamily(SCHEDULER_NAME_CF_NAME)
            .withQueueName("TestQueueBusyLock" + qNameSfx)
            .withKeyspace(keyspace)
            .withConsistencyLevel(CONSISTENCY_LEVEL)
            .withStats(stats)
            .withShardCount(1)
            .withPollInterval(100L, TimeUnit.MILLISECONDS)
            .withShardLockManager(slm)
            .build();
    scheduler.deleteQueue();
    scheduler.createQueue();
    MessageProducer producer = scheduler.createProducer();
    // Add a batch of messages and peek
    List<Message> messages = Lists.newArrayList();
    for (int i = 0; i < 5; i++) {
        messages.add(new Message().addParameter("body", "" + i));
    }
    producer.sendMessages(messages);
    long queuedCount = scheduler.getMessageCount();
    final AtomicInteger count = new AtomicInteger();
    // Lock the shard. This should throw a few BusyLockExceptions
    String shard = scheduler.getShardStats().keySet().iterator().next();
    ShardLock l = null;
    if(slm!=null) {
        l = slm.acquireLock(shard);
    }
    // Consumer
    MessageQueueDispatcher dispatcher = new MessageQueueDispatcher.Builder()
            .withBatchSize(25)
            .withCallback(new Function<MessageContext, Boolean>() {
                @Override
                public Boolean apply(MessageContext message) {
                    count.incrementAndGet();
                    return true;
                }
            })
            .withMessageQueue(scheduler)
            .withConsumerCount(10)
            .withProcessorThreadCount(10)
            .withAckInterval(20, TimeUnit.MILLISECONDS)
            .withPollingInterval(15, TimeUnit.MILLISECONDS)
            .build();
    // Start the consumer
    dispatcher.start();
    // Release the lock
    if(slm!=null) {
        // Wait
        Thread.sleep(1000);
        slm.releaseLock(l);
    }
    // Wait another 10 seconds and then stop the dispatcher
    Thread.sleep(1000);
    dispatcher.stop();
    // Every queued message must have been consumed exactly once.
    assertEquals(queuedCount, count.intValue());
    // Check the busy lock count
    if(slm!=null) {
        System.out.println("Lock attempts " + slm.getLockAttempts());
        assertTrue(slm.getBusyLockCounts().get(shard).intValue() > 0);
    }
}
/**
 * A shard lock manager backed by in-JVM ReentrantLocks, used to verify the
 * queue's busy-lock handling. Tracks the total number of acquisition
 * attempts and, per shard, how often the shard was found busy.
 *
 * BUG FIX: acquireLock previously wrapped its own deliberately-thrown
 * BusyLockException inside catch (Exception e), replacing the informative
 * "already locked" message with a generic "Could not lock shard" one.
 * ReentrantLock.tryLock() does not throw, so the try/catch is unnecessary
 * and has been removed.
 */
static class ReentrantLockManager implements ShardLockManager {
    private final ConcurrentHashMap<String, ReentrantLock> locks = new ConcurrentHashMap<String, ReentrantLock>();
    private final ConcurrentHashMap<String, AtomicInteger> busyLockCounts = new ConcurrentHashMap<String, AtomicInteger>();
    private final AtomicLong lockAttempts = new AtomicLong();

    /**
     * Attempts a non-blocking acquire of the named shard's lock.
     *
     * @throws BusyLockException if the shard is already locked
     */
    @Override
    public ShardLock acquireLock(String shardName) throws BusyLockException {
        locks.putIfAbsent(shardName, new ReentrantLock());
        ReentrantLock l = locks.get(shardName);
        lockAttempts.incrementAndGet();
        if (l.tryLock()) {
            return new ReentrantShardLock(l, shardName);
        }
        busyLockCounts.putIfAbsent(shardName, new AtomicInteger());
        busyLockCounts.get(shardName).incrementAndGet();
        throw new BusyLockException("Shard " + shardName + " is already locked" + ": busy lock count " + busyLockCounts.get(shardName));
    }

    /** Releases a lock previously returned by acquireLock; null is a no-op. */
    @Override
    public void releaseLock(ShardLock lock) {
        if(lock!=null) {
            ReentrantShardLock rsl = (ReentrantShardLock) lock;
            rsl.getLock().unlock();
        }
    }

    /** Per-shard counts of failed (busy) acquisition attempts. */
    public Map<String,AtomicInteger> getBusyLockCounts() {
        return busyLockCounts;
    }

    /** Total number of acquisition attempts, successful or not. */
    public long getLockAttempts() {
        return lockAttempts.longValue();
    }
}
/**
 * A shard lock implementation that wraps a ReentrantLock so that
 * ReentrantLockManager.releaseLock can unlock it later.
 * Fields made final: the lock and shard name never change after construction.
 */
static class ReentrantShardLock implements ShardLock {
    private final ReentrantLock lock;
    private final String shardName;

    public ReentrantShardLock(ReentrantLock lock, String shardName) {
        this.lock = lock;
        this.shardName = shardName;
    }

    @Override
    public String getShardName() {
        return shardName;
    }

    /** The underlying lock held for this shard. */
    public ReentrantLock getLock() {
        return lock;
    }
}
}
| 7,556 |
0 | Create_ds/astyanax/astyanax-test/src/test/java/com/netflix/astyanax | Create_ds/astyanax/astyanax-test/src/test/java/com/netflix/astyanax/thrift/HelloWorldFunction.java | package com.netflix.astyanax.thrift;
import com.google.common.base.Function;
import com.netflix.astyanax.recipes.queue.MessageContext;
/**
 * Trivial message callback used by the queue tests: prints the message along
 * with the elapsed time since this class was loaded, and always acks.
 * Cleanup: canonical "static final" modifier order and removal of
 * commented-out dead code.
 */
public class HelloWorldFunction implements Function<MessageContext, Boolean> {
    // Class-load time; used to report a relative offset in the output line.
    private static final long startTime = System.currentTimeMillis();

    @Override
    public Boolean apply(MessageContext input) {
        long offset = System.currentTimeMillis() - startTime;
        System.out.println("Hello world (" + offset + ") : " + input);
        // Returning true acks the message.
        return true;
    }
}
| 7,557 |
0 | Create_ds/astyanax/astyanax-test/src/test/java/com/netflix/astyanax | Create_ds/astyanax/astyanax-test/src/test/java/com/netflix/astyanax/thrift/ThriftKeyspaceAllRowsTest.java | package com.netflix.astyanax.thrift;
import com.google.common.collect.ImmutableMap;
import com.netflix.astyanax.AstyanaxContext;
import com.netflix.astyanax.ExceptionCallback;
import com.netflix.astyanax.Keyspace;
import com.netflix.astyanax.MutationBatch;
import com.netflix.astyanax.RowCallback;
import com.netflix.astyanax.connectionpool.NodeDiscoveryType;
import com.netflix.astyanax.connectionpool.OperationResult;
import com.netflix.astyanax.connectionpool.exceptions.ConnectionException;
import com.netflix.astyanax.connectionpool.impl.ConnectionPoolConfigurationImpl;
import com.netflix.astyanax.connectionpool.impl.ConnectionPoolType;
import com.netflix.astyanax.connectionpool.impl.CountingConnectionPoolMonitor;
import com.netflix.astyanax.ddl.KeyspaceDefinition;
import com.netflix.astyanax.impl.AstyanaxCheckpointManager;
import com.netflix.astyanax.impl.AstyanaxConfigurationImpl;
import com.netflix.astyanax.model.ColumnFamily;
import com.netflix.astyanax.model.Row;
import com.netflix.astyanax.model.Rows;
import com.netflix.astyanax.query.CheckpointManager;
import com.netflix.astyanax.serializers.LongSerializer;
import com.netflix.astyanax.serializers.StringSerializer;
import com.netflix.astyanax.util.RangeBuilder;
import com.netflix.astyanax.util.SingletonEmbeddedCassandra;
import junit.framework.Assert;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Iterator;
import java.util.Set;
import java.util.TreeSet;
import java.util.concurrent.atomic.AtomicLong;
public class ThriftKeyspaceAllRowsTest {
private static Logger LOG = LoggerFactory.getLogger(ThriftKeyspaceAllRowsTest.class);
// Keyspace handle and its owning context; created in setup()/createKeyspace().
private static Keyspace keyspace;
private static AstyanaxContext<Keyspace> keyspaceContext;
private static String TEST_CLUSTER_NAME = "cass_sandbox";
private static String TEST_KEYSPACE_NAME = "AstyanaxUnitTests";
private static final String SEEDS = "localhost:9160";
// Settle time (ms) after embedded-Cassandra lifecycle operations.
private static final long CASSANDRA_WAIT_TIME = 3000;
private static final long LOTS_OF_ROWS_COUNT = 1000;
// Column families used by the all-rows iteration tests; all keyed by Long
// with String column names.
public static ColumnFamily<Long, String> CF_ALL_ROWS =
        ColumnFamily.newColumnFamily("AllRows1", LongSerializer.get(), StringSerializer.get());
public static ColumnFamily<Long, String> CF_ALL_ROWS_TOMBSTONE =
        ColumnFamily.newColumnFamily("AllRowsTombstone1", LongSerializer.get(), StringSerializer.get());
public static ColumnFamily<Long, String> CF_LOTS_OF_ROWS =
        new ColumnFamily<Long, String>("LotsOfRows1", LongSerializer.get(), StringSerializer.get());
// Stores iteration checkpoints for the checkpoint-manager tests.
public static ColumnFamily<Long, String> CF_CHECKPOINTS =
        new ColumnFamily<Long, String>("Checkpoints", LongSerializer.get(), StringSerializer.get());
@BeforeClass
public static void setup() throws Exception {
System.out.println("TESTING THRIFT KEYSPACE");
SingletonEmbeddedCassandra.getInstance();
Thread.sleep(CASSANDRA_WAIT_TIME);
createKeyspace();
}
@AfterClass
public static void teardown() throws Exception {
if (keyspaceContext != null)
keyspaceContext.shutdown();
Thread.sleep(CASSANDRA_WAIT_TIME);
}
public static void createKeyspace() throws Exception {
keyspaceContext = new AstyanaxContext.Builder()
.forCluster(TEST_CLUSTER_NAME)
.forKeyspace(TEST_KEYSPACE_NAME)
.withAstyanaxConfiguration(
new AstyanaxConfigurationImpl()
.setDiscoveryType(NodeDiscoveryType.RING_DESCRIBE)
.setConnectionPoolType(ConnectionPoolType.TOKEN_AWARE)
.setDiscoveryDelayInSeconds(60000))
.withConnectionPoolConfiguration(
new ConnectionPoolConfigurationImpl(TEST_CLUSTER_NAME
+ "_" + TEST_KEYSPACE_NAME)
.setSocketTimeout(30000)
.setMaxTimeoutWhenExhausted(2000)
.setMaxConnsPerHost(20)
.setInitConnsPerHost(10)
.setSeeds(SEEDS)
)
.withConnectionPoolMonitor(new CountingConnectionPoolMonitor())
.buildKeyspace(ThriftFamilyFactory.getInstance());
keyspaceContext.start();
keyspace = keyspaceContext.getEntity();
try {
keyspace.dropKeyspace();
}
catch (Exception e) {
LOG.info(e.getMessage());
}
keyspace.createKeyspace(ImmutableMap.<String, Object>builder()
.put("strategy_options", ImmutableMap.<String, Object>builder()
.put("replication_factor", "1")
.build())
.put("strategy_class", "SimpleStrategy")
.build()
);
keyspace.createColumnFamily(CF_ALL_ROWS, null);
keyspace.createColumnFamily(CF_ALL_ROWS_TOMBSTONE, null);
keyspace.createColumnFamily(CF_LOTS_OF_ROWS, null);
keyspace.createColumnFamily(CF_CHECKPOINTS, null);
KeyspaceDefinition ki = keyspaceContext.getEntity().describeKeyspace();
System.out.println("Describe Keyspace: " + ki.getName());
MutationBatch m;
try {
m = keyspace.prepareMutationBatch();
// Add 10 rows
for (long i = 0; i < 10; i++) {
m.withRow(CF_ALL_ROWS, i)
.putColumn("A", 1)
.putColumn("B", 1)
;
}
// Add 10 rows
for (long i = 10; i < 20; i++) {
m.withRow(CF_ALL_ROWS, i)
.putColumn("B", 1)
.putColumn("C", 1)
;
}
// Add 10 rows
for (long i = 20; i < 30; i++) {
m.withRow(CF_ALL_ROWS, i)
.putColumn("B", 1)
.putColumn("C", 1)
;
}
for (long i = 0; i < 100; i++) {
m.withRow(CF_ALL_ROWS_TOMBSTONE, i)
.delete()
;
}
m.execute();
m = keyspace.prepareMutationBatch();
// Delete 7
for (long i = 0; i < 20; i += 3) {
m.withRow(CF_ALL_ROWS, i)
.delete();
}
// Delete 10
for (long i = 20; i < 30; i ++ ) {
m.withRow(CF_ALL_ROWS, i)
.delete();
}
// CF_ALL_ROWS should have 13 rows + 17 tombstones
m.execute();
// Add 10,000 rows
m = keyspace.prepareMutationBatch();
for (long i = 0; i < LOTS_OF_ROWS_COUNT; i++) {
m.withRow(CF_LOTS_OF_ROWS, i).putColumn("DATA", "TEST" + i);
}
m.execute();
} catch (Exception e) {
System.out.println(e.getMessage());
Assert.fail();
}
}
public static <K, C> Set<K> getKeySet(Rows<K, C> rows) {
Set<K> set = new TreeSet<K>();
for (Row<K, C> row : rows) {
if (set.contains(row.getKey()))
Assert.fail("Duplicate key found : " + row.getKey());
LOG.info("Row: " + row.getKey());
set.add(row.getKey());
}
return set;
}
@Test
public void testGetAll() {
try {
OperationResult<Rows<Long, String>> rows = keyspace.prepareQuery(CF_ALL_ROWS)
.getAllRows()
.setRowLimit(5)
.setExceptionCallback(new ExceptionCallback() {
@Override
public boolean onException(ConnectionException e) {
Assert.fail(e.getMessage());
return true;
}
})
.execute();
for (Row<Long, String> row : rows.getResult()) {
LOG.info("Row: " + row.getKey() + " count=" + row.getColumns().size());
}
Set<Long> set = getKeySet(rows.getResult());
LOG.info(set.toString());
Assert.assertEquals(13, set.size());
} catch (ConnectionException e) {
Assert.fail();
}
}
@Test
public void testGetAllDefaults() {
try {
OperationResult<Rows<Long, String>> rows = keyspace.prepareQuery(CF_ALL_ROWS)
.getAllRows()
// .setRowLimit(5)
.setExceptionCallback(new ExceptionCallback() {
@Override
public boolean onException(ConnectionException e) {
Assert.fail(e.getMessage());
return true;
}
})
.execute();
Set<Long> set = getKeySet(rows.getResult());
LOG.info(set.toString());
Assert.assertEquals(13, set.size());
} catch (ConnectionException e) {
Assert.fail();
}
}
@Test
public void testGetAllWithTombstones() {
try {
OperationResult<Rows<Long, String>> rows = keyspace.prepareQuery(CF_ALL_ROWS_TOMBSTONE)
.getAllRows()
.setRepeatLastToken(false)
.setRowLimit(5)
.execute();
Set<Long> set = getKeySet(rows.getResult());
LOG.info("All columns row count: " + set.size() + " " + set.toString());
Assert.assertEquals(0, set.size());
} catch (ConnectionException e) {
Assert.fail();
}
try {
OperationResult<Rows<Long, String>> rows = keyspace.prepareQuery(CF_ALL_ROWS_TOMBSTONE)
.getAllRows()
.setRepeatLastToken(false)
.setIncludeEmptyRows(true)
.setRowLimit(5)
.execute();
Set<Long> set = getKeySet(rows.getResult());
LOG.info("All columns row count: " + set.size() + " " + set.toString());
Assert.assertEquals(100, set.size());
} catch (ConnectionException e) {
Assert.fail();
}
try {
OperationResult<Rows<Long, String>> rows = keyspace.prepareQuery(CF_ALL_ROWS)
.getAllRows()
.setRepeatLastToken(false)
.setRowLimit(5)
.execute();
Set<Long> set = getKeySet(rows.getResult());
LOG.info("All columns row count: " + set.size() + " " + set.toString());
Assert.assertEquals(13, set.size());
} catch (ConnectionException e) {
Assert.fail();
}
try {
OperationResult<Rows<Long, String>> rows = keyspace.prepareQuery(CF_ALL_ROWS)
.getAllRows()
.withColumnSlice("A")
.setRepeatLastToken(false)
.setRowLimit(5)
.execute();
Set<Long> set = getKeySet(rows.getResult());
LOG.info("Column='A' Row count: " + set.size() + " " + set.toString());
Assert.assertEquals(6, set.size());
} catch (ConnectionException e) {
Assert.fail();
}
try {
OperationResult<Rows<Long, String>> rows = keyspace.prepareQuery(CF_ALL_ROWS)
.getAllRows()
.withColumnSlice("B")
.setRepeatLastToken(false)
.setRowLimit(5)
.execute();
Set<Long> set = getKeySet(rows.getResult());
LOG.info("Column='B' Row count: " + set.size() + " " + set.toString());
Assert.assertEquals(13, set.size());
} catch (ConnectionException e) {
Assert.fail();
}
try {
OperationResult<Rows<Long, String>> rows = keyspace.prepareQuery(CF_ALL_ROWS)
.getAllRows()
.withColumnRange(new RangeBuilder().setLimit(1).build())
.setRowLimit(5)
.setRepeatLastToken(false)
.execute();
Set<Long> set = getKeySet(rows.getResult());
LOG.info("Limit 1 row count: " + set.size() + " " + set.toString());
Assert.assertEquals(13, set.size());
} catch (ConnectionException e) {
Assert.fail();
}
try {
OperationResult<Rows<Long, String>> rows = keyspace.prepareQuery(CF_ALL_ROWS)
.getAllRows()
.withColumnRange(new RangeBuilder().setLimit(0).build())
.setRepeatLastToken(false)
.setRowLimit(5)
.execute();
Set<Long> set = getKeySet(rows.getResult());
LOG.info("Limit 0 row count: " + set.size() + " " + set.toString());
Assert.assertEquals(30, set.size());
} catch (ConnectionException e) {
Assert.fail();
}
try {
OperationResult<Rows<Long, String>> rows = keyspace.prepareQuery(CF_ALL_ROWS)
.getAllRows()
.setRepeatLastToken(false)
.setRowLimit(5)
.execute();
Set<Long> set = getKeySet(rows.getResult());
LOG.info("All columns row count: " + set.size() + " " + set.toString());
Assert.assertEquals(13, set.size());
} catch (ConnectionException e) {
Assert.fail();
}
try {
OperationResult<Rows<Long, String>> rows = keyspace.prepareQuery(CF_ALL_ROWS)
.getAllRows()
.setIncludeEmptyRows(true)
.setRepeatLastToken(false)
.setRowLimit(5)
.execute();
Set<Long> set = getKeySet(rows.getResult());
LOG.info("IncludeEmpty Row count: " + set.size() + " " + set.toString());
Assert.assertEquals(30, set.size());
} catch (ConnectionException e) {
Assert.fail();
}
}
public static class ToKeySetCallback<K,C> implements RowCallback<K, C> {
private Set<K> set = new TreeSet<K>();
@Override
public synchronized void success(Rows<K, C> rows) {
set.addAll(getKeySet(rows));
}
@Override
public boolean failure(ConnectionException e) {
// TODO Auto-generated method stub
return false;
}
public Set<K> get() {
return set;
}
}
// @Test
// public void testCCS() {
// String clusterName = "cass_ccs";
// String keyspaceName = "CacheStatus";
//
// AstyanaxContext<Keyspace> context = new AstyanaxContext.Builder()
// .forCluster(clusterName)
// .forKeyspace("CacheStatus")
// .withAstyanaxConfiguration(new AstyanaxConfigurationImpl())
// .withConnectionPoolConfiguration(new ConnectionPoolConfigurationImpl(clusterName + "_" + keyspaceName)
// .setPort(PORT)
// .setSocketTimeout(30000)
// .setMaxTimeoutWhenExhausted(2000)
// .setMaxConnsPerHost(1)
// )
// .withConnectionPoolMonitor(new CountingConnectionPoolMonitor())
// .withHostSupplier(new NetflixDiscoveryHostSupplier(clusterName))
// .buildKeyspace(ThriftFamilyFactory.getInstance());
//
// context.start();
// Keyspace keyspace = context.getEntity();
//
// ColumnFamily<String, String> cf = ColumnFamily.newColumnFamily("cacheConfig", StringSerializer.get(), StringSerializer.get());
// try {
// ToKeySetCallback<String, String> callback = new ToKeySetCallback<String, String>();
// keyspace.prepareQuery(cf)
// .getAllRows()
// .setRepeatLastToken(false)
// .setRowLimit(5)
// .setIncludeEmptyRows(true)
// .executeWithCallback(callback);
//
// Set<String> set = callback.get();
// LOG.info("All columns row count: " + set.size() + " " + set.toString());
// Assert.assertEquals(16, set.size());
// } catch (ConnectionException e) {
// Assert.fail();
// }
//
//
// }
@Test
public void testGetAllWithTombstonesWithCallback() {
try {
ToKeySetCallback callback = new ToKeySetCallback();
keyspace.prepareQuery(CF_ALL_ROWS_TOMBSTONE)
.getAllRows()
.setRepeatLastToken(false)
.setRowLimit(5)
.executeWithCallback(callback);
Set<Long> set = callback.get();
LOG.info("All columns row count: " + set.size() + " " + set.toString());
Assert.assertEquals(0, set.size());
} catch (ConnectionException e) {
Assert.fail();
}
try {
ToKeySetCallback callback = new ToKeySetCallback();
keyspace.prepareQuery(CF_ALL_ROWS_TOMBSTONE)
.getAllRows()
.setRepeatLastToken(false)
.setIncludeEmptyRows(true)
.setRowLimit(5)
.executeWithCallback(callback);
Set<Long> set = callback.get();
LOG.info("All columns row count: " + set.size() + " " + set.toString());
Assert.assertEquals(100, set.size());
} catch (ConnectionException e) {
Assert.fail();
}
try {
ToKeySetCallback callback = new ToKeySetCallback();
keyspace.prepareQuery(CF_ALL_ROWS)
.getAllRows()
.setRepeatLastToken(false)
.setRowLimit(5)
.executeWithCallback(callback);
Set<Long> set = callback.get();
LOG.info("All columns row count: " + set.size() + " " + set.toString());
Assert.assertEquals(13, set.size());
} catch (ConnectionException e) {
Assert.fail();
}
try {
ToKeySetCallback callback = new ToKeySetCallback();
keyspace.prepareQuery(CF_ALL_ROWS)
.getAllRows()
.withColumnSlice("A")
.setRepeatLastToken(false)
.setRowLimit(5)
.executeWithCallback(callback);
Set<Long> set = callback.get();
LOG.info("Column='A' Row count: " + set.size() + " " + set.toString());
Assert.assertEquals(6, set.size());
} catch (ConnectionException e) {
Assert.fail();
}
try {
ToKeySetCallback callback = new ToKeySetCallback();
keyspace.prepareQuery(CF_ALL_ROWS)
.getAllRows()
.withColumnSlice("B")
.setRepeatLastToken(false)
.setRowLimit(5)
.executeWithCallback(callback);
Set<Long> set = callback.get();
LOG.info("Column='B' Row count: " + set.size() + " " + set.toString());
Assert.assertEquals(13, set.size());
} catch (ConnectionException e) {
Assert.fail();
}
try {
ToKeySetCallback callback = new ToKeySetCallback();
keyspace.prepareQuery(CF_ALL_ROWS)
.getAllRows()
.withColumnRange(new RangeBuilder().setLimit(1).build())
.setRowLimit(5)
.setRepeatLastToken(false)
.executeWithCallback(callback);
Set<Long> set = callback.get();
LOG.info("Limit 1 row count: " + set.size() + " " + set.toString());
Assert.assertEquals(13, set.size());
} catch (ConnectionException e) {
Assert.fail();
}
try {
ToKeySetCallback callback = new ToKeySetCallback();
keyspace.prepareQuery(CF_ALL_ROWS)
.getAllRows()
.withColumnRange(new RangeBuilder().setLimit(0).build())
.setRepeatLastToken(false)
.setRowLimit(5)
.executeWithCallback(callback);
Set<Long> set = callback.get();
LOG.info("Limit 0 row count: " + set.size() + " " + set.toString());
Assert.assertEquals(30, set.size());
} catch (ConnectionException e) {
Assert.fail();
}
try {
ToKeySetCallback callback = new ToKeySetCallback();
keyspace.prepareQuery(CF_ALL_ROWS)
.getAllRows()
.setRepeatLastToken(false)
.setRowLimit(5)
.executeWithCallback(callback);
Set<Long> set = callback.get();
LOG.info("All columns row count: " + set.size() + " " + set.toString());
Assert.assertEquals(13, set.size());
} catch (ConnectionException e) {
Assert.fail();
}
try {
ToKeySetCallback callback = new ToKeySetCallback();
keyspace.prepareQuery(CF_ALL_ROWS)
.getAllRows()
.setIncludeEmptyRows(true)
.setRepeatLastToken(false)
.setRowLimit(5)
.executeWithCallback(callback);
Set<Long> set = callback.get();
LOG.info("IncludeEmpty Row count: " + set.size() + " " + set.toString());
Assert.assertEquals(30, set.size());
} catch (ConnectionException e) {
Assert.fail();
}
}
@Test
public void testGetAllWithCallback() {
try {
final AtomicLong counter = new AtomicLong();
keyspace.prepareQuery(CF_ALL_ROWS)
.getAllRows()
.setRowLimit(3)
.setRepeatLastToken(false)
.withColumnRange(new RangeBuilder().setLimit(2).build())
.executeWithCallback(new RowCallback<Long, String>() {
@Override
public void success(Rows<Long, String> rows) {
for (Row<Long, String> row : rows) {
LOG.info("ROW: " + row.getKey() + " " + row.getColumns().size());
counter.incrementAndGet();
}
}
@Override
public boolean failure(ConnectionException e) {
LOG.error(e.getMessage(), e);
return false;
}
});
LOG.info("Read " + counter.get() + " keys");
} catch (ConnectionException e) {
Assert.fail();
}
}
@Test
public void testGetAllWithCallbackThreads() {
try {
final AtomicLong counter = new AtomicLong();
keyspace.prepareQuery(CF_ALL_ROWS)
.getAllRows()
.setRowLimit(3)
.setRepeatLastToken(false)
.setConcurrencyLevel(4)
.executeWithCallback(new RowCallback<Long, String>() {
@Override
public void success(Rows<Long, String> rows) {
LOG.info(Thread.currentThread().getName());
for (Row<Long, String> row : rows) {
counter.incrementAndGet();
}
}
@Override
public boolean failure(ConnectionException e) {
LOG.error(e.getMessage(), e);
return false;
}
});
LOG.info("Read " + counter.get() + " keys");
} catch (ConnectionException e) {
LOG.info("Error getting all rows with callback", e);
Assert.fail();
}
}
@Test
public void testGetAllWithCallbackThreadsAndCheckpoints() throws Exception {
try {
final AtomicLong counter = new AtomicLong();
final CheckpointManager manager = new AstyanaxCheckpointManager(keyspace, CF_CHECKPOINTS.getName(), 123L);
// Read rows in 4 threads
keyspace.prepareQuery(CF_LOTS_OF_ROWS)
.getAllRows()
.setRowLimit(10)
.setRepeatLastToken(true)
.setConcurrencyLevel(4)
.setCheckpointManager(manager)
.executeWithCallback(new RowCallback<Long, String>() {
@Override
public void success(Rows<Long, String> rows) {
try {
LOG.info("Checkpoint: " + manager.getCheckpoints());
} catch (Exception e) {
e.printStackTrace();
}
LOG.info(Thread.currentThread().getName());
for (Row<Long, String> row : rows) {
LOG.info(Thread.currentThread().getName() + " " + row.getKey());
counter.incrementAndGet();
}
}
@Override
public boolean failure(ConnectionException e) {
LOG.error(e.getMessage(), e);
return false;
}
});
Assert.assertEquals(LOTS_OF_ROWS_COUNT, counter.get());
LOG.info("Read " + counter.get() + " keys");
LOG.info(manager.getCheckpoints().toString());
keyspace.prepareQuery(CF_LOTS_OF_ROWS)
.getAllRows()
.setRowLimit(10)
.setRepeatLastToken(true)
.setConcurrencyLevel(4)
.setCheckpointManager(manager)
.executeWithCallback(new RowCallback<Long, String>() {
@Override
public void success(Rows<Long, String> rows) {
Assert.fail("All rows should have been processed");
}
@Override
public boolean failure(ConnectionException e) {
LOG.error(e.getMessage(), e);
return false;
}
});
} catch (ConnectionException e) {
LOG.error("Failed to run test", e);
Assert.fail();
}
}
@Test
public void testTokenRangeTest() {
try {
OperationResult<Rows<Long, String>> rows = keyspace.prepareQuery(CF_ALL_ROWS)
.getAllRows()
.setRowLimit(5)
.setExceptionCallback(new ExceptionCallback() {
@Override
public boolean onException(ConnectionException e) {
Assert.fail(e.getMessage());
return true;
}
})
.forTokenRange("9452287970026068429538183539771339207", "37809151880104273718152734159085356828")
.execute();
Iterator<Row<Long, String>> itr = rows.getResult().iterator();
while (itr.hasNext()) {
Row<Long, String> row = itr.next();
LOG.info("Row: " + row.getKey() + " count=" + row.getColumns().size());
}
Set<Long> set = getKeySet(rows.getResult());
LOG.info(set.toString());
// only a subset of the rows should have been returned
Assert.assertEquals(4, set.size());
} catch (ConnectionException e) {
Assert.fail();
}
}
}
| 7,558 |
0 | Create_ds/astyanax/astyanax-test/src/test/java/com/netflix/astyanax | Create_ds/astyanax/astyanax-test/src/test/java/com/netflix/astyanax/thrift/MockCompositeType.java | package com.netflix.astyanax.thrift;
import com.netflix.astyanax.annotations.Component;
public class MockCompositeType {
@Component
private String stringPart;
@Component
private Integer intPart;
@Component
private Integer intPart2;
@Component
private boolean boolPart;
@Component
private String utf8StringPart;
public MockCompositeType() {
}
public MockCompositeType(String part1, Integer part2, Integer part3,
boolean boolPart, String utf8StringPart) {
this.stringPart = part1;
this.intPart = part2;
this.intPart2 = part3;
this.boolPart = boolPart;
this.utf8StringPart = utf8StringPart;
}
public MockCompositeType setStringPart(String part) {
this.stringPart = part;
return this;
}
public String getStringPart() {
return this.stringPart;
}
public MockCompositeType setIntPart1(int value) {
this.intPart = value;
return this;
}
public int getIntPart1() {
return this.intPart;
}
public MockCompositeType setIntPart2(int value) {
this.intPart2 = value;
return this;
}
public int getIntPart2() {
return this.intPart2;
}
public MockCompositeType setBoolPart(boolean boolPart) {
this.boolPart = boolPart;
return this;
}
public boolean getBoolPart() {
return this.boolPart;
}
public MockCompositeType setUtf8StringPart(String str) {
this.utf8StringPart = str;
return this;
}
public String getUtf8StringPart() {
return this.utf8StringPart;
}
public String toString() {
return new StringBuilder().append("MockCompositeType[")
.append(stringPart).append(',').append(intPart).append(',')
.append(intPart2).append(',').append(boolPart).append(',')
.append(utf8StringPart).append(']').toString();
}
} | 7,559 |
0 | Create_ds/astyanax/astyanax-test/src/test/java/com/netflix/astyanax | Create_ds/astyanax/astyanax-test/src/test/java/com/netflix/astyanax/thrift/ThriftKeyspaceImplTest.java | package com.netflix.astyanax.thrift;
import java.io.IOException;
import java.io.PrintWriter;
import java.io.Serializable;
import java.io.StringReader;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Properties;
import java.util.Random;
import java.util.TreeMap;
import java.util.UUID;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import java.util.concurrent.atomic.AtomicLong;
import junit.framework.Assert;
import org.apache.cassandra.thrift.KsDef;
import com.netflix.astyanax.shaded.org.apache.cassandra.utils.Pair;
import org.apache.commons.lang.RandomStringUtils;
import org.apache.log4j.BasicConfigurator;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.base.Supplier;
import com.google.common.base.Suppliers;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Lists;
import com.google.common.collect.MapDifference;
import com.google.common.collect.Maps;
import com.netflix.astyanax.AstyanaxContext;
import com.netflix.astyanax.Cluster;
import com.netflix.astyanax.ColumnListMutation;
import com.netflix.astyanax.ExceptionCallback;
import com.netflix.astyanax.Keyspace;
import com.netflix.astyanax.MutationBatch;
import com.netflix.astyanax.RowCallback;
import com.netflix.astyanax.Serializer;
import com.netflix.astyanax.SerializerPackage;
import com.netflix.astyanax.connectionpool.Host;
import com.netflix.astyanax.connectionpool.NodeDiscoveryType;
import com.netflix.astyanax.connectionpool.OperationResult;
import com.netflix.astyanax.connectionpool.TokenRange;
import com.netflix.astyanax.connectionpool.exceptions.BadRequestException;
import com.netflix.astyanax.connectionpool.exceptions.ConnectionException;
import com.netflix.astyanax.connectionpool.exceptions.NotFoundException;
import com.netflix.astyanax.connectionpool.impl.ConnectionPoolConfigurationImpl;
import com.netflix.astyanax.connectionpool.impl.ConnectionPoolType;
import com.netflix.astyanax.connectionpool.impl.CountingConnectionPoolMonitor;
import com.netflix.astyanax.cql.CqlStatementResult;
import com.netflix.astyanax.ddl.ColumnFamilyDefinition;
import com.netflix.astyanax.ddl.FieldMetadata;
import com.netflix.astyanax.ddl.KeyspaceDefinition;
import com.netflix.astyanax.impl.AstyanaxConfigurationImpl;
import com.netflix.astyanax.impl.FilteringHostSupplier;
import com.netflix.astyanax.impl.RingDescribeHostSupplier;
import com.netflix.astyanax.model.Column;
import com.netflix.astyanax.model.ColumnFamily;
import com.netflix.astyanax.model.ColumnList;
import com.netflix.astyanax.model.ColumnSlice;
import com.netflix.astyanax.model.ConsistencyLevel;
import com.netflix.astyanax.model.CqlResult;
import com.netflix.astyanax.model.Equality;
import com.netflix.astyanax.model.Row;
import com.netflix.astyanax.model.Rows;
import com.netflix.astyanax.query.AllRowsQuery;
import com.netflix.astyanax.query.ColumnQuery;
import com.netflix.astyanax.query.IndexQuery;
import com.netflix.astyanax.query.PreparedIndexExpression;
import com.netflix.astyanax.query.RowQuery;
import com.netflix.astyanax.retry.ExponentialBackoff;
import com.netflix.astyanax.serializers.AnnotatedCompositeSerializer;
import com.netflix.astyanax.serializers.ByteBufferSerializer;
import com.netflix.astyanax.serializers.LongSerializer;
import com.netflix.astyanax.serializers.ObjectSerializer;
import com.netflix.astyanax.serializers.PrefixedSerializer;
import com.netflix.astyanax.serializers.SerializerPackageImpl;
import com.netflix.astyanax.serializers.StringSerializer;
import com.netflix.astyanax.serializers.TimeUUIDSerializer;
import com.netflix.astyanax.serializers.UnknownComparatorException;
import com.netflix.astyanax.test.SessionEvent;
import com.netflix.astyanax.util.ColumnarRecordWriter;
import com.netflix.astyanax.util.CsvColumnReader;
import com.netflix.astyanax.util.CsvRecordReader;
import com.netflix.astyanax.util.JsonRowsWriter;
import com.netflix.astyanax.util.RangeBuilder;
import com.netflix.astyanax.util.RecordReader;
import com.netflix.astyanax.util.RecordWriter;
import com.netflix.astyanax.util.SingletonEmbeddedCassandra;
import com.netflix.astyanax.util.TimeUUIDUtils;
public class ThriftKeyspaceImplTest {
    private static Logger LOG = LoggerFactory.getLogger(ThriftKeyspaceImplTest.class);
    // Shared connection context; created in setup(), shut down in teardown().
    private static Keyspace keyspace;
    private static AstyanaxContext<Keyspace> keyspaceContext;
    private static ColumnFamily<String, String> CF_USER_INFO = ColumnFamily.newColumnFamily(
            "UserInfo", // Column Family Name
            StringSerializer.get(), // Key Serializer
            StringSerializer.get()); // Column Serializer
    private static ColumnFamily<Long, Long> CF_DELETE = ColumnFamily
            .newColumnFamily(
                    "delete",
                    LongSerializer.get(),
                    LongSerializer.get());
    // CF with indexed firstname/lastname/age columns (see createKeyspace()).
    private static ColumnFamily<Long, String> CF_USERS = ColumnFamily
            .newColumnFamily(
                    "users",
                    LongSerializer.get(),
                    StringSerializer.get());
    private static ColumnFamily<String, String> CF_TTL = ColumnFamily
            .newColumnFamily(
                    "ttl",
                    StringSerializer.get(),
                    StringSerializer.get());
    public static ColumnFamily<String, String> CF_CQL = ColumnFamily
            .newColumnFamily(
                    "cfcql",
                    StringSerializer.get(),
                    StringSerializer.get());
    public static ColumnFamily<String, String> CF_CALLBACK = ColumnFamily
            .newColumnFamily(
                    "cfcallback",
                    StringSerializer.get(),
                    StringSerializer.get());
    // Primary fixture CF: rows "A".."Z" with columns "a".."z" plus
    // indexed Index1/Index2 columns (seeded in createKeyspace()).
    public static ColumnFamily<String, String> CF_STANDARD1 = ColumnFamily
            .newColumnFamily(
                    "Standard1",
                    StringSerializer.get(),
                    StringSerializer.get());
    public static ColumnFamily<String, Long> CF_LONGCOLUMN = ColumnFamily
            .newColumnFamily(
                    "LongColumn1",
                    StringSerializer.get(),
                    LongSerializer.get());
    public static ColumnFamily<String, String> CF_STANDARD2 = ColumnFamily
            .newColumnFamily(
                    "Standard2",
                    StringSerializer.get(),
                    StringSerializer.get());
    public static ColumnFamily<String, String> CF_ALLROWS = ColumnFamily
            .newColumnFamily(
                    "AllRows",
                    StringSerializer.get(),
                    StringSerializer.get());
    // Counter CF (default_validation_class=CounterColumnType).
    public static ColumnFamily<String, String> CF_COUNTER1 = ColumnFamily
            .newColumnFamily(
                    "Counter1",
                    StringSerializer.get(),
                    StringSerializer.get());
    // Deliberately never created in the schema; used for "missing CF" tests.
    public static ColumnFamily<String, String> CF_NOT_DEFINED = ColumnFamily
            .newColumnFamily(
                    "NotDefined",
                    StringSerializer.get(),
                    StringSerializer.get());
    // NOTE(review): points at the same underlying CF name ("NotDefined") as
    // CF_NOT_DEFINED — possibly a copy/paste leftover; confirm intent.
    public static ColumnFamily<String, String> CF_EMPTY = ColumnFamily
            .newColumnFamily(
                    "NotDefined",
                    StringSerializer.get(),
                    StringSerializer.get());
    public static ColumnFamily<Long, Long> ATOMIC_UPDATES = ColumnFamily
            .newColumnFamily(
                    "AtomicUpdates",
                    LongSerializer.get(),
                    LongSerializer.get());
    // Serializer driven by the @Component annotations on MockCompositeType.
    public static AnnotatedCompositeSerializer<MockCompositeType> M_SERIALIZER = new AnnotatedCompositeSerializer<MockCompositeType>(
            MockCompositeType.class);
    public static ColumnFamily<String, MockCompositeType> CF_COMPOSITE = ColumnFamily
            .newColumnFamily(
                    "CompositeColumn",
                    StringSerializer.get(),
                    M_SERIALIZER);
    public static ColumnFamily<ByteBuffer, ByteBuffer> CF_COMPOSITE_CSV = ColumnFamily
            .newColumnFamily(
                    "CompositeCsv",
                    ByteBufferSerializer.get(),
                    ByteBufferSerializer.get());
    public static ColumnFamily<MockCompositeType, String> CF_COMPOSITE_KEY = ColumnFamily
            .newColumnFamily(
                    "CompositeKey",
                    M_SERIALIZER,
                    StringSerializer.get());
    public static ColumnFamily<String, UUID> CF_TIME_UUID = ColumnFamily
            .newColumnFamily(
                    "TimeUUID1",
                    StringSerializer.get(),
                    TimeUUIDSerializer.get());
    public static AnnotatedCompositeSerializer<SessionEvent> SE_SERIALIZER = new AnnotatedCompositeSerializer<SessionEvent>(
            SessionEvent.class);
    public static ColumnFamily<String, SessionEvent> CF_CLICK_STREAM = ColumnFamily
            .newColumnFamily("ClickStream", StringSerializer.get(),
                    SE_SERIALIZER);
    private static final String SEEDS = "localhost:9160";
    // Grace period (ms) for the embedded Cassandra to start/stop.
    private static final long CASSANDRA_WAIT_TIME = 3000;
    private static String TEST_CLUSTER_NAME = "cass_sandbox";
    private static String TEST_KEYSPACE_NAME = "AstyanaxUnitTests";
    /**
     * Configures log4j, boots the embedded Cassandra singleton, waits for it
     * to come up, then builds the keyspace schema and fixture data.
     */
    @BeforeClass
    public static void setup() throws Exception {
        BasicConfigurator.configure();
        System.out.println("TESTING THRIFT KEYSPACE");
        SingletonEmbeddedCassandra.getInstance();
        Thread.sleep(CASSANDRA_WAIT_TIME);
        createKeyspace();
    }
    /** Shuts down the connection pool context and gives Cassandra time to settle. */
    @AfterClass
    public static void teardown() throws Exception {
        if (keyspaceContext != null)
            keyspaceContext.shutdown();
        Thread.sleep(CASSANDRA_WAIT_TIME);
    }
    /**
     * Drops and recreates the test keyspace, defines every column family the
     * test suite needs (including secondary indexes, counters and composite
     * comparators), then seeds CF_STANDARD1, CF_LONGCOLUMN, CF_USER_INFO and
     * CF_ALLROWS with the fixture rows the individual tests assert against.
     */
    public static void createKeyspace() throws Exception {
        keyspaceContext = new AstyanaxContext.Builder()
                .forCluster(TEST_CLUSTER_NAME)
                .forKeyspace(TEST_KEYSPACE_NAME)
                .withAstyanaxConfiguration(
                        new AstyanaxConfigurationImpl()
                                .setDiscoveryType(NodeDiscoveryType.RING_DESCRIBE)
                                .setConnectionPoolType(ConnectionPoolType.ROUND_ROBIN)
                                // NOTE(review): parameter name says seconds but the
                                // value (60000) looks like millis — confirm intent.
                                .setDiscoveryDelayInSeconds(60000))
                .withConnectionPoolConfiguration(
                        new ConnectionPoolConfigurationImpl(TEST_CLUSTER_NAME
                                + "_" + TEST_KEYSPACE_NAME)
                                .setSocketTimeout(30000)
                                .setMaxTimeoutWhenExhausted(2000)
                                .setMaxConnsPerHost(20)
                                .setInitConnsPerHost(10)
                                .setSeeds(SEEDS)
                        )
                .withConnectionPoolMonitor(new CountingConnectionPoolMonitor())
                .buildKeyspace(ThriftFamilyFactory.getInstance());
        keyspaceContext.start();
        keyspace = keyspaceContext.getClient();
        // Drop any leftover keyspace from a previous run; ignore "not found".
        try {
            keyspace.dropKeyspace();
        }
        catch (Exception e) {
            LOG.info(e.getMessage());
        }
        ImmutableMap<String, Object> ksOptions = ImmutableMap.<String, Object>builder()
                .put("strategy_options", ImmutableMap.<String, Object>builder()
                        .put("replication_factor", "1")
                        .build())
                .put("strategy_class",     "SimpleStrategy")
                .build();
        ImmutableMap<String, Object> NO_OPTIONS = ImmutableMap.of();
        // CFs created together with the keyspace: CF_STANDARD1 gets two
        // KEYS-indexed UTF8 columns (Index1/Index2), CF_TTL has no options.
        Map<ColumnFamily, Map<String, Object>> cfs = ImmutableMap.<ColumnFamily, Map<String, Object>>builder()
                .put(CF_STANDARD1,
                        ImmutableMap.<String, Object>builder()
                            .put("column_metadata", ImmutableMap.<String, Object>builder()
                                    .put("Index1", ImmutableMap.<String, Object>builder()
                                            .put("validation_class", "UTF8Type")
                                            .put("index_type",       "KEYS")
                                            .build())
                                    .put("Index2", ImmutableMap.<String, Object>builder()
                                            .put("validation_class", "UTF8Type")
                                            .put("index_type",       "KEYS")
                                            .build())
                                    .build())
                             .build())
                .put(CF_TTL, NO_OPTIONS)
                .build();
        keyspace.createKeyspace(ksOptions, cfs);
        keyspace.createColumnFamily(CF_STANDARD2, null);
        keyspace.createColumnFamily(CF_ALLROWS, null);
        keyspace.createColumnFamily(CF_LONGCOLUMN, null);
        keyspace.createColumnFamily(CF_DELETE, null);
        keyspace.createColumnFamily(ATOMIC_UPDATES,null);
        keyspace.createColumnFamily(CF_CQL, null);
        keyspace.createColumnFamily(CF_CALLBACK, null);
        keyspace.createColumnFamily(CF_COUNTER1, ImmutableMap.<String, Object>builder()
                .put("default_validation_class", "CounterColumnType")
                .build());
        keyspace.createColumnFamily(CF_CLICK_STREAM, ImmutableMap.<String, Object>builder()
                .put("comparator_type", "CompositeType(UTF8Type, TimeUUIDType)")
                .build());
        keyspace.createColumnFamily(CF_COMPOSITE_CSV, ImmutableMap.<String, Object>builder()
                .put("default_validation_class", "UTF8Type")
                .put("key_validation_class",     "UTF8Type")
                .put("comparator_type",          "CompositeType(UTF8Type, LongType)")
                .build());
        // Comparator must match the @Component field order of MockCompositeType.
        keyspace.createColumnFamily(CF_COMPOSITE, ImmutableMap.<String, Object>builder()
                .put("comparator_type", "CompositeType(AsciiType, IntegerType(reversed=true), IntegerType, BytesType, UTF8Type)")
                .build());
        keyspace.createColumnFamily(CF_COMPOSITE_KEY, ImmutableMap.<String, Object>builder()
                .put("key_validation_class", "BytesType")
                .build());
        keyspace.createColumnFamily(CF_TIME_UUID, null);
        keyspace.createColumnFamily(CF_USER_INFO, null);
        keyspace.createColumnFamily(CF_USERS, ImmutableMap.<String, Object>builder()
                .put("default_validation_class", "UTF8Type")
                .put("column_metadata", ImmutableMap.<String, Object>builder()
                        .put("firstname", ImmutableMap.<String, Object>builder()
                                .put("validation_class", "UTF8Type")
                                .put("index_type",       "KEYS")
                                .build())
                        .put("lastname", ImmutableMap.<String, Object>builder()
                                .put("validation_class", "UTF8Type")
                                .put("index_type",       "KEYS")
                                .build())
                        .put("age", ImmutableMap.<String, Object>builder()
                                .put("validation_class", "LongType")
                                .put("index_type",       "KEYS")
                                .build())
                        .build())
                 .build());
        KeyspaceDefinition ki = keyspaceContext.getClient().describeKeyspace();
        System.out.println("Describe Keyspace: " + ki.getName());
        try {
            //
            // CF_Super :
            // 'A' :
            // 'a' :
            // 1 : 'Aa1',
            // 2 : 'Aa2',
            // 'b' :
            // ...
            // 'z' :
            // ...
            // 'B' :
            // ...
            //
            // CF_Standard :
            // 'A' :
            // 'a' : 1,
            // 'b' : 2,
            // ...
            // 'z' : 26,
            // 'B' :
            // ...
            //
            MutationBatch m;
            OperationResult<Void> result;
            m = keyspace.prepareMutationBatch();
            // Rows "A".."Z", each with columns "a".."z" valued 1..26 plus the
            // two indexed columns; executed once per row.
            for (char keyName = 'A'; keyName <= 'Z'; keyName++) {
                String rowKey = Character.toString(keyName);
                ColumnListMutation<String> cfmStandard = m.withRow(
                        CF_STANDARD1, rowKey);
                for (char cName = 'a'; cName <= 'z'; cName++) {
                    cfmStandard.putColumn(Character.toString(cName),
                            (int) (cName - 'a') + 1, null);
                }
                cfmStandard
                        .putColumn("Index1", (int) (keyName - 'A') + 1, null);
                cfmStandard.putColumn("Index2", 42, null);
                m.execute();
            }
            // Extra row used by prefix-query tests.
            m.withRow(CF_STANDARD1, "Prefixes").putColumn("Prefix1_a", 1, null)
                    .putColumn("Prefix1_b", 2, null)
                    .putColumn("prefix2_a", 3, null);
            result = m.execute();
            // CF_LONGCOLUMN row "A": empty columns -10..9 plus Long.MAX_VALUE.
            String rowKey = "A";
            ColumnListMutation<Long> cfmLong = m.withRow(CF_LONGCOLUMN, rowKey);
            for (Long l = -10L; l < 10L; l++) {
                cfmLong.putEmptyColumn(l, null);
            }
            cfmLong.putEmptyColumn(Long.MAX_VALUE, null);
            result = m.execute();
            m.withRow(CF_USER_INFO, "acct1234")
                    .putColumn("firstname", "john", null)
                    .putColumn("lastname", "smith", null)
                    .putColumn("address", "555 Elm St", null)
                    .putColumn("age", 30, null)
                    .putEmptyColumn("empty");
            m.execute();
            // Inserts for CF_ALLROWS
            m = keyspace.prepareMutationBatch();
            for (char keyName = 'A'; keyName <= 'Z'; keyName++) {
                rowKey = Character.toString(keyName);
                ColumnListMutation<String> cfmStandard = m.withRow(
                        CF_ALLROWS, rowKey);
                for (char cName = 'a'; cName <= 'z'; cName++) {
                    cfmStandard.putColumn(Character.toString(cName),
                            (int) (cName - 'a') + 1, null);
                }
                m.execute();
            }
        } catch (Exception e) {
            System.out.println(e.getMessage());
            Assert.fail();
        }
    }
    /**
     * Verifies Cassandra's timestamp-based (last-write-wins) conflict
     * resolution for column deletes: a delete carrying an OLDER timestamp
     * than the column's write is ignored, while a delete with a NEWER
     * timestamp removes the column.
     */
    @Test
    public void testMultiColumnDelete() throws Exception {
        MutationBatch mb = keyspace.prepareMutationBatch();
        // Write two empty columns with explicit timestamps 1 and 10.
        mb.withRow(CF_DELETE, 1L)
            .setTimestamp(1).putEmptyColumn(1L, null)
            .setTimestamp(10).putEmptyColumn(2L, null)
            ;
        mb.execute();
        ColumnList<Long> result1 = keyspace.prepareQuery(CF_DELETE).getRow(1L).execute().getResult();
        Assert.assertEquals(2, result1.size());
        Assert.assertNotNull(result1.getColumnByName(1L));
        Assert.assertNotNull(result1.getColumnByName(2L));
        logColumnList("Insert", result1);
        // Delete both columns with timestamps one LESS than their writes and
        // add a third column: both deletes must lose, leaving 3 columns.
        mb = keyspace.prepareMutationBatch();
        mb.withRow(CF_DELETE, 1L)
            .setTimestamp(result1.getColumnByName(1L).getTimestamp()-1)
            .deleteColumn(1L)
            .setTimestamp(result1.getColumnByName(2L).getTimestamp()-1)
            .deleteColumn(2L)
            .putEmptyColumn(3L, null);
        mb.execute();
        result1 = keyspace.prepareQuery(CF_DELETE).getRow(1L).execute().getResult();
        logColumnList("Delete with older timestamp", result1);
        Assert.assertEquals(3, result1.size());
        LOG.info("Delete L2 with TS: " + (result1.getColumnByName(2L).getTimestamp()+1));
        // Now delete columns 1 and 2 with timestamps one GREATER than their
        // writes: both deletes win, leaving only column 3.
        mb.withRow(CF_DELETE, 1L)
            .setTimestamp(result1.getColumnByName(1L).getTimestamp()+1)
            .deleteColumn(1L)
            .setTimestamp(result1.getColumnByName(2L).getTimestamp()+1)
            .deleteColumn(2L);
        mb.execute();
        result1 = keyspace.prepareQuery(CF_DELETE).getRow(1L).execute().getResult();
        logColumnList("Delete with newer timestamp", result1);
        Assert.assertEquals(1, result1.size());
    }
<T> void logColumnList(String label, ColumnList<T> cl) {
LOG.info(">>>>>> " + label);
for (Column<T> c : cl) {
LOG.info(c.getName() + " " + c.getTimestamp());
}
LOG.info("<<<<<<");
}
@Test
public void testCqlComposite() throws Exception {
CqlStatementResult result = keyspace.prepareCqlStatement()
.withCql("SELECT * FROM " + CF_COMPOSITE_CSV.getName())
.execute()
.getResult();
result.getSchema();
result.getRows(CF_COMPOSITE_CSV);
}
@Test
public void testHasValue() throws Exception {
ColumnList<String> response = keyspace.prepareQuery(CF_USER_INFO).getRow("acct1234").execute().getResult();
Assert.assertEquals("firstname", response.getColumnByName("firstname").getName());
Assert.assertEquals("firstname", response.getColumnByName("firstname").getName());
Assert.assertEquals("john", response.getColumnByName("firstname").getStringValue());
Assert.assertEquals("john", response.getColumnByName("firstname").getStringValue());
Assert.assertEquals(true, response.getColumnByName("firstname").hasValue());
Assert.assertEquals(false, response.getColumnByName("empty").hasValue());
}
@Test
public void getKeyspaceDefinition() throws Exception {
KeyspaceDefinition def = keyspaceContext.getEntity().describeKeyspace();
Collection<String> fieldNames = def.getFieldNames();
LOG.info("Getting field names");
for (String field : fieldNames) {
LOG.info(field);
}
LOG.info(fieldNames.toString());
for (FieldMetadata field : def.getFieldsMetadata()) {
LOG.info(field.getName() + " = " + def.getFieldValue(field.getName()) + " (" + field.getType() + ")");
}
for (ColumnFamilyDefinition cfDef : def.getColumnFamilyList()) {
LOG.info("----------" );
for (FieldMetadata field : cfDef.getFieldsMetadata()) {
LOG.info(field.getName() + " = " + cfDef.getFieldValue(field.getName()) + " (" + field.getType() + ")");
}
}
}
@Test
public void testCopyKeyspace() throws Exception {
KeyspaceDefinition def = keyspaceContext.getEntity().describeKeyspace();
Properties props = def.getProperties();
for (Entry<Object, Object> prop : props.entrySet()) {
LOG.info(prop.getKey() + " : " + prop.getValue());
}
KsDef def2 = ThriftUtils.getThriftObjectFromProperties(KsDef.class, props);
Properties props2 = ThriftUtils.getPropertiesFromThrift(def2);
LOG.info("Props1:" + new TreeMap<Object, Object>(props));
LOG.info("Props2:" + new TreeMap<Object, Object>(props2));
MapDifference<Object, Object> diff = Maps.difference(props, props2);
LOG.info("Not copied : " + diff.entriesOnlyOnLeft());
LOG.info("Added : " + diff.entriesOnlyOnRight());
LOG.info("Differing : " + diff.entriesDiffering());
Assert.assertTrue(diff.areEqual());
}
@Test
public void testNonExistentKeyspace() {
AstyanaxContext<Keyspace> ctx = new AstyanaxContext.Builder()
.forCluster(TEST_CLUSTER_NAME)
.forKeyspace(TEST_KEYSPACE_NAME + "_NonExistent")
.withAstyanaxConfiguration(
new AstyanaxConfigurationImpl()
.setDiscoveryType(NodeDiscoveryType.RING_DESCRIBE)
.setConnectionPoolType(ConnectionPoolType.ROUND_ROBIN)
.setDiscoveryDelayInSeconds(60000))
.withConnectionPoolConfiguration(
new ConnectionPoolConfigurationImpl(TEST_CLUSTER_NAME
+ "_" + TEST_KEYSPACE_NAME)
.setSocketTimeout(30000)
.setMaxTimeoutWhenExhausted(2000)
.setMaxConnsPerHost(20)
.setInitConnsPerHost(10)
.setSeeds(SEEDS))
.withConnectionPoolMonitor(new CountingConnectionPoolMonitor())
.buildKeyspace(ThriftFamilyFactory.getInstance());
ctx.start();
try {
KeyspaceDefinition keyspaceDef = ctx.getEntity().describeKeyspace();
Assert.fail();
} catch (ConnectionException e) {
LOG.info(e.getMessage());
}
}
    /**
     * Exercises ring description plus the host-supplier plumbing: the ring
     * reported by the (single-node) cluster is intersected with fixed seed
     * lists via {@code FilteringHostSupplier}.
     */
    @Test
    public void testDescribeRing() throws Exception {
        // [TokenRangeImpl [startToken=0, endToken=0, endpoints=[127.0.0.1]]]
        List<TokenRange> ring = keyspaceContext.getEntity().describeRing();
        LOG.info(ring.toString());
        // 127.0.0.1
        RingDescribeHostSupplier ringSupplier = new RingDescribeHostSupplier(keyspaceContext.getEntity(), 9160);
        List<Host> hosts = ringSupplier.get();
        // Single-node test cluster: the one host owns exactly one token range.
        Assert.assertEquals(1, hosts.get(0).getTokenRanges().size());
        LOG.info(hosts.toString());
        Supplier<List<Host>> sourceSupplier1 = Suppliers.ofInstance((List<Host>)Lists.newArrayList(new Host("127.0.0.1", 9160)));
        Supplier<List<Host>> sourceSupplier2 = Suppliers.ofInstance((List<Host>)Lists.newArrayList(new Host("127.0.0.2", 9160)));
        // 127.0.0.1
        LOG.info(sourceSupplier1.get().toString());
        // 127.0.0.2
        LOG.info(sourceSupplier2.get().toString());
        // Seed list contains the ring host: the filtered result keeps the
        // host together with its token-range metadata.
        hosts = new FilteringHostSupplier(ringSupplier, sourceSupplier1).get();
        LOG.info(hosts.toString());
        Assert.assertEquals(1, hosts.size());
        Assert.assertEquals(1, hosts.get(0).getTokenRanges().size());
        // Seed list names a host NOT in the ring; one host still comes back.
        // NOTE(review): presumably the filter falls back to the source list
        // in this case — confirm against FilteringHostSupplier.
        hosts = new FilteringHostSupplier(ringSupplier, sourceSupplier2).get();
        LOG.info(hosts.toString());
        Assert.assertEquals(1, hosts.size());
    }
@Test
public void paginateColumns() throws Exception {
String column = "";
ColumnList<String> columns;
int pageize = 10;
RowQuery<String, String> query = keyspace
.prepareQuery(CF_STANDARD1)
.getKey("A")
.autoPaginate(true)
.withColumnRange(
new RangeBuilder().setStart(column)
.setLimit(pageize).build());
while (!(columns = query.execute().getResult()).isEmpty()) {
for (Column<String> c : columns) {
}
// column = Iterables.getLast(columns).getName() + "\u0000";
}
}
@Test
public void example() {
AstyanaxContext<Keyspace> context = new AstyanaxContext.Builder()
.forCluster(TEST_CLUSTER_NAME)
.forKeyspace(TEST_KEYSPACE_NAME)
.withAstyanaxConfiguration(
new AstyanaxConfigurationImpl()
.setDiscoveryType(NodeDiscoveryType.NONE))
.withConnectionPoolConfiguration(
new ConnectionPoolConfigurationImpl("MyConnectionPool")
.setMaxConnsPerHost(1).setSeeds(
"127.0.0.1:9160"))
.withConnectionPoolMonitor(new CountingConnectionPoolMonitor())
.buildKeyspace(ThriftFamilyFactory.getInstance());
context.start();
Keyspace keyspace = context.getEntity();
MutationBatch m = keyspace.prepareMutationBatch();
// m.withRow(CF_USER_STATS, "acct1234")
// .incrementCounterColumn("loginCount", 1);
try {
OperationResult<Void> result = m.execute();
} catch (ConnectionException e) {
System.out.println(e);
}
try {
OperationResult<ColumnList<String>> result = keyspace
.prepareQuery(CF_USER_INFO).getKey("acct1234").execute();
ColumnList<String> columns = result.getResult();
// Lookup columns in response by name
int age = columns.getColumnByName("age").getIntegerValue();
String address = columns.getColumnByName("address")
.getStringValue();
// Or, iterate through the columns
for (Column<String> c : result.getResult()) {
System.out.println(c.getName());
}
} catch (ConnectionException e) {
System.out.println(e);
}
}
@Test
public void paginateLongColumns() {
Long column = Long.MIN_VALUE;
ColumnList<Long> columns;
int pageize = 10;
try {
RowQuery<String, Long> query = keyspace
.prepareQuery(CF_LONGCOLUMN)
.getKey("A")
.autoPaginate(true)
.withColumnRange(
new RangeBuilder().setStart(column)
.setLimit(pageize).build());
while (!(columns = query.execute().getResult()).isEmpty()) {
LOG.info("-----");
for (Column<Long> c : columns) {
LOG.info(Long.toString(c.getName()));
}
// column = Iterables.getLast(columns).getName() + "\u0000";
}
} catch (ConnectionException e) {
}
}
@Test
public void getAll() {
AtomicLong counter = new AtomicLong(0);
try {
OperationResult<Rows<String, String>> rows = keyspace
.prepareQuery(CF_ALLROWS).getAllRows().setConcurrencyLevel(2).setRowLimit(10)
.setRepeatLastToken(false)
.withColumnRange(new RangeBuilder().setLimit(0).build())
.setExceptionCallback(new ExceptionCallback() {
@Override
public boolean onException(ConnectionException e) {
Assert.fail(e.getMessage());
return true;
}
}).execute();
for (Row<String, String> row : rows.getResult()) {
counter.incrementAndGet();
LOG.info("ROW: " + row.getKey() + " " + row.getColumns().size());
}
Assert.assertEquals(26, counter.get());
} catch (ConnectionException e) {
Assert.fail();
}
}
    /**
     * Writes 26 rows ('A'..'Z', each with columns 'a'..'z' = 1..26) into
     * CF_CALLBACK, then reads all rows back through the asynchronous
     * callback API and verifies every row is delivered exactly once.
     */
    @Test
    public void getAllWithCallback() {
        try {
            MutationBatch m = keyspace.prepareMutationBatch();
            for (char keyName = 'A'; keyName <= 'Z'; keyName++) {
                String rowKey = Character.toString(keyName);
                ColumnListMutation<String> cfmStandard = m.withRow(CF_CALLBACK, rowKey);
                for (char cName = 'a'; cName <= 'z'; cName++) {
                    cfmStandard.putColumn(Character.toString(cName),
                            (int) (cName - 'a') + 1, null);
                }
                m.execute();
            }
            final AtomicLong counter = new AtomicLong();
            // Small row/column pages with two concurrent range scans: the
            // success() callback may run on multiple threads, hence AtomicLong.
            keyspace.prepareQuery(CF_CALLBACK).getAllRows().setRowLimit(3)
                    .setRepeatLastToken(false)
                    .setConcurrencyLevel(2)
                    .withColumnRange(new RangeBuilder().setLimit(2).build())
                    .executeWithCallback(new RowCallback<String, String>() {
                        @Override
                        public void success(Rows<String, String> rows) {
                            for (Row<String, String> row : rows) {
                                LOG.info("ROW: " + row.getKey() + " "
                                        + row.getColumns().size());
                                counter.incrementAndGet();
                            }
                        }
                        @Override
                        public boolean failure(ConnectionException e) {
                            LOG.error(e.getMessage(), e);
                            return false;
                        }
                    });
            LOG.info("Read " + counter.get() + " keys");
            Assert.assertEquals(26, counter.get());
        } catch (ConnectionException e) {
            Assert.fail();
        }
    }
static class UserInfo implements Serializable {
private static final long serialVersionUID = 6366200973810770033L;
private String firstName;
private String lastName;
public UserInfo() {
}
public void setFirstName(String firstName) {
this.firstName = firstName;
}
public String getFirstName() {
return this.firstName;
}
public void setLastName(String lastName) {
this.lastName = lastName;
}
public String getLastName() {
return this.lastName;
}
public boolean equals(Object other) {
UserInfo smo = (UserInfo) other;
return firstName.equals(smo.firstName)
&& lastName.equals(smo.lastName);
}
}
@Test
public void testSerializedClassValue() {
UserInfo smo = new UserInfo();
smo.setLastName("Landau");
smo.setFirstName("Eran");
try {
ByteBuffer bb = ObjectSerializer.get().toByteBuffer(smo);
keyspace.prepareColumnMutation(CF_STANDARD1, "Key_SerializeTest",
"Column1").putValue(bb, null).execute();
UserInfo smo2 = (UserInfo) keyspace.prepareQuery(CF_STANDARD1)
.getKey("Key_SerializeTest").getColumn("Column1").execute()
.getResult().getValue(ObjectSerializer.get());
Assert.assertEquals(smo, smo2);
} catch (ConnectionException e) {
Assert.fail();
}
}
    /**
     * Exercises single-column mutations and reads for every scalar value
     * type (String, byte, short, int, long, float, double) plus an
     * explicit-timestamp write.  Each section follows the same pattern:
     * set -> read back -> delete -> verify NotFoundException on re-read.
     */
    @Test
    public void testSingleOps() throws Exception {
        String key = "SingleOpsTest";
        Random prng = new Random();
        // Set a string value
        {
            String column = "StringColumn";
            String value = RandomStringUtils.randomAlphanumeric(32);
            // Set
            keyspace.prepareColumnMutation(CF_STANDARD1, key, column)
                    .putValue(value, null).execute();
            // Read
            ColumnQuery<String> query = keyspace.prepareQuery(CF_STANDARD1).getKey(key)
                    .getColumn(column);
            String v = query.execute().getResult().getStringValue();
            Assert.assertEquals(value, v);
            // Re-executing the same prepared query must be repeatable.
            v = query.execute().getResult().getStringValue();
            Assert.assertEquals(value, v);
            // Delete
            keyspace.prepareColumnMutation(CF_STANDARD1, key, column)
                    .deleteColumn().execute();
            try {
                keyspace.prepareQuery(CF_STANDARD1).getKey(key)
                        .getColumn(column).execute().getResult()
                        .getStringValue();
                Assert.fail();
            } catch (NotFoundException e) {
            } catch (ConnectionException e) {
                Assert.fail();
            }
        }
        // Set a byte value
        {
            String column = "ByteColumn";
            byte value = (byte) prng.nextInt(Byte.MAX_VALUE);
            // Set
            keyspace.prepareColumnMutation(CF_STANDARD1, key, column)
                    .putValue(value, null).execute();
            // Read
            byte v = keyspace.prepareQuery(CF_STANDARD1).getKey(key)
                    .getColumn(column).execute().getResult().getByteValue();
            Assert.assertEquals(value, v);
            // Delete
            keyspace.prepareColumnMutation(CF_STANDARD1, key, column)
                    .deleteColumn().execute();
            // verify column gone
            try {
                keyspace.prepareQuery(CF_STANDARD1).getKey(key)
                        .getColumn(column).execute().getResult().getByteValue();
                Assert.fail();
            } catch (NotFoundException e) {
                // expected
            }
        }
        // Set a short value
        {
            String column = "ShortColumn";
            short value = (short) prng.nextInt(Short.MAX_VALUE);
            // Set
            keyspace.prepareColumnMutation(CF_STANDARD1, key, column)
                    .putValue(value, null).execute();
            // Read
            short v = keyspace.prepareQuery(CF_STANDARD1).getKey(key)
                    .getColumn(column).execute().getResult().getShortValue();
            Assert.assertEquals(value, v);
            // Delete
            keyspace.prepareColumnMutation(CF_STANDARD1, key, column)
                    .deleteColumn().execute();
            // verify column gone
            try {
                keyspace.prepareQuery(CF_STANDARD1).getKey(key)
                        .getColumn(column).execute().getResult().getShortValue();
                Assert.fail();
            } catch (NotFoundException e) {
                // expected
            }
        }
        // Set a int value
        {
            String column = "IntColumn";
            int value = prng.nextInt();
            // Set
            keyspace.prepareColumnMutation(CF_STANDARD1, key, column)
                    .putValue(value, null).execute();
            // Read
            int v = keyspace.prepareQuery(CF_STANDARD1).getKey(key)
                    .getColumn(column).execute().getResult().getIntegerValue();
            Assert.assertEquals(value, v);
            // Delete
            keyspace.prepareColumnMutation(CF_STANDARD1, key, column)
                    .deleteColumn().execute();
            // verify column gone
            try {
                keyspace.prepareQuery(CF_STANDARD1).getKey(key)
                        .getColumn(column).execute().getResult().getIntegerValue();
                Assert.fail();
            } catch (NotFoundException e) {
                // expected
            }
        }
        // Set a long value
        {
            String column = "LongColumn";
            long value = prng.nextLong();
            // Set
            keyspace.prepareColumnMutation(CF_STANDARD1, key, column)
                    .putValue(value, null).execute();
            // Read
            long v = keyspace.prepareQuery(CF_STANDARD1).getKey(key)
                    .getColumn(column).execute().getResult().getLongValue();
            Assert.assertEquals(value, v);
            // get as integer should fail
            try {
                keyspace.prepareQuery(CF_STANDARD1).getKey(key)
                        .getColumn(column).execute().getResult()
                        .getIntegerValue();
                Assert.fail();
            } catch (Exception e) {
                // expected
            }
            // Delete
            keyspace.prepareColumnMutation(CF_STANDARD1, key, column)
                    .deleteColumn().execute();
            // verify column gone
            try {
                keyspace.prepareQuery(CF_STANDARD1).getKey(key)
                        .getColumn(column).execute().getResult().getLongValue();
                Assert.fail();
            } catch (NotFoundException e) {
                // expected
            }
        }
        // Set a float value
        {
            String column = "FloatColumn";
            float value = prng.nextFloat();
            // Set
            keyspace.prepareColumnMutation(CF_STANDARD1, key, column)
                    .putValue(value, null).execute();
            // Read
            float v = keyspace.prepareQuery(CF_STANDARD1).getKey(key)
                    .getColumn(column).execute().getResult().getFloatValue();
            Assert.assertEquals(value, v);
            // Delete
            keyspace.prepareColumnMutation(CF_STANDARD1, key, column)
                    .deleteColumn().execute();
            // verify column gone
            try {
                keyspace.prepareQuery(CF_STANDARD1).getKey(key)
                        .getColumn(column).execute().getResult().getFloatValue();
                Assert.fail();
            } catch (NotFoundException e) {
                // expected
            }
        }
        // Set a double value
        {
            // NOTE(review): copy-paste from the int section — the column is
            // named "IntColumn" rather than "DoubleColumn".  Harmless only
            // because the int section deleted its column above.
            String column = "IntColumn";
            double value = prng.nextDouble();
            // Set
            keyspace.prepareColumnMutation(CF_STANDARD1, key, column)
                    .putValue(value, null).execute();
            // Read
            double v = keyspace.prepareQuery(CF_STANDARD1).getKey(key)
                    .getColumn(column).execute().getResult().getDoubleValue();
            Assert.assertEquals(value, v);
            // get as integer should fail
            try {
                keyspace.prepareQuery(CF_STANDARD1).getKey(key)
                        .getColumn(column).execute().getResult()
                        .getIntegerValue();
                Assert.fail();
            } catch (Exception e) {
                // expected
            }
            // Delete
            keyspace.prepareColumnMutation(CF_STANDARD1, key, column)
                    .deleteColumn().execute();
            try {
                keyspace.prepareQuery(CF_STANDARD1).getKey(key)
                        .getColumn(column).execute().getResult()
                        .getDoubleValue();
                Assert.fail();
            } catch (NotFoundException e) {
            } catch (ConnectionException e) {
                Assert.fail();
            }
        }
        // Set long column with timestamp
        {
            String column = "TimestampColumn";
            long value = prng.nextLong();
            // Set with an explicit write timestamp of 100.
            keyspace.prepareColumnMutation(CF_STANDARD1, key, column)
                    .withTimestamp(100)
                    .putValue(value, null)
                    .execute();
            // Read back and verify the stored timestamp survived.
            Column<String> c = keyspace.prepareQuery(CF_STANDARD1).getKey(key)
                    .getColumn(column).execute().getResult();
            Assert.assertEquals(100, c.getTimestamp());
        }
    }
@Test
public void testTimeUUIDUnique() {
long now = System.currentTimeMillis();
UUID uuid1 = TimeUUIDUtils.getTimeUUID(now);
UUID uuid2 = TimeUUIDUtils.getTimeUUID(now);
LOG.info(uuid1.toString());
LOG.info(uuid2.toString());
Assert.assertTrue(uuid1.equals(uuid2));
}
    /**
     * Writes 100 TimeUUID columns spaced one day apart going back in time
     * from "now", then reads the slice starting 20 days back, printing each
     * returned column's age in days.
     */
    @Test
    public void testTimeUUID2() {
        MutationBatch m = keyspace.prepareMutationBatch();
        String rowKey = "Key2";
        // Clear any residue from a previous run.
        m.withRow(CF_TIME_UUID, rowKey).delete();
        try {
            m.execute();
        } catch (ConnectionException e) {
            Assert.fail(e.getMessage());
        }
        long now = System.currentTimeMillis();
        long msecPerDay = 86400000;
        // Column i is timestamped i days before now.
        for (int i = 0; i < 100; i++) {
            m.withRow(CF_TIME_UUID, rowKey).putColumn(
                    TimeUUIDUtils.getTimeUUID(now - i * msecPerDay), i, null);
        }
        try {
            m.execute();
        } catch (ConnectionException e) {
            Assert.fail(e.getMessage());
        }
        try {
            // TimeUUID columns sort chronologically, so this slice returns
            // the columns from 20 days ago forward.
            OperationResult<ColumnList<UUID>> result = keyspace
                    .prepareQuery(CF_TIME_UUID)
                    .getKey(rowKey)
                    .withColumnRange(
                            new RangeBuilder()
                                    .setLimit(100)
                                    .setStart(
                                            TimeUUIDUtils.getTimeUUID(now - 20
                                                    * msecPerDay)).build())
                    .execute();
            for (Column<UUID> column : result.getResult()) {
                System.out.println((now - TimeUUIDUtils.getTimeFromUUID(column
                        .getName())) / msecPerDay);
            }
        } catch (ConnectionException e) {
            Assert.fail(e.getMessage());
        }
    }
    /**
     * Exercises TimeUUID column names end to end: single-column read, range
     * slices bounded by generated TimeUUIDs, serializer-package lookup for
     * the comparator, and auto-pagination over a TimeUUID range.
     */
    @Test
    public void testTimeUUID() {
        MutationBatch m = keyspace.prepareMutationBatch();
        UUID columnName = TimeUUIDUtils.getUniqueTimeUUIDinMillis();
        long columnTime = TimeUUIDUtils.getTimeFromUUID(columnName);
        String rowKey = "Key1";
        // Clear any residue from a previous run.
        m.withRow(CF_TIME_UUID, rowKey).delete();
        try {
            m.execute();
        } catch (ConnectionException e1) {
            Assert.fail();
        }
        int startTime = 100;
        int endTime = 200;
        // One marker column (value 42) plus 100 columns offset 100..199 ms
        // after the marker's timestamp.
        m.withRow(CF_TIME_UUID, rowKey).putColumn(columnName, 42, null);
        for (int i = startTime; i < endTime; i++) {
            // UUID c = TimeUUIDUtils.getTimeUUID(i);
            LOG.info(TimeUUIDUtils.getTimeUUID(columnTime + i).toString());
            m.withRow(CF_TIME_UUID, rowKey).putColumn(
                    TimeUUIDUtils.getTimeUUID(columnTime + i), i, null);
        }
        try {
            m.execute();
        } catch (ConnectionException e) {
            LOG.error(e.getMessage(), e);
            Assert.fail();
        }
        try {
            // Read the marker column back by its exact TimeUUID name.
            OperationResult<Column<UUID>> result = keyspace
                    .prepareQuery(CF_TIME_UUID).getKey(rowKey)
                    .getColumn(columnName).execute();
            Assert.assertEquals(columnName, result.getResult().getName());
            Assert.assertTrue(result.getResult().getIntegerValue() == 42);
            OperationResult<ColumnList<UUID>> result2 = keyspace
                    .prepareQuery(CF_TIME_UUID).getKey(rowKey).execute();
            // Range slice bounded by epoch-0 and a far-future TimeUUID,
            // limited to 10 columns.
            result2 = keyspace
                    .prepareQuery(CF_TIME_UUID)
                    .getKey(rowKey)
                    .withColumnRange(
                            new RangeBuilder()
                                    .setLimit(10)
                                    .setStart(TimeUUIDUtils.getTimeUUID(0))
                                    .setEnd(TimeUUIDUtils
                                            .getTimeUUID(Long.MAX_VALUE >> 8))
                                    .build()).execute();
            Assert.assertEquals(10, result2.getResult().size());
        } catch (ConnectionException e) {
            LOG.error(e.getMessage(), e);
            Assert.fail();
        }
        UUID currentUUID = TimeUUIDUtils.getUniqueTimeUUIDinMicros();
        SerializerPackage pkg = null;
        try {
            pkg = keyspace.getSerializerPackage(CF_TIME_UUID.getName(), false);
        } catch (ConnectionException e) {
            // NOTE(review): statements after Assert.fail() are unreachable
            // (fail() throws AssertionError); the stack trace is never printed.
            Assert.fail();
            e.printStackTrace();
        } catch (UnknownComparatorException e) {
            Assert.fail();
            e.printStackTrace();
        }
        // The column-name serializer is resolved from the CF's comparator.
        Serializer<UUID> serializer = (Serializer<UUID>) pkg
                .getColumnNameSerializer();
        ByteBuffer buffer = serializer.toByteBuffer(currentUUID);
        String value = serializer.getString(buffer);
        LOG.info("UUID Time = " + value);
        // Test timeUUID pagination
        RowQuery<String, UUID> query = keyspace
                .prepareQuery(CF_TIME_UUID)
                .getKey(rowKey)
                .withColumnRange(
                        new RangeBuilder()
                                .setLimit(10)
                                .setStart(
                                        TimeUUIDUtils.getTimeUUID(columnTime
                                                + startTime))
                                .setEnd(TimeUUIDUtils.getTimeUUID(columnTime
                                        + endTime)).build()).autoPaginate(true);
        OperationResult<ColumnList<UUID>> result;
        int pageCount = 0;
        int rowCount = 0;
        try {
            LOG.info("starting pagination");
            while (!(result = query.execute()).getResult().isEmpty()) {
                pageCount++;
                rowCount += result.getResult().size();
                LOG.info("==== Block ====");
                for (Column<UUID> column : result.getResult()) {
                    LOG.info("Column is " + column.getName());
                }
            }
            LOG.info("pagination complete");
        } catch (ConnectionException e) {
            // NOTE(review): as above, the logging after Assert.fail() never runs.
            Assert.fail();
            LOG.info(e.getMessage());
            e.printStackTrace();
        }
    }
@Test
public void testCopy() {
String keyName = "A";
try {
keyspace.prepareQuery(CF_STANDARD1).getKey(keyName)
.copyTo(CF_STANDARD2, keyName).execute();
ColumnList<String> list1 = keyspace.prepareQuery(CF_STANDARD1)
.getKey(keyName).execute().getResult();
ColumnList<String> list2 = keyspace.prepareQuery(CF_STANDARD2)
.getKey(keyName).execute().getResult();
Iterator<Column<String>> iter1 = list1.iterator();
Iterator<Column<String>> iter2 = list2.iterator();
while (iter1.hasNext()) {
Column<String> column1 = iter1.next();
Column<String> column2 = iter2.next();
Assert.assertEquals(column1.getName(), column2.getName());
Assert.assertEquals(column1.getByteBufferValue(),
column2.getByteBufferValue());
}
Assert.assertFalse(iter2.hasNext());
} catch (ConnectionException e) {
LOG.error(e.getMessage(), e);
Assert.fail(e.getMessage());
}
}
@Test
public void testMutationBatchMultipleWithRow() throws Exception {
MutationBatch mb = keyspace.prepareMutationBatch();
Long key = 9L;
mb.withRow(CF_USERS, key).delete();
mb.withRow(CF_USERS, key).putEmptyColumn("test", null);
mb.execute();
ColumnList<String> result = keyspace.prepareQuery(CF_USERS).getRow(key).execute().getResult();
Assert.assertEquals(1, result.size());
}
    /**
     * Exercises atomic (logged) batch mutations: writes two rows atomically,
     * verifies them, then atomically adds a third row while deleting the
     * first two, and finally cleans up.
     */
    @Test
    public void testAtomicBatchMutation() throws Exception {
        MutationBatch mb = keyspace.prepareMutationBatch().withAtomicBatch(true);
        mb.withRow(ATOMIC_UPDATES, 1L)
                .putColumn(11L, 11L)
                .putColumn(12L, 12L);
        mb.withRow(ATOMIC_UPDATES, 2L)
                .putColumn(21L, 21L)
                .putColumn(22L, 22L);
        mb.execute();
        // Both rows must be visible, each with its two columns.
        Rows<Long, Long> result =
                keyspace.prepareQuery(ATOMIC_UPDATES).getAllRows().execute().getResult();
        int size = 0;
        for (Row<Long, Long> row : result) {
            LOG.info("ROW: " + row.getKey() + " " + row.getColumns().size());
            size++;
            Assert.assertEquals(2, row.getColumns().size());
        }
        Assert.assertEquals(2, size);
        size = 0;
        // Mixed atomic batch: one insert plus two row deletes.
        mb = keyspace.prepareMutationBatch().withAtomicBatch(true);
        mb.withRow(ATOMIC_UPDATES, 3L)
                .putColumn(11L, 11L)
                .putColumn(12L, 12L);
        mb.withRow(ATOMIC_UPDATES, 1L).delete();
        mb.withRow(ATOMIC_UPDATES, 2L).delete();
        mb.execute();
        // Only row 3 should remain, still with two columns.
        result = keyspace.prepareQuery(ATOMIC_UPDATES).getAllRows().execute().getResult();
        for (Row<Long, Long> row : result) {
            LOG.info("ROW: " + row.getKey() + " " + row.getColumns().size());
            size++;
            Assert.assertEquals(2, row.getColumns().size());
        }
        Assert.assertEquals(1, size);
        // Cleanup so reruns start from an empty column family.
        mb = keyspace.prepareMutationBatch().withAtomicBatch(true);
        mb.withRow(ATOMIC_UPDATES, 3L).delete();
        mb.execute();
    }
    /**
     * Writes a simulated click stream — 10 sessions x 10 TimeUUID events
     * under one user row, keyed by composite (sessionId, eventTime) — then
     * slices it with every combination of inclusive/exclusive composite
     * range bounds plus a prefix+time-range query.  Assertions on the slice
     * sizes are currently commented out; the test only checks the range
     * queries execute.
     */
    @Test
    public void testClickStream() {
        MutationBatch m = keyspace.prepareMutationBatch();
        String userId = "UserId";
        long timeCounter = 0;
        for (int i = 0; i < 10; i++) {
            String sessionId = "Session" + i;
            for (int j = 0; j < 10; j++) {
                m.withRow(CF_CLICK_STREAM, userId).putColumn(
                        new SessionEvent(sessionId,
                                TimeUUIDUtils.getTimeUUID(j)),
                        Long.toString(timeCounter), null);
                timeCounter++;
            }
        }
        try {
            m.execute();
        } catch (ConnectionException e) {
            LOG.error(e.getMessage(), e);
            Assert.fail();
        }
        try {
            OperationResult<ColumnList<SessionEvent>> result;
            // Inclusive lower / inclusive upper bound on the session prefix.
            result = keyspace
                    .prepareQuery(CF_CLICK_STREAM)
                    .getKey(userId)
                    .withColumnRange(
                            SE_SERIALIZER.buildRange()
                                    .greaterThanEquals("Session3")
                                    .lessThanEquals("Session5").build())
                    .execute();
            // Assert.assertEquals(10, result.getResult().size());
            // LOG.info("*********************** INCLUSIVE - INCLUSIVE");
            // for (Column<SessionEvent> column : result.getResult()) {
            // LOG.info("####### " + column.getName() + " = " +
            // column.getLongValue());
            // }
            // Inclusive lower / exclusive upper bound.
            result = keyspace
                    .prepareQuery(CF_CLICK_STREAM)
                    .getKey(userId)
                    .withColumnRange(
                            SE_SERIALIZER.buildRange()
                                    .greaterThanEquals("Session3")
                                    .lessThan("Session5").build()).execute();
            // Assert.assertEquals(10, result.getResult().size());
            // LOG.info("XXXXXXXXXXXXXXXXXXXXXXXX INCLUSIVE - NON_INCLUSIVE");
            // for (Column<SessionEvent> column : result.getResult()) {
            // LOG.info("####### " + column.getName() + " = " +
            // column.getLongValue());
            // }
            // Exclusive lower / inclusive upper bound.
            result = keyspace
                    .prepareQuery(CF_CLICK_STREAM)
                    .getKey(userId)
                    .withColumnRange(
                            SE_SERIALIZER.buildRange().greaterThan("Session3")
                                    .lessThanEquals("Session5").build())
                    .execute();
            // LOG.info("XXXXXXXXXXXXXXXXXXXXXXXX NON_INCLUSIVE - INCLUSIVE");
            // Assert.assertEquals(10, result.getResult().size());
            // for (Column<SessionEvent> column : result.getResult()) {
            // LOG.info("####### " + column.getName() + " = " +
            // column.getLongValue());
            // }
            // Exclusive lower / exclusive upper bound.
            result = keyspace
                    .prepareQuery(CF_CLICK_STREAM)
                    .getKey(userId)
                    .withColumnRange(
                            SE_SERIALIZER.buildRange().greaterThan("Session3")
                                    .lessThan("Session5").build()).execute();
            // LOG.info("XXXXXXXXXXXXXXXXXXXXXXXX NON_INCLUSIVE - NON_INCLUSIVE");
            // for (Column<SessionEvent> column : result.getResult()) {
            // LOG.info("####### " + column.getName() + " = " +
            // column.getLongValue());
            // }
            // Fixed session prefix with a TimeUUID range on the second
            // composite component.
            result = keyspace
                    .prepareQuery(CF_CLICK_STREAM)
                    .getKey(userId)
                    .withColumnRange(
                            SE_SERIALIZER
                                    .buildRange()
                                    .withPrefix("Session3")
                                    .greaterThanEquals(
                                            TimeUUIDUtils.getTimeUUID(2))
                                    .lessThanEquals(
                                            TimeUUIDUtils.getTimeUUID(8))
                                    .build()).execute();
            // Assert.assertEquals(10, result.getResult().size());
            // LOG.info("XXXXXXXXXXXXXXXXXXXXXXXX EQUAL - EQUAL");
            // for (Column<SessionEvent> column : result.getResult()) {
            // LOG.info("####### " + column.getName() + " = " +
            // column.getLongValue());
            // }
        } catch (ConnectionException e) {
            LOG.error(e.getMessage(), e);
            Assert.fail();
        }
    }
@Test
public void testChangeConsistencyLevel() {
try {
keyspace.prepareQuery(CF_STANDARD1)
.setConsistencyLevel(ConsistencyLevel.CL_ONE).getKey("A")
.execute();
} catch (ConnectionException e) {
Assert.fail(e.getMessage());
}
}
@Test
public void testCompositeKey() {
MockCompositeType key = new MockCompositeType("A", 1, 2, true, "B");
MutationBatch m = keyspace.prepareMutationBatch();
m.withRow(CF_COMPOSITE_KEY, key).putColumn("Test", "Value", null);
try {
m.execute();
} catch (ConnectionException e) {
LOG.error(e.getMessage(), e);
Assert.fail();
}
try {
ColumnList<String> row = keyspace.prepareQuery(CF_COMPOSITE_KEY)
.getKey(key).execute().getResult();
Assert.assertFalse(row.isEmpty());
} catch (ConnectionException e) {
LOG.error(e.getMessage(), e);
Assert.fail();
}
}
    /**
     * Writes 20 columns with 5-part composite names (char, int, int,
     * boolean, String) into one row, then reads them back three ways: full
     * row, exact composite name, and a prefix + bounded range built with the
     * annotated composite serializer.
     */
    @Test
    public void testComposite() {
        String rowKey = "Composite1";
        boolean bool = false;
        MutationBatch m = keyspace.prepareMutationBatch();
        ColumnListMutation<MockCompositeType> mRow = m.withRow(CF_COMPOSITE,
                rowKey);
        int columnCount = 0;
        // 2 (a,b) x 10 (0..9) x 1 (10) = 20 columns, alternating booleans.
        for (char part1 = 'a'; part1 <= 'b'; part1++) {
            for (int part2 = 0; part2 < 10; part2++) {
                for (int part3 = 10; part3 < 11; part3++) {
                    bool = !bool;
                    columnCount++;
                    mRow.putEmptyColumn(
                            new MockCompositeType(Character.toString(part1),
                                    part2, part3, bool, "UTF"), null);
                }
            }
        }
        LOG.info("Created " + columnCount + " columns");
        try {
            m.execute();
        } catch (ConnectionException e) {
            LOG.error(e.getMessage(), e);
            Assert.fail();
        }
        OperationResult<ColumnList<MockCompositeType>> result;
        try {
            // Full-row read must return every composite column written above.
            result = keyspace.prepareQuery(CF_COMPOSITE).getKey(rowKey)
                    .execute();
            Assert.assertEquals(columnCount, result.getResult().size());
            for (Column<MockCompositeType> col : result.getResult()) {
                LOG.info("COLUMN: " + col.getName().toString());
            }
        } catch (ConnectionException e) {
            LOG.error(e.getMessage(), e);
            Assert.fail();
        }
        try {
            // Point lookup by the exact composite column name.
            Column<MockCompositeType> column = keyspace
                    .prepareQuery(CF_COMPOSITE).getKey(rowKey)
                    .getColumn(new MockCompositeType("a", 0, 10, true, "UTF"))
                    .execute().getResult();
            LOG.info("Got single column: " + column.getName().toString());
        } catch (ConnectionException e) {
            LOG.error(e.getMessage(), e);
            Assert.fail();
        }
        LOG.info("Range builder");
        try {
            // Prefix "a" with the second component bounded to exactly 1.
            result = keyspace
                    .prepareQuery(CF_COMPOSITE)
                    .getKey(rowKey)
                    .withColumnRange(
                            M_SERIALIZER
                                    .buildRange()
                                    .withPrefix("a")
                                    .greaterThanEquals(1)
                                    .lessThanEquals(1)
                                    .build()).execute();
            for (Column<MockCompositeType> col : result.getResult()) {
                LOG.info("COLUMN: " + col.getName().toString());
            }
        } catch (ConnectionException e) {
            LOG.error(e.getMessage(), e);
            Assert.fail();
        }
        /*
         * Composite c = new Composite(); c.addComponent("String1",
         * StringSerializer.get()) .addComponent(123, IntegerSerializer.get());
         *
         * MutationBatch m = keyspace.prepareMutationBatch();
         * m.withRow(CF_COMPOSITE, "Key1") .putColumn(c, 123, null);
         *
         * try { m.execute(); } catch (ConnectionException e) { Assert.fail(); }
         *
         * try { OperationResult<Column<Composite>> result =
         * keyspace.prepareQuery(CF_COMPOSITE) .getKey("Key1") .getColumn(c)
         * .execute();
         *
         * Assert.assertEquals(123, result.getResult().getIntegerValue()); }
         * catch (ConnectionException e) { Assert.fail(); }
         */
    }
@Test
public void testCompositeSlice() throws ConnectionException {
AnnotatedCompositeSerializer<MockCompositeType> ser = new AnnotatedCompositeSerializer<MockCompositeType>(
MockCompositeType.class);
keyspace.prepareQuery(CF_COMPOSITE)
.getKey("Key1")
.withColumnRange(
ser.makeEndpoint("sessionid1", Equality.LESS_THAN)
.toBytes(),
ser.makeEndpoint("sessionid1", Equality.GREATER_THAN)
.toBytes(), false, 100).execute();
}
    @Test
    public void testIndexQueryWithPagination() {
        // Pages through a secondary-index query (Index2 == 42) ten rows at a
        // time and verifies the total page and row counts across the scan.
        OperationResult<Rows<String, String>> result;
        try {
            LOG.info("************************************************** testIndexQueryWithPagination: ");
            int rowCount = 0;
            int pageCount = 0;
            IndexQuery<String, String> query = keyspace
                    .prepareQuery(CF_STANDARD1).searchWithIndex()
                    .setRowLimit(10).autoPaginateRows(true).addExpression()
                    .whereColumn("Index2").equals().value(42);
            // Re-executing the same query object advances to the next page;
            // an empty result marks the end of the index scan.
            while (!(result = query.execute()).getResult().isEmpty()) {
                pageCount++;
                rowCount += result.getResult().size();
                LOG.info("==== Block ====");
                for (Row<String, String> row : result.getResult()) {
                    LOG.info("RowKey is " + row.getKey());
                }
            }
            // 26 matching rows at 10 per page -> 3 pages (10 + 10 + 6).
            Assert.assertEquals(pageCount, 3);
            Assert.assertEquals(rowCount, 26);
            LOG.info("************************************************** Index query: "
                    + result.getResult().size());
        } catch (ConnectionException e) {
            LOG.error(e.getMessage(), e);
            e.printStackTrace();
            Assert.fail();
        } catch (Exception e) {
            LOG.error(e.getMessage(), e);
            e.printStackTrace();
            Assert.fail();
        }
    }
    @Test
    public void testIndexQuery() {
        // Single-shot secondary-index lookup: exactly one seeded row ("Z")
        // has Index1 == 26.
        OperationResult<Rows<String, String>> result;
        try {
            LOG.info("************************************************** prepareGetMultiRowIndexQuery: ");
            result = keyspace.prepareQuery(CF_STANDARD1).searchWithIndex()
                    .setStartKey("").addExpression().whereColumn("Index1")
                    .equals().value(26).execute();
            Assert.assertEquals(1, result.getResult().size());
            Assert.assertEquals("Z", result.getResult().getRowByIndex(0)
                    .getKey());
            /*
             * for (Row<String, String> row : result.getResult()) {
             * LOG.info("RowKey is " + row.getKey()); for (Column<String> column
             * : row.getColumns()) { LOG.info(" Column: " + column.getName() +
             * "=" + column.getIntegerValue()); } }
             */
            LOG.info("************************************************** Index query: "
                    + result.getResult().size());
        } catch (ConnectionException e) {
            LOG.error(e.getMessage(), e);
            e.printStackTrace();
            Assert.fail();
        } catch (Exception e) {
            LOG.error(e.getMessage(), e);
            e.printStackTrace();
            Assert.fail();
        }
    }
    @Test
    public void testPreparedIndexQuery() {
        // Same index lookup as testIndexQuery (Index1 == 26 -> row "Z"), but
        // built from a reusable PreparedIndexExpression instead of the fluent
        // addExpression() chain.
        OperationResult<Rows<String, String>> result;
        try {
            LOG.info("************************************************** prepareGetMultiRowIndexQuery: ");
            PreparedIndexExpression<String, String> clause = CF_STANDARD1
                    .newIndexClause().whereColumn("Index1").equals().value(26);
            result = keyspace.prepareQuery(CF_STANDARD1).searchWithIndex()
                    .setStartKey("")
                    .addPreparedExpressions(Arrays.asList(clause)).execute();
            for (Row<String, String> row : result.getResult()) {
                LOG.info("RowKey is " + row.getKey() + " columnCount="
                        + row.getColumns().size());
                for (Column<String> column : row.getColumns()) {
                    LOG.info("    Column: " + column.getName() + "="
                            + column.getIntegerValue());
                }
            }
            Assert.assertEquals(1, result.getResult().size());
            Assert.assertEquals("Z", result.getResult().getRowByIndex(0)
                    .getKey());
            LOG.info("************************************************** Index query: "
                    + result.getResult().size());
        } catch (ConnectionException e) {
            e.printStackTrace();
            LOG.error(e.getMessage(), e);
            Assert.fail();
        } catch (Exception e) {
            e.printStackTrace();
            LOG.error(e.getMessage(), e);
            Assert.fail();
        }
    }
    @Test
    public void testIncrementCounter() {
        // Increments a counter column twice by 100 and verifies the running
        // total after each increment, relative to the pre-existing value.
        long baseAmount, incrAmount = 100;
        Column<String> column;
        try {
            column = getColumnValue(CF_COUNTER1, "CounterRow1", "MyCounter");
            baseAmount = column.getLongValue();
        } catch (Exception e) {
            // Counter column does not exist yet: start from zero.
            baseAmount = 0;
        }
        MutationBatch m = keyspace.prepareMutationBatch();
        m.withRow(CF_COUNTER1, "CounterRow1").incrementCounterColumn(
                "MyCounter", incrAmount);
        try {
            m.execute();
        } catch (ConnectionException e) {
            LOG.error(e.getMessage(), e);
            Assert.fail();
        }
        column = getColumnValue(CF_COUNTER1, "CounterRow1", "MyCounter");
        Assert.assertNotNull(column);
        Assert.assertEquals(column.getLongValue(), baseAmount + incrAmount);
        // Second increment: the counter must keep accumulating.
        m = keyspace.prepareMutationBatch();
        m.withRow(CF_COUNTER1, "CounterRow1").incrementCounterColumn(
                "MyCounter", incrAmount);
        try {
            m.execute();
        } catch (ConnectionException e) {
            LOG.error(e.getMessage(), e);
            Assert.fail();
        }
        column = getColumnValue(CF_COUNTER1, "CounterRow1", "MyCounter");
        Assert.assertNotNull(column);
        Assert.assertEquals(column.getLongValue(), baseAmount + 2 * incrAmount);
    }
    @Test
    public void testDeleteCounter() {
        // Verifies that a counter column can be deleted via a column mutation
        // and that a subsequent read finds nothing.
        Column<String> column;
        String rowKey = "CounterRowDelete1";
        String counterName = "MyCounter";
        // Increment the column
        MutationBatch m = keyspace.prepareMutationBatch();
        m.withRow(CF_COUNTER1, rowKey).incrementCounterColumn(counterName, 1);
        try {
            m.execute();
        } catch (ConnectionException e) {
            LOG.error(e.getMessage(), e);
            Assert.fail();
        }
        // Read back the value
        column = getColumnValue(CF_COUNTER1, rowKey, counterName);
        Assert.assertNotNull(column);
        Assert.assertEquals(column.getLongValue(), 1);
        // Delete the column
        try {
            // keyspace.prepareColumnMutation(CF_COUNTER1, rowKey, counterName)
            // .deleteCounterColumn().execute();
            keyspace.prepareColumnMutation(CF_COUNTER1, rowKey, counterName)
                    .deleteCounterColumn().execute();
            /*
             * m = keyspace.prepareMutationBatch(); m.withRow(CF_COUNTER1,
             * rowKey) .deleteColumn(counterName); m.execute();
             */
        } catch (ConnectionException e) {
            LOG.error(e.getMessage(), e);
            Assert.fail();
        }
        // Try to read back
        // This should be non-existent
        column = getColumnValue(CF_COUNTER1, rowKey, counterName);
        if (column != null) {
            LOG.error("Counter has value: " + column.getLongValue());
            Assert.fail();
        }
    }
@Test
public void testEmptyRowKey() {
try {
keyspace.prepareMutationBatch().withRow(CF_STANDARD1, "");
Assert.fail();
}
catch (Exception e) {
LOG.info(e.getMessage());
}
try {
keyspace.prepareMutationBatch().withRow(CF_STANDARD1, null);
Assert.fail();
}
catch (Exception e) {
LOG.info(e.getMessage());
}
}
@Test
public void testEmptyColumn() {
ColumnListMutation<String> mutation = keyspace.prepareMutationBatch().withRow(CF_STANDARD1, "ABC");
try {
mutation.putColumn(null, 1L);
Assert.fail();
}
catch (Exception e) {
LOG.info(e.getMessage());
}
try {
mutation.putColumn("", 1L);
Assert.fail();
}
catch (Exception e) {
LOG.info(e.getMessage());
}
try {
mutation.deleteColumn("");
Assert.fail();
}
catch (Exception e) {
LOG.info(e.getMessage());
}
try {
mutation.deleteColumn(null);
Assert.fail();
}
catch (Exception e) {
LOG.info(e.getMessage());
}
}
    @Test
    public void testCql() {
        // Seeds 26 rows into CF_CQL via the Thrift mutation path, then reads
        // them back with a CQL SELECT and spot-checks row lookup by key and
        // by index.
        try {
            System.out.println("testCQL");
            LOG.info("CQL Test");
            MutationBatch m = keyspace.prepareMutationBatch();
            for (char keyName = 'A'; keyName <= 'Z'; keyName++) {
                String rowKey = Character.toString(keyName);
                ColumnListMutation<String> cfmStandard = m.withRow(CF_CQL, rowKey);
                for (char cName = 'a'; cName <= 'z'; cName++) {
                    cfmStandard.putColumn(Character.toString(cName),
                            (int) (cName - 'a') + 1, null);
                }
                m.execute();
            }
            // NOTE(review): the query is prepared against CF_STANDARD1 while
            // the CQL statement reads from "cfcql" (the CF written above);
            // presumably only CF_STANDARD1's serializers are used here —
            // confirm against CqlQuery semantics.
            OperationResult<CqlResult<String, String>> result = keyspace
                    .prepareQuery(CF_STANDARD1)
                    .withCql("SELECT * FROM cfcql;").execute();
            Assert.assertTrue(result.getResult().hasRows());
            Assert.assertEquals(26, result.getResult().getRows().size());
            Assert.assertFalse(result.getResult().hasNumber());
            Row<String, String> row;
            row = result.getResult().getRows().getRow("A");
            Assert.assertEquals("A", row.getKey());
            row = result.getResult().getRows().getRow("B");
            Assert.assertEquals("B", row.getKey());
            // Unknown keys return null rather than throwing.
            row = result.getResult().getRows().getRow("NonExistent");
            Assert.assertNull(row);
            row = result.getResult().getRows().getRowByIndex(9);
            Assert.assertEquals("I", row.getKey());
            for (Row<String, String> row1 : result.getResult().getRows()) {
                LOG.info("KEY***: " + row1.getKey());
            }
        } catch (ConnectionException e) {
            LOG.error(e.getMessage(), e);
            Assert.fail();
        }
    }
@Test
public void testCqlCount() {
try {
LOG.info("CQL Test");
OperationResult<CqlResult<String, String>> result = keyspace
.prepareQuery(CF_STANDARD1)
.withCql("SELECT count(*) FROM Standard1 where KEY='A';")
.execute();
long count = result.getResult().getRows().getRowByIndex(0).getColumns().getColumnByName("count").getLongValue();
LOG.info("CQL Count: " + count);
} catch (ConnectionException e) {
LOG.error(e.getMessage(), e);
Assert.fail();
}
}
@Test
public void testGetSingleColumn() {
Column<String> column = getColumnValue(CF_STANDARD1, "A", "a");
Assert.assertNotNull(column);
Assert.assertEquals(1, column.getIntegerValue());
}
@Test
public void testColumnFamilyDoesntExist() {
ColumnFamily<String, String> cf = new ColumnFamily<String, String>(
"DoesntExist", StringSerializer.get(), StringSerializer.get());
OperationResult<Void> result;
try {
MutationBatch m = keyspace.prepareMutationBatch();
m.withRow(cf, "Key1").putColumn("Column2", "Value2", null);
result = m.execute();
Assert.fail();
} catch (ConnectionException e) {
LOG.info(e.getMessage());
}
}
@Test
public void testKeyspaceDoesntExist() {
AstyanaxContext<Keyspace> keyspaceContext = new AstyanaxContext.Builder()
.forCluster(TEST_CLUSTER_NAME)
.forKeyspace(TEST_KEYSPACE_NAME + "_DOESNT_EXIST")
.withAstyanaxConfiguration(
new AstyanaxConfigurationImpl()
.setDiscoveryType(NodeDiscoveryType.NONE))
.withConnectionPoolConfiguration(
new ConnectionPoolConfigurationImpl(TEST_CLUSTER_NAME
+ "_" + TEST_KEYSPACE_NAME + "_DOESNT_EXIST")
.setMaxConnsPerHost(1).setSeeds(SEEDS))
.buildKeyspace(ThriftFamilyFactory.getInstance());
try {
keyspaceContext.start();
Keyspace ks = keyspaceContext.getEntity();
OperationResult<Void> result = null;
try {
MutationBatch m = ks.prepareMutationBatch();
m.withRow(CF_STANDARD1, "Key1").putColumn("Column2", "Value2",
null);
result = m.execute();
Assert.fail();
} catch (ConnectionException e) {
LOG.info(e.getMessage());
}
} finally {
keyspaceContext.shutdown();
}
}
    @Test
    public void testCreateKeyspaceThatAlreadyExists() {
        // createKeyspaceIfNotExists must succeed both when the keyspace is
        // brand new and when it already exists (the second call is a no-op).
        String keyspaceName = TEST_KEYSPACE_NAME + "_ksAlreadyExists";
        AstyanaxContext<Keyspace> keyspaceContext = new AstyanaxContext.Builder()
                .forCluster(TEST_CLUSTER_NAME)
                .forKeyspace(keyspaceName)
                .withAstyanaxConfiguration(
                        new AstyanaxConfigurationImpl()
                                .setDiscoveryType(NodeDiscoveryType.NONE))
                .withConnectionPoolConfiguration(
                        new ConnectionPoolConfigurationImpl(keyspaceName)
                                .setMaxConnsPerHost(1).setSeeds(SEEDS))
                .buildKeyspace(ThriftFamilyFactory.getInstance());
        Keyspace ks = null;
        try {
            keyspaceContext.start();
            ks = keyspaceContext.getClient();
            Properties props = new Properties();
            props.setProperty("name", keyspaceName);
            props.setProperty("strategy_class", "SimpleStrategy");
            props.setProperty("strategy_options.replication_factor", "1");
            try {
                ks.createKeyspaceIfNotExists(props);
                KeyspaceDefinition ksDef = ks.describeKeyspace();
                Assert.assertNotNull(ksDef);
            } catch (Exception e) {
                Assert.fail(e.getMessage());
            }
            // NOW create is again.
            try {
                ks.createKeyspaceIfNotExists(props);
            } catch (Exception e) {
                Assert.fail(e.getMessage());
            }
        } finally {
            // Best-effort cleanup: drop the keyspace, then always shut down
            // the context.
            try {
                if (ks != null) {
                    ks.dropKeyspace();
                }
            } catch (Exception e) {
                LOG.info(e.getMessage());
            }
            keyspaceContext.shutdown();
        }
    }
@Test
public void testGetSingleColumnNotExists() {
Column<String> column = getColumnValue(CF_STANDARD1, "A",
"DoesNotExist");
Assert.assertNull(column);
}
    @Test
    public void testGetSingleColumnNotExistsAsync() {
        // Async fetch of a missing column: the failure surfaces as an
        // ExecutionException wrapping NotFoundException, not as a null result.
        Future<OperationResult<Column<String>>> future = null;
        try {
            future = keyspace.prepareQuery(CF_STANDARD1).getKey("A")
                    .getColumn("DoesNotExist").executeAsync();
            future.get(1000, TimeUnit.MILLISECONDS);
        } catch (ConnectionException e) {
            LOG.info("ConnectionException: " + e.getMessage());
            Assert.fail();
        } catch (InterruptedException e) {
            LOG.info(e.getMessage());
            Assert.fail();
        } catch (ExecutionException e) {
            // Expected path: only a wrapped NotFoundException is acceptable.
            if (e.getCause() instanceof NotFoundException)
                LOG.info(e.getCause().getMessage());
            else {
                Assert.fail(e.getMessage());
            }
        } catch (TimeoutException e) {
            future.cancel(true);
            LOG.info(e.getMessage());
            Assert.fail();
        }
    }
@Test
public void testGetSingleKeyNotExists() {
Column<String> column = getColumnValue(CF_STANDARD1, "AA", "ab");
Assert.assertNull(column);
}
@Test
public void testFunctionalQuery() throws ConnectionException {
OperationResult<ColumnList<String>> r1 = keyspace
.prepareQuery(CF_STANDARD1).getKey("A").execute();
Assert.assertEquals(28, r1.getResult().size());
/*
* OperationResult<Rows<String, String>> r2 = keyspace.prepareQuery()
* .fromColumnFamily(CF_STANDARD1) .selectKeyRange("A", "Z", null, null,
* 5) .execute();
*/
}
@Test
public void testNullKeyInMutation() throws ConnectionException {
try {
keyspace.prepareMutationBatch()
.withRow(CF_STANDARD1, null)
.putColumn("abc", "def");
Assert.fail();
}
catch (NullPointerException e) {
}
}
@Test
public void testColumnSlice() throws ConnectionException {
OperationResult<ColumnList<String>> r1 = keyspace
.prepareQuery(CF_STANDARD1).getKey("A")
.withColumnSlice("a", "b").execute();
Assert.assertEquals(2, r1.getResult().size());
}
@Test
public void testColumnRangeSlice() throws ConnectionException {
OperationResult<ColumnList<String>> r1 = keyspace
.prepareQuery(CF_STANDARD1)
.getKey("A")
.withColumnRange(
new RangeBuilder().setStart("a").setEnd("b")
.setLimit(5).build()).execute();
Assert.assertEquals(2, r1.getResult().size());
OperationResult<ColumnList<String>> r2 = keyspace
.prepareQuery(CF_STANDARD1).getKey("A")
.withColumnRange("a", null, false, 5).execute();
Assert.assertEquals(5, r2.getResult().size());
Assert.assertEquals("a", r2.getResult().getColumnByIndex(0).getName());
ByteBuffer EMPTY_BUFFER = ByteBuffer.wrap(new byte[0]);
OperationResult<ColumnList<String>> r3 = keyspace
.prepareQuery(CF_STANDARD1).getKey("A")
.withColumnRange(EMPTY_BUFFER, EMPTY_BUFFER, true, 5).execute();
Assert.assertEquals(5, r3.getResult().size());
Assert.assertEquals("z", r3.getResult().getColumnByIndex(0).getName());
}
@Test
public void testGetColumnsWithPrefix() throws ConnectionException {
OperationResult<ColumnList<String>> r = keyspace
.prepareQuery(CF_STANDARD1)
.getKey("Prefixes")
.withColumnRange("Prefix1_\u00000", "Prefix1_\uffff", false,
Integer.MAX_VALUE).execute();
Assert.assertEquals(2, r.getResult().size());
Assert.assertEquals("Prefix1_a", r.getResult().getColumnByIndex(0)
.getName());
Assert.assertEquals("Prefix1_b", r.getResult().getColumnByIndex(1)
.getName());
}
    @Test
    public void testGetCounters() throws ConnectionException {
        // If the shared counter exists (written by another test), its value
        // must be positive; if it does not exist yet there is nothing to check.
        LOG.info("Starting testGetCounters...");
        try {
            OperationResult<Column<String>> result = keyspace
                    .prepareQuery(CF_COUNTER1).getKey("CounterRow1")
                    .getColumn("TestCounter").execute();
            Long count = result.getResult().getLongValue();
            Assert.assertNotNull(count);
            Assert.assertTrue(count > 0);
        } catch (NotFoundException e) {
            // Intentionally ignored: the counter may not have been created
            // by an earlier test, in which case there is nothing to verify.
        }
        LOG.info("... testGetCounters done");
    }
    @Test
    public void testGetSingleKey() {
        // Reads each of the 26 seeded rows individually and logs the host and
        // latency for every fetch.
        try {
            for (char key = 'A'; key <= 'Z'; key++) {
                String keyName = Character.toString(key);
                OperationResult<ColumnList<String>> result = keyspace
                        .prepareQuery(CF_STANDARD1).getKey(keyName).execute();
                Assert.assertNotNull(result.getResult());
                System.out.printf("%s executed on %s in %d msec size=%d\n",
                        keyName, result.getHost(), result.getLatency(), result
                                .getResult().size());
            }
        } catch (ConnectionException e) {
            LOG.error(e.getMessage(), e);
            Assert.fail();
        }
    }
    @Test
    public void testGetSingleKeyAsync() {
        // Async row fetch with a 1s timeout; every failure mode fails the test.
        try {
            Future<OperationResult<ColumnList<String>>> result = keyspace
                    .prepareQuery(CF_STANDARD1).getKey("A").executeAsync();
            result.get(1000, TimeUnit.MILLISECONDS);
        } catch (ConnectionException e) {
            LOG.error(e.getMessage(), e);
            e.printStackTrace();
            Assert.fail();
        } catch (InterruptedException e) {
            LOG.error(e.getMessage(), e);
            e.printStackTrace();
            Assert.fail();
        } catch (ExecutionException e) {
            LOG.error(e.getMessage(), e);
            e.printStackTrace();
            Assert.fail();
        } catch (TimeoutException e) {
            LOG.error(e.getMessage(), e);
            e.printStackTrace();
            Assert.fail();
        }
    }
    @Test
    public void testGetAllKeysRoot() {
        // Fetches all 26 seeded rows in a single key-slice query and verifies
        // lookup by key, by a missing key, and by index.
        LOG.info("Starting testGetAllKeysRoot...");
        try {
            List<String> keys = new ArrayList<String>();
            for (char key = 'A'; key <= 'Z'; key++) {
                String keyName = Character.toString(key);
                keys.add(keyName);
            }
            OperationResult<Rows<String, String>> result = keyspace
                    .prepareQuery(CF_STANDARD1)
                    .getKeySlice(keys.toArray(new String[keys.size()]))
                    .execute();
            Assert.assertEquals(26, result.getResult().size());
            Row<String, String> row;
            row = result.getResult().getRow("A");
            Assert.assertEquals("A", row.getKey());
            row = result.getResult().getRow("B");
            Assert.assertEquals("B", row.getKey());
            // Unknown keys return null rather than throwing.
            row = result.getResult().getRow("NonExistent");
            Assert.assertNull(row);
            // Index order follows the order the keys were requested in.
            row = result.getResult().getRowByIndex(10);
            Assert.assertEquals("K", row.getKey());
            /*
             * LOG.info("Get " + result.getResult().size() + " keys"); for
             * (Row<String, String> row : result.getResult()) {
             * LOG.info(String.format("%s executed on %s in %d msec size=%d\n",
             * row.getKey(), result.getHost(), result.getLatency(),
             * row.getColumns().size())); for (Column<String> sc :
             * row.getColumns()) { LOG.info(" " + sc.getName());
             * ColumnList<Integer> subColumns =
             * sc.getSubColumns(IntegerSerializer.get()); for (Column<Integer>
             * sub : subColumns) { LOG.info(" " + sub.getName() + "=" +
             * sub.getStringValue()); } } }
             */
        } catch (ConnectionException e) {
            // TODO Auto-generated catch block
            e.printStackTrace();
            Assert.fail();
        }
        LOG.info("... testGetAllKeysRoot");
    }
@Test
public void testGetColumnSlice() {
LOG.info("Starting testGetColumnSlice...");
try {
OperationResult<ColumnList<String>> result = keyspace
.prepareQuery(CF_STANDARD1)
.getKey("A")
.withColumnSlice(
new ColumnSlice<String>("c", "h").setLimit(5))
.execute();
Assert.assertNotNull(result.getResult());
Assert.assertEquals(5, result.getResult().size());
} catch (ConnectionException e) {
Assert.fail(e.getMessage());
}
}
@Test
public void testGetAllKeysPath() {
LOG.info("Starting testGetAllKeysPath...");
try {
List<String> keys = new ArrayList<String>();
for (char key = 'A'; key <= 'Z'; key++) {
String keyName = Character.toString(key);
keys.add(keyName);
}
OperationResult<Rows<String, String>> result = keyspace
.prepareQuery(CF_STANDARD1)
.getKeySlice(keys.toArray(new String[keys.size()]))
.execute();
/*
* System.out.printf("%s executed on %s in %d msec size=%d\n",
* row.getKey(), result.getHost(), result.getLatency(),
* row.getColumns().size());
*/
// for (Row<String, String> row : result.getResult()) {
// for (Column<Integer> column : row.getColumns()) {
// System.out.println(" Column: " + column.getName());
// }
// }
OperationResult<Map<String, Integer>> counts = keyspace
.prepareQuery(CF_STANDARD1)
.getKeySlice(keys.toArray(new String[keys.size()]))
.getColumnCounts()
.execute();
Assert.assertEquals(26, counts.getResult().size());
for (Entry<String, Integer> count : counts.getResult().entrySet()) {
Assert.assertEquals(new Integer(28), count.getValue());
}
} catch (ConnectionException e) {
LOG.error(e.getMessage(), e);
Assert.fail();
}
LOG.info("Starting testGetAllKeysPath...");
}
@Test
public void testDeleteMultipleKeys() {
LOG.info("Starting testDeleteMultipleKeys...");
LOG.info("... testGetAllKeysPath");
}
    @Test
    public void testMutationMerge() {
        // mergeShallow must count distinct row keys: m4 reuses row "1" from
        // m1, so the row count stays at 3, and the empty m5 adds nothing.
        MutationBatch m1 = keyspace.prepareMutationBatch();
        MutationBatch m2 = keyspace.prepareMutationBatch();
        MutationBatch m3 = keyspace.prepareMutationBatch();
        MutationBatch m4 = keyspace.prepareMutationBatch();
        MutationBatch m5 = keyspace.prepareMutationBatch();
        m1.withRow(CF_STANDARD1, "1").putColumn("1", "X", null);
        m2.withRow(CF_STANDARD1, "2").putColumn("2", "X", null)
                .putColumn("3", "X", null);
        m3.withRow(CF_STANDARD1, "3").putColumn("4", "X", null)
                .putColumn("5", "X", null).putColumn("6", "X", null);
        m4.withRow(CF_STANDARD1, "1").putColumn("7", "X", null)
                .putColumn("8", "X", null).putColumn("9", "X", null)
                .putColumn("10", "X", null);
        MutationBatch merged = keyspace.prepareMutationBatch();
        LOG.info(merged.toString());
        Assert.assertEquals(merged.getRowCount(), 0);
        merged.mergeShallow(m1);
        LOG.info(merged.toString());
        Assert.assertEquals(merged.getRowCount(), 1);
        merged.mergeShallow(m2);
        LOG.info(merged.toString());
        Assert.assertEquals(merged.getRowCount(), 2);
        merged.mergeShallow(m3);
        LOG.info(merged.toString());
        Assert.assertEquals(merged.getRowCount(), 3);
        // m4 targets row "1", already present via m1: row count is unchanged.
        merged.mergeShallow(m4);
        LOG.info(merged.toString());
        Assert.assertEquals(merged.getRowCount(), 3);
        // m5 holds no mutations at all.
        merged.mergeShallow(m5);
        LOG.info(merged.toString());
        Assert.assertEquals(merged.getRowCount(), 3);
    }
    @Test
    public void testDelete() {
        // Writes two columns, deletes one, and verifies the deleted column is
        // gone on read-back.
        LOG.info("Starting testDelete...");
        String rowKey = "DeleteMe_testDelete";
        MutationBatch m = keyspace.prepareMutationBatch();
        m.withRow(CF_STANDARD1, rowKey).putColumn("Column1", "X", null)
                .putColumn("Column2", "X", null);
        try {
            m.execute();
        } catch (ConnectionException e) {
            LOG.error(e.getMessage(), e);
            Assert.fail();
        }
        Assert.assertEquals(getColumnValue(CF_STANDARD1, rowKey, "Column1")
                .getStringValue(), "X");
        Assert.assertTrue(deleteColumn(CF_STANDARD1, rowKey, "Column1"));
        Assert.assertNull(getColumnValue(CF_STANDARD1, rowKey, "Column1"));
        LOG.info("... testDelete");
    }
    @Test
    public void testDeleteLotsOfColumns() {
        // Inserts 100 columns into one row, deletes them in two passes (half,
        // then all) and verifies the count after each pass via both a direct
        // count query and a paginated scan.
        LOG.info("Starting testDelete...");
        String rowKey = "DeleteMe_testDeleteLotsOfColumns";
        int nColumns = 100;
        int pageSize = 25;
        // Insert a bunch of rows
        MutationBatch m = keyspace.prepareMutationBatch();
        ColumnListMutation<String> rm = m.withRow(CF_STANDARD1, rowKey);
        for (int i = 0; i < nColumns; i++) {
            rm.putEmptyColumn("" + i, null);
        }
        try {
            m.execute();
        } catch (ConnectionException e) {
            LOG.error(e.getMessage(), e);
            Assert.fail();
        }
        // Verify count
        try {
            int count = keyspace.prepareQuery(CF_STANDARD1)
                    .setConsistencyLevel(ConsistencyLevel.CL_QUORUM)
                    .getKey(rowKey).getCount().execute().getResult();
            Assert.assertEquals(nColumns, count);
        } catch (ConnectionException e) {
            Assert.fail(e.getMessage());
        }
        // Delete half of the columns
        m = keyspace.prepareMutationBatch().setConsistencyLevel(
                ConsistencyLevel.CL_QUORUM);
        rm = m.withRow(CF_STANDARD1, rowKey);
        for (int i = 0; i < nColumns / 2; i++) {
            rm.deleteColumn("" + i);
        }
        try {
            m.execute();
        } catch (ConnectionException e) {
            Assert.fail(e.getMessage());
        }
        // Verify count
        try {
            int count = getRowColumnCount(CF_STANDARD1, rowKey);
            Assert.assertEquals(nColumns / 2, count);
            count = getRowColumnCountWithPagination(CF_STANDARD1, rowKey,
                    pageSize);
            Assert.assertEquals(nColumns / 2, count);
        } catch (ConnectionException e) {
            Assert.fail(e.getMessage());
        }
        // Delete all of the columns
        m = keyspace.prepareMutationBatch().setConsistencyLevel(
                ConsistencyLevel.CL_QUORUM);
        rm = m.withRow(CF_STANDARD1, rowKey);
        for (int i = 0; i < nColumns; i++) {
            rm.deleteColumn("" + i);
        }
        try {
            m.execute();
        } catch (ConnectionException e) {
            Assert.fail(e.getMessage());
        }
        // Verify count
        try {
            int count = getRowColumnCount(CF_STANDARD1, rowKey);
            Assert.assertEquals(0, count);
            count = getRowColumnCountWithPagination(CF_STANDARD1, rowKey,
                    pageSize);
            Assert.assertEquals(0, count);
        } catch (ConnectionException e) {
            Assert.fail(e.getMessage());
        }
        LOG.info("... testDelete");
    }
private <K, C> int getRowColumnCount(ColumnFamily<K, C> cf, K rowKey)
throws ConnectionException {
int count = keyspace.prepareQuery(cf)
.setConsistencyLevel(ConsistencyLevel.CL_QUORUM).getKey(rowKey)
.getCount().execute().getResult();
return count;
}
private <K, C> int getRowColumnCountWithPagination(ColumnFamily<K, C> cf,
K rowKey, int pageSize) throws ConnectionException {
RowQuery<K, C> query = keyspace.prepareQuery(cf)
.setConsistencyLevel(ConsistencyLevel.CL_QUORUM).getKey(rowKey)
.withColumnRange(new RangeBuilder().setLimit(pageSize).build())
.autoPaginate(true);
ColumnList<C> result;
int count = 0;
while (!(result = query.execute().getResult()).isEmpty()) {
count += result.size();
}
return count;
}
    @Test
    public void testCsvLoader() {
        // Loads four CSV records into CF_USERS via the columnar record
        // writer, then exercises the various JsonRowsWriter output options
        // (object vs array form, custom field names, dynamic/fixed columns,
        // columns-as-rows) against the loaded data.
        StringBuilder sb = new StringBuilder()
                .append("key, firstname, lastname, age, test\n")
                .append("1, eran, landau, 34, a\n")
                .append("2, netta, landau, 33, b\n")
                .append("3, arielle, landau, 6, c\n")
                .append("4, eyal, landau, 2, d\n");
        RecordReader reader = new CsvRecordReader(new StringReader(
                sb.toString()));
        RecordWriter writer = new ColumnarRecordWriter(keyspace,
                CF_USERS.getName());
        try {
            reader.start();
            writer.start();
            // Stream records from the CSV reader into Cassandra until EOF.
            List<Pair<String, String>> record = null;
            while (null != (record = reader.next())) {
                writer.write(record);
            }
        } catch (IOException e) {
            LOG.error(e.getMessage(), e);
            Assert.fail();
        } catch (ConnectionException e) {
            LOG.error(e.getMessage(), e);
            Assert.fail();
        } finally {
            reader.shutdown();
            writer.shutdown();
        }
        try {
            Rows<Long, String> rows = keyspace.prepareQuery(CF_USERS)
                    .getAllRows().execute().getResult();
            new JsonRowsWriter(new PrintWriter(System.out, true),
                    keyspace.getSerializerPackage(CF_USERS.getName(), false))
                    .setRowsAsArray(false).write(rows);
            new JsonRowsWriter(new PrintWriter(System.out, true),
                    keyspace.getSerializerPackage(CF_USERS.getName(), false))
                    .setRowsAsArray(true).setCountName("_count_")
                    .setRowsName("_rows_").setNamesName("_names_").write(rows);
            new JsonRowsWriter(new PrintWriter(System.out, true),
                    keyspace.getSerializerPackage(CF_USERS.getName(), false))
                    .setRowsAsArray(true).setDynamicColumnNames(true)
                    .write(rows);
            new JsonRowsWriter(new PrintWriter(System.out, true),
                    keyspace.getSerializerPackage(CF_USERS.getName(), false))
                    .setRowsAsArray(true).setIgnoreUndefinedColumns(true)
                    .write(rows);
            new JsonRowsWriter(new PrintWriter(System.out, true),
                    keyspace.getSerializerPackage(CF_USERS.getName(), false))
                    .setRowsAsArray(true)
                    .setFixedColumnNames("firstname", "lastname")
                    .setIgnoreUndefinedColumns(true).write(rows);
            LOG.info("******* COLUMNS AS ROWS ********");
            new JsonRowsWriter(new PrintWriter(System.out, true),
                    keyspace.getSerializerPackage(CF_USERS.getName(), false))
                    .setRowsAsArray(true).setColumnsAsRows(true).write(rows);
        } catch (ConnectionException e) {
            LOG.error(e.getMessage(), e);
            Assert.fail();
        } catch (Exception e) {
            LOG.error(e.getMessage(), e);
            Assert.fail();
        }
    }
    @Test
    public void testCsvLoaderWithCustomSerializers() {
        // Same CSV load as testCsvLoader, but with an explicitly-built
        // SerializerPackageImpl (long keys, UTF8 names, per-column value
        // types) instead of the schema-derived serializers.
        StringBuilder sb = new StringBuilder()
                .append("key, firstname, lastname, age, test\n")
                .append("1, eran, landau, 34, a\n")
                .append("2, netta, landau, 33, b\n")
                .append("3, arielle, landau, 6, c\n")
                .append("4, eyal, landau, 2, d\n");
        SerializerPackageImpl pkg = null;
        try {
            pkg = new SerializerPackageImpl().setKeyType("LongType")
                    .setColumnNameType("UTF8Type")
                    .setDefaultValueType("UTF8Type")
                    .setValueType("firstname", "UTF8Type")
                    .setValueType("lastname", "UTF8Type")
                    .setValueType("age", "LongType");
        } catch (UnknownComparatorException e) {
            Assert.fail();
        }
        RecordReader reader = new CsvRecordReader(new StringReader(
                sb.toString()));
        RecordWriter writer = new ColumnarRecordWriter(keyspace,
                CF_USERS.getName(), pkg);
        try {
            reader.start();
            writer.start();
            // Stream records from the CSV reader into Cassandra until EOF.
            List<Pair<String, String>> record = null;
            while (null != (record = reader.next())) {
                writer.write(record);
            }
        } catch (IOException e) {
            LOG.error(e.getMessage(), e);
            Assert.fail();
        } catch (ConnectionException e) {
            LOG.error(e.getMessage(), e);
            Assert.fail();
        } finally {
            reader.shutdown();
            writer.shutdown();
        }
        try {
            Rows<Long, String> rows = keyspace.prepareQuery(CF_USERS)
                    .getAllRows().execute().getResult();
            new JsonRowsWriter(new PrintWriter(System.out, true),
                    keyspace.getSerializerPackage(CF_USERS.getName(), false))
                    .setRowsAsArray(false).write(rows);
        } catch (ConnectionException e) {
            LOG.error(e.getMessage(), e);
            Assert.fail();
        } catch (Exception e) {
            LOG.error(e.getMessage(), e);
            Assert.fail();
        }
    }
    @Test
    public void testCsvLoaderComposite() {
        // Loads colon-delimited composite column names (e.g. "a:1") from CSV
        // into CF_COMPOSITE_CSV, then renders the rows through the various
        // JsonRowsWriter output modes.
        StringBuilder sb = new StringBuilder().append("key, column, value\n")
                .append("1, a:1, 1a1\n").append("1, b:1, 2b1\n")
                .append("2, a:1, 3a1\n").append("3, a:1, 4a1\n");
        CsvColumnReader reader = new CsvColumnReader(new StringReader(
                sb.toString()));
        RecordWriter writer = new ColumnarRecordWriter(keyspace,
                CF_COMPOSITE_CSV.getName());
        try {
            reader.start();
            writer.start();
            // Stream records from the CSV reader into Cassandra until EOF.
            List<Pair<String, String>> record = null;
            while (null != (record = reader.next())) {
                writer.write(record);
            }
        } catch (IOException e) {
            LOG.error(e.getMessage(), e);
            Assert.fail();
        } catch (ConnectionException e) {
            LOG.error(e.getMessage(), e);
            Assert.fail();
        } finally {
            reader.shutdown();
            writer.shutdown();
        }
        try {
            Rows<ByteBuffer, ByteBuffer> rows = keyspace
                    .prepareQuery(CF_COMPOSITE_CSV).getAllRows().execute()
                    .getResult();
            new JsonRowsWriter(new PrintWriter(System.out, true),
                    keyspace.getSerializerPackage(CF_COMPOSITE_CSV.getName(),
                            false)).setRowsAsArray(false).write(rows);
            new JsonRowsWriter(new PrintWriter(System.out, true),
                    keyspace.getSerializerPackage(CF_COMPOSITE_CSV.getName(),
                            false)).setRowsAsArray(true)
                    .setCountName("_count_").setRowsName("_rows_")
                    .setNamesName("_names_").write(rows);
            new JsonRowsWriter(new PrintWriter(System.out, true),
                    keyspace.getSerializerPackage(CF_COMPOSITE_CSV.getName(),
                            false)).setRowsAsArray(true)
                    .setDynamicColumnNames(true).write(rows);
            new JsonRowsWriter(new PrintWriter(System.out, true),
                    keyspace.getSerializerPackage(CF_COMPOSITE_CSV.getName(),
                            false)).setRowsAsArray(true)
                    .setIgnoreUndefinedColumns(true).write(rows);
            LOG.info("******* COLUMNS AS ROWS ********");
            new JsonRowsWriter(new PrintWriter(System.out, true),
                    keyspace.getSerializerPackage(CF_COMPOSITE_CSV.getName(),
                            false)).setRowsAsArray(true).setColumnsAsRows(true)
                    .write(rows);
        } catch (ConnectionException e) {
            LOG.error(e.getMessage(), e);
            Assert.fail();
        } catch (Exception e) {
            LOG.error(e.getMessage(), e);
            Assert.fail();
        }
    }
    @Test
    public void testTtlValues() throws Exception {
        // Writes three columns with TTLs of 0, null and 1 second, sleeps past
        // the shortest TTL, and verifies which columns survive.
        MutationBatch mb = keyspace.prepareMutationBatch();
        mb.withRow(CF_TTL, "row")
                .putColumn("TTL0", "TTL0", 0)
                .putColumn("TTLNULL", "TTLNULL", null)
                .putColumn("TTL1", "TTL1", 1);
        mb.execute();
        // 2s > the 1s TTL, so "TTL1" should have expired by now.
        Thread.sleep(2000);
        ColumnList<String> result = keyspace.prepareQuery(CF_TTL)
                .getRow("row")
                .execute().getResult();
        // Only the TTL-0 and null-TTL columns remain — a TTL of 0 is
        // evidently treated the same as "no TTL" here.
        Assert.assertEquals(2, result.size());
        Assert.assertNotNull(result.getColumnByName("TTL0"));
        Assert.assertNotNull(result.getColumnByName("TTLNULL"));
    }
    @Test
    public void testCluster() {
        // Connects at cluster level (no keyspace) and verifies that the
        // cluster name and a non-empty keyspace list can be retrieved.
        AstyanaxContext<Cluster> clusterContext = new AstyanaxContext.Builder()
                .forCluster(TEST_CLUSTER_NAME)
                .withAstyanaxConfiguration(new AstyanaxConfigurationImpl())
                .withConnectionPoolConfiguration(
                        new ConnectionPoolConfigurationImpl(TEST_CLUSTER_NAME)
                                .setSeeds(SEEDS).setSocketTimeout(30000)
                                .setMaxTimeoutWhenExhausted(200)
                                .setMaxConnsPerHost(1))
                .withConnectionPoolMonitor(new CountingConnectionPoolMonitor())
                .buildCluster(ThriftFamilyFactory.getInstance())
        ;
        clusterContext.start();
        Cluster cluster = clusterContext.getEntity();
        try {
            cluster.describeClusterName();
            List<KeyspaceDefinition> keyspaces = cluster.describeKeyspaces();
            LOG.info("Keyspace count:" + keyspaces.size());
            for (KeyspaceDefinition keyspace : keyspaces) {
                LOG.info("Keyspace: " + keyspace.getName());
            }
            Assert.assertNotNull(keyspaces);
            Assert.assertTrue(keyspaces.size() > 0);
        } catch (Exception e) {
            Assert.fail(e.getMessage());
        } finally {
            // Always tear the cluster context down, even on failure.
            clusterContext.shutdown();
        }
    }
    @Test
    public void testPrefixedSerializer() {
        // Two logical column families share the physical "Standard1" CF but
        // prepend different row-key prefixes via PrefixedSerializer; writes
        // through each must land under distinct physical keys.
        ColumnFamily<String, String> cf = new ColumnFamily<String, String>(
                "Standard1", StringSerializer.get(), StringSerializer.get());
        ColumnFamily<String, String> cf1 = new ColumnFamily<String, String>(
                "Standard1", new PrefixedSerializer<String, String>("Prefix1_",
                        StringSerializer.get(), StringSerializer.get()),
                StringSerializer.get());
        ColumnFamily<String, String> cf2 = new ColumnFamily<String, String>(
                "Standard1", new PrefixedSerializer<String, String>("Prefix2_",
                        StringSerializer.get(), StringSerializer.get()),
                StringSerializer.get());
        MutationBatch m = keyspace.prepareMutationBatch();
        m.withRow(cf1, "A").putColumn("Column1", "Value1", null);
        m.withRow(cf2, "A").putColumn("Column1", "Value2", null);
        try {
            m.execute();
        } catch (ConnectionException e) {
            LOG.error(e.getMessage(), e);
            Assert.fail();
        }
        // Reading through the unprefixed CF with the fully-qualified key must
        // find the value written through the Prefix1_ serializer...
        try {
            OperationResult<ColumnList<String>> result = keyspace
                    .prepareQuery(cf).getKey("Prefix1_A").execute();
            Assert.assertEquals(1, result.getResult().size());
            Column<String> c = result.getResult().getColumnByName("Column1");
            Assert.assertEquals("Value1", c.getStringValue());
        } catch (ConnectionException e) {
            LOG.error(e.getMessage(), e);
            Assert.fail();
        }
        // ... and likewise for the Prefix2_ serializer.
        try {
            OperationResult<ColumnList<String>> result = keyspace
                    .prepareQuery(cf).getKey("Prefix2_A").execute();
            Assert.assertEquals(1, result.getResult().size());
            Column<String> c = result.getResult().getColumnByName("Column1");
            Assert.assertEquals("Value2", c.getStringValue());
        } catch (ConnectionException e) {
            LOG.error(e.getMessage(), e);
            Assert.fail();
        }
    }
    @Test
    public void testWithRetry() {
        // A mutation configured with an ExponentialBackoff retry policy
        // against a missing column family must still ultimately fail with a
        // ConnectionException once the retries are exhausted.
        String clusterName = TEST_CLUSTER_NAME + "_DOESNT_EXIST";
        AstyanaxContext<Keyspace> keyspaceContext = new AstyanaxContext.Builder()
                .forCluster(clusterName)
                .forKeyspace(TEST_KEYSPACE_NAME)
                .withAstyanaxConfiguration(
                        new AstyanaxConfigurationImpl()
                                .setDiscoveryType(NodeDiscoveryType.NONE))
                .withConnectionPoolConfiguration(
                        new ConnectionPoolConfigurationImpl(clusterName + "_"
                                + TEST_KEYSPACE_NAME).setMaxConnsPerHost(1)
                                .setSeeds(SEEDS))
                .withConnectionPoolMonitor(new CountingConnectionPoolMonitor())
                .buildKeyspace(ThriftFamilyFactory.getInstance());
        ColumnFamily<String, String> cf = new ColumnFamily<String, String>(
                "DoesntExist", StringSerializer.get(), StringSerializer.get());
        try {
            // Retry up to 3 times with a 10ms exponential backoff base.
            MutationBatch m = keyspaceContext.getEntity()
                    .prepareMutationBatch()
                    .withRetryPolicy(new ExponentialBackoff(10, 3));
            m.withRow(cf, "Key1").putColumn("Column2", "Value2", null);
            m.execute();
            Assert.fail();
        } catch (ConnectionException e) {
            // Expected after retries are exhausted.
            LOG.info(e.getMessage());
        }
    }
// This test confirms the fix for https://github.com/Netflix/astyanax/issues/170
// (auto-paginated column queries corrupting serializer state shared across
// column families with different column-key types). The statement ORDER below
// is the essence of the regression — do not reorder.
@Test
public void columnAutoPaginateTest() throws Exception {
    // Two CFs with different column-key types: TimeUUID vs String.
    final ColumnFamily<String, UUID> CF1 = ColumnFamily.newColumnFamily("CF1", StringSerializer.get(),
            TimeUUIDSerializer.get());
    final ColumnFamily<String, String> CF2 = ColumnFamily.newColumnFamily("CF2", StringSerializer.get(),
            StringSerializer.get());
    keyspace.createColumnFamily(CF1, null);
    // Sleeps allow schema changes to propagate before use.
    Thread.sleep(3000);
    keyspace.createColumnFamily(CF2, null);
    Thread.sleep(3000);
    // query on another column family with different column key type
    // does not seem to work after the first query
    keyspace.prepareQuery(CF2).getKey("anything").execute();
    MutationBatch m = keyspace.prepareMutationBatch();
    m.withRow(CF1, "test").putColumn(TimeUUIDUtils.getUniqueTimeUUIDinMillis(), "value1", null);
    m.execute();
    RowQuery<String, UUID> query = keyspace.prepareQuery(CF1).getKey("test").autoPaginate(true);
    // Adding a column range removes the problem
    // query.withColumnRange(new RangeBuilder().build());
    ColumnList<UUID> columns = query.execute().getResult();
    // The final cross-CF query must not throw once the fix is in place.
    keyspace.prepareQuery(CF2).getKey("anything").execute();
}
/**
 * Verifies property-driven keyspace DDL: createKeyspace must reject a
 * Properties bag whose "name" does not match the context's keyspace, and
 * must succeed once the name matches.
 */
@Test
public void testDDLWithProperties() throws Exception {
    String keyspaceName = "DDLPropertiesKeyspace";
    Properties props = new Properties();
    // Deliberately mismatched name — the first createKeyspace must fail.
    props.put("name", keyspaceName + "_wrong");
    props.put("strategy_class", "SimpleStrategy");
    props.put("strategy_options.replication_factor", "1");
    AstyanaxContext<Keyspace> kc = new AstyanaxContext.Builder()
            .forCluster(TEST_CLUSTER_NAME)
            .forKeyspace(keyspaceName)
            .withAstyanaxConfiguration(
                    new AstyanaxConfigurationImpl()
                            .setDiscoveryType(NodeDiscoveryType.RING_DESCRIBE)
                            .setConnectionPoolType(ConnectionPoolType.ROUND_ROBIN)
                            .setDiscoveryDelayInSeconds(60000))
            .withConnectionPoolConfiguration(
                    new ConnectionPoolConfigurationImpl(TEST_CLUSTER_NAME + "_" + keyspaceName)
                            .setSocketTimeout(30000)
                            .setMaxTimeoutWhenExhausted(2000)
                            .setMaxConnsPerHost(20)
                            .setInitConnsPerHost(10)
                            .setSeeds(SEEDS)
                    )
            .withConnectionPoolMonitor(new CountingConnectionPoolMonitor())
            .buildKeyspace(ThriftFamilyFactory.getInstance());
    kc.start();
    try {
        Keyspace ks = kc.getClient();
        try {
            ks.createKeyspace(props);
            Assert.fail("Should have gotten name mismatch error");
        }
        catch (BadRequestException e) {
            LOG.info(e.getMessage());
        }
        // Correct the name; creation should now succeed.
        props.put("name", keyspaceName);
        ks.createKeyspace(props);
        Properties props1 = ks.getKeyspaceProperties();
        LOG.info(props.toString());
        LOG.info(props1.toString());
    } finally {
        // The original leaked the context; always release pool resources.
        kc.shutdown();
    }
}
/**
 * Issues a single-column delete for {@code columnName} on {@code rowKey}.
 * Returns {@code true} on success; a connection failure logs the error,
 * fails the test, and returns {@code false}.
 */
private boolean deleteColumn(ColumnFamily<String, String> cf,
        String rowKey, String columnName) {
    MutationBatch batch = keyspace.prepareMutationBatch();
    batch.withRow(cf, rowKey).deleteColumn(columnName);
    try {
        batch.execute();
    } catch (ConnectionException e) {
        LOG.error(e.getMessage(), e);
        Assert.fail();
        return false;
    }
    return true;
}
/**
 * Fetches a single column for {@code rowKey}. Returns {@code null} when the
 * column does not exist; any other connection error fails the test (and
 * also returns {@code null} so the compiler is satisfied).
 */
private Column<String> getColumnValue(ColumnFamily<String, String> cf,
        String rowKey, String columnName) {
    try {
        return keyspace.prepareQuery(cf).getKey(rowKey)
                .getColumn(columnName).execute().getResult();
    } catch (NotFoundException e) {
        // Missing column is an expected outcome for callers.
        LOG.info(e.getMessage());
    } catch (ConnectionException e) {
        LOG.error(e.getMessage(), e);
        Assert.fail();
    }
    return null;
}
}
| 7,560 |
0 | Create_ds/astyanax/astyanax-test/src/test/java/com/netflix/astyanax | Create_ds/astyanax/astyanax-test/src/test/java/com/netflix/astyanax/thrift/ThriftClusterImplTest.java | package com.netflix.astyanax.thrift;
import java.util.Properties;
import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.netflix.astyanax.AstyanaxContext;
import com.netflix.astyanax.Cluster;
import com.netflix.astyanax.connectionpool.NodeDiscoveryType;
import com.netflix.astyanax.connectionpool.impl.ConnectionPoolConfigurationImpl;
import com.netflix.astyanax.connectionpool.impl.ConnectionPoolType;
import com.netflix.astyanax.connectionpool.impl.CountingConnectionPoolMonitor;
import com.netflix.astyanax.ddl.ColumnFamilyDefinition;
import com.netflix.astyanax.ddl.KeyspaceDefinition;
import com.netflix.astyanax.impl.AstyanaxConfigurationImpl;
import com.netflix.astyanax.util.SingletonEmbeddedCassandra;
/**
 * Integration tests for the Thrift {@code Cluster} implementation against an
 * embedded Cassandra instance: property-driven keyspace/column-family DDL and
 * property retrieval at cluster scope.
 */
public class ThriftClusterImplTest {
    private static final Logger LOG = LoggerFactory.getLogger(ThriftClusterImplTest.class);

    // Thrift endpoint of the embedded Cassandra started in setup().
    private static final String SEEDS = "localhost:9160";

    // Grace period for the embedded server to come up / shut down.
    private static final long CASSANDRA_WAIT_TIME = 3000;

    private static String TEST_CLUSTER_NAME = "cass_sandbox";
    private static String TEST_KEYSPACE_NAME = "AstyanaxUnitTests";

    private static AstyanaxContext<Cluster> context;
    private static Cluster cluster;

    /**
     * Boots the singleton embedded Cassandra and builds a cluster-level
     * Astyanax context shared by all tests in this class.
     */
    @BeforeClass
    public static void setup() throws Exception {
        System.out.println("TESTING THRIFT KEYSPACE");

        SingletonEmbeddedCassandra.getInstance();

        Thread.sleep(CASSANDRA_WAIT_TIME);

        context = new AstyanaxContext.Builder()
                .forCluster(TEST_CLUSTER_NAME)
                .forKeyspace(TEST_KEYSPACE_NAME)
                .withAstyanaxConfiguration(
                        new AstyanaxConfigurationImpl()
                                .setDiscoveryType(NodeDiscoveryType.RING_DESCRIBE)
                                .setConnectionPoolType(ConnectionPoolType.ROUND_ROBIN)
                                .setDiscoveryDelayInSeconds(60000))
                .withConnectionPoolConfiguration(
                        new ConnectionPoolConfigurationImpl(TEST_CLUSTER_NAME
                                + "_" + TEST_KEYSPACE_NAME)
                                .setSocketTimeout(30000)
                                .setMaxTimeoutWhenExhausted(2000)
                                .setMaxConnsPerHost(20)
                                .setInitConnsPerHost(10)
                                .setSeeds(SEEDS)
                        )
                .withConnectionPoolMonitor(new CountingConnectionPoolMonitor())
                .buildCluster(ThriftFamilyFactory.getInstance());

        context.start();
        cluster = context.getClient();
    }

    /** Releases the shared context and lets the embedded server settle. */
    @AfterClass
    public static void teardown() throws Exception {
        if (context != null)
            context.shutdown();

        Thread.sleep(CASSANDRA_WAIT_TIME);
    }

    /**
     * Creates a keyspace and a column family purely from Properties, then
     * verifies the properties can be read back at keyspace and cluster scope
     * and that the CF definition reflects the defaults (BytesType comparator).
     */
    @Test
    public void test() throws Exception {
        String keyspaceName = "ClusterTest";
        Properties props = new Properties();
        props.put("name", keyspaceName);
        props.put("strategy_class", "SimpleStrategy");
        props.put("strategy_options.replication_factor", "1");
        cluster.createKeyspace(props);
        Properties prop1 = cluster.getKeyspaceProperties(keyspaceName);
        System.out.println(prop1);
        Assert.assertTrue(prop1.containsKey("name"));
        Assert.assertTrue(prop1.containsKey("strategy_class"));
        // Cluster-wide dump prefixes each key with the keyspace name.
        Properties prop2 = cluster.getAllKeyspaceProperties();
        System.out.println(prop2);
        Assert.assertTrue(prop2.containsKey("ClusterTest.name"));
        Assert.assertTrue(prop2.containsKey("ClusterTest.strategy_class"));

        Properties cfProps = new Properties();
        cfProps.put("keyspace", keyspaceName);
        cfProps.put("name", "cf1");
        // Empty value disables sstable compression for the new CF.
        cfProps.put("compression_options.sstable_compression", "");
        cluster.createColumnFamily(cfProps);

        Properties cfProps1 = cluster.getKeyspaceProperties(keyspaceName);
        KeyspaceDefinition ksdef = cluster.describeKeyspace(keyspaceName);
        ColumnFamilyDefinition cfdef = ksdef.getColumnFamily("cf1");

        LOG.info(cfProps1.toString());
        LOG.info(cfdef.getProperties().toString());
        Assert.assertEquals(cfProps1.get("cf_defs.cf1.comparator_type"), "org.apache.cassandra.db.marshal.BytesType");
    }
}
| 7,561 |
0 | Create_ds/astyanax/astyanax-test/src/test/java/com/netflix/astyanax | Create_ds/astyanax/astyanax-test/src/test/java/com/netflix/astyanax/serializers/SerializersTest.java | package com.netflix.astyanax.serializers;
import java.lang.reflect.Field;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.nio.ByteBuffer;
import java.util.Date;
import java.util.UUID;
import junit.framework.Assert;
import com.netflix.astyanax.shaded.org.apache.cassandra.db.marshal.AbstractType;
import com.netflix.astyanax.shaded.org.apache.cassandra.db.marshal.CompositeType;
import com.netflix.astyanax.shaded.org.apache.cassandra.db.marshal.ShadedTypeParser;
import com.netflix.astyanax.shaded.org.apache.cassandra.utils.ByteBufferUtil;
import org.apache.commons.lang.StringUtils;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.netflix.astyanax.Serializer;
import com.netflix.astyanax.annotations.Component;
import com.netflix.astyanax.model.Composite;
/**
 * Unit tests for the Astyanax serializer implementations: primitives,
 * compressed strings, composites and annotated composites. Most tests
 * round-trip a value through toByteBuffer/fromByteBuffer and, for ordered
 * types, exercise getNext() boundary behavior.
 */
public class SerializersTest {
    private static Logger LOG = LoggerFactory.getLogger(SerializersTest.class);

    // Used only to render serialized bytes as a hex string in assertions.
    private static BytesArraySerializer hexSerializer = new BytesArraySerializer();

    /**
     * Asserts that deserializing the SAME ByteBuffer twice yields the original
     * value both times, i.e. fromByteBuffer does not consume or corrupt the
     * buffer's position.
     *
     * @param ser serializer under test
     * @param val value to round-trip
     */
    public <T> void testSerializerIdempotent(Serializer<T> ser, T val) {
        ByteBuffer bb = ser.toByteBuffer(val);
        T deSerVal1 = ser.fromByteBuffer(bb);
        T deSerVal2 = ser.fromByteBuffer(bb);
        Assert.assertEquals(val, deSerVal1);
        Assert.assertEquals(val, deSerVal2);
    }

    /**
     * Asserts a single serialize/deserialize round trip preserves the value.
     *
     * @param ser serializer under test
     * @param val value to round-trip
     */
    public <T> void testSerializer(Serializer<T> ser, T val) {
        ByteBuffer bb = ser.toByteBuffer(val);
        T deSerVal1 = ser.fromByteBuffer(bb);
        Assert.assertEquals(val, deSerVal1);
    }

    /**
     * Reads a long out of a sliced view over a larger array; both reads of the
     * same view must return the second long.
     * NOTE(review): exceptions are only printed here, not failed — the asserts
     * inside the try are the only real checks.
     */
    @Test
    public void testMultiLongs() {
        try {
            ByteBuffer bb = ByteBuffer.allocate(100);
            bb.putLong(1L);
            bb.putLong(2L);
            // View over bytes [8, 16): the second long only.
            ByteBuffer bb2 = ByteBuffer.wrap(bb.array(), 8, 8);
            LongSerializer ser = LongSerializer.get();
            Long ret = ser.fromByteBuffer(bb2);
            Long ret2 = ser.fromByteBuffer(bb2);
            Assert.assertTrue(2 == ret);
            Assert.assertTrue(2 == ret2);
        }
        catch (Exception e) {
            e.printStackTrace();
        }
    }

    // Two-component composite used by testMultiLongsComposite.
    static class LongLong {
        @Component
        Long comp1;
        @Component
        Long comp2;

        public LongLong() {
        }

        public LongLong(long c1, long c2) {
            this.comp1 = c1;
            this.comp2 = c2;
        }

        @Override
        public boolean equals(Object c2) {
            if (!(c2 instanceof LongLong)) return false;
            // NOTE(review): == on boxed Longs is reference equality; this only
            // works here because the small test values hit the Long autobox
            // cache. Confirm before reusing this class with larger values.
            if (this.comp1 == ((LongLong)c2).comp1 && this.comp2 == ((LongLong)c2).comp2) return true;
            return false;
        }
    };

    /**
     * Round-trips a two-long annotated composite.
     * NOTE(review): exceptions are printed rather than failing the test.
     */
    @Test
    public void testMultiLongsComposite() {
        try {
            AnnotatedCompositeSerializer<LongLong> ser = new AnnotatedCompositeSerializer<LongLong>(LongLong.class);
            LongLong comp = new LongLong(1L, 2L);
            ByteBuffer bb = ser.toByteBuffer(comp);
            LongLong comp2 = ser.fromByteBuffer(bb);
            Assert.assertEquals(comp, comp2);
        }
        catch (Exception e) {
            e.printStackTrace();
        }
    }

    /** ASCII "Test" must serialize to the hex bytes 54 65 73 74. */
    @Test
    public void testAsciiSerializer() {
        AsciiSerializer ser = new AsciiSerializer();
        String value = "Test";
        ByteBuffer byteBuffer = ser.fromString(value);
        Assert.assertEquals("54657374", hexSerializer.getString(byteBuffer));
        testSerializer(ser, value);
    }

    @Test
    public void testAsciiSerializerIdempotent() {
        AsciiSerializer ser = new AsciiSerializer();
        String value = "Test";
        testSerializerIdempotent(ser, value);
    }

    /**
     * BigInteger round trip plus getNext(): the successor value is +1 and the
     * buffer grows by one byte (127 -> 128 needs an extra sign byte).
     */
    @Test
    public void testBigIntegerSerializer() {
        BigIntegerSerializer ser = new BigIntegerSerializer();
        BigInteger bi1 = new BigInteger("127");
        ByteBuffer bb1 = ser.toByteBuffer(bi1);
        BigInteger bi1_verify = ser.fromByteBuffer(bb1);
        ByteBuffer bb1_str = ser.fromString("127");
        ByteBuffer bb2 = ser.getNext(bb1);
        BigInteger bi2 = ser.fromByteBuffer(bb2);
        Assert.assertEquals(bi1, bi1_verify);
        Assert.assertEquals(bb1, bb1_str);
        Assert.assertEquals(1, bi2.intValue() - bi1.intValue());
        Assert.assertEquals(bb2.capacity(), bb1.capacity() + 1);
    }

    @Test
    public void testBigIntegerSerializerIdempotent() {
        BigIntegerSerializer ser = new BigIntegerSerializer();
        BigInteger bi = new BigInteger("127");
        testSerializerIdempotent(ser, bi);
    }

    @Test
    public void testBooleanSerializer() {
        BooleanSerializer ser = new BooleanSerializer();
        Boolean value = new Boolean(true);
        testSerializer(ser, value);
    }

    @Test
    public void testBooleanSerializerIdempotent() {
        BooleanSerializer ser = new BooleanSerializer();
        Boolean value = new Boolean(true);
        testSerializerIdempotent(ser, value);
    }

    @Test
    public void testByteBufferSerializer() {
        ByteBufferSerializer ser = new ByteBufferSerializer();
        ByteBuffer value = ByteBufferUtil.bytes("Hello, World");
        testSerializer(ser, value);
    }

    @Test
    public void testByteBufferSerializerIdempotent() {
        ByteBufferSerializer ser = new ByteBufferSerializer();
        ByteBuffer value = ByteBufferUtil.bytes("Hello, World");
        testSerializerIdempotent(ser, value);
    }

    /**
     * Gzip round trip on a highly repetitive string; logs the achieved
     * compression ratio.
     */
    @Test
    public void testCompressedSerializer() {
        GzipStringSerializer ser = new GzipStringSerializer();

        String str = StringUtils
                .repeat("{The : 'quick', brown : 'fox', jumped : 'over', the : 'lazy cow'}",
                        100);
        int origLength = str.length();

        ByteBuffer compressed = ser.toByteBuffer(str);
        int compLength = compressed.limit();

        String str2 = ser.fromByteBuffer(compressed);
        Assert.assertEquals(str, str2);

        LOG.info(String.format("Compressed at ratio: %2f",
                (double) (origLength - compLength) / (double) origLength));
    }

    /** Same as testCompressedSerializer but using Snappy compression. */
    @Test
    public void testSnappyCompressedSerializer() {
        SnappyStringSerializer ser = new SnappyStringSerializer();

        String str = StringUtils
                .repeat("{The : 'quick', brown : 'fox', jumped : 'over', the : 'lazy cow'}",
                        100);
        int origLength = str.length();

        ByteBuffer compressed = ser.toByteBuffer(str);
        int compLength = compressed.limit();

        String str2 = ser.fromByteBuffer(compressed);
        Assert.assertEquals(str, str2);

        LOG.info(String.format("Compressed at ratio: %2f",
                (double) (origLength - compLength) / (double) origLength));
    }

    // NOTE(review): empty test body — BytesArraySerializer is never exercised.
    @Test
    public void testBytesArraySerializer() {
        BytesArraySerializer ser = new BytesArraySerializer();
    }

    @Test
    public void testCharSerializer() {
        CharSerializer ser = new CharSerializer();
        Character value = new Character('A');
        testSerializer(ser, value);
    }

    @Test
    public void testCharSerializerIdempotent() {
        CharSerializer ser = new CharSerializer();
        Character value = new Character('A');
        testSerializerIdempotent(ser, value);
    }

    @Test
    public void testDateSerializer() {
        DateSerializer ser = new DateSerializer();
        Date value = new Date();
        testSerializer(ser, value);
    }

    @Test
    public void testDateSerializerIdempotent() {
        DateSerializer ser = new DateSerializer();
        Date value = new Date();
        testSerializerIdempotent(ser, value);
    }

    /**
     * Double round trip plus getNext(): successor is value + Double.MIN_VALUE
     * with unchanged buffer size; getNext at Double.MAX_VALUE must throw.
     */
    @Test
    public void testDoubleSerializer() {
        DoubleSerializer ser = new DoubleSerializer();
        Double d1 = 127.0;
        ByteBuffer bb1 = ser.toByteBuffer(d1);
        Double d1_verify = ser.fromByteBuffer(bb1.duplicate());
        ByteBuffer bb1_str = ser.fromString("127");
        ByteBuffer bb2 = ser.getNext(bb1);
        Double d2 = ser.fromByteBuffer(bb2.duplicate());
        Assert.assertEquals(d1, d1_verify);
        Assert.assertEquals(bb1, bb1_str);
        Assert.assertEquals(d1 + Double.MIN_VALUE, d2);
        Assert.assertEquals(bb2.capacity(), bb1.capacity());

        ByteBuffer bbMax = ser.toByteBuffer(Double.MAX_VALUE);
        try {
            ser.getNext(bbMax);
            Assert.fail();
        } catch (Exception e) {
            LOG.info(e.getMessage());
        }
    }

    @Test
    public void testDoubleSerializerIdempotent() {
        DoubleSerializer ser = new DoubleSerializer();
        Double value = 127.0;
        testSerializerIdempotent(ser, value);
    }

    /**
     * Float round trip plus getNext(): successor is value + Float.MIN_VALUE;
     * getNext at Float.MAX_VALUE must throw.
     */
    @Test
    public void testFloatSerializer() {
        FloatSerializer ser = new FloatSerializer();
        Float f1 = (float) 127.0;
        ByteBuffer bb1 = ser.toByteBuffer(f1);
        Float f1_verify = ser.fromByteBuffer(bb1.duplicate());
        ByteBuffer bb1_str = ser.fromString("127");
        ByteBuffer bb2 = ser.getNext(bb1);
        Float f2 = ser.fromByteBuffer(bb2.duplicate());
        Assert.assertEquals(f1, f1_verify);
        Assert.assertEquals(bb1, bb1_str);
        Assert.assertEquals(f1 + Float.MIN_VALUE, f2);
        Assert.assertEquals(bb2.capacity(), bb1.capacity());

        ByteBuffer bbMax = ser.toByteBuffer(Float.MAX_VALUE);
        try {
            ser.getNext(bbMax);
            Assert.fail();
        } catch (Exception e) {
            LOG.info(e.getMessage());
        }
    }

    @Test
    public void testFloatSerializerIdempotent() {
        FloatSerializer ser = new FloatSerializer();
        Float value = (float) 127.0;
        testSerializerIdempotent(ser, value);
    }

    /**
     * Integer round trip plus getNext(): successor is +1 with unchanged buffer
     * size; getNext at Integer.MAX_VALUE must throw (no representable next).
     */
    @Test
    public void testIntegerSerializer() {
        IntegerSerializer ser = new IntegerSerializer();
        Integer bi1 = 127;
        ByteBuffer bb1 = ser.toByteBuffer(bi1);
        Integer bi1_verify = ser.fromByteBuffer(bb1);
        ByteBuffer bb1_str = ser.fromString("127");
        ByteBuffer bb2 = ser.getNext(bb1);
        Integer bi2 = ser.fromByteBuffer(bb2);
        Assert.assertEquals(bi1, bi1_verify);
        Assert.assertEquals(bb1, bb1_str);
        Assert.assertEquals(1, bi2.intValue() - bi1.intValue());
        Assert.assertEquals(bb2.capacity(), bb1.capacity());

        ByteBuffer bbMax = ser.toByteBuffer(Integer.MAX_VALUE);
        try {
            ser.getNext(bbMax);
            Assert.fail();
        } catch (Exception e) {
            LOG.info(e.getMessage());
        }
    }

    // NOTE(review): method name misspells "Idempotent" ("Idmepotent").
    @Test
    public void testIntegerSerializerIdmepotent() {
        IntegerSerializer ser = new IntegerSerializer();
        Integer val = 127;
        testSerializerIdempotent(ser, val);
    }

    /**
     * Long round trip plus getNext(); getNext at Long.MAX_VALUE must throw.
     */
    @Test
    public void testLongSerializer() {
        LongSerializer ser = new LongSerializer();
        Long val1 = (long) 127;
        ByteBuffer bb1 = ser.toByteBuffer(val1);
        Long val1_verify = ser.fromByteBuffer(bb1.duplicate());
        ByteBuffer bb1_str = ser.fromString("127");
        ByteBuffer bb2 = ser.getNext(bb1);
        Long val2 = ser.fromByteBuffer(bb2.duplicate());
        Assert.assertEquals(val1, val1_verify);
        Assert.assertEquals(bb1, bb1_str);
        Assert.assertEquals(1, val2.intValue() - val1.intValue());
        Assert.assertEquals(bb2.capacity(), bb1.capacity());

        ByteBuffer bbMax = ser.toByteBuffer(Long.MAX_VALUE);
        try {
            ser.getNext(bbMax);
            Assert.fail();
        } catch (Exception e) {
            LOG.info(e.getMessage());
        }
    }

    @Test
    public void testLongSerializerIdempotent() {
        LongSerializer ser = new LongSerializer();
        Long val = (long) 127;
        testSerializerIdempotent(ser, val);
    }

    /**
     * Byte round trip plus getNext(); getNext at Byte.MAX_VALUE must throw.
     */
    @Test
    public void testByteSerializer() {
        ByteSerializer ser = new ByteSerializer();
        Byte val1 = 31;
        ByteBuffer bb1 = ser.toByteBuffer(val1);
        Byte val1_verify = ser.fromByteBuffer(bb1.duplicate());
        ByteBuffer bb1_str = ser.fromString("31");
        ByteBuffer bb2 = ser.getNext(bb1);
        Byte val2 = ser.fromByteBuffer(bb2);
        Assert.assertEquals(val1, val1_verify);
        Assert.assertEquals(bb1, bb1_str);
        Assert.assertEquals(1, val2.intValue() - val1.intValue());
        Assert.assertEquals(bb2.capacity(), bb1.capacity());

        ByteBuffer bbMax = ser.toByteBuffer(Byte.MAX_VALUE);
        try {
            ser.getNext(bbMax);
            Assert.fail();
        } catch (Exception e) {
            LOG.info(e.getMessage());
        }
    }

    // NOTE(review): method name misspells "Idempotent" ("Idmepotent").
    @Test
    public void testByteSerializerIdmepotent() {
        ByteSerializer ser = new ByteSerializer();
        Byte val = 31;
        testSerializerIdempotent(ser, val);
    }

    /**
     * Short round trip plus getNext(); getNext at Short.MAX_VALUE must throw.
     */
    @Test
    public void testShortSerializer() {
        ShortSerializer ser = new ShortSerializer();
        Short val1 = 127;
        ByteBuffer bb1 = ser.toByteBuffer(val1);
        Short val1_verify = ser.fromByteBuffer(bb1.duplicate());
        ByteBuffer bb1_str = ser.fromString("127");
        ByteBuffer bb2 = ser.getNext(bb1);
        Short val2 = ser.fromByteBuffer(bb2);
        Assert.assertEquals(val1, val1_verify);
        Assert.assertEquals(bb1, bb1_str);
        Assert.assertEquals(1, val2.intValue() - val1.intValue());
        Assert.assertEquals(bb2.capacity(), bb1.capacity());

        ByteBuffer bbMax = ser.toByteBuffer(Short.MAX_VALUE);
        try {
            ser.getNext(bbMax);
            Assert.fail();
        } catch (Exception e) {
            LOG.info(e.getMessage());
        }
    }

    @Test
    public void testShortSerializerIdempotent() {
        ShortSerializer ser = new ShortSerializer();
        Short val = 127;
        testSerializerIdempotent(ser, val);
    }

    @Test
    public void testStringSerializer() {
        StringSerializer ser = new StringSerializer();
        String val = "Hello, World";
        testSerializer(ser, val);
    }

    // NOTE(review): calls testSerializer, not testSerializerIdempotent —
    // despite the name, idempotence is not actually checked here.
    @Test
    public void testStringSerializerIdempotent() {
        StringSerializer ser = new StringSerializer();
        String val = "Hello, World";
        testSerializer(ser, val);
    }

    @Test
    public void testUUIDSerializer() {
        UUIDSerializer ser = new UUIDSerializer();
        UUID uuid = UUID.randomUUID();
        testSerializer(ser, uuid);
    }

    @Test
    public void testUUIDSerializerIdempotent() {
        UUIDSerializer ser = new UUIDSerializer();
        UUID uuid = UUID.randomUUID();
        testSerializerIdempotent(ser, uuid);
    }

    // NOTE(review): incomplete test — the BigInteger path is commented out and
    // nothing is asserted.
    @Test
    public void intVsBigInt() {
        IntegerSerializer intSer = new IntegerSerializer();
        BigIntegerSerializer bigIntSer = new BigIntegerSerializer();

        int value = 1234;
        intSer.toBytes(value);
        // bigIntSer.toBytes();
    }

    // Five-component composite (ordinals implied by declaration order).
    static class Composite1 {
        @Component
        public String firstName;

        @Component
        public String lastName;

        @Component
        public Integer age;

        @Component
        public BigDecimal decimal;

        @Component
        public BigInteger integer;

        public Composite1() {

        }

        public Composite1(String firstName, String lastName, Integer age,
                BigInteger bi, BigDecimal bd) {
            this.firstName = firstName;
            this.lastName = lastName;
            this.age = age;
            this.decimal = bd;
            this.integer = bi;
        }

        public String toString() {
            return new StringBuilder().append("(").append(firstName)
                    .append(",").append(lastName).append(",").append(age)
                    .append(")").toString();
        }

        @Override
        public boolean equals(Object arg0) {
            if (!(arg0 instanceof Composite1)) {
                return false;
            }
            Composite1 other = (Composite1) arg0;
            // String.valueOf(...) makes the comparison null-safe ("null"=="null").
            // NOTE(review): age is compared with == on boxed Integers; works
            // here only via the small-value autobox cache.
            return (String.valueOf(firstName).equals(
                    String.valueOf(other.firstName))
                    && String.valueOf(lastName).equals(
                            String.valueOf(other.lastName))
                    && String.valueOf(decimal).equals(
                            String.valueOf(other.decimal))
                    && String.valueOf(integer).equals(
                            String.valueOf(other.integer)) && age == other.age);
        }
    }

    @Test
    public void testAnnotatedCompositeSerializer() {
        try {
            AnnotatedCompositeSerializer<Composite1> ser = new AnnotatedCompositeSerializer<Composite1>(
                    Composite1.class);
            Composite1 c1 = new Composite1("Arielle", "Landau", 6,
                    new BigInteger("1"), new BigDecimal(1));
            ByteBuffer bytes = ser.toByteBuffer(c1);
            Composite1 c2 = ser.fromByteBuffer(bytes);
            Assert.assertEquals(c1, c2);
        } catch (Exception e) {
            LOG.error(e.getMessage());
            Assert.fail();
        }
    }

    @Test
    public void testAnnotatedCompositeSerializerIdempotent() {
        try {
            AnnotatedCompositeSerializer<Composite1> ser = new AnnotatedCompositeSerializer<Composite1>(
                    Composite1.class);
            Composite1 c1 = new Composite1("Arielle", "Landau", 6,
                    new BigInteger("1"), new BigDecimal(1));
            testSerializerIdempotent(ser, c1);
        } catch (Exception e) {
            LOG.error(e.getMessage());
            Assert.fail();
        }
    }

    /**
     * Micro-benchmark comparing three ways of encoding the same composite:
     * hand-rolled buffer writes, field reflection, and the annotated
     * serializer. Timings are printed only; nothing is asserted.
     */
    @Test
    public void testStressAnnotatedCompositeSerializer() throws Exception {
        AnnotatedCompositeSerializer<Composite1> ser = new AnnotatedCompositeSerializer<Composite1>(
                Composite1.class);
        int count = 10000;

        Composite1 c1 = new Composite1("Arielle", "Landau", 6, new BigInteger(
                "1"), new BigDecimal(2));

        long startTime, runTime;

        // Three outer rounds to let the JIT warm up between comparisons.
        for (int j = 0; j < 3; j++) {
            System.out.println("-----");
            // Baseline: direct, hard-coded buffer writes.
            startTime = System.nanoTime();
            for (int i = 0; i < count; i++) {
                ByteBuffer bb = ByteBuffer.allocate(8092);

                bb.putShort((short) c1.firstName.length());
                bb.put(ByteBuffer.wrap(c1.firstName.getBytes()));
                bb.put((byte) 0x00);

                bb.putShort((short) c1.lastName.length());
                bb.put(ByteBuffer.wrap(c1.lastName.getBytes()));
                bb.put((byte) 0x00);

                bb.putShort((short) 4);
                bb.putInt(c1.age);
                bb.put((byte) 0x00);
                bb.flip();
            }
            runTime = System.nanoTime() - startTime;
            System.out.println("Raw Time in msec : " + runTime / 1000000);

            // Same encoding, but field values fetched via reflection.
            StringSerializer sser = StringSerializer.get();
            Field fFirstName = Composite1.class.getField("firstName");
            Field fLastName = Composite1.class.getField("lastName");
            Field fAge = Composite1.class.getField("age");

            startTime = System.nanoTime();
            for (int i = 0; i < count; i++) {
                ByteBuffer bb = ByteBuffer.allocate(8092);

                String firstName = (String) fFirstName.get(c1);
                bb.putShort((short) firstName.length());
                bb.put(firstName.getBytes());
                bb.put((byte) 0x00);

                String lastName = (String) fLastName.get(c1);
                bb.putShort((short) lastName.length());
                bb.put(lastName.getBytes());
                bb.put((byte) 0x00);

                int age = (Integer) fAge.get(c1);
                bb.putShort((short) 4);
                bb.putInt(age);
                bb.put((byte) 0x00);
                bb.flip();
            }
            runTime = System.nanoTime() - startTime;
            System.out
                    .println("Reflection Time in msec : " + runTime / 1000000);

            // The serializer under test.
            startTime = System.nanoTime();
            for (int i = 0; i < count; i++) {
                ByteBuffer bb = ser.toByteBuffer(c1);
                // Composite1 c2 = ser.fromByteBuffer(bytes);
                // System.out.println(Hex.encodeHexString(bb.array()));
            }
            runTime = System.nanoTime() - startTime;
            System.out.println("toByteBuffer Time in msec : " + runTime
                    / 1000000);
        }
    }

    /** Null trailing components must round-trip (Composite1.equals is null-safe). */
    @Test
    public void testAnnotatedCompositeSerializerWithNulls() {
        try {
            AnnotatedCompositeSerializer<Composite1> ser = new AnnotatedCompositeSerializer<Composite1>(
                    Composite1.class);
            Composite1 c1 = new Composite1("Arielle", null, null, null, null);

            ByteBuffer bytes = ser.toByteBuffer(c1);
            Composite1 c2 = ser.fromByteBuffer(bytes);
            Assert.assertEquals(c1, c2);
        } catch (Exception e) {
            e.printStackTrace();
            LOG.error(e.getMessage());
            Assert.fail();
        }
    }

    // Composite with EXPLICIT ordinals that differ from declaration order:
    // serialization order is firstName, age, lastName.
    static class Composite2 {
        @Component(ordinal = 0)
        String firstName;

        @Component(ordinal = 2)
        String lastName;

        @Component(ordinal = 1)
        Integer age;

        public Composite2() {

        }

        public Composite2(String firstName, String lastName, Integer age) {
            this.firstName = firstName;
            this.lastName = lastName;
            this.age = age;
        }

        public String toString() {
            return new StringBuilder().append("(").append(firstName)
                    .append(",").append(lastName).append(",").append(age)
                    .append(")").toString();
        }

        @Override
        public boolean equals(Object arg0) {
            if (!(arg0 instanceof Composite2)) {
                return false;
            }
            Composite2 other = (Composite2) arg0;
            // NOTE(review): not null-safe (unlike Composite1.equals) and age
            // uses boxed == — fine for the small values used in these tests.
            return (firstName.equals(other.firstName)
                    && lastName.equals(other.lastName) && age == other.age);
        }
    }

    /** Round trip honoring explicit @Component ordinals; also re-reads the buffer. */
    @Test
    public void testAnnotatedCompositeSerializerWithOrdinal() {
        AnnotatedCompositeSerializer<Composite2> ser = new AnnotatedCompositeSerializer<Composite2>(
                Composite2.class);

        try {
            Composite2 c1 = new Composite2("Arielle", "Landau", 6);
            ByteBuffer bytes = ser.toByteBuffer(c1);
            Composite2 c2 = ser.fromByteBuffer(bytes);
            Composite2 c3 = ser.fromByteBuffer(bytes);
            Assert.assertEquals(c1, c2);
            Assert.assertEquals(c2, c3);
        } catch (Exception e) {
            LOG.error(e.getMessage());
            Assert.fail();
        }
    }

    /**
     * Parses a CompositeType comparator string and round-trips a column name
     * through fromString/getString on the resulting type.
     */
    @Test
    public void testCompositeType() {
        String comparatorType = "CompositeType(UTF8Type,UTF8Type)";
        String columnName = "(abc,1234)";

        try {
            AbstractType type = ShadedTypeParser.parse(comparatorType);
            if (type instanceof CompositeType) {
                CompositeType ctype = (CompositeType) type;

                ByteBuffer data = ctype.fromString(columnName);
                String columnName2 = ctype.getString(data);
                Assert.assertEquals(columnName, columnName2);
            } else {
                Assert.fail();
            }
        } catch (Exception e) {
            // NOTE(review): fail() is called before logging; the log line is
            // unreachable because fail() throws.
            Assert.fail();
            LOG.error(e.getMessage());
        }
    }

    /**
     * A SpecificCompositeSerializer built from a parsed CompositeType must
     * preserve both the component VALUES and their Java TYPES across a
     * round trip.
     */
    @Test
    public void testDeserializeOfSepecificSerializer() throws Exception {
        Composite composite1 = new Composite("abc", 123L);
        CompositeSerializer serializer = new SpecificCompositeSerializer(
                (CompositeType) ShadedTypeParser
                        .parse("CompositeType(UTF8Type,LongType)"));
        ByteBuffer byteBuffer = serializer.toByteBuffer(composite1);
        Composite composite2 = serializer.fromByteBuffer(byteBuffer);
        Assert.assertEquals(String.class, composite1.getComponent(0).getValue()
                .getClass());
        Assert.assertEquals(Long.class, composite1.getComponent(1).getValue()
                .getClass());

        Assert.assertEquals(composite1.getComponent(0).getValue().getClass(),
                composite2.getComponent(0).getValue().getClass());
        Assert.assertEquals(composite1.getComponent(1).getValue().getClass(),
                composite2.getComponent(1).getValue().getClass());
        Assert.assertEquals(composite1.getComponent(0).getValue(), composite2
                .getComponent(0).getValue());
        Assert.assertEquals(composite1.getComponent(1).getValue(), composite2
                .getComponent(1).getValue());
    }
}
| 7,562 |
0 | Create_ds/astyanax/astyanax-test/src/test/java/com/netflix/astyanax | Create_ds/astyanax/astyanax-test/src/test/java/com/netflix/astyanax/serializers/AnnotatedCompositeSerializerTest.java | package com.netflix.astyanax.serializers;
import com.google.common.base.Strings;
import com.netflix.astyanax.annotations.Component;
import org.junit.Test;
import java.nio.ByteBuffer;
import java.util.Date;
/**
 * Tests for {@code AnnotatedCompositeSerializer}, covering buffer-overflow
 * regression scenarios when serializing composites whose encoded components
 * exceed the serializer's initial buffer size.
 */
public class AnnotatedCompositeSerializerTest {

    /**
     * Regression test: serializing components whose combined encoded size is
     * large must not overflow the serializer's buffer. Passes if no exception
     * is thrown — there are deliberately no assertions.
     */
    @Test
    public void testOverflow() {
        AnnotatedCompositeSerializer<Foo> serializer = new AnnotatedCompositeSerializer<Foo>(Foo.class);

        Foo foo = new Foo();
        foo.bar = Strings.repeat("b", 2000);
        foo.bar1 = Strings.repeat("b", 2000);
        foo.bar2 = Strings.repeat("b", 4192);

        ByteBuffer byteBuffer = serializer.toByteBuffer(foo);
    }

    /**
     * Same overflow regression with a mixed Date/String composite.
     * NOTE(review): updateTimestamp is left null — confirm the serializer is
     * expected to tolerate a null leading component here.
     */
    @Test
    public void testOverflow2() {
        AnnotatedCompositeSerializer<Foo2> serializer = new AnnotatedCompositeSerializer<Foo2>(
                Foo2.class);

        Foo2 foo = new Foo2();
        foo.bar = Strings.repeat("b", 500);
        foo.test = Strings.repeat("b", 12);

        ByteBuffer byteBuffer = serializer.toByteBuffer(foo);
    }

    // Date + two strings with distinct ordinals.
    public static class Foo2 {
        @Component(ordinal = 0)
        private Date updateTimestamp;

        @Component(ordinal = 1)
        private String bar;

        @Component(ordinal = 2)
        private String test;
    }

    // NOTE(review): all three components share ordinal 0 — verify this is
    // intentional for the overflow scenario and not a copy/paste slip.
    public static class Foo {
        @Component(ordinal = 0)
        private String bar;

        @Component(ordinal = 0)
        private String bar1;

        @Component(ordinal = 0)
        private String bar2;
    }
}
| 7,563 |
0 | Create_ds/astyanax/astyanax-test/src/test/java/com/netflix/astyanax | Create_ds/astyanax/astyanax-test/src/test/java/com/netflix/astyanax/serializers/SerializerPackageImplTest.java | package com.netflix.astyanax.serializers;
import java.nio.ByteBuffer;
import junit.framework.Assert;
import org.junit.Test;
/**
 * Tests for {@code SerializerPackageImpl}: resolving serializers from
 * Cassandra comparator-type strings for column names and row keys.
 */
public class SerializerPackageImplTest {

    /** Composite column names must round-trip through string &lt;-&gt; bytes. */
    @Test
    public void test() {
        SerializerPackageImpl serializers = new SerializerPackageImpl();
        try {
            serializers.setColumnType("CompositeType(UTF8Type, LongType)");
        } catch (UnknownComparatorException e) {
            // Fail with the cause message instead of dumping the stack trace
            // to stderr (original called printStackTrace then a bare fail()).
            Assert.fail("Unknown comparator: " + e.getMessage());
        }

        String input = "abc:123";
        ByteBuffer buffer = serializers.columnAsByteBuffer(input);
        String str = serializers.columnAsString(buffer);
        Assert.assertEquals(input, str);
    }

    /** A composite key type resolves to a SpecificCompositeSerializer. */
    @Test
    public void testSetCompositeKeyType() throws Exception
    {
        SerializerPackageImpl serializers = new SerializerPackageImpl();
        serializers.setKeyType( "CompositeType(UTF8Type, LongType)" );
        Assert.assertTrue(serializers.getKeySerializer() instanceof SpecificCompositeSerializer);
    }

    /** A plain key type resolves to the matching primitive serializer. */
    @Test
    public void testSetStandardKeyType() throws Exception
    {
        SerializerPackageImpl serializers = new SerializerPackageImpl();
        serializers.setKeyType( "LongType" );
        Assert.assertTrue(serializers.getKeySerializer() instanceof LongSerializer);
    }

    /** A reversed comparator resolves to a SpecificReversedSerializer. */
    @Test
    public void testSetReversedColumnType() throws Exception
    {
        SerializerPackageImpl serializers = new SerializerPackageImpl();
        serializers.setColumnNameType("ReversedType(UTF8Type)");
        Assert.assertTrue(serializers.getColumnNameSerializer() instanceof SpecificReversedSerializer);
    }
}
| 7,564 |
0 | Create_ds/astyanax/astyanax-test/src/test/java/com/netflix/astyanax | Create_ds/astyanax/astyanax-test/src/test/java/com/netflix/astyanax/recipes/ReverseIndexQueryTest.java | package com.netflix.astyanax.recipes;
import java.nio.ByteBuffer;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.atomic.AtomicLong;
import junit.framework.Assert;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.base.Function;
import com.netflix.astyanax.AstyanaxContext;
import com.netflix.astyanax.Cluster;
import com.netflix.astyanax.Keyspace;
import com.netflix.astyanax.MutationBatch;
import com.netflix.astyanax.Serializer;
import com.netflix.astyanax.annotations.Component;
import com.netflix.astyanax.connectionpool.NodeDiscoveryType;
import com.netflix.astyanax.connectionpool.exceptions.ConnectionException;
import com.netflix.astyanax.connectionpool.impl.ConnectionPoolConfigurationImpl;
import com.netflix.astyanax.connectionpool.impl.CountingConnectionPoolMonitor;
import com.netflix.astyanax.ddl.KeyspaceDefinition;
import com.netflix.astyanax.impl.AstyanaxConfigurationImpl;
import com.netflix.astyanax.model.Column;
import com.netflix.astyanax.model.ColumnFamily;
import com.netflix.astyanax.model.Row;
import com.netflix.astyanax.recipes.ReverseIndexQuery.IndexEntryCallback;
import com.netflix.astyanax.serializers.AnnotatedCompositeSerializer;
import com.netflix.astyanax.serializers.LongSerializer;
import com.netflix.astyanax.serializers.StringSerializer;
import com.netflix.astyanax.thrift.ThriftFamilyFactory;
/**
 * Integration test for the {@link ReverseIndexQuery} recipe.
 *
 * <p>Populates a data column family plus a sharded reverse index (value -&gt; key)
 * and then queries the data rows back through the index. Requires a reachable
 * Cassandra Thrift endpoint at {@link #SEEDS}; this is an integration test,
 * not a unit test.
 */
public class ReverseIndexQueryTest {
    private static Logger LOG = LoggerFactory.getLogger(ReverseIndexQueryTest.class);

    private static AstyanaxContext<Cluster> clusterContext;

    private static final String TEST_CLUSTER_NAME = "TestCluster";
    private static final String TEST_KEYSPACE_NAME = "ReverseIndexTest";
    private static final String TEST_DATA_CF = "Data";
    private static final String TEST_INDEX_CF = "Index";
    // When true, the keyspace is dropped and recreated before the test runs.
    private static final boolean TEST_INIT_KEYSPACE = true;
    private static final long ROW_COUNT = 1000;
    // Number of index shards; index rows are named "B_0" .. "B_<SHARD_COUNT-1>".
    private static final int SHARD_COUNT = 11;
    public static final String SEEDS = "localhost:7102";

    /** Data CF: row key is a Long id, columns are simple string names. */
    private static ColumnFamily<Long, String> CF_DATA = ColumnFamily
            .newColumnFamily(TEST_DATA_CF, LongSerializer.get(),
                    StringSerializer.get());

    /**
     * Composite column name for the reverse index: (indexed value, data row key).
     * Ordering by value first is what makes range queries over values possible.
     */
    private static class IndexEntry {
        @Component(ordinal = 0)
        Long value;
        @Component(ordinal = 1)
        Long key;

        public IndexEntry(Long value, Long key) {
            this.value = value;
            this.key = key;
        }
    }

    private static Serializer<IndexEntry> indexEntitySerializer = new AnnotatedCompositeSerializer<IndexEntry>(
            IndexEntry.class);

    /** Index CF: sharded string row keys, composite (value, key) column names. */
    private static ColumnFamily<String, IndexEntry> CF_INDEX = ColumnFamily
            .newColumnFamily(TEST_INDEX_CF, StringSerializer.get(),
                    indexEntitySerializer);

    /**
     * Builds the cluster context and (optionally) recreates and populates the
     * test keyspace. Connection failures are logged rather than rethrown so a
     * missing local cluster surfaces as test failures, not setup errors.
     */
    @BeforeClass
    public static void setup() throws Exception {
        clusterContext = new AstyanaxContext.Builder()
                .forCluster(TEST_CLUSTER_NAME)
                .withAstyanaxConfiguration(
                        new AstyanaxConfigurationImpl()
                                .setDiscoveryType(NodeDiscoveryType.NONE))
                .withConnectionPoolConfiguration(
                        new ConnectionPoolConfigurationImpl(TEST_CLUSTER_NAME)
                                .setMaxConnsPerHost(1).setSeeds(SEEDS))
                .withConnectionPoolMonitor(new CountingConnectionPoolMonitor())
                .buildCluster(ThriftFamilyFactory.getInstance());
        clusterContext.start();

        if (TEST_INIT_KEYSPACE) {
            Cluster cluster = clusterContext.getEntity();
            try {
                LOG.info("Dropping keyspace: " + TEST_KEYSPACE_NAME);
                cluster.dropKeyspace(TEST_KEYSPACE_NAME);
                // Give the cluster time to propagate the schema drop.
                Thread.sleep(10000);
            } catch (ConnectionException e) {
                LOG.warn(e.getMessage());
            }

            Map<String, String> stratOptions = new HashMap<String, String>();
            stratOptions.put("replication_factor", "3");

            try {
                LOG.info("Creating keyspace: " + TEST_KEYSPACE_NAME);
                KeyspaceDefinition ksDef = cluster.makeKeyspaceDefinition();
                ksDef.setName(TEST_KEYSPACE_NAME)
                        .setStrategyOptions(stratOptions)
                        .setStrategyClass("SimpleStrategy")
                        .addColumnFamily(
                                cluster.makeColumnFamilyDefinition()
                                        .setName(CF_DATA.getName())
                                        .setComparatorType("UTF8Type")
                        // .setKeyValidationClass("LongType")
                        // .setDefaultValidationClass("BytesType")
                        )
                        .addColumnFamily(
                                cluster.makeColumnFamilyDefinition()
                                        .setName(CF_INDEX.getName())
                                        .setComparatorType(
                                                "CompositeType(LongType, LongType)")
                                        .setDefaultValidationClass("BytesType"));
                cluster.addKeyspace(ksDef);
                Thread.sleep(2000);
                populateKeyspace();
            } catch (ConnectionException e) {
                LOG.error(e.getMessage());
            }
        }
    }

    @AfterClass
    public static void teardown() {
        if (clusterContext != null)
            clusterContext.shutdown();
    }

    /**
     * Writes ROW_COUNT data rows and one index entry per row. Each data row
     * {@code row} gets index value {@code row * 100}, and the index entry is
     * written to shard "B_" + (row % SHARD_COUNT).
     */
    public static void populateKeyspace() throws Exception {
        LOG.info("Populating keyspace: " + TEST_KEYSPACE_NAME);
        Keyspace keyspace = clusterContext.getEntity().getKeyspace(
                TEST_KEYSPACE_NAME);
        try {
            // CF_Users :
            // 1 :
            // 'A' : 1,
            // 'B' : 2,
            //
            // CF_Index :
            // 'B_Shard1':
            // 2:1 : null
            // 3:2 : null
            //
            MutationBatch m = keyspace.prepareMutationBatch();
            for (long row = 0; row < ROW_COUNT; row++) {
                long value = row * 100;
                m.withRow(CF_DATA, row).putColumn("A", "ABC", null)
                        .putColumn("B", "DEF", null);

                m.withRow(CF_INDEX, "B_" + (row % SHARD_COUNT)).putColumn(
                        new IndexEntry(value, row), row, null);
            }
            // System.out.println(m);
            m.execute();
        } catch (Exception e) {
            LOG.error(e.getMessage());
            // Include the cause so a populate failure is diagnosable from the report.
            Assert.fail(e.getMessage());
        }
    }

    /**
     * Queries data rows whose index value falls in [100, 10000] across all
     * shards, counting rows seen and skipping index entries with odd keys.
     */
    @Test
    public void testReverseIndex() throws Exception{
        LOG.info("Starting");
        final AtomicLong counter = new AtomicLong();

        Keyspace keyspace = clusterContext.getEntity().getKeyspace(TEST_KEYSPACE_NAME);
        ReverseIndexQuery
                .newQuery(keyspace, CF_DATA, CF_INDEX.getName(),
                        LongSerializer.get())
                .fromIndexValue(100L)
                .toIndexValue(10000L)
                .withIndexShards(
                        new Shards.StringShardBuilder().setPrefix("B_")
                                .setShardCount(SHARD_COUNT).build())
                .withColumnSlice(Arrays.asList("A"))
                .forEach(new Function<Row<Long, String>, Void>() {
                    @Override
                    public Void apply(Row<Long, String> row) {
                        StringBuilder sb = new StringBuilder();
                        for (Column<String> column : row.getColumns()) {
                            sb.append(column.getName()).append(", ");
                        }
                        counter.incrementAndGet();
                        LOG.info("Row: " + row.getKey() + " Columns: "
                                + sb.toString());
                        return null;
                    }
                }).forEachIndexEntry(new IndexEntryCallback<Long, Long>() {
                    @Override
                    public boolean handleEntry(Long key, Long value,
                            ByteBuffer meta) {
                        LOG.info("Row : " + key + " IndexValue: " + value
                                + " Meta: "
                                + LongSerializer.get().fromByteBuffer(meta));
                        // Returning false skips loading the data row for this entry.
                        if (key % 2 == 1)
                            return false;
                        return true;
                    }
                }).execute();

        LOG.info("Read " + counter.get() + " rows");
    }
}
| 7,565 |
0 | Create_ds/astyanax/astyanax-test/src/test/java/com/netflix/astyanax | Create_ds/astyanax/astyanax-test/src/test/java/com/netflix/astyanax/recipes/MiscUnitTest.java | package com.netflix.astyanax.recipes;
import com.google.common.base.Function;
import com.google.common.base.Stopwatch;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Maps;
import com.netflix.astyanax.AstyanaxContext;
import com.netflix.astyanax.ColumnListMutation;
import com.netflix.astyanax.Keyspace;
import com.netflix.astyanax.MutationBatch;
import com.netflix.astyanax.connectionpool.NodeDiscoveryType;
import com.netflix.astyanax.connectionpool.OperationResult;
import com.netflix.astyanax.connectionpool.impl.ConnectionPoolConfigurationImpl;
import com.netflix.astyanax.connectionpool.impl.ConnectionPoolType;
import com.netflix.astyanax.connectionpool.impl.CountingConnectionPoolMonitor;
import com.netflix.astyanax.ddl.KeyspaceDefinition;
import com.netflix.astyanax.impl.AstyanaxConfigurationImpl;
import com.netflix.astyanax.model.*;
import com.netflix.astyanax.recipes.functions.ColumnCounterFunction;
import com.netflix.astyanax.recipes.functions.RowCopierFunction;
import com.netflix.astyanax.recipes.functions.RowCounterFunction;
import com.netflix.astyanax.recipes.functions.TraceFunction;
import com.netflix.astyanax.recipes.locks.ColumnPrefixDistributedRowLock;
import com.netflix.astyanax.recipes.locks.StaleLockException;
import com.netflix.astyanax.recipes.reader.AllRowsReader;
import com.netflix.astyanax.recipes.uniqueness.*;
import com.netflix.astyanax.serializers.IntegerSerializer;
import com.netflix.astyanax.serializers.LongSerializer;
import com.netflix.astyanax.serializers.StringSerializer;
import com.netflix.astyanax.serializers.TimeUUIDSerializer;
import com.netflix.astyanax.thrift.ThriftFamilyFactory;
import com.netflix.astyanax.util.SingletonEmbeddedCassandra;
import com.netflix.astyanax.util.TimeUUIDUtils;
import junit.framework.Assert;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Map;
import java.util.UUID;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicLong;
public class MiscUnitTest {
private static Logger LOG = LoggerFactory.getLogger(MiscUnitTest.class);
/**
* Constants
*/
private static final long CASSANDRA_WAIT_TIME = 3000;
private static final int TTL = 20;
private static final String TEST_CLUSTER_NAME = "cass_sandbox";
private static final String TEST_KEYSPACE_NAME = "AstyanaxUnitTests_MiscRecipes";
private static final String SEEDS = "localhost:9160";
private static final int ALL_ROWS_COUNT = 10000;
/**
* Column Family definitions
*/
public static ColumnFamily<String, UUID> CF_USER_UNIQUE_UUID = ColumnFamily
.newColumnFamily(
"UserUniqueUUID",
StringSerializer.get(),
TimeUUIDSerializer.get());
public static ColumnFamily<String, UUID> CF_EMAIL_UNIQUE_UUID = ColumnFamily
.newColumnFamily(
"EmailUniqueUUID",
StringSerializer.get(),
TimeUUIDSerializer.get());
private static ColumnFamily<String, String> LOCK_CF_LONG =
ColumnFamily.newColumnFamily("LockCfLong", StringSerializer.get(), StringSerializer.get(), LongSerializer.get());
private static ColumnFamily<String, String> LOCK_CF_STRING =
ColumnFamily.newColumnFamily("LockCfString", StringSerializer.get(), StringSerializer.get(), StringSerializer.get());
private static ColumnFamily<String, String> UNIQUE_CF = ColumnFamily
.newColumnFamily(
"UniqueCf",
StringSerializer.get(),
StringSerializer.get());
public static ColumnFamily<String, String> CF_STANDARD1 = ColumnFamily
.newColumnFamily(
"Standard1",
StringSerializer.get(),
StringSerializer.get());
public static ColumnFamily<String, String> CF_STANDARD1_COPY = ColumnFamily
.newColumnFamily(
"Standard1_COPY",
StringSerializer.get(),
StringSerializer.get());
public static ColumnFamily<Integer, Integer> CF_ALL_ROWS = ColumnFamily
.newColumnFamily(
"AllRowsMiscUnitTest",
IntegerSerializer.get(),
IntegerSerializer.get());
public static ColumnFamily<Integer, Integer> CF_ALL_ROWS_COPY = ColumnFamily
.newColumnFamily(
"AllRowsMiscUnitTestCopy",
IntegerSerializer.get(),
IntegerSerializer.get());
    /**
     * Internal
     */
private static Keyspace keyspace;
private static AstyanaxContext<Keyspace> keyspaceContext;
    /**
     * Boots the singleton embedded Cassandra instance and creates the test
     * keyspace with all column families and seed data.
     */
    @BeforeClass
    public static void setup() throws Exception {
        System.out.println("TESTING THRIFT KEYSPACE");

        SingletonEmbeddedCassandra.getInstance();

        // Give the embedded server time to finish starting before schema changes.
        Thread.sleep(CASSANDRA_WAIT_TIME);

        createKeyspace();
    }
    /**
     * Shuts down the keyspace context; the embedded Cassandra singleton itself
     * is left running for other test classes.
     */
    @AfterClass
    public static void teardown() throws Exception {
        if (keyspaceContext != null)
            keyspaceContext.shutdown();

        // Allow in-flight operations to drain before the JVM moves on.
        Thread.sleep(CASSANDRA_WAIT_TIME);
    }
    /**
     * (Re)creates the test keyspace, defines every column family used by the
     * tests in this class, and seeds CF_STANDARD1 ('A'..'Z' rows with columns
     * 'a'..'z' plus indexed columns) and CF_ALL_ROWS (ALL_ROWS_COUNT rows).
     * Any failure while seeding fails the whole class via Assert.fail().
     */
    public static void createKeyspace() throws Exception {
        keyspaceContext = new AstyanaxContext.Builder()
                .forCluster(TEST_CLUSTER_NAME)
                .forKeyspace(TEST_KEYSPACE_NAME)
                .withAstyanaxConfiguration(
                        new AstyanaxConfigurationImpl()
                                .setDiscoveryType(NodeDiscoveryType.RING_DESCRIBE)
                                .setConnectionPoolType(ConnectionPoolType.TOKEN_AWARE)
                                .setDiscoveryDelayInSeconds(60000))
                .withConnectionPoolConfiguration(
                        new ConnectionPoolConfigurationImpl(TEST_CLUSTER_NAME
                                + "_" + TEST_KEYSPACE_NAME)
                                .setSocketTimeout(30000)
                                .setMaxTimeoutWhenExhausted(2000)
                                .setMaxConnsPerHost(20)
                                .setInitConnsPerHost(10)
                                .setSeeds(SEEDS))
                .withConnectionPoolMonitor(new CountingConnectionPoolMonitor())
                .buildKeyspace(ThriftFamilyFactory.getInstance());

        keyspaceContext.start();
        
        keyspace = keyspaceContext.getEntity();
        
        // Drop any keyspace left over from a previous run; ignore "not found".
        try {
            keyspace.dropKeyspace();
        }
        catch (Exception e) {
            LOG.info(e.getMessage());
        }
        
        keyspace.createKeyspace(ImmutableMap.<String, Object>builder()
                .put("strategy_options", ImmutableMap.<String, Object>builder()
                        .put("replication_factor", "1")
                        .build())
                .put("strategy_class",     "SimpleStrategy")
                .build()
                );
        
        keyspace.createColumnFamily(CF_USER_UNIQUE_UUID, null);
        keyspace.createColumnFamily(CF_EMAIL_UNIQUE_UUID, null);
        keyspace.createColumnFamily(CF_ALL_ROWS, null);
        // Lock CFs must use UTF8 comparators; values differ (Long vs UTF8).
        keyspace.createColumnFamily(LOCK_CF_LONG, ImmutableMap.<String, Object>builder()
                .put("default_validation_class", "LongType")
                .put("key_validation_class",     "UTF8Type")
                .put("comparator_type",          "UTF8Type")
                .build());
        keyspace.createColumnFamily(LOCK_CF_STRING, ImmutableMap.<String, Object>builder()
                .put("default_validation_class", "UTF8Type")
                .put("key_validation_class",     "UTF8Type")
                .put("comparator_type",          "UTF8Type")
                .build());

        keyspace.createColumnFamily(CF_STANDARD1, ImmutableMap.<String, Object>builder()
                .put("column_metadata", ImmutableMap.<String, Object>builder()
                        .put("Index1", ImmutableMap.<String, Object>builder()
                                .put("validation_class", "UTF8Type")
                                .put("index_name",       "Index1")
                                .put("index_type",       "KEYS")
                                .build())
                        .put("Index2", ImmutableMap.<String, Object>builder()
                                .put("validation_class", "UTF8Type")
                                .put("index_name",       "Index2")
                                .put("index_type",       "KEYS")
                                .build())
                         .build())
                     .build());
        keyspace.createColumnFamily(UNIQUE_CF, null);
        keyspace.createColumnFamily(CF_STANDARD1_COPY, null);
        
        KeyspaceDefinition ki = keyspaceContext.getEntity().describeKeyspace();
        System.out.println("Describe Keyspace: " + ki.getName());

        try {
            //
            // CF_Super :
            // 'A' :
            // 'a' :
            // 1 : 'Aa1',
            // 2 : 'Aa2',
            // 'b' :
            // ...
            // 'z' :
            // ...
            // 'B' :
            // ...
            //
            // CF_Standard :
            // 'A' :
            // 'a' : 1,
            // 'b' : 2,
            // ...
            // 'z' : 26,
            // 'B' :
            // ...
            //

            MutationBatch m;
            OperationResult<Void> result;
            m = keyspace.prepareMutationBatch();

            // One row per letter 'A'..'Z'; batch is executed per row.
            for (char keyName = 'A'; keyName <= 'Z'; keyName++) {
                String rowKey = Character.toString(keyName);
                ColumnListMutation<String> cfmStandard = m.withRow(
                        CF_STANDARD1, rowKey);
                for (char cName = 'a'; cName <= 'z'; cName++) {
                    cfmStandard.putColumn(Character.toString(cName),
                            (int) (cName - 'a') + 1, null);
                }
                cfmStandard
                        .putColumn("Index1", (int) (keyName - 'A') + 1, null);
                cfmStandard.putColumn("Index2", 42, null);
                m.execute();
            }

            m.withRow(CF_STANDARD1, "Prefixes").putColumn("Prefix1_a", 1, null)
                    .putColumn("Prefix1_b", 2, null)
                    .putColumn("prefix2_a", 3, null);

            result = m.execute();
            // NOTE(review): second execute() on the same batch looks redundant
            // (the batch should already be flushed above) — confirm intent.
            m.execute();

            // Seed CF_ALL_ROWS in chunks of 50 rows per batch.
            m = keyspace.prepareMutationBatch();
            for (int i = 0; i < ALL_ROWS_COUNT; i++) {
                m.withRow(CF_ALL_ROWS, i).putColumn(0, true);
                if (m.getRowCount() == 50) {
                    m.execute();
                }
            }
            m.execute();
        } catch (Exception e) {
            System.out.println(e.getMessage());
            Assert.fail();
        }
    }
    /**
     * Exercises DedicatedMultiRowUniquenessConstraint across two rows:
     * reading the unique column before acquiring must fail, a second
     * constraint on the same rows must not acquire while the first holds,
     * and after release the second constraint can acquire.
     */
    @Test
    public void testMultiRowUniqueness() {
        DedicatedMultiRowUniquenessConstraint<UUID> constraint = new DedicatedMultiRowUniquenessConstraint<UUID>
            (keyspace, TimeUUIDUtils.getUniqueTimeUUIDinMicros())
            .withConsistencyLevel(ConsistencyLevel.CL_ONE)
            .withRow(CF_USER_UNIQUE_UUID, "user1")
            .withRow(CF_EMAIL_UNIQUE_UUID, "user1@domain.com");
        
        DedicatedMultiRowUniquenessConstraint<UUID> constraint2 = new DedicatedMultiRowUniquenessConstraint<UUID>
            (keyspace, TimeUUIDUtils.getUniqueTimeUUIDinMicros())
            .withConsistencyLevel(ConsistencyLevel.CL_ONE)
            .withRow(CF_USER_UNIQUE_UUID, "user1")
            .withRow(CF_EMAIL_UNIQUE_UUID, "user1@domain.com");
        
        // Reading the unique column before acquire() must throw.
        try {
            Column<UUID> c = constraint.getUniqueColumn();
            Assert.fail();
        }
        catch (Exception e) {
            LOG.info(e.getMessage());
        }
        
        try {
            constraint.acquire();
            Column<UUID> c = constraint.getUniqueColumn();
            LOG.info("Unique column is " + c.getName());
            // While held, a competing constraint must fail with NotUniqueException.
            try {
                constraint2.acquire();
                Assert.fail("Should already be acquired");
            }
            catch (NotUniqueException e) {
            }
            catch (Exception e) {
                e.printStackTrace();
                Assert.fail();
            }
            finally {
                try {
                    constraint2.release();
                }
                catch (Exception e) {
                    e.printStackTrace();
                    Assert.fail();
                }
            }
        }
        catch (Exception e) {
            e.printStackTrace();
            Assert.fail();
        }
        finally {
            try {
                constraint.release();
            }
            catch (Exception e) {
                e.printStackTrace();
                Assert.fail();
            }
        }
        
        // After release the second constraint must be able to acquire.
        try {
            constraint2.acquire();
            Column<UUID> c = constraint.getUniqueColumn();
            LOG.info("Unique column is " + c.getName());
        }
        catch (NotUniqueException e) {
            Assert.fail("Should already be unique");
        }
        catch (Exception e) {
            e.printStackTrace();
            Assert.fail();
        }
        finally {
            try {
                constraint2.release();
            }
            catch (Exception e) {
                e.printStackTrace();
                Assert.fail();
            }
        }
    }
// @Test
// public void testAllRowsReaderConcurrency() throws Exception {
// final AtomicLong counter = new AtomicLong(0);
//
// boolean result = new AllRowsReader.Builder<String, String>(keyspace, CF_STANDARD1)
// .withConcurrencyLevel(4)
// .forEachRow(new Function<Row<String, String>, Boolean>() {
// @Override
// public Boolean apply(@Nullable Row<String, String> row) {
// counter.incrementAndGet();
// LOG.info("Got a row: " + row.getKey().toString());
// return true;
// }
// })
// .build()
// .call();
//
// Assert.assertTrue(result);
// Assert.assertEquals(28, counter.get());
// }
    /**
     * Verifies that a lock column written with a 2-second TTL (Long value CF)
     * disappears on its own: visible right after acquire, gone after sleeping
     * past the TTL. Timing-sensitive by design.
     */
    @Test
    public void testTtl() throws Exception {
        ColumnPrefixDistributedRowLock<String> lock = 
                new ColumnPrefixDistributedRowLock<String>(keyspace, LOCK_CF_LONG, "testTtl")
                    .withTtl(2)
                    .withConsistencyLevel(ConsistencyLevel.CL_ONE)
                    .expireLockAfter(1, TimeUnit.SECONDS);
        
        try {
            lock.acquire();
            Assert.assertEquals(1, lock.readLockColumns().size());
            // Sleep past the 2s TTL so Cassandra expires the lock column.
            Thread.sleep(3000);
            Assert.assertEquals(0, lock.readLockColumns().size());
        }
        catch (Exception e) {
            Assert.fail(e.getMessage());
        }
        finally {
            lock.release();
        }
        Assert.assertEquals(0, lock.readLockColumns().size());
    }
    /**
     * Same TTL-expiry scenario as {@code testTtl} but against the UTF8-valued
     * lock column family.
     */
    @Test
    public void testTtlString() throws Exception {
        ColumnPrefixDistributedRowLock<String> lock = 
                new ColumnPrefixDistributedRowLock<String>(keyspace, LOCK_CF_STRING, "testTtl")
                    .withTtl(2)
                    .withConsistencyLevel(ConsistencyLevel.CL_ONE)
                    .expireLockAfter(1, TimeUnit.SECONDS);
        
        try {
            lock.acquire();
            Assert.assertEquals(1, lock.readLockColumns().size());
            // Sleep past the 2s TTL so Cassandra expires the lock column.
            Thread.sleep(3000);
            Assert.assertEquals(0, lock.readLockColumns().size());
        }
        catch (Exception e) {
            Assert.fail(e.getMessage());
        }
        finally {
            lock.release();
        }
        Assert.assertEquals(0, lock.readLockColumns().size());
    }
    /**
     * lock1 expires after 1s; after sleeping 5s a second lock (which does NOT
     * fail on stale locks) must be able to acquire over the stale entry.
     * Long-valued lock CF variant.
     */
    @Test
    public void testStaleLockWithFail() throws Exception {
        ColumnPrefixDistributedRowLock<String> lock1 = 
                new ColumnPrefixDistributedRowLock<String>(keyspace, LOCK_CF_LONG, "testStaleLock")
                    .withTtl(TTL)
                    .withConsistencyLevel(ConsistencyLevel.CL_ONE)
                    .expireLockAfter(1, TimeUnit.SECONDS);
        
        ColumnPrefixDistributedRowLock<String> lock2 = 
                new ColumnPrefixDistributedRowLock<String>(keyspace, LOCK_CF_LONG, "testStaleLock")
                    .withTtl(TTL)
                    .withConsistencyLevel(ConsistencyLevel.CL_ONE)
                    .expireLockAfter(9, TimeUnit.SECONDS);
       
        try {
            lock1.acquire();
            // Let lock1 go stale (its expireLockAfter is only 1 second).
            Thread.sleep(5000);
            try {
                lock2.acquire();
            }
            catch (Exception e) {
                Assert.fail(e.getMessage());
            }
            finally {
                lock2.release();
            }
        }
        catch (Exception e) {
            Assert.fail(e.getMessage());
        }
        finally {
            lock1.release();
        }
    }
    /**
     * Same stale-lock takeover scenario as {@code testStaleLockWithFail} but
     * against the UTF8-valued lock column family.
     */
    @Test
    public void testStaleLockWithFail_String() throws Exception {
        ColumnPrefixDistributedRowLock<String> lock1 = 
                new ColumnPrefixDistributedRowLock<String>(keyspace, LOCK_CF_STRING, "testStaleLock")
                    .withTtl(TTL)
                    .withConsistencyLevel(ConsistencyLevel.CL_ONE)
                    .expireLockAfter(1, TimeUnit.SECONDS);
        
        ColumnPrefixDistributedRowLock<String> lock2 = 
                new ColumnPrefixDistributedRowLock<String>(keyspace, LOCK_CF_STRING, "testStaleLock")
                    .withTtl(TTL)
                    .withConsistencyLevel(ConsistencyLevel.CL_ONE)
                    .expireLockAfter(9, TimeUnit.SECONDS);
       
        try {
            lock1.acquire();
            // Let lock1 go stale (its expireLockAfter is only 1 second).
            Thread.sleep(5000);
            try {
                lock2.acquire();
            }
            catch (Exception e) {
                Assert.fail(e.getMessage());
            }
            finally {
                lock2.release();
            }
        }
        catch (Exception e) {
            Assert.fail(e.getMessage());
        }
        finally {
            lock1.release();
        }
    }
    /**
     * With failOnStaleLock(true), a second lock encountering lock1's stale
     * entry must throw StaleLockException instead of taking it over.
     * Long-valued lock CF variant.
     */
    @Test
    public void testStaleLock() throws Exception {
        ColumnPrefixDistributedRowLock<String> lock1 = 
                new ColumnPrefixDistributedRowLock<String>(keyspace, LOCK_CF_LONG, "testStaleLock")
                    .withTtl(TTL)
                    .withConsistencyLevel(ConsistencyLevel.CL_ONE)
                    .expireLockAfter(1, TimeUnit.SECONDS);
        
        ColumnPrefixDistributedRowLock<String> lock2 = 
                new ColumnPrefixDistributedRowLock<String>(keyspace, LOCK_CF_LONG, "testStaleLock")
                    .failOnStaleLock(true)
                    .withTtl(TTL)
                    .withConsistencyLevel(ConsistencyLevel.CL_ONE)
                    .expireLockAfter(9, TimeUnit.SECONDS);
       
        try {
            lock1.acquire();
            // Let lock1 go stale before lock2 tries to acquire.
            Thread.sleep(2000);
            try {
                lock2.acquire();
                Assert.fail();
            }
            catch (StaleLockException e) {
            }
            catch (Exception e) {
                Assert.fail(e.getMessage());
            }
            finally {
                lock2.release();
            }
        }
        catch (Exception e) {
            e.printStackTrace();
            Assert.fail(e.getMessage());
        }
        finally {
            lock1.release();
        }
    }
    /**
     * Same failOnStaleLock scenario as {@code testStaleLock} but against the
     * UTF8-valued lock column family.
     */
    @Test
    public void testStaleLock_String() throws Exception {
        ColumnPrefixDistributedRowLock<String> lock1 = 
                new ColumnPrefixDistributedRowLock<String>(keyspace, LOCK_CF_STRING, "testStaleLock")
                    .withTtl(TTL)
                    .withConsistencyLevel(ConsistencyLevel.CL_ONE)
                    .expireLockAfter(1, TimeUnit.SECONDS);
        
        ColumnPrefixDistributedRowLock<String> lock2 = 
                new ColumnPrefixDistributedRowLock<String>(keyspace, LOCK_CF_STRING, "testStaleLock")
                    .failOnStaleLock(true)
                    .withTtl(TTL)
                    .withConsistencyLevel(ConsistencyLevel.CL_ONE)
                    .expireLockAfter(9, TimeUnit.SECONDS);
       
        try {
            lock1.acquire();
            // Let lock1 go stale before lock2 tries to acquire.
            Thread.sleep(2000);
            try {
                lock2.acquire();
                Assert.fail();
            }
            catch (StaleLockException e) {
            }
            catch (Exception e) {
                Assert.fail(e.getMessage());
            }
            finally {
                lock2.release();
            }
        }
        catch (Exception e) {
            e.printStackTrace();
            Assert.fail(e.getMessage());
        }
        finally {
            lock1.release();
        }
    }
    /**
     * MultiRowUniquenessConstraint across two rows with an explicit lock id:
     * a competing constraint sharing row "testMultiLock_B" must fail while the
     * first is held, and must succeed after release. Also checks the lock id
     * is readable through a single-row ColumnPrefixUniquenessConstraint.
     */
    @Test
    public void testMultiLock() {
        MultiRowUniquenessConstraint unique = new MultiRowUniquenessConstraint(keyspace)
                .withConsistencyLevel(ConsistencyLevel.CL_ONE)
                .withTtl(60)
                .withLockId("abc")
                .withColumnPrefix("prefix_")
                .withRow(UNIQUE_CF, "testMultiLock_A")
                .withRow(UNIQUE_CF, "testMultiLock_B");
        ColumnPrefixUniquenessConstraint<String> singleUnique 
            = new ColumnPrefixUniquenessConstraint<String>(keyspace, UNIQUE_CF, "testMultiLock_A")
                .withConsistencyLevel(ConsistencyLevel.CL_ONE)
                .withPrefix("prefix_");
        try {
            unique.acquire();
            // The explicit lock id must be visible via the single-row view.
            String uniqueColumn = singleUnique.readUniqueColumn();
            Assert.assertEquals("abc", uniqueColumn);
            LOG.info("UniqueColumn: " + uniqueColumn);
        }
        catch (Exception e) {
            Assert.fail(e.getMessage());
        }
        
        // Overlaps on row "testMultiLock_B", so it must not acquire while held.
        MultiRowUniquenessConstraint unique2 = new MultiRowUniquenessConstraint(keyspace)
                .withTtl(60)
                .withConsistencyLevel(ConsistencyLevel.CL_ONE)
                .withColumnPrefix("prefix_")
                .withRow(UNIQUE_CF, "testMultiLock_B");
        try {
            unique2.acquire();
            Assert.fail();
        }
        catch (Exception e) {
            LOG.info(e.getMessage());
        }
        
        try {
            Assert.assertEquals("abc", singleUnique.readUniqueColumn());
            unique.release();
        }
        catch (Exception e) {
            LOG.error(e.getMessage());
            Assert.fail();
        }
        
        // After release, the overlapping constraint can acquire.
        try {
            unique2.acquire();
        }
        catch (Exception e) {
            LOG.error(e.getMessage());
            Assert.fail();
        }
        
        try {
            unique2.release();
        } catch (Exception e) {
            LOG.error(e.getMessage());
            Assert.fail();
        }
    }
    /**
     * RowUniquenessConstraint with a data payload: while held, a second
     * constraint on the same row must fail and the payload must be readable;
     * after release, reading the payload must throw.
     */
    @Test
    public void testRowUniquenessConstraint() throws Exception {
        RowUniquenessConstraint<String, String> unique = new RowUniquenessConstraint<String, String>
                (keyspace, UNIQUE_CF, "testRowUniquenessConstraint", UUIDStringSupplier.getInstance())
                .withConsistencyLevel(ConsistencyLevel.CL_ONE)
                ;
        RowUniquenessConstraint<String, String> unique2 = new RowUniquenessConstraint<String, String>
                (keyspace, UNIQUE_CF, "testRowUniquenessConstraint", UUIDStringSupplier.getInstance())
                .withConsistencyLevel(ConsistencyLevel.CL_ONE)
                ;
        try {
            unique.withData("abc").acquire();
            // Competing acquire on the same row must fail while held.
            try {
                unique2.acquire();
                Assert.fail();
            }
            catch (Exception e) {
                LOG.info(e.getMessage());
            }
            String data = unique.readDataAsString();
            Assert.assertNotNull(data);
        }
        catch (Exception e) {
            e.printStackTrace();
            Assert.fail(e.getMessage());
            LOG.error(e.getMessage());
        }
        finally {
            unique.release();
        }
        // Once released the data column is gone, so the read must throw.
        try {
            String data = unique.readDataAsString();
            Assert.fail();
        }
        catch (Exception e) {
            LOG.info("", e);
        }
    }
    /**
     * ColumnPrefixUniquenessConstraint basics: second acquire on the same row
     * fails while held; after release, reading the unique column throws.
     */
    @Test
    public void testPrefixUniquenessConstraint() throws Exception {
        ColumnPrefixUniquenessConstraint<String> unique = new ColumnPrefixUniquenessConstraint<String>(
                keyspace, UNIQUE_CF, "testPrefixUniquenessConstraint")
                .withConsistencyLevel(ConsistencyLevel.CL_ONE)
                ;
        ColumnPrefixUniquenessConstraint<String> unique2 = new ColumnPrefixUniquenessConstraint<String>(
                keyspace, UNIQUE_CF, "testPrefixUniquenessConstraint")
                .withConsistencyLevel(ConsistencyLevel.CL_ONE)
                ;
        try {
            unique.acquire();
            String column = unique.readUniqueColumn();
            LOG.info("Unique Column: " + column);
            // Competing acquire must fail while the first is held.
            try {
                unique2.acquire();
                Assert.fail();
            }
            catch (Exception e) {
            }
        }
        catch (Exception e) {
            Assert.fail(e.getMessage());
            LOG.error(e.getMessage());
        }
        finally {
            unique.release();
        }
        // After release the unique column no longer exists, so the read throws.
        try {
            String column = unique.readUniqueColumn();
            LOG.info(column);
            Assert.fail();
        }
        catch (Exception e) {
        }
    }
    /**
     * Same as {@code testPrefixUniquenessConstraint} but with explicit unique
     * ids: the holder's id ("abc") must be readable before and after a failed
     * competing acquire using a different id ("def").
     */
    @Test
    public void testPrefixUniquenessConstraintWithColumn() throws Exception {
        ColumnPrefixUniquenessConstraint<String> unique = new ColumnPrefixUniquenessConstraint<String>(
                keyspace, UNIQUE_CF, "testPrefixUniquenessConstraintWithColumn")
                .withConsistencyLevel(ConsistencyLevel.CL_ONE)
                .withUniqueId("abc");
        ColumnPrefixUniquenessConstraint<String> unique2 = new ColumnPrefixUniquenessConstraint<String>(
                keyspace, UNIQUE_CF, "testPrefixUniquenessConstraintWithColumn")
                .withConsistencyLevel(ConsistencyLevel.CL_ONE)
                .withUniqueId("def");
        try {
            unique.acquire();
            String column = unique.readUniqueColumn();
            LOG.info("Unique Column: " + column);
            Assert.assertEquals("abc", column);
            try {
                unique2.acquire();
                Assert.fail();
            }
            catch (Exception e) {
            }
            // The failed acquire must not have disturbed the holder's id.
            column = unique.readUniqueColumn();
            LOG.info("Unique Column: " + column);
            Assert.assertEquals("abc", column);
        }
        catch (Exception e) {
            Assert.fail(e.getMessage());
            LOG.error(e.getMessage());
        }
        finally {
            unique.release();
        }
    }
    /**
     * acquireAndApplyMutation must atomically write the caller's data column
     * along with the uniqueness column; after release, only the data column
     * remains in the row.
     */
    @Test
    public void testAcquireAndMutate() throws Exception {
        final String row = "testAcquireAndMutate";
        final String dataColumn = "data";
        final String value = "test";
        
        ColumnPrefixUniquenessConstraint<String> unique = new ColumnPrefixUniquenessConstraint<String>(
                keyspace, UNIQUE_CF, row)
                .withConsistencyLevel(ConsistencyLevel.CL_ONE)
                .withUniqueId("def");
        try {
            unique.acquireAndApplyMutation(new Function<MutationBatch, Boolean>() {
                @Override
                public Boolean apply(MutationBatch m) {
                    m.withRow(UNIQUE_CF, row)
                        .putColumn(dataColumn, value, null);
                    return true;
                }
            });
            String column = unique.readUniqueColumn();
            Assert.assertNotNull(column);
        }
        catch (Exception e) {
            e.printStackTrace();
            LOG.error("", e);
            Assert.fail();
        }
        finally {
        }
        
        // While held: the data column plus the uniqueness column.
        ColumnList<String> columns = keyspace.prepareQuery(UNIQUE_CF).getKey(row).execute().getResult();
        Assert.assertEquals(2,     columns.size());
        Assert.assertEquals(value, columns.getStringValue(dataColumn, null));
        
        unique.release();
        
        // After release: only the data column survives.
        columns = keyspace.prepareQuery(UNIQUE_CF).getKey(row).execute().getResult();
        Assert.assertEquals(1,     columns.size());
        Assert.assertEquals(value, columns.getStringValue(dataColumn, null));
    }
// @Test
// public void testAllRowsReader() throws Exception {
// final AtomicLong counter = new AtomicLong(0);
//
// boolean result = new AllRowsReader.Builder<String, String>(keyspace, CF_STANDARD1)
// .forEachRow(new Function<Row<String, String>, Boolean>() {
// @Override
// public Boolean apply(@Nullable Row<String, String> row) {
// counter.incrementAndGet();
// LOG.info("Got a row: " + row.getKey().toString());
// return true;
// }
// })
// .build()
// .call();
//
// Assert.assertTrue(result);
// Assert.assertEquals(28, counter.get());
// }
    /**
     * Reads all 27 rows of CF_STANDARD1 ('A'..'Z' plus "Prefixes") with a
     * small page size and two concurrent ranges, counting rows seen.
     */
    @Test
    public void testAllRowsReader() throws Exception {
        final AtomicLong counter = new AtomicLong(0);
        
        AllRowsReader<String, String> reader = new AllRowsReader.Builder<String, String>(keyspace, CF_STANDARD1)
                .withPageSize(3)
                .withConcurrencyLevel(2)
//                .withPartitioner(new Murmur3Partitioner())
                .forEachRow(new Function<Row<String, String>, Boolean>() {
                    @Override
                    public Boolean apply(Row<String, String> row) {
                        counter.incrementAndGet();
                        LOG.info("Got a row: " + row.getKey().toString());
                        return true;
                    }
                })
                .build();
        
        try {
            boolean result = reader.call();
            Assert.assertEquals(counter.get(), 27);
            Assert.assertTrue(result);
        }
        catch (Exception e) {
            LOG.info(e.getMessage(), e);
            Assert.fail(e.getMessage());
        }
    }
@Test
public void testAllRowsReaderCopier() throws Exception {
final ColumnCounterFunction columnCounter = new ColumnCounterFunction();
final RowCounterFunction rowCounter = new RowCounterFunction();
new AllRowsReader.Builder<String, String>(keyspace, CF_STANDARD1)
.withPageSize(3)
.withConcurrencyLevel(2)
.forEachRow(columnCounter)
.build()
.call();
LOG.info("Column count = " + columnCounter.getCount());
new AllRowsReader.Builder<String, String>(keyspace, CF_STANDARD1)
.withPageSize(3)
.withConcurrencyLevel(2)
.forEachRow(rowCounter)
.build()
.call();
LOG.info("Row count = " + rowCounter.getCount());
new AllRowsReader.Builder<String, String>(keyspace, CF_STANDARD1)
.withPageSize(3)
.withConcurrencyLevel(2)
.forEachRow(RowCopierFunction.builder(keyspace, CF_STANDARD1_COPY).build())
.build()
.call();
rowCounter.reset();
new AllRowsReader.Builder<String, String>(keyspace, CF_STANDARD1_COPY)
.withPageSize(3)
.withConcurrencyLevel(2)
.forEachRow(rowCounter)
.build()
.call();
LOG.info("Copied row count = " + rowCounter.getCount());
LOG.info("CF_STANDARD1");
new AllRowsReader.Builder<String, String>(keyspace, CF_STANDARD1)
.withPageSize(3)
.withConcurrencyLevel(2)
.forEachRow(TraceFunction.builder(CF_STANDARD1_COPY).build())
.build()
.call();
LOG.info("CF_STANDARD1_COPY");
new AllRowsReader.Builder<String, String>(keyspace, CF_STANDARD1_COPY)
.withPageSize(3)
.withConcurrencyLevel(2)
.forEachRow(TraceFunction.builder(CF_STANDARD1_COPY).build())
.build()
.call();
}
    /**
     * Reads CF_ALL_ROWS with concurrency level 12 and verifies that exactly
     * 12 distinct worker threads participated and that every one of the
     * ALL_ROWS_COUNT rows was visited exactly once.
     */
    @Test
    public void testAllRowsReaderConcurrency12() throws Exception {
        final AtomicLong counter = new AtomicLong(0);
        // threadId -> number of rows handled by that thread.
        final Map<Long, AtomicLong> threadIds = Maps.newHashMap();
        
        AllRowsReader<Integer, Integer> reader = new AllRowsReader.Builder<Integer, Integer>(keyspace, CF_ALL_ROWS)
                .withPageSize(100)
                .withConcurrencyLevel(12)
                .withColumnSlice(0)
                .forEachRow(new Function<Row<Integer, Integer>, Boolean>() {
                    // synchronized: guards the shared HashMap across reader threads.
                    @Override
                    public synchronized Boolean apply(Row<Integer, Integer> row) {
                        long threadId = Thread.currentThread().getId();
                        AtomicLong threadCounter = threadIds.get(threadId);
                        if (threadCounter == null) {
                            threadCounter = new AtomicLong(0);
                            threadIds.put(threadId, threadCounter);
                        }
                        threadCounter.incrementAndGet();
                        counter.incrementAndGet();
                        return true;
                    }
                })
                .build();
        
        try {
            Stopwatch sw = Stopwatch.createStarted();
            boolean result = reader.call();
            long runtimeMillis = sw.stop().elapsed(TimeUnit.MILLISECONDS);
            LOG.info("Count = " + counter.get() + " runtime=" + runtimeMillis);
            LOG.info("ThreadIds (" + threadIds.size() + ") " + threadIds);
            Assert.assertEquals(threadIds.size(), 12);
            Assert.assertEquals(counter.get(), ALL_ROWS_COUNT);
            Assert.assertTrue(result);
        }
        catch (Exception e) {
            LOG.info(e.getMessage(), e);
            Assert.fail(e.getMessage());
        }
    }
@Test
public void testAllRowsReaderWithCancel() throws Exception {
final AtomicLong counter = new AtomicLong(0);
AllRowsReader<String, String> reader = new AllRowsReader.Builder<String, String>(keyspace, CF_STANDARD1)
.withPageSize(3)
.withConcurrencyLevel(2)
.forEachRow(new Function<Row<String, String>, Boolean>() {
@Override
public Boolean apply(Row<String, String> row) {
try {
Thread.sleep(2000);
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
throw new RuntimeException(e);
}
counter.incrementAndGet();
LOG.info("Got a row: " + row.getKey().toString());
return true;
}
})
.build();
Future<Boolean> future = Executors.newSingleThreadExecutor().submit(reader);
Thread.sleep(1000);
reader.cancel();
try {
boolean result = future.get();
Assert.assertEquals(false, result);
}
catch (Exception e) {
LOG.info("Failed to execute", e);
}
LOG.info("Before: " + counter.get());
Assert.assertNotSame(28, counter.get());
Thread.sleep(2000);
LOG.info("After: " + counter.get());
Assert.assertNotSame(28, counter.get());
}
@Test
public void testAllRowsReaderWithException() throws Exception {
AllRowsReader<String, String> reader = new AllRowsReader.Builder<String, String>(keyspace, CF_STANDARD1)
.withPageSize(3)
.withConcurrencyLevel(2)
.forEachRow(new Function<Row<String, String>, Boolean>() {
@Override
public Boolean apply(Row<String, String> row) {
throw new RuntimeException("Very bad");
}
})
.build();
Future<Boolean> future = Executors.newSingleThreadExecutor().submit(reader);
try {
boolean result = future.get();
Assert.fail();
}
catch (Exception e) {
Assert.assertTrue(e.getMessage().contains("Very bad"));
LOG.info("Failed to execute", e);
}
}
}
| 7,566 |
0 | Create_ds/astyanax/astyanax-test/src/test/java/com/netflix/astyanax | Create_ds/astyanax/astyanax-test/src/test/java/com/netflix/astyanax/recipes/UniquenessConstraintTest.java | package com.netflix.astyanax.recipes;
import java.util.HashMap;
import java.util.Map;
import junit.framework.Assert;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Ignore;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.netflix.astyanax.AstyanaxContext;
import com.netflix.astyanax.Cluster;
import com.netflix.astyanax.Keyspace;
import com.netflix.astyanax.connectionpool.NodeDiscoveryType;
import com.netflix.astyanax.connectionpool.exceptions.ConnectionException;
import com.netflix.astyanax.connectionpool.impl.ConnectionPoolConfigurationImpl;
import com.netflix.astyanax.connectionpool.impl.CountingConnectionPoolMonitor;
import com.netflix.astyanax.ddl.KeyspaceDefinition;
import com.netflix.astyanax.impl.AstyanaxConfigurationImpl;
import com.netflix.astyanax.model.ColumnFamily;
import com.netflix.astyanax.model.ConsistencyLevel;
import com.netflix.astyanax.serializers.LongSerializer;
import com.netflix.astyanax.serializers.StringSerializer;
import com.netflix.astyanax.thrift.ThriftFamilyFactory;
/**
 * Integration test for {@code UniquenessConstraintWithPrefix}: verifies that a
 * row key is reported unique exactly once within the TTL window, and becomes
 * unique again after the TTL expires.
 *
 * Ignored: requires a locally running Cassandra cluster on localhost:7102.
 */
@Ignore
public class UniquenessConstraintTest {

    private static Logger LOG = LoggerFactory
            .getLogger(UniquenessConstraintTest.class);

    private static AstyanaxContext<Cluster> clusterContext;

    private static final String TEST_CLUSTER_NAME = "TestCluster";
    private static final String TEST_KEYSPACE_NAME = "UniqueIndexTest";
    private static final String TEST_DATA_CF = "UniqueRowKeyTest";

    // When true, setup() drops and recreates the test keyspace.
    private static final boolean TEST_INIT_KEYSPACE = true;

    private static ColumnFamily<Long, String> CF_DATA = ColumnFamily
            .newColumnFamily(TEST_DATA_CF, LongSerializer.get(),
                    StringSerializer.get());

    /**
     * Connects to the cluster and (optionally) recreates the test keyspace
     * with replication factor 3.
     */
    @BeforeClass
    public static void setup() throws Exception {
        clusterContext = new AstyanaxContext.Builder()
                .forCluster(TEST_CLUSTER_NAME)
                .withAstyanaxConfiguration(
                        new AstyanaxConfigurationImpl()
                                .setDiscoveryType(NodeDiscoveryType.NONE))
                .withConnectionPoolConfiguration(
                        new ConnectionPoolConfigurationImpl(TEST_CLUSTER_NAME)
                                .setMaxConnsPerHost(1).setSeeds(
                                        "localhost:7102"))
                .withConnectionPoolMonitor(new CountingConnectionPoolMonitor())
                .buildCluster(ThriftFamilyFactory.getInstance());

        clusterContext.start();

        if (TEST_INIT_KEYSPACE) {
            Cluster cluster = clusterContext.getEntity();

            // Best effort: the keyspace may not exist on a clean cluster.
            try {
                LOG.info("Dropping keyspace: " + TEST_KEYSPACE_NAME);
                cluster.dropKeyspace(TEST_KEYSPACE_NAME);
                Thread.sleep(10000);
            } catch (ConnectionException e) {
                LOG.warn(e.getMessage());
            }

            Map<String, String> stratOptions = new HashMap<String, String>();
            stratOptions.put("replication_factor", "3");

            try {
                LOG.info("Creating keyspace: " + TEST_KEYSPACE_NAME);
                KeyspaceDefinition ksDef = cluster.makeKeyspaceDefinition();
                ksDef.setName(TEST_KEYSPACE_NAME)
                        .setStrategyOptions(stratOptions)
                        .setStrategyClass("SimpleStrategy")
                        .addColumnFamily(
                                cluster.makeColumnFamilyDefinition()
                                        .setName(CF_DATA.getName())
                                        .setComparatorType("UTF8Type"));
                cluster.addKeyspace(ksDef);
                Thread.sleep(1000);
            } catch (ConnectionException e) {
                LOG.error(e.getMessage());
            }
        }
    }

    /** Shuts the cluster context down after all tests. */
    @AfterClass
    public static void teardown() {
        if (clusterContext != null)
            clusterContext.shutdown();
    }

    /**
     * Acquires uniqueness on a key (expected non-null column), verifies a
     * second attempt fails (null), then waits past the 2 second TTL and
     * verifies the key is unique again.
     */
    @Test
    public void testUniqueness() throws Exception {
        LOG.info("Starting");

        Keyspace keyspace = clusterContext.getEntity().getKeyspace(TEST_KEYSPACE_NAME);

        UniquenessConstraintWithPrefix<Long> unique = new UniquenessConstraintWithPrefix<Long>(
                keyspace, CF_DATA)
                .setTtl(2)
                .setPrefix("unique_")
                .setConsistencyLevel(ConsistencyLevel.CL_ONE)
                .setMonitor(
                        new UniquenessConstraintViolationMonitor<Long, String>() {
                            @Override
                            public void onViolation(Long key, String column) {
                                LOG.info("Violated: " + key + " column: "
                                        + column);
                            }
                        });

        try {
            String column = unique.isUnique(1234L);
            Assert.assertNotNull(column);
            LOG.info(column);

            // Second claim within the TTL must be rejected.
            column = unique.isUnique(1234L);
            Assert.assertNull(column);

            // Wait for the 2 second TTL to expire.
            try {
                Thread.sleep(3000);
            } catch (InterruptedException e) {
                // Restore the interrupt status instead of swallowing it.
                Thread.currentThread().interrupt();
            }

            column = unique.isUnique(1234L);
            Assert.assertNotNull(column);
            LOG.info(column);
        } catch (ConnectionException e) {
            LOG.error(e.getMessage());
            Assert.fail(e.getMessage());
        }
    }
}
| 7,567 |
0 | Create_ds/astyanax/astyanax-test/src/test/java/com/netflix/astyanax | Create_ds/astyanax/astyanax-test/src/test/java/com/netflix/astyanax/recipes/ChunkedObjectRecipeTest.java | package com.netflix.astyanax.recipes;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import junit.framework.Assert;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.collect.ImmutableMap;
import com.netflix.astyanax.AstyanaxContext;
import com.netflix.astyanax.Keyspace;
import com.netflix.astyanax.connectionpool.NodeDiscoveryType;
import com.netflix.astyanax.connectionpool.impl.ConnectionPoolConfigurationImpl;
import com.netflix.astyanax.connectionpool.impl.ConnectionPoolType;
import com.netflix.astyanax.connectionpool.impl.CountingConnectionPoolMonitor;
import com.netflix.astyanax.impl.AstyanaxConfigurationImpl;
import com.netflix.astyanax.model.ColumnFamily;
import com.netflix.astyanax.model.ColumnList;
import com.netflix.astyanax.recipes.storage.CassandraChunkedStorageProvider;
import com.netflix.astyanax.recipes.storage.ChunkedStorage;
import com.netflix.astyanax.recipes.storage.ObjectMetadata;
import com.netflix.astyanax.recipes.storage.ObjectWriteCallback;
import com.netflix.astyanax.serializers.StringSerializer;
import com.netflix.astyanax.thrift.ThriftFamilyFactory;
import com.netflix.astyanax.util.SingletonEmbeddedCassandra;
public class ChunkedObjectRecipeTest {
private static final Logger LOG = LoggerFactory.getLogger(ChunkedObjectRecipeTest.class);
public static ColumnFamily<String, String> CF_CHUNK =
ColumnFamily.newColumnFamily("cfchunk", StringSerializer.get(), StringSerializer.get());
private static final long CASSANDRA_WAIT_TIME = 3000;
private static final String TEST_CLUSTER_NAME = "cass_sandbox";
private static final String TEST_KEYSPACE_NAME = "AstyanaxUnitTests_ChunkRecipe";
private static final String SEEDS = "localhost:9160";
/**
* Interal
*/
private static Keyspace keyspace;
private static AstyanaxContext<Keyspace> keyspaceContext;
@BeforeClass
public static void setup() throws Exception {
SingletonEmbeddedCassandra.getInstance();
Thread.sleep(CASSANDRA_WAIT_TIME);
createKeyspace();
}
@AfterClass
public static void teardown() throws Exception {
if (keyspaceContext != null)
keyspaceContext.shutdown();
Thread.sleep(CASSANDRA_WAIT_TIME);
}
public static void createKeyspace() throws Exception {
keyspaceContext = new AstyanaxContext.Builder()
.forCluster(TEST_CLUSTER_NAME)
.forKeyspace(TEST_KEYSPACE_NAME)
.withAstyanaxConfiguration(
new AstyanaxConfigurationImpl()
.setDiscoveryType(NodeDiscoveryType.RING_DESCRIBE)
.setConnectionPoolType(ConnectionPoolType.TOKEN_AWARE)
.setDiscoveryDelayInSeconds(60000))
.withConnectionPoolConfiguration(
new ConnectionPoolConfigurationImpl(TEST_CLUSTER_NAME
+ "_" + TEST_KEYSPACE_NAME)
.setSocketTimeout(30000)
.setMaxTimeoutWhenExhausted(2000)
.setMaxConnsPerHost(20)
.setInitConnsPerHost(10)
.setSeeds(SEEDS))
.withConnectionPoolMonitor(new CountingConnectionPoolMonitor())
.buildKeyspace(ThriftFamilyFactory.getInstance());
keyspaceContext.start();
keyspace = keyspaceContext.getClient();
try {
keyspace.dropKeyspace();
}
catch (Exception e) {
LOG.info(e.getMessage());
}
keyspace.createKeyspace(ImmutableMap.<String, Object>builder()
.put("strategy_options", ImmutableMap.<String, Object>builder()
.put("replication_factor", "1")
.build())
.put("strategy_class", "SimpleStrategy")
.build()
);
keyspace.createColumnFamily(CF_CHUNK, null);
}
@Test
public void testChunkedRecipe() throws Exception {
CassandraChunkedStorageProvider provider = new CassandraChunkedStorageProvider(keyspace, CF_CHUNK);
StringBuilder sb = new StringBuilder();
for (int i=0; i<100; i++) {
sb.append("abcdefghijklmnopqrstuvwxyz_");
}
String input = sb.toString();
ByteArrayInputStream in = new ByteArrayInputStream(input.getBytes());
ObjectMetadata meta = ChunkedStorage.newWriter(provider, "MyObject", in)
.withChunkSize(100)
.call();
meta = ChunkedStorage.newInfoReader(provider, "MyObject").call();
System.out.println("Obj size: " + meta.getObjectSize().intValue());
System.out.println("Chunk count: " + meta.getChunkCount());
ByteArrayOutputStream os = new ByteArrayOutputStream(meta.getObjectSize().intValue());
meta = ChunkedStorage.newReader(provider, "MyObject", os)
.withBatchSize(11) // Randomize fetching blocks within a batch.
.withConcurrencyLevel(3)
.call();
String output = os.toString();
Assert.assertEquals(input, output);
ChunkedStorage.newDeleter(provider, "MyObject").call();
for (int i=0; i<meta.getChunkCount(); i++) {
ColumnList<String> result = keyspace.prepareQuery(CF_CHUNK).getKey("MyObject$" + i).execute().getResult();
Assert.assertTrue(result.isEmpty());
}
}
@Test
public void testChunkFailure() throws Exception {
CassandraChunkedStorageProvider provider = new CassandraChunkedStorageProvider(keyspace, CF_CHUNK);
StringBuilder sb = new StringBuilder();
for (int i=0; i<100; i++) {
sb.append("abcdefghijklmnopqrstuvwxyz_");
}
String input = sb.toString();
ByteArrayInputStream in = new ByteArrayInputStream(input.getBytes());
CallbackThatFails callback = new CallbackThatFails(26);
try {
ChunkedStorage.newWriter(provider, "MyObjectThatFails", in)
.withChunkSize(100)
.withCallback(callback)
.call();
Assert.fail("Should have received ex from ChunkedObjectWriter");
} catch (Exception e) {
} finally {
Assert.assertFalse("callback.success: " + callback.success, callback.success);
Assert.assertTrue("callback.chunkException: " + callback.chunkException, callback.chunkException);
Assert.assertEquals("callback.failedChunk: " + callback.failedChunk, callback.chunkNumToFailOn, callback.failedChunk);
}
}
private class CallbackThatFails implements ObjectWriteCallback {
private int chunkNumToFailOn;
private boolean success = false;
private boolean chunkException = false;
private int failedChunk = -1;
private CallbackThatFails(int chunk) {
chunkNumToFailOn = chunk;
}
@Override
public void onSuccess() {
success = true;
}
@Override
public void onFailure(Exception exception) {
}
@Override
public void onChunkException(int chunk, Exception exception) {
chunkException = true;
failedChunk = chunk;
}
@Override
public void onChunk(int chunk, int size) {
if (chunk == chunkNumToFailOn) {
try {
Thread.sleep(500);
} catch (InterruptedException e) {
}
throw new RuntimeException("Failing for chunk: " + chunkNumToFailOn);
}
}
};
}
| 7,568 |
0 | Create_ds/astyanax/astyanax-test/src/test/java/com/netflix/astyanax | Create_ds/astyanax/astyanax-test/src/test/java/com/netflix/astyanax/recipes/LockRecipeTest.java | package com.netflix.astyanax.recipes;
import java.util.concurrent.TimeUnit;
import com.netflix.astyanax.test.EmbeddedCassandra;
import junit.framework.Assert;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Ignore;
import org.junit.Test;
import com.google.common.collect.ImmutableMap;
import com.netflix.astyanax.AstyanaxContext;
import com.netflix.astyanax.Keyspace;
import com.netflix.astyanax.connectionpool.NodeDiscoveryType;
import com.netflix.astyanax.connectionpool.impl.ConnectionPoolConfigurationImpl;
import com.netflix.astyanax.connectionpool.impl.ConnectionPoolType;
import com.netflix.astyanax.connectionpool.impl.CountingConnectionPoolMonitor;
import com.netflix.astyanax.ddl.KeyspaceDefinition;
import com.netflix.astyanax.impl.AstyanaxConfigurationImpl;
import com.netflix.astyanax.model.ColumnFamily;
import com.netflix.astyanax.model.ConsistencyLevel;
import com.netflix.astyanax.recipes.locks.ColumnPrefixDistributedRowLock;
import com.netflix.astyanax.recipes.locks.StaleLockException;
import com.netflix.astyanax.serializers.LongSerializer;
import com.netflix.astyanax.serializers.StringSerializer;
import com.netflix.astyanax.thrift.ThriftFamilyFactory;
/**
 * Integration tests for {@code ColumnPrefixDistributedRowLock} against an
 * embedded Cassandra. Every scenario runs twice — once against a
 * LongType-valued column family and once against a UTF8Type-valued one — via
 * shared private helpers.
 *
 * Ignore for now because of issues with running embedded cassandra from
 * multiple unit tests.
 *
 * @author elandau
 */
@Ignore
public class LockRecipeTest {

    /** Lock column family whose values are longs. */
    private static ColumnFamily<String, String> LOCK_CF_LONG =
            ColumnFamily.newColumnFamily("LockCfLong", StringSerializer.get(), StringSerializer.get(), LongSerializer.get());

    /** Lock column family whose values are UTF8 strings. */
    private static ColumnFamily<String, String> LOCK_CF_STRING =
            ColumnFamily.newColumnFamily("LockCfString", StringSerializer.get(), StringSerializer.get(), StringSerializer.get());

    // Column TTL (seconds) applied to locks so stale locks self-expire.
    private static final int TTL = 20;
    private static final int TIMEOUT = 10;

    private static final String SEEDS = "localhost:9160";

    // Time to wait for the embedded Cassandra to open its thrift port.
    private static final long CASSANDRA_WAIT_TIME = 3000;

    private static Keyspace keyspace;
    private static AstyanaxContext<Keyspace> keyspaceContext;
    private static EmbeddedCassandra cassandra;

    private static String TEST_CLUSTER_NAME = "cass_sandbox";
    private static String TEST_KEYSPACE_NAME = "LockUnitTest";

    /** Boots an embedded Cassandra and creates the test keyspace. */
    @BeforeClass
    public static void setup() throws Exception {
        System.out.println("TESTING THRIFT KEYSPACE");

        cassandra = new EmbeddedCassandra();
        cassandra.start();

        Thread.sleep(CASSANDRA_WAIT_TIME);

        createKeyspace();
    }

    /** Shuts down the connection pool and the embedded Cassandra. */
    @AfterClass
    public static void teardown() {
        if (keyspaceContext != null)
            keyspaceContext.shutdown();

        if (cassandra != null)
            cassandra.stop();
    }

    /**
     * Builds the Astyanax context, drops any leftover keyspace, then creates
     * the keyspace (RF=1) and both lock column families.
     */
    public static void createKeyspace() throws Exception {
        keyspaceContext = new AstyanaxContext.Builder()
                .forCluster(TEST_CLUSTER_NAME)
                .forKeyspace(TEST_KEYSPACE_NAME)
                .withAstyanaxConfiguration(
                        new AstyanaxConfigurationImpl()
                                .setDiscoveryType(NodeDiscoveryType.RING_DESCRIBE)
                                .setConnectionPoolType(ConnectionPoolType.TOKEN_AWARE))
                .withConnectionPoolConfiguration(
                        new ConnectionPoolConfigurationImpl(TEST_CLUSTER_NAME
                                + "_" + TEST_KEYSPACE_NAME)
                                .setSocketTimeout(30000)
                                .setMaxTimeoutWhenExhausted(2000)
                                .setMaxConnsPerHost(10)
                                .setInitConnsPerHost(10)
                                .setSeeds(SEEDS))
                .withConnectionPoolMonitor(new CountingConnectionPoolMonitor())
                .buildKeyspace(ThriftFamilyFactory.getInstance());

        keyspaceContext.start();

        keyspace = keyspaceContext.getEntity();

        // Best effort: the keyspace may not exist yet on a clean instance.
        try {
            keyspace.dropKeyspace();
        }
        catch (Exception e) {
        }

        keyspace.createKeyspace(ImmutableMap.<String, Object>builder()
                .put("strategy_options", ImmutableMap.<String, Object>builder()
                        .put("replication_factor", "1")
                        .build())
                .put("strategy_class", "SimpleStrategy")
                .build());

        keyspace.createColumnFamily(LOCK_CF_LONG, ImmutableMap.<String, Object>builder()
                .put("default_validation_class", "LongType")
                .put("key_validation_class", "UTF8Type")
                .put("comparator_type", "UTF8Type")
                .build());

        keyspace.createColumnFamily(LOCK_CF_STRING, ImmutableMap.<String, Object>builder()
                .put("default_validation_class", "UTF8Type")
                .put("key_validation_class", "UTF8Type")
                .put("comparator_type", "UTF8Type")
                .build());

        KeyspaceDefinition ki = keyspaceContext.getEntity().describeKeyspace();
        System.out.println("Describe Keyspace: " + ki.getName());
    }

    /**
     * Acquires a lock with a 2 second TTL and verifies the lock column has
     * expired on its own once the TTL elapses.
     */
    private void runTtlTest(ColumnFamily<String, String> cf) throws Exception {
        ColumnPrefixDistributedRowLock<String> lock =
                new ColumnPrefixDistributedRowLock<String>(keyspace, cf, "testTtl")
                        .withTtl(2)
                        .withConsistencyLevel(ConsistencyLevel.CL_ONE)
                        .expireLockAfter(1, TimeUnit.SECONDS);

        try {
            lock.acquire();
            Assert.assertEquals(1, lock.readLockColumns().size());
            // Sleep past the 2 second TTL; Cassandra should drop the column.
            Thread.sleep(3000);
            Assert.assertEquals(0, lock.readLockColumns().size());
        }
        catch (Exception e) {
            Assert.fail(e.getMessage());
        }
        finally {
            lock.release();
        }
        Assert.assertEquals(0, lock.readLockColumns().size());
    }

    @Test
    public void testTtl() throws Exception {
        runTtlTest(LOCK_CF_LONG);
    }

    @Test
    public void testTtlString() throws Exception {
        runTtlTest(LOCK_CF_STRING);
    }

    /**
     * Verifies that a second lock can be acquired over a lock whose own
     * expiration (1 second) has already passed.
     */
    private void runStaleLockWithFailTest(ColumnFamily<String, String> cf) throws Exception {
        ColumnPrefixDistributedRowLock<String> lock1 =
                new ColumnPrefixDistributedRowLock<String>(keyspace, cf, "testStaleLock")
                        .withTtl(TTL)
                        .withConsistencyLevel(ConsistencyLevel.CL_ONE)
                        .expireLockAfter(1, TimeUnit.SECONDS);

        ColumnPrefixDistributedRowLock<String> lock2 =
                new ColumnPrefixDistributedRowLock<String>(keyspace, cf, "testStaleLock")
                        .withTtl(TTL)
                        .withConsistencyLevel(ConsistencyLevel.CL_ONE)
                        .expireLockAfter(9, TimeUnit.SECONDS);

        try {
            lock1.acquire();
            // Wait until lock1 is well past its 1 second expiration.
            Thread.sleep(5000);
            try {
                lock2.acquire();
            }
            catch (Exception e) {
                Assert.fail(e.getMessage());
            }
            finally {
                lock2.release();
            }
        }
        catch (Exception e) {
            Assert.fail(e.getMessage());
        }
        finally {
            lock1.release();
        }
    }

    @Test
    public void testStaleLockWithFail() throws Exception {
        runStaleLockWithFailTest(LOCK_CF_LONG);
    }

    @Test
    public void testStaleLockWithFail_String() throws Exception {
        runStaleLockWithFailTest(LOCK_CF_STRING);
    }

    /**
     * Verifies that with failOnStaleLock(true) an attempt to acquire over a
     * stale lock throws StaleLockException instead of taking the lock.
     */
    private void runStaleLockTest(ColumnFamily<String, String> cf) throws Exception {
        ColumnPrefixDistributedRowLock<String> lock1 =
                new ColumnPrefixDistributedRowLock<String>(keyspace, cf, "testStaleLock")
                        .withTtl(TTL)
                        .withConsistencyLevel(ConsistencyLevel.CL_ONE)
                        .expireLockAfter(1, TimeUnit.SECONDS);

        ColumnPrefixDistributedRowLock<String> lock2 =
                new ColumnPrefixDistributedRowLock<String>(keyspace, cf, "testStaleLock")
                        .failOnStaleLock(true)
                        .withTtl(TTL)
                        .withConsistencyLevel(ConsistencyLevel.CL_ONE)
                        .expireLockAfter(9, TimeUnit.SECONDS);

        try {
            lock1.acquire();
            // Wait until lock1's 1 second expiration has passed.
            Thread.sleep(2000);
            try {
                lock2.acquire();
                Assert.fail();
            }
            catch (StaleLockException e) {
                // Expected: lock1 is stale and failOnStaleLock(true) is set.
            }
            catch (Exception e) {
                Assert.fail(e.getMessage());
            }
            finally {
                lock2.release();
            }
        }
        catch (Exception e) {
            e.printStackTrace();
            Assert.fail(e.getMessage());
        }
        finally {
            lock1.release();
        }
    }

    @Test
    public void testStaleLock() throws Exception {
        runStaleLockTest(LOCK_CF_LONG);
    }

    @Test
    public void testStaleLock_String() throws Exception {
        runStaleLockTest(LOCK_CF_STRING);
    }

    /**
     * Placeholder for a lock-read-mutate-release scenario: write a counter
     * column, take the row lock with acquireLockAndReadRow(), increment the
     * value, and release the lock atomically with the mutation via
     * releaseWithMutation(). The implementation was disabled (commented out)
     * in the original source; see version-control history for the full sketch.
     */
    @Test
    public void testLockAndMutate() throws Exception {
    }
}
| 7,569 |
0 | Create_ds/astyanax/astyanax-test/src/test/java/com/netflix/astyanax | Create_ds/astyanax/astyanax-test/src/test/java/com/netflix/astyanax/model/DynamicCompositeTest.java | package com.netflix.astyanax.model;
import java.nio.ByteBuffer;
import java.util.UUID;
import org.junit.Test;
import com.netflix.astyanax.serializers.AbstractSerializer;
import com.netflix.astyanax.serializers.AsciiSerializer;
import com.netflix.astyanax.serializers.BytesArraySerializer;
import com.netflix.astyanax.serializers.IntegerSerializer;
import com.netflix.astyanax.serializers.LongSerializer;
import com.netflix.astyanax.serializers.StringSerializer;
import com.netflix.astyanax.serializers.UUIDSerializer;
import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertEquals;
/**
 * Tests for {@code DynamicComposite}: basic component addition and a
 * serialize/deserialize round trip with reversed comparators.
 */
public class DynamicCompositeTest {

    /**
     * Smoke test: adding many string components must not throw.
     */
    @Test
    public void testComposite() {
        DynamicComposite dc = new DynamicComposite();
        for (char ch = 'A'; ch < 'Z'; ch++) {
            dc.addComponent(Character.toString(ch), StringSerializer.get());
        }
    }

    /**
     * Builds a composite in which every component uses a reversed comparator,
     * serializes it, and verifies each component deserializes back intact.
     */
    @Test
    public void testReversedSerialization() {
        AsciiSerializer asciiSerializer = AsciiSerializer.get();
        BytesArraySerializer bytesArraySerializer = BytesArraySerializer.get();
        IntegerSerializer integerSerializer = IntegerSerializer.get();
        LongSerializer longSerializer = LongSerializer.get();
        StringSerializer stringSerializer = StringSerializer.get();
        UUIDSerializer uuidSerializer = UUIDSerializer.get();

        DynamicComposite dc = new DynamicComposite();

        final String string = "test";
        final byte[] bytes = new byte[] { 0x00 };
        final int intValue = 1;
        final long longValue = 1l;
        final UUID uuid = UUID.randomUUID();

        dc.addComponent(string, asciiSerializer, getReversed(asciiSerializer));
        dc.addComponent(bytes, bytesArraySerializer, getReversed(bytesArraySerializer));
        dc.addComponent(intValue, integerSerializer, getReversed(integerSerializer));
        dc.addComponent(longValue, longSerializer, getReversed(longSerializer));
        dc.addComponent(string, stringSerializer, getReversed(stringSerializer));
        dc.addComponent(uuid, uuidSerializer, getReversed(uuidSerializer));

        // serialize to bytes
        ByteBuffer buff = dc.serialize();

        // de-serialize
        DynamicComposite read = DynamicComposite.fromByteBuffer(buff);

        assertEquals(6, read.size());
        assertEquals(string, read.getComponent(0).getValue(asciiSerializer));
        assertArrayEquals(bytes, (byte[]) read.getComponent(1).getValue(bytesArraySerializer));
        assertEquals(intValue, read.getComponent(2).getValue(integerSerializer));
        assertEquals(longValue, read.getComponent(3).getValue(longSerializer));
        assertEquals(string, read.getComponent(4).getValue(stringSerializer));
        assertEquals(uuid, read.getComponent(5).getValue(uuidSerializer));
    }

    /**
     * Builds the reversed-comparator type name for the given serializer.
     * Takes a bounded wildcard rather than a raw type.
     */
    private static String getReversed(AbstractSerializer<?> serializer) {
        return serializer.getComparatorType().getTypeName() + "(reversed=true)";
    }
}
| 7,570 |
0 | Create_ds/astyanax/astyanax-test/src/test/java/com/netflix/astyanax | Create_ds/astyanax/astyanax-test/src/test/java/com/netflix/astyanax/model/CompositeTest.java | package com.netflix.astyanax.model;
import java.nio.ByteBuffer;
import java.util.UUID;
import java.util.concurrent.TimeUnit;
import org.junit.Test;
import org.apache.commons.lang.StringUtils;
import com.netflix.astyanax.serializers.ByteBufferOutputStream;
import junit.framework.Assert;
import static org.junit.Assert.assertEquals;
/**
 * Tests for the static composite builder/parser and for
 * {@code ByteBufferOutputStream} sizing.
 */
public class CompositeTest {

    /**
     * Writes strings of increasing length and verifies the resulting buffer's
     * capacity equals the total number of bytes written.
     */
    @Test
    public void testByteBufferOutputStream() throws Exception {
        ByteBufferOutputStream out = new ByteBufferOutputStream();

        int length = 0;
        for (int i = 0; i < 300; i++) {
            length += i;
            out.write(StringUtils.repeat("*", i).getBytes());
        }

        ByteBuffer buffer = out.getByteBuffer();
        // JUnit convention is assertEquals(expected, actual); the original had
        // the arguments swapped, which garbles the failure message.
        Assert.assertEquals(length, buffer.capacity());
    }

    /**
     * Test the I/O of using a static composite works correctly
     */
    @Test
    public void compositeSerializesPrimitives() {
        final CompositeBuilder builder = Composites.newCompositeBuilder();

        final String string = "test";
        final UUID uuid = UUID.randomUUID();
        final boolean bool = true;
        final Integer integer = 10;
        final Long longval = 20l;

        builder.addString(string);
        builder.addUUID(uuid);
        builder.addBoolean(bool);
        builder.addInteger(integer);
        builder.addLong(longval);

        final CompositeParser parser = Composites.newCompositeParser(builder.build());

        // now read back, in the same order the components were added
        assertEquals(string, parser.readString());
        assertEquals(uuid, parser.readUUID());
        assertEquals(bool, parser.readBoolean());
        assertEquals(integer, parser.readInteger());
        assertEquals(longval, parser.readLong());
    }

    /**
     * Test the index out of bounds on the read
     */
    @Test(expected = IndexOutOfBoundsException.class)
    public void compositeOutofBounds() {
        final CompositeBuilder builder = Composites.newCompositeBuilder();

        final String string = "test";
        builder.addString(string);

        final CompositeParser parser = Composites.newCompositeParser(builder.build());

        // now read back
        assertEquals(string, parser.readString());

        // read beyond available elements. Should throw IndexOutOfBoundsException
        parser.readString();
    }
}
| 7,571 |
0 | Create_ds/astyanax/astyanax-test/src/test/java/com/netflix/astyanax/connectionpool | Create_ds/astyanax/astyanax-test/src/test/java/com/netflix/astyanax/connectionpool/impl/RoundRobinConnectionPoolImplTest.java | /*******************************************************************************
* Copyright 2011 Netflix
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
******************************************************************************/
package com.netflix.astyanax.connectionpool.impl;
import com.netflix.astyanax.connectionpool.ConnectionPool;
import com.netflix.astyanax.connectionpool.ConnectionPoolConfiguration;
import com.netflix.astyanax.connectionpool.Host;
import com.netflix.astyanax.connectionpool.HostConnectionPool;
import com.netflix.astyanax.connectionpool.Operation;
import com.netflix.astyanax.connectionpool.OperationResult;
import com.netflix.astyanax.connectionpool.exceptions.ConnectionException;
import com.netflix.astyanax.connectionpool.exceptions.NoAvailableHostsException;
import com.netflix.astyanax.connectionpool.exceptions.OperationException;
import com.netflix.astyanax.retry.RunOnce;
import com.netflix.astyanax.test.TestClient;
import com.netflix.astyanax.test.TestConnectionFactory;
import com.netflix.astyanax.test.TestConstants;
import com.netflix.astyanax.test.TestHostType;
import com.netflix.astyanax.test.TestOperation;
import org.apache.log4j.Logger;
import org.junit.Assert;
import org.junit.Ignore;
import org.junit.Test;
import com.netflix.astyanax.connectionpool.ConnectionContext;
/**
 * Tests for {@code RoundRobinConnectionPoolImpl}: unchecked-exception
 * handling, host-down recovery, and host addition, on top of the shared
 * scenarios inherited from {@code BaseConnectionPoolTest}.
 */
public class RoundRobinConnectionPoolImplTest extends BaseConnectionPoolTest {

    private static Logger LOG = Logger
            .getLogger(RoundRobinConnectionPoolImplTest.class);

    private static Operation<TestClient, String> dummyOperation = new TestOperation();

    /** Builds the round-robin pool exercised by the base-class tests. */
    protected ConnectionPool<TestClient> createPool() {
        CountingConnectionPoolMonitor monitor = new CountingConnectionPoolMonitor();
        ConnectionPoolConfiguration config = new ConnectionPoolConfigurationImpl(
                TestConstants.CLUSTER_NAME + "_" + TestConstants.KEYSPACE_NAME);
        config.initialize();

        return new RoundRobinConnectionPoolImpl<TestClient>(
                config, new TestConnectionFactory(config, monitor), monitor);
    }

    /**
     * An operation that throws an unchecked exception must surface as a
     * ConnectionException and close the connection it ran on.
     */
    @Test
    public void testUncheckedException() {
        CountingConnectionPoolMonitor monitor = new CountingConnectionPoolMonitor();
        ConnectionPoolConfigurationImpl config = new ConnectionPoolConfigurationImpl(
                TestConstants.CLUSTER_NAME + "_" + TestConstants.KEYSPACE_NAME);
        config.initialize();

        ConnectionPool<TestClient> pool = new RoundRobinConnectionPoolImpl<TestClient>(
                config, new TestConnectionFactory(config, monitor), monitor);

        pool.addHost(
                new Host("127.0.0.1", TestHostType.GOOD_IMMEDIATE.ordinal()),
                true);

        try {
            pool.executeWithFailover(new TestOperation() {
                @Override
                public String execute(TestClient client, ConnectionContext context)
                        throws ConnectionException, OperationException {
                    throw new RuntimeException("Unkecked Exception");
                }
            }, RunOnce.get());
            LOG.info(pool.toString());
            Assert.fail();
        } catch (ConnectionException e) {
            // Expected: the unchecked exception is wrapped.
        }

        // expected first, actual second (the original had them swapped)
        Assert.assertEquals(1, monitor.getConnectionClosedCount());
    }

    /**
     * An unchecked exception thrown while opening a connection must fail the
     * operation without ever counting a closed connection.
     */
    @Test
    public void testUncheckedExceptionInOpen() {
        CountingConnectionPoolMonitor monitor = new CountingConnectionPoolMonitor();
        ConnectionPoolConfigurationImpl config = new ConnectionPoolConfigurationImpl(
                TestConstants.CLUSTER_NAME + "_" + TestConstants.KEYSPACE_NAME);
        config.initialize();
        config.setInitConnsPerHost(0);

        ConnectionPool<TestClient> pool = new RoundRobinConnectionPoolImpl<TestClient>(
                config, new TestConnectionFactory(config, monitor), monitor);

        pool.addHost(new Host("127.0.0.1",
                TestHostType.CONNECT_WITH_UNCHECKED_EXCEPTION.ordinal()), true);

        try {
            pool.executeWithFailover(dummyOperation, RunOnce.get());
            LOG.info(pool.toString());
            Assert.fail();
        } catch (ConnectionException e) {
            LOG.info(e.getMessage());
        }

        think(1000);

        try {
            pool.executeWithFailover(dummyOperation, RunOnce.get());
            LOG.info(pool.toString());
            Assert.fail();
        } catch (ConnectionException e) {
            LOG.info(e.getMessage());
        }

        think(1000);

        // No connection was ever opened, so none can have been closed.
        Assert.assertEquals(0, monitor.getConnectionClosedCount());
    }

    /**
     * A host marked down must reject operations until the retry backoff
     * window elapses, after which operations succeed again.
     */
    @Test
    public void testHostDown() {
        CountingConnectionPoolMonitor monitor = new CountingConnectionPoolMonitor();
        ConnectionPoolConfigurationImpl config = new ConnectionPoolConfigurationImpl(
                TestConstants.CLUSTER_NAME + "_" + TestConstants.KEYSPACE_NAME);
        config.setRetryBackoffStrategy(new FixedRetryBackoffStrategy(200, 2000));
        config.setMaxConnsPerHost(3);
        config.setMaxPendingConnectionsPerHost(2);
        config.initialize();

        ConnectionPool<TestClient> cp = new RoundRobinConnectionPoolImpl<TestClient>(
                config, new TestConnectionFactory(config, monitor), monitor);

        Host host = new Host("127.0.0.1", TestHostType.GOOD_IMMEDIATE.ordinal());
        cp.addHost(host, true);

        try {
            cp.executeWithFailover(new TestOperation(), RunOnce.get());
        } catch (ConnectionException e) {
            LOG.error(e.getMessage());
            Assert.fail();
        }

        HostConnectionPool<TestClient> pool = cp.getHostPool(host);
        Assert.assertNotNull(pool);
        pool.markAsDown(null);

        try {
            cp.executeWithFailover(new TestOperation(), RunOnce.get());
            Assert.fail();
        } catch (NoAvailableHostsException e) {
            // Expected while the host is marked down.
        } catch (ConnectionException e) {
            LOG.info(e);
            Assert.fail();
        }

        // Wait past the retry backoff so the host is reinstated.
        this.think(1000);

        try {
            cp.executeWithFailover(new TestOperation(), RunOnce.get());
        } catch (ConnectionException e) {
            LOG.error(e.getMessage());
            Assert.fail();
        }
    }

    /**
     * A host that fails its first two connection attempts should start
     * serving operations once a connection finally opens.
     */
    @Test
    @Ignore
    public void testAddingNewHost() {
        CountingConnectionPoolMonitor monitor = new CountingConnectionPoolMonitor();
        ConnectionPoolConfigurationImpl config = new ConnectionPoolConfigurationImpl(
                TestConstants.CLUSTER_NAME + "_" + TestConstants.KEYSPACE_NAME)
                .setRetrySuspendWindow(1000)
                .setRetryBackoffStrategy(new FixedRetryBackoffStrategy(3000, 3000))
                .setMaxConnsPerHost(3)
                .setMaxPendingConnectionsPerHost(2)
                .setInitConnsPerHost(0);
        config.initialize();

        ConnectionPool<TestClient> cp = new RoundRobinConnectionPoolImpl<TestClient>(
                config, new TestConnectionFactory(config, monitor), monitor);

        Host host = new Host("127.0.0.1",
                TestHostType.CONNECT_FAIL_FIRST_TWO.ordinal());
        cp.addHost(host, true);

        // The first two attempts fail by construction of the test host type.
        for (int i = 0; i < 2; i++) {
            try {
                cp.executeWithFailover(new TestOperation(), RunOnce.get());
                Assert.fail("Failed iteration " + i);
            } catch (ConnectionException e) {
                LOG.info(e);
            }
        }

        try {
            cp.executeWithFailover(new TestOperation(), RunOnce.get());
        } catch (ConnectionException e) {
            LOG.error(e.getMessage());
            Assert.fail();
        }
    }
}
| 7,572 |
0 | Create_ds/astyanax/astyanax-test/src/test/java/com/netflix/astyanax/connectionpool | Create_ds/astyanax/astyanax-test/src/test/java/com/netflix/astyanax/connectionpool/impl/StressSimpleHostConnectionPoolImpl.java | /*******************************************************************************
* Copyright 2011 Netflix
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
******************************************************************************/
package com.netflix.astyanax.connectionpool.impl;
import java.util.Random;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import org.apache.log4j.Logger;
import com.netflix.astyanax.connectionpool.Connection;
import com.netflix.astyanax.connectionpool.Host;
import com.netflix.astyanax.connectionpool.HostConnectionPool;
import com.netflix.astyanax.connectionpool.exceptions.ConnectionException;
import com.netflix.astyanax.connectionpool.exceptions.OperationException;
import com.netflix.astyanax.test.TestClient;
import com.netflix.astyanax.test.TestConnectionFactory;
import com.netflix.astyanax.test.TestHostType;
public class StressSimpleHostConnectionPoolImpl {
    // Fix: log under this class rather than Stress so log output is attributed correctly.
    private static final Logger LOG = Logger.getLogger(StressSimpleHostConnectionPoolImpl.class);

    /** Listener that ignores host up/down notifications; this stress run doesn't need them. */
    public static class NoOpListener implements
            SimpleHostConnectionPool.Listener<TestClient> {
        @Override
        public void onHostDown(HostConnectionPool<TestClient> pool) {
        }

        @Override
        public void onHostUp(HostConnectionPool<TestClient> pool) {
        }
    }

    /**
     * Stand-alone stress harness: hammers a single {@link SimpleHostConnectionPool}
     * (max 3 connections) with 100 threads, each doing 100 borrow/think/return
     * cycles, printing the pool state every second until all workers finish.
     *
     * @param args unused
     */
    public static void main(String[] args) {
        ConnectionPoolConfigurationImpl config = new ConnectionPoolConfigurationImpl(
                "cluster_keyspace");
        config.setMaxConnsPerHost(3);

        CountingConnectionPoolMonitor monitor = new CountingConnectionPoolMonitor();
        Host host = new Host("127.0.0.1", TestHostType.GOOD_IMMEDIATE.ordinal());
        final SimpleHostConnectionPool<TestClient> pool = new SimpleHostConnectionPool<TestClient>(
                host, new TestConnectionFactory(null, monitor), monitor,
                config, new NoOpListener());

        int numThreads = 100;
        final int numOperations = 100;
        final int timeout = 2000;       // borrow timeout, ms

        ExecutorService executor = Executors.newFixedThreadPool(numThreads);
        for (int i = 0; i < numThreads; i++) {
            executor.submit(new Runnable() {
                @Override
                public void run() {
                    for (int i = 0; i < numOperations; i++) {
                        Connection<TestClient> conn = null;
                        try {
                            conn = pool.borrowConnection(timeout);
                            think(10, 10);
                        } catch (OperationException e) {
                            // Failures are expected under stress; keep the worker going.
                        } catch (ConnectionException e) {
                            // Failures are expected under stress; keep the worker going.
                        } finally {
                            // Fix: borrowConnection may have thrown before assigning conn,
                            // so guard against an NPE in the finally block.
                            if (conn != null) {
                                conn.getHostConnectionPool().returnConnection(conn);
                            }
                        }
                    }
                }
            });
        }

        try {
            executor.shutdown();
            // Poll once a second so the pool state is visible while workers drain.
            while (!executor.awaitTermination(1000, TimeUnit.MILLISECONDS)) {
                LOG.info(pool.toString());
            }
            Thread.sleep(1000);
        } catch (InterruptedException e) {
            // Fix: restore the interrupt status instead of swallowing it.
            Thread.currentThread().interrupt();
        }

        LOG.info("**** FINISHED ****");
        LOG.info(pool.toString());
    }

    /** Sleeps between min and max milliseconds (exactly min when max <= min). */
    private static void think(int min, int max) {
        try {
            if (max > min) {
                Thread.sleep(min + new Random().nextInt(max - min));
            } else {
                Thread.sleep(min);
            }
        } catch (InterruptedException e) {
            // Fix: restore the interrupt status instead of swallowing it.
            Thread.currentThread().interrupt();
        }
    }
}
| 7,573 |
0 | Create_ds/astyanax/astyanax-test/src/test/java/com/netflix/astyanax/connectionpool | Create_ds/astyanax/astyanax-test/src/test/java/com/netflix/astyanax/connectionpool/impl/Stress.java | /*******************************************************************************
* Copyright 2011 Netflix
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
******************************************************************************/
package com.netflix.astyanax.connectionpool.impl;
import java.util.List;
import java.util.Map;
import java.util.Random;
import java.util.TreeMap;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicLong;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.base.Function;
import com.google.common.base.Suppliers;
import com.google.common.collect.Lists;
import com.netflix.astyanax.connectionpool.ConnectionPool;
import com.netflix.astyanax.connectionpool.ConnectionPoolMonitor;
import com.netflix.astyanax.connectionpool.Host;
import com.netflix.astyanax.connectionpool.HostConnectionPool;
import com.netflix.astyanax.connectionpool.LatencyScoreStrategy.Instance;
import com.netflix.astyanax.connectionpool.LatencyScoreStrategy.Listener;
import com.netflix.astyanax.connectionpool.exceptions.BadRequestException;
import com.netflix.astyanax.connectionpool.exceptions.ConnectionAbortedException;
import com.netflix.astyanax.connectionpool.exceptions.ConnectionException;
import com.netflix.astyanax.connectionpool.exceptions.HostDownException;
import com.netflix.astyanax.connectionpool.exceptions.OperationException;
import com.netflix.astyanax.connectionpool.exceptions.OperationTimeoutException;
import com.netflix.astyanax.connectionpool.exceptions.PoolTimeoutException;
import com.netflix.astyanax.connectionpool.exceptions.TimeoutException;
import com.netflix.astyanax.connectionpool.exceptions.TokenRangeOfflineException;
import com.netflix.astyanax.connectionpool.exceptions.TransportException;
import com.netflix.astyanax.connectionpool.exceptions.UnknownException;
import com.netflix.astyanax.connectionpool.impl.ConnectionPoolConfigurationImpl;
import com.netflix.astyanax.connectionpool.impl.RoundRobinConnectionPoolImpl;
import com.netflix.astyanax.retry.RunOnce;
import com.netflix.astyanax.test.ProbabalisticFunction;
import com.netflix.astyanax.test.TestClient;
import com.netflix.astyanax.test.TestConnectionFactory;
import com.netflix.astyanax.test.TestConstants;
import com.netflix.astyanax.test.TestDriver;
import com.netflix.astyanax.test.TestHostType;
import com.netflix.astyanax.test.TestOperation;
public class Stress {
    // Fix: logger made final per convention.
    private static final Logger LOG = LoggerFactory.getLogger(Stress.class);

    /**
     * Stand-alone stress harness: drives a {@link RoundRobinConnectionPoolImpl}
     * with 200 threads at ~200 calls/second over a mix of healthy hosts and two
     * hosts that periodically fail, printing throughput, an EMA latency score and
     * per-host pool state once a second. Runs until interrupted (iteration count
     * of 0 means "run forever").
     *
     * @param args unused
     */
    public static void main(String[] args) {
        ConnectionPoolConfigurationImpl config = new ConnectionPoolConfigurationImpl(
                TestConstants.CLUSTER_NAME + "_" + TestConstants.KEYSPACE_NAME);
        config.setMaxFailoverCount(-1);
        config.setMaxTimeoutWhenExhausted(1000);
        config.setMaxConnsPerHost(25);
        config.setInitConnsPerHost(0);
        config.setTimeoutWindow(5000);
        config.setMaxTimeoutCount(10);
        config.setRetrySuspendWindow(5000);
        config.setLatencyScoreStrategy(new EmaLatencyScoreStrategyImpl(1000, 0, 20));

        final ConnectionPoolMonitor monitor = new CountingConnectionPoolMonitor();
        TestConnectionFactory factory = new TestConnectionFactory(config, monitor);
        final ConnectionPool<TestClient> pool =
                new RoundRobinConnectionPoolImpl<TestClient>(config, factory, monitor);
        pool.start();

        // Mostly fast, healthy hosts plus two misbehaving ones to exercise failover.
        final List<Host> hosts = Lists.newArrayList(
                new Host("127.0.0.1", TestHostType.GOOD_FAST.ordinal()),
                new Host("127.0.0.2", TestHostType.GOOD_FAST.ordinal()),
                new Host("127.0.0.3", TestHostType.GOOD_FAST.ordinal()),
                new Host("127.0.0.4", TestHostType.GOOD_FAST.ordinal()),
                new Host("127.0.0.5", TestHostType.GOOD_FAST.ordinal()),
                new Host("127.0.0.6", TestHostType.GOOD_FAST.ordinal()),
                new Host("127.0.0.7", TestHostType.GOOD_FAST.ordinal()),
                new Host("127.0.0.8", TestHostType.GOOD_FAST.ordinal()),
                new Host("127.0.0.10", TestHostType.SWAP_EVERY_200.ordinal()),
                new Host("127.0.0.11", TestHostType.ALTERNATING_SOCKET_TIMEOUT_200.ordinal()));
        for (Host host : hosts) {
            pool.addHost(host, true);
        }

        System.out.println(monitor.toString());

        final AtomicBoolean timeoutsEnabled = new AtomicBoolean(false);
        final AtomicLong lastOperationCount = new AtomicLong();

        // Separate EMA sampler used only to track overall request latency for the
        // once-a-second status line below.
        EmaLatencyScoreStrategyImpl latency = new EmaLatencyScoreStrategyImpl(1000, 0, 10);
        final Instance sampler = latency.createInstance();
        latency.start(new Listener() {
            @Override
            public void onUpdate() {
            }

            @Override
            public void onReset() {
            }
        });

        // Per-operation behavior: no-op result, but always "think" for 10-30ms.
        // (The previous commented-out withProbability(...) fault-injection hooks
        // were dead code and have been removed; restore from history if needed.)
        final Function<TestDriver, Void> function = new ProbabalisticFunction.Builder<TestDriver, Void>()
                .withDefault(new Function<TestDriver, Void>() {
                    public Void apply(TestDriver arg0) {
                        return null;
                    }
                })
                .withAlways(new Runnable() {
                    public void run() {
                        think(10, 30);
                    }
                })
                .build();

        final TestDriver driver = new TestDriver.Builder()
                .withIterationCount(0)
                .withThreadCount(200)
                .withCallsPerSecondSupplier(Suppliers.ofInstance(200))
                .withCallback(new Function<TestDriver, Void>() {
                    public Void apply(final TestDriver driver) {
                        long startTime = System.nanoTime();
                        try {
                            pool.executeWithFailover(new TestOperation() {
                                public String execute(TestClient client) throws ConnectionException, OperationException {
                                    try {
                                        function.apply(driver);
                                        return null;
                                    }
                                    catch (RuntimeException e) {
                                        // Unwrap ConnectionExceptions smuggled out of the function.
                                        if (e.getCause() instanceof ConnectionException)
                                            throw (ConnectionException) e.getCause();
                                        throw e;
                                    }
                                }
                            }, new RunOnce());
                        } catch (PoolTimeoutException e) {
                            LOG.info(e.getMessage());
                        } catch (ConnectionException e) {
                            // Expected under induced failures; ignore so the driver keeps running.
                        } finally {
                            // Record wall-clock latency in milliseconds.
                            sampler.addSample((System.nanoTime() - startTime) / 1000000);
                        }
                        return null;
                    }
                })
                //
                // Event to toggle timeout injection on/off every 10 seconds.
                //
                .withRecurringEvent(10, TimeUnit.SECONDS, new Function<TestDriver, Void>() {
                    @Override
                    public Void apply(TestDriver driver) {
                        timeoutsEnabled.getAndSet(!timeoutsEnabled.get());
                        return null;
                    }
                })
                //
                // Print runtime, latency score, ops/sec delta and per-host state.
                //
                .withRecurringEvent(1, TimeUnit.SECONDS, new Function<TestDriver, Void>() {
                    @Override
                    public Void apply(TestDriver driver) {
                        long opCount = lastOperationCount.get();
                        lastOperationCount.set(driver.getOperationCount());
                        System.out.println("" + driver.getRuntime() + "," + sampler.getScore() + ","
                                + (lastOperationCount.get() - opCount));
                        // Fix: monitor state was accidentally printed twice in a row.
                        System.out.println(monitor.toString());
                        for (HostConnectionPool<TestClient> host : pool.getPools()) {
                            System.out.println(" " + host.toString());
                        }
                        return null;
                    }
                })
                .build();

        driver.start();
        try {
            driver.await();
        } catch (InterruptedException e) {
            // Fix: restore the interrupt status instead of swallowing it.
            Thread.currentThread().interrupt();
        }
    }

    /** Sleeps between min and max milliseconds (exactly min when max <= min). */
    private static void think(int min, int max) {
        try {
            if (max > min) {
                Thread.sleep(min + new Random().nextInt(max - min));
            } else {
                Thread.sleep(min);
            }
        } catch (InterruptedException e) {
            // Fix: restore the interrupt status instead of swallowing it.
            Thread.currentThread().interrupt();
        }
    }
}
| 7,574 |
0 | Create_ds/astyanax/astyanax-test/src/test/java/com/netflix/astyanax/connectionpool | Create_ds/astyanax/astyanax-test/src/test/java/com/netflix/astyanax/connectionpool/impl/SimpleHostConnectionPoolTest.java | package com.netflix.astyanax.connectionpool.impl;
import junit.framework.Assert;
import org.junit.Ignore;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.netflix.astyanax.connectionpool.Connection;
import com.netflix.astyanax.connectionpool.ConnectionPoolConfiguration;
import com.netflix.astyanax.connectionpool.Host;
import com.netflix.astyanax.connectionpool.HostConnectionPool;
import com.netflix.astyanax.connectionpool.exceptions.ConnectionException;
import com.netflix.astyanax.connectionpool.exceptions.HostDownException;
import com.netflix.astyanax.connectionpool.exceptions.PoolTimeoutException;
import com.netflix.astyanax.connectionpool.exceptions.TimeoutException;
import com.netflix.astyanax.shallows.EmptyPartitioner;
import com.netflix.astyanax.test.TestClient;
import com.netflix.astyanax.test.TestConnectionFactory;
import com.netflix.astyanax.test.TestHostType;
import com.netflix.astyanax.test.TestOperation;
import com.netflix.astyanax.connectionpool.ConnectionContext;
/**
 * Unit tests for {@link SimpleHostConnectionPool}: connection priming,
 * borrow/return accounting, failure handling on open, markAsDown/reconnect
 * behavior, and error-threshold-driven suspension.
 *
 * NOTE(review): several tests use Thread.sleep() to give the background
 * reconnect/retry machinery time to run, so they are timing-sensitive.
 */
public class SimpleHostConnectionPoolTest {
    private static Logger LOG = LoggerFactory
            .getLogger(SimpleHostConnectionPoolTest.class);

    // Borrow timeout (ms) used throughout these tests.
    private static int WAIT_TIMEOUT = 50;

    /** Listener that ignores host up/down notifications; these tests don't assert on them. */
    public static class NoOpListener implements
            SimpleHostConnectionPool.Listener<TestClient> {
        @Override
        public void onHostDown(HostConnectionPool<TestClient> pool) {
        }

        @Override
        public void onHostUp(HostConnectionPool<TestClient> pool) {
        }
    }

    /** Priming one connection on a healthy host yields one active, one idle connection. */
    @Test
    public void testAddHost() {
        Host host = new Host("127.0.0.1", TestHostType.GOOD_FAST.ordinal());
        CountingConnectionPoolMonitor monitor = new CountingConnectionPoolMonitor();
        ConnectionPoolConfiguration config = createConfig();
        SimpleHostConnectionPool<TestClient> pool = new SimpleHostConnectionPool<TestClient>(
                host, new TestConnectionFactory(config, monitor), monitor,
                config, new NoOpListener());

        // A freshly-created pool holds no connections and is not reconnecting.
        Assert.assertEquals(0, pool.getActiveConnectionCount());
        Assert.assertEquals(false, pool.isReconnecting());
        Assert.assertEquals(0, pool.getIdleConnectionCount());

        try {
            pool.primeConnections(1);
        } catch (ConnectionException e) {
            LOG.error(e.getMessage());
            Assert.fail();
        } catch (InterruptedException e) {
            LOG.error(e.getMessage());
            Assert.fail();
        }

        Assert.assertEquals(1, pool.getActiveConnectionCount());
        Assert.assertEquals(false, pool.isReconnecting());
        Assert.assertEquals(1, pool.getIdleConnectionCount());
    }

    /**
     * A connect timeout (checked exception) during priming must leave the pool
     * empty and put it into the reconnecting state.
     */
    @Test
    public void testAddHostWithCheckedException() {
        Host host = new Host("127.0.0.1",
                TestHostType.CONNECT_TIMEOUT.ordinal());
        ConnectionPoolConfiguration config = createConfig();
        CountingConnectionPoolMonitor monitor = new CountingConnectionPoolMonitor();
        SimpleHostConnectionPool<TestClient> pool = new SimpleHostConnectionPool<TestClient>(
                host, new TestConnectionFactory(config, monitor), monitor,
                config, new NoOpListener());

        try {
            pool.primeConnections(1);
            Assert.fail();
        } catch (InterruptedException e) {
            LOG.error(e.getMessage());
            Assert.fail();
        } catch (ConnectionException e) {
            // Expected: the connect times out.
        }

        Assert.assertEquals(0, pool.getActiveConnectionCount());
        Assert.assertEquals(true, pool.isReconnecting());
        Assert.assertEquals(0, pool.getIdleConnectionCount());
    }

    /**
     * An unchecked exception during priming must be surfaced as a
     * ConnectionException, leave all counters at zero except failed opens,
     * and put the pool into the reconnecting state.
     */
    @Test
    public void testAddHostWithUncheckedException() {
        Host host = new Host("127.0.0.1",
                TestHostType.CONNECT_WITH_UNCHECKED_EXCEPTION.ordinal());
        ConnectionPoolConfiguration config = createConfig();
        CountingConnectionPoolMonitor monitor = new CountingConnectionPoolMonitor();
        SimpleHostConnectionPool<TestClient> pool = new SimpleHostConnectionPool<TestClient>(
                host, new TestConnectionFactory(config, monitor), monitor,
                config, new NoOpListener());

        try {
            pool.primeConnections(1);
            Assert.fail();
        } catch (InterruptedException e) {
            LOG.error(e.getMessage());
            Assert.fail();
        } catch (ConnectionException e) {
            // Expected: the unchecked exception surfaces as a ConnectionException.
        }

        LOG.info(pool.toString());
        Assert.assertEquals(0, pool.getActiveConnectionCount());
        Assert.assertEquals(0, pool.getIdleConnectionCount());
        Assert.assertEquals(0, pool.getOpenedConnectionCount());
        Assert.assertEquals(0, pool.getClosedConnectionCount());
        // NOTE(review): priming one connection registers two failed opens here —
        // presumably the pool retries once internally; confirm against the impl.
        Assert.assertEquals(2, pool.getFailedOpenConnectionCount());
        Assert.assertEquals(0, pool.getBusyConnectionCount());
        Assert.assertEquals(0, pool.getPendingConnectionCount());
        Assert.assertEquals(true, pool.isReconnecting());
    }

    /**
     * Priming up to the configured maximum works; priming beyond it is a no-op
     * (the pool stays at maxConnsPerHost = 2).
     */
    @Test
    public void testGrowConnections() {
        Host host = new Host("127.0.0.1", TestHostType.GOOD_FAST.ordinal());
        ConnectionPoolConfiguration config = createConfig();
        CountingConnectionPoolMonitor monitor = new CountingConnectionPoolMonitor();
        SimpleHostConnectionPool<TestClient> pool = new SimpleHostConnectionPool<TestClient>(
                host, new TestConnectionFactory(config, monitor), monitor,
                config, new NoOpListener());

        try {
            pool.primeConnections(2);
        } catch (InterruptedException e) {
            LOG.error(e.getMessage());
            Assert.fail();
        } catch (ConnectionException e) {
            LOG.error(e.getMessage());
            Assert.fail();
        }

        Assert.assertEquals(2, pool.getActiveConnectionCount());
        Assert.assertEquals(false, pool.isReconnecting());
        Assert.assertEquals(2, pool.getIdleConnectionCount());

        // Priming again must not grow past the configured maximum of 2.
        try {
            pool.primeConnections(2);
        } catch (Exception e) {
            LOG.error(e.getMessage());
            Assert.fail();
        }

        Assert.assertEquals(2, pool.getActiveConnectionCount());
        Assert.assertEquals(false, pool.isReconnecting());
    }

    /**
     * A host that fails its first two connects suspends the pool; after the
     * fixed 100ms retry backoff elapses, the background retry succeeds and the
     * pool recovers with one active connection.
     */
    @Test
    public void testFailFirst() throws Exception {
        Host host = new Host("127.0.0.1", TestHostType.CONNECT_FAIL_FIRST_TWO.ordinal());
        ConnectionPoolConfigurationImpl config = createConfig();
        config.setRetryBackoffStrategy(new FixedRetryBackoffStrategy(100, 100));
        CountingConnectionPoolMonitor monitor = new CountingConnectionPoolMonitor();

        SimpleHostConnectionPool<TestClient> pool = new SimpleHostConnectionPool<TestClient>(
                host, new TestConnectionFactory(config, monitor), monitor,
                config, new NoOpListener());

        try {
            pool.primeConnections(2);
            Assert.fail();
        } catch (InterruptedException e) {
            LOG.error(e.getMessage());
        } catch (ConnectionException e) {
            LOG.error(e.getMessage());
        }

        Assert.assertEquals(0, pool.getActiveConnectionCount());
        Assert.assertEquals(true, pool.isReconnecting());
        Assert.assertEquals(0, pool.getIdleConnectionCount());

        // Give the background retry (100ms backoff) time to reconnect.
        Thread.sleep(1000);

        Assert.assertEquals(1, pool.getActiveConnectionCount());
        Assert.assertEquals(false, pool.isReconnecting());
    }

    /**
     * markAsDown keeps existing connections but rejects new priming with
     * HostDownException until the retry backoff (200ms here) reopens the host,
     * after which priming can grow the pool again.
     */
    @Test
    public void testShutdown() {
        Host host = new Host("127.0.0.1", TestHostType.GOOD_FAST.ordinal());
        ConnectionPoolConfigurationImpl config = createConfig();
        config.setRetryBackoffStrategy(new FixedRetryBackoffStrategy(200, 2000));
        config.setMaxConnsPerHost(3);
        config.setMaxPendingConnectionsPerHost(2);
        CountingConnectionPoolMonitor monitor = new CountingConnectionPoolMonitor();

        SimpleHostConnectionPool<TestClient> pool = new SimpleHostConnectionPool<TestClient>(
                host, new TestConnectionFactory(config, monitor), monitor,
                config, new NoOpListener());
        try {
            pool.primeConnections(2);
        } catch (InterruptedException e) {
            LOG.error(e.getMessage());
            Assert.fail();
        } catch (ConnectionException e) {
            LOG.error(e.getMessage());
            Assert.fail();
        }

        Assert.assertEquals(2, pool.getActiveConnectionCount());
        Assert.assertEquals(false, pool.isReconnecting());
        Assert.assertEquals(2, pool.getIdleConnectionCount());

        // Mark the host down: existing connections remain but the pool flips
        // into the reconnecting state.
        pool.markAsDown(null);
        Assert.assertEquals(2, pool.getActiveConnectionCount());
        Assert.assertEquals(2, pool.getIdleConnectionCount());
        Assert.assertEquals(true, pool.isReconnecting());

        // While down, priming must be rejected with HostDownException.
        try {
            pool.primeConnections(1);
        } catch (HostDownException e) {
        } catch (Exception e) {
            Assert.fail();
        }

        Assert.assertEquals(2, pool.getActiveConnectionCount());
        Assert.assertEquals(2, pool.getIdleConnectionCount());
        Assert.assertEquals(true, pool.isReconnecting());

        // Wait out the 200ms retry backoff so the host comes back up.
        try {
            Thread.sleep(1000);
        } catch (InterruptedException e) {
            // TODO Auto-generated catch block
            e.printStackTrace();
        }

        Assert.assertEquals(2, pool.getActiveConnectionCount());
        Assert.assertEquals(2, pool.getIdleConnectionCount());
        Assert.assertEquals(false, pool.isReconnecting());

        // Priming now grows the pool up to the new max of 3.
        try {
            pool.primeConnections(2);
        } catch (InterruptedException e) {
            LOG.error(e.getMessage());
            Assert.fail();
        } catch (ConnectionException e) {
            LOG.error(e.getMessage());
            Assert.fail();
        }

        Assert.assertEquals(3, pool.getActiveConnectionCount());
        Assert.assertEquals(false, pool.isReconnecting());
        Assert.assertEquals(3, pool.getIdleConnectionCount());
    }

    /** Borrowing moves a connection from idle to busy; returning restores it. */
    @Test
    public void testCloseOrReturnConnection() {
        Host host = new Host("127.0.0.1", TestHostType.GOOD_FAST.ordinal());
        ConnectionPoolConfiguration config = createConfig();
        CountingConnectionPoolMonitor monitor = new CountingConnectionPoolMonitor();
        SimpleHostConnectionPool<TestClient> pool = new SimpleHostConnectionPool<TestClient>(
                host, new TestConnectionFactory(config, monitor), monitor,
                config, new NoOpListener());

        try {
            pool.primeConnections(2);
            Assert.assertEquals(2, pool.getActiveConnectionCount());
            Assert.assertEquals(false, pool.isReconnecting());
            Assert.assertEquals(2, pool.getIdleConnectionCount());

            // Borrow: one idle connection becomes busy.
            Connection<TestClient> connection = pool.borrowConnection(0);
            Assert.assertEquals(2, pool.getActiveConnectionCount());
            Assert.assertEquals(false, pool.isReconnecting());
            Assert.assertEquals(1, pool.getIdleConnectionCount());

            // Return: back to two idle.
            pool.returnConnection(connection);
            Assert.assertEquals(2, pool.getActiveConnectionCount());
            Assert.assertEquals(false, pool.isReconnecting());
            Assert.assertEquals(2, pool.getIdleConnectionCount());

            connection = pool.borrowConnection(0);
            Assert.assertEquals(2, pool.getActiveConnectionCount());
            Assert.assertEquals(false, pool.isReconnecting());
            Assert.assertEquals(1, pool.getIdleConnectionCount());
        } catch (InterruptedException e) {
            LOG.error(e.getMessage());
            Assert.fail();
        } catch (ConnectionException e) {
            LOG.error(e.getMessage());
            Assert.fail();
        }
    }

    /**
     * With maxConnsPerHost = 1, the first borrow opens the single connection;
     * a second concurrent borrow must time out with PoolTimeoutException.
     */
    @Test
    public void testAsyncOpenConnection() {
        Host host = new Host("127.0.0.1", TestHostType.GOOD_FAST.ordinal());
        ConnectionPoolConfigurationImpl config = createConfig();
        config.setMaxConnsPerHost(1);

        // Open the first connection
        CountingConnectionPoolMonitor monitor = new CountingConnectionPoolMonitor();
        SimpleHostConnectionPool<TestClient> pool = new SimpleHostConnectionPool<TestClient>(
                host, new TestConnectionFactory(config, monitor), monitor,
                config, new NoOpListener());
        try {
            Connection<TestClient> connection = pool
                    .borrowConnection(WAIT_TIMEOUT);
        } catch (ConnectionException e) {
            LOG.error(e.getMessage());
            Assert.fail();
        }

        Assert.assertEquals(1, pool.getActiveConnectionCount());
        Assert.assertEquals(false, pool.isReconnecting());
        Assert.assertEquals(0, pool.getIdleConnectionCount());

        // Subsequent open should fail
        try {
            Connection<TestClient> connection = pool
                    .borrowConnection(WAIT_TIMEOUT);
            Assert.fail();
        } catch (PoolTimeoutException e) {
        } catch (ConnectionException e) {
            LOG.error(e.getMessage());
            Assert.fail();
        }

        Assert.assertEquals(1, pool.getActiveConnectionCount());
        Assert.assertEquals(false, pool.isReconnecting());
        Assert.assertEquals(0, pool.getIdleConnectionCount());
    }

    /**
     * Marking the host down while a slow open is still pending must reject new
     * borrows with HostDownException and eventually drop the pending connection.
     * Ignored: depends on GOOD_SLOW timing.
     */
    @Test
    @Ignore
    public void testAsyncOpenConnectionWithShutdown() {
        Host host = new Host("127.0.0.1", TestHostType.GOOD_SLOW.ordinal());
        ConnectionPoolConfigurationImpl config = createConfig();
        config.setMaxConnsPerHost(1);

        // Open the first connection
        CountingConnectionPoolMonitor monitor = new CountingConnectionPoolMonitor();
        SimpleHostConnectionPool<TestClient> pool = new SimpleHostConnectionPool<TestClient>(
                host, new TestConnectionFactory(config, monitor), monitor,
                config, new NoOpListener());
        try {
            Connection<TestClient> connection = pool.borrowConnection(1);
        } catch (PoolTimeoutException e) {
        } catch (ConnectionException e) {
            LOG.error(e.getMessage());
            Assert.fail();
        }

        pool.markAsDown(null);
        Assert.assertEquals(1, pool.getActiveConnectionCount());
        Assert.assertEquals(true, pool.isReconnecting());
        Assert.assertEquals(0, pool.getIdleConnectionCount());

        // This should fail because we shut down
        try {
            Connection<TestClient> connection = pool
                    .borrowConnection(WAIT_TIMEOUT);
            Assert.fail();
        } catch (HostDownException e) {
        } catch (ConnectionException e) {
            LOG.error(e.getMessage());
            Assert.fail();
        }

        // Count should still be 1 because we have a pending connection
        Assert.assertEquals(1, pool.getActiveConnectionCount());
        Assert.assertEquals(true, pool.isReconnecting());
        Assert.assertEquals(0, pool.getIdleConnectionCount());

        try {
            Thread.sleep(1000);
        } catch (InterruptedException e) {
        }

        Assert.assertEquals(0, pool.getActiveConnectionCount());
        Assert.assertEquals(true, pool.isReconnecting());
        Assert.assertEquals(0, pool.getIdleConnectionCount());
    }

    /**
     * A connect timeout on borrow leaves the pool empty with one failed open
     * and does NOT flip the pool into the reconnecting state.
     */
    @Test
    public void testAsyncOpenConnectionWithCheckedException() {
        Host host = new Host("127.0.0.1",
                TestHostType.CONNECT_TIMEOUT.ordinal());
        ConnectionPoolConfigurationImpl config = createConfig();

        // Open the first connection
        CountingConnectionPoolMonitor monitor = new CountingConnectionPoolMonitor();
        SimpleHostConnectionPool<TestClient> pool = new SimpleHostConnectionPool<TestClient>(
                host, new TestConnectionFactory(config, monitor), monitor,
                config, new NoOpListener());
        try {
            Connection<TestClient> connection = pool
                    .borrowConnection(WAIT_TIMEOUT);
            Assert.fail();
        } catch (ConnectionException e) {
            // Expected: the connect times out.
        }

        LOG.info(pool.toString());
        Assert.assertEquals(0, pool.getActiveConnectionCount());
        Assert.assertEquals(0, pool.getIdleConnectionCount());
        Assert.assertEquals(0, pool.getOpenedConnectionCount());
        Assert.assertEquals(0, pool.getClosedConnectionCount());
        Assert.assertEquals(1, pool.getFailedOpenConnectionCount());
        Assert.assertEquals(0, pool.getBusyConnectionCount());
        Assert.assertEquals(0, pool.getPendingConnectionCount());
        Assert.assertEquals(false, pool.isReconnecting());
    }

    /**
     * An unchecked exception on borrow leaves the pool empty and not
     * reconnecting (contrast with priming, which suspends the pool).
     */
    @Test
    public void testAsyncOpenConnectionWithUnCheckedException() throws Exception {
        Host host = new Host("127.0.0.1",
                TestHostType.CONNECT_WITH_UNCHECKED_EXCEPTION.ordinal());
        ConnectionPoolConfigurationImpl config = createConfig();

        // Open the first connection
        CountingConnectionPoolMonitor monitor = new CountingConnectionPoolMonitor();
        SimpleHostConnectionPool<TestClient> pool = new SimpleHostConnectionPool<TestClient>(
                host, new TestConnectionFactory(config, monitor), monitor,
                config, new NoOpListener());
        try {
            Connection<TestClient> connection = pool
                    .borrowConnection(WAIT_TIMEOUT);
            Assert.fail();
        } catch (ConnectionException e) {
            // Expected: the unchecked exception surfaces as a ConnectionException.
        }

        Assert.assertEquals(0, pool.getActiveConnectionCount());
        Assert.assertEquals(false, pool.isReconnecting());
        Assert.assertEquals(0, pool.getIdleConnectionCount());
    }

    /**
     * Repeated operation timeouts accumulate in errorsSinceLastSuccess; once
     * the threshold is crossed the pool suspends (isReconnecting), and after the
     * 500ms retry backoff it recovers and serves connections again.
     */
    @Test
    public void testExcessiveTimeouts() throws Exception {
        Host host = new Host("127.0.0.1",
                TestHostType.GOOD_FAST.ordinal());
        ConnectionPoolConfigurationImpl config = createConfig();
        config.setRetryBackoffStrategy(new FixedRetryBackoffStrategy(500, 500));

        // Open the first connection
        CountingConnectionPoolMonitor monitor = new CountingConnectionPoolMonitor();
        SimpleHostConnectionPool<TestClient> pool = new SimpleHostConnectionPool<TestClient>(
                host, new TestConnectionFactory(config, monitor), monitor,
                config, new NoOpListener());
        try {
            // Three timeouts in a row: each increments the error counter but the
            // pool stays up.
            for (int i = 0; i < 3; i++) {
                Connection<TestClient> connection = pool.borrowConnection(WAIT_TIMEOUT);
                try {
                    Assert.assertEquals(1, pool.getActiveConnectionCount());
                    Assert.assertEquals(false, pool.isReconnecting());
                    Assert.assertEquals(0, pool.getIdleConnectionCount());

                    connection.execute(new TestOperation() {
                        @Override
                        public String execute(TestClient client, ConnectionContext context) throws ConnectionException {
                            throw new TimeoutException("Test");
                        }
                    });
                    Assert.fail();
                }
                catch (Throwable t) {
                }
                finally {
                    pool.returnConnection(connection);
                }

                Assert.assertEquals(i+1, pool.getErrorsSinceLastSuccess());
                Assert.assertEquals(0, pool.getActiveConnectionCount());
                Assert.assertEquals(false, pool.isReconnecting());
                Assert.assertEquals(0, pool.getIdleConnectionCount());
            }

            // The fourth timeout crosses the threshold and suspends the pool.
            Connection<TestClient> connection = pool.borrowConnection(WAIT_TIMEOUT);
            try {
                Assert.assertEquals(1, pool.getActiveConnectionCount());
                Assert.assertEquals(false, pool.isReconnecting());
                Assert.assertEquals(0, pool.getIdleConnectionCount());

                connection.execute(new TestOperation() {
                    @Override
                    public String execute(TestClient client, ConnectionContext context) throws ConnectionException {
                        throw new TimeoutException("Test");
                    }
                });
            }
            catch (Throwable t) {
            }
            finally {
                pool.returnConnection(connection);
            }

            Assert.assertEquals(0, pool.getActiveConnectionCount());
            Assert.assertEquals(true, pool.isReconnecting());
            Assert.assertEquals(0, pool.getIdleConnectionCount());

            // Wait out the 500ms retry backoff, then borrowing works again.
            Thread.sleep(1000);

            connection = pool.borrowConnection(WAIT_TIMEOUT);
            pool.returnConnection(connection);

            Assert.assertEquals(1, pool.getActiveConnectionCount());
            Assert.assertEquals(false, pool.isReconnecting());
            Assert.assertEquals(1, pool.getIdleConnectionCount());
        } catch (ConnectionException e) {
            LOG.error("Error", e);
            Assert.fail(e.getMessage());
        }
    }

    /**
     * Baseline configuration shared by all tests: 2 max / 1 initial connection
     * per host, 200ms connect timeout, empty partitioner.
     */
    public ConnectionPoolConfigurationImpl createConfig() {
        ConnectionPoolConfigurationImpl config = new ConnectionPoolConfigurationImpl("cluster_keyspace");
        config.setMaxConnsPerHost(2);
        config.setInitConnsPerHost(1);
        config.setConnectTimeout(200);
        config.setPartitioner(new EmptyPartitioner());
        config.initialize();
        return config;
    }
}
| 7,575 |
0 | Create_ds/astyanax/astyanax-test/src/test/java/com/netflix/astyanax/connectionpool | Create_ds/astyanax/astyanax-test/src/test/java/com/netflix/astyanax/connectionpool/impl/BagConnectionPoolImplTest.java | package com.netflix.astyanax.connectionpool.impl;
import com.netflix.astyanax.connectionpool.ConnectionPool;
import com.netflix.astyanax.connectionpool.ConnectionContext;
import com.netflix.astyanax.connectionpool.ConnectionPoolConfiguration;
import com.netflix.astyanax.connectionpool.Host;
import com.netflix.astyanax.connectionpool.Operation;
import com.netflix.astyanax.connectionpool.OperationResult;
import com.netflix.astyanax.connectionpool.exceptions.ConnectionException;
import com.netflix.astyanax.connectionpool.exceptions.OperationException;
import com.netflix.astyanax.retry.RunOnce;
import com.netflix.astyanax.test.TestClient;
import com.netflix.astyanax.test.TestConnectionFactory;
import com.netflix.astyanax.test.TestConstants;
import com.netflix.astyanax.test.TestHostType;
import com.netflix.astyanax.test.TestOperation;
import org.junit.Assert;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class BagConnectionPoolImplTest extends BaseConnectionPoolTest {
private static Logger LOG = LoggerFactory
.getLogger(BagConnectionPoolImplTest.class);
private static Operation<TestClient, String> dummyOperation = new TestOperation();
protected ConnectionPool<TestClient> createPool() {
ConnectionPoolConfiguration config = new ConnectionPoolConfigurationImpl(
TestConstants.CLUSTER_NAME + "_" + TestConstants.KEYSPACE_NAME);
config.initialize();
CountingConnectionPoolMonitor monitor = new CountingConnectionPoolMonitor();
ConnectionPool<TestClient> pool = new BagOfConnectionsConnectionPoolImpl<TestClient>(
config, new TestConnectionFactory(config, monitor), monitor);
return pool;
}
public void testAll() {
}
@Test
public void testUncheckedException() {
CountingConnectionPoolMonitor monitor = new CountingConnectionPoolMonitor();
ConnectionPoolConfigurationImpl config = new ConnectionPoolConfigurationImpl(
TestConstants.CLUSTER_NAME + "_" + TestConstants.KEYSPACE_NAME);
config.initialize();
ConnectionPool<TestClient> pool = new BagOfConnectionsConnectionPoolImpl<TestClient>(
config, new TestConnectionFactory(config, monitor), monitor);
pool.addHost(
new Host("127.0.0.1", TestHostType.GOOD_IMMEDIATE.ordinal()),
true);
OperationResult<String> result;
try {
result = pool.executeWithFailover(new TestOperation() {
@Override
public String execute(TestClient client, ConnectionContext context)
throws ConnectionException, OperationException {
throw new RuntimeException("Unkecked Exception");
}
}, RunOnce.get());
LOG.info(pool.toString());
Assert.fail();
} catch (ConnectionException e) {
LOG.info(e.getMessage());
}
Assert.assertEquals(monitor.getConnectionClosedCount(), 1);
}
@Test
public void testUncheckedExceptionInOpen() {
CountingConnectionPoolMonitor monitor = new CountingConnectionPoolMonitor();
ConnectionPoolConfigurationImpl config = new ConnectionPoolConfigurationImpl(
TestConstants.CLUSTER_NAME + "_" + TestConstants.KEYSPACE_NAME);
config.setInitConnsPerHost(0);
config.initialize();
ConnectionPool<TestClient> pool = new BagOfConnectionsConnectionPoolImpl<TestClient>(
config, new TestConnectionFactory(config, monitor), monitor);
pool.addHost(new Host("127.0.0.1",
TestHostType.CONNECT_WITH_UNCHECKED_EXCEPTION.ordinal()), true);
OperationResult<String> result;
try {
result = pool.executeWithFailover(dummyOperation, RunOnce.get());
LOG.info(pool.toString());
Assert.fail();
} catch (ConnectionException e) {
LOG.info(e.getMessage());
}
think(1000);
try {
result = pool.executeWithFailover(dummyOperation, RunOnce.get());
LOG.info(pool.toString());
Assert.fail();
} catch (ConnectionException e) {
LOG.info(e.getMessage());
}
think(1000);
Assert.assertEquals(monitor.getConnectionClosedCount(), 0);
}
@Test
public void testOperationTimeout() {
CountingConnectionPoolMonitor monitor = new CountingConnectionPoolMonitor();
ConnectionPoolConfigurationImpl config = new ConnectionPoolConfigurationImpl(
TestConstants.CLUSTER_NAME + "_" + TestConstants.KEYSPACE_NAME);
config.setInitConnsPerHost(0);
config.initialize();
ConnectionPool<TestClient> pool = new BagOfConnectionsConnectionPoolImpl<TestClient>(
config, new TestConnectionFactory(config, monitor), monitor);
pool.addHost(
new Host("127.0.0.1", TestHostType.OPERATION_TIMEOUT.ordinal()),
true);
pool.addHost(
new Host("127.0.0.2", TestHostType.OPERATION_TIMEOUT.ordinal()),
true);
for (int i = 0; i < 5; i++) {
OperationResult<String> result;
try {
result = pool
.executeWithFailover(dummyOperation, RunOnce.get());
LOG.info(pool.toString());
Assert.fail();
} catch (ConnectionException e) {
LOG.info(e.getMessage());
}
}
Assert.assertEquals(15, monitor.getConnectionCreatedCount());
Assert.assertEquals(15, monitor.getConnectionClosedCount());
}
}
| 7,576 |
0 | Create_ds/astyanax/astyanax-test/src/test/java/com/netflix/astyanax/connectionpool | Create_ds/astyanax/astyanax-test/src/test/java/com/netflix/astyanax/connectionpool/impl/HostConnectionPoolPartitionTest.java | package com.netflix.astyanax.connectionpool.impl;
import java.math.BigInteger;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import junit.framework.Assert;
import org.junit.Test;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.netflix.astyanax.connectionpool.Host;
import com.netflix.astyanax.connectionpool.HostConnectionPool;
import com.netflix.astyanax.connectionpool.LatencyScoreStrategy;
import com.netflix.astyanax.partitioner.LongBOPPartitioner;
import com.netflix.astyanax.test.TestClient;
import com.netflix.astyanax.test.TestHostConnectionPool;
/**
 * Unit tests for the token-based partitioning of host connection pools and
 * the token-partitioned topology used by token-aware routing.
 */
public class HostConnectionPoolPartitionTest {
    /**
     * A partition's pool set is replaced wholesale by setPools(), so its size
     * always reflects the most recently applied list.
     */
    @Test
    public void testPartition() {
        LatencyScoreStrategy strategy = new SmaLatencyScoreStrategyImpl(10000, 60000, 100, 4.0);
        // NOTE(review): raw TokenHostConnectionPoolPartition type — consider
        // parameterizing (likely <TestClient>) to avoid unchecked warnings;
        // confirm the class's generic signature before changing.
        TokenHostConnectionPoolPartition partition = new TokenHostConnectionPoolPartition(new BigInteger("1"), strategy);
        // Four distinct pools; the two lists below overlap only in pools.get(0).
        List<TestHostConnectionPool> pools = Arrays.asList(
                makePool(1),
                makePool(2),
                makePool(3),
                makePool(4));
        List<TestHostConnectionPool> pool1 = Arrays.asList(
                pools.get(0),
                pools.get(1),
                pools.get(2));
        List<TestHostConnectionPool> pool2 = Arrays.asList(
                pools.get(0),
                pools.get(3));
        // Empty before any assignment, then tracks each replacement exactly.
        Assert.assertEquals(0, partition.getPools().size());
        partition.setPools(pool1);
        Assert.assertEquals(3, partition.getPools().size());
        partition.setPools(pool2);
        Assert.assertEquals(2, partition.getPools().size());
    }
    // Is there a reason that this test is in HostConnectionPoolPartitionTest?
    // Perhaps this should be moved to the existing TokenAwareConnectionPoolTest or
    // a new TokenParitionedTopologyTest?
    /**
     * A flat list of pools (no token ring information yet) should register as
     * a change but produce zero token partitions. The token-ring portion of
     * this test is preserved below as commented-out code and does not run.
     */
    @Test
    public void testTopology() {
        LatencyScoreStrategy strategy = new SmaLatencyScoreStrategyImpl(10000,60000, 100, 4.0);
        int nHosts = 6;
        int nReplicationFactor = 3;  // only referenced by the commented-out portion below
        TokenPartitionedTopology<TestClient> topology = new TokenPartitionedTopology<TestClient>(LongBOPPartitioner.get(), strategy);
        // Make the set of pools
        List<HostConnectionPool<TestClient>> pools = Lists.newArrayList();
        for (int i = 0; i < nHosts; i++) {
            pools.add(makePool(i));
        }
        // Make the flat ring (before ring_describe is called)
        List<HostConnectionPool<TestClient>> flatRing = pools;
        boolean didChange = topology.setPools(flatRing);
        Assert.assertTrue(didChange);
        Assert.assertEquals(0, topology.getPartitionCount());
        System.out.println(topology);
        // Make a ring with tokens and RF
        // Map<BigInteger, Collection<HostConnectionPool<TestClient>>> tokenRing = Maps.newHashMap();
        // for (int i = 0; i < nHosts; i++) {
        // List<HostConnectionPool<TestClient>> partition = Lists.newArrayList();
        // for (int j = 0; j < nReplicationFactor; j++) {
        // partition.add(pools.get((i + j) % nHosts));
        // }
        // tokenRing.put(new BigInteger(Integer.toString(i * 1000)), partition);
        // }
        //
        // didChange = topology.setPools(tokenRing);
        // Assert.assertTrue(didChange);
        // Assert.assertEquals(nHosts, topology.getPartitionCount());
        // System.out.println(topology);
        //
        // HostConnectionPoolPartition<TestClient> partition = topology
        // .getPartition(null);
        // Assert.assertEquals(nHosts, partition.getPools().size());
        //
        // // Partition Token Map:
        // // HCP 0 - (5000, 0]
        // // HCP 1000 - (0, 1000]
        // // HCP 2000 - (1000, 2000]
        // // HCP 3000 - (2000, 3000]
        // // HCP 4000 - (3000, 4000]
        // // HCP 5000 - (4000, 5000]
        //
        // // Test ordinals
        // for (int i = 0; i < nHosts; i++) {
        // partition = topology.getPartition(new BigInteger(Integer
        // .toString(i * 1000)));
        // Assert.assertEquals(new BigInteger(Integer.toString(i * 1000)),
        // partition.id());
        // }
        //
        // // Test mid-range tokens
        // for (int i = nHosts; i > 0; i--) {
        // partition = topology.getPartition(new BigInteger(Integer
        // .toString(i * 1000 - 500)));
        //
        // if (i == nHosts) { // 5500 is contained in (5000,0] which belongs to HCP 0
        // Assert.assertEquals(BigInteger.ZERO, partition.id());
        // } else {
        // Assert.assertEquals(new BigInteger(Integer.toString(i * 1000)),
        // partition.id());
        // }
        // }
        //
        // Map<BigInteger, Collection<HostConnectionPool<TestClient>>> emptyRing = Maps
        // .newHashMap();
        // topology.setPools(emptyRing);
        // System.out.println(topology);
        // Assert.assertEquals(0, topology.getPartitionCount());
        // Assert.assertEquals(0, topology.getAllPools().getPools().size());
    }

    /** Creates a test pool for host 127.0.0.&lt;index&gt; on port 0. */
    public TestHostConnectionPool makePool(int index) {
        return new TestHostConnectionPool(new Host("127.0.0." + index, 0));
    }
}
| 7,577 |
0 | Create_ds/astyanax/astyanax-test/src/test/java/com/netflix/astyanax/connectionpool | Create_ds/astyanax/astyanax-test/src/test/java/com/netflix/astyanax/connectionpool/impl/TokenAwareConnectionPoolTest.java | package com.netflix.astyanax.connectionpool.impl;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import java.math.BigInteger;
import java.util.List;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.netflix.astyanax.connectionpool.ConnectionPool;
import com.netflix.astyanax.connectionpool.ConnectionPoolConfiguration;
import com.netflix.astyanax.connectionpool.Host;
import com.netflix.astyanax.connectionpool.HostConnectionPool;
import com.netflix.astyanax.connectionpool.Operation;
import com.netflix.astyanax.connectionpool.OperationResult;
import com.netflix.astyanax.connectionpool.exceptions.ConnectionException;
import com.netflix.astyanax.partitioner.OrderedBigIntegerPartitioner;
import com.netflix.astyanax.retry.RetryPolicy;
import com.netflix.astyanax.retry.RunOnce;
import com.netflix.astyanax.serializers.BigIntegerSerializer;
import com.netflix.astyanax.test.TestClient;
import com.netflix.astyanax.test.TestConnectionFactory;
import com.netflix.astyanax.test.TestConstants;
import com.netflix.astyanax.test.TestOperation;
import com.netflix.astyanax.test.TestTokenRange;
import com.netflix.astyanax.test.TokenTestOperation;
import com.netflix.astyanax.util.TokenGenerator;
/**
 * Tests token-aware routing: row keys must be dispatched to the host that
 * owns the corresponding token range of the ring.
 */
public class TokenAwareConnectionPoolTest extends BaseConnectionPoolTest {
    private static final Logger LOG = LoggerFactory.getLogger(TokenAwareConnectionPoolTest.class);

    // Removed an unused private `dummyOperation` field that shadowed the one
    // in BaseConnectionPoolTest; every test here builds a TokenTestOperation.

    /**
     * Builds the pool implementation under test: a token-aware pool over an
     * ordered BigInteger partitioner.
     */
    protected ConnectionPool<TestClient> createPool() {
        ConnectionPoolConfiguration config = new ConnectionPoolConfigurationImpl(
                TestConstants.CLUSTER_NAME + "_" + TestConstants.KEYSPACE_NAME)
                .setPartitioner(OrderedBigIntegerPartitioner.get());
        config.initialize();

        CountingConnectionPoolMonitor monitor = new CountingConnectionPoolMonitor();
        return new TokenAwareConnectionPoolImpl<TestClient>(
                config, new TestConnectionFactory(config, monitor), monitor);
    }

    /**
     * Keys strictly inside a partition's token range must be routed to the
     * host owning that range (three-node ring over the full token space).
     */
    @Test
    public void testTokenMappingForMidRangeTokens() throws ConnectionException {
        ConnectionPool<TestClient> cp = createPool();

        List<Host> ring1 = makeRing(3, 1, 1);
        LOG.info("testTokenMappingForMidRangeTokens\n" + TestTokenRange.getRingDetails(ring1));
        cp.setHosts(ring1);

        // Each node owns one third of the token space.
        BigInteger threeNodeRingIncrement = TokenGenerator.MAXIMUM.divide(new BigInteger("3"));
        RetryPolicy retryPolicy = new RunOnce();

        BigInteger key = BigInteger.ZERO;
        LOG.info(key.toString() + " 127.0.1.2");
        OperationResult<String> result = cp.executeWithFailover(new TokenTestOperation(key), retryPolicy);
        assertNotNull(result);
        assertEquals("127.0.1.2",result.getHost().getIpAddress());

        key = BigInteger.ONE;
        LOG.info(key.toString() + " 127.0.1.0");
        result = cp.executeWithFailover(new TokenTestOperation(key), retryPolicy);
        assertNotNull(result);
        assertEquals("127.0.1.0",result.getHost().getIpAddress());

        key = threeNodeRingIncrement.subtract(BigInteger.ONE);
        LOG.info(key.toString() + " 127.0.1.0");
        result = cp.executeWithFailover(new TokenTestOperation(key), retryPolicy);
        assertNotNull(result);
        assertEquals("127.0.1.0",result.getHost().getIpAddress());

        key = threeNodeRingIncrement;
        LOG.info(key.toString() + " 127.0.1.0");
        result = cp.executeWithFailover(new TokenTestOperation(key), retryPolicy);
        assertNotNull(result);
        assertEquals("127.0.1.0",result.getHost().getIpAddress());

        key = threeNodeRingIncrement.add(BigInteger.ONE);
        LOG.info(key.toString() + " 127.0.1.1");
        result = cp.executeWithFailover(new TokenTestOperation(key), retryPolicy);
        assertNotNull(result);
        assertEquals("127.0.1.1",result.getHost().getIpAddress());

        key = threeNodeRingIncrement.add(threeNodeRingIncrement).add(BigInteger.ONE);
        LOG.info(key.toString() + " 127.0.1.1");
        result = cp.executeWithFailover(new TokenTestOperation(key), retryPolicy);
        assertNotNull(result);
        assertEquals("127.0.1.1",result.getHost().getIpAddress());

        key = threeNodeRingIncrement.add(threeNodeRingIncrement).add(BigInteger.ONE).add(BigInteger.ONE);
        LOG.info(key.toString() + " 127.0.1.2");
        result = cp.executeWithFailover(new TokenTestOperation(key), retryPolicy);
        assertNotNull(result);
        assertEquals("127.0.1.2",result.getHost().getIpAddress());
    }

    /**
     * Keys that fall exactly on partition boundaries must be routed to the
     * expected owner.
     */
    @Test
    public void testTokenMappingForOrdinalTokens() throws ConnectionException {
        ConnectionPool<TestClient> cp = createPool();

        // Builds a three-node ring over the token range [0, 1800] with
        // 600-token increments (the original comment wrongly said "two
        // nodes"). The assertions below pin the resulting routing:
        //   token 0    -> 127.0.1.2 (ownership wraps around the top of the ring)
        //   token 600  -> 127.0.1.0
        //   token 1200 -> 127.0.1.1
        //   token 1800 -> 127.0.1.2
        List<Host> ring1 = TestTokenRange.makeRing(3, 1, 1, BigInteger.ZERO, new BigInteger("1800"));
        LOG.info("testTokenMappingForOrdinalTokens\n" + TestTokenRange.getRingDetails(ring1));
        cp.setHosts(ring1);

        BigInteger threeNodeRingIncrement = new BigInteger("600");

        Operation<TestClient, String> firstHostOp = new TokenTestOperation(BigInteger.ZERO);
        Operation<TestClient, String> secondHostOp = new TokenTestOperation(threeNodeRingIncrement);
        Operation<TestClient, String> thirdHostOp = new TokenTestOperation(threeNodeRingIncrement.multiply(new BigInteger("2")));
        Operation<TestClient, String> maxTokenHostOp = new TokenTestOperation(threeNodeRingIncrement.multiply(new BigInteger("3")));

        LOG.info(BigIntegerSerializer.get().fromByteBuffer(firstHostOp.getRowKey()).toString());
        LOG.info(BigIntegerSerializer.get().fromByteBuffer(secondHostOp.getRowKey()).toString());
        LOG.info(BigIntegerSerializer.get().fromByteBuffer(thirdHostOp.getRowKey()).toString());
        LOG.info(BigIntegerSerializer.get().fromByteBuffer(maxTokenHostOp.getRowKey()).toString());

        RetryPolicy retryPolicy = new RunOnce();

        OperationResult<String> result = cp.executeWithFailover(firstHostOp, retryPolicy);
        assertNotNull(result);
        assertEquals("127.0.1.2",result.getHost().getIpAddress());

        result = cp.executeWithFailover(secondHostOp, retryPolicy);
        assertNotNull(result);
        assertEquals("127.0.1.0",result.getHost().getIpAddress());

        result = cp.executeWithFailover(thirdHostOp, retryPolicy);
        assertNotNull(result);
        assertEquals("127.0.1.1",result.getHost().getIpAddress());

        result = cp.executeWithFailover(maxTokenHostOp, retryPolicy);
        assertNotNull(result);
        assertEquals("127.0.1.2",result.getHost().getIpAddress());
    }

    /**
     * A token in the wrapped portion of the ring (past the highest token)
     * must be routed to the host owning the wrapping range.
     */
    @Test
    public void testTokenToHostMappingInWrappedRange() throws ConnectionException {
        ConnectionPool<TestClient> cp = createPool();

        // Two-node ring over the token range [10, 1010]:
        //   node1 - ip = 127.0.1.0
        //   node2 - ip = 127.0.1.1
        // Token 0 lies in the wrapped portion of the ring which, per the
        // assertion below, is owned by node2.
        List<Host> ring1 = TestTokenRange.makeRing(2, 1, 1, BigInteger.TEN, new BigInteger("1010"));
        cp.setHosts(ring1);
        LOG.info("testTokenToHostMappingInWrappedRange\n" + TestTokenRange.getRingDetails(ring1));

        Operation<TestClient, String> op = new TokenTestOperation(BigInteger.ZERO);

        RetryPolicy retryPolicy = new RunOnce();

        OperationResult<String> result = cp.executeWithFailover(op, retryPolicy);
        assertNotNull(result);
        // since token ownership wraps node2 should own token 0
        assertEquals("127.0.1.1",result.getHost().getIpAddress());
    }

    /**
     * Tokens beyond the ring's maximum must still be served: they fall back
     * to the host of the first partition in the ring.
     */
    @Test
    public void testTokenToHostMappingOutsideOfRing() throws ConnectionException {
        ConnectionPool<TestClient> cp = createPool();

        // Two-node ring over the token range [0, 1000]:
        //   node1 - ip = 127.0.1.0, token ownership range = (500 , 0]
        //   node2 - ip = 127.0.1.1, token ownership range = (0 , 500]
        // (the original comment mislabeled both nodes as "node1")
        List<Host> ring1 = TestTokenRange.makeRing(2, 1, 1, BigInteger.ZERO, new BigInteger("1000"));
        cp.setHosts(ring1);
        LOG.info("testTokenToHostMappingOutsideOfRing\n" + TestTokenRange.getRingDetails(ring1));

        Operation<TestClient, String> op = new TokenTestOperation(new BigInteger("1250"));

        RetryPolicy retryPolicy = new RunOnce();

        OperationResult<String> result = cp.executeWithFailover(op, retryPolicy);
        assertNotNull(result);
        // requests for tokens outside the ring will be serviced by host associated with
        // 1st partition in ring
        assertEquals("127.0.1.1",result.getHost().getIpAddress());
    }

    /** Swapping one ring for another must be accepted without error. */
    @Test
    public void changeRingTest() {
        ConnectionPool<TestClient> cp = createPool();

        List<Host> ring1 = makeRing(6, 3, 1);
        List<Host> ring2 = makeRing(6, 3, 2);

        cp.setHosts(ring1);
        List<HostConnectionPool<TestClient>> hosts1 = cp.getActivePools();
        cp.setHosts(ring2);
        List<HostConnectionPool<TestClient>> hosts2 = cp.getActivePools();

        LOG.info(hosts1.toString());
        LOG.info(hosts2.toString());
    }

    /** Builds a ring spanning the full token space of the test generator. */
    private List<Host> makeRing(int nHosts, int replication_factor, int id) {
        return TestTokenRange.makeRing(nHosts,replication_factor,id,TokenGenerator.MINIMUM,TokenGenerator.MAXIMUM);
    }
}
| 7,578 |
0 | Create_ds/astyanax/astyanax-test/src/test/java/com/netflix/astyanax/connectionpool | Create_ds/astyanax/astyanax-test/src/test/java/com/netflix/astyanax/connectionpool/impl/RingDescribeNodeAutoDiscoveryImplTest.java | /*******************************************************************************
* Copyright 2011 Netflix
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
******************************************************************************/
package com.netflix.astyanax.connectionpool.impl;
import org.junit.Ignore;
import org.junit.Test;
// Placeholder suite: the whole class is @Ignore'd and the single test body is
// entirely commented out, so nothing in this file currently executes.
@Ignore
public class RingDescribeNodeAutoDiscoveryImplTest {
    /**
     * Intended to verify node discovery driven by a ring describe. The
     * original implementation is preserved below as commented-out code; it is
     * not compiled or run.
     */
    @Test
    public void testRingDescribe() {
        // String keyspaceName = "KEYSPACE";
        // String clusterName = "CLUSTER";
        //
        // final MockKeyspace keyspace
        // = new MockKeyspace(keyspaceName);
        // keyspace.start();
        //
        // ConnectionPoolConfigurationImpl config
        // = new ConnectionPoolConfigurationImpl(clusterName, keyspaceName);
        //
        // MockConnectionPool pool
        // = new MockConnectionPool();
        //
        // NodeDiscoveryImpl discovery
        // = new NodeDiscoveryImpl("TEST", 0, 30000, new
        // Supplier<List<TokenRange>>() {
        // @Override
        // public List<TokenRange> get() {
        // try {
        // return keyspace.describeRing();
        // } catch (ConnectionException e) {
        // return Lists.newArrayList();
        // }
        // }
        //
        // }, pool);
        //
        // List<TokenRange> tokens = new ArrayList<TokenRange>();
        // TokenRange range1 = new MockTokenRange("0", "1",
        // Arrays.asList("127.0.0.1", "10.0.0.2"));
        // TokenRange range2 = new MockTokenRange("2", "3",
        // Arrays.asList("10.0.0.2", "127.0.0.3"));
        // tokens.addAll(Arrays.asList(range1, range2));
        // keyspace.setTokenRange(tokens);
        //
        // Assert.assertNull(pool.getHosts());
        // discovery.start();
        //
        // Map<BigInteger, List<Host>> ring = pool.getHosts();
        //
        // Assert.assertEquals(ring.size(), 2);
        // Assert.assertNotNull(ring.get("0"));
        // Assert.assertNotNull(ring.get("2"));
        //
        // discovery.shutdown();
    }
}
| 7,579 |
0 | Create_ds/astyanax/astyanax-test/src/test/java/com/netflix/astyanax/connectionpool | Create_ds/astyanax/astyanax-test/src/test/java/com/netflix/astyanax/connectionpool/impl/BaseConnectionPoolTest.java | /*******************************************************************************
* Copyright 2011 Netflix
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
******************************************************************************/
package com.netflix.astyanax.connectionpool.impl;
import java.util.ArrayList;
import java.util.List;
import com.google.common.collect.Lists;
import com.netflix.astyanax.connectionpool.ConnectionPool;
import com.netflix.astyanax.connectionpool.ConnectionPoolConfiguration;
import com.netflix.astyanax.connectionpool.Host;
import com.netflix.astyanax.connectionpool.Operation;
import com.netflix.astyanax.connectionpool.OperationResult;
import com.netflix.astyanax.connectionpool.exceptions.ConnectionException;
import com.netflix.astyanax.connectionpool.exceptions.NoAvailableHostsException;
import com.netflix.astyanax.connectionpool.exceptions.OperationException;
import com.netflix.astyanax.connectionpool.exceptions.PoolTimeoutException;
import com.netflix.astyanax.connectionpool.exceptions.TransportException;
import com.netflix.astyanax.retry.ConstantBackoff;
import com.netflix.astyanax.retry.RetryPolicy;
import com.netflix.astyanax.retry.RunOnce;
import com.netflix.astyanax.test.TestClient;
import com.netflix.astyanax.test.TestConnectionFactory;
import com.netflix.astyanax.test.TestConstants;
import com.netflix.astyanax.test.TestHostType;
import com.netflix.astyanax.test.TestOperation;
import org.apache.log4j.Logger;
import org.junit.Assert;
import org.junit.Ignore;
import org.junit.Test;
import com.netflix.astyanax.connectionpool.ConnectionContext;
@Ignore
public abstract class BaseConnectionPoolTest {
    // The logger was previously bound to RoundRobinConnectionPoolImplTest.class
    // (a copy/paste slip); bind it to this class so log output from every
    // subclass run is attributed correctly.
    private static final Logger LOG = Logger
            .getLogger(BaseConnectionPoolTest.class);

    /** No-op operation used by tests that only exercise connection handling. */
    private static final Operation<TestClient, String> dummyOperation = new TestOperation();

    // private static ConnectionPoolConfigurationImpl config;

    // @BeforeClass
    public static void setup() {
        // config = new
        // ConnectionPoolConfigurationImpl(MockConstants.CLUSTER_NAME,
        // MockConstants.KEYSPACE_NAME);
        // config.setConnectionPoolFactory(ConnectionPoolType.ROUND_ROBIN);
        // config.setMaxTimeoutWhenExhausted(0);
        // config.setMaxFailoverCount(-1);
    }

    /** Subclasses supply the pool implementation under test. */
    protected abstract ConnectionPool<TestClient> createPool();

    /** Happy path: operations against healthy hosts must succeed. */
    @Test
    public void testAll() {
        ConnectionPool<TestClient> pool = createPool();

        for (int i = 0; i < 5; i++) {
            pool.addHost(
                    new Host("127.0." + i + ".0", TestHostType.GOOD_FAST
                            .ordinal()), true);
            // pool.addHost(new Host("127.0." + i + ".1",
            // MockHostType.LOST_CONNECTION.ordinal()));
            // pool.addHost(new Host("127.0." + i + ".1",
            // MockHostType.CONNECT_TIMEOUT.ordinal()));
            // pool.addHost(new Host("127.0." + i + ".1",
            // MockHostType.ALWAYS_DOWN.ordinal()));
            // pool.addHost(new Host("127.0." + i + ".1",
            // MockHostType.THRASHING_TIMEOUT.ordinal()));
            // pool.addHost(new Host("127.0." + i + ".1",
            // MockHostType.CONNECT_BAD_REQUEST_EXCEPTION.ordinal()));
        }

        for (int i = 0; i < 10; i++) {
            try {
                OperationResult<String> result = pool.executeWithFailover(
                        dummyOperation, RunOnce.get());
                LOG.info(result.getHost());
            } catch (OperationException e) {
                LOG.info(e.getMessage());
                Assert.fail(e.getMessage());
            } catch (ConnectionException e) {
                LOG.info(e.getCause());
                Assert.fail(e.getMessage());
            }
        }
    }

    /**
     * Every operation fails with a transport error, as would happen while a
     * cluster is restarted underneath the pool.
     */
    @Test
    public void testRollingRestart() {
        ConnectionPool<TestClient> pool = createPool();

        for (int i = 0; i < 5; i++) {
            pool.addHost(new Host("127.0." + i + ".0",
                    TestHostType.GOOD_FAST.ordinal()), true);
        }

        for (int i = 0; i < 5; i++) {
            try {
                pool.executeWithFailover(
                        new TestOperation() {
                            @Override
                            public String execute(TestClient client, ConnectionContext context)
                                    throws ConnectionException,
                                    OperationException {
                                throw new TransportException("He's dead jim");
                            }
                        }, RunOnce.get());
                Assert.fail();
            } catch (Exception e) {
                // Expected: each attempt fails with the TransportException above.
            }
        }
    }

    /** A permanently-down host must surface a connection error. */
    @Test
    public void testAlwaysDown() {
        ConnectionPool<TestClient> pool = createPool();

        pool.addHost(new Host("127.0.0.1", TestHostType.ALWAYS_DOWN.ordinal()),
                true);

        try {
            pool.executeWithFailover(dummyOperation, RunOnce.get());
            Assert.fail();
        } catch (OperationException e) {
            LOG.info(e.getMessage());
        } catch (ConnectionException e) {
            LOG.info(e.getMessage());
        }
    }

    /** A host that times out on connect must surface a connection error. */
    @Test
    public void testConnectTimeout() {
        ConnectionPool<TestClient> pool = createPool();

        pool.addHost(
                new Host("127.0.0.1", TestHostType.CONNECT_TIMEOUT.ordinal()),
                true);

        try {
            pool.executeWithFailover(dummyOperation, RunOnce.get());
            Assert.fail();
        } catch (OperationException e) {
            LOG.info(e.getMessage());
        } catch (ConnectionException e) {
            LOG.info(e.getMessage());
        }
    }

    /** A host whose operations time out must surface a connection error. */
    @Test
    public void testOperationTimeoutTimeout() {
        ConnectionPool<TestClient> pool = createPool();

        pool.addHost(
                new Host("127.0.0.1", TestHostType.OPERATION_TIMEOUT.ordinal()),
                true);

        try {
            pool.executeWithFailover(dummyOperation, RunOnce.get());
            Assert.fail();
        } catch (OperationException e) {
            LOG.info(e.getMessage());
        } catch (ConnectionException e) {
            LOG.info(e.getMessage());
        }
    }

    /** A host with socket timeouts must surface a connection error. */
    @Test
    public void testTimeoutTimeout() {
        ConnectionPool<TestClient> pool = createPool();

        pool.addHost(
                new Host("127.0.0.1", TestHostType.SOCKET_TIMEOUT.ordinal()),
                true);

        try {
            pool.executeWithFailover(dummyOperation, RunOnce.get());
            Assert.fail();
        } catch (OperationException e) {
            LOG.info(e.getMessage());
        } catch (ConnectionException e) {
            LOG.info(e.getMessage());
        }
    }

    /** A bad-request failure during connect must surface a connection error. */
    @Test
    public void testConnectBadRequest() {
        ConnectionPool<TestClient> pool = createPool();

        pool.addHost(new Host("127.0.0.1",
                TestHostType.CONNECT_BAD_REQUEST_EXCEPTION.ordinal()), true);

        try {
            pool.executeWithFailover(dummyOperation, RunOnce.get());
            Assert.fail();
        } catch (OperationException e) {
            LOG.info(e.getMessage());
        } catch (ConnectionException e) {
            LOG.info(e.getMessage());
        }
    }

    /** A thrashing host must not wedge the pool; each attempt is tolerated. */
    @Test
    public void testThrashingTimeout() {
        ConnectionPool<TestClient> pool = createPool();

        pool.addHost(
                new Host("127.0.0.1", TestHostType.THRASHING_TIMEOUT.ordinal()),
                true);

        for (int i = 0; i < 10; i++) {
            try {
                think(1);
                pool.executeWithFailover(dummyOperation, RunOnce.get());
            } catch (OperationException e) {
                LOG.info(e.getMessage());
            } catch (ConnectionException e) {
                LOG.info(e.getMessage());
            }
        }
    }

    // NOTE(review): method name says "fast" but the host type is GOOD_SLOW —
    // confirm whether the name or the host type is the intended one.
    @Test
    public void testGoodFast() {
        ConnectionPool<TestClient> pool = createPool();

        pool.addHost(new Host("127.0.0.1", TestHostType.GOOD_SLOW.ordinal()),
                true);

        for (int i = 0; i < 10; i++) {
            try {
                pool.executeWithFailover(dummyOperation, RunOnce.get());
                LOG.info("Success");
            } catch (OperationException e) {
                LOG.info(e.getMessage());
            } catch (ConnectionException e) {
                LOG.info(e.getMessage());
            }
        }
    }

    /** Constructing a pool from an uninitialized default config must not throw. */
    @Test
    public void testDefaultConfig() {
        ConnectionPoolConfiguration config = new ConnectionPoolConfigurationImpl(
                TestConstants.CLUSTER_NAME + "_" + TestConstants.KEYSPACE_NAME);
        CountingConnectionPoolMonitor monitor = new CountingConnectionPoolMonitor();
        try {
            // Only construction is under test here; the pool itself is unused.
            new RoundRobinConnectionPoolImpl<TestClient>(
                    config, new TestConnectionFactory(config, monitor), monitor);
        } catch (Exception e) {
            e.printStackTrace();
            Assert.fail(e.getMessage());
        }
    }

    /**
     * setHosts() must fully replace the ring: old hosts are evicted, new
     * hosts become usable, and an empty ring yields NoAvailableHostsException.
     */
    @Test
    public void testRestartedCluster() {
        ConnectionPool<TestClient> pool = createPool();

        Host host1 = new Host("127.0.0.1", TestHostType.GOOD_FAST.ordinal());
        List<Host> ring1 = Lists.newArrayList(host1);

        Host host2 = new Host("127.0.0.2", TestHostType.GOOD_FAST.ordinal());
        List<Host> ring2 = Lists.newArrayList(host2);

        List<Host> ring3 = Lists.newArrayList();

        pool.setHosts(ring1);
        Assert.assertTrue (pool.hasHost (host1));
        Assert.assertTrue (pool.isHostUp(host1));
        Assert.assertFalse(pool.hasHost (host2));
        Assert.assertFalse(pool.isHostUp(host2));

        try {
            OperationResult<String> result = pool.executeWithFailover(
                    dummyOperation, RunOnce.get());
            Assert.assertEquals(host1, result.getHost());
        } catch (Exception e) {
            e.printStackTrace();
            Assert.fail(e.getMessage());
        }

        pool.setHosts(ring3);
        Assert.assertFalse(pool.hasHost(host1));
        Assert.assertFalse(pool.hasHost(host2));

        try {
            // Removed a duplicate executeWithFailover call that was dead code:
            // the first call already throws on an empty ring.
            pool.executeWithFailover(dummyOperation, RunOnce.get());
            Assert.fail();
        } catch (NoAvailableHostsException e) {
            // Expected: the ring is empty.
        } catch (Exception e) {
            e.printStackTrace();
            Assert.fail(e.getMessage());
        }

        pool.setHosts(ring2);
        Assert.assertTrue(pool.hasHost(host2));
        Assert.assertTrue(pool.isHostUp(host2));
        Assert.assertFalse(pool.hasHost(host1));
        Assert.assertFalse(pool.isHostUp(host1));

        try {
            OperationResult<String> result = pool.executeWithFailover(
                    dummyOperation, RunOnce.get());
            Assert.assertEquals(host2, result.getHost());
        } catch (Exception e) {
            e.printStackTrace();
            Assert.fail(e.getMessage());
        }
    }

    /**
     * A host that fails its first connect should recover and serve operations
     * after the retry backoff has elapsed.
     */
    @Test
    @Ignore
    public void testAddHostThatIsDown() {
        /*
         * ConnectionPoolConfigurationImpl config = new
         * ConnectionPoolConfigurationImpl(MockConstants.CLUSTER_NAME,
         * MockConstants.KEYSPACE_NAME); config.setRetryBackoffStrategy(new
         * FixedRetryBackoffStrategy(100, 0));
         *
         * ConnectionPool<MockClient> pool = new
         * RoundRobinConnectionPoolImpl<MockClient>(config, new
         * MockConnectionFactory(config));
         */
        ConnectionPool<TestClient> pool = createPool();

        Host host1 = new Host("127.0.0.1",
                TestHostType.CONNECT_FAIL_FIRST.ordinal());
        List<Host> ring1 = Lists.newArrayList(host1);

        pool.setHosts(ring1);
        Assert.assertTrue(pool.hasHost(host1));
        Assert.assertTrue(pool.isHostUp(host1));

        try {
            pool.executeWithFailover(dummyOperation, RunOnce.get());
            Assert.fail();
        } catch (PoolTimeoutException e) {
            // Expected: first connect fails, so no connection is available yet.
        } catch (ConnectionException e) {
            e.printStackTrace();
            Assert.fail(e.getMessage());
        }

        think(500);

        Assert.assertTrue(pool.hasHost(host1));
        Assert.assertTrue(pool.isHostUp(host1));

        try {
            pool.executeWithFailover(dummyOperation, RunOnce.get());
        } catch (Exception e) {
            e.printStackTrace();
            Assert.fail(e.getMessage());
        }
    }

    /** An aborted connection must surface a connection error. */
    @Test
    @Ignore
    public void testConnectionAborted() {
        ConnectionPool<TestClient> pool = createPool();

        Host host = new Host("127.0.0.1",
                TestHostType.ABORTED_CONNECTION.ordinal());
        pool.addHost(host, true);

        try {
            pool.executeWithFailover(dummyOperation, RunOnce.get());
            Assert.fail();
        } catch (ConnectionException e) {
            // Expected: the host aborts the connection.
        }
    }

    /**
     * An empty pool must exhaust the retry policy: RunOnce attempts once,
     * ConstantBackoff(1, 10) attempts its full budget of ten.
     */
    @Test
    public void testRetryEmptyPool() {
        ConnectionPool<TestClient> pool = createPool();

        RetryPolicy retry = new RunOnce();
        try {
            pool.executeWithFailover(dummyOperation, retry);
            Assert.fail();
        } catch (ConnectionException e) {
            Assert.assertEquals(1, retry.getAttemptCount());
            LOG.error(e);
        }

        retry = new ConstantBackoff(1, 10);
        try {
            pool.executeWithFailover(dummyOperation, retry);
            Assert.fail();
        } catch (ConnectionException e) {
            Assert.assertEquals(10, retry.getAttemptCount());
            LOG.info(e);
        }
    }

    /** Sleeps for {@code timeout} ms; restores the interrupt flag if woken. */
    protected void think(long timeout) {
        try {
            Thread.sleep(timeout);
        } catch (InterruptedException e) {
            // Preserve interrupt status instead of silently swallowing it.
            Thread.currentThread().interrupt();
        }
    }
}
| 7,580 |
0 | Create_ds/astyanax/astyanax-test/src/test/java/com/netflix/astyanax | Create_ds/astyanax/astyanax-test/src/test/java/com/netflix/astyanax/query/PreparedQueryTests.java | package com.netflix.astyanax.query;
import static org.junit.Assert.*;
import java.nio.ByteBuffer;
import java.util.LinkedList;
import java.util.List;
import org.junit.Test;
import com.google.common.util.concurrent.ListenableFuture;
import com.netflix.astyanax.connectionpool.OperationResult;
import com.netflix.astyanax.connectionpool.exceptions.ConnectionException;
import com.netflix.astyanax.model.CqlResult;
public class PreparedQueryTests {
    /**
     * Verifies that a freshly created prepared query holds no bind values and
     * that values handed to withValues() are stored and returned unchanged.
     */
    @Test
    public void testAdditionOfValues(){
        // Minimal concrete subclass: execution is irrelevant to this test, so
        // both execute paths simply refuse to run.
        AbstractPreparedCqlQuery<?,?> query = new AbstractPreparedCqlQuery<Object, Object>() {
            @Override
            public OperationResult<CqlResult<Object, Object>> execute()
                    throws ConnectionException {
                throw new UnsupportedOperationException();
            }
            @Override
            public ListenableFuture<OperationResult<CqlResult<Object, Object>>> executeAsync()
                    throws ConnectionException {
                throw new UnsupportedOperationException();
            }
        };
        assertTrue("New query object should contain no values", query.getValues().isEmpty());

        // Two arbitrary payloads wrapped as ByteBuffers.
        List<ByteBuffer> expected = new LinkedList<ByteBuffer>();
        for (String word : new String[] { "hello", "world" }) {
            expected.add(ByteBuffer.wrap(word.getBytes()));
        }
        query.withValues(expected);

        List<ByteBuffer> actual = query.getValues();
        String errorMessage = "failed to add values to the query object";
        assertEquals(errorMessage, expected.size(), actual.size());
        // Compare the stored buffers element by element.
        int idx = 0;
        while (idx < actual.size()) {
            assertArrayEquals(errorMessage, expected.get(idx).array(), actual.get(idx).array());
            idx++;
        }
    }
}
| 7,581 |
0 | Create_ds/astyanax/astyanax-test/src/main/java/com/netflix/astyanax | Create_ds/astyanax/astyanax-test/src/main/java/com/netflix/astyanax/test/EmbeddedCassandra.java | /*******************************************************************************
* Copyright 2011 Netflix
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
******************************************************************************/
package com.netflix.astyanax.test;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.net.URL;
import java.nio.charset.Charset;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import org.apache.cassandra.service.CassandraDaemon;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.base.Preconditions;
import com.google.common.io.Closeables;
import com.google.common.io.Files;
import com.google.common.io.Resources;
import com.google.common.util.concurrent.ThreadFactoryBuilder;
/**
* Use EmbeddedCassandraFactory
* @author elandau
*
*/
/**
 * Embedded Cassandra daemon for integration tests.
 * <p>
 * Writes a cassandra.yaml derived from the bundled template into the data
 * directory, points the {@code cassandra.config} system property at it, and
 * boots a {@link CassandraDaemon} on a dedicated daemon thread.
 * <p>
 * Use EmbeddedCassandraFactory instead.
 *
 * @author elandau
 */
public class EmbeddedCassandra {
    private static final Logger LOG = LoggerFactory.getLogger(EmbeddedCassandra.class);

    public static final int DEFAULT_PORT = 9160;
    public static final int DEFAULT_STORAGE_PORT = 7000;

    // Single daemon thread that hosts the Cassandra daemon so the JVM can
    // exit even if stop() is never called.
    private final ExecutorService service = Executors.newSingleThreadExecutor(
            new ThreadFactoryBuilder()
                    .setDaemon(true)
                    .setNameFormat("EmbeddedCassandra-%d")
                    .build());

    private final CassandraDaemon cassandra;
    private final File dataDir;

    public EmbeddedCassandra() throws IOException {
        this(createTempDir(), "TestCluster", DEFAULT_PORT, DEFAULT_STORAGE_PORT);
    }

    public EmbeddedCassandra(String dataDir) throws IOException {
        this(new File(dataDir), "TestCluster", DEFAULT_PORT, DEFAULT_STORAGE_PORT);
    }

    public EmbeddedCassandra(File dataDir) throws IOException {
        this(dataDir, "TestCluster", DEFAULT_PORT, DEFAULT_STORAGE_PORT);
    }

    // Creates a temp data directory that is removed on JVM exit.
    private static File createTempDir() {
        File tempDir = Files.createTempDir();
        tempDir.deleteOnExit();
        return tempDir;
    }

    /**
     * Boots an embedded Cassandra instance rooted at the given data directory.
     *
     * @param dataDir     directory for config and data files (created if absent)
     * @param clusterName cluster name substituted into the config template
     * @param port        client (thrift) port substituted into the template
     * @param storagePort inter-node storage port substituted into the template
     * @throws IOException if the config cannot be written or daemon init fails
     */
    public EmbeddedCassandra(File dataDir, String clusterName, int port, int storagePort) throws IOException {
        LOG.info("Starting cassandra in dir " + dataDir);
        this.dataDir = dataDir;
        dataDir.mkdirs();

        URL templateUrl = EmbeddedCassandra.class.getClassLoader().getResource("cassandra2-template.yaml");
        Preconditions.checkNotNull(templateUrl, "Cassandra config template is null");

        // Substitute the placeholders in the bundled template with this
        // instance's directory, ports and cluster name.
        String baseFile = Resources.toString(templateUrl, Charset.defaultCharset());
        String newFile = baseFile.replace("$DIR$", dataDir.getPath());
        newFile = newFile.replace("$PORT$", Integer.toString(port));
        newFile = newFile.replace("$STORAGE_PORT$", Integer.toString(storagePort));
        newFile = newFile.replace("$CLUSTER$", clusterName);

        File configFile = new File(dataDir, "cassandra.yaml");
        Files.write(newFile, configFile, Charset.defaultCharset());

        LOG.info("Cassandra config file: " + configFile.getPath());
        System.setProperty("cassandra.config", "file:" + configFile.getPath());

        try {
            cassandra = new CassandraDaemon();
            cassandra.init(null);
        } catch (IOException e) {
            LOG.error("Error initializing embedded cassandra", e);
            throw e;
        }
        // Note: the original code closed an InputStream local that was never
        // assigned; that dead variable and its finally block were removed.

        LOG.info("Started cassandra deamon");
    }

    /**
     * Starts the daemon on the background executor and blocks until startup
     * has completed (or failed).
     */
    public void start() {
        Future<Object> future = service.submit(new Callable<Object>() {
            @Override
            public Object call() throws Exception {
                try {
                    cassandra.start();
                } catch (Exception e) {
                    e.printStackTrace();
                }
                return null;
            }
        });

        try {
            future.get();
        } catch (InterruptedException e) {
            // Restore interrupt status; startup may still complete in the
            // background on the executor thread.
            Thread.currentThread().interrupt();
        } catch (ExecutionException e) {
            LOG.error("Error starting embedded cassandra", e);
            throw new RuntimeException(e);
        }
    }

    /** Stops the daemon and shuts down its hosting executor. */
    public void stop() {
        service.shutdownNow();
        cassandra.deactivate();
    }
}
| 7,582 |
0 | Create_ds/astyanax/astyanax-test/src/main/java/com/netflix/astyanax/cql | Create_ds/astyanax/astyanax-test/src/main/java/com/netflix/astyanax/cql/test/ColumnTimestampAndTTLTests.java | /**
* Copyright 2013 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.astyanax.cql.test;
import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.Test;
import com.netflix.astyanax.MutationBatch;
import com.netflix.astyanax.model.ColumnFamily;
import com.netflix.astyanax.model.ColumnList;
import com.netflix.astyanax.serializers.LongSerializer;
import com.netflix.astyanax.serializers.StringSerializer;
/**
 * Integration tests for explicit column timestamps (write-time precedence)
 * and column TTL expiry. Requires the shared test keyspace from
 * {@link KeyspaceTests}.
 */
public class ColumnTimestampAndTTLTests extends KeyspaceTests {
// Long-keyed CF with long column names/values for the timestamp tests.
private static ColumnFamily<Long, Long> CF_COL_TIMESTAMP = ColumnFamily
.newColumnFamily(
"columntimestamps",
LongSerializer.get(),
LongSerializer.get(),
LongSerializer.get());
// String CF used for the TTL expiry test.
private static ColumnFamily<String, String> CF_TTL = ColumnFamily
.newColumnFamily(
"columnttls",
StringSerializer.get(),
StringSerializer.get());
@BeforeClass
public static void init() throws Exception {
initContext();
keyspace.createColumnFamily(CF_COL_TIMESTAMP, null);
keyspace.createColumnFamily(CF_TTL, null);
CF_COL_TIMESTAMP.describe(keyspace);
CF_TTL.describe(keyspace);
}
@AfterClass
public static void teardown() throws Exception {
keyspace.dropColumnFamily(CF_COL_TIMESTAMP);
keyspace.dropColumnFamily(CF_TTL);
}
// Verifies last-write-wins semantics: a delete carrying a timestamp OLDER
// than the column's write timestamp is ignored; a delete with a NEWER
// timestamp removes the column.
@Test
public void testColumnTimestamps() throws Exception {
CF_COL_TIMESTAMP.describe(keyspace);
MutationBatch mb = keyspace.prepareMutationBatch();
// Write columns 1 and 2 with explicit timestamps 1 and 10.
mb.withRow(CF_COL_TIMESTAMP, 1L)
.setTimestamp(1).putColumn(1L, 1L)
.setTimestamp(10).putColumn(2L, 2L)
;
mb.execute();
ColumnList<Long> result1 = keyspace.prepareQuery(CF_COL_TIMESTAMP).getRow(1L).execute().getResult();
Assert.assertEquals(2, result1.size());
Assert.assertNotNull(result1.getColumnByName(1L));
Assert.assertNotNull(result1.getColumnByName(2L));
// Delete with timestamps one LESS than each column's write timestamp:
// the deletes must be ignored, and column 3 is added, giving 3 columns.
mb = keyspace.prepareMutationBatch();
mb.withRow(CF_COL_TIMESTAMP, 1L)
.setTimestamp(result1.getColumnByName(1L).getTimestamp()-1)
.deleteColumn(1L)
.setTimestamp(result1.getColumnByName(2L).getTimestamp()-1)
.deleteColumn(2L)
.putEmptyColumn(3L, null);
mb.execute();
result1 = keyspace.prepareQuery(CF_COL_TIMESTAMP).getRow(1L).execute().getResult();
Assert.assertEquals(3, result1.size());
// Delete with timestamps one GREATER: the deletes take effect, leaving
// only column 3.
mb = keyspace.prepareMutationBatch();
mb.withRow(CF_COL_TIMESTAMP, 1L)
.setTimestamp(result1.getColumnByName(1L).getTimestamp()+1)
.deleteColumn(1L)
.setTimestamp(result1.getColumnByName(2L).getTimestamp()+1)
.deleteColumn(2L);
mb.execute();
result1 = keyspace.prepareQuery(CF_COL_TIMESTAMP).getRow(1L).execute().getResult();
Assert.assertEquals(1, result1.size());
}
// Verifies TTL handling: TTL of 0 and null TTL both mean "never expire",
// while TTL=1 second expires before the 2-second sleep elapses.
@Test
public void testTtlValues() throws Exception {
MutationBatch mb = keyspace.prepareMutationBatch();
mb.withRow(CF_TTL, "row")
.putColumn("TTL0", "TTL0", 0)
.putColumn("TTLNULL", "TTLNULL", null)
.putColumn("TTL1", "TTL1", 1);
mb.execute();
// Sleep past the 1-second TTL so TTL1 expires.
Thread.sleep(2000);
ColumnList<String> result = keyspace.prepareQuery(CF_TTL)
.getRow("row")
.execute().getResult();
Assert.assertEquals(2, result.size());
Assert.assertNotNull(result.getColumnByName("TTL0"));
Assert.assertNotNull(result.getColumnByName("TTLNULL"));
}
}
| 7,583 |
0 | Create_ds/astyanax/astyanax-test/src/main/java/com/netflix/astyanax/cql | Create_ds/astyanax/astyanax-test/src/main/java/com/netflix/astyanax/cql/test/RowCopierTests.java | /**
* Copyright 2013 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.astyanax.cql.test;
import junit.framework.Assert;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import com.netflix.astyanax.MutationBatch;
import com.netflix.astyanax.model.Column;
import com.netflix.astyanax.model.ColumnFamily;
import com.netflix.astyanax.model.ColumnList;
import com.netflix.astyanax.serializers.IntegerSerializer;
import com.netflix.astyanax.serializers.StringSerializer;
/**
 * Verifies that a row can be copied from one column family to another via
 * the {@code copyTo} query API.
 */
public class RowCopierTests extends KeyspaceTests {

    private static final ColumnFamily<Integer, String> CF_ROW_COPY =
            new ColumnFamily<Integer, String>("testrowcopy", IntegerSerializer.get(), StringSerializer.get(), IntegerSerializer.get());

    private static final ColumnFamily<Integer, String> CF_ROW_COPY2 =
            new ColumnFamily<Integer, String>("testrowcopy2", IntegerSerializer.get(), StringSerializer.get(), IntegerSerializer.get());

    @BeforeClass
    public static void init() throws Exception {
        initContext();
        keyspace.createColumnFamily(CF_ROW_COPY, null);
        keyspace.createColumnFamily(CF_ROW_COPY2, null);
        CF_ROW_COPY.describe(keyspace);
        CF_ROW_COPY2.describe(keyspace);
    }

    @AfterClass
    public static void tearDown() throws Exception {
        keyspace.dropColumnFamily(CF_ROW_COPY);
        keyspace.dropColumnFamily(CF_ROW_COPY2);
    }

    @Test
    public void runRowCopyTest() throws Exception {
        // Seed the source row with two columns.
        MutationBatch batch = keyspace.prepareMutationBatch();
        batch.withRow(CF_ROW_COPY, 10).putColumn("c1", 1).putColumn("c2", 2);
        batch.execute();

        // Sanity-check the source row before copying.
        ColumnList<String> sourceRow = keyspace.prepareQuery(CF_ROW_COPY).getRow(10).execute().getResult();
        assertExpectedColumns(sourceRow);

        // Copy the row into the second column family under a new key.
        keyspace.prepareQuery(CF_ROW_COPY).getRow(10).copyTo(CF_ROW_COPY2, 11).execute();

        // The copy must contain exactly the same columns and values.
        ColumnList<String> copiedRow = keyspace.prepareQuery(CF_ROW_COPY2).getRow(11).execute().getResult();
        assertExpectedColumns(copiedRow);
    }

    // Asserts that the row holds c1=1 followed by c2=2, in index order.
    private static void assertExpectedColumns(ColumnList<String> row) {
        Column<String> col = row.getColumnByIndex(0);
        Assert.assertEquals("c1", col.getName());
        Assert.assertEquals(1, col.getIntegerValue());

        col = row.getColumnByIndex(1);
        Assert.assertEquals("c2", col.getName());
        Assert.assertEquals(2, col.getIntegerValue());
    }
}
| 7,584 |
0 | Create_ds/astyanax/astyanax-test/src/main/java/com/netflix/astyanax/cql | Create_ds/astyanax/astyanax-test/src/main/java/com/netflix/astyanax/cql/test/SerializerPackageTests.java | /**
* Copyright 2013 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.astyanax.cql.test;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.List;
import com.netflix.astyanax.Serializer;
import com.netflix.astyanax.serializers.SpecificCompositeSerializer;
import com.netflix.astyanax.shaded.org.apache.cassandra.db.marshal.AbstractType;
import com.netflix.astyanax.shaded.org.apache.cassandra.db.marshal.CompositeType;
import com.netflix.astyanax.shaded.org.apache.cassandra.db.marshal.UTF8Type;
import org.apache.log4j.Logger;
import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.Test;
import com.netflix.astyanax.SerializerPackage;
import com.netflix.astyanax.model.ColumnFamily;
import com.netflix.astyanax.model.Composite;
import com.netflix.astyanax.serializers.LongSerializer;
import com.netflix.astyanax.serializers.StringSerializer;
/**
 * Tests that the {@code SerializerPackage} derived for a CQL-created table
 * round-trips keys and composite column names correctly.
 */
public class SerializerPackageTests extends KeyspaceTests {

    private static final Logger LOG = Logger.getLogger(SerializerPackageTests.class);

    public static ColumnFamily<String, Long> CF_SERIALIZER1 = ColumnFamily
            .newColumnFamily(
                    "Serializer1",
                    StringSerializer.get(),
                    LongSerializer.get());

    @BeforeClass
    public static void init() throws Exception {
        initContext();
        // Create the table via raw CQL so the serializer package must be
        // inferred from the live schema rather than the CF definition.
        keyspace.prepareQuery(CF_SERIALIZER1)
                .withCql("CREATE TABLE astyanaxunittests.serializer1 (key text, column1 bigint, value text, PRIMARY KEY (key))")
                .execute();
        CF_SERIALIZER1.describe(keyspace);
    }

    @AfterClass
    public static void tearDown() throws Exception {
        keyspace.dropColumnFamily(CF_SERIALIZER1);
    }

    @Test
    public void testSerializer() throws Exception {
        keyspace.prepareQuery(CF_SERIALIZER1)
                .withCql("select * from astyanaxunittests.serializer1")
                .execute();

        SerializerPackage serializer = keyspace.getSerializerPackage("Serializer1", false);

        // Use the declared logger instead of System.out so output follows
        // the test suite's logging configuration.
        LOG.info("KeySerializer: " + serializer.getKeySerializer());
        LOG.info("ColumnNameSerializer: " + serializer.getColumnNameSerializer());
        LOG.info("ColumnSerializer: " + serializer.getColumnSerializer());
        LOG.info("DefaultValueSerializer: " + serializer.getDefaultValueSerializer());
        LOG.info("ValueSerializer: " + serializer.getValueSerializer());

        // Key serializer must round-trip a plain string key.
        String ss1 = "ss1";
        ByteBuffer bb1 = StringSerializer.get().fromString(ss1);
        String ss1Result = serializer.getKeySerializer().getString(bb1);
        LOG.info("ss1Result: " + ss1Result);
        Assert.assertEquals(ss1, ss1Result);

        // Column-name serializer must decode a composite encoded with a
        // matching single-component UTF8 CompositeType.
        Serializer<?> comp = serializer.getColumnNameSerializer();
        LOG.info(comp.getComparatorType().toString());

        Composite dc = new Composite(ss1);
        List<AbstractType<?>> types =
                new ArrayList<AbstractType<?>>();
        types.add(UTF8Type.instance);
        CompositeType c1 = CompositeType.getInstance(types);
        SpecificCompositeSerializer ccSerializer = new SpecificCompositeSerializer(c1);
        ByteBuffer bb2 = ccSerializer.toByteBuffer(dc);

        Composite c2 = (Composite) serializer.getColumnNameSerializer().fromByteBuffer(bb2);
        ss1Result = (String) c2.get(0);
        Assert.assertEquals(ss1, ss1Result);
    }
}
| 7,585 |
0 | Create_ds/astyanax/astyanax-test/src/main/java/com/netflix/astyanax/cql | Create_ds/astyanax/astyanax-test/src/main/java/com/netflix/astyanax/cql/test/CounterColumnTests.java | /**
* Copyright 2013 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.astyanax.cql.test;
import junit.framework.Assert;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import com.google.common.collect.ImmutableMap;
import com.netflix.astyanax.MutationBatch;
import com.netflix.astyanax.model.Column;
import com.netflix.astyanax.model.ColumnFamily;
import com.netflix.astyanax.serializers.StringSerializer;
/**
 * Integration tests for counter-column increment and delete.
 * NOTE(review): most verification assertions below are commented out —
 * presumably disabled against a backend where counter reads were unreliable;
 * confirm before treating these tests as full coverage.
 */
public class CounterColumnTests extends KeyspaceTests {
// Counter CF: values are validated as CounterColumnType (see init()).
public static ColumnFamily<String, String> CF_COUNTER1 = ColumnFamily
.newColumnFamily(
"Counter1",
StringSerializer.get(),
StringSerializer.get());
@BeforeClass
public static void init() throws Exception {
initContext();
keyspace.createColumnFamily(CF_COUNTER1,ImmutableMap.<String, Object>builder()
.put("default_validation_class", "CounterColumnType")
.build());
CF_COUNTER1.describe(keyspace);
}
@AfterClass
public static void tearDown() throws Exception {
initContext();
keyspace.dropColumnFamily(CF_COUNTER1);
}
// Increments a counter; the read-back checks are currently disabled, so
// this effectively only verifies that the increment mutation executes.
@Test
public void testIncrementCounter() throws Exception {
long baseAmount, incrAmount = 100;
Column<String> column;
column = keyspace.prepareQuery(CF_COUNTER1).getRow("CounterRow1").getColumn("MyCounter").execute().getResult();
//Assert.assertNull(column);
baseAmount = 0;
MutationBatch m = keyspace.prepareMutationBatch();
m.withRow(CF_COUNTER1, "CounterRow1").incrementCounterColumn("MyCounter", incrAmount);
m.execute();
//
// column = keyspace.prepareQuery(CF_COUNTER1).getRow("CounterRow1").getColumn("MyCounter").execute().getResult();
// Assert.assertNotNull(column);
// Assert.assertEquals(baseAmount + incrAmount, column.getLongValue());
//
// m = keyspace.prepareMutationBatch();
// m.withRow(CF_COUNTER1, "CounterRow1").incrementCounterColumn("MyCounter", incrAmount);
// m.execute();
//
// column = keyspace.prepareQuery(CF_COUNTER1).getRow("CounterRow1").getColumn("MyCounter").execute().getResult();
// Assert.assertNotNull(column);
// Assert.assertEquals(column.getLongValue(), baseAmount + 2 * incrAmount);
}
// Increments then (in the disabled section) deletes a counter; like the
// test above, only the initial increment is currently exercised.
@Test
public void testDeleteCounter() throws Exception {
Column<String> column;
String rowKey = "CounterRowDelete1";
String counterName = "MyCounter";
// Increment the column
MutationBatch m = keyspace.prepareMutationBatch();
m.withRow(CF_COUNTER1, rowKey).incrementCounterColumn(counterName, 1);
m.execute();
// // Read back the value
// column = keyspace.prepareQuery(CF_COUNTER1).getRow(rowKey).getColumn(counterName).execute().getResult();
// Assert.assertNotNull(column);
// Assert.assertEquals(column.getLongValue(), 1);
//
// // Delete the column
// keyspace.prepareColumnMutation(CF_COUNTER1, rowKey, counterName).deleteCounterColumn().execute();
//
// // Try to read back
// // This should be non-existent
// column = keyspace.prepareQuery(CF_COUNTER1).getRow(rowKey).getColumn(counterName).execute().getResult();
// Assert.assertNull(column);
}
}
| 7,586 |
0 | Create_ds/astyanax/astyanax-test/src/main/java/com/netflix/astyanax/cql | Create_ds/astyanax/astyanax-test/src/main/java/com/netflix/astyanax/cql/test/RingDescribeTests.java | /**
* Copyright 2013 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.astyanax.cql.test;
import java.util.List;
import org.apache.log4j.Logger;
import org.junit.BeforeClass;
import org.junit.Test;
import com.netflix.astyanax.connectionpool.TokenRange;
/**
 * Smoke test for {@code Keyspace.describeRing()}: fetches the token ranges
 * of the test cluster and logs them.
 */
public class RingDescribeTests extends KeyspaceTests {

    private static final Logger LOG = Logger.getLogger(RingDescribeTests.class);

    @BeforeClass
    public static void init() throws Exception {
        initContext();
    }

    @Test
    public void testDescribeRing() throws Exception {
        // Example output for a single-node cluster:
        // [TokenRangeImpl [startToken=0, endToken=0, endpoints=[127.0.0.1]]]
        List<TokenRange> tokenRanges = keyspace.describeRing();
        String description = tokenRanges.toString();
        LOG.info(description);
    }
}
| 7,587 |
0 | Create_ds/astyanax/astyanax-test/src/main/java/com/netflix/astyanax/cql | Create_ds/astyanax/astyanax-test/src/main/java/com/netflix/astyanax/cql/test/SingleColumnMutationTests.java | /**
* Copyright 2013 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.astyanax.cql.test;
import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.Test;
import com.netflix.astyanax.model.ColumnFamily;
import com.netflix.astyanax.model.ColumnList;
import com.netflix.astyanax.serializers.LongSerializer;
import com.netflix.astyanax.serializers.StringSerializer;
public class SingleColumnMutationTests extends KeyspaceTests {
public static ColumnFamily<Long, String> CF_SINGLE_COLUMN = ColumnFamily
.newColumnFamily(
"cfsinglecolmutation",
LongSerializer.get(),
StringSerializer.get());
@BeforeClass
public static void init() throws Exception {
initContext();
keyspace.createColumnFamily(CF_SINGLE_COLUMN, null);
CF_SINGLE_COLUMN.describe(keyspace);
}
@AfterClass
public static void tearDown() throws Exception {
keyspace.dropColumnFamily(CF_SINGLE_COLUMN);
}
@Test
public void testSingleColumnMutation() throws Exception {
keyspace.prepareColumnMutation(CF_SINGLE_COLUMN, 1L, "1").putValue("11", null).execute();
keyspace.prepareColumnMutation(CF_SINGLE_COLUMN, 1L, "2").putValue("22", null).execute();
keyspace.prepareColumnMutation(CF_SINGLE_COLUMN, 1L, "3").putValue("33", null).execute();
ColumnList<String> result = keyspace.prepareQuery(CF_SINGLE_COLUMN).getRow(1L).execute().getResult();
Assert.assertTrue(3 == result.size());
Assert.assertEquals("11", result.getColumnByName("1").getStringValue());
Assert.assertEquals("22", result.getColumnByName("2").getStringValue());
Assert.assertEquals("33", result.getColumnByName("3").getStringValue());
keyspace.prepareColumnMutation(CF_SINGLE_COLUMN, 1L, "2").putEmptyColumn(null).execute();
keyspace.prepareColumnMutation(CF_SINGLE_COLUMN, 1L, "3").deleteColumn().execute();
result = keyspace.prepareQuery(CF_SINGLE_COLUMN).getRow(1L).execute().getResult();
Assert.assertTrue(2 == result.size());
Assert.assertEquals("11", result.getColumnByName("1").getStringValue());
Assert.assertNull(result.getColumnByName("2").getStringValue());
}
} | 7,588 |
0 | Create_ds/astyanax/astyanax-test/src/main/java/com/netflix/astyanax/cql | Create_ds/astyanax/astyanax-test/src/main/java/com/netflix/astyanax/cql/test/TimeUUIDTests.java | /**
* Copyright 2013 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.astyanax.cql.test;
import java.util.UUID;
import junit.framework.Assert;
import org.apache.log4j.Logger;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import com.netflix.astyanax.MutationBatch;
import com.netflix.astyanax.connectionpool.OperationResult;
import com.netflix.astyanax.connectionpool.exceptions.ConnectionException;
import com.netflix.astyanax.cql.reads.model.CqlRangeBuilder;
import com.netflix.astyanax.model.Column;
import com.netflix.astyanax.model.ColumnFamily;
import com.netflix.astyanax.model.ColumnList;
import com.netflix.astyanax.query.RowQuery;
import com.netflix.astyanax.serializers.StringSerializer;
import com.netflix.astyanax.serializers.TimeUUIDSerializer;
import com.netflix.astyanax.util.RangeBuilder;
import com.netflix.astyanax.util.TimeUUIDUtils;
/**
 * Integration tests for TimeUUID column names: point reads, range reads and
 * paginated range queries over time-ordered UUID columns.
 */
public class TimeUUIDTests extends KeyspaceTests {

    private static final Logger LOG = Logger.getLogger(TimeUUIDTests.class);

    public static ColumnFamily<String, UUID> CF_TIME_UUID = ColumnFamily
            .newColumnFamily(
                    "TimeUUID1",
                    StringSerializer.get(),
                    TimeUUIDSerializer.get());

    @BeforeClass
    public static void init() throws Exception {
        initContext();
        keyspace.createColumnFamily(CF_TIME_UUID, null);
        CF_TIME_UUID.describe(keyspace);
    }

    @AfterClass
    public static void tearDown() throws Exception {
        keyspace.dropColumnFamily(CF_TIME_UUID);
    }

    @Test
    public void testTimeUUID() throws Exception {
        MutationBatch m = keyspace.prepareMutationBatch();

        UUID columnName = TimeUUIDUtils.getUniqueTimeUUIDinMillis();
        long columnTime = TimeUUIDUtils.getTimeFromUUID(columnName);
        String rowKey = "Key1";

        // Start from a clean row.
        m.withRow(CF_TIME_UUID, rowKey).delete();
        m.execute();
        m.discardMutations();

        int startTime = 100;
        int endTime = 200;

        // One marker column plus 100 columns at consecutive millisecond
        // offsets from the marker's timestamp.
        m.withRow(CF_TIME_UUID, rowKey).putColumn(columnName, 42, null);
        for (int i = startTime; i < endTime; i++) {
            LOG.info(TimeUUIDUtils.getTimeUUID(columnTime + i).toString());
            m.withRow(CF_TIME_UUID, rowKey).putColumn(
                    TimeUUIDUtils.getTimeUUID(columnTime + i), i, null);
        }
        m.execute();

        // Point read of the marker column.
        OperationResult<Column<UUID>> result = keyspace
                .prepareQuery(CF_TIME_UUID).getKey(rowKey)
                .getColumn(columnName).execute();
        Assert.assertEquals(columnName, result.getResult().getName());
        Assert.assertTrue(result.getResult().getIntegerValue() == 42);

        // Full-row read must contain at least the 100 offset columns.
        OperationResult<ColumnList<UUID>> result2 = keyspace.prepareQuery(CF_TIME_UUID).getKey(rowKey).execute();
        Assert.assertTrue(result2.getResult().size() >= (endTime - startTime));

        // Limited range read returns exactly the requested page size.
        result2 = keyspace
                .prepareQuery(CF_TIME_UUID)
                .getKey(rowKey)
                .withColumnRange(
                        new RangeBuilder()
                                .setLimit(10)
                                .setStart(TimeUUIDUtils.getTimeUUID(0))
                                .setEnd(TimeUUIDUtils
                                        .getTimeUUID(Long.MAX_VALUE >> 8))
                                .build()).execute();
        Assert.assertEquals(10, result2.getResult().size());

        // Paginate the 100 offset columns in pages of at most 10.
        RowQuery<String, UUID> query = keyspace
                .prepareQuery(CF_TIME_UUID)
                .getKey(rowKey)
                .withColumnRange(
                        new CqlRangeBuilder<UUID>()
                                .setFetchSize(10)
                                .setStart(
                                        TimeUUIDUtils.getTimeUUID(columnTime
                                                + startTime))
                                .setEnd(TimeUUIDUtils.getTimeUUID(columnTime
                                        + endTime)).build()).autoPaginate(true);

        OperationResult<ColumnList<UUID>> result3;
        int pageCount = 0;
        int rowCount = 0;
        try {
            LOG.info("starting pagination");
            while (!(result3 = query.execute()).getResult().isEmpty()) {
                pageCount++;
                Assert.assertTrue(result3.getResult().size() <= 10);
                rowCount += result3.getResult().size();
                LOG.info("==== Block ====");
                for (Column<UUID> column : result3.getResult()) {
                    LOG.info("Column is " + column.getName());
                }
            }
            Assert.assertTrue("pagination complete: " + pageCount, pageCount >= 10);
            Assert.assertTrue("pagination complete ", rowCount <= 100);
        } catch (ConnectionException e) {
            // Log the failure BEFORE failing: the original code called
            // Assert.fail() first, making the log statements unreachable.
            LOG.info(e.getMessage());
            e.printStackTrace();
            Assert.fail();
        }
    }

    @Test
    public void testTimeUUID2() throws Exception {
        CF_TIME_UUID.describe(keyspace);
        MutationBatch m = keyspace.prepareMutationBatch();
        String rowKey = "Key2";
        m.withRow(CF_TIME_UUID, rowKey).delete();
        m.execute();
        m.discardMutations();

        // 100 columns, one per day going back from now.
        long now = System.currentTimeMillis();
        long msecPerDay = 86400000;
        for (int i = 0; i < 100; i++) {
            m.withRow(CF_TIME_UUID, rowKey).putColumn(
                    TimeUUIDUtils.getTimeUUID(now - i * msecPerDay), i, null);
        }
        m.execute();

        // Reading from 20 days ago forward must return at least 20 columns.
        OperationResult<ColumnList<UUID>> result = keyspace
                .prepareQuery(CF_TIME_UUID)
                .getKey(rowKey)
                .withColumnRange(
                        new RangeBuilder()
                                .setLimit(100)
                                .setStart(
                                        TimeUUIDUtils.getTimeUUID(now - 20
                                                * msecPerDay)).build())
                .execute();
        Assert.assertTrue(result.getResult().size() >= 20);
    }
}
| 7,589 |
0 | Create_ds/astyanax/astyanax-test/src/main/java/com/netflix/astyanax/cql | Create_ds/astyanax/astyanax-test/src/main/java/com/netflix/astyanax/cql/test/RowSliceRowRangeQueryTests.java | /**
* Copyright 2013 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.astyanax.cql.test;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Random;
import java.util.Set;
import junit.framework.Assert;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import com.netflix.astyanax.cql.reads.model.CqlRangeBuilder;
import com.netflix.astyanax.cql.reads.model.CqlRangeImpl;
import com.netflix.astyanax.cql.test.utils.ReadTests;
import com.netflix.astyanax.cql.test.utils.TestUtils;
import com.netflix.astyanax.cql.test.utils.TestUtils.TestTokenRange;
import com.netflix.astyanax.model.ByteBufferRange;
import com.netflix.astyanax.model.Column;
import com.netflix.astyanax.model.ColumnFamily;
import com.netflix.astyanax.model.ColumnList;
import com.netflix.astyanax.model.Row;
import com.netflix.astyanax.model.Rows;
public class RowSliceRowRangeQueryTests extends ReadTests {
private static ColumnFamily<String, String> CF_COLUMN_RANGE_TEST = TestUtils.CF_COLUMN_RANGE_TEST;
// Creates the shared column-range test CF used by every scenario below.
@BeforeClass
public static void init() throws Exception {
initContext();
keyspace.createColumnFamily(CF_COLUMN_RANGE_TEST, null);
CF_COLUMN_RANGE_TEST.describe(keyspace);
}
// Drops the test CF so repeated runs start from a clean schema.
@AfterClass
public static void tearDown() throws Exception {
keyspace.dropColumnFamily(CF_COLUMN_RANGE_TEST);
}
// Single driver test: runs every row-slice/row-range scenario first against
// populated rows, then again after deleting them (rowDeleted=true), where
// each scenario must observe empty results. Combining the scenarios into
// one @Test keeps the populate/delete ordering deterministic.
@Test
public void runAllTests() throws Exception {
boolean rowDeleted = false;
TestUtils.populateRowsForColumnRange(keyspace);
// Sleep to let the writes settle before reading — TODO confirm whether
// this is masking an eventual-consistency issue in the test cluster.
Thread.sleep(1000);
testRowKeysWithAllColumns(rowDeleted);
testRowKeysWithColumnSet(rowDeleted);
testRowKeysWithColumnRange(rowDeleted);
testRowRangeWithAllColumns(rowDeleted);
testRowRangeWithColumnSet(rowDeleted);
testRowRangeWithColumnRange(rowDeleted);
TestUtils.deleteRowsForColumnRange(keyspace);
Thread.sleep(1000);
rowDeleted = true;
testRowKeysWithAllColumns(rowDeleted);
testRowKeysWithColumnSet(rowDeleted);
testRowKeysWithColumnRange(rowDeleted);
testRowRangeWithAllColumns(rowDeleted);
testRowRangeWithColumnSet(rowDeleted);
testRowRangeWithColumnRange(rowDeleted);
}
// Row-slice by explicit keys, all columns: each returned row must contain
// exactly the 26 columns "a".."z" with values 1..26 (matching the data
// written by TestUtils.populateRowsForColumnRange). When rowDeleted is
// true the slice must be empty.
private void testRowKeysWithAllColumns(boolean rowDeleted) throws Exception {
Set<String> rowKeys = getRandomRowKeys();
Rows<String, String> rows = keyspace.prepareQuery(CF_COLUMN_RANGE_TEST).getRowSlice(rowKeys).execute().getResult();
if (rowDeleted) {
Assert.assertTrue(rows.isEmpty());
return;
}
Assert.assertFalse(rows.isEmpty());
int rowKeysSize = rowKeys.size();
// Remove each returned key from the requested set so that (a) extraneous
// rows are detected immediately and (b) the final size check confirms
// every requested key was returned exactly once.
for (Row<String, String> row : rows) {
boolean isPresent = rowKeys.remove(row.getKey());
Assert.assertTrue("Extraneous row: " + row.getKey(), isPresent);
ColumnList<String> colList = row.getColumns();
Assert.assertEquals(26, colList.size());
for(int index=0; index<26; index++) {
Column<String> col = colList.getColumnByIndex(index);
Assert.assertTrue(String.valueOf((char)('a' + index)).equals(col.getName()));
Assert.assertEquals(index + 1, col.getIntegerValue());
}
}
Assert.assertEquals(rowKeysSize, rows.size());
}
private void testRowKeysWithColumnSet(boolean rowDeleted) throws Exception {
Set<String> rowKeys = getRandomRowKeys();
Set<String> columns = getRandomColumns();
Rows<String, String> rows = keyspace.prepareQuery(CF_COLUMN_RANGE_TEST)
.getRowSlice(rowKeys)
.withColumnSlice(columns)
.execute().getResult();
if (rowDeleted) {
Assert.assertTrue(rows.isEmpty());
return;
}
Assert.assertFalse(rows.isEmpty());
List<String> expected = new ArrayList<String>(columns);
Collections.sort(expected);
int rowKeysSize = rowKeys.size();
for (Row<String, String> row : rows) {
boolean isPresent = rowKeys.remove(row.getKey());
Assert.assertTrue("Extraneous row: " + row.getKey(), isPresent);
List<String> result = new ArrayList<String>();
ColumnList<String> colList = row.getColumns();
for (Column<String> col : colList) {
result.add(col.getName());
}
Collections.sort(result);
Assert.assertEquals(expected, result);
}
Assert.assertEquals(rowKeysSize, rows.size());
}
@SuppressWarnings("unchecked")
private void testRowKeysWithColumnRange(boolean rowDeleted) throws Exception {
Set<String> rowKeys = getRandomRowKeys();
// get random start and end column
CqlRangeImpl<String> columns = (CqlRangeImpl<String>) getRandomColumnRange();
Rows<String, String> rows = keyspace.prepareQuery(CF_COLUMN_RANGE_TEST)
.getRowSlice(rowKeys)
.withColumnRange(columns)
.execute().getResult();
if (rowDeleted) {
Assert.assertTrue(rows.isEmpty());
return;
}
Assert.assertFalse(rows.isEmpty());
int rowKeysSize = rowKeys.size();
for (Row<String, String> row : rows) {
boolean isPresent = rowKeys.remove(row.getKey());
Assert.assertTrue("Extraneous row: " + row.getKey(), isPresent);
int numExpectedCols = columns.getCqlEnd().charAt(0) - columns.getCqlStart().charAt(0) + 1;
ColumnList<String> colList = row.getColumns();
Assert.assertEquals(numExpectedCols, colList.size());
for (Column<String> col : colList) {
Assert.assertTrue(col.getName().compareTo(columns.getCqlStart()) >= 0);
Assert.assertTrue(col.getName().compareTo(columns.getCqlEnd()) <= 0);
}
}
Assert.assertEquals(rowKeysSize, rows.size());
}
private void testRowRangeWithAllColumns(boolean rowDeleted) throws Exception {
List<String> expectedColumns = new ArrayList<String>();
for (char ch = 'a'; ch <= 'z'; ch++) {
expectedColumns.add(String.valueOf(ch));
}
for (TestTokenRange testRange : getTestTokenRanges()) {
Rows<String, String> rows = keyspace.prepareQuery(CF_COLUMN_RANGE_TEST)
.getRowRange(null, null, testRange.startToken, testRange.endToken, -1)
.execute().getResult();
if (rowDeleted) {
Assert.assertTrue(rows.isEmpty());
continue;
}
Assert.assertFalse(rows.isEmpty());
List<String> list = new ArrayList<String>();
for (Row<String, String> row : rows) {
String key = row.getKey();
list.add(key);
ColumnList<String> columns = row.getColumns();
testRangeColumnsForRow(columns, expectedColumns);
}
Assert.assertEquals(testRange.expectedRowKeys, list);
}
}
private void testRowRangeWithColumnSet(boolean rowDeleted) throws Exception {
Set<String> randomColumns = getRandomColumns();
List<String> expectedColumns = new ArrayList<String>(randomColumns);
Collections.sort(expectedColumns);
for (TestTokenRange testRange : getTestTokenRanges()) {
Rows<String, String> rows = keyspace.prepareQuery(CF_COLUMN_RANGE_TEST)
.getRowRange(null, null, testRange.startToken, testRange.endToken, -1)
.withColumnSlice(randomColumns)
.execute().getResult();
if (rowDeleted) {
Assert.assertTrue(rows.isEmpty());
continue;
}
Assert.assertFalse(rows.isEmpty());
List<String> list = new ArrayList<String>();
for (Row<String, String> row : rows) {
String key = row.getKey();
list.add(key);
ColumnList<String> columns = row.getColumns();
testRangeColumnsForRow(columns, expectedColumns);
}
Assert.assertEquals(testRange.expectedRowKeys, list);
}
}
private void testRowRangeWithColumnRange(boolean rowDeleted) throws Exception {
CqlRangeImpl<String> columnRange = (CqlRangeImpl<String>) getRandomColumnRange();
for (TestTokenRange testRange : getTestTokenRanges()) {
Rows<String, String> rows = keyspace.prepareQuery(CF_COLUMN_RANGE_TEST)
.getRowRange(null, null, testRange.startToken, testRange.endToken, -1)
.withColumnRange(columnRange)
.execute().getResult();
if (rowDeleted) {
Assert.assertTrue(rows.isEmpty());
continue;
}
Assert.assertFalse(rows.isEmpty());
int numExpectedCols = columnRange.getCqlEnd().charAt(0) - columnRange.getCqlStart().charAt(0) + 1;
List<String> list = new ArrayList<String>();
for (Row<String, String> row : rows) {
String key = row.getKey();
list.add(key);
ColumnList<String> colList = row.getColumns();
Assert.assertEquals(numExpectedCols, colList.size());
for (Column<String> col : colList) {
Assert.assertTrue(col.getName().compareTo(columnRange.getCqlStart()) >= 0);
Assert.assertTrue(col.getName().compareTo(columnRange.getCqlEnd()) <= 0);
}
}
Assert.assertEquals(testRange.expectedRowKeys, list);
}
}
private Set<String> getRandomRowKeys() {
Random random = new Random();
int numRowKeys = random.nextInt(26) + 1; // avoid 0 rows
Set<String> set = new HashSet<String>();
for (int i=0; i<numRowKeys; i++) {
int no = random.nextInt(26);
char ch = (char) ('A' + no);
set.add(String.valueOf(ch));
}
System.out.println("Set: " + set);
return set;
}
private Set<String> getRandomColumns() {
Random random = new Random();
int numRowKeys = random.nextInt(26) + 1; // avoid 0 rows
Set<String> set = new HashSet<String>();
for (int i=0; i<numRowKeys; i++) {
int no = random.nextInt(26);
char ch = (char) ('a' + no);
set.add(String.valueOf(ch));
}
return set;
}
private ByteBufferRange getRandomColumnRange() {
Random random = new Random();
Integer n1 = random.nextInt(26);
Integer n2 = random.nextInt(26);
String c1 = String.valueOf((char)('a' + n1));
String c2 = String.valueOf((char)('a' + n2));
if (n1 < n2) {
return new CqlRangeBuilder<String>().setStart(c1).setEnd(c2).build();
} else {
return new CqlRangeBuilder<String>().setStart(c2).setEnd(c1).build();
}
}
private List<TestTokenRange> getTestTokenRanges() {
return TestUtils.getTestTokenRanges();
}
private void testRangeColumnsForRow(ColumnList<String> columns, List<String> expected) {
Iterator<Column<String>> iter1 = columns.iterator();
Iterator<String> iter2 = expected.iterator();
while (iter2.hasNext()) {
Column<String> column = iter1.next();
String expectedName = iter2.next();
Assert.assertEquals(expectedName, column.getName());
int expectedValue = expectedName.charAt(0) - 'a' + 1;
Assert.assertEquals(expectedValue, column.getIntegerValue());
}
}
}
| 7,590 |
0 | Create_ds/astyanax/astyanax-test/src/main/java/com/netflix/astyanax/cql | Create_ds/astyanax/astyanax-test/src/main/java/com/netflix/astyanax/cql/test/RowUniquenessConstraintTest.java | /**
* Copyright 2013 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.astyanax.cql.test;
import java.util.UUID;
import junit.framework.Assert;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import com.google.common.base.Function;
import com.google.common.base.Supplier;
import com.netflix.astyanax.MutationBatch;
import com.netflix.astyanax.model.ColumnFamily;
import com.netflix.astyanax.model.ColumnList;
import com.netflix.astyanax.model.ConsistencyLevel;
import com.netflix.astyanax.recipes.uniqueness.NotUniqueException;
import com.netflix.astyanax.recipes.uniqueness.RowUniquenessConstraint;
import com.netflix.astyanax.serializers.LongSerializer;
import com.netflix.astyanax.serializers.StringSerializer;
/**
 * Integration tests for {@link RowUniquenessConstraint}: acquiring uniqueness
 * on a row key, failing a second acquire on the same key, releasing and
 * re-acquiring, and applying a custom mutation atomically with the claim.
 */
public class RowUniquenessConstraintTest extends KeyspaceTests {

    // Constant handle to the test column family; final since it is never reassigned.
    public static final ColumnFamily<Long, String> CF_UNIQUE_CONSTRAINT = ColumnFamily
            .newColumnFamily(
                    "cfunique",
                    LongSerializer.get(),
                    StringSerializer.get());

    /** Binds the shared keyspace and (re)creates the column family under test. */
    @BeforeClass
    public static void init() throws Exception {
        initContext();
        keyspace.createColumnFamily(CF_UNIQUE_CONSTRAINT, null);
        CF_UNIQUE_CONSTRAINT.describe(keyspace);
    }

    /** Drops the column family so repeated runs start from a clean slate. */
    @AfterClass
    public static void tearDown() throws Exception {
        keyspace.dropColumnFamily(CF_UNIQUE_CONSTRAINT);
    }

    // Supplies a fresh random unique-column name for each uniqueness attempt.
    final Supplier<String> UniqueColumnSupplier = new Supplier<String>() {
        @Override
        public String get() {
            return UUID.randomUUID().toString();
        }
    };

    /** A second acquire on an already-claimed row key must throw NotUniqueException. */
    @Test
    public void testUnique() throws Exception {
        RowUniquenessConstraint<Long, String> unique =
                new RowUniquenessConstraint<Long, String>(keyspace, CF_UNIQUE_CONSTRAINT, 1L, UniqueColumnSupplier)
                        .withConsistencyLevel(ConsistencyLevel.CL_ONE);
        unique.acquire();
        try {
            unique = new RowUniquenessConstraint<Long, String>(keyspace, CF_UNIQUE_CONSTRAINT, 1L, UniqueColumnSupplier)
                    .withConsistencyLevel(ConsistencyLevel.CL_ONE);
            unique.acquire();
            Assert.fail("Should have gotten a non-unique ex");
        } catch (NotUniqueException e) {
            System.out.println(e.getMessage());
        }
    }

    /** Releasing a claim must allow a subsequent acquire on the same row key. */
    @Test
    public void testUniqueAndRelease() throws Exception {
        RowUniquenessConstraint<Long, String> unique =
                new RowUniquenessConstraint<Long, String>(keyspace, CF_UNIQUE_CONSTRAINT, 2L, UniqueColumnSupplier)
                        .withConsistencyLevel(ConsistencyLevel.CL_ONE);
        unique.acquire();
        unique.release();
        unique = new RowUniquenessConstraint<Long, String>(keyspace, CF_UNIQUE_CONSTRAINT, 2L, UniqueColumnSupplier)
                .withConsistencyLevel(ConsistencyLevel.CL_ONE);
        unique.acquire();
    }

    /**
     * Acquires uniqueness on key 3 while atomically applying a custom mutation
     * that writes to a different row (key 10), then verifies the write landed.
     */
    @Test
    public void testUniquenessWithCustomMutation() throws Exception {
        ColumnList<String> result = keyspace.prepareQuery(CF_UNIQUE_CONSTRAINT).getRow(10L).execute().getResult();
        Assert.assertTrue(result.isEmpty());
        RowUniquenessConstraint<Long, String> unique =
                new RowUniquenessConstraint<Long, String>(keyspace, CF_UNIQUE_CONSTRAINT, 3L, UniqueColumnSupplier)
                        .withConsistencyLevel(ConsistencyLevel.CL_ONE);
        unique.acquireAndApplyMutation(new Function<MutationBatch, Boolean>() {
            @Override
            public Boolean apply(MutationBatch input) {
                input.withRow(CF_UNIQUE_CONSTRAINT, 10L).putEmptyColumn("MyCustomColumn", null);
                return true;
            }
        });
        result = keyspace.prepareQuery(CF_UNIQUE_CONSTRAINT).getRow(10L).execute().getResult();
        Assert.assertFalse(result.isEmpty());
    }
}
| 7,591 |
0 | Create_ds/astyanax/astyanax-test/src/main/java/com/netflix/astyanax/cql | Create_ds/astyanax/astyanax-test/src/main/java/com/netflix/astyanax/cql/test/KeyspaceTests.java | /**
* Copyright 2013 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.astyanax.cql.test;
import org.apache.log4j.PropertyConfigurator;
import com.netflix.astyanax.AstyanaxContext;
import com.netflix.astyanax.Keyspace;
import com.netflix.astyanax.cql.test.utils.AstyanaxContextFactory;
/**
 * Shared base class for keyspace-backed integration tests. Holds the static
 * context/keyspace handles and performs one-time test-environment setup.
 */
public class KeyspaceTests {

    /** log4j configuration used by all keyspace-backed tests. */
    private static final String LOG4J_CONFIG = "./src/main/resources/test-log4j.properties";

    public static AstyanaxContext<Keyspace> context;
    public static Keyspace keyspace;

    /**
     * Configures test logging and points {@link #keyspace} at the shared
     * cached keyspace supplied by {@link AstyanaxContextFactory}.
     */
    public static void initContext() throws Exception {
        PropertyConfigurator.configure(LOG4J_CONFIG);
        keyspace = AstyanaxContextFactory.getCachedKeyspace();
    }
}
| 7,592 |
0 | Create_ds/astyanax/astyanax-test/src/main/java/com/netflix/astyanax/cql | Create_ds/astyanax/astyanax-test/src/main/java/com/netflix/astyanax/cql/test/StaticColumnFamilyTests.java | /**
* Copyright 2013 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.astyanax.cql.test;
import java.util.ArrayList;
import java.util.List;
import junit.framework.Assert;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import com.netflix.astyanax.ColumnListMutation;
import com.netflix.astyanax.Keyspace;
import com.netflix.astyanax.MutationBatch;
import com.netflix.astyanax.model.Column;
import com.netflix.astyanax.model.ColumnFamily;
import com.netflix.astyanax.model.ColumnList;
import com.netflix.astyanax.model.Rows;
import com.netflix.astyanax.serializers.StringSerializer;
/**
 * Integration tests for a CQL3 table with static named columns:
 * accounts(userid text PRIMARY KEY, user text, pswd text).
 * Exercises single-row reads, column slices, single-column reads and row
 * slices, both before and after deleting all rows.
 *
 * <p>Fixture data (written by {@link #populateRowsForAccountsTable}):
 * rows 'A'..'Z', each with user = "user"+key and pswd = "pswd"+key.
 */
public class StaticColumnFamilyTests extends KeyspaceTests {

    // Constant handle to the accounts table; final since it is never reassigned.
    private static final ColumnFamily<String, String> CF_ACCOUNTS = new ColumnFamily<String, String>("accounts", StringSerializer.get(), StringSerializer.get());

    /** Binds the shared keyspace and creates the accounts table via raw CQL. */
    @BeforeClass
    public static void init() throws Exception {
        initContext();
        keyspace.prepareQuery(CF_ACCOUNTS)
                .withCql("CREATE TABLE astyanaxunittests.accounts (userid text PRIMARY KEY, user text, pswd text)")
                .execute();
        CF_ACCOUNTS.describe(keyspace);
    }

    /** Drops the accounts table so repeated runs start from a clean slate. */
    @AfterClass
    public static void tearDown() throws Exception {
        keyspace.dropColumnFamily(CF_ACCOUNTS);
    }

    /**
     * Runs every read variation twice: first against populated rows, then
     * again after deleting all rows, when every read must come back empty.
     */
    @Test
    public void testReadWriteOpsWithStaticNamedColumns() throws Exception {
        populateRowsForAccountsTable(keyspace);
        Thread.sleep(200); // allow writes to settle before reading
        boolean rowDeleted = false;
        performSimpleRowQuery(rowDeleted);
        performSimpleRowQueryWithColumnCollection(rowDeleted);
        performSimpleRowSingleColumnQuery(rowDeleted);
        performRowSliceQueryWithAllColumns(rowDeleted);
        performRowSliceQueryWithColumnSlice(rowDeleted);
        deleteRowsForAccountsTable(keyspace);
        Thread.sleep(200); // allow deletes to settle
        rowDeleted = true;
        performSimpleRowQuery(rowDeleted);
        performSimpleRowQueryWithColumnCollection(rowDeleted);
        performSimpleRowSingleColumnQuery(rowDeleted);
        performRowSliceQueryWithAllColumns(rowDeleted);
        performRowSliceQueryWithColumnSlice(rowDeleted);
    }

    /** Reads each row 'A'..'Z' individually with no column filter. */
    private void performSimpleRowQuery(boolean rowDeleted) throws Exception {
        for (char keyName = 'A'; keyName <= 'Z'; keyName++) {
            String key = Character.toString(keyName);
            performSimpleRowQueryForRow(key, rowDeleted, key);
        }
    }

    /** Asserts a single row holds user/pswd values derived from {@code expectedChar}. */
    private void performSimpleRowQueryForRow(String rowKey, boolean rowDeleted, String expectedChar) throws Exception {
        ColumnList<String> result = keyspace.prepareQuery(CF_ACCOUNTS).getRow(rowKey).execute().getResult();
        if (rowDeleted) {
            Assert.assertTrue(result.isEmpty());
        } else {
            Assert.assertFalse(result.isEmpty());
            Column<String> col = result.getColumnByName("user");
            Assert.assertEquals("user" + expectedChar, col.getStringValue());
            col = result.getColumnByName("pswd");
            Assert.assertEquals("pswd" + expectedChar, col.getStringValue());
        }
    }

    /** Reads each row 'A'..'Z' individually with explicit column slices. */
    private void performSimpleRowQueryWithColumnCollection(boolean rowDeleted) throws Exception {
        for (char keyName = 'A'; keyName <= 'Z'; keyName++) {
            String key = Character.toString(keyName);
            performSimpleRowQueryWithColumnCollectionForRow(key, rowDeleted, key);
        }
    }

    /**
     * Exercises every withColumnSlice variant for one row: varargs with both
     * columns, each column alone, and a List with both / one column(s).
     */
    private void performSimpleRowQueryWithColumnCollectionForRow(String rowKey, boolean rowDeleted, String expectedChar) throws Exception {
        // Varargs slice: both columns.
        ColumnList<String> result = keyspace.prepareQuery(CF_ACCOUNTS).getRow(rowKey).withColumnSlice("user", "pswd").execute().getResult();
        if (rowDeleted) {
            Assert.assertTrue(result.isEmpty());
        } else {
            Assert.assertFalse(result.isEmpty());
            Column<String> col = result.getColumnByName("user");
            Assert.assertEquals("user" + expectedChar, col.getStringValue());
            col = result.getColumnByName("pswd");
            Assert.assertEquals("pswd" + expectedChar, col.getStringValue());
        }
        // Varargs slice: "user" only.
        result = keyspace.prepareQuery(CF_ACCOUNTS).getRow(rowKey).withColumnSlice("user").execute().getResult();
        if (rowDeleted) {
            Assert.assertTrue(result.isEmpty());
        } else {
            Assert.assertFalse(result.isEmpty());
            Column<String> col = result.getColumnByName("user");
            Assert.assertEquals("user" + expectedChar, col.getStringValue());
        }
        // Varargs slice: "pswd" only.
        result = keyspace.prepareQuery(CF_ACCOUNTS).getRow(rowKey).withColumnSlice("pswd").execute().getResult();
        if (rowDeleted) {
            Assert.assertTrue(result.isEmpty());
        } else {
            Assert.assertFalse(result.isEmpty());
            Column<String> col = result.getColumnByName("pswd");
            Assert.assertEquals("pswd" + expectedChar, col.getStringValue());
        }
        // Collection slice: both columns.
        List<String> cols = new ArrayList<String>();
        cols.add("user"); cols.add("pswd");
        result = keyspace.prepareQuery(CF_ACCOUNTS).getRow(rowKey).withColumnSlice(cols).execute().getResult();
        if (rowDeleted) {
            Assert.assertTrue(result.isEmpty());
        } else {
            Assert.assertFalse(result.isEmpty());
            Column<String> col = result.getColumnByName("user");
            Assert.assertEquals("user" + expectedChar, col.getStringValue());
            col = result.getColumnByName("pswd");
            Assert.assertEquals("pswd" + expectedChar, col.getStringValue());
        }
        // Collection slice: "pswd" only.
        cols.remove("user");
        result = keyspace.prepareQuery(CF_ACCOUNTS).getRow(rowKey).withColumnSlice(cols).execute().getResult();
        if (rowDeleted) {
            Assert.assertTrue(result.isEmpty());
        } else {
            Assert.assertFalse(result.isEmpty());
            Column<String> col = result.getColumnByName("pswd");
            Assert.assertEquals("pswd" + expectedChar, col.getStringValue());
        }
    }

    /** Reads single named columns from each row 'A'..'Z'. */
    private void performSimpleRowSingleColumnQuery(boolean rowDeleted) throws Exception {
        for (char keyName = 'A'; keyName <= 'Z'; keyName++) {
            String key = Character.toString(keyName);
            performSimpleRowSingleColumnQueryForRow(key, rowDeleted, key);
        }
    }

    /** Asserts getColumn("user") / getColumn("pswd") for one row; deleted rows yield null. */
    private void performSimpleRowSingleColumnQueryForRow(String rowKey, boolean rowDeleted, String expectedChar) throws Exception {
        Column<String> col = keyspace.prepareQuery(CF_ACCOUNTS).getRow(rowKey).getColumn("user").execute().getResult();
        if (rowDeleted) {
            Assert.assertNull(col);
        } else {
            Assert.assertTrue(col.hasValue());
            Assert.assertEquals("user" + expectedChar, col.getStringValue());
        }
        col = keyspace.prepareQuery(CF_ACCOUNTS).getRow(rowKey).getColumn("pswd").execute().getResult();
        if (rowDeleted) {
            Assert.assertNull(col);
        } else {
            Assert.assertTrue(col.hasValue());
            Assert.assertEquals("pswd" + expectedChar, col.getStringValue());
        }
    }

    /**
     * Reads all rows in one slice with no column filter.
     * NOTE(review): asserts rows come back in the requested key order —
     * appears to hold for this test setup; confirm against the partitioner.
     */
    private void performRowSliceQueryWithAllColumns(boolean rowDeleted) throws Exception {
        List<String> keys = new ArrayList<String>();
        for (char keyName = 'A'; keyName <= 'Z'; keyName++) {
            keys.add(Character.toString(keyName));
        }
        int index = 0;
        Rows<String, String> rows = keyspace.prepareQuery(CF_ACCOUNTS).getRowSlice(keys).execute().getResult();
        if (rowDeleted) {
            Assert.assertTrue(rows.isEmpty());
        } else {
            Assert.assertFalse(rows.isEmpty());
            for (com.netflix.astyanax.model.Row<String, String> row : rows) {
                Assert.assertEquals(keys.get(index), row.getKey());
                ColumnList<String> cols = row.getColumns();
                Assert.assertFalse(cols.isEmpty());
                Column<String> col = cols.getColumnByName("user");
                Assert.assertEquals("user" + keys.get(index), col.getStringValue());
                col = cols.getColumnByName("pswd");
                Assert.assertEquals("pswd" + keys.get(index), col.getStringValue());
                index++;
            }
        }
    }

    /**
     * Reads all rows in one slice with explicit column slices: both columns,
     * then "user" only, then "pswd" only. Same row-order assumption as above.
     */
    private void performRowSliceQueryWithColumnSlice(boolean rowDeleted) throws Exception {
        List<String> keys = new ArrayList<String>();
        for (char keyName = 'A'; keyName <= 'Z'; keyName++) {
            keys.add(Character.toString(keyName));
        }
        int index = 0;
        Rows<String, String> rows = keyspace.prepareQuery(CF_ACCOUNTS).getRowSlice(keys).withColumnSlice("user", "pswd").execute().getResult();
        if (rowDeleted) {
            Assert.assertTrue(rows.isEmpty());
        } else {
            Assert.assertFalse(rows.isEmpty());
            for (com.netflix.astyanax.model.Row<String, String> row : rows) {
                Assert.assertEquals(keys.get(index), row.getKey());
                ColumnList<String> cols = row.getColumns();
                Assert.assertFalse(cols.isEmpty());
                Column<String> col = cols.getColumnByName("user");
                Assert.assertEquals("user" + keys.get(index), col.getStringValue());
                col = cols.getColumnByName("pswd");
                Assert.assertEquals("pswd" + keys.get(index), col.getStringValue());
                index++;
            }
        }
        index = 0;
        rows = keyspace.prepareQuery(CF_ACCOUNTS).getRowSlice(keys).withColumnSlice("user").execute().getResult();
        if (rowDeleted) {
            Assert.assertTrue(rows.isEmpty());
        } else {
            Assert.assertFalse(rows.isEmpty());
            for (com.netflix.astyanax.model.Row<String, String> row : rows) {
                Assert.assertEquals(keys.get(index), row.getKey());
                ColumnList<String> cols = row.getColumns();
                Assert.assertFalse(cols.isEmpty());
                Column<String> col = cols.getColumnByName("user");
                Assert.assertEquals("user" + keys.get(index), col.getStringValue());
                index++;
            }
        }
        index = 0;
        rows = keyspace.prepareQuery(CF_ACCOUNTS).getRowSlice(keys).withColumnSlice("pswd").execute().getResult();
        if (rowDeleted) {
            Assert.assertTrue(rows.isEmpty());
        } else {
            Assert.assertFalse(rows.isEmpty());
            for (com.netflix.astyanax.model.Row<String, String> row : rows) {
                Assert.assertEquals(keys.get(index), row.getKey());
                ColumnList<String> cols = row.getColumns();
                Assert.assertFalse(cols.isEmpty());
                Column<String> col = cols.getColumnByName("pswd");
                Assert.assertEquals("pswd" + keys.get(index), col.getStringValue());
                index++;
            }
        }
    }

    /** Writes the fixture rows: keys 'A'..'Z' with user/pswd columns. */
    public static void populateRowsForAccountsTable(Keyspace keyspace) throws Exception {
        MutationBatch m = keyspace.prepareMutationBatch();
        for (char keyName = 'A'; keyName <= 'Z'; keyName++) {
            String character = Character.toString(keyName);
            ColumnListMutation<String> colMutation = m.withRow(CF_ACCOUNTS, character);
            colMutation.putColumn("user", "user" + character).putColumn("pswd", "pswd" + character);
            m.execute();          // flush each row individually
            m.discardMutations();
        }
    }

    /** Deletes every fixture row 'A'..'Z', one mutation batch per row. */
    public static void deleteRowsForAccountsTable(Keyspace keyspace) throws Exception {
        for (char keyName = 'A'; keyName <= 'Z'; keyName++) {
            MutationBatch m = keyspace.prepareMutationBatch();
            String rowKey = Character.toString(keyName);
            m.withRow(CF_ACCOUNTS, rowKey).delete();
            m.execute();
            m.discardMutations();
        }
    }
}
| 7,593 |
0 | Create_ds/astyanax/astyanax-test/src/main/java/com/netflix/astyanax/cql | Create_ds/astyanax/astyanax-test/src/main/java/com/netflix/astyanax/cql/test/ColumnCountQueryTests.java | /**
* Copyright 2013 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.astyanax.cql.test;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Random;
import java.util.Set;
import junit.framework.Assert;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import com.netflix.astyanax.ColumnListMutation;
import com.netflix.astyanax.MutationBatch;
import com.netflix.astyanax.cql.test.utils.ReadTests;
import com.netflix.astyanax.cql.test.utils.TestUtils;
import com.netflix.astyanax.cql.test.utils.TestUtils.TestTokenRange;
import com.netflix.astyanax.model.ColumnFamily;
public class ColumnCountQueryTests extends ReadTests {
private static ColumnFamily<String, String> CF_COLUMN_RANGE_TEST = TestUtils.CF_COLUMN_RANGE_TEST;
@BeforeClass
public static void init() throws Exception {
    // Bind the shared cached keyspace, then (re)create the column family
    // that every column-count test below reads from.
    initContext();
    keyspace.createColumnFamily(CF_COLUMN_RANGE_TEST, null);
    CF_COLUMN_RANGE_TEST.describe(keyspace);
}
// Drops the test column family so repeated runs start from a clean slate.
@AfterClass
public static void tearDown() throws Exception {
    keyspace.dropColumnFamily(CF_COLUMN_RANGE_TEST);
}
/**
 * Single entry point that runs every column-count variation twice: once
 * against populated rows (keys 'A'..'Z', each with columns 'a'..'z') and once
 * after deleting all rows, when every count must be zero.
 * Order matters: populate -> verify -> delete -> verify-empty.
 */
@Test
public void runAllTests() throws Exception {
    CF_COLUMN_RANGE_TEST.describe(keyspace);
    boolean rowDeleted = false;
    /** INSERT ROWS FOR COLUMN COUNT READ TESTS */
    populateRowsForColumnRange();
    Thread.sleep(1000); // allow writes to settle before reading
    /** PERFORM READS AND CHECK FOR COLUMN COUNTS */
    testColumnCountSingleRowAndAllColumns(rowDeleted);
    testColumnCountSingleRowAndColumnSet(rowDeleted);
    testColumnCountSingleRowAndColumnRange(rowDeleted);
    testColumnCountMultipleRowKeysAndAllColumns(rowDeleted);
    testColumnCountMultipleRowKeysAndColumnSet(rowDeleted);
    testColumnCountMultipleRowKeysAndColumnRange(rowDeleted);
    testColumnCountRowRangeAndAllColumns(rowDeleted);
    testColumnCountRowRangeAndColumnSet(rowDeleted);
    testColumnCountRowRangeAndColumnRange(rowDeleted);
    /** DELETE ROWS */
    deleteRowsForColumnRange();
    Thread.sleep(1000); // allow deletes to settle
    rowDeleted = true;
    /** PERFORM READS AND CHECK FOR COLUMN COUNTS = 0 */
    testColumnCountSingleRowAndAllColumns(rowDeleted);
    testColumnCountSingleRowAndColumnSet(rowDeleted);
    testColumnCountSingleRowAndColumnRange(rowDeleted);
    testColumnCountMultipleRowKeysAndAllColumns(rowDeleted);
    testColumnCountMultipleRowKeysAndColumnSet(rowDeleted);
    testColumnCountMultipleRowKeysAndColumnRange(rowDeleted);
    testColumnCountRowRangeAndAllColumns(rowDeleted);
    testColumnCountRowRangeAndColumnSet(rowDeleted);
    testColumnCountRowRangeAndColumnRange(rowDeleted);
}
/**
 * Verifies getCount() over every row 'A'..'Z' with no column filter: each
 * populated row holds exactly 26 columns; deleted rows report zero.
 */
private void testColumnCountSingleRowAndAllColumns(boolean rowDeleted) throws Exception {
    int expected = rowDeleted ? 0 : 26; // invariant across rows
    for (char rowChar = 'A'; rowChar <= 'Z'; rowChar++) {
        Integer columnCount = keyspace
                .prepareQuery(CF_COLUMN_RANGE_TEST)
                .getKey(String.valueOf(rowChar))
                .getCount()
                .execute().getResult();
        Assert.assertTrue("expected: " + expected + " colCount: " + columnCount, expected == columnCount);
    }
}
/**
 * Verifies getCount() with an explicit column-name slice: the count must equal
 * the number of distinct names requested (or zero once the row is deleted).
 */
private void testColumnCountSingleRowAndColumnSet(boolean rowDeleted) throws Exception {
    Random random = new Random();
    for (char rowChar = 'A'; rowChar <= 'Z'; rowChar++) {
        int numColumns = random.nextInt(26) + 1; // at least one column per row
        Integer columnCount = keyspace
                .prepareQuery(CF_COLUMN_RANGE_TEST)
                .getKey(String.valueOf(rowChar))
                .withColumnSlice(getRandomColumns(numColumns))
                .getCount()
                .execute().getResult();
        int expected = rowDeleted ? 0 : numColumns;
        Assert.assertTrue("expected: " + expected + " colCount: " + columnCount, expected == columnCount);
    }
}
/**
 * Verifies getCount() with a column range [randomStart..'z']: the expected
 * count is the inclusive alphabetic distance from the start column to 'z'
 * (e.g. start 'b' -> columns 'b'..'z' -> 25 columns).
 */
private void testColumnCountSingleRowAndColumnRange(boolean rowDeleted) throws Exception {
    Random random = new Random();
    for (char rowChar = 'A'; rowChar <= 'Z'; rowChar++) {
        // Pick a fresh random inclusive start column in 'a'..'z' per row.
        char startChar = (char) ('a' + random.nextInt(26));
        String startCol = String.valueOf(startChar);
        Integer columnCount = keyspace
                .prepareQuery(CF_COLUMN_RANGE_TEST)
                .getKey(String.valueOf(rowChar))
                .withColumnRange(startCol, "z", false, -1)
                .getCount()
                .execute().getResult();
        int expected = rowDeleted ? 0 : ('z' - startChar + 1); // inclusive distance to 'z'
        Assert.assertTrue("expected: " + expected + " colCount: " + columnCount, expected == columnCount);
    }
}
/**
 * Verifies per-row column counts for a random row-key slice with no column
 * filter: every surviving row reports all 26 columns; deleted rows vanish.
 */
private void testColumnCountMultipleRowKeysAndAllColumns(boolean rowDeleted) throws Exception {
    Collection<String> rowKeys = getRandomRowKeys();
    // Build the expected map first; it is empty once the rows are deleted.
    Map<String, Integer> expected = new HashMap<String, Integer>();
    if (!rowDeleted) {
        for (String key : rowKeys) {
            expected.put(key, 26);
        }
    }
    Map<String, Integer> columnCountsPerRowKey = keyspace
            .prepareQuery(CF_COLUMN_RANGE_TEST)
            .getRowSlice(rowKeys)
            .getColumnCounts()
            .execute().getResult();
    Assert.assertEquals("expected: " + expected + " colCount: " + columnCountsPerRowKey, expected, columnCountsPerRowKey);
}
/**
 * Verifies per-row column counts for a random row-key slice restricted to a
 * random set of column names: each surviving row reports exactly that many.
 */
private void testColumnCountMultipleRowKeysAndColumnSet(boolean rowDeleted) throws Exception {
    Collection<String> rowKeys = getRandomRowKeys();
    Collection<String> columns = getRandomColumns(new Random().nextInt(26) + 1);
    // Build the expected map first; it is empty once the rows are deleted.
    Map<String, Integer> expected = new HashMap<String, Integer>();
    if (!rowDeleted) {
        for (String key : rowKeys) {
            expected.put(key, columns.size());
        }
    }
    Map<String, Integer> columnCountsPerRowKey = keyspace
            .prepareQuery(CF_COLUMN_RANGE_TEST)
            .getRowSlice(rowKeys)
            .withColumnSlice(columns)
            .getColumnCounts()
            .execute().getResult();
    Assert.assertEquals("expected: " + expected + " colCount: " + columnCountsPerRowKey, expected, columnCountsPerRowKey);
}
/**
 * Verifies per-row column counts for a random row-key slice restricted to the
 * column range [randomStart..'z'].
 */
private void testColumnCountMultipleRowKeysAndColumnRange(boolean rowDeleted) throws Exception {
    Collection<String> rowKeys = getRandomRowKeys();
    // Random inclusive start column in 'a'..'z'.
    char startChar = (char) ('a' + new Random().nextInt(26));
    String startCol = String.valueOf(startChar);
    int expectedColCount = 'z' - startChar + 1; // inclusive distance to 'z'
    Map<String, Integer> expected = new HashMap<String, Integer>();
    if (!rowDeleted) {
        for (String key : rowKeys) {
            expected.put(key, expectedColCount);
        }
    }
    Map<String, Integer> columnCountsPerRowKey = keyspace
            .prepareQuery(CF_COLUMN_RANGE_TEST)
            .getRowSlice(rowKeys)
            .withColumnRange(startCol, "z", false, -1)
            .getColumnCounts()
            .execute().getResult();
    Assert.assertEquals("expected: " + expected + " colCount: " + columnCountsPerRowKey, expected, columnCountsPerRowKey);
}
/**
 * Verifies per-row column counts across all test token ranges with no column
 * filter: every surviving row must report all 26 columns.
 */
private void testColumnCountRowRangeAndAllColumns(boolean rowDeleted) throws Exception {
    List<TestTokenRange> tokenRanges = TestUtils.getTestTokenRanges();
    Map<String, Integer> resultRowCounts = new HashMap<String, Integer>();
    for (TestTokenRange tokenRange : tokenRanges) {
        resultRowCounts.putAll(keyspace.prepareQuery(CF_COLUMN_RANGE_TEST)
                .getRowRange(null, null, tokenRange.startToken, tokenRange.endToken, -1)
                .getColumnCounts()
                .execute().getResult());
    }
    Map<String, Integer> expectedRowCounts = rowDeleted
            ? new HashMap<String, Integer>()
            : getExpectedRowCountsForTokenRanges(tokenRanges, 26);
    Assert.assertEquals(expectedRowCounts, resultRowCounts);
}
/**
 * Verifies per-row column counts across all test token ranges when a random
 * set of column names is requested: each row reports exactly that many columns.
 */
private void testColumnCountRowRangeAndColumnSet(boolean rowDeleted) throws Exception {
    Collection<String> columns = getRandomColumns(new Random().nextInt(26) + 1);
    List<TestTokenRange> tokenRanges = TestUtils.getTestTokenRanges();
    Map<String, Integer> resultRowCounts = new HashMap<String, Integer>();
    for (TestTokenRange tokenRange : tokenRanges) {
        resultRowCounts.putAll(keyspace.prepareQuery(CF_COLUMN_RANGE_TEST)
                .getRowRange(null, null, tokenRange.startToken, tokenRange.endToken, -1)
                .withColumnSlice(columns)
                .getColumnCounts()
                .execute().getResult());
    }
    Map<String, Integer> expectedRowCounts = rowDeleted
            ? new HashMap<String, Integer>()
            : getExpectedRowCountsForTokenRanges(tokenRanges, columns.size());
    Assert.assertEquals(expectedRowCounts, resultRowCounts);
}
/**
 * Verifies per-row column counts across all test token ranges for the column
 * range [randomStart..'z']: each row reports the inclusive distance to 'z'.
 */
private void testColumnCountRowRangeAndColumnRange(boolean rowDeleted) throws Exception {
    // Random inclusive start column in 'a'..'z'.
    char startChar = (char) ('a' + new Random().nextInt(26));
    String startColumn = String.valueOf(startChar);
    int expectedColCount = 'z' - startChar + 1; // inclusive distance to 'z'
    List<TestTokenRange> tokenRanges = TestUtils.getTestTokenRanges();
    Map<String, Integer> resultRowCounts = new HashMap<String, Integer>();
    for (TestTokenRange tokenRange : tokenRanges) {
        resultRowCounts.putAll(keyspace.prepareQuery(CF_COLUMN_RANGE_TEST)
                .getRowRange(null, null, tokenRange.startToken, tokenRange.endToken, -1)
                .withColumnRange(startColumn, "z", false, -1)
                .getColumnCounts()
                .execute().getResult());
    }
    Map<String, Integer> expectedRowCounts = rowDeleted
            ? new HashMap<String, Integer>()
            : getExpectedRowCountsForTokenRanges(tokenRanges, expectedColCount);
    Assert.assertEquals(expectedRowCounts, resultRowCounts);
}
    /**
     * Writes 26 rows keyed "A".."Z", each with 26 columns "a".."z" whose value is
     * the column's 1-based alphabet position; one mutation batch execution per row.
     */
    private void populateRowsForColumnRange() throws Exception {
        MutationBatch m = keyspace.prepareMutationBatch();
        for (char keyName = 'A'; keyName <= 'Z'; keyName++) {
            String rowKey = Character.toString(keyName);
            ColumnListMutation<String> colMutation = m.withRow(CF_COLUMN_RANGE_TEST, rowKey);
            for (char cName = 'a'; cName <= 'z'; cName++) {
                // null TTL: columns never expire.
                colMutation.putColumn(Character.toString(cName), (int) (cName - 'a') + 1, null);
            }
            m.withCaching(true);
            m.execute();
            // Reuse the same batch object for the next row.
            m.discardMutations();
        }
    }
private void deleteRowsForColumnRange() throws Exception {
for (char keyName = 'A'; keyName <= 'Z'; keyName++) {
MutationBatch m = keyspace.prepareMutationBatch();
String rowKey = Character.toString(keyName);
m.withRow(CF_COLUMN_RANGE_TEST, rowKey).delete();
m.execute();
m.discardMutations();
}
}
private Collection<String> getRandomRowKeys() {
Random random = new Random();
int numRowKeys = random.nextInt(26) + 1;
Set<String> hashSet = new HashSet<String>();
while(hashSet.size() < numRowKeys) {
int pick = random.nextInt(26);
char ch = (char) ('A' + pick);
hashSet.add(String.valueOf(ch));
}
return hashSet;
}
private Map<String, Integer> getExpectedRowCountsForTokenRanges(List<TestTokenRange> testRanges, int expectedColumnCountForEachRow) {
Map<String, Integer> expectedRowCounts = new HashMap<String, Integer>();
for (TestTokenRange range : testRanges) {
for (String rowKey : range.expectedRowKeys) {
expectedRowCounts.put(rowKey, expectedColumnCountForEachRow);
}
}
return expectedRowCounts;
}
}
| 7,594 |
0 | Create_ds/astyanax/astyanax-test/src/main/java/com/netflix/astyanax/cql | Create_ds/astyanax/astyanax-test/src/main/java/com/netflix/astyanax/cql/test/CompositeColumnTests.java | /**
* Copyright 2013 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.astyanax.cql.test;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Random;
import junit.framework.Assert;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import com.netflix.astyanax.MutationBatch;
import com.netflix.astyanax.annotations.Component;
import com.netflix.astyanax.model.Column;
import com.netflix.astyanax.model.ColumnFamily;
import com.netflix.astyanax.model.ColumnList;
import com.netflix.astyanax.model.Row;
import com.netflix.astyanax.model.Rows;
import com.netflix.astyanax.serializers.AnnotatedCompositeSerializer;
import com.netflix.astyanax.serializers.IntegerSerializer;
public class CompositeColumnTests extends KeyspaceTests {
private static AnnotatedCompositeSerializer<Population> compSerializer = new AnnotatedCompositeSerializer<Population>(Population.class);
private static ColumnFamily<Integer, Population> CF_POPULATION =
new ColumnFamily<Integer, Population>("population", IntegerSerializer.get(), compSerializer, IntegerSerializer.get());
    /** Creates the shared keyspace context and the "population" CF used by every test. */
    @BeforeClass
    public static void init() throws Exception {
        initContext();
        keyspace.createColumnFamily(CF_POPULATION, null);
        CF_POPULATION.describe(keyspace);
    }
    /** Drops the "population" CF created in {@link #init()}. */
    @AfterClass
    public static void teardown() throws Exception {
        keyspace.dropColumnFamily(CF_POPULATION);
    }
    /**
     * Single entry point that runs every read/count scenario twice: first against
     * populated rows, then again after deleting all rows. Each helper takes
     * rowDeleted so it knows whether to expect empty results. Ordering matters
     * (populate -> read -> delete -> read), hence one @Test method.
     */
    @Test
    public void runAllTests() throws Exception {
        boolean rowDeleted = false;
        populateRowsForCFPopulation();
        // Brief pause so writes are visible before reading.
        Thread.sleep(1000);
        /** READ SINGLE ROW QUERIES */
        testReadSingleRowAllColumns(rowDeleted);
        testReadSingleRowSingleColumn(rowDeleted);
        testReadSingleRowColumnRange(rowDeleted);
        /** READ ROW SLICE WITH ROW KEYS */
        testReadMultipleRowKeysWithAllColumns(rowDeleted);
        testReadMultipleRowKeysWithColumnRange(rowDeleted);
        /** READ ROW SLICE WITH ROWS RANGE */
        testReadRowRangeWithAllColumns(rowDeleted);
        testReadRowRangeWithColumnRange(rowDeleted);
        /** ALL ROW COUNT QUERIES */
        testReadSingleRowAllColumnsWithColumnCount(rowDeleted);
        testReadSingleRowColumnRangeWithColumnCount(rowDeleted);
        testReadMultipleRowKeysAllColumnsWithColumnCount(rowDeleted);
        testReadMultipleRowKeysColumnRangeWithColumnCount(rowDeleted);
        testReadRowRangeAllColumnsWithColumnCount(rowDeleted);
        testReadRowRangeColumnRangeWithColumnCount(rowDeleted);
        deleteRowsForCFPopulation();
        Thread.sleep(1000);
        // Second pass: same queries must now come back empty / zero-count.
        rowDeleted = true;
        /** READ SINGLE ROW QUERIES */
        testReadSingleRowAllColumns(rowDeleted);
        testReadSingleRowSingleColumn(rowDeleted);
        testReadSingleRowColumnRange(rowDeleted);
        /** READ ROW SLICE WITH ROW KEYS */
        testReadMultipleRowKeysWithAllColumns(rowDeleted);
        testReadMultipleRowKeysWithColumnRange(rowDeleted);
        /** READ ROW SLICE WITH ROWS RANGE */
        testReadRowRangeWithAllColumns(rowDeleted);
        testReadRowRangeWithColumnRange(rowDeleted);
        /** ALL ROW COUNT QUERIES */
        testReadSingleRowAllColumnsWithColumnCount(rowDeleted);
        testReadSingleRowColumnRangeWithColumnCount(rowDeleted);
        testReadMultipleRowKeysAllColumnsWithColumnCount(rowDeleted);
        testReadMultipleRowKeysColumnRangeWithColumnCount(rowDeleted);
        testReadRowRangeAllColumnsWithColumnCount(rowDeleted);
        testReadRowRangeColumnRangeWithColumnCount(rowDeleted);
    }
    /**
     * Writes one row per year (2001-2014), each holding the four test city
     * composite columns with random population values, in a single batch.
     */
    private void populateRowsForCFPopulation() throws Exception {
        MutationBatch m = keyspace.prepareMutationBatch();
        Random random = new Random();
        for (int year = 2001; year <= 2014; year++) {
            // clone() so each mutation gets its own composite instance.
            m.withRow(CF_POPULATION, year)
                    .putColumn(NewYork.clone(), random.nextInt(25000))
                    .putColumn(SanDiego.clone(), random.nextInt(25000))
                    .putColumn(SanFrancisco.clone(), random.nextInt(25000))
                    .putColumn(Seattle.clone(), random.nextInt(25000));
        }
        m.execute();
    }
private void deleteRowsForCFPopulation() throws Exception {
MutationBatch m = keyspace.prepareMutationBatch();
for (int year = 2001; year <= 2014; year ++) {
m.withRow(CF_POPULATION, year).delete();
}
m.execute();
}
    /**
     * Reads each year row with no column filter; expects the four city columns
     * in comparator order (SanDiego, SanFrancisco, NewYork, Seattle) or an empty
     * list after deletion.
     */
    private void testReadSingleRowAllColumns(boolean rowDeleted) throws Exception {
        for (int year = 2001; year <= 2014; year++) {
            ColumnList<Population> result = keyspace.prepareQuery(CF_POPULATION)
                    .getRow(year)
                    .execute().getResult();
            if (rowDeleted) {
                Assert.assertTrue(result.isEmpty());
                continue;
            } else {
                checkResult(result, SanDiego, SanFrancisco, NewYork, Seattle);
            }
        }
    }
    /**
     * Reads exactly one composite column (SanFrancisco) from each year row;
     * expects null after deletion, otherwise a valued column with matching name.
     */
    private void testReadSingleRowSingleColumn(boolean rowDeleted) throws Exception {
        for (int year = 2001; year <= 2014; year++) {
            Column<Population> result = keyspace.prepareQuery(CF_POPULATION)
                    .getRow(year)
                    .getColumn(SanFrancisco.clone())
                    .execute().getResult();
            if (rowDeleted) {
                Assert.assertNull(result);
                continue;
            } else {
                Assert.assertTrue(result.hasValue());
            }
            // Only reached when the row exists.
            Assert.assertEquals(SanFrancisco, result.getName());
        }
    }
private void testReadSingleRowColumnRange(boolean rowDeleted) throws Exception {
AnnotatedCompositeSerializer<Population> compSerializer = new AnnotatedCompositeSerializer<Population>(Population.class);
for (int year = 2001; year <= 2001; year++) {
ColumnList<Population> result = keyspace.prepareQuery(CF_POPULATION)
.getRow(year)
.withColumnRange(compSerializer.buildRange()
.withPrefix("CA")
.build())
.execute().getResult();
if (rowDeleted) {
Assert.assertTrue(result.isEmpty());
continue;
} else {
checkResult(result, SanDiego, SanFrancisco);
}
result = keyspace.prepareQuery(CF_POPULATION)
.getRow(year)
.withColumnRange(compSerializer.buildRange()
.withPrefix("CA")
.greaterThan("San Diego")
.build())
.execute().getResult();
if (rowDeleted) {
Assert.assertTrue(result.isEmpty());
continue;
} else {
checkResult(result, SanFrancisco);
}
result = keyspace.prepareQuery(CF_POPULATION)
.getRow(year)
.withColumnRange(compSerializer.buildRange()
.withPrefix("WA")
.withPrefix("Seattle")
.withPrefix(40000)
.build())
.execute().getResult();
if (rowDeleted) {
Assert.assertTrue(result.isEmpty());
continue;
} else {
checkResult(result, Seattle);
}
}
}
    /**
     * Key-slice read of years 2001-2005 with all columns; expects five rows,
     * each with all four city columns, or an empty result after deletion.
     */
    private void testReadMultipleRowKeysWithAllColumns(boolean rowDeleted) throws Exception {
        Rows<Integer, Population> result = keyspace.prepareQuery(CF_POPULATION)
                .getKeySlice(2001, 2002, 2003, 2004, 2005)
                .execute().getResult();
        if (rowDeleted) {
            Assert.assertTrue(result.isEmpty());
        } else {
            checkRowResult(result, 2001, 5, SanDiego, SanFrancisco, NewYork, Seattle);
        }
    }
    /**
     * Key-slice read of years 2001-2005 combined with the three composite
     * column-range shapes (prefix, prefix+greaterThan, fully-specified prefix);
     * each row must contain exactly the matching city columns.
     */
    private void testReadMultipleRowKeysWithColumnRange(boolean rowDeleted) throws Exception {
        Rows<Integer, Population> result = keyspace.prepareQuery(CF_POPULATION)
                .getKeySlice(2001, 2002, 2003, 2004, 2005)
                .withColumnRange(compSerializer.buildRange()
                        .withPrefix("CA")
                        .build())
                .execute().getResult();
        if (rowDeleted) {
            Assert.assertTrue(result.isEmpty());
        } else {
            checkRowResult(result, 2001, 5, SanDiego, SanFrancisco);
        }
        // "CA" cities strictly after "San Diego" -> San Francisco only.
        result = keyspace.prepareQuery(CF_POPULATION)
                .getKeySlice(2001, 2002, 2003, 2004, 2005)
                .withColumnRange(compSerializer.buildRange()
                        .withPrefix("CA")
                        .greaterThan("San Diego")
                        .build())
                .execute().getResult();
        if (rowDeleted) {
            Assert.assertTrue(result.isEmpty());
        } else {
            checkRowResult(result, 2001, 5, SanFrancisco);
        }
        // Fully-qualified prefix pins the single Seattle column.
        result = keyspace.prepareQuery(CF_POPULATION)
                .getKeySlice(2001, 2002, 2003, 2004, 2005)
                .withColumnRange(compSerializer.buildRange()
                        .withPrefix("WA")
                        .withPrefix("Seattle")
                        .withPrefix(40000)
                        .build())
                .execute().getResult();
        if (rowDeleted) {
            Assert.assertTrue(result.isEmpty());
        } else {
            checkRowResult(result, 2001, 5, Seattle);
        }
    }
    /**
     * Token-range row reads with all columns; each range's rows (see
     * getTestRanges) must carry all four city columns, or be absent after deletion.
     */
    private void testReadRowRangeWithAllColumns(boolean rowDeleted) throws Exception {
        List<TestRange> testRanges = getTestRanges();
        for (TestRange testRange : testRanges) {
            Rows<Integer, Population> result = keyspace.prepareQuery(CF_POPULATION)
                    .getKeyRange(null, null, testRange.start, testRange.end, 100)
                    .execute().getResult();
            if (rowDeleted) {
                Assert.assertTrue(result.isEmpty());
            } else {
                checkRowResult(result, testRange.expectedRowKeys, SanDiego, SanFrancisco, NewYork, Seattle);
            }
        }
    }
    /**
     * Token-range row reads combined with the three composite column-range
     * shapes; each matched row must contain exactly the matching city columns.
     */
    private void testReadRowRangeWithColumnRange(boolean rowDeleted) throws Exception {
        List<TestRange> testRanges = getTestRanges();
        for (TestRange testRange : testRanges) {
            Rows<Integer, Population> result = keyspace.prepareQuery(CF_POPULATION)
                    .getKeyRange(null, null, testRange.start, testRange.end, 100)
                    .withColumnRange(compSerializer.buildRange()
                            .withPrefix("CA")
                            .build())
                    .execute().getResult();
            if (rowDeleted) {
                Assert.assertTrue(result.isEmpty());
            } else {
                checkRowResult(result, testRange.expectedRowKeys, SanDiego, SanFrancisco);
            }
            // "CA" cities strictly after "San Diego" -> San Francisco only.
            result = keyspace.prepareQuery(CF_POPULATION)
                    .getKeyRange(null, null, testRange.start, testRange.end, 100)
                    .withColumnRange(compSerializer.buildRange()
                            .withPrefix("CA")
                            .greaterThan("San Diego")
                            .build())
                    .execute().getResult();
            if (rowDeleted) {
                Assert.assertTrue(result.isEmpty());
            } else {
                checkRowResult(result, testRange.expectedRowKeys, SanFrancisco);
            }
            // Fully-qualified prefix pins the single Seattle column.
            result = keyspace.prepareQuery(CF_POPULATION)
                    .getKeyRange(null, null, testRange.start, testRange.end, 100)
                    .withColumnRange(compSerializer.buildRange()
                            .withPrefix("WA")
                            .withPrefix("Seattle")
                            .withPrefix(40000)
                            .build())
                    .execute().getResult();
            if (rowDeleted) {
                Assert.assertTrue(result.isEmpty());
            } else {
                checkRowResult(result, testRange.expectedRowKeys, Seattle);
            }
        }
    }
/** ALL COLUMN COUNT QUERIES */
private void testReadSingleRowAllColumnsWithColumnCount(boolean rowDeleted) throws Exception {
for (int year = 2001; year <= 2014; year++) {
Integer result = keyspace.prepareQuery(CF_POPULATION)
.getRow(year)
.getCount()
.execute().getResult();
int expected = rowDeleted ? 0 : 4;
Assert.assertTrue(expected == result.intValue());
}
}
    /**
     * getCount() per year row combined with composite column ranges:
     * prefix "CA" -> 2 columns, "CA" after "San Diego" -> 1, full WA/Seattle/40000
     * prefix -> 1; all zero after deletion.
     */
    private void testReadSingleRowColumnRangeWithColumnCount(boolean rowDeleted) throws Exception {
        for (int year = 2001; year <= 2014; year++) {
            Integer result = keyspace.prepareQuery(CF_POPULATION)
                    .getRow(year)
                    .withColumnRange(compSerializer.buildRange()
                            .withPrefix("CA")
                            .build())
                    .getCount()
                    .execute().getResult();
            int expected = rowDeleted ? 0 : 2;
            Assert.assertTrue(expected == result.intValue());
            result = keyspace.prepareQuery(CF_POPULATION)
                    .getRow(year)
                    .withColumnRange(compSerializer.buildRange()
                            .withPrefix("CA")
                            .greaterThan("San Diego")
                            .build())
                    .getCount()
                    .execute().getResult();
            expected = rowDeleted ? 0 : 1;
            Assert.assertTrue(expected == result.intValue());
            result = keyspace.prepareQuery(CF_POPULATION)
                    .getRow(year)
                    .withColumnRange(compSerializer.buildRange()
                            .withPrefix("WA")
                            .withPrefix("Seattle")
                            .withPrefix(40000)
                            .build())
                    .getCount()
                    .execute().getResult();
            expected = rowDeleted ? 0 : 1;
            Assert.assertTrue(expected == result.intValue());
        }
    }
    /**
     * getColumnCounts() for a key slice of years 2001-2005; expects 4 columns per
     * year when populated, or an empty map after deletion (deleted rows are
     * absent from the result rather than reported as 0).
     */
    private void testReadMultipleRowKeysAllColumnsWithColumnCount(boolean rowDeleted) throws Exception {
        Map<Integer, Integer> result = keyspace.prepareQuery(CF_POPULATION)
                .getKeySlice(2001, 2002, 2003, 2004, 2005)
                .getColumnCounts()
                .execute().getResult();
        Map<Integer, Integer> expected = new HashMap<Integer, Integer>();
        if (!rowDeleted) {
            for (int year = 2001; year<= 2005; year++) {
                expected.put(year, 4);
            }
        }
        Assert.assertEquals(expected, result);
    }
    /**
     * getColumnCounts() for a key slice of years 2001-2005 combined with the three
     * composite column-range shapes; expects 2/1/1 columns per row respectively,
     * or an empty map after deletion.
     */
    private void testReadMultipleRowKeysColumnRangeWithColumnCount(boolean rowDeleted) throws Exception {
        Map<Integer, Integer> result = keyspace.prepareQuery(CF_POPULATION)
                .getKeySlice(2001, 2002, 2003, 2004, 2005)
                .withColumnRange(compSerializer.buildRange()
                        .withPrefix("CA")
                        .build())
                .getColumnCounts()
                .execute().getResult();
        Map<Integer, Integer> expected = new HashMap<Integer, Integer>();
        if (!rowDeleted) {
            for (Integer rowKey = 2001; rowKey<=2005; rowKey++) {
                expected.put(rowKey, 2);
            }
        }
        Assert.assertEquals(expected, result);
        // "CA" after "San Diego" -> one matching column per row.
        result = keyspace.prepareQuery(CF_POPULATION)
                .getKeySlice(2001, 2002, 2003, 2004, 2005)
                .withColumnRange(compSerializer.buildRange()
                        .withPrefix("CA")
                        .greaterThan("San Diego")
                        .build())
                .getColumnCounts()
                .execute().getResult();
        expected = new HashMap<Integer, Integer>();
        if (!rowDeleted) {
            for (Integer rowKey = 2001; rowKey<=2005; rowKey++) {
                expected.put(rowKey, 1);
            }
        }
        Assert.assertEquals(expected, result);
        // Fully-qualified prefix -> exactly the Seattle column per row.
        result = keyspace.prepareQuery(CF_POPULATION)
                .getKeySlice(2001, 2002, 2003, 2004, 2005)
                .withColumnRange(compSerializer.buildRange()
                        .withPrefix("WA")
                        .withPrefix("Seattle")
                        .withPrefix(40000)
                        .build())
                .getColumnCounts()
                .execute().getResult();
        expected = new HashMap<Integer, Integer>();
        if (!rowDeleted) {
            for (Integer rowKey = 2001; rowKey<=2005; rowKey++) {
                expected.put(rowKey, 1);
            }
        }
        Assert.assertEquals(expected, result);
    }
    /**
     * getColumnCounts() over a token-range row query with no column filter.
     * NOTE(review): unlike the column-range variant, only the first test range is
     * exercised here — possibly intentional, confirm with the original author.
     */
    private void testReadRowRangeAllColumnsWithColumnCount(boolean rowDeleted) throws Exception {
        List<TestRange> testRanges = getTestRanges();
        TestRange range = testRanges.get(0);
        Map<Integer, Integer> result = keyspace.prepareQuery(CF_POPULATION)
                .getKeyRange(null, null, range.start, range.end, 100)
                .getColumnCounts()
                .execute().getResult();
        Map<Integer, Integer> expected = new HashMap<Integer, Integer>();
        if (!rowDeleted) {
            for (Integer year : range.expectedRowKeys) {
                expected.put(year, 4);
            }
        }
        Assert.assertEquals(expected, result);
    }
    /**
     * getColumnCounts() over every token range combined with the three composite
     * column-range shapes; expects 2/1/1 columns for each row a range covers,
     * or an empty map after deletion.
     */
    private void testReadRowRangeColumnRangeWithColumnCount(boolean rowDeleted) throws Exception {
        List<TestRange> testRanges = getTestRanges();
        for (TestRange testRange : testRanges) {
            Map<Integer, Integer> result = keyspace.prepareQuery(CF_POPULATION)
                    .getKeyRange(null, null, testRange.start, testRange.end, 100)
                    .withColumnRange(compSerializer.buildRange()
                            .withPrefix("CA")
                            .build())
                    .getColumnCounts()
                    .execute().getResult();
            Map<Integer, Integer> expected = new HashMap<Integer, Integer>();
            if (!rowDeleted) {
                for (Integer rowKey : testRange.expectedRowKeys) {
                    expected.put(rowKey, 2);
                }
            }
            Assert.assertEquals(expected, result);
            // "CA" after "San Diego" -> one matching column per row.
            result = keyspace.prepareQuery(CF_POPULATION)
                    .getKeyRange(null, null, testRange.start, testRange.end, 100)
                    .withColumnRange(compSerializer.buildRange()
                            .withPrefix("CA")
                            .greaterThan("San Diego")
                            .build())
                    .getColumnCounts()
                    .execute().getResult();
            expected = new HashMap<Integer, Integer>();
            if (!rowDeleted) {
                for (Integer rowKey : testRange.expectedRowKeys) {
                    expected.put(rowKey, 1);
                }
            }
            Assert.assertEquals(expected, result);
            // Fully-qualified prefix -> exactly the Seattle column per row.
            result = keyspace.prepareQuery(CF_POPULATION)
                    .getKeyRange(null, null, testRange.start, testRange.end, 100)
                    .withColumnRange(compSerializer.buildRange()
                            .withPrefix("WA")
                            .withPrefix("Seattle")
                            .withPrefix(40000)
                            .build())
                    .getColumnCounts()
                    .execute().getResult();
            expected = new HashMap<Integer, Integer>();
            if (!rowDeleted) {
                for (Integer rowKey : testRange.expectedRowKeys) {
                    expected.put(rowKey, 1);
                }
            }
            Assert.assertEquals(expected, result);
        }
    }
    /** Asserts the column list holds exactly the expected composite names, in order. */
    private void checkResult(ColumnList<Population> result, Population ... expected) throws Exception {
        Assert.assertFalse(result.isEmpty());
        Assert.assertEquals(expected.length, result.size());
        int index = 0;
        for (Population p : expected) {
            Assert.assertEquals(p, result.getColumnByIndex(index++).getName());
        }
    }
    /**
     * Asserts the result holds exactly {@code size} rows whose keys are the
     * consecutive integers starting at {@code startKey} (in iteration order),
     * each carrying the expected columns.
     */
    private void checkRowResult(Rows<Integer, Population> result, Integer startKey, Integer size, Population ... expected) throws Exception {
        int rowKey = startKey;
        for (Row<Integer, Population> row : result) {
            Assert.assertTrue(rowKey == row.getKey());
            checkResult(row.getColumns(), expected);
            rowKey++;
        }
        Assert.assertTrue("Result: " + result.size() + ", size: " + size, size == result.size());
    }
    /**
     * Asserts the result holds exactly the given row keys in the given order,
     * each row carrying the expected columns.
     */
    private void checkRowResult(Rows<Integer, Population> result, List<Integer> rowKeys, Population ... expected) throws Exception {
        int index = 0;
        for (Row<Integer, Population> row : result) {
            Assert.assertEquals(rowKeys.toString() + " " + row.getKey(), rowKeys.get(index++), row.getKey());
            checkResult(row.getColumns(), expected);
        }
        Assert.assertTrue(rowKeys.size() == result.size());
    }
/** TEST CITIES */
public static Population NewYork = new Population("NY", "New York", 10000);
public static Population SanDiego = new Population("CA", "San Diego", 20000);
public static Population SanFrancisco = new Population("CA", "San Francisco", 30000);
public static Population Seattle = new Population("WA", "Seattle", 40000);
    /**
     * Annotated composite column name (state, city, zipcode), serialized by
     * AnnotatedCompositeSerializer in @Component ordinal order. equals/hashCode
     * cover all three components so instances can be compared in assertions.
     */
    public static class Population {
        @Component(ordinal=0) String state;
        @Component(ordinal=1) String city;
        @Component(ordinal=2) Integer zipcode;
        // Public no-arg constructor required by AnnotatedCompositeSerializer.
        public Population() {
        }
        public Population(String state, String city, Integer zipcode) {
            this.state = state;
            this.city = city;
            this.zipcode = zipcode;
        }
        public String toString() {
            return "Population [" + state + ", " + city + ", " + zipcode + "]";
        }
        @Override
        public int hashCode() {
            final int prime = 31;
            int result = 1;
            result = prime * result + ((state == null) ? 0 : state.hashCode());
            result = prime * result + ((city == null) ? 0 : city.hashCode());
            result = prime * result + ((zipcode == null) ? 0 : zipcode.hashCode());
            return result;
        }
        @Override
        public boolean equals(Object obj) {
            if (this == obj) return true;
            if (obj == null)return false;
            if (getClass() != obj.getClass()) return false;
            Population other = (Population) obj;
            boolean equal = true;
            equal &= (state != null) ? (state.equals(other.state)) : other.state == null;
            equal &= (city != null) ? (city.equals(other.city)) : other.city == null;
            equal &= (zipcode != null) ? (zipcode.equals(other.zipcode)) : other.zipcode == null;
            return equal;
        }
        // Copy factory; intentionally does not implement Cloneable or call super.clone().
        public Population clone() {
            return new Population(state, city, zipcode);
        }
    }
/**
* 2014 --> -6625834866172541556 2003 --> -5952676706262623311 2009 --> -4850296245464368619
* 2010 --> -4012971246572234480 2005 --> -3904377230599730913 2006 --> -3604768136712843506
* 2012 --> -3193851331505022123 2007 --> -797272529921810676 2001 --> 267648259961407629
* 2002 --> 313927025611477591 2011 --> 2700799408278278395 2004 --> 5455601112738248795
* 2013 --> 8821734684824899422 2008 --> 9033513988054576353
*/
    /** A token range (start/end token strings) plus the year row keys expected inside it. */
    private static class TestRange {
        private String start;
        private String end;
        private List<Integer> expectedRowKeys = new ArrayList<Integer>();
        private TestRange(String start, String end, Integer ... rows) {
            this.start = start;
            this.end = end;
            this.expectedRowKeys.addAll(Arrays.asList(rows));
        }
    }
    /**
     * Hard-coded token ranges derived from the year -> token mapping documented
     * above; each range lists the year rows whose tokens fall inside it.
     */
    private List<TestRange> getTestRanges() {
        List<TestRange> list = new ArrayList<TestRange>();
        list.add(new TestRange("-6625834866172541556", "-4850296245464368619", 2014, 2003, 2009));
        list.add(new TestRange("-4012971246572234480", "-3604768136712843506", 2010, 2005, 2006));
        list.add(new TestRange("-3193851331505022123", "267648259961407629", 2012, 2007, 2001));
        list.add(new TestRange("313927025611477591", "5455601112738248795", 2002, 2011, 2004));
        list.add(new TestRange("8821734684824899422", "9033513988054576353", 2013, 2008));
        return list;
    }
}
| 7,595 |
0 | Create_ds/astyanax/astyanax-test/src/main/java/com/netflix/astyanax/cql | Create_ds/astyanax/astyanax-test/src/main/java/com/netflix/astyanax/cql/test/SchemaTests.java | /**
* Copyright 2013 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.astyanax.cql.test;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import junit.framework.Assert;
import org.junit.BeforeClass;
import org.junit.Test;
import com.google.common.collect.ImmutableMap;
import com.netflix.astyanax.AstyanaxContext;
import com.netflix.astyanax.Keyspace;
import com.netflix.astyanax.annotations.Component;
import com.netflix.astyanax.cql.test.utils.AstyanaxContextFactory;
import com.netflix.astyanax.cql.test.utils.ClusterConfiguration;
import com.netflix.astyanax.cql.test.utils.ClusterConfiguration.Driver;
import com.netflix.astyanax.ddl.ColumnDefinition;
import com.netflix.astyanax.ddl.ColumnFamilyDefinition;
import com.netflix.astyanax.ddl.KeyspaceDefinition;
import com.netflix.astyanax.model.ColumnFamily;
import com.netflix.astyanax.serializers.AnnotatedCompositeSerializer;
import com.netflix.astyanax.serializers.LongSerializer;
import com.netflix.astyanax.serializers.StringSerializer;
public class SchemaTests extends KeyspaceTests {
    /** Initializes the shared test context/keyspace before any schema test runs. */
    @BeforeClass
    public static void init() throws Exception {
        initContext();
    }
    /**
     * Creates keyspaces via the Map-based options API: first SimpleStrategy with
     * replication_factor 1, then NetworkTopologyStrategy with two DCs at RF 3,
     * verifying each resulting keyspace definition and dropping it afterwards.
     */
    @Test
    public void createKeyspaceUsingOptions() throws Exception {
        String keyspaceName = "AstyanaxTestKeyspaceUsingOptions".toLowerCase();
        AstyanaxContext<Keyspace> context = AstyanaxContextFactory.getKeyspace(keyspaceName);
        context.start();
        keyspace = context.getClient();
        Map<String, Object> options = ImmutableMap.<String, Object>builder()
                .put("strategy_options", ImmutableMap.<String, Object>builder()
                        .put("replication_factor", "1")
                        .build())
                .put("strategy_class", "SimpleStrategy")
                .build();
        keyspace.createKeyspace(options);
        // Allow schema propagation before describing.
        Thread.sleep(1000);
        KeyspaceDefinition ksDef = keyspace.describeKeyspace();
        verifyKeyspacePropertiesForSimpleStrategy(keyspaceName, ksDef);
        keyspace.dropKeyspace();
        /** NETWORK TOPOLOGY */
        keyspaceName = "AstyanaxTestKeyspaceUsingOptions2".toLowerCase();
        context = AstyanaxContextFactory.getKeyspace(keyspaceName);
        context.start();
        keyspace = context.getClient();
        options = ImmutableMap.<String, Object>builder()
                .put("strategy_options", ImmutableMap.<String, Object>builder()
                        .put("us-east", "3")
                        .put("eu-west", "3")
                        .build())
                .put("strategy_class", "NetworkTopologyStrategy")
                .build();
        keyspace.createKeyspace(options);
        Thread.sleep(1000);
        ksDef = keyspace.describeKeyspace();
        verifyKeyspacePropertiesForNetworkTopology(keyspaceName, ksDef);
        keyspace.dropKeyspace();
    }
    /**
     * Same scenarios as createKeyspaceUsingOptions but driven through the
     * Properties-based API, using dotted keys for nested strategy options.
     */
    @Test
    public void createKeyspaceUsingProperties() throws Exception {
        /** SIMPLE STRATEGY */
        String keyspaceName = "AstyanaxTestKeyspaceUsingProperties".toLowerCase();
        AstyanaxContext<Keyspace> context = AstyanaxContextFactory.getKeyspace(keyspaceName);
        context.start();
        keyspace = context.getClient();
        Properties props = new Properties();
        // Dotted key encodes the nested strategy_options map.
        props.setProperty("strategy_options.replication_factor", "1");
        props.setProperty("strategy_class", "SimpleStrategy");
        keyspace.createKeyspace(props);
        // Allow schema propagation before describing.
        Thread.sleep(1000);
        KeyspaceDefinition ksDef = keyspace.describeKeyspace();
        verifyKeyspacePropertiesForSimpleStrategy(keyspaceName, ksDef);
        keyspace.dropKeyspace();
        /** NETWORK TOPOLOGY STRATEGY */
        keyspaceName = "AstyanaxTestKeyspaceUsingProperties2".toLowerCase();
        context = AstyanaxContextFactory.getKeyspace(keyspaceName);
        context.start();
        keyspace = context.getClient();
        props = new Properties();
        props.setProperty("strategy_options.us-east", "3");
        props.setProperty("strategy_options.eu-west", "3");
        props.setProperty("strategy_class", "NetworkTopologyStrategy");
        keyspace.createKeyspace(props);
        Thread.sleep(1000);
        ksDef = keyspace.describeKeyspace();
        verifyKeyspacePropertiesForNetworkTopology(keyspaceName, ksDef);
        keyspace.dropKeyspace();
    }
private void verifyKeyspacePropertiesForSimpleStrategy(String keyspaceName, KeyspaceDefinition ksDef) throws Exception {
Assert.assertEquals(keyspaceName, ksDef.getName());
Assert.assertTrue(ksDef.getStrategyClass().contains("SimpleStrategy"));
Properties properties = ksDef.getProperties();
Assert.assertEquals(keyspaceName, properties.getProperty("name"));
Assert.assertEquals("true", properties.get("durable_writes"));
String strategyClass = properties.getProperty("strategy_class");
if (strategyClass == null) {
strategyClass = properties.getProperty("replication.class");
}
Assert.assertTrue(ksDef.getStrategyClass().contains("SimpleStrategy"));
Map<String, String> strategyOptions = ksDef.getStrategyOptions();
Assert.assertEquals("1", strategyOptions.get("replication_factor"));
}
private void verifyKeyspacePropertiesForNetworkTopology(String keyspaceName, KeyspaceDefinition ksDef) throws Exception {
Assert.assertEquals(keyspaceName, ksDef.getName());
Assert.assertTrue(ksDef.getStrategyClass().contains("NetworkTopologyStrategy"));
Properties properties = ksDef.getProperties();
Assert.assertEquals(keyspaceName, properties.getProperty("name"));
Assert.assertEquals("true", properties.get("durable_writes"));
String strategyClass = properties.getProperty("strategy_class");
if (strategyClass == null) {
strategyClass = properties.getProperty("replication.class");
}
Assert.assertTrue(ksDef.getStrategyClass().contains("NetworkTopologyStrategy"));
Map<String, String> strategyOptions = ksDef.getStrategyOptions();
Assert.assertEquals("3", strategyOptions.get("us-east"));
Assert.assertEquals("3", strategyOptions.get("eu-west"));
}
    /**
     * Creates a keyspace together with two column families in one call via the
     * options API, then verifies keyspace and per-CF properties.
     * NOTE(review): testcf1 never sets read_repair_chance, so the "0.1" assertion
     * presumably relies on the server default — confirm against the target
     * Cassandra version.
     */
    @Test
    public void createKeyspaceAndCFsUsingUsingOptions() throws Exception {
        String keyspaceName = "AstyanaxTestKeyspaceAndCFsUsingOptions".toLowerCase();
        AstyanaxContext<Keyspace> context = AstyanaxContextFactory.getKeyspace(keyspaceName);
        context.start();
        keyspace = context.getClient();
        Map<String, Object> options = ImmutableMap.<String, Object>builder()
                .put("strategy_options", ImmutableMap.<String, Object>builder()
                        .put("replication_factor", "1")
                        .build())
                .put("strategy_class", "SimpleStrategy")
                .build();
        // Two CFs created atomically with the keyspace, each with its own options.
        Map<ColumnFamily, Map<String, Object>> cfs = ImmutableMap.<ColumnFamily, Map<String, Object>>builder()
                .put(new ColumnFamily<String, String>("testcf1", StringSerializer.get(), StringSerializer.get()),
                        ImmutableMap.<String, Object>builder()
                                .put("bloom_filter_fp_chance", 0.01)
                                .build())
                .put(new ColumnFamily<Long, String>("testcf2", LongSerializer.get(), StringSerializer.get()),
                        ImmutableMap.<String, Object>builder()
                                .put("read_repair_chance", 0.2)
                                .put("bloom_filter_fp_chance", 0.01)
                                .build())
                .build();
        keyspace.createKeyspace(options, cfs);
        // Allow schema propagation before describing.
        Thread.sleep(1000);
        KeyspaceDefinition ksDef = keyspace.describeKeyspace();
        verifyKeyspacePropertiesForSimpleStrategy(keyspaceName, ksDef);
        Map<String, String> strategyOptions = ksDef.getStrategyOptions();
        Assert.assertEquals("1", strategyOptions.get("replication_factor"));
        Properties cfProps = keyspace.getColumnFamilyProperties("testcf1");
        Assert.assertEquals("0.1", String.valueOf(cfProps.get("read_repair_chance")));
        Assert.assertEquals("0.01", String.valueOf(cfProps.get("bloom_filter_fp_chance")));
        Assert.assertEquals("KEYS_ONLY", String.valueOf(cfProps.get("caching")));
        Assert.assertEquals("4", String.valueOf(cfProps.get("min_compaction_threshold")));
        Assert.assertEquals("32", String.valueOf(cfProps.get("max_compaction_threshold")));
        cfProps = keyspace.getColumnFamilyProperties("testcf2");
        Assert.assertEquals("0.2", String.valueOf(cfProps.get("read_repair_chance")));
        Assert.assertEquals("0.01", String.valueOf(cfProps.get("bloom_filter_fp_chance")));
        Assert.assertEquals("KEYS_ONLY", String.valueOf(cfProps.get("caching")));
        Assert.assertEquals("4", String.valueOf(cfProps.get("min_compaction_threshold")));
        Assert.assertEquals("32", String.valueOf(cfProps.get("max_compaction_threshold")));
        keyspace.dropKeyspace();
    }
    /**
     * Creates a keyspace, then two column families directly — one via a typed
     * ColumnFamily plus options, one via an options map alone ("name" key) —
     * and verifies the CF properties through three views: raw Properties,
     * ksDef.getColumnFamily(name), and ksDef.getColumnFamilyList().
     */
    @Test
    public void createKeyspaceAndCFsDirectly() throws Exception {
        String keyspaceName = "AstyanaxTestKeyspaceAndCFsDirect".toLowerCase();
        AstyanaxContext<Keyspace> context = AstyanaxContextFactory.getKeyspace(keyspaceName);
        context.start();
        keyspace = context.getClient();
        Map<String, Object> ksOptions = ImmutableMap.<String, Object>builder()
                .put("strategy_options", ImmutableMap.<String, Object>builder()
                        .put("replication_factor", "1")
                        .build())
                .put("strategy_class", "SimpleStrategy")
                .build();
        keyspace.createKeyspace(ksOptions);
        // CF #1: typed ColumnFamily object plus an options map.
        ColumnFamily<String, String> cf1 = new ColumnFamily<String, String>("testcf1", StringSerializer.get(), StringSerializer.get());
        Map<String, Object> options1 = ImmutableMap.<String, Object>builder()
                .put("read_repair_chance", 0.2)
                .put("bloom_filter_fp_chance", 0.01)
                .build();
        keyspace.createColumnFamily(cf1, options1);
        // CF #2: options map alone; the "name" entry names the CF.
        Map<String, Object> options2 = new HashMap<String, Object>();
        options2.put("name", "testcf2");
        options2.put("read_repair_chance", 0.4);
        options2.put("bloom_filter_fp_chance", 0.01);
        keyspace.createColumnFamily(options2);
        // Allow schema propagation before describing.
        Thread.sleep(1000);
        KeyspaceDefinition ksDef = keyspace.describeKeyspace();
        verifyKeyspacePropertiesForSimpleStrategy(keyspaceName, ksDef);
        Properties cfProps = keyspace.getColumnFamilyProperties("testcf1");
        Assert.assertEquals("0.2", String.valueOf(cfProps.get("read_repair_chance")));
        Assert.assertEquals("0.01", String.valueOf(cfProps.get("bloom_filter_fp_chance")));
        Assert.assertEquals("KEYS_ONLY", String.valueOf(cfProps.get("caching")));
        Assert.assertEquals("4", String.valueOf(cfProps.get("min_compaction_threshold")));
        Assert.assertEquals("32", String.valueOf(cfProps.get("max_compaction_threshold")));
        cfProps = keyspace.getColumnFamilyProperties("testcf2");
        Assert.assertEquals("0.4", String.valueOf(cfProps.get("read_repair_chance")));
        Assert.assertEquals("0.01", String.valueOf(cfProps.get("bloom_filter_fp_chance")));
        Assert.assertEquals("KEYS_ONLY", String.valueOf(cfProps.get("caching")));
        Assert.assertEquals("4", String.valueOf(cfProps.get("min_compaction_threshold")));
        Assert.assertEquals("32", String.valueOf(cfProps.get("max_compaction_threshold")));
        // Same expectations through the typed ColumnFamilyDefinition accessor.
        ColumnFamilyDefinition cfDef = ksDef.getColumnFamily("testcf1");
        Assert.assertEquals("testcf1", cfDef.getName());
        Assert.assertEquals(0.2, cfDef.getReadRepairChance());
        Assert.assertEquals("KEYS_ONLY", cfDef.getCaching());
        Assert.assertTrue(32 == cfDef.getMaxCompactionThreshold());
        Assert.assertTrue(4 == cfDef.getMinCompactionThreshold());
        Assert.assertEquals(0.01, cfDef.getBloomFilterFpChance());
        cfDef = ksDef.getColumnFamily("testcf2");
        Assert.assertEquals("testcf2", cfDef.getName());
        Assert.assertEquals(0.4, cfDef.getReadRepairChance());
        Assert.assertEquals("KEYS_ONLY", cfDef.getCaching());
        Assert.assertTrue(32 == cfDef.getMaxCompactionThreshold());
        Assert.assertTrue(4 == cfDef.getMinCompactionThreshold());
        Assert.assertEquals(0.01, cfDef.getBloomFilterFpChance());
        // And once more through the full CF list.
        List<ColumnFamilyDefinition> cfDefs = ksDef.getColumnFamilyList();
        Assert.assertTrue(2 == cfDefs.size());
        cfDef = cfDefs.get(0);
        Assert.assertEquals("testcf1", cfDef.getName());
        Assert.assertEquals(0.2, cfDef.getReadRepairChance());
        Assert.assertEquals("KEYS_ONLY", cfDef.getCaching());
        Assert.assertTrue(32 == cfDef.getMaxCompactionThreshold());
        Assert.assertTrue(4 == cfDef.getMinCompactionThreshold());
        Assert.assertEquals(0.01, cfDef.getBloomFilterFpChance());
        cfDef = cfDefs.get(1);
        Assert.assertEquals("testcf2", cfDef.getName());
        Assert.assertEquals(0.4, cfDef.getReadRepairChance());
        Assert.assertEquals("KEYS_ONLY", cfDef.getCaching());
        Assert.assertTrue(32 == cfDef.getMaxCompactionThreshold());
        Assert.assertTrue(4 == cfDef.getMinCompactionThreshold());
        Assert.assertEquals(0.01, cfDef.getBloomFilterFpChance());
        keyspace.dropKeyspace();
    }
@Test
public void createKeyspaceWithCompositeCF() throws Exception {
// Annotated composite class
class Population {
@Component(ordinal=0) String country;
@Component(ordinal=1) String state;
@Component(ordinal=2) String city;
@Component(ordinal=3) Integer zip;
@Component(ordinal=3) Date district;
// Must have public default constructor
public Population() {
}
}
AnnotatedCompositeSerializer<Population> compSerializer = new AnnotatedCompositeSerializer<Population>(Population.class);
ColumnFamily<String, Population> CF_POPULATION =
new ColumnFamily<String, Population>("population", StringSerializer.get(), compSerializer);
String keyspaceName = "AstyanaxTestKeyspaceCompositeCFs".toLowerCase();
AstyanaxContext<Keyspace> context = AstyanaxContextFactory.getKeyspace(keyspaceName);
context.start();
keyspace = context.getClient();
Map<String, Object> ksOptions = ImmutableMap.<String, Object>builder()
.put("strategy_options", ImmutableMap.<String, Object>builder()
.put("replication_factor", "1")
.build())
.put("strategy_class", "SimpleStrategy")
.build();
keyspace.createKeyspace(ksOptions);
KeyspaceDefinition ksDef = keyspace.describeKeyspace();
Assert.assertEquals(keyspaceName, ksDef.getName());
keyspace.createColumnFamily(CF_POPULATION, ImmutableMap.<String, Object>builder()
.put("default_validation_class", "UTF8Type")
.put("key_validation_class", "UTF8Type")
.put("comparator_type", "CompositeType(UTF8Type, UTF8Type, UTF8Type, Int32Type, DateType)")
.build());
if (ClusterConfiguration.getDriver().equals(Driver.JAVA_DRIVER)) {
List<ColumnFamilyDefinition> list = ksDef.getColumnFamilyList();
Assert.assertTrue(1 == list.size());
ColumnFamilyDefinition cfDef = list.get(0);
Assert.assertEquals("population", cfDef.getName());
List<ColumnDefinition> colDefs = cfDef.getColumnDefinitionList();
Assert.assertTrue(7 == colDefs.size());
for (int i=1; i<=5; i++) {
ColumnDefinition colDef = colDefs.get(i-1);
Assert.assertEquals("column" + i, colDef.getName());
Assert.assertNotNull(colDef.getValidationClass());
}
ColumnDefinition colDef = colDefs.get(6);
Assert.assertEquals("value", colDef.getName());
Assert.assertNotNull(colDef.getValidationClass());
cfDef = ksDef.getColumnFamily("population");
Assert.assertEquals("population", cfDef.getName());
colDefs = cfDef.getColumnDefinitionList();
Assert.assertTrue(7 == colDefs.size());
for (int i=1; i<=5; i++) {
colDef = colDefs.get(i-1);
Assert.assertEquals("column" + i, colDef.getName());
Assert.assertNotNull(colDef.getValidationClass());
}
colDef = colDefs.get(6);
Assert.assertEquals("value", colDef.getName());
Assert.assertNotNull(colDef.getValidationClass());
}
keyspace.dropKeyspace();
}
    /**
     * Verifies that updateKeyspace() alters replication settings for both
     * SimpleStrategy (RF 1 -> 2) and NetworkTopologyStrategy (per-DC 3 -> 2).
     */
    @Test
    public void alterKeyspaceOptions() throws Exception {
        String keyspaceName = "AstyanaxTestKeyspaceAlterOptions".toLowerCase();
        AstyanaxContext<Keyspace> context = AstyanaxContextFactory.getKeyspace(keyspaceName);
        context.start();
        keyspace = context.getClient();
        // Create with SimpleStrategy, RF = 1.
        Map<String, Object> options = ImmutableMap.<String, Object>builder()
                .put("strategy_options", ImmutableMap.<String, Object>builder()
                        .put("replication_factor", "1")
                        .build())
                .put("strategy_class", "SimpleStrategy")
                .build();
        keyspace.createKeyspace(options);
        // Give the cluster a moment to reach schema agreement before describing.
        Thread.sleep(1000);
        KeyspaceDefinition ksDef = keyspace.describeKeyspace();
        verifyKeyspacePropertiesForSimpleStrategy(keyspaceName, ksDef);
        // Alter to RF = 2 and confirm the change is visible.
        keyspace.updateKeyspace(ImmutableMap.<String, Object>builder()
                .put("strategy_options", ImmutableMap.<String, Object>builder()
                        .put("replication_factor", "2")
                        .build())
                .put("strategy_class", "SimpleStrategy")
                .build());
        ksDef = keyspace.describeKeyspace();
        Assert.assertEquals("2", ksDef.getStrategyOptions().get("replication_factor"));
        keyspace.dropKeyspace();
        /** NETWORK TOPOLOGY */
        keyspaceName = "AstyanaxTestKeyspaceAlterOptions2".toLowerCase();
        context = AstyanaxContextFactory.getKeyspace(keyspaceName);
        context.start();
        keyspace = context.getClient();
        // Create with NetworkTopologyStrategy, 3 replicas in each DC.
        options = ImmutableMap.<String, Object>builder()
                .put("strategy_options", ImmutableMap.<String, Object>builder()
                        .put("us-east", "3")
                        .put("eu-west", "3")
                        .build())
                .put("strategy_class", "NetworkTopologyStrategy")
                .build();
        keyspace.createKeyspace(options);
        Thread.sleep(1000);
        KeyspaceDefinition ksDef2 = keyspace.describeKeyspace();
        verifyKeyspacePropertiesForNetworkTopology(keyspaceName, ksDef2);
        // Alter both DCs down to 2 replicas.
        options = ImmutableMap.<String, Object>builder()
                .put("strategy_options", ImmutableMap.<String, Object>builder()
                        .put("us-east", "2")
                        .put("eu-west", "2")
                        .build())
                .put("strategy_class", "NetworkTopologyStrategy")
                .build();
        keyspace.updateKeyspace(options);
        ksDef2 = keyspace.describeKeyspace();
        // NOTE(review): leftover debug print; consider removing or using a logger.
        System.out.println(ksDef2.getStrategyOptions());
        Assert.assertEquals("2", ksDef2.getStrategyOptions().get("us-east"));
        Assert.assertEquals("2", ksDef2.getStrategyOptions().get("eu-west"));
        keyspace.dropKeyspace();
    }
@Test
public void alterCFOptions() throws Exception {
String keyspaceName = "AstyanaxTestKeyspaceAlterCFOptions".toLowerCase();
AstyanaxContext<Keyspace> context = AstyanaxContextFactory.getKeyspace(keyspaceName);
context.start();
keyspace = context.getClient();
Map<String, Object> options = ImmutableMap.<String, Object>builder()
.put("strategy_options", ImmutableMap.<String, Object>builder()
.put("replication_factor", "1")
.build())
.put("strategy_class", "SimpleStrategy")
.build();
keyspace.createKeyspace(options);
Thread.sleep(1000);
ColumnFamily<String, String> cf = new ColumnFamily<String, String>("testaltercf1", StringSerializer.get(), StringSerializer.get());
keyspace.createColumnFamily(cf, null);
Assert.assertEquals(0.1, keyspace.getColumnFamilyProperties("testaltercf1").get("read_repair_chance"));
keyspace.updateColumnFamily(cf, ImmutableMap.<String, Object>builder()
.put("read_repair_chance", 0.2)
.build());
Assert.assertEquals(0.2, keyspace.getColumnFamilyProperties("testaltercf1").get("read_repair_chance"));
keyspace.dropKeyspace();
}
@Test
public void createAndDeleteCF() throws Exception {
String keyspaceName = "AstyanaxTestKeyspaceCreateDeleteCF".toLowerCase();
AstyanaxContext<Keyspace> context = AstyanaxContextFactory.getKeyspace(keyspaceName);
context.start();
keyspace = context.getClient();
Map<String, Object> options = ImmutableMap.<String, Object>builder()
.put("strategy_options", ImmutableMap.<String, Object>builder()
.put("replication_factor", "1")
.build())
.put("strategy_class", "SimpleStrategy")
.build();
keyspace.createKeyspace(options);
Thread.sleep(1000);
ColumnFamily<String, String> cf = new ColumnFamily<String, String>("testcreatedeletecf1", StringSerializer.get(), StringSerializer.get());
keyspace.createColumnFamily(cf, null);
Assert.assertEquals(0.1, keyspace.getColumnFamilyProperties("testcreatedeletecf1").get("read_repair_chance"));
keyspace.dropColumnFamily(cf);
try {
keyspace.getColumnFamilyProperties("testaltercf1");
Assert.fail("Should have gotten CF not found ex");
} catch(RuntimeException e) {
} finally {
keyspace.dropKeyspace();
}
}
@Test
public void createAndDeleteKeyspace() throws Exception {
String keyspaceName = "AstyanaxTestKeyspaceCreateDeleteKS".toLowerCase();
AstyanaxContext<Keyspace> context = AstyanaxContextFactory.getKeyspace(keyspaceName);
context.start();
keyspace = context.getClient();
Map<String, Object> options = ImmutableMap.<String, Object>builder()
.put("strategy_options", ImmutableMap.<String, Object>builder()
.put("replication_factor", "1")
.build())
.put("strategy_class", "SimpleStrategy")
.build();
keyspace.createKeyspace(options);
Thread.sleep(1000);
KeyspaceDefinition ksDef = keyspace.describeKeyspace();
Assert.assertTrue(ksDef.getStrategyClass().contains("SimpleStrategy"));
keyspace.dropKeyspace();
try {
keyspace.describeKeyspace();
Assert.fail("Should have gotten KS not found ex");
} catch(RuntimeException e) {
}
}
@Test
public void keyspaceDescribePartitioner() throws Exception {
String keyspaceName = "AstyanaxTestKeyspaceDescribeRing".toLowerCase();
AstyanaxContext<Keyspace> context = AstyanaxContextFactory.getKeyspace(keyspaceName);
context.start();
keyspace = context.getClient();
Map<String, Object> options = ImmutableMap.<String, Object>builder()
.put("strategy_options", ImmutableMap.<String, Object>builder()
.put("replication_factor", "1")
.build())
.put("strategy_class", "SimpleStrategy")
.build();
keyspace.createKeyspace(options);
Thread.sleep(1000);
String partitioner = keyspace.describePartitioner();
Assert.assertNotNull(partitioner);
keyspace.dropKeyspace();
}
} | 7,596 |
0 | Create_ds/astyanax/astyanax-test/src/main/java/com/netflix/astyanax/cql | Create_ds/astyanax/astyanax-test/src/main/java/com/netflix/astyanax/cql/test/SingleRowColumnPaginationTests.java | /**
* Copyright 2013 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.astyanax.cql.test;
import java.util.Random;
import junit.framework.Assert;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import com.netflix.astyanax.cql.reads.model.CqlRangeBuilder;
import com.netflix.astyanax.cql.test.utils.ReadTests;
import com.netflix.astyanax.cql.test.utils.TestUtils;
import com.netflix.astyanax.model.Column;
import com.netflix.astyanax.model.ColumnFamily;
import com.netflix.astyanax.model.ColumnList;
import com.netflix.astyanax.query.RowQuery;
public class SingleRowColumnPaginationTests extends ReadTests {

    /** Column family shared by every pagination test in this class. */
    private static ColumnFamily<String, String> CF_COLUMN_RANGE_TEST = TestUtils.CF_COLUMN_RANGE_TEST;

    @BeforeClass
    public static void init() throws Exception {
        initContext();
        keyspace.createColumnFamily(CF_COLUMN_RANGE_TEST, null);
        CF_COLUMN_RANGE_TEST.describe(keyspace);
    }

    @AfterClass
    public static void tearDown() throws Exception {
        keyspace.dropColumnFamily(CF_COLUMN_RANGE_TEST);
    }

    /**
     * Paginates every row while data is present, then deletes the rows and
     * paginates again expecting no columns.
     */
    @Test
    public void runAllTests() throws Exception {
        boolean rowDeleted = false;
        TestUtils.populateRowsForColumnRange(keyspace);
        Thread.sleep(1000);
        paginateColumnsForAllRows(rowDeleted);

        TestUtils.deleteRowsForColumnRange(keyspace);
        Thread.sleep(1000);
        rowDeleted = true;
        paginateColumnsForAllRows(rowDeleted);
    }

    /** Runs the per-row pagination check for row keys 'A'..'Z'. */
    private void paginateColumnsForAllRows(boolean rowDeleted) throws Exception {
        Random random = new Random();
        for (char rowKey = 'A'; rowKey <= 'Z'; rowKey++) {
            // Pseudo-random page size in [1, 10]; a draw of 0 falls back to 10.
            int pageSize = random.nextInt(26) % 10;
            if (pageSize <= 0) {
                pageSize = 10;
            }
            paginateColumnsForRowKey(String.valueOf(rowKey), rowDeleted, pageSize);
        }
    }

    /**
     * Auto-paginates one row's columns ('a'..'z') and verifies they arrive in
     * order with no page exceeding the fetch size. For a deleted row, no
     * columns may be returned at all.
     */
    private void paginateColumnsForRowKey(String rowKey, boolean rowDeleted, int pageSize) throws Exception {
        RowQuery<String, String> query = keyspace
                .prepareQuery(TestUtils.CF_COLUMN_RANGE_TEST)
                .getKey(rowKey)
                .autoPaginate(true)
                .withColumnRange(new CqlRangeBuilder<String>()
                        .setStart("a")
                        .setFetchSize(pageSize)
                        .build());

        int expected = 1;
        ColumnList<String> page;
        while (!(page = query.execute().getResult()).isEmpty()) {
            Assert.assertTrue(page.size() <= pageSize);
            for (Column<String> column : page) {
                // Column names are 'a'..'z'; each ordinal must match the running count.
                Assert.assertEquals(expected, column.getName().charAt(0) - 'a' + 1);
                expected++;
            }
        }
        if (rowDeleted) {
            Assert.assertTrue(expected == 1);
        }
    }
}
| 7,597 |
0 | Create_ds/astyanax/astyanax-test/src/main/java/com/netflix/astyanax/cql | Create_ds/astyanax/astyanax-test/src/main/java/com/netflix/astyanax/cql/test/CFStandardTests.java | /**
* Copyright 2013 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.astyanax.cql.test;
import java.io.Serializable;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Objects;
import java.util.Random;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import junit.framework.Assert;
import org.apache.commons.lang.RandomStringUtils;
import org.apache.log4j.Logger;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import com.netflix.astyanax.ColumnListMutation;
import com.netflix.astyanax.MutationBatch;
import com.netflix.astyanax.connectionpool.OperationResult;
import com.netflix.astyanax.connectionpool.exceptions.ConnectionException;
import com.netflix.astyanax.connectionpool.exceptions.NotFoundException;
import com.netflix.astyanax.model.Column;
import com.netflix.astyanax.model.ColumnFamily;
import com.netflix.astyanax.model.ColumnList;
import com.netflix.astyanax.model.ColumnSlice;
import com.netflix.astyanax.model.ConsistencyLevel;
import com.netflix.astyanax.model.CqlResult;
import com.netflix.astyanax.model.Row;
import com.netflix.astyanax.model.Rows;
import com.netflix.astyanax.query.ColumnQuery;
import com.netflix.astyanax.query.RowQuery;
import com.netflix.astyanax.serializers.ObjectSerializer;
import com.netflix.astyanax.serializers.StringSerializer;
import com.netflix.astyanax.util.RangeBuilder;
public class CFStandardTests extends KeyspaceTests {
    private static final Logger LOG = Logger.getLogger(CFStandardTests.class);

    /** Primary column family exercised by most tests in this class. */
    public static ColumnFamily<String, String> CF_STANDARD1 = ColumnFamily
            .newColumnFamily(
                    "Standard1",
                    StringSerializer.get(),
                    StringSerializer.get());

    /** Secondary column family used as the destination in testCopy. */
    public static ColumnFamily<String, String> CF_STANDARD2 = ColumnFamily
            .newColumnFamily(
                    "Standard2",
                    StringSerializer.get(),
                    StringSerializer.get());

    /** Column family for the UserInfo / hasValue tests. */
    private static ColumnFamily<String, String> CF_USER_INFO = ColumnFamily.newColumnFamily(
            "UserInfo", // Column Family Name
            StringSerializer.get(), // Key Serializer
            StringSerializer.get()); // Column Serializer
    /** Creates the keyspace context and the three column families used by these tests. */
    @BeforeClass
    public static void init() throws Exception {
        initContext();
        keyspace.createColumnFamily(CF_STANDARD1, null);
        keyspace.createColumnFamily(CF_STANDARD2, null);
        keyspace.createColumnFamily(CF_USER_INFO, null);
        // describe() primes each ColumnFamily definition from the live schema.
        CF_STANDARD1.describe(keyspace);
        CF_STANDARD2.describe(keyspace);
        CF_USER_INFO.describe(keyspace);
    }
    /** Drops every column family created in init(). */
    @AfterClass
    public static void tearDown() throws Exception {
        keyspace.dropColumnFamily(CF_STANDARD1);
        keyspace.dropColumnFamily(CF_STANDARD2);
        keyspace.dropColumnFamily(CF_USER_INFO);
    }
@Test
public void testSerializedClassValue() throws Exception {
UserInfo smo = new UserInfo();
smo.setLastName("Landau");
smo.setFirstName("Eran");
ByteBuffer bb = ObjectSerializer.get().toByteBuffer(smo);
keyspace.prepareColumnMutation(CF_STANDARD1, "Key_SerializeTest",
"Column1").putValue(bb, null).execute();
UserInfo smo2 = (UserInfo) keyspace.prepareQuery(CF_STANDARD1)
.getKey("Key_SerializeTest").getColumn("Column1").execute()
.getResult().getValue(ObjectSerializer.get());
Assert.assertEquals(smo, smo2);
}
    /**
     * Exercises set / read / delete of a single column for each supported value
     * type (String, byte, short, int, long, float, double), plus an explicit
     * write timestamp. After each delete, reading the column is expected to
     * fail; several exception types are accepted (presumably because different
     * driver implementations signal a missing column differently -- TODO confirm).
     */
    @Test
    public void testSingleOps() throws Exception {
        String key = "SingleOpsTest";
        Random prng = new Random();
        // Set a string value
        {
            String column = "StringColumn";
            String value = RandomStringUtils.randomAlphanumeric(32);
            // Set
            keyspace.prepareColumnMutation(CF_STANDARD1, key, column)
                    .putValue(value, null).execute();
            // Read
            ColumnQuery<String> query = keyspace.prepareQuery(CF_STANDARD1).getKey(key).getColumn(column);
            String v = query.execute().getResult().getStringValue();
            Assert.assertEquals(value, v);
            // Delete
            keyspace.prepareColumnMutation(CF_STANDARD1, key, column)
                    .deleteColumn().execute();
            // reading the deleted column must fail
            try {
                keyspace.prepareQuery(CF_STANDARD1).getKey(key)
                        .getColumn(column).execute().getResult()
                        .getStringValue();
                Assert.fail();
            } catch (RuntimeException e) {
            } catch (NotFoundException e) {
            } catch (ConnectionException e) {
                // an infrastructure failure is a real test failure
                e.printStackTrace();
                Assert.fail();
            }
        }
        // Set a byte value
        {
            String column = "ByteColumn";
            byte value = (byte) prng.nextInt(Byte.MAX_VALUE);
            // Set
            keyspace.prepareColumnMutation(CF_STANDARD1, key, column)
                    .putValue(value, null).execute();
            // Read
            byte v = keyspace.prepareQuery(CF_STANDARD1).getKey(key)
                    .getColumn(column).execute().getResult().getByteValue();
            Assert.assertEquals(value, v);
            // Delete
            keyspace.prepareColumnMutation(CF_STANDARD1, key, column)
                    .deleteColumn().execute();
            // verify column gone
            try {
                keyspace.prepareQuery(CF_STANDARD1).getKey(key)
                        .getColumn(column).execute().getResult().getByteValue();
                Assert.fail();
            } catch (NullPointerException e) {
                // expected
            } catch (NotFoundException e) {
                // expected
            }
        }
        // Set a short value
        {
            String column = "ShortColumn";
            short value = (short) prng.nextInt(Short.MAX_VALUE);
            // Set
            keyspace.prepareColumnMutation(CF_STANDARD1, key, column)
                    .putValue(value, null).execute();
            // Read
            short v = keyspace.prepareQuery(CF_STANDARD1).getKey(key)
                    .getColumn(column).execute().getResult().getShortValue();
            Assert.assertEquals(value, v);
            // Delete
            keyspace.prepareColumnMutation(CF_STANDARD1, key, column)
                    .deleteColumn().execute();
            // verify column gone
            try {
                keyspace.prepareQuery(CF_STANDARD1).getKey(key)
                        .getColumn(column).execute().getResult().getShortValue();
                Assert.fail();
            } catch (NullPointerException e) {
                // expected
            } catch (NotFoundException e) {
                // expected
            }
        }
        // Set a int value
        {
            String column = "IntColumn";
            int value = prng.nextInt();
            // Set
            keyspace.prepareColumnMutation(CF_STANDARD1, key, column)
                    .putValue(value, null).execute();
            // Read
            int v = keyspace.prepareQuery(CF_STANDARD1).getKey(key)
                    .getColumn(column).execute().getResult().getIntegerValue();
            Assert.assertEquals(value, v);
            // Delete
            keyspace.prepareColumnMutation(CF_STANDARD1, key, column)
                    .deleteColumn().execute();
            // verify column gone
            try {
                keyspace.prepareQuery(CF_STANDARD1).getKey(key)
                        .getColumn(column).execute().getResult().getIntegerValue();
                Assert.fail();
            } catch (NullPointerException e) {
                // expected
            } catch (NotFoundException e) {
                // expected
            }
        }
        // Set a long value
        {
            String column = "LongColumn";
            long value = prng.nextLong();
            // Set
            keyspace.prepareColumnMutation(CF_STANDARD1, key, column)
                    .putValue(value, null).execute();
            // Read
            long v = keyspace.prepareQuery(CF_STANDARD1).getKey(key)
                    .getColumn(column).execute().getResult().getLongValue();
            Assert.assertEquals(value, v);
            // reading an 8-byte long as an integer should fail
            try {
                keyspace.prepareQuery(CF_STANDARD1).getKey(key)
                        .getColumn(column).execute().getResult()
                        .getIntegerValue();
                Assert.fail();
            } catch (Exception e) {
                // expected
            }
            // Delete
            keyspace.prepareColumnMutation(CF_STANDARD1, key, column)
                    .deleteColumn().execute();
            // verify column gone
            try {
                keyspace.prepareQuery(CF_STANDARD1).getKey(key)
                        .getColumn(column).execute().getResult().getLongValue();
                Assert.fail();
            } catch (NullPointerException e) {
                // expected
            } catch (NotFoundException e) {
                // expected
            }
        }
        // Set a float value
        {
            String column = "FloatColumn";
            float value = prng.nextFloat();
            // Set
            keyspace.prepareColumnMutation(CF_STANDARD1, key, column)
                    .putValue(value, null).execute();
            // Read
            float v = keyspace.prepareQuery(CF_STANDARD1).getKey(key)
                    .getColumn(column).execute().getResult().getFloatValue();
            Assert.assertEquals(value, v);
            // Delete
            keyspace.prepareColumnMutation(CF_STANDARD1, key, column)
                    .deleteColumn().execute();
            // verify column gone
            try {
                keyspace.prepareQuery(CF_STANDARD1).getKey(key)
                        .getColumn(column).execute().getResult().getFloatValue();
                Assert.fail();
            } catch (NullPointerException e) {
                // expected
            } catch (NotFoundException e) {
                // expected
            }
        }
        // Set a double value
        {
            String column = "DoubleColumn";
            double value = prng.nextDouble();
            // Set
            keyspace.prepareColumnMutation(CF_STANDARD1, key, column)
                    .putValue(value, null).execute();
            // Read
            double v = keyspace.prepareQuery(CF_STANDARD1).getKey(key)
                    .getColumn(column).execute().getResult().getDoubleValue();
            Assert.assertEquals(value, v);
            // reading an 8-byte double as an integer should fail
            try {
                keyspace.prepareQuery(CF_STANDARD1).getKey(key)
                        .getColumn(column).execute().getResult()
                        .getIntegerValue();
                Assert.fail();
            } catch (Exception e) {
                // expected
            }
            // Delete
            keyspace.prepareColumnMutation(CF_STANDARD1, key, column)
                    .deleteColumn().execute();
            // verify column gone
            try {
                keyspace.prepareQuery(CF_STANDARD1).getKey(key)
                        .getColumn(column).execute().getResult()
                        .getDoubleValue();
                Assert.fail();
            } catch (NullPointerException e) {
                // expected
            } catch (NotFoundException e) {
            } catch (ConnectionException e) {
                Assert.fail();
            }
        }
        // Set long column with timestamp
        {
            String column = "TimestampColumn";
            long value = prng.nextLong();
            // Set, pinning the write timestamp to a known value
            keyspace.prepareColumnMutation(CF_STANDARD1, key, column)
                    .withTimestamp(100)
                    .putValue(value, null)
                    .execute();
            // Read back and verify the timestamp survived the round trip
            Column<String> c = keyspace.prepareQuery(CF_STANDARD1).getKey(key)
                    .getColumn(column).execute().getResult();
            Assert.assertEquals(100, c.getTimestamp());
        }
    }
@Test
public void testEmptyColumn() {
ColumnListMutation<String> mutation = keyspace.prepareMutationBatch().withRow(CF_STANDARD1, "ABC");
try {
mutation.putColumn(null, 1L);
Assert.fail();
}
catch (Exception e) {
LOG.info(e.getMessage());
}
try {
mutation.putColumn("", 1L);
Assert.fail();
}
catch (Exception e) {
LOG.info(e.getMessage());
}
try {
mutation.deleteColumn("");
Assert.fail();
}
catch (Exception e) {
LOG.info(e.getMessage());
}
try {
mutation.deleteColumn(null);
Assert.fail();
}
catch (Exception e) {
LOG.info(e.getMessage());
}
}
@Test
public void testCqlCount() throws Exception {
LOG.info("CQL Test");
OperationResult<CqlResult<String, String>> result = keyspace
.prepareQuery(CF_STANDARD1)
.withCql("SELECT count(*) FROM astyanaxunittests.standard1 where KEY='A';")
.execute();
long count = result.getResult().getNumber();
LOG.info("CQL Count: " + count);
Assert.assertTrue(0 <= count);
}
@Test
public void testGetSingleColumn() throws Exception {
keyspace.prepareColumnMutation(CF_STANDARD1, "A", "a").putValue(1, null).execute();
Column<String> column = keyspace.prepareQuery(CF_STANDARD1).getRow("A").getColumn("a").execute().getResult();
Assert.assertNotNull(column);
Assert.assertEquals(1, column.getIntegerValue());
}
@Test
public void testGetSingleKeyNotExists() throws Exception {
Column<String> column = keyspace.prepareQuery(CF_STANDARD1).getRow("AA").getColumn("ab").execute().getResult();
Assert.assertNull(column);
}
    /**
     * Populates rows 'A'..'Z', each with columns 'a'..'z' holding values 1..26,
     * then verifies row "A" contains at least 26 columns (>= rather than ==,
     * presumably to tolerate columns written to row "A" by other tests).
     */
    @Test
    public void testFunctionalQuery() throws Exception {
        MutationBatch m = keyspace.prepareMutationBatch();
        for (char keyName = 'A'; keyName <= 'Z'; keyName++) {
            String rowKey = Character.toString(keyName);
            ColumnListMutation<String> cfmStandard = m.withRow(CF_STANDARD1, rowKey);
            for (char cName = 'a'; cName <= 'z'; cName++) {
                cfmStandard.putColumn(Character.toString(cName),
                        (int) (cName - 'a') + 1, null);
            }
            m.withCaching(true);
            // execute per row, then reuse the batch for the next row
            m.execute();
            m.discardMutations();
        }
        OperationResult<ColumnList<String>> r1 = keyspace
                .prepareQuery(CF_STANDARD1).getKey("A").execute();
        Assert.assertTrue(26 <= r1.getResult().size());
    }
@Test
public void testNullKeyInMutation() throws Exception {
try {
keyspace.prepareMutationBatch().withRow(CF_STANDARD1, null).putColumn("abc", "def");
Assert.fail();
}
catch (Exception e) {
}
}
@Test
public void testColumnSlice() throws ConnectionException {
OperationResult<ColumnList<String>> r1 = keyspace
.prepareQuery(CF_STANDARD1).getKey("A")
.withColumnSlice("a", "b").execute();
Assert.assertEquals(2, r1.getResult().size());
}
    /**
     * Exercises three column-range variants on row "A" (columns 'a'..'z'):
     * a bounded range, an open-ended forward range with a limit, and a
     * reversed range over the whole row.
     */
    @Test
    public void testColumnRangeSlice() throws ConnectionException {
        // Bounded range [a, b] with limit 5: only two columns qualify.
        OperationResult<ColumnList<String>> r1 = keyspace
                .prepareQuery(CF_STANDARD1)
                .getKey("A")
                .withColumnRange(
                        new RangeBuilder().setStart("a").setEnd("b")
                                .setLimit(5).build()).execute();
        Assert.assertEquals(2, r1.getResult().size());
        // Open-ended forward range from "a", capped at 5 columns.
        OperationResult<ColumnList<String>> r2 = keyspace
                .prepareQuery(CF_STANDARD1).getKey("A")
                .withColumnRange("a", null, false, 5).execute();
        Assert.assertEquals(5, r2.getResult().size());
        Assert.assertEquals("a", r2.getResult().getColumnByIndex(0).getName());
        // Reversed range over the whole row: first column returned is "z".
        ByteBuffer EMPTY_BUFFER = ByteBuffer.wrap(new byte[0]);
        OperationResult<ColumnList<String>> r3 = keyspace
                .prepareQuery(CF_STANDARD1).getKey("A")
                .withColumnRange(EMPTY_BUFFER, EMPTY_BUFFER, true, 5).execute();
        Assert.assertEquals(5, r3.getResult().size());
        Assert.assertEquals("z", r3.getResult().getColumnByIndex(0).getName());
    }
@Test
public void testGetSingleColumnNotExists() throws ConnectionException {
Column<String> column = keyspace.prepareQuery(CF_STANDARD1).getRow("A").getColumn("DoesNotExist").execute().getResult();
Assert.assertNull(column);
}
    /**
     * Async fetch of a missing column. A NotFoundException surfacing as the
     * ExecutionException cause is the expected outcome; any other failure mode
     * fails the test.
     * NOTE(review): if the future completes normally no assertion is made on
     * the result -- presumably some drivers return an empty result instead of
     * throwing; confirm whether a null-result check should be added.
     */
    @Test
    public void testGetSingleColumnNotExistsAsync() {
        Future<OperationResult<Column<String>>> future = null;
        try {
            future = keyspace.prepareQuery(CF_STANDARD1).getKey("A")
                    .getColumn("DoesNotExist").executeAsync();
            // bounded wait so a hung query fails fast
            future.get(1000, TimeUnit.MILLISECONDS);
        } catch (ConnectionException e) {
            LOG.info("ConnectionException: " + e.getMessage());
            Assert.fail();
        } catch (InterruptedException e) {
            LOG.info(e.getMessage());
            Assert.fail();
        } catch (ExecutionException e) {
            // a wrapped NotFoundException is the expected path
            if (e.getCause() instanceof NotFoundException)
                LOG.info(e.getCause().getMessage());
            else {
                Assert.fail(e.getMessage());
            }
        } catch (TimeoutException e) {
            future.cancel(true);
            LOG.info(e.getMessage());
            Assert.fail();
        }
    }
@Test
public void testGetSingleKey() throws ConnectionException {
for (char key = 'A'; key <= 'Z'; key++) {
String keyName = Character.toString(key);
OperationResult<ColumnList<String>> result = keyspace.prepareQuery(CF_STANDARD1).getKey(keyName).execute();
Assert.assertNotNull(result.getResult());
Assert.assertFalse(result.getResult().isEmpty());
}
}
@Test
public void testGetSingleKeyAsync() throws Exception {
Future<OperationResult<ColumnList<String>>> future = keyspace.prepareQuery(CF_STANDARD1).getKey("A").executeAsync();
ColumnList<String> result = future.get(1000, TimeUnit.MILLISECONDS).getResult();
Assert.assertFalse(result.isEmpty());
}
    /**
     * Fetches a slice of all 26 row keys and spot-checks row lookup by key
     * and by positional index.
     */
    @Test
    public void testGetAllKeysRoot() throws ConnectionException {
        LOG.info("Starting testGetAllKeysRoot...");
        List<String> keys = new ArrayList<String>();
        for (char key = 'A'; key <= 'Z'; key++) {
            String keyName = Character.toString(key);
            keys.add(keyName);
        }
        OperationResult<Rows<String, String>> result = keyspace
                .prepareQuery(CF_STANDARD1)
                .getKeySlice(keys.toArray(new String[keys.size()]))
                .execute();
        Assert.assertEquals(26, result.getResult().size());
        Row<String, String> row;
        row = result.getResult().getRow("A");
        Assert.assertEquals("A", row.getKey());
        row = result.getResult().getRow("B");
        Assert.assertEquals("B", row.getKey());
        // a key that was never requested/written yields no row
        row = result.getResult().getRow("NonExistent");
        Assert.assertNull(row);
        // rows come back in request order: index 10 is the 11th letter, 'K'
        row = result.getResult().getRowByIndex(10);
        Assert.assertEquals("K", row.getKey());
        LOG.info("... testGetAllKeysRoot");
    }
@Test
public void testEmptyRowKey() {
try {
keyspace.prepareMutationBatch().withRow(CF_STANDARD1, "");
Assert.fail();
}
catch (Exception e) {
LOG.info(e.getMessage());
}
try {
keyspace.prepareMutationBatch().withRow(CF_STANDARD1, null);
Assert.fail();
}
catch (Exception e) {
LOG.info(e.getMessage());
}
}
@Test
public void testGetColumnSlice() throws ConnectionException {
LOG.info("Starting testGetColumnSlice...");
OperationResult<ColumnList<String>> result = keyspace
.prepareQuery(CF_STANDARD1)
.getKey("A")
.withColumnSlice(
new ColumnSlice<String>("c", "h").setLimit(5))
.execute();
Assert.assertNotNull(result.getResult());
Assert.assertEquals(5, result.getResult().size());
}
@Test
public void testGetAllKeysPath() throws ConnectionException {
LOG.info("Starting testGetAllKeysPath...");
List<String> keys = new ArrayList<String>();
for (char key = 'A'; key <= 'Z'; key++) {
String keyName = Character.toString(key);
keys.add(keyName);
}
OperationResult<Rows<String, String>> result = keyspace
.prepareQuery(CF_STANDARD1)
.getKeySlice(keys.toArray(new String[keys.size()]))
.execute();
for (Row<String, String> row : result.getResult()) {
System.out.println(row.getColumns().size());
}
OperationResult<Map<String, Integer>> counts = keyspace
.prepareQuery(CF_STANDARD1)
.getKeySlice(keys.toArray(new String[keys.size()]))
.getColumnCounts()
.execute();
Assert.assertEquals(26, counts.getResult().size());
for (Entry<String, Integer> count : counts.getResult().entrySet()) {
Assert.assertEquals(new Integer(26), count.getValue());
}
LOG.info("Starting testGetAllKeysPath...");
}
public static class UserInfo implements Serializable {
private static final long serialVersionUID = 6366200973810770033L;
private String firstName;
private String lastName;
public UserInfo() {
}
public void setFirstName(String firstName) {
this.firstName = firstName;
}
public String getFirstName() {
return this.firstName;
}
public void setLastName(String lastName) {
this.lastName = lastName;
}
public String getLastName() {
return this.lastName;
}
public boolean equals(Object other) {
UserInfo smo = (UserInfo) other;
return firstName.equals(smo.firstName)
&& lastName.equals(smo.lastName);
}
}
@Test
public void testHasValue() throws Exception {
MutationBatch m = keyspace.prepareMutationBatch();
m.withRow(CF_USER_INFO, "acct1234")
.putColumn("firstname", "john", null)
.putColumn("lastname", "smith", null)
.putColumn("address", "555 Elm St", null)
.putColumn("age", 30, null)
.putEmptyColumn("empty");
m.execute();
ColumnList<String> response = keyspace.prepareQuery(CF_USER_INFO).getRow("acct1234").execute().getResult();
Assert.assertEquals("firstname", response.getColumnByName("firstname").getName());
Assert.assertEquals("firstname", response.getColumnByName("firstname").getName());
Assert.assertEquals("john", response.getColumnByName("firstname").getStringValue());
Assert.assertEquals("john", response.getColumnByName("firstname").getStringValue());
Assert.assertEquals(true, response.getColumnByName("firstname").hasValue());
Assert.assertEquals(false, response.getColumnByName("empty").hasValue());
}
@Test
public void testDelete() throws Exception {
LOG.info("Starting testDelete...");
String rowKey = "DeleteMe_testDelete";
MutationBatch m = keyspace.prepareMutationBatch();
m.withRow(CF_STANDARD1, rowKey).putColumn("Column1", "X", null).putColumn("Column2", "X", null);
m.execute();
Column<String> column = keyspace.prepareQuery(CF_STANDARD1).getRow(rowKey).getColumn("Column1").execute().getResult();
Assert.assertEquals("X", column.getStringValue());
m = keyspace.prepareMutationBatch();
m.withRow(CF_STANDARD1, rowKey).deleteColumn("Column1");
m.execute();
column = keyspace.prepareQuery(CF_STANDARD1).getRow(rowKey).getColumn("Column1").execute().getResult();
Assert.assertNull(column);
LOG.info("... testDelete");
}
@Test
public void testDeleteLotsOfColumns() throws Exception {
    // Fixed: log messages previously said "testDelete", making this test's
    // output indistinguishable from testDelete's in the log.
    LOG.info("Starting testDeleteLotsOfColumns...");
    String rowKey = "DeleteMe_testDeleteLotsOfColumns";
    int nColumns = 100;
    int pageSize = 25;

    // Insert nColumns empty columns into a single row.
    MutationBatch m = keyspace.prepareMutationBatch();
    ColumnListMutation<String> rm = m.withRow(CF_STANDARD1, rowKey);
    for (int i = 0; i < nColumns; i++) {
        rm.putEmptyColumn("" + i, null);
    }
    m.execute();

    // Verify the full count at QUORUM.
    int count = keyspace.prepareQuery(CF_STANDARD1)
            .setConsistencyLevel(ConsistencyLevel.CL_QUORUM)
            .getKey(rowKey).getCount().execute().getResult();
    Assert.assertEquals(nColumns, count);

    // Delete the first half of the columns.
    m = keyspace.prepareMutationBatch().setConsistencyLevel(ConsistencyLevel.CL_QUORUM);
    rm = m.withRow(CF_STANDARD1, rowKey);
    for (int i = 0; i < nColumns / 2; i++) {
        rm.deleteColumn("" + i);
    }
    m.execute();

    // Verify the reduced count.
    count = keyspace.prepareQuery(CF_STANDARD1)
            .setConsistencyLevel(ConsistencyLevel.CL_QUORUM)
            .getKey(rowKey).getCount().execute().getResult();
    Assert.assertEquals(nColumns / 2, count);

    // Re-count via paginated reads to exercise autoPaginate against tombstones.
    RowQuery<String, String> query = keyspace.prepareQuery(CF_STANDARD1)
            .setConsistencyLevel(ConsistencyLevel.CL_QUORUM).getKey(rowKey)
            .withColumnRange(new RangeBuilder().setLimit(pageSize).build())
            .autoPaginate(true);
    ColumnList<String> result;
    count = 0;
    while (!(result = query.execute().getResult()).isEmpty()) {
        count += result.size();
    }
    Assert.assertEquals(nColumns / 2, count);

    // Delete everything (including already-deleted columns; deletes are idempotent).
    m = keyspace.prepareMutationBatch().setConsistencyLevel(ConsistencyLevel.CL_QUORUM);
    rm = m.withRow(CF_STANDARD1, rowKey);
    for (int i = 0; i < nColumns; i++) {
        rm.deleteColumn("" + i);
    }
    m.execute();

    // The row must now report zero live columns.
    count = keyspace.prepareQuery(CF_STANDARD1)
            .setConsistencyLevel(ConsistencyLevel.CL_QUORUM)
            .getKey(rowKey).getCount().execute().getResult();
    Assert.assertEquals(0, count);
    LOG.info("... testDeleteLotsOfColumns");
}
@Test
public void testCopy() throws ConnectionException {
    String keyName = "A";

    // Copy a single row from CF_STANDARD1 into CF_STANDARD2 under the same key.
    keyspace.prepareQuery(CF_STANDARD1).getKey(keyName).copyTo(CF_STANDARD2, keyName).execute();

    ColumnList<String> source = keyspace.prepareQuery(CF_STANDARD1).getKey(keyName).execute().getResult();
    ColumnList<String> copy = keyspace.prepareQuery(CF_STANDARD2).getKey(keyName).execute().getResult();

    Iterator<Column<String>> sourceIter = source.iterator();
    Iterator<Column<String>> copyIter = copy.iterator();
    while (sourceIter.hasNext()) {
        // Fixed: guard before next() so a short copy fails with an assertion
        // instead of an unhelpful NoSuchElementException.
        Assert.assertTrue(copyIter.hasNext());
        Column<String> expected = sourceIter.next();
        Column<String> actual = copyIter.next();
        Assert.assertEquals(expected.getName(), actual.getName());
        Assert.assertEquals(expected.getByteBufferValue(), actual.getByteBufferValue());
    }
    // The copy must not contain extra columns beyond the source's.
    Assert.assertFalse(copyIter.hasNext());
}
@Test
public void testMutationMerge() throws Exception {
    MutationBatch m1 = keyspace.prepareMutationBatch();
    MutationBatch m2 = keyspace.prepareMutationBatch();
    MutationBatch m3 = keyspace.prepareMutationBatch();
    MutationBatch m4 = keyspace.prepareMutationBatch();
    MutationBatch m5 = keyspace.prepareMutationBatch(); // intentionally left empty

    m1.withRow(CF_STANDARD1, "1").putColumn("1", "X", null);
    m2.withRow(CF_STANDARD1, "2").putColumn("2", "X", null).putColumn("3", "X", null);
    m3.withRow(CF_STANDARD1, "3").putColumn("4", "X", null).putColumn("5", "X", null).putColumn("6", "X", null);
    m4.withRow(CF_STANDARD1, "1").putColumn("7", "X", null).putColumn("8", "X", null).putColumn("9", "X", null).putColumn("10", "X", null);

    // Fixed: assertEquals arguments were reversed (JUnit expects expected first),
    // and assertTrue(a == b) gave no diagnostic on failure.
    MutationBatch merged = keyspace.prepareMutationBatch();
    Assert.assertEquals(0, merged.getRowCount());
    merged.mergeShallow(m1);
    Assert.assertEquals(1, merged.getRowCount());
    merged.mergeShallow(m2);
    Assert.assertEquals(2, merged.getRowCount());
    merged.mergeShallow(m3);
    Assert.assertEquals(3, merged.getRowCount());
    // m4 targets row "1", which is already present: row count stays at 3.
    merged.mergeShallow(m4);
    Assert.assertEquals(3, merged.getRowCount());
    // Merging an empty batch is a no-op.
    merged.mergeShallow(m5);
    Assert.assertEquals(3, merged.getRowCount());
    merged.execute();

    // Row "1" received 1 + 4 columns from the merged m1 and m4.
    Rows<String, String> result = keyspace.prepareQuery(CF_STANDARD1).getRowSlice("1", "2", "3").execute().getResult();
    Assert.assertEquals(5, result.getRow("1").getColumns().size());
    Assert.assertEquals(2, result.getRow("2").getColumns().size());
    Assert.assertEquals(3, result.getRow("3").getColumns().size());
}
}
| 7,598 |
0 | Create_ds/astyanax/astyanax-test/src/main/java/com/netflix/astyanax/cql | Create_ds/astyanax/astyanax-test/src/main/java/com/netflix/astyanax/cql/test/SingleRowColumnRangeQueryTests.java | /**
* Copyright 2013 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.astyanax.cql.test;
import junit.framework.Assert;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import com.netflix.astyanax.cql.test.utils.ReadTests;
import com.netflix.astyanax.cql.test.utils.TestUtils;
import com.netflix.astyanax.model.Column;
import com.netflix.astyanax.model.ColumnFamily;
import com.netflix.astyanax.model.ColumnList;
/**
 * Tests single-row column-range queries: populates rows keyed 'A'..'Z', each
 * holding columns 'a'..'z', then verifies both the range read and the column
 * count before and after deleting the rows.
 */
public class SingleRowColumnRangeQueryTests extends ReadTests {

    private static final ColumnFamily<String, String> CF_COLUMN_RANGE_TEST = TestUtils.CF_COLUMN_RANGE_TEST;

    @BeforeClass
    public static void init() throws Exception {
        initContext();
        keyspace.createColumnFamily(CF_COLUMN_RANGE_TEST, null);
        CF_COLUMN_RANGE_TEST.describe(keyspace);
    }

    @AfterClass
    public static void tearDown() throws Exception {
        keyspace.dropColumnFamily(CF_COLUMN_RANGE_TEST);
    }

    @Test
    public void testColumnRangeQuery() throws Exception {
        /** POPULATE DATA FOR TESTING */
        TestUtils.populateRowsForColumnRange(keyspace);
        Thread.sleep(1000);
        boolean rowDeleted = false;

        /** PERFORM READ TESTS */
        readColumnRangeForAllRows(rowDeleted);
        getColumnCountForAllRows(rowDeleted);

        /** DELETE ALL ROWS */
        TestUtils.deleteRowsForColumnRange(keyspace);
        rowDeleted = true;

        /** PERFORM READ TESTS FOR MISSING DATA */
        readColumnRangeForAllRows(rowDeleted);
        getColumnCountForAllRows(rowDeleted);
    }

    /** Reads the full column range for every row key 'A'..'Z'. */
    public void readColumnRangeForAllRows(boolean rowDeleted) throws Exception {
        char ch = 'A';
        while (ch <= 'Z') {
            readColumnRangeForRowKey(String.valueOf(ch), rowDeleted);
            ch++;
        }
    }

    /** Verifies the columns 'a'..'z' (values 1..26) for one row, or emptiness after deletion. */
    private void readColumnRangeForRowKey(String rowKey, boolean rowDeleted) throws Exception {
        ColumnList<String> columns = keyspace
                .prepareQuery(CF_COLUMN_RANGE_TEST)
                .getKey(rowKey)
                .withColumnRange("a", "z", false, -1)
                .execute().getResult();

        if (rowDeleted) {
            Assert.assertTrue(columns.isEmpty());
            return;
        }

        Assert.assertFalse(columns.isEmpty());
        char ch = 'a';
        for (Column<String> c : columns) {
            Assert.assertEquals(String.valueOf(ch), c.getName());
            // Fixed: assertTrue(a == b) replaced with assertEquals for diagnostics.
            Assert.assertEquals(ch - 'a' + 1, c.getIntegerValue());
            ch++;
        }
    }

    /** Checks the column count for every row key 'A'..'Z'. */
    public void getColumnCountForAllRows(boolean rowDeleted) throws Exception {
        char ch = 'A';
        while (ch <= 'Z') {
            getColumnCountForRowKey(String.valueOf(ch), rowDeleted);
            ch++;
        }
    }

    /** Expects 26 columns per live row, 0 after deletion. */
    private void getColumnCountForRowKey(String rowKey, boolean rowDeleted) throws Exception {
        Integer count = keyspace
                .prepareQuery(CF_COLUMN_RANGE_TEST)
                .getKey(rowKey)
                .withColumnRange("a", "z", false, -1)
                .getCount()
                .execute().getResult();

        int expectedCount = rowDeleted ? 0 : 26;
        // Fixed: assertTrue(a == b) replaced with assertEquals for diagnostics.
        Assert.assertEquals(expectedCount, count.intValue());
    }
}
| 7,599 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.