index int64 0 0 | repo_id stringlengths 26 205 | file_path stringlengths 51 246 | content stringlengths 8 433k | __index_level_0__ int64 0 10k |
|---|---|---|---|---|
0 | Create_ds/concurrency-limits/concurrency-limits-servlet/src/main/java/com/netflix/concurrency/limits | Create_ds/concurrency-limits/concurrency-limits-servlet/src/main/java/com/netflix/concurrency/limits/servlet/ConcurrencyLimitServletFilter.java | /**
* Copyright 2018 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.concurrency.limits.servlet;
import com.netflix.concurrency.limits.Limiter;
import java.io.IOException;
import java.util.Optional;
import javax.servlet.Filter;
import javax.servlet.FilterChain;
import javax.servlet.FilterConfig;
import javax.servlet.ServletException;
import javax.servlet.ServletRequest;
import javax.servlet.ServletResponse;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
/**
* Servlet {@link Filter} that enforces concurrency limits on all requests into the servlet.
*
* @see ServletLimiterBuilder
*/
public class ConcurrencyLimitServletFilter implements Filter {
    private static final int STATUS_TOO_MANY_REQUESTS = 429;

    /** Limiter consulted once per request passing through this filter. */
    private final Limiter<HttpServletRequest> limiter;
    /** HTTP status code written when a request is rejected by the limiter. */
    private final int throttleStatus;

    /** Creates a filter that responds with HTTP 429 when the concurrency limit is exceeded. */
    public ConcurrencyLimitServletFilter(Limiter<HttpServletRequest> limiter) {
        this(limiter, STATUS_TOO_MANY_REQUESTS);
    }

    /**
     * Creates a filter with a custom throttle status code.
     *
     * @param limiter limiter consulted for every request
     * @param throttleStatus HTTP status code to return when a request is throttled
     */
    public ConcurrencyLimitServletFilter(Limiter<HttpServletRequest> limiter, int throttleStatus) {
        this.limiter = limiter;
        this.throttleStatus = throttleStatus;
    }

    @Override
    public void init(FilterConfig filterConfig) throws ServletException {
        // No initialization needed; all state is supplied via the constructor.
    }

    @Override
    public void doFilter(ServletRequest request, ServletResponse response, FilterChain chain)
            throws IOException, ServletException {
        Optional<Limiter.Listener> acquired = limiter.acquire((HttpServletRequest) request);
        if (!acquired.isPresent()) {
            // Limit reached: reject without invoking the rest of the chain.
            outputThrottleError((HttpServletResponse) response);
            return;
        }

        Limiter.Listener token = acquired.get();
        try {
            chain.doFilter(request, response);
            token.onSuccess();
        } catch (Exception e) {
            // Exceptions are not counted as limit samples; re-throw unchanged.
            token.onIgnore();
            throw e;
        }
    }

    /**
     * Writes the throttle status code and a short message to the response.
     * Subclasses may override to customize the rejection payload.
     */
    protected void outputThrottleError(HttpServletResponse response) {
        try {
            response.setStatus(throttleStatus);
            response.getWriter().print("Concurrency limit exceeded");
        } catch (IOException e) {
            // Best effort only; the client may already have gone away.
        }
    }

    @Override
    public void destroy() {
        // Nothing to clean up.
    }
}
| 3,100 |
0 | Create_ds/concurrency-limits/concurrency-limits-servlet/src/main/java/com/netflix/concurrency/limits | Create_ds/concurrency-limits/concurrency-limits-servlet/src/main/java/com/netflix/concurrency/limits/servlet/ServletLimiterBuilder.java | /**
* Copyright 2018 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.concurrency.limits.servlet;
import com.netflix.concurrency.limits.Limiter;
import com.netflix.concurrency.limits.limiter.AbstractPartitionedLimiter;
import javax.servlet.http.HttpServletRequest;
import java.security.Principal;
import java.util.Optional;
import java.util.function.Function;
/**
* Builder to simplify creating a {@link Limiter} specific to a Servlet filter. By default,
* the same concurrency limit is shared by all requests. The limiter can be partitioned
* based on one of many request attributes. Only one type of partition may be specified.
*/
public final class ServletLimiterBuilder extends AbstractPartitionedLimiter.Builder<ServletLimiterBuilder, HttpServletRequest> {
    /**
     * Partition the limit by header.
     * @param name Header name whose value identifies the partition.
     * @return Chainable builder
     */
    public ServletLimiterBuilder partitionByHeader(String name) {
        // getHeader already returns null when the header is absent;
        // Optional.ofNullable(...).orElse(null) was a redundant round-trip.
        return partitionResolver(request -> request.getHeader(name));
    }

    /**
     * Partition the limit by {@link Principal}. Percentages of the limit are partitioned to named
     * groups. Group membership is derived from the provided mapping function.
     * @param principalToGroup Mapping function from {@link Principal} to a named group.
     * @return Chainable builder
     */
    public ServletLimiterBuilder partitionByUserPrincipal(Function<Principal, String> principalToGroup) {
        return partitionResolver(request -> Optional.ofNullable(request.getUserPrincipal()).map(principalToGroup).orElse(null));
    }

    /**
     * Partition the limit by request attribute.
     * @param name Attribute name whose string form identifies the partition.
     * @return Chainable builder
     */
    public ServletLimiterBuilder partitionByAttribute(String name) {
        return partitionResolver(request -> Optional.ofNullable(request.getAttribute(name)).map(Object::toString).orElse(null));
    }

    /**
     * Partition the limit by request parameter.
     * @param name Parameter name whose value identifies the partition.
     * @return Chainable builder
     */
    public ServletLimiterBuilder partitionByParameter(String name) {
        // getParameter already returns null when the parameter is absent.
        return partitionResolver(request -> request.getParameter(name));
    }

    /**
     * Partition the limit by the full path. Percentages of the limit are partitioned to named
     * groups. Group membership is derived from the provided mapping function.
     * @param pathToGroup Mapping function from full path to a named group.
     * @return Chainable builder
     */
    public ServletLimiterBuilder partitionByPathInfo(Function<String, String> pathToGroup) {
        return partitionResolver(request -> Optional.ofNullable(request.getPathInfo()).map(pathToGroup).orElse(null));
    }

    @Override
    protected ServletLimiterBuilder self() {
        return this;
    }
}
| 3,101 |
0 | Create_ds/concurrency-limits/concurrency-limits-servlet-jakarta/src/test/java/com/netflix/concurrency | Create_ds/concurrency-limits/concurrency-limits-servlet-jakarta/src/test/java/com/netflix/concurrency/limits/ConcurrencyLimitServletFilterSimulationTest.java | package com.netflix.concurrency.limits;
import com.netflix.concurrency.limits.executors.BlockingAdaptiveExecutor;
import com.netflix.concurrency.limits.limit.FixedLimit;
import com.netflix.concurrency.limits.limit.VegasLimit;
import com.netflix.concurrency.limits.limiter.SimpleLimiter;
import com.netflix.concurrency.limits.servlet.jakarta.ConcurrencyLimitServletFilter;
import com.netflix.concurrency.limits.servlet.jakarta.ServletLimiterBuilder;
import org.eclipse.jetty.servlet.FilterHolder;
import org.junit.ClassRule;
import org.junit.Ignore;
import org.junit.Test;
import jakarta.servlet.DispatcherType;
import jakarta.servlet.http.HttpServlet;
import jakarta.servlet.http.HttpServletRequest;
import jakarta.servlet.http.HttpServletResponse;
import java.io.IOException;
import java.security.Principal;
import java.util.EnumSet;
import java.util.concurrent.Executors;
import java.util.concurrent.RejectedExecutionException;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
/**
 * Manual load simulation that drives the jakarta {@code ConcurrencyLimitServletFilter}
 * through a real embedded Jetty server. The simulation method is {@code @Ignore}d
 * because it runs an unbounded load loop; it is intended for interactive
 * experimentation with limit algorithms, not for CI.
 */
public class ConcurrencyLimitServletFilterSimulationTest {
// Embedded Jetty server with a fixed limit of 10, partitioned 80/20 between
// the "live" and "batch" groups keyed by the request's user principal name.
@ClassRule
public static HttpServerRule server = new HttpServerRule(context -> {
context.addServlet(HelloServlet.class, "/");
Limiter<HttpServletRequest> limiter = new ServletLimiterBuilder()
.limit(FixedLimit.of(10))
.partitionByUserPrincipal(Principal::getName)
.partition("live", 0.8)
.partition("batch", 0.2)
.build();
FilterHolder holder = new FilterHolder();
holder.setFilter(new ConcurrencyLimitServletFilter(limiter));
context.addFilter(holder, "/*", EnumSet.of(DispatcherType.REQUEST));
});
// Drives unbounded client load through a BlockingAdaptiveExecutor whose Vegas
// limit adapts to observed latency. Runs forever; enable manually to observe
// the printed errors/success/limit telemetry once per second.
@Test
@Ignore
public void simulation() throws Exception {
Limit limit = VegasLimit.newDefault();
BlockingAdaptiveExecutor executor = new BlockingAdaptiveExecutor(
SimpleLimiter.newBuilder().limit(limit).build());
AtomicInteger errors = new AtomicInteger();
AtomicInteger success = new AtomicInteger();
// Periodic reporter: prints and resets the per-second counters.
// NOTE(review): this scheduler is never shut down — acceptable only because
// the surrounding loop never terminates.
Executors.newSingleThreadScheduledExecutor().scheduleAtFixedRate(() -> {
System.out.println(String.format("errors=%d success=%d limit=%s", errors.getAndSet(0), success.getAndSet(0), limit));
}, 1, 1, TimeUnit.SECONDS);
while (true) {
executor.execute(() -> {
try {
server.get("/batch");
success.incrementAndGet();
} catch (Exception e) {
// Throwing RejectedExecutionException signals a dropped sample to the executor.
errors.incrementAndGet();
throw new RejectedExecutionException();
}
});
}
}
/** Trivial servlet that sleeps ~100ms to simulate server-side work. */
public static class HelloServlet extends HttpServlet {
private static final long serialVersionUID = 1L;
@Override
protected void doGet(HttpServletRequest request, HttpServletResponse response) throws IOException {
try {
TimeUnit.MILLISECONDS.sleep(100);
} catch (InterruptedException e) {
}
response.setContentType("text/html");
response.setStatus(HttpServletResponse.SC_OK);
response.getWriter().println("Hello from HelloServlet");
}
}
}
| 3,102 |
0 | Create_ds/concurrency-limits/concurrency-limits-servlet-jakarta/src/test/java/com/netflix/concurrency | Create_ds/concurrency-limits/concurrency-limits-servlet-jakarta/src/test/java/com/netflix/concurrency/limits/GroupServletLimiterTest.java | package com.netflix.concurrency.limits;
import com.netflix.concurrency.limits.Limiter.Listener;
import com.netflix.concurrency.limits.limit.VegasLimit;
import com.netflix.concurrency.limits.servlet.jakarta.ServletLimiterBuilder;
import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.ArgumentMatchers;
import org.mockito.Mockito;
import org.mockito.junit.MockitoJUnitRunner;
import jakarta.servlet.http.HttpServletRequest;
import java.security.Principal;
import java.util.HashMap;
import java.util.Map;
import java.util.Optional;
@RunWith(MockitoJUnitRunner.class)
public class GroupServletLimiterTest {

    /**
     * Builds a limiter partitioned 80/20 between "live" and "batch", resolving the
     * partition from the user principal name via the supplied map.
     */
    private Limiter<HttpServletRequest> newPrincipalLimiter(Map<String, String> principalToGroup) {
        return new ServletLimiterBuilder()
                .limit(VegasLimit.newDefault())
                .partitionByUserPrincipal(p -> principalToGroup.get(p.getName()))
                .partition("live", 0.8)
                .partition("batch", 0.2)
                .build();
    }

    /**
     * Builds a limiter partitioned 80/20 between "live" and "batch", resolving the
     * partition from the request path info via the supplied map.
     */
    private Limiter<HttpServletRequest> newPathLimiter(Map<String, String> pathToGroup) {
        return new ServletLimiterBuilder()
                .limit(VegasLimit.newDefault())
                .partitionByPathInfo(pathToGroup::get)
                .partition("live", 0.8)
                .partition("batch", 0.2)
                .build();
    }

    /** Spied map from principal name to group, pre-populated with "bob" -> "live". */
    private Map<String, String> newPrincipalToGroup() {
        Map<String, String> principalToGroup = Mockito.spy(new HashMap<>());
        principalToGroup.put("bob", "live");
        return principalToGroup;
    }

    /** Spied map from path to group, pre-populated with "/live/path" -> "live". */
    private Map<String, String> newPathToGroup() {
        Map<String, String> pathToGroup = Mockito.spy(new HashMap<>());
        pathToGroup.put("/live/path", "live");
        return pathToGroup;
    }

    @Test
    public void userPrincipalMatchesGroup() {
        Map<String, String> principalToGroup = newPrincipalToGroup();
        Limiter<HttpServletRequest> limiter = newPrincipalLimiter(principalToGroup);

        HttpServletRequest request = createMockRequestWithPrincipal("bob");
        Optional<Listener> listener = limiter.acquire(request);

        Assert.assertTrue(listener.isPresent());
        // The resolver must have consulted the map with the principal name.
        Mockito.verify(principalToGroup, Mockito.times(1)).get("bob");
    }

    @Test
    public void userPrincipalDoesNotMatchGroup() {
        Map<String, String> principalToGroup = newPrincipalToGroup();
        Limiter<HttpServletRequest> limiter = newPrincipalLimiter(principalToGroup);

        HttpServletRequest request = createMockRequestWithPrincipal("doesntexist");
        Optional<Listener> listener = limiter.acquire(request);

        // Unknown principals still acquire; they fall outside the named partitions.
        Assert.assertTrue(listener.isPresent());
        Mockito.verify(principalToGroup, Mockito.times(1)).get("doesntexist");
    }

    @Test
    public void nullUserPrincipalDoesNotMatchGroup() {
        Map<String, String> principalToGroup = newPrincipalToGroup();
        Limiter<HttpServletRequest> limiter = newPrincipalLimiter(principalToGroup);

        HttpServletRequest request = Mockito.mock(HttpServletRequest.class);
        Mockito.when(request.getUserPrincipal()).thenReturn(null);

        Optional<Listener> listener = limiter.acquire(request);

        // With no principal at all, the group mapping must never be consulted.
        Assert.assertTrue(listener.isPresent());
        Mockito.verify(principalToGroup, Mockito.times(0)).get(Mockito.<String>any());
    }

    @Test
    public void nullUserPrincipalNameDoesNotMatchGroup() {
        Map<String, String> principalToGroup = newPrincipalToGroup();
        Limiter<HttpServletRequest> limiter = newPrincipalLimiter(principalToGroup);

        HttpServletRequest request = createMockRequestWithPrincipal(null);
        Optional<Listener> listener = limiter.acquire(request);

        // A principal with a null name is looked up (with null) and finds no group.
        Assert.assertTrue(listener.isPresent());
        Mockito.verify(principalToGroup, Mockito.times(1)).get(ArgumentMatchers.isNull());
    }

    @Test
    public void pathMatchesGroup() {
        Map<String, String> pathToGroup = newPathToGroup();
        Limiter<HttpServletRequest> limiter = newPathLimiter(pathToGroup);

        HttpServletRequest request = createMockRequestWithPathInfo("/live/path");
        Optional<Listener> listener = limiter.acquire(request);

        Assert.assertTrue(listener.isPresent());
        Mockito.verify(pathToGroup, Mockito.times(1)).get("/live/path");
    }

    @Test
    public void pathDoesNotMatchesGroup() {
        Map<String, String> pathToGroup = newPathToGroup();
        Limiter<HttpServletRequest> limiter = newPathLimiter(pathToGroup);

        HttpServletRequest request = createMockRequestWithPathInfo("/other/path");
        Optional<Listener> listener = limiter.acquire(request);

        Assert.assertTrue(listener.isPresent());
        Mockito.verify(pathToGroup, Mockito.times(1)).get("/other/path");
    }

    @Test
    public void nullPathDoesNotMatchesGroup() {
        Map<String, String> pathToGroup = newPathToGroup();
        Limiter<HttpServletRequest> limiter = newPathLimiter(pathToGroup);

        HttpServletRequest request = createMockRequestWithPathInfo(null);
        Optional<Listener> listener = limiter.acquire(request);

        // A null path short-circuits in Optional.ofNullable; the map is untouched.
        Assert.assertTrue(listener.isPresent());
        Mockito.verify(pathToGroup, Mockito.times(0)).get(Mockito.<String>any());
    }

    /** Mock request whose user principal reports the given (possibly null) name. */
    private HttpServletRequest createMockRequestWithPrincipal(String name) {
        HttpServletRequest request = Mockito.mock(HttpServletRequest.class);
        Principal principal = Mockito.mock(Principal.class);
        Mockito.when(request.getUserPrincipal()).thenReturn(principal);
        Mockito.when(principal.getName()).thenReturn(name);
        return request;
    }

    /** Mock request whose path info reports the given (possibly null) value. */
    private HttpServletRequest createMockRequestWithPathInfo(String name) {
        HttpServletRequest request = Mockito.mock(HttpServletRequest.class);
        Mockito.when(request.getPathInfo()).thenReturn(name);
        return request;
    }
}
| 3,103 |
0 | Create_ds/concurrency-limits/concurrency-limits-servlet-jakarta/src/test/java/com/netflix/concurrency | Create_ds/concurrency-limits/concurrency-limits-servlet-jakarta/src/test/java/com/netflix/concurrency/limits/ConcurrencyLimitServletFilterTest.java | package com.netflix.concurrency.limits;
import com.netflix.concurrency.limits.servlet.jakarta.ConcurrencyLimitServletFilter;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.Mock;
import org.mockito.junit.jupiter.MockitoExtension;
import org.springframework.mock.web.MockFilterChain;
import org.springframework.mock.web.MockHttpServletRequest;
import org.springframework.mock.web.MockHttpServletResponse;
import jakarta.servlet.ServletException;
import jakarta.servlet.http.HttpServletRequest;
import java.io.IOException;
import java.util.Optional;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNull;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
@ExtendWith(MockitoExtension.class)
public class ConcurrencyLimitServletFilterTest {

    /** Mocked limiter whose acquire() result drives each scenario. */
    @Mock
    Limiter<HttpServletRequest> limiter;

    /** Mocked listener returned when a request is admitted. */
    @Mock
    Limiter.Listener listener;

    @Test
    public void testDoFilterAllowed() throws ServletException, IOException {
        ConcurrencyLimitServletFilter underTest = new ConcurrencyLimitServletFilter(limiter);

        // A present listener means the request is admitted.
        when(limiter.acquire(any())).thenReturn(Optional.of(listener));

        MockHttpServletRequest servletRequest = new MockHttpServletRequest();
        MockHttpServletResponse servletResponse = new MockHttpServletResponse();
        MockFilterChain downstream = new MockFilterChain();

        underTest.doFilter(servletRequest, servletResponse, downstream);

        assertEquals(servletRequest, downstream.getRequest(), "Request should be passed to the downstream chain");
        assertEquals(servletResponse, downstream.getResponse(), "Response should be passed to the downstream chain");
        verify(listener).onSuccess();
    }

    @Test
    public void testDoFilterThrottled() throws ServletException, IOException {
        ConcurrencyLimitServletFilter underTest = new ConcurrencyLimitServletFilter(limiter);

        // An empty acquire() result means the request must be throttled.
        when(limiter.acquire(any())).thenReturn(Optional.empty());

        MockHttpServletResponse servletResponse = new MockHttpServletResponse();
        MockFilterChain downstream = new MockFilterChain();

        underTest.doFilter(new MockHttpServletRequest(), servletResponse, downstream);

        assertNull(downstream.getRequest(), "doFilter should not be called on the filterchain");
        assertEquals(429, servletResponse.getStatus(), "Status should be 429 - too many requests");
    }

    @Test
    public void testDoFilterThrottledCustomStatus() throws ServletException, IOException {
        final int customThrottleStatus = 503;
        ConcurrencyLimitServletFilter underTest =
                new ConcurrencyLimitServletFilter(limiter, customThrottleStatus);

        // An empty acquire() result means the request must be throttled.
        when(limiter.acquire(any())).thenReturn(Optional.empty());

        MockHttpServletResponse servletResponse = new MockHttpServletResponse();

        underTest.doFilter(new MockHttpServletRequest(), servletResponse, new MockFilterChain());

        assertEquals(customThrottleStatus, servletResponse.getStatus(), "custom status should be respected");
    }
}
| 3,104 |
0 | Create_ds/concurrency-limits/concurrency-limits-servlet-jakarta/src/test/java/com/netflix/concurrency | Create_ds/concurrency-limits/concurrency-limits-servlet-jakarta/src/test/java/com/netflix/concurrency/limits/HttpServerRule.java | package com.netflix.concurrency.limits;
import org.eclipse.jetty.server.Server;
import org.eclipse.jetty.server.ServerConnector;
import org.eclipse.jetty.servlet.ServletContextHandler;
import org.eclipse.jetty.servlet.ServletHandler;
import org.junit.rules.ExternalResource;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.URL;
import java.nio.charset.StandardCharsets;
import java.util.function.Consumer;
import java.util.stream.Collectors;
public class HttpServerRule extends ExternalResource {
private Server server;
private final Consumer<ServletContextHandler> customizer;
public HttpServerRule(Consumer<ServletContextHandler> customizer) {
this.customizer = customizer;
}
protected void before() throws Throwable {
this.server = new Server(0);
ServletHandler handler = new ServletHandler();
server.setHandler(handler);
ServletContextHandler context = new ServletContextHandler(ServletContextHandler.SESSIONS);
context.setContextPath("/");
customizer.accept(context);
server.setHandler(context);
server.start();
}
/**
* Override to tear down your specific external resource.
*/
protected void after() {
if (server != null) {
try {
server.stop();
} catch (Exception e) {
throw new RuntimeException(e);
}
}
}
public int getPort() {
return ((ServerConnector) server.getConnectors()[0]).getLocalPort();
}
public String get(String path) throws Exception {
URL url = new URL("http://localhost:" + getPort() + path);
HttpURLConnection con = (HttpURLConnection) url.openConnection();
con.setRequestMethod("GET");
int responseCode = con.getResponseCode();
if (responseCode != 200) {
throw new Exception(readString(con.getInputStream()));
} else {
return readString(con.getInputStream());
}
}
public String readString(InputStream is) throws IOException {
try (BufferedReader buffer = new BufferedReader(new InputStreamReader(is))) {
return buffer.lines().collect(Collectors.joining("\n"));
}
}
}
| 3,105 |
0 | Create_ds/concurrency-limits/concurrency-limits-servlet-jakarta/src/main/java/com/netflix/concurrency/limits/servlet | Create_ds/concurrency-limits/concurrency-limits-servlet-jakarta/src/main/java/com/netflix/concurrency/limits/servlet/jakarta/ConcurrencyLimitServletFilter.java | /**
* Copyright 2023 Netflix, Inc.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.concurrency.limits.servlet.jakarta;
import com.netflix.concurrency.limits.Limiter;
import jakarta.servlet.Filter;
import jakarta.servlet.FilterChain;
import jakarta.servlet.FilterConfig;
import jakarta.servlet.ServletException;
import jakarta.servlet.ServletRequest;
import jakarta.servlet.ServletResponse;
import jakarta.servlet.http.HttpServletRequest;
import jakarta.servlet.http.HttpServletResponse;
import java.io.IOException;
import java.util.Optional;
/**
* Servlet {@link Filter} that enforces concurrency limits on all requests into the servlet.
*
* @see ServletLimiterBuilder
*/
public class ConcurrencyLimitServletFilter implements Filter {
    private static final int STATUS_TOO_MANY_REQUESTS = 429;

    /** Limiter consulted for each incoming request. */
    private final Limiter<HttpServletRequest> limiter;
    /** Status code sent back when the limiter rejects a request. */
    private final int throttleStatus;

    /** Builds a filter that replies with HTTP 429 once the limit is reached. */
    public ConcurrencyLimitServletFilter(Limiter<HttpServletRequest> limiter) {
        this(limiter, STATUS_TOO_MANY_REQUESTS);
    }

    /**
     * Builds a filter with a caller-chosen throttle status.
     *
     * @param limiter limiter applied to every request
     * @param throttleStatus status code used for rejected requests
     */
    public ConcurrencyLimitServletFilter(Limiter<HttpServletRequest> limiter, int throttleStatus) {
        this.limiter = limiter;
        this.throttleStatus = throttleStatus;
    }

    @Override
    public void init(FilterConfig filterConfig) throws ServletException {
        // Stateless beyond constructor arguments; nothing to initialize.
    }

    @Override
    public void doFilter(ServletRequest request, ServletResponse response, FilterChain chain)
            throws IOException, ServletException {
        Optional<Limiter.Listener> result = limiter.acquire((HttpServletRequest) request);
        if (!result.isPresent()) {
            // Rejected: report the throttle status and skip the chain entirely.
            outputThrottleError((HttpServletResponse) response);
            return;
        }

        final Limiter.Listener inFlight = result.get();
        try {
            chain.doFilter(request, response);
            inFlight.onSuccess();
        } catch (Exception e) {
            // Do not let failed requests skew the limit; ignore the sample and re-throw.
            inFlight.onIgnore();
            throw e;
        }
    }

    /**
     * Emits the configured throttle status plus a short plain-text message.
     * Override to change the rejection payload.
     */
    protected void outputThrottleError(HttpServletResponse response) {
        try {
            response.setStatus(throttleStatus);
            response.getWriter().print("Concurrency limit exceeded");
        } catch (IOException e) {
            // Best-effort write; the peer may have disconnected already.
        }
    }

    @Override
    public void destroy() {
        // No resources held.
    }
}
| 3,106 |
0 | Create_ds/concurrency-limits/concurrency-limits-servlet-jakarta/src/main/java/com/netflix/concurrency/limits/servlet | Create_ds/concurrency-limits/concurrency-limits-servlet-jakarta/src/main/java/com/netflix/concurrency/limits/servlet/jakarta/ServletLimiterBuilder.java | /**
* Copyright 2023 Netflix, Inc.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.concurrency.limits.servlet.jakarta;
import com.netflix.concurrency.limits.Limiter;
import com.netflix.concurrency.limits.limiter.AbstractPartitionedLimiter;
import jakarta.servlet.http.HttpServletRequest;
import java.security.Principal;
import java.util.Optional;
import java.util.function.Function;
/**
* Builder to simplify creating a {@link Limiter} specific to a Servlet filter. By default,
* the same concurrency limit is shared by all requests. The limiter can be partitioned
* based on one of many request attributes. Only one type of partition may be specified.
*/
public final class ServletLimiterBuilder extends AbstractPartitionedLimiter.Builder<ServletLimiterBuilder, HttpServletRequest> {
    /**
     * Partition the limit by header.
     * @param name Header name whose value identifies the partition.
     * @return Chainable builder
     */
    public ServletLimiterBuilder partitionByHeader(String name) {
        // getHeader already returns null for a missing header;
        // Optional.ofNullable(...).orElse(null) was a redundant round-trip.
        return partitionResolver(request -> request.getHeader(name));
    }

    /**
     * Partition the limit by {@link Principal}. Percentages of the limit are partitioned to named
     * groups. Group membership is derived from the provided mapping function.
     * @param principalToGroup Mapping function from {@link Principal} to a named group.
     * @return Chainable builder
     */
    public ServletLimiterBuilder partitionByUserPrincipal(Function<Principal, String> principalToGroup) {
        return partitionResolver(request -> Optional.ofNullable(request.getUserPrincipal()).map(principalToGroup).orElse(null));
    }

    /**
     * Partition the limit by request attribute.
     * @param name Attribute name whose string form identifies the partition.
     * @return Chainable builder
     */
    public ServletLimiterBuilder partitionByAttribute(String name) {
        return partitionResolver(request -> Optional.ofNullable(request.getAttribute(name)).map(Object::toString).orElse(null));
    }

    /**
     * Partition the limit by request parameter.
     * @param name Parameter name whose value identifies the partition.
     * @return Chainable builder
     */
    public ServletLimiterBuilder partitionByParameter(String name) {
        // getParameter already returns null for a missing parameter.
        return partitionResolver(request -> request.getParameter(name));
    }

    /**
     * Partition the limit by the full path. Percentages of the limit are partitioned to named
     * groups. Group membership is derived from the provided mapping function.
     * @param pathToGroup Mapping function from full path to a named group.
     * @return Chainable builder
     */
    public ServletLimiterBuilder partitionByPathInfo(Function<String, String> pathToGroup) {
        return partitionResolver(request -> Optional.ofNullable(request.getPathInfo()).map(pathToGroup).orElse(null));
    }

    @Override
    protected ServletLimiterBuilder self() {
        return this;
    }
}
| 3,107 |
0 | Create_ds/Priam/priam/src/test/java/com/netflix | Create_ds/Priam/priam/src/test/java/com/netflix/priam/TestModule.java | /*
* Copyright 2017 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.priam;
import com.google.common.collect.ImmutableList;
import com.google.inject.AbstractModule;
import com.google.inject.Scopes;
import com.netflix.priam.backup.FakeCredentials;
import com.netflix.priam.backup.IBackupFileSystem;
import com.netflix.priam.backup.NullBackupFileSystem;
import com.netflix.priam.config.FakeBackupRestoreConfig;
import com.netflix.priam.config.FakeConfiguration;
import com.netflix.priam.config.IBackupRestoreConfig;
import com.netflix.priam.config.IConfiguration;
import com.netflix.priam.cred.ICredential;
import com.netflix.priam.identity.FakeMembership;
import com.netflix.priam.identity.FakePriamInstanceFactory;
import com.netflix.priam.identity.IMembership;
import com.netflix.priam.identity.IPriamInstanceFactory;
import com.netflix.priam.identity.config.FakeInstanceInfo;
import com.netflix.priam.identity.config.InstanceInfo;
import com.netflix.priam.utils.FakeSleeper;
import com.netflix.priam.utils.Sleeper;
import com.netflix.spectator.api.DefaultRegistry;
import com.netflix.spectator.api.Registry;
import org.junit.Ignore;
import org.quartz.SchedulerFactory;
import org.quartz.impl.StdSchedulerFactory;
@Ignore
public class TestModule extends AbstractModule {
    /** Wires the fake/in-memory implementations shared by Priam unit tests. */
    @Override
    protected void configure() {
        // Core configuration fakes.
        bind(IConfiguration.class).toInstance(new FakeConfiguration("fake-app"));
        bind(IBackupRestoreConfig.class).to(FakeBackupRestoreConfig.class);

        // Identity of the "local" instance plus the full fake membership ring.
        InstanceInfo localInstance = new FakeInstanceInfo("fakeInstance1", "az1", "us-east-1");
        bind(InstanceInfo.class).toInstance(localInstance);
        bind(IPriamInstanceFactory.class).to(FakePriamInstanceFactory.class).in(Scopes.SINGLETON);
        IMembership membership =
                new FakeMembership(
                        ImmutableList.of("fakeInstance1", "fakeInstance2", "fakeInstance3"));
        bind(IMembership.class).toInstance(membership);

        // Scheduling, credentials, backup filesystem and misc utilities.
        bind(SchedulerFactory.class).to(StdSchedulerFactory.class).in(Scopes.SINGLETON);
        bind(ICredential.class).to(FakeCredentials.class).in(Scopes.SINGLETON);
        bind(IBackupFileSystem.class).to(NullBackupFileSystem.class);
        bind(Sleeper.class).to(FakeSleeper.class);
        bind(Registry.class).toInstance(new DefaultRegistry());
    }
}
| 3,108 |
0 | Create_ds/Priam/priam/src/test/java/com/netflix/priam | Create_ds/Priam/priam/src/test/java/com/netflix/priam/tuner/JVMOptionTunerTest.java | /*
* Copyright 2016 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.priam.tuner;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import com.netflix.priam.config.FakeConfiguration;
import com.netflix.priam.config.IConfiguration;
import com.netflix.priam.scheduler.UnsupportedTypeException;
import java.util.*;
import java.util.stream.Collectors;
import org.junit.Test;
/** Created by aagrawal on 8/29/17. */
public class JVMOptionTunerTest {
private IConfiguration config;
JVMOptionsTuner tuner;
@Test
public void testCMS() throws Exception {
    config = new GCConfiguration(GCType.CMS, null, null, null, null);
    List<JVMOption> jvmOptionMap = getConfiguredJVMOptions(config);
    // Every GC-specific option left enabled by the tuner must belong to CMS.
    // The old map-to-1/0 then filter(!=0) dance collapses to a single filter+count.
    long failedVerification =
            jvmOptionMap
                    .stream()
                    .filter(
                            jvmOption -> {
                                GCType gcType = GCTuner.getGCType(jvmOption);
                                return gcType != null && gcType != GCType.CMS;
                            })
                    .count();
    if (failedVerification > 0) throw new Exception("Failed validation for CMS");
}
@Test
public void testG1GC() throws Exception {
    config = new GCConfiguration(GCType.G1GC, null, null, null, null);
    List<JVMOption> jvmOptionMap = getConfiguredJVMOptions(config);
    // Every GC-specific option left enabled by the tuner must belong to G1GC.
    // The old map-to-1/0 then filter(!=0) dance collapses to a single filter+count.
    long failedVerification =
            jvmOptionMap
                    .stream()
                    .filter(
                            jvmOption -> {
                                GCType gcType = GCTuner.getGCType(jvmOption);
                                return gcType != null && gcType != GCType.G1GC;
                            })
                    .count();
    if (failedVerification > 0) throw new Exception("Failed validation for G1GC");
}
@Test
public void testCMSUpsert() throws Exception {
    JVMOption option1 = new JVMOption("-Dsample");
    JVMOption option2 = new JVMOption("-Dsample2", "10", false, false);
    JVMOption option3 = new JVMOption("-XX:NumberOfGCLogFiles", "20", false, false);
    JVMOption xmnOption = new JVMOption("-Xmn", "3G", false, true);
    JVMOption xmxOption = new JVMOption("-Xmx", "20G", false, true);
    JVMOption xmsOption = new JVMOption("-Xms", "20G", false, true);
    // String.join replaces the old StringBuffer concatenation; StringBuffer's
    // synchronization buys nothing in this single-threaded test.
    String upsertOptions =
            String.join(
                    ",",
                    option1.toJVMOptionString(),
                    option2.toJVMOptionString(),
                    option3.toJVMOptionString());
    config =
            new GCConfiguration(
                    GCType.CMS,
                    null,
                    upsertOptions,
                    xmnOption.getValue(),
                    xmxOption.getValue());
    List<JVMOption> jvmOptions = getConfiguredJVMOptions(config);
    // Verify all the upserted options exist.
    assertTrue(jvmOptions.contains(option3));
    assertTrue(jvmOptions.contains(option2));
    assertTrue(jvmOptions.contains(option1));
    // Verify heap options exist with the values provided.
    assertTrue(jvmOptions.contains(xmnOption));
    assertTrue(jvmOptions.contains(xmxOption));
    assertTrue(jvmOptions.contains(xmsOption));
}
@Test
public void testCMSExclude() throws Exception {
JVMOption youngHeap = new JVMOption("-Xmn", "3G", false, true);
JVMOption maxHeap = new JVMOption("-Xmx", "12G", false, true);
JVMOption option1 = new JVMOption("-XX:+UseParNewGC");
JVMOption option2 = new JVMOption("-XX:NumberOfGCLogFiles", "20", false, false);
JVMOption option3 = new JVMOption("-XX:+UseG1GC", null, false, false);
StringBuffer buffer =
new StringBuffer(
option1.toJVMOptionString()
+ ","
+ option2.toJVMOptionString()
+ ","
+ option3.toJVMOptionString());
config = new GCConfiguration(GCType.CMS, buffer.toString(), null, "3G", "12G");
List<JVMOption> jvmOptions = getConfiguredJVMOptions(config);
// Verify all the options do not exist.
assertFalse(jvmOptions.contains(option3));
assertFalse(jvmOptions.contains(option2));
assertFalse(jvmOptions.contains(option1));
// Verify that Xmn is present since CMS needs tuning of young gen heap
assertTrue(jvmOptions.contains(maxHeap));
assertTrue(jvmOptions.contains(youngHeap));
}
@Test
public void testG1GCUpsertExclude() throws Exception {
JVMOption youngHeap = new JVMOption("-Xmn", "3G", true, true);
JVMOption maxHeap = new JVMOption("-Xmx", "12G", false, true);
JVMOption option1 = new JVMOption("-Dsample");
JVMOption option2 = new JVMOption("-Dsample2", "10", false, false);
JVMOption option3 = new JVMOption("-XX:NumberOfGCLogFiles", "20", false, false);
StringBuffer upsert =
new StringBuffer(
option1.toJVMOptionString()
+ ","
+ option2.toJVMOptionString()
+ ","
+ option3.toJVMOptionString());
JVMOption option4 = new JVMOption("-XX:NumberOfGCLogFiles", null, false, false);
JVMOption option5 = new JVMOption("-XX:+UseG1GC", null, false, false);
StringBuffer exclude =
new StringBuffer(option4.toJVMOptionString() + "," + option5.toJVMOptionString());
config =
new GCConfiguration(
GCType.G1GC, exclude.toString(), upsert.toString(), "3G", "12G");
List<JVMOption> jvmOptions = getConfiguredJVMOptions(config);
// Verify upserts exist
assertTrue(jvmOptions.contains(option1));
assertTrue(jvmOptions.contains(option2));
// Verify exclude exist. This is to prove that if an element is in EXCLUDE, it will always
// be excluded.
assertFalse(jvmOptions.contains(option3));
assertFalse(jvmOptions.contains(option4));
assertFalse(jvmOptions.contains(option5));
// Verify that Xmn is not present since G1GC autotunes the young gen heap
assertTrue(jvmOptions.contains(maxHeap));
assertFalse(jvmOptions.contains(youngHeap));
List<JVMOption> allJVMOptions = getConfiguredJVMOptions(config, false);
assertTrue(allJVMOptions.contains(youngHeap));
}
private List<JVMOption> getConfiguredJVMOptions(IConfiguration config) throws Exception {
return getConfiguredJVMOptions(config, true);
}
private List<JVMOption> getConfiguredJVMOptions(IConfiguration config, boolean filter)
throws Exception {
tuner = new JVMOptionsTuner(config);
List<String> configuredJVMOptions = tuner.updateJVMOptions();
if (filter) {
return configuredJVMOptions
.stream()
.map(JVMOption::parse)
.filter(jvmOption -> (jvmOption != null))
.filter(jvmOption -> !jvmOption.isCommented())
.collect(Collectors.toList());
} else {
return configuredJVMOptions.stream().map(JVMOption::parse).collect(Collectors.toList());
}
}
private class GCConfiguration extends FakeConfiguration {
private GCType gcType;
private String configuredJVMExclude;
private String configuredJVMUpsert;
private String configuredHeapNewSize;
private String configuredHeapSize;
GCConfiguration(
GCType gcType,
String configuredJVMExclude,
String configuredJVMUpsert,
String configuredHeapNewSize,
String configuredHeapSize) {
this.gcType = gcType;
this.configuredJVMExclude = configuredJVMExclude;
this.configuredJVMUpsert = configuredJVMUpsert;
this.configuredHeapNewSize = configuredHeapNewSize;
this.configuredHeapSize = configuredHeapSize;
}
@Override
public GCType getGCType() throws UnsupportedTypeException {
return gcType;
}
@Override
public String getJVMExcludeSet() {
return configuredJVMExclude;
}
@Override
public String getHeapSize() {
return configuredHeapSize;
}
@Override
public String getHeapNewSize() {
return configuredHeapNewSize;
}
@Override
public String getJVMUpsertSet() {
return configuredJVMUpsert;
}
}
}
| 3,109 |
0 | Create_ds/Priam/priam/src/test/java/com/netflix/priam | Create_ds/Priam/priam/src/test/java/com/netflix/priam/tuner/StandardTunerTest.java | /*
* Copyright 2017 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.priam.tuner;
import static org.junit.Assert.assertEquals;
import com.google.common.io.Files;
import com.google.inject.Guice;
import com.google.inject.Injector;
import com.netflix.priam.backup.BRTestModule;
import com.netflix.priam.config.BackupRestoreConfig;
import com.netflix.priam.config.FakeConfiguration;
import com.netflix.priam.config.IBackupRestoreConfig;
import com.netflix.priam.config.IConfiguration;
import com.netflix.priam.identity.config.InstanceInfo;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileReader;
import java.nio.charset.Charset;
import java.nio.file.Paths;
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;
import org.apache.commons.io.FileUtils;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.yaml.snakeyaml.DumperOptions;
import org.yaml.snakeyaml.Yaml;
public class StandardTunerTest {
    /* note: these are, more or less, arbitrary partitioner class names. as long as the tests exercise the code, all is good */
    private static final String A_PARTITIONER = "com.netflix.priam.utils.NonexistentPartitioner";
    private static final String RANDOM_PARTITIONER = "org.apache.cassandra.dht.RandomPartitioner";
    private static final String MURMUR_PARTITIONER = "org.apache.cassandra.dht.Murmur3Partitioner";
    private static final String BOP_PARTITIONER = "org.apache.cassandra.dht.ByteOrderedPartitioner";
    private final StandardTuner tuner;
    private final InstanceInfo instanceInfo;
    private final IBackupRestoreConfig backupRestoreConfig;
    // Scratch yaml written by the tuner; deleted before and after every test by cleanup().
    private final File target = new File("/tmp/priam_test.yaml");
    private IConfiguration config;

    public StandardTunerTest() {
        Injector injector = Guice.createInjector(new BRTestModule());
        this.tuner = injector.getInstance(StandardTuner.class);
        this.instanceInfo = injector.getInstance(InstanceInfo.class);
        this.backupRestoreConfig = injector.getInstance(BackupRestoreConfig.class);
        this.config = injector.getInstance(IConfiguration.class);
        File targetDir = new File(config.getYamlLocation()).getParentFile();
        if (!targetDir.exists()) targetDir.mkdirs();
    }

    @Test
    public void derivePartitioner_NullYamlEntry() {
        // Falls back to the configured partitioner when the yaml has none.
        String partitioner = tuner.derivePartitioner(null, A_PARTITIONER);
        assertEquals(A_PARTITIONER, partitioner);
    }

    @Test
    public void derivePartitioner_EmptyYamlEntry() {
        String partitioner = tuner.derivePartitioner("", A_PARTITIONER);
        assertEquals(A_PARTITIONER, partitioner);
    }

    @Test
    public void derivePartitioner_RandomPartitioner() {
        String partitioner = tuner.derivePartitioner(RANDOM_PARTITIONER, RANDOM_PARTITIONER);
        assertEquals(RANDOM_PARTITIONER, partitioner);
    }

    @Test
    public void derivePartitioner_MurmurPartitioner() {
        String partitioner = tuner.derivePartitioner(MURMUR_PARTITIONER, MURMUR_PARTITIONER);
        assertEquals(MURMUR_PARTITIONER, partitioner);
    }

    @Test
    public void derivePartitioner_BOPPartitionerInYaml() {
        // A ByteOrdered entry in the yaml wins over the configured partitioner.
        String partitioner = tuner.derivePartitioner(BOP_PARTITIONER, MURMUR_PARTITIONER);
        assertEquals(BOP_PARTITIONER, partitioner);
    }

    @Test
    public void derivePartitioner_BOPPartitionerInConfig() {
        // A ByteOrdered entry in the config likewise wins over the yaml value.
        String partitioner = tuner.derivePartitioner(RANDOM_PARTITIONER, BOP_PARTITIONER);
        assertEquals(BOP_PARTITIONER, partitioner);
    }

    @Before
    @After
    public void cleanup() {
        FileUtils.deleteQuietly(target);
    }

    @Test
    public void dump() throws Exception {
        // Smoke test: tuning a pristine yaml must not throw.
        Files.copy(new File("src/main/resources/incr-restore-cassandra.yaml"), target);
        tuner.writeAllProperties(target.getAbsolutePath(), "your_host", "YourSeedProvider");
    }

    @Test
    public void addExtraParams() throws Exception {
        String cassParamName1 = "client_encryption_options.optional";
        String priamKeyName1 = "Priam.client_encryption.optional";
        String cassParamName2 = "client_encryption_options.keystore_password";
        String priamKeyName2 = "Priam.client_encryption.keystore_password";
        String cassParamName3 = "randomKey";
        String priamKeyName3 = "Priam.randomKey";
        String cassParamName4 = "randomGroup.randomKey";
        String priamKeyName4 = "Priam.randomGroup.randomKey";
        String extraConfigParam =
                String.format(
                        "%s=%s,%s=%s,%s=%s,%s=%s",
                        priamKeyName1,
                        cassParamName1,
                        priamKeyName2,
                        cassParamName2,
                        priamKeyName3,
                        cassParamName3,
                        priamKeyName4,
                        cassParamName4);
        Map<String, Object> extraParamValues = new HashMap<>();
        extraParamValues.put(priamKeyName1, true);
        extraParamValues.put(priamKeyName2, "test");
        extraParamValues.put(priamKeyName3, "randomKeyValue");
        extraParamValues.put(priamKeyName4, "randomGroupValue");
        StandardTuner tuner =
                new StandardTuner(
                        new TunerConfiguration(extraConfigParam, extraParamValues),
                        backupRestoreConfig,
                        instanceInfo);
        Files.copy(new File("src/main/resources/incr-restore-cassandra.yaml"), target);
        tuner.writeAllProperties(target.getAbsolutePath(), "your_host", "YourSeedProvider");
        // Read the tuned file and verify.
        DumperOptions options = new DumperOptions();
        options.setDefaultFlowStyle(DumperOptions.FlowStyle.BLOCK);
        Yaml yaml = new Yaml(options);
        Map<String, Object> map;
        // try-with-resources: the original leaked this FileInputStream.
        try (FileInputStream fis = new FileInputStream(target)) {
            map = yaml.load(fis);
        }
        Assert.assertEquals("your_host", map.get("listen_address"));
        Assert.assertEquals(
                "true", ((Map<?, ?>) map.get("client_encryption_options")).get("optional"));
        Assert.assertEquals(
                "test",
                ((Map<?, ?>) map.get("client_encryption_options")).get("keystore_password"));
        Assert.assertEquals("randomKeyValue", map.get("randomKey"));
        Assert.assertEquals(
                "randomGroupValue", ((Map<?, ?>) map.get("randomGroup")).get("randomKey"));
    }

    /** Fake configuration exposing extra cassandra.yaml overrides to the tuner. */
    private class TunerConfiguration extends FakeConfiguration {
        private final String extraConfigParams;
        private final Map<String, Object> extraParamValues;

        TunerConfiguration(String extraConfigParam, Map<String, Object> extraParamValues) {
            this.extraConfigParams = extraConfigParam;
            this.extraParamValues = extraParamValues;
        }

        @Override
        public String getCassYamlVal(String priamKey) {
            return extraParamValues.getOrDefault(priamKey, "").toString();
        }

        @Override
        public String getExtraConfigParams() {
            return extraConfigParams;
        }
    }

    @Test
    public void testPropertiesFiles() throws Exception {
        FakeConfiguration fake = (FakeConfiguration) config;
        File testRackDcFile = new File("src/test/resources/conf/cassandra-rackdc.properties");
        File testYamlFile = new File("src/main/resources/incr-restore-cassandra.yaml");
        String propertiesPath = new File(config.getYamlLocation()).getParentFile().getPath();
        File rackDcFile =
                new File(
                        Paths.get(propertiesPath, "cassandra-rackdc.properties")
                                .normalize()
                                .toString());
        File configFile =
                new File(Paths.get(propertiesPath, "properties_test.yaml").normalize().toString());
        Files.copy(testRackDcFile, rackDcFile);
        Files.copy(testYamlFile, configFile);
        try {
            fake.fakeProperties.put(
                    "propertyOverrides.cassandra-rackdc",
                    "dc=${dc},rack=${rac},ec2_naming_scheme=legacy,dc_suffix=testsuffix");
            tuner.writeAllProperties(configFile.getPath(), "your_host", "YourSeedProvider");
            Properties prop = new Properties();
            // try-with-resources: the original leaked this FileReader.
            try (FileReader reader = new FileReader(rackDcFile)) {
                prop.load(reader);
            }
            // ${dc}/${rac} placeholders must be substituted from instance info.
            assertEquals("us-east-1", prop.getProperty("dc"));
            assertEquals("my_zone", prop.getProperty("rack"));
            assertEquals("legacy", prop.getProperty("ec2_naming_scheme"));
            assertEquals("testsuffix", prop.getProperty("dc_suffix"));
            assertEquals(4, prop.stringPropertyNames().size());
        } finally {
            // Restore pristine copies so other tests see unmodified files.
            fake.fakeProperties.clear();
            Files.copy(testRackDcFile, rackDcFile);
            Files.copy(testYamlFile, configFile);
        }
    }
}
| 3,110 |
0 | Create_ds/Priam/priam/src/test/java/com/netflix/priam/tuner | Create_ds/Priam/priam/src/test/java/com/netflix/priam/tuner/dse/DseTunerTest.java | /*
* Copyright 2017 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.priam.tuner.dse;
import com.google.common.io.Files;
import com.netflix.priam.config.FakeConfiguration;
import com.netflix.priam.config.IConfiguration;
import com.netflix.priam.dse.DseConfigStub;
import java.io.File;
import java.io.FileReader;
import java.io.IOException;
import java.util.Properties;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
public class DseTunerTest {
    private IConfiguration config;
    private DseConfigStub dseConfig;
    private AuditLogTunerYaml auditLogTunerYaml;
    private AuditLogTunerLog4J auditLogTunerLog4j;
    private File targetFile;
    private File targetDseYamlFile;

    @Before
    public void setup() throws IOException {
        config = new FakeConfiguration();
        dseConfig = new DseConfigStub();
        auditLogTunerYaml = new AuditLogTunerYaml(dseConfig);
        auditLogTunerLog4j = new AuditLogTunerLog4J(config, dseConfig);
        File targetDir = new File(config.getCassHome() + "/conf");
        if (!targetDir.exists()) targetDir.mkdirs();
        // Start every test from a pristine copy of the audit log properties file.
        targetFile = new File(config.getCassHome() + AuditLogTunerLog4J.AUDIT_LOG_FILE);
        Files.copy(new File("src/test/resources/" + AuditLogTunerLog4J.AUDIT_LOG_FILE), targetFile);
    }

    @Test
    public void auditLogProperties_Enabled() throws IOException {
        dseConfig.setAuditLogEnabled(true);
        auditLogTunerLog4j.tuneAuditLog();
        Properties p = loadAuditLogProperties();
        Assert.assertTrue(p.containsKey(AuditLogTunerLog4J.PRIMARY_AUDIT_LOG_ENTRY));
    }

    @Test
    public void auditLogProperties_Disabled() throws IOException {
        dseConfig.setAuditLogEnabled(false);
        auditLogTunerLog4j.tuneAuditLog();
        Properties p = loadAuditLogProperties();
        Assert.assertFalse(p.containsKey(AuditLogTunerLog4J.PRIMARY_AUDIT_LOG_ENTRY));
    }

    /**
     * This is different because we test the disabled step using the already used enabled file (not
     * a clean copy over of the original props file from the resources dir), and vice versa
     *
     * @throws IOException on copy or tuning failure
     */
    @Test
    public void auditLogProperties_ThereAndBackAgain() throws IOException {
        auditLogProperties_Enabled();
        auditLogProperties_Disabled();
        auditLogProperties_Enabled();
        auditLogProperties_Disabled();
        auditLogProperties_Disabled();
        auditLogProperties_Enabled();
        auditLogProperties_Enabled();
        auditLogProperties_Enabled();
        auditLogProperties_Enabled();
        auditLogProperties_Disabled();
        auditLogProperties_Disabled();
        auditLogProperties_Disabled();
        auditLogProperties_Disabled();
        auditLogProperties_Disabled();
        auditLogProperties_Disabled();
        auditLogProperties_Disabled();
        auditLogProperties_Disabled();
        auditLogProperties_Disabled();
        auditLogProperties_Enabled();
        auditLogProperties_Enabled();
    }

    @Test
    public void auditLogYamlProperties_Enabled() throws IOException {
        copyDseYamlToTarget();
        dseConfig.setAuditLogEnabled(true);
        auditLogTunerYaml.tuneAuditLog();
    }

    @Test
    public void auditLogYamlProperties_Disabled() throws IOException {
        copyDseYamlToTarget();
        dseConfig.setAuditLogEnabled(false);
        auditLogTunerYaml.tuneAuditLog();
    }

    /** Loads the tuned audit log properties, closing the reader (the original code leaked it). */
    private Properties loadAuditLogProperties() throws IOException {
        Properties p = new Properties();
        try (FileReader reader = new FileReader(targetFile)) {
            p.load(reader);
        }
        return p;
    }

    /** Copies the reference dse.yaml from test resources into the fake DSE conf directory. */
    private void copyDseYamlToTarget() throws IOException {
        File targetDseDir = new File(config.getCassHome() + "/resources/dse/conf/");
        if (!targetDseDir.exists()) {
            targetDseDir.mkdirs();
        }
        int index = dseConfig.getDseYamlLocation().lastIndexOf('/') + 1;
        targetDseYamlFile =
                new File(targetDseDir + dseConfig.getDseYamlLocation().substring(index - 1));
        Files.copy(
                new File(
                        "src/test/resources/conf/"
                                + dseConfig.getDseYamlLocation().substring(index)),
                targetDseYamlFile);
    }
}
| 3,111 |
0 | Create_ds/Priam/priam/src/test/java/com/netflix/priam | Create_ds/Priam/priam/src/test/java/com/netflix/priam/identity/FakePriamInstanceFactory.java | /*
* Copyright 2018 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.priam.identity;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Maps;
import com.netflix.priam.identity.config.InstanceInfo;
import java.util.Comparator;
import java.util.Map;
import java.util.stream.Collectors;
import javax.inject.Inject;
import javax.inject.Singleton;
@Singleton
public class FakePriamInstanceFactory implements IPriamInstanceFactory {
    // In-memory instance registry keyed by Cassandra slot id.
    private final Map<Integer, PriamInstance> instances = Maps.newHashMap();
    private final InstanceInfo instanceInfo;

    @Inject
    public FakePriamInstanceFactory(InstanceInfo instanceInfo) {
        this.instanceInfo = instanceInfo;
    }

    /** Returns every registered instance ordered by id; an app name ending in "-dead" has none. */
    @Override
    public ImmutableSet<PriamInstance> getAllIds(String appName) {
        if (appName.endsWith("-dead")) {
            return ImmutableSet.of();
        }
        return ImmutableSet.copyOf(
                instances
                        .values()
                        .stream()
                        .sorted(Comparator.comparingInt(PriamInstance::getId))
                        .collect(Collectors.toList()));
    }

    /** Looks up an instance by slot id; appName and dc are ignored by this fake. */
    @Override
    public PriamInstance getInstance(String appName, String dc, int id) {
        return instances.get(id);
    }

    /** Registers and returns a new instance; the DC always comes from {@code instanceInfo}. */
    @Override
    public PriamInstance create(
            String app,
            int id,
            String instanceID,
            String hostname,
            String ip,
            String rac,
            Map<String, Object> volumes,
            String payload) {
        PriamInstance created = new PriamInstance();
        created.setId(id);
        created.setApp(app);
        created.setInstanceId(instanceID);
        created.setHost(hostname, ip);
        created.setRac(rac);
        created.setVolumes(volumes);
        created.setToken(payload);
        created.setDC(instanceInfo.getRegion());
        instances.put(id, created);
        return created;
    }

    /** Removes the instance occupying the given instance's slot. */
    @Override
    public void delete(PriamInstance inst) {
        instances.remove(inst.getId());
    }

    /** Replaces whatever occupies the new instance's slot; {@code orig} is not consulted. */
    @Override
    public void update(PriamInstance orig, PriamInstance inst) {
        instances.put(inst.getId(), inst);
    }
}
| 3,112 |
0 | Create_ds/Priam/priam/src/test/java/com/netflix/priam | Create_ds/Priam/priam/src/test/java/com/netflix/priam/identity/FakeMembership.java | /*
* Copyright 2018 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.priam.identity;
import com.google.common.collect.ImmutableSet;
import java.util.Collection;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
public class FakeMembership implements IMembership {
    // Fixed rac-membership snapshot captured at construction time.
    private final ImmutableSet<String> instances;
    // Mutable ACL store; the port range arguments are ignored by this fake.
    private final Set<String> acl = new HashSet<>();

    public FakeMembership(List<String> priamInstances) {
        this.instances = ImmutableSet.copyOf(priamInstances);
    }

    @Override
    public ImmutableSet<String> getRacMembership() {
        return instances;
    }

    @Override
    public ImmutableSet<String> getCrossAccountRacMembership() {
        return null;
    }

    /** This fake always reports three members per rack. */
    @Override
    public int getRacMembershipSize() {
        return 3;
    }

    /** This fake always reports three racks. */
    @Override
    public int getRacCount() {
        return 3;
    }

    @Override
    public void addACL(Collection<String> listIPs, int from, int to) {
        acl.addAll(listIPs);
    }

    @Override
    public void removeACL(Collection<String> listIPs, int from, int to) {
        acl.removeAll(listIPs);
    }

    @Override
    public ImmutableSet<String> listACL(int from, int to) {
        return ImmutableSet.copyOf(acl);
    }

    @Override
    public void expandRacMembership(int count) {
        // Intentionally a no-op in this fake.
    }
}
| 3,113 |
0 | Create_ds/Priam/priam/src/test/java/com/netflix/priam/identity | Create_ds/Priam/priam/src/test/java/com/netflix/priam/identity/token/TokenRetrieverUtilsTest.java | package com.netflix.priam.identity.token;
import static org.hamcrest.core.AllOf.allOf;
import static org.hamcrest.core.IsNot.not;
import com.google.common.collect.ImmutableSet;
import com.netflix.priam.identity.PriamInstance;
import com.netflix.priam.utils.SystemUtils;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.function.Function;
import java.util.stream.Collectors;
import java.util.stream.IntStream;
import mockit.Expectations;
import mockit.Mocked;
import org.codehaus.jettison.json.JSONObject;
import org.junit.Assert;
import org.junit.Test;
public class TokenRetrieverUtilsTest {
    // Cassandra app name shared by all fake instances.
    private static final String APP = "testapp";
    // Priam status endpoint template used to query each node's gossip view.
    private static final String STATUS_URL_FORMAT = "http://%s:8080/Priam/REST/v1/cassadmin/status";
    // Six fake instances: ids 0-2 in az1, ids 3-5 in az2; instance i owns token "i"
    // and has IP 127.0.0.i.
    private ImmutableSet<PriamInstance> instances =
            ImmutableSet.copyOf(
                    IntStream.range(0, 6)
                            .<PriamInstance>mapToObj(
                                    e ->
                                            newMockPriamInstance(
                                                    APP,
                                                    "us-east",
                                                    (e < 3) ? "az1" : "az2",
                                                    e,
                                                    String.format("fakeInstance-%d", e),
                                                    String.format("127.0.0.%d", e),
                                                    String.format("fakeHost-%d", e),
                                                    String.valueOf(e)))
                            .collect(Collectors.toList()));
    // Gossip token map: token "i" -> endpoint 127.0.0.i.
    private Map<String, String> tokenToEndpointMap =
            IntStream.range(0, 6)
                    .mapToObj(e -> Integer.valueOf(e))
                    .collect(
                            Collectors.toMap(
                                    e -> String.valueOf(e), e -> String.format("127.0.0.%s", e)));
    // Endpoints reported "live" by gossip (all six by default).
    private List<String> liveInstances =
            IntStream.range(0, 6)
                    .mapToObj(e -> String.format("127.0.0.%d", e))
                    .collect(Collectors.toList());

    // When every reachable node reports the same owner for the dead token, gossip agrees
    // and the owner's IP is returned.
    @Test
    public void testRetrieveTokenOwnerWhenGossipAgrees(@Mocked SystemUtils systemUtils) {
        // mark previous instance with tokenNumber 4 as down in gossip.
        List<String> myliveInstances =
                liveInstances
                        .stream()
                        .filter(x -> !x.equalsIgnoreCase("127.0.0.4"))
                        .collect(Collectors.toList());
        new Expectations() {
            {
                SystemUtils.getDataFromUrl(anyString);
                result = getStatus(myliveInstances, tokenToEndpointMap);
            }
        };
        TokenRetrieverUtils.InferredTokenOwnership inferredTokenOwnership =
                TokenRetrieverUtils.inferTokenOwnerFromGossip(instances, "4", "us-east");
        Assert.assertEquals(
                "127.0.0.4", inferredTokenOwnership.getTokenInformation().getIpAddress());
    }

    // Some nodes say 127.0.0.4 is down, others say it is live, and one node is
    // unreachable; the result must be flagged as a MISMATCH.
    @Test
    public void testRetrieveTokenOwnerWhenGossipDisagrees(@Mocked SystemUtils systemUtils) {
        List<String> myliveInstances =
                liveInstances
                        .stream()
                        .filter(x -> !x.equalsIgnoreCase("127.0.0.4"))
                        .collect(Collectors.toList());
        new Expectations() {
            {
                // Nodes other than .0, .2 and .5 report the token owner as down.
                SystemUtils.getDataFromUrl(
                        withArgThat(
                                allOf(
                                        not(String.format(STATUS_URL_FORMAT, "127.0.0.0")),
                                        not(String.format(STATUS_URL_FORMAT, "127.0.0.2")),
                                        not(String.format(STATUS_URL_FORMAT, "127.0.0.5")))));
                result = getStatus(myliveInstances, tokenToEndpointMap);
                minTimes = 0;
                // .0 and .2 report the token owner as live; .5 returns no data.
                SystemUtils.getDataFromUrl(String.format(STATUS_URL_FORMAT, "127.0.0.0"));
                result = getStatus(liveInstances, tokenToEndpointMap);
                minTimes = 0;
                SystemUtils.getDataFromUrl(String.format(STATUS_URL_FORMAT, "127.0.0.2"));
                result = getStatus(liveInstances, tokenToEndpointMap);
                minTimes = 0;
                SystemUtils.getDataFromUrl(String.format(STATUS_URL_FORMAT, "127.0.0.5"));
                result = null;
                minTimes = 0;
            }
        };
        TokenRetrieverUtils.InferredTokenOwnership inferredTokenOwnership =
                TokenRetrieverUtils.inferTokenOwnerFromGossip(instances, "4", "us-east");
        Assert.assertEquals(
                TokenRetrieverUtils.InferredTokenOwnership.TokenInformationStatus.MISMATCH,
                inferredTokenOwnership.getTokenInformationStatus());
        Assert.assertTrue(inferredTokenOwnership.getTokenInformation().isLive());
    }

    // With only two reachable peers reporting different endpoints for token "1", the
    // ownership must also be flagged as a MISMATCH.
    @Test
    public void testRetrieveTokenOwnerWhenGossipDisagrees_2Nodes(@Mocked SystemUtils systemUtils) {
        ImmutableSet<PriamInstance> myInstances =
                ImmutableSet.copyOf(instances.stream().limit(3).collect(Collectors.toList()));
        List<String> myLiveInstances = liveInstances.stream().limit(3).collect(Collectors.toList());
        Map<String, String> myTokenToEndpointMap =
                IntStream.range(0, 3)
                        .mapToObj(String::valueOf)
                        .collect(
                                Collectors.toMap(
                                        Function.identity(), (i) -> tokenToEndpointMap.get(i)));
        // Node .2's view maps token "1" to a different endpoint than node .0's view.
        Map<String, String> alteredMap = new HashMap<>(myTokenToEndpointMap);
        alteredMap.put("1", "1.2.3.4");
        new Expectations() {
            {
                SystemUtils.getDataFromUrl(String.format(STATUS_URL_FORMAT, "127.0.0.0"));
                result = getStatus(myLiveInstances, myTokenToEndpointMap);
                minTimes = 0;
                SystemUtils.getDataFromUrl(String.format(STATUS_URL_FORMAT, "127.0.0.2"));
                result = getStatus(myLiveInstances, alteredMap);
                minTimes = 0;
            }
        };
        TokenRetrieverUtils.InferredTokenOwnership inferredTokenOwnership =
                TokenRetrieverUtils.inferTokenOwnerFromGossip(myInstances, "1", "us-east");
        Assert.assertEquals(
                TokenRetrieverUtils.InferredTokenOwnership.TokenInformationStatus.MISMATCH,
                inferredTokenOwnership.getTokenInformationStatus());
        Assert.assertTrue(inferredTokenOwnership.getTokenInformation().isLive());
    }

    // All nodes report the same consistent view, so the inferred ownership is GOOD.
    @Test
    public void testRetrieveTokenOwnerWhenAllHostsInGossipReturnsNull(
            @Mocked SystemUtils systemUtils) throws Exception {
        new Expectations() {
            {
                SystemUtils.getDataFromUrl(anyString);
                result = getStatus(liveInstances, tokenToEndpointMap);
            }
        };
        TokenRetrieverUtils.InferredTokenOwnership inferredTokenOwnership =
                TokenRetrieverUtils.inferTokenOwnerFromGossip(instances, "4", "us-east");
        Assert.assertEquals(
                TokenRetrieverUtils.InferredTokenOwnership.TokenInformationStatus.GOOD,
                inferredTokenOwnership.getTokenInformationStatus());
        Assert.assertTrue(inferredTokenOwnership.getTokenInformation().isLive());
    }

    // Every gossip query fails, so no token information can be inferred at all.
    @Test
    public void testRetrieveTokenOwnerWhenAllInstancesThrowGossipParseException(
            @Mocked SystemUtils systemUtils) {
        new Expectations() {
            {
                SystemUtils.getDataFromUrl(anyString);
                result = new TokenRetrieverUtils.GossipParseException("Test");
            }
        };
        TokenRetrieverUtils.InferredTokenOwnership inferredTokenOwnership =
                TokenRetrieverUtils.inferTokenOwnerFromGossip(instances, "4", "us-east");
        Assert.assertEquals(
                TokenRetrieverUtils.InferredTokenOwnership.TokenInformationStatus.UNREACHABLE_NODES,
                inferredTokenOwnership.getTokenInformationStatus());
        Assert.assertNull(inferredTokenOwnership.getTokenInformation());
    }

    /** Builds a single gossip-info JSON record for one node. Currently unused by the tests. */
    private String newGossipRecord(
            int tokenNumber, String ip, String dc, String rack, String status) {
        return String.format(
                "{\"TOKENS\":\"[%d]\",\"PUBLIC_IP\":\"%s\",\"RACK\":\"%s\",\"STATUS\":\"%s\",\"DC\":\"%s\"}",
                tokenNumber, ip, dc, status, rack);
    }

    /** Builds the JSON payload the Priam status endpoint would return for a node. */
    private String getStatus(List<String> liveInstances, Map<String, String> tokenToEndpointMap) {
        JSONObject jsonObject = new JSONObject();
        try {
            jsonObject.put("live", liveInstances);
            jsonObject.put("tokenToEndpointMap", tokenToEndpointMap);
        } catch (Exception e) {
            // Best-effort: a JSON failure just yields a payload without these fields.
        }
        return jsonObject.toString();
    }

    /** Creates a fully-populated fake {@link PriamInstance} for the given identity fields. */
    private PriamInstance newMockPriamInstance(
            String app,
            String dc,
            String rack,
            int id,
            String instanceId,
            String hostIp,
            String hostName,
            String token) {
        PriamInstance priamInstance = new PriamInstance();
        priamInstance.setApp(app);
        priamInstance.setDC(dc);
        priamInstance.setRac(rack);
        priamInstance.setId(id);
        priamInstance.setInstanceId(instanceId);
        priamInstance.setHost(hostName);
        priamInstance.setHostIP(hostIp);
        priamInstance.setToken(token);
        return priamInstance;
    }
}
| 3,114 |
0 | Create_ds/Priam/priam/src/test/java/com/netflix/priam/identity | Create_ds/Priam/priam/src/test/java/com/netflix/priam/identity/token/TokenRetrieverTest.java | /*
* Copyright 2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.priam.identity.token;
import com.google.common.collect.*;
import com.google.common.truth.Truth;
import com.google.inject.Guice;
import com.google.inject.Injector;
import com.netflix.priam.backup.BRTestModule;
import com.netflix.priam.config.FakeConfiguration;
import com.netflix.priam.config.IConfiguration;
import com.netflix.priam.identity.IMembership;
import com.netflix.priam.identity.IPriamInstanceFactory;
import com.netflix.priam.identity.PriamInstance;
import com.netflix.priam.identity.config.FakeInstanceInfo;
import com.netflix.priam.identity.config.InstanceInfo;
import com.netflix.priam.utils.FakeSleeper;
import com.netflix.priam.utils.SystemUtils;
import com.netflix.priam.utils.TokenManager;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;
import java.util.stream.IntStream;
import mockit.Expectations;
import mockit.Mocked;
import org.apache.commons.lang3.math.Fraction;
import org.codehaus.jettison.json.JSONObject;
import org.junit.Assert;
import org.junit.Test;
import org.junit.jupiter.api.Assertions;
/**
 * Unit tests for {@code TokenRetriever}: grabbing existing/dead/pre-generated token slots,
 * gossip-driven replacement decisions, new-token generation, and ring-position reporting.
 * Uses JMockit {@code Expectations} to script {@code IMembership} and gossip HTTP responses.
 * Created by aagrawal on 3/1/19.
 */
public class TokenRetrieverTest {
    @Mocked private IMembership membership;
    private IPriamInstanceFactory factory;
    private InstanceInfo instanceInfo;
    private IConfiguration configuration;
    // Canned gossip state: tokens "0".."5" owned by endpoints 127.0.0.0 .. 127.0.0.5.
    private Map<String, String> tokenToEndpointMap =
            IntStream.range(0, 6)
                    .boxed()
                    .collect(
                            Collectors.toMap(String::valueOf, e -> String.format("127.0.0.%s", e)));
    private ImmutableList<String> liveInstances = ImmutableList.copyOf(tokenToEndpointMap.values());
    public TokenRetrieverTest() {
        Injector injector = Guice.createInjector(new BRTestModule());
        instanceInfo = injector.getInstance(InstanceInfo.class);
        configuration = injector.getInstance(IConfiguration.class);
        factory = injector.getInstance(IPriamInstanceFactory.class);
    }
    @Test
    public void testNoReplacementNormalScenario() throws Exception {
        new Expectations() {
            {
                membership.getRacMembership();
                result = ImmutableSet.of();
            }
        };
        // No instances registered in the token database -> nothing to grab.
        PriamInstance priamInstance = getTokenRetriever().grabExistingToken();
        Truth.assertThat(priamInstance).isNull();
    }
    @Test
    // There is no slot available for replacement as per Token Database.
    public void testNoReplacementNoSpotAvailable() throws Exception {
        List<PriamInstance> allInstances = getInstances(1);
        Set<String> racMembership = getRacMembership(1);
        racMembership.add(instanceInfo.getInstanceId());
        new Expectations() {
            {
                membership.getRacMembership();
                result = ImmutableSet.copyOf(racMembership);
            }
        };
        TokenRetriever tokenRetriever = getTokenRetriever();
        Truth.assertThat(tokenRetriever.grabExistingToken()).isNull();
        Truth.assertThat(tokenRetriever.getReplacedIp().isPresent()).isFalse();
        // The token database must be left untouched.
        Truth.assertThat(factory.getAllIds(configuration.getAppName()))
                .containsExactlyElementsIn(allInstances);
    }
    @Test
    // There is a potential slot for dead token but we are unable to replace.
    public void testNoReplacementNoGossipMatch(@Mocked SystemUtils systemUtils) throws Exception {
        getInstances(2);
        Set<String> racMembership = getRacMembership(1);
        racMembership.add(instanceInfo.getInstanceId());
        // gossip info returns null, thus unable to replace the instance.
        new Expectations() {
            {
                membership.getRacMembership();
                result = ImmutableSet.copyOf(racMembership);
                SystemUtils.getDataFromUrl(anyString);
                result = getStatus(liveInstances, tokenToEndpointMap);
                times = 1;
            }
        };
        TokenRetriever tokenRetriever = getTokenRetriever();
        Truth.assertThat(tokenRetriever.grabExistingToken()).isNull();
        Truth.assertThat(tokenRetriever.getReplacedIp().isPresent()).isFalse();
    }
    @Test
    // There is a potential slot for dead token but we are unable to replace.
    public void testUsePregeneratedTokenWhenThereIsNoGossipMatchForDeadToken(
            @Mocked SystemUtils systemUtils) throws Exception {
        // "new_slot" marks a pre-generated (never-owned) token slot.
        create(0, "iid_0", "host_0", "127.0.0.0", instanceInfo.getRac(), 0 + "");
        create(1, "new_slot", "host_1", "127.0.0.1", instanceInfo.getRac(), 1 + "");
        // gossip info returns null, thus unable to replace the instance.
        new Expectations() {
            {
                membership.getRacMembership();
                result = ImmutableSet.of();
                SystemUtils.getDataFromUrl(anyString);
                result = getStatus(liveInstances, tokenToEndpointMap);
                times = 1;
            }
        };
        TokenRetriever tokenRetriever = getTokenRetriever();
        PriamInstance instance = tokenRetriever.grabExistingToken();
        Truth.assertThat(instance).isNotNull();
        Truth.assertThat(instance.getId()).isEqualTo(1);
        // Pre-generated slot: no IP is being replaced.
        Truth.assertThat(tokenRetriever.getReplacedIp().isPresent()).isFalse();
    }
    @Test
    public void testReplacementGossipMatch(@Mocked SystemUtils systemUtils) throws Exception {
        getInstances(6);
        Set<String> racMembership = getRacMembership(2);
        racMembership.add(instanceInfo.getInstanceId());
        // 127.0.0.3 is absent from the live list -> it is the dead node to replace.
        List<String> myliveInstances =
                liveInstances
                        .stream()
                        .filter(x -> !x.equalsIgnoreCase("127.0.0.3"))
                        .collect(Collectors.toList());
        String gossipResponse = getStatus(myliveInstances, tokenToEndpointMap);
        new Expectations() {
            {
                membership.getRacMembership();
                result = ImmutableSet.copyOf(racMembership);
                SystemUtils.getDataFromUrl(anyString);
                // Mixed responses (null / garbage) exercise gossip-poll retry tolerance.
                returns(gossipResponse, gossipResponse, null, "random_value", gossipResponse);
            }
        };
        TokenRetriever tokenRetriever = getTokenRetriever();
        Truth.assertThat(tokenRetriever.grabExistingToken()).isNotNull();
        Truth.assertThat(tokenRetriever.getReplacedIp().isPresent()).isTrue();
        Truth.assertThat(tokenRetriever.getReplacedIp().get()).isEqualTo("127.0.0.3");
    }
    @Test
    public void testPrioritizeDeadTokens(@Mocked SystemUtils systemUtils) throws Exception {
        create(0, "iid_0", "host_0", "127.0.0.0", instanceInfo.getRac(), 0 + "");
        create(1, "new_slot", "host_1", "127.0.0.1", instanceInfo.getRac(), 1 + "");
        new Expectations() {
            {
                membership.getRacMembership();
                result = ImmutableSet.of();
                SystemUtils.getDataFromUrl(anyString);
                returns(null, null);
            }
        };
        // Dead-instance slot (iid_0) must win over the pre-generated slot.
        TokenRetriever tokenRetriever = getTokenRetriever();
        Truth.assertThat(tokenRetriever.grabExistingToken()).isNotNull();
        Truth.assertThat(tokenRetriever.getReplacedIp().isPresent()).isTrue();
        Truth.assertThat(tokenRetriever.getReplacedIp().get()).isEqualTo("127.0.0.0");
    }
    @Test
    public void testPrioritizeDeadInstancesEvenIfAfterANewSlot(@Mocked SystemUtils systemUtils)
            throws Exception {
        create(0, "new_slot", "host_0", "127.0.0.0", instanceInfo.getRac(), 0 + "");
        create(1, "iid_1", "host_1", "127.0.0.1", instanceInfo.getRac(), 1 + "");
        new Expectations() {
            {
                membership.getRacMembership();
                result = ImmutableSet.of();
                SystemUtils.getDataFromUrl(anyString);
                returns(null, null);
            }
        };
        // Same as above but with the slots in the opposite id order.
        TokenRetriever tokenRetriever = getTokenRetriever();
        Truth.assertThat(tokenRetriever.grabExistingToken()).isNotNull();
        Truth.assertThat(tokenRetriever.getReplacedIp().isPresent()).isTrue();
        Truth.assertThat(tokenRetriever.getReplacedIp().get()).isEqualTo("127.0.0.1");
    }
    @Test
    public void testNewTokenFailureIfProhibited() {
        ((FakeConfiguration) configuration).setCreateNewToken(false);
        create(0, "iid_0", "host_0", "127.0.0.0", instanceInfo.getRac(), 0 + "");
        create(1, "iid_1", "host_1", "127.0.0.1", instanceInfo.getRac(), 1 + "");
        new Expectations() {
            {
                membership.getRacMembership();
                result = ImmutableSet.of("iid_0", "iid_1");
            }
        };
        // No slot to grab and new-token creation disabled -> must fail loudly.
        Assertions.assertThrows(IllegalStateException.class, () -> getTokenRetriever().get());
    }
    @Test
    public void testNewTokenNoInstancesInRac() throws Exception {
        create(0, "iid_0", "host_0", "127.0.0.0", "az2", 0 + "");
        create(1, "iid_1", "host_1", "127.0.0.1", "az2", 1 + "");
        new Expectations() {
            {
                membership.getRacMembership();
                result = ImmutableSet.of("iid_0", "iid_1");
                membership.getRacCount();
                result = 1;
                membership.getRacMembershipSize();
                result = 3;
            }
        };
        PriamInstance instance = getTokenRetriever().get();
        Truth.assertThat(instance.getToken()).isEqualTo("1808575600");
        // region offset for us-east-1 + index of rac az1 (1808575600 + 0)
        Truth.assertThat(instance.getId()).isEqualTo(1808575600);
    }
    @Test
    public void testNewTokenGenerationNoInstancesWithLargeEnoughId() throws Exception {
        create(0, "iid_0", "host_0", "127.0.0.0", "az1", 0 + "");
        create(1, "iid_1", "host_1", "127.0.0.1", "az1", 1 + "");
        new Expectations() {
            {
                membership.getRacMembership();
                result = ImmutableSet.of("iid_0", "iid_1");
                membership.getRacCount();
                result = 1;
                membership.getRacMembershipSize();
                result = 3;
            }
        };
        PriamInstance instance = getTokenRetriever().get();
        Truth.assertThat(instance.getToken()).isEqualTo("170141183460469231731687303717692681326");
        // region offset for us-east-1 + number of racs in cluster (3)
        Truth.assertThat(instance.getId()).isEqualTo(1808575603);
    }
    @Test
    public void testNewTokenFailureWhenMyRacIsNotInCluster() {
        ((FakeConfiguration) configuration).setRacs("az2", "az3");
        create(0, "iid_0", "host_0", "127.0.0.0", "az2", 0 + "");
        create(1, "iid_1", "host_1", "127.0.0.1", "az2", 1 + "");
        new Expectations() {
            {
                membership.getRacMembership();
                result = ImmutableSet.of("iid_0", "iid_1");
            }
        };
        Assertions.assertThrows(IllegalStateException.class, () -> getTokenRetriever().get());
    }
    @Test
    public void testNewTokenGenerationMultipleInstancesWithLargetEnoughIds() throws Exception {
        create(2000000000, "iid_0", "host_0", "127.0.0.0", "az1", 0 + "");
        create(2000000001, "iid_1", "host_1", "127.0.0.1", "az1", 1 + "");
        new Expectations() {
            {
                membership.getRacMembership();
                result = ImmutableSet.of("iid_0", "iid_1");
                membership.getRacCount();
                result = 1;
                membership.getRacMembershipSize();
                result = 3;
            }
        };
        PriamInstance instance = getTokenRetriever().get();
        Truth.assertThat(instance.getToken())
                .isEqualTo("10856391546591660081525376676060033425699421368");
        // max id (2000000001) + total instances (3)
        Truth.assertThat(instance.getId()).isEqualTo(2000000004);
    }
    @Test
    public void testPreassignedTokenNotReplacedIfPublicIPMatch(@Mocked SystemUtils systemUtils)
            throws Exception {
        // IP in DB doesn't matter so we make it different to confirm that
        create(0, instanceInfo.getInstanceId(), "host_0", "1.2.3.4", "az1", 0 + "");
        getInstances(5);
        String gossipResponse = getStatus(liveInstances, tokenToEndpointMap);
        new Expectations() {
            {
                SystemUtils.getDataFromUrl(anyString);
                returns(gossipResponse, gossipResponse, null, "random_value", gossipResponse);
            }
        };
        TokenRetriever tokenRetriever = getTokenRetriever();
        tokenRetriever.get();
        Truth.assertThat(tokenRetriever.getReplacedIp().isPresent()).isFalse();
    }
    @Test
    public void testPreassignedTokenNotReplacedIfPrivateIPMatch(@Mocked SystemUtils systemUtils)
            throws Exception {
        // IP in DB doesn't matter so we make it different to confirm that
        create(0, instanceInfo.getInstanceId(), "host_0", "1.2.3.4", "az1", 0 + "");
        getInstances(5);
        Map<String, String> myTokenToEndpointMap =
                IntStream.range(0, 7)
                        .boxed()
                        .collect(
                                Collectors.toMap(
                                        String::valueOf, e -> String.format("127.1.1.%s", e)));
        ImmutableList<String> myLiveInstances = ImmutableList.copyOf(tokenToEndpointMap.values());
        String gossipResponse = getStatus(myLiveInstances, myTokenToEndpointMap);
        new Expectations() {
            {
                SystemUtils.getDataFromUrl(anyString);
                returns(gossipResponse, gossipResponse, null, "random_value", gossipResponse);
            }
        };
        TokenRetriever tokenRetriever = getTokenRetriever();
        tokenRetriever.get();
        Truth.assertThat(tokenRetriever.getReplacedIp().isPresent()).isFalse();
    }
    @Test
    public void testGetPreassignedTokenThrowsIfOwnerIPIsLive(@Mocked SystemUtils systemUtils)
            throws Exception {
        getInstances(5);
        create(6, instanceInfo.getInstanceId(), "host_5", "1.2.3.4", "az1", 6 + "");
        Map<String, String> myTokenToEndpointMap =
                IntStream.range(0, 7)
                        .boxed()
                        .collect(
                                Collectors.toMap(
                                        String::valueOf, e -> String.format("18.221.0.%s", e)));
        ImmutableList<String> myLiveInstances = ImmutableList.copyOf(myTokenToEndpointMap.values());
        String gossipResponse = getStatus(myLiveInstances, myTokenToEndpointMap);
        new Expectations() {
            {
                SystemUtils.getDataFromUrl(anyString);
                returns(gossipResponse, gossipResponse, null, "random_value", gossipResponse);
            }
        };
        // Owner of our token is still live according to gossip -> refuse to take it over.
        Assertions.assertThrows(
                TokenRetrieverUtils.GossipParseException.class, () -> getTokenRetriever().get());
    }
    @Test
    public void testGetPreassignedTokenReplacesIfOwnerIPIsNotLive(@Mocked SystemUtils systemUtils)
            throws Exception {
        getInstances(5);
        create(6, instanceInfo.getInstanceId(), "host_0", "1.2.3.4", "az1", 6 + "");
        Map<String, String> myTokenToEndpointMap =
                IntStream.range(0, 7)
                        .boxed()
                        .collect(
                                Collectors.toMap(
                                        String::valueOf, e -> String.format("18.221.0.%s", e)));
        List<String> myLiveInstances =
                tokenToEndpointMap.values().stream().sorted().limit(6).collect(Collectors.toList());
        String gossipResponse = getStatus(myLiveInstances, myTokenToEndpointMap);
        new Expectations() {
            {
                SystemUtils.getDataFromUrl(anyString);
                returns(gossipResponse, gossipResponse, null, "random_value", gossipResponse);
            }
        };
        TokenRetriever tokenRetriever = getTokenRetriever();
        tokenRetriever.get();
        Truth.assertThat(tokenRetriever.getReplacedIp().isPresent()).isTrue();
    }
    @Test
    public void testIPIsUpdatedWhenGrabbingPreassignedToken(@Mocked SystemUtils systemUtils)
            throws Exception {
        // Stale IP (1.2.3.4) in the DB must be refreshed to this instance's current IP.
        create(0, instanceInfo.getInstanceId(), "host_0", "1.2.3.4", "az1", 0 + "");
        Truth.assertThat(getTokenRetriever().get().getHostIP()).isEqualTo("127.0.0.0");
    }
    @Test
    public void testRingPositionFirst(@Mocked SystemUtils systemUtils) throws Exception {
        getInstances(6);
        create(0, instanceInfo.getInstanceId(), "host_0", "1.2.3.4", "az1", 0 + "");
        TokenRetriever tokenRetriever = getTokenRetriever();
        tokenRetriever.get();
        Truth.assertThat(tokenRetriever.getRingPosition()).isEqualTo(Fraction.getFraction(0, 7));
    }
    @Test
    public void testRingPositionMiddle(@Mocked SystemUtils systemUtils) throws Exception {
        getInstances(3);
        create(4, instanceInfo.getInstanceId(), "host_0", "1.2.3.4", "az1", 4 + "");
        createByIndex(5);
        createByIndex(6);
        TokenRetriever tokenRetriever = getTokenRetriever();
        tokenRetriever.get();
        Truth.assertThat(tokenRetriever.getRingPosition()).isEqualTo(Fraction.getFraction(3, 6));
    }
    @Test
    public void testRingPositionLast(@Mocked SystemUtils systemUtils) throws Exception {
        getInstances(6);
        create(7, instanceInfo.getInstanceId(), "host_0", "1.2.3.4", "az1", 7 + "");
        TokenRetriever tokenRetriever = getTokenRetriever();
        tokenRetriever.get();
        Truth.assertThat(tokenRetriever.getRingPosition()).isEqualTo(Fraction.getFraction(6, 7));
    }
    @Test
    public void testThrowOnDuplicateTokenInSameRegion() {
        prepareTokenGenerationTest();
        create(1, instanceInfo.getInstanceId(), "host_0", "1.2.3.4", "us-east-1d", 1808575600 + "");
        Assert.assertThrows(
                IllegalStateException.class, () -> getTokenRetriever().generateNewToken());
    }
    @Test
    public void testIncrementDuplicateTokenInDifferentRegion() {
        ((FakeInstanceInfo) instanceInfo).setRegion("us-west-2");
        create(1, instanceInfo.getInstanceId(), "host_0", "1.2.3.4", "us-west-2a", 1808575600 + "");
        prepareTokenGenerationTest();
        // Cross-region duplicate is tolerated by bumping the token by one.
        Truth.assertThat(getTokenRetriever().generateNewToken().getToken()).isEqualTo("1808575601");
    }
    // Configures a RandomPartitioner us-east-1 cluster (3 racs, 2 nodes per rac) for the
    // new-token generation tests above.
    private void prepareTokenGenerationTest() {
        ((FakeConfiguration) configuration).setCreateNewToken(true);
        ((FakeConfiguration) configuration)
                .setPartitioner("org.apache.cassandra.dht.RandomPartitioner");
        ((FakeConfiguration) configuration).setRacs("us-east-1c", "us-east-1d", "us-east-1e");
        ((FakeInstanceInfo) instanceInfo).setRegion("us-east-1");
        ((FakeInstanceInfo) instanceInfo).setRac("us-east-1c");
        new Expectations() {
            {
                membership.getRacMembershipSize();
                result = 2;
            }
        };
        new Expectations() {
            {
                membership.getRacCount();
                result = 3;
            }
        };
    }
    // Renders a fake "gossip status" JSON payload with the given live nodes and token map.
    private String getStatus(List<String> liveInstances, Map<String, String> tokenToEndpointMap) {
        JSONObject jsonObject = new JSONObject();
        try {
            jsonObject.put("live", liveInstances);
            jsonObject.put("tokenToEndpointMap", tokenToEndpointMap);
        } catch (Exception e) {
            // Swallowed deliberately: JSONObject.put cannot fail for these inputs.
        }
        return jsonObject.toString();
    }
    // Registers instances with ids 1..noOfInstances in the fake token database.
    private List<PriamInstance> getInstances(int noOfInstances) {
        List<PriamInstance> allInstances = Lists.newArrayList();
        for (int i = 1; i <= noOfInstances; i++) allInstances.add(createByIndex(i));
        return allInstances;
    }
    private PriamInstance createByIndex(int index) {
        return create(
                index,
                String.format("instance_id_%d", index),
                String.format("hostname_%d", index),
                String.format("127.0.0.%d", index),
                instanceInfo.getRac(),
                index + "");
    }
    // Instance ids "instance_id_1".."instance_id_n", matching createByIndex naming.
    private Set<String> getRacMembership(int noOfInstances) {
        return IntStream.range(1, noOfInstances + 1)
                .mapToObj(i -> String.format("instance_id_%d", i))
                .collect(Collectors.toSet());
    }
    private PriamInstance create(
            int id, String instanceID, String hostname, String ip, String rac, String payload) {
        return factory.create(
                configuration.getAppName(), id, instanceID, hostname, ip, rac, null, payload);
    }
    // Fresh retriever per call so each test starts with clean internal state.
    private TokenRetriever getTokenRetriever() {
        return new TokenRetriever(
                factory,
                membership,
                configuration,
                instanceInfo,
                new FakeSleeper(),
                new TokenManager(configuration));
    }
}
| 3,115 |
0 | Create_ds/Priam/priam/src/test/java/com/netflix/priam/identity | Create_ds/Priam/priam/src/test/java/com/netflix/priam/identity/token/AssignedTokenRetrieverTest.java | package com.netflix.priam.identity.token;
import com.google.common.base.Strings;
import com.google.common.collect.ImmutableSet;
import com.google.common.truth.Truth;
import com.netflix.priam.config.IConfiguration;
import com.netflix.priam.identity.IMembership;
import com.netflix.priam.identity.IPriamInstanceFactory;
import com.netflix.priam.identity.InstanceIdentity;
import com.netflix.priam.identity.PriamInstance;
import com.netflix.priam.identity.config.InstanceInfo;
import com.netflix.priam.utils.ITokenManager;
import com.netflix.priam.utils.Sleeper;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.IntStream;
import mockit.Expectations;
import mockit.Mocked;
import org.junit.Test;
import org.junit.jupiter.api.Assertions;
/**
 * Tests the replace/bootstrap decision made by {@code TokenRetriever} (via
 * {@code InstanceIdentity}) when this node already has a token assigned in the token
 * database, driven by the mocked gossip consensus from {@code TokenRetrieverUtils}.
 */
public class AssignedTokenRetrieverTest {
    public static final String APP = "testapp";
    public static final String DEAD_APP = "testapp-dead";
    @Test
    public void grabAssignedTokenStartDbInBootstrapModeWhenGossipAgreesCurrentInstanceIsTokenOwner(
            @Mocked IPriamInstanceFactory factory,
            @Mocked IConfiguration config,
            @Mocked IMembership membership,
            @Mocked Sleeper sleeper,
            @Mocked ITokenManager tokenManager,
            @Mocked InstanceInfo instanceInfo,
            @Mocked TokenRetrieverUtils retrievalUtils)
            throws Exception {
        List<PriamInstance> liveHosts = newPriamInstances();
        Collections.shuffle(liveHosts);
        // Gossip agrees: the token owner is this instance's own IP and it is not live
        // elsewhere -> plain bootstrap, no replacement.
        TokenRetrieverUtils.InferredTokenOwnership inferredTokenOwnership =
                new TokenRetrieverUtils.InferredTokenOwnership();
        inferredTokenOwnership.setTokenInformationStatus(
                TokenRetrieverUtils.InferredTokenOwnership.TokenInformationStatus.GOOD);
        inferredTokenOwnership.setTokenInformation(
                new TokenRetrieverUtils.TokenInformation(liveHosts.get(0).getHostIP(), false));
        new Expectations() {
            {
                config.getAppName();
                result = APP;
                factory.getAllIds(DEAD_APP);
                result = ImmutableSet.of();
                factory.getAllIds(APP);
                result = ImmutableSet.copyOf(liveHosts);
                instanceInfo.getInstanceId();
                result = liveHosts.get(0).getInstanceId();
                instanceInfo.getHostIP();
                result = liveHosts.get(0).getHostIP();
                TokenRetrieverUtils.inferTokenOwnerFromGossip(
                        ImmutableSet.copyOf(liveHosts),
                        liveHosts.get(0).getToken(),
                        liveHosts.get(0).getDC());
                result = inferredTokenOwnership;
            }
        };
        ITokenRetriever tokenRetriever =
                new TokenRetriever(
                        factory, membership, config, instanceInfo, sleeper, tokenManager);
        InstanceIdentity instanceIdentity =
                new InstanceIdentity(factory, membership, config, instanceInfo, tokenRetriever);
        Truth.assertThat(instanceIdentity.isReplace()).isFalse();
    }
    @Test
    public void grabAssignedTokenStartDbInReplaceModeWhenGossipAgreesPreviousTokenOwnerIsNotLive(
            @Mocked IPriamInstanceFactory factory,
            @Mocked IConfiguration config,
            @Mocked IMembership membership,
            @Mocked Sleeper sleeper,
            @Mocked ITokenManager tokenManager,
            @Mocked InstanceInfo instanceInfo,
            @Mocked TokenRetrieverUtils retrievalUtils)
            throws Exception {
        List<PriamInstance> liveHosts = newPriamInstances();
        Collections.shuffle(liveHosts);
        PriamInstance deadInstance = liveHosts.remove(0);
        PriamInstance newInstance =
                newMockPriamInstance(
                        deadInstance.getDC(),
                        deadInstance.getRac(),
                        deadInstance.getId(),
                        String.format("new-fakeInstance-%d", deadInstance.getId()),
                        String.format("127.1.1.%d", deadInstance.getId() + 100),
                        String.format("new-fakeHost-%d", deadInstance.getId()),
                        deadInstance.getToken());
        // the case we are trying to test is when Priam restarted after it acquired the
        // token. new instance is already registered with token database.
        liveHosts.add(newInstance);
        // Gossip says the previous owner (deadInstance's IP) holds the token but is NOT
        // live -> start Cassandra in replace mode targeting that IP.
        TokenRetrieverUtils.InferredTokenOwnership inferredTokenOwnership =
                new TokenRetrieverUtils.InferredTokenOwnership();
        inferredTokenOwnership.setTokenInformationStatus(
                TokenRetrieverUtils.InferredTokenOwnership.TokenInformationStatus.GOOD);
        inferredTokenOwnership.setTokenInformation(
                new TokenRetrieverUtils.TokenInformation(deadInstance.getHostIP(), false));
        new Expectations() {
            {
                config.getAppName();
                result = APP;
                factory.getAllIds(DEAD_APP);
                result = ImmutableSet.of(deadInstance);
                factory.getAllIds(APP);
                result = ImmutableSet.copyOf(liveHosts);
                instanceInfo.getInstanceId();
                result = newInstance.getInstanceId();
                TokenRetrieverUtils.inferTokenOwnerFromGossip(
                        ImmutableSet.copyOf(liveHosts),
                        newInstance.getToken(),
                        newInstance.getDC());
                result = inferredTokenOwnership;
            }
        };
        ITokenRetriever tokenRetriever =
                new TokenRetriever(
                        factory, membership, config, instanceInfo, sleeper, tokenManager);
        InstanceIdentity instanceIdentity =
                new InstanceIdentity(factory, membership, config, instanceInfo, tokenRetriever);
        Truth.assertThat(instanceIdentity.getReplacedIp()).isEqualTo(deadInstance.getHostIP());
        Truth.assertThat(instanceIdentity.isReplace()).isTrue();
    }
    @Test
    public void grabAssignedTokenThrowWhenGossipAgreesPreviousTokenOwnerIsLive(
            @Mocked IPriamInstanceFactory factory,
            @Mocked IConfiguration config,
            @Mocked IMembership membership,
            @Mocked Sleeper sleeper,
            @Mocked ITokenManager tokenManager,
            @Mocked InstanceInfo instanceInfo,
            @Mocked TokenRetrieverUtils retrievalUtils) {
        List<PriamInstance> liveHosts = newPriamInstances();
        Collections.shuffle(liveHosts);
        PriamInstance deadInstance = liveHosts.remove(0);
        PriamInstance newInstance =
                newMockPriamInstance(
                        deadInstance.getDC(),
                        deadInstance.getRac(),
                        deadInstance.getId(),
                        String.format("new-fakeInstance-%d", deadInstance.getId()),
                        String.format("127.1.1.%d", deadInstance.getId() + 100),
                        String.format("new-fakeHost-%d", deadInstance.getId()),
                        deadInstance.getToken());
        // the case we are trying to test is when Priam restarted after it acquired the
        // token. new instance is already registered with token database.
        liveHosts.add(newInstance);
        // Gossip says the previous owner is still LIVE (second ctor arg true) -> taking
        // the token over would split the ring, so the retriever must throw.
        TokenRetrieverUtils.InferredTokenOwnership inferredTokenOwnership =
                new TokenRetrieverUtils.InferredTokenOwnership();
        inferredTokenOwnership.setTokenInformationStatus(
                TokenRetrieverUtils.InferredTokenOwnership.TokenInformationStatus.GOOD);
        inferredTokenOwnership.setTokenInformation(
                new TokenRetrieverUtils.TokenInformation(deadInstance.getHostIP(), true));
        new Expectations() {
            {
                config.getAppName();
                result = APP;
                factory.getAllIds(DEAD_APP);
                result = ImmutableSet.of(deadInstance);
                factory.getAllIds(APP);
                result = ImmutableSet.copyOf(liveHosts);
                instanceInfo.getInstanceId();
                result = newInstance.getInstanceId();
                TokenRetrieverUtils.inferTokenOwnerFromGossip(
                        ImmutableSet.copyOf(liveHosts),
                        newInstance.getToken(),
                        newInstance.getDC());
                result = inferredTokenOwnership;
            }
        };
        ITokenRetriever tokenRetriever =
                new TokenRetriever(
                        factory, membership, config, instanceInfo, sleeper, tokenManager);
        Assertions.assertThrows(
                TokenRetrieverUtils.GossipParseException.class,
                () ->
                        new InstanceIdentity(
                                factory, membership, config, instanceInfo, tokenRetriever));
    }
    @Test
    public void grabAssignedTokenThrowToBuyTimeWhenGossipDisagreesOnPreviousTokenOwner(
            @Mocked IPriamInstanceFactory factory,
            @Mocked IConfiguration config,
            @Mocked IMembership membership,
            @Mocked Sleeper sleeper,
            @Mocked ITokenManager tokenManager,
            @Mocked InstanceInfo instanceInfo,
            @Mocked TokenRetrieverUtils retrievalUtils) {
        List<PriamInstance> liveHosts = newPriamInstances();
        Collections.shuffle(liveHosts);
        // Gossip MISMATCH: nodes disagree on the token owner -> throw to retry later
        // (default behavior when direct assignment with mismatch is not permitted).
        TokenRetrieverUtils.InferredTokenOwnership inferredTokenOwnership =
                new TokenRetrieverUtils.InferredTokenOwnership();
        inferredTokenOwnership.setTokenInformationStatus(
                TokenRetrieverUtils.InferredTokenOwnership.TokenInformationStatus.MISMATCH);
        inferredTokenOwnership.setTokenInformation(
                new TokenRetrieverUtils.TokenInformation(liveHosts.get(0).getHostIP(), false));
        new Expectations() {
            {
                config.getAppName();
                result = APP;
                factory.getAllIds(DEAD_APP);
                result = ImmutableSet.of();
                factory.getAllIds(APP);
                result = ImmutableSet.copyOf(liveHosts);
                instanceInfo.getInstanceId();
                result = liveHosts.get(0).getInstanceId();
                TokenRetrieverUtils.inferTokenOwnerFromGossip(
                        ImmutableSet.copyOf(liveHosts),
                        liveHosts.get(0).getToken(),
                        liveHosts.get(0).getDC());
                result = inferredTokenOwnership;
            }
        };
        ITokenRetriever tokenRetriever =
                new TokenRetriever(
                        factory, membership, config, instanceInfo, sleeper, tokenManager);
        Assertions.assertThrows(
                TokenRetrieverUtils.GossipParseException.class,
                () ->
                        new InstanceIdentity(
                                factory, membership, config, instanceInfo, tokenRetriever));
    }
    @Test
    public void grabAssignedTokenStartDbInBootstrapModeWhenGossipDisagreesOnPreviousTokenOwner(
            @Mocked IPriamInstanceFactory factory,
            @Mocked IConfiguration config,
            @Mocked IMembership membership,
            @Mocked Sleeper sleeper,
            @Mocked ITokenManager tokenManager,
            @Mocked InstanceInfo instanceInfo,
            @Mocked TokenRetrieverUtils retrievalUtils)
            throws Exception {
        List<PriamInstance> liveHosts = newPriamInstances();
        Collections.shuffle(liveHosts);
        // Same MISMATCH scenario, but the config opts in to direct token assignment ->
        // plain bootstrap with no replaced IP.
        TokenRetrieverUtils.InferredTokenOwnership inferredTokenOwnership =
                new TokenRetrieverUtils.InferredTokenOwnership();
        inferredTokenOwnership.setTokenInformationStatus(
                TokenRetrieverUtils.InferredTokenOwnership.TokenInformationStatus.MISMATCH);
        inferredTokenOwnership.setTokenInformation(
                new TokenRetrieverUtils.TokenInformation(liveHosts.get(0).getHostIP(), false));
        new Expectations() {
            {
                config.getAppName();
                result = APP;
                config.permitDirectTokenAssignmentWithGossipMismatch();
                result = true;
                factory.getAllIds(DEAD_APP);
                result = ImmutableSet.of();
                factory.getAllIds(APP);
                result = ImmutableSet.copyOf(liveHosts);
                instanceInfo.getInstanceId();
                result = liveHosts.get(0).getInstanceId();
                TokenRetrieverUtils.inferTokenOwnerFromGossip(
                        ImmutableSet.copyOf(liveHosts),
                        liveHosts.get(0).getToken(),
                        liveHosts.get(0).getDC());
                result = inferredTokenOwnership;
            }
        };
        ITokenRetriever tokenRetriever =
                new TokenRetriever(
                        factory, membership, config, instanceInfo, sleeper, tokenManager);
        InstanceIdentity instanceIdentity =
                new InstanceIdentity(factory, membership, config, instanceInfo, tokenRetriever);
        Truth.assertThat(Strings.isNullOrEmpty(instanceIdentity.getReplacedIp())).isTrue();
        Truth.assertThat(instanceIdentity.isReplace()).isFalse();
    }
    // 27 instances spread over 3 regions x 3 racks, ids interleaved per rack.
    private List<PriamInstance> newPriamInstances() {
        List<PriamInstance> instances = new ArrayList<>();
        instances.addAll(newPriamInstances("eu-west", "1a", 0, "127.3.1.%d"));
        instances.addAll(newPriamInstances("eu-west", "1b", 3, "127.3.2.%d"));
        instances.addAll(newPriamInstances("eu-west", "1c", 6, "127.3.3.%d"));
        instances.addAll(newPriamInstances("us-east", "1c", 1, "127.1.3.%d"));
        instances.addAll(newPriamInstances("us-east", "1a", 4, "127.1.1.%d"));
        instances.addAll(newPriamInstances("us-east", "1b", 7, "127.1.2.%d"));
        instances.addAll(newPriamInstances("us-west-2", "2a", 2, "127.2.1.%d"));
        instances.addAll(newPriamInstances("us-west-2", "2b", 5, "127.2.2.%d"));
        instances.addAll(newPriamInstances("us-west-2", "2c", 8, "127.2.3.%d"));
        return instances;
    }
    // Three instances for one rack: ids seqNo, seqNo+9, seqNo+18; token == id as string.
    private List<PriamInstance> newPriamInstances(
            String dc, String rack, int seqNo, String ipRanges) {
        return IntStream.range(0, 3)
                .map(e -> seqNo + (e * 9))
                .mapToObj(
                        e ->
                                newMockPriamInstance(
                                        dc,
                                        rack,
                                        e,
                                        String.format("fakeInstance-%d", e),
                                        String.format(ipRanges, e),
                                        String.format("fakeHost-%d", e),
                                        Integer.toString(e)))
                .collect(Collectors.toList());
    }
    private PriamInstance newMockPriamInstance(
            String dc,
            String rack,
            int id,
            String instanceId,
            String hostIp,
            String hostName,
            String token) {
        PriamInstance priamInstance = new PriamInstance();
        priamInstance.setApp(APP);
        priamInstance.setDC(dc);
        priamInstance.setRac(rack);
        priamInstance.setId(id);
        priamInstance.setInstanceId(instanceId);
        priamInstance.setHost(hostName);
        priamInstance.setHostIP(hostIp);
        priamInstance.setToken(token);
        return priamInstance;
    }
}
| 3,116 |
0 | Create_ds/Priam/priam/src/test/java/com/netflix/priam/identity | Create_ds/Priam/priam/src/test/java/com/netflix/priam/identity/config/TestAWSInstanceInfo.java | package com.netflix.priam.identity.config;
import com.google.common.truth.Truth;
import mockit.Expectations;
import org.junit.Before;
import org.junit.Test;
/**
 * Tests of {@link com.netflix.priam.identity.config.AWSInstanceInfo}: verifies that host IP
 * and hostname fall back from the public metadata URL to the local one when the public
 * lookup yields nothing.
 */
public class TestAWSInstanceInfo {
    private AWSInstanceInfo instanceInfo;
    @Before
    public void setUp() {
        // The credentials supplier must never be consulted by these tests; blow up if it is.
        instanceInfo =
                new AWSInstanceInfo(
                        () -> {
                            throw new RuntimeException("not implemented");
                        });
    }
    @Test
    public void testPublicHostIP() {
        // Partial mock of the real object: only the recorded URL fetch is stubbed.
        new Expectations(instanceInfo) {
            {
                instanceInfo.tryGetDataFromUrl(AWSInstanceInfo.PUBLIC_HOSTIP_URL);
                result = "1.2.3.4";
            }
        };
        Truth.assertThat(instanceInfo.getHostIP()).isEqualTo("1.2.3.4");
    }
    @Test
    public void testMissingPublicHostIP() {
        // Public URL returns null -> must fall back to the local metadata URL.
        new Expectations(instanceInfo) {
            {
                instanceInfo.tryGetDataFromUrl(AWSInstanceInfo.PUBLIC_HOSTIP_URL);
                result = null;
                instanceInfo.tryGetDataFromUrl(AWSInstanceInfo.LOCAL_HOSTIP_URL);
                result = "1.2.3.4";
            }
        };
        Truth.assertThat(instanceInfo.getHostIP()).isEqualTo("1.2.3.4");
    }
    @Test
    public void testPublicHostname() {
        new Expectations(instanceInfo) {
            {
                instanceInfo.tryGetDataFromUrl(AWSInstanceInfo.PUBLIC_HOSTNAME_URL);
                result = "hostname";
            }
        };
        Truth.assertThat(instanceInfo.getHostname()).isEqualTo("hostname");
    }
    @Test
    public void testMissingPublicHostname() {
        // Same fallback contract as the host-IP case, but for the hostname.
        new Expectations(instanceInfo) {
            {
                instanceInfo.tryGetDataFromUrl(AWSInstanceInfo.PUBLIC_HOSTNAME_URL);
                result = null;
                instanceInfo.tryGetDataFromUrl(AWSInstanceInfo.LOCAL_HOSTNAME_URL);
                result = "hostname";
            }
        };
        Truth.assertThat(instanceInfo.getHostname()).isEqualTo("hostname");
    }
}
| 3,117 |
0 | Create_ds/Priam/priam/src/test/java/com/netflix/priam/identity | Create_ds/Priam/priam/src/test/java/com/netflix/priam/identity/config/FakeInstanceInfo.java | /*
* Copyright 2018 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.priam.identity.config;
/**
 * In-memory {@link InstanceInfo} stub for unit tests. Returns fixed loopback IPs, echoes
 * the instance id as the hostname, and always reports a VPC environment. The rac and
 * region are mutable so tests can reconfigure them mid-test; all other identity fields are
 * immutable. Created by aagrawal on 10/17/18.
 */
public class FakeInstanceInfo implements InstanceInfo {
    private final String instanceId;
    // Mutable: tests override via setRac().
    private String availabilityZone;
    // Mutable: tests override via setRegion().
    private String region;
    private final String instanceType;
    private final String asg;
    private final String vpcId;
    /** Convenience constructor: i2.xlarge instance whose ASG name equals its zone. */
    public FakeInstanceInfo(String instanceId, String availabilityZone, String region) {
        this(instanceId, availabilityZone, region, "i2.xlarge", availabilityZone, "");
    }
    public FakeInstanceInfo(
            String instanceId,
            String availabilityZone,
            String region,
            String instanceType,
            String asg,
            String vpcId) {
        this.instanceId = instanceId;
        this.availabilityZone = availabilityZone;
        this.region = region;
        this.instanceType = instanceType;
        this.asg = asg;
        this.vpcId = vpcId;
    }
    @Override
    public String getRac() {
        return availabilityZone;
    }
    @Override
    public String getHostname() {
        // Fake: the hostname mirrors the instance id.
        return instanceId;
    }
    @Override
    public String getHostIP() {
        // Fixed public IP used throughout the test suite.
        return "127.0.0.0";
    }
    @Override
    public String getPrivateIP() {
        // Fixed private IP used throughout the test suite.
        return "127.1.1.0";
    }
    @Override
    public String getInstanceId() {
        return instanceId;
    }
    @Override
    public String getInstanceType() {
        return instanceType;
    }
    @Override
    public String getVpcId() {
        return vpcId;
    }
    @Override
    public String getRegion() {
        return region;
    }
    @Override
    public String getAutoScalingGroup() {
        return asg;
    }
    @Override
    public InstanceEnvironment getInstanceEnvironment() {
        return InstanceEnvironment.VPC;
    }
    /** Overrides the availability zone (rac) reported by this fake. */
    public void setRac(String rac) {
        this.availabilityZone = rac;
    }
    /** Overrides the region reported by this fake. */
    public void setRegion(String region) {
        this.region = region;
    }
}
| 3,118 |
0 | Create_ds/Priam/priam/src/test/java/com/netflix/priam | Create_ds/Priam/priam/src/test/java/com/netflix/priam/connection/TestCassandraOperations.java | /*
* Copyright 2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.priam.connection;
import com.google.inject.Guice;
import com.google.inject.Injector;
import com.mchange.io.FileUtils;
import com.netflix.priam.backup.BRTestModule;
import com.netflix.priam.config.IConfiguration;
import java.io.File;
import java.util.List;
import java.util.Map;
import mockit.Expectations;
import mockit.Mock;
import mockit.MockUp;
import mockit.Mocked;
import org.apache.cassandra.tools.NodeProbe;
import org.junit.Assert;
import org.junit.Test;
/**
 * Tests {@code CassandraOperations#gossipInfo()} parsing of raw nodetool gossipinfo output
 * (fixture file), with the JMX connection replaced by a mocked {@link NodeProbe}.
 * Created by aagrawal on 3/1/19.
 */
public class TestCassandraOperations {
    private final String gossipInfo1 = "src/test/resources/gossipInfoSample_1.txt";
    @Mocked private NodeProbe nodeProbe;
    @Mocked private JMXNodeTool jmxNodeTool;
    // NOTE(review): static and lazily assigned from the constructor so the Guice graph is
    // built once across test instances. Not thread-safe, but JUnit runs these serially.
    private static CassandraOperations cassandraOperations;
    public TestCassandraOperations() {
        // Route JMXNodeTool.instance(...) to the mocked NodeProbe instead of a live JMX
        // connection.
        new MockUp<JMXNodeTool>() {
            @Mock
            NodeProbe instance(IConfiguration config) {
                return nodeProbe;
            }
        };
        Injector injector = Guice.createInjector(new BRTestModule());
        if (cassandraOperations == null)
            cassandraOperations = injector.getInstance(CassandraOperations.class);
    }
    @Test
    public void testGossipInfo() throws Exception {
        String gossipInfoFromNodetool = FileUtils.getContentsAsString(new File(gossipInfo1));
        new Expectations() {
            {
                nodeProbe.getGossipInfo();
                result = gossipInfoFromNodetool;
                nodeProbe.getTokens("127.0.0.1");
                result = "123,234";
            }
        };
        List<Map<String, String>> gossipInfoList = cassandraOperations.gossipInfo();
        System.out.println(gossipInfoList);
        // The fixture contains 7 gossip entries.
        Assert.assertEquals(7, gossipInfoList.size());
        // Iterable.forEach directly; the intermediate stream() was redundant.
        gossipInfoList.forEach(
                gossipInfo -> {
                    Assert.assertEquals("us-east", gossipInfo.get("DC"));
                    Assert.assertNotNull(gossipInfo.get("PUBLIC_IP"));
                    Assert.assertEquals("1565153", gossipInfo.get("HEARTBEAT"));
                    // Only NORMAL nodes are guaranteed tokens in the parsed map.
                    if (gossipInfo.get("STATUS").equalsIgnoreCase("NORMAL"))
                        Assert.assertNotNull(gossipInfo.get("TOKENS"));
                    // Tokens for 127.0.0.1 come from the stubbed nodeProbe.getTokens call.
                    if (gossipInfo.get("PUBLIC_IP").equalsIgnoreCase("127.0.0.1"))
                        Assert.assertEquals("[123,234]", gossipInfo.get("TOKENS"));
                });
    }
}
| 3,119 |
0 | Create_ds/Priam/priam/src/test/java/com/netflix/priam | Create_ds/Priam/priam/src/test/java/com/netflix/priam/config/FakeConfiguration.java | /*
* Copyright 2018 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.priam.config;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Lists;
import java.io.File;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.inject.Singleton;
/**
 * In-memory {@link IConfiguration} stub for unit tests: returns fixed values and exposes
 * setters plus override maps so individual tests can tweak behavior per test case.
 */
@Singleton
public class FakeConfiguration implements IConfiguration {
    private final String appName;
    private String restorePrefix = "";
    // Per-key object overrides consulted by a few getters (e.g. backupsToCompress, backup threads).
    public Map<String, Object> fakeConfig;
    private boolean mayCreateNewToken;
    private ImmutableList<String> racs;
    private boolean usePrivateIp;
    private boolean checkThriftIsListening;
    private boolean skipDeletingOthersIngressRules;
    private boolean skipUpdatingOthersIngressRules;
    private boolean skipIngressUnlessIPIsPublic;
    private long compressionTransitionEpochMillis;
    private boolean autoSnapshot;
    private String partitioner;
    // Backing store for getProperty() and getMergedConfigurationDirectory().
    public final Map<String, String> fakeProperties = new HashMap<>();
    public FakeConfiguration() {
        this("my_fake_cluster");
    }
    public FakeConfiguration(String appName) {
        this.appName = appName;
        fakeConfig = new HashMap<>();
        this.mayCreateNewToken = true; // matches interface default
        this.racs = ImmutableList.of("az1", "az2", "az3");
    }
    /** Registers a per-key override consulted by getters that read {@link #fakeConfig}. */
    public void setFakeConfig(String key, Object value) {
        fakeConfig.put(key, value);
    }
    @Override
    public String getCassHome() {
        return "/tmp/priam";
    }
    @Override
    public void initialize() {
        // TODO Auto-generated method stub
    }
    @Override
    public String getBackupLocation() {
        return "casstestbackup";
    }
    @Override
    public String getBackupPrefix() {
        return "TEST-netflix.platform.S3";
    }
    @Override
    public String getCommitLogLocation() {
        return "cass/commitlog";
    }
    @Override
    public String getDataFileLocation() {
        return "target/data";
    }
    @Override
    public String getLogDirLocation() {
        return null;
    }
    @Override
    public String getCacheLocation() {
        return "cass/caches";
    }
    @Override
    public List<String> getRacs() {
        return racs;
    }
    /** Replaces the default az1/az2/az3 rack list for a single test. */
    public void setRacs(String... racs) {
        this.racs = ImmutableList.copyOf(racs);
    }
    @Override
    public String getSnitch() {
        return "org.apache.cassandra.locator.SimpleSnitch";
    }
    @Override
    public String getAppName() {
        return appName;
    }
    @Override
    public String getRestorePrefix() {
        return this.restorePrefix;
    }
    // For testing purposes only.
    public void setRestorePrefix(String restorePrefix) {
        this.restorePrefix = restorePrefix;
    }
    @Override
    public String getBackupCommitLogLocation() {
        return "cass/backup/cl/";
    }
    @Override
    public String getCassStartupScript() {
        // /usr/bin/false: "starting" Cassandra in tests is a deliberate no-op that fails fast.
        return "/usr/bin/false";
    }
    @Override
    public int getRemediateDeadCassandraRate() {
        return 1;
    }
    @Override
    public String getSeedProviderName() {
        return "org.apache.cassandra.locator.SimpleSeedProvider";
    }
    @Override
    public int getBackupRetentionDays() {
        return 5;
    }
    @Override
    public List<String> getBackupRacs() {
        return Lists.newArrayList();
    }
    @Override
    public String getYamlLocation() {
        return getCassHome() + "/conf/cassandra.yaml";
    }
    @Override
    public String getJVMOptionsFileLocation() {
        return "src/test/resources/conf/jvm.options";
    }
    @Override
    public String getCommitLogBackupPropsFile() {
        return getCassHome() + "/conf/commitlog_archiving.properties";
    }
    public String getCassYamlVal(String priamKey) {
        return "";
    }
    @Override
    public boolean isPostRestoreHookEnabled() {
        return true;
    }
    @Override
    public String getPostRestoreHook() {
        return "echo";
    }
    @Override
    public String getPostRestoreHookHeartbeatFileName() {
        return System.getProperty("java.io.tmpdir") + File.separator + "postrestorehook.heartbeat";
    }
    @Override
    public String getPostRestoreHookDoneFileName() {
        return System.getProperty("java.io.tmpdir") + File.separator + "postrestorehook.done";
    }
    @Override
    public String getProperty(String key, String defaultValue) {
        return fakeProperties.getOrDefault(key, defaultValue);
    }
    @Override
    public String getMergedConfigurationDirectory() {
        return fakeProperties.getOrDefault("priam_test_config", "/tmp/priam_test_config");
    }
    @Override
    public ImmutableSet<String> getTunablePropertyFiles() {
        String path = new File(getYamlLocation()).getParentFile().getPath();
        return ImmutableSet.of(path + "/cassandra-rackdc.properties");
    }
    public String getRAC() {
        return "my_zone";
    }
    public String getDC() {
        return "us-east-1";
    }
    @Override
    public boolean isCreateNewTokenEnable() {
        return mayCreateNewToken;
    }
    public void setCreateNewToken(boolean mayCreateNewToken) {
        this.mayCreateNewToken = mayCreateNewToken;
    }
    @Override
    public boolean usePrivateIP() {
        return usePrivateIp;
    }
    // Setter overload of the getter above; distinguished only by the parameter.
    public void usePrivateIP(boolean usePrivateIp) {
        this.usePrivateIp = usePrivateIp;
    }
    @Override
    public boolean checkThriftServerIsListening() {
        return checkThriftIsListening;
    }
    public void setCheckThriftServerIsListening(boolean checkThriftServerIsListening) {
        this.checkThriftIsListening = checkThriftServerIsListening;
    }
    @Override
    public boolean skipDeletingOthersIngressRules() {
        return this.skipDeletingOthersIngressRules;
    }
    public void setSkipDeletingOthersIngressRules(boolean skipDeletingOthersIngressRules) {
        this.skipDeletingOthersIngressRules = skipDeletingOthersIngressRules;
    }
    @Override
    public boolean skipUpdatingOthersIngressRules() {
        return this.skipUpdatingOthersIngressRules;
    }
    public void setSkipUpdatingOthersIngressRules(boolean skipUpdatingOthersIngressRules) {
        this.skipUpdatingOthersIngressRules = skipUpdatingOthersIngressRules;
    }
    @Override
    public BackupsToCompress getBackupsToCompress() {
        // Overridable per test via setFakeConfig("Priam.backupsToCompress", ...).
        return (BackupsToCompress)
                fakeConfig.getOrDefault("Priam.backupsToCompress", BackupsToCompress.ALL);
    }
    @Override
    public boolean skipIngressUnlessIPIsPublic() {
        return this.skipIngressUnlessIPIsPublic;
    }
    public void setSkipIngressUnlessIPIsPublic(boolean skipIngressUnlessIPIsPublic) {
        this.skipIngressUnlessIPIsPublic = skipIngressUnlessIPIsPublic;
    }
    @Override
    public int getBackupThreads() {
        // Falls back to the interface's default implementation when no override is set.
        return (Integer)
                fakeConfig.getOrDefault(
                        "Priam.backup.threads", IConfiguration.super.getBackupThreads());
    }
    public void setCompressionTransitionEpochMillis(long transitionTime) {
        compressionTransitionEpochMillis = transitionTime;
    }
    @Override
    public long getCompressionTransitionEpochMillis() {
        return compressionTransitionEpochMillis;
    }
    // Fluent setter: returns this so tests can chain configuration calls.
    public FakeConfiguration setAutoSnapshot(boolean autoSnapshot) {
        this.autoSnapshot = autoSnapshot;
        return this;
    }
    @Override
    public boolean getAutoSnapshot() {
        return autoSnapshot;
    }
    public void setPartitioner(String partitioner) {
        this.partitioner = partitioner;
    }
    @Override
    public String getPartitioner() {
        return partitioner;
    }
}
| 3,120 |
0 | Create_ds/Priam/priam/src/test/java/com/netflix/priam | Create_ds/Priam/priam/src/test/java/com/netflix/priam/config/PriamConfigurationPersisterTest.java | /*
* Copyright 2017 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.priam.config;
import static junit.framework.TestCase.assertTrue;
import static org.junit.Assert.assertEquals;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.inject.Guice;
import com.google.inject.Injector;
import com.netflix.priam.TestModule;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.HashMap;
import java.util.Map;
import org.junit.After;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;
/** Tests that {@link PriamConfigurationPersister} writes the configuration as structured JSON. */
public class PriamConfigurationPersisterTest {
    // Shared across tests; created lazily on first setUp().
    private static PriamConfigurationPersister persister;
    @Rule public TemporaryFolder folder = new TemporaryFolder();
    private FakeConfiguration fakeConfiguration;
    @Before
    public void setUp() {
        Injector injector = Guice.createInjector(new TestModule());
        fakeConfiguration = (FakeConfiguration) injector.getInstance(IConfiguration.class);
        // Point the persister's output directory at this test's temporary folder.
        fakeConfiguration.fakeProperties.put("priam_test_config", folder.getRoot().getPath());
        if (persister == null) persister = injector.getInstance(PriamConfigurationPersister.class);
    }
    @After
    public void cleanUp() {
        fakeConfiguration.fakeProperties.clear();
    }
    /** Runs the persister and verifies structured.json exists and round-trips a known value. */
    @Test
    @SuppressWarnings("unchecked")
    public void execute() throws Exception {
        Path structuredJson = Paths.get(folder.getRoot().getPath(), "structured.json");
        persister.execute();
        assertTrue(structuredJson.toFile().exists());
        ObjectMapper objectMapper = new ObjectMapper();
        Map<String, Object> myMap =
                objectMapper.readValue(Files.readAllBytes(structuredJson), HashMap.class);
        // JUnit convention: expected value first, actual value second (was reversed).
        assertEquals(fakeConfiguration.getBackupLocation(), myMap.get("backupLocation"));
    }
    @Test
    public void getTimer() {
        assertEquals(
                "0 * * * * ? *",
                PriamConfigurationPersister.getTimer(fakeConfiguration).getCronExpression());
    }
}
| 3,121 |
0 | Create_ds/Priam/priam/src/test/java/com/netflix/priam | Create_ds/Priam/priam/src/test/java/com/netflix/priam/config/FakeBackupRestoreConfig.java | /**
* Copyright 2018 Netflix, Inc.
*
* <p>Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a copy of the License at
*
* <p>http://www.apache.org/licenses/LICENSE-2.0
*
* <p>Unless required by applicable law or agreed to in writing, software distributed under the
* License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.priam.config;
import com.google.common.collect.ImmutableSet;
/**
 * In-memory {@link IBackupRestoreConfig} stub for unit tests; returns fixed, test-friendly
 * values (fast cron, zero wait, V2 backup/restore disabled). Created by aagrawal on 6/26/18.
 */
public class FakeBackupRestoreConfig implements IBackupRestoreConfig {
    @Override
    public String getSnapshotMetaServiceCronExpression() {
        return "0 0/2 * 1/1 * ? *"; // Every 2 minutes for testing purposes
    }
    @Override
    public boolean enableV2Backups() {
        return false;
    }
    @Override
    public boolean enableV2Restore() {
        return false;
    }
    @Override
    public int getBackupTTLMonitorPeriodInSec() {
        return 0; // avoids sleeping altogether in tests.
    }
    @Override
    public ImmutableSet<String> getBackupNotificationAdditionalMessageAttrs() {
        // No extra SNS message attributes in tests.
        return ImmutableSet.of();
    }
}
| 3,122 |
0 | Create_ds/Priam/priam/src/test/java/com/netflix/priam | Create_ds/Priam/priam/src/test/java/com/netflix/priam/notification/TestBackupNotificationMgr.java | package com.netflix.priam.notification;
import com.amazonaws.services.sns.model.MessageAttributeValue;
import com.google.inject.Guice;
import com.google.inject.Injector;
import com.netflix.priam.backup.AbstractBackupPath;
import com.netflix.priam.backup.BRTestModule;
import com.netflix.priam.config.IBackupRestoreConfig;
import com.netflix.priam.config.IConfiguration;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.text.ParseException;
import java.time.Instant;
import java.util.Map;
import javax.inject.Provider;
import mockit.Capturing;
import mockit.Expectations;
import mockit.Mocked;
import mockit.Verifications;
import org.junit.Before;
import org.junit.Test;
public class TestBackupNotificationMgr {
private Injector injector;
private BackupNotificationMgr backupNotificationMgr;
private Provider<AbstractBackupPath> abstractBackupPathProvider;
private IConfiguration configuration;
@Before
public void setUp() {
if (injector == null) {
injector = Guice.createInjector(new BRTestModule());
}
if (backupNotificationMgr == null) {
backupNotificationMgr = injector.getInstance(BackupNotificationMgr.class);
}
if (abstractBackupPathProvider == null) {
abstractBackupPathProvider = injector.getProvider(AbstractBackupPath.class);
}
}
@Test
public void testNotificationNonEmptyFilter(
@Mocked IBackupRestoreConfig backupRestoreConfig,
@Capturing INotificationService notificationService)
throws ParseException {
new Expectations() {
{
backupRestoreConfig.getBackupNotifyComponentIncludeList();
result = "SNAPSHOT_VERIFIED, META_V2";
maxTimes = 2;
}
};
new Expectations() {
{
notificationService.notify(anyString, (Map<String, MessageAttributeValue>) any);
maxTimes = 1;
}
};
Path path =
Paths.get(
"fakeDataLocation",
"fakeKeyspace",
"fakeColumnFamily",
"fakeBackup",
"fakeData.db");
AbstractBackupPath abstractBackupPath = abstractBackupPathProvider.get();
abstractBackupPath.parseLocal(path.toFile(), AbstractBackupPath.BackupFileType.META_V2);
backupNotificationMgr.notify(abstractBackupPath, UploadStatus.STARTED);
new Verifications() {
{
backupRestoreConfig.getBackupNotifyComponentIncludeList();
maxTimes = 2;
}
{
notificationService.notify(anyString, (Map<String, MessageAttributeValue>) any);
maxTimes = 1;
}
};
}
@Test
public void testNoNotificationsNonEmptyFilter(
@Mocked IBackupRestoreConfig backupRestoreConfig,
@Capturing INotificationService notificationService)
throws ParseException {
new Expectations() {
{
backupRestoreConfig.getBackupNotifyComponentIncludeList();
result = "META_V2";
maxTimes = 2;
}
};
new Expectations() {
{
notificationService.notify(anyString, (Map<String, MessageAttributeValue>) any);
maxTimes = 0;
}
};
Path path =
Paths.get(
"fakeDataLocation",
"fakeKeyspace",
"fakeColumnFamily",
"fakeBackup",
"fakeData.db");
AbstractBackupPath abstractBackupPath = abstractBackupPathProvider.get();
abstractBackupPath.parseLocal(path.toFile(), AbstractBackupPath.BackupFileType.SST);
backupNotificationMgr.notify(abstractBackupPath, UploadStatus.STARTED);
new Verifications() {
{
backupRestoreConfig.getBackupNotifyComponentIncludeList();
maxTimes = 2;
}
{
notificationService.notify(anyString, (Map<String, MessageAttributeValue>) any);
maxTimes = 0;
}
};
}
@Test
public void testNotificationsEmptyFilter(
@Mocked IBackupRestoreConfig backupRestoreConfig,
@Capturing INotificationService notificationService)
throws ParseException {
new Expectations() {
{
backupRestoreConfig.getBackupNotifyComponentIncludeList();
result = "";
maxTimes = 1;
}
};
new Expectations() {
{
notificationService.notify(anyString, (Map<String, MessageAttributeValue>) any);
maxTimes = 1;
}
};
Path path =
Paths.get(
"fakeDataLocation",
"fakeKeyspace",
"fakeColumnFamily",
"fakeBackup",
"fakeData.db");
AbstractBackupPath abstractBackupPath = abstractBackupPathProvider.get();
abstractBackupPath.parseLocal(path.toFile(), AbstractBackupPath.BackupFileType.SST);
backupNotificationMgr.notify(abstractBackupPath, UploadStatus.STARTED);
new Verifications() {
{
backupRestoreConfig.getBackupNotifyComponentIncludeList();
maxTimes = 1;
}
{
notificationService.notify(anyString, (Map<String, MessageAttributeValue>) any);
maxTimes = 1;
}
};
}
@Test
public void testNotificationsInvalidFilter(
@Mocked IBackupRestoreConfig backupRestoreConfig,
@Capturing INotificationService notificationService)
throws ParseException {
new Expectations() {
{
backupRestoreConfig.getBackupNotifyComponentIncludeList();
result = "SOME_FAKE_FILE_TYPE_1, SOME_FAKE_FILE_TYPE_2";
maxTimes = 2;
}
};
new Expectations() {
{
notificationService.notify(anyString, (Map<String, MessageAttributeValue>) any);
maxTimes = 1;
}
};
Path path =
Paths.get(
"fakeDataLocation",
"fakeKeyspace",
"fakeColumnFamily",
"fakeBackup",
"fakeData.db");
AbstractBackupPath abstractBackupPath = abstractBackupPathProvider.get();
abstractBackupPath.parseLocal(path.toFile(), AbstractBackupPath.BackupFileType.SST);
backupNotificationMgr.notify(abstractBackupPath, UploadStatus.STARTED);
new Verifications() {
{
backupRestoreConfig.getBackupNotifyComponentIncludeList();
maxTimes = 2;
}
{
notificationService.notify(anyString, (Map<String, MessageAttributeValue>) any);
maxTimes = 1;
}
};
}
@Test
public void testNotificationsPartiallyValidFilter(
@Mocked IBackupRestoreConfig backupRestoreConfig,
@Capturing INotificationService notificationService)
throws ParseException {
new Expectations() {
{
backupRestoreConfig.getBackupNotifyComponentIncludeList();
result = "SOME_FAKE_FILE_TYPE_1, SOME_FAKE_FILE_TYPE_2, META_V2";
maxTimes = 2;
}
};
new Expectations() {
{
notificationService.notify(anyString, (Map<String, MessageAttributeValue>) any);
maxTimes = 1;
}
};
Path path =
Paths.get(
"fakeDataLocation",
"fakeKeyspace",
"fakeColumnFamily",
"fakeBackup",
"fakeData.db");
AbstractBackupPath abstractBackupPath = abstractBackupPathProvider.get();
abstractBackupPath.parseLocal(path.toFile(), AbstractBackupPath.BackupFileType.META_V2);
backupNotificationMgr.notify(abstractBackupPath, UploadStatus.STARTED);
new Verifications() {
{
backupRestoreConfig.getBackupNotifyComponentIncludeList();
maxTimes = 2;
}
{
notificationService.notify(anyString, (Map<String, MessageAttributeValue>) any);
maxTimes = 1;
}
};
}
@Test
public void testNoNotificationsPartiallyValidFilter(
@Mocked IBackupRestoreConfig backupRestoreConfig,
@Capturing INotificationService notificationService)
throws ParseException {
new Expectations() {
{
backupRestoreConfig.getBackupNotifyComponentIncludeList();
result = "SOME_FAKE_FILE_TYPE_1, SOME_FAKE_FILE_TYPE_2, SST";
maxTimes = 2;
}
};
new Expectations() {
{
notificationService.notify(anyString, (Map<String, MessageAttributeValue>) any);
maxTimes = 0;
}
};
Path path =
Paths.get(
"fakeDataLocation",
"fakeKeyspace",
"fakeColumnFamily",
"fakeBackup",
"fakeData.db");
AbstractBackupPath abstractBackupPath = abstractBackupPathProvider.get();
abstractBackupPath.parseLocal(path.toFile(), AbstractBackupPath.BackupFileType.META_V2);
backupNotificationMgr.notify(abstractBackupPath, UploadStatus.STARTED);
new Verifications() {
{
backupRestoreConfig.getBackupNotifyComponentIncludeList();
maxTimes = 2;
}
{
notificationService.notify(anyString, (Map<String, MessageAttributeValue>) any);
maxTimes = 0;
}
};
}
@Test
public void testNotify(@Capturing INotificationService notificationService) {
new Expectations() {
{
notificationService.notify(anyString, (Map<String, MessageAttributeValue>) any);
maxTimes = 1;
}
};
backupNotificationMgr.notify("some_random", Instant.EPOCH);
new Verifications() {
{
notificationService.notify(anyString, (Map<String, MessageAttributeValue>) any);
maxTimes = 1;
}
};
}
}
| 3,123 |
0 | Create_ds/Priam/priam/src/test/java/com/netflix/priam | Create_ds/Priam/priam/src/test/java/com/netflix/priam/health/TestThriftChecker.java | package com.netflix.priam.health;
import com.netflix.priam.config.FakeConfiguration;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import mockit.Expectations;
import mockit.Mocked;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
/** Tests {@link ThriftChecker}'s detection of a listening thrift server port. */
public class TestThriftChecker {
    private FakeConfiguration config;
    private ThriftChecker thriftChecker;
    @Mocked private Process mockProcess;
    // Renamed from TestThriftChecker(): a @Before method should be a lowerCamelCase verb,
    // not mimic a constructor. JUnit invokes it via the annotation, not the name.
    @Before
    public void setUp() {
        config = new FakeConfiguration();
        thriftChecker = new ThriftChecker(config);
    }
    /** When the check is disabled the checker must report "listening" unconditionally. */
    @Test
    public void testThriftServerIsListeningDisabled() {
        config.setCheckThriftServerIsListening(false);
        Assert.assertTrue(thriftChecker.isThriftServerListening());
    }
    @Test
    public void testThriftServerIsNotListening() {
        config.setCheckThriftServerIsListening(true);
        Assert.assertFalse(thriftChecker.isThriftServerListening());
    }
    @Test
    public void testThriftServerIsListening() throws IOException {
        config.setCheckThriftServerIsListening(true);
        // "1" simulates `ss | grep -c` reporting one listener on the thrift port.
        final InputStream mockOutput = new ByteArrayInputStream("1".getBytes());
        new Expectations() {
            {
                mockProcess.getInputStream();
                result = mockOutput;
            }
        };
        // Mock out the ps call
        // NOTE(review): the trailing " 2>/dev/null" array element mirrors the production
        // command exactly so the mock matches; confirm against ThriftChecker if it changes.
        final Runtime r = Runtime.getRuntime();
        String[] cmd = {
            "/bin/sh", "-c", "ss -tuln | grep -c " + config.getThriftPort(), " 2>/dev/null"
        };
        new Expectations(r) {
            {
                r.exec(cmd);
                result = mockProcess;
            }
        };
        Assert.assertTrue(thriftChecker.isThriftServerListening());
    }
    /** An IOException while reading the process output fails open: checker reports listening. */
    @Test
    public void testThriftServerIsListeningException() throws IOException {
        config.setCheckThriftServerIsListening(true);
        final IOException mockOutput = new IOException("Command exited with code 0");
        new Expectations() {
            {
                mockProcess.getInputStream();
                result = mockOutput;
            }
        };
        // Mock out the ps call
        final Runtime r = Runtime.getRuntime();
        String[] cmd = {
            "/bin/sh", "-c", "ss -tuln | grep -c " + config.getThriftPort(), " 2>/dev/null"
        };
        new Expectations(r) {
            {
                r.exec(cmd);
                result = mockProcess;
            }
        };
        Assert.assertTrue(thriftChecker.isThriftServerListening());
    }
}
| 3,124 |
0 | Create_ds/Priam/priam/src/test/java/com/netflix/priam | Create_ds/Priam/priam/src/test/java/com/netflix/priam/health/TestInstanceStatus.java | /*
* Copyright 2017 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.priam.health;
import com.google.inject.Guice;
import com.google.inject.Injector;
import com.netflix.priam.backup.BRTestModule;
import com.netflix.priam.backup.Status;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
/** Test InstanceState health aggregation. Created by aagrawal on 9/22/17. */
public class TestInstanceStatus {
    private TestInstanceState testInstanceState;
    @Before
    public void setUp() {
        Injector injector = Guice.createInjector(new BRTestModule());
        InstanceState instanceState = injector.getInstance(InstanceState.class);
        testInstanceState = new TestInstanceState(instanceState);
    }
    /**
     * Exercises isHealthy() over combinations of (isRestoring, ymlWritten, processAlive,
     * gossip, thrift, native, requiredDirs, shouldBeAlive) flags.
     */
    @Test
    public void testHealth() {
        // Verify good health.
        Assert.assertTrue(
                testInstanceState
                        .setParams(false, true, true, true, true, true, true, true)
                        .isHealthy());
        Assert.assertTrue(
                testInstanceState
                        .setParams(false, true, true, true, true, false, true, true)
                        .isHealthy());
        Assert.assertTrue(
                testInstanceState
                        .setParams(false, true, true, true, false, true, true, true)
                        .isHealthy());
        Assert.assertTrue(
                testInstanceState
                        .setParams(true, false, true, true, false, true, true, true)
                        .isHealthy());
        Assert.assertTrue(
                testInstanceState
                        .setParams(true, true, false, true, true, true, true, true)
                        .isHealthy());
        Assert.assertTrue(
                testInstanceState
                        .setParams(true, true, true, false, true, true, true, true)
                        .isHealthy());
        Assert.assertTrue(
                testInstanceState
                        .setParams(true, true, true, true, true, true, false, true)
                        .isHealthy());
        Assert.assertTrue(
                testInstanceState
                        .setParams(true, true, true, true, false, false, true, true)
                        .isHealthy());
        // Negative health case scenarios.
        Assert.assertFalse(
                testInstanceState
                        .setParams(false, false, true, true, false, true, true, true)
                        .isHealthy());
        Assert.assertFalse(
                testInstanceState
                        .setParams(false, true, false, true, true, true, true, true)
                        .isHealthy());
        Assert.assertFalse(
                testInstanceState
                        .setParams(false, true, true, false, true, true, true, true)
                        .isHealthy());
        Assert.assertFalse(
                testInstanceState
                        .setParams(false, true, true, true, true, true, false, true)
                        .isHealthy());
        Assert.assertFalse(
                testInstanceState
                        .setParams(false, true, true, true, false, false, true, true)
                        .isHealthy());
        Assert.assertFalse(
                testInstanceState
                        .setParams(false, true, true, true, false, false, true, false)
                        .isHealthy());
    }
    /**
     * Helper that applies a tuple of state flags to an InstanceState. Declared static
     * because it never touches the enclosing test instance (avoids the hidden outer ref).
     */
    private static class TestInstanceState {
        private final InstanceState instanceState;
        TestInstanceState(InstanceState instanceState) {
            this.instanceState = instanceState;
        }
        InstanceState setParams(
                boolean isRestoring,
                boolean isYmlWritten,
                boolean isCassandraProcessAlive,
                boolean isGossipEnabled,
                boolean isThriftEnabled,
                boolean isNativeEnabled,
                boolean isRequiredDirectoriesExist,
                boolean shouldCassandraBeAlive) {
            instanceState.setYmlWritten(isYmlWritten);
            instanceState.setCassandraProcessAlive(isCassandraProcessAlive);
            instanceState.setIsNativeTransportActive(isNativeEnabled);
            instanceState.setIsThriftActive(isThriftEnabled);
            instanceState.setIsGossipActive(isGossipEnabled);
            instanceState.setIsRequiredDirectoriesExist(isRequiredDirectoriesExist);
            instanceState.setShouldCassandraBeAlive(shouldCassandraBeAlive);
            // Restoring maps to a STARTED restore status; otherwise FINISHED.
            if (isRestoring) instanceState.setRestoreStatus(Status.STARTED);
            else instanceState.setRestoreStatus(Status.FINISHED);
            return instanceState;
        }
    }
}
| 3,125 |
0 | Create_ds/Priam/priam/src/test/java/com/netflix/priam | Create_ds/Priam/priam/src/test/java/com/netflix/priam/health/TestCassandraMonitor.java | /*
* Copyright 2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.priam.health;
import com.google.inject.Guice;
import com.google.inject.Injector;
import com.netflix.priam.backup.BRTestModule;
import com.netflix.priam.config.IConfiguration;
import com.netflix.priam.connection.JMXNodeTool;
import com.netflix.priam.defaultimpl.ICassandraProcess;
import com.netflix.priam.merics.CassMonitorMetrics;
import java.io.ByteArrayInputStream;
import java.io.InputStream;
import mockit.*;
import org.apache.cassandra.tools.NodeProbe;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
/** Tests CassandraMonitor state tracking and auto-remediation. Created by aagrawal on 7/18/17. */
public class TestCassandraMonitor {
    // Shared singletons across tests; created lazily on first setUp().
    private static CassandraMonitor monitor;
    private static InstanceState instanceState;
    private static CassMonitorMetrics cassMonitorMetrics;
    private static IThriftChecker thriftChecker;
    private IConfiguration config;
    @Mocked private Process mockProcess;
    @Mocked private NodeProbe nodeProbe;
    @Mocked private ICassandraProcess cassProcess;
    @Before
    public void setUp() {
        Injector injector = Guice.createInjector(new BRTestModule());
        config = injector.getInstance(IConfiguration.class);
        thriftChecker = injector.getInstance(ThriftChecker.class);
        if (instanceState == null) instanceState = injector.getInstance(InstanceState.class);
        if (cassMonitorMetrics == null)
            cassMonitorMetrics = injector.getInstance(CassMonitorMetrics.class);
        if (monitor == null)
            monitor =
                    new CassandraMonitor(
                            config, instanceState, cassProcess, cassMonitorMetrics, thriftChecker);
    }
    /** execute() should clear the "started" flag set via setIsCassadraStarted(). */
    @Test
    public void testCassandraMonitor() throws Exception {
        monitor.execute();
        Assert.assertFalse(CassandraMonitor.hasCassadraStarted());
        CassandraMonitor.setIsCassadraStarted();
        Assert.assertTrue(CassandraMonitor.hasCassadraStarted());
        monitor.execute();
        Assert.assertFalse(CassandraMonitor.hasCassadraStarted());
    }
    /** With a live process and shouldCassandraBeAlive=false, no restart must be attempted. */
    @Test
    public void testNoAutoRemediation() throws Exception {
        new MockUp<JMXNodeTool>() {
            @Mock
            NodeProbe instance(IConfiguration config) {
                return nodeProbe;
            }
        };
        // Non-empty ps output means the Cassandra process is detected as running.
        final InputStream mockOutput = new ByteArrayInputStream("a process".getBytes());
        new Expectations() {
            {
                mockProcess.getInputStream();
                result = mockOutput;
                nodeProbe.isGossipRunning();
                result = true;
                nodeProbe.isNativeTransportRunning();
                result = true;
                nodeProbe.isThriftServerRunning();
                result = true;
            }
        };
        // Mock out the ps call
        final Runtime r = Runtime.getRuntime();
        String[] cmd = {
            "/bin/sh",
            "-c",
            "ps -ef |grep -v -P \"\\sgrep\\s\" | grep " + config.getCassProcessName()
        };
        new Expectations(r) {
            {
                r.exec(cmd);
                result = mockProcess;
            }
        };
        instanceState.setShouldCassandraBeAlive(false);
        instanceState.setCassandraProcessAlive(false);
        monitor.execute();
        // assertFalse is clearer than assertTrue(!...).
        Assert.assertFalse(instanceState.shouldCassandraBeAlive());
        Assert.assertTrue(instanceState.isCassandraProcessAlive());
        new Verifications() {
            {
                cassProcess.start(anyBoolean);
                times = 0;
            }
        };
    }
    /** Restarts are rate-limited: 6 executes across ~3s may trigger at most 2 starts. */
    @Test
    public void testAutoRemediationRateLimit() throws Exception {
        // Empty ps output means the process is detected as dead on every check.
        final InputStream mockOutput = new ByteArrayInputStream("".getBytes());
        instanceState.setShouldCassandraBeAlive(true);
        instanceState.markLastAttemptedStartTime();
        new Expectations() {
            {
                // 6 calls to execute should = 12 calls to getInputStream();
                mockProcess.getInputStream();
                result = mockOutput;
                times = 12;
                cassProcess.start(true);
                times = 2;
            }
        };
        // Mock out the ps call
        final Runtime r = Runtime.getRuntime();
        String[] cmd = {
            "/bin/sh",
            "-c",
            "ps -ef |grep -v -P \"\\sgrep\\s\" | grep " + config.getCassProcessName()
        };
        new Expectations(r) {
            {
                r.exec(cmd);
                result = mockProcess;
            }
        };
        // Sleep ahead to ensure we have permits in the rate limiter
        monitor.execute();
        Thread.sleep(1500);
        monitor.execute();
        monitor.execute();
        Thread.sleep(1500);
        monitor.execute();
        monitor.execute();
        monitor.execute();
        // Empty block: JMockit still enforces the strict `times` counts recorded above.
        new Verifications() {};
    }
}
| 3,126 |
0 | Create_ds/Priam/priam/src/test/java/com/netflix/priam | Create_ds/Priam/priam/src/test/java/com/netflix/priam/resources/CassandraConfigTest.java | /*
* Copyright 2017 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.priam.resources;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import com.google.common.collect.ImmutableList;
import com.google.inject.Guice;
import com.netflix.priam.PriamServer;
import com.netflix.priam.backup.BRTestModule;
import com.netflix.priam.identity.DoubleRing;
import com.netflix.priam.identity.InstanceIdentity;
import com.netflix.priam.identity.PriamInstance;
import com.netflix.priam.merics.CassMonitorMetrics;
import java.io.IOException;
import java.net.UnknownHostException;
import java.util.List;
import javax.ws.rs.core.Response;
import mockit.Expectations;
import mockit.Mocked;
import org.junit.Before;
import org.junit.Test;
/**
 * Unit tests for the {@code CassandraConfig} REST resource.
 *
 * <p>{@link PriamServer} and {@link DoubleRing} are JMockit mocks; each test records the expected
 * collaborator interactions in an anonymous {@code Expectations} block and then asserts on the
 * JAX-RS {@link Response} status and entity produced by the resource.
 */
public class CassandraConfigTest {
    // Mocked so each test can script what getInstanceIdentity() hands back.
    private @Mocked PriamServer priamServer;
    // Mocked ring-doubling service; backup()/doubleSlots()/restore() are scripted per test.
    private @Mocked DoubleRing doubleRing;
    // Resource under test.
    private CassandraConfig resource;
    // Real (Guice-built) identity used by tests that mutate replace-ip state (setReplacedIp).
    private InstanceIdentity instanceIdentity;
    @Before
    public void setUp() {
        // NOTE(review): two separate injectors are created here; presumably intentional since
        // only one instance is needed from each — confirm if singleton sharing ever matters.
        CassMonitorMetrics cassMonitorMetrics =
                Guice.createInjector(new BRTestModule()).getInstance(CassMonitorMetrics.class);
        instanceIdentity =
                Guice.createInjector(new BRTestModule()).getInstance(InstanceIdentity.class);
        resource = new CassandraConfig(priamServer, doubleRing, cassMonitorMetrics);
    }
    /** getSeeds: a non-empty seed list yields 200 with the seeds comma-joined in the entity. */
    @Test
    public void getSeeds(@Mocked final InstanceIdentity identity) throws Exception {
        final List<String> seeds = ImmutableList.of("seed1", "seed2", "seed3");
        new Expectations() {
            {
                priamServer.getInstanceIdentity();
                result = identity;
                times = 1;
                identity.getSeeds();
                result = seeds;
                times = 1;
            }
        };
        Response response = resource.getSeeds();
        assertEquals(200, response.getStatus());
        assertEquals("seed1,seed2,seed3", response.getEntity());
    }
    /** getSeeds: an empty seed list is treated as a server-side failure (500). */
    @Test
    public void getSeeds_notFound(@Mocked final InstanceIdentity identity) throws Exception {
        final List<String> seeds = ImmutableList.of();
        new Expectations() {
            {
                priamServer.getInstanceIdentity();
                result = identity;
                times = 1;
                identity.getSeeds();
                result = seeds;
                times = 1;
            }
        };
        Response response = resource.getSeeds();
        assertEquals(500, response.getStatus());
    }
    /** getSeeds: an UnknownHostException from the identity is mapped to 500, not propagated. */
    @Test
    public void getSeeds_handlesUnknownHostException(@Mocked final InstanceIdentity identity)
            throws Exception {
        new Expectations() {
            {
                priamServer.getInstanceIdentity();
                result = identity;
                identity.getSeeds();
                // Assigning a Throwable as "result" makes the mock throw it.
                result = new UnknownHostException();
            }
        };
        Response response = resource.getSeeds();
        assertEquals(500, response.getStatus());
    }
    /** getToken: a non-empty token yields 200 with the token as the entity. */
    @Test
    public void getToken(
            @Mocked final InstanceIdentity identity, @Mocked final PriamInstance instance) {
        final String token = "myToken";
        new Expectations() {
            {
                // times = 2: the resource reads the token twice (validate + return).
                priamServer.getInstanceIdentity();
                result = identity;
                times = 2;
                identity.getInstance();
                result = instance;
                times = 2;
                instance.getToken();
                result = token;
                times = 2;
            }
        };
        Response response = resource.getToken();
        assertEquals(200, response.getStatus());
        assertEquals(token, response.getEntity());
    }
    /** getToken: an empty token string is treated as a failure (500). */
    @Test
    public void getToken_notFound(
            @Mocked final InstanceIdentity identity, @Mocked final PriamInstance instance) {
        final String token = "";
        new Expectations() {
            {
                priamServer.getInstanceIdentity();
                result = identity;
                identity.getInstance();
                result = instance;
                instance.getToken();
                result = token;
            }
        };
        Response response = resource.getToken();
        assertEquals(500, response.getStatus());
    }
    /** getToken: a RuntimeException while reading the token is mapped to 500. */
    @Test
    public void getToken_handlesException(
            @Mocked final InstanceIdentity identity, @Mocked final PriamInstance instance) {
        new Expectations() {
            {
                priamServer.getInstanceIdentity();
                result = identity;
                identity.getInstance();
                result = instance;
                instance.getToken();
                result = new RuntimeException();
            }
        };
        Response response = resource.getToken();
        assertEquals(500, response.getStatus());
    }
    /** isReplaceToken: the boolean is rendered as its string form in a 200 response. */
    @Test
    public void isReplaceToken(@Mocked final InstanceIdentity identity) {
        new Expectations() {
            {
                priamServer.getInstanceIdentity();
                result = identity;
                identity.isReplace();
                result = true;
            }
        };
        Response response = resource.isReplaceToken();
        assertEquals(200, response.getStatus());
        assertEquals("true", response.getEntity());
    }
    /** isReplaceToken: a RuntimeException from the identity is mapped to 500. */
    @Test
    public void isReplaceToken_handlesException(@Mocked final InstanceIdentity identity) {
        new Expectations() {
            {
                priamServer.getInstanceIdentity();
                result = identity;
                identity.isReplace();
                result = new RuntimeException();
            }
        };
        Response response = resource.isReplaceToken();
        assertEquals(500, response.getStatus());
    }
    /** getReplacedIp: returns 200 with the replaced IP as the entity. */
    @Test
    public void getReplacedAddress(@Mocked final InstanceIdentity identity) {
        final String replacedIp = "127.0.0.1";
        new Expectations() {
            {
                priamServer.getInstanceIdentity();
                result = identity;
                identity.getReplacedIp();
                result = replacedIp;
            }
        };
        Response response = resource.getReplacedIp();
        assertEquals(200, response.getStatus());
        assertEquals(replacedIp, response.getEntity());
    }
    /**
     * setReplacedIp: a valid IP updates the (real) identity's replaced-ip and replace flag;
     * a null IP is rejected with 400.
     */
    @Test
    public void setReplacedIp() {
        new Expectations() {
            {
                // Hand the resource the real identity so the state mutation can be observed.
                priamServer.getInstanceIdentity();
                result = instanceIdentity;
            }
        };
        Response response = resource.setReplacedIp("127.0.0.1");
        assertEquals(200, response.getStatus());
        assertEquals("127.0.0.1", instanceIdentity.getReplacedIp());
        assertTrue(instanceIdentity.isReplace());
        response = resource.setReplacedIp(null);
        assertEquals(400, response.getStatus());
    }
    /** doubleRing: happy path — backup then doubleSlots, returning 200. */
    @Test
    public void doubleRing() throws Exception {
        new Expectations() {
            {
                doubleRing.backup();
                doubleRing.doubleSlots();
            }
        };
        Response response = resource.doubleRing();
        assertEquals(200, response.getStatus());
    }
    /**
     * doubleRing: an IOException during backup triggers a restore and is rethrown wrapped in a
     * RuntimeException whose cause is the original exception.
     */
    @Test
    public void doubleRing_ioExceptionInBackup() throws Exception {
        final IOException exception = new IOException();
        new Expectations() {
            {
                doubleRing.backup();
                result = exception;
                doubleRing.restore();
            }
        };
        try {
            resource.doubleRing();
            fail("Excepted RuntimeException");
        } catch (RuntimeException e) {
            assertEquals(exception, e.getCause());
        }
    }
    /** doubleRing: an IOException during the compensating restore propagates as-is. */
    @Test(expected = IOException.class)
    public void doubleRing_ioExceptionInRestore() throws Exception {
        new Expectations() {
            {
                doubleRing.backup();
                result = new IOException();
                doubleRing.restore();
                result = new IOException();
            }
        };
        resource.doubleRing();
    }
    /** doubleRing: a ClassNotFoundException during restore propagates as-is. */
    @Test(expected = ClassNotFoundException.class)
    public void doubleRing_classNotFoundExceptionInRestore() throws Exception {
        new Expectations() {
            {
                doubleRing.backup();
                result = new IOException();
                doubleRing.restore();
                result = new ClassNotFoundException();
            }
        };
        resource.doubleRing();
    }
}
| 3,127 |
0 | Create_ds/Priam/priam/src/test/java/com/netflix/priam | Create_ds/Priam/priam/src/test/java/com/netflix/priam/resources/BackupServletV2Test.java | package com.netflix.priam.resources;
import static org.junit.Assert.assertEquals;
import com.google.inject.Guice;
import com.google.inject.Injector;
import com.netflix.priam.backup.*;
import com.netflix.priam.backupv2.MetaV2Proxy;
import com.netflix.priam.backupv2.SnapshotMetaTask;
import com.netflix.priam.config.IConfiguration;
import com.netflix.priam.identity.config.InstanceInfo;
import com.netflix.priam.restore.Restore;
import com.netflix.priam.utils.DateUtil;
import com.netflix.priam.utils.GsonJsonSerializer;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.text.SimpleDateFormat;
import java.time.Instant;
import java.time.temporal.ChronoUnit;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.Optional;
import javax.inject.Provider;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import mockit.Expectations;
import mockit.Mocked;
import org.joda.time.DateTime;
import org.junit.Before;
import org.junit.Test;
/**
 * Unit tests for the {@code BackupServletV2} (and shared {@code RestoreServlet}) REST endpoints.
 *
 * <p>Backup/restore collaborators are JMockit mocks; the servlets themselves come from a real
 * Guice injector ({@link BRTestModule}). Each test records expected interactions and asserts on
 * the JAX-RS {@link Response}.
 */
public class BackupServletV2Test {
    private IConfiguration config;
    private @Mocked Restore restoreObj;
    private @Mocked SnapshotMetaTask snapshotBackup;
    private @Mocked BackupVerification backupVerification;
    private @Mocked FileSnapshotStatusMgr backupStatusMgr;
    private @Mocked BackupRestoreUtil backupRestoreUtil;
    private @Mocked MetaV2Proxy metaV2Proxy;
    private BackupServletV2 resource;
    private RestoreServlet restoreResource;
    private InstanceInfo instanceInfo;
    // Fixed timestamp (yyyyMMddHHmm) used by the backup-info tests.
    private static final String backupDate = "201812011000";
    // A representative remote location of a V2 meta file.
    private static final Path location =
            Paths.get(
                    "some_bucket/casstestbackup/1049_fake-app/1808575600",
                    AbstractBackupPath.BackupFileType.META_V2.toString(),
                    "1859817645000",
                    "SNAPPY",
                    "PLAINTEXT",
                    "meta_v2_201812011000.json");
    private static Provider<AbstractBackupPath> pathProvider;
    private static IConfiguration configuration;
    @Before
    public void setUp() {
        Injector injector = Guice.createInjector(new BRTestModule());
        config = injector.getInstance(IConfiguration.class);
        instanceInfo = injector.getInstance(InstanceInfo.class);
        resource = injector.getInstance(BackupServletV2.class);
        restoreResource = injector.getInstance(RestoreServlet.class);
        pathProvider = injector.getProvider(AbstractBackupPath.class);
        configuration = injector.getInstance(IConfiguration.class);
    }
    /** backup: triggering a snapshot returns 200 with a JSON "ok" body. */
    @Test
    public void testBackup() throws Exception {
        new Expectations() {
            {
                snapshotBackup.execute();
            }
        };
        Response response = resource.backup();
        assertEquals(200, response.getStatus());
        assertEquals("[\"ok\"]", response.getEntity());
        assertEquals(
                MediaType.APPLICATION_JSON_TYPE, response.getMetadata().get("Content-Type").get(0));
    }
    /** restore with no date range: restores over an open-ended range and restarts Cassandra. */
    @Test
    public void testRestoreMinimal() throws Exception {
        final String dateRange = null;
        final String oldRegion = "us-east-1";
        new Expectations() {
            {
                instanceInfo.getRegion();
                result = oldRegion;
                restoreObj.restore(new DateUtil.DateRange((Instant) any, (Instant) any));
            }
        };
        expectCassandraStartup();
        Response response = restoreResource.restore(dateRange);
        assertEquals(200, response.getStatus());
        assertEquals("[\"ok\"]", response.getEntity());
        assertEquals(
                MediaType.APPLICATION_JSON_TYPE, response.getMetadata().get("Content-Type").get(0));
    }
    /** restore with an explicit yyyyMMddHHmm,yyyyMMddHHmm range forwards that range. */
    @Test
    public void testRestoreWithDateRange() throws Exception {
        final String dateRange = "201101010000,201112312359";
        new Expectations() {
            {
                // NOTE(review): DateUtil.getDate is static on a non-mocked class; this relies on
                // JMockit partial/static mocking behavior — confirm it is actually intercepted.
                DateUtil.getDate(dateRange.split(",")[0]);
                result = new DateTime(2011, 1, 1, 0, 0).toDate();
                times = 1;
                DateUtil.getDate(dateRange.split(",")[1]);
                result = new DateTime(2011, 12, 31, 23, 59).toDate();
                times = 1;
                restoreObj.restore(new DateUtil.DateRange(dateRange));
            }
        };
        expectCassandraStartup();
        Response response = restoreResource.restore(dateRange);
        assertEquals(200, response.getStatus());
        assertEquals("[\"ok\"]", response.getEntity());
        assertEquals(
                MediaType.APPLICATION_JSON_TYPE, response.getMetadata().get("Content-Type").get(0));
    }
    // TODO: create CassandraController interface and inject, instead of static util method
    /** Records the configuration reads performed when Cassandra is (re)started after restore. */
    private void expectCassandraStartup() {
        new Expectations() {
            {
                config.getCassStartupScript();
                result = "/usr/bin/false";
                config.getHeapNewSize();
                result = "2G";
                config.getHeapSize();
                result = "8G";
                config.getDataFileLocation();
                result = "/var/lib/cassandra/data";
                config.getCommitLogLocation();
                result = "/var/lib/cassandra/commitlog";
                config.getBackupLocation();
                result = "backup";
                config.getCacheLocation();
                result = "/var/lib/cassandra/saved_caches";
                config.getJmxPort();
                result = 7199;
                config.getMaxDirectMemory();
                result = "50G";
            }
        };
    }
    /** validate: a verified backup in range yields 200 with the result serialized as JSON. */
    @Test
    public void testValidate() throws Exception {
        new Expectations() {
            {
                backupVerification.verifyLatestBackup(
                        BackupVersion.SNAPSHOT_META_SERVICE,
                        anyBoolean,
                        new DateUtil.DateRange((Instant) any, (Instant) any));
                result = Optional.of(getBackupVerificationResult());
            }
        };
        Response response =
                resource.validateV2SnapshotByDate(
                        new DateUtil.DateRange(Instant.now(), Instant.now()).toString(), true);
        assertEquals(200, response.getStatus());
        assertEquals(
                GsonJsonSerializer.getGson().toJson(getBackupVerificationResult()),
                response.getEntity().toString());
    }
    /** validate: no meta found in range yields 204 with an explanatory message. */
    @Test
    public void testValidateNoBackups() throws Exception {
        new Expectations() {
            {
                backupVerification.verifyLatestBackup(
                        BackupVersion.SNAPSHOT_META_SERVICE,
                        anyBoolean,
                        new DateUtil.DateRange((Instant) any, (Instant) any));
                result = Optional.empty();
            }
        };
        Response response =
                resource.validateV2SnapshotByDate(
                        new DateUtil.DateRange(Instant.now(), Instant.now()).toString(), true);
        assertEquals(204, response.getStatus());
        assertEquals(
                response.getEntity().toString(), "No valid meta found for provided time range");
    }
    /** validateV2SnapshotByDate: same happy path as testValidate, via the dated endpoint. */
    @Test
    public void testValidateV2SnapshotByDate() throws Exception {
        new Expectations() {
            {
                backupVerification.verifyLatestBackup(
                        BackupVersion.SNAPSHOT_META_SERVICE,
                        anyBoolean,
                        new DateUtil.DateRange((Instant) any, (Instant) any));
                result = Optional.of(getBackupVerificationResult());
            }
        };
        Response response =
                resource.validateV2SnapshotByDate(
                        new DateUtil.DateRange(Instant.now(), Instant.now()).toString(), true);
        assertEquals(200, response.getStatus());
        assertEquals(
                GsonJsonSerializer.getGson().toJson(getBackupVerificationResult()),
                response.getEntity().toString());
    }
    //    @Test
    //    public void testListDateRange() throws Exception {
    //        Optional<AbstractBackupPath> abstractBackupPath = getAbstractBackupPath();
    //        String dateRange = String.format("%s,%s",
    //                new SimpleDateFormat("yyyyMMddHHmm").format(new Date())
    //                , new SimpleDateFormat("yyyyMMddHHmm").format(new Date()));
    //        new Expectations() {{
    //            backupRestoreUtil.getLatestValidMetaPath(metaV2Proxy,
    //                    new DateUtil.DateRange((Instant) any, (Instant) any)); result =
    // abstractBackupPath;
    //
    //            backupRestoreUtil.getAllFiles(
    //                    abstractBackupPath.get(),
    //                    new DateUtil.DateRange((Instant) any, (Instant) any), metaV2Proxy,
    // pathProvider); result = getBackupPathList();
    //        }};
    //
    //        Response response =
    //                resource.list(dateRange);
    //        assertEquals(200, response.getStatus());
    //    }
    /** list: when no valid meta exists for the range, a 200 with a "No valid meta" body. */
    @Test
    public void testListDateRangeNoBackups() throws Exception {
        // Fix: pattern must be yyyyMMdd — lowercase "mm" is minute-of-hour and "yyyymmdd"
        // produced strings like "20183405" (year + minute + day) instead of a calendar date.
        SimpleDateFormat dayFormat = new SimpleDateFormat("yyyyMMdd");
        String today = dayFormat.format(new Date());
        String dateRange = String.format("%s,%s", today, today);
        new Expectations() {
            {
                backupRestoreUtil.getLatestValidMetaPath(
                        metaV2Proxy, new DateUtil.DateRange((Instant) any, (Instant) any));
                result = Optional.empty();
            }
        };
        Response response = resource.list(dateRange);
        assertEquals(200, response.getStatus());
        assertEquals(response.getEntity().toString(), "No valid meta found!");
    }
    /** info: returns 200 with the latest backup metadata serialized as JSON. */
    @Test
    public void testBackUpInfo() throws Exception {
        List<BackupMetadata> backupMetadataList = new ArrayList<>();
        backupMetadataList.add(getBackupMetaData());
        new Expectations() {
            {
                backupStatusMgr.getLatestBackupMetadata(
                        BackupVersion.SNAPSHOT_META_SERVICE,
                        new DateUtil.DateRange((Instant) any, (Instant) any));
                result = backupMetadataList;
            }
        };
        Response response = resource.info(backupDate);
        assertEquals(200, response.getStatus());
        assertEquals(
                GsonJsonSerializer.getGson().toJson(backupMetadataList),
                response.getEntity().toString());
    }
    /** info: no backups in range still returns 200, with an empty JSON list. */
    @Test
    public void testBackUpInfoNoBackups() {
        new Expectations() {
            {
                backupStatusMgr.getLatestBackupMetadata(
                        BackupVersion.SNAPSHOT_META_SERVICE,
                        new DateUtil.DateRange((Instant) any, (Instant) any));
                result = new ArrayList<>();
            }
        };
        Response response = resource.info(backupDate);
        assertEquals(200, response.getStatus());
        assertEquals(
                GsonJsonSerializer.getGson().toJson(new ArrayList<>()),
                response.getEntity().toString());
    }
    /** Builds the canned verification result the validate tests compare against. */
    private static BackupVerificationResult getBackupVerificationResult() {
        BackupVerificationResult result = new BackupVerificationResult();
        result.valid = true;
        result.manifestAvailable = true;
        result.remotePath = "some_random";
        result.filesMatched = 123;
        result.snapshotInstant = Instant.EPOCH;
        return result;
    }
    /** Builds a FINISHED backup record starting at {@link #backupDate}, completed 30 min later. */
    private static BackupMetadata getBackupMetaData() throws Exception {
        BackupMetadata backupMetadata =
                new BackupMetadata(
                        BackupVersion.SNAPSHOT_META_SERVICE,
                        "123",
                        new Date(DateUtil.parseInstant(backupDate).toEpochMilli()));
        backupMetadata.setCompleted(
                new Date(
                        DateUtil.parseInstant(backupDate)
                                .plus(30, ChronoUnit.MINUTES)
                                .toEpochMilli()));
        backupMetadata.setStatus(Status.FINISHED);
        backupMetadata.setSnapshotLocation(location.toString());
        return backupMetadata;
    }
    // Used only by the commented-out testListDateRange above; kept for when it is revived.
    private static Optional<AbstractBackupPath> getAbstractBackupPath() throws Exception {
        Path path =
                Paths.get(
                        configuration.getDataFileLocation(),
                        "keyspace1",
                        "columnfamily1",
                        "backup",
                        "mc-1234-Data.db");
        AbstractBackupPath abstractBackupPath = pathProvider.get();
        abstractBackupPath.parseLocal(path.toFile(), AbstractBackupPath.BackupFileType.SST_V2);
        return Optional.of(abstractBackupPath);
    }
    // Used only by the commented-out testListDateRange above; kept for when it is revived.
    private static List<AbstractBackupPath> getBackupPathList() throws Exception {
        List<AbstractBackupPath> abstractBackupPathList = new ArrayList<>();
        Path path =
                Paths.get(
                        configuration.getDataFileLocation(),
                        "keyspace1",
                        "columnfamily1",
                        "backup",
                        "mc-1234-Data.db");
        AbstractBackupPath abstractBackupPath1 = pathProvider.get();
        abstractBackupPath1.parseLocal(path.toFile(), AbstractBackupPath.BackupFileType.SST_V2);
        abstractBackupPathList.add(abstractBackupPath1);
        path =
                Paths.get(
                        configuration.getDataFileLocation(),
                        "keyspace1",
                        "columnfamily1",
                        "backup",
                        "mc-1234-Data.db");
        AbstractBackupPath abstractBackupPath2 = pathProvider.get();
        abstractBackupPath2.parseLocal(
                path.toFile(), AbstractBackupPath.BackupFileType.SNAPSHOT_VERIFIED);
        abstractBackupPathList.add(abstractBackupPath2);
        return abstractBackupPathList;
    }
}
| 3,128 |
0 | Create_ds/Priam/priam/src/test/java/com/netflix/priam | Create_ds/Priam/priam/src/test/java/com/netflix/priam/resources/BackupServletTest.java | /*
* Copyright 2017 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.priam.resources;
import static org.junit.Assert.assertEquals;
import com.google.inject.Guice;
import com.google.inject.Injector;
import com.netflix.priam.backup.*;
import com.netflix.priam.config.IConfiguration;
import com.netflix.priam.health.InstanceState;
import com.netflix.priam.identity.config.InstanceInfo;
import com.netflix.priam.restore.Restore;
import com.netflix.priam.utils.DateUtil;
import java.time.Instant;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import mockit.Expectations;
import mockit.Mocked;
import org.joda.time.DateTime;
import org.junit.Before;
import org.junit.Test;
/**
 * Unit tests for the v1 {@code BackupServlet} and {@code RestoreServlet} endpoints.
 *
 * <p>{@link Restore} and {@link SnapshotBackup} are JMockit mocks; the servlets are obtained from
 * a real Guice injector ({@link BRTestModule}).
 */
public class BackupServletTest {
    private IConfiguration config;
    private @Mocked Restore restoreObj;
    private @Mocked SnapshotBackup snapshotBackup;
    private BackupServlet resource;
    private RestoreServlet restoreResource;
    private InstanceInfo instanceInfo;
    @Before
    public void setUp() {
        Injector injector = Guice.createInjector(new BRTestModule());
        config = injector.getInstance(IConfiguration.class);
        // NOTE(review): instanceState is never used below — presumably retained for the side
        // effect of instantiating the Guice singleton; confirm or remove.
        InstanceState instanceState = injector.getInstance(InstanceState.class);
        instanceInfo = injector.getInstance(InstanceInfo.class);
        resource = injector.getInstance(BackupServlet.class);
        restoreResource = injector.getInstance(RestoreServlet.class);
    }
    /** backup: triggering a snapshot returns 200 with a JSON "ok" body. */
    @Test
    public void backup() throws Exception {
        new Expectations() {
            {
                snapshotBackup.execute();
            }
        };
        Response response = resource.backup();
        assertEquals(200, response.getStatus());
        assertEquals("[\"ok\"]", response.getEntity());
        assertEquals(
                MediaType.APPLICATION_JSON_TYPE, response.getMetadata().get("Content-Type").get(0));
    }
    /** restore with no date range: restores over an open-ended range and restarts Cassandra. */
    @Test
    public void restore_minimal() throws Exception {
        final String dateRange = null;
        final String oldRegion = "us-east-1";
        new Expectations() {
            {
                instanceInfo.getRegion();
                result = oldRegion;
                restoreObj.restore(new DateUtil.DateRange((Instant) any, (Instant) any));
            }
        };
        expectCassandraStartup();
        Response response = restoreResource.restore(dateRange);
        assertEquals(200, response.getStatus());
        assertEquals("[\"ok\"]", response.getEntity());
        assertEquals(
                MediaType.APPLICATION_JSON_TYPE, response.getMetadata().get("Content-Type").get(0));
    }
    /** restore with an explicit yyyyMMddHHmm,yyyyMMddHHmm range forwards that range. */
    @Test
    public void restore_withDateRange() throws Exception {
        final String dateRange = "201101010000,201112312359";
        new Expectations() {
            {
                // NOTE(review): DateUtil.getDate is static on a non-mocked class; this relies on
                // JMockit partial/static mocking behavior — confirm it is actually intercepted.
                DateUtil.getDate(dateRange.split(",")[0]);
                result = new DateTime(2011, 1, 1, 0, 0).toDate();
                times = 1;
                DateUtil.getDate(dateRange.split(",")[1]);
                result = new DateTime(2011, 12, 31, 23, 59).toDate();
                times = 1;
                restoreObj.restore(new DateUtil.DateRange(dateRange));
            }
        };
        expectCassandraStartup();
        Response response = restoreResource.restore(dateRange);
        assertEquals(200, response.getStatus());
        assertEquals("[\"ok\"]", response.getEntity());
        assertEquals(
                MediaType.APPLICATION_JSON_TYPE, response.getMetadata().get("Content-Type").get(0));
    }
    // TODO: create CassandraController interface and inject, instead of static util method
    /** Records the configuration reads performed when Cassandra is (re)started after restore. */
    private void expectCassandraStartup() {
        new Expectations() {
            {
                config.getCassStartupScript();
                result = "/usr/bin/false";
                config.getHeapNewSize();
                result = "2G";
                config.getHeapSize();
                result = "8G";
                config.getDataFileLocation();
                result = "/var/lib/cassandra/data";
                config.getCommitLogLocation();
                result = "/var/lib/cassandra/commitlog";
                config.getBackupLocation();
                result = "backup";
                config.getCacheLocation();
                result = "/var/lib/cassandra/saved_caches";
                config.getJmxPort();
                result = 7199;
                config.getMaxDirectMemory();
                result = "50G";
            }
        };
    }
}
| 3,129 |
0 | Create_ds/Priam/priam/src/test/java/com/netflix/priam | Create_ds/Priam/priam/src/test/java/com/netflix/priam/resources/PriamConfigTest.java | /*
* Copyright 2017 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.priam.resources;
import static org.junit.Assert.*;
import com.netflix.priam.PriamServer;
import com.netflix.priam.config.FakeConfiguration;
import com.netflix.priam.utils.GsonJsonSerializer;
import java.util.HashMap;
import java.util.Map;
import javax.ws.rs.core.Response;
import mockit.Expectations;
import mockit.Mocked;
import org.junit.Before;
import org.junit.Test;
/**
 * Unit tests for the {@code PriamConfig} structured-config REST resource.
 *
 * <p>{@link PriamServer} is mocked to hand back a {@link FakeConfiguration}; responses are
 * round-tripped through Gson to check the JSON payloads.
 */
public class PriamConfigTest {
    private @Mocked PriamServer priamServer;
    private PriamConfig resource;
    private FakeConfiguration fakeConfiguration;
    @Before
    public void setUp() {
        resource = new PriamConfig(priamServer);
        fakeConfiguration = new FakeConfiguration("cass_test");
        // Seed a property that getProperty() can look up.
        fakeConfiguration.fakeProperties.put("test.prop", "test_value");
    }
    /**
     * getPriamConfig: "all" dumps the whole config; a named lookup returns just that key; an
     * unknown key is a 404.
     */
    @Test
    public void getPriamConfig() {
        new Expectations() {
            {
                // times = 3: one getConfiguration() read per resource call below.
                priamServer.getConfiguration();
                result = fakeConfiguration;
                times = 3;
            }
        };
        Response response = resource.getPriamConfig("all");
        assertEquals(200, response.getStatus());
        Map<String, Object> result =
                GsonJsonSerializer.getGson().fromJson(response.getEntity().toString(), Map.class);
        assertNotNull(result);
        assertTrue(!result.isEmpty());
        final Map<String, String> expected = new HashMap<>();
        expected.put("backupLocation", "casstestbackup");
        String expectedJsonString = GsonJsonSerializer.getGson().toJson(expected);
        response = resource.getPriamConfigByName("all", "backupLocation");
        assertEquals(200, response.getStatus());
        assertEquals(expectedJsonString, response.getEntity());
        result = GsonJsonSerializer.getGson().fromJson(response.getEntity().toString(), Map.class);
        assertEquals(result, expected);
        Response badResponse = resource.getPriamConfigByName("all", "getUnrealThing");
        assertEquals(404, badResponse.getStatus());
    }
    /**
     * getProperty: a known key returns its value; an unknown key with a default returns the
     * default; an unknown key with no default is a 404.
     */
    @Test
    public void getProperty() {
        final Map<String, String> expected = new HashMap<>();
        expected.put("test.prop", "test_value");
        new Expectations() {
            {
                // times = 3: one getConfiguration() read per resource call below.
                priamServer.getConfiguration();
                result = fakeConfiguration;
                times = 3;
            }
        };
        String expectedJsonString = GsonJsonSerializer.getGson().toJson(expected);
        Response response = resource.getProperty("test.prop", null);
        assertEquals(200, response.getStatus());
        assertEquals(expectedJsonString, response.getEntity());
        Map<String, Object> result =
                GsonJsonSerializer.getGson().fromJson(response.getEntity().toString(), Map.class);
        assertNotNull(result);
        assertTrue(!result.isEmpty());
        Response defaultResponse = resource.getProperty("not.a.property", "NOVALUE");
        expected.clear();
        expected.put("not.a.property", "NOVALUE");
        expectedJsonString = GsonJsonSerializer.getGson().toJson(expected);
        assertEquals(200, defaultResponse.getStatus());
        assertEquals(expectedJsonString, defaultResponse.getEntity());
        result =
                GsonJsonSerializer.getGson()
                        .fromJson(defaultResponse.getEntity().toString(), Map.class);
        assertEquals(result, expected);
        Response badResponse = resource.getProperty("not.a.property", null);
        assertEquals(404, badResponse.getStatus());
    }
}
| 3,130 |
0 | Create_ds/Priam/priam/src/test/java/com/netflix/priam | Create_ds/Priam/priam/src/test/java/com/netflix/priam/resources/PriamInstanceResourceTest.java | /*
* Copyright 2017 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.priam.resources;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.fail;
import com.google.common.collect.ImmutableSet;
import com.netflix.priam.config.IConfiguration;
import com.netflix.priam.identity.IPriamInstanceFactory;
import com.netflix.priam.identity.PriamInstance;
import com.netflix.priam.identity.config.InstanceInfo;
import javax.ws.rs.WebApplicationException;
import javax.ws.rs.core.Response;
import mockit.Expectations;
import mockit.Mocked;
import org.junit.Before;
import org.junit.Test;
/**
 * Unit tests for the {@code PriamInstanceResource} CRUD endpoints.
 *
 * <p>{@link IConfiguration}, {@link IPriamInstanceFactory} and {@link InstanceInfo} are JMockit
 * mocks; tests record the expected factory interactions and assert on the response/exception.
 */
public class PriamInstanceResourceTest {
    private static final String APP_NAME = "myApp";
    private static final int NODE_ID = 3;
    private @Mocked IConfiguration config;
    private @Mocked IPriamInstanceFactory factory;
    private @Mocked InstanceInfo instanceInfo;
    private PriamInstanceResource resource;
    @Before
    public void setUp() {
        resource = new PriamInstanceResource(config, factory, instanceInfo);
    }
    /** getInstances: all instances are rendered one per line, each followed by a newline. */
    @Test
    public void getInstances(
            @Mocked final PriamInstance instance1,
            @Mocked final PriamInstance instance2,
            @Mocked final PriamInstance instance3) {
        new Expectations() {
            final ImmutableSet<PriamInstance> instances =
                    ImmutableSet.of(instance1, instance2, instance3);
            {
                config.getAppName();
                result = APP_NAME;
                factory.getAllIds(APP_NAME);
                result = instances;
                instance1.toString();
                result = "instance1";
                instance2.toString();
                result = "instance2";
                instance3.toString();
                result = "instance3";
            }
        };
        assertEquals("instance1\ninstance2\ninstance3\n", resource.getInstances());
    }
    /** getInstance: a found instance is rendered via its toString(). */
    @Test
    public void getInstance(@Mocked final PriamInstance instance) {
        final String expected = "plain text describing the instance";
        new Expectations() {
            {
                config.getAppName();
                result = APP_NAME;
                factory.getInstance(APP_NAME, instanceInfo.getRegion(), NODE_ID);
                result = instance;
                instance.toString();
                result = expected;
            }
        };
        assertEquals(expected, resource.getInstance(NODE_ID));
    }
    /** getInstance: an unknown id raises a 404 WebApplicationException. */
    @Test
    public void getInstance_notFound() {
        new Expectations() {
            {
                config.getAppName();
                result = APP_NAME;
                factory.getInstance(APP_NAME, instanceInfo.getRegion(), NODE_ID);
                result = null;
            }
        };
        try {
            resource.getInstance(NODE_ID);
            fail("Expected WebApplicationException thrown");
        } catch (WebApplicationException e) {
            assertEquals(404, e.getResponse().getStatus());
            assertEquals(
                    "No priam instance with id " + NODE_ID + " found", e.getResponse().getEntity());
        }
    }
    /** createInstance: 201 Created with a Location header pointing at the new id. */
    @Test
    public void createInstance(@Mocked final PriamInstance instance) {
        final String instanceID = "i-abc123";
        final String hostname = "dom.com";
        final String ip = "123.123.123.123";
        final String rack = "us-east-1a";
        final String token = "1234567890";
        new Expectations() {
            {
                config.getAppName();
                result = APP_NAME;
                factory.create(APP_NAME, NODE_ID, instanceID, hostname, ip, rack, null, token);
                result = instance;
                instance.getId();
                result = NODE_ID;
            }
        };
        Response response = resource.createInstance(NODE_ID, instanceID, hostname, ip, rack, token);
        assertEquals(201, response.getStatus());
        assertEquals("/" + NODE_ID, response.getMetadata().getFirst("location").toString());
    }
    /** deleteInstance: a found instance is deleted and 204 No Content is returned. */
    @Test
    public void deleteInstance(@Mocked final PriamInstance instance) {
        new Expectations() {
            {
                config.getAppName();
                result = APP_NAME;
                factory.getInstance(APP_NAME, instanceInfo.getRegion(), NODE_ID);
                result = instance;
                factory.delete(instance);
            }
        };
        Response response = resource.deleteInstance(NODE_ID);
        assertEquals(204, response.getStatus());
    }
    /** deleteInstance: deleting an unknown id raises a 404 WebApplicationException. */
    @Test
    public void deleteInstance_notFound() {
        new Expectations() {
            {
                config.getAppName();
                result = APP_NAME;
                factory.getInstance(APP_NAME, instanceInfo.getRegion(), NODE_ID);
                result = null;
            }
        };
        try {
            // Fix: previously called resource.getInstance(NODE_ID) (copy-paste from
            // getInstance_notFound), so the delete path's not-found handling was never tested.
            resource.deleteInstance(NODE_ID);
            fail("Expected WebApplicationException thrown");
        } catch (WebApplicationException e) {
            assertEquals(404, e.getResponse().getStatus());
            assertEquals(
                    "No priam instance with id " + NODE_ID + " found", e.getResponse().getEntity());
        }
    }
}
| 3,131 |
0 | Create_ds/Priam/priam/src/test/java/com/netflix/priam | Create_ds/Priam/priam/src/test/java/com/netflix/priam/stream/StreamingTest.java | /*
* Copyright 2017 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.priam.stream;
import com.google.inject.Guice;
import com.google.inject.Injector;
import com.netflix.priam.aws.RemoteBackupPath;
import com.netflix.priam.backup.AbstractBackupPath;
import com.netflix.priam.backup.BRTestModule;
import com.netflix.priam.config.IConfiguration;
import com.netflix.priam.identity.InstanceIdentity;
import com.netflix.priam.utils.FifoQueue;
import org.junit.Assert;
import org.junit.Test;
/**
 * Tests for {@link FifoQueue} eviction behavior and {@code RemoteBackupPath} ordering inside a
 * bounded FIFO of backup paths.
 */
public class StreamingTest {
    /** A capacity-10 queue fed 0..99 keeps only the 10 largest values; the smallest kept is 90. */
    @Test
    public void testFifoAddAndRemove() {
        FifoQueue<Long> queue = new FifoQueue<>(10);
        for (long i = 0; i < 100; i++) queue.adjustAndAdd(i);
        Assert.assertEquals(10, queue.size());
        // Long.valueOf instead of the deprecated boxing constructor new Long(90).
        Assert.assertEquals(Long.valueOf(90L), queue.first());
    }
    /**
     * Enqueues one remote backup path per day index 10..29 for the given file prefix
     * (e.g. "f1" yields .../2011082{i}0000/SNAP/ks1/cf2/f1{i}.db).
     */
    private static void enqueueDailyPaths(
            FifoQueue<AbstractBackupPath> queue,
            IConfiguration conf,
            InstanceIdentity factory,
            String region,
            String filePrefix) {
        for (int i = 10; i < 30; i++) {
            RemoteBackupPath path = new RemoteBackupPath(conf, factory);
            path.parseRemote(
                    "test_backup/"
                            + region
                            + "/fakecluster/123456/201108"
                            + i
                            + "0000"
                            + "/SNAP/ks1/cf2/"
                            + filePrefix
                            + i
                            + ".db");
            queue.adjustAndAdd(path);
        }
    }
    /**
     * After enqueueing 60 paths (3 prefixes x 20 days) into a capacity-10 queue, the newest
     * (day-29) paths are retained and the oldest retained element is the day-26 "f3" path.
     */
    @Test
    public void testAbstractPath() {
        Injector injector = Guice.createInjector(new BRTestModule());
        IConfiguration conf = injector.getInstance(IConfiguration.class);
        InstanceIdentity factory = injector.getInstance(InstanceIdentity.class);
        String region = factory.getInstanceInfo().getRegion();
        FifoQueue<AbstractBackupPath> queue = new FifoQueue<>(10);
        // The three original copy-pasted loops differed only in the file prefix.
        enqueueDailyPaths(queue, conf, factory, region, "f1");
        enqueueDailyPaths(queue, conf, factory, region, "f2");
        enqueueDailyPaths(queue, conf, factory, region, "f3");
        RemoteBackupPath path = new RemoteBackupPath(conf, factory);
        path.parseRemote(
                "test_backup/"
                        + region
                        + "/fakecluster/123456/201108290000"
                        + "/SNAP/ks1/cf2/f129.db");
        Assert.assertTrue(queue.contains(path));
        path.parseRemote(
                "test_backup/"
                        + region
                        + "/fakecluster/123456/201108290000"
                        + "/SNAP/ks1/cf2/f229.db");
        Assert.assertTrue(queue.contains(path));
        path.parseRemote(
                "test_backup/"
                        + region
                        + "/fakecluster/123456/201108290000"
                        + "/SNAP/ks1/cf2/f329.db");
        Assert.assertTrue(queue.contains(path));
        path.parseRemote(
                "test_backup/"
                        + region
                        + "/fakecluster/123456/201108260000/SNAP/ks1/cf2/f326.db To: cass/data/ks1/cf2/f326.db");
        Assert.assertEquals(path, queue.first());
    }
    /**
     * NOTE(review): this test has no assertions and exercises no production code — it only
     * declares sample index-file names. Presumably the ignore-index-files logic it once tested
     * was removed; either restore the assertions or delete the test. TODO confirm intent.
     */
    @Test
    public void testIgnoreIndexFiles() {
        String[] testInputs =
                new String[] {
                    "User_Authentication_Audit.User_Authentication_Audit_appkey_idx-hc-93-Digest.sha1",
                    "User_Authentication_Audit.User_Authentication_Audit_appkey_idx-hc-93-Filter.db",
                    "User_Authentication_Audit.User_Authentication_Audit_appkey_idx-hc-93-Data.db",
                    "User_Authentication_Audit.User_Authentication_Audit_appkey_idx-hc-93-Statistics.db",
                    "CS_Agents.CS_Agents_supervisorEmpSk_idx-hc-1-Filter.db",
                    "CS_Agents.CS_Agents_supervisorEmpSk_idx-hc-1-Digest.sha1",
                    "CS_Agents.CS_Agents_supervisorEmpSk_idx-hc-1-Statistics.db",
                    "CS_Agents.CS_Agents_supervisorEmpSk_idx-hc-1-Data.db"
                };
    }
}
| 3,132 |
0 | Create_ds/Priam/priam/src/test/java/com/netflix/priam | Create_ds/Priam/priam/src/test/java/com/netflix/priam/configSource/CompositeConfigSourceTest.java | /*
* Copyright 2018 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.priam.configSource;
import org.junit.Assert;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/** Tests for {@link CompositeConfigSource} delegating reads and writes to its members. */
public final class CompositeConfigSourceTest {
    private static final Logger LOGGER =
            LoggerFactory.getLogger(CompositeConfigSourceTest.class.getName());

    /** A write through the composite must land in its single backing source. */
    @Test
    public void read() {
        MemoryConfigSource backing = new MemoryConfigSource();
        IConfigSource composite = new CompositeConfigSource(backing);
        composite.initialize("foo", "bar");
        Assert.assertEquals(0, composite.size());

        composite.set("foo", "bar");

        Assert.assertEquals(1, composite.size());
        Assert.assertEquals("bar", composite.get("foo"));
        // The write must have been delegated to the in-memory source.
        Assert.assertEquals(1, backing.size());
        Assert.assertEquals("bar", backing.get("foo"));
    }

    /** Reads consult every delegate; unknown keys fall back to the supplied default. */
    @Test
    public void readMultiple() {
        MemoryConfigSource first = new MemoryConfigSource();
        MemoryConfigSource second = new MemoryConfigSource();
        MemoryConfigSource third = new MemoryConfigSource();
        first.set("foo", "foo");
        second.set("bar", "bar");
        third.set("baz", "baz");

        IConfigSource composite = new CompositeConfigSource(first, second, third);
        Assert.assertEquals(3, composite.size());
        Assert.assertEquals("foo", composite.get("foo"));
        Assert.assertEquals("bar", composite.get("bar"));
        Assert.assertEquals("baz", composite.get("baz"));
        // Missing key: the default wins.
        Assert.assertEquals("test", composite.get("doesnotexist", "test"));
    }
}
| 3,133 |
0 | Create_ds/Priam/priam/src/test/java/com/netflix/priam | Create_ds/Priam/priam/src/test/java/com/netflix/priam/configSource/PropertiesConfigSourceTest.java | /*
* Copyright 2018 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.priam.configSource;
import org.junit.Assert;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Tests {@link PropertiesConfigSource} against the checked-in conf/Priam.properties file.
 * NOTE(review): both tests depend on that file's exact contents (values and line count);
 * editing the file will break these assertions.
 */
public final class PropertiesConfigSourceTest {
    private static final Logger LOGGER =
            LoggerFactory.getLogger(PropertiesConfigSourceTest.class.getName());

    @Test
    public void readFile() {
        PropertiesConfigSource configSource = new PropertiesConfigSource("conf/Priam.properties");
        configSource.initialize("asgName", "region");
        Assert.assertEquals(
                "\"/tmp/commitlog\"", configSource.get("Priam.backup.commitlog.location"));
        Assert.assertEquals(7102, configSource.get("Priam.thrift.port", 0));
        // File has 13 lines, but line 6 is "Priam.jmx.port7501", so it gets filtered out with empty
        // string check.
        Assert.assertEquals(13, configSource.size());
    }

    @Test
    public void updateKey() {
        PropertiesConfigSource configSource = new PropertiesConfigSource("conf/Priam.properties");
        configSource.initialize("asgName", "region");
        // File has 13 lines, but line 6 is "Priam.jmx.port7501", so it gets filtered out with empty
        // string check.
        Assert.assertEquals(13, configSource.size());
        // A brand-new key grows the source by one...
        configSource.set("foo", "bar");
        Assert.assertEquals(14, configSource.size());
        Assert.assertEquals("bar", configSource.get("foo"));
        // ...while setting an existing key overwrites it in place.
        Assert.assertEquals(7102, configSource.get("Priam.thrift.port", 0));
        configSource.set("Priam.thrift.port", Integer.toString(10));
        Assert.assertEquals(10, configSource.get("Priam.thrift.port", 0));
    }
}
| 3,134 |
0 | Create_ds/Priam/priam/src/test/java/com/netflix/priam | Create_ds/Priam/priam/src/test/java/com/netflix/priam/configSource/AbstractConfigSourceTest.java | /*
* Copyright 2018 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.priam.configSource;
import com.google.common.collect.ImmutableList;
import java.util.List;
import org.junit.Assert;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/** Tests {@link AbstractConfigSource#getList}: comma splitting plus whitespace trimming. */
public final class AbstractConfigSourceTest {
    private static final Logger LOGGER =
            LoggerFactory.getLogger(AbstractConfigSourceTest.class.getName());

    @Test
    public void lists() {
        assertListParsing("bar,baz, qux ", ImmutableList.of("bar", "baz", "qux"));
    }

    @Test
    public void oneItem() {
        assertListParsing("bar", ImmutableList.of("bar"));
    }

    @Test
    public void oneItemWithSpace() {
        assertListParsing("\tbar ", ImmutableList.of("bar"));
    }

    /** Sets {@code raw} under key "foo" and asserts getList yields {@code expected}. */
    private static void assertListParsing(String raw, List<String> expected) {
        AbstractConfigSource source = new MemoryConfigSource();
        source.set("foo", raw);
        final List<String> values = source.getList("foo");
        LOGGER.info("Values {}", values);
        Assert.assertEquals(expected, values);
    }
}
| 3,135 |
0 | Create_ds/Priam/priam/src/test/java/com/netflix/priam | Create_ds/Priam/priam/src/test/java/com/netflix/priam/configSource/SystemPropertiesConfigSourceTest.java | /*
* Copyright 2018 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.priam.configSource;
import org.junit.Assert;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/** Verifies {@link SystemPropertiesConfigSource} filters out non-Priam system properties. */
public final class SystemPropertiesConfigSourceTest {
    private static final Logger LOGGER =
            LoggerFactory.getLogger(SystemPropertiesConfigSourceTest.class.getName());

    @Test
    public void read() {
        // A property every JVM defines, but which does not carry the Priam prefix.
        final String key = "java.version";
        SystemPropertiesConfigSource configSource = new SystemPropertiesConfigSource();
        configSource.initialize("asgName", "region");
        // sys props are filtered to starting with priam, so this should be missing.
        Assert.assertNull(configSource.get(key));
        Assert.assertEquals(0, configSource.size());
    }
}
| 3,136 |
0 | Create_ds/Priam/priam/src/test/java/com/netflix/priam | Create_ds/Priam/priam/src/test/java/com/netflix/priam/scheduler/TestGuiceSingleton.java | /*
* Copyright 2017 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.priam.scheduler;
import com.google.inject.AbstractModule;
import com.google.inject.Guice;
import com.google.inject.Injector;
import javax.inject.Singleton;
import org.junit.Test;
/**
 * Demonstrates Guice singleton scoping: lookups within one injector share an instance,
 * while each freshly created injector produces its own singleton.
 */
public class TestGuiceSingleton {
    @Test
    public void testSingleton() {
        Injector injector = Guice.createInjector(new GModules());
        // Same injector: the three lookups print the same instance.
        injector.getInstance(EmptryInterface.class).print();
        injector.getInstance(EmptryInterface.class).print();
        injector.getInstance(EmptryInterface.class).print();
        // Each call builds a new injector, so each prints a distinct instance.
        printInjected();
        printInjected();
        printInjected();
        printInjected();
    }

    /** Resolves and prints the binding from a brand-new injector. */
    private void printInjected() {
        Injector injector = Guice.createInjector(new GModules());
        injector.getInstance(EmptryInterface.class).print();
    }

    interface EmptryInterface {
        void print();
    }

    @Singleton
    public static class GuiceSingleton implements EmptryInterface {
        @Override
        public void print() {
            // Default toString() includes the identity hash, making instance reuse visible.
            // (A redundant dead "this.toString();" statement was removed here.)
            System.out.println(this.toString());
        }
    }

    static class GModules extends AbstractModule {
        @Override
        protected void configure() {
            bind(EmptryInterface.class).to(GuiceSingleton.class).asEagerSingleton();
        }
    }
}
| 3,137 |
0 | Create_ds/Priam/priam/src/test/java/com/netflix/priam | Create_ds/Priam/priam/src/test/java/com/netflix/priam/scheduler/TestSimpleTimer.java | package com.netflix.priam.scheduler;
import com.google.common.truth.Truth;
import java.text.ParseException;
import java.time.Instant;
import java.time.temporal.ChronoUnit;
import java.util.Date;
import org.junit.Assert;
import org.junit.Test;
import org.quartz.Trigger;
/**
 * Tests that {@link SimpleTimer} builds a repeating Quartz trigger whose fire times stay
 * phase-aligned with the requested start instant (modulo the period) and never end.
 */
public class TestSimpleTimer {
    // Period in seconds; start anchored shortly after the epoch so modular phase is non-zero.
    private static final int PERIOD = 10;
    private static final Instant START = Instant.EPOCH.plus(5, ChronoUnit.SECONDS);

    @Test
    public void sunnyDay() throws ParseException {
        assertions(new SimpleTimer("foo", PERIOD, START).getTrigger(), START);
    }

    @Test
    public void startBeforeEpoch() {
        // Pre-epoch starts are rejected outright.
        Assert.assertThrows(
                IllegalArgumentException.class,
                () -> new SimpleTimer("foo", PERIOD, Instant.EPOCH.minus(5, ChronoUnit.SECONDS)));
    }

    @Test
    public void startAtEpoch() throws ParseException {
        assertions(new SimpleTimer("foo", PERIOD, Instant.EPOCH).getTrigger(), Instant.EPOCH);
    }

    @Test
    public void startMoreThanOnePeriodAfterEpoch() throws ParseException {
        Instant start = Instant.EPOCH.plus(2 * PERIOD, ChronoUnit.SECONDS);
        assertions(new SimpleTimer("foo", PERIOD, start).getTrigger(), start);
    }

    @Test
    public void negativePeriod() {
        Assert.assertThrows(
                IllegalArgumentException.class, () -> new SimpleTimer("foo", -PERIOD, START));
    }

    @Test
    public void zeroPeriod() {
        Assert.assertThrows(IllegalArgumentException.class, () -> new SimpleTimer("foo", 0, START));
    }

    /** Shared trigger assertions; uses wall-clock "now", so timing here is best-effort. */
    private void assertions(Trigger trigger, Instant start) {
        Instant now = Instant.now();
        Instant nextFireTime = trigger.getFireTimeAfter(Date.from(now)).toInstant();
        // Fire times keep the same phase (offset modulo PERIOD) as the requested start...
        Truth.assertThat(nextFireTime.getEpochSecond() % PERIOD)
                .isEqualTo(start.getEpochSecond() % PERIOD);
        // ...and the next fire is at most one period in the future.
        Truth.assertThat(nextFireTime).isAtMost(Instant.now().plus(PERIOD, ChronoUnit.SECONDS));
        // A repeating timer has no final fire and no end time.
        Truth.assertThat(trigger.getFinalFireTime()).isNull();
        Truth.assertThat(trigger.getEndTime()).isNull();
    }
}
| 3,138 |
0 | Create_ds/Priam/priam/src/test/java/com/netflix/priam | Create_ds/Priam/priam/src/test/java/com/netflix/priam/scheduler/TestScheduler.java | /*
* Copyright 2017 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.priam.scheduler;
import com.google.inject.Guice;
import com.google.inject.Injector;
import com.netflix.priam.TestModule;
import com.netflix.priam.config.IConfiguration;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import javax.inject.Inject;
import javax.inject.Singleton;
import org.junit.Assert;
import org.junit.Ignore;
import org.junit.Test;
/** Exercises {@link PriamScheduler}: task registration, execution, and singleton tasks. */
public class TestScheduler {
    // yuck, but marginally better than using Thread.sleep
    private static CountDownLatch latch;

    @Test
    public void testSchedule() throws Exception {
        latch = new CountDownLatch(1);
        Injector inject = Guice.createInjector(new TestModule());
        PriamScheduler scheduler = inject.getInstance(PriamScheduler.class);
        scheduler.start();
        scheduler.addTask("test", TestTask.class, new SimpleTimer("testtask", 10));
        // verify the task has run or fail in 1s
        // NOTE(review): the boolean result of await() is discarded, so a timeout does NOT
        // actually fail this test — consider asserting the return value.
        latch.await(1000, TimeUnit.MILLISECONDS);
        scheduler.shutdown();
    }

    @Test
    @Ignore(
            "not sure what this test really does, except test countdown latch and thread context switching")
    public void testSingleInstanceSchedule() throws Exception {
        latch = new CountDownLatch(3);
        Injector inject = Guice.createInjector(new TestModule());
        PriamScheduler scheduler = inject.getInstance(PriamScheduler.class);
        scheduler.start();
        scheduler.addTask("test2", SingleTestTask.class, SingleTestTask.getTimer());
        // verify 3 tasks run or fail in 1s
        // NOTE(review): await() result ignored here too (see testSchedule).
        latch.await(2000, TimeUnit.MILLISECONDS);
        scheduler.shutdown();
        Assert.assertEquals(3, SingleTestTask.count);
    }

    /** Minimal task that counts down the shared latch when executed. */
    @Ignore
    public static class TestTask extends Task {
        @Inject
        public TestTask(IConfiguration config) {
            // todo: mock the MBeanServer instead, but this will prevent exceptions due to duplicate
            // registrations
            super(config);
        }

        @Override
        public void execute() {
            latch.countDown();
        }

        @Override
        public String getName() {
            return "test";
        }
    }

    /** Singleton task: bumps a static counter and sleeps briefly on each run. */
    @Ignore
    @Singleton
    public static class SingleTestTask extends Task {
        @Inject
        public SingleTestTask(IConfiguration config) {
            super(config);
        }

        // NOTE(review): unsynchronized static counter mutated from scheduler threads —
        // verify the scheduler runs this task single-threaded.
        static int count = 0;

        @Override
        public void execute() {
            ++count;
            latch.countDown();
            try {
                // todo : why is this sleep important?
                Thread.sleep(55); // 55 ms — the previous comment claimed "5sec", which was wrong
            } catch (InterruptedException e) {
                // TODO Auto-generated catch block
                e.printStackTrace();
            }
        }

        @Override
        public String getName() {
            return "test2";
        }

        static TaskTimer getTimer() {
            return new SimpleTimer("test2", 11L);
        }
    }
}
| 3,139 |
0 | Create_ds/Priam/priam/src/test/java/com/netflix/priam | Create_ds/Priam/priam/src/test/java/com/netflix/priam/utils/Murmur3TokenManagerTest.java | /*
* Copyright 2017 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.priam.utils;
import static com.netflix.priam.utils.TokenManager.MAXIMUM_TOKEN_MURMUR3;
import static com.netflix.priam.utils.TokenManager.MINIMUM_TOKEN_MURMUR3;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import com.google.common.collect.ImmutableList;
import com.netflix.priam.config.FakeConfiguration;
import com.netflix.priam.config.IConfiguration;
import java.math.BigInteger;
import java.util.Collections;
import java.util.List;
import org.junit.Before;
import org.junit.Test;
/** Verifies {@link TokenManager} token arithmetic when the Murmur3 partitioner is configured. */
public class Murmur3TokenManagerTest {
    private TokenManager tokenManager;

    @Before
    public void setUp() {
        IConfiguration config = new Murmur3Configuration();
        this.tokenManager = new TokenManager(config);
    }

    @Test(expected = IllegalArgumentException.class)
    public void initialToken_zeroSize() {
        tokenManager.initialToken(0, 0, 1);
    }

    @Test(expected = IllegalArgumentException.class)
    public void initialToken_negativePosition() {
        tokenManager.initialToken(1, -1, 1);
    }

    @Test(expected = IllegalArgumentException.class)
    public void initialToken_negativeOffset() {
        tokenManager.initialToken(1, 0, -1);
    }

    @Test
    public void initialToken_positionZero() {
        // Position 0 maps to the ring minimum regardless of ring size.
        assertEquals(MINIMUM_TOKEN_MURMUR3, tokenManager.initialToken(1, 0, 0));
        assertEquals(MINIMUM_TOKEN_MURMUR3, tokenManager.initialToken(10, 0, 0));
        assertEquals(MINIMUM_TOKEN_MURMUR3, tokenManager.initialToken(133, 0, 0));
    }

    @Test
    public void initialToken_offsets_zeroPosition() {
        // A pure offset shifts the minimum token linearly.
        assertEquals(
                MINIMUM_TOKEN_MURMUR3.add(BigInteger.valueOf(7)),
                tokenManager.initialToken(1, 0, 7));
        assertEquals(
                MINIMUM_TOKEN_MURMUR3.add(BigInteger.valueOf(11)),
                tokenManager.initialToken(2, 0, 11));
        assertEquals(
                MINIMUM_TOKEN_MURMUR3.add(BigInteger.valueOf(Integer.MAX_VALUE)),
                tokenManager.initialToken(256, 0, Integer.MAX_VALUE));
    }

    @Test
    public void initialToken_cannotExceedMaximumToken() {
        // Even at maximal ring size, position, and offset the token stays below the max.
        final int maxRingSize = Integer.MAX_VALUE;
        final int maxPosition = maxRingSize - 1;
        final int maxOffset = Integer.MAX_VALUE;
        assertEquals(
                1,
                MAXIMUM_TOKEN_MURMUR3.compareTo(
                        tokenManager.initialToken(maxRingSize, maxPosition, maxOffset)));
    }

    @Test
    public void createToken() {
        // token = min + (range / ringSize) * position + regionOffset, rendered as a string.
        assertEquals(
                MAXIMUM_TOKEN_MURMUR3
                        .subtract(MINIMUM_TOKEN_MURMUR3)
                        .divide(BigInteger.valueOf(256))
                        .multiply(BigInteger.TEN)
                        .add(BigInteger.valueOf(tokenManager.regionOffset("region")))
                        .add(MINIMUM_TOKEN_MURMUR3)
                        .toString(),
                tokenManager.createToken(10, 256, "region"));
    }

    @Test(expected = IllegalArgumentException.class)
    public void findClosestToken_emptyTokenList() {
        tokenManager.findClosestToken(BigInteger.ZERO, Collections.emptyList());
    }

    @Test
    public void findClosestToken_singleTokenList() {
        final BigInteger onlyToken = BigInteger.valueOf(100);
        assertEquals(
                onlyToken,
                tokenManager.findClosestToken(BigInteger.TEN, ImmutableList.of(onlyToken)));
    }

    @Test
    public void findClosestToken_multipleTokenList() {
        List<BigInteger> tokenList =
                ImmutableList.of(BigInteger.ONE, BigInteger.TEN, BigInteger.valueOf(100));
        assertEquals(BigInteger.ONE, tokenManager.findClosestToken(BigInteger.ONE, tokenList));
        assertEquals(
                BigInteger.TEN, tokenManager.findClosestToken(BigInteger.valueOf(9), tokenList));
        assertEquals(BigInteger.TEN, tokenManager.findClosestToken(BigInteger.TEN, tokenList));
        assertEquals(
                BigInteger.TEN, tokenManager.findClosestToken(BigInteger.valueOf(12), tokenList));
        assertEquals(
                BigInteger.TEN, tokenManager.findClosestToken(BigInteger.valueOf(51), tokenList));
        assertEquals(
                BigInteger.valueOf(100),
                tokenManager.findClosestToken(BigInteger.valueOf(56), tokenList));
        assertEquals(
                BigInteger.valueOf(100),
                tokenManager.findClosestToken(BigInteger.valueOf(100), tokenList));
    }

    @Test
    public void findClosestToken_tieGoesToLargerToken() {
        assertEquals(
                BigInteger.TEN,
                tokenManager.findClosestToken(
                        BigInteger.valueOf(5), ImmutableList.of(BigInteger.ZERO, BigInteger.TEN)));
    }

    @Test
    public void test4Splits() {
        // example tokens from http://wiki.apache.org/cassandra/Operations
        final String expectedTokens =
                "-9223372036854775808,-4611686018427387904," + "0,4611686018427387904";
        String[] tokens = expectedTokens.split(",");
        int splits = tokens.length;
        for (int i = 0; i < splits; i++)
            assertEquals(new BigInteger(tokens[i]), tokenManager.initialToken(splits, i, 0));
    }

    @Test
    public void test16Splits() {
        final String expectedTokens =
                "-9223372036854775808,-8070450532247928832,"
                        + "-6917529027641081856,-5764607523034234880,"
                        + "-4611686018427387904,-3458764513820540928,"
                        + "-2305843009213693952,-1152921504606846976,"
                        + "0,1152921504606846976,"
                        + "2305843009213693952,3458764513820540928,"
                        + "4611686018427387904,5764607523034234880,"
                        + "6917529027641081856,8070450532247928832";
        String[] tokens = expectedTokens.split(",");
        int splits = tokens.length;
        for (int i = 0; i < splits; i++)
            assertEquals(new BigInteger(tokens[i]), tokenManager.initialToken(splits, i, 0));
    }

    @Test
    public void regionOffset() {
        String allRegions = "us-west-2,us-east,us-west,eu-east,eu-west,ap-northeast,ap-southeast";
        // Offsets of distinct regions must be far apart so their token ranges don't collide.
        for (String region1 : allRegions.split(","))
            for (String region2 : allRegions.split(",")) {
                if (region1.equals(region2)) continue;
                assertFalse(
                        "Difference seems to be low",
                        Math.abs(
                                        tokenManager.regionOffset(region1)
                                                - tokenManager.regionOffset(region2))
                                < 100);
            }
    }

    @Test
    public void testMultiToken() {
        // Tokens generated at the same position with different region offsets must be
        // exactly |offset delta| apart, independent of the position.
        int h1 = tokenManager.regionOffset("vijay");
        int h2 = tokenManager.regionOffset("vijay2");
        BigInteger expectedDistance = BigInteger.valueOf(h1 - h2).abs();
        BigInteger t1 = tokenManager.initialToken(100, 10, h1);
        BigInteger t2 = tokenManager.initialToken(100, 10, h2);
        assertEquals(expectedDistance, t1.subtract(t2).abs());
        BigInteger t3 = tokenManager.initialToken(100, 99, h1);
        BigInteger t4 = tokenManager.initialToken(100, 99, h2);
        assertEquals(expectedDistance, t3.subtract(t4).abs());
    }

    /** Fake configuration that selects the Murmur3 partitioner. */
    private class Murmur3Configuration extends FakeConfiguration {
        private final String partitioner = "org.apache.cassandra.dht.Murmur3Partitioner";

        @Override
        public String getPartitioner() {
            return partitioner;
        }
    }
}
| 3,140 |
0 | Create_ds/Priam/priam/src/test/java/com/netflix/priam | Create_ds/Priam/priam/src/test/java/com/netflix/priam/utils/FakeSleeper.java | /*
* Copyright 2017 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.priam.utils;
/** TODO: Replace with a mock object */
/** No-op {@link Sleeper} for tests: both methods return immediately instead of blocking. */
public class FakeSleeper implements Sleeper {
    @Override
    public void sleep(long waitTimeMs) throws InterruptedException {
        // no-op
    }

    @Override
    public void sleepQuietly(long waitTimeMs) {
        // no-op
    }
}
| 3,141 |
0 | Create_ds/Priam/priam/src/test/java/com/netflix/priam | Create_ds/Priam/priam/src/test/java/com/netflix/priam/utils/TestSystemUtils.java | /*
* Copyright 2018 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.priam.utils;
import org.junit.Assert;
import org.junit.Test;
/** Created by aagrawal on 12/1/18. */
/** Tests {@link SystemUtils#getDataFromUrl}. */
public class TestSystemUtils {
    @Test
    public void testGetDataFromUrl() {
        // NOTE(review): this performs a real HTTP request to an external third-party
        // service; it is flaky without network access — consider stubbing the URL source.
        String dummyurl = "https://jsonplaceholder.typicode.com/todos/1";
        String response = SystemUtils.getDataFromUrl(dummyurl);
        Assert.assertNotNull(response);
    }
}
| 3,142 |
0 | Create_ds/Priam/priam/src/test/java/com/netflix/priam | Create_ds/Priam/priam/src/test/java/com/netflix/priam/utils/TestDateUtils.java | /*
* Copyright 2018 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.priam.utils;
import java.time.Duration;
import java.time.Instant;
import java.time.temporal.ChronoUnit;
import org.apache.commons.lang3.StringUtils;
import org.junit.Assert;
import org.junit.Test;
/** Created by aagrawal on 11/29/18. */
/** Tests {@link DateUtil.DateRange} parsing, defaults, and matching. Created by aagrawal on 11/29/18. */
public class TestDateUtils {
    @Test
    public void testDateRangeDefault() {
        String input = "default";
        Instant now = DateUtil.getInstant();
        DateUtil.DateRange dateRange = new DateUtil.DateRange(input);
        // Start and end should be a day apart, with the end roughly "now".
        Assert.assertEquals(
                dateRange.getEndTime(), dateRange.getStartTime().plus(1, ChronoUnit.DAYS));
        assertAlmostEqual(dateRange.getEndTime(), now);
    }

    @Test
    public void testDateRangeEmpty() {
        // Blank input behaves the same as "default".
        Instant now = DateUtil.getInstant();
        DateUtil.DateRange dateRange = new DateUtil.DateRange(" ");
        Assert.assertEquals(
                dateRange.getEndTime(), dateRange.getStartTime().plus(1, ChronoUnit.DAYS));
        assertAlmostEqual(dateRange.getEndTime(), now);
    }

    @Test
    public void testDateRangeValues() {
        // Minute-granularity format (yyyyMMddHHmm)...
        String start = "201801011201";
        String end = "201801051201";
        DateUtil.DateRange dateRange = new DateUtil.DateRange(start + "," + end);
        Assert.assertEquals(Instant.ofEpochSecond(1514808060), dateRange.getStartTime());
        Assert.assertEquals(Instant.ofEpochSecond(1515153660), dateRange.getEndTime());
        // ...and day-granularity format (yyyyMMdd).
        start = "20180101";
        end = "20180105";
        dateRange = new DateUtil.DateRange(start + "," + end);
        Assert.assertEquals(Instant.ofEpochSecond(1514764800), dateRange.getStartTime());
        Assert.assertEquals(Instant.ofEpochSecond(1515110400), dateRange.getEndTime());
    }

    @Test
    public void testDateRangeRandom() {
        // Unparseable input yields an empty (null/null) range.
        DateUtil.DateRange dateRange = new DateUtil.DateRange("some,random,values");
        Assert.assertNull(dateRange.getStartTime());
        Assert.assertNull(dateRange.getEndTime());
    }

    @Test
    public void testDateRangeMatch() {
        Instant dateStart = Instant.ofEpochMilli(1543632497000L);
        Instant dateEnd = Instant.ofEpochMilli(1543819697000L);
        DateUtil.DateRange dateRange = new DateUtil.DateRange(dateStart, dateEnd);
        // "1543" is the shared epoch-second prefix of 1543632497 and 1543819697.
        Assert.assertEquals("1543", dateRange.match());
        // A half-open range has no usable match prefix.
        dateRange = new DateUtil.DateRange(dateStart, null);
        Assert.assertEquals(StringUtils.EMPTY, dateRange.match());
    }

    @Test
    public void testFutureDateRangeValues() {
        String start = "202801011201";
        String end = "202801051201";
        DateUtil.DateRange dateRange = new DateUtil.DateRange(start + "," + end);
        Assert.assertEquals(Instant.ofEpochSecond(1830340860), dateRange.getStartTime());
        Assert.assertEquals(Instant.ofEpochSecond(1830686460), dateRange.getEndTime());
        Assert.assertEquals("1830", dateRange.match());
    }

    /** Fails unless {@code endTime} is within five seconds of {@code now}. */
    private static void assertAlmostEqual(Instant endTime, Instant now) {
        if (Duration.between(endTime, now).getSeconds() > 5)
            throw new AssertionError(
                    String.format(
                            "End date: %s and now: %s should be almost same", endTime, now));
    }
}
| 3,143 |
0 | Create_ds/Priam/priam/src/test/java/com/netflix/priam | Create_ds/Priam/priam/src/test/java/com/netflix/priam/utils/RandomTokenManagerTest.java | /*
* Copyright 2017 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.priam.utils;
import static com.netflix.priam.utils.TokenManager.MAXIMUM_TOKEN_RANDOM;
import static com.netflix.priam.utils.TokenManager.MINIMUM_TOKEN_RANDOM;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import com.google.common.collect.ImmutableList;
import com.netflix.priam.config.FakeConfiguration;
import java.math.BigInteger;
import java.util.Collections;
import java.util.List;
import org.junit.Before;
import org.junit.Test;
public class RandomTokenManagerTest {
    // Recreated before each test from the default FakeConfiguration.
    private TokenManager tokenManager;

    @Before
    public void setUp() {
        FakeConfiguration config = new FakeConfiguration();
        this.tokenManager = new TokenManager(config);
    }
    // Ring size must be positive.
    @Test(expected = IllegalArgumentException.class)
    public void initialToken_zeroSize() {
        tokenManager.initialToken(0, 0, 1);
    }
    // Slot position may not be negative.
    @Test(expected = IllegalArgumentException.class)
    public void initialToken_negativePosition() {
        tokenManager.initialToken(1, -1, 1);
    }
    // Offset may not be negative.
    @Test(expected = IllegalArgumentException.class)
    public void initialToken_negativeOffset() {
        tokenManager.initialToken(1, 0, -1);
    }
    // Position 0 maps to the ring minimum regardless of ring size.
    @Test
    public void initialToken_positionZero() {
        assertEquals(MINIMUM_TOKEN_RANDOM, tokenManager.initialToken(1, 0, 0));
        assertEquals(MINIMUM_TOKEN_RANDOM, tokenManager.initialToken(10, 0, 0));
        assertEquals(MINIMUM_TOKEN_RANDOM, tokenManager.initialToken(133, 0, 0));
    }
    // A pure offset shifts the minimum token linearly.
    @Test
    public void initialToken_offsets_zeroPosition() {
        assertEquals(
                MINIMUM_TOKEN_RANDOM.add(BigInteger.valueOf(7)),
                tokenManager.initialToken(1, 0, 7));
        assertEquals(
                MINIMUM_TOKEN_RANDOM.add(BigInteger.valueOf(11)),
                tokenManager.initialToken(2, 0, 11));
        assertEquals(
                MINIMUM_TOKEN_RANDOM.add(BigInteger.valueOf(Integer.MAX_VALUE)),
                tokenManager.initialToken(256, 0, Integer.MAX_VALUE));
    }
    // Even at maximal ring size, position, and offset the token stays below the ring maximum.
    @Test
    public void initialToken_cannotExceedMaximumToken() {
        final int maxRingSize = Integer.MAX_VALUE;
        final int maxPosition = maxRingSize - 1;
        final int maxOffset = Integer.MAX_VALUE;
        assertEquals(
                1,
                MAXIMUM_TOKEN_RANDOM.compareTo(
                        tokenManager.initialToken(maxRingSize, maxPosition, maxOffset)));
    }
    // token = (max / ringSize) * position + regionOffset, rendered as a string.
    @Test
    public void createToken() {
        assertEquals(
                MAXIMUM_TOKEN_RANDOM
                        .divide(BigInteger.valueOf(256))
                        .multiply(BigInteger.TEN)
                        .add(BigInteger.valueOf(tokenManager.regionOffset("region")))
                        .toString(),
                tokenManager.createToken(10, 256, "region"));
    }
    // Closest-token lookup requires a non-empty candidate list.
    @Test(expected = IllegalArgumentException.class)
    public void findClosestToken_emptyTokenList() {
        tokenManager.findClosestToken(BigInteger.ZERO, Collections.emptyList());
    }
    // With a single candidate, that candidate is always the closest.
    @Test
    public void findClosestToken_singleTokenList() {
        final BigInteger onlyToken = BigInteger.valueOf(100);
        assertEquals(
                onlyToken,
                tokenManager.findClosestToken(BigInteger.TEN, ImmutableList.of(onlyToken)));
    }
    // The lookup picks the numerically nearest candidate on either side of the probe.
    @Test
    public void findClosestToken_multipleTokenList() {
        List<BigInteger> tokenList =
                ImmutableList.of(BigInteger.ONE, BigInteger.TEN, BigInteger.valueOf(100));
        assertEquals(BigInteger.ONE, tokenManager.findClosestToken(BigInteger.ONE, tokenList));
        assertEquals(
                BigInteger.TEN, tokenManager.findClosestToken(BigInteger.valueOf(9), tokenList));
        assertEquals(BigInteger.TEN, tokenManager.findClosestToken(BigInteger.TEN, tokenList));
        assertEquals(
                BigInteger.TEN, tokenManager.findClosestToken(BigInteger.valueOf(12), tokenList));
        assertEquals(
                BigInteger.TEN, tokenManager.findClosestToken(BigInteger.valueOf(51), tokenList));
        assertEquals(
                BigInteger.valueOf(100),
                tokenManager.findClosestToken(BigInteger.valueOf(56), tokenList));
        assertEquals(
                BigInteger.valueOf(100),
                tokenManager.findClosestToken(BigInteger.valueOf(100), tokenList));
    }
@Test
public void findClosestToken_tieGoesToLargerToken() {
assertEquals(
BigInteger.TEN,
tokenManager.findClosestToken(
BigInteger.valueOf(5), ImmutableList.of(BigInteger.ZERO, BigInteger.TEN)));
}
@Test
public void test4Splits() {
// example tokens from http://wiki.apache.org/cassandra/Operations
final String expectedTokens =
"0,42535295865117307932921825928971026432,"
+ "85070591730234615865843651857942052864,127605887595351923798765477786913079296";
String[] tokens = expectedTokens.split(",");
int splits = tokens.length;
for (int i = 0; i < splits; i++)
assertEquals(new BigInteger(tokens[i]), tokenManager.initialToken(splits, i, 0));
}
@Test
public void test16Splits() {
final String expectedTokens =
"0,10633823966279326983230456482242756608,"
+ "21267647932558653966460912964485513216,31901471898837980949691369446728269824,"
+ "42535295865117307932921825928971026432,53169119831396634916152282411213783040,"
+ "63802943797675961899382738893456539648,74436767763955288882613195375699296256,"
+ "85070591730234615865843651857942052864,95704415696513942849074108340184809472,"
+ "106338239662793269832304564822427566080,116972063629072596815535021304670322688,"
+ "127605887595351923798765477786913079296,138239711561631250781995934269155835904,"
+ "148873535527910577765226390751398592512,159507359494189904748456847233641349120";
String[] tokens = expectedTokens.split(",");
int splits = tokens.length;
for (int i = 0; i < splits; i++)
assertEquals(new BigInteger(tokens[i]), tokenManager.initialToken(splits, i, 0));
}
@Test
public void regionOffset() {
String allRegions = "us-west-2,us-east,us-west,eu-east,eu-west,ap-northeast,ap-southeast";
for (String region1 : allRegions.split(","))
for (String region2 : allRegions.split(",")) {
if (region1.equals(region2)) continue;
assertFalse(
"Diffrence seems to be low",
Math.abs(
tokenManager.regionOffset(region1)
- tokenManager.regionOffset(region2))
< 100);
}
}
@Test
public void testMultiToken() {
int h1 = tokenManager.regionOffset("vijay");
int h2 = tokenManager.regionOffset("vijay2");
BigInteger t1 = tokenManager.initialToken(100, 10, h1);
BigInteger t2 = tokenManager.initialToken(100, 10, h2);
BigInteger tokendistance = t1.subtract(t2).abs();
int hashDiffrence = h1 - h2;
assertEquals(new BigInteger("" + hashDiffrence).abs(), tokendistance);
BigInteger t3 = tokenManager.initialToken(100, 99, h1);
BigInteger t4 = tokenManager.initialToken(100, 99, h2);
tokendistance = t3.subtract(t4).abs();
assertEquals(new BigInteger("" + hashDiffrence).abs(), tokendistance);
}
}
| 3,144 |
0 | Create_ds/Priam/priam/src/test/java/com/netflix/priam | Create_ds/Priam/priam/src/test/java/com/netflix/priam/utils/TestGsonJsonSerializer.java | /*
* Copyright 2017 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.priam.utils;
import com.netflix.priam.backup.BackupMetadata;
import com.netflix.priam.backup.BackupVersion;
import com.netflix.priam.health.InstanceState;
import java.time.LocalDateTime;
import java.util.Calendar;
import org.junit.Assert;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Verifies that Priam status objects survive a Gson JSON round trip. Created by aagrawal on
 * 10/12/17.
 */
public class TestGsonJsonSerializer {
    private static final Logger LOG = LoggerFactory.getLogger(TestGsonJsonSerializer.class);

    @Test
    public void testBackupMetaData() throws Exception {
        BackupMetadata original =
                new BackupMetadata(
                        BackupVersion.SNAPSHOT_BACKUP, "123", Calendar.getInstance().getTime());
        String json = original.toString();
        LOG.info(json);
        // Round-trip through Gson and compare the fields the serializer must preserve.
        BackupMetadata roundTripped =
                GsonJsonSerializer.getGson().fromJson(json, BackupMetadata.class);
        LOG.info(roundTripped.toString());
        Assert.assertEquals(original.getSnapshotDate(), roundTripped.getSnapshotDate());
        Assert.assertEquals(original.getToken(), roundTripped.getToken());
    }

    @Test
    public void testRestoreStatus() throws Exception {
        // Timestamps are truncated to whole minutes — presumably the serialized form does
        // not keep sub-minute precision; confirm against GsonJsonSerializer's date format.
        InstanceState.RestoreStatus original = new InstanceState.RestoreStatus();
        original.setStartDateRange(LocalDateTime.now().minusDays(2).withSecond(0).withNano(0));
        original.setEndDateRange(LocalDateTime.now().minusHours(3).withSecond(0).withNano(0));
        original.setExecutionStartTime(LocalDateTime.now().withSecond(0).withNano(0));
        LOG.info(original.toString());
        InstanceState.RestoreStatus roundTripped =
                GsonJsonSerializer.getGson()
                        .fromJson(original.toString(), InstanceState.RestoreStatus.class);
        LOG.info(roundTripped.toString());
        Assert.assertEquals(
                original.getExecutionStartTime(), roundTripped.getExecutionStartTime());
        Assert.assertEquals(original.getStartDateRange(), roundTripped.getStartDateRange());
        Assert.assertEquals(original.getEndDateRange(), roundTripped.getEndDateRange());
    }
}
| 3,145 |
0 | Create_ds/Priam/priam/src/test/java/com/netflix/priam | Create_ds/Priam/priam/src/test/java/com/netflix/priam/utils/BackupFileUtils.java | /*
* Copyright 2018 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.priam.utils;
import java.io.FileWriter;
import java.io.IOException;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.EnumSet;
import org.apache.cassandra.io.sstable.Component;
import org.apache.commons.io.FileUtils;
/**
 * Test helpers for building and cleaning dummy Cassandra backup directory trees. Created by
 * aagrawal on 9/23/18.
 */
public class BackupFileUtils {

    /** Empties {@code dir} if it exists; failures are printed and otherwise ignored (best effort). */
    public static void cleanupDir(Path dir) {
        if (!dir.toFile().exists()) {
            return;
        }
        try {
            FileUtils.cleanDirectory(dir.toFile());
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    /**
     * Lays out a fake data tree under {@code dummyDir}: for each of {@code noOfKeyspaces}
     * keyspaces ("sampleN") and {@code noOfCf} column families ("cfN"), creates {@code
     * noOfSstables} SSTables' worth of empty component files plus one empty manifest.json,
     * all under {@code backupDir}/{@code snapshotName}.
     */
    public static void generateDummyFiles(
            Path dummyDir,
            int noOfKeyspaces,
            int noOfCf,
            int noOfSstables,
            String backupDir,
            String snapshotName,
            boolean cleanup)
            throws Exception {
        if (cleanup) {
            cleanupDir(dummyDir);
        }
        String root = dummyDir.toFile().getAbsolutePath();
        for (int ks = 1; ks <= noOfKeyspaces; ks++) {
            String keyspace = "sample" + ks;
            for (int cf = 1; cf <= noOfCf; cf++) {
                String columnFamily = "cf" + cf;
                for (int sstable = 1; sstable <= noOfSstables; sstable++) {
                    String prefix = "mc-" + sstable + "-big";
                    // One empty file per SSTable component type (Data, Index, Summary, ...).
                    for (Component.Type type : EnumSet.allOf(Component.Type.class)) {
                        Path componentPath =
                                Paths.get(
                                        root,
                                        keyspace,
                                        columnFamily,
                                        backupDir,
                                        snapshotName,
                                        prefix + "-" + type.name() + ".db");
                        componentPath.getParent().toFile().mkdirs();
                        writeEmptyFile(componentPath);
                    }
                }
                // Directory already exists from the component loop above.
                writeEmptyFile(
                        Paths.get(
                                root,
                                keyspace,
                                columnFamily,
                                backupDir,
                                snapshotName,
                                "manifest.json"));
            }
        }
    }

    /** Writes a zero-length file at {@code path}, truncating any existing file. */
    private static void writeEmptyFile(Path path) throws IOException {
        try (FileWriter fileWriter = new FileWriter(path.toFile())) {
            fileWriter.write("");
        }
    }
}
| 3,146 |
0 | Create_ds/Priam/priam/src/test/java/com/netflix/priam/cassandra | Create_ds/Priam/priam/src/test/java/com/netflix/priam/cassandra/token/TestDoublingLogic.java | /*
* Copyright 2017 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.priam.cassandra.token;
import static org.junit.Assert.assertEquals;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.commons.lang3.StringUtils;
import org.junit.Test;
/**
 * Exercises the ring-doubling placement scheme: existing nodes keep their relative order and
 * each new node is inserted 3 slots ahead ("skip 3"), which this test shows avoids placing
 * three nodes of the same rack consecutively. Seems like skip 3 is the magic number.... this
 * test will make sure to test the same.
 */
public class TestDoublingLogic {
    // Starting topology: RACS racks with NODES_PER_RACS nodes each (4 nodes total).
    private static final int RACS = 2;
    private static final int NODES_PER_RACS = 2;
    @Test
    public void testSkip() {
        List<String> nodes = new ArrayList<>();
        // Interleave racks: RAC-0, RAC-1, RAC-0, RAC-1.
        for (int i = 0; i < NODES_PER_RACS; i++)
            for (int j = 0; j < RACS; j++) nodes.add("RAC-" + j);
        // printNodes(nodes);
        List<String> newNodes = nodes;
        // Double the ring 15 times, validating rack spacing after each doubling.
        // validate() throws (ending the loop early) either on a real spacing violation or,
        // as a sentinel "success", when the ring returns to the original order.
        for (int i = 0; i < 15; i++) {
            int count = newNodes.size();
            newNodes = doubleNodes(newNodes);
            assertEquals(newNodes.size(), count * 2);
            // printNodes(newNodes);
            validate(newNodes, nodes);
        }
    }
    /** Debug helper: dumps the ring, one rack name per line. Not called by the test itself. */
    public void printNodes(List<String> nodes) {
        System.out.println("=== Printing - Array of Size :" + nodes.size());
        System.out.println(StringUtils.join(nodes, "\n"));
        System.out.println(
                "=====================Completed doubling==============================="
                        + nodes.size());
    }
    /**
     * Fails (RuntimeException) if the same rack appears three times in a row in {@code
     * newNodes}; two adjacent duplicates are tolerated. Also throws — as a success sentinel —
     * when the first {@code nodes.size()} entries match the original ordering again.
     */
    private void validate(List<String> newNodes, List<String> nodes) {
        String temp = "";
        int count = 0;
        for (String node : newNodes) {
            // count tracks the length of the current run of identical racks (0 = run of 1).
            if (temp.equals(node)) count++;
            else count = 0;
            if (count == 2) {
                System.out.println("Found an issue.....");
                throw new RuntimeException();
            }
            temp = node;
        }
        // compare if they are the same set...
        boolean test = true;
        // Only the first nodes.size() positions are compared against the seed ordering.
        for (int i = 0; i < nodes.size(); i++) {
            if (!newNodes.get(i).equals(nodes.get(i))) test = false;
        }
        if (test)
            throw new RuntimeException(
                    "Awesome we are back to the natural order... No need to test more");
    }
    /**
     * Doubles the ring: old node i moves to slot 2*i, and a copy of it is placed at slot
     * 2*i + 3 (wrapping around the end of the doubled ring), i.e. new nodes "skip 3".
     */
    private List<String> doubleNodes(List<String> nodes) {
        List<String> lst = new ArrayList<>();
        Map<Integer, String> return_ = new HashMap<>();
        // Spread existing nodes onto the even slots of the doubled ring.
        for (int i = 0; i < nodes.size(); i++) {
            return_.put(i * 2, nodes.get(i));
        }
        for (int i = 0; i < nodes.size() * 2; i++) {
            if (0 == i % 2) {
                // rotate
                // Wrap the +3 insertion around the end of the doubled ring.
                if (i + 3 >= (nodes.size() * 2)) {
                    int delta = (i + 3) - (nodes.size() * 2);
                    return_.put(delta, return_.get(i));
                }
                return_.put(i + 3, return_.get(i));
            }
        }
        for (int i = 0; i < nodes.size() * 2; i++) lst.add(return_.get(i));
        return lst;
    }
}
| 3,147 |
0 | Create_ds/Priam/priam/src/test/java/com/netflix/priam | Create_ds/Priam/priam/src/test/java/com/netflix/priam/defaultimpl/CassandraProcessManagerTest.java | /*
* Copyright 2017 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.priam.defaultimpl;
import com.google.inject.Guice;
import com.netflix.priam.backup.BRTestModule;
import com.netflix.priam.config.FakeConfiguration;
import com.netflix.priam.config.IConfiguration;
import com.netflix.priam.health.InstanceState;
import com.netflix.priam.merics.CassMonitorMetrics;
import java.io.IOException;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
public class CassandraProcessManagerTest {
    private CassandraProcessManager cpm;

    /** Builds the process manager against a fake configuration and Guice-provided collaborators. */
    @Before
    public void setup() {
        IConfiguration fakeConfig = new FakeConfiguration("test_cluster");
        InstanceState state =
                Guice.createInjector(new BRTestModule()).getInstance(InstanceState.class);
        CassMonitorMetrics metrics =
                Guice.createInjector(new BRTestModule()).getInstance(CassMonitorMetrics.class);
        cpm = new CassandraProcessManager(fakeConfig, state, metrics);
    }

    /** Listing a nonexistent directory must fail, and its output must still be loggable. */
    @Test
    public void logProcessOutput_BadApp() throws IOException, InterruptedException {
        Process process = null;
        try {
            process = new ProcessBuilder("ls", "/tmppppp").start();
            Assert.assertTrue(0 != process.waitFor());
            cpm.logProcessOutput(process);
        } catch (IOException ioe) {
            // If the command itself could not start, log whatever output exists.
            if (process != null) cpm.logProcessOutput(process);
        }
    }

    /** note: this will succeed on a *nix machine, unclear about anything else... */
    @Test
    public void logProcessOutput_GoodApp() throws IOException, InterruptedException {
        Process process = new ProcessBuilder("true").start();
        Assert.assertEquals(0, process.waitFor());
        cpm.logProcessOutput(process);
    }
}
| 3,148 |
0 | Create_ds/Priam/priam/src/test/java/com/netflix/priam | Create_ds/Priam/priam/src/test/java/com/netflix/priam/defaultimpl/FakeCassandraProcess.java | /*
* Copyright 2017 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.priam.defaultimpl;
import java.io.IOException;
/**
 * No-op {@link ICassandraProcess} used in tests so that start/stop calls never launch or kill a
 * real Cassandra process. Created by aagrawal on 10/3/17.
 */
public class FakeCassandraProcess implements ICassandraProcess {
    /** Intentionally a no-op; tests must not spawn a real process. */
    @Override
    public void start(boolean join_ring) throws IOException {
        // do nothing
    }
    /** Intentionally a no-op; tests must not signal a real process. */
    @Override
    public void stop(boolean force) throws IOException {
        // do nothing
    }
}
| 3,149 |
0 | Create_ds/Priam/priam/src/test/java/com/netflix/priam | Create_ds/Priam/priam/src/test/java/com/netflix/priam/backupv2/TestBackupUtils.java | /*
* Copyright 2018 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.priam.backupv2;
import com.google.common.collect.ImmutableSetMultimap;
import com.google.inject.Guice;
import com.google.inject.Injector;
import com.netflix.priam.backup.AbstractBackupPath;
import com.netflix.priam.backup.BRTestModule;
import com.netflix.priam.config.IConfiguration;
import java.io.FileWriter;
import java.io.IOException;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.time.Instant;
import java.util.List;
import javax.inject.Inject;
import javax.inject.Provider;
import org.apache.commons.io.FileUtils;
/**
 * Helpers for creating local dummy backup files and V2 meta files in tests. Created by
 * aagrawal on 12/17/18.
 */
public class TestBackupUtils {
    protected final String keyspace = "keyspace1";
    protected final String columnfamily = "columnfamily1";
    private final MetaFileWriterBuilder metaFileWriterBuilder;
    private final Provider<AbstractBackupPath> pathProvider;
    private final String dataDir;

    @Inject
    public TestBackupUtils() {
        // Builds its own injector so tests can instantiate this helper directly.
        Injector injector = Guice.createInjector(new BRTestModule());
        pathProvider = injector.getProvider(AbstractBackupPath.class);
        metaFileWriterBuilder = injector.getInstance(MetaFileWriterBuilder.class);
        dataDir = injector.getInstance(IConfiguration.class).getDataFileLocation();
    }

    /**
     * Writes a meta (V2) file listing {@code filesToAdd} as the contents of one column family
     * and back-dates its mtime to {@code snapshotTime}. Returns the meta file's path.
     */
    public Path createMeta(List<String> filesToAdd, Instant snapshotTime) throws IOException {
        MetaFileWriterBuilder.DataStep dataStep =
                metaFileWriterBuilder.newBuilder().startMetaFileGeneration(snapshotTime);
        ImmutableSetMultimap.Builder<String, AbstractBackupPath> files =
                ImmutableSetMultimap.builder();
        for (String remoteFile : filesToAdd) {
            String basename = Paths.get(remoteFile).getFileName().toString();
            // Group entries by the name up to the final '-', mirroring SSTable prefixes.
            int cut = basename.lastIndexOf('-');
            String prefix = cut < 0 ? basename : basename.substring(0, cut);
            AbstractBackupPath path = pathProvider.get();
            path.parseRemote(remoteFile);
            path.setCreationTime(path.getLastModified());
            files.put(prefix, path);
        }
        dataStep.addColumnfamilyResult(keyspace, columnfamily, files.build());
        Path metaPath = dataStep.endMetaFileGeneration().getMetaFilePath();
        metaPath.toFile().setLastModified(snapshotTime.toEpochMilli());
        return metaPath;
    }

    /**
     * Creates an empty file under dataDir/keyspace1/columnfamily1/{@code fileName} with its
     * mtime set to {@code lastModifiedTime}. Returns the file's absolute path string.
     */
    public String createFile(String fileName, Instant lastModifiedTime) throws Exception {
        Path filePath = Paths.get(dataDir, keyspace, columnfamily, fileName);
        FileUtils.forceMkdirParent(filePath.toFile());
        try (FileWriter writer = new FileWriter(filePath.toFile())) {
            writer.write("");
        }
        filePath.toFile().setLastModified(lastModifiedTime.toEpochMilli());
        return filePath.toString();
    }
}
| 3,150 |
0 | Create_ds/Priam/priam/src/test/java/com/netflix/priam | Create_ds/Priam/priam/src/test/java/com/netflix/priam/backupv2/TestBackupV2Service.java | /*
* Copyright 2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.priam.backupv2;
import com.google.common.truth.Truth;
import com.google.inject.Guice;
import com.google.inject.Injector;
import com.netflix.priam.backup.AbstractBackup;
import com.netflix.priam.backup.BRTestModule;
import com.netflix.priam.config.IBackupRestoreConfig;
import com.netflix.priam.config.IConfiguration;
import com.netflix.priam.connection.JMXNodeTool;
import com.netflix.priam.defaultimpl.IService;
import com.netflix.priam.identity.token.ITokenRetriever;
import com.netflix.priam.scheduler.PriamScheduler;
import com.netflix.priam.tuner.CassandraTunerService;
import com.netflix.priam.tuner.TuneCassandra;
import com.netflix.priam.utils.BackupFileUtils;
import com.netflix.priam.utils.DateUtil;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.time.Instant;
import java.util.Set;
import mockit.Expectations;
import mockit.Mocked;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.quartz.SchedulerException;
/**
 * Tests BackupV2Service scheduling: which quartz jobs get registered under different backup
 * configurations, and cleanup of stale V2 snapshots when V2 backups are disabled. Uses JMockit
 * {@code Expectations} blocks, whose recordings are strictly order-dependent. Created by
 * aagrawal on 3/9/19.
 */
public class TestBackupV2Service {
    private final PriamScheduler scheduler;
    private final SnapshotMetaTask snapshotMetaTask;
    private final CassandraTunerService cassandraTunerService;
    private final ITokenRetriever tokenRetriever;
    public TestBackupV2Service() {
        Injector injector = Guice.createInjector(new BRTestModule());
        scheduler = injector.getInstance(PriamScheduler.class);
        snapshotMetaTask = injector.getInstance(SnapshotMetaTask.class);
        cassandraTunerService = injector.getInstance(CassandraTunerService.class);
        tokenRetriever = injector.getInstance(ITokenRetriever.class);
    }
    /** Clears all scheduled jobs before each test so job counts are deterministic. */
    @Before
    public void cleanup() throws SchedulerException {
        scheduler.getScheduler().clear();
    }
    /**
     * With the snapshot-meta cron disabled ("-1"), scheduling must register only the TTL job
     * and must delete leftover V2 snapshot directories while leaving V1 snapshots intact.
     */
    @Test
    public void testBackupDisabled(
            @Mocked IConfiguration configuration, @Mocked IBackupRestoreConfig backupRestoreConfig)
            throws Exception {
        new Expectations() {
            {
                backupRestoreConfig.getSnapshotMetaServiceCronExpression();
                result = "-1";
                backupRestoreConfig.getBackupTTLMonitorPeriodInSec();
                result = 600;
                configuration.getDataFileLocation();
                result = "target/data";
            }
        };
        Path dummyDataDirectoryLocation = Paths.get(configuration.getDataFileLocation());
        Instant snapshotInstant = DateUtil.getInstant();
        String snapshotName = snapshotMetaTask.generateSnapshotName(snapshotInstant);
        // Create one V2 snapshot.
        BackupFileUtils.generateDummyFiles(
                dummyDataDirectoryLocation,
                2,
                3,
                3,
                AbstractBackup.SNAPSHOT_FOLDER,
                snapshotName,
                true);
        // Create one V1 snapshot.
        String snapshotV1Name = DateUtil.formatInstant(DateUtil.yyyyMMdd, snapshotInstant);
        BackupFileUtils.generateDummyFiles(
                dummyDataDirectoryLocation,
                2,
                3,
                3,
                AbstractBackup.SNAPSHOT_FOLDER,
                snapshotV1Name,
                false);
        IService backupService =
                new BackupV2Service(
                        configuration,
                        backupRestoreConfig,
                        scheduler,
                        snapshotMetaTask,
                        cassandraTunerService,
                        tokenRetriever);
        backupService.scheduleService();
        Truth.assertThat(scheduler.getScheduler().getJobGroupNames()).hasSize(1);
        // snapshot V2 name should not be there.
        Set<Path> backupPaths =
                AbstractBackup.getBackupDirectories(configuration, AbstractBackup.SNAPSHOT_FOLDER);
        for (Path backupPath : backupPaths) {
            Assert.assertFalse(Files.exists(Paths.get(backupPath.toString(), snapshotName)));
            Assert.assertTrue(Files.exists(Paths.get(backupPath.toString(), snapshotV1Name)));
        }
    }
    /**
     * With V2 backups plus incremental backups plus verification all enabled, four jobs must
     * be scheduled (snapshot meta, TTL, verification, incremental).
     */
    @Test
    public void testBackupEnabled(
            @Mocked IConfiguration configuration, @Mocked IBackupRestoreConfig backupRestoreConfig)
            throws Exception {
        new Expectations() {
            {
                backupRestoreConfig.getSnapshotMetaServiceCronExpression();
                result = "0 0 0/1 1/1 * ? *";
                backupRestoreConfig.getBackupTTLMonitorPeriodInSec();
                result = 600;
                backupRestoreConfig.getBackupVerificationCronExpression();
                result = "0 0 0/1 1/1 * ? *";
                backupRestoreConfig.enableV2Backups();
                result = true;
                configuration.isIncrementalBackupEnabled();
                result = true;
                configuration.getBackupCronExpression();
                result = "-1";
            }
        };
        IService backupService =
                new BackupV2Service(
                        configuration,
                        backupRestoreConfig,
                        scheduler,
                        snapshotMetaTask,
                        cassandraTunerService,
                        tokenRetriever);
        backupService.scheduleService();
        Assert.assertEquals(4, scheduler.getScheduler().getJobKeys(null).size());
    }
    /**
     * With incremental backups disabled, only three jobs (snapshot meta, TTL, verification)
     * must be scheduled.
     */
    @Test
    public void testBackup(
            @Mocked IConfiguration configuration, @Mocked IBackupRestoreConfig backupRestoreConfig)
            throws Exception {
        new Expectations() {
            {
                backupRestoreConfig.getSnapshotMetaServiceCronExpression();
                result = "0 0 0/1 1/1 * ? *";
                backupRestoreConfig.getBackupTTLMonitorPeriodInSec();
                result = 600;
                backupRestoreConfig.getBackupVerificationCronExpression();
                result = "0 0 0/1 1/1 * ? *";
                configuration.isIncrementalBackupEnabled();
                result = false;
                configuration.getDataFileLocation();
                result = "target/data";
            }
        };
        IService backupService =
                new BackupV2Service(
                        configuration,
                        backupRestoreConfig,
                        scheduler,
                        snapshotMetaTask,
                        cassandraTunerService,
                        tokenRetriever);
        backupService.scheduleService();
        Assert.assertEquals(3, scheduler.getScheduler().getJobKeys(null).size());
    }
    /**
     * Simulates a config flip: schedule with cron enabled, then re-run with the cron disabled
     * (consecutive JMockit {@code result} assignments queue return values for successive
     * calls) and verify onChangeUpdateService tears jobs down to just one.
     */
    @Test
    public void updateService(
            @Mocked IConfiguration configuration,
            @Mocked IBackupRestoreConfig backupRestoreConfig,
            @Mocked JMXNodeTool nodeTool,
            @Mocked TuneCassandra tuneCassandra)
            throws Exception {
        new Expectations() {
            {
                backupRestoreConfig.getSnapshotMetaServiceCronExpression();
                result = "0 0 0/1 1/1 * ? *";
                result = "0 0 0/1 1/1 * ? *";
                result = "-1";
                result = "-1";
                configuration.isIncrementalBackupEnabled();
                result = true;
                backupRestoreConfig.enableV2Backups();
                result = true;
                backupRestoreConfig.getBackupVerificationCronExpression();
                result = "-1";
                backupRestoreConfig.getBackupTTLMonitorPeriodInSec();
                result = 600;
                configuration.getBackupCronExpression();
                result = "-1";
            }
        };
        IService backupService =
                new BackupV2Service(
                        configuration,
                        backupRestoreConfig,
                        scheduler,
                        snapshotMetaTask,
                        cassandraTunerService,
                        tokenRetriever);
        backupService.scheduleService();
        Assert.assertEquals(3, scheduler.getScheduler().getJobKeys(null).size());
        backupService.onChangeUpdateService();
        Assert.assertEquals(1, scheduler.getScheduler().getJobKeys(null).size());
    }
}
| 3,151 |
0 | Create_ds/Priam/priam/src/test/java/com/netflix/priam | Create_ds/Priam/priam/src/test/java/com/netflix/priam/backupv2/TestSnapshotMetaTask.java | /*
* Copyright 2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.priam.backupv2;
import com.google.inject.Guice;
import com.google.inject.Injector;
import com.netflix.priam.backup.AbstractBackup;
import com.netflix.priam.backup.BRTestModule;
import com.netflix.priam.config.IBackupRestoreConfig;
import com.netflix.priam.config.IConfiguration;
import com.netflix.priam.identity.config.InstanceInfo;
import com.netflix.priam.scheduler.TaskTimer;
import com.netflix.priam.utils.BackupFileUtils;
import com.netflix.priam.utils.DateUtil;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.time.Instant;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * End-to-end tests for SnapshotMetaTask: generates dummy snapshot trees, runs meta file
 * generation, and reads the result back with a counting MetaFileReader. Created by aagrawal on
 * 6/20/18.
 */
public class TestSnapshotMetaTask {
    private static final Logger logger =
            LoggerFactory.getLogger(TestSnapshotMetaTask.class.getName());
    // Shared dummy data root; re-resolved and cleaned before every test in setUp().
    private static Path dummyDataDirectoryLocation;
    private final IConfiguration configuration;
    private final IBackupRestoreConfig backupRestoreConfig;
    private final SnapshotMetaTask snapshotMetaService;
    private final TestMetaFileReader metaFileReader;
    private final InstanceInfo instanceInfo;
    public TestSnapshotMetaTask() {
        Injector injector = Guice.createInjector(new BRTestModule());
        configuration = injector.getInstance(IConfiguration.class);
        backupRestoreConfig = injector.getInstance(IBackupRestoreConfig.class);
        snapshotMetaService = injector.getInstance(SnapshotMetaTask.class);
        metaFileReader = new TestMetaFileReader();
        instanceInfo = injector.getInstance(InstanceInfo.class);
    }
    @Before
    public void setUp() {
        dummyDataDirectoryLocation = Paths.get(configuration.getDataFileLocation());
        BackupFileUtils.cleanupDir(dummyDataDirectoryLocation);
    }
    /** The task must produce a timer when the snapshot-meta cron is configured. */
    @Test
    public void testSnapshotMetaServiceEnabled() throws Exception {
        TaskTimer taskTimer = SnapshotMetaTask.getTimer(backupRestoreConfig);
        Assert.assertNotNull(taskTimer);
    }
    /** A generated meta file name is valid; the same name with a .tmp suffix is not. */
    @Test
    public void testMetaFileName() throws Exception {
        String fileName = MetaFileInfo.getMetaFileName(DateUtil.getInstant());
        Path path = Paths.get(dummyDataDirectoryLocation.toFile().getAbsolutePath(), fileName);
        Assert.assertTrue(metaFileReader.isValidMetaFile(path));
        path = Paths.get(dummyDataDirectoryLocation.toFile().getAbsolutePath(), fileName + ".tmp");
        Assert.assertFalse(metaFileReader.isValidMetaFile(path));
    }
    /**
     * Generates a dummy snapshot, runs meta generation, and verifies the meta file exists,
     * carries the snapshot's timestamp as mtime, and round-trips through MetaFileReader with
     * the expected header info and per-CF sstable count.
     */
    private void test(int noOfSstables, int noOfKeyspaces, int noOfCf) throws Exception {
        Instant snapshotInstant = DateUtil.getInstant();
        String snapshotName = snapshotMetaService.generateSnapshotName(snapshotInstant);
        BackupFileUtils.generateDummyFiles(
                dummyDataDirectoryLocation,
                noOfKeyspaces,
                noOfCf,
                noOfSstables,
                AbstractBackup.SNAPSHOT_FOLDER,
                snapshotName,
                true);
        snapshotMetaService.setSnapshotName(snapshotName);
        Path metaFileLocation =
                snapshotMetaService.processSnapshot(snapshotInstant).getMetaFilePath();
        Assert.assertNotNull(metaFileLocation);
        Assert.assertTrue(metaFileLocation.toFile().exists());
        Assert.assertTrue(metaFileLocation.toFile().isFile());
        // mtime is compared at second granularity against the snapshot instant.
        Assert.assertEquals(
                snapshotInstant.getEpochSecond(),
                (metaFileLocation.toFile().lastModified() / 1000));
        // Try reading meta file.
        // +1 presumably accounts for the manifest.json entry written per CF — confirm
        // against SnapshotMetaTask's grouping of snapshot contents.
        metaFileReader.setNoOfSstables(noOfSstables + 1);
        metaFileReader.readMeta(metaFileLocation);
        MetaFileInfo metaFileInfo = metaFileReader.getMetaFileInfo();
        Assert.assertEquals(1, metaFileInfo.getVersion());
        Assert.assertEquals(configuration.getAppName(), metaFileInfo.getAppName());
        Assert.assertEquals(instanceInfo.getRac(), metaFileInfo.getRack());
        Assert.assertEquals(instanceInfo.getRegion(), metaFileInfo.getRegion());
        // Cleanup
        metaFileLocation.toFile().delete();
        BackupFileUtils.cleanupDir(dummyDataDirectoryLocation);
    }
    @Test
    public void testMetaFile() throws Exception {
        test(5, 1, 1);
    }
    /** Larger tree (1000 sstables x 2 keyspaces x 2 CFs) to exercise meta file size handling. */
    @Test
    public void testSize() throws Exception {
        test(1000, 2, 2);
    }
    /** MetaFileReader stub that asserts each CF result contains the expected sstable count. */
    static class TestMetaFileReader extends MetaFileReader {
        // Expected sstable-group count per column family; set by the test before readMeta().
        private int noOfSstables;
        void setNoOfSstables(int noOfSstables) {
            this.noOfSstables = noOfSstables;
        }
        @Override
        public void process(ColumnFamilyResult columnfamilyResult) {
            Assert.assertEquals(noOfSstables, columnfamilyResult.getSstables().size());
        }
    }
}
| 3,152 |
0 | Create_ds/Priam/priam/src/test/java/com/netflix/priam | Create_ds/Priam/priam/src/test/java/com/netflix/priam/backupv2/TestBackupTTLTask.java | /*
* Copyright 2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.priam.backupv2;
import com.google.inject.Guice;
import com.google.inject.Injector;
import com.netflix.priam.backup.AbstractBackupPath;
import com.netflix.priam.backup.BRTestModule;
import com.netflix.priam.backup.FakeBackupFileSystem;
import com.netflix.priam.backup.Status;
import com.netflix.priam.config.IConfiguration;
import com.netflix.priam.health.InstanceState;
import com.netflix.priam.utils.BackupFileUtils;
import com.netflix.priam.utils.DateUtil;
import java.io.File;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.text.ParseException;
import java.time.Instant;
import java.time.temporal.ChronoUnit;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.inject.Provider;
import mockit.Expectations;
import mockit.Mocked;
import org.junit.After;
import org.junit.Assert;
import org.junit.Test;
/** Created by aagrawal on 12/17/18. */
public class TestBackupTTLTask {
private TestBackupUtils testBackupUtils = new TestBackupUtils();
private IConfiguration configuration;
private static BackupTTLTask backupTTLService;
private static FakeBackupFileSystem backupFileSystem;
private Provider<AbstractBackupPath> pathProvider;
private Path[] metas;
private Map<String, String> allFilesMap = new HashMap<>();
public TestBackupTTLTask() {
Injector injector = Guice.createInjector(new BRTestModule());
configuration = injector.getInstance(IConfiguration.class);
if (backupTTLService == null) backupTTLService = injector.getInstance(BackupTTLTask.class);
if (backupFileSystem == null)
backupFileSystem = injector.getInstance(FakeBackupFileSystem.class);
pathProvider = injector.getProvider(AbstractBackupPath.class);
}
    /**
     * Builds a fake remote backup layout: seven SSTables ("mc-1" .. "mc-7") and three
     * META_V2 files at three points in time relative to {@code daysForSnapshot}.
     *
     * <p>Snapshot 0 is older than retention + compaction grace period (TTL-eligible),
     * snapshot 1 is exactly {@code daysForSnapshot} old, snapshot 2 one day newer.
     * Side effects: populates {@code metas}, {@code allFilesMap} (file name -> remote
     * path, metas keyed "META0".."META2") and seeds the fake backup file system.
     *
     * @param daysForSnapshot age in days of the middle snapshot
     * @throws Exception if file creation or local-path parsing fails
     */
    public void prepTest(int daysForSnapshot) throws Exception {
        BackupFileUtils.cleanupDir(Paths.get(configuration.getDataFileLocation()));
        Instant current = DateUtil.getInstant();
        List<String> list = new ArrayList<>();
        List<String> allFiles = new ArrayList<>();
        metas = new Path[3];
        // Oldest snapshot: beyond retention + grace period, so TTL may delete its files.
        Instant time =
                current.minus(
                        daysForSnapshot + configuration.getGracePeriodDaysForCompaction() + 1,
                        ChronoUnit.DAYS);
        String file1 = testBackupUtils.createFile("mc-1-Data.db", time);
        String file2 =
                testBackupUtils.createFile("mc-2-Data.db", time.plus(10, ChronoUnit.MINUTES));
        list.clear();
        list.add(getRemoteFromLocal(file1));
        list.add(getRemoteFromLocal(file2));
        metas[0] = testBackupUtils.createMeta(list, time.plus(20, ChronoUnit.MINUTES));
        allFiles.add(getRemoteFromLocal(file1));
        allFiles.add(getRemoteFromLocal(file2));
        // Middle snapshot: still references file1, drops file2 (file2 becomes orphaned).
        time = current.minus(daysForSnapshot, ChronoUnit.DAYS);
        String file3 = testBackupUtils.createFile("mc-3-Data.db", time);
        String file4 =
                testBackupUtils.createFile("mc-4-Data.db", time.plus(10, ChronoUnit.MINUTES));
        list.clear();
        list.add(getRemoteFromLocal(file1));
        list.add(getRemoteFromLocal(file4));
        metas[1] = testBackupUtils.createMeta(list, time.plus(20, ChronoUnit.MINUTES));
        allFiles.add(getRemoteFromLocal(file3));
        allFiles.add(getRemoteFromLocal(file4));
        // Newest snapshot: references file4 and file7 only.
        time = current.minus(daysForSnapshot - 1, ChronoUnit.DAYS);
        String file5 = testBackupUtils.createFile("mc-5-Data.db", time);
        String file6 =
                testBackupUtils.createFile("mc-6-Data.db", time.plus(10, ChronoUnit.MINUTES));
        String file7 =
                testBackupUtils.createFile("mc-7-Data.db", time.plus(20, ChronoUnit.MINUTES));
        list.clear();
        list.add(getRemoteFromLocal(file4));
        // file6 is intentionally left out of this meta: it exists remotely but is
        // unreferenced, exercising the "uploaded but not in any snapshot" case.
        // list.add(getRemoteFromLocal(file6));
        list.add(getRemoteFromLocal(file7));
        metas[2] = testBackupUtils.createMeta(list, time.plus(40, ChronoUnit.MINUTES));
        allFiles.add(getRemoteFromLocal(file5));
        allFiles.add(getRemoteFromLocal(file6));
        allFiles.add(getRemoteFromLocal(file7));
        // Index every SSTable by its simple file name for assertion lookups.
        allFiles.stream()
                .forEach(
                        file -> {
                            Path path = Paths.get(file);
                            allFilesMap.put(path.toFile().getName(), file);
                        });
        // Register the meta files themselves under keys META0..META2.
        for (int i = 0; i < metas.length; i++) {
            AbstractBackupPath path = pathProvider.get();
            path.parseLocal(metas[i].toFile(), AbstractBackupPath.BackupFileType.META_V2);
            allFiles.add(path.getRemotePath());
            allFilesMap.put("META" + i, path.getRemotePath());
        }
        backupFileSystem.setupTest(allFiles);
    }
private String getRemoteFromLocal(String localPath) throws ParseException {
AbstractBackupPath path = pathProvider.get();
path.parseLocal(new File(localPath), AbstractBackupPath.BackupFileType.SST_V2);
return path.getRemotePath();
}
@After
public void cleanup() {
BackupFileUtils.cleanupDir(Paths.get(configuration.getDataFileLocation()));
backupFileSystem.cleanup();
}
private List<String> getAllFiles() {
List<String> remoteFiles = new ArrayList<>();
backupFileSystem.listFileSystem("", null, null).forEachRemaining(remoteFiles::add);
return remoteFiles;
}
    /**
     * TTL with the middle snapshot exactly at the retention boundary: everything up to
     * (and including) the first meta file should be deleted, except files protected by
     * the compaction grace period.
     */
    @Test
    public void testTTL() throws Exception {
        int daysForSnapshot = configuration.getBackupRetentionDays();
        prepTest(daysForSnapshot);
        // Run ttl till 2nd meta file.
        backupTTLService.execute();
        List<String> remoteFiles = getAllFiles();
        // Confirm the files.
        Assert.assertEquals(8, remoteFiles.size());
        Assert.assertTrue(remoteFiles.contains(allFilesMap.get("mc-4-Data.db")));
        Assert.assertTrue(remoteFiles.contains(allFilesMap.get("mc-5-Data.db")));
        Assert.assertTrue(remoteFiles.contains(allFilesMap.get("mc-6-Data.db")));
        Assert.assertTrue(remoteFiles.contains(allFilesMap.get("mc-7-Data.db")));
        // mc-1 survives because the still-live middle meta (META1) references it.
        Assert.assertTrue(remoteFiles.contains(allFilesMap.get("mc-1-Data.db")));
        Assert.assertTrue(remoteFiles.contains(allFilesMap.get("META1")));
        Assert.assertTrue(remoteFiles.contains(allFilesMap.get("META2")));
        // Remains because of GRACE PERIOD.
        Assert.assertTrue(remoteFiles.contains(allFilesMap.get("mc-3-Data.db")));
        // mc-2 and META0 are older than retention + grace and unreferenced -> deleted.
        Assert.assertFalse(remoteFiles.contains(allFilesMap.get("mc-2-Data.db")));
        Assert.assertFalse(remoteFiles.contains(allFilesMap.get("META0")));
    }
    /**
     * TTL with the middle snapshot one day past retention: both older meta files and
     * their exclusively-owned SSTables should be removed; only META2's references and
     * grace-period-protected files remain.
     */
    @Test
    public void testTTLNext() throws Exception {
        int daysForSnapshot = configuration.getBackupRetentionDays() + 1;
        prepTest(daysForSnapshot);
        // Run ttl till 3rd meta file.
        backupTTLService.execute();
        List<String> remoteFiles = getAllFiles();
        // Confirm the files.
        Assert.assertEquals(6, remoteFiles.size());
        Assert.assertTrue(remoteFiles.contains(allFilesMap.get("mc-4-Data.db")));
        Assert.assertTrue(remoteFiles.contains(allFilesMap.get("mc-6-Data.db")));
        Assert.assertTrue(remoteFiles.contains(allFilesMap.get("mc-7-Data.db")));
        Assert.assertTrue(remoteFiles.contains(allFilesMap.get("META2")));
        // GRACE PERIOD files.
        Assert.assertTrue(remoteFiles.contains(allFilesMap.get("mc-3-Data.db")));
        Assert.assertTrue(remoteFiles.contains(allFilesMap.get("mc-5-Data.db")));
        // Everything referenced only by the two expired metas is gone.
        Assert.assertFalse(remoteFiles.contains(allFilesMap.get("mc-1-Data.db")));
        Assert.assertFalse(remoteFiles.contains(allFilesMap.get("mc-2-Data.db")));
        Assert.assertFalse(remoteFiles.contains(allFilesMap.get("META0")));
        Assert.assertFalse(remoteFiles.contains(allFilesMap.get("META1")));
    }
    /**
     * Runs the TTL task while the node reports an in-progress restore.
     *
     * <p>NOTE(review): there is no explicit assertion here — presumably execute()
     * short-circuits when restore status is STARTED and the test only checks it does
     * not throw; confirm against BackupTTLTask's restore guard.
     */
    @Test
    public void testRestoreMode(@Mocked InstanceState state) throws Exception {
        new Expectations() {
            {
                state.getRestoreStatus().getStatus();
                result = Status.STARTED;
            }
        };
        backupTTLService.execute();
    }
}
| 3,153 |
0 | Create_ds/Priam/priam/src/test/java/com/netflix/priam | Create_ds/Priam/priam/src/test/java/com/netflix/priam/backupv2/TestForgottenFileManager.java | /*
* Copyright 2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.priam.backupv2;
import com.google.inject.Guice;
import com.google.inject.Injector;
import com.netflix.priam.backup.BRTestModule;
import com.netflix.priam.config.FakeConfiguration;
import com.netflix.priam.merics.BackupMetrics;
import com.netflix.priam.utils.DateUtil;
import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.time.Instant;
import java.time.temporal.ChronoUnit;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.stream.Collectors;
import org.apache.commons.io.FileUtils;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
/**
 * Tests for {@link ForgottenFilesManager}: detection of SSTables present in a column
 * family directory but absent from the latest snapshot ("forgotten" files), and their
 * relocation to the lost+found directory — first as symlinks while inside the grace
 * period, then as real moves once the grace period is zero.
 *
 * <p>Created by aagrawal on 1/1/19.
 */
public class TestForgottenFileManager {
    private ForgottenFilesManager forgottenFilesManager;
    private TestBackupUtils testBackupUtils;
    private ForgottenFilesConfiguration configuration;
    // file1..file7 created by prep(); file1 (index 0) is the one left out of the snapshot.
    private List<Path> allFiles = new ArrayList<>();
    private Instant snapshotInstant;
    private Path snapshotDir;

    public TestForgottenFileManager() {
        Injector injector = Guice.createInjector(new BRTestModule());
        BackupMetrics backupMetrics = injector.getInstance(BackupMetrics.class);
        configuration = new ForgottenFilesConfiguration();
        forgottenFilesManager = new ForgottenFilesManager(configuration, backupMetrics);
        testBackupUtils = injector.getInstance(TestBackupUtils.class);
    }

    /**
     * Creates seven files with staggered modification times and a snapshot directory
     * hard-linking files 2-4. file6/file7 use tmp-style names that the manager is
     * expected to treat specially.
     */
    @Before
    public void prep() throws Exception {
        cleanup();
        Instant now = DateUtil.getInstant();
        snapshotInstant = now;
        Path file1 = Paths.get(testBackupUtils.createFile("file1", now.minus(10, ChronoUnit.DAYS)));
        Path file2 = Paths.get(testBackupUtils.createFile("file2", now.minus(8, ChronoUnit.DAYS)));
        Path file3 = Paths.get(testBackupUtils.createFile("file3", now.minus(6, ChronoUnit.DAYS)));
        Path file4 = Paths.get(testBackupUtils.createFile("file4", now.minus(4, ChronoUnit.DAYS)));
        Path file5 = Paths.get(testBackupUtils.createFile("file5", now.minus(1, ChronoUnit.DAYS)));
        Path file6 =
                Paths.get(
                        testBackupUtils.createFile(
                                "tmplink-lb-59516-big-Index.db", now.minus(3, ChronoUnit.DAYS)));
        Path file7 =
                Paths.get(testBackupUtils.createFile("file7.tmp", now.minus(3, ChronoUnit.DAYS)));
        allFiles.add(file1);
        allFiles.add(file2);
        allFiles.add(file3);
        allFiles.add(file4);
        allFiles.add(file5);
        allFiles.add(file6);
        allFiles.add(file7);

        // Create a snapshot with file2, file3, file4.
        Path columnfamilyDir = file1.getParent();
        snapshotDir =
                Paths.get(
                        columnfamilyDir.toString(),
                        "snapshot",
                        "snap_v2_" + DateUtil.formatInstant(DateUtil.yyyyMMddHHmm, now));
        snapshotDir.toFile().mkdirs();
        Files.createLink(Paths.get(snapshotDir.toString(), file2.getFileName().toString()), file2);
        Files.createLink(Paths.get(snapshotDir.toString(), file3.getFileName().toString()), file3);
        Files.createLink(Paths.get(snapshotDir.toString(), file4.getFileName().toString()), file4);
    }

    /** Wipes the data directory between tests. */
    @After
    public void cleanup() throws Exception {
        String dataDir = configuration.getDataFileLocation();
        org.apache.commons.io.FileUtils.cleanDirectory(new File(dataDir));
    }

    /**
     * moveForgottenFiles should first symlink candidates into lost+found (grace period
     * active), remove stale symlinks from earlier runs, and, once the grace period is
     * zero, physically move the files.
     */
    @Test
    public void testMoveForgottenFiles() throws IOException, InterruptedException {
        Collection<File> files = allFiles.stream().map(Path::toFile).collect(Collectors.toList());
        Path lostFoundDir =
                Paths.get(configuration.getDataFileLocation(), forgottenFilesManager.LOST_FOUND);

        // Lets create some extra symlinks in the LOST_FOUND folder. They should not exist anymore
        Path randomSymlink = Paths.get(lostFoundDir.toFile().getAbsolutePath(), "random");
        Files.createDirectory(lostFoundDir);
        Files.createSymbolicLink(randomSymlink, lostFoundDir);

        forgottenFilesManager.moveForgottenFiles(
                new File(configuration.getDataFileLocation()), files);

        // Extra symlinks are deleted.
        Assert.assertFalse(Files.exists(randomSymlink));

        // Symlinks are created for all the files. They are not moved yet.
        Collection<File> symlinkFiles = FileUtils.listFiles(lostFoundDir.toFile(), null, false);
        Assert.assertEquals(allFiles.size(), symlinkFiles.size());
        for (Path file : allFiles) {
            Path symlink = Paths.get(lostFoundDir.toString(), file.getFileName().toString());
            Assert.assertTrue(symlinkFiles.contains(symlink.toFile()));
            Assert.assertTrue(Files.isSymbolicLink(symlink));
            Assert.assertTrue(Files.exists(file));
        }

        // Lets change the configuration and try again!!
        configuration.setGracePeriodForgottenFileInDaysForRead(0);
        forgottenFilesManager.moveForgottenFiles(
                new File(configuration.getDataFileLocation()), files);
        Collection<File> movedFiles = FileUtils.listFiles(lostFoundDir.toFile(), null, false);
        Assert.assertEquals(allFiles.size(), movedFiles.size());
        // With grace period 0 everything is physically moved: regular files in
        // lost+found, originals gone.
        movedFiles.forEach(
                file -> Assert.assertTrue(Files.isRegularFile(Paths.get(file.getAbsolutePath()))));
        allFiles.forEach(file -> Assert.assertFalse(file.toFile().exists()));
        configuration.setGracePeriodForgottenFileInDaysForRead(
                ForgottenFilesConfiguration.DEFAULT_GRACE_PERIOD);
    }

    /**
     * Only files modified long enough before the snapshot instant qualify as
     * column-family files for forgotten-file analysis (file1..file3 here).
     */
    @Test
    public void getColumnfamilyFiles() {
        Path columnfamilyDir = allFiles.get(0).getParent();
        Collection<File> columnfamilyFiles =
                forgottenFilesManager.getColumnfamilyFiles(
                        snapshotInstant, columnfamilyDir.toFile());
        Assert.assertEquals(3, columnfamilyFiles.size());
        Assert.assertTrue(columnfamilyFiles.contains(allFiles.get(0).toFile()));
        Assert.assertTrue(columnfamilyFiles.contains(allFiles.get(1).toFile()));
        Assert.assertTrue(columnfamilyFiles.contains(allFiles.get(2).toFile()));
    }

    /**
     * End-to-end: file1 is the only forgotten file (old enough and absent from the
     * snapshot). First run symlinks it; with grace period 0 it is moved out of the
     * column family directory. The snapshot itself is never touched.
     */
    @Test
    public void findAndMoveForgottenFiles() {
        Path lostFoundDir =
                Paths.get(allFiles.get(0).getParent().toString(), forgottenFilesManager.LOST_FOUND);
        forgottenFilesManager.findAndMoveForgottenFiles(snapshotInstant, snapshotDir.toFile());

        // Only one potential forgotten file - file1. It will be symlink here.
        Collection<File> movedFiles = FileUtils.listFiles(lostFoundDir.toFile(), null, false);
        Assert.assertEquals(1, movedFiles.size());
        Assert.assertTrue(
                movedFiles
                        .iterator()
                        .next()
                        .getName()
                        .equals(allFiles.get(0).getFileName().toString()));
        Assert.assertTrue(
                Files.isSymbolicLink(Paths.get(movedFiles.iterator().next().getAbsolutePath())));

        // All files still remain in columnfamily dir.
        Collection<File> cfFiles =
                FileUtils.listFiles(new File(allFiles.get(0).getParent().toString()), null, false);
        Assert.assertEquals(allFiles.size(), cfFiles.size());

        // Snapshot is untouched.
        Collection<File> snapshotFiles = FileUtils.listFiles(snapshotDir.toFile(), null, false);
        Assert.assertEquals(3, snapshotFiles.size());

        // Lets change the configuration and try again!!
        configuration.setGracePeriodForgottenFileInDaysForRead(0);
        forgottenFilesManager.findAndMoveForgottenFiles(snapshotInstant, snapshotDir.toFile());
        configuration.setGracePeriodForgottenFileInDaysForRead(
                ForgottenFilesConfiguration.DEFAULT_GRACE_PERIOD);
        movedFiles = FileUtils.listFiles(lostFoundDir.toFile(), null, false);
        Assert.assertEquals(1, movedFiles.size());
        Assert.assertTrue(
                Files.isRegularFile(Paths.get(movedFiles.iterator().next().getAbsolutePath())));
        cfFiles =
                FileUtils.listFiles(new File(allFiles.get(0).getParent().toString()), null, false);
        Assert.assertEquals(6, cfFiles.size());
        // BUG FIX: the original loop invoked equals() and discarded the result, so it
        // asserted nothing (and implicitly assumed a listing order FileUtils does not
        // guarantee). Compare the surviving names against files 2..7 order-independently.
        List<String> expectedNames =
                allFiles.subList(1, allFiles.size())
                        .stream()
                        .map(path -> path.getFileName().toString())
                        .sorted()
                        .collect(Collectors.toList());
        List<String> actualNames =
                cfFiles.stream().map(File::getName).sorted().collect(Collectors.toList());
        Assert.assertEquals(expectedNames, actualNames);

        // Snapshot is untouched.
        snapshotFiles = FileUtils.listFiles(snapshotDir.toFile(), null, false);
        Assert.assertEquals(3, snapshotFiles.size());
    }

    /**
     * {@link FakeConfiguration} with forgotten-file handling enabled and a mutable
     * grace period. Made static: it uses no state from the enclosing test instance.
     */
    private static class ForgottenFilesConfiguration extends FakeConfiguration {
        protected static final int DEFAULT_GRACE_PERIOD = 3;
        private int gracePeriodForgottenFileInDaysForRead = DEFAULT_GRACE_PERIOD;

        @Override
        public boolean isForgottenFileMoveEnabled() {
            return true;
        }

        @Override
        public int getForgottenFileGracePeriodDaysForRead() {
            return gracePeriodForgottenFileInDaysForRead;
        }

        public void setGracePeriodForgottenFileInDaysForRead(
                int gracePeriodForgottenFileInDaysForRead) {
            this.gracePeriodForgottenFileInDaysForRead = gracePeriodForgottenFileInDaysForRead;
        }
    }
}
| 3,154 |
0 | Create_ds/Priam/priam/src/test/java/com/netflix/priam | Create_ds/Priam/priam/src/test/java/com/netflix/priam/backupv2/TestBackupVerificationTask.java | /*
* Copyright 2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.priam.backupv2;
import com.google.common.collect.ImmutableList;
import com.google.common.truth.Truth;
import com.google.inject.Guice;
import com.google.inject.Injector;
import com.netflix.priam.backup.*;
import com.netflix.priam.health.InstanceState;
import com.netflix.priam.merics.Metrics;
import com.netflix.priam.notification.BackupNotificationMgr;
import com.netflix.priam.scheduler.UnsupportedTypeException;
import com.netflix.priam.utils.DateUtil.DateRange;
import com.netflix.spectator.api.Counter;
import com.netflix.spectator.api.Registry;
import java.time.Instant;
import java.time.temporal.ChronoUnit;
import java.util.Date;
import java.util.List;
import java.util.Optional;
import javax.inject.Inject;
import mockit.*;
import org.junit.Before;
import org.junit.Test;
import org.junit.jupiter.api.Assertions;
/**
 * Tests for the scheduled backup-verification task: failure metric accounting and
 * notification dispatch, driven by a static-state JMockit {@code MockUp} of
 * {@link BackupVerification}.
 *
 * <p>Created by aagrawal on 2/1/19.
 */
public class TestBackupVerificationTask {
    @Inject private BackupVerificationTask backupVerificationService;
    // Spectator counter incremented by the task when verification finds no valid backup.
    private Counter badVerifications;
    @Mocked private BackupVerification backupVerification;
    @Mocked private BackupNotificationMgr backupNotificationMgr;
    @Before
    public void setUp() {
        // Install the MockUps before Guice wiring so the task sees the fakes.
        new MockBackupVerification();
        new MockBackupNotificationMgr();
        Injector injector = Guice.createInjector(new BRTestModule());
        injector.injectMembers(this);
        badVerifications =
                injector.getInstance(Registry.class)
                        .counter(Metrics.METRIC_PREFIX + "backup.verification.failure");
    }
    /**
     * Fake BackupVerification whose behavior is configured via static setters; state is
     * static because JMockit instantiates the MockUp target internally.
     */
    private static final class MockBackupVerification extends MockUp<BackupVerification> {
        private static boolean throwError;
        private static ImmutableList<BackupVerificationResult> results;
        private static ImmutableList<BackupMetadata> verifiedBackups;
        public static void setResults(BackupVerificationResult... newResults) {
            results = ImmutableList.copyOf(newResults);
        }
        public static void setVerifiedBackups(BackupMetadata... newVerifiedBackups) {
            verifiedBackups = ImmutableList.copyOf(newVerifiedBackups);
        }
        public static void shouldThrow(boolean newThrowError) {
            throwError = newThrowError;
        }
        @Mock
        public List<BackupMetadata> verifyBackupsInRange(
                BackupVersion backupVersion, DateRange dateRange)
                throws UnsupportedTypeException, IllegalArgumentException {
            if (throwError) throw new IllegalArgumentException("DummyError");
            return verifiedBackups;
        }
        @Mock
        public Optional<BackupVerificationResult> verifyLatestBackup(
                BackupVersion backupVersion, boolean force, DateRange dateRange)
                throws UnsupportedTypeException, IllegalArgumentException {
            if (throwError) throw new IllegalArgumentException("DummyError");
            return results.isEmpty() ? Optional.empty() : Optional.of(results.get(0));
        }
    }
    // No-op fake so notifications never leave the test process.
    private static final class MockBackupNotificationMgr extends MockUp<BackupNotificationMgr> {}
    /** Exceptions from verification must propagate out of execute(). */
    @Test
    public void throwError() {
        MockBackupVerification.shouldThrow(true);
        Assertions.assertThrows(
                IllegalArgumentException.class, () -> backupVerificationService.execute());
    }
    /** A freshly validated backup: no failure metric, exactly one notification. */
    @Test
    public void validBackups() throws Exception {
        MockBackupVerification.shouldThrow(false);
        MockBackupVerification.setVerifiedBackups(getRecentlyValidatedMetadata());
        backupVerificationService.execute();
        Truth.assertThat(badVerifications.count()).isEqualTo(0);
        new Verifications() {
            {
                backupNotificationMgr.notify(anyString, (Instant) any);
                times = 1;
            }
        };
    }
    /**
     * Metadata lacking validation timestamp/location makes the task fail.
     *
     * <p>NOTE(review): asserting NullPointerException pins an incidental failure mode
     * rather than an intentional contract — confirm against BackupVerificationTask.
     */
    @Test
    public void invalidBackups() {
        MockBackupVerification.shouldThrow(false);
        MockBackupVerification.setVerifiedBackups(getInvalidBackupMetadata());
        Assertions.assertThrows(
                NullPointerException.class, () -> backupVerificationService.execute());
    }
    /** A backup validated in the past: no failure metric, but no re-notification either. */
    @Test
    public void previouslyVerifiedBackups() throws Exception {
        MockBackupVerification.shouldThrow(false);
        MockBackupVerification.setVerifiedBackups(getPreviouslyValidatedMetadata());
        backupVerificationService.execute();
        Truth.assertThat(badVerifications.count()).isEqualTo(0);
        new Verifications() {
            {
                backupNotificationMgr.notify(anyString, (Instant) any);
                times = 0;
            }
        };
    }
    /** No verified backups at all: failure metric increments, nothing is notified. */
    @Test
    public void noBackups() throws Exception {
        MockBackupVerification.shouldThrow(false);
        MockBackupVerification.setVerifiedBackups();
        backupVerificationService.execute();
        Truth.assertThat(badVerifications.count()).isEqualTo(1);
        new Verifications() {
            {
                backupNotificationMgr.notify(anyString, (Instant) any);
                maxTimes = 0;
            }
        };
    }
    /** While a restore is in progress the task must not verify or notify at all. */
    @Test
    public void testRestoreMode(@Mocked InstanceState state) throws Exception {
        new Expectations() {
            {
                state.getRestoreStatus().getStatus();
                result = Status.STARTED;
            }
        };
        backupVerificationService.execute();
        Truth.assertThat(badVerifications.count()).isEqualTo(0);
        new Verifications() {
            {
                backupVerification.verifyBackupsInRange((BackupVersion) any, (DateRange) any);
                maxTimes = 0;
            }
            {
                backupNotificationMgr.notify(anyString, (Instant) any);
                maxTimes = 0;
            }
        };
    }
    // Metadata with no lastValidated date and no snapshot location.
    private static BackupMetadata getInvalidBackupMetadata() {
        return new BackupMetadata(BackupVersion.SNAPSHOT_META_SERVICE, "12345", new Date());
    }
    // Validated one hour in the past -> task treats it as already handled.
    private static BackupMetadata getPreviouslyValidatedMetadata() {
        BackupMetadata backupMetadata =
                new BackupMetadata(BackupVersion.SNAPSHOT_META_SERVICE, "12345", new Date());
        backupMetadata.setLastValidated(
                new Date(Instant.now().minus(1, ChronoUnit.HOURS).toEpochMilli()));
        return backupMetadata;
    }
    // Validated "in the future" relative to now -> counts as a fresh validation.
    private static BackupMetadata getRecentlyValidatedMetadata() {
        BackupMetadata backupMetadata =
                new BackupMetadata(BackupVersion.SNAPSHOT_META_SERVICE, "12345", new Date());
        backupMetadata.setLastValidated(
                new Date(Instant.now().plus(1, ChronoUnit.HOURS).toEpochMilli()));
        backupMetadata.setSnapshotLocation("bucket/path/to/file.db");
        return backupMetadata;
    }
}
| 3,155 |
0 | Create_ds/Priam/priam/src/test/java/com/netflix/priam | Create_ds/Priam/priam/src/test/java/com/netflix/priam/backupv2/TestMetaV2Proxy.java | /*
* Copyright 2018 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.priam.backupv2;
import com.google.inject.Guice;
import com.google.inject.Injector;
import com.netflix.priam.backup.AbstractBackupPath;
import com.netflix.priam.backup.BRTestModule;
import com.netflix.priam.backup.BackupRestoreException;
import com.netflix.priam.backup.FakeBackupFileSystem;
import com.netflix.priam.config.IConfiguration;
import com.netflix.priam.utils.DateUtil;
import java.io.File;
import java.io.IOException;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.time.Instant;
import java.time.temporal.ChronoUnit;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.stream.Collectors;
import javax.inject.Provider;
import org.apache.commons.io.FileUtils;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
/**
 * Tests for {@link MetaV2Proxy}: meta-file prefix computation, meta-file validation,
 * incremental listing, meta discovery by date range, and cleanup of locally staged
 * meta files. Remote state is simulated via {@link FakeBackupFileSystem}.
 *
 * <p>Created by aagrawal on 12/5/18.
 */
public class TestMetaV2Proxy {
    private FakeBackupFileSystem fs;
    private IConfiguration configuration;
    private TestBackupUtils backupUtils;
    private IMetaProxy metaProxy;
    private Provider<AbstractBackupPath> abstractBackupPathProvider;
    public TestMetaV2Proxy() {
        Injector injector = Guice.createInjector(new BRTestModule());
        configuration = injector.getInstance(IConfiguration.class);
        fs = injector.getInstance(FakeBackupFileSystem.class);
        // Seed the fake remote file system with the fixture layout below.
        fs.setupTest(getRemoteFakeFiles());
        backupUtils = new TestBackupUtils();
        metaProxy = injector.getInstance(MetaV2Proxy.class);
        abstractBackupPathProvider = injector.getProvider(AbstractBackupPath.class);
    }
    @Before
    public void setUp() {
        new File(configuration.getDataFileLocation()).mkdirs();
    }
    /** Prefix narrows progressively with the date range supplied. */
    @Test
    public void testMetaPrefix() {
        // Null date range
        Assert.assertEquals(getPrefix() + "/META_V2", metaProxy.getMetaPrefix(null));
        Instant now = Instant.now();
        // No end date.
        Assert.assertEquals(
                getPrefix() + "/META_V2/" + now.toEpochMilli(),
                metaProxy.getMetaPrefix(new DateUtil.DateRange(now, null)));
        // No start date
        Assert.assertEquals(
                getPrefix() + "/META_V2",
                metaProxy.getMetaPrefix(new DateUtil.DateRange(null, Instant.now())));
        // Bounded range: prefix keeps only the epoch digits shared by start and end.
        long start = 1834567890L;
        long end = 1834877776L;
        Assert.assertEquals(
                getPrefix() + "/META_V2/1834",
                metaProxy.getMetaPrefix(
                        new DateUtil.DateRange(
                                Instant.ofEpochSecond(start), Instant.ofEpochSecond(end))));
    }
    /**
     * A meta is valid only when every file it references exists remotely.
     *
     * <p>NOTE(review): after the first assertion, {@code abstractBackupPath} is never
     * re-parsed even though {@code metaPath} is recreated/deleted — the later
     * assertions appear to rely on the originally parsed (now deleted) local file
     * being invalid; confirm this is the intended coverage.
     */
    @Test
    public void testIsMetaFileValid() throws Exception {
        Instant snapshotInstant = DateUtil.getInstant();
        Path metaPath = backupUtils.createMeta(getRemoteFakeFiles(), snapshotInstant);
        AbstractBackupPath abstractBackupPath = abstractBackupPathProvider.get();
        abstractBackupPath.parseLocal(metaPath.toFile(), AbstractBackupPath.BackupFileType.META_V2);
        Assert.assertTrue(metaProxy.isMetaFileValid(abstractBackupPath).valid);
        FileUtils.deleteQuietly(metaPath.toFile());
        // Reference a file ("file9") that does not exist remotely -> meta is invalid.
        List<String> fileToAdd = getRemoteFakeFiles();
        fileToAdd.add(
                Paths.get(
                        getPrefix(),
                        AbstractBackupPath.BackupFileType.SST_V2.toString(),
                        "1859817645000",
                        "keyspace1",
                        "columnfamily1",
                        "SNAPPY",
                        "PLAINTEXT",
                        "file9.Data.db")
                        .toString());
        metaPath = backupUtils.createMeta(fileToAdd, snapshotInstant);
        Assert.assertFalse(metaProxy.isMetaFileValid(abstractBackupPath).valid);
        FileUtils.deleteQuietly(metaPath.toFile());
        // Nonexistent local meta file is also invalid.
        metaPath = Paths.get(configuration.getDataFileLocation(), "meta_v2_201801011201.json");
        Assert.assertFalse(metaProxy.isMetaFileValid(abstractBackupPath).valid);
    }
    /** Files extracted from a meta must round-trip exactly to those written into it. */
    @Test
    public void testGetSSTFilesFromMeta() throws Exception {
        Instant snapshotInstant = DateUtil.getInstant();
        List<String> remoteFiles = getRemoteFakeFiles();
        Path metaPath = backupUtils.createMeta(remoteFiles, snapshotInstant);
        List<String> filesFromMeta = metaProxy.getSSTFilesFromMeta(metaPath);
        filesFromMeta.removeAll(remoteFiles);
        Assert.assertTrue(filesFromMeta.isEmpty());
    }
    /** SST_V2 entries inside the range: file3, file4 and the manifest (3 of them). */
    @Test
    public void testGetIncrementalFiles() throws Exception {
        DateUtil.DateRange dateRange = new DateUtil.DateRange("202812071820,20281229");
        Iterator<AbstractBackupPath> incrementals = metaProxy.getIncrementals(dateRange);
        int i = 0;
        while (incrementals.hasNext()) {
            System.out.println(incrementals.next());
            i++;
        }
        Assert.assertEquals(3, i);
    }
    /** Meta discovery honors the (inclusive) range bounds against the fixture epochs. */
    @Test
    public void testFindMetaFiles() throws BackupRestoreException {
        List<AbstractBackupPath> metas =
                metaProxy.findMetaFiles(
                        new DateUtil.DateRange(
                                Instant.ofEpochMilli(1859824860000L),
                                Instant.ofEpochMilli(1859828420000L)));
        Assert.assertEquals(1, metas.size());
        Assert.assertEquals("meta_v2_202812071801.json", metas.get(0).getFileName());
        Assert.assertTrue(fs.doesRemoteFileExist(Paths.get(metas.get(0).getRemotePath())));
        // Widening the end bound by one fixture timestamp picks up the second meta too.
        metas =
                metaProxy.findMetaFiles(
                        new DateUtil.DateRange(
                                Instant.ofEpochMilli(1859824860000L),
                                Instant.ofEpochMilli(1859828460000L)));
        Assert.assertEquals(2, metas.size());
    }
    // TODO(review): empty placeholder — implement or delete.
    @Test
    public void testFindLatestValidMetaFile() {}
    // Common remote prefix for all fixture paths: cluster/app-id/token.
    private String getPrefix() {
        return "casstestbackup/1049_fake-app/1808575600";
    }
    /**
     * Fixture layout: five SST_V2 entries (file1-file4 + manifest) and two META_V2
     * entries, each path segment encoding epoch-millis, keyspace/CF, compression and
     * encryption as MetaV2Proxy expects.
     */
    private List<String> getRemoteFakeFiles() {
        List<Path> files = new ArrayList<>();
        files.add(
                Paths.get(
                        getPrefix(),
                        AbstractBackupPath.BackupFileType.SST_V2.toString(),
                        "1859817645000",
                        "keyspace1",
                        "columnfamily1",
                        "SNAPPY",
                        "PLAINTEXT",
                        "file1-Data.db"));
        files.add(
                Paths.get(
                        getPrefix(),
                        AbstractBackupPath.BackupFileType.SST_V2.toString(),
                        "1859818845000",
                        "keyspace1",
                        "columnfamily1",
                        "SNAPPY",
                        "PLAINTEXT",
                        "file2-Data.db"));
        files.add(
                Paths.get(
                        getPrefix(),
                        AbstractBackupPath.BackupFileType.META_V2.toString(),
                        "1859824860000",
                        "SNAPPY",
                        "PLAINTEXT",
                        "meta_v2_202812071801.json"));
        files.add(
                Paths.get(
                        getPrefix(),
                        AbstractBackupPath.BackupFileType.SST_V2.toString(),
                        "1859826045000",
                        "keyspace1",
                        "columnfamily1",
                        "SNAPPY",
                        "PLAINTEXT",
                        "manifest"));
        files.add(
                Paths.get(
                        getPrefix(),
                        AbstractBackupPath.BackupFileType.SST_V2.toString(),
                        "1859828410000",
                        "keyspace1",
                        "columnfamily1",
                        "SNAPPY",
                        "PLAINTEXT",
                        "file3-Data.db"));
        files.add(
                Paths.get(
                        getPrefix(),
                        AbstractBackupPath.BackupFileType.SST_V2.toString(),
                        "1859828420000",
                        "keyspace1",
                        "columnfamily1",
                        "SNAPPY",
                        "PLAINTEXT",
                        "file4-Data.db"));
        files.add(
                Paths.get(
                        getPrefix(),
                        AbstractBackupPath.BackupFileType.META_V2.toString(),
                        "1859828460000",
                        "SNAPPY",
                        "PLAINTEXT",
                        "meta_v2_202812071901.json"));
        return files.stream().map(Path::toString).collect(Collectors.toList());
    }
    @After
    public void cleanup() throws IOException {
        FileUtils.cleanDirectory(new File(configuration.getDataFileLocation()));
    }
    /** Cleanup deletes staged meta_v2_*.json files but leaves unrelated .tmp files. */
    @Test
    public void testCleanupOldMetaFiles() throws IOException {
        generateDummyMetaFiles();
        Path dataDir = Paths.get(configuration.getDataFileLocation());
        Assert.assertEquals(4, dataDir.toFile().listFiles().length);
        // clean the directory
        metaProxy.cleanupOldMetaFiles();
        Assert.assertEquals(1, dataDir.toFile().listFiles().length);
        Path dummy = Paths.get(dataDir.toString(), "dummy.tmp");
        Assert.assertTrue(dummy.toFile().exists());
    }
    // Writes two meta files, one meta .tmp, and one unrelated dummy.tmp.
    private void generateDummyMetaFiles() throws IOException {
        Path dataDir = Paths.get(configuration.getDataFileLocation());
        FileUtils.cleanDirectory(dataDir.toFile());
        FileUtils.write(
                Paths.get(
                        configuration.getDataFileLocation(),
                        MetaFileInfo.getMetaFileName(DateUtil.getInstant()))
                        .toFile(),
                "dummy",
                "UTF-8");
        FileUtils.write(
                Paths.get(
                        configuration.getDataFileLocation(),
                        MetaFileInfo.getMetaFileName(
                                DateUtil.getInstant().minus(10, ChronoUnit.MINUTES)))
                        .toFile(),
                "dummy",
                "UTF-8");
        FileUtils.write(
                Paths.get(
                        configuration.getDataFileLocation(),
                        MetaFileInfo.getMetaFileName(DateUtil.getInstant()) + ".tmp")
                        .toFile(),
                "dummy",
                "UTF-8");
        FileUtils.write(
                Paths.get(configuration.getDataFileLocation(), "dummy.tmp").toFile(),
                "dummy",
                "UTF-8");
    }
}
| 3,156 |
0 | Create_ds/Priam/priam/src/test/java/com/netflix/priam | Create_ds/Priam/priam/src/test/java/com/netflix/priam/aws/TestRemoteBackupPath.java | /*
* Copyright 2018 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.priam.aws;
import com.google.inject.Guice;
import com.google.inject.Injector;
import com.netflix.priam.backup.AbstractBackupPath;
import com.netflix.priam.backup.AbstractBackupPath.BackupFileType;
import com.netflix.priam.backup.BRTestModule;
import com.netflix.priam.config.IConfiguration;
import com.netflix.priam.cryptography.CryptographyAlgorithm;
import com.netflix.priam.utils.DateUtil;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.text.ParseException;
import java.time.Instant;
import javax.inject.Provider;
import org.junit.Assert;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/** Created by aagrawal on 11/23/18. */
public class TestRemoteBackupPath {
private static final Logger logger = LoggerFactory.getLogger(TestRemoteBackupPath.class);
private Provider<AbstractBackupPath> pathFactory;
private IConfiguration configuration;
public TestRemoteBackupPath() {
Injector injector = Guice.createInjector(new BRTestModule());
pathFactory = injector.getProvider(AbstractBackupPath.class);
configuration = injector.getInstance(IConfiguration.class);
}
    /**
     * V1 incremental (SST) path: parsing a local backup file yields the expected
     * keyspace/CF/type, and the remote path round-trips through parseRemote.
     */
    @Test
    public void testV1BackupPathsSST() throws ParseException {
        Path path =
                Paths.get(
                        configuration.getDataFileLocation(),
                        "keyspace1",
                        "columnfamily1",
                        "backup",
                        "mc-1234-Data.db");
        AbstractBackupPath abstractBackupPath = pathFactory.get();
        abstractBackupPath.parseLocal(path.toFile(), BackupFileType.SST);
        // Verify parse local
        Assert.assertEquals(
                0, abstractBackupPath.getLastModified().toEpochMilli()); // File does not exist.
        Assert.assertEquals("keyspace1", abstractBackupPath.getKeyspace());
        Assert.assertEquals("columnfamily1", abstractBackupPath.getColumnFamily());
        Assert.assertEquals(BackupFileType.SST, abstractBackupPath.getType());
        Assert.assertEquals(path.toFile(), abstractBackupPath.getBackupFile());
        Assert.assertEquals(
                0,
                abstractBackupPath
                        .getTime()
                        .toInstant()
                        .toEpochMilli()); // Since file does not exist.
        // Verify toRemote and parseRemote.
        String remotePath = abstractBackupPath.getRemotePath();
        logger.info(remotePath);
        AbstractBackupPath abstractBackupPath2 = pathFactory.get();
        abstractBackupPath2.parseRemote(remotePath);
        validateAbstractBackupPath(abstractBackupPath, abstractBackupPath2);
        Assert.assertEquals(abstractBackupPath.getTime(), abstractBackupPath2.getTime());
    }
private void validateAbstractBackupPath(AbstractBackupPath abp1, AbstractBackupPath abp2) {
Assert.assertEquals(abp1.getKeyspace(), abp2.getKeyspace());
Assert.assertEquals(abp1.getColumnFamily(), abp2.getColumnFamily());
Assert.assertEquals(abp1.getFileName(), abp2.getFileName());
Assert.assertEquals(abp1.getType(), abp2.getType());
Assert.assertEquals(abp1.getCompression(), abp2.getCompression());
Assert.assertEquals(abp1.getEncryption(), abp2.getEncryption());
}
    /**
     * V1 snapshot path: the snapshot-directory timestamp ("201801011201") is parsed
     * into the path's time, and the remote path round-trips through parseRemote.
     */
    @Test
    public void testV1BackupPathsSnap() throws ParseException {
        Path path =
                Paths.get(
                        configuration.getDataFileLocation(),
                        "keyspace1",
                        "columnfamily1",
                        "snapshot",
                        "201801011201",
                        "mc-1234-Data.db");
        AbstractBackupPath abstractBackupPath = pathFactory.get();
        abstractBackupPath.parseLocal(path.toFile(), BackupFileType.SNAP);
        // Verify parse local
        Assert.assertEquals(
                0, abstractBackupPath.getLastModified().toEpochMilli()); // File does not exist.
        Assert.assertEquals("keyspace1", abstractBackupPath.getKeyspace());
        Assert.assertEquals("columnfamily1", abstractBackupPath.getColumnFamily());
        Assert.assertEquals(BackupFileType.SNAP, abstractBackupPath.getType());
        Assert.assertEquals(path.toFile(), abstractBackupPath.getBackupFile());
        // Time comes from the snapshot directory name, not the file's mtime.
        Assert.assertEquals(
                "201801011201", DateUtil.formatyyyyMMddHHmm(abstractBackupPath.getTime()));
        // Verify toRemote and parseRemote.
        String remotePath = abstractBackupPath.getRemotePath();
        logger.info(remotePath);
        AbstractBackupPath abstractBackupPath2 = pathFactory.get();
        abstractBackupPath2.parseRemote(remotePath);
        validateAbstractBackupPath(abstractBackupPath, abstractBackupPath2);
        Assert.assertEquals(abstractBackupPath.getTime(), abstractBackupPath2.getTime());
    }
    /**
     * V1 meta.json path: carries no keyspace/column family, and its remote form
     * round-trips through parseRemote.
     */
    @Test
    public void testV1BackupPathsMeta() throws ParseException {
        Path path = Paths.get(configuration.getDataFileLocation(), "meta.json");
        AbstractBackupPath abstractBackupPath = pathFactory.get();
        abstractBackupPath.parseLocal(path.toFile(), BackupFileType.META);
        // Verify parse local
        Assert.assertEquals(
                0, abstractBackupPath.getLastModified().toEpochMilli()); // File does not exist.
        Assert.assertEquals(null, abstractBackupPath.getKeyspace());
        Assert.assertEquals(null, abstractBackupPath.getColumnFamily());
        Assert.assertEquals(BackupFileType.META, abstractBackupPath.getType());
        Assert.assertEquals(path.toFile(), abstractBackupPath.getBackupFile());
        // Verify toRemote and parseRemote.
        String remotePath = abstractBackupPath.getRemotePath();
        logger.info(remotePath);
        AbstractBackupPath abstractBackupPath2 = pathFactory.get();
        abstractBackupPath2.parseRemote(remotePath);
        validateAbstractBackupPath(abstractBackupPath, abstractBackupPath2);
        Assert.assertEquals(abstractBackupPath.getTime(), abstractBackupPath2.getTime());
    }
@Test
public void testV2BackupPathSST() throws ParseException {
Path path =
Paths.get(
configuration.getDataFileLocation(),
"keyspace1",
"columnfamily1",
"backup",
"mc-1234-Data.db");
AbstractBackupPath abstractBackupPath = pathFactory.get();
abstractBackupPath.parseLocal(path.toFile(), BackupFileType.SST_V2);
// Verify parse local
Assert.assertEquals(
0, abstractBackupPath.getLastModified().toEpochMilli()); // File do not exist.
Assert.assertEquals("keyspace1", abstractBackupPath.getKeyspace());
Assert.assertEquals("columnfamily1", abstractBackupPath.getColumnFamily());
Assert.assertEquals("SNAPPY", abstractBackupPath.getCompression().name());
Assert.assertEquals(BackupFileType.SST_V2, abstractBackupPath.getType());
Assert.assertEquals(path.toFile(), abstractBackupPath.getBackupFile());
// Verify toRemote and parseRemote.
Instant now = DateUtil.getInstant();
abstractBackupPath.setLastModified(now);
String remotePath = abstractBackupPath.getRemotePath();
logger.info(remotePath);
AbstractBackupPath abstractBackupPath2 = pathFactory.get();
abstractBackupPath2.parseRemote(remotePath);
Assert.assertEquals(
now.toEpochMilli() / 1_000L * 1_000L,
abstractBackupPath2.getLastModified().toEpochMilli());
validateAbstractBackupPath(abstractBackupPath, abstractBackupPath2);
}
@Test
public void testV2BackupPathMeta() throws ParseException {
Path path = Paths.get(configuration.getDataFileLocation(), "meta_v2_201801011201.json");
AbstractBackupPath abstractBackupPath = pathFactory.get();
abstractBackupPath.parseLocal(path.toFile(), BackupFileType.META_V2);
// Verify parse local
Assert.assertEquals(
0, abstractBackupPath.getLastModified().toEpochMilli()); // File do not exist.
Assert.assertEquals(null, abstractBackupPath.getKeyspace());
Assert.assertEquals(null, abstractBackupPath.getColumnFamily());
Assert.assertEquals(BackupFileType.META_V2, abstractBackupPath.getType());
Assert.assertEquals(path.toFile(), abstractBackupPath.getBackupFile());
Assert.assertEquals(CryptographyAlgorithm.PLAINTEXT, abstractBackupPath.getEncryption());
// Verify toRemote and parseRemote.
Instant now = DateUtil.getInstant();
abstractBackupPath.setLastModified(now);
String remotePath = abstractBackupPath.getRemotePath();
logger.info(remotePath);
Assert.assertEquals("SNAPPY", abstractBackupPath.getCompression().name());
AbstractBackupPath abstractBackupPath2 = pathFactory.get();
abstractBackupPath2.parseRemote(remotePath);
Assert.assertEquals(
now.toEpochMilli() / 1_000L * 1_000L,
abstractBackupPath2.getLastModified().toEpochMilli());
validateAbstractBackupPath(abstractBackupPath, abstractBackupPath2);
}
@Test
public void testRemoteV2Prefix() throws ParseException {
Path path = Paths.get("test_backup");
AbstractBackupPath abstractBackupPath = pathFactory.get();
Assert.assertEquals(
"casstestbackup/1049_fake-app/1808575600/META_V2",
abstractBackupPath.remoteV2Prefix(path, BackupFileType.META_V2).toString());
path = Paths.get("s3-bucket-name", "fake_base_dir", "-6717_random_fake_app");
Assert.assertEquals(
"fake_base_dir/-6717_random_fake_app/1808575600/META_V2",
abstractBackupPath.remoteV2Prefix(path, BackupFileType.META_V2).toString());
}
}
| 3,157 |
0 | Create_ds/Priam/priam/src/test/java/com/netflix/priam | Create_ds/Priam/priam/src/test/java/com/netflix/priam/backup/TestBackupStatusMgr.java | /*
* Copyright 2017 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.priam.backup;
import com.google.inject.Guice;
import com.google.inject.Injector;
import com.netflix.priam.config.IConfiguration;
import com.netflix.priam.utils.DateUtil;
import com.netflix.priam.utils.DateUtil.DateRange;
import java.io.File;
import java.time.Instant;
import java.time.temporal.ChronoUnit;
import java.util.Calendar;
import java.util.Date;
import java.util.List;
import java.util.Optional;
import org.apache.commons.io.FileUtils;
import org.joda.time.DateTime;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Unit tests for {@link IBackupStatusMgr}: persisting, locating, updating and querying snapshot
 * backup metadata, including capacity eviction and latest-backup selection.
 *
 * <p>Created by aagrawal on 7/11/17.
 */
public class TestBackupStatusMgr {
    private static final Logger logger = LoggerFactory.getLogger(TestBackupStatusMgr.class);
    private static IConfiguration configuration;
    private static IBackupStatusMgr backupStatusMgr;
    // Reference timestamp (yyyyMMddHHmm) used by the seeded fixtures below.
    private final String backupDate = "201812011000";

    @BeforeClass
    public static void setup() {
        Injector injector = Guice.createInjector(new BRTestModule());
        // cleanup old saved file, if any
        configuration = injector.getInstance(IConfiguration.class);
        backupStatusMgr = injector.getInstance(IBackupStatusMgr.class);
    }

    /** Delete the persisted status file before and after each test to keep tests isolated. */
    @Before
    @After
    public void cleanup() {
        FileUtils.deleteQuietly(new File(configuration.getBackupStatusFileLoc()));
    }

    /** Seeds a mix of FINISHED/FAILED backups over three days starting at {@link #backupDate}. */
    private void prepare() throws Exception {
        cleanup();
        Instant start = DateUtil.parseInstant(backupDate);
        backupStatusMgr.finish(getBackupMetaData(start, Status.FINISHED));
        backupStatusMgr.failed(getBackupMetaData(start.plus(2, ChronoUnit.HOURS), Status.FAILED));
        backupStatusMgr.finish(getBackupMetaData(start.plus(4, ChronoUnit.HOURS), Status.FINISHED));
        backupStatusMgr.failed(getBackupMetaData(start.plus(6, ChronoUnit.HOURS), Status.FAILED));
        backupStatusMgr.failed(getBackupMetaData(start.plus(8, ChronoUnit.HOURS), Status.FAILED));
        backupStatusMgr.finish(getBackupMetaData(start.plus(1, ChronoUnit.DAYS), Status.FINISHED));
        backupStatusMgr.finish(getBackupMetaData(start.plus(2, ChronoUnit.DAYS), Status.FINISHED));
    }

    /** Builds a 30-minute snapshot backup record starting at {@code startTime}. */
    private BackupMetadata getBackupMetaData(Instant startTime, Status status) throws Exception {
        BackupMetadata backupMetadata =
                new BackupMetadata(
                        BackupVersion.SNAPSHOT_BACKUP, "123", new Date(startTime.toEpochMilli()));
        backupMetadata.setCompleted(
                new Date(startTime.plus(30, ChronoUnit.MINUTES).toEpochMilli()));
        backupMetadata.setStatus(status);
        backupMetadata.setSnapshotLocation("file.txt");
        return backupMetadata;
    }

    @Test
    public void testSnapshotUpdateMethod() throws Exception {
        Date startTime = DateUtil.getDate("198407110720");
        BackupMetadata backupMetadata =
                new BackupMetadata(BackupVersion.SNAPSHOT_BACKUP, "123", startTime);

        backupStatusMgr.start(backupMetadata);
        Optional<BackupMetadata> backupMetadata1 =
                backupStatusMgr.locate(startTime).stream().findFirst();
        // Guard before get(): fail with a clear assertion rather than NoSuchElementException.
        Assert.assertTrue(backupMetadata1.isPresent());
        Assert.assertNull(backupMetadata1.get().getLastValidated());

        // Mutate the record and verify update() persists the new values.
        backupMetadata.setLastValidated(Calendar.getInstance().getTime());
        backupMetadata.setCassandraSnapshotSuccess(true);
        backupMetadata.setSnapshotLocation("random");
        backupStatusMgr.update(backupMetadata);
        backupMetadata1 = backupStatusMgr.locate(startTime).stream().findFirst();
        Assert.assertTrue(backupMetadata1.isPresent());
        Assert.assertNotNull(backupMetadata1.get().getLastValidated());
        Assert.assertTrue(backupMetadata1.get().isCassandraSnapshotSuccess());
        Assert.assertEquals("random", backupMetadata1.get().getSnapshotLocation());
    }

    @Test
    public void testSnapshotStatusAddFinish() throws Exception {
        Date startTime = DateUtil.getDate("198407110720");
        BackupMetadata backupMetadata =
                new BackupMetadata(BackupVersion.SNAPSHOT_BACKUP, "123", startTime);

        backupStatusMgr.start(backupMetadata);
        List<BackupMetadata> metadataList = backupStatusMgr.locate(startTime);
        Assert.assertNotNull(metadataList);
        Assert.assertFalse(metadataList.isEmpty());
        Assert.assertEquals(1, metadataList.size());
        Assert.assertEquals(startTime, metadataList.get(0).getStart());
        logger.info("Snapshot start: {}", metadataList.get(0));

        backupStatusMgr.finish(backupMetadata);
        metadataList = backupStatusMgr.locate(startTime);
        Assert.assertNotNull(metadataList);
        Assert.assertFalse(metadataList.isEmpty());
        Assert.assertEquals(1, metadataList.size());
        Assert.assertEquals(Status.FINISHED, metadataList.get(0).getStatus());
        Assert.assertNotNull(metadataList.get(0).getCompleted());
        logger.info("Snapshot finished: {}", metadataList.get(0));
    }

    @Test
    public void testSnapshotStatusAddFailed() throws Exception {
        Date startTime = DateUtil.getDate("198407120720");
        BackupMetadata backupMetadata =
                new BackupMetadata(BackupVersion.SNAPSHOT_BACKUP, "123", startTime);

        backupStatusMgr.start(backupMetadata);
        List<BackupMetadata> metadataList = backupStatusMgr.locate(startTime);
        Assert.assertNotNull(metadataList);
        Assert.assertFalse(metadataList.isEmpty());
        Assert.assertEquals(1, metadataList.size());
        Assert.assertEquals(startTime, metadataList.get(0).getStart());
        logger.info("Snapshot start: {}", metadataList.get(0));

        backupStatusMgr.failed(backupMetadata);
        metadataList = backupStatusMgr.locate(startTime);
        Assert.assertNotNull(metadataList);
        Assert.assertFalse(metadataList.isEmpty());
        Assert.assertEquals(1, metadataList.size());
        Assert.assertEquals(Status.FAILED, metadataList.get(0).getStatus());
        Assert.assertNotNull(metadataList.get(0).getCompleted());
        logger.info("Snapshot failed: {}", metadataList.get(0));
    }

    @Test
    public void testSnapshotStatusMultiAddFinishInADay() throws Exception {
        final int noOfEntries = 10;
        Date startTime = DateUtil.getDate("19840101");

        for (int i = 0; i < noOfEntries; i++) {
            assert startTime != null;
            Date time = new DateTime(startTime.getTime()).plusHours(i).toDate();
            BackupMetadata backupMetadata =
                    new BackupMetadata(BackupVersion.SNAPSHOT_BACKUP, "123", time);
            backupStatusMgr.start(backupMetadata);
            backupStatusMgr.finish(backupMetadata);
        }

        List<BackupMetadata> metadataList = backupStatusMgr.locate(startTime);
        Assert.assertEquals(noOfEntries, metadataList.size());
        logger.info(metadataList.toString());

        // Ensure that list is always maintained from latest to eldest
        Date latest = null;
        for (BackupMetadata backupMetadata : metadataList) {
            if (latest == null) latest = backupMetadata.getStart();
            else {
                Assert.assertTrue(backupMetadata.getStart().before(latest));
                latest = backupMetadata.getStart();
            }
        }
    }

    @Test
    public void testSnapshotStatusSize() throws Exception {
        // One more entry than capacity: the eldest day must be evicted.
        final int noOfEntries = backupStatusMgr.getCapacity() + 1;
        Date startTime = DateUtil.getDate("19850101");

        for (int i = 0; i < noOfEntries; i++) {
            assert startTime != null;
            Date time = new DateTime(startTime.getTime()).plusDays(i).toDate();
            BackupMetadata backupMetadata =
                    new BackupMetadata(BackupVersion.SNAPSHOT_BACKUP, "123", time);
            backupStatusMgr.start(backupMetadata);
            backupStatusMgr.finish(backupMetadata);
        }

        // Verify there is only capacity entries
        Assert.assertEquals(
                backupStatusMgr.getCapacity(), backupStatusMgr.getAllSnapshotStatus().size());
    }

    @Test
    public void getLatestBackup() throws Exception {
        prepare();
        Instant start = DateUtil.parseInstant(backupDate);
        List<BackupMetadata> list =
                backupStatusMgr.getLatestBackupMetadata(
                        BackupVersion.SNAPSHOT_BACKUP,
                        new DateRange(
                                backupDate
                                        + ","
                                        + DateUtil.formatInstant(
                                                DateUtil.yyyyMMddHHmm,
                                                start.plus(12, ChronoUnit.HOURS))));
        Optional<BackupMetadata> backupMetadata = list.stream().findFirst();
        Assert.assertTrue(backupMetadata.isPresent());
        // Within the 12h window the newest FINISHED backup is the one started at +4h
        // (the +6h and +8h entries were seeded as FAILED).
        Assert.assertEquals(
                start.plus(4, ChronoUnit.HOURS), backupMetadata.get().getStart().toInstant());
    }

    @Test
    public void getLatestBackupFailure() throws Exception {
        // No backups recorded at all -> nothing to return.
        Optional<BackupMetadata> backupMetadata =
                backupStatusMgr
                        .getLatestBackupMetadata(
                                BackupVersion.SNAPSHOT_BACKUP,
                                new DateRange(backupDate + "," + backupDate))
                        .stream()
                        .findFirst();
        Assert.assertFalse(backupMetadata.isPresent());

        // Only a FAILED backup exists -> still nothing to return.
        backupStatusMgr.failed(getBackupMetaData(DateUtil.parseInstant(backupDate), Status.FAILED));
        backupMetadata =
                backupStatusMgr
                        .getLatestBackupMetadata(
                                BackupVersion.SNAPSHOT_BACKUP,
                                new DateRange(backupDate + "," + backupDate))
                        .stream()
                        .findFirst();
        Assert.assertFalse(backupMetadata.isPresent());
    }

    @Test
    public void getLatestBackupMetadata() throws Exception {
        prepare();
        List<BackupMetadata> list =
                backupStatusMgr.getLatestBackupMetadata(
                        BackupVersion.SNAPSHOT_BACKUP,
                        new DateRange(backupDate + "," + "201812031000"));
        list.forEach(System.out::println);
    }
}
| 3,158 |
0 | Create_ds/Priam/priam/src/test/java/com/netflix/priam | Create_ds/Priam/priam/src/test/java/com/netflix/priam/backup/TestCompression.java | /*
* Copyright 2017 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.priam.backup;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import com.netflix.priam.compress.ChunkedStream;
import com.netflix.priam.compress.CompressionType;
import com.netflix.priam.compress.ICompression;
import com.netflix.priam.compress.SnappyCompression;
import com.netflix.priam.utils.SystemUtils;
import java.io.*;
import java.util.Enumeration;
import java.util.Iterator;
import java.util.zip.ZipEntry;
import java.util.zip.ZipFile;
import java.util.zip.ZipOutputStream;
import org.apache.commons.io.FileUtils;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
/**
 * Verifies that zip and Snappy compression round-trips preserve content exactly (MD5-equal)
 * and actually reduce the size of a highly compressible fixture.
 */
public class TestCompression {

    private final File randomContentFile = new File("/tmp/content.txt");

    /** Writes a deterministic, highly compressible text fixture. */
    @Before
    public void setup() throws IOException {
        try (FileOutputStream stream = new FileOutputStream(randomContentFile)) {
            for (int i = 0; i < (5 * 5); i++) {
                stream.write(
                        "This is a test... Random things happen... and you are responsible for it...\n"
                                .getBytes("UTF-8"));
                stream.write(
                        "The quick brown fox jumps over the lazy dog.The quick brown fox jumps over the lazy dog.The quick brown fox jumps over the lazy dog.\n"
                                .getBytes("UTF-8"));
            }
        }
    }

    @After
    public void done() {
        FileUtils.deleteQuietly(randomContentFile);
    }

    /**
     * Round-trips the fixture through java.util.zip and asserts the decompressed copy is
     * byte-identical (MD5) to the source and that the archive is smaller than the input.
     */
    @Test
    public void zipTest() throws IOException {
        String zipFileName = "/tmp/compressed.zip";
        File decompressedTempOutput = new File("/tmp/compress-test-out.txt");
        try {
            try (ZipOutputStream out =
                            new ZipOutputStream(
                                    new BufferedOutputStream(new FileOutputStream(zipFileName)));
                    BufferedInputStream source =
                            new BufferedInputStream(
                                    new FileInputStream(randomContentFile), 2048)) {
                byte[] data = new byte[2048];
                ZipEntry entry = new ZipEntry(randomContentFile.getName());
                out.putNextEntry(entry);
                int count;
                while ((count = source.read(data, 0, 2048)) != -1) {
                    out.write(data, 0, count);
                }
            }
            assertTrue(randomContentFile.length() > new File(zipFileName).length());

            // try-with-resources: the original opened this ZipFile and never closed it.
            try (ZipFile zipfile = new ZipFile(zipFileName)) {
                Enumeration<? extends ZipEntry> e = zipfile.entries();
                while (e.hasMoreElements()) {
                    ZipEntry entry = e.nextElement();
                    try (BufferedInputStream is =
                                    new BufferedInputStream(zipfile.getInputStream(entry));
                            BufferedOutputStream dest1 =
                                    new BufferedOutputStream(
                                            new FileOutputStream(decompressedTempOutput), 2048)) {
                        int c;
                        byte[] d = new byte[2048];
                        while ((c = is.read(d, 0, 2048)) != -1) {
                            dest1.write(d, 0, c);
                        }
                    }
                }
            }
            String md1 = SystemUtils.md5(randomContentFile);
            String md2 = SystemUtils.md5(decompressedTempOutput);
            assertEquals(md1, md2);
        } finally {
            FileUtils.deleteQuietly(new File(zipFileName));
            FileUtils.deleteQuietly(decompressedTempOutput);
        }
    }

    @Test
    public void snappyTest() throws IOException {
        ICompression compress = new SnappyCompression();
        testCompressor(compress);
    }

    /**
     * Compresses the fixture in 5 MB chunks via {@link ChunkedStream}, decompresses it with the
     * given codec, and asserts MD5 equality plus an actual size reduction.
     */
    private void testCompressor(ICompression compress) throws IOException {
        File compressedOutputFile = new File("/tmp/test1.compress");
        File decompressedTempOutput = new File("/tmp/compress-test-out.txt");
        long chunkSize = 5L * 1024 * 1024;
        try {
            Iterator<byte[]> it =
                    new ChunkedStream(
                            new FileInputStream(randomContentFile),
                            chunkSize,
                            CompressionType.SNAPPY);
            try (FileOutputStream ostream = new FileOutputStream(compressedOutputFile)) {
                while (it.hasNext()) {
                    byte[] chunk = it.next();
                    ostream.write(chunk);
                }
                ostream.flush();
            }
            assertTrue(randomContentFile.length() > compressedOutputFile.length());

            // decompressAndClose is responsible for closing both streams.
            compress.decompressAndClose(
                    new FileInputStream(compressedOutputFile),
                    new FileOutputStream(decompressedTempOutput));
            String md1 = SystemUtils.md5(randomContentFile);
            String md2 = SystemUtils.md5(decompressedTempOutput);
            assertEquals(md1, md2);
        } finally {
            FileUtils.deleteQuietly(compressedOutputFile);
            FileUtils.deleteQuietly(decompressedTempOutput);
        }
    }
}
| 3,159 |
0 | Create_ds/Priam/priam/src/test/java/com/netflix/priam | Create_ds/Priam/priam/src/test/java/com/netflix/priam/backup/FakeBackupFileSystem.java | /*
* Copyright 2017 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.priam.backup;
import com.netflix.priam.aws.RemoteBackupPath;
import com.netflix.priam.config.IConfiguration;
import com.netflix.priam.merics.BackupMetrics;
import com.netflix.priam.notification.BackupNotificationMgr;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.nio.file.Path;
import java.time.Instant;
import java.util.*;
import javax.inject.Inject;
import javax.inject.Provider;
import javax.inject.Singleton;
import org.json.simple.JSONArray;
@Singleton
public class FakeBackupFileSystem extends AbstractFileSystem {
    // In-memory stand-in for the remote object store.
    private final List<AbstractBackupPath> flist = new ArrayList<>();
    public Set<String> downloadedFiles = new HashSet<>();
    public Set<String> uploadedFiles = new HashSet<>();
    // Location filters, (re)captured from the bucket string on each list() call.
    private String baseDir;
    private String region;
    private String clusterName;

    @Inject
    public FakeBackupFileSystem(
            IConfiguration configuration,
            BackupMetrics backupMetrics,
            BackupNotificationMgr backupNotificationMgr,
            Provider<AbstractBackupPath> pathProvider) {
        super(configuration, backupMetrics, backupNotificationMgr, pathProvider);
    }

    /** Replaces the fake remote content with the given remote-path strings. */
    public void setupTest(List<String> files) {
        clearTest();
        for (String file : files) {
            AbstractBackupPath path = pathProvider.get();
            path.parseRemote(file);
            flist.add(path);
        }
    }

    private void clearTest() {
        flist.clear();
        downloadedFiles.clear();
        uploadedFiles.clear();
    }

    /** Adds a single remote-path string to the fake remote content. */
    public void addFile(String file) {
        AbstractBackupPath path = pathProvider.get();
        path.parseRemote(file);
        flist.add(path);
    }

    @SuppressWarnings("unchecked")
    @Override
    public Iterator<AbstractBackupPath> list(String bucket, Date start, Date till) {
        String[] paths = bucket.split(String.valueOf(RemoteBackupPath.PATH_SEP));
        if (paths.length > 1) {
            baseDir = paths[1];
            region = paths[2];
            clusterName = paths[3];
        }
        List<AbstractBackupPath> tmpList = new ArrayList<>();
        for (AbstractBackupPath path : flist) {
            // In the window: strictly between start and till, or exactly at start.
            boolean inTimeWindow =
                    (path.time.after(start) && path.time.before(till))
                            || path.time.equals(start);
            // Bug fix: the original relied on && binding tighter than ||, so the
            // baseDir/cluster/region checks applied only to the equals(start) branch and any
            // path inside the time window matched regardless of its location.
            if (inTimeWindow
                    && path.baseDir.equals(baseDir)
                    && path.clusterName.equals(clusterName)
                    && path.region.equals(region)) {
                tmpList.add(path);
            }
        }
        return tmpList.iterator();
    }

    @Override
    public Iterator<String> listFileSystem(String prefix, String delimiter, String marker) {
        ArrayList<String> items = new ArrayList<>();
        for (AbstractBackupPath path : flist) {
            if (path.getRemotePath().startsWith(prefix)) {
                items.add(path.getRemotePath());
            }
        }
        return items.iterator();
    }

    public void shutdown() {
        // no-op: the fake holds no external resources
    }

    @Override
    public long getFileSize(String remotePath) throws BackupRestoreException {
        return 0;
    }

    @Override
    public boolean doesRemoteFileExist(Path remotePath) {
        for (AbstractBackupPath abstractBackupPath : flist) {
            if (abstractBackupPath.getRemotePath().equalsIgnoreCase(remotePath.toString()))
                return true;
        }
        return false;
    }

    @Override
    public void deleteFiles(List<Path> remotePaths) throws BackupRestoreException {
        for (Path remotePath : remotePaths) {
            AbstractBackupPath path = pathProvider.get();
            path.parseRemote(remotePath.toString());
            flist.remove(path);
        }
    }

    @Override
    public void cleanup() {
        clearTest();
    }

    /**
     * Records the download and, for META files, synthesizes the meta JSON locally from every
     * SNAP entry that shares the requested snapshot time.
     */
    @Override
    protected void downloadFileImpl(AbstractBackupPath path, String suffix)
            throws BackupRestoreException {
        File localFile = new File(path.newRestoreFile().getAbsolutePath() + suffix);
        if (path.getType() == AbstractBackupPath.BackupFileType.META) {
            // List all files and generate the file
            try (FileWriter fr = new FileWriter(localFile)) {
                JSONArray jsonObj = new JSONArray();
                for (AbstractBackupPath filePath : flist) {
                    if (filePath.type == AbstractBackupPath.BackupFileType.SNAP
                            && filePath.time.equals(path.time)) {
                        jsonObj.add(filePath.getRemotePath());
                    }
                }
                fr.write(jsonObj.toJSONString());
                fr.flush();
            } catch (IOException io) {
                throw new BackupRestoreException(io.getMessage(), io);
            }
        }
        downloadedFiles.add(path.getRemotePath());
    }

    /** Records the upload and mirrors it into the fake remote listing. */
    @Override
    protected long uploadFileImpl(AbstractBackupPath path, Instant target)
            throws BackupRestoreException {
        uploadedFiles.add(path.getBackupFile().getAbsolutePath());
        addFile(path.getRemotePath());
        return path.getBackupFile().length();
    }
}
| 3,160 |
0 | Create_ds/Priam/priam/src/test/java/com/netflix/priam | Create_ds/Priam/priam/src/test/java/com/netflix/priam/backup/TestBackupFile.java | /*
* Copyright 2017 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.priam.backup;
import com.google.inject.Guice;
import com.google.inject.Injector;
import com.netflix.priam.aws.RemoteBackupPath;
import com.netflix.priam.backup.AbstractBackupPath.BackupFileType;
import com.netflix.priam.identity.InstanceIdentity;
import com.netflix.priam.utils.DateUtil;
import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.sql.Date;
import java.text.ParseException;
import org.apache.commons.io.FileUtils;
import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.Test;
/**
 * Verifies local-file parsing and remote-path construction for v1 backup file types
 * (SNAP, SST, META) using a fake instance identity with a pinned token.
 */
public class TestBackupFile {
    private static Injector injector;
    // Region comes from the fake instance identity and appears in every remote path.
    private static String region;

    // Creates a 1 KB fixture SSTable under target/data and pins the token so that the
    // generated remote paths are deterministic.
    @BeforeClass
    public static void setup() throws IOException {
        injector = Guice.createInjector(new BRTestModule());
        File file =
                new File("target/data/Keyspace1/Standard1/", "Keyspace1-Standard1-ia-5-Data.db");
        if (!file.exists()) {
            File dir1 = new File("target/data/Keyspace1/Standard1/");
            if (!dir1.exists()) dir1.mkdirs();
            byte b = 8;
            long oneKB = (1024L);
            System.out.println(oneKB);
            BufferedOutputStream bos1 = new BufferedOutputStream(new FileOutputStream(file));
            // Write 1024 identical bytes to give the fixture a non-zero size.
            for (long i = 0; i < oneKB; i++) {
                bos1.write(b);
            }
            bos1.flush();
            bos1.close();
        }
        InstanceIdentity factory = injector.getInstance(InstanceIdentity.class);
        factory.getInstance().setToken("1234567"); // Token
        region = factory.getInstanceInfo().getRegion();
    }

    @AfterClass
    public static void cleanup() throws IOException {
        File file = new File("Keyspace1-Standard1-ia-5-Data.db");
        FileUtils.deleteQuietly(file);
    }

    // SNAP: the snapshot timestamp is parsed out of the snapshots/<time>/ directory.
    @Test
    public void testBackupFileCreation() throws ParseException {
        // Test snapshot file
        String snapshotfile =
                "target/data/Keyspace1/Standard1/snapshots/201108082320/Keyspace1-Standard1-ia-5-Data.db";
        RemoteBackupPath backupfile = injector.getInstance(RemoteBackupPath.class);
        backupfile.parseLocal(new File(snapshotfile), BackupFileType.SNAP);
        Assert.assertEquals(BackupFileType.SNAP, backupfile.type);
        Assert.assertEquals("Keyspace1", backupfile.keyspace);
        Assert.assertEquals("Standard1", backupfile.columnFamily);
        Assert.assertEquals("1234567", backupfile.token);
        Assert.assertEquals("fake-app", backupfile.clusterName);
        Assert.assertEquals(region, backupfile.region);
        Assert.assertEquals("casstestbackup", backupfile.baseDir);
        Assert.assertEquals(
                "casstestbackup/"
                        + region
                        + "/fake-app/1234567/201108082320/SNAP/Keyspace1/Standard1/Keyspace1-Standard1-ia-5-Data.db",
                backupfile.getRemotePath());
    }

    // SST: for incrementals the timestamp comes from the file's last-modified time.
    @Test
    public void testIncBackupFileCreation() throws ParseException {
        // Test incremental file
        File bfile = new File("target/data/Keyspace1/Standard1/Keyspace1-Standard1-ia-5-Data.db");
        RemoteBackupPath backupfile = injector.getInstance(RemoteBackupPath.class);
        backupfile.parseLocal(bfile, BackupFileType.SST);
        Assert.assertEquals(BackupFileType.SST, backupfile.type);
        Assert.assertEquals("Keyspace1", backupfile.keyspace);
        Assert.assertEquals("Standard1", backupfile.columnFamily);
        Assert.assertEquals("1234567", backupfile.token);
        Assert.assertEquals("fake-app", backupfile.clusterName);
        Assert.assertEquals(region, backupfile.region);
        Assert.assertEquals("casstestbackup", backupfile.baseDir);
        String datestr = DateUtil.formatyyyyMMddHHmm(new Date(bfile.lastModified()));
        Assert.assertEquals(
                "casstestbackup/"
                        + region
                        + "/fake-app/1234567/"
                        + datestr
                        + "/SST/Keyspace1/Standard1/Keyspace1-Standard1-ia-5-Data.db",
                backupfile.getRemotePath());
    }

    // META: no keyspace/column family segments; time is set explicitly by the caller.
    @Test
    public void testMetaFileCreation() throws ParseException {
        // Test snapshot file
        String filestr = "cass/data/1234567.meta";
        File bfile = new File(filestr);
        RemoteBackupPath backupfile = injector.getInstance(RemoteBackupPath.class);
        backupfile.parseLocal(bfile, BackupFileType.META);
        backupfile.setTime(DateUtil.getDate("201108082320"));
        Assert.assertEquals(BackupFileType.META, backupfile.type);
        Assert.assertEquals("1234567", backupfile.token);
        Assert.assertEquals("fake-app", backupfile.clusterName);
        Assert.assertEquals(region, backupfile.region);
        Assert.assertEquals("casstestbackup", backupfile.baseDir);
        Assert.assertEquals(
                "casstestbackup/" + region + "/fake-app/1234567/201108082320/META/1234567.meta",
                backupfile.getRemotePath());
    }
}
| 3,161 |
0 | Create_ds/Priam/priam/src/test/java/com/netflix/priam | Create_ds/Priam/priam/src/test/java/com/netflix/priam/backup/TestFileIterator.java | /*
* Copyright 2017 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.priam.backup;
import com.amazonaws.AmazonClientException;
import com.amazonaws.services.s3.AmazonS3Client;
import com.amazonaws.services.s3.model.ListObjectsRequest;
import com.amazonaws.services.s3.model.ObjectListing;
import com.amazonaws.services.s3.model.S3ObjectSummary;
import com.google.inject.Guice;
import com.google.inject.Injector;
import com.netflix.priam.aws.S3FileSystem;
import com.netflix.priam.identity.InstanceIdentity;
import com.netflix.priam.utils.DateUtil;
import java.io.IOException;
import java.util.*;
import mockit.Mock;
import mockit.MockUp;
import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.Ignore;
import org.junit.Test;
/**
* Unit test for backup file iterator
*
* @author Praveen Sadhu
*/
public class TestFileIterator {
private static Date startTime, endTime;
private static S3FileSystem s3FileSystem;
private static String region;
private static String bucket = "TESTBUCKET";
    // Wires a JMockit-mocked AmazonS3Client into the S3FileSystem under test and fixes the
    // [startTime, endTime] listing window used by most tests.
    @BeforeClass
    public static void setup() throws InterruptedException, IOException {
        AmazonS3Client s3client = new MockAmazonS3Client().getMockInstance();
        // Instantiating the MockUp applies the ObjectListing mock globally (JMockit semantics).
        new MockObjectListing();
        Injector injector = Guice.createInjector(new BRTestModule());
        InstanceIdentity factory = injector.getInstance(InstanceIdentity.class);
        region = factory.getInstanceInfo().getRegion();
        s3FileSystem = injector.getInstance(S3FileSystem.class);
        s3FileSystem.setS3Client(s3client);
        DateUtil.DateRange dateRange = new DateUtil.DateRange("201108110030,201108110530");
        startTime = new Date(dateRange.getStartTime().toEpochMilli());
        endTime = new Date(dateRange.getEndTime().toEpochMilli());
    }
    /**
     * JMockit stub for the S3 client; it returns bare listings whose object summaries are
     * supplied by {@link MockObjectListing}.
     */
    static class MockAmazonS3Client extends MockUp<AmazonS3Client> {
        @Mock
        public ObjectListing listObjects(ListObjectsRequest listObjectsRequest)
                throws AmazonClientException {
            ObjectListing listing = new ObjectListing();
            listing.setBucketName(listObjectsRequest.getBucketName());
            listing.setPrefix(listObjectsRequest.getPrefix());
            return listing;
        }
        @Mock
        public ObjectListing listNextBatchOfObjects(ObjectListing previousObjectListing)
                throws AmazonClientException {
            // NOTE(review): the configured 'listing' is discarded and a fresh ObjectListing is
            // returned instead — apparently harmless here since MockObjectListing supplies the
            // summaries, but confirm this was intentional.
            ObjectListing listing = new ObjectListing();
            listing.setBucketName(previousObjectListing.getBucketName());
            listing.setPrefix(previousObjectListing.getPrefix());
            return new ObjectListing();
        }
    }
    // Drives paging behavior: static flags are mutated by each test before listing and by the
    // mock itself as pages are consumed, so call order matters.
    @Ignore
    static class MockObjectListing extends MockUp<ObjectListing> {
        static boolean truncated = true; // value reported by isTruncated(): more pages exist
        static boolean firstcall = true; // distinguishes the first page from later ones
        static boolean simfilter = false; // Simulate filtering
        @Mock
        public List<S3ObjectSummary> getObjectSummaries() {
            if (firstcall) {
                firstcall = false;
                if (simfilter) return getObjectSummaryEmpty();
                return getObjectSummary();
            } else {
                if (simfilter) {
                    simfilter = false; // reset
                    return getObjectSummaryEmpty();
                } else truncated = false;
                return getNextObjectSummary();
            }
        }
        @Mock
        public boolean isTruncated() {
            return truncated;
        }
    }
@Test
public void testIteratorEmptySet() {
DateUtil.DateRange dateRange = new DateUtil.DateRange("201107110601,201107111101");
Date stime = new Date(dateRange.getStartTime().toEpochMilli());
Date etime = new Date(dateRange.getEndTime().toEpochMilli());
Iterator<AbstractBackupPath> fileIterator = s3FileSystem.list(bucket, stime, etime);
Set<String> files = new HashSet<>();
while (fileIterator.hasNext()) files.add(fileIterator.next().getRemotePath());
Assert.assertEquals(0, files.size());
}
    // Single-page listing: only in-window ks1 entries and the meta file are returned; the
    // 0600 entry lies outside [0030, 0530] and must be filtered out.
    @Test
    public void testIterator() {
        MockObjectListing.truncated = false;
        MockObjectListing.firstcall = true;
        MockObjectListing.simfilter = false;
        Iterator<AbstractBackupPath> fileIterator = s3FileSystem.list(bucket, startTime, endTime);
        Set<String> files = new HashSet<>();
        while (fileIterator.hasNext()) files.add(fileIterator.next().getRemotePath());
        Assert.assertEquals(3, files.size());
        Assert.assertTrue(
                files.contains(
                        "test_backup/"
                                + region
                                + "/fakecluster/123456/201108110030/SNAP/ks1/cf1/f1.db"));
        Assert.assertTrue(
                files.contains(
                        "test_backup/"
                                + region
                                + "/fakecluster/123456/201108110430/SST/ks1/cf1/f2.db"));
        Assert.assertTrue(
                files.contains(
                        "test_backup/"
                                + region
                                + "/fakecluster/123456/201108110030/META/meta.json"));
        Assert.assertFalse(
                files.contains(
                        "test_backup/"
                                + region
                                + "/fakecluster/123456/201108110600/SST/ks1/cf1/f3.db"));
    }
    // Two-page (truncated) listing: the second page contributes the ks2 entries, so five
    // in-window files are expected; out-of-window 0600 entries stay excluded on both pages.
    @Test
    public void testIteratorTruncated() {
        MockObjectListing.truncated = true;
        MockObjectListing.firstcall = true;
        MockObjectListing.simfilter = false;
        Iterator<AbstractBackupPath> fileIterator = s3FileSystem.list(bucket, startTime, endTime);
        Set<String> files = new HashSet<>();
        while (fileIterator.hasNext()) files.add(fileIterator.next().getRemotePath());
        Assert.assertEquals(5, files.size());
        Assert.assertTrue(
                files.contains(
                        "test_backup/"
                                + region
                                + "/fakecluster/123456/201108110030/SNAP/ks1/cf1/f1.db"));
        Assert.assertTrue(
                files.contains(
                        "test_backup/"
                                + region
                                + "/fakecluster/123456/201108110430/SST/ks1/cf1/f2.db"));
        Assert.assertTrue(
                files.contains(
                        "test_backup/"
                                + region
                                + "/fakecluster/123456/201108110030/META/meta.json"));
        Assert.assertFalse(
                files.contains(
                        "test_backup/"
                                + region
                                + "/fakecluster/123456/201108110600/SST/ks1/cf1/f3.db"));
        Assert.assertTrue(
                files.contains(
                        "test_backup/"
                                + region
                                + "/fakecluster/123456/201108110030/SNAP/ks2/cf1/f1.db"));
        Assert.assertTrue(
                files.contains(
                        "test_backup/"
                                + region
                                + "/fakecluster/123456/201108110430/SST/ks2/cf1/f2.db"));
        Assert.assertFalse(
                files.contains(
                        "test_backup/"
                                + region
                                + "/fakecluster/123456/201108110600/SST/ks2/cf1/f3.db"));
    }
@Test
public void testIteratorTruncatedOOR() {
MockObjectListing.truncated = true;
MockObjectListing.firstcall = true;
MockObjectListing.simfilter = true;
Iterator<AbstractBackupPath> fileIterator = s3FileSystem.list(bucket, startTime, endTime);
Set<String> files = new HashSet<>();
while (fileIterator.hasNext()) files.add(fileIterator.next().getRemotePath());
Assert.assertEquals(2, files.size());
Assert.assertFalse(
files.contains(
"test_backup/"
+ region
+ "/fakecluster/123456/201107110030/SNAP/ks1/cf1/f1.db"));
Assert.assertFalse(
files.contains(
"test_backup/"
+ region
+ "/fakecluster/123456/201107110430/SST/ks1/cf1/f2.db"));
Assert.assertFalse(
files.contains(
"test_backup/"
+ region
+ "/fakecluster/123456/201107110030/META/meta.json"));
Assert.assertFalse(
files.contains(
"test_backup/"
+ region
+ "/fakecluster/123456/201107110600/SST/ks1/cf1/f3.db"));
Assert.assertTrue(
files.contains(
"test_backup/"
+ region
+ "/fakecluster/123456/201108110030/SNAP/ks2/cf1/f1.db"));
Assert.assertTrue(
files.contains(
"test_backup/"
+ region
+ "/fakecluster/123456/201108110430/SST/ks2/cf1/f2.db"));
Assert.assertFalse(
files.contains(
"test_backup/"
+ region
+ "/fakecluster/123456/201108110600/SST/ks2/cf1/f3.db"));
}
@Test
public void testRestorePathIteration() {
MockObjectListing.truncated = true;
MockObjectListing.firstcall = true;
MockObjectListing.simfilter = false;
Iterator<AbstractBackupPath> fileIterator =
s3FileSystem.list(
"RESTOREBUCKET/test_restore_backup/fake-restore-region/fakerestorecluster",
startTime,
endTime);
Set<String> files = new HashSet<>();
while (fileIterator.hasNext()) files.add(fileIterator.next().getRemotePath());
while (fileIterator.hasNext()) files.add(fileIterator.next().getRemotePath());
Assert.assertEquals(5, files.size());
Assert.assertTrue(
files.contains(
"test_backup/"
+ region
+ "/fakecluster/123456/201108110030/SNAP/ks1/cf1/f1.db"));
Assert.assertTrue(
files.contains(
"test_backup/"
+ region
+ "/fakecluster/123456/201108110430/SST/ks1/cf1/f2.db"));
Assert.assertTrue(
files.contains(
"test_backup/"
+ region
+ "/fakecluster/123456/201108110030/META/meta.json"));
Assert.assertFalse(
files.contains(
"test_backup/"
+ region
+ "/fakecluster/123456/201108110600/SST/ks1/cf1/f3.db"));
Assert.assertTrue(
files.contains(
"test_backup/"
+ region
+ "/fakecluster/123456/201108110030/SNAP/ks2/cf1/f1.db"));
Assert.assertTrue(
files.contains(
"test_backup/"
+ region
+ "/fakecluster/123456/201108110430/SST/ks2/cf1/f2.db"));
Assert.assertFalse(
files.contains(
"test_backup/"
+ region
+ "/fakecluster/123456/201108110600/SST/ks2/cf1/f3.db"));
}
private static List<S3ObjectSummary> getObjectSummary() {
List<S3ObjectSummary> list = new ArrayList<>();
S3ObjectSummary summary = new S3ObjectSummary();
summary.setKey(
"test_backup/" + region + "/fakecluster/123456/201108110030/SNAP/ks1/cf1/f1.db");
list.add(summary);
summary = new S3ObjectSummary();
summary.setKey(
"test_backup/" + region + "/fakecluster/123456/201108110430/SST/ks1/cf1/f2.db");
list.add(summary);
summary = new S3ObjectSummary();
summary.setKey(
"test_backup/" + region + "/fakecluster/123456/201108110600/SST/ks1/cf1/f3.db");
list.add(summary);
summary = new S3ObjectSummary();
summary.setKey("test_backup/" + region + "/fakecluster/123456/201108110030/META/meta.json");
list.add(summary);
return list;
}
private static List<S3ObjectSummary> getObjectSummaryEmpty() {
return new ArrayList<>();
}
private static List<S3ObjectSummary> getNextObjectSummary() {
List<S3ObjectSummary> list = new ArrayList<>();
S3ObjectSummary summary = new S3ObjectSummary();
summary.setKey(
"test_backup/" + region + "/fakecluster/123456/201108110030/SNAP/ks2/cf1/f1.db");
list.add(summary);
summary = new S3ObjectSummary();
summary.setKey(
"test_backup/" + region + "/fakecluster/123456/201108110430/SST/ks2/cf1/f2.db");
list.add(summary);
summary = new S3ObjectSummary();
summary.setKey(
"test_backup/" + region + "/fakecluster/123456/201108110600/SST/ks2/cf1/f3.db");
list.add(summary);
return list;
}
}
/*
* Copyright 2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.priam.backup;
import com.google.inject.Guice;
import com.google.inject.Injector;
import com.netflix.priam.config.IBackupRestoreConfig;
import com.netflix.priam.config.IConfiguration;
import com.netflix.priam.connection.JMXNodeTool;
import com.netflix.priam.defaultimpl.IService;
import com.netflix.priam.scheduler.PriamScheduler;
import com.netflix.priam.tuner.CassandraTunerService;
import com.netflix.priam.tuner.TuneCassandra;
import com.netflix.priam.utils.BackupFileUtils;
import com.netflix.priam.utils.DateUtil;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.time.Instant;
import java.util.Set;
import mockit.Expectations;
import mockit.Mocked;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.quartz.SchedulerException;
/**
 * Tests the Quartz scheduling behavior of {@code BackupService}: how many jobs get registered
 * for the various combinations of backup cron, incremental backups, and V2 backup settings.
 *
 * <p>Uses JMockit {@code @Mocked}/{@code Expectations} to script configuration values. Note
 * that inside an {@code Expectations} block, consecutive {@code result = ...} assignments after
 * one recorded call provide return values for successive invocations of that call.
 *
 * <p>Created by aagrawal on 3/10/19.
 */
public class TestBackupService {
    private final PriamScheduler scheduler;
    private final CassandraTunerService cassandraTunerService;
    public TestBackupService() {
        Injector injector = Guice.createInjector(new BRTestModule());
        this.scheduler = injector.getInstance(PriamScheduler.class);
        this.cassandraTunerService = injector.getInstance(CassandraTunerService.class);
    }
    // Scheduler state is shared across tests; clear it before each one.
    @Before
    public void cleanup() throws SchedulerException {
        scheduler.getScheduler().clear();
    }
    /**
     * Backup cron "-1" disables backups: no jobs scheduled, and scheduling the service should
     * clean up any lingering V1 snapshot directories while leaving V2 snapshots alone.
     */
    @Test
    public void testBackupDisabled(
            @Mocked IConfiguration configuration, @Mocked IBackupRestoreConfig backupRestoreConfig)
            throws Exception {
        new Expectations() {
            {
                configuration.getBackupCronExpression();
                result = "-1";
                configuration.getDataFileLocation();
                result = "target/data";
            }
        };
        Path dummyDataDirectoryLocation = Paths.get(configuration.getDataFileLocation());
        Instant snapshotInstant = DateUtil.getInstant();
        // Create one V1 snapshot.
        String snapshotV1Name = DateUtil.formatInstant(DateUtil.yyyyMMdd, snapshotInstant);
        BackupFileUtils.generateDummyFiles(
                dummyDataDirectoryLocation,
                2,
                3,
                3,
                AbstractBackup.SNAPSHOT_FOLDER,
                snapshotV1Name,
                true);
        String snapshotName = "meta_v2_" + snapshotV1Name;
        // Create one V2 snapshot.
        BackupFileUtils.generateDummyFiles(
                dummyDataDirectoryLocation,
                2,
                3,
                3,
                AbstractBackup.SNAPSHOT_FOLDER,
                snapshotName,
                false);
        IService backupService =
                new BackupService(
                        configuration, backupRestoreConfig, scheduler, cassandraTunerService);
        backupService.scheduleService();
        Assert.assertEquals(0, scheduler.getScheduler().getJobKeys(null).size());
        // snapshot V1 name should not be there.
        Set<Path> backupPaths =
                AbstractBackup.getBackupDirectories(configuration, AbstractBackup.SNAPSHOT_FOLDER);
        for (Path backupPath : backupPaths) {
            Assert.assertTrue(Files.exists(Paths.get(backupPath.toString(), snapshotName)));
            Assert.assertFalse(Files.exists(Paths.get(backupPath.toString(), snapshotV1Name)));
        }
    }
    /** Backups on, incrementals off: expect 2 scheduled jobs. */
    @Test
    public void testBackupEnabled(
            @Mocked IConfiguration configuration, @Mocked IBackupRestoreConfig backupRestoreConfig)
            throws Exception {
        new Expectations() {
            {
                configuration.getBackupCronExpression();
                result = "0 0/1 * 1/1 * ? *";
                configuration.isIncrementalBackupEnabled();
                result = false;
            }
        };
        IService backupService =
                new BackupService(
                        configuration, backupRestoreConfig, scheduler, cassandraTunerService);
        backupService.scheduleService();
        Assert.assertEquals(2, scheduler.getScheduler().getJobKeys(null).size());
    }
    /** Backups on, incrementals on: expect a third job for incremental backups. */
    @Test
    public void testBackupEnabledWithIncremental(
            @Mocked IConfiguration configuration, @Mocked IBackupRestoreConfig backupRestoreConfig)
            throws Exception {
        new Expectations() {
            {
                configuration.getBackupCronExpression();
                result = "0 0/1 * 1/1 * ? *";
                configuration.isIncrementalBackupEnabled();
                result = true;
            }
        };
        IService backupService =
                new BackupService(
                        configuration, backupRestoreConfig, scheduler, cassandraTunerService);
        backupService.scheduleService();
        Assert.assertEquals(3, scheduler.getScheduler().getJobKeys(null).size());
    }
    /**
     * Config change: the backup cron answers "0 0/1 * 1/1 * ? *" for the first two calls and
     * "-1" afterwards (the four stacked {@code result} lines below), so the re-schedule via
     * {@code onChangeUpdateService()} drops the V1 backup job (3 jobs -> 2 jobs).
     */
    @Test
    public void updateService(
            @Mocked IConfiguration configuration,
            @Mocked IBackupRestoreConfig backupRestoreConfig,
            @Mocked JMXNodeTool nodeTool,
            @Mocked TuneCassandra tuneCassandra)
            throws Exception {
        new Expectations() {
            {
                configuration.getBackupCronExpression();
                result = "0 0/1 * 1/1 * ? *";
                result = "0 0/1 * 1/1 * ? *";
                result = "-1";
                result = "-1";
                configuration.isIncrementalBackupEnabled();
                result = true;
                backupRestoreConfig.enableV2Backups();
                result = true;
                backupRestoreConfig.getSnapshotMetaServiceCronExpression();
                result = "0 0/1 * 1/1 * ? *";
            }
        };
        IService backupService =
                new BackupService(
                        configuration, backupRestoreConfig, scheduler, cassandraTunerService);
        backupService.scheduleService();
        Assert.assertEquals(3, scheduler.getScheduler().getJobKeys(null).size());
        System.out.println("After updated");
        backupService.onChangeUpdateService();
        System.out.println(scheduler.getScheduler().getJobKeys(null));
        Assert.assertEquals(2, scheduler.getScheduler().getJobKeys(null).size());
    }
}
/*
* Copyright 2017 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.priam.backup;
import com.netflix.priam.scheduler.BlockingSubmitThreadPoolExecutor;
import java.util.concurrent.Callable;
import java.util.concurrent.LinkedBlockingDeque;
import java.util.concurrent.atomic.AtomicInteger;
import org.junit.Assert;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Tests {@link BlockingSubmitThreadPoolExecutor}: submissions beyond queue capacity must block
 * (rather than reject), and a submission that cannot be accepted within the timeout must fail.
 */
public class TestCustomizedTPE {
    private static final Logger logger = LoggerFactory.getLogger(TestCustomizedTPE.class);
    private static final int MAX_THREADS = 10;
    // submission timeout: 10 seconds (in milliseconds)
    private static final int TIME_OUT = 10 * 1000;
    private final BlockingSubmitThreadPoolExecutor startTest =
            new BlockingSubmitThreadPoolExecutor(
                    MAX_THREADS, new LinkedBlockingDeque<>(MAX_THREADS), TIME_OUT);
    /** 100 short tasks through a 10-thread/10-slot executor: all must eventually run. */
    @Test
    public void testExecutor() throws InterruptedException {
        final AtomicInteger count = new AtomicInteger();
        for (int i = 0; i < 100; i++) {
            startTest.submit(
                    (Callable<Void>)
                            () -> {
                                Thread.sleep(100);
                                logger.info("Count:{}", count.incrementAndGet());
                                return null;
                            });
        }
        startTest.sleepTillEmpty();
        Assert.assertEquals(100, count.get());
    }
    /**
     * Tasks that sleep twice the timeout saturate the pool and queue, so a later submit()
     * cannot be accepted in time and must surface a RuntimeException.
     */
    @Test
    public void testException() {
        boolean success = false;
        try {
            for (int i = 0; i < 100; i++) {
                startTest.submit(
                        (Callable<Void>)
                                () -> {
                                    logger.info("Sleeping for 2 * timeout.");
                                    Thread.sleep(TIME_OUT * 2);
                                    return null;
                                });
            }
        } catch (RuntimeException ex) {
            success = true; // expected: submission timed out
        }
        Assert.assertTrue("Failure to timeout...", success);
    }
}
/*
* Copyright 2018 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.priam.backup;
import com.netflix.priam.restore.IPostRestoreHook;
class FakePostRestoreHook implements IPostRestoreHook {
public boolean hasValidParameters() {
return true;
}
public void execute() {
// no op
}
}
/*
* Copyright 2017 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.priam.backup;
import com.amazonaws.auth.AWSCredentialsProvider;
import com.netflix.priam.cred.ICredential;
public class FakeCredentials implements ICredential {
public AWSCredentialsProvider getAwsCredentialProvider() {
// TODO Auto-generated method stub
return null;
}
}
package com.netflix.priam.backup;
import com.google.common.collect.ImmutableList;
import com.google.common.truth.Truth;
import com.google.common.util.concurrent.ListenableFuture;
import com.google.inject.Guice;
import com.google.inject.Injector;
import com.netflix.priam.compress.CompressionType;
import com.netflix.priam.config.BackupsToCompress;
import com.netflix.priam.config.FakeConfiguration;
import com.netflix.priam.config.IConfiguration;
import java.io.File;
import java.io.IOException;
import java.nio.file.Paths;
import java.time.Instant;
import java.time.temporal.ChronoUnit;
import java.util.Arrays;
import java.util.Collection;
import java.util.Objects;
import javax.inject.Provider;
import org.apache.commons.io.FileUtils;
import org.junit.*;
import org.junit.experimental.runners.Enclosed;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
/**
 * Tests {@code BackupHelperImpl}'s choice of compression algorithm per file and its upload
 * ordering (non-Data components first, -Data.db files last).
 *
 * <p>Two inner suites run under the {@code Enclosed} runner: {@link ParameterizedTests} covers
 * every (BackupsToCompress, file) combination, {@link ProgrammaticTests} covers ordering and
 * the compression-transition-epoch override.
 */
@RunWith(Enclosed.class)
public class TestBackupHelperImpl {
    // Fixture file names; the "compressed-"/"uncompressed-" prefixes plus the presence of a
    // CompressionInfo component drive the IF_REQUIRED compression decision under test.
    private static final String COMPRESSED_DATA = "compressed-1234-Data.db";
    private static final String COMPRESSION_INFO = "compressed-1234-CompressionInfo.db";
    private static final String UNCOMPRESSED_DATA = "uncompressed-1234-Data.db";
    private static final String RANDOM_DATA = "random-1234-Data.db";
    private static final String RANDOM_COMPONENT = "random-1234-compressioninfo.db";
    private static final ImmutableList<String> TABLE_PARTS =
            ImmutableList.of(
                    COMPRESSED_DATA,
                    COMPRESSION_INFO,
                    UNCOMPRESSED_DATA,
                    RANDOM_DATA,
                    RANDOM_COMPONENT);
    private static final String DIRECTORY = "target/data/ks/cf/backup/";
    /** One test instance per (BackupsToCompress, tablePart, expected CompressionType) row. */
    @RunWith(Parameterized.class)
    public static class ParameterizedTests {
        private final BackupHelperImpl backupHelper;
        private final String tablePart;
        private final CompressionType compressionAlgorithm;
        @BeforeClass
        public static void setUp() throws IOException {
            FileUtils.forceMkdir(new File(DIRECTORY));
        }
        // NOTE(review): assumes DIRECTORY is empty before each test (uploadAndDeleteAllFiles
        // presumably removes the files); createNewFile() returning false aborts the test.
        @Before
        public void createFiles() throws IOException {
            for (String tablePart : TABLE_PARTS) {
                File file = Paths.get(DIRECTORY, tablePart).toFile();
                if (file.createNewFile()) {
                    FileUtils.forceDeleteOnExit(file);
                } else {
                    throw new IllegalStateException("failed to create " + tablePart);
                }
            }
        }
        @AfterClass
        public static void tearDown() throws IOException {
            FileUtils.deleteDirectory(new File(DIRECTORY));
        }
        /** {which setting, file name, compression the helper is expected to pick}. */
        @Parameterized.Parameters
        public static Collection<Object[]> data() {
            return Arrays.asList(
                    new Object[][] {
                        {BackupsToCompress.NONE, COMPRESSED_DATA, CompressionType.NONE},
                        {BackupsToCompress.NONE, COMPRESSION_INFO, CompressionType.NONE},
                        {BackupsToCompress.NONE, UNCOMPRESSED_DATA, CompressionType.NONE},
                        {BackupsToCompress.NONE, RANDOM_DATA, CompressionType.NONE},
                        {BackupsToCompress.NONE, RANDOM_COMPONENT, CompressionType.NONE},
                        {BackupsToCompress.ALL, COMPRESSED_DATA, CompressionType.SNAPPY},
                        {BackupsToCompress.ALL, COMPRESSION_INFO, CompressionType.SNAPPY},
                        {BackupsToCompress.ALL, UNCOMPRESSED_DATA, CompressionType.SNAPPY},
                        {BackupsToCompress.ALL, RANDOM_DATA, CompressionType.SNAPPY},
                        {BackupsToCompress.ALL, RANDOM_COMPONENT, CompressionType.SNAPPY},
                        {BackupsToCompress.IF_REQUIRED, COMPRESSED_DATA, CompressionType.NONE},
                        {BackupsToCompress.IF_REQUIRED, COMPRESSION_INFO, CompressionType.NONE},
                        {BackupsToCompress.IF_REQUIRED, UNCOMPRESSED_DATA, CompressionType.SNAPPY},
                        {BackupsToCompress.IF_REQUIRED, RANDOM_DATA, CompressionType.SNAPPY},
                        {BackupsToCompress.IF_REQUIRED, RANDOM_COMPONENT, CompressionType.SNAPPY},
                    });
        }
        public ParameterizedTests(BackupsToCompress which, String tablePart, CompressionType algo) {
            this.tablePart = tablePart;
            this.compressionAlgorithm = algo;
            Injector injector = Guice.createInjector(new BRTestModule());
            FakeConfiguration fakeConfiguration =
                    (FakeConfiguration) injector.getInstance(IConfiguration.class);
            fakeConfiguration.setFakeConfig("Priam.backupsToCompress", which);
            IFileSystemContext context = injector.getInstance(IFileSystemContext.class);
            Provider<AbstractBackupPath> pathFactory =
                    injector.getProvider(AbstractBackupPath.class);
            backupHelper = new BackupHelperImpl(fakeConfiguration, context, pathFactory);
        }
        /** Uploads the fixture directory and checks the compression chosen for this file. */
        @Test
        public void testCorrectCompressionType() throws Exception {
            File parent = new File(DIRECTORY);
            AbstractBackupPath.BackupFileType backupFileType =
                    AbstractBackupPath.BackupFileType.SST_V2;
            ImmutableList<ListenableFuture<AbstractBackupPath>> futures =
                    backupHelper.uploadAndDeleteAllFiles(parent, backupFileType, false);
            AbstractBackupPath abstractBackupPath = null;
            for (ListenableFuture<AbstractBackupPath> future : futures) {
                if (future.get().getFileName().equals(tablePart)) {
                    abstractBackupPath = future.get();
                    break;
                }
            }
            Truth.assertThat(Objects.requireNonNull(abstractBackupPath).getCompression())
                    .isEqualTo(compressionAlgorithm);
        }
    }
    /** Non-parameterized checks: upload ordering and transition-epoch compression override. */
    public static class ProgrammaticTests {
        private final BackupHelperImpl backupHelper;
        private final FakeConfiguration config;
        @BeforeClass
        public static void setUp() throws IOException {
            FileUtils.forceMkdir(new File(DIRECTORY));
            for (String tablePart : TABLE_PARTS) {
                File file = Paths.get(DIRECTORY, tablePart).toFile();
                if (file.createNewFile()) {
                    FileUtils.forceDeleteOnExit(file);
                } else {
                    throw new IllegalStateException("failed to create " + tablePart);
                }
            }
        }
        @AfterClass
        public static void tearDown() throws IOException {
            FileUtils.deleteDirectory(new File(DIRECTORY));
        }
        public ProgrammaticTests() {
            Injector injector = Guice.createInjector(new BRTestModule());
            config = (FakeConfiguration) injector.getInstance(IConfiguration.class);
            IFileSystemContext context = injector.getInstance(IFileSystemContext.class);
            Provider<AbstractBackupPath> pathFactory =
                    injector.getProvider(AbstractBackupPath.class);
            backupHelper = new BackupHelperImpl(config, context, pathFactory);
        }
        // There are 3 -Data.db fixtures and 2 other components, hence skip(2)/limit(2) below.
        @Test
        public void testDataFilesAreLast() throws IOException {
            AbstractBackupPath.BackupFileType fileType = AbstractBackupPath.BackupFileType.SST_V2;
            boolean dataFilesAreLast =
                    backupHelper
                            .getBackupPaths(new File(DIRECTORY), fileType)
                            .asList()
                            .stream()
                            .skip(2)
                            .allMatch(p -> p.getBackupFile().getName().endsWith("-Data.db"));
            Truth.assertThat(dataFilesAreLast).isTrue();
        }
        @Test
        public void testNonDataFilesComeFirst() throws IOException {
            AbstractBackupPath.BackupFileType fileType = AbstractBackupPath.BackupFileType.SST_V2;
            boolean nonDataFilesComeFirst =
                    backupHelper
                            .getBackupPaths(new File(DIRECTORY), fileType)
                            .asList()
                            .stream()
                            .limit(2)
                            .noneMatch(p -> p.getBackupFile().getName().endsWith("-Data.db"));
            Truth.assertThat(nonDataFilesComeFirst).isTrue();
        }
        /** Files older than the transition epoch are compressed even when NONE is configured. */
        @Test
        public void testNeverCompressedOldFilesAreCompressed() throws IOException {
            AbstractBackupPath.BackupFileType fileType = AbstractBackupPath.BackupFileType.SST_V2;
            long transitionInstant = Instant.now().plus(1, ChronoUnit.DAYS).toEpochMilli();
            config.setCompressionTransitionEpochMillis(transitionInstant);
            config.setFakeConfig("Priam.backupsToCompress", BackupsToCompress.NONE);
            boolean backupsAreCompressed =
                    backupHelper
                            .getBackupPaths(new File(DIRECTORY), fileType)
                            .stream()
                            .allMatch(p -> p.getCompression() == CompressionType.SNAPPY);
            Truth.assertThat(backupsAreCompressed).isTrue();
        }
        /** Same transition-epoch override, but with IF_REQUIRED configured. */
        @Test
        public void testOptionallyCompressedOldFilesAreCompressed() throws IOException {
            AbstractBackupPath.BackupFileType fileType = AbstractBackupPath.BackupFileType.SST_V2;
            long transitionInstant = Instant.now().plus(1, ChronoUnit.DAYS).toEpochMilli();
            config.setCompressionTransitionEpochMillis(transitionInstant);
            config.setFakeConfig("Priam.backupsToCompress", BackupsToCompress.IF_REQUIRED);
            boolean backupsAreCompressed =
                    backupHelper
                            .getBackupPaths(new File(DIRECTORY), fileType)
                            .stream()
                            .allMatch(p -> p.getCompression() == CompressionType.SNAPPY);
            Truth.assertThat(backupsAreCompressed).isTrue();
        }
    }
}
/*
* Copyright 2017 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.priam.backup;
import com.google.inject.AbstractModule;
import com.google.inject.Scopes;
import com.google.inject.name.Names;
import com.netflix.priam.aws.auth.IS3Credential;
import com.netflix.priam.aws.auth.S3RoleAssumptionCredential;
import com.netflix.priam.backupv2.IMetaProxy;
import com.netflix.priam.backupv2.MetaV1Proxy;
import com.netflix.priam.backupv2.MetaV2Proxy;
import com.netflix.priam.config.FakeBackupRestoreConfig;
import com.netflix.priam.config.FakeConfiguration;
import com.netflix.priam.config.IBackupRestoreConfig;
import com.netflix.priam.config.IConfiguration;
import com.netflix.priam.cred.ICredential;
import com.netflix.priam.cryptography.IFileCryptography;
import com.netflix.priam.cryptography.pgp.PgpCryptography;
import com.netflix.priam.defaultimpl.FakeCassandraProcess;
import com.netflix.priam.defaultimpl.ICassandraProcess;
import com.netflix.priam.identity.FakeMembership;
import com.netflix.priam.identity.FakePriamInstanceFactory;
import com.netflix.priam.identity.IMembership;
import com.netflix.priam.identity.IPriamInstanceFactory;
import com.netflix.priam.identity.config.FakeInstanceInfo;
import com.netflix.priam.identity.config.InstanceInfo;
import com.netflix.priam.restore.IPostRestoreHook;
import com.netflix.priam.utils.FakeSleeper;
import com.netflix.priam.utils.Sleeper;
import com.netflix.spectator.api.DefaultRegistry;
import com.netflix.spectator.api.Registry;
import java.time.Clock;
import java.time.Instant;
import java.time.ZoneId;
import java.util.Collections;
import org.junit.Ignore;
import org.quartz.SchedulerFactory;
import org.quartz.impl.StdSchedulerFactory;
/**
 * Guice module wiring fake/in-memory implementations for backup-and-restore tests: fake
 * configuration and instance identity, an in-memory backup filesystem, no-op credentials and
 * Cassandra process, and a {@link Clock} fixed at the epoch for deterministic timestamps.
 *
 * <p>{@code @Ignore} keeps JUnit from treating this helper as a test class.
 */
@Ignore
public class BRTestModule extends AbstractModule {
    @Override
    protected void configure() {
        // Configuration and identity fakes.
        bind(IConfiguration.class).toInstance(new FakeConfiguration("fake-app"));
        bind(IBackupRestoreConfig.class).to(FakeBackupRestoreConfig.class);
        bind(InstanceInfo.class)
                .toInstance(new FakeInstanceInfo("fakeInstance1", "az1", "us-east-1"));
        bind(IPriamInstanceFactory.class).to(FakePriamInstanceFactory.class);
        bind(SchedulerFactory.class).to(StdSchedulerFactory.class).in(Scopes.SINGLETON);
        bind(IMembership.class)
                .toInstance(new FakeMembership(Collections.singletonList("fakeInstance1")));
        bind(ICredential.class).to(FakeNullCredential.class).in(Scopes.SINGLETON);
        // File systems: the named "backup" binding is the in-memory fake used by most tests.
        bind(IBackupFileSystem.class)
                .annotatedWith(Names.named("backup"))
                .to(FakeBackupFileSystem.class)
                .in(Scopes.SINGLETON);
        bind(Sleeper.class).to(FakeSleeper.class);
        bind(IS3Credential.class)
                .annotatedWith(Names.named("awss3roleassumption"))
                .to(S3RoleAssumptionCredential.class);
        bind(IBackupFileSystem.class)
                .annotatedWith(Names.named("encryptedbackup"))
                .to(NullBackupFileSystem.class);
        bind(IFileCryptography.class)
                .annotatedWith(Names.named("filecryptoalgorithm"))
                .to(PgpCryptography.class);
        // Process/hook/metrics fakes.
        bind(ICassandraProcess.class).to(FakeCassandraProcess.class);
        bind(IPostRestoreHook.class).to(FakePostRestoreHook.class);
        bind(Registry.class).toInstance(new DefaultRegistry());
        bind(IMetaProxy.class).annotatedWith(Names.named("v1")).to(MetaV1Proxy.class);
        bind(IMetaProxy.class).annotatedWith(Names.named("v2")).to(MetaV2Proxy.class);
        bind(DynamicRateLimiter.class).to(FakeDynamicRateLimiter.class);
        // Fixed clock so time-dependent logic is reproducible in tests.
        bind(Clock.class).toInstance(Clock.fixed(Instant.EPOCH, ZoneId.systemDefault()));
    }
}
/*
* Copyright 2017 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.priam.backup;
import com.amazonaws.AmazonClientException;
import com.amazonaws.AmazonServiceException;
import com.amazonaws.SdkClientException;
import com.amazonaws.services.s3.AmazonS3;
import com.amazonaws.services.s3.AmazonS3Client;
import com.amazonaws.services.s3.model.*;
import com.amazonaws.services.s3.model.lifecycle.LifecycleFilter;
import com.amazonaws.services.s3.model.lifecycle.LifecyclePrefixPredicate;
import com.google.api.client.util.Preconditions;
import com.google.common.collect.Lists;
import com.google.inject.Guice;
import com.google.inject.Injector;
import com.netflix.priam.aws.DataPart;
import com.netflix.priam.aws.RemoteBackupPath;
import com.netflix.priam.aws.S3FileSystem;
import com.netflix.priam.aws.S3PartUploader;
import com.netflix.priam.backup.AbstractBackupPath.BackupFileType;
import com.netflix.priam.config.IConfiguration;
import com.netflix.priam.identity.config.InstanceInfo;
import com.netflix.priam.merics.BackupMetrics;
import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.time.Instant;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Optional;
import java.util.stream.Collectors;
import mockit.Mock;
import mockit.MockUp;
import org.apache.commons.io.FileUtils;
import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class TestS3FileSystem {
private static Injector injector;
private static final Logger logger = LoggerFactory.getLogger(TestS3FileSystem.class);
private static final File DIR = new File("target/data/KS1/CF1/backups/201108082320/");
private static BackupMetrics backupMetrics;
private static String region;
private static IConfiguration configuration;
    /**
     * Lazily initializes the shared injector/metrics/configuration statics on first
     * construction. Not synchronized; relies on JUnit constructing test instances serially.
     */
    public TestS3FileSystem() {
        if (injector == null) injector = Guice.createInjector(new BRTestModule());
        if (backupMetrics == null) backupMetrics = injector.getInstance(BackupMetrics.class);
        if (configuration == null) configuration = injector.getInstance(IConfiguration.class);
        InstanceInfo instanceInfo = injector.getInstance(InstanceInfo.class);
        region = instanceInfo.getRegion();
    }
    /** Installs the JMockit S3 mocks once and ensures the local fixture directory exists. */
    @BeforeClass
    public static void setUp() {
        new MockS3PartUploader();
        new MockAmazonS3Client();
        if (!DIR.exists()) DIR.mkdirs();
    }
    /** Removes fixture files created under {@link #DIR} after the whole class has run. */
    @AfterClass
    public static void cleanup() throws IOException {
        FileUtils.cleanDirectory(DIR);
    }
    /** A successful upload must bump the upload-rate metric by exactly one. */
    @Test
    public void testFileUpload() throws Exception {
        MockS3PartUploader.setup();
        AbstractFileSystem fs = injector.getInstance(NullBackupFileSystem.class);
        RemoteBackupPath backupfile = injector.getInstance(RemoteBackupPath.class);
        backupfile.parseLocal(localFile(), BackupFileType.SNAP);
        long noOfFilesUploaded = backupMetrics.getUploadRate().count();
        // temporary hack to allow tests to complete in a timely fashion
        // This will be removed once we stop inheriting from AbstractFileSystem
        fs.uploadAndDeleteInternal(backupfile, Instant.EPOCH, 0 /* retries */);
        Assert.assertEquals(1, backupMetrics.getUploadRate().count() - noOfFilesUploaded);
    }
    /** Uploads a file, verifies remote existence, deletes it remotely, verifies it is gone. */
    @Test
    public void testFileUploadDeleteExists() throws Exception {
        MockS3PartUploader.setup();
        IBackupFileSystem fs = injector.getInstance(NullBackupFileSystem.class);
        RemoteBackupPath backupfile = injector.getInstance(RemoteBackupPath.class);
        backupfile.parseLocal(localFile(), BackupFileType.SST_V2);
        fs.uploadAndDelete(backupfile, false /* async */);
        Assert.assertTrue(fs.checkObjectExists(Paths.get(backupfile.getRemotePath())));
        // Lets delete the file now.
        List<Path> deleteFiles = Lists.newArrayList();
        deleteFiles.add(Paths.get(backupfile.getRemotePath()));
        fs.deleteRemoteFiles(deleteFiles);
        Assert.assertFalse(fs.checkObjectExists(Paths.get(backupfile.getRemotePath())));
    }
    /**
     * A failing part upload must not reach the multipart-completion step and must increment
     * the invalid-uploads metric exactly once.
     */
    @Test
    public void testFileUploadFailures() throws Exception {
        MockS3PartUploader.setup();
        MockS3PartUploader.partFailure = true;
        long noOfFailures = backupMetrics.getInvalidUploads().count();
        S3FileSystem fs = injector.getInstance(S3FileSystem.class);
        RemoteBackupPath backupfile = injector.getInstance(RemoteBackupPath.class);
        backupfile.parseLocal(localFile(), BackupFileType.SNAP);
        try {
            // temporary hack to allow tests to complete in a timely fashion
            // This will be removed once we stop inheriting from AbstractFileSystem
            fs.uploadAndDeleteInternal(backupfile, Instant.EPOCH, 0 /* retries */);
        } catch (BackupRestoreException e) {
            // ignore: the upload is expected to fail; metrics are asserted below
        }
        Assert.assertEquals(0, MockS3PartUploader.compattempts);
        Assert.assertEquals(1, backupMetrics.getInvalidUploads().count() - noOfFailures);
    }
    /**
     * Exercises a multipart-completion failure path. NOTE(review): this test has no assertions;
     * it only verifies the failure does not escape the catch — consider asserting on metrics or
     * on MockS3PartUploader.compattempts.
     */
    @Test
    public void testFileUploadCompleteFailure() throws Exception {
        MockS3PartUploader.setup();
        MockS3PartUploader.completionFailure = true;
        S3FileSystem fs = injector.getInstance(S3FileSystem.class);
        fs.setS3Client(new MockAmazonS3Client().getMockInstance());
        RemoteBackupPath backupfile = injector.getInstance(RemoteBackupPath.class);
        backupfile.parseLocal(localFile(), BackupFileType.SNAP);
        try {
            // temporary hack to allow tests to complete in a timely fashion
            // This will be removed once we stop inheriting from AbstractFileSystem
            fs.uploadAndDeleteInternal(backupfile, Instant.EPOCH, 0 /* retries */);
        } catch (BackupRestoreException e) {
            // ignore: completion failure is expected
        }
    }
    /** With no pre-existing rule, cleanup() must add one lifecycle rule for this cluster prefix. */
    @Test
    public void testCleanupAdd() throws Exception {
        MockAmazonS3Client.setRuleAvailable(false);
        S3FileSystem fs = injector.getInstance(S3FileSystem.class);
        fs.cleanup();
        Assert.assertEquals(1, MockAmazonS3Client.bconf.getRules().size());
        BucketLifecycleConfiguration.Rule rule = MockAmazonS3Client.bconf.getRules().get(0);
        Assert.assertEquals("casstestbackup/" + region + "/fake-app/", rule.getId());
        Assert.assertEquals(configuration.getBackupRetentionDays(), rule.getExpirationInDays());
    }
    /** With a correct rule already present, cleanup() must leave exactly that rule in place. */
    @Test
    public void testCleanupIgnore() throws Exception {
        MockAmazonS3Client.setRuleAvailable(true);
        S3FileSystem fs = injector.getInstance(S3FileSystem.class);
        fs.cleanup();
        Assert.assertEquals(1, MockAmazonS3Client.bconf.getRules().size());
        BucketLifecycleConfiguration.Rule rule = MockAmazonS3Client.bconf.getRules().get(0);
        Assert.assertEquals("casstestbackup/" + region + "/fake-app/", rule.getId());
        Assert.assertEquals(configuration.getBackupRetentionDays(), rule.getExpirationInDays());
    }
    /** A stale rule (wrong retention) must be rewritten back to the configured retention days. */
    @Test
    public void testCleanupUpdate() throws Exception {
        MockAmazonS3Client.setRuleAvailable(true);
        S3FileSystem fs = injector.getInstance(S3FileSystem.class);
        String clusterPrefix = "casstestbackup/" + region + "/fake-app/";
        // Overwrite the seeded rule with a 2-day retention that cleanup() should correct.
        MockAmazonS3Client.updateRule(
                MockAmazonS3Client.getBucketLifecycleConfig(clusterPrefix, 2));
        fs.cleanup();
        Assert.assertEquals(1, MockAmazonS3Client.bconf.getRules().size());
        BucketLifecycleConfiguration.Rule rule = MockAmazonS3Client.bconf.getRules().get(0);
        Assert.assertEquals("casstestbackup/" + region + "/fake-app/", rule.getId());
        Assert.assertEquals(configuration.getBackupRetentionDays(), rule.getExpirationInDays());
    }
@Test
public void testDeleteObjects() throws Exception {
S3FileSystem fs = injector.getInstance(S3FileSystem.class);
List<Path> filesToDelete = new ArrayList<>();
// Empty files
fs.deleteRemoteFiles(filesToDelete);
// Lets add some random files now.
filesToDelete.add(Paths.get("a.txt"));
fs.deleteRemoteFiles(filesToDelete);
// Emulate error now.
try {
MockAmazonS3Client.emulateError = true;
fs.deleteRemoteFiles(filesToDelete);
Assert.assertTrue(false);
} catch (BackupRestoreException e) {
Assert.assertTrue(true);
}
}
private File localFile() throws IOException {
String caller = Thread.currentThread().getStackTrace()[1].getMethodName();
File file = new File(DIR + caller + "KS1-CF1-ia-1-Data.db");
if (file.createNewFile()) {
byte[] data = new byte[5 << 10];
Arrays.fill(data, (byte) 8);
try (BufferedOutputStream os = new BufferedOutputStream(new FileOutputStream(file))) {
os.write(data);
}
}
Preconditions.checkState(file.exists());
return file;
}
    /**
     * JMockit fake for {@link S3PartUploader}: records part-upload and completion attempt counts
     * and can inject failures into either phase. All state is static and shared across tests, so
     * callers must invoke {@link #setup()} before use.
     */
    static class MockS3PartUploader extends MockUp<S3PartUploader> {
        // Number of multipart-completion attempts observed.
        static int compattempts = 0;
        // Number of part-upload attempts observed.
        static int partAttempts = 0;
        // When true, uploadPart() throws to simulate a failed part upload.
        static boolean partFailure = false;
        // When true, completeUpload() throws to simulate a failed completion.
        static boolean completionFailure = false;
        private static List<PartETag> partETags;
        @Mock
        public void $init(AmazonS3 client, DataPart dp, List<PartETag> partETags) {
            MockS3PartUploader.partETags = partETags;
        }
        @Mock
        private Void uploadPart() throws AmazonClientException, BackupRestoreException {
            ++partAttempts;
            if (partFailure) throw new BackupRestoreException("Test exception");
            partETags.add(new PartETag(0, null));
            return null;
        }
        @Mock
        public CompleteMultipartUploadResult completeUpload() throws BackupRestoreException {
            ++compattempts;
            if (completionFailure) throw new BackupRestoreException("Test exception");
            return null;
        }
        @Mock
        public Void retriableCall() throws AmazonClientException, BackupRestoreException {
            logger.info("MOCK UPLOADING...");
            return uploadPart();
        }
        // Resets all shared static state; call at the start of every test that uses this mock.
        public static void setup() {
            compattempts = 0;
            partAttempts = 0;
            partFailure = false;
            completionFailure = false;
        }
    }
static class MockAmazonS3Client extends MockUp<AmazonS3Client> {
private boolean ruleAvailable = false;
static BucketLifecycleConfiguration bconf;
static boolean emulateError = false;
@Mock
public InitiateMultipartUploadResult initiateMultipartUpload(
InitiateMultipartUploadRequest initiateMultipartUploadRequest)
throws AmazonClientException {
return new InitiateMultipartUploadResult();
}
public PutObjectResult putObject(PutObjectRequest putObjectRequest)
throws SdkClientException {
PutObjectResult result = new PutObjectResult();
result.setETag("ad");
return result;
}
@Mock
public BucketLifecycleConfiguration getBucketLifecycleConfiguration(String bucketName) {
return bconf;
}
@Mock
public void setBucketLifecycleConfiguration(
String bucketName, BucketLifecycleConfiguration bucketLifecycleConfiguration) {
bconf = bucketLifecycleConfiguration;
}
@Mock
public DeleteObjectsResult deleteObjects(DeleteObjectsRequest var1)
throws SdkClientException, AmazonServiceException {
if (emulateError) throw new AmazonServiceException("Unable to reach AWS");
return null;
}
static BucketLifecycleConfiguration.Rule getBucketLifecycleConfig(
String prefix, int expirationDays) {
return new BucketLifecycleConfiguration.Rule()
.withExpirationInDays(expirationDays)
.withFilter(new LifecycleFilter(new LifecyclePrefixPredicate(prefix)))
.withStatus(BucketLifecycleConfiguration.ENABLED)
.withId(prefix);
}
static void setRuleAvailable(boolean ruleAvailable) {
if (ruleAvailable) {
bconf = new BucketLifecycleConfiguration();
if (bconf.getRules() == null) bconf.setRules(Lists.newArrayList());
List<BucketLifecycleConfiguration.Rule> rules = bconf.getRules();
String clusterPath = "casstestbackup/" + region + "/fake-app/";
List<BucketLifecycleConfiguration.Rule> potentialRules =
rules.stream()
.filter(rule -> rule.getId().equalsIgnoreCase(clusterPath))
.collect(Collectors.toList());
if (potentialRules == null || potentialRules.isEmpty())
rules.add(
getBucketLifecycleConfig(
clusterPath, configuration.getBackupRetentionDays()));
}
}
static void updateRule(BucketLifecycleConfiguration.Rule updatedRule) {
List<BucketLifecycleConfiguration.Rule> rules = bconf.getRules();
Optional<BucketLifecycleConfiguration.Rule> updateRule =
rules.stream()
.filter(rule -> rule.getId().equalsIgnoreCase(updatedRule.getId()))
.findFirst();
if (updateRule.isPresent()) {
rules.remove(updateRule.get());
rules.add(updatedRule);
} else {
rules.add(updatedRule);
}
bconf.setRules(rules);
}
}
}
// ==== Create_ds/Priam/priam/src/test/java/com/netflix/priam/backup/FakeDynamicRateLimiter.java ====
package com.netflix.priam.backup;
import java.time.Instant;
/** No-op {@link DynamicRateLimiter} for tests that do not exercise upload rate limiting. */
public class FakeDynamicRateLimiter implements DynamicRateLimiter {
    // Intentionally does nothing: permits are always granted immediately.
    @Override
    public void acquire(AbstractBackupPath dir, Instant target, int tokens) {}
}
// ==== Create_ds/Priam/priam/src/test/java/com/netflix/priam/backup/TestBackupVerification.java ====
/*
* Copyright 2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.priam.backup;
import com.google.inject.Guice;
import com.google.inject.Injector;
import com.netflix.priam.backup.AbstractBackupPath.BackupFileType;
import com.netflix.priam.backupv2.IMetaProxy;
import com.netflix.priam.backupv2.MetaV1Proxy;
import com.netflix.priam.backupv2.MetaV2Proxy;
import com.netflix.priam.config.IConfiguration;
import com.netflix.priam.utils.DateUtil;
import com.netflix.priam.utils.DateUtil.DateRange;
import java.io.File;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.time.Instant;
import java.time.temporal.ChronoUnit;
import java.util.Date;
import java.util.List;
import java.util.Optional;
import mockit.Mock;
import mockit.MockUp;
import org.apache.commons.io.FileUtils;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
/**
 * Tests for {@link BackupVerification}: covers both backup versions (v1 snapshot and v2
 * meta-service) for single-date and date-range verification, including the "already verified,
 * do not re-verify" path. Meta-file validation is faked via JMockit so no remote access occurs.
 * Created by aagrawal on 1/23/19.
 */
public class TestBackupVerification {
    private final BackupVerification backupVerification;
    private final IConfiguration configuration;
    private final IBackupStatusMgr backupStatusMgr;
    // Start/end of the fake backup window, in yyyyMMddHHmm form.
    private final String backupDate = "201812011000";
    private final String backupDateEnd = "201812021000";
    // Remote location recorded on every fake backup's metadata.
    private final Path location =
            Paths.get(
                    "some_bucket/casstestbackup/1049_fake-app/1808575600",
                    BackupFileType.META_V2.toString(),
                    "1859817645000",
                    "SNAPPY",
                    "PLAINTEXT",
                    "meta_v2_201812011000.json");
    private final int numFakeBackups = 10;
    public TestBackupVerification() {
        Injector injector = Guice.createInjector(new BRTestModule());
        backupVerification = injector.getInstance(BackupVerification.class);
        configuration = injector.getInstance(IConfiguration.class);
        backupStatusMgr = injector.getInstance(IBackupStatusMgr.class);
    }
    // Fakes v1 meta-file validation: always returns a canned "valid" result.
    static class MockMetaV1Proxy extends MockUp<MetaV1Proxy> {
        @Mock
        public BackupVerificationResult isMetaFileValid(AbstractBackupPath metaBackupPath) {
            return getBackupVerificationResult();
        }
    }
    // Fakes v2 meta-file validation: always returns a canned "valid" result.
    static class MockMetaV2Proxy extends MockUp<MetaV2Proxy> {
        @Mock
        public BackupVerificationResult isMetaFileValid(AbstractBackupPath metaBackupPath) {
            return getBackupVerificationResult();
        }
    }
    // Runs before AND after every test: (re)installs the meta-proxy fakes and deletes the
    // on-disk backup status file so each test starts from a clean slate.
    @Before
    @After
    public void cleanup() {
        new MockMetaV1Proxy();
        new MockMetaV2Proxy();
        FileUtils.deleteQuietly(new File(configuration.getBackupStatusFileLoc()));
    }
    // With no recorded backups, latest-backup verification yields empty for both versions.
    @Test
    public void noBackup() throws Exception {
        Optional<BackupVerificationResult> backupVerificationResultOptinal =
                backupVerification.verifyLatestBackup(
                        BackupVersion.SNAPSHOT_BACKUP,
                        false,
                        new DateRange(Instant.now(), Instant.now()));
        Assert.assertFalse(backupVerificationResultOptinal.isPresent());
        backupVerificationResultOptinal =
                backupVerification.verifyLatestBackup(
                        BackupVersion.SNAPSHOT_META_SERVICE,
                        false,
                        new DateRange(Instant.now(), Instant.now()));
        Assert.assertFalse(backupVerificationResultOptinal.isPresent());
    }
    // With no recorded backups, range verification yields empty lists for both versions.
    @Test
    public void noBackupDateRange() throws Exception {
        List<BackupMetadata> backupVerificationResults =
                backupVerification.verifyBackupsInRange(
                        BackupVersion.SNAPSHOT_BACKUP, new DateRange(Instant.now(), Instant.now()));
        Assert.assertFalse(backupVerificationResults.size() > 0);
        backupVerificationResults =
                backupVerification.verifyBackupsInRange(
                        BackupVersion.SNAPSHOT_META_SERVICE,
                        new DateRange(Instant.now(), Instant.now()));
        Assert.assertFalse(backupVerificationResults.size() > 0);
    }
    // Seeds the status manager with numFakeBackups FINISHED backups per version (one at
    // backupDate, the rest at 1-minute offsets) plus one FAILED v1 backup that must be ignored.
    private void setUp() throws Exception {
        Instant start = DateUtil.parseInstant(backupDate);
        for (int i = 0; i < numFakeBackups - 1; i++) {
            backupStatusMgr.finish(
                    getBackupMetaData(
                            BackupVersion.SNAPSHOT_BACKUP,
                            start.plus(i + 1, ChronoUnit.MINUTES),
                            Status.FINISHED));
        }
        backupStatusMgr.finish(
                getBackupMetaData(BackupVersion.SNAPSHOT_BACKUP, start, Status.FINISHED));
        backupStatusMgr.failed(
                getBackupMetaData(
                        BackupVersion.SNAPSHOT_BACKUP,
                        start.plus(20, ChronoUnit.MINUTES),
                        Status.FAILED));
        for (int i = 0; i < numFakeBackups - 1; i++) {
            backupStatusMgr.finish(
                    getBackupMetaData(
                            BackupVersion.SNAPSHOT_META_SERVICE,
                            start.plus(i + 1, ChronoUnit.MINUTES),
                            Status.FINISHED));
        }
        backupStatusMgr.finish(
                getBackupMetaData(BackupVersion.SNAPSHOT_META_SERVICE, start, Status.FINISHED));
    }
    // Verifying v1 marks only v1 metadata as validated; v2 metadata stays untouched.
    @Test
    public void verifyBackupVersion1() throws Exception {
        setUp();
        // Verify for backup version 1.0
        Optional<BackupVerificationResult> backupVerificationResultOptinal =
                backupVerification.verifyLatestBackup(
                        BackupVersion.SNAPSHOT_BACKUP,
                        false,
                        new DateRange(backupDate + "," + backupDate));
        Assert.assertTrue(backupVerificationResultOptinal.isPresent());
        Assert.assertEquals(Instant.EPOCH, backupVerificationResultOptinal.get().snapshotInstant);
        Optional<BackupMetadata> backupMetadata =
                backupStatusMgr
                        .getLatestBackupMetadata(
                                BackupVersion.SNAPSHOT_BACKUP,
                                new DateRange(backupDate + "," + backupDate))
                        .stream()
                        .findFirst();
        Assert.assertTrue(backupMetadata.isPresent());
        Assert.assertNotNull(backupMetadata.get().getLastValidated());
        backupMetadata =
                backupStatusMgr
                        .getLatestBackupMetadata(
                                BackupVersion.SNAPSHOT_META_SERVICE,
                                new DateRange(backupDate + "," + backupDate))
                        .stream()
                        .findFirst();
        Assert.assertTrue(backupMetadata.isPresent());
        Assert.assertNull(backupMetadata.get().getLastValidated());
    }
    // Range verification of v1 validates all numFakeBackups v1 entries and no v2 entries.
    @Test
    public void verifyBackupVersion1DateRange() throws Exception {
        setUp();
        // Verify for backup version 1.0
        List<BackupMetadata> backupVerificationResults =
                backupVerification.verifyBackupsInRange(
                        BackupVersion.SNAPSHOT_BACKUP,
                        new DateRange(backupDate + "," + backupDateEnd));
        Assert.assertTrue(!backupVerificationResults.isEmpty());
        Assert.assertTrue(backupVerificationResults.size() == numFakeBackups);
        List<BackupMetadata> backupMetadata =
                backupStatusMgr.getLatestBackupMetadata(
                        BackupVersion.SNAPSHOT_BACKUP,
                        new DateRange(backupDate + "," + backupDateEnd));
        Assert.assertTrue(!backupMetadata.isEmpty());
        Assert.assertTrue(backupMetadata.size() == numFakeBackups);
        backupMetadata.stream().forEach(b -> Assert.assertNotNull(b.getLastValidated()));
        backupMetadata =
                backupStatusMgr.getLatestBackupMetadata(
                        BackupVersion.SNAPSHOT_META_SERVICE,
                        new DateRange(backupDate + "," + backupDateEnd));
        Assert.assertTrue(!backupMetadata.isEmpty());
        Assert.assertTrue(backupMetadata.size() == numFakeBackups);
        backupMetadata.stream().forEach(b -> Assert.assertNull(b.getLastValidated()));
    }
    // Verifying v2 validates v2 metadata; a second call must reuse the stored validation
    // (returning the recorded location) instead of invoking the meta proxy again.
    @Test
    public void verifyBackupVersion2() throws Exception {
        setUp();
        // Verify for backup version 2.0
        Optional<BackupVerificationResult> backupVerificationResultOptinal =
                backupVerification.verifyLatestBackup(
                        BackupVersion.SNAPSHOT_META_SERVICE,
                        false,
                        new DateRange(backupDate + "," + backupDate));
        Assert.assertTrue(backupVerificationResultOptinal.isPresent());
        Assert.assertEquals(Instant.EPOCH, backupVerificationResultOptinal.get().snapshotInstant);
        Assert.assertEquals("some_random", backupVerificationResultOptinal.get().remotePath);
        Optional<BackupMetadata> backupMetadata =
                backupStatusMgr
                        .getLatestBackupMetadata(
                                BackupVersion.SNAPSHOT_META_SERVICE,
                                new DateRange(backupDate + "," + backupDate))
                        .stream()
                        .findFirst();
        Assert.assertTrue(backupMetadata.isPresent());
        Assert.assertNotNull(backupMetadata.get().getLastValidated());
        // Retry the verification, it should not try and re-verify
        backupVerificationResultOptinal =
                backupVerification.verifyLatestBackup(
                        BackupVersion.SNAPSHOT_META_SERVICE,
                        false,
                        new DateRange(backupDate + "," + backupDate));
        Assert.assertTrue(backupVerificationResultOptinal.isPresent());
        Assert.assertEquals(
                DateUtil.parseInstant(backupDate),
                backupVerificationResultOptinal.get().snapshotInstant);
        Assert.assertNotEquals("some_random", backupVerificationResultOptinal.get().remotePath);
        // The cached result carries the stored location minus its leading bucket element.
        Assert.assertEquals(
                location.subpath(1, location.getNameCount()).toString(),
                backupVerificationResultOptinal.get().remotePath);
        backupMetadata =
                backupStatusMgr
                        .getLatestBackupMetadata(
                                BackupVersion.SNAPSHOT_BACKUP,
                                new DateRange(backupDate + "," + backupDate))
                        .stream()
                        .findFirst();
        Assert.assertTrue(backupMetadata.isPresent());
        Assert.assertNull(backupMetadata.get().getLastValidated());
    }
    // Range verification of v2 validates all numFakeBackups v2 entries and no v1 entries.
    @Test
    public void verifyBackupVersion2DateRange() throws Exception {
        setUp();
        // Verify for backup version 2.0
        List<BackupMetadata> backupVerificationResults =
                backupVerification.verifyBackupsInRange(
                        BackupVersion.SNAPSHOT_META_SERVICE,
                        new DateRange(backupDate + "," + backupDateEnd));
        Assert.assertTrue(!backupVerificationResults.isEmpty());
        Assert.assertTrue(backupVerificationResults.size() == numFakeBackups);
        List<BackupMetadata> backupMetadata =
                backupStatusMgr.getLatestBackupMetadata(
                        BackupVersion.SNAPSHOT_META_SERVICE,
                        new DateRange(backupDate + "," + backupDateEnd));
        Assert.assertTrue(!backupMetadata.isEmpty());
        Assert.assertTrue(backupMetadata.size() == numFakeBackups);
        backupMetadata.stream().forEach(b -> Assert.assertNotNull(b.getLastValidated()));
        backupMetadata =
                backupStatusMgr.getLatestBackupMetadata(
                        BackupVersion.SNAPSHOT_BACKUP,
                        new DateRange(backupDate + "," + backupDateEnd));
        Assert.assertTrue(!backupMetadata.isEmpty());
        Assert.assertTrue(backupMetadata.size() == numFakeBackups);
        backupMetadata.stream().forEach(b -> Assert.assertNull(b.getLastValidated()));
    }
    // Builds one fake backup record: 30-minute duration, fixed token, shared snapshot location.
    private BackupMetadata getBackupMetaData(
            BackupVersion backupVersion, Instant startTime, Status status) throws Exception {
        BackupMetadata backupMetadata =
                new BackupMetadata(backupVersion, "123", new Date(startTime.toEpochMilli()));
        backupMetadata.setCompleted(
                new Date(startTime.plus(30, ChronoUnit.MINUTES).toEpochMilli()));
        backupMetadata.setStatus(status);
        backupMetadata.setSnapshotLocation(location.toString());
        return backupMetadata;
    }
    // Canned "valid" verification result returned by both mocked meta proxies.
    private static BackupVerificationResult getBackupVerificationResult() {
        BackupVerificationResult result = new BackupVerificationResult();
        result.valid = true;
        result.manifestAvailable = true;
        result.remotePath = "some_random";
        result.filesMatched = 123;
        result.snapshotInstant = Instant.EPOCH;
        return result;
    }
    // Smoke test: a meta proxy is resolvable for the v2 backup version.
    @Test
    public void testGetMetaProxy() {
        IMetaProxy metaProxy = backupVerification.getMetaProxy(BackupVersion.SNAPSHOT_META_SERVICE);
        Assert.assertTrue(metaProxy != null);
    }
}
// ==== Create_ds/Priam/priam/src/test/java/com/netflix/priam/backup/TestBackupDynamicRateLimiter.java ====
package com.netflix.priam.backup;
import com.google.common.base.Stopwatch;
import com.google.common.collect.ImmutableMap;
import com.google.common.truth.Truth;
import com.google.inject.Guice;
import com.google.inject.Injector;
import com.netflix.priam.aws.RemoteBackupPath;
import com.netflix.priam.config.FakeConfiguration;
import java.nio.file.Paths;
import java.time.Clock;
import java.time.Duration;
import java.time.Instant;
import java.time.ZoneId;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
/**
 * Tests for {@link BackupDynamicRateLimiter}: throttling should engage only for snapshot paths
 * with a future target time, positive data size, and a positive backup thread count. Uses a
 * fixed {@link Clock} and a fake directory size so timings are deterministic.
 */
public class TestBackupDynamicRateLimiter {
    // Fixed "current" time for the limiter's clock.
    private static final Instant NOW = Instant.ofEpochMilli(1 << 16);
    // A target one hour after NOW, i.e. throttling should apply.
    private static final Instant LATER = NOW.plusMillis(Duration.ofHours(1).toMillis());
    // Fake size of the backup directory, in bytes.
    private static final int DIR_SIZE = 1 << 16;
    private BackupDynamicRateLimiter rateLimiter;
    private FakeConfiguration config;
    private Injector injector;
    @Before
    public void setUp() {
        injector = Guice.createInjector(new BRTestModule());
        config = injector.getInstance(FakeConfiguration.class);
    }
    // Snapshot path + future target: acquisition of 21 permits should take roughly 1 second.
    @Test
    public void sunnyDay() {
        rateLimiter = getRateLimiter(ImmutableMap.of("Priam.backup.threads", 1), NOW, DIR_SIZE);
        Stopwatch timer = timePermitAcquisition(getBackupPath(), LATER, 21);
        Truth.assertThat(timer.elapsed(TimeUnit.MILLISECONDS)).isAtLeast(1_000);
        Truth.assertThat(timer.elapsed(TimeUnit.MILLISECONDS)).isAtMost(2_000);
    }
    // An epoch target disables throttling.
    @Test
    public void targetSetToEpoch() {
        rateLimiter = getRateLimiter(ImmutableMap.of("Priam.backup.threads", 1), NOW, DIR_SIZE);
        Stopwatch timer = timePermitAcquisition(getBackupPath(), Instant.EPOCH, 20);
        assertNoRateLimiting(timer);
    }
    // Non-snapshot (incremental backups dir) paths are never throttled.
    @Test
    public void pathIsNotASnapshot() {
        rateLimiter = getRateLimiter(ImmutableMap.of("Priam.backup.threads", 1), NOW, DIR_SIZE);
        AbstractBackupPath path =
                getBackupPath(
                        "target/data/Keyspace1/Standard1/backups/Keyspace1-Standard1-ia-4-Data.db");
        Stopwatch timer = timePermitAcquisition(path, LATER, 20);
        assertNoRateLimiting(timer);
    }
    // A target equal to "now" leaves no time budget, so no throttling.
    @Test
    public void targetIsNow() {
        rateLimiter = getRateLimiter(ImmutableMap.of("Priam.backup.threads", 1), NOW, DIR_SIZE);
        Stopwatch timer = timePermitAcquisition(getBackupPath(), NOW, 20);
        assertNoRateLimiting(timer);
    }
    // A target in the past likewise disables throttling.
    @Test
    public void targetIsInThePast() {
        rateLimiter = getRateLimiter(ImmutableMap.of("Priam.backup.threads", 1), NOW, DIR_SIZE);
        Instant target = NOW.minus(Duration.ofHours(1L));
        Stopwatch timer = timePermitAcquisition(getBackupPath(), target, 20);
        assertNoRateLimiting(timer);
    }
    // Zero configured backup threads is an invalid state.
    @Test
    public void noBackupThreads() {
        rateLimiter = getRateLimiter(ImmutableMap.of("Priam.backup.threads", 0), NOW, DIR_SIZE);
        Assert.assertThrows(
                IllegalStateException.class,
                () -> timePermitAcquisition(getBackupPath(), LATER, 20));
    }
    // Negative configured backup threads is an invalid state.
    @Test
    public void negativeBackupThreads() {
        rateLimiter = getRateLimiter(ImmutableMap.of("Priam.backup.threads", -1), NOW, DIR_SIZE);
        Assert.assertThrows(
                IllegalStateException.class,
                () -> timePermitAcquisition(getBackupPath(), LATER, 20));
    }
    // An empty directory (0 bytes) means nothing to pace: no throttling.
    @Test
    public void noData() {
        rateLimiter = getRateLimiter(ImmutableMap.of("Priam.backup.threads", 1), NOW, 0);
        Stopwatch timer = timePermitAcquisition(getBackupPath(), LATER, 20);
        assertNoRateLimiting(timer);
    }
    // Requesting zero permits is a caller error.
    @Test
    public void noPermitsRequested() {
        rateLimiter = getRateLimiter(ImmutableMap.of("Priam.backup.threads", 1), NOW, DIR_SIZE);
        Assert.assertThrows(
                IllegalArgumentException.class,
                () -> timePermitAcquisition(getBackupPath(), LATER, 0));
    }
    // Requesting negative permits is a caller error.
    @Test
    public void negativePermitsRequested() {
        rateLimiter = getRateLimiter(ImmutableMap.of("Priam.backup.threads", 1), NOW, DIR_SIZE);
        Assert.assertThrows(
                IllegalArgumentException.class,
                () -> timePermitAcquisition(getBackupPath(), LATER, -1));
    }
    // Default path: a file inside a v2 snapshot directory (throttling-eligible).
    private RemoteBackupPath getBackupPath() {
        return getBackupPath(
                "target/data/Keyspace1/Standard1/snapshots/snap_v2_202201010000/.STANDARD1_field1_idx_1/Keyspace1-Standard1-ia-4-Data.db");
    }
    private RemoteBackupPath getBackupPath(String filePath) {
        RemoteBackupPath path = injector.getInstance(RemoteBackupPath.class);
        path.parseLocal(Paths.get(filePath).toFile(), AbstractBackupPath.BackupFileType.SST_V2);
        return path;
    }
    // Times the SECOND acquisition; the first call only primes the limiter.
    private Stopwatch timePermitAcquisition(AbstractBackupPath path, Instant now, int permits) {
        rateLimiter.acquire(path, now, permits); // Do this once first or else it won't throttle.
        Stopwatch timer = Stopwatch.createStarted();
        rateLimiter.acquire(path, now, permits);
        timer.stop();
        return timer;
    }
    // Builds a limiter with the given config overrides, fixed clock time, and fake dir size.
    private BackupDynamicRateLimiter getRateLimiter(
            Map<String, Object> properties, Instant now, long directorySize) {
        properties.forEach(config::setFakeConfig);
        return new BackupDynamicRateLimiter(
                config,
                Clock.fixed(now, ZoneId.systemDefault()),
                new FakeDirectorySize(directorySize));
    }
    // "No throttling" means acquisition completed in at most 1 ms.
    private void assertNoRateLimiting(Stopwatch timer) {
        Truth.assertThat(timer.elapsed(TimeUnit.MILLISECONDS)).isAtMost(1);
    }
    // DirectorySize stub returning a constant byte count for any location.
    private static final class FakeDirectorySize implements DirectorySize {
        private final long size;
        FakeDirectorySize(long size) {
            this.size = size;
        }
        @Override
        public long getBytes(String location) {
            return size;
        }
    }
}
// ==== Create_ds/Priam/priam/src/test/java/com/netflix/priam/backup/FakeNullCredential.java ====
/*
* Copyright 2017 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.priam.backup;
import com.amazonaws.auth.AWSCredentialsProvider;
import com.netflix.priam.cred.ICredential;
/** Test {@link ICredential} that supplies no AWS credentials at all. */
class FakeNullCredential implements ICredential {
    public AWSCredentialsProvider getAwsCredentialProvider() {
        // Intentionally null: tests using this credential never contact real AWS.
        return null;
    }
}
// ==== Create_ds/Priam/priam/src/test/java/com/netflix/priam/backup/TestAbstractFileSystem.java ====
/*
* Copyright 2018 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.priam.backup;
import com.google.inject.Guice;
import com.google.inject.Injector;
import com.netflix.priam.config.IConfiguration;
import com.netflix.priam.merics.BackupMetrics;
import com.netflix.priam.notification.BackupNotificationMgr;
import com.netflix.priam.utils.BackupFileUtils;
import java.io.File;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.text.ParseException;
import java.time.Instant;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Random;
import java.util.concurrent.*;
import javax.inject.Inject;
import javax.inject.Provider;
import org.apache.commons.io.FileUtils;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
/**
* The goal of this class is to test common functionality which are encapsulated in
* AbstractFileSystem. The actual upload/download of a file to remote file system is beyond the
* scope of this class. Created by aagrawal on 9/22/18.
*/
public class TestAbstractFileSystem {
    // Built once on first @Before and reused across tests.
    private Injector injector;
    private IConfiguration configuration;
    // Re-fetched every test so counter assertions start from a known state.
    private BackupMetrics backupMetrics;
    private BackupNotificationMgr backupNotificationMgr;
    // File system whose upload/download implementations always throw.
    private FailureFileSystem failureFileSystem;
    // File system whose upload/download implementations always succeed.
    private MyFileSystem myFileSystem;
    /**
     * Lazily builds the shared fixtures, refreshes the metrics handle, and clears the data
     * directory before every test. NOTE(review): the file systems are constructed only once,
     * with the first test's backupMetrics instance, while this field is re-fetched each test —
     * confirm BackupMetrics is bound as a singleton or counts may not line up.
     */
    @Before
    public void setBackupMetrics() {
        if (injector == null) injector = Guice.createInjector(new BRTestModule());
        if (configuration == null) configuration = injector.getInstance(IConfiguration.class);
        if (backupNotificationMgr == null)
            backupNotificationMgr = injector.getInstance(BackupNotificationMgr.class);
        backupMetrics = injector.getInstance(BackupMetrics.class);
        Provider<AbstractBackupPath> pathProvider = injector.getProvider(AbstractBackupPath.class);
        if (failureFileSystem == null)
            failureFileSystem =
                    new FailureFileSystem(
                            configuration, backupMetrics, backupNotificationMgr, pathProvider);
        if (myFileSystem == null)
            myFileSystem =
                    new MyFileSystem(
                            configuration, backupMetrics, backupNotificationMgr, pathProvider);
        BackupFileUtils.cleanupDir(Paths.get(configuration.getDataFileLocation()));
    }
@Test
public void testFailedRetriesUpload() throws Exception {
try {
Collection<File> files = generateFiles(1, 1, 1);
for (File file : files) {
failureFileSystem.uploadAndDelete(getDummyPath(file.toPath()), false /* async */);
}
} catch (BackupRestoreException e) {
// Verify the failure metric for upload is incremented.
Assert.assertEquals(1, (int) backupMetrics.getInvalidUploads().count());
}
}
    // Convenience overload: dummy SSTable path under the configured data directory.
    private AbstractBackupPath getDummyPath() throws ParseException {
        return getDummyPath(Paths.get(configuration.getDataFileLocation() + "/ks/cf/file-Data.db"));
    }
    // Wraps a local path in an AbstractBackupPath of type SST_V2 for upload/download calls.
    private AbstractBackupPath getDummyPath(Path localPath) throws ParseException {
        AbstractBackupPath path = injector.getInstance(AbstractBackupPath.class);
        path.parseLocal(localPath.toFile(), AbstractBackupPath.BackupFileType.SST_V2);
        return path;
    }
    // Creates dummy snapshot SSTables on disk and returns every *.db file found (recursively)
    // under the data directory.
    private Collection<File> generateFiles(int noOfKeyspaces, int noOfCf, int noOfSstables)
            throws Exception {
        Path dataDir = Paths.get(configuration.getDataFileLocation());
        BackupFileUtils.generateDummyFiles(
                dataDir, noOfKeyspaces, noOfCf, noOfSstables, "snapshot", "201812310000", true);
        String[] ext = {"db"};
        return FileUtils.listFiles(dataDir.toFile(), ext, true);
    }
@Test
public void testFailedRetriesDownload() throws Exception {
try {
failureFileSystem.downloadFile(getDummyPath(), "", 2);
} catch (BackupRestoreException e) {
// Verify the failure metric for download is incremented.
Assert.assertEquals(1, (int) backupMetrics.getInvalidDownloads().count());
}
}
    /** Synchronous upload succeeds, bumps the valid-upload counter, and deletes the local file. */
    @Test
    public void testUpload() throws Exception {
        File file = generateFiles(1, 1, 1).iterator().next();
        myFileSystem.uploadAndDelete(getDummyPath(file.toPath()), false /* async */);
        Assert.assertEquals(1, (int) backupMetrics.getValidUploads().actualCount());
        Assert.assertFalse(file.exists());
    }
    /** Synchronous download succeeds and bumps the valid-download counter. */
    @Test
    public void testDownload() throws Exception {
        // Dummy download
        myFileSystem.downloadFile(getDummyPath(), "", 2);
        // Verify the success metric for download is incremented.
        Assert.assertEquals(1, (int) backupMetrics.getValidDownloads().actualCount());
    }
    /** Async upload completes, bumps the valid-upload counter, and drains the task queue. */
    @Test
    public void testAsyncUpload() throws Exception {
        File file = generateFiles(1, 1, 1).iterator().next();
        myFileSystem
                .uploadAndDelete(getDummyPath(file.toPath()), Instant.EPOCH, true /* async */)
                .get();
        Assert.assertEquals(1, (int) backupMetrics.getValidUploads().actualCount());
        Assert.assertEquals(0, myFileSystem.getUploadTasksQueued());
    }
    /** Queues many async uploads and verifies metrics and queue drain once all complete. */
    @Test
    public void testAsyncUploadBulk() throws Exception {
        // Testing the queue feature works.
        // 1. Give 1000 dummy files to upload. File upload takes some random time to upload
        Collection<File> files = generateFiles(1, 1, 20);
        List<Future<AbstractBackupPath>> futures = new ArrayList<>();
        for (File file : files) {
            futures.add(
                    myFileSystem.uploadAndDelete(
                            getDummyPath(file.toPath()), Instant.EPOCH, true /* async */));
        }
        // Verify all the work is finished.
        for (Future<AbstractBackupPath> future : futures) {
            future.get();
        }
        // 2. Success metric is incremented correctly
        Assert.assertEquals(files.size(), (int) backupMetrics.getValidUploads().actualCount());
        // 3. The task queue is empty after upload is finished.
        Assert.assertEquals(0, myFileSystem.getUploadTasksQueued());
    }
    /** Submitting the same file from many threads at once must be de-duplicated. */
    @Test
    public void testUploadDedup() throws Exception {
        // Testing the de-duping works.
        Collection<File> files = generateFiles(1, 1, 1);
        File file = files.iterator().next();
        AbstractBackupPath abstractBackupPath = getDummyPath(file.toPath());
        // 1. Give same file to upload x times. Only one request will be entertained.
        int size = 10;
        ExecutorService threads = Executors.newFixedThreadPool(size);
        List<Callable<Boolean>> torun = new ArrayList<>(size);
        for (int i = 0; i < size; i++) {
            torun.add(
                    () -> {
                        myFileSystem.uploadAndDelete(abstractBackupPath, false /* async */);
                        return Boolean.TRUE;
                    });
        }
        // all tasks executed in different threads, at 'once'.
        List<Future<Boolean>> futures = threads.invokeAll(torun);
        // no more need for the threadpool
        threads.shutdown();
        for (Future<Boolean> future : futures) {
            try {
                future.get();
            } catch (InterruptedException | ExecutionException e) {
                // Do nothing: racing duplicates are expected to fail or be rejected.
            }
        }
        // 2. Verify the success metric for upload is not same as size, i.e. some amount of
        // de-duping happened.
        Assert.assertNotEquals(size, (int) backupMetrics.getValidUploads().actualCount());
    }
    /** A failing async upload completes its future exceptionally, counts a failure, and drains. */
    @Test
    public void testAsyncUploadFailure() throws Exception {
        // Testing single async upload.
        Collection<File> files = generateFiles(1, 1, 1);
        for (File file : files) {
            Future<AbstractBackupPath> future =
                    failureFileSystem.uploadAndDelete(
                            getDummyPath(file.toPath()), Instant.EPOCH, true /* async */);
            try {
                future.get();
            } catch (Exception e) {
                // 1. Future get returns error message.
                // 2. Verify the failure metric for upload is incremented.
                Assert.assertEquals(1, (int) backupMetrics.getInvalidUploads().count());
                // 3. The task queue is empty after upload is finished.
                Assert.assertEquals(0, failureFileSystem.getUploadTasksQueued());
                break;
            }
        }
    }
    /** Async download completes, bumps the valid-download counter, and drains the queue. */
    @Test
    public void testAsyncDownload() throws Exception {
        // Testing single async download.
        Future<Path> future = myFileSystem.asyncDownloadFile(getDummyPath(), 2);
        future.get();
        // 1. Verify the success metric for download is incremented.
        Assert.assertEquals(1, (int) backupMetrics.getValidDownloads().actualCount());
        // 2. Verify the queue size is '0' after success.
        Assert.assertEquals(0, myFileSystem.getDownloadTasksQueued());
    }
@Test
public void testAsyncDownloadBulk() throws Exception {
// Testing the queue feature works.
// 1. Give 1000 dummy files to download. File download takes some random time to download.
int totalFiles = 1000;
List<Future<Path>> futureList = new ArrayList<>();
for (int i = 0; i < totalFiles; i++)
futureList.add(myFileSystem.asyncDownloadFile(getDummyPath(Paths.get("" + i)), 2));
// Ensure processing is finished.
for (Future future1 : futureList) {
future1.get();
}
// 2. Success metric is incremented correctly -> exactly 1000 times.
Assert.assertEquals(totalFiles, (int) backupMetrics.getValidDownloads().actualCount());
// 3. The task queue is empty after download is finished.
Assert.assertEquals(0, myFileSystem.getDownloadTasksQueued());
}
@Test
public void testAsyncDownloadFailure() throws Exception {
Future<Path> future = failureFileSystem.asyncDownloadFile(getDummyPath(), 2);
try {
future.get();
} catch (Exception e) {
// Verify the failure metric for upload is incremented.
Assert.assertEquals(1, (int) backupMetrics.getInvalidDownloads().count());
}
}
    /**
     * Test file system whose upload and download implementations always throw
     * {@link BackupRestoreException}, used to exercise failure metrics and
     * error-handling paths.
     */
    class FailureFileSystem extends NullBackupFileSystem {

        @Inject
        public FailureFileSystem(
                IConfiguration configuration,
                BackupMetrics backupMetrics,
                BackupNotificationMgr backupNotificationMgr,
                Provider<AbstractBackupPath> pathProvider) {
            super(configuration, backupMetrics, backupNotificationMgr, pathProvider);
        }

        /** Always fails, naming the remote path in the error message. */
        @Override
        protected void downloadFileImpl(AbstractBackupPath path, String suffix)
                throws BackupRestoreException {
            throw new BackupRestoreException(
                    "User injected failure file system error for testing download. Remote path: "
                            + path.getRemotePath());
        }

        /** Always fails, naming the local file path in the error message. */
        @Override
        protected long uploadFileImpl(AbstractBackupPath path, Instant target)
                throws BackupRestoreException {
            throw new BackupRestoreException(
                    "User injected failure file system error for testing upload. Local path: "
                            + path.getBackupFile().getAbsolutePath());
        }
    }
class MyFileSystem extends NullBackupFileSystem {
private final Random random = new Random();
@Inject
public MyFileSystem(
IConfiguration configuration,
BackupMetrics backupMetrics,
BackupNotificationMgr backupNotificationMgr,
Provider<AbstractBackupPath> pathProvider) {
super(configuration, backupMetrics, backupNotificationMgr, pathProvider);
}
@Override
protected void downloadFileImpl(AbstractBackupPath path, String suffix)
throws BackupRestoreException {
try {
Thread.sleep(random.nextInt(20));
} catch (InterruptedException e) {
e.printStackTrace();
}
}
@Override
protected long uploadFileImpl(AbstractBackupPath path, Instant target)
throws BackupRestoreException {
try {
Thread.sleep(random.nextInt(20));
} catch (InterruptedException e) {
e.printStackTrace();
}
return 0;
}
}
}
| 3,174 |
0 | Create_ds/Priam/priam/src/test/java/com/netflix/priam | Create_ds/Priam/priam/src/test/java/com/netflix/priam/backup/TestBackup.java | /*
* Copyright 2017 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.priam.backup;
import com.google.inject.Guice;
import com.google.inject.Injector;
import com.google.inject.Key;
import com.google.inject.name.Names;
import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.HashSet;
import java.util.Set;
import mockit.Mock;
import mockit.MockUp;
import org.apache.cassandra.tools.NodeProbe;
import org.apache.commons.io.FileUtils;
import org.junit.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Unit test case to test a snapshot backup and incremental backup
*
* @author Praveen Sadhu
*/
public class TestBackup {
    private static Injector injector;
    private static FakeBackupFileSystem filesystem;
    private static final Logger logger = LoggerFactory.getLogger(TestBackup.class);
    // Absolute paths that the next backup run is expected to upload.
    private static final Set<String> expectedFiles = new HashSet<>();

    @BeforeClass
    public static void setup() throws InterruptedException, IOException {
        // Install the NodeProbe mock before anything can touch Cassandra.
        new MockNodeProbe();
        injector = Guice.createInjector(new BRTestModule());
        filesystem =
                (FakeBackupFileSystem)
                        injector.getInstance(
                                Key.get(IBackupFileSystem.class, Names.named("backup")));
    }

    @AfterClass
    public static void cleanup() throws IOException {
        File file = new File("target/data");
        FileUtils.deleteQuietly(file);
    }

    @Test
    public void testSnapshotBackup() throws Exception {
        filesystem.cleanup();
        // NOTE(review): the assertions of this test are disabled; it currently
        // only verifies that SnapshotBackup can be instantiated. Re-enable the
        // commented block below once snapshot execution is deterministic here.
        SnapshotBackup backup = injector.getInstance(SnapshotBackup.class);
        //
        //        backup.execute();
        //        Assert.assertEquals(3, filesystem.uploadedFiles.size());
        //        System.out.println("***** "+filesystem.uploadedFiles.size());
        //        boolean metafile = false;
        //        for (String filePath : expectedFiles)
        //            Assert.assertTrue(filesystem.uploadedFiles.contains(filePath));
        //
        //        for(String filepath : filesystem.uploadedFiles){
        //            if( filepath.endsWith("meta.json")){
        //                metafile = true;
        //                break;
        //            }
        //        }
        //        Assert.assertTrue(metafile);
    }

    @Test
    public void testIncrementalBackup() throws Exception {
        filesystem.cleanup();
        generateIncrementalFiles();
        IncrementalBackup backup = injector.getInstance(IncrementalBackup.class);
        backup.execute();
        // All four generated incremental files must be uploaded.
        Assert.assertEquals(4, filesystem.uploadedFiles.size());
        for (String filePath : expectedFiles)
            Assert.assertTrue(filesystem.uploadedFiles.contains(filePath));
    }

    @Test
    public void testClusterSpecificColumnFamiliesSkippedBefore21() throws Exception {
        // Pre-2.1 directory naming: plain column family names.
        String[] columnFamilyDirs = {"schema_columns", "local", "peers", "LocationInfo"};
        testClusterSpecificColumnFamiliesSkipped(columnFamilyDirs);
    }

    @Test
    public void testClusterSpecificColumnFamiliesSkippedFrom21() throws Exception {
        // 2.1+ directory naming: column family name suffixed with its UUID.
        String[] columnFamilyDirs = {
            "schema_columns-296e9c049bec30c5828dc17d3df2132a",
            "local-7ad54392bcdd45d684174c047860b347",
            "peers-37c71aca7ac2383ba74672528af04d4f",
            "LocationInfo-9f5c6374d48633299a0a5094bf9ad1e4"
        };
        testClusterSpecificColumnFamiliesSkipped(columnFamilyDirs);
    }

    @Test
    public void testSkippingEmptyFiles() throws Exception {
        filesystem.cleanup();
        File tmp = new File("target/data/");
        if (tmp.exists()) cleanup(tmp);
        // A zero-byte backup file must be skipped and deleted, not uploaded.
        File emptyFile =
                new File(
                        "target/data/Keyspace1/Standard1/backups/Keyspace1-Standard1-ia-1-Data.db");
        File parent = emptyFile.getParentFile();
        if (!parent.exists()) parent.mkdirs();
        Assert.assertTrue(emptyFile.createNewFile());
        IncrementalBackup backup = injector.getInstance(IncrementalBackup.class);
        backup.execute();
        Assert.assertTrue(filesystem.uploadedFiles.isEmpty());
        Assert.assertFalse(emptyFile.exists());
    }

    /**
     * Generates regular incremental files plus system column family files and
     * verifies that only the non-cluster-specific system files are uploaded.
     */
    private void testClusterSpecificColumnFamiliesSkipped(String[] columnFamilyDirs)
            throws Exception {
        filesystem.cleanup();
        File tmp = new File("target/data/");
        if (tmp.exists()) cleanup(tmp);
        // Generate "data"
        generateIncrementalFiles();
        Set<String> systemfiles = new HashSet<>();
        // Generate system files
        for (String columnFamilyDir : columnFamilyDirs) {
            String columnFamily = columnFamilyDir.split("-")[0];
            systemfiles.add(
                    String.format(
                            "target/data/system/%s/backups/system-%s-ka-1-Data.db",
                            columnFamilyDir, columnFamily));
            systemfiles.add(
                    String.format(
                            "target/data/system/%s/backups/system-%s-ka-1-Index.db",
                            columnFamilyDir, columnFamily));
        }
        for (String systemFilePath : systemfiles) {
            File file = new File(systemFilePath);
            genTestFile(file);
            // Not cluster specific columns should be backed up
            if (systemFilePath.contains("schema_columns"))
                expectedFiles.add(file.getAbsolutePath());
        }
        IncrementalBackup backup = injector.getInstance(IncrementalBackup.class);
        backup.execute();
        // 4 incremental files + 2 schema_columns system files.
        Assert.assertEquals(6, filesystem.uploadedFiles.size());
        for (String filePath : expectedFiles)
            Assert.assertTrue(filesystem.uploadedFiles.contains(filePath));
    }

    /** Lays down four incremental backup files and records them as expected. */
    private static void generateIncrementalFiles() {
        File tmp = new File("target/data/");
        if (tmp.exists()) cleanup(tmp);
        // Setup
        Set<String> files = new HashSet<>();
        files.add("target/data/Keyspace1/Standard1/backups/Keyspace1-Standard1-ia-1-Data.db");
        files.add("target/data/Keyspace1/Standard1/backups/Keyspace1-Standard1-ia-1-Index.db");
        files.add("target/data/Keyspace1/Standard1/backups/Keyspace1-Standard1-ia-2-Data.db");
        files.add("target/data/Keyspace1/Standard1/backups/Keyspace1-Standard1-ia-3-Data.db");
        expectedFiles.clear();
        for (String filePath : files) {
            File file = new File(filePath);
            genTestFile(file);
            expectedFiles.add(file.getAbsolutePath());
        }
    }

    /** Writes 5KB of dummy bytes to {@code file}, creating parent dirs first. */
    private static void genTestFile(File file) {
        try {
            File parent = file.getParentFile();
            if (!parent.exists()) parent.mkdirs();
            // try-with-resources closes the stream even if a write fails,
            // fixing the resource leak present in the original version.
            try (BufferedOutputStream bos1 =
                    new BufferedOutputStream(new FileOutputStream(file))) {
                for (long i = 0; i < (5L * 1024); i++) bos1.write((byte) 8);
                bos1.flush();
            }
        } catch (Exception e) {
            // Keep the full stack trace; logging only getMessage() loses context.
            logger.error("Failed to generate test file " + file, e);
        }
    }

    private static void cleanup(File dir) {
        FileUtils.deleteQuietly(dir);
    }

    // Mock NodeProbe that fabricates snapshot files instead of calling Cassandra.
    @Ignore
    static class MockNodeProbe extends MockUp<NodeProbe> {
        @Mock
        public void takeSnapshot(String snapshotName, String columnFamily, String... keyspaces) {
            File tmp = new File("target/data/");
            if (tmp.exists()) cleanup(tmp);
            // Setup
            Set<String> files = new HashSet<>();
            files.add(
                    "target/data/Keyspace1/Standard1/snapshots/"
                            + snapshotName
                            + "/Keyspace1-Standard1-ia-5-Data.db");
            files.add(
                    "target/data/Keyspace1/Standard1/snapshots/201101081230/Keyspace1-Standard1-ia-6-Data.db");
            files.add(
                    "target/data/Keyspace1/Standard1/snapshots/"
                            + snapshotName
                            + "/Keyspace1-Standard1-ia-7-Data.db");
            expectedFiles.clear();
            for (String filePath : files) {
                File file = new File(filePath);
                genTestFile(file);
                // The file under a foreign snapshot name is not an expected upload.
                if (!filePath.contains("Keyspace1-Standard1-ia-6-Data.db")) // skip
                    expectedFiles.add(file.getAbsolutePath());
            }
        }

        @Mock
        public void clearSnapshot(String tag, String... keyspaces) {
            cleanup(new File("target/data"));
        }
    }
}
| 3,175 |
0 | Create_ds/Priam/priam/src/test/java/com/netflix/priam | Create_ds/Priam/priam/src/test/java/com/netflix/priam/backup/NullBackupFileSystem.java | /*
* Copyright 2018 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.priam.backup;
import com.netflix.priam.config.IConfiguration;
import com.netflix.priam.merics.BackupMetrics;
import com.netflix.priam.notification.BackupNotificationMgr;
import java.nio.file.Path;
import java.time.Instant;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import javax.inject.Inject;
import javax.inject.Provider;
/**
 * No-op {@link AbstractFileSystem} implementation for tests: uploads report a
 * size of zero without transferring bytes, downloads do nothing, and the
 * remote file system always appears empty.
 */
public class NullBackupFileSystem extends AbstractFileSystem {

    @Inject
    public NullBackupFileSystem(
            IConfiguration configuration,
            BackupMetrics backupMetrics,
            BackupNotificationMgr backupNotificationMgr,
            Provider<AbstractBackupPath> pathProvider) {
        super(configuration, backupMetrics, backupNotificationMgr, pathProvider);
    }

    /** No resources to release. */
    public void shutdown() {
        // NOP
    }

    /** Always reports a remote size of zero. */
    @Override
    public long getFileSize(String remotePath) throws BackupRestoreException {
        return 0;
    }

    /** Deletion is a no-op for this null file system. */
    @Override
    public void deleteFiles(List<Path> remotePaths) throws BackupRestoreException {
        // Do nothing.
    }

    /** The remote file system is always empty. */
    @Override
    public Iterator<String> listFileSystem(String prefix, String delimiter, String marker) {
        return Collections.emptyIterator();
    }

    /** No local state to clean up. */
    @Override
    public void cleanup() {
    }

    /** Downloads are silently ignored. */
    @Override
    protected void downloadFileImpl(AbstractBackupPath path, String suffix)
            throws BackupRestoreException {}

    /** No file ever exists remotely. */
    @Override
    protected boolean doesRemoteFileExist(Path remotePath) {
        return false;
    }

    /** Pretends the upload succeeded, reporting zero bytes transferred. */
    @Override
    protected long uploadFileImpl(AbstractBackupPath path, Instant target)
            throws BackupRestoreException {
        return 0;
    }
}
| 3,176 |
0 | Create_ds/Priam/priam/src/test/java/com/netflix/priam/backup | Create_ds/Priam/priam/src/test/java/com/netflix/priam/backup/identity/InstanceIdentityTest.java | /*
* Copyright 2017 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.priam.backup.identity;
import static org.junit.Assert.*;
import com.google.common.collect.ImmutableList;
import com.netflix.priam.identity.DoubleRing;
import com.netflix.priam.identity.InstanceIdentity;
import com.netflix.priam.identity.PriamInstance;
import org.junit.Test;
/**
 * Verifies slot/token assignment across availability zones, seed discovery,
 * and ring-doubling behavior of {@link InstanceIdentity}. Uses the fake
 * nine-instance, three-zone fixture from {@link InstanceTestUtils}.
 */
public class InstanceIdentityTest extends InstanceTestUtils {

    @Test
    public void testCreateToken() throws Exception {
        // Slots are assigned round-robin across zones (offset by the region
        // hash): az1 receives 0, 3, 6; az2 receives 1, 4, 7; az3 receives
        // 2, 5, 8.
        identity = createInstanceIdentity("az1", "fakeinstance1");
        int hash = tokenManager.regionOffset(instanceInfo.getRegion());
        assertEquals(0, identity.getInstance().getId() - hash);
        identity = createInstanceIdentity("az1", "fakeinstance2");
        assertEquals(3, identity.getInstance().getId() - hash);
        identity = createInstanceIdentity("az1", "fakeinstance3");
        assertEquals(6, identity.getInstance().getId() - hash);
        // try next zone
        identity = createInstanceIdentity("az2", "fakeinstance4");
        assertEquals(1, identity.getInstance().getId() - hash);
        identity = createInstanceIdentity("az2", "fakeinstance5");
        assertEquals(4, identity.getInstance().getId() - hash);
        identity = createInstanceIdentity("az2", "fakeinstance6");
        assertEquals(7, identity.getInstance().getId() - hash);
        // next
        identity = createInstanceIdentity("az3", "fakeinstance7");
        assertEquals(2, identity.getInstance().getId() - hash);
        identity = createInstanceIdentity("az3", "fakeinstance8");
        assertEquals(5, identity.getInstance().getId() - hash);
        identity = createInstanceIdentity("az3", "fakeinstance9");
        assertEquals(8, identity.getInstance().getId() - hash);
    }

    @Test
    public void testGetSeeds() throws Exception {
        // With nine instances in the ring, an instance sees three seeds.
        createInstances();
        identity = createInstanceIdentity("az1", "fakeinstance1");
        assertEquals(3, identity.getSeeds().size());
    }

    @Test
    public void testDoubleSlots() throws Exception {
        // Doubling the ring must double the slot count, with every newly
        // inserted (odd-indexed) slot held by the dummy placeholder instance.
        createInstances();
        int before = factory.getAllIds(config.getAppName()).size();
        new DoubleRing(config, factory, tokenManager, instanceInfo).doubleSlots();
        ImmutableList<PriamInstance> lst = factory.getAllIds(config.getAppName()).asList();
        for (int i = 0; i < lst.size(); i++) {
            System.out.println(lst.get(i));
            if (0 == i % 2) continue;
            assertEquals(InstanceIdentity.DUMMY_INSTANCE_ID, lst.get(i).getInstanceId());
        }
        assertEquals(before * 2, lst.size());
    }

    @Test
    public void testDoubleGrap() throws Exception {
        // After doubling, a brand-new instance should be able to grab one of
        // the dummy slots. Result is printed for manual inspection only.
        createInstances();
        new DoubleRing(config, factory, tokenManager, instanceInfo).doubleSlots();
        int hash = tokenManager.regionOffset(instanceInfo.getRegion());
        identity = createInstanceIdentity("az1", "fakeinstancex");
        printInstance(identity.getInstance(), hash);
    }

    /** Prints an instance's region-relative slot id and token. */
    public void printInstance(PriamInstance ins, int hash) {
        System.out.println("ID: " + (ins.getId() - hash));
        System.out.println("PayLoad: " + ins.getToken());
    }
}
| 3,177 |
0 | Create_ds/Priam/priam/src/test/java/com/netflix/priam/backup | Create_ds/Priam/priam/src/test/java/com/netflix/priam/backup/identity/DoubleRingTest.java | /*
* Copyright 2017 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.priam.backup.identity;
import static org.junit.Assert.assertEquals;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Lists;
import com.netflix.priam.identity.DoubleRing;
import com.netflix.priam.identity.InstanceIdentity;
import com.netflix.priam.identity.PriamInstance;
import java.util.List;
import org.junit.Test;
/**
 * Exercises {@link DoubleRing}: doubling the token ring and backing up /
 * restoring the pre-doubling ring state.
 */
public class DoubleRingTest extends InstanceTestUtils {

    /** Doubling must double the slot count and interleave dummy instances. */
    @Test
    public void testDouble() throws Exception {
        createInstances();
        int sizeBeforeDoubling = factory.getAllIds(config.getAppName()).size();
        new DoubleRing(config, factory, tokenManager, instanceInfo).doubleSlots();
        ImmutableSet<PriamInstance> doubledRing = factory.getAllIds(config.getAppName());
        assertEquals(sizeBeforeDoubling * 2, doubledRing.size());
        validate(doubledRing.asList());
    }

    /** Checks each slot's token and that odd-indexed slots are dummies. */
    private void validate(List<PriamInstance> doubled) {
        // Recompute the token each slot should carry in a ring of this size.
        List<String> expectedTokens = Lists.newArrayList();
        for (int slot = 0; slot < doubled.size(); slot++) {
            expectedTokens.add(
                    tokenManager.createToken(slot, doubled.size(), instanceInfo.getRegion()));
        }
        for (int slot = 0; slot < doubled.size(); slot++) {
            PriamInstance instance = doubled.get(slot);
            assertEquals(expectedTokens.get(slot), instance.getToken());
            int relativeId =
                    instance.getId() - tokenManager.regionOffset(instanceInfo.getRegion());
            System.out.println(instance);
            if (0 != relativeId % 2) {
                assertEquals(instance.getInstanceId(), InstanceIdentity.DUMMY_INSTANCE_ID);
            }
        }
    }

    /** Backup, double, then restore must round-trip the original slot count. */
    @Test
    public void testBR() throws Exception {
        createInstances();
        int initialSize = factory.getAllIds(config.getAppName()).size();
        DoubleRing ring = new DoubleRing(config, factory, tokenManager, instanceInfo);
        ring.backup();
        ring.doubleSlots();
        assertEquals(initialSize * 2, factory.getAllIds(config.getAppName()).size());
        ring.restore();
        assertEquals(initialSize, factory.getAllIds(config.getAppName()).size());
    }
}
| 3,178 |
0 | Create_ds/Priam/priam/src/test/java/com/netflix/priam/backup | Create_ds/Priam/priam/src/test/java/com/netflix/priam/backup/identity/InstanceTestUtils.java | /*
* Copyright 2017 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.priam.backup.identity;
import com.netflix.priam.config.FakeConfiguration;
import com.netflix.priam.identity.*;
import com.netflix.priam.identity.config.FakeInstanceInfo;
import com.netflix.priam.identity.config.InstanceInfo;
import com.netflix.priam.identity.token.*;
import com.netflix.priam.utils.FakeSleeper;
import com.netflix.priam.utils.ITokenManager;
import com.netflix.priam.utils.Sleeper;
import com.netflix.priam.utils.TokenManager;
import java.util.ArrayList;
import java.util.List;
import org.junit.Before;
import org.junit.Ignore;
@Ignore
/**
 * Shared fixture for instance-identity tests: fakes a nine-instance,
 * three-zone membership in us-east-1 and provides helpers for creating
 * {@link InstanceIdentity} objects for arbitrary zone/instance pairs.
 */
abstract class InstanceTestUtils {
    // Instance ids registered with the fake membership before each test.
    private final List<String> instances = new ArrayList<>();
    private IMembership membership;
    FakeConfiguration config;
    IPriamInstanceFactory factory;
    InstanceIdentity identity;
    private Sleeper sleeper;
    ITokenManager tokenManager;
    InstanceInfo instanceInfo;
    private final String region = "us-east-1";

    @Before
    public void setup() throws Exception {
        // Nine fake instances; createInstances() later spreads them across
        // three availability zones (az1..az3), three per zone.
        instances.add("fakeinstance1");
        instances.add("fakeinstance2");
        instances.add("fakeinstance3");
        instances.add("fakeinstance4");
        instances.add("fakeinstance5");
        instances.add("fakeinstance6");
        instances.add("fakeinstance7");
        instances.add("fakeinstance8");
        instances.add("fakeinstance9");
        membership = new FakeMembership(instances);
        config = new FakeConfiguration("fake-app");
        instanceInfo = new FakeInstanceInfo("fakeinstance1", "az1", region);
        tokenManager = new TokenManager(config);
        factory = new FakePriamInstanceFactory(instanceInfo);
        sleeper = new FakeSleeper();
        identity = createInstanceIdentity(instanceInfo.getRac(), instanceInfo.getInstanceId());
    }

    /** Registers all nine fake instances, three per availability zone. */
    void createInstances() throws Exception {
        createInstanceIdentity("az1", "fakeinstance1");
        createInstanceIdentity("az1", "fakeinstance2");
        createInstanceIdentity("az1", "fakeinstance3");
        // try next zone
        createInstanceIdentity("az2", "fakeinstance4");
        createInstanceIdentity("az2", "fakeinstance5");
        createInstanceIdentity("az2", "fakeinstance6");
        // next zone
        createInstanceIdentity("az3", "fakeinstance7");
        createInstanceIdentity("az3", "fakeinstance8");
        createInstanceIdentity("az3", "fakeinstance9");
    }

    /**
     * Builds an {@link InstanceIdentity} for the given zone/instance pair,
     * acquiring a token through the real {@link TokenRetriever} machinery.
     */
    InstanceIdentity createInstanceIdentity(String zone, String instanceId) throws Exception {
        InstanceInfo newInstanceInfo = new FakeInstanceInfo(instanceId, zone, region);
        ITokenRetriever tokenRetriever =
                new TokenRetriever(
                        factory, membership, config, newInstanceInfo, sleeper, tokenManager);
        return new InstanceIdentity(factory, membership, config, newInstanceInfo, tokenRetriever);
    }
}
| 3,179 |
0 | Create_ds/Priam/priam/src/test/java/com/netflix/priam | Create_ds/Priam/priam/src/test/java/com/netflix/priam/restore/TestPostRestoreHook.java | /*
* Copyright 2018 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.priam.restore;
import com.google.inject.Guice;
import com.google.inject.Injector;
import com.netflix.priam.TestModule;
import com.netflix.priam.config.IConfiguration;
import java.io.File;
import org.apache.commons.io.FileUtils;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
/**
 * Tests for the post-restore hook: parameter validation plus the heartbeat /
 * done-file protocol between the hook and its child process. Timing-sensitive:
 * the heartbeat thread below emits heartbeats for ~10 seconds.
 */
public class TestPostRestoreHook {
    @Before
    @After
    public void setup() {
        Injector inject = Guice.createInjector(new TestModule());
        IConfiguration configuration = inject.getInstance(IConfiguration.class);
        // ensure heartbeat and done files are not present
        File heartBeatFile = new File(configuration.getPostRestoreHookHeartbeatFileName());
        if (heartBeatFile.exists()) {
            heartBeatFile.delete();
        }
        File doneFile = new File(configuration.getPostRestoreHookDoneFileName());
        if (doneFile.exists()) {
            doneFile.delete();
        }
    }

    @Test
    /*
    Test to validate hasValidParameters. Expected to pass since none of the parameters in
    FakeConfiguration are blank
    */
    public void testPostRestoreHookValidParameters() {
        Injector inject = Guice.createInjector(new TestModule());
        IPostRestoreHook postRestoreHook = inject.getInstance(IPostRestoreHook.class);
        Assert.assertTrue(postRestoreHook.hasValidParameters());
    }

    @Test
    /*
    Test to validate execute method. This is a happy path since heart beat file is emited as soon
    as test case starts, and postrestorehook completes execution once the child process completes
    execution. Test fails in case of any exception.
    */
    public void testPostRestoreHookExecuteHappyPath() throws Exception {
        Injector inject = Guice.createInjector(new TestModule());
        IPostRestoreHook postRestoreHook = inject.getInstance(IPostRestoreHook.class);
        IConfiguration configuration = inject.getInstance(IConfiguration.class);
        // Heartbeat starts immediately (no delay), so the hook never has to
        // restart its child process.
        startHeartBeatThreadWithDelay(
                0,
                configuration.getPostRestoreHookHeartbeatFileName(),
                configuration.getPostRestoreHookDoneFileName());
        postRestoreHook.execute();
    }

    @Test
    /*
    Test to validate execute method. This is a variant of above method, where heartbeat is
    produced after an initial delay. This delay causes PostRestoreHook to terminate the child
    process since there is no heartbeat multiple times, and eventually once the heartbeat starts,
    PostRestoreHook waits for the child process to complete execution. Test fails in case of any
    exception.
    */
    public void testPostRestoreHookExecuteHeartBeatDelay() throws Exception {
        Injector inject = Guice.createInjector(new TestModule());
        IPostRestoreHook postRestoreHook = inject.getInstance(IPostRestoreHook.class);
        IConfiguration configuration = inject.getInstance(IConfiguration.class);
        // 1s initial delay forces at least one child-process restart before
        // the heartbeat is observed.
        startHeartBeatThreadWithDelay(
                1000,
                configuration.getPostRestoreHookHeartbeatFileName(),
                configuration.getPostRestoreHookDoneFileName());
        postRestoreHook.execute();
    }

    /**
     * Starts a thread to emit heartbeat and finish with a done file.
     *
     * @param delayInMs any start up delay if needed
     * @param heartBeatfileName name of the heart beat file
     * @param doneFileName name of the done file
     */
    private void startHeartBeatThreadWithDelay(
            long delayInMs, String heartBeatfileName, String doneFileName) {
        Thread heartBeatEmitThread =
                new Thread(
                        () -> {
                            File heartBeatFile = new File(heartBeatfileName);
                            try {
                                // add a delay to heartbeat
                                Thread.sleep(delayInMs);
                                if (!heartBeatFile.exists() && !heartBeatFile.createNewFile()) {
                                    Assert.fail("Unable to create heartbeat file");
                                }
                                // Touch the heartbeat file once a second for
                                // ~10s so the hook sees a live child process.
                                for (int i = 0; i < 10; i++) {
                                    FileUtils.touch(heartBeatFile);
                                    Thread.sleep(1000);
                                }
                                // The done file signals completion to the hook.
                                File doneFile = new File(doneFileName);
                                doneFile.createNewFile();
                            } catch (Exception ex) {
                                Assert.fail(ex.getMessage());
                            }
                        });
        heartBeatEmitThread.start();
    }
}
| 3,180 |
0 | Create_ds/Priam/priam/src/test/java/com/netflix/priam | Create_ds/Priam/priam/src/test/java/com/netflix/priam/restore/TestRestore.java | /*
* Copyright 2018 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.priam.restore;
import com.google.inject.Guice;
import com.google.inject.Injector;
import com.netflix.priam.backup.BRTestModule;
import com.netflix.priam.backup.FakeBackupFileSystem;
import com.netflix.priam.backup.Status;
import com.netflix.priam.config.FakeConfiguration;
import com.netflix.priam.config.IConfiguration;
import com.netflix.priam.health.InstanceState;
import com.netflix.priam.identity.config.InstanceInfo;
import com.netflix.priam.utils.DateUtil;
import java.io.IOException;
import java.util.ArrayList;
import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.Test;
public class TestRestore {
private static FakeBackupFileSystem filesystem;
private static ArrayList<String> fileList = new ArrayList<>();
private static FakeConfiguration conf;
private static String region;
private static Restore restore;
private static InstanceState instanceState;
    @BeforeClass
    public static void setup() throws InterruptedException, IOException {
        // Build shared fixtures once; the null checks keep the static fields
        // stable if the JVM reuses this class across suite runs.
        Injector injector = Guice.createInjector(new BRTestModule());
        if (filesystem == null) filesystem = injector.getInstance(FakeBackupFileSystem.class);
        if (conf == null) conf = (FakeConfiguration) injector.getInstance(IConfiguration.class);
        region = injector.getInstance(InstanceInfo.class).getRegion();
        if (restore == null) restore = injector.getInstance(Restore.class);
        if (instanceState == null) instanceState = injector.getInstance(InstanceState.class);
    }
private static void populateBackupFileSystem(String baseDir) {
fileList.clear();
fileList.add(baseDir + "/" + region + "/fakecluster/123456/201108110030/META/meta.json");
fileList.add(
baseDir + "/" + region + "/fakecluster/123456/201108110030/SNAP/ks1/cf1/f1.db");
fileList.add(
baseDir + "/" + region + "/fakecluster/123456/201108110030/SNAP/ks1/cf1/f2.db");
fileList.add(
baseDir + "/" + region + "/fakecluster/123456/201108110030/SNAP/ks2/cf1/f2.db");
fileList.add(baseDir + "/" + region + "/fakecluster/123456/201108110530/SST/ks2/cf1/f3.db");
fileList.add(baseDir + "/" + region + "/fakecluster/123456/201108110600/SST/ks2/cf1/f4.db");
filesystem.setupTest(fileList);
conf.setRestorePrefix("RESTOREBUCKET/" + baseDir + "/" + region + "/fakecluster");
}
@Test
public void testRestore() throws Exception {
populateBackupFileSystem("test_backup");
String dateRange = "201108110030,201108110530";
restore.restore(new DateUtil.DateRange(dateRange));
Assert.assertTrue(filesystem.downloadedFiles.contains(fileList.get(0)));
Assert.assertTrue(filesystem.downloadedFiles.contains(fileList.get(1)));
Assert.assertTrue(filesystem.downloadedFiles.contains(fileList.get(2)));
Assert.assertTrue(filesystem.downloadedFiles.contains(fileList.get(3)));
Assert.assertFalse(filesystem.downloadedFiles.contains(fileList.get(4)));
Assert.assertFalse(filesystem.downloadedFiles.contains(fileList.get(5)));
Assert.assertEquals(Status.FINISHED, instanceState.getRestoreStatus().getStatus());
}
@Test
public void testRestoreWithIncremental() throws Exception {
populateBackupFileSystem("test_backup");
String dateRange = "201108110030,201108110730";
restore.restore(new DateUtil.DateRange(dateRange));
Assert.assertTrue(filesystem.downloadedFiles.contains(fileList.get(0)));
Assert.assertTrue(filesystem.downloadedFiles.contains(fileList.get(1)));
Assert.assertTrue(filesystem.downloadedFiles.contains(fileList.get(2)));
Assert.assertTrue(filesystem.downloadedFiles.contains(fileList.get(3)));
Assert.assertTrue(filesystem.downloadedFiles.contains(fileList.get(4)));
Assert.assertTrue(filesystem.downloadedFiles.contains(fileList.get(5)));
Assert.assertEquals(Status.FINISHED, instanceState.getRestoreStatus().getStatus());
}
@Test
public void testRestoreLatestWithEmptyMeta() throws Exception {
populateBackupFileSystem("test_backup");
String metafile =
"test_backup/" + region + "/fakecluster/123456/201108110130/META/meta.json";
filesystem.addFile(metafile);
String dateRange = "201108110030,201108110530";
restore.restore(new DateUtil.DateRange(dateRange));
Assert.assertFalse(filesystem.downloadedFiles.contains(fileList.get(0)));
Assert.assertTrue(filesystem.downloadedFiles.contains(metafile));
Assert.assertFalse(filesystem.downloadedFiles.contains(fileList.get(1)));
Assert.assertFalse(filesystem.downloadedFiles.contains(fileList.get(2)));
Assert.assertFalse(filesystem.downloadedFiles.contains(fileList.get(3)));
Assert.assertFalse(filesystem.downloadedFiles.contains(fileList.get(4)));
Assert.assertFalse(filesystem.downloadedFiles.contains(fileList.get(5)));
Assert.assertEquals(Status.FINISHED, instanceState.getRestoreStatus().getStatus());
Assert.assertEquals(metafile, instanceState.getRestoreStatus().getSnapshotMetaFile());
}
@Test
public void testRestoreLatest() throws Exception {
populateBackupFileSystem("test_backup");
String metafile =
"test_backup/" + region + "/fakecluster/123456/201108110130/META/meta.json";
filesystem.addFile(metafile);
String snapFile =
"test_backup/" + region + "/fakecluster/123456/201108110130/SNAP/ks1/cf1/f9.db";
filesystem.addFile(snapFile);
String dateRange = "201108110030,201108110530";
restore.restore(new DateUtil.DateRange(dateRange));
Assert.assertFalse(filesystem.downloadedFiles.contains(fileList.get(0)));
Assert.assertTrue(filesystem.downloadedFiles.contains(metafile));
Assert.assertTrue(filesystem.downloadedFiles.contains(snapFile));
Assert.assertFalse(filesystem.downloadedFiles.contains(fileList.get(1)));
Assert.assertFalse(filesystem.downloadedFiles.contains(fileList.get(2)));
Assert.assertFalse(filesystem.downloadedFiles.contains(fileList.get(3)));
Assert.assertFalse(filesystem.downloadedFiles.contains(fileList.get(4)));
Assert.assertFalse(filesystem.downloadedFiles.contains(fileList.get(5)));
Assert.assertEquals(Status.FINISHED, instanceState.getRestoreStatus().getStatus());
Assert.assertEquals(metafile, instanceState.getRestoreStatus().getSnapshotMetaFile());
}
@Test
public void testNoSnapshots() throws Exception {
populateBackupFileSystem("test_backup");
filesystem.setupTest(fileList);
String dateRange = "201109110030,201109110530";
restore.restore(new DateUtil.DateRange(dateRange));
Assert.assertEquals(Status.FAILED, instanceState.getRestoreStatus().getStatus());
}
@Test
public void testRestoreFromDiffCluster() throws Exception {
populateBackupFileSystem("test_backup_new");
String dateRange = "201108110030,201108110530";
restore.restore(new DateUtil.DateRange(dateRange));
System.out.println("Downloaded files: " + filesystem.downloadedFiles);
Assert.assertTrue(filesystem.downloadedFiles.contains(fileList.get(0)));
Assert.assertTrue(filesystem.downloadedFiles.contains(fileList.get(1)));
Assert.assertTrue(filesystem.downloadedFiles.contains(fileList.get(2)));
Assert.assertTrue(filesystem.downloadedFiles.contains(fileList.get(3)));
Assert.assertFalse(filesystem.downloadedFiles.contains(fileList.get(4)));
Assert.assertFalse(filesystem.downloadedFiles.contains(fileList.get(5)));
Assert.assertEquals(Status.FINISHED, instanceState.getRestoreStatus().getStatus());
}
}
| 3,181 |
0 | Create_ds/Priam/priam/src/test/java/com/netflix/priam | Create_ds/Priam/priam/src/test/java/com/netflix/priam/dse/DseConfigStub.java | /*
* Copyright 2017 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.priam.dse;
import com.netflix.priam.config.FakeConfiguration;
import com.netflix.priam.tuner.dse.IDseConfiguration;
import java.util.HashSet;
import java.util.Set;
/** Test stub of {@link IDseConfiguration} returning fixed values for DSE tuning tests. */
public class DseConfigStub implements IDseConfiguration {
    // Mutable so tests can toggle audit logging on the stub.
    private boolean auditLogEnabled;

    public String getDseYamlLocation() {
        return new FakeConfiguration().getCassHome() + "/resources/dse/conf/dse.yaml";
    }

    public String getDseDelegatingSnitch() {
        return null;
    }

    public NodeType getNodeType() {
        return null;
    }

    public boolean isAuditLogEnabled() {
        return auditLogEnabled;
    }

    public void setAuditLogEnabled(boolean b) {
        auditLogEnabled = b;
    }

    public String getAuditLogExemptKeyspaces() {
        return "YourSwellKeyspace";
    }

    public Set<AuditLogCategory> getAuditLogCategories() {
        // Plain HashSet instead of the original double-brace initialization, which
        // allocates an anonymous HashSet subclass per call and pins its enclosing scope.
        Set<AuditLogCategory> categories = new HashSet<>();
        categories.add(AuditLogCategory.ALL);
        return categories;
    }
}
| 3,182 |
0 | Create_ds/Priam/priam/src/main/java/com/netflix | Create_ds/Priam/priam/src/main/java/com/netflix/priam/PriamServer.java | /*
* Copyright 2013 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.priam;
import com.netflix.priam.backup.BackupService;
import com.netflix.priam.backupv2.BackupV2Service;
import com.netflix.priam.cluster.management.ClusterManagementService;
import com.netflix.priam.config.IConfiguration;
import com.netflix.priam.config.PriamConfigurationPersister;
import com.netflix.priam.defaultimpl.ICassandraProcess;
import com.netflix.priam.defaultimpl.IService;
import com.netflix.priam.health.CassandraMonitor;
import com.netflix.priam.identity.InstanceIdentity;
import com.netflix.priam.restore.RestoreContext;
import com.netflix.priam.scheduler.PriamScheduler;
import com.netflix.priam.tuner.CassandraTunerService;
import com.netflix.priam.utils.Sleeper;
import com.netflix.priam.utils.SystemUtils;
import java.io.IOException;
import javax.inject.Inject;
import javax.inject.Singleton;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/** Start all tasks here - Property update task - Backup task - Restore task - Incremental backup */
@Singleton
public class PriamServer implements IService {
    private final PriamScheduler scheduler;
    private final IConfiguration config;
    private final InstanceIdentity instanceIdentity;
    // NOTE(review): injected but not referenced in this class — confirm it is still needed.
    private final Sleeper sleeper;
    private final ICassandraProcess cassProcess;
    private final RestoreContext restoreContext;
    private final IService backupV2Service;
    private final IService backupService;
    private final IService cassandraTunerService;
    private final IService clusterManagementService;
    // Seconds to wait before the first CassandraMonitor run; gives a restore time to
    // stop any running Cassandra instance first (see comment in scheduleService).
    private static final int CASSANDRA_MONITORING_INITIAL_DELAY = 10;
    private static final Logger logger = LoggerFactory.getLogger(PriamServer.class);

    @Inject
    public PriamServer(
            IConfiguration config,
            PriamScheduler scheduler,
            InstanceIdentity id,
            Sleeper sleeper,
            ICassandraProcess cassProcess,
            RestoreContext restoreContext,
            BackupService backupService,
            BackupV2Service backupV2Service,
            CassandraTunerService cassandraTunerService,
            ClusterManagementService clusterManagementService) {
        this.config = config;
        this.scheduler = scheduler;
        this.instanceIdentity = id;
        this.sleeper = sleeper;
        this.cassProcess = cassProcess;
        this.restoreContext = restoreContext;
        this.backupService = backupService;
        this.backupV2Service = backupV2Service;
        this.cassandraTunerService = cassandraTunerService;
        this.clusterManagementService = clusterManagementService;
    }

    /** Creates every directory Priam and Cassandra need (commit logs, caches, data, logs). */
    private void createDirectories() throws IOException {
        SystemUtils.createDirs(config.getBackupCommitLogLocation());
        SystemUtils.createDirs(config.getCommitLogLocation());
        SystemUtils.createDirs(config.getCacheLocation());
        SystemUtils.createDirs(config.getDataFileLocation());
        SystemUtils.createDirs(config.getLogDirLocation());
    }

    /**
     * Boots the Priam services in a fixed order: directories, scheduler, tuning,
     * restore-or-start Cassandra, monitoring, cluster management, config persistence
     * and finally the V1/V2 backup services. The order is deliberate; do not reorder.
     */
    @Override
    public void scheduleService() throws Exception {
        // Create all the required directories for priam and Cassandra.
        createDirectories();

        // Do not start Priam if you are out of service.
        if (instanceIdentity.getInstance().isOutOfService()) return;

        // start to schedule jobs
        scheduler.start();

        // Set up cassandra tuning.
        cassandraTunerService.scheduleService();

        // Determine if we need to restore from backup else start cassandra.
        if (restoreContext.isRestoreEnabled()) {
            restoreContext.restore();
        } else { // no restores needed
            logger.info("No restore needed, task not scheduled");
            if (!config.doesCassandraStartManually()) cassProcess.start(true); // Start cassandra.
            else
                logger.info(
                        "config.doesCassandraStartManually() is set to True, hence Cassandra needs to be started manually ...");
        }

        /*
         * Run the delayed task (after 10 seconds) to Monitor Cassandra
         * If Restore option is chosen, then Running Cassandra instance is stopped
         * Hence waiting for Cassandra to stop
         */
        scheduler.addTaskWithDelay(
                CassandraMonitor.JOBNAME,
                CassandraMonitor.class,
                CassandraMonitor.getTimer(),
                CASSANDRA_MONITORING_INITIAL_DELAY);

        // Set up management services like flush, compactions etc.
        clusterManagementService.scheduleService();

        // Set up the background configuration dumping thread
        scheduleTask(
                scheduler,
                PriamConfigurationPersister.class,
                PriamConfigurationPersister.getTimer(config));

        // Set up V1 Snapshot Service
        backupService.scheduleService();

        // Set up V2 Snapshot Service
        backupV2Service.scheduleService();
    }

    /** No pre-update work required for this service. */
    @Override
    public void updateServicePre() throws Exception {}

    /** No post-update work required for this service. */
    @Override
    public void updateServicePost() throws Exception {}

    /** @return the identity of this instance within the cluster */
    public InstanceIdentity getInstanceIdentity() {
        return instanceIdentity;
    }

    /** @return the shared scheduler that runs all Priam tasks */
    public PriamScheduler getScheduler() {
        return scheduler;
    }

    /** @return the active Priam configuration */
    public IConfiguration getConfiguration() {
        return config;
    }
}
| 3,183 |
0 | Create_ds/Priam/priam/src/main/java/com/netflix/priam | Create_ds/Priam/priam/src/main/java/com/netflix/priam/tuner/PropertiesFileTuner.java | package com.netflix.priam.tuner;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.base.Splitter;
import com.netflix.priam.config.IConfiguration;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;
import java.util.Map;
import javax.inject.Inject;
import org.apache.commons.configuration2.PropertiesConfiguration;
import org.apache.commons.configuration2.ex.ConfigurationException;
import org.apache.commons.io.FilenameUtils;
import org.apache.commons.text.StringSubstitutor;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Support tuning standard .properties files
*
* <p>
*/
public class PropertiesFileTuner {
    private static final Logger logger = LoggerFactory.getLogger(PropertiesFileTuner.class);
    protected final IConfiguration config;

    @Inject
    public PropertiesFileTuner(IConfiguration config) {
        this.config = config;
    }

    /**
     * Loads the given .properties file, applies any overrides configured under
     * {@code propertyOverrides.<basename>} (a comma-separated {@code key=value} list,
     * with IConfiguration fields available as template variables), and writes the
     * file back in place preserving its layout.
     *
     * @param propertyFile path of the properties file to tune in place
     * @throws IOException if the file cannot be read or written
     * @throws ConfigurationException if the file cannot be parsed or serialized
     */
    @SuppressWarnings("unchecked")
    public void updateAndSaveProperties(String propertyFile)
            throws IOException, ConfigurationException {
        try {
            PropertiesConfiguration properties = new PropertiesConfiguration();
            // try-with-resources: the original leaked this FileReader.
            try (FileReader reader = new FileReader(propertyFile)) {
                properties.getLayout().load(properties, reader);
            }
            String overrides =
                    config.getProperty(
                            "propertyOverrides." + FilenameUtils.getBaseName(propertyFile), null);
            if (overrides != null) {
                // Allow use of the IConfiguration object as template strings
                Map<String, Object> map = new ObjectMapper().convertValue(config, Map.class);
                String resolvedOverrides = new StringSubstitutor(map).replace(overrides);
                Splitter.on(",")
                        .withKeyValueSeparator("=")
                        .split(resolvedOverrides)
                        .forEach(properties::setProperty);
            }
            // try-with-resources: the original leaked this FileWriter as well.
            try (FileWriter writer = new FileWriter(propertyFile)) {
                properties.getLayout().save(properties, writer);
            }
        } catch (IOException | ConfigurationException e) {
            logger.error("Could not tune " + propertyFile + ". Does it exist? Is it writable?", e);
            throw e;
        }
    }
}
| 3,184 |
0 | Create_ds/Priam/priam/src/main/java/com/netflix/priam | Create_ds/Priam/priam/src/main/java/com/netflix/priam/tuner/ICassandraTuner.java | /**
* Copyright 2017 Netflix, Inc.
*
* <p>Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a copy of the License at
*
* <p>http://www.apache.org/licenses/LICENSE-2.0
*
* <p>Unless required by applicable law or agreed to in writing, software distributed under the
* License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.priam.tuner;
import com.google.inject.ImplementedBy;
import java.io.IOException;
@ImplementedBy(StandardTuner.class)
public interface ICassandraTuner {
    /**
     * Writes all tuned Cassandra configuration (cassandra.yaml plus any other tunable
     * property files) for this node.
     *
     * @param yamlLocation path of the cassandra.yaml file to rewrite in place
     * @param hostname address used for listen/rpc addresses in the yaml
     * @param seedProvider fully qualified class name of the seed provider to configure
     * @throws Exception if the configuration cannot be read or written
     */
    void writeAllProperties(String yamlLocation, String hostname, String seedProvider)
            throws Exception;

    /**
     * Rewrites the {@code auto_bootstrap} flag in the given cassandra.yaml.
     *
     * @param yamlLocation path of the cassandra.yaml file to rewrite in place
     * @param autobootstrap value to set for {@code auto_bootstrap}
     * @throws IOException if the yaml file cannot be read or written
     */
    void updateAutoBootstrap(String yamlLocation, boolean autobootstrap) throws IOException;

    /** Tunes JVM options; the default implementation is a no-op. */
    default void updateJVMOptions() throws Exception {};
}
| 3,185 |
0 | Create_ds/Priam/priam/src/main/java/com/netflix/priam | Create_ds/Priam/priam/src/main/java/com/netflix/priam/tuner/StandardTuner.java | /**
* Copyright 2017 Netflix, Inc.
*
* <p>Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a copy of the License at
*
* <p>http://www.apache.org/licenses/LICENSE-2.0
*
* <p>Unless required by applicable law or agreed to in writing, software distributed under the
* License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.priam.tuner;
import com.google.common.collect.Lists;
import com.netflix.priam.backup.IncrementalBackup;
import com.netflix.priam.config.IBackupRestoreConfig;
import com.netflix.priam.config.IConfiguration;
import com.netflix.priam.identity.config.InstanceInfo;
import com.netflix.priam.restore.Restore;
import java.io.*;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import javax.inject.Inject;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.yaml.snakeyaml.DumperOptions;
import org.yaml.snakeyaml.Yaml;
/**
* Tune the standard cassandra parameters/configurations. eg. cassandra.yaml, jvm.options, bootstrap
* etc.
*/
public class StandardTuner implements ICassandraTuner {
    private static final Logger logger = LoggerFactory.getLogger(StandardTuner.class);
    protected final IConfiguration config;
    protected final IBackupRestoreConfig backupRestoreConfig;
    private final InstanceInfo instanceInfo;

    @Inject
    public StandardTuner(
            IConfiguration config,
            IBackupRestoreConfig backupRestoreConfig,
            InstanceInfo instanceInfo) {
        this.config = config;
        this.backupRestoreConfig = backupRestoreConfig;
        this.instanceInfo = instanceInfo;
    }

    /**
     * Loads cassandra.yaml, overlays every Priam-managed setting (cluster name, ports,
     * directories, snitch, auth, throughput, caches, seed provider, security, extra and
     * custom params), writes the yaml back in place, then configures commit log backups
     * and tunes any additional property files.
     *
     * @param yamlLocation path of the cassandra.yaml file to rewrite in place
     * @param hostname address written to listen_address and rpc_address
     * @param seedProvider fully qualified class name written into seed_provider
     * @throws Exception if the yaml or any property file cannot be read or written
     */
    @SuppressWarnings("unchecked")
    public void writeAllProperties(String yamlLocation, String hostname, String seedProvider)
            throws Exception {
        DumperOptions options = new DumperOptions();
        options.setDefaultFlowStyle(DumperOptions.FlowStyle.BLOCK);
        Yaml yaml = new Yaml(options);
        File yamlFile = new File(yamlLocation);
        Map map;
        // try-with-resources: the original leaked this FileInputStream.
        try (InputStream yamlStream = new FileInputStream(yamlFile)) {
            map = yaml.load(yamlStream);
        }
        map.put("cluster_name", config.getAppName());
        map.put("storage_port", config.getStoragePort());
        map.put("ssl_storage_port", config.getSSLStoragePort());
        map.put("start_rpc", config.isThriftEnabled());
        map.put("rpc_port", config.getThriftPort());
        map.put("start_native_transport", config.isNativeTransportEnabled());
        map.put("native_transport_port", config.getNativeTransportPort());
        map.put("listen_address", hostname);
        map.put("rpc_address", hostname);
        // Dont bootstrap in restore mode
        if (!Restore.isRestoreEnabled(config, instanceInfo)) {
            map.put("auto_bootstrap", config.getAutoBoostrap());
        } else {
            map.put("auto_bootstrap", false);
        }
        map.put("saved_caches_directory", config.getCacheLocation());
        map.put("commitlog_directory", config.getCommitLogLocation());
        map.put("data_file_directories", Lists.newArrayList(config.getDataFileLocation()));
        boolean enableIncremental = IncrementalBackup.isEnabled(config, backupRestoreConfig);
        map.put("incremental_backups", enableIncremental);
        // Use the overridable hook so derived tuners can inject a wrapper snitch; the
        // original called config.getSnitch() directly, bypassing getSnitch() below.
        map.put("endpoint_snitch", getSnitch());
        // Drop options that are obsolete or known to cause trouble on modern Cassandra.
        if (map.containsKey("in_memory_compaction_limit_in_mb")) {
            map.remove("in_memory_compaction_limit_in_mb");
        }
        map.put("compaction_throughput_mb_per_sec", config.getCompactionThroughput());
        map.put(
                "partitioner",
                derivePartitioner(map.get("partitioner").toString(), config.getPartitioner()));
        if (map.containsKey("memtable_total_space_in_mb")) {
            map.remove("memtable_total_space_in_mb");
        }
        map.put("stream_throughput_outbound_megabits_per_sec", config.getStreamingThroughputMB());
        if (map.containsKey("multithreaded_compaction")) {
            map.remove("multithreaded_compaction");
        }
        map.put("max_hint_window_in_ms", config.getMaxHintWindowInMS());
        map.put("hinted_handoff_throttle_in_kb", config.getHintedHandoffThrottleKb());
        map.put("authenticator", config.getAuthenticator());
        map.put("authorizer", config.getAuthorizer());
        map.put("internode_compression", config.getInternodeCompression());
        map.put("dynamic_snitch", config.isDynamicSnitchEnabled());
        map.put("concurrent_reads", config.getConcurrentReadsCnt());
        map.put("concurrent_writes", config.getConcurrentWritesCnt());
        map.put("concurrent_compactors", config.getConcurrentCompactorsCnt());
        map.put("rpc_server_type", config.getRpcServerType());
        map.put("rpc_min_threads", config.getRpcMinThreads());
        map.put("rpc_max_threads", config.getRpcMaxThreads());
        // Add private ip address as broadcast_rpc_address. This will ensure that COPY function
        // works correctly.
        map.put("broadcast_rpc_address", instanceInfo.getPrivateIP());
        map.put("tombstone_warn_threshold", config.getTombstoneWarnThreshold());
        map.put("tombstone_failure_threshold", config.getTombstoneFailureThreshold());
        map.put("streaming_socket_timeout_in_ms", config.getStreamingSocketTimeoutInMS());
        map.put("memtable_cleanup_threshold", config.getMemtableCleanupThreshold());
        map.put(
                "compaction_large_partition_warning_threshold_mb",
                config.getCompactionLargePartitionWarnThresholdInMB());
        map.put("auto_snapshot", config.getAutoSnapshot());
        map.put("disk_failure_policy", config.getDiskFailurePolicy());
        // seed_provider is a single-element list of {class_name, parameters} maps.
        List<?> seedp = (List) map.get("seed_provider");
        Map<String, String> m = (Map<String, String>) seedp.get(0);
        m.put("class_name", seedProvider);
        configfureSecurity(map);
        configureGlobalCaches(config, map);
        // force to 1 until vnodes are properly supported
        map.put("num_tokens", 1);
        // Additional C* Yaml properties, which can be set via Priam.extra.params
        addExtraCassParams(map);
        // Custom specific C* yaml properties which might not be available in Apache C* OSS
        addCustomCassParams(map);
        // remove troublesome properties
        map.remove("flush_largest_memtables_at");
        map.remove("reduce_cache_capacity_to");
        logger.info(yaml.dump(map));
        // try-with-resources: the original leaked this FileWriter.
        try (Writer yamlWriter = new FileWriter(yamlFile)) {
            yaml.dump(map, yamlWriter);
        }
        // TODO: port commit log backups to the PropertiesFileTuner implementation
        configureCommitLogBackups();
        PropertiesFileTuner propertyTuner = new PropertiesFileTuner(config);
        for (String propertyFile : config.getTunablePropertyFiles()) {
            propertyTuner.updateAndSaveProperties(propertyFile);
        }
    }

    /**
     * This method can be overwritten in child classes for any additional tunings to C* Yaml.
     * Default implementation is left empty intentionally for child classes to override. This is
     * useful when custom YAML properties are supported in deployed C*.
     *
     * @param map parsed cassandra.yaml contents to mutate in place
     */
    protected void addCustomCassParams(Map map) {}

    /**
     * Overridable by derived classes to inject a wrapper snitch.
     *
     * @return Snitch to be used by this cluster
     */
    protected String getSnitch() {
        return config.getSnitch();
    }

    /** Setup the cassandra 1.1 global cache values (key/row cache sizes and keys-to-save). */
    private void configureGlobalCaches(IConfiguration config, Map yaml) {
        final String keyCacheSize = config.getKeyCacheSizeInMB();
        if (!StringUtils.isEmpty(keyCacheSize)) {
            yaml.put("key_cache_size_in_mb", Integer.valueOf(keyCacheSize));
            final String keyCount = config.getKeyCacheKeysToSave();
            if (!StringUtils.isEmpty(keyCount))
                yaml.put("key_cache_keys_to_save", Integer.valueOf(keyCount));
        }
        final String rowCacheSize = config.getRowCacheSizeInMB();
        if (!StringUtils.isEmpty(rowCacheSize)) {
            yaml.put("row_cache_size_in_mb", Integer.valueOf(rowCacheSize));
            final String rowCount = config.getRowCacheKeysToSave();
            if (!StringUtils.isEmpty(rowCount))
                yaml.put("row_cache_keys_to_save", Integer.valueOf(rowCount));
        }
    }

    /**
     * Picks the partitioner to write: the configured one, unless the yaml already names a
     * non-standard partitioner, which we must not overwrite.
     */
    String derivePartitioner(String fromYaml, String fromConfig) {
        if (fromYaml == null || fromYaml.isEmpty()) return fromConfig;
        // this check is to prevent against overwriting an existing yaml file that has
        // a partitioner not RandomPartitioner or (as of cass 1.2) Murmur3Partitioner.
        // basically we don't want to hose existing deployments by changing the partitioner
        // unexpectedly on them
        final String lowerCase = fromYaml.toLowerCase();
        if (lowerCase.contains("randomparti") || lowerCase.contains("murmur")) return fromConfig;
        return fromYaml;
    }

    // NOTE(review): misspelled name ("configfure") kept for backward compatibility —
    // it is protected and may be overridden by subclasses outside this file.
    protected void configfureSecurity(Map map) {
        // the client-side ssl settings
        Map clientEnc = (Map) map.get("client_encryption_options");
        clientEnc.put("enabled", config.isClientSslEnabled());
        // the server-side (internode) ssl settings
        Map serverEnc = (Map) map.get("server_encryption_options");
        serverEnc.put("internode_encryption", config.getInternodeEncryption());
    }

    /** Writes commitlog_archiving.properties when commit log backups are enabled. */
    protected void configureCommitLogBackups() {
        if (!config.isBackingUpCommitLogs()) return;
        Properties props = new Properties();
        props.put("archive_command", config.getCommitLogBackupArchiveCmd());
        props.put("restore_command", config.getCommitLogBackupRestoreCmd());
        props.put("restore_directories", config.getCommitLogBackupRestoreFromDirs());
        props.put("restore_point_in_time", config.getCommitLogBackupRestorePointInTime());
        try (FileOutputStream fos =
                new FileOutputStream(new File(config.getCommitLogBackupPropsFile()))) {
            props.store(fos, "cassandra commit log archive props, as written by priam");
        } catch (IOException e) {
            logger.error("Could not store commitlog_archiving.properties", e);
        }
    }

    /** Rewrites only the auto_bootstrap flag in the given cassandra.yaml. */
    public void updateAutoBootstrap(String yamlFile, boolean autobootstrap) throws IOException {
        DumperOptions options = new DumperOptions();
        options.setDefaultFlowStyle(DumperOptions.FlowStyle.BLOCK);
        Yaml yaml = new Yaml(options);
        @SuppressWarnings("rawtypes")
        Map map;
        // try-with-resources: the original leaked this FileInputStream.
        try (InputStream yamlStream = new FileInputStream(yamlFile)) {
            map = yaml.load(yamlStream);
        }
        // Dont bootstrap in restore mode
        map.put("auto_bootstrap", autobootstrap);
        if (logger.isInfoEnabled()) {
            logger.info("Updating yaml: " + yaml.dump(map));
        }
        // try-with-resources: the original leaked this FileWriter.
        try (Writer yamlWriter = new FileWriter(yamlFile)) {
            yaml.dump(map, yamlWriter);
        }
    }

    @Override
    public final void updateJVMOptions() throws Exception {
        if (config.supportsTuningJVMOptionsFile()) {
            JVMOptionsTuner tuner = new JVMOptionsTuner(config);
            // Overwrite default jvm.options file.
            tuner.updateAndSaveJVMOptions(config.getJVMOptionsFileLocation());
        }
    }

    /**
     * Applies extra yaml properties configured as a comma-separated list of
     * {@code priamKey=cassKey} pairs; dotted cassKeys address one level of nesting.
     */
    public void addExtraCassParams(Map map) {
        String params = config.getExtraConfigParams();
        if (StringUtils.isEmpty(params)) {
            logger.info("Updating yaml: no extra cass params");
            return;
        }
        String[] pairs = params.split(",");
        logger.info("Updating yaml: adding extra cass params");
        for (String pair1 : pairs) {
            String[] pair = pair1.split("=");
            // Robustness: skip malformed entries instead of throwing AIOOBE.
            if (pair.length != 2) {
                logger.warn("Updating yaml: skipping malformed extra param [{}]", pair1);
                continue;
            }
            String priamKey = pair[0];
            String cassKey = pair[1];
            String cassVal = config.getCassYamlVal(priamKey);
            if (!StringUtils.isBlank(cassKey) && !StringUtils.isBlank(cassVal)) {
                if (!cassKey.contains(".")) {
                    logger.info(
                            "Updating yaml: PriamKey: [{}], Key: [{}], OldValue: [{}], NewValue: [{}]",
                            priamKey,
                            cassKey,
                            map.get(cassKey),
                            cassVal);
                    map.put(cassKey, cassVal);
                } else {
                    // split the cassandra key. We will get the group and get the key name.
                    String[] cassKeySplit = cassKey.split("\\.");
                    Map cassKeyMap = ((Map) map.getOrDefault(cassKeySplit[0], new HashMap()));
                    map.putIfAbsent(cassKeySplit[0], cassKeyMap);
                    logger.info(
                            "Updating yaml: PriamKey: [{}], Key: [{}], OldValue: [{}], NewValue: [{}]",
                            priamKey,
                            cassKey,
                            cassKeyMap.get(cassKeySplit[1]),
                            cassVal);
                    cassKeyMap.put(cassKeySplit[1], cassVal);
                }
            }
        }
    }
}
| 3,186 |
0 | Create_ds/Priam/priam/src/main/java/com/netflix/priam | Create_ds/Priam/priam/src/main/java/com/netflix/priam/tuner/GCType.java | /*
* Copyright 2016 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.priam.tuner;
import com.netflix.priam.scheduler.UnsupportedTypeException;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Garbage collection types supported by Priam for Cassandra (CMS/G1GC). Created by aagrawal on
* 8/24/17.
*/
public enum GCType {
    CMS("CMS"),
    G1GC("G1GC");

    private static final Logger logger = LoggerFactory.getLogger(GCType.class);
    private final String gcType;

    GCType(String gcType) {
        this.gcType = gcType.toUpperCase();
    }

    /*
     * Helper method to find the garbage collection type - case insensitive as user may put value which are not right case.
     * This returns the GCType if one is found. Refer to table below to understand the use-case.
     *
     * GCTypeValue|acceptNullorEmpty|acceptIllegalValue|Result
     * Valid value |NA              |NA                |GCType
     * Empty string|True            |NA                |NULL
     * NULL        |True            |NA                |NULL
     * Empty string|False           |NA                |UnsupportedTypeException
     * NULL        |False           |NA                |UnsupportedTypeException
     * Illegal value|NA             |True              |NULL
     * Illegal value|NA             |False             |UnsupportedTypeException
     */
    public static GCType lookup(
            String gcType, boolean acceptNullOrEmpty, boolean acceptIllegalValue)
            throws UnsupportedTypeException {
        if (StringUtils.isEmpty(gcType))
            if (acceptNullOrEmpty) return null;
            else {
                String message =
                        String.format(
                                "%s is not a supported GC Type. Supported values are %s",
                                gcType, getSupportedValues());
                logger.error(message);
                throw new UnsupportedTypeException(message);
            }
        try {
            return GCType.valueOf(gcType.toUpperCase());
        } catch (IllegalArgumentException ex) {
            String message =
                    String.format(
                            "%s is not a supported GCType. Supported values are %s",
                            gcType, getSupportedValues());
            if (acceptIllegalValue) {
                message =
                        message
                                + ". Since acceptIllegalValue is set to True, returning NULL instead.";
                logger.error(message);
                return null;
            }
            logger.error(message);
            throw new UnsupportedTypeException(message, ex);
        }
    }

    /** @return comma-joined enum names, e.g. {@code "CMS,G1GC"}, for error messages. */
    private static String getSupportedValues() {
        // StringUtils.join replaces the original hand-rolled StringBuilder loop.
        return StringUtils.join(GCType.values(), ",");
    }

    /** Strict lookup: rejects null/empty and illegal values with UnsupportedTypeException. */
    public static GCType lookup(String gcType) throws UnsupportedTypeException {
        return lookup(gcType, false, false);
    }

    public String getGcType() {
        return gcType;
    }
}
| 3,187 |
0 | Create_ds/Priam/priam/src/main/java/com/netflix/priam | Create_ds/Priam/priam/src/main/java/com/netflix/priam/tuner/JVMOption.java | /*
* Copyright 2016 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.priam.tuner;
import java.util.Objects;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.apache.commons.lang3.StringUtils;
/** POJO to parse and store the JVM option from jvm.options file. Created by aagrawal on 8/28/17. */
/** POJO to parse and store a single JVM option from the jvm.options file. */
public class JVMOption {
    private String jvmOption;
    private String value;
    private boolean isCommented;
    private boolean isHeapJVMOption;

    /** Matches generic options of the form "[#]-name[=value]". */
    private static final Pattern pattern = Pattern.compile("(#)*(-[^=]+)=?(.*)?");
    // Heap options (-Xmx/-Xms/-Xmn) need their own pattern because the value is
    // appended directly to the flag (e.g. -Xmx8G) instead of separated by "=".
    private static final Pattern heapPattern =
            Pattern.compile(
                    "(#)*(-Xm[x|s|n])([0-9]+[K|M|G])?");

    public JVMOption(String jvmOption) {
        this.jvmOption = jvmOption;
    }

    public JVMOption(String jvmOption, String value, boolean isCommented, boolean isHeapJVMOption) {
        this.jvmOption = jvmOption;
        this.value = value;
        this.isCommented = isCommented;
        this.isHeapJVMOption = isHeapJVMOption;
    }

    /** Renders the option back into jvm.options syntax (heap options omit the "="). */
    public String toJVMOptionString() {
        final StringBuilder sb = new StringBuilder();
        if (isCommented) sb.append("#");
        sb.append(jvmOption);
        if (value != null) {
            if (!isHeapJVMOption) sb.append("=");
            sb.append(value);
        }
        return sb.toString();
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;
        JVMOption jvmOption1 = (JVMOption) o;
        return isCommented == jvmOption1.isCommented
                && isHeapJVMOption == jvmOption1.isHeapJVMOption
                && Objects.equals(jvmOption, jvmOption1.jvmOption)
                && Objects.equals(value, jvmOption1.value);
    }

    @Override
    public int hashCode() {
        return Objects.hash(jvmOption, value, isCommented, isHeapJVMOption);
    }

    public String getJvmOption() {
        return jvmOption;
    }

    public JVMOption setJvmOption(String jvmOption) {
        this.jvmOption = jvmOption.trim();
        return this;
    }

    public String getValue() {
        return value;
    }

    public JVMOption setValue(String value) {
        // Ignore null/empty so a missing value does not clobber an existing one.
        // (Stdlib check replaces the unnecessary commons-lang StringUtils.isEmpty.)
        if (value != null && !value.isEmpty()) this.value = value;
        return this;
    }

    public boolean isCommented() {
        return isCommented;
    }

    public JVMOption setCommented(boolean commented) {
        isCommented = commented;
        return this;
    }

    public boolean isHeapJVMOption() {
        return isHeapJVMOption;
    }

    public JVMOption setHeapJVMOption(boolean heapJVMOption) {
        isHeapJVMOption = heapJVMOption;
        return this;
    }

    /**
     * Parses one jvm.options line.
     *
     * @param line raw line, e.g. "#-Xmx8G" or "-Dfoo=bar"
     * @return the parsed option, or null if the line is not a JVM option
     */
    public static JVMOption parse(String line) {
        // Heap options first: they use a different key/value syntax.
        Matcher matcher = heapPattern.matcher(line);
        if (matcher.matches()) {
            return new JVMOption(matcher.group(2))
                    .setCommented(matcher.group(1) != null)
                    .setValue(matcher.group(3))
                    .setHeapJVMOption(true);
        }
        // Otherwise try the generic "-name[=value]" form.
        matcher = pattern.matcher(line);
        if (matcher.matches()) {
            return new JVMOption(matcher.group(2))
                    .setCommented(matcher.group(1) != null)
                    .setValue(matcher.group(3));
        }
        return null;
    }
}
| 3,188 |
0 | Create_ds/Priam/priam/src/main/java/com/netflix/priam | Create_ds/Priam/priam/src/main/java/com/netflix/priam/tuner/TuneCassandra.java | /*
* Copyright 2017 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.priam.tuner;
import com.netflix.priam.config.IConfiguration;
import com.netflix.priam.health.InstanceState;
import com.netflix.priam.scheduler.SimpleTimer;
import com.netflix.priam.scheduler.Task;
import com.netflix.priam.scheduler.TaskTimer;
import java.io.IOException;
import javax.inject.Inject;
import javax.inject.Singleton;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@Singleton
public class TuneCassandra extends Task {
    private static final Logger LOGGER = LoggerFactory.getLogger(TuneCassandra.class);
    private static final String JOBNAME = "Tune-Cassandra";
    private final ICassandraTuner tuner;
    private final InstanceState instanceState;

    @Inject
    public TuneCassandra(
            IConfiguration config, ICassandraTuner tuner, InstanceState instanceState) {
        super(config);
        this.tuner = tuner;
        this.instanceState = instanceState;
    }

    /**
     * Writes all tuned properties and JVM options, retrying until the yaml write
     * succeeds; records success on the instance state once written.
     */
    public void execute() throws Exception {
        boolean tuned = false;
        do {
            try {
                tuner.writeAllProperties(
                        config.getYamlLocation(), null, config.getSeedProviderName());
                tuner.updateJVMOptions();
                tuned = true;
                instanceState.setYmlWritten(true);
            } catch (IOException e) {
                // IO failures are retried; anything else propagates to the scheduler.
                LOGGER.error("Fail writing cassandra.yml file. Retry again!", e);
            }
        } while (!tuned);
    }

    @Override
    public String getName() {
        return JOBNAME;
    }

    /** @return a one-shot timer for this job. */
    public static TaskTimer getTimer() {
        return new SimpleTimer(JOBNAME);
    }
}
| 3,189 |
0 | Create_ds/Priam/priam/src/main/java/com/netflix/priam | Create_ds/Priam/priam/src/main/java/com/netflix/priam/tuner/JVMOptionsTuner.java | /*
* Copyright 2017 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.priam.tuner;
import com.netflix.priam.config.IConfiguration;
import java.io.File;
import java.nio.file.Files;
import java.util.*;
import java.util.stream.Collectors;
import javax.inject.Inject;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* This is to tune the jvm.options file introduced in Cassandra 3.x to pass JVM parameters to
* Cassandra. It supports configuring GC type (CMS/G1GC) where it automatically activates default
* properties as provided in jvm.options file. Note that this will not "add" any GC options.
*
* <p>Created by aagrawal on 8/23/17.
*/
public class JVMOptionsTuner {
    private static final Logger logger = LoggerFactory.getLogger(JVMOptionsTuner.class);
    protected final IConfiguration config;

    @Inject
    public JVMOptionsTuner(IConfiguration config) {
        this.config = config;
    }

    /**
     * Update the JVM options file and save to a file for cassandra by updating/removing JVM options
     * {@link IConfiguration#getJVMExcludeSet()} and {@link IConfiguration#getJVMUpsertSet()},
     * configuring GC {@link IConfiguration#getGCType()}etc.
     *
     * @param outputFile File name with which this configured JVM options should be written.
     * @throws Exception when encountered with invalid configured GC type. {@link
     *     IConfiguration#getGCType()}
     */
    public void updateAndSaveJVMOptions(final String outputFile) throws Exception {
        List<String> configuredJVMOptions = updateJVMOptions();

        if (logger.isInfoEnabled()) {
            // StringBuilder instead of StringBuffer: no concurrent access here.
            StringBuilder buffer = new StringBuilder("\n");
            configuredJVMOptions.forEach(line -> buffer.append(line).append("\n"));
            logger.info("Updating jvm.options with following values: " + buffer.toString());
        }

        // Verify we can write to output file and it is not directory.
        File file = new File(outputFile);
        if (file.exists() && !file.canWrite()) {
            throw new Exception("Not enough permissions to write to file: " + outputFile);
        }

        // Write jvm.options back to override defaults.
        Files.write(file.toPath(), configuredJVMOptions);
    }

    /**
     * Update the JVM options file for cassandra by updating/removing JVM options {@link
     * IConfiguration#getJVMExcludeSet()} and {@link IConfiguration#getJVMUpsertSet()}, configuring
     * GC {@link IConfiguration#getGCType()}etc.
     *
     * @return List of Configuration as String after reading the configuration from jvm.options
     * @throws Exception when encountered with invalid configured GC type. {@link
     *     IConfiguration#getGCType()}
     */
    protected List<String> updateJVMOptions() throws Exception {
        File jvmOptionsFile = new File(config.getJVMOptionsFileLocation());
        validate(jvmOptionsFile);
        final GCType configuredGC = config.getGCType();
        final Map<String, JVMOption> excludeSet =
                JVMOptionsTuner.parseJVMOptions(config.getJVMExcludeSet());

        // Make a copy of upsertSet, so we can delete the entries as we process them.
        Map<String, JVMOption> upsertSet =
                JVMOptionsTuner.parseJVMOptions(config.getJVMUpsertSet());

        // Don't use streams for processing as upsertSet jvm options needs to be removed if we find
        // them already in jvm.options file.
        // readAllLines (instead of an unclosed Files.lines stream) so the file handle is released.
        List<String> optionsFromFile = Files.readAllLines(jvmOptionsFile.toPath());
        List<String> configuredOptions = new LinkedList<>();
        for (String line : optionsFromFile) {
            configuredOptions.add(
                    updateConfigurationValue(line, configuredGC, upsertSet, excludeSet));
        }

        // Add all the upserts(inserts only left) from config.
        if (upsertSet != null && !upsertSet.isEmpty()) {
            configuredOptions.add("#################");
            configuredOptions.add("# USER PROVIDED CUSTOM JVM CONFIGURATIONS #");
            configuredOptions.add("#################");
            configuredOptions.addAll(
                    upsertSet
                            .values()
                            .stream()
                            .map(JVMOption::toJVMOptionString)
                            .collect(Collectors.toList()));
        }
        return configuredOptions;
    }

    /** Uncomments the option and sets its value, but only when a value is actually configured. */
    private void setHeapSetting(String configuredValue, JVMOption option) {
        if (!StringUtils.isEmpty(configuredValue))
            option.setCommented(false).setValue(configuredValue);
    }

    /**
     * @param line a line as read from jvm.options file.
     * @param configuredGC GCType configured by user for Cassandra.
     * @param upsertSet configured upsert set of JVM properties as provided by user for Cassandra.
     *     Entries applied here are removed from the map so only pure inserts remain afterwards.
     * @param excludeSet configured exclude set of JVM properties as provided by user for Cassandra.
     * @return the "comment" as is, if not a valid JVM option. Else, a string representation of JVM
     *     option
     */
    private String updateConfigurationValue(
            final String line,
            GCType configuredGC,
            Map<String, JVMOption> upsertSet,
            Map<String, JVMOption> excludeSet) {
        JVMOption option = JVMOption.parse(line);
        if (option == null) return line;

        // Is parameter for heap setting.
        if (option.isHeapJVMOption()) {
            String configuredValue;
            switch (option.getJvmOption()) {
                    // Special handling for heap new size ("Xmn")
                case "-Xmn":
                    configuredValue = config.getHeapNewSize();
                    break;
                    // Set min and max heap size to same value
                default:
                    configuredValue = config.getHeapSize();
                    break;
            }
            setHeapSetting(configuredValue, option);
        }

        // We don't want Xmn with G1GC, allow the GC to determine optimal young gen
        if (option.getJvmOption().equals("-Xmn") && configuredGC == GCType.G1GC)
            option.setCommented(true);

        // Is parameter for GC: comment out options belonging to the non-configured collector.
        GCType gcType = GCTuner.getGCType(option);
        if (gcType != null) {
            option.setCommented(gcType != configuredGC);
        }

        // See if option is in upsert list.
        if (upsertSet != null && upsertSet.containsKey(option.getJvmOption())) {
            JVMOption configuration = upsertSet.get(option.getJvmOption());
            option.setCommented(false);
            option.setValue(configuration.getValue());
            upsertSet.remove(option.getJvmOption());
        }

        // See if option is in exclude list.
        if (excludeSet != null && excludeSet.containsKey(option.getJvmOption()))
            option.setCommented(true);

        return option.toJVMOptionString();
    }

    /** Ensures the jvm.options file exists, is a regular file, and is readable and writable. */
    private void validate(File jvmOptionsFile) throws Exception {
        if (!jvmOptionsFile.exists())
            throw new Exception(
                    "JVM Option File does not exist: " + jvmOptionsFile.getAbsolutePath());

        if (jvmOptionsFile.isDirectory())
            throw new Exception(
                    "JVM Option File is a directory: " + jvmOptionsFile.getAbsolutePath());

        if (!jvmOptionsFile.canRead() || !jvmOptionsFile.canWrite())
            throw new Exception(
                    "JVM Option File does not have right permission: "
                            + jvmOptionsFile.getAbsolutePath());
    }

    /**
     * Util function to parse comma separated list of jvm options to a Map (jvmOptionName,
     * JVMOption). It will ignore anything which is not a valid JVM option.
     *
     * @param property comma separated list of JVM options.
     * @return Map of (jvmOptionName, JVMOption), or null when the property is empty/absent.
     */
    public static final Map<String, JVMOption> parseJVMOptions(String property) {
        if (StringUtils.isEmpty(property)) return null;
        return new HashSet<>(Arrays.asList(property.split(",")))
                .stream()
                .map(JVMOption::parse)
                .filter(Objects::nonNull)
                .collect(Collectors.toMap(JVMOption::getJvmOption, jvmOption -> jvmOption));
    }
}
| 3,190 |
0 | Create_ds/Priam/priam/src/main/java/com/netflix/priam | Create_ds/Priam/priam/src/main/java/com/netflix/priam/tuner/CassandraTunerService.java | package com.netflix.priam.tuner;
import com.netflix.priam.backup.IncrementalBackup;
import com.netflix.priam.config.IBackupRestoreConfig;
import com.netflix.priam.config.IConfiguration;
import com.netflix.priam.connection.JMXNodeTool;
import com.netflix.priam.defaultimpl.IService;
import com.netflix.priam.scheduler.PriamScheduler;
import com.netflix.priam.utils.RetryableCallable;
import javax.inject.Inject;
public class CassandraTunerService implements IService {
    private final PriamScheduler scheduler;
    private final IConfiguration configuration;
    private final IBackupRestoreConfig backupRestoreConfig;

    @Inject
    public CassandraTunerService(
            PriamScheduler priamScheduler,
            IConfiguration configuration,
            IBackupRestoreConfig backupRestoreConfig) {
        this.scheduler = priamScheduler;
        this.configuration = configuration;
        this.backupRestoreConfig = backupRestoreConfig;
    }

    /** Runs the {@link TuneCassandra} task immediately rather than on a recurring schedule. */
    @Override
    public void scheduleService() throws Exception {
        scheduler.runTaskNow(TuneCassandra.class);
    }

    /** Nothing to do before a service update. */
    @Override
    public void updateServicePre() throws Exception {}

    /**
     * Pushes the effective incremental-backup setting into Cassandra over JMX after a service
     * update, retrying on failure via {@link RetryableCallable}.
     */
    @Override
    public void updateServicePost() throws Exception {
        new RetryableCallable<Void>(6, 10000) {
            @Override
            public Void retriableCall() throws Exception {
                try (JMXNodeTool nodeTool = JMXNodeTool.instance(configuration)) {
                    final boolean incrementalEnabled =
                            IncrementalBackup.isEnabled(configuration, backupRestoreConfig);
                    nodeTool.setIncrementalBackupsEnabled(incrementalEnabled);
                }
                return null;
            }
        }.call();
    }
}
| 3,191 |
0 | Create_ds/Priam/priam/src/main/java/com/netflix/priam | Create_ds/Priam/priam/src/main/java/com/netflix/priam/tuner/GCTuner.java | /*
* Copyright 2016 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.priam.tuner;
import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;
import javax.inject.Singleton;
/**
 * Known garbage-collection JVM flags for the CMS and G1GC collectors. Used to automatically
 * enable/disable matching entries found in jvm.options; no new GC options are ever added.
 */
@Singleton
public class GCTuner {
    /** Flags that only apply when the CMS collector is in use. */
    private static final Set<String> CMS_OPTIONS =
            new HashSet<>(
                    Arrays.asList(
                            "-XX:+UseConcMarkSweepGC",
                            "-XX:+UseParNewGC",
                            "-XX:+UseParallelGC",
                            "-XX:+CMSConcurrentMTEnabled",
                            "-XX:CMSInitiatingOccupancyFraction",
                            "-XX:+UseCMSInitiatingOccupancyOnly",
                            "-XX:+CMSClassUnloadingEnabled",
                            "-XX:+CMSIncrementalMode",
                            "-XX:+CMSPermGenSweepingEnabled",
                            "-XX:+ExplicitGCInvokesConcurrent",
                            "-XX:+ExplicitGCInvokesConcurrentAndUnloadsClasses",
                            "-XX:+DisableExplicitGC",
                            "-XX:+CMSParallelRemarkEnabled",
                            "-XX:SurvivorRatio",
                            "-XX:MaxTenuringThreshold",
                            "-XX:CMSWaitDuration",
                            "-XX:+CMSParallelInitialMarkEnabled",
                            "-XX:+CMSEdenChunksRecordAlways"));

    /** Flags that only apply when the G1 collector is in use. */
    private static final Set<String> G1GC_OPTIONS =
            new HashSet<>(
                    Arrays.asList(
                            "-XX:+UseG1GC",
                            "-XX:G1HeapRegionSize",
                            "-XX:MaxGCPauseMillis",
                            "-XX:G1NewSizePercent",
                            "-XX:G1MaxNewSizePercent",
                            "-XX:-ResizePLAB",
                            "-XX:InitiatingHeapOccupancyPercent",
                            "-XX:G1MixedGCLiveThresholdPercent",
                            "-XX:G1HeapWastePercent",
                            "-XX:G1MixedGCCountTarget",
                            "-XX:G1OldCSetRegionThresholdPercent",
                            "-XX:G1ReservePercent",
                            "-XX:SoftRefLRUPolicyMSPerMB",
                            "-XX:G1ConcRefinementThreads",
                            "-XX:MaxGCPauseMillis",
                            "-XX:+UnlockExperimentalVMOptions",
                            "-XX:NewRatio",
                            "-XX:G1RSetUpdatingPauseTimePercent"));

    /** Returns the collector a flag belongs to, or null when it is not a known GC flag. */
    static final GCType getGCType(String option) {
        if (CMS_OPTIONS.contains(option)) {
            return GCType.CMS;
        }
        return G1GC_OPTIONS.contains(option) ? GCType.G1GC : null;
    }

    /** Convenience overload keyed on a parsed {@link JVMOption}. */
    static final GCType getGCType(JVMOption jvmOption) {
        return getGCType(jvmOption.getJvmOption());
    }
}
| 3,192 |
0 | Create_ds/Priam/priam/src/main/java/com/netflix/priam/tuner | Create_ds/Priam/priam/src/main/java/com/netflix/priam/tuner/dse/IAuditLogTuner.java | /*
* Copyright 2017 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.priam.tuner.dse;
import com.google.inject.ImplementedBy;
/**
* This is intended for tuning audit log settings. Audit log settings file change between cassandra
* version from log4j to yaml. Created by aagrawal on 8/8/17.
*/
@ImplementedBy(AuditLogTunerYaml.class)
interface IAuditLogTuner {
    /**
     * Applies the configured audit-log settings to the node's audit log configuration file
     * (dse.yaml for DSE 4.x via {@link AuditLogTunerYaml}, log4j properties for DSE 3.x via
     * {@link AuditLogTunerLog4J}).
     */
    void tuneAuditLog();
}
| 3,193 |
0 | Create_ds/Priam/priam/src/main/java/com/netflix/priam/tuner | Create_ds/Priam/priam/src/main/java/com/netflix/priam/tuner/dse/AuditLogTunerLog4J.java | /*
* Copyright 2017 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.priam.tuner.dse;
import com.google.common.base.Joiner;
import com.google.common.io.Files;
import com.netflix.priam.config.IConfiguration;
import java.io.BufferedWriter;
import java.io.File;
import java.nio.charset.Charset;
import java.util.List;
import javax.inject.Inject;
import org.apache.cassandra.io.util.FileUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Dse tuner for audit log via log4j. Use this instead of AuditLogTunerYaml if you are on DSE
* version 3.x. Created by aagrawal on 8/8/17.
*/
public class AuditLogTunerLog4J implements IAuditLogTuner {
    private final IConfiguration config;
    private final IDseConfiguration dseConfig;
    protected static final String AUDIT_LOG_ADDITIVE_ENTRY = "log4j.additivity.DataAudit";
    protected static final String AUDIT_LOG_FILE = "/conf/log4j-server.properties";
    protected static final String PRIMARY_AUDIT_LOG_ENTRY = "log4j.logger.DataAudit";
    private static final Logger logger = LoggerFactory.getLogger(AuditLogTunerLog4J.class);

    @Inject
    public AuditLogTunerLog4J(IConfiguration config, IDseConfiguration dseConfig) {
        this.config = config;
        this.dseConfig = dseConfig;
    }

    /**
     * Note: supporting the direct hacking of a log4j props file is far from elegant, but seems less
     * odious than other solutions I've come up with. Operates under the assumption that the only
     * people mucking with the audit log entries in the value are DataStax themselves and this
     * program, and that the original property names are somehow still preserved. Otherwise, YMMV.
     */
    public void tuneAuditLog() {
        BufferedWriter writer = null;
        try {
            final File srcFile = new File(config.getCassHome() + AUDIT_LOG_FILE);
            final List<String> lines = Files.readLines(srcFile, Charset.defaultCharset());
            // Preserve the original file under a timestamped name before rewriting in place.
            final File backupFile =
                    new File(
                            config.getCassHome()
                                    + AUDIT_LOG_FILE
                                    + "."
                                    + System.currentTimeMillis());
            Files.move(srcFile, backupFile);
            writer = Files.newWriter(srcFile, Charset.defaultCharset());

            String loggerPrefix = "log4j.appender.";
            try {
                loggerPrefix += findAuditLoggerName(lines);
            } catch (IllegalStateException ise) {
                logger.warn(
                        "cannot locate {} property, will ignore any audit log updating",
                        PRIMARY_AUDIT_LOG_ENTRY);
                return;
            }

            for (String line : lines) {
                writer.append(tuneLine(line, loggerPrefix));
                writer.newLine();
            }
        } catch (Exception e) {
            // Wrap with the cause preserved; no printStackTrace, the caller sees the full chain.
            throw new RuntimeException("Unable to read " + AUDIT_LOG_FILE, e);
        } finally {
            FileUtils.closeQuietly(writer);
        }
    }

    /**
     * Rewrites a single audit-log-related line: uncomments it and refreshes its value when audit
     * logging is enabled, comments it out otherwise. Unrelated lines pass through unchanged.
     */
    private String tuneLine(String line, String loggerPrefix) {
        if (!(line.contains(loggerPrefix)
                || line.contains(PRIMARY_AUDIT_LOG_ENTRY)
                || line.contains(AUDIT_LOG_ADDITIVE_ENTRY))) {
            return line;
        }

        if (dseConfig.isAuditLogEnabled()) {
            // first, check to see if we need to uncomment the line
            while (line.startsWith("#")) {
                line = line.substring(1);
            }

            // next, check if we need to change the prop's value
            if (line.contains("ActiveCategories")) {
                final String cats = Joiner.on(",").join(dseConfig.getAuditLogCategories());
                line = line.substring(0, line.indexOf("=") + 1).concat(cats);
            } else if (line.contains("ExemptKeyspaces")) {
                line =
                        line.substring(0, line.indexOf("=") + 1)
                                .concat(dseConfig.getAuditLogExemptKeyspaces());
            }
        } else {
            if (line.startsWith("#")) {
                // make sure there's only one # at the beginning of the line
                while (line.charAt(1) == '#') line = line.substring(1);
            } else {
                line = "#" + line;
            }
        }
        return line;
    }

    /**
     * Returns the appender name referenced by the primary audit log entry (the last comma-separated
     * token of its value).
     *
     * @throws IllegalStateException when no line contains the primary audit log entry
     */
    private String findAuditLoggerName(List<String> lines) throws IllegalStateException {
        for (final String l : lines) {
            if (l.contains(PRIMARY_AUDIT_LOG_ENTRY)) {
                final String[] valTokens = l.split(",");
                return valTokens[valTokens.length - 1].trim();
            }
        }
        throw new IllegalStateException();
    }
}
| 3,194 |
0 | Create_ds/Priam/priam/src/main/java/com/netflix/priam/tuner | Create_ds/Priam/priam/src/main/java/com/netflix/priam/tuner/dse/DseProcessManager.java | /**
* Copyright 2017 Netflix, Inc.
*
* <p>Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a copy of the License at
*
* <p>http://www.apache.org/licenses/LICENSE-2.0
*
* <p>Unless required by applicable law or agreed to in writing, software distributed under the
* License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.priam.tuner.dse;
import com.netflix.priam.config.IConfiguration;
import com.netflix.priam.defaultimpl.CassandraProcessManager;
import com.netflix.priam.health.InstanceState;
import com.netflix.priam.merics.CassMonitorMetrics;
import com.netflix.priam.tuner.dse.IDseConfiguration.NodeType;
import java.util.Map;
import javax.inject.Inject;
public class DseProcessManager extends CassandraProcessManager {
private final IDseConfiguration dseConfig;
private InstanceState instanceState;
@Inject
public DseProcessManager(
IConfiguration config,
IDseConfiguration dseConfig,
InstanceState instanceState,
CassMonitorMetrics cassMonitorMetrics) {
super(config, instanceState, cassMonitorMetrics);
this.dseConfig = dseConfig;
}
protected void setEnv(Map<String, String> env) {
super.setEnv(env);
NodeType nodeType = dseConfig.getNodeType();
if (nodeType == NodeType.ANALYTIC_HADOOP) env.put("CLUSTER_TYPE", "-t");
else if (nodeType == NodeType.ANALYTIC_SPARK) env.put("CLUSTER_TYPE", "-k");
else if (nodeType == NodeType.ANALYTIC_HADOOP_SPARK) env.put("CLUSTER_TYPE", "-k -t");
else if (nodeType == NodeType.SEARCH) env.put("CLUSTER_TYPE", "-s");
}
}
| 3,195 |
0 | Create_ds/Priam/priam/src/main/java/com/netflix/priam/tuner | Create_ds/Priam/priam/src/main/java/com/netflix/priam/tuner/dse/DseTuner.java | /**
* Copyright 2017 Netflix, Inc.
*
* <p>Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a copy of the License at
*
* <p>http://www.apache.org/licenses/LICENSE-2.0
*
* <p>Unless required by applicable law or agreed to in writing, software distributed under the
* License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.priam.tuner.dse;
import static com.netflix.priam.tuner.dse.IDseConfiguration.NodeType;
import static org.apache.cassandra.locator.SnitchProperties.RACKDC_PROPERTY_FILENAME;
import com.netflix.priam.config.IBackupRestoreConfig;
import com.netflix.priam.config.IConfiguration;
import com.netflix.priam.identity.config.InstanceInfo;
import com.netflix.priam.tuner.StandardTuner;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.Reader;
import java.util.Properties;
import javax.inject.Inject;
import org.apache.cassandra.io.util.FileUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Makes Datastax Enterprise-specific changes to the c* yaml and dse-yaml.
*
* @author jason brown
* @author minh do
*/
public class DseTuner extends StandardTuner {
    private static final Logger logger = LoggerFactory.getLogger(DseTuner.class);
    private final IDseConfiguration dseConfig;
    private final IAuditLogTuner auditLogTuner;

    @Inject
    public DseTuner(
            IConfiguration config,
            IBackupRestoreConfig backupRestoreConfig,
            IDseConfiguration dseConfig,
            IAuditLogTuner auditLogTuner,
            InstanceInfo instanceInfo) {
        super(config, backupRestoreConfig, instanceInfo);
        this.dseConfig = dseConfig;
        this.auditLogTuner = auditLogTuner;
    }

    /**
     * Writes the standard Cassandra properties, then the DSE-specific snitch properties and audit
     * log configuration.
     */
    public void writeAllProperties(String yamlLocation, String hostname, String seedProvider)
            throws Exception {
        super.writeAllProperties(yamlLocation, hostname, seedProvider);
        writeCassandraSnitchProperties();
        auditLogTuner.tuneAuditLog();
    }

    /**
     * Sets a node-type-specific dc_suffix in the rackdc properties file. No-op for plain
     * Cassandra (REAL_TIME_QUERY) nodes.
     */
    private void writeCassandraSnitchProperties() {
        final NodeType nodeType = dseConfig.getNodeType();
        if (nodeType == NodeType.REAL_TIME_QUERY) return;

        String filePath = config.getCassHome() + "/conf/" + RACKDC_PROPERTY_FILENAME;
        try {
            Properties properties = new Properties();
            // try-with-resources; the previous code leaked the FileWriter passed to store().
            try (Reader reader = new FileReader(filePath)) {
                properties.load(reader);
            }

            String suffix = "";
            if (nodeType == NodeType.SEARCH) suffix = "_solr";
            if (nodeType == NodeType.ANALYTIC_HADOOP) suffix = "_hadoop";
            if (nodeType == NodeType.ANALYTIC_HADOOP_SPARK) suffix = "_hadoop_spark";
            if (nodeType == NodeType.ANALYTIC_SPARK) suffix = "_spark";
            properties.put("dc_suffix", suffix);

            try (FileWriter writer = new FileWriter(filePath)) {
                properties.store(writer, "");
            }
        } catch (Exception e) {
            throw new RuntimeException("Unable to read " + RACKDC_PROPERTY_FILENAME, e);
        }
    }

    /** Returns the DSE delegating snitch instead of the standard snitch. */
    protected String getSnitch() {
        return dseConfig.getDseDelegatingSnitch();
    }
}
| 3,196 |
0 | Create_ds/Priam/priam/src/main/java/com/netflix/priam/tuner | Create_ds/Priam/priam/src/main/java/com/netflix/priam/tuner/dse/AuditLogTunerYaml.java | /*
* Copyright 2017 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.priam.tuner.dse;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileWriter;
import java.io.IOException;
import java.util.Map;
import javax.inject.Inject;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.yaml.snakeyaml.DumperOptions;
import org.yaml.snakeyaml.Yaml;
/** Dse tuner for audit log via YAML. Use this for DSE version 4.x Created by aagrawal on 8/8/17. */
public class AuditLogTunerYaml implements IAuditLogTuner {
    private final IDseConfiguration dseConfig;
    private static final String AUDIT_LOG_DSE_ENTRY = "audit_logging_options";
    private static final Logger logger = LoggerFactory.getLogger(AuditLogTunerYaml.class);

    @Inject
    public AuditLogTunerYaml(IDseConfiguration dseConfig) {
        this.dseConfig = dseConfig;
    }

    /**
     * Rewrites the "enabled" flag under audit_logging_options in dse.yaml so it matches
     * {@link IDseConfiguration#isAuditLogEnabled()}. Read/write failures are logged, not thrown.
     */
    public void tuneAuditLog() {
        DumperOptions options = new DumperOptions();
        options.setDefaultFlowStyle(DumperOptions.FlowStyle.BLOCK);
        Yaml yaml = new Yaml(options);
        String dseYaml = dseConfig.getDseYamlLocation();
        try {
            Map<String, Object> map;
            // try-with-resources; the previous code never closed this stream.
            try (FileInputStream yamlStream = new FileInputStream(dseYaml)) {
                map = yaml.load(yamlStream);
            }

            if (map.containsKey(AUDIT_LOG_DSE_ENTRY)) {
                @SuppressWarnings("unchecked")
                Map<String, Object> auditOptions =
                        (Map<String, Object>) map.get(AUDIT_LOG_DSE_ENTRY);
                Boolean isEnabled = (Boolean) auditOptions.get("enabled");

                // Enable/disable audit logging (need this in addition to log4j-server.properties
                // settings)
                if (dseConfig.isAuditLogEnabled()) {
                    if (!isEnabled) {
                        auditOptions.put("enabled", true);
                    }
                } else if (isEnabled) {
                    auditOptions.put("enabled", false);
                }
            }

            if (logger.isInfoEnabled()) {
                logger.info("Updating dse-yaml:\n" + yaml.dump(map));
            }
            // try-with-resources; the previous code never closed this writer either.
            try (FileWriter yamlWriter = new FileWriter(dseYaml)) {
                yaml.dump(map, yamlWriter);
            }
        } catch (FileNotFoundException fileNotFound) {
            logger.error(
                    "FileNotFound while trying to read yaml audit log for tuning: {}", dseYaml);
        } catch (IOException e) {
            // Log the exception itself so the root cause is not lost.
            logger.error(
                    "IOException while trying to write yaml file for audit log tuning: {}",
                    dseYaml,
                    e);
        }
    }
}
| 3,197 |
0 | Create_ds/Priam/priam/src/main/java/com/netflix/priam/tuner | Create_ds/Priam/priam/src/main/java/com/netflix/priam/tuner/dse/IDseConfiguration.java | /**
* Copyright 2017 Netflix, Inc.
*
* <p>Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a copy of the License at
*
* <p>http://www.apache.org/licenses/LICENSE-2.0
*
* <p>Unless required by applicable law or agreed to in writing, software distributed under the
* License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.priam.tuner.dse;
import java.util.Set;
/**
* Datastax Enterprise-specific properties.
*
* @author jason brown
*/
public interface IDseConfiguration {
    /** Using Datastax's terms here for the different types of nodes. */
    enum NodeType {
        /** vanilla Cassandra node */
        REAL_TIME_QUERY("cassandra"),
        /** Hadoop node */
        ANALYTIC_HADOOP("hadoop"),
        /** Spark node */
        ANALYTIC_SPARK("spark"),
        /** Hadoop and Spark node */
        ANALYTIC_HADOOP_SPARK("hadoop-spark"),
        /** Solr node */
        SEARCH("solr");
        // Short lower-case alias used when resolving a node type from configuration.
        private final String altName;

        NodeType(String altName) {
            this.altName = altName;
        }

        /**
         * Looks up a node type by its alternate name (e.g. "solr", "hadoop").
         *
         * @param altName alternate name, expected in lower case
         * @return the matching node type
         * @throws IllegalArgumentException if no node type has the given alternate name
         */
        public static NodeType getByAltName(String altName) {
            for (NodeType nt : NodeType.values()) {
                if (nt.altName.toLowerCase().equals(altName)) return nt;
            }
            throw new IllegalArgumentException("Unknown node type: " + altName);
        }
    }

    /** @return location of the dse.yaml file */
    String getDseYamlLocation();

    /** @return name of the DSE delegating snitch to use in place of the standard snitch */
    String getDseDelegatingSnitch();

    /** @return the role this node plays in the DSE cluster */
    NodeType getNodeType();

    /* audit log configuration */
    boolean isAuditLogEnabled();

    /** @return comma-delimited list of keyspace names */
    String getAuditLogExemptKeyspaces();

    /**
     * DSE-defined audit logging categories
     * http://www.datastax.com/docs/datastax_enterprise3.1/security/data_auditing#data-auditing
     */
    enum AuditLogCategory {
        ADMIN,
        ALL,
        AUTH,
        DML,
        DDL,
        DCL,
        QUERY
    }

    /** @return the set of audit log categories to record */
    Set<AuditLogCategory> getAuditLogCategories();
}
| 3,198 |
0 | Create_ds/Priam/priam/src/main/java/com/netflix/priam/cluster | Create_ds/Priam/priam/src/main/java/com/netflix/priam/cluster/management/SchemaConstant.java | /*
* Copyright 2018 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.priam.cluster.management;
import com.google.common.collect.ImmutableSet;
import java.util.Set;
/** Created by aagrawal on 3/6/18. */
class SchemaConstant {
    // Keyspaces created and managed by Cassandra/DSE itself rather than by users.
    private static final Set<String> SYSTEM_KEYSPACE_NAMES =
            Set.of(
                    "system",
                    "system_schema",
                    "system_traces",
                    "system_auth",
                    "system_distributed",
                    "dse_system");

    /**
     * Checks whether the given keyspace is a Cassandra/DSE system keyspace.
     *
     * @param keyspace keyspace name; matched case-insensitively
     * @return true when the keyspace is a system keyspace
     */
    public static final boolean isSystemKeyspace(String keyspace) {
        return SYSTEM_KEYSPACE_NAMES.contains(keyspace.toLowerCase());
    }
}
| 3,199 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.