| focal_method (string, length 13 – 60.9k) | test_case (string, length 25 – 109k) |
|---|---|
/**
 * Finds the stored range that contains the position identified by
 * {@code (key, value)} by delegating to the wrapped range set.
 *
 * @return the containing range, or {@code null} when no range contains it
 */
@Override
public Range<T> rangeContaining(long key, long value) {
    final Range<T> containing = rangeSet.rangeContaining(key, value);
    return containing;
}
|
/**
 * rangeContaining() must agree with Guava's TreeRangeSet reference for
 * positions inside stored ranges and return null for positions outside them.
 * Note the wrapper is seeded with [0:98,0:99] and [0:100,1:5] while the
 * reference holds the coalesced [0:98,0:100] and [0:101,1:5] -- presumably
 * the wrapper merges adjacent entries; confirm against RangeSetWrapper.add().
 */
@Test
public void testRangeContaining() {
    set = new RangeSetWrapper<>(consumer, reverseConvert, managedCursor);
    set.add(Range.closed(new LongPair(0, 98), new LongPair(0, 99)));
    set.add(Range.closed(new LongPair(0, 100), new LongPair(1, 5)));
    // Guava reference set with the same logical contents.
    com.google.common.collect.RangeSet<LongPair> gSet = TreeRangeSet.create();
    gSet.add(Range.closed(new LongPair(0, 98), new LongPair(0, 100)));
    gSet.add(Range.closed(new LongPair(0, 101), new LongPair(1, 5)));
    set.add(Range.closed(new LongPair(1, 10), new LongPair(1, 15)));
    set.add(Range.closed(new LongPair(1, 20), new LongPair(2, 10)));
    set.add(Range.closed(new LongPair(2, 25), new LongPair(2, 28)));
    set.add(Range.closed(new LongPair(3, 12), new LongPair(3, 20)));
    set.add(Range.closed(new LongPair(4, 12), new LongPair(4, 20)));
    gSet.add(Range.closed(new LongPair(1, 10), new LongPair(1, 15)));
    gSet.add(Range.closed(new LongPair(1, 20), new LongPair(2, 10)));
    gSet.add(Range.closed(new LongPair(2, 25), new LongPair(2, 28)));
    gSet.add(Range.closed(new LongPair(3, 12), new LongPair(3, 20)));
    gSet.add(Range.closed(new LongPair(4, 12), new LongPair(4, 20)));
    // Position inside the first (coalesced) range.
    LongPair position = new LongPair(0, 99);
    assertEquals(set.rangeContaining(position.getKey(), position.getValue()),
        Range.closed(new LongPair(0, 98), new LongPair(0, 100)));
    assertEquals(set.rangeContaining(position.getKey(), position.getValue()), gSet.rangeContaining(position));
    // Position in a gap between ranges: no containing range.
    position = new LongPair(2, 30);
    assertNull(set.rangeContaining(position.getKey(), position.getValue()));
    assertEquals(set.rangeContaining(position.getKey(), position.getValue()), gSet.rangeContaining(position));
    // Position strictly inside a later range.
    position = new LongPair(3, 13);
    assertEquals(set.rangeContaining(position.getKey(), position.getValue()),
        Range.closed(new LongPair(3, 12), new LongPair(3, 20)));
    assertEquals(set.rangeContaining(position.getKey(), position.getValue()), gSet.rangeContaining(position));
    // Position just past a range's upper bound.
    position = new LongPair(3, 22);
    assertNull(set.rangeContaining(position.getKey(), position.getValue()));
    assertEquals(set.rangeContaining(position.getKey(), position.getValue()), gSet.rangeContaining(position));
}
|
/**
 * Packs a dotted version string ("x[.y[.z[.b]]]") into a single int with one
 * byte per segment (x in the highest byte). Returns 0 for null/empty input,
 * for more than four segments, for an empty segment before a dot, or for any
 * segment outside [0, 255]. A trailing dot yields an empty last segment,
 * which parseInt maps to 0 (e.g. "1.2." -&gt; 0x01020000).
 */
public static int fromVersionString(String verStr) {
    if (verStr == null || verStr.length() < 1) {
        return 0;
    }
    final int[] parts = {0, 0, 0, 0};
    int segIdx = 0;
    int start = 0;
    int dotPos;
    do {
        if (segIdx >= parts.length) {
            // More segments than "x.y.z.b" allows
            return 0;
        }
        dotPos = verStr.indexOf('.', start);
        final String segment;
        if (dotPos == -1) {
            // Last segment runs to the end of the string (may be empty).
            segment = verStr.substring(start);
        } else if (start < dotPos) {
            segment = verStr.substring(start, dotPos);
        } else {
            // Empty segment before a dot -- illegal format.
            return 0;
        }
        final int part = parseInt(segment);
        if (part < 0 || part > 255) {
            // Each segment must fit in one unsigned byte.
            return 0;
        }
        parts[segIdx++] = part;
        start = dotPos + 1;
    } while (dotPos > 0);
    return ((parts[0] & 0xff) << 24)
        | ((parts[1] & 0xff) << 16)
        | ((parts[2] & 0xff) << 8)
        | (parts[3] & 0xff);
}
|
/**
 * fromVersionString packs up to four dot-separated segments into one int
 * (one byte each). Invalid inputs yield 0, and trailing non-digit text
 * inside a segment is truncated by the parser (e.g. "3b" -> 3).
 */
@Test
public void testFromVersionString() {
    assertEquals(0x01020300, VersionUtil.fromVersionString("1.2.3"));
    assertEquals(0x01020304, VersionUtil.fromVersionString("1.2.3.4"));
    assertEquals(0x0102ff04, VersionUtil.fromVersionString("1.2.255.4"));
    assertEquals(0xffffffff, VersionUtil.fromVersionString("255.255.255.255"));
    // 256 is out of byte range -> the whole parse fails.
    assertEquals(0, VersionUtil.fromVersionString("1.255.256.0"));
    // A trailing dot yields an empty last segment, parsed as 0.
    assertEquals(0x01020000, VersionUtil.fromVersionString("1.2."));
    assertEquals(0x01000000, VersionUtil.fromVersionString("1"));
    assertEquals(0x01020000, VersionUtil.fromVersionString("1.2"));
    assertEquals(0, VersionUtil.fromVersionString("test"));
    // Non-digit suffixes inside a segment are ignored past the digits.
    assertEquals(0x01020300, VersionUtil.fromVersionString("1.2.3-"));
    assertEquals(0x01020300, VersionUtil.fromVersionString("1.2.3b"));
    assertEquals(0x01023c00, VersionUtil.fromVersionString("1.2.60.sec9"));
    assertEquals(0x01023c00, VersionUtil.fromVersionString("1.2.60-internal"));
}
|
/**
 * Registers the AM through the federation interceptor. Idempotent: a duplicate
 * register call with an identical request body returns the cached response,
 * while a different body is rejected. On first registration the request and
 * the (merged) response are persisted to the NM state store for recovery,
 * containers surviving from previous attempts are re-cached, UAMs are
 * re-attached, and the AMRMProxy policy is initialized from the queue in the
 * home RM's response.
 *
 * @throws YarnException if the request is null, differs from the original
 *                       registration request, or the home RM returns null
 * @throws IOException   propagated from the underlying registration/store calls
 */
@Override
public synchronized RegisterApplicationMasterResponse registerApplicationMaster(
    RegisterApplicationMasterRequest request) throws YarnException, IOException {
    if (request == null) {
        throw new YarnException("RegisterApplicationMasterRequest can't be null!");
    }
    // Reset the heartbeat responseId to zero upon register
    synchronized (this.lastAllocateResponseLock) {
        this.lastAllocateResponse.setResponseId(0);
    }
    this.justRecovered = false;
    // If AM is calling with a different request, complain
    if (this.amRegistrationRequest != null) {
        if (!this.amRegistrationRequest.equals(request)) {
            throw new YarnException("AM should not call "
                + "registerApplicationMaster with a different request body");
        }
    } else {
        // Save the registration request. This will be used for registering with
        // secondary sub-clusters using UAMs, as well as re-register later
        this.amRegistrationRequest = request;
        RegisterApplicationMasterRequestPBImpl requestPB = (RegisterApplicationMasterRequestPBImpl)
            this.amRegistrationRequest;
        storeAMRMProxyAppContextEntry(NMSS_REG_REQUEST_KEY, requestPB.getProto().toByteArray());
    }
    /*
     * Present to AM as if we are the RM that never fails over. When actual RM
     * fails over, we always re-register automatically.
     *
     * We did this because it is possible for AM to send duplicate register
     * request because of timeout. When it happens, it is fine to simply return
     * the success message. Out of all outstanding register threads, only the
     * last one will still have an unbroken RPC connection and successfully
     * return the response.
     */
    if (this.amRegistrationResponse != null) {
        return this.amRegistrationResponse;
    }
    /*
     * Send a registration request to the home resource manager. Note that here
     * we don't register with other sub-cluster resource managers because that
     * will prevent us from using new sub-clusters that get added while the AM
     * is running and will breaks the elasticity feature. The registration with
     * the other sub-cluster RM will be done lazily as needed later.
     */
    this.amRegistrationResponse = this.homeRMRelayer.registerApplicationMaster(request);
    if (this.amRegistrationResponse == null) {
        throw new YarnException("RegisterApplicationMasterResponse can't be null!");
    }
    // Re-cache containers surviving from a previous attempt so later
    // allocate() calls route them to the home sub-cluster.
    List<Container> containersFromPreviousAttempts =
        this.amRegistrationResponse.getContainersFromPreviousAttempts();
    if (containersFromPreviousAttempts != null) {
        cacheAllocatedContainers(containersFromPreviousAttempts, this.homeSubClusterId);
    }
    ApplicationId appId = this.attemptId.getApplicationId();
    reAttachUAMAndMergeRegisterResponse(this.amRegistrationResponse, appId);
    // Persist the merged response so a recovered AMRMProxy can replay it.
    RegisterApplicationMasterResponsePBImpl responsePB = (RegisterApplicationMasterResponsePBImpl)
        this.amRegistrationResponse;
    storeAMRMProxyAppContextEntry(NMSS_REG_RESPONSE_KEY, responsePB.getProto().toByteArray());
    // the queue this application belongs will be used for getting
    // AMRMProxy policy from state store.
    String queue = this.amRegistrationResponse.getQueue();
    if (queue == null) {
        LOG.warn("Received null queue for application {} from home subcluster. " +
            " Will use default queue name {} for getting AMRMProxyPolicy.", appId,
            YarnConfiguration.DEFAULT_QUEUE_NAME);
    } else {
        LOG.info("Application {} belongs to queue {}.", appId, queue);
    }
    // Initialize the AMRMProxyPolicy
    try {
        this.policyInterpreter = FederationPolicyUtils.loadAMRMPolicy(queue, this.policyInterpreter,
            getConf(), this.federationFacade, this.homeSubClusterId);
    } catch (FederationPolicyInitializationException e) {
        throw new YarnRuntimeException(e);
    }
    return this.amRegistrationResponse;
}
|
/**
 * Submitting the exact same registration request twice must succeed both
 * times: the interceptor treats a duplicate (identical) register call as
 * benign and answers it with a non-null response.
 */
@Test
public void testTwoIdenticalRegisterRequest() throws Exception {
    // Build a single request and register it twice.
    final RegisterApplicationMasterRequest request =
        Records.newRecord(RegisterApplicationMasterRequest.class);
    request.setHost(Integer.toString(testAppId));
    request.setRpcPort(0);
    request.setTrackingUrl("");
    for (int attempt = 0; attempt < 2; attempt++) {
        RegisterApplicationMasterResponse response =
            interceptor.registerApplicationMaster(request);
        Assert.assertNotNull(response);
        lastResponseId = 0;
    }
}
|
/**
 * Squared ("normalized") euclidean distance in 3D from point r to the
 * infinite line through a and b. Computes the projection parameter t of r
 * onto that line; when a == b the division yields NaN, which is forced to 0
 * so the result degenerates to the squared distance from r to a.
 */
@Override
public double calcNormalizedEdgeDistance3D(double ry, double rx, double rz,
                                           double ay, double ax, double az,
                                           double by, double bx, double bz) {
    final double abX = bx - ax;
    final double abY = by - ay;
    final double abZ = bz - az;
    final double lenSq = abX * abX + abY * abY + abZ * abZ;
    double t = ((rx - ax) * abX + (ry - ay) * abY + (rz - az) * abZ) / lenSq;
    if (Double.isNaN(t)) t = 0; // degenerate edge: a and b coincide
    // Projection of r onto the line, then squared distance back to r.
    final double dx = ax + t * abX - rx;
    final double dy = ay + t * abY - ry;
    final double dz = az + t * abZ - rz;
    return dx * dx + dy * dy + dz * dz;
}
|
/**
 * Degenerate edge (a == b == origin): the result is the squared distance
 * from r = (y=0, x=3, z=4) to the origin, i.e. 3*3 + 4*4 = 25.
 */
@Test
public void testCalcNormalizedEdgeDistance3dStartEndSame() {
    final DistanceCalcEuclidean calc = new DistanceCalcEuclidean();
    assertEquals(25, calc.calcNormalizedEdgeDistance3D(0, 3, 4, 0, 0, 0, 0, 0, 0), 0);
}
|
/**
 * Handles the "!task" chat command: looks up the message author's slayer task
 * from the chat service, validates it, and rewrites the triggering chat
 * message in place with a formatted task summary.
 *
 * @param chatMessage the triggering chat message (rewritten on success)
 * @param message     the raw command text
 */
void taskLookup(ChatMessage chatMessage, String message)
{
    // Feature is gated behind a config toggle.
    if (!config.taskCommand())
    {
        return;
    }
    ChatMessageType type = chatMessage.getType();
    final String player;
    if (type.equals(ChatMessageType.PRIVATECHATOUT))
    {
        // Outgoing private message: the task owner is the local player.
        player = client.getLocalPlayer().getName();
    }
    else
    {
        // Strip formatting tags and normalize non-breaking spaces in the name.
        player = Text.removeTags(chatMessage.getName())
            .replace('\u00A0', ' ');
    }
    net.runelite.http.api.chat.Task task;
    try
    {
        task = chatClient.getTask(player);
    }
    catch (IOException ex)
    {
        // Best-effort: a failed lookup silently leaves the message unchanged.
        log.debug("unable to lookup slayer task", ex);
        return;
    }
    // Reject task/location strings that fail pattern validation, are too
    // long, or don't map to a known task/location -- defends against a
    // malicious or corrupt service response.
    if (TASK_STRING_VALIDATION.matcher(task.getTask()).find() || task.getTask().length() > TASK_STRING_MAX_LENGTH ||
        TASK_STRING_VALIDATION.matcher(task.getLocation()).find() || task.getLocation().length() > TASK_STRING_MAX_LENGTH ||
        Task.getTask(task.getTask()) == null || !isValidLocation(task.getLocation()))
    {
        log.debug("Validation failed for task name or location: {}", task);
        return;
    }
    int killed = task.getInitialAmount() - task.getAmount();
    StringBuilder sb = new StringBuilder();
    sb.append(task.getTask());
    if (!Strings.isNullOrEmpty(task.getLocation()))
    {
        sb.append(" (").append(task.getLocation()).append(')');
    }
    sb.append(": ");
    // Negative kill count means the initial amount is smaller than the
    // remaining amount; fall back to showing only what's left.
    if (killed < 0)
    {
        sb.append(task.getAmount()).append(" left");
    }
    else
    {
        sb.append(killed).append('/').append(task.getInitialAmount()).append(" killed");
    }
    String response = new ChatMessageBuilder()
        .append(ChatColorType.NORMAL)
        .append("Slayer Task: ")
        .append(ChatColorType.HIGHLIGHT)
        .append(sb.toString())
        .build();
    // Rewrite the original message node and refresh the chat UI.
    final MessageNode messageNode = chatMessage.getMessageNode();
    messageNode.setRuneLiteFormatMessage(response);
    client.refreshChat();
}
|
/**
 * A successful "!task" lookup on a public chat message must rewrite the
 * message node with the formatted task text.
 */
@Test
public void testTaskLookup() throws IOException
{
    // A finished task (amount == initialAmount) returned by the chat service.
    net.runelite.http.api.chat.Task task = new net.runelite.http.api.chat.Task();
    task.setTask("Abyssal demons");
    task.setLocation("The Abyss");
    task.setAmount(42);
    task.setInitialAmount(42);
    when(slayerConfig.taskCommand()).thenReturn(true);
    when(chatClient.getTask(anyString())).thenReturn(task);
    final MessageNode node = mock(MessageNode.class);
    final ChatMessage chatMessage = new ChatMessage();
    chatMessage.setType(ChatMessageType.PUBLICCHAT);
    chatMessage.setName("Adam");
    chatMessage.setMessageNode(node);
    slayerPlugin.taskLookup(chatMessage, "!task");
    verify(node).setRuneLiteFormatMessage(anyString());
}
|
/**
 * Acquires the lock, blocking indefinitely (delegates with no lease timeout).
 *
 * <p>The previous version discarded the {@link InterruptedException} entirely:
 * it neither restored the thread's interrupt status nor attached a cause. The
 * exception type thrown to callers is unchanged.
 */
@Override
public void lock() {
    try {
        lock(-1, null, false);
    } catch (InterruptedException e) {
        // Re-assert the interrupt flag per standard interruption policy and
        // keep the original exception as the cause for diagnosability.
        Thread.currentThread().interrupt();
        throw new IllegalStateException(e);
    }
}
|
/**
 * Stress test: 5000 lock/unlock operations across 32 threads against a client
 * limited to one subscription connection with one subscription per connection.
 * Verifies that no operation fails (Redis timeouts are tolerated) and that no
 * pub/sub channel subscriptions leak once all work completes.
 */
@Test
public void testSubscriptionsPerConnection() throws InterruptedException {
    Config config = createConfig();
    config.useSingleServer()
        .setSubscriptionConnectionPoolSize(1)
        .setSubscriptionConnectionMinimumIdleSize(1)
        .setSubscriptionsPerConnection(1)
        .setAddress(redisson.getConfig().useSingleServer().getAddress());
    RedissonClient redisson = Redisson.create(config);
    ExecutorService e = Executors.newFixedThreadPool(32);
    AtomicInteger errors = new AtomicInteger();
    AtomicInteger ops = new AtomicInteger();
    for (int i = 0; i < 5000; i++) {
        int j = i; // NOTE(review): captured but never used -- presumably leftover
        e.submit(() -> {
            try {
                // Contend over only 5 lock keys to force subscription reuse.
                String lockKey = "lock-" + ThreadLocalRandom.current().nextInt(5);
                RLock lock = redisson.getLock(lockKey);
                lock.lock();
                Thread.sleep(ThreadLocalRandom.current().nextInt(20));
                lock.unlock();
                ops.incrementAndGet();
            } catch (Exception exception) {
                exception.printStackTrace();
                // Redis timeouts are tolerated; anything else counts as an error.
                if(exception instanceof RedisTimeoutException){
                    return;
                }
                errors.incrementAndGet();
            }
        });
    }
    e.shutdown();
    assertThat(e.awaitTermination(150, TimeUnit.SECONDS)).isTrue();
    assertThat(errors.get()).isZero();
    // Connect with a raw client and verify no channels remain subscribed.
    RedisClientConfig cc = new RedisClientConfig();
    cc.setAddress(redisson.getConfig().useSingleServer().getAddress());
    RedisClient c = RedisClient.create(cc);
    RedisConnection ccc = c.connect();
    List<String> channels = ccc.sync(RedisCommands.PUBSUB_CHANNELS);
    assertThat(channels).isEmpty();
    c.shutdown();
    redisson.shutdown();
}
|
/** Returns the current server group, read through the cache. */
@Override
public ServerGroup servers() {
    final ServerGroup current = cache.get();
    return current;
}
|
/** With every upstream reported down, the endpoint must report unhealthy. */
@Test
public void all_down_endpoint_is_down() {
    NginxHealthClient client = createClient("nginx-health-output-all-down.json");
    assertFalse(client.servers().isHealthy("gateway.prod.music.vespa.us-east-2.prod"));
}
|
/**
 * Parses a query string of the form "k1=v1&amp;k2=v2" into a map.
 * A null or empty input yields a fresh, empty, mutable map.
 */
public static Map<String, String> parseQueryString(String qs) {
    return isEmpty(qs)
        ? new HashMap<>()
        : parseKeyValuePair(qs, "\\&");
}
|
@Test
void testParseQueryString() throws Exception {
    // NOTE(review): despite the name, this exercises getQueryStringValue,
    // which presumably delegates to parseQueryString -- confirm; there are
    // no direct parseQueryString assertions here.
    assertThat(StringUtils.getQueryStringValue("key1=value1&key2=value2", "key1"), equalTo("value1"));
    assertThat(StringUtils.getQueryStringValue("key1=value1&key2=value2", "key2"), equalTo("value2"));
    // Empty query string yields no value for any key.
    assertThat(StringUtils.getQueryStringValue("", "key1"), isEmptyOrNullString());
}
|
/**
 * Removes the key from both lookup maps under the lock. A key must be
 * present in both maps or in neither; a mismatch indicates corrupted state.
 *
 * @return true if the key was present and removed
 */
public boolean removeKey(ECKey key) {
    lock.lock();
    try {
        final boolean removedByHash =
            hashToKeys.remove(ByteString.copyFrom(key.getPubKeyHash())) != null;
        final boolean removedByPubkey =
            pubkeyToKeys.remove(ByteString.copyFrom(key.getPubKey())) != null;
        checkState(removedByHash == removedByPubkey); // Should be in both maps or neither.
        return removedByHash;
    } finally {
        lock.unlock();
    }
}
|
/** A freshly imported key can be removed exactly once. */
@Test
public void removeKey() {
    final ECKey imported = new ECKey();
    chain.importKeys(imported);
    assertEquals(1, chain.numKeys());
    assertTrue(chain.removeKey(imported));
    assertEquals(0, chain.numKeys());
    // A second removal reports the key is no longer present.
    assertFalse(chain.removeKey(imported));
}
|
/**
 * Verifies a signature with message recovery over {@code data}.
 *
 * @param data      the externally supplied part of the signed message
 * @param signature the raw signature block
 * @param digest    digest instance used to recompute the message hash
 * @throws VerificationException if the recomputed digest does not match the
 *                               digest embedded in the recovered block
 */
@Override
public void verify(byte[] data, byte[] signature, MessageDigest digest) {
    // Apply the raw public-key operation to recover the padded digest block.
    final byte[] decrypted = engine.processBlock(signature, 0, signature.length);
    // checkSignature presumably validates the block format and returns a
    // trailer-length adjustment -- TODO confirm against its definition.
    final int delta = checkSignature(decrypted, digest);
    // Offset of the embedded digest inside the recovered block.
    final int offset = decrypted.length - digest.getDigestLength() - delta;
    // Hash the recovered message part (bytes 1..offset-1) followed by `data`.
    digest.update(decrypted, 1, offset - 1);
    digest.update(data);
    // Constant layout: computed digest must equal the block content at `offset`.
    if (!CryptoUtils.compare(digest.digest(), decrypted, offset)) {
        throw new VerificationException("Invalid signature");
    }
}
|
/**
 * Signs a random 40-byte challenge with the SHA-384 ISO trailer and verifies
 * it; success means verify() completes without throwing.
 */
@Test
public void shouldValidateSignatureSHA384() {
    final byte[] challenge = CryptoUtils.random(40);
    final byte[] signature = sign(0x54, challenge, ISOTrailers.TRAILER_SHA384, "SHA-384");
    // String-digest overload -- presumably resolves "SHA-384" to a
    // MessageDigest internally; confirm against the verifier's overloads.
    new DssRsaSignatureVerifier(PUBLIC).verify(challenge, signature, "SHA-384");
}
|
/**
 * Decodes a base64 string and gunzips the result.
 *
 * @param string base64-encoded, gzip-compressed payload; null or empty input
 *               yields an empty array
 * @return the decompressed bytes
 * @throws IOException declared for API compatibility; decompression failures
 *                     are currently rethrown as RuntimeException
 */
public static byte[] decodeBase64Zipped( String string ) throws IOException {
    if ( string == null || string.isEmpty() ) {
        return new byte[0];
    }
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    // Base64 text is pure ASCII: use an explicit charset instead of the old
    // no-arg getBytes(), whose platform-default charset is wrong on
    // non-ASCII-default platforms.
    byte[] bytes64 = org.apache.commons.codec.binary.Base64.decodeBase64(
        string.getBytes( java.nio.charset.StandardCharsets.US_ASCII ) );
    ByteArrayInputStream zip = new ByteArrayInputStream( bytes64 );
    try (
        GZIPInputStream unzip = new GZIPInputStream( zip, ZIP_BUFFER_SIZE );
        BufferedInputStream in = new BufferedInputStream( unzip, ZIP_BUFFER_SIZE );
        ) {
        // Copy decompressed bytes through a fixed-size buffer.
        byte[] buff = new byte[ ZIP_BUFFER_SIZE ];
        for ( int length; ( length = in.read( buff ) ) > 0; ) {
            baos.write( buff, 0, length );
        }
    } catch ( Exception e ) {
        // Wrap rather than propagate to preserve the method's historical
        // error contract; the cause is retained.
        throw new RuntimeException( "Unexpected error trying to decode object.", e );
    }
    return baos.toByteArray();
}
|
/**
 * Round-trip: an object encoded to a base64+gzip string decodes back into an
 * object with equal properties. (The encode()/decode() helpers presumably
 * wrap the base64-zip codec plus Java serialization -- confirm.)
 */
@Test
public void testDecodeBase64Zipped() throws Exception {
    TestClass testClass = new TestClass();
    testClass.setTestProp1( "testPropValue1" );
    testClass.setTestProp2( "testPropValue2" );
    String base64ZippedString = this.encode( testClass );
    TestClass reconstructedTestClass = (TestClass) this.decode( base64ZippedString );
    Assert.assertNotNull( reconstructedTestClass );
    Assert.assertEquals( reconstructedTestClass.getTestProp1(), testClass.getTestProp1() );
    Assert.assertEquals( reconstructedTestClass.getTestProp2(), testClass.getTestProp2() );
}
|
/**
 * Sets whether this monitor is the default one.
 *
 * @param isDefault the default flag (boxed Boolean)
 * @return this builder, for chaining
 */
public MonitorBuilder isDefault(Boolean isDefault) {
    this.isDefault = isDefault;
    return getThis();
}
|
/** The flag set on the builder must survive into the built monitor config. */
@Test
void isDefault() {
    MonitorBuilder monitorBuilder = MonitorBuilder.newBuilder().isDefault(true);
    Assertions.assertTrue(monitorBuilder.build().isDefault());
}
|
/**
 * Builds a {@link DockerContainerDeletionTask} from its protobuf form.
 * Proto fields that are not set are translated to null.
 */
public static DockerContainerDeletionTask
    convertProtoToDockerContainerDeletionTask(
    DeletionServiceDeleteTaskProto proto, DeletionService deletionService,
    int taskId) {
  String user = null;
  if (proto.hasUser()) {
    user = proto.getUser();
  }
  String containerId = null;
  if (proto.hasDockerContainerId()) {
    containerId = proto.getDockerContainerId();
  }
  return new DockerContainerDeletionTask(taskId, deletionService, user,
      containerId);
}
|
/**
 * A proto carrying id, user and docker container id converts into a
 * DockerContainerDeletionTask that preserves the task type, task id and
 * container id.
 */
@Test
public void testConvertProtoToDockerContainerDeletionTask() throws Exception {
    DeletionService deletionService = mock(DeletionService.class);
    int id = 0;
    String user = "user";
    String dockerContainerId = "container_e123_12321231_00001";
    DeletionServiceDeleteTaskProto.Builder protoBuilder =
        DeletionServiceDeleteTaskProto.newBuilder();
    protoBuilder
        .setId(id)
        .setUser(user)
        .setDockerContainerId(dockerContainerId);
    DeletionServiceDeleteTaskProto proto = protoBuilder.build();
    DeletionTask deletionTask =
        NMProtoUtils.convertProtoToDockerContainerDeletionTask(proto,
            deletionService, id);
    assertEquals(DeletionTaskType.DOCKER_CONTAINER.name(),
        deletionTask.getDeletionTaskType().name());
    assertEquals(id, deletionTask.getTaskId());
    assertEquals(dockerContainerId,
        ((DockerContainerDeletionTask) deletionTask).getContainerId());
}
|
/**
 * Deletes the currently selected hop: interprets the current selection as a
 * hop inside its parent transformation and delegates to the two-argument
 * delHop(transMeta, transHopMeta).
 */
public void delHop() {
    delHop( (TransMeta) selectionObjectParent, (TransHopMeta) selectionObject );
}
|
/**
 * Deleting a hop must notify both endpoint steps exactly once so they can
 * clean up hop-dependent metadata (cleanAfterHopFromRemove on the source,
 * cleanAfterHopToRemove on the target).
 */
@Test
public void testDelHop() throws Exception {
    StepMetaInterface fromStepMetaInterface = Mockito.mock( StepMetaInterface.class );
    StepMeta fromStep = new StepMeta();
    fromStep.setStepMetaInterface( fromStepMetaInterface );
    StepMetaInterface toStepMetaInterface = Mockito.mock( StepMetaInterface.class );
    StepMeta toStep = new StepMeta();
    toStep.setStepMetaInterface( toStepMetaInterface );
    TransHopMeta transHopMeta = new TransHopMeta();
    transHopMeta.setFromStep( fromStep );
    transHopMeta.setToStep( toStep );
    TransMeta transMeta = Mockito.mock( TransMeta.class );
    spoon.delHop( transMeta, transHopMeta );
    Mockito.verify( fromStepMetaInterface, times( 1 ) ).cleanAfterHopFromRemove( toStep );
    Mockito.verify( toStepMetaInterface, times( 1 ) ).cleanAfterHopToRemove( fromStep );
}
|
/**
 * Writes a 4-byte IEEE-754 float at the current writer index by storing its
 * raw bit pattern as an int.
 */
@Override
public ByteBuf writeFloat(float value) {
    final int bits = Float.floatToRawIntBits(value);
    writeInt(bits);
    return this;
}
|
/** Writing a float to an already-released buffer must be rejected. */
@Test
public void testWriteFloatAfterRelease() {
    assertThrows(IllegalReferenceCountException.class,
        () -> releasedBuffer().writeFloat(1));
}
|
/**
 * Initializes the annotation processor. If the @KoraApp annotation type is
 * absent from the compilation classpath, initialization aborts early and the
 * processor remains inert ({@code initialized} stays false).
 */
@Override
public synchronized void init(ProcessingEnvironment processingEnv) {
    super.init(processingEnv);
    this.koraAppElement = this.elements.getTypeElement(CommonClassNames.koraApp.canonicalName());
    if (this.koraAppElement == null) {
        // @KoraApp is not on the classpath: nothing for this processor to do.
        return;
    }
    // Resolve companion annotation types used later during processing.
    this.moduleElement = this.elements.getTypeElement(CommonClassNames.module.canonicalName());
    this.koraSubmoduleElement = this.elements.getTypeElement(CommonClassNames.koraSubmodule.canonicalName());
    this.componentElement = this.elements.getTypeElement(CommonClassNames.component.canonicalName());
    this.initialized = true;
    this.ctx = new ProcessingContext(processingEnv);
    log.info("@KoraApp processor started");
}
|
/**
 * Two components colliding on the same descriptor must still produce a
 * 3-node graph that initializes successfully.
 * (Method name keeps its historical typo "Comonent" to avoid churn.)
 */
@Test
void appWithComonentDescriptorCollision() throws Throwable {
    var graphDraw = testClass(AppWithComponentCollision.class);
    Assertions.assertThat(graphDraw.getNodes()).hasSize(3);
    var materializedGraph = graphDraw.init();
    Assertions.assertThat(materializedGraph).isNotNull();
}
|
/** Whether exactly-once semantics are enabled for the current processing mode. */
boolean eosEnabled() {
    final boolean enabled = StreamsConfigUtils.eosEnabled(processingMode);
    return enabled;
}
|
@Test
public void shouldNotHaveEosEnabledIfEosAlphaEnable() {
    // NOTE(review): the method name says "should NOT have EOS enabled" but the
    // assertion expects true -- eos-alpha does enable EOS, so the name looks
    // misleading; confirm intent and consider renaming.
    assertThat(eosAlphaStreamsProducer.eosEnabled(), is(true));
}
|
/**
 * Compares two byte arrays using the process-wide default byte-array
 * comparator (see getDefaultByteArrayComparator()).
 */
public static int compare(final byte[] a, final byte[] b) {
    return getDefaultByteArrayComparator().compare(a, b);
}
|
@Test
public void testCompare() {
    byte[] array = new byte[] { 1, 2 };
    // The same reference compares equal.
    Assert.assertEquals(0, BytesUtil.compare(array, array));
    // NOTE(review): asserting exactly -2 pins an implementation detail of the
    // default comparator (element difference), not just the sign -- confirm
    // this is intentional.
    Assert.assertEquals(-2, BytesUtil.compare(new byte[] { 1, 2 }, new byte[] { 3, 4 }));
    // Equal contents in distinct arrays compare equal.
    Assert.assertEquals(0, BytesUtil.compare(new byte[] { 3, 4 }, new byte[] { 3, 4 }));
}
|
/**
 * Classifies the failure via {@code howToHandleFailure(Throwable)} and, when
 * span reporting is enabled, schedules a job-failure metric report on the
 * component main thread once the failure labels become available.
 */
@Override
public FailureResult howToHandleFailure(
        Throwable failure, CompletableFuture<Map<String, String>> failureLabels) {
    final FailureResult result = howToHandleFailure(failure);
    if (reportEventsAsSpans) {
        // TODO: replace with reporting as event once events are supported.
        // Report asynchronously once labeling completes.
        failureLabels.thenAcceptAsync(
                failureLabelMap ->
                        jobFailureMetricReporter.reportJobFailure(result, failureLabelMap),
                componentMainThreadExecutor);
    }
    return result;
}
|
/**
 * An unrecoverable failure (SuppressRestartsException) must yield a
 * non-restartable FailureResult. With REPORT_EVENTS_AS_SPANS enabled, the
 * failure span/metric is reported only after the main-thread executor runs
 * the scheduled callback.
 */
@Test
void testHowToHandleFailureUnrecoverableFailure() throws Exception {
    final Configuration configuration = new Configuration();
    configuration.set(TraceOptions.REPORT_EVENTS_AS_SPANS, Boolean.TRUE);
    final List<Span> spanCollector = new ArrayList<>(1);
    final UnregisteredMetricGroups.UnregisteredJobManagerJobMetricGroup testMetricGroup =
        createTestMetricGroup(spanCollector);
    final AdaptiveScheduler scheduler =
        new AdaptiveSchedulerBuilder(
                createJobGraph(),
                mainThreadExecutor,
                EXECUTOR_RESOURCE.getExecutor())
            .setJobMasterConfiguration(configuration)
            .setJobManagerJobMetricGroup(testMetricGroup)
            .build();
    assertThat(
            scheduler
                .howToHandleFailure(
                    new SuppressRestartsException(new Exception("test")),
                    createFailureLabelsFuture())
                .canRestart())
        .isFalse();
    // Nothing reported yet: reporting is queued on the main-thread executor.
    assertThat(spanCollector).isEmpty();
    mainThreadExecutor.trigger();
    checkMetrics(spanCollector, false);
}
|
/**
 * Parses a resource configuration value with no explicit cap
 * (Long.MAX_VALUE acts as "unbounded").
 *
 * @throws AllocationConfigurationException on malformed input
 */
public static ConfigurableResource parseResourceConfigValue(String value)
    throws AllocationConfigurationException {
    return parseResourceConfigValue(value, Long.MAX_VALUE);
}
|
@Test
public void testCpuPercentageMemoryAbsoluteMemoryNegative() throws Exception {
    // Mixing percentage cpu with a negative absolute memory value must fail.
    // expectMissingResource presumably arms the expected-exception check for
    // "memory" before the parse call -- confirm against the helper.
    expectMissingResource("memory");
    parseResourceConfigValue("50% cpu, -1024 mb");
}
|
/**
 * Resolves a backend proc node from its id string.
 *
 * @param beIdStr decimal backend id
 * @return a {@code BackendProcNode} wrapping the resolved backend
 * @throws AnalysisException if the id is null/empty, not a number, or unknown
 */
@Override
public ProcNodeInterface lookup(String beIdStr) throws AnalysisException {
    if (Strings.isNullOrEmpty(beIdStr)) {
        throw new AnalysisException("Backend id is null");
    }
    // Blank final + definite assignment replaces the old dead "-1L"
    // initializer, which could never be observed.
    final long backendId;
    try {
        backendId = Long.parseLong(beIdStr);
    } catch (NumberFormatException e) {
        throw new AnalysisException("Invalid backend id format: " + beIdStr);
    }
    Backend backend = clusterInfoService.getBackend(backendId);
    if (backend == null) {
        throw new AnalysisException("Backend[" + backendId + "] does not exist.");
    }
    return new BackendProcNode(backend);
}
|
/**
 * Backends 1000 and 1001 resolve to BackendProcNode without throwing, while
 * 1002 raises AnalysisException. (The backend ids are presumably registered
 * in the shared systemInfoService fixture -- confirm in the test setup.)
 */
@Test
public void testLookupNormal() {
    ExceptionChecker.expectThrowsNoException(() -> {
        BackendsProcDir dir = new BackendsProcDir(systemInfoService);
        ProcNodeInterface node = dir.lookup("1000");
        Assert.assertNotNull(node);
        Assert.assertTrue(node instanceof BackendProcNode);
    });
    ExceptionChecker.expectThrowsNoException(() -> {
        BackendsProcDir dir = new BackendsProcDir(systemInfoService);
        ProcNodeInterface node = dir.lookup("1001");
        Assert.assertNotNull(node);
        Assert.assertTrue(node instanceof BackendProcNode);
    });
    // Unknown backend id must be rejected.
    ExceptionChecker.expectThrows(AnalysisException.class, () -> {
        BackendsProcDir dir = new BackendsProcDir(systemInfoService);
        dir.lookup("1002");
    });
}
|
/**
 * Static factory: creates an Applier over the given source text and
 * end-positions table.
 */
public static Applier fromSource(CharSequence source, EndPosTable endPositions) {
    return new Applier(source, endPositions);
}
|
/**
 * Applying a fix that only adds an import (no source replacements) yields
 * null -- presumably AppliedFix treats an unchanged source body as "nothing
 * to apply"; confirm against AppliedFix.apply().
 */
@Test
public void shouldReturnNullOnImportOnlyFix() {
    AppliedFix fix =
        AppliedFix.fromSource("public class Foo {}", endPositions)
            .apply(SuggestedFix.builder().addImport("foo.bar.Baz").build());
    assertThat(fix).isNull();
}
|
/**
 * Checks whether a single parameter value passes the given param-flow rule.
 *
 * <p>For QPS rules, delegates to the throttling or the default token-bucket
 * local check depending on the configured control behavior. For thread-count
 * rules, compares the current thread count plus the one being acquired
 * against the per-item threshold when the value is an exclusion item,
 * otherwise against the rule's global count. Unknown grades pass.
 */
static boolean passSingleValueCheck(ResourceWrapper resourceWrapper, ParamFlowRule rule, int acquireCount,
                                    Object value) {
    if (rule.getGrade() == RuleConstant.FLOW_GRADE_QPS) {
        if (rule.getControlBehavior() == RuleConstant.CONTROL_BEHAVIOR_RATE_LIMITER) {
            return passThrottleLocalCheck(resourceWrapper, rule, acquireCount, value);
        } else {
            return passDefaultLocalCheck(resourceWrapper, rule, acquireCount, value);
        }
    } else if (rule.getGrade() == RuleConstant.FLOW_GRADE_THREAD) {
        long threadCount = getParameterMetric(resourceWrapper).getThreadCount(rule.getParamIdx(), value);
        // Single map lookup instead of the old contains()+get() pair; also
        // tolerates a null entry value, which the old unboxing would NPE on.
        Integer itemThreshold = rule.getParsedHotItems().get(value);
        if (itemThreshold != null) {
            return threadCount + 1 <= itemThreshold;
        }
        long threshold = (long) rule.getCount();
        return threadCount + 1 <= threshold;
    }
    return true;
}
|
/**
 * Thread-count param-flow check with per-item exclusion thresholds:
 * valueB/valueD carry item-specific limits (3 and 7) while other values use
 * the global threshold (5). A check passes while currentThreads + 1 <= limit.
 */
@Test
public void testSingleValueCheckThreadCountWithExceptionItems() {
    final String resourceName = "testSingleValueCheckThreadCountWithExceptionItems";
    final ResourceWrapper resourceWrapper = new StringResourceWrapper(resourceName, EntryType.IN);
    int paramIdx = 0;
    long globalThreshold = 5L;
    int thresholdB = 3;
    int thresholdD = 7;
    ParamFlowRule rule = new ParamFlowRule(resourceName).setCount(globalThreshold).setParamIdx(paramIdx)
        .setGrade(RuleConstant.FLOW_GRADE_THREAD);
    String valueA = "valueA";
    String valueB = "valueB";
    String valueC = "valueC";
    String valueD = "valueD";
    // Directly set parsed map for test.
    Map<Object, Integer> map = new HashMap<Object, Integer>();
    map.put(valueB, thresholdB);
    map.put(valueD, thresholdD);
    rule.setParsedHotItems(map);
    // The mocked metric supplies the "current" thread count per value.
    ParameterMetric metric = mock(ParameterMetric.class);
    when(metric.getThreadCount(paramIdx, valueA)).thenReturn(globalThreshold - 1);
    when(metric.getThreadCount(paramIdx, valueB)).thenReturn(globalThreshold - 1);
    when(metric.getThreadCount(paramIdx, valueC)).thenReturn(globalThreshold - 1);
    when(metric.getThreadCount(paramIdx, valueD)).thenReturn(globalThreshold + 1);
    ParameterMetricStorage.getMetricsMap().put(resourceWrapper.getName(), metric);
    // A/C at global-1 pass; B at 4 exceeds its item limit 3; D at 6 still
    // fits its item limit 7.
    assertTrue(ParamFlowChecker.passSingleValueCheck(resourceWrapper, rule, 1, valueA));
    assertFalse(ParamFlowChecker.passSingleValueCheck(resourceWrapper, rule, 1, valueB));
    assertTrue(ParamFlowChecker.passSingleValueCheck(resourceWrapper, rule, 1, valueC));
    assertTrue(ParamFlowChecker.passSingleValueCheck(resourceWrapper, rule, 1, valueD));
    // Second round: A at exactly the global limit fails (5+1 > 5); B back
    // under its item limit passes; C over global fails; D passes at 4, then
    // fails once the stub advances to exactly thresholdD (7+1 > 7).
    when(metric.getThreadCount(paramIdx, valueA)).thenReturn(globalThreshold);
    when(metric.getThreadCount(paramIdx, valueB)).thenReturn(thresholdB - 1L);
    when(metric.getThreadCount(paramIdx, valueC)).thenReturn(globalThreshold + 1);
    when(metric.getThreadCount(paramIdx, valueD)).thenReturn(globalThreshold - 1).thenReturn((long) thresholdD);
    assertFalse(ParamFlowChecker.passSingleValueCheck(resourceWrapper, rule, 1, valueA));
    assertTrue(ParamFlowChecker.passSingleValueCheck(resourceWrapper, rule, 1, valueB));
    assertFalse(ParamFlowChecker.passSingleValueCheck(resourceWrapper, rule, 1, valueC));
    assertTrue(ParamFlowChecker.passSingleValueCheck(resourceWrapper, rule, 1, valueD));
    assertFalse(ParamFlowChecker.passSingleValueCheck(resourceWrapper, rule, 1, valueD));
}
|
/**
 * Populates the span with the remote service name (the "zipkinServiceName"
 * connection property, falling back to "mysql-&lt;database&gt;" or "mysql")
 * and, when the host can be resolved, the server IP and port (defaulting to
 * 3306 when the JDBC URL carries no port). All failures are swallowed: the
 * remote address is optional on a span.
 */
static void parseServerIpAndPort(MysqlConnection connection, Span span) {
    try {
        URI url = URI.create(connection.getURL().substring(5)); // strip "jdbc:"
        String remoteServiceName = connection.getProperties().getProperty("zipkinServiceName");
        if (remoteServiceName == null || "".equals(remoteServiceName)) {
            // No explicit override: derive the name from the database if known.
            String databaseName = getDatabaseName(connection);
            if (databaseName != null && !databaseName.isEmpty()) {
                remoteServiceName = "mysql-" + databaseName;
            } else {
                remoteServiceName = "mysql";
            }
        }
        span.remoteServiceName(remoteServiceName);
        String host = getHost(connection);
        if (host != null) {
            // Default MySQL port when the URL does not specify one.
            span.remoteIpAndPort(host, url.getPort() == -1 ? 3306 : url.getPort());
        }
    } catch (Exception e) {
        // remote address is optional
    }
}
|
@Test void parseServerIpAndPort_ipFromHost_portFromUrl() throws SQLException {
    // Host comes from the connection properties; the port (5555) presumably
    // comes from the JDBC URL configured by the shared fixture -- confirm in
    // setupAndReturnPropertiesForHost.
    setupAndReturnPropertiesForHost("1.2.3.4");
    TracingQueryInterceptor.parseServerIpAndPort(connection, span);
    // No zipkinServiceName property and no database name: defaults to "mysql".
    verify(span).remoteServiceName("mysql");
    verify(span).remoteIpAndPort("1.2.3.4", 5555);
}
|
/**
 * Returns the exception-handling strategy for the given log table when no
 * triggering exception is available (delegates with a null exception).
 */
public static LogExceptionBehaviourInterface getExceptionStrategy( LogTableCoreInterface table ) {
    return getExceptionStrategy( table, null );
}
|
/**
 * A MySQL Connector/J 8 MysqlDataTruncation wrapped in a
 * KettleDatabaseException must map to the suppressable-with-short-message
 * strategy when the log table targets a MySQL database.
 */
@Test public void testExceptionStrategyWithMysqlDataTruncationException80driver() {
    DatabaseMeta databaseMeta = mock( DatabaseMeta.class );
    DatabaseInterface databaseInterface = new MySQLDatabaseMeta();
    com.mysql.cj.jdbc.exceptions.MysqlDataTruncation e = new com.mysql.cj.jdbc.exceptions.MysqlDataTruncation();
    when( logTable.getDatabaseMeta() ).thenReturn( databaseMeta );
    when( databaseMeta.getDatabaseInterface() ).thenReturn( databaseInterface );
    LogExceptionBehaviourInterface
        exceptionStrategy =
        DatabaseLogExceptionFactory.getExceptionStrategy( logTable, new KettleDatabaseException( e ) );
    // Compare by class name: the strategy implementations are not public.
    String strategyName = exceptionStrategy.getClass().getName();
    assertEquals( SUPPRESSABLE_WITH_SHORT_MESSAGE, strategyName );
}
|
/** Test hook: exposes the future that completes when application bootstrap finishes. */
@VisibleForTesting
CompletableFuture<Acknowledge> getBootstrapCompletionFuture() {
    return bootstrapCompletionFuture;
}
|
/**
 * When job submission throws, the bootstrap must invoke the error handler and
 * complete its completion future exceptionally WITHOUT itself triggering a
 * cluster shutdown (shutdown is the error handler's job in production).
 */
@Test
void testErrorHandlerIsCalledWhenSubmissionThrowsAnException() throws Exception {
    final AtomicBoolean shutdownCalled = new AtomicBoolean(false);
    final TestingDispatcherGateway.Builder dispatcherBuilder =
        runningJobGatewayBuilder()
            .setSubmitFunction(
                jobGraph -> {
                    throw new FlinkRuntimeException("Nope!");
                })
            .setClusterShutdownFunction(
                status -> {
                    shutdownCalled.set(true);
                    return CompletableFuture.completedFuture(Acknowledge.get());
                });
    // we're "listening" on this to be completed to verify that the error handler is called.
    // In production, this will shut down the cluster with an exception.
    final CompletableFuture<Void> errorHandlerFuture = new CompletableFuture<>();
    final ApplicationDispatcherBootstrap bootstrap =
        createApplicationDispatcherBootstrap(
            2,
            dispatcherBuilder.build(),
            scheduledExecutor,
            errorHandlerFuture::completeExceptionally);
    final CompletableFuture<Acknowledge> completionFuture =
        bootstrap.getBootstrapCompletionFuture();
    // we call the error handler
    assertException(errorHandlerFuture, FlinkRuntimeException.class);
    // we return a future that is completed exceptionally
    assertException(completionFuture, FlinkRuntimeException.class);
    // and cluster shutdown didn't get called
    assertThat(shutdownCalled.get()).isFalse();
}
|
/**
 * Fuses an executable stage rooted at a gRPC port read of
 * {@code inputPCollection}. Starting from {@code initialNodes}, greedily pulls
 * each downstream PCollection's consumers into the stage when fusible;
 * otherwise the PCollection is marked materialized (a stage output).
 *
 * @throws IllegalArgumentException if {@code initialNodes} is empty
 */
public static ExecutableStage forGrpcPortRead(
    QueryablePipeline pipeline,
    PipelineNode.PCollectionNode inputPCollection,
    Set<PipelineNode.PTransformNode> initialNodes) {
  checkArgument(
      !initialNodes.isEmpty(),
      "%s must contain at least one %s.",
      GreedyStageFuser.class.getSimpleName(),
      PipelineNode.PTransformNode.class.getSimpleName());
  // Choose the environment from an arbitrary node. The initial nodes may not be empty for this
  // subgraph to make any sense, there has to be at least one processor node
  // (otherwise the stage is gRPC Read -> gRPC Write, which doesn't do anything).
  Environment environment = getStageEnvironment(pipeline, initialNodes);
  ImmutableSet.Builder<PipelineNode.PTransformNode> fusedTransforms = ImmutableSet.builder();
  fusedTransforms.addAll(initialNodes);
  Set<SideInputReference> sideInputs = new LinkedHashSet<>();
  Set<UserStateReference> userStates = new LinkedHashSet<>();
  Set<TimerReference> timers = new LinkedHashSet<>();
  Set<PipelineNode.PCollectionNode> fusedCollections = new LinkedHashSet<>();
  Set<PipelineNode.PCollectionNode> materializedPCollections = new LinkedHashSet<>();
  // Worklist of PCollections whose consumers have not been classified yet.
  Queue<PipelineNode.PCollectionNode> fusionCandidates = new ArrayDeque<>();
  for (PipelineNode.PTransformNode initialConsumer : initialNodes) {
    fusionCandidates.addAll(pipeline.getOutputPCollections(initialConsumer));
    sideInputs.addAll(pipeline.getSideInputs(initialConsumer));
    userStates.addAll(pipeline.getUserStates(initialConsumer));
    timers.addAll(pipeline.getTimers(initialConsumer));
  }
  while (!fusionCandidates.isEmpty()) {
    PipelineNode.PCollectionNode candidate = fusionCandidates.poll();
    if (fusedCollections.contains(candidate) || materializedPCollections.contains(candidate)) {
      // This should generally mean we get to a Flatten via multiple paths through the graph and
      // we've already determined what to do with the output.
      LOG.debug(
          "Skipping fusion candidate {} because it is {} in this {}",
          candidate,
          fusedCollections.contains(candidate) ? "fused" : "materialized",
          ExecutableStage.class.getSimpleName());
      continue;
    }
    PCollectionFusibility fusibility =
        canFuse(pipeline, candidate, environment, fusedCollections);
    switch (fusibility) {
      case MATERIALIZE:
        // Cannot fuse the consumers: the PCollection becomes a stage output.
        materializedPCollections.add(candidate);
        break;
      case FUSE:
        // All of the consumers of the candidate PCollection can be fused into this stage. Do so.
        fusedCollections.add(candidate);
        fusedTransforms.addAll(pipeline.getPerElementConsumers(candidate));
        for (PipelineNode.PTransformNode consumer : pipeline.getPerElementConsumers(candidate)) {
          // The outputs of every transform fused into this stage must be either materialized or
          // themselves fused away, so add them to the set of candidates.
          fusionCandidates.addAll(pipeline.getOutputPCollections(consumer));
          sideInputs.addAll(pipeline.getSideInputs(consumer));
        }
        break;
      default:
        throw new IllegalStateException(
            String.format(
                "Unknown type of %s %s",
                PCollectionFusibility.class.getSimpleName(), fusibility));
    }
  }
  return ImmutableExecutableStage.ofFullComponents(
      pipeline.getComponents(),
      environment,
      inputPCollection,
      sideInputs,
      userStates,
      timers,
      fusedTransforms.build(),
      materializedPCollections,
      ExecutableStage.DEFAULT_WIRE_CODER_SETTINGS);
}
|
/**
 * A stage must start with at least one transform that runs in an SDK environment;
 * a runner-executed GroupByKey has no environment, so fusion must be rejected.
 */
@Test
public void noEnvironmentThrows() {
    // (impulse.out) -> runnerTransform -> gbk.out
    // runnerTransform can't be executed in an environment, so trying to construct it should fail
    PTransform gbkTransform =
        PTransform.newBuilder()
            .putInputs("input", "impulse.out")
            .setSpec(
                FunctionSpec.newBuilder().setUrn(PTransformTranslation.GROUP_BY_KEY_TRANSFORM_URN))
            .putOutputs("output", "gbk.out")
            .build();
    QueryablePipeline p =
        QueryablePipeline.forPrimitivesIn(
            partialComponents
                .toBuilder()
                .putTransforms("runnerTransform", gbkTransform)
                .putPcollections(
                    "gbk.out", PCollection.newBuilder().setUniqueName("gbk.out").build())
                .build());
    // Expect the checkArgument inside environment selection to fire.
    thrown.expect(IllegalArgumentException.class);
    thrown.expectMessage("Environment must be populated");
    GreedyStageFuser.forGrpcPortRead(
        p,
        impulseOutputNode,
        ImmutableSet.of(PipelineNode.pTransform("runnerTransform", gbkTransform)));
  }
|
/**
 * Convenience overload: extracts the substring of {@code src} bounded by {@code start}
 * and {@code to}, delegating to the four-argument overload with the flag set to
 * {@code false}.
 * NOTE(review): the flag's exact semantics live in the overload (not visible here) —
 * presumably it toggles inclusion of the bounds; verify against that method.
 */
public static String subString(String src, String start, String to) {
        return subString(src, start, to, false);
    }
|
/**
 * Exercises both subString overloads: null results when the bounds are absent,
 * and extracted spans (exclusive vs. the boolean-flag variant) when present.
 */
@Test
public void testSubString() {
        // Bounds not found in the source -> null.
        Assert.assertNull(StringUtils.subString(",", "foo", ","));
        Assert.assertNull(
                StringUtils.subString("foo", "foo", "a\'b\'c", false));
        Assert.assertNull(
                StringUtils.subString("foo", "foo", "a\'b\'c", true));
        // Text strictly between "1" and "3".
        Assert.assertEquals("a 2b ",
                StringUtils.subString("1a 2b 3c", "1", "3"));
        Assert.assertEquals("c",
                StringUtils.subString("abcdef", "b", "d", true));
    }
|
/**
 * Submits a task to the pool, absorbing a transient rejection by retrying the offer
 * directly into the backing {@link TaskQueue} (the eager pool spins up threads before
 * queueing, so a rejection may still be queueable).
 *
 * @param command the task to execute; must not be {@code null}
 * @throws NullPointerException if {@code command} is {@code null}
 * @throws RejectedExecutionException if the queue is full, or the retry offer is
 *         interrupted (in which case the thread's interrupt status is restored)
 */
@Override
public void execute(Runnable command) {
        if (command == null) {
            throw new NullPointerException();
        }
        try {
            super.execute(command);
        } catch (RejectedExecutionException rx) {
            // retry to offer the task into queue.
            final TaskQueue queue = (TaskQueue) super.getQueue();
            try {
                if (!queue.retryOffer(command, 0, TimeUnit.MILLISECONDS)) {
                    throw new RejectedExecutionException("Queue capacity is full.", rx);
                }
            } catch (InterruptedException x) {
                // Restore the interrupt status before propagating, so callers and the
                // thread's owner can still observe that an interruption occurred.
                Thread.currentThread().interrupt();
                throw new RejectedExecutionException(x);
            }
        }
    }
|
/**
 * Fills the eager pool (3 threads + queue of 2) with blocking tasks, verifies the sixth
 * submission is rejected, then confirms the pool accepts work again once tasks finish.
 */
@Test
void testEagerThreadPool_rejectExecution1() {
        String name = "eager-tf";
        int cores = 1;
        int threads = 3;
        int queues = 2;
        long alive = 1000;
        // init queue and executor
        TaskQueue<Runnable> taskQueue = new TaskQueue<>(queues);
        final EagerThreadPoolExecutor executor = new EagerThreadPoolExecutor(
                cores,
                threads,
                alive,
                TimeUnit.MILLISECONDS,
                taskQueue,
                new NamedThreadFactory(name, true),
                new AbortPolicyWithReport(name, URL));
        taskQueue.setExecutor(executor);
        CountDownLatch countDownLatch = new CountDownLatch(1);
        // Each task blocks until the latch is released, keeping workers busy.
        Runnable runnable = () -> {
            try {
                countDownLatch.await();
            } catch (InterruptedException e) {
                throw new RuntimeException(e);
            }
        };
        // 5 tasks saturate capacity: 3 running + 2 queued.
        for (int i = 0; i < 5; i++) {
            executor.execute(runnable);
        }
        await().until(() -> executor.getPoolSize() == threads);
        await().until(() -> executor.getQueue().size() == queues);
        // A sixth task has nowhere to go and must be rejected.
        Assertions.assertThrows(RejectedExecutionException.class, () -> executor.execute(runnable));
        countDownLatch.countDown();
        await().until(() -> executor.getActiveCount() == 0);
        // Once drained, submissions succeed again.
        executor.execute(runnable);
    }
|
/**
 * Accumulates {@code incValue} into the GROUP_GET_SIZE statistic for the given
 * topic/consumer-group pair (keyed by the combined stats key).
 */
public void incGroupGetSize(final String group, final String topic, final int incValue) {
        // Key and record in one step; the trailing 1 is passed through to addValue
        // (presumably the sample count — see StatsItemSet.addValue).
        this.statsTable.get(Stats.GROUP_GET_SIZE).addValue(buildStatsKey(topic, group), incValue, 1);
    }
|
/**
 * A single increment of 1 must be observable through the stats item for the
 * topic@group key.
 */
@Test
public void testIncGroupGetSize() {
        brokerStatsManager.incGroupGetSize(GROUP_NAME, TOPIC, 1);
        String statsKey = brokerStatsManager.buildStatsKey(TOPIC, GROUP_NAME);
        assertThat(brokerStatsManager.getStatsItem(GROUP_GET_SIZE, statsKey).getValue().doubleValue()).isEqualTo(1L);
    }
|
/**
 * Returns the {@link DataTableType} backing this definition.
 */
@Override
public DataTableType dataTableType() {
        return dataTableType;
    }
|
/**
 * A method taking a List (one table row) must be wrapped into a DataTableType that
 * transforms each row of the table through that method.
 */
@Test
void can_define_table_row_transformer() throws NoSuchMethodException {
        Method method = JavaDataTableTypeDefinitionTest.class.getMethod("convert_table_row_to_string", List.class);
        JavaDataTableTypeDefinition definition = new JavaDataTableTypeDefinition(method, lookup, new String[0]);
        // Two rows in -> two transformed strings out, one per row.
        assertThat(definition.dataTableType().transform(dataTable.cells()),
            is(asList("convert_table_row_to_string=[a, b]", "convert_table_row_to_string=[c, d]")));
    }
|
/**
 * Atomically installs {@code value} for {@code key} if absent and returns the value
 * actually stored in the map: the existing mapping when one was already present,
 * otherwise the freshly inserted {@code value}.
 *
 * @param map the concurrent map to update
 * @param key the key to bind
 * @param value the candidate value to install
 * @return the winning value now associated with {@code key}
 */
public static <K, V> V putToConcurrentMap(ConcurrentMap<K, V> map, K key, V value) {
        final V previous = map.putIfAbsent(key, value);
        if (previous == null) {
            // We won the race: our value is the one in the map.
            return value;
        }
        // Someone else installed a value first; hand back theirs.
        return previous;
    }
|
/**
 * Hammers putToConcurrentMap from three threads: all 30000 increments must land on
 * the single winning AtomicInteger, proving every caller sees the same instance.
 */
@Test
public void testPutToConcurrentMap() throws Exception {
        final ConcurrentMap<String, AtomicInteger> counters = new ConcurrentHashMap<String, AtomicInteger>();
        final CountDownLatch allDone = new CountDownLatch(3);
        for (int t = 0; t < 3; t++) {
            Thread worker = new Thread(() -> {
                for (int n = 0; n < 10000; n++) {
                    // Every call must return the one shared counter, regardless of races.
                    AtomicInteger counter = CommonUtils.putToConcurrentMap(counters, "key", new AtomicInteger(0));
                    counter.incrementAndGet();
                }
                allDone.countDown();
            });
            worker.start();
        }
        allDone.await();
        Assert.assertEquals(counters.get("key").get(), 30000);
    }
|
/**
 * Synchronously asks the broker to switch the message request mode (e.g. PULL vs POP)
 * for the given topic/consumer-group, failing with {@link MQClientException} on any
 * non-success response code.
 *
 * @param brokerAddr broker address to contact (VIP channel applied if enabled)
 * @param topic topic whose request mode is being changed
 * @param consumerGroup consumer group the mode applies to
 * @param mode the requested mode
 * @param popShareQueueNum pop share-queue count carried in the request body
 * @param timeoutMillis RPC timeout
 */
public void setMessageRequestMode(final String brokerAddr, final String topic, final String consumerGroup,
        final MessageRequestMode mode, final int popShareQueueNum, final long timeoutMillis)
        throws InterruptedException, RemotingTimeoutException, RemotingSendRequestException,
        RemotingConnectException, MQClientException {
        RemotingCommand request = RemotingCommand.createRequestCommand(RequestCode.SET_MESSAGE_REQUEST_MODE, null);
        SetMessageRequestModeRequestBody requestBody = new SetMessageRequestModeRequestBody();
        requestBody.setTopic(topic);
        requestBody.setConsumerGroup(consumerGroup);
        requestBody.setMode(mode);
        requestBody.setPopShareQueueNum(popShareQueueNum);
        request.setBody(requestBody.encode());
        RemotingCommand response = this.remotingClient.invokeSync(MixAll.brokerVIPChannel(this.clientConfig.isVipChannelEnabled(), brokerAddr), request, timeoutMillis);
        assert response != null;
        if (ResponseCode.SUCCESS != response.getCode()) {
            throw new MQClientException(response.getCode(), response.getRemark());
        }
    }
|
/**
 * Stubs the remoting client to echo a SUCCESS response and verifies the call
 * completes without throwing.
 */
@Test
public void testSetMessageRequestMode_Success() throws Exception {
        doAnswer((Answer<RemotingCommand>) mock -> {
            RemotingCommand request = mock.getArgument(1);
            RemotingCommand response = RemotingCommand.createResponseCommand(null);
            response.setCode(ResponseCode.SUCCESS);
            // Echo the opaque so the response correlates with the request.
            response.setOpaque(request.getOpaque());
            return response;
        }).when(remotingClient).invokeSync(anyString(), any(RemotingCommand.class), anyLong());
        mqClientAPI.setMessageRequestMode(brokerAddr, topic, group, MessageRequestMode.POP, 8, 10 * 1000L);
    }
|
/**
 * Probability density function of the exponential distribution:
 * f(x) = lambda * exp(-lambda * x) for x >= 0, and 0 on the negative half-line.
 */
@Override
public double p(double x) {
        // Density is zero for negative x; otherwise the standard exponential pdf.
        return (x < 0) ? 0.0 : lambda * Math.exp(-lambda * x);
    }
|
/**
 * Spot-checks the exponential pdf with rate lambda = 2 against precomputed values
 * (2 * exp(-2x)), including the zero density for negative x.
 */
@Test
public void testP() {
        System.out.println("p");
        ExponentialDistribution instance = new ExponentialDistribution(2.0);
        // Exercise the sampler; the pdf itself must be unaffected by drawing.
        instance.rand();
        assertEquals(0, instance.p(-0.1), 1E-7);
        assertEquals(2.0, instance.p(0.0), 1E-7);
        assertEquals(0.2706706, instance.p(1.0), 1E-7);
        assertEquals(0.03663128, instance.p(2.0), 1E-7);
        assertEquals(0.004957504, instance.p(3.0), 1E-7);
        assertEquals(0.0006709253, instance.p(4.0), 1E-7);
    }
|
/**
 * Applies any logical-type conversion declared on the field's (union-resolved) schema
 * to {@code value}; when the field or its schema is absent the value passes through
 * untouched.
 *
 * @param field the Avro field whose schema may carry a logical type; may be null
 * @param value the raw value to convert
 * @return the converted value, or {@code value} unchanged when no field/schema exists
 */
public static Object applyLogicalType(Schema.Field field, Object value) {
    if (field != null && field.schema() != null) {
      // Unwrap unions (e.g. ["null", type]) before inspecting the logical type.
      Schema effectiveSchema = resolveUnionSchema(field.schema());
      return applySchemaTypeLogic(effectiveSchema, value);
    }
    return value;
  }
|
/**
 * A plain string column declares no logicalType, so applyLogicalType must return the
 * very same object instance it was given.
 */
@Test
public void testApplyLogicalTypeReturnsSameValueWhenNotUsingLogicalType() {
    String value = "abc";
    // Minimal record schema with a single logical-type-free string field.
    String schemaString = "{"
        + " \"type\": \"record\","
        + " \"name\": \"test\","
        + " \"fields\": [{"
        + " \"name\": \"column1\","
        + " \"type\": \"string\""
        + " }]"
        + "}";
    Schema schema = new Schema.Parser().parse(schemaString);
    Object result = AvroSchemaUtil.applyLogicalType(schema.getField("column1"), value);
    // Same reference, not merely an equal value.
    Assert.assertSame(value, result);
  }
|
/**
 * Estimates the byte size of a mutation: DELETEs are sized by their key set, writes by
 * summing per-column value estimates. Structs are rejected since they cannot appear in
 * a mutation.
 *
 * @throws IllegalArgumentException if a value has STRUCT type
 */
static long sizeOf(Mutation m) {
    if (m.getOperation() == Mutation.Op.DELETE) {
      return sizeOf(m.getKeySet());
    }
    long result = 0;
    for (Value v : m.getValues()) {
      switch (v.getType().getCode()) {
        case ARRAY:
          result += estimateArrayValue(v);
          break;
        case STRUCT:
          throw new IllegalArgumentException("Structs are not supported in mutation.");
        default:
          result += estimatePrimitiveValue(v);
      }
    }
    return result;
  }
|
/**
 * Byte-column sizing: empty and null byte values count as 0, non-null values by their
 * decoded length, and a delete is sized by the key's bytes.
 */
@Test
public void bytes() throws Exception {
    Mutation empty =
        Mutation.newInsertOrUpdateBuilder("test").set("one").to(ByteArray.fromBase64("")).build();
    Mutation nullValue =
        Mutation.newInsertOrUpdateBuilder("test").set("one").to((ByteArray) null).build();
    Mutation sample =
        Mutation.newInsertOrUpdateBuilder("test")
            .set("one")
            .to(ByteArray.fromBase64("abcdabcd"))
            .build();
    Mutation nullArray =
        Mutation.newInsertOrUpdateBuilder("test").set("one").toBytesArray(null).build();
    Mutation deleteBytes =
        Mutation.delete("test", Key.of(ByteArray.copyFrom("some_bytes".getBytes(UTF_8))));
    assertThat(MutationSizeEstimator.sizeOf(empty), is(0L));
    assertThat(MutationSizeEstimator.sizeOf(nullValue), is(0L));
    // "abcdabcd" in base64 decodes to 6 raw bytes.
    assertThat(MutationSizeEstimator.sizeOf(sample), is(6L));
    assertThat(MutationSizeEstimator.sizeOf(nullArray), is(0L));
    // Delete is sized by the key: "some_bytes" is 10 bytes.
    assertThat(MutationSizeEstimator.sizeOf(deleteBytes), is(10L));
  }
|
/**
 * Creates a new {@link Builder} for fluent configuration.
 */
public static Builder builder() {
        return new Builder();
    }
|
/**
 * With a 9s max elapsed time and the default per-attempt delay, five attempts fit
 * within the budget and the sixth must report {@code BackOff.NEVER}.
 */
@Test
public void testBackOffWithMaxTime() {
        final BackOff backOff = BackOff.builder().maxElapsedTime(9, TimeUnit.SECONDS).build();
        final BackOffTimerTask context = new BackOffTimerTask(backOff, null, t -> true);
        long delay;
        for (int i = 1; i <= 5; i++) {
            delay = context.next();
            assertEquals(i, context.getCurrentAttempts());
            // Constant (non-multiplying) default delay on every attempt.
            assertEquals(BackOff.DEFAULT_DELAY.toMillis(), delay);
            assertEquals(BackOff.DEFAULT_DELAY.toMillis(), context.getCurrentDelay());
            assertEquals(BackOff.DEFAULT_DELAY.toMillis() * i, context.getCurrentElapsedTime());
        }
        // Budget exhausted: back-off must signal to stop retrying.
        delay = context.next();
        assertEquals(6, context.getCurrentAttempts());
        assertEquals(BackOff.NEVER, delay);
    }
|
/**
 * Parses an identifier from its string form, delegating to the two-argument overload
 * with -1 (presumably meaning "no explicit limit/index" — see that overload).
 */
public static Identifier parse(String stringValue) {
        return parse(stringValue, -1);
    }
|
/**
 * 65535 (the unsigned 16-bit maximum) must parse without throwing; this test passes
 * by the absence of an exception — there is deliberately no assertion.
 */
@Test
public void testParseIntegerMaxInclusive() {
        Identifier.parse("65535");
    }
|
/**
 * Generates a local ID batch of the given size using the current cluster time and
 * this member's node ID (delegates to the fully-parameterized overload).
 */
IdBatchAndWaitTime newIdBaseLocal(int batchSize) {
        return newIdBaseLocal(Clock.currentTimeMillis(), getNodeId(), batchSize);
    }
|
/**
 * A fixed (timestamp, nodeId, batchSize) input must deterministically yield the
 * expected base ID, pinning the bit-layout of the generator.
 */
@Test
public void test_timeMiddle() {
        long id = gen.newIdBaseLocal(1516028439000L, 1234, 10).idBatch.base();
        assertEquals(5300086112257234L, id);
    }
|
/**
 * Generates a new encrypted key (EEK) under the named encryption key by delegating
 * to the crypto extension.
 *
 * @param encryptionKeyName name of the encryption key to encrypt the new key with
 * @return the newly generated encrypted key version
 * @throws IOException on provider I/O failure
 * @throws GeneralSecurityException on cryptographic failure
 */
public EncryptedKeyVersion generateEncryptedKey(String encryptionKeyName)
      throws IOException,
             GeneralSecurityException {
    return getExtension().generateEncryptedKey(encryptionKeyName);
  }
|
/**
 * End-to-end EEK lifecycle: generate an EEK, verify its metadata and material length,
 * decrypt it (twice — decryption must be deterministic), and confirm that a second
 * generated EEK differs in both material and IV.
 */
@Test
public void testGenerateEncryptedKey() throws Exception {
    // Generate a new EEK and check it
    KeyProviderCryptoExtension.EncryptedKeyVersion ek1 =
        kpExt.generateEncryptedKey(encryptionKey.getName());
    assertEquals("Version name of EEK should be EEK",
        KeyProviderCryptoExtension.EEK,
        ek1.getEncryptedKeyVersion().getVersionName());
    assertEquals("Name of EEK should be encryption key name",
        ENCRYPTION_KEY_NAME, ek1.getEncryptionKeyName());
    assertNotNull("Expected encrypted key material",
        ek1.getEncryptedKeyVersion().getMaterial());
    assertEquals("Length of encryption key material and EEK material should "
            + "be the same", encryptionKey.getMaterial().length,
        ek1.getEncryptedKeyVersion().getMaterial().length
    );
    // Decrypt EEK into an EK and check it
    KeyVersion k1 = kpExt.decryptEncryptedKey(ek1);
    assertEquals(KeyProviderCryptoExtension.EK, k1.getVersionName());
    assertEquals(encryptionKey.getMaterial().length, k1.getMaterial().length);
    // Decrypt it again and it should be the same
    KeyVersion k1a = kpExt.decryptEncryptedKey(ek1);
    assertArrayEquals(k1.getMaterial(), k1a.getMaterial());
    // Generate another EEK and make sure it's different from the first
    KeyProviderCryptoExtension.EncryptedKeyVersion ek2 =
        kpExt.generateEncryptedKey(encryptionKey.getName());
    KeyVersion k2 = kpExt.decryptEncryptedKey(ek2);
    if (Arrays.equals(k1.getMaterial(), k2.getMaterial())) {
      fail("Generated EEKs should have different material!");
    }
    if (Arrays.equals(ek1.getEncryptedKeyIv(), ek2.getEncryptedKeyIv())) {
      fail("Generated EEKs should have different IVs!");
    }
  }
|
/**
 * QOS "live" command: collects the liveness-probe extension names configured on every
 * application, activates the matching {@link LivenessProbe} extensions, and runs them.
 * Returns "true" with HTTP 200 when all probes pass (or none are configured), otherwise
 * "false" with HTTP 503 on the first failing probe.
 *
 * @param commandContext context used to report the HTTP status code
 * @param args unused
 * @return "true" when live, "false" otherwise
 */
@Override
public String execute(CommandContext commandContext, String[] args) {
        String config = frameworkModel.getApplicationModels().stream()
                .map(applicationModel ->
                        applicationModel.getApplicationConfigManager().getApplication())
                .map(o -> o.orElse(null))
                .filter(Objects::nonNull)
                .map(ApplicationConfig::getLivenessProbe)
                .filter(Objects::nonNull)
                .collect(Collectors.joining(","));
        URL url = URL.valueOf("application://").addParameter(CommonConstants.QOS_LIVE_PROBE_EXTENSION, config);
        List<LivenessProbe> livenessProbes = frameworkModel
                .getExtensionLoader(LivenessProbe.class)
                .getActivateExtension(url, CommonConstants.QOS_LIVE_PROBE_EXTENSION);
        // Iterating an empty list is a no-op, so no explicit isEmpty() guard is needed.
        for (LivenessProbe livenessProbe : livenessProbes) {
            if (!livenessProbe.check()) {
                // 503 Service Unavailable
                commandContext.setHttpCode(503);
                return "false";
            }
        }
        // 200 OK
        commandContext.setHttpCode(200);
        return "true";
    }
|
/**
 * Verifies the "live" command: reports "false"/503 while the mock probe fails, then
 * "true"/200 after the probe is flipped to succeed.
 */
@Test
void testExecute() {
        Live live = new Live(frameworkModel);
        CommandContext commandContext = new CommandContext("live");
        // Probe defaults to failing, so the command must report not-live.
        String result = live.execute(commandContext, new String[0]);
        // JUnit 5 assertEquals takes (expected, actual) — keep that order so failure
        // messages read correctly.
        Assertions.assertEquals("false", result);
        Assertions.assertEquals(503, commandContext.getHttpCode());
        MockLivenessProbe.setCheckReturnValue(true);
        result = live.execute(commandContext, new String[0]);
        Assertions.assertEquals("true", result);
        Assertions.assertEquals(200, commandContext.getHttpCode());
    }
|
/**
 * Sorts the given array in place, ascending or descending per {@code direction},
 * always placing nulls last, and returns the (same) list. Returns null for null
 * inputs or an unrecognized direction.
 *
 * @param input the array to sort (mutated in place)
 * @param direction a value in SORT_DIRECTION_ASC or SORT_DIRECTION_DESC
 *                  (compared case-insensitively)
 * @return the sorted input list, or null
 */
@Udf
public <T extends Comparable<? super T>> List<T> arraySortWithDirection(@UdfParameter(
    description = "The array to sort") final List<T> input,
    @UdfParameter(
        description = "Marks the end of the series (inclusive)") final String direction) {
  if (input == null || direction == null) {
    return null;
  }
  // Use Locale.ROOT so direction matching is stable regardless of the JVM's default
  // locale (e.g. the Turkish dotless-i rules would otherwise break "asc"/"desc"),
  // and uppercase only once instead of per branch.
  final String normalizedDirection = direction.toUpperCase(java.util.Locale.ROOT);
  if (SORT_DIRECTION_ASC.contains(normalizedDirection)) {
    input.sort(nullsLast(naturalOrder()));
  } else if (SORT_DIRECTION_DESC.contains(normalizedDirection)) {
    input.sort(nullsLast(Collections.reverseOrder()));
  } else {
    return null;
  }
  return input;
}
|
/**
 * Descending sort must order the non-null elements descending while pushing all
 * nulls to the tail.
 */
@Test
public void shouldSortNullsToEndDescending() {
    final List<String> input = Arrays.asList(null, "foo", null, "bar", null);
    final List<String> output = udf.arraySortWithDirection(input, "desc");
    assertThat(output, contains("foo", "bar", null, null, null));
  }
|
/**
 * Returns the URI scheme this filesystem was configured with.
 */
@Override
protected String getScheme() {
    return config.getScheme();
  }
|
/**
 * The filesystem must echo back whatever scheme its configuration declares,
 * not a hard-coded "s3".
 */
@Test
public void testGetScheme() {
    S3FileSystem s3FileSystem = new S3FileSystem(s3Config("s3"));
    assertEquals("s3", s3FileSystem.getScheme());
    s3FileSystem = new S3FileSystem(s3Config("other"));
    assertEquals("other", s3FileSystem.getScheme());
  }
|
/**
 * Attempts to acquire the given lock asynchronously, completing the returned stage
 * when the acquisition finishes (successfully or exceptionally).
 *
 * @param lock the lock to acquire
 * @param timeout maximum time to wait for the lock
 * @param unit time unit of {@code timeout}
 * @param executor executor to run the acquisition on; when null the default
 *                 CompletableFuture async pool is used
 * @return a stage completed once the lock attempt resolves
 */
public static CompletionStage<Void> lockAsync(
        InterProcessLock lock, long timeout, TimeUnit unit, Executor executor) {
    CompletableFuture<Void> future = new CompletableFuture<>();
    // Single acquisition task shared by both dispatch paths.
    Runnable acquireTask = () -> lock(future, lock, timeout, unit);
    if (executor == null) {
        CompletableFuture.runAsync(acquireTask);
    } else {
        CompletableFuture.runAsync(acquireTask, executor);
    }
    return future;
}
|
/**
 * Two mutexes on the same path: the first acquisition succeeds and is never released,
 * so the second (time-bounded) attempt must fail with the wrapper's TimeoutException.
 */
@Test
public void testContention() throws Exception {
        try (CuratorFramework client =
                CuratorFrameworkFactory.newClient(server.getConnectString(), new RetryOneTime(1))) {
            client.start();
            InterProcessMutex lock1 = new InterProcessMutex(client, "/one/two");
            InterProcessMutex lock2 = new InterProcessMutex(client, "/one/two");
            CountDownLatch latch = new CountDownLatch(1);
            AsyncWrappers.lockAsync(lock1).thenAccept(__ -> {
                latch.countDown(); // don't release the lock
            });
            assertTrue(timing.awaitLatch(latch));
            CountDownLatch latch2 = new CountDownLatch(1);
            // Second acquisition should time out because lock1 is still held.
            AsyncWrappers.lockAsync(lock2, timing.forSleepingABit().milliseconds(), TimeUnit.MILLISECONDS)
                    .exceptionally(e -> {
                        if (e instanceof AsyncWrappers.TimeoutException) {
                            latch2.countDown(); // lock should still be held
                        }
                        return null;
                    });
            assertTrue(timing.awaitLatch(latch2));
        }
    }
|
/**
 * Returns the encoding selected for this meta string.
 */
public Encoding getEncoding() {
    return encoding;
  }
|
/**
 * A string containing digits and punctuation cannot use the compact LOWER_SPECIAL
 * alphabet, so the encoder must fall back to UTF_8.
 */
@Test
public void testEncodeCharactersOutsideOfLowerSpecial() {
    // Contains characters outside LOWER_SPECIAL
    String testString = "abcdefABCDEF1234!@#";
    MetaStringEncoder encoder = new MetaStringEncoder('_', '$');
    MetaString encodedMetaString = encoder.encode(testString);
    assertSame(encodedMetaString.getEncoding(), MetaString.Encoding.UTF_8);
  }
|
/**
 * Creates an empty in-memory cache backed by a thread-safe map.
 */
public MemoryCache() {
        this.mainCache = new ConcurrentHashMap<>();
    }
|
/**
 * Exercises the reactive cache API: absent before caching, present with the original
 * bytes afterwards.
 * NOTE(review): assertions run inside subscribe callbacks; if the publishers complete
 * asynchronously a failed assertion may not fail this test — consider StepVerifier.
 */
@Test
public void testMemoryCache() {
        final MemoryCache memoryCache = new MemoryCache();
        final String key = "data";
        memoryCache.isExist(key).subscribe(v -> assertEquals(Boolean.FALSE, v));
        memoryCache.cacheData(key, "data".getBytes(StandardCharsets.UTF_8), 10)
                .subscribe(v -> assertEquals(Boolean.TRUE, v));
        memoryCache.isExist(key).subscribe(v -> assertEquals(Boolean.TRUE, v));
        memoryCache.getData(key).subscribe(v -> assertEquals("data", new String(v, StandardCharsets.UTF_8)));
    }
|
/**
 * Converts a space-separated program of instruction mnemonics and integer literals
 * into virtual-machine bytecode, one int per token.
 *
 * @param instructions e.g. "LITERAL 35 ADD"; null or blank yields an empty program
 * @return the numeric bytecode
 * @throws IllegalArgumentException if a token is neither a known instruction nor an int
 */
public static int[] convertToByteCode(String instructions) {
    if (instructions == null) {
      return new int[0];
    }
    // Trim once and reuse, instead of re-trimming for the blank check and the split.
    var trimmed = instructions.trim();
    if (trimmed.isEmpty()) {
      return new int[0];
    }
    var splitedInstructions = trimmed.split(" ");
    var bytecode = new int[splitedInstructions.length];
    for (var i = 0; i < splitedInstructions.length; i++) {
      if (isValidInstruction(splitedInstructions[i])) {
        bytecode[i] = Instruction.valueOf(splitedInstructions[i]).getIntValue();
      } else if (isValidInt(splitedInstructions[i])) {
        bytecode[i] = Integer.parseInt(splitedInstructions[i]);
      } else {
        var errorMessage = "Invalid instruction or number: " + splitedInstructions[i];
        throw new IllegalArgumentException(errorMessage);
      }
    }
    return bytecode;
  }
|
/**
 * A mixed program of mnemonics and one literal must convert to ten bytecode ints in
 * order, with the literal preserved verbatim.
 */
@Test
void testInstructions() {
    var instructions = "LITERAL 35 SET_HEALTH SET_WISDOM SET_AGILITY PLAY_SOUND"
        + " SPAWN_PARTICLES GET_HEALTH ADD DIVIDE";
    var bytecode = InstructionConverterUtil.convertToByteCode(instructions);
    Assertions.assertEquals(10, bytecode.length);
    Assertions.assertEquals(Instruction.LITERAL.getIntValue(), bytecode[0]);
    // The raw numeric literal follows its LITERAL opcode.
    Assertions.assertEquals(35, bytecode[1]);
    Assertions.assertEquals(Instruction.SET_HEALTH.getIntValue(), bytecode[2]);
    Assertions.assertEquals(Instruction.SET_WISDOM.getIntValue(), bytecode[3]);
    Assertions.assertEquals(Instruction.SET_AGILITY.getIntValue(), bytecode[4]);
    Assertions.assertEquals(Instruction.PLAY_SOUND.getIntValue(), bytecode[5]);
    Assertions.assertEquals(Instruction.SPAWN_PARTICLES.getIntValue(), bytecode[6]);
    Assertions.assertEquals(Instruction.GET_HEALTH.getIntValue(), bytecode[7]);
    Assertions.assertEquals(Instruction.ADD.getIntValue(), bytecode[8]);
    Assertions.assertEquals(Instruction.DIVIDE.getIntValue(), bytecode[9]);
  }
|
/**
 * Resolves {@code path} relative to {@code workingDirectory}, honoring the given
 * link options, and returns the located directory entry.
 *
 * @throws NoSuchFileException if an intermediate element of the path does not exist
 *         or is not a directory
 */
public DirectoryEntry lookUp(
      File workingDirectory, JimfsPath path, Set<? super LinkOption> options) throws IOException {
    checkNotNull(path);
    checkNotNull(options);
    // The recursive overload signals a broken intermediate component with null.
    DirectoryEntry result = lookUp(workingDirectory, path, options, 0);
    if (result == null) {
      // an intermediate file in the path did not exist or was not a directory
      throw new NoSuchFileException(path.toString());
    }
    return result;
  }
|
/**
 * ".." after traversing a symlink must resolve relative to the symlink target's
 * parent, not the textual path, yielding "/" and "work" respectively here.
 */
@Test
public void testLookup_absolute_withDotDotsInPath_afterSymlink() throws IOException {
    assertExists(lookup("/work/four/five/.."), "/", "/");
    assertExists(lookup("/work/four/six/.."), "/", "work");
  }
|
/**
 * Drives the share-fetch cycle: flushes pending acknowledgements first, then builds
 * one ShareFetch request per partition leader for every fetchable partition that has
 * no in-flight request, piggybacking any per-partition acknowledgements.
 *
 * @param currentTimeMs current time used for acknowledgement processing
 * @return the requests to send this poll, or {@code PollResult.EMPTY} when nothing
 *         can be sent (no member id, not fetching, or closing)
 */
@Override
public PollResult poll(long currentTimeMs) {
        // Without a member id we cannot address a share session at all.
        if (memberId == null) {
            return PollResult.EMPTY;
        }
        // Send any pending acknowledgements before fetching more records.
        PollResult pollResult = processAcknowledgements(currentTimeMs);
        if (pollResult != null) {
            return pollResult;
        }
        if (!fetchMoreRecords || closing) {
            return PollResult.EMPTY;
        }
        Map<Node, ShareSessionHandler> handlerMap = new HashMap<>();
        Map<String, Uuid> topicIds = metadata.topicIds();
        for (TopicPartition partition : partitionsToFetch()) {
            Optional<Node> leaderOpt = metadata.currentLeader(partition).leader;
            if (!leaderOpt.isPresent()) {
                log.debug("Requesting metadata update for partition {} since current leader node is missing", partition);
                metadata.requestUpdate(false);
                continue;
            }
            Uuid topicId = topicIds.get(partition.topic());
            if (topicId == null) {
                log.debug("Requesting metadata update for partition {} since topic ID is missing", partition);
                metadata.requestUpdate(false);
                continue;
            }
            Node node = leaderOpt.get();
            if (nodesWithPendingRequests.contains(node.id())) {
                log.trace("Skipping fetch for partition {} because previous fetch request to {} has not been processed", partition, node.id());
            } else {
                // if there is a leader and no in-flight requests, issue a new fetch
                ShareSessionHandler handler = handlerMap.computeIfAbsent(node,
                        k -> sessionHandlers.computeIfAbsent(node.id(), n -> new ShareSessionHandler(logContext, n, memberId)));
                TopicIdPartition tip = new TopicIdPartition(topicId, partition);
                // Piggyback any acknowledgements queued for this partition on the fetch.
                Acknowledgements acknowledgementsToSend = fetchAcknowledgementsMap.get(tip);
                if (acknowledgementsToSend != null) {
                    metricsManager.recordAcknowledgementSent(acknowledgementsToSend.size());
                }
                handler.addPartitionToFetch(tip, acknowledgementsToSend);
                log.debug("Added fetch request for partition {} to node {}", partition, node.id());
            }
        }
        Map<Node, ShareFetchRequest.Builder> builderMap = new LinkedHashMap<>();
        for (Map.Entry<Node, ShareSessionHandler> entry : handlerMap.entrySet()) {
            builderMap.put(entry.getKey(), entry.getValue().newShareFetchBuilder(groupId, fetchConfig));
        }
        List<UnsentRequest> requests = builderMap.entrySet().stream().map(entry -> {
            Node target = entry.getKey();
            log.trace("Building ShareFetch request to send to node {}", target.id());
            ShareFetchRequest.Builder requestBuilder = entry.getValue();
            // Mark the node busy until its response (or failure) is handled.
            nodesWithPendingRequests.add(target.id());
            BiConsumer<ClientResponse, Throwable> responseHandler = (clientResponse, error) -> {
                if (error != null) {
                    handleShareFetchFailure(target, requestBuilder.data(), error);
                } else {
                    handleShareFetchSuccess(target, requestBuilder.data(), clientResponse);
                }
            };
            return new UnsentRequest(requestBuilder, Optional.of(target)).whenComplete(responseHandler);
        }).collect(Collectors.toList());
        return new PollResult(requests);
    }
|
/**
 * A disconnect while a fetch is in flight must surface no records to the application.
 */
@Test
public void testFetchDisconnected() {
        buildRequestManager();
        assignFromSubscribed(singleton(tp0));
        assertEquals(1, sendFetches());
        // The 'true' flag simulates a disconnect when delivering the response.
        client.prepareResponse(fullFetchResponse(tip0, records, acquiredRecords, Errors.NONE), true);
        networkClientDelegate.poll(time.timer(0));
        assertEmptyFetch("Should not return records on disconnect");
    }
|
/**
 * Resets the consume offset for a group/topic on the broker to the given timestamp,
 * delegating to the extended overload with the trailing flag set to {@code false}.
 *
 * @return the post-reset offset per message queue
 */
public Map<MessageQueue, Long> invokeBrokerToResetOffset(final String addr, final String topic, final String group,
        final long timestamp, final boolean isForce, final long timeoutMillis)
        throws RemotingException, MQClientException, InterruptedException {
        return invokeBrokerToResetOffset(addr, topic, group, timestamp, isForce, timeoutMillis, false);
    }
|
/**
 * Both reset-offset overloads must decode the mocked response body into a one-entry
 * offset table.
 */
@Test
public void assertInvokeBrokerToResetOffset() throws RemotingException, InterruptedException, MQClientException {
        mockInvokeSync();
        ResetOffsetBody responseBody = new ResetOffsetBody();
        responseBody.getOffsetTable().put(new MessageQueue(), 1L);
        setResponseBody(responseBody);
        Map<MessageQueue, Long> actual = mqClientAPI.invokeBrokerToResetOffset(defaultBrokerAddr, defaultTopic, "", System.currentTimeMillis(), false, defaultTimeout);
        assertNotNull(actual);
        assertEquals(1, actual.size());
        // Exercise the other overload variant as well.
        actual = mqClientAPI.invokeBrokerToResetOffset(defaultBrokerAddr, defaultTopic, "", System.currentTimeMillis(), 1, 1L, defaultTimeout);
        assertNotNull(actual);
        assertEquals(1, actual.size());
    }
|
/**
 * Returns the JMS producer options as dialog-ready StepOption entries (localized
 * label plus current value), one per configurable property.
 * NOTE(review): the method name's "retrive" typo is part of the public interface and
 * is deliberately left unchanged.
 */
public List<StepOption> retriveOptions() {
    return Arrays.asList(
        new StepOption( DISABLE_MESSAGE_ID, getString( PKG, "JmsDialog.Options.DISABLE_MESSAGE_ID" ), disableMessageId ),
        new StepOption( DISABLE_MESSAGE_TIMESTAMP, getString( PKG, "JmsDialog.Options.DISABLE_MESSAGE_TIMESTAMP" ),
            disableMessageTimestamp ),
        new StepOption( DELIVERY_MODE, getString( PKG, "JmsDialog.Options.DELIVERY_MODE" ), deliveryMode ),
        new StepOption( PRIORITY, getString( PKG, "JmsDialog.Options.PRIORITY" ), prio ),
        new StepOption( TIME_TO_LIVE, getString( PKG, "JmsDialog.Options.TIME_TO_LIVE" ), timeToLive ),
        new StepOption( DELIVERY_DELAY, getString( PKG, "JmsDialog.Options.DELIVERY_DELAY" ), deliveryDelay ),
        new StepOption( JMS_CORRELATION_ID, getString( PKG, "JmsDialog.Options.JMS_CORRELATION_ID" ), jmsCorrelationId ),
        new StepOption( JMS_TYPE, getString( PKG, "JmsDialog.Options.JMS_TYPE" ), jmsType )
    );
  }
|
/**
 * Sets every producer property, then verifies retriveOptions() mirrors all eight of
 * them with the matching localized labels and values.
 */
@Test
public void testRetriveOptions() {
    List<StepOption> compareStepOptions = Arrays.asList(
        new StepOption( DISABLE_MESSAGE_ID, getString( JmsProducerMeta.class, "JmsDialog.Options.DISABLE_MESSAGE_ID" ),
            "false" ),
        new StepOption( DISABLE_MESSAGE_TIMESTAMP,
            getString( JmsProducerMeta.class, "JmsDialog.Options.DISABLE_MESSAGE_TIMESTAMP" ), "true" ),
        new StepOption( DELIVERY_MODE, getString( JmsProducerMeta.class, "JmsDialog.Options.DELIVERY_MODE" ), "2" ),
        new StepOption( PRIORITY, getString( JmsProducerMeta.class, "JmsDialog.Options.PRIORITY" ), "3" ),
        new StepOption( TIME_TO_LIVE, getString( JmsProducerMeta.class, "JmsDialog.Options.TIME_TO_LIVE" ), "100" ),
        new StepOption( DELIVERY_DELAY, getString( JmsProducerMeta.class, "JmsDialog.Options.DELIVERY_DELAY" ), "20" ),
        new StepOption( JMS_CORRELATION_ID, getString( JmsProducerMeta.class, "JmsDialog.Options.JMS_CORRELATION_ID" ),
            "asdf" ),
        new StepOption( JMS_TYPE, getString( JmsProducerMeta.class, "JmsDialog.Options.JMS_TYPE" ), "myType" )
    );
    JmsProducerMeta jmsProducerMeta = new JmsProducerMeta();
    jmsProducerMeta.setDisableMessageId( "false" );
    jmsProducerMeta.setDisableMessageTimestamp( "true" );
    jmsProducerMeta.setDeliveryMode( "2" );
    jmsProducerMeta.setPriority( "3" );
    jmsProducerMeta.setTimeToLive( "100" );
    jmsProducerMeta.setDeliveryDelay( "20" );
    jmsProducerMeta.setJmsCorrelationId( "asdf" );
    jmsProducerMeta.setJmsType( "myType" );
    List<StepOption> stepOptions = jmsProducerMeta.retriveOptions();
    assertNotNull( stepOptions );
    assertEquals( 8, stepOptions.size() );
    assertOptions( compareStepOptions, stepOptions );
  }
|
/**
 * Runs the task: restores state if not already restored, performs a final cancellation
 * check, then executes the mailbox loop until completion, re-checking cancellation
 * before the clean-shutdown path.
 *
 * @throws Exception if restore, the mailbox loop, or the shutdown path fails
 */
@Override
public final void invoke() throws Exception {
        // Allow invoking method 'invoke' without having to call 'restore' before it.
        if (!isRunning) {
            LOG.debug("Restoring during invoke will be called.");
            restoreInternal();
        }
        // final check to exit early before starting to run
        ensureNotCanceled();
        scheduleBufferDebloater();
        // let the task do its work
        getEnvironment().getMetricGroup().getIOMetricGroup().markTaskStart();
        runMailboxLoop();
        // if this left the run() method cleanly despite the fact that this was canceled,
        // make sure the "clean shutdown" is not attempted
        ensureNotCanceled();
        afterInvoke();
    }
|
/**
 * With input and output both available, invoke() must drive the input processor for
 * exactly the configured number of process calls.
 */
@Test
void testProcessWithAvailableOutput() throws Exception {
        try (final MockEnvironment environment = setupEnvironment(true, true)) {
            final int numberOfProcessCalls = 10;
            final AvailabilityTestInputProcessor inputProcessor =
                    new AvailabilityTestInputProcessor(numberOfProcessCalls);
            final StreamTask task =
                    new MockStreamTaskBuilder(environment)
                            .setStreamInputProcessor(inputProcessor)
                            .build();
            task.invoke();
            assertThat(inputProcessor.currentNumProcessCalls).isEqualTo(numberOfProcessCalls);
        }
    }
|
/**
 * Evaluates this condition against one row. Atomic conditions compare the left field
 * with either a constant ({@code rightExact}) or another field, using the configured
 * comparison function; composite conditions combine their children with the
 * per-child operator (AND/OR/AND NOT/OR NOT/XOR). The result is inverted when the
 * condition is negated.
 * NOTE(review): this method caches {@code inList} and {@code rightString} across
 * rows, so evaluation is stateful — restyling statement order here is unsafe.
 *
 * @param rowMeta metadata describing the row layout
 * @param r the row values
 * @return the boolean outcome of the condition for this row
 * @throws RuntimeException wrapping any evaluation failure
 */
public boolean evaluate( RowMetaInterface rowMeta, Object[] r ) {
    // Start of evaluate
    boolean retval = false;
    // If we have 0 items in the list, evaluate the current condition
    // Otherwise, evaluate all sub-conditions
    //
    try {
      if ( isAtomic() ) {
        if ( function == FUNC_TRUE ) {
          return !negate;
        }
        // Get fieldnrs left value
        //
        // Check out the fieldnrs if we don't have them...
        if ( leftValuename != null && leftValuename.length() > 0 ) {
          leftFieldnr = rowMeta.indexOfValue( leftValuename );
        }
        // Get fieldnrs right value
        //
        if ( rightValuename != null && rightValuename.length() > 0 ) {
          rightFieldnr = rowMeta.indexOfValue( rightValuename );
        }
        // Get fieldnrs left field
        ValueMetaInterface fieldMeta = null;
        Object field = null;
        if ( leftFieldnr >= 0 ) {
          fieldMeta = rowMeta.getValueMeta( leftFieldnr );
          field = r[ leftFieldnr ];
        } else {
          return false; // no fields to evaluate
        }
        // Get fieldnrs right exact
        // A constant right-hand side takes priority; fall back to the right field.
        ValueMetaInterface fieldMeta2 = rightExact != null ? rightExact.getValueMeta() : null;
        Object field2 = rightExact != null ? rightExact.getValueData() : null;
        if ( field2 == null && rightFieldnr >= 0 ) {
          fieldMeta2 = rowMeta.getValueMeta( rightFieldnr );
          field2 = r[ rightFieldnr ];
        }
        // Evaluate
        switch ( function ) {
          case FUNC_EQUAL:
            retval = ( fieldMeta.compare( field, fieldMeta2, field2 ) == 0 );
            break;
          case FUNC_NOT_EQUAL:
            retval = ( fieldMeta.compare( field, fieldMeta2, field2 ) != 0 );
            break;
          case FUNC_SMALLER:
            // Added this if/else to accommodate for CUST-270
            if ( "Y".equalsIgnoreCase( System.getProperty( Const.KETTLE_FILTER_TREAT_NULLS_AS_NOT_ZERO, "N" ) )
              && fieldMeta.isNull( field ) ) {
              retval = false;
            } else {
              retval = ( fieldMeta.compare( field, fieldMeta2, field2 ) < 0 );
            }
            break;
          case FUNC_SMALLER_EQUAL:
            // Added this if/else to accommodate for CUST-270
            if ( "Y".equalsIgnoreCase( System.getProperty( Const.KETTLE_FILTER_TREAT_NULLS_AS_NOT_ZERO, "N" ) )
              && fieldMeta.isNull( field ) ) {
              retval = false;
            } else {
              retval = ( fieldMeta.compare( field, fieldMeta2, field2 ) <= 0 );
            }
            break;
          case FUNC_LARGER:
            retval = ( fieldMeta.compare( field, fieldMeta2, field2 ) > 0 );
            break;
          case FUNC_LARGER_EQUAL:
            retval = ( fieldMeta.compare( field, fieldMeta2, field2 ) >= 0 );
            break;
          case FUNC_REGEXP:
            if ( fieldMeta.isNull( field ) || field2 == null ) {
              retval = false;
            } else {
              retval =
                Pattern
                  .matches( fieldMeta2.getCompatibleString( field2 ), fieldMeta.getCompatibleString( field ) );
            }
            break;
          case FUNC_NULL:
            retval = ( fieldMeta.isNull( field ) );
            break;
          case FUNC_NOT_NULL:
            retval = ( !fieldMeta.isNull( field ) );
            break;
          case FUNC_IN_LIST:
            // performance reason: create the array first or again when it is against a field and not a constant
            //
            if ( inList == null || rightFieldnr >= 0 ) {
              inList = Const.splitString( fieldMeta2.getString( field2 ), ';', true );
              for ( int i = 0; i < inList.length; i++ ) {
                inList[i] = inList[i] == null ? null : inList[i].replace( "\\", "" );
              }
              // Sorted so membership can use binary search below.
              Arrays.sort( inList );
            }
            String searchString = fieldMeta.getCompatibleString( field );
            int inIndex = -1;
            if ( searchString != null ) {
              inIndex = Arrays.binarySearch( inList, searchString );
            }
            retval = inIndex >= 0;
            break;
          case FUNC_CONTAINS:
            String fm2CompatibleContains = fieldMeta2.getCompatibleString( field2 );
            retval = Optional.ofNullable( fieldMeta.getCompatibleString( field ) )
              .filter( s -> s.contains( fm2CompatibleContains ) ).isPresent();
            break;
          case FUNC_STARTS_WITH:
            String fm2CompatibleStarts = fieldMeta2.getCompatibleString( field2 );
            retval = Optional.ofNullable( fieldMeta.getCompatibleString( field ) )
              .filter( s -> s.startsWith( fm2CompatibleStarts ) ).isPresent();
            break;
          case FUNC_ENDS_WITH:
            String string = fieldMeta.getCompatibleString( field );
            if ( !Utils.isEmpty( string ) ) {
              if ( rightString == null && field2 != null ) {
                rightString = fieldMeta2.getCompatibleString( field2 );
              }
              if ( rightString != null ) {
                retval = string.endsWith( fieldMeta2.getCompatibleString( field2 ) );
              } else {
                retval = false;
              }
            } else {
              retval = false;
            }
            break;
          case FUNC_LIKE:
            // Converts to a regular expression
            // TODO: optimize the patterns and String replacements
            //
            if ( fieldMeta.isNull( field ) || field2 == null ) {
              retval = false;
            } else {
              String regex = fieldMeta2.getCompatibleString( field2 );
              regex = regex.replace( "%", ".*" );
              regex = regex.replace( "?", "." );
              retval = Pattern.matches( regex, fieldMeta.getCompatibleString( field ) );
            }
            break;
          default:
            break;
        }
        // Only NOT makes sense, the rest doesn't, so ignore!!!!
        // Optionally negate
        //
        if ( isNegated() ) {
          retval = !retval;
        }
      } else {
        // Composite : get first
        Condition cb0 = list.get( 0 );
        retval = cb0.evaluate( rowMeta, r );
        // Loop over the conditions listed below.
        //
        for ( int i = 1; i < list.size(); i++ ) {
          // Composite : #i
          // Get right hand condition
          Condition cb = list.get( i );
          // Evaluate the right hand side of the condition cb.evaluate() within
          // the switch statement
          // because the condition may be short-circuited due to the left hand
          // side (retval)
          switch ( cb.getOperator() ) {
            case Condition.OPERATOR_OR:
              retval = retval || cb.evaluate( rowMeta, r );
              break;
            case Condition.OPERATOR_AND:
              retval = retval && cb.evaluate( rowMeta, r );
              break;
            case Condition.OPERATOR_OR_NOT:
              retval = retval || ( !cb.evaluate( rowMeta, r ) );
              break;
            case Condition.OPERATOR_AND_NOT:
              retval = retval && ( !cb.evaluate( rowMeta, r ) );
              break;
            case Condition.OPERATOR_XOR:
              retval = retval ^ cb.evaluate( rowMeta, r );
              break;
            default:
              break;
          }
        }
        // Composite: optionally negate
        if ( isNegated() ) {
          retval = !retval;
        }
      }
    } catch ( Exception e ) {
      throw new RuntimeException( "Unexpected error evaluation condition [" + toString() + "]", e );
    }
    return retval;
  }
|
@Test
public void testNullSmallerOrEqualsThanZero() {
  // A null left-hand integer compared with "<= 0" must evaluate to true.
  final String leftField = "left";
  final String rightField = "right";
  final RowMetaInterface rowMeta = new RowMeta();
  rowMeta.addValueMeta( new ValueMetaInteger( leftField ) );
  rowMeta.addValueMeta( new ValueMetaInteger( rightField ) );
  final Condition condition = new Condition( leftField, Condition.FUNC_SMALLER_EQUAL, rightField, null );
  final Object[] row = new Object[] { null, 0L };
  assertTrue( condition.evaluate( rowMeta, row ) );
}
|
/**
 * Checks whether {@code url} is allowed by the given exclude rule.
 * <p>
 * Rules: if the URL does not decode as UTF-8, it is rejected outright (may be an
 * attempt to bypass the rules via nonstandard encoding). If the exclude rule ends
 * with "*" and wildcards are enabled, the URL passes when it starts with the rule's
 * prefix and the decoded URL contains no "..". If the rule contains "?", the URL must
 * match it exactly. Otherwise the URL's query string (if any) is stripped before an
 * exact comparison.
 *
 * @param url     the request URL (possibly percent-encoded)
 * @param exclude the exclude rule to test against
 * @return true if the URL passes (is excluded from auth checks), false otherwise
 */
public static boolean testURLPassesExclude(String url, String exclude) {
    // If the url doesn't decode to UTF-8 then return false, it could be trying to get around our rules with nonstandard encoding
    // If the exclude rule includes a "?" character, the url must exactly match the exclude rule.
    // If the exclude rule does not contain the "?" character, we chop off everything starting at the first "?"
    // in the URL and then the resulting url must exactly match the exclude rule. If the exclude ends with a "*"
    // (wildcard) character, and wildcards are allowed in excludes, then the URL is allowed if it exactly
    // matches everything before the * and there are no ".." even encoded ones characters after the "*".
    String decodedUrl = null;
    try {
        decodedUrl = URLDecoder.decode(url, "UTF-8");
    } catch (Exception e) {
        return false;
    }
    if (exclude.endsWith("*") && ALLOW_WILDCARDS_IN_EXCLUDES.getValue()) {
        // NOTE(review): the prefix match uses the raw (still-encoded) URL while the ".."
        // check uses the decoded one — confirm this asymmetry is intentional.
        if (url.startsWith(exclude.substring(0, exclude.length()-1))) {
            // Now make sure that there are no ".." characters in the rest of the URL.
            if (!decodedUrl.contains("..")) {
                return true;
            }
        }
    }
    else if (exclude.contains("?")) {
        if (url.equals(exclude)) {
            return true;
        }
    }
    else {
        // Strip the query string before comparing against a query-less rule.
        int paramIndex = url.indexOf("?");
        if (paramIndex != -1) {
            url = url.substring(0, paramIndex);
        }
        if (url.equals(exclude)) {
            return true;
        }
    }
    return false;
}
|
@Test
public void wildcardInExcludePassesWhenWildcardsAllowed() throws Exception {
    // With wildcards enabled, a URL matching the prefix before "*" must pass.
    AuthCheckFilter.ALLOW_WILDCARDS_IN_EXCLUDES.setValue(true);
    boolean passes = AuthCheckFilter.testURLPassesExclude("setup/setup-new.jsp", "setup/setup-*");
    assertTrue(passes);
}
|
/**
 * Returns the encapsulated Ethernet destination MAC address held by this object.
 *
 * @return encapsulated Ethernet destination MAC address
 */
public MacAddress encapEthDst() {
    return encapEthDst;
}
|
// Constructing the treatment from a MAC must yield a non-null object that
// returns that same MAC verbatim.
@Test
public void testConstruction() {
    final NiciraEncapEthDst encapEthDst1 = new NiciraEncapEthDst(mac1);
    assertThat(encapEthDst1, is(notNullValue()));
    assertThat(encapEthDst1.encapEthDst(), is(mac1));
}
|
/**
 * Handles an incoming heartbeat message and acknowledges it with an OK response.
 *
 * @param request the heartbeat to record
 * @return 200 OK carrying a positive {@code HeartbeatResponse}
 */
public EndpointResponse registerHeartbeat(final HeartbeatMessage request) {
  handleHeartbeat(request);
  final HeartbeatResponse acknowledgement = new HeartbeatResponse(true);
  return EndpointResponse.ok(acknowledgement);
}
|
@Test
public void shouldSendHeartbeat() {
  // Given:
  final KsqlHostInfoEntity host = new KsqlHostInfoEntity("localhost", 8080);
  final HeartbeatMessage request = new HeartbeatMessage(host, 1);
  // When:
  final EndpointResponse response = heartbeatResource.registerHeartbeat(request);
  // Then: the heartbeat is acknowledged with 200 OK and a HeartbeatResponse body.
  assertThat(response.getStatus(), is(200));
  assertThat(response.getEntity(), instanceOf(HeartbeatResponse.class));
}
|
/**
 * Fills the (typically direct) buffer {@code buf} from {@code f}, staging bytes
 * through the heap array {@code temp} because an InputStream cannot write into a
 * direct ByteBuffer.
 *
 * @param f    source stream
 * @param buf  destination buffer; at most {@code buf.remaining()} bytes are written
 * @param temp non-empty heap staging array
 * @return number of bytes copied into {@code buf}, or -1 if the stream was already at EOF
 * @throws IOException              if the underlying stream read fails
 * @throws IllegalArgumentException if {@code temp} is empty
 */
static int readDirectBuffer(InputStream f, ByteBuffer buf, byte[] temp) throws IOException {
  if (temp.length == 0) {
    // Guard: with an empty staging array, read(temp, 0, 0) returns 0 == temp.length
    // and the loop below would never terminate.
    throw new IllegalArgumentException("temp staging array must not be empty");
  }
  // copy all the bytes that return immediately, stopping at the first
  // read that doesn't return a full buffer.
  int nextReadLength = Math.min(buf.remaining(), temp.length);
  int totalBytesRead = 0;
  int bytesRead;
  while ((bytesRead = f.read(temp, 0, nextReadLength)) == temp.length) {
    buf.put(temp);
    totalBytesRead += bytesRead;
    nextReadLength = Math.min(buf.remaining(), temp.length);
  }
  if (bytesRead < 0) {
    // return -1 if nothing was read
    return totalBytesRead == 0 ? -1 : totalBytesRead;
  } else {
    // copy the last partial buffer
    buf.put(temp, 0, bytesRead);
    totalBytesRead += bytesRead;
    return totalBytesRead;
  }
}
|
// Reads a 10-byte direct buffer from a stream that delivers data in small chunks,
// checking the returned length and buffer position after each call, then the final
// contents. NOTE(review): assumes MockInputStream(2, 3, 3) caps the first three
// reads at those sizes and that TEST_ARRAY holds the 10 expected bytes — confirm
// against the MockInputStream fixture.
@Test
public void testDirectSmallReads() throws Exception {
    ByteBuffer readBuffer = ByteBuffer.allocateDirect(10);
    MockInputStream stream = new MockInputStream(2, 3, 3);
    int len = DelegatingSeekableInputStream.readDirectBuffer(stream, readBuffer, TEMP.get());
    Assert.assertEquals(2, len);
    Assert.assertEquals(2, readBuffer.position());
    Assert.assertEquals(10, readBuffer.limit());
    len = DelegatingSeekableInputStream.readDirectBuffer(stream, readBuffer, TEMP.get());
    Assert.assertEquals(3, len);
    Assert.assertEquals(5, readBuffer.position());
    Assert.assertEquals(10, readBuffer.limit());
    len = DelegatingSeekableInputStream.readDirectBuffer(stream, readBuffer, TEMP.get());
    Assert.assertEquals(3, len);
    Assert.assertEquals(8, readBuffer.position());
    Assert.assertEquals(10, readBuffer.limit());
    len = DelegatingSeekableInputStream.readDirectBuffer(stream, readBuffer, TEMP.get());
    Assert.assertEquals(2, len);
    Assert.assertEquals(10, readBuffer.position());
    Assert.assertEquals(10, readBuffer.limit());
    readBuffer.flip();
    Assert.assertEquals("Buffer contents should match", ByteBuffer.wrap(TEST_ARRAY), readBuffer);
}
|
@Override
public void doFilter(HttpRequest request, HttpResponse response, FilterChain chain) {
  // A null provider means the resolver has already written the response; nothing to do.
  final IdentityProvider provider = resolveProviderOrHandleResponse(request, response, INIT_CONTEXT);
  if (provider == null) {
    return;
  }
  handleProvider(request, response, provider);
}
|
// When the identity provider throws an UnauthorizedException during init, the user is
// redirected to the unauthorized page, the failure is recorded in the authentication
// event log with the provider as source, and the error message is surfaced via the
// AUTHENTICATION-ERROR cookie (URL-encoded, root path, 5-minute lifetime, not HttpOnly).
@Test
public void redirect_contains_cookie_with_error_message_when_failing_because_of_UnauthorizedExceptionException() throws Exception {
    IdentityProvider identityProvider = new FailWithUnauthorizedExceptionIdProvider("failing");
    when(request.getRequestURI()).thenReturn("/sessions/init/" + identityProvider.getKey());
    identityProviderRepository.addIdentityProvider(identityProvider);
    underTest.doFilter(request, response, chain);
    verify(response).sendRedirect("/sessions/unauthorized");
    verify(authenticationEvent).loginFailure(eq(request), authenticationExceptionCaptor.capture());
    AuthenticationException authenticationException = authenticationExceptionCaptor.getValue();
    assertThat(authenticationException).hasMessage("Email john@email.com is already used");
    assertThat(authenticationException.getSource()).isEqualTo(AuthenticationEvent.Source.external(identityProvider));
    assertThat(authenticationException.getLogin()).isNull();
    assertThat(authenticationException.getPublicMessage()).isEqualTo("Email john@email.com is already used");
    verifyDeleteAuthCookie();
    // The error cookie carries the URL-encoded public message for the front-end.
    verify(response).addCookie(cookieArgumentCaptor.capture());
    Cookie cookie = cookieArgumentCaptor.getValue();
    assertThat(cookie.getName()).isEqualTo("AUTHENTICATION-ERROR");
    assertThat(cookie.getValue()).isEqualTo("Email%20john%40email.com%20is%20already%20used");
    assertThat(cookie.getPath()).isEqualTo("/");
    assertThat(cookie.isHttpOnly()).isFalse();
    assertThat(cookie.getMaxAge()).isEqualTo(300);
    assertThat(cookie.isSecure()).isFalse();
}
|
/**
 * Realizes each element of {@code objs} into its corresponding target type.
 *
 * @param objs  the generalized objects to realize
 * @param types the target types, positionally matching {@code objs}
 * @return a new array of realized objects, same length and order as the inputs
 * @throws IllegalArgumentException if the two arrays differ in length
 */
public static Object[] realize(Object[] objs, Class<?>[] types) {
    if (objs.length != types.length) {
        throw new IllegalArgumentException("args.length != types.length");
    }
    Object[] realized = new Object[types.length];
    for (int index = 0; index < realized.length; index++) {
        realized[index] = realize(objs[index], types[index]);
    }
    return realized;
}
|
// Round-trips nested generic POJOs (Ageneric<Ageneric<PersonInfo>> and
// Bgeneric<Ageneric<PersonInfo>>) through JSON generalization and PojoUtils.realize:
// without generic type information the nested payload stays a raw Map; with the
// declared return Type (recovered via reflection) it is fully reconstructed.
@Test
public void testPojoGeneric2() throws NoSuchMethodException {
    String personName = "testName";
    {
        Ageneric<Ageneric<PersonInfo>> generic2PersonInfo = createAGenericLoop(personName);
        Object o = JSON.toJSON(generic2PersonInfo);
        {
            // Raw-class realize: inner generic stays a Map.
            Ageneric personInfo = (Ageneric) PojoUtils.realize(o, Ageneric.class);
            assertEquals(Ageneric.NAME, personInfo.getName());
            assertTrue(personInfo.getData() instanceof Map);
        }
        {
            // Typed realize: inner generics are reconstructed recursively.
            Type[] createGenericPersonInfos = ReflectUtils.getReturnTypes(
                PojoUtilsTest.class.getDeclaredMethod("createAGenericLoop", String.class));
            Ageneric personInfo = (Ageneric)
                PojoUtils.realize(o, (Class) createGenericPersonInfos[0], createGenericPersonInfos[1]);
            assertEquals(Ageneric.NAME, personInfo.getName());
            assertEquals(personInfo.getData().getClass(), Ageneric.class);
            assertEquals(Ageneric.NAME, ((Ageneric) personInfo.getData()).getName());
            assertEquals(((Ageneric) personInfo.getData()).getData().getClass(), PersonInfo.class);
            assertEquals(personName, ((PersonInfo) ((Ageneric) personInfo.getData()).getData()).getName());
        }
    }
    {
        Bgeneric<Ageneric<PersonInfo>> generic = createBGenericWithAgeneric(personName);
        Object o = JSON.toJSON(generic);
        {
            Ageneric personInfo = (Ageneric) PojoUtils.realize(o, Ageneric.class);
            assertEquals(Bgeneric.NAME, personInfo.getName());
            assertTrue(personInfo.getData() instanceof Map);
        }
        {
            Type[] createGenericPersonInfos = ReflectUtils.getReturnTypes(
                PojoUtilsTest.class.getDeclaredMethod("createBGenericWithAgeneric", String.class));
            Bgeneric personInfo = (Bgeneric)
                PojoUtils.realize(o, (Class) createGenericPersonInfos[0], createGenericPersonInfos[1]);
            assertEquals(Bgeneric.NAME, personInfo.getName());
            assertEquals(personInfo.getData().getClass(), Ageneric.class);
            assertEquals(Ageneric.NAME, ((Ageneric) personInfo.getData()).getName());
            assertEquals(((Ageneric) personInfo.getData()).getData().getClass(), PersonInfo.class);
            assertEquals(personName, ((PersonInfo) ((Ageneric) personInfo.getData()).getData()).getName());
        }
    }
}
|
/**
 * Closes the given JNDI context, optionally suppressing any {@link NamingException}.
 * <p>
 * A {@code null} context is a no-op. When {@code swallowIOException} is {@code true},
 * a failure to close is logged and swallowed; otherwise the exception propagates.
 * NOTE(review): despite its name, the flag governs {@code NamingException}, not
 * {@code IOException} — consider renaming in a future API revision.
 *
 * @param context            the context to close; may be {@code null}
 * @param swallowIOException whether to swallow (and log) a NamingException from close()
 * @throws NamingException if close() fails and swallowing is disabled
 */
public static void close(@Nullable Context context, boolean swallowIOException) throws NamingException {
    if (context == null) {
        return;
    }
    try {
        context.close();
    } catch (NamingException e) {
        if (swallowIOException) {
            LOG.warn("NamingException thrown while closing context.", e);
        } else {
            throw e;
        }
    }
}
|
@Test
public void shouldNotSwallow() throws Exception {
    // Given a context whose close() always fails...
    Context failingContext = mock(Context.class);
    doThrow(new NamingException()).when(failingContext).close();
    // ...closing with swallowing disabled must propagate the NamingException.
    assertThatThrownBy(() -> ContextHelper.close(failingContext, false))
        .isInstanceOf(NamingException.class);
}
|
/**
 * Initializes the TM client for the given application and transaction service group.
 * Delegates to the four-argument overload with the remaining two arguments as
 * {@code null} (presumably optional credentials — confirm against the overload's contract).
 *
 * @param applicationId           identifier of the calling application
 * @param transactionServiceGroup transaction service group to join
 */
public static void init(String applicationId, String transactionServiceGroup) {
    init(applicationId, transactionServiceGroup, null, null);
}
|
@Test
public void testInit() {
    // Initializing the TM client must propagate the service group to the netty client singleton.
    TMClient.init(APPLICATION_ID, SERVICE_GROUP);
    TmNettyRemotingClient client = TmNettyRemotingClient.getInstance();
    Assertions.assertEquals(SERVICE_GROUP, client.getTransactionServiceGroup());
}
|
@Udf
public String concatWS(
    @UdfParameter(description = "Separator string and values to join") final String... inputs) {
  // Require a separator plus at least one value.
  if (inputs == null || inputs.length < 2) {
    throw new KsqlFunctionException("Function Concat_WS expects at least two input arguments.");
  }
  final String separator = inputs[0];
  if (separator == null) {
    // A null separator makes the whole result null.
    return null;
  }
  // Join all non-null values after the separator element.
  return Arrays.stream(inputs)
      .skip(1)
      .filter(Objects::nonNull)
      .collect(Collectors.joining(separator));
}
|
// When every value argument is null, the result is empty (string/bytes), not null —
// null inputs are filtered out, and only a null separator yields a null result.
@Test
public void shouldReturnEmptyIfAllInputsNull() {
    assertThat(udf.concatWS("SEP", null, null), is(""));
    assertThat(udf.concatWS(ByteBuffer.wrap(new byte[] {2}), null, null), is(EMPTY_BYTES));
}
|
@Override
public Optional<ShardingTableNameReviser> getTableNameReviser() {
    // Sharding always supplies a table-name reviser.
    ShardingTableNameReviser reviser = new ShardingTableNameReviser();
    return Optional.of(reviser);
}
|
@Test
void assertGetTableNameReviser() {
    // The sharding revise entry must always expose a ShardingTableNameReviser.
    Optional<ShardingTableNameReviser> actual = reviseEntry.getTableNameReviser();
    assertTrue(actual.isPresent());
    assertThat(actual.get().getClass(), is(ShardingTableNameReviser.class));
}
|
/**
 * Deserializes the XML config content, hands the editable config to the optional
 * callback, then validates it and returns both the processed and editable forms.
 *
 * @param content  raw config XML
 * @param callback optional hook invoked with the freshly deserialized config; may be null
 * @return holder pairing the validated config with its editable counterpart
 * @throws Exception if deserialization or validation fails
 */
public GoConfigHolder loadConfigHolder(final String content, Callback callback) throws Exception {
    LOGGER.debug("[Config Save] Loading config holder");
    CruiseConfig configForEdit = deserializeConfig(content);
    if (callback != null) {
        callback.call(configForEdit);
    }
    CruiseConfig config = preprocessAndValidate(configForEdit);
    return new GoConfigHolder(config, configForEdit);
}
|
// Two <environment> elements with the same name must be rejected by the schema's
// unique-name identity constraint (the exact message wording differs across XML
// parser versions, hence the two accepted variants).
@Test
void shouldNotAllowConfigWithEnvironmentsWithSameNames() {
    String content = configWithEnvironments(
        """
        <environments>
          <environment name='uat' />
          <environment name='uat' />
        </environments>""", CONFIG_SCHEMA_VERSION);
    assertThatThrownBy(() -> xmlLoader.loadConfigHolder(content))
        .as("Should not support 2 environments with the same same")
        .satisfiesAnyOf(
            t -> assertThat(t.getMessage()).contains("Duplicate unique value [uat] declared for identity constraint of element \"environments\"."),
            t -> assertThat(t.getMessage()).contains("Duplicate unique value [uat] declared for identity constraint \"uniqueEnvironmentName\" of element \"environments\".")
        );
}
|
/**
 * Translates a Hive {@code SearchArgument} (predicate tree plus its leaves) into the
 * equivalent Iceberg filter {@code Expression}.
 *
 * @param sarg the Hive search argument to translate
 * @return the equivalent Iceberg expression
 */
public static Expression generateFilterExpression(SearchArgument sarg) {
    return translate(sarg.getExpression(), sarg.getLeaves());
}
|
// A DATE equality leaf must translate into an Iceberg equal() predicate whose
// literal is the same calendar date converted to Iceberg's DateType value.
@Test
public void testDateType() {
    SearchArgument.Builder builder = SearchArgumentFactory.newBuilder();
    Date gmtDate = Date.valueOf(LocalDate.of(2015, 11, 12));
    SearchArgument arg =
        builder.startAnd().equals("date", PredicateLeaf.Type.DATE, gmtDate).end().build();
    UnboundPredicate expected =
        Expressions.equal("date", Literal.of("2015-11-12").to(Types.DateType.get()).value());
    UnboundPredicate actual =
        (UnboundPredicate) HiveIcebergFilterFactory.generateFilterExpression(arg);
    assertPredicatesMatch(expected, actual);
}
|
/**
 * Analyzes a predicate tree, computing its total size and the minimum number of
 * features a document must contain to possibly match ("min-feature").
 *
 * @param predicate root of the predicate tree to analyze
 * @return result holding the min-feature bound, tree size, and per-subtree sizes
 */
public static PredicateTreeAnalyzerResult analyzePredicateTree(Predicate predicate) {
    AnalyzerContext context = new AnalyzerContext();
    int treeSize = aggregatePredicateStatistics(predicate, false, context);
    // Round the fractional min-feature estimate up; any negation in the tree costs one extra feature.
    int minFeature = ((int)Math.ceil(findMinFeature(predicate, false, context))) + (context.hasNegationPredicate ? 1 : 0);
    return new PredicateTreeAnalyzerResult(minFeature, treeSize, context.subTreeSizes);
}
|
// A single feature term is a one-node tree that needs exactly one feature present,
// and records no subtree sizes.
@Test
void require_that_minfeature_is_1_for_simple_term() {
    Predicate p = feature("foo").inSet("bar");
    PredicateTreeAnalyzerResult r = PredicateTreeAnalyzer.analyzePredicateTree(p);
    assertEquals(1, r.minFeature);
    assertEquals(1, r.treeSize);
    assertTrue(r.sizeMap.isEmpty());
}
|
@Description("Given a Bing tile, returns the polygon representation of the tile")
@ScalarFunction("bing_tile_polygon")
@SqlType(GEOMETRY_TYPE_NAME)
public static Slice bingTilePolygon(@SqlType(BingTileType.NAME) long input)
{
    // Decode the packed tile value, then serialize the tile's bounding envelope as geometry.
    BingTile tile = BingTile.decode(input);
    return serialize(tileToEnvelope(tile));
}
|
// Verifies bing_tile_polygon against known WKT output and its centroid, then checks
// that tile corners line up across zoom levels: bottom-right corners of the last
// tile in each row/column and top-left corners of tile (0,0) stacks.
@Test
public void testBingTilePolygon()
{
    assertFunction("ST_AsText(bing_tile_polygon(bing_tile('123030123010121')))", VARCHAR, "POLYGON ((59.996337890625 30.11662158281937, 59.996337890625 30.12612436422458, 60.00732421875 30.12612436422458, 60.00732421875 30.11662158281937, 59.996337890625 30.11662158281937))");
    assertFunction("ST_AsText(ST_Centroid(bing_tile_polygon(bing_tile('123030123010121'))))", VARCHAR, "POINT (60.0018310546875 30.12137297352197)");
    // Check bottom right corner of a stack of tiles at different zoom levels
    assertFunction("ST_AsText(apply(bing_tile_polygon(bing_tile(1, 1, 1)), g -> ST_Point(ST_XMax(g), ST_YMin(g))))", VARCHAR, "POINT (180 -85.05112877980659)");
    assertFunction("ST_AsText(apply(bing_tile_polygon(bing_tile(3, 3, 2)), g -> ST_Point(ST_XMax(g), ST_YMin(g))))", VARCHAR, "POINT (180 -85.05112877980659)");
    assertFunction("ST_AsText(apply(bing_tile_polygon(bing_tile(7, 7, 3)), g -> ST_Point(ST_XMax(g), ST_YMin(g))))", VARCHAR, "POINT (180 -85.05112877980659)");
    assertFunction("ST_AsText(apply(bing_tile_polygon(bing_tile(15, 15, 4)), g -> ST_Point(ST_XMax(g), ST_YMin(g))))", VARCHAR, "POINT (180 -85.05112877980659)");
    assertFunction("ST_AsText(apply(bing_tile_polygon(bing_tile(31, 31, 5)), g -> ST_Point(ST_XMax(g), ST_YMin(g))))", VARCHAR, "POINT (180 -85.05112877980659)");
    assertFunction("ST_AsText(apply(bing_tile_polygon(bing_tile(0, 0, 1)), g -> ST_Point(ST_XMax(g), ST_YMin(g))))", VARCHAR, "POINT (0 0)");
    assertFunction("ST_AsText(apply(bing_tile_polygon(bing_tile(1, 1, 2)), g -> ST_Point(ST_XMax(g), ST_YMin(g))))", VARCHAR, "POINT (0 0)");
    assertFunction("ST_AsText(apply(bing_tile_polygon(bing_tile(3, 3, 3)), g -> ST_Point(ST_XMax(g), ST_YMin(g))))", VARCHAR, "POINT (0 0)");
    assertFunction("ST_AsText(apply(bing_tile_polygon(bing_tile(7, 7, 4)), g -> ST_Point(ST_XMax(g), ST_YMin(g))))", VARCHAR, "POINT (0 0)");
    assertFunction("ST_AsText(apply(bing_tile_polygon(bing_tile(15, 15, 5)), g -> ST_Point(ST_XMax(g), ST_YMin(g))))", VARCHAR, "POINT (0 0)");
    // Check top left corner of a stack of tiles at different zoom levels
    assertFunction("ST_AsText(apply(bing_tile_polygon(bing_tile(1, 1, 1)), g -> ST_Point(ST_XMin(g), ST_YMax(g))))", VARCHAR, "POINT (0 0)");
    assertFunction("ST_AsText(apply(bing_tile_polygon(bing_tile(2, 2, 2)), g -> ST_Point(ST_XMin(g), ST_YMax(g))))", VARCHAR, "POINT (0 0)");
    assertFunction("ST_AsText(apply(bing_tile_polygon(bing_tile(4, 4, 3)), g -> ST_Point(ST_XMin(g), ST_YMax(g))))", VARCHAR, "POINT (0 0)");
    assertFunction("ST_AsText(apply(bing_tile_polygon(bing_tile(8, 8, 4)), g -> ST_Point(ST_XMin(g), ST_YMax(g))))", VARCHAR, "POINT (0 0)");
    assertFunction("ST_AsText(apply(bing_tile_polygon(bing_tile(16, 16, 5)), g -> ST_Point(ST_XMin(g), ST_YMax(g))))", VARCHAR, "POINT (0 0)");
    assertFunction("ST_AsText(apply(bing_tile_polygon(bing_tile(0, 0, 1)), g -> ST_Point(ST_XMin(g), ST_YMax(g))))", VARCHAR, "POINT (-180 85.05112877980659)");
    assertFunction("ST_AsText(apply(bing_tile_polygon(bing_tile(0, 0, 2)), g -> ST_Point(ST_XMin(g), ST_YMax(g))))", VARCHAR, "POINT (-180 85.05112877980659)");
    assertFunction("ST_AsText(apply(bing_tile_polygon(bing_tile(0, 0, 3)), g -> ST_Point(ST_XMin(g), ST_YMax(g))))", VARCHAR, "POINT (-180 85.05112877980659)");
    assertFunction("ST_AsText(apply(bing_tile_polygon(bing_tile(0, 0, 4)), g -> ST_Point(ST_XMin(g), ST_YMax(g))))", VARCHAR, "POINT (-180 85.05112877980659)");
    assertFunction("ST_AsText(apply(bing_tile_polygon(bing_tile(0, 0, 5)), g -> ST_Point(ST_XMin(g), ST_YMax(g))))", VARCHAR, "POINT (-180 85.05112877980659)");
}
|
/**
 * Returns whether {@code a <= b} under the natural ordering of {@code T}.
 */
@VisibleForTesting
static <T extends Comparable<? super T>> boolean le(T a, T b) {
    final int comparison = a.compareTo(b);
    return comparison <= 0;
}
|
// le(a, b) means "a <= b" under natural ordering: strictly-less, equal, greater.
@Test
public void testComparators_LE() {
    Assert.assertTrue(VersionChecker.le(0, 1));
    Assert.assertTrue(VersionChecker.le(1, 1));
    Assert.assertFalse(VersionChecker.le(2, 1));
}
|
/**
 * Returns the active session for the given application, if one exists.
 *
 * @param applicationId the application to look up
 * @return the active session, or empty if the application has no active session
 */
public Optional<Session> getActiveSession(ApplicationId applicationId) {
    return getActiveSession(getTenant(applicationId), applicationId);
}
|
// Redeploying the same application must mark the previously active session DEACTIVATE.
@Test
public void testThatPreviousSessionIsDeactivated() {
    deployApp(testAppJdiscOnly);
    Session firstSession = applicationRepository.getActiveSession(applicationId()).get();
    deployApp(testAppJdiscOnly);
    assertEquals(DEACTIVATE, firstSession.getStatus());
}
|
@Override
protected void doStart() throws Exception {
    // Let the superclass start first, then install the listener that tracks
    // asynchronous servlet dispatches for this handler.
    super.doStart();
    asyncDispatchesAwareServletRequestListener = new AsyncDispatchesAwareServletRequestListener(getAsyncDispatches());
}
|
// Starting an instrumented handler with COARSE response metering must register the
// per-handler 4xx-percentage gauge under "<prefix>.<name>.percent-4xx-1m".
@Test
public void gaugesAreRegisteredWithResponseMeteredLevelCoarse() throws Exception {
    InstrumentedEE10Handler handler = new InstrumentedEE10Handler(registry, "coarse", COARSE);
    handler.setHandler(new TestHandler());
    handler.setName("handler");
    handler.doStart();
    assertThat(registry.getGauges()).containsKey("coarse.handler.percent-4xx-1m");
}
|
/**
 * Resolves the column name the legacy MySQL driver semantics would use: drivers newer
 * than major version 3 report the column label, older ones the raw column name.
 *
 * @param dbMetaData database metadata used to determine the driver version; must not be null
 * @param rsMetaData result-set metadata to read the name from; must not be null
 * @param index      1-based column index
 * @return the column label (driver major version > 3) or column name (otherwise)
 * @throws KettleDatabaseException if either metadata argument is null or the lookup fails
 */
public String getLegacyColumnName( DatabaseMetaData dbMetaData, ResultSetMetaData rsMetaData, int index ) throws KettleDatabaseException {
  if ( dbMetaData == null ) {
    throw new KettleDatabaseException( BaseMessages.getString( PKG, "MySQLDatabaseMeta.Exception.LegacyColumnNameNoDBMetaDataException" ) );
  }
  if ( rsMetaData == null ) {
    throw new KettleDatabaseException( BaseMessages.getString( PKG, "MySQLDatabaseMeta.Exception.LegacyColumnNameNoRSMetaDataException" ) );
  }
  try {
    if ( dbMetaData.getDriverMajorVersion() > 3 ) {
      return rsMetaData.getColumnLabel( index );
    }
    return rsMetaData.getColumnName( index );
  } catch ( Exception e ) {
    throw new KettleDatabaseException( String.format( "%s: %s", BaseMessages.getString( PKG, "MySQLDatabaseMeta.Exception.LegacyColumnNameException" ), e.getMessage() ), e );
  }
}
|
// With driver major version <= 3 the column name path is taken; a failure inside the
// ResultSet metadata must surface wrapped as a KettleDatabaseException.
@Test( expected = KettleDatabaseException.class )
public void testGetLegacyColumnNameDriverLessOrEqualToThreeException() throws Exception {
    DatabaseMetaData databaseMetaData = mock( DatabaseMetaData.class );
    doReturn( 3 ).when( databaseMetaData ).getDriverMajorVersion();
    new MySQLDatabaseMeta().getLegacyColumnName( databaseMetaData, getResultSetMetaDataException(), 1 );
}
|
/**
 * Looks up the registration for a specific partition of a topic.
 *
 * @param topicId     the topic's unique id
 * @param partitionId the partition number within the topic
 * @return the partition registration, or null if the topic or partition is unknown
 */
PartitionRegistration getPartition(Uuid topicId, int partitionId) {
    TopicControlInfo topic = topics.get(topicId);
    return (topic == null) ? null : topic.parts.get(partitionId);
}
|
// With ELR enabled and min.insync.replicas=2: shrinking the ISR of [0, 1, 2] down to
// {0} moves replicas 1 and 2 into the eligible-leader-replica (ELR) set; expanding
// the ISR back to {0, 1} clears the ELR again. lastKnownElr stays empty throughout.
@Test
public void testEligibleLeaderReplicas_ShrinkAndExpandIsr() {
    ReplicationControlTestContext ctx = new ReplicationControlTestContext.Builder().setIsElrEnabled(true).build();
    ReplicationControlManager replicationControl = ctx.replicationControl;
    ctx.registerBrokers(0, 1, 2);
    ctx.unfenceBrokers(0, 1, 2);
    CreatableTopicResult createTopicResult = ctx.createTestTopic("foo",
        new int[][] {new int[] {0, 1, 2}});
    TopicIdPartition topicIdPartition = new TopicIdPartition(createTopicResult.topicId(), 0);
    assertEquals(OptionalInt.of(0), ctx.currentLeader(topicIdPartition));
    long brokerEpoch = ctx.currentBrokerEpoch(0);
    ctx.alterTopicConfig("foo", TopicConfig.MIN_IN_SYNC_REPLICAS_CONFIG, "2");
    // Change ISR to {0}.
    PartitionData shrinkIsrRequest = newAlterPartition(
        replicationControl, topicIdPartition, isrWithDefaultEpoch(0), LeaderRecoveryState.RECOVERED);
    ControllerResult<AlterPartitionResponseData> shrinkIsrResult = sendAlterPartition(
        replicationControl, 0, brokerEpoch, topicIdPartition.topicId(), shrinkIsrRequest);
    AlterPartitionResponseData.PartitionData shrinkIsrResponse = assertAlterPartitionResponse(
        shrinkIsrResult, topicIdPartition, NONE);
    assertConsistentAlterPartitionResponse(replicationControl, topicIdPartition, shrinkIsrResponse);
    PartitionRegistration partition = replicationControl.getPartition(topicIdPartition.topicId(), topicIdPartition.partitionId());
    assertArrayEquals(new int[]{1, 2}, partition.elr, partition.toString());
    assertArrayEquals(new int[]{}, partition.lastKnownElr, partition.toString());
    // Expand ISR back to {0, 1}; the ELR must be emptied.
    PartitionData expandIsrRequest = newAlterPartition(
        replicationControl, topicIdPartition, isrWithDefaultEpoch(0, 1), LeaderRecoveryState.RECOVERED);
    ControllerResult<AlterPartitionResponseData> expandIsrResult = sendAlterPartition(
        replicationControl, 0, brokerEpoch, topicIdPartition.topicId(), expandIsrRequest);
    AlterPartitionResponseData.PartitionData expandIsrResponse = assertAlterPartitionResponse(
        expandIsrResult, topicIdPartition, NONE);
    assertConsistentAlterPartitionResponse(replicationControl, topicIdPartition, expandIsrResponse);
    partition = replicationControl.getPartition(topicIdPartition.topicId(), topicIdPartition.partitionId());
    assertArrayEquals(new int[]{}, partition.elr, partition.toString());
    assertArrayEquals(new int[]{}, partition.lastKnownElr, partition.toString());
}
|
@Override
public void setConfig(RedisClusterNode node, String param, String value) {
    // Issue CONFIG SET on the client that owns the given cluster node and wait for it.
    RFuture<Void> future = executorService.writeAsync(
            getEntry(node), StringCodec.INSTANCE, RedisCommands.CONFIG_SET, param, value);
    syncFuture(future);
}
|
// Smoke test: setting a config parameter on a cluster master must complete without error.
@Test
public void testSetConfig() {
    testInCluster(connection -> {
        RedisClusterNode master = getFirstMaster(connection);
        connection.setConfig(master, "timeout", "10");
    });
}
|
@Override
public boolean equals(Object o) {
    // Identity short-circuit, then strict class match (subclasses are not equal).
    if (o == this) {
        return true;
    }
    if (o == null || o.getClass() != getClass()) {
        return false;
    }
    Struct that = (Struct) o;
    // deepEquals handles nested arrays inside the values array.
    return Objects.equals(schema, that.schema)
            && Arrays.deepEquals(values, that.values);
}
|
// Structs with identical schemas and field values must be equal; any differing field
// (flat or nested array/map/struct) must break equality.
@Test
public void testEquals() {
    Struct struct1 = new Struct(FLAT_STRUCT_SCHEMA)
            .put("int8", (byte) 12)
            .put("int16", (short) 12)
            .put("int32", 12)
            .put("int64", (long) 12)
            .put("float32", 12.f)
            .put("float64", 12.)
            .put("boolean", true)
            .put("string", "foobar")
            .put("bytes", ByteBuffer.wrap("foobar".getBytes()));
    Struct struct2 = new Struct(FLAT_STRUCT_SCHEMA)
            .put("int8", (byte) 12)
            .put("int16", (short) 12)
            .put("int32", 12)
            .put("int64", (long) 12)
            .put("float32", 12.f)
            .put("float64", 12.)
            .put("boolean", true)
            .put("string", "foobar")
            .put("bytes", ByteBuffer.wrap("foobar".getBytes()));
    // struct3 differs from struct1 only in the "string" field.
    Struct struct3 = new Struct(FLAT_STRUCT_SCHEMA)
            .put("int8", (byte) 12)
            .put("int16", (short) 12)
            .put("int32", 12)
            .put("int64", (long) 12)
            .put("float32", 12.f)
            .put("float64", 12.)
            .put("boolean", true)
            .put("string", "mismatching string")
            .put("bytes", ByteBuffer.wrap("foobar".getBytes()));
    assertEquals(struct1, struct2);
    assertNotEquals(struct1, struct3);
    // Nested case: equal arrays/maps/structs by value, then mismatching ones.
    List<Byte> array = Arrays.asList((byte) 1, (byte) 2);
    Map<Integer, String> map = Collections.singletonMap(1, "string");
    struct1 = new Struct(NESTED_SCHEMA)
            .put("array", array)
            .put("map", map)
            .put("nested", new Struct(NESTED_CHILD_SCHEMA).put("int8", (byte) 12));
    List<Byte> array2 = Arrays.asList((byte) 1, (byte) 2);
    Map<Integer, String> map2 = Collections.singletonMap(1, "string");
    struct2 = new Struct(NESTED_SCHEMA)
            .put("array", array2)
            .put("map", map2)
            .put("nested", new Struct(NESTED_CHILD_SCHEMA).put("int8", (byte) 12));
    List<Byte> array3 = Arrays.asList((byte) 1, (byte) 2, (byte) 3);
    Map<Integer, String> map3 = Collections.singletonMap(2, "string");
    struct3 = new Struct(NESTED_SCHEMA)
            .put("array", array3)
            .put("map", map3)
            .put("nested", new Struct(NESTED_CHILD_SCHEMA).put("int8", (byte) 13));
    assertEquals(struct1, struct2);
    assertNotEquals(struct1, struct3);
}
|
/**
 * Resolves a SpEL expression in the context of an intercepted method call.
 * <p>
 * Resolution order: empty expressions pass through unchanged; placeholder
 * expressions ("${...}") are resolved via the configured string value resolver;
 * method-style expressions are evaluated against the method invocation; bean-style
 * expressions are evaluated the same way but with bean references enabled.
 * Anything else is returned as-is.
 *
 * @param method         the intercepted method
 * @param arguments      the actual invocation arguments
 * @param spelExpression the raw expression to resolve; may be null or empty
 * @return the resolved string, or the input unchanged when no rule matches
 */
@Override
public String resolve(Method method, Object[] arguments, String spelExpression) {
    if (StringUtils.isEmpty(spelExpression)) {
        return spelExpression;
    }
    if (spelExpression.matches(PLACEHOLDER_SPEL_REGEX) && stringValueResolver != null) {
        return stringValueResolver.resolveStringValue(spelExpression);
    }
    if (spelExpression.matches(METHOD_SPEL_REGEX)) {
        return evaluateAgainstMethod(method, arguments, spelExpression, false);
    }
    if (spelExpression.matches(BEAN_SPEL_REGEX)) {
        return evaluateAgainstMethod(method, arguments, spelExpression, true);
    }
    return spelExpression;
}

/**
 * Evaluates the expression against a method-based SpEL context, optionally enabling
 * {@code @beanName} references. Extracted to remove the duplicated evaluation logic
 * between the method- and bean-expression branches.
 */
private String evaluateAgainstMethod(Method method, Object[] arguments, String spelExpression, boolean withBeanResolver) {
    SpelRootObject rootObject = new SpelRootObject(method, arguments);
    MethodBasedEvaluationContext evaluationContext =
            new MethodBasedEvaluationContext(rootObject, method, arguments, parameterNameDiscoverer);
    if (withBeanResolver) {
        evaluationContext.setBeanResolver(new BeanFactoryResolver(this.beanFactory));
    }
    Object evaluated = expressionParser.parseExpression(spelExpression).getValue(evaluationContext);
    return (String) evaluated;
}
|
@Test
public void nullTest() throws Exception {
    // A null SpEL expression must pass through unchanged (i.e. stay null).
    DefaultSpelResolverTest target = new DefaultSpelResolverTest();
    Method method = target.getClass().getMethod("testMethod", String.class);
    String resolved = sut.resolve(method, new Object[]{}, null);
    assertThat(resolved).isNull();
}
|
/**
 * Sets the description of this model.
 *
 * @param description the description text; may be {@code null}
 */
public void setDescription(String description) {
    this.description = description;
}
|
@Test
public void testSetDescription() {
    // The getter must return exactly what the setter stored.
    String expected = "description";
    Model model = new Model();
    model.setDescription(expected);
    assertEquals(expected, model.getDescription());
}
|
/**
 * Subscribes the listener to the given URL in ZooKeeper.
 * <p>
 * For the wildcard interface ("*"), a child listener is attached to the registry root
 * and every current and future child service is subscribed individually. For a
 * concrete interface, a child listener is attached to each category path of the URL
 * and the listener is notified with the aggregated URLs; a latch defers child-event
 * delivery until that initial synchronous notification completes.
 *
 * @param url      subscription URL; its service interface may be "*"
 * @param listener callback notified with matching URLs
 * @throws RpcException if the subscription fails for any reason
 */
@Override
public void doSubscribe(final URL url, final NotifyListener listener) {
    try {
        checkDestroyed();
        if (ANY_VALUE.equals(url.getServiceInterface())) {
            String root = toRootPath();
            boolean check = url.getParameter(CHECK_KEY, false);
            ConcurrentMap<NotifyListener, ChildListener> listeners =
                    ConcurrentHashMapUtils.computeIfAbsent(zkListeners, url, k -> new ConcurrentHashMap<>());
            ChildListener zkListener = ConcurrentHashMapUtils.computeIfAbsent(
                    listeners, listener, k -> (parentPath, currentChildren) -> {
                        for (String child : currentChildren) {
                            try {
                                child = URL.decode(child);
                                // Reject children that do not parse as JSON; logged, not fatal.
                                if (!(JsonUtils.checkJson(child))) {
                                    throw new Exception("dubbo-admin subscribe " + child + " failed,beacause "
                                            + child + "is root path in " + url);
                                }
                            } catch (Exception e) {
                                logger.warn(PROTOCOL_ERROR_DESERIALIZE, "", "", e.getMessage());
                            }
                            // Subscribe each newly appearing service exactly once.
                            if (!anyServices.contains(child)) {
                                anyServices.add(child);
                                subscribe(
                                        url.setPath(child)
                                                .addParameters(
                                                        INTERFACE_KEY,
                                                        child,
                                                        Constants.CHECK_KEY,
                                                        String.valueOf(check)),
                                        k);
                            }
                        }
                    });
            zkClient.create(root, false, true);
            // Subscribe to all services already present under the root.
            List<String> services = zkClient.addChildListener(root, zkListener);
            if (CollectionUtils.isNotEmpty(services)) {
                for (String service : services) {
                    service = URL.decode(service);
                    anyServices.add(service);
                    subscribe(
                            url.setPath(service)
                                    .addParameters(
                                            INTERFACE_KEY, service, Constants.CHECK_KEY, String.valueOf(check)),
                            listener);
                }
            }
        } else {
            CountDownLatch latch = new CountDownLatch(1);
            try {
                List<URL> urls = new ArrayList<>();
                /*
                 Iterate over the category value in URL.
                 With default settings, the path variable can be when url is a consumer URL:
                 /dubbo/[service name]/providers,
                 /dubbo/[service name]/configurators
                 /dubbo/[service name]/routers
                */
                for (String path : toCategoriesPath(url)) {
                    ConcurrentMap<NotifyListener, ChildListener> listeners = ConcurrentHashMapUtils.computeIfAbsent(
                            zkListeners, url, k -> new ConcurrentHashMap<>());
                    ChildListener zkListener = ConcurrentHashMapUtils.computeIfAbsent(
                            listeners, listener, k -> new RegistryChildListenerImpl(url, k, latch));
                    if (zkListener instanceof RegistryChildListenerImpl) {
                        ((RegistryChildListenerImpl) zkListener).setLatch(latch);
                    }
                    // create "directories".
                    zkClient.create(path, false, true);
                    // Add children (i.e. service items).
                    List<String> children = zkClient.addChildListener(path, zkListener);
                    if (children != null) {
                        // The invocation point that may cause 1-1.
                        urls.addAll(toUrlsWithEmpty(url, path, children));
                    }
                }
                notify(url, listener, urls);
            } finally {
                // tells the listener to run only after the sync notification of main thread finishes.
                latch.countDown();
            }
        }
    } catch (Throwable e) {
        throw new RpcException(
                "Failed to subscribe " + url + " to zookeeper " + getUrl() + ", cause: " + e.getMessage(), e);
    }
}
|
// Any underlying failure during subscription must surface as an RpcException.
@Test
void testDoSubscribeWithException() {
    Assertions.assertThrows(RpcException.class, () -> zookeeperRegistry.doSubscribe(anyUrl, listener));
}
|
/**
 * Persists the given cluster state (by name) at the compute-node state path.
 *
 * @param state cluster state to persist
 */
public void updateClusterState(final ClusterState state) {
    String nodePath = ComputeNode.getClusterStateNodePath();
    repository.persist(nodePath, state.name());
}
|
@Test
void assertUpdateClusterStateClusterStateWithoutPath() {
    // Persisting the OK state must write its enum name under the compute-node state path.
    StatePersistService service = new StatePersistService(repository);
    service.updateClusterState(ClusterState.OK);
    verify(repository).persist(ComputeNode.getClusterStateNodePath(), ClusterState.OK.name());
}
|
/**
 * Returns the configured application ID.
 *
 * @return the application ID from configuration
 * @throws IllegalArgumentException if no application ID is configured
 */
String getApplicationId() {
    return configuration.get(APPLICATION_ID).orElseThrow(() -> new IllegalArgumentException("Application ID is missing"));
}
|
// With no explicit setting, the application ID resolves to "sonarqube"
// (presumably the default seeded by the test's configuration — confirm in setup).
@Test
public void return_default_value_of_application_id() {
    assertThat(underTest.getApplicationId()).isEqualTo("sonarqube");
}
|
/**
 * Computes Total Daily Energy Expenditure: the BMR scaled by the activity-level multiplier.
 *
 * @param activeLevel activity level supplying the multiplier; {@code null} yields zero
 * @return the TDEE rounded to 2 decimal places (HALF_DOWN), or 0 for a null level
 */
public BigDecimal calculateTDEE(ActiveLevel activeLevel) {
    if (activeLevel == null) {
        // NOTE(review): returns 0 at scale 0, unlike the scale-2 result below — kept
        // as-is since BigDecimal.equals is scale-sensitive and callers may rely on it.
        return BigDecimal.ZERO;
    }
    BigDecimal multiplier = BigDecimal.valueOf(activeLevel.getMultiplayer());
    return multiplier.multiply(BMR).setScale(2, RoundingMode.HALF_DOWN);
}
|
// The LIGHTLY activity multiplier applied to the computed BMR must yield 2808.44
// (exact scale-2 BigDecimal — assertEquals is scale-sensitive).
@Test
void calculateTDEE_LIGHTLY_ACTIVE() {
    BigDecimal TDEE = bmrCalculator.calculate(attributes).calculateTDEE(ActiveLevel.LIGHTLY);
    assertEquals(new BigDecimal("2808.44"), TDEE);
}
|
/**
 * Resolves the function overload matching the given argument types: first an exact
 * match, then (when enabled) a match allowing implicit casts.
 *
 * @param arguments the argument types to match against
 * @return the matching function
 * @throws the exception produced by createNoMatchingFunctionException when nothing matches
 */
T getFunction(final List<SqlArgument> arguments) {
    // Exact match takes priority over any coerced match.
    final Optional<T> exact = findMatchingCandidate(arguments, false);
    if (exact.isPresent()) {
        return exact.get();
    }
    if (supportsImplicitCasts) {
        // Retry, this time allowing implicit casts of the arguments.
        final Optional<T> coerced = findMatchingCandidate(arguments, true);
        if (coerced.isPresent()) {
            return coerced.get();
        }
    }
    throw createNoMatchingFunctionException(arguments);
}
|
@Test
public void shouldChooseLaterVariadicWhenTwoVariadicsMatchReversedInsertionOrder() {
    // Given: two variadic candidates that both match the argument list below.
    // EXPECTED is registered second, pinning the tie-breaking order between variadics.
    givenFunctions(
        function(EXPECTED, 2, LONG, INT, STRING_VARARGS, DOUBLE),
        function(OTHER, 1, LONG, INT_VARARGS, STRING, DOUBLE)
    );
    // When: resolving with (BIGINT, INTEGER, STRING, DOUBLE), which either variadic can absorb.
    final KsqlScalarFunction fun = udfIndex.getFunction(ImmutableList.of(
        SqlArgument.of(SqlTypes.BIGINT),
        SqlArgument.of(SqlTypes.INTEGER),
        SqlArgument.of(SqlTypes.STRING),
        SqlArgument.of(SqlTypes.DOUBLE))
    );
    // Then: the later-registered variadic (EXPECTED) must win.
    assertThat(fun.name(), equalTo(EXPECTED));
}
|
/**
 * @return the application ID derived from this instance's application reference
 */
@Override
public ApplicationId applicationId() {
    return OrchestratorUtil.toApplicationId(applicationInstance.reference());
}
|
@Test
public void testApplicationId() {
    // An application instance with no service clusters still serializes to the
    // standard tenant:application:instance triple.
    final String expectedSerializedForm = "tenant:application-name:default";
    try (var api = modelUtils.createScopedApplicationApi(modelUtils.createApplicationInstance(new ArrayList<>()))) {
        final String actual = api.applicationApi().applicationId().serializedForm();
        assertEquals(expectedSerializedForm, actual);
    }
}
|
/**
 * Exposes the provider's current suppression rules (e.g. the suppressed device
 * types) so handlers and tests can inspect the configured suppression state.
 */
protected SuppressionRules rules() {
    return rules;
}
|
@Test
public void addDeviceTypeRule() {
    // Suppress ROADM devices only; SWITCH must remain unsuppressed.
    final Device.Type suppressedType = Device.Type.ROADM;
    final Device.Type unsuppressedType = Device.Type.SWITCH;
    final Set<Device.Type> deviceTypes = new HashSet<>();
    deviceTypes.add(suppressedType);
    cfg.deviceTypes(deviceTypes);
    configEvent(NetworkConfigEvent.Type.CONFIG_ADDED);
    assertTrue(provider.rules().getSuppressedDeviceType().contains(suppressedType));
    assertFalse(provider.rules().getSuppressedDeviceType().contains(unsuppressedType));
}
|
/**
 * Fetches session-window rows for the given key from the local state store
 * partition, constrained by the window start/end bounds.
 *
 * @param key the key to look up
 * @param partition the state store partition to query
 * @param windowStart bounds on the session window start time
 * @param windowEnd bounds on the session window end time
 * @param position optional position token (unused by this lookup path)
 * @return an iterator-backed query result over the matching rows
 */
@Override
public KsMaterializedQueryResult<WindowedRow> get(
    final GenericKey key,
    final int partition,
    final Range<Instant> windowStart,
    final Range<Instant> windowEnd,
    final Optional<Position> position
) {
  try {
    final ReadOnlySessionStore<GenericKey, GenericRow> sessionStore =
        stateStore.store(QueryableStoreTypes.sessionStore(), partition);
    return KsMaterializedQueryResult.rowIterator(
        findSession(sessionStore, key, windowStart, windowEnd).iterator());
  } catch (final Exception e) {
    // Any store failure surfaces uniformly as a materialization error.
    throw new MaterializationException("Failed to get value from materialized table", e);
  }
}
|
@Test
public void shouldIgnoreSessionsThatStartAtLowerBoundIfLowerBoundOpen() {
    // Given: an open lower bound — a session starting exactly at LOWER_INSTANT is excluded.
    final Range<Instant> startBounds = Range.openClosed(
        LOWER_INSTANT,
        UPPER_INSTANT
    );
    givenSingleSession(LOWER_INSTANT, LOWER_INSTANT.plusMillis(1));
    // When: querying with those start bounds and unconstrained end bounds.
    final Iterator<WindowedRow> rowIterator =
        table.get(A_KEY, PARTITION, startBounds, Range.all()).rowIterator;
    // Then: the boundary session must be filtered out, so no rows come back.
    assertThat(rowIterator.hasNext(), is(false));
}
|
/**
 * Parses a connector configuration file into a {@link CreateConnectorRequest},
 * trying three formats in order:
 * <ol>
 *   <li>a JSON object of flat String-to-String configs,</li>
 *   <li>a JSON representation of {@link CreateConnectorRequest} itself,</li>
 *   <li>a Java properties file.</li>
 * </ol>
 *
 * @param filePath path to the connector configuration file
 * @return the parsed request
 * @throws IOException if the file cannot be read
 * @throws ConnectException if the mandatory name config is missing, or the name in
 *         'config' contradicts the top-level 'name' field
 */
CreateConnectorRequest parseConnectorConfigurationFile(String filePath) throws IOException {
    ObjectMapper objectMapper = new ObjectMapper();
    File connectorConfigurationFile = Paths.get(filePath).toFile();

    // Attempt 1: flat JSON map of String configs.
    try {
        Map<String, String> connectorConfigs = objectMapper.readValue(
                connectorConfigurationFile,
                new TypeReference<Map<String, String>>() { });
        requireNameConfig(connectorConfigs, filePath);
        return new CreateConnectorRequest(connectorConfigs.get(NAME_CONFIG), connectorConfigs, null);
    } catch (StreamReadException | DatabindException e) {
        // Expected when the file uses another format; fall through to the next attempt.
        log.debug("Could not parse connector configuration file '{}' into a Map with String keys and values", filePath);
    }

    // Attempt 2: a full CreateConnectorRequest JSON body (unknown fields tolerated).
    try {
        objectMapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
        CreateConnectorRequest createConnectorRequest = objectMapper.readValue(connectorConfigurationFile,
                new TypeReference<CreateConnectorRequest>() { });
        if (createConnectorRequest.config().containsKey(NAME_CONFIG)) {
            // Both the top-level 'name' and a config-level name are present: they must agree.
            if (!createConnectorRequest.config().get(NAME_CONFIG).equals(createConnectorRequest.name())) {
                throw new ConnectException("Connector name configuration in 'config' doesn't match the one specified in 'name' at '" + filePath
                        + "'");
            }
        } else {
            // Mirror the top-level 'name' into the config map so downstream consumers see it.
            createConnectorRequest.config().put(NAME_CONFIG, createConnectorRequest.name());
        }
        return createConnectorRequest;
    } catch (StreamReadException | DatabindException e) {
        log.debug("Could not parse connector configuration file '{}' into an object of type {}",
                filePath, CreateConnectorRequest.class.getSimpleName());
    }

    // Attempt 3: classic Java properties format.
    Map<String, String> connectorConfigs = Utils.propsToStringMap(Utils.loadProps(filePath));
    requireNameConfig(connectorConfigs, filePath);
    return new CreateConnectorRequest(connectorConfigs.get(NAME_CONFIG), connectorConfigs, null);
}

/**
 * Fails with a descriptive ConnectException when the mandatory connector name
 * config is absent from the parsed configuration map.
 */
private static void requireNameConfig(Map<String, String> connectorConfigs, String filePath) {
    if (!connectorConfigs.containsKey(NAME_CONFIG)) {
        throw new ConnectException("Connector configuration at '" + filePath + "' is missing the mandatory '" + NAME_CONFIG + "' "
                + "configuration");
    }
}
|
@Test
public void testParseJsonFileWithCreateConnectorRequestWithoutInitialState() throws Exception {
    // Write a JSON body shaped like CreateConnectorRequest but with no initial-state field.
    final Map<String, Object> requestBody = new HashMap<>();
    requestBody.put("name", CONNECTOR_NAME);
    requestBody.put("config", CONNECTOR_CONFIG);
    try (FileWriter writer = new FileWriter(connectorConfigurationFile)) {
        writer.write(new ObjectMapper().writeValueAsString(requestBody));
    }

    final CreateConnectorRequest parsedRequest =
            connectStandalone.parseConnectorConfigurationFile(connectorConfigurationFile.getAbsolutePath());

    // The absent initial state must parse as null.
    assertEquals(new CreateConnectorRequest(CONNECTOR_NAME, CONNECTOR_CONFIG, null), parsedRequest);
}
|
/**
 * Collects and refreshes statistics data when the proxy metadata collector is
 * enabled via temporary configuration. Any failure is logged and swallowed so a
 * collection error never propagates to the caller.
 */
public void refresh() {
    try {
        if (contextManager.getMetaDataContexts().getMetaData().getTemporaryProps().getValue(TemporaryConfigurationPropertyKey.PROXY_META_DATA_COLLECTOR_ENABLED)) {
            collectAndRefresh();
        }
        // CHECKSTYLE:OFF
    } catch (final Exception ex) {
        // CHECKSTYLE:ON
        // Deliberate broad catch: collection is best-effort; log and continue.
        log.error("Collect data error", ex);
    }
}
|
@Test
void assertRefresh() {
    // Wire a context manager whose temporary props enable the metadata collector,
    // so refresh() takes the collect-and-persist path.
    ContextManager contextManager = mock(ContextManager.class, RETURNS_DEEP_STUBS);
    ShardingSphereStatistics statistics = mockStatistics();
    when(contextManager.getMetaDataContexts().getStatistics()).thenReturn(statistics);
    ShardingSphereMetaData metaData = mockMetaData();
    when(contextManager.getMetaDataContexts().getMetaData()).thenReturn(metaData);
    when(contextManager.getMetaDataContexts().getMetaData().getProps()).thenReturn(new ConfigurationProperties(new Properties()));
    when(contextManager.getMetaDataContexts().getMetaData().getTemporaryProps()).thenReturn(new TemporaryConfigurationProperties(
            PropertiesBuilder.build(new Property(TemporaryConfigurationPropertyKey.PROXY_META_DATA_COLLECTOR_ENABLED.getKey(), Boolean.TRUE.toString()))));
    // The refresh engine acquires a global lock before persisting; grant it.
    GlobalLockContext globalLockContext = mock(GlobalLockContext.class);
    when(globalLockContext.tryLock(any(GlobalLockDefinition.class), anyLong())).thenReturn(true);
    new ShardingSphereStatisticsRefreshEngine(contextManager, globalLockContext).refresh();
    // The altered statistics must be pushed through the persist service.
    verify(contextManager.getPersistServiceFacade().getMetaDataPersistService().getShardingSphereDataPersistService()).update(any(AlteredShardingSphereDatabaseData.class));
}
|
/**
 * Returns the KsqlObject at the given 1-based column index by delegating to the
 * underlying 0-based values container.
 */
@Override
public KsqlObject getKsqlObject(final int columnIndex) {
    final int zeroBasedIndex = columnIndex - 1;
    return values.getKsqlObject(zeroBasedIndex);
}
|
@Test
public void shouldGetKsqlObject() {
    // Both MAP and STRUCT columns are surfaced as KsqlObject values.
    assertThat(row.getKsqlObject("f_map"), is(new KsqlObject(ImmutableMap.of("k1", "v1", "k2", "v2"))));
    assertThat(row.getKsqlObject("f_struct"), is(new KsqlObject(ImmutableMap.of("f1", "baz", "f2", 12))));
}
|
/**
 * Resolves the Assertion Consumer Service (ACS) URL for the authentication request.
 * Resolution order:
 * 1. a URL carried directly in the AuthnRequest, if present;
 * 2. otherwise, the ACS endpoints from the connection metadata — selected by the
 *    request's ACS index, by being the sole endpoint, or by the default flag.
 *
 * @param authenticationRequest request whose ACS URL is resolved in place
 * @throws SamlValidationException when metadata has no ACS endpoints, the requested
 *         index is out of bounds, or no default endpoint exists when none is indexed
 */
public void resolveAssertionConsumerService(AuthenticationRequest authenticationRequest) throws SamlValidationException {
    // A URL set directly on the AuthnRequest takes precedence over metadata.
    final String authnAcsURL = authenticationRequest.getAuthnRequest().getAssertionConsumerServiceURL();
    if (authnAcsURL != null) {
        authenticationRequest.setAssertionConsumerURL(authnAcsURL);
        return;
    }
    // Otherwise search the ACS endpoints declared in the connection's metadata.
    final Integer authnAcsIdx = authenticationRequest.getAuthnRequest().getAssertionConsumerServiceIndex();
    List<Endpoint> endpoints = authenticationRequest.getConnectionEntity().getRoleDescriptors().get(0).getEndpoints(AssertionConsumerService.DEFAULT_ELEMENT_NAME);
    if (endpoints.isEmpty()) {
        throw new SamlValidationException("Authentication: Assertion Consumer Service not found in metadata");
    }
    if (authnAcsIdx != null && endpoints.size() <= authnAcsIdx) {
        throw new SamlValidationException("Authentication: Assertion Consumer Index is out of bounds");
    }
    // TODO: check if this statement is correct — a sole endpoint is used unconditionally,
    // even when the request carries an explicit (valid) index.
    if (endpoints.size() == 1) {
        authenticationRequest.setAssertionConsumerURL(endpoints.get(0).getLocation());
        return;
    }
    if (authnAcsIdx == null) {
        // No index supplied: fall back to the endpoint flagged as default in metadata.
        AssertionConsumerService defaultAcs = endpoints.stream()
            .filter(e -> e instanceof AssertionConsumerService)
            .map(acs -> (AssertionConsumerService) acs)
            .filter(IndexedEndpoint::isDefault)
            .findAny()
            .orElse(null);
        if (defaultAcs == null) {
            throw new SamlValidationException("Authentication: There is no default AssertionConsumerService");
        }
        authenticationRequest.setAssertionConsumerURL(defaultAcs.getLocation());
        return;
    }
    // NOTE(review): the index selects by list position, not by the endpoint's declared
    // 'index' attribute — confirm this matches the metadata ordering assumption.
    authenticationRequest.setAssertionConsumerURL(endpoints.get(authnAcsIdx).getLocation());
}
|
@Test
void resolveAcsUrlWithoutIndexInMultiAcsMetadata() throws SamlValidationException {
    // The AuthnRequest carries neither an ACS URL nor an index, and the metadata
    // holds multiple ACS endpoints — resolution must pick the default-flagged one.
    AuthnRequest authnRequest = OpenSAMLUtils.buildSAMLObject(AuthnRequest.class);
    AuthenticationRequest authenticationRequest = new AuthenticationRequest();
    authenticationRequest.setAuthnRequest(authnRequest);
    authenticationRequest.setConnectionEntity(MetadataParser.readMetadata(stubsMultiAcsMetadataFile, CONNECTION_ENTITY_ID));
    assertionConsumerServiceUrlService.resolveAssertionConsumerService(authenticationRequest);
    // NOTE(review): the expected value is the stub metadata's placeholder location string.
    assertEquals("SSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSS", authenticationRequest.getAssertionConsumerURL());
}
|
/**
 * Returns a Version for the given major/minor pair, reusing the shared UNKNOWN
 * instance when both components equal UNKNOWN_VERSION.
 */
public static Version of(int major, int minor) {
    final boolean isUnknown = major == UNKNOWN_VERSION && minor == UNKNOWN_VERSION;
    return isUnknown ? UNKNOWN : new Version(major, minor);
}
|
// Requires JVM assertions (-ea): constructing with a negative minor component is
// expected to trip an assert inside Version and raise AssertionError.
@Test(expected = AssertionError.class)
@RequireAssertEnabled
public void construct_withNegativeMinor() {
    Version.of(1, -1);
}
|
/**
 * @return the operator context this operator was created with
 */
@Override
public OperatorContext getOperatorContext()
{
    return operatorContext;
}
|
@Test(dataProvider = "hashEnabledAndMemoryLimitForMergeValues")
public void testHashAggregationMemoryReservation(boolean hashEnabled, boolean spillEnabled, boolean revokeMemoryWhenAddingPages, long memoryLimitForMerge, long memoryLimitForMergeWithMemory)
{
    // Aggregate with array_agg over three sequence pages grouped on channel 1.
    JavaAggregationFunctionImplementation arrayAggColumn = getAggregation("array_agg", BIGINT);
    List<Integer> hashChannels = Ints.asList(1);
    RowPagesBuilder rowPagesBuilder = rowPagesBuilder(hashEnabled, hashChannels, BIGINT, BIGINT);
    List<Page> input = rowPagesBuilder
            .addSequencePage(10, 100, 0)
            .addSequencePage(10, 200, 0)
            .addSequencePage(10, 300, 0)
            .build();
    DriverContext driverContext = createTaskContext(executor, scheduledExecutor, TEST_SESSION, new DataSize(10, Unit.MEGABYTE))
            .addPipelineContext(0, true, true, false)
            .addDriverContext();
    // Build a single-step hash aggregation operator with the data-provider's
    // spill/memory-limit combination under test.
    HashAggregationOperatorFactory operatorFactory = new HashAggregationOperatorFactory(
            0,
            new PlanNodeId("test"),
            ImmutableList.of(BIGINT),
            hashChannels,
            ImmutableList.of(),
            ImmutableList.of(),
            Step.SINGLE,
            true,
            ImmutableList.of(generateAccumulatorFactory(arrayAggColumn, ImmutableList.of(0), Optional.empty())),
            rowPagesBuilder.getHashChannel(),
            Optional.empty(),
            100_000,
            Optional.of(new DataSize(16, MEGABYTE)),
            spillEnabled,
            Optional.empty(),
            succinctBytes(memoryLimitForMerge),
            succinctBytes(memoryLimitForMergeWithMemory),
            spillerFactory,
            joinCompiler,
            false);
    Operator operator = operatorFactory.createOperator(driverContext);
    toPages(operator, input.iterator(), revokeMemoryWhenAddingPages);
    // After the operator drains all input, every byte of user memory must be released.
    assertEquals(operator.getOperatorContext().getOperatorStats().getUserMemoryReservation().toBytes(), 0);
}
|
/**
 * Returns a fresh empty list on every call. NOTE(review): this implementation
 * apparently does not track ignored view types at all — the companion test expects
 * size 0 even after registering a type — confirm this no-op behavior is intended.
 * Returning a new list also shields internal state from caller mutation.
 */
@Override
public List<Class<?>> getIgnoredViewTypeList() {
    return new ArrayList<>();
}
|
@Test
public void getIgnoredViewTypeList() {
    // Even after registering Button as ignored, this implementation reports an
    // empty ignored-view-type list.
    mSensorsAPI.ignoreViewType(Button.class);
    Assert.assertEquals(0, mSensorsAPI.getIgnoredViewTypeList().size());
}
|
/**
 * Loads a tenant package by primary key, delegating directly to the mapper.
 *
 * @param id tenant package ID
 * @return the matching record, or null if none exists
 */
@Override
public TenantPackageDO getTenantPackage(Long id) {
    return tenantPackageMapper.selectById(id);
}
|
@Test
public void testGetTenantPackage() {
    // Mock data.
    TenantPackageDO dbTenantPackage = randomPojo(TenantPackageDO.class);
    tenantPackageMapper.insert(dbTenantPackage); // @Sql: insert an existing row first
    // Invoke.
    TenantPackageDO result = tenantPackageService.getTenantPackage(dbTenantPackage.getId());
    // Assert: the loaded record matches the inserted one field by field.
    assertPojoEquals(result, dbTenantPackage);
}
|
/**
 * Streams the Artifactory search response and converts each file entry into a
 * MavenArtifact, verifying the dependency's hashes against each entry's checksums.
 *
 * @param dependency the dependency being resolved (supplies expected hashes)
 * @param conn an open connection whose body is the Artifactory JSON response
 * @return the artifacts parsed from the response; never empty
 * @throws IOException on read failures
 * @throws FileNotFoundException when the response contains no results
 * @throws IllegalStateException when a result path does not match PATH_PATTERN
 */
protected List<MavenArtifact> processResponse(Dependency dependency, HttpURLConnection conn) throws IOException {
    final List<MavenArtifact> result = new ArrayList<>();
    try (InputStreamReader streamReader = new InputStreamReader(conn.getInputStream(), StandardCharsets.UTF_8);
            JsonParser parser = objectReader.getFactory().createParser(streamReader)) {
        if (init(parser) && parser.nextToken() == com.fasterxml.jackson.core.JsonToken.START_OBJECT) {
            // at least one result; iterate file objects until the array ends
            do {
                final FileImpl file = objectReader.readValue(parser);
                // Reject entries whose checksums contradict the dependency's known hashes.
                checkHashes(dependency, file.getChecksums());
                final Matcher pathMatcher = PATH_PATTERN.matcher(file.getPath());
                if (!pathMatcher.matches()) {
                    throw new IllegalStateException("Cannot extract the Maven information from the path "
                            + "retrieved in Artifactory " + file.getPath());
                }
                // Repository paths use '/' between groupId segments; GAV uses '.'.
                final String groupId = pathMatcher.group("groupId").replace('/', '.');
                final String artifactId = pathMatcher.group("artifactId");
                final String version = pathMatcher.group("version");
                result.add(new MavenArtifact(groupId, artifactId, version, file.getDownloadUri(),
                        MavenArtifact.derivePomUrl(artifactId, version, file.getDownloadUri())));
            } while (parser.nextToken() == com.fasterxml.jackson.core.JsonToken.START_OBJECT);
        } else {
            throw new FileNotFoundException("Artifact " + dependency + " not found in Artifactory");
        }
    }
    return result;
}
|
@Test
public void shouldThrowExceptionWhenPatternCannotBeParsed() throws IOException {
    // Given: checksums that match the payload, but an artifact path stripped of its
    // groupId/artifactId segments so PATH_PATTERN cannot parse it.
    Dependency dependency = new Dependency();
    dependency.setSha1sum("c5b4c491aecb72e7c32a78da0b5c6b9cda8dee0f");
    dependency.setSha256sum("512b4bf6927f4864acc419b8c5109c23361c30ed1f5798170248d33040de068e");
    dependency.setMd5sum("2d1dd0fc21ee96bccfab4353d5379649");
    final HttpURLConnection urlConnection = mock(HttpURLConnection.class);
    final byte[] payload = payloadWithSha256().replace("/com/google/code/gson/gson/2.8.5/gson-2.8.5-sources.jar", "/2.8.5/gson-2.8.5-sources.jar").getBytes(StandardCharsets.UTF_8);
    when(urlConnection.getInputStream()).thenReturn(new ByteArrayInputStream(payload));
    // When
    try {
        searcher.processResponse(dependency, urlConnection);
        // Fixed: the failure message previously referred to a SHA256 mismatch (copied
        // from another test); this test exercises an unparseable artifact path.
        fail("An unparseable artifact path should throw an exception!");
    } catch (IllegalStateException e) {
        // Then
        assertEquals("Cannot extract the Maven information from the path retrieved in Artifactory /2.8.5/gson-2.8.5-sources.jar", e.getMessage());
    }
}
|
/**
 * Null-safe string conversion.
 *
 * @param o the object to convert; may be null
 * @return {@code o.toString()}, or null when {@code o} is null
 */
public static String toString(Object o) {
    return o == null ? null : o.toString();
}
|
@Test
public void testToString() {
    // Null maps to null; every non-null value maps to its toString() form.
    Assertions.assertNull(Utils.toString(null));
    Assertions.assertEquals("", Utils.toString(""));
    Assertions.assertEquals("foo", Utils.toString("foo"));
    Assertions.assertEquals("123", Utils.toString(123));
}
|
/**
 * Formats the deepest cause of the given throwable, falling back to the throwable
 * itself when no distinct root cause is found.
 *
 * @param t the throwable to describe
 * @return the formatted message of the root cause (or of {@code t} itself)
 */
public static String getRootCauseOrMessage(Throwable t) {
    final Throwable rootCause = getRootCause(t, true);
    final Throwable source = rootCause == null ? t : rootCause;
    return formatMessageCause(source);
}
|
@Test
public void getRootCauseOrMessage() {
    // A nested cause with a message: the root's message wins (formatted with a period).
    assertThat(ExceptionUtils.getRootCauseOrMessage(new Exception("cause1", new Exception("root")))).satisfies(m -> {
        assertThat(m).isNotBlank();
        assertThat(m).isEqualTo("root.");
    });
    // No cause at all: fall back to the top-level exception's own message.
    assertThat(ExceptionUtils.getRootCauseOrMessage(new Exception("cause1"))).satisfies(m -> {
        assertThat(m).isNotBlank();
        assertThat(m).isEqualTo("cause1.");
    });
    // A cause with an empty message: skip it and use the outer message instead.
    assertThat(ExceptionUtils.getRootCauseOrMessage(new Exception("cause1", new Exception("")))).satisfies(m -> {
        assertThat(m).isNotBlank();
        assertThat(m).isEqualTo("cause1.");
    });
}
|
/**
 * Demonstrates the factory-method pattern: each blacksmith manufactures the same
 * weapon types, yet produces smith-specific weapons. Log order is orc spear,
 * orc axe, elf spear, elf axe — identical to the original sequence.
 */
public static void main(String[] args) {
    final Blacksmith[] smiths = {new OrcBlacksmith(), new ElfBlacksmith()};
    final WeaponType[] weaponTypes = {WeaponType.SPEAR, WeaponType.AXE};
    for (final Blacksmith smith : smiths) {
        for (final WeaponType weaponType : weaponTypes) {
            final Weapon weapon = smith.manufactureWeapon(weaponType);
            LOGGER.info(MANUFACTURED, smith, weapon);
        }
    }
}
|
// Smoke test: the demo's main flow must complete without throwing.
@Test
void shouldExecuteWithoutException() {
    assertDoesNotThrow(() -> App.main(new String[]{}));
}
|
/**
 * Serves wine to the given royalty by triggering their drink action.
 *
 * @param r the royalty being served
 */
public void giveWine(Royalty r) {
    r.getDrink();
}
|
@Test
void testGiveWine() {
    // giveWine must delegate straight to Royalty.getDrink() and nothing else.
    final Royalty king = mock(Royalty.class);
    new Servant("test").giveWine(king);
    verify(king).getDrink();
    verifyNoMoreInteractions(king);
}
|
/**
 * Records a compatibility message for an encountered empty struct and flags the
 * report as containing at least one empty struct.
 *
 * @param message the human-readable description of where the empty struct occurred
 */
public void emptyStruct(String message) {
    messages.add(message);
    hasEmptyStruct = true;
}
|
@Test
public void testEmptyStruct() {
    // Comparing a schema containing empty structs with itself must record one
    // message per empty struct field and set the hasEmptyStruct flag.
    CompatibilityReport report = getCompatibilityReport(NestedEmptyStruct.class, NestedEmptyStruct.class);
    assertEquals(
        "encountered an empty struct: required_empty\nencountered an empty struct: optional_empty",
        report.prettyMessages());
    assertTrue(report.hasEmptyStruct());
}
|
/**
 * Builds the CI configuration for this provider, reading the commit revision from
 * the DRONE_COMMIT_SHA environment variable.
 */
@Override
public CiConfiguration loadConfiguration() {
    final String commitSha = system.envVariable("DRONE_COMMIT_SHA");
    return new CiConfigurationImpl(commitSha, getName());
}
|
@Test
public void loadConfiguration() {
    // Simulate a Drone CI environment; the SCM revision must come from DRONE_COMMIT_SHA.
    setEnvVariable("CI", "true");
    setEnvVariable("DRONE", "true");
    setEnvVariable("DRONE_COMMIT_SHA", "abd12fc");
    assertThat(underTest.loadConfiguration().getScmRevision()).hasValue("abd12fc");
}
|
/**
 * Reads a single JSON object from the given reader and deserializes it into a Node.
 *
 * @param reader source of the serialized node
 * @return the reconstructed Node
 */
public Node deserializeObject(JsonReader reader) {
    Log.info("Deserializing JSON to Node.");
    return deserializeObject(reader.readObject());
}
|
@Test
void testAttachingSymbolResolver() {
    // A do-nothing resolver: only its identity matters for this test.
    SymbolResolver stubResolver = new SymbolResolver() {
        @Override
        public <T> T resolveDeclaration(Node node, Class<T> resultClass) {
            return null;
        }
        @Override
        public <T> T toResolvedType(Type javaparserType, Class<T> resultClass) {
            return null;
        }
        @Override
        public ResolvedType calculateType(Expression expression) {
            return null;
        }
        @Override
        public ResolvedReferenceTypeDeclaration toTypeDeclaration(Node node) {
            return null;
        }
    };
    StaticJavaParser.getConfiguration().setSymbolResolver(stubResolver);
    CompilationUnit cu = parse("public class X{} class Z{}");
    // Round-trip through serialization; the deserialized unit must carry the
    // configured resolver in its node data, and it must be the same instance.
    String serialized = serialize(cu, false);
    CompilationUnit deserialized =
        (CompilationUnit) deserializer.deserializeObject(Json.createReader(new StringReader(serialized)));
    assertTrue(deserialized.containsData(Node.SYMBOL_RESOLVER_KEY));
    assertEquals(stubResolver, deserialized.getData(Node.SYMBOL_RESOLVER_KEY));
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.