focal_method
stringlengths 13
60.9k
| test_case
stringlengths 25
109k
|
|---|---|
/**
 * Renders the given expression as generated Java source code.
 *
 * @param expression the expression to render
 * @return the Java code produced by {@code formatExpression}
 */
public String process(final Expression expression) {
    final String formatted = formatExpression(expression);
    return formatted;
}
|
// Verifies that a string >= timestamp comparison is compiled into a
// SqlTimeTypes.parseTimestamp(...).compareTo(...) >= 0 Java expression.
@Test
public void shouldGenerateCorrectCodeForTimestampStringGEQ() {
// Given:
final ComparisonExpression compExp = new ComparisonExpression(
Type.GREATER_THAN_OR_EQUAL,
new StringLiteral("2020-01-01T00:00:00"),
TIMESTAMPCOL
);
// When:
final String java = sqlToJavaVisitor.process(compExp);
// Then:
assertThat(java, containsString("(SqlTimeTypes.parseTimestamp(\"2020-01-01T00:00:00\").compareTo(((java.sql.Timestamp) arguments.get(\"COL10\"))) >= 0)"));
}
|
/**
 * Deletes the given files, batching node ids per parent directory and routing
 * already-trashed (duplicate) files to the deleted-nodes API while regular files
 * go to the node removal API. Notifies the callback and invalidates the node id
 * cache for every file before issuing the batched requests.
 */
@Override
public void delete(final Map<Path, TransferStatus> files, final PasswordCallback prompt, final Callback callback) throws BackgroundException {
// Node ids grouped by parent; trashed files are handled by a different endpoint.
final Map<Path, List<Long>> regular = new HashMap<>();
final Map<Path, List<Long>> trashed = new HashMap<>();
for(Path file : files.keySet()) {
final Map<Path, List<Long>> set = file.attributes().isDuplicate() ? trashed : regular;
if(set.containsKey(file.getParent())) {
set.get(file.getParent()).add(Long.parseLong(nodeid.getVersionId(file)));
}
else {
final List<Long> nodes = new ArrayList<>();
nodes.add(Long.parseLong(nodeid.getVersionId(file)));
set.put(file.getParent(), nodes);
}
callback.delete(file);
// Drop the cached node id — the node is about to disappear server-side.
nodeid.cache(file, null);
}
for(List<Long> nodes : regular.values()) {
try {
new NodesApi(session.getClient()).removeNodes(new DeleteNodesRequest().nodeIds(nodes), StringUtils.EMPTY);
}
catch(ApiException e) {
switch(e.getCode()) {
case 400:
// NOTE(review): on a 400 the whole request set is retried through
// SDSDeleteFeature with the original files map — confirm this cannot loop
// if that feature delegates back here.
log.warn(String.format("Ignore failure %s", e));
new SDSDeleteFeature(session, nodeid).delete(files, prompt, callback);
break;
default:
throw new SDSExceptionMappingService(nodeid).map("Cannot delete {0}", e, files.keySet().iterator().next());
}
}
}
// Trashed nodes are purged through the deleted-nodes endpoint; failures are fatal here.
for(List<Long> nodes : trashed.values()) {
try {
new NodesApi(session.getClient()).removeDeletedNodes(new DeleteDeletedNodesRequest().deletedNodeIds(nodes), StringUtils.EMPTY);
}
catch(ApiException e) {
throw new SDSExceptionMappingService(nodeid).map("Cannot delete {0}", e, files.keySet().iterator().next());
}
}
}
|
// Integration test: batch delete removes a non-empty folder (room > folder > file),
// then removes the containing room itself.
@Test
public void testDeleteFolderRoomWithContent() throws Exception {
final SDSNodeIdProvider nodeid = new SDSNodeIdProvider(session);
final Path room = new SDSDirectoryFeature(session, nodeid).mkdir(new Path(
new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.directory, Path.Type.volume)), new TransferStatus());
final Path folder = new SDSDirectoryFeature(session, nodeid).mkdir(new Path(room,
new AlphanumericRandomStringService().random().toLowerCase(), EnumSet.of(Path.Type.directory)), new TransferStatus());
assertTrue(new SDSFindFeature(session, nodeid).find(folder));
final Path file = new Path(folder, new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.file));
new SDSTouchFeature(session, nodeid).touch(file, new TransferStatus());
assertTrue(new SDSFindFeature(session, nodeid).find(file));
new SDSBatchDeleteFeature(session, nodeid).delete(Collections.singletonList(folder), new DisabledLoginCallback(), new Delete.DisabledCallback());
assertFalse(new SDSFindFeature(session, nodeid).find(folder));
new SDSBatchDeleteFeature(session, nodeid).delete(Collections.singletonList(room), new DisabledLoginCallback(), new Delete.DisabledCallback());
assertFalse(new SDSFindFeature(session, nodeid).find(room));
}
|
/**
 * Registers {@code clientId} as a publisher of {@code service} and broadcasts a
 * ServiceChangedEvent so interested subscribers are notified.
 */
private void addPublisherIndexes(Service service, String clientId) {
    publisherIndexes
            .computeIfAbsent(service, unused -> new ConcurrentHashSet<>())
            .add(clientId);
    // NOTE(review): the boolean flag is presumably "incremental change" — confirm against ServiceChangedEvent.
    NotifyCenter.publishEvent(new ServiceEvent.ServiceChangedEvent(service, true));
}
|
// Invokes the private addPublisherIndexes via reflection and checks the client
// registration count. The expected size of 2 presumably includes a client
// registered by the test fixture setup — verify against the @BeforeEach.
@Test
void testAddPublisherIndexes() throws NoSuchMethodException, InvocationTargetException, IllegalAccessException {
String clientId = "clientId";
Class<ClientServiceIndexesManager> clientServiceIndexesManagerClass = ClientServiceIndexesManager.class;
Method addPublisherIndexes = clientServiceIndexesManagerClass.getDeclaredMethod("addPublisherIndexes", Service.class, String.class);
addPublisherIndexes.setAccessible(true);
addPublisherIndexes.invoke(clientServiceIndexesManager, service, clientId);
Collection<String> allClientsSubscribeService = clientServiceIndexesManager.getAllClientsRegisteredService(service);
assertNotNull(allClientsSubscribeService);
assertEquals(2, allClientsSubscribeService.size());
}
|
/**
 * Converts a SQL LIKE pattern into an anchored regular-expression pattern.
 * A leading/trailing '%' removes the '^'/'$' anchor on that side; a pattern made
 * entirely of '%' matches everything and yields "". Escaping of regex
 * metacharacters and inner wildcards is delegated to escapeMetaCharsAndWildcards.
 */
public static String likeToRegexpLike(String likePattern) {
int start = 0;
int end = likePattern.length();
String prefix = "^";
String suffix = "$";
switch (likePattern.length()) {
case 0:
return "^$";
case 1:
if (likePattern.charAt(0) == '%') {
return "";
}
break;
default:
if (likePattern.charAt(0) == '%') {
// Skip the run of leading '%'; -1 means the whole pattern is '%'s.
start = indexOfFirstDifferent(likePattern, '%');
if (start == -1) {
return "";
}
prefix = "";
}
if (likePattern.charAt(likePattern.length() - 1) == '%') {
// Find the last non-'%' character; end is exclusive, hence the increment.
end = indexOfLastDifferent(likePattern, '%');
if (end == -1) { //this should never happen, but for clarity
return "";
}
end++;
suffix = "";
}
break;
}
// Strip the wildcard runs, then escape what remains between the chosen anchors.
likePattern = likePattern.substring(start, end);
return escapeMetaCharsAndWildcards(likePattern, prefix, suffix);
}
|
// A run of leading '%' wildcards drops the '^' anchor, leaving only the trailing
// literal anchored at the end.
@Test
public void testLeadingRepeatedWildcards() {
    String regexpLikePattern = RegexpPatternConverterUtils.likeToRegexpLike("%%%%%%%%%%%%%zz");
    // JUnit's assertEquals takes (expected, actual); the original call had the
    // arguments reversed, which produces a misleading failure message.
    assertEquals("zz$", regexpLikePattern);
}
|
/**
 * Convenience overload delegating to {@link #getClassPath(boolean)} with the
 * flag set to {@code false} (presumably "do not decode the path" — confirm in
 * the overload's documentation).
 */
public static String getClassPath() {
    final boolean decode = false;
    return getClassPath(decode);
}
|
// Smoke test: the zero-arg overload must resolve a non-null class path.
@Test
public void getClassPathTest() {
String classPath = ClassUtil.getClassPath();
assertNotNull(classPath);
}
|
/**
 * REST GET handler returning the alarm with the given id as JSON under the
 * "alarm" key.
 *
 * @param id alarm identifier taken from the URL path
 * @return 200 OK with a {"alarm": {...}} JSON body
 */
@GET
@Path("{id}")
@Produces(MediaType.APPLICATION_JSON)
public Response getAlarm(@PathParam("id") String id) {
log.debug("HTTP GET alarm for id={}", id);
AlarmId alarmId = AlarmId.alarmId(id);
Alarm alarm = get(AlarmService.class).getAlarm(alarmId);
ObjectNode result = new ObjectMapper().createObjectNode();
result.set("alarm", new AlarmCodec().encode(alarm, this));
return ok(result.toString()).build();
}
|
// Currently @Ignore'd: checks that GET /alarms/1 returns the expected hard-coded
// alarm description and not an unrelated one.
@Test
@Ignore
public void getAlarm() {
WebTarget wt = target();
String response = wt.path("/alarms/1").request().get(String.class);
// Ensure hard-coded alarms returned okay
assertThat(response, containsString("\"NE is not reachable\","));
assertThat(response, not(containsString("\"Equipment Missing\",")));
}
|
/**
 * Builds an InetSocketAddress from a "host:port" style target string, passing -1
 * as the default port for targets that do not specify one.
 */
public static InetSocketAddress createSocketAddr(String target) {
    final int noDefaultPort = -1;
    return createSocketAddr(target, noDefaultPort);
}
|
// Covers the three-arg overload: explicit port wins, the default port is applied
// when absent, and an unparsable port fails with the config name in the message.
@Test
public void testCreateSocketAddress() throws Throwable {
InetSocketAddress addr = NetUtils.createSocketAddr(
"127.0.0.1:12345", 1000, "myconfig");
assertEquals("127.0.0.1", addr.getAddress().getHostAddress());
assertEquals(12345, addr.getPort());
addr = NetUtils.createSocketAddr(
"127.0.0.1", 1000, "myconfig");
assertEquals("127.0.0.1", addr.getAddress().getHostAddress());
assertEquals(1000, addr.getPort());
try {
NetUtils.createSocketAddr(
"127.0.0.1:blahblah", 1000, "myconfig");
fail("Should have failed to parse bad port");
} catch (IllegalArgumentException iae) {
assertInException(iae, "myconfig");
}
}
|
/**
 * Builds the CreateStreamCommand that describes the sink stream backing the
 * given output node (schema, topic, formats, windowing and replace semantics).
 */
public CreateStreamCommand createStreamCommand(final KsqlStructuredDataOutputNode outputNode) {
    final KsqlTopic sinkTopic = outputNode.getKsqlTopic();
    return new CreateStreamCommand(
        outputNode.getSinkName().get(),
        outputNode.getSchema(),
        outputNode.getTimestampColumn(),
        sinkTopic.getKafkaTopicName(),
        Formats.from(sinkTopic),
        sinkTopic.getKeyFormat().getWindowInfo(),
        Optional.of(outputNode.getOrReplace()),
        Optional.of(false)
    );
}
|
// A KEY-constrained column named "k" must be accepted when building the stream
// command; the test passes if no exception is thrown.
@Test
public void shouldNotThrowOnRowKeyKeyColumn() {
// Given:
final CreateStream statement = new CreateStream(
SOME_NAME,
TableElements.of(tableElement("k", new Type(SqlTypes.STRING), KEY_CONSTRAINT)),
false,
true,
withProperties,
false
);
// When:
createSourceFactory.createStreamCommand(statement, ksqlConfig);
// Then: did not throw
}
|
/**
 * Resets the writer for a new encoding run: clears inherited state via the
 * parent, then re-arms the per-block minimum-delta tracker.
 */
@Override
public void reset() {
super.reset();
this.minDeltaInCurrentBlock = Integer.MAX_VALUE;
}
|
// After a prior write/read cycle, reset() must leave the writer able to encode
// a fresh data set correctly.
@Test
public void shouldReset() throws IOException {
shouldReadWriteWhenDataIsNotAlignedWithBlock();
int[] data = new int[5 * blockSize];
for (int i = 0; i < blockSize * 5; i++) {
data[i] = i * 2;
}
writer.reset();
shouldWriteAndRead(data);
}
|
/**
 * Pulls messages from one queue, restricting the offset range by the optional
 * begin/end timestamps and capping the number of pulled messages at
 * {@code messageCount}.
 *
 * @param brokerName     broker hosting the queue
 * @param queueId        queue id as a decimal string
 * @param offset         minimum offset requested by the caller
 * @param timeValueBegin when > 0, narrows the lower bound to this timestamp
 * @param timeValueEnd   when > 0, narrows the upper bound to this timestamp
 */
private void executeByCondition(String brokerName, String queueId, long offset, long timeValueBegin, long timeValueEnd) {
    MessageQueue mq = new MessageQueue(topic, brokerName, Integer.parseInt(queueId));
    try {
        long minOffset = defaultMQPullConsumer.minOffset(mq);
        long maxOffset = defaultMQPullConsumer.maxOffset(mq);
        if (timeValueBegin > 0) {
            minOffset = defaultMQPullConsumer.searchOffset(mq, timeValueBegin);
        }
        if (timeValueEnd > 0) {
            maxOffset = defaultMQPullConsumer.searchOffset(mq, timeValueEnd);
        }
        if (offset > maxOffset) {
            System.out.printf("%s no matched msg, offset=%s\n", mq, offset);
            return;
        }
        // Start no earlier than the caller-requested offset.
        minOffset = Math.max(minOffset, offset);
        if (maxOffset - minOffset > messageCount) {
            // Fixed user-facing typo ("oldler" -> "older").
            System.out.printf("The older %d message will be provided\n", messageCount);
            maxOffset = minOffset + messageCount - 1;
        }
        pullMessageByQueue(mq, minOffset, maxOffset);
    } catch (Exception e) {
        // NOTE(review): consider routing through a logger instead of stderr.
        e.printStackTrace();
    }
}
|
// Captures stdout while the consume command runs against a stubbed pull result
// and asserts the success marker is printed; restores System.out afterwards.
@Test
public void testExecuteByCondition() throws SubCommandException {
PrintStream out = System.out;
ByteArrayOutputStream bos = new ByteArrayOutputStream();
System.setOut(new PrintStream(bos));
Options options = ServerUtil.buildCommandlineOptions(new Options());
String[] subargs = new String[] {"-t mytopic", "-b localhost", "-i 0", "-n localhost:9876"};
CommandLine commandLine = ServerUtil.parseCmdLine("mqadmin " + consumeMessageCommand.commandName(),
subargs, consumeMessageCommand.buildCommandlineOptions(options), new DefaultParser());
assignPullResult();
consumeMessageCommand.execute(commandLine, options, null);
System.setOut(out);
String s = new String(bos.toByteArray(), StandardCharsets.UTF_8);
Assert.assertTrue(s.contains("Consume ok"));
}
|
/**
 * Creates a KsqlTarget for the given server URI with no additional request
 * properties.
 */
public KsqlTarget target(final URI server) {
    // Delegate with an empty property map.
    return this.target(server, Collections.emptyMap());
}
|
// getServerInfo() must issue GET /info with an empty body, a JSON Accept header,
// and surface the server's ServerInfo payload unchanged.
@Test
public void shouldRequestServerInfo() {
// Given:
ServerInfo expectedResponse = new ServerInfo("someversion",
"kafkaclusterid", "ksqlserviceid", "status");
server.setResponseObject(expectedResponse);
// When:
KsqlTarget target = ksqlClient.target(serverUri);
RestResponse<ServerInfo> response = target.getServerInfo();
// Then:
assertThat(server.getHttpMethod(), is(HttpMethod.GET));
assertThat(server.getBody(), nullValue());
assertThat(server.getPath(), is("/info"));
assertThat(server.getHeaders().get("Accept"), is("application/json"));
assertThat(response.get(), is(expectedResponse));
}
|
/**
 * Creates a criterion matching on the TCP destination port.
 *
 * @param tcpPort destination port to match
 * @return a TCP_DST port criterion
 */
public static Criterion matchTcpDst(TpPort tcpPort) {
    final Type criterionType = Type.TCP_DST;
    return new TcpPortCriterion(tcpPort, criterionType);
}
|
// matchTcpDst must yield a TcpPortCriterion of type TCP_DST carrying the port.
@Test
public void testMatchTcpDstMethod() {
Criterion matchTcpDst = Criteria.matchTcpDst(tpPort1);
TcpPortCriterion tcpPortCriterion =
checkAndConvert(matchTcpDst,
Criterion.Type.TCP_DST,
TcpPortCriterion.class);
assertThat(tcpPortCriterion.tcpPort(), is(equalTo(tpPort1)));
}
|
/**
 * Evaluates the wrapped condition against every element, collecting per-element
 * results separately so they can be reported as a single aggregated event. No
 * event is added for an empty result set.
 */
@Override
public void check(Collection<? extends T> collection, ConditionEvents events) {
    ViolatedAndSatisfiedConditionEvents elementEvents = new ViolatedAndSatisfiedConditionEvents();
    for (T item : collection) {
        condition.check(item, elementEvents);
    }
    boolean anyElementReported =
            !elementEvents.getAllowed().isEmpty() || !elementEvents.getViolating().isEmpty();
    if (anyElementReported) {
        events.add(new AnyConditionEvent(collection, elementEvents));
    }
}
|
// containAnyElementThat: violated when no element matches (message lists both
// mismatches), satisfied as soon as one element matches.
@Test
public void satisfied_works_and_description_contains_mismatches() {
ConditionEvents events = ConditionEvents.Factory.create();
containAnyElementThat(IS_SERIALIZABLE).check(TWO_NONSERIALIZABLE_OBJECTS, events);
assertThat(events).containViolations(messageForTwoTimes(isSerializableMessageFor(Object.class)));
events = ConditionEvents.Factory.create();
containAnyElementThat(IS_SERIALIZABLE).check(ONE_SERIALIZABLE_AND_ONE_NON_SERIALIZABLE_OBJECT, events);
assertThat(events).containNoViolation();
}
|
/**
 * Fixed-window admission check: once {@code interval} ms have elapsed since the
 * last reset, the token count is refilled to {@code rate}; each call then
 * consumes one token and is allowed while tokens remain non-negative.
 * NOTE(review): the refill (token.set + lastResetTime.set) is not atomic, so two
 * threads racing past the interval check may both refill — confirm this
 * best-effort behavior is acceptable.
 */
public boolean isAllowable() {
long now = System.currentTimeMillis();
if (now > lastResetTime.get() + interval) {
token.set(rate);
lastResetTime.set(now);
}
return token.decrementAndGet() >= 0;
}
|
// Exactly EXPECTED_RATE calls are admitted within one window; the next call is
// rejected.
@Test
void testAccuracy() throws Exception {
final int EXPECTED_RATE = 5;
statItem = new StatItem("test", EXPECTED_RATE, 60_000L);
for (int i = 1; i <= EXPECTED_RATE; i++) {
assertTrue(statItem.isAllowable());
}
// Must block the 6th item
assertFalse(statItem.isAllowable());
}
|
/**
 * Looks up the file data recorded for a path inside a module.
 *
 * @return the file data, or null when the module or the path is unknown
 */
@CheckForNull
public FileData fileData(String moduleKeyWithBranch, String path) {
    SingleProjectRepository moduleRepository = repositoriesPerModule.get(moduleKeyWithBranch);
    if (moduleRepository == null) {
        return null;
    }
    return moduleRepository.fileData(path);
}
|
// An unknown module key must yield null rather than throwing.
@Test
public void test_file_data_when_module_does_not_exist() {
FileData fileData = repository.fileData("unknown", "/Def.java");
assertThat(fileData).isNull();
}
|
/**
 * Records a resource entry of type OUT with a batch count of 1 and no call
 * arguments.
 *
 * @throws BlockException when the entry is blocked by flow rules
 */
public static Entry entry(String name) throws BlockException {
    final int batchCount = 1;
    return Env.sph.entry(name, EntryType.OUT, batchCount, OBJECTS0);
}
|
// The fully-parameterized entry overload must record the requested entry type;
// exit is called with the same batch count and args.
@Test
public void testStringEntryAll() throws BlockException {
final String arg0 = "foo";
final String arg1 = "baz";
Entry e = SphU.entry("resourceName", EntryType.IN, 2, arg0, arg1);
assertSame(e.resourceWrapper.getEntryType(), EntryType.IN);
e.exit(2, arg0, arg1);
}
|
/**
 * Starts Bonjour discovery: registers a DNSSD browser for every configured
 * service type, with this instance receiving the browse callbacks. On failure
 * the responder shuts itself down via quit().
 */
@Override
public void init() {
if(log.isDebugEnabled()) {
log.debug("Initialize responder by browsing DNSSD");
}
super.init();
try {
for(String protocol : this.getServiceTypes()) {
if(log.isInfoEnabled()) {
log.info(String.format("Adding service listener for %s", protocol));
}
// Keep the browser handle so it can be stopped later.
browsers.put(protocol, DNSSD.browse(protocol, this));
}
}
catch(DNSSDException e) {
log.error(String.format("Failure initializing Bonjour discovery: %s", e.getMessage()), e);
this.quit();
}
}
|
// After init(), a resolved Bonjour service must deliver a non-null Host within
// 5 seconds; assertion failures inside the listener thread are relayed back to
// the test thread via the failure array.
@Test
public void testInit() throws Exception {
final Rendezvous r = new RendezvousResponder();
final CountDownLatch wait = new CountDownLatch(1);
final AssertionError[] failure = new AssertionError[1];
r.addListener(new RendezvousListener() {
@Override
public void serviceResolved(final String identifier, final Host host) {
try {
assertNotNull(host);
}
catch(AssertionError error) {
failure[0] = error;
}
finally {
wait.countDown();
}
}
@Override
public void serviceLost(final Host servicename) {
//
}
});
r.init();
wait.await(5L, TimeUnit.SECONDS);
if(failure[0] != null) {
fail(failure[0].getMessage());
}
r.quit();
}
|
/**
 * Deliberately always reports "not completed": configuration is open-ended, so
 * this setup step never auto-advances.
 */
@Override
protected boolean isStepCompleted(@NonNull Context context) {
return false; // this step is never done! You can always configure more :)
}
|
// The configuration step must report "not completed" unconditionally.
@Test
public void testIsStepCompletedAlwaysFalse() {
Assert.assertFalse(
startFragment().isStepCompleted(ApplicationProvider.getApplicationContext()));
}
|
/**
 * Returns true only when the given endpoints match this link's source and
 * destination exactly; the check is direction-sensitive.
 */
public boolean isBetween(ConnectPoint src, ConnectPoint dst) {
    if (!this.src.equals(src)) {
        return false;
    }
    return this.dst.equals(dst);
}
|
// isBetween matches the exact (src, dst) pair of the underlying intent and
// rejects any other endpoint combination.
@Test
public void testIsBetween() {
ConnectPoint cp1 = new ConnectPoint(DeviceId.deviceId("of:0000000000000001"), PortNumber.portNumber(1L));
ConnectPoint cp2 = new ConnectPoint(DeviceId.deviceId("of:0000000000000002"), PortNumber.portNumber(2L));
ConnectPoint cp3 = new ConnectPoint(DeviceId.deviceId("of:0000000000000003"), PortNumber.portNumber(3L));
OpticalCircuitIntent ochIntent = OpticalCircuitIntent.builder()
.appId(appId)
.src(cp1)
.dst(cp2)
.bidirectional(true)
.key(Key.of(0, appId))
.signalType(CltSignalType.CLT_1GBE)
.build();
PacketLinkRealizedByOptical plink = PacketLinkRealizedByOptical.create(cp1, cp2, ochIntent);
assertTrue(plink.isBetween(cp1, cp2));
assertFalse(plink.isBetween(cp1, cp3));
}
|
/**
 * Creates a file using the default permission derived from the configured
 * umask, delegating to the permission-aware overload.
 */
@Override
public FSDataOutputStream create(Path path, boolean overwrite, int bufferSize, short replication,
    long blockSize, Progressable progress) throws IOException {
    String umask = mAlluxioConf.getString(PropertyKey.SECURITY_AUTHORIZATION_PERMISSION_UMASK);
    Mode defaultMode = ModeUtils.applyFileUMask(Mode.defaults(), umask);
    FsPermission permission = new FsPermission(defaultMode.toShort());
    return this.create(path, permission, overwrite, bufferSize, replication, blockSize, progress);
}
|
// Opening a filesystem from a zk:// URI and then from a plain-host URI must
// rebuild the configuration: ZooKeeper settings are dropped and the direct
// master host/port take effect.
@Test
public void resetContextFromZkUriToNonZkUri() throws Exception {
org.apache.hadoop.conf.Configuration conf = getConf();
URI uri = URI.create(Constants.HEADER + "zk@zkHost:2181/tmp/path.txt");
FileSystem fs = getHadoopFilesystem(org.apache.hadoop.fs.FileSystem.get(uri, conf));
assertTrue(fs.mFileSystem.getConf().getBoolean(PropertyKey.ZOOKEEPER_ENABLED));
assertEquals("zkHost:2181", fs.mFileSystem.getConf().get(PropertyKey.ZOOKEEPER_ADDRESS));
URI otherUri = URI.create(Constants.HEADER + "alluxioHost:19998/tmp/path.txt");
fs = getHadoopFilesystem(org.apache.hadoop.fs.FileSystem.get(otherUri, conf));
assertEquals("alluxioHost", fs.mFileSystem.getConf().get(PropertyKey.MASTER_HOSTNAME));
assertEquals(19998, fs.mFileSystem.getConf().get(PropertyKey.MASTER_RPC_PORT));
assertFalse(fs.mFileSystem.getConf().getBoolean(PropertyKey.ZOOKEEPER_ENABLED));
assertFalse(fs.mFileSystem.getConf().isSet(PropertyKey.ZOOKEEPER_ADDRESS));
}
|
/**
 * Combines the given predicates into a single AND compound predicate via
 * createCompound.
 */
public static ScalarOperator compoundAnd(Collection<ScalarOperator> nodes) {
    final CompoundPredicateOperator.CompoundType andType = CompoundPredicateOperator.CompoundType.AND;
    return createCompound(andType, nodes);
}
|
// Six operands must produce a balanced AND tree: the left subtree holds
// constants 1-4 in two nested ANDs, the right subtree holds 5 and 6 directly.
@Test
public void compoundAnd3() {
ScalarOperator tree1 = Utils.compoundAnd(ConstantOperator.createInt(1),
ConstantOperator.createInt(2),
ConstantOperator.createInt(3),
ConstantOperator.createInt(4),
ConstantOperator.createInt(5),
ConstantOperator.createInt(6));
assertEquals(CompoundPredicateOperator.CompoundType.AND, ((CompoundPredicateOperator) tree1).getCompoundType());
CompoundPredicateOperator leftChild = (CompoundPredicateOperator) tree1.getChild(0);
CompoundPredicateOperator rightChild = (CompoundPredicateOperator) tree1.getChild(1);
assertEquals(CompoundPredicateOperator.CompoundType.AND, leftChild.getCompoundType());
assertEquals(CompoundPredicateOperator.CompoundType.AND, rightChild.getCompoundType());
assertEquals(CompoundPredicateOperator.CompoundType.AND,
((CompoundPredicateOperator) leftChild.getChild(0)).getCompoundType());
assertEquals(CompoundPredicateOperator.CompoundType.AND,
((CompoundPredicateOperator) leftChild.getChild(1)).getCompoundType());
assertEquals(1, ((ConstantOperator) leftChild.getChild(0).getChild(0)).getInt());
assertEquals(2, ((ConstantOperator) leftChild.getChild(0).getChild(1)).getInt());
assertEquals(3, ((ConstantOperator) leftChild.getChild(1).getChild(0)).getInt());
assertEquals(4, ((ConstantOperator) leftChild.getChild(1).getChild(1)).getInt());
assertEquals(5, ((ConstantOperator) rightChild.getChild(0)).getInt());
assertEquals(6, ((ConstantOperator) rightChild.getChild(1)).getInt());
}
|
/**
 * Creates an empty file, pre-computing the checksum of the empty body so the
 * created file's status carries it.
 */
@Override
public Path touch(final Path file, final TransferStatus status) throws BackgroundException {
    final TransferStatus withChecksum = status.withChecksum(
            write.checksum(file, status).compute(new NullInputStream(0L), status));
    return super.touch(file, withChecksum);
}
|
// A key containing newline and carriage-return characters must be creatable,
// findable by both attribute finders, and deletable on S3.
@Test
public void testTouchCarriageReturnKey() throws Exception {
final Path container = new Path("test-eu-central-1-cyberduck", EnumSet.of(Path.Type.volume, Path.Type.directory));
final Path test = new S3TouchFeature(session, new S3AccessControlListFeature(session)).touch(
new Path(container, String.format("%s\n-\r", new AsciiRandomStringService().random()), EnumSet.of(Path.Type.file)), new TransferStatus());
assertNull(test.attributes().getVersionId());
assertTrue(new S3FindFeature(session, new S3AccessControlListFeature(session)).find(test));
assertEquals(test.attributes(), new DefaultAttributesFinderFeature(session).find(test));
assertEquals(test.attributes(), new S3AttributesFinderFeature(session, new S3AccessControlListFeature(session)).find(test));
new S3DefaultDeleteFeature(session).delete(Collections.singletonList(test), new DisabledLoginCallback(), new Delete.DisabledCallback());
assertFalse(new S3FindFeature(session, new S3AccessControlListFeature(session)).find(test));
}
|
/**
 * Returns the node path registered for the given named item type.
 *
 * @param itemType item type key
 * @return the registered node path, or null when the type is not registered
 */
public NamedRuleItemNodePath getNamedItem(final String itemType) {
    final NamedRuleItemNodePath result = namedItems.get(itemType);
    return result;
}
|
// The "tables" named item must build paths under tables/ and extract the item
// name back out of a full versioned metadata path.
@Test
void assertGetNamedItem() {
NamedRuleItemNodePath namedRulePath = ruleNodePath.getNamedItem("tables");
assertThat(namedRulePath.getPath("foo_table"), is("tables/foo_table"));
Optional<String> path = namedRulePath.getName("/metadata/foo_db/rules/foo/tables/foo_table/versions/0");
assertTrue(path.isPresent());
assertThat(path.get(), is("foo_table"));
}
|
/**
 * Component activation: builds the distributed device-to-pipeconf map and
 * attaches the listener that reacts to map changes.
 */
@Activate
public void activate() {
deviceToPipeconf = storageService.<DeviceId, PiPipeconfId>consistentMapBuilder()
.withName("onos-pipeconf-table")
.withSerializer(Serializer.using(KryoNamespaces.API))
.build();
deviceToPipeconf.addListener(mapListener);
log.info("Started");
}
|
// After activation the store must have its storage service wired, a delegate
// set, and both internal maps empty.
@Test
public void activate() {
assertNotNull(store.storageService);
assertTrue("Store must have delegate", store.hasDelegate());
assertTrue("No value should be in the map", store.deviceToPipeconf.isEmpty());
assertTrue("No value should be in the map", store.pipeconfToDevices.isEmpty());
}
|
/**
 * Lists all subscription paths under the given project that are attached to the
 * given topic, following nextPageToken pagination until exhausted.
 *
 * @return matching subscription paths; empty list when the project has none
 * @throws IOException if a Pub/Sub API call fails
 */
@Override
public List<SubscriptionPath> listSubscriptions(ProjectPath project, TopicPath topic)
throws IOException {
Subscriptions.List request = pubsub.projects().subscriptions().list(project.getPath());
ListSubscriptionsResponse response = request.execute();
if (response.getSubscriptions() == null || response.getSubscriptions().isEmpty()) {
return ImmutableList.of();
}
List<SubscriptionPath> subscriptions = new ArrayList<>(response.getSubscriptions().size());
while (true) {
// Keep only subscriptions bound to the requested topic.
for (Subscription subscription : response.getSubscriptions()) {
if (subscription.getTopic().equals(topic.getPath())) {
subscriptions.add(subscriptionPathFromPath(subscription.getName()));
}
}
// An absent page token means the last page has been processed.
if (Strings.isNullOrEmpty(response.getNextPageToken())) {
break;
}
request.setPageToken(response.getNextPageToken());
response = request.execute();
}
return subscriptions;
}
|
// Two mocked pages (first carries a nextPageToken) must be walked, and only the
// single subscription attached to Topic2 is returned.
@Test
public void listSubscriptions() throws Exception {
ListSubscriptionsResponse expectedResponse1 = new ListSubscriptionsResponse();
expectedResponse1.setSubscriptions(Collections.singletonList(buildSubscription(1)));
expectedResponse1.setNextPageToken("AVgJH3Z7aHxiDBs");
ListSubscriptionsResponse expectedResponse2 = new ListSubscriptionsResponse();
expectedResponse2.setSubscriptions(Collections.singletonList(buildSubscription(2)));
Subscriptions.List request = mockPubsub.projects().subscriptions().list(PROJECT.getPath());
when((Object) request.execute()).thenReturn(expectedResponse1, expectedResponse2);
final TopicPath topic101 = PubsubClient.topicPathFromName("testProject", "Topic2");
List<SubscriptionPath> subscriptionPaths = client.listSubscriptions(PROJECT, topic101);
assertEquals(1, subscriptionPaths.size());
}
|
/**
 * Builds tri-state selections describing, per resource, which agents carry it:
 * an agent is associated when its resource list contains the resource's name,
 * the selection identifier is the resource name, and every selection is enabled.
 */
public static List<TriStateSelection> forAgentsResources(Set<ResourceConfig> resourceConfigs, Agents agents) {
return convert(resourceConfigs, agents, new Assigner<>() {
@Override
public boolean shouldAssociate(Agent agent, ResourceConfig resourceConfig) {
return agent.getResourcesAsList().contains(resourceConfig.getName());
}
@Override
public String identifier(ResourceConfig resourceConfig) {
return resourceConfig.getName();
}
@Override
public boolean shouldEnable(Agent agent, ResourceConfig resourceConfig) {
return true;
}
});
}
|
// A resource held by no agent must map to a selection with the "remove" action.
@Test
public void shouldHaveActionRemoveIfNoAgentsHaveResource() {
resourceConfigs.add(new ResourceConfig("none"));
agents.add(new Agent("uuid1", "host1", "127.0.0.1", List.of("one")));
agents.add(new Agent("uuid2", "host2", "127.0.0.2", List.of("two")));
List<TriStateSelection> selections = TriStateSelection.forAgentsResources(resourceConfigs, agents);
assertThat(selections, hasItem(new TriStateSelection("none", TriStateSelection.Action.remove)));
}
|
/**
 * Returns a human-readable descriptor for the given operation. Wrapper
 * operations (PartitionIteratingOperation, Backup) are described by the
 * operation/factory they carry, in the form "Wrapper(inner.class.Name)";
 * anything else is described by its class name. Descriptors are cached in
 * DESCRIPTORS keyed by the inner class name, matching the original behavior.
 */
public static String toOperationDesc(Operation op) {
    Class<? extends Operation> operationClass = op.getClass();
    if (PartitionIteratingOperation.class.isAssignableFrom(operationClass)) {
        OperationFactory operationFactory = ((PartitionIteratingOperation) op).getOperationFactory();
        return cachedDesc(PartitionIteratingOperation.class, operationFactory.getClass().getName());
    } else if (Backup.class.isAssignableFrom(operationClass)) {
        Operation backupOperation = ((Backup) op).getBackupOp();
        return cachedDesc(Backup.class, backupOperation.getClass().getName());
    } else {
        return operationClass.getName();
    }
}

// Looks up (or computes and caches) the "Wrapper(inner.class.Name)" descriptor.
// Extracted to remove the duplicated get/format/put logic from both branches.
private static String cachedDesc(Class<?> wrapper, String innerClassName) {
    String desc = DESCRIPTORS.get(innerClassName);
    if (desc == null) {
        desc = wrapper.getSimpleName() + "(" + innerClassName + ")";
        DESCRIPTORS.put(innerClassName, desc);
    }
    return desc;
}
|
// A plain (non-wrapper) operation is described by its fully-qualified class name.
@Test
public void testNormalOperation() {
assertEquals(DummyOperation.class.getName(), toOperationDesc(new DummyOperation()));
}
|
/**
 * Shuts the task manager down in a fixed order: stop the state updater and
 * scheduling task manager, close active then standby tasks, close the thread
 * producer, clear the registry, and finally release directory locks for
 * never-assigned tasks. The first exception seen is rethrown after all steps
 * have been attempted ("clean" mode) or swallowed with a warning otherwise.
 */
void shutdown(final boolean clean) {
shutdownStateUpdater();
shutdownSchedulingTaskManager();
// Remember only the first failure; later steps still run so cleanup is maximal.
final AtomicReference<RuntimeException> firstException = new AtomicReference<>(null);
// TODO: change type to `StreamTask`
// Sorted by task id for deterministic close order.
final Set<Task> activeTasks = new TreeSet<>(Comparator.comparing(Task::id));
activeTasks.addAll(tasks.activeTasks());
executeAndMaybeSwallow(
clean,
() -> closeAndCleanUpTasks(activeTasks, standbyTaskIterable(), clean),
e -> firstException.compareAndSet(null, e),
e -> log.warn("Ignoring an exception while unlocking remaining task directories.", e)
);
executeAndMaybeSwallow(
clean,
activeTaskCreator::closeThreadProducerIfNeeded,
e -> firstException.compareAndSet(null, e),
e -> log.warn("Ignoring an exception while closing thread producer.", e)
);
tasks.clear();
// this should be called after closing all tasks and clearing them from `tasks` to make sure we unlock the dir
// for any tasks that may have still been in CREATED at the time of shutdown, since Task#close will not do so
executeAndMaybeSwallow(
clean,
this::releaseLockedUnassignedTaskDirectories,
e -> firstException.compareAndSet(null, e),
e -> log.warn("Ignoring an exception while unlocking remaining task directories.", e)
);
final RuntimeException fatalException = firstException.get();
if (fatalException != null) {
throw fatalException;
}
log.info("Shutdown complete");
}
|
// Tasks the state updater reports as failed must be closed dirty during
// shutdown: prepareCommit/suspend/closeDirty on the task, producer cleanup, and
// a state-updater shutdown with an effectively unbounded timeout.
@Test
public void shouldShutDownStateUpdaterAndCloseFailedTasksDirty() {
final TasksRegistry tasks = mock(TasksRegistry.class);
final StreamTask failedStatefulTask = statefulTask(taskId01, taskId01ChangelogPartitions)
.inState(State.RESTORING).build();
final StandbyTask failedStandbyTask = standbyTask(taskId02, taskId02ChangelogPartitions)
.inState(State.RUNNING).build();
when(stateUpdater.drainExceptionsAndFailedTasks())
.thenReturn(Arrays.asList(
new ExceptionAndTask(new RuntimeException(), failedStatefulTask),
new ExceptionAndTask(new RuntimeException(), failedStandbyTask))
);
final TaskManager taskManager = setUpTaskManager(ProcessingMode.AT_LEAST_ONCE, tasks, true);
taskManager.shutdown(true);
verify(activeTaskCreator).closeAndRemoveTaskProducerIfNeeded(failedStatefulTask.id());
verify(activeTaskCreator).closeThreadProducerIfNeeded();
verify(stateUpdater).shutdown(Duration.ofMillis(Long.MAX_VALUE));
verify(failedStatefulTask).prepareCommit();
verify(failedStatefulTask).suspend();
verify(failedStatefulTask).closeDirty();
}
|
/**
 * Classifies a JMS destination as a "queue" or a "topic".
 *
 * @return "queue" or "topic", or null when the destination is null
 */
@Nullable static String channelKind(@Nullable Destination destination) {
    if (destination == null) {
        return null;
    }
    if (isQueue(destination)) {
        return "queue";
    }
    return "topic";
}
|
// A destination implementing both Queue and Topic but exposing only a topic
// name must be classified as "topic".
@Test void channelKind_queueAndTopic_topicOnNoQueueName() throws JMSException {
QueueAndTopic destination = mock(QueueAndTopic.class);
when(destination.getTopicName()).thenReturn("topic-foo");
assertThat(MessageParser.channelKind(destination))
.isEqualTo("topic");
}
|
/**
 * Handles a client's termination notice: removes its settings and gRPC channel,
 * then unregisters it — per published topic for producers, or per consumer group
 * for push/simple consumers. Unknown client types complete the future with an
 * UNRECOGNIZED_CLIENT_TYPE status; any thrown error completes it exceptionally.
 */
public CompletableFuture<NotifyClientTerminationResponse> notifyClientTermination(ProxyContext ctx,
NotifyClientTerminationRequest request) {
CompletableFuture<NotifyClientTerminationResponse> future = new CompletableFuture<>();
try {
String clientId = ctx.getClientID();
LanguageCode languageCode = LanguageCode.valueOf(ctx.getLanguage());
// Settings are removed up front — the client is going away regardless of outcome.
Settings clientSettings = grpcClientSettingsManager.removeAndGetClientSettings(ctx);
switch (clientSettings.getClientType()) {
case PRODUCER:
for (Resource topic : clientSettings.getPublishing().getTopicsList()) {
String topicName = topic.getName();
// NOTE(review): removeChannel is called inside the topic loop, so only the
// first iteration can observe a non-null channel — confirm intended.
GrpcClientChannel channel = this.grpcChannelManager.removeChannel(clientId);
if (channel != null) {
ClientChannelInfo clientChannelInfo = new ClientChannelInfo(channel, clientId, languageCode, MQVersion.Version.V5_0_0.ordinal());
this.messagingProcessor.unRegisterProducer(ctx, topicName, clientChannelInfo);
}
}
break;
case PUSH_CONSUMER:
case SIMPLE_CONSUMER:
validateConsumerGroup(request.getGroup());
String consumerGroup = request.getGroup().getName();
GrpcClientChannel channel = this.grpcChannelManager.removeChannel(clientId);
if (channel != null) {
ClientChannelInfo clientChannelInfo = new ClientChannelInfo(channel, clientId, languageCode, MQVersion.Version.V5_0_0.ordinal());
this.messagingProcessor.unRegisterConsumer(ctx, consumerGroup, clientChannelInfo);
}
break;
default:
future.complete(NotifyClientTerminationResponse.newBuilder()
.setStatus(ResponseBuilder.getInstance().buildStatus(Code.UNRECOGNIZED_CLIENT_TYPE, clientSettings.getClientType().name()))
.build());
return future;
}
future.complete(NotifyClientTerminationResponse.newBuilder()
.setStatus(ResponseBuilder.getInstance().buildStatus(Code.OK, Code.OK.name()))
.build());
} catch (Throwable t) {
future.completeExceptionally(t);
}
return future;
}
|
// A producer termination notice must unregister the producer for its published
// topic, passing the channel info captured from the heartbeat, and return OK.
@Test
public void testProducerNotifyClientTermination() throws Throwable {
ProxyContext context = createContext();
when(this.grpcClientSettingsManager.removeAndGetClientSettings(any())).thenReturn(Settings.newBuilder()
.setClientType(ClientType.PRODUCER)
.setPublishing(Publishing.newBuilder()
.addTopics(Resource.newBuilder().setName(TOPIC).build())
.build())
.build());
ArgumentCaptor<ClientChannelInfo> channelInfoArgumentCaptor = ArgumentCaptor.forClass(ClientChannelInfo.class);
doNothing().when(this.messagingProcessor).unRegisterProducer(any(), anyString(), channelInfoArgumentCaptor.capture());
when(this.metadataService.getTopicMessageType(any(), anyString())).thenReturn(TopicMessageType.NORMAL);
this.sendProducerTelemetry(context);
this.sendProducerHeartbeat(context);
NotifyClientTerminationResponse response = this.clientActivity.notifyClientTermination(
context,
NotifyClientTerminationRequest.newBuilder()
.build()
).get();
assertEquals(Code.OK, response.getStatus().getCode());
ClientChannelInfo clientChannelInfo = channelInfoArgumentCaptor.getValue();
assertClientChannelInfo(clientChannelInfo, TOPIC);
}
|
/**
 * Selects the SDKs to run against: starts from those requested by the config
 * and manifest (sorted), then intersects with the globally enabled SDK set when
 * such a filter is present.
 */
@Override
@Nonnull
public List<Sdk> selectSdks(Configuration configuration, UsesSdk usesSdk) {
    Config config = configuration.get(Config.class);
    Set<Sdk> candidates = new TreeSet<>(configuredSdks(config, usesSdk));
    if (enabledSdks != null) {
        candidates = Sets.intersection(candidates, enabledSdks);
    }
    return Lists.newArrayList(candidates);
}
|
// With Config.ALL_SDKS, the manifest's min/max range (19..23) bounds the
// selection; 20 is absent, presumably not provided by the test SDK collection.
@Test
public void withAllSdksConfig_shouldUseFullSdkRangeFromAndroidManifest() throws Exception {
when(usesSdk.getTargetSdkVersion()).thenReturn(22);
when(usesSdk.getMinSdkVersion()).thenReturn(19);
when(usesSdk.getMaxSdkVersion()).thenReturn(23);
assertThat(
sdkPicker.selectSdks(
buildConfig(new Config.Builder().setSdk(Config.ALL_SDKS)), usesSdk))
.containsExactly(
sdkCollection.getSdk(19),
sdkCollection.getSdk(21),
sdkCollection.getSdk(22),
sdkCollection.getSdk(23));
}
|
/**
 * Cumulative distribution function of the Bernoulli distribution:
 * 0 for k &lt; 0, q = P(X = 0) for 0 &lt;= k &lt; 1, and 1 for k &gt;= 1.
 */
@Override
public double cdf(double k) {
    if (k < 0) {
        return 0.0;
    } else if (k < 1) {
        // P(X <= k) = P(X = 0) = q for every k in [0, 1). The original only
        // matched k == 0 exactly and wrongly returned 1.0 for fractional k
        // in (0, 1), even though the support is {0, 1}.
        return q;
    } else {
        return 1.0;
    }
}
|
// For p = 0.3: cdf(0) = q = 0.7 and cdf(1) = 1. The rand() call only exercises
// sampling; its result is unused.
@Test
public void testCdf() {
System.out.println("cdf");
BernoulliDistribution instance = new BernoulliDistribution(0.3);
instance.rand();
assertEquals(0.7, instance.cdf(0), 1E-7);
assertEquals(1.0, instance.cdf(1), 1E-7);
}
|
/**
 * Writes an 8-byte long at the current position using the stream's configured
 * byte order, growing the buffer when necessary and advancing the position.
 *
 * @throws IOException per the DataOutput contract
 */
@Override
public void writeLong(final long v) throws IOException {
ensureAvailable(LONG_SIZE_IN_BYTES);
Bits.writeLong(buffer, pos, v, isBigEndian);
pos += LONG_SIZE_IN_BYTES;
}
|
// A long written at an explicit position (two-arg overload) must be readable
// back as big-endian from the same buffer offset.
@Test
public void testWriteLongForPositionV() throws Exception {
long expected = 100;
out.writeLong(2, expected);
long actual = Bits.readLongB(out.buffer, 2);
assertEquals(expected, actual);
}
|
/**
 * Parses and validates sink connector offsets submitted through the REST API into a
 * {@code TopicPartition -> offset} map.
 *
 * Each entry's key map must contain non-null topic and partition values; the value map
 * must either be null (interpreted as an offset reset for that partition) or contain the
 * offset key. Both quoted ("10") and unquoted (10) numbers are accepted, since JSON
 * payloads may carry either form.
 *
 * @param partitionOffsets raw partition-to-offset maps from the request payload
 * @return parsed offsets; a null map value marks the partition for offset reset
 * @throws BadRequestException if any partition/offset entry is missing, null, or unparseable
 */
public static Map<TopicPartition, Long> parseSinkConnectorOffsets(Map<Map<String, ?>, Map<String, ?>> partitionOffsets) {
    Map<TopicPartition, Long> parsedOffsetMap = new HashMap<>();
    for (Map.Entry<Map<String, ?>, Map<String, ?>> partitionOffset : partitionOffsets.entrySet()) {
        Map<String, ?> partitionMap = partitionOffset.getKey();
        if (partitionMap == null) {
            throw new BadRequestException("The partition for a sink connector offset cannot be null or missing");
        }
        if (!partitionMap.containsKey(KAFKA_TOPIC_KEY) || !partitionMap.containsKey(KAFKA_PARTITION_KEY)) {
            throw new BadRequestException(String.format("The partition for a sink connector offset must contain the keys '%s' and '%s'",
                    KAFKA_TOPIC_KEY, KAFKA_PARTITION_KEY));
        }
        if (partitionMap.get(KAFKA_TOPIC_KEY) == null) {
            throw new BadRequestException("Kafka topic names must be valid strings and may not be null");
        }
        if (partitionMap.get(KAFKA_PARTITION_KEY) == null) {
            throw new BadRequestException("Kafka partitions must be valid numbers and may not be null");
        }
        // String.valueOf accepts both String and numeric JSON representations.
        String topic = String.valueOf(partitionMap.get(KAFKA_TOPIC_KEY));
        int partition;
        try {
            // We parse it this way because both "10" and 10 should be accepted as valid partition values in the REST API's
            // JSON request payload. If it throws an exception, we should propagate it since it's indicative of a badly formatted value.
            partition = Integer.parseInt(String.valueOf(partitionMap.get(KAFKA_PARTITION_KEY)));
        } catch (Exception e) {
            throw new BadRequestException("Failed to parse the following Kafka partition value in the provided offsets: '" +
                    partitionMap.get(KAFKA_PARTITION_KEY) + "'. Partition values for sink connectors need " +
                    "to be integers.", e);
        }
        TopicPartition tp = new TopicPartition(topic, partition);
        Map<String, ?> offsetMap = partitionOffset.getValue();
        if (offsetMap == null) {
            // represents an offset reset
            parsedOffsetMap.put(tp, null);
        } else {
            if (!offsetMap.containsKey(KAFKA_OFFSET_KEY)) {
                throw new BadRequestException(String.format("The offset for a sink connector should either be null or contain " +
                        "the key '%s'", KAFKA_OFFSET_KEY));
            }
            long offset;
            try {
                // We parse it this way because both "1000" and 1000 should be accepted as valid offset values in the REST API's
                // JSON request payload. If it throws an exception, we should propagate it since it's indicative of a badly formatted value.
                offset = Long.parseLong(String.valueOf(offsetMap.get(KAFKA_OFFSET_KEY)));
            } catch (Exception e) {
                throw new BadRequestException("Failed to parse the following Kafka offset value in the provided offsets: '" +
                        offsetMap.get(KAFKA_OFFSET_KEY) + "'. Offset values for sink connectors need " +
                        "to be integers.", e);
            }
            parsedOffsetMap.put(tp, offset);
        }
    }
    return parsedOffsetMap;
}
|
// Verifies that a quoted partition value ("10") is accepted and parsed to the int 10.
@Test
public void testValidateAndParseStringPartitionValue() {
    Map<Map<String, ?>, Map<String, ?>> partitionOffsets = createPartitionOffsetMap("topic", "10", "100");
    Map<TopicPartition, Long> parsedOffsets = SinkUtils.parseSinkConnectorOffsets(partitionOffsets);
    assertEquals(1, parsedOffsets.size());
    TopicPartition tp = parsedOffsets.keySet().iterator().next();
    assertEquals(10, tp.partition());
}
|
/**
 * Merge policy that always prefers the merging (incoming) value, falling back to the
 * existing value's raw value only when the merging value is absent.
 */
@Override
public Object merge(T mergingValue, T existingValue) {
    final T chosen = (mergingValue != null) ? mergingValue : existingValue;
    return chosen.getRawValue();
}
|
// Verifies that when the merging value is present, the policy returns it
// rather than the existing value.
@Test
public void merge_mergingNotNull() {
    MapMergeTypes existing = mergingValueWithGivenValue(EXISTING);
    MapMergeTypes merging = mergingValueWithGivenValue(MERGING);
    assertEquals(MERGING, mergePolicy.merge(merging, existing));
}
|
/**
 * Transforms every element of {@code srcList} to type {@code T} via {@code transform}.
 *
 * @param clazz   target element type
 * @param srcList source objects; may be null or empty
 * @return a new list of transformed elements, or an empty list for empty/null input
 */
public static <T> List<T> batchTransform(final Class<T> clazz, List<?> srcList) {
    if (CollectionUtils.isEmpty(srcList)) {
        return Collections.emptyList();
    }
    // Pre-size the target list to avoid intermediate growth.
    final List<T> transformed = new ArrayList<>(srcList.size());
    srcList.forEach(src -> transformed.add(transform(clazz, src)));
    return transformed;
}
|
// Verifies that a non-empty source list yields a non-null result list.
@Test
public void testBatchTransformListNotEmpty() {
    someList.add(77);
    assertNotNull(BeanUtils.batchTransform(String.class, someList));
}
|
/**
 * Builds the request body for re-sending: a multipart map when an uploaded file is
 * present, otherwise the form parameters encoded as a UTF-8 query string.
 */
@Override
public Object getBody() throws Exception {
    MultipartFile uploaded = super.getFile(DEFAULT_FILE_NAME);
    if (Objects.isNull(uploaded)) {
        // The content-type for the configuration publication might be "multipart/form-data"
        // even without an actual file part; fall back to the plain parameter map.
        return HttpUtils.encodingParams(HttpUtils.translateParameterMap(stringMap), StandardCharsets.UTF_8.name());
    }
    MultiValueMap<String, Object> parts = new LinkedMultiValueMap<>();
    parts.add(DEFAULT_FILE_NAME, uploaded.getResource());
    return parts;
}
|
// Verifies the no-file path: the body must equal the UTF-8 encoded (empty) parameter map.
@Test
void testGetBodyWithoutFile() throws Exception {
    Object body = reuseUploadFileHttpServletRequest.getBody();
    assertEquals(HttpUtils.encodingParams(HttpUtils.translateParameterMap(new HashMap<>()), StandardCharsets.UTF_8.name()), body);
}
|
/**
 * Reads the next byte as an unsigned value in [0, 255], or returns -1 at end of data.
 */
@Override
public int read() throws EOFException {
    if (pos >= size) {
        return -1;
    }
    // Mask with 0xff so the byte is returned unsigned.
    return data[pos++] & 0xff;
}
|
// Verifies the array overload read(b, off, len) fills the requested 5 bytes.
@Test
public void testReadForBOffLen() throws Exception {
    int read = in.read(INIT_DATA, 0, 5);
    assertEquals(5, read);
}
|
/**
 * Materializes the current scan state (task groups, grouping key, expected schema)
 * into a {@code SparkBatch}. The hashCode ties the batch back to this scan instance.
 */
@Override
public Batch toBatch() {
    final SparkBatch batch =
            new SparkBatch(
                    sparkContext, table, readConf, groupingKeyType(), taskGroups(), expectedSchema, hashCode());
    return batch;
}
|
// Verifies that a days() UDF predicate (and its negation) pushed down against an
// UNPARTITIONED table cannot prune anything: all 10 input partitions remain planned.
@TestTemplate
public void testUnpartitionedDays() throws Exception {
    createUnpartitionedTable(spark, tableName);
    SparkScanBuilder builder = scanBuilder();
    DaysFunction.TimestampToDaysFunction function = new DaysFunction.TimestampToDaysFunction();
    UserDefinedScalarFunc udf = toUDF(function, expressions(fieldRef("ts")));
    // days(ts) < 2018-11-20
    Predicate predicate =
        new Predicate(
            "<",
            expressions(
                udf, dateLit(timestampStrToDayOrdinal("2018-11-20T00:00:00.000000+00:00"))));
    pushFilters(builder, predicate);
    Batch scan = builder.build().toBatch();
    assertThat(scan.planInputPartitions().length).isEqualTo(10);
    // NOT LT
    builder = scanBuilder();
    predicate = new Not(predicate);
    pushFilters(builder, predicate);
    scan = builder.build().toBatch();
    assertThat(scan.planInputPartitions().length).isEqualTo(10);
}
|
/**
 * Shuffle partitioning: picks a downstream channel uniformly at random,
 * ignoring the record contents entirely.
 */
@Override
public int selectChannel(SerializationDelegate<StreamRecord<T>> record) {
    final int channel = random.nextInt(numberOfChannels);
    return channel;
}
|
// Verifies the selected channel always falls in [0, numberOfChannels) for
// several channel counts. Randomness means this checks the range, not a value.
@Test
void testSelectChannelsInterval() {
    assertSelectedChannelWithSetup(0, 1);
    streamPartitioner.setup(2);
    assertThat(streamPartitioner.selectChannel(serializationDelegate))
        .isGreaterThanOrEqualTo(0)
        .isLessThan(2);
    streamPartitioner.setup(1024);
    assertThat(streamPartitioner.selectChannel(serializationDelegate))
        .isGreaterThanOrEqualTo(0)
        .isLessThan(1024);
}
|
/** Whether the Kubernetes node name should be used as the member's external address. */
boolean isUseNodeNameAsExternalAddress() {
    return useNodeNameAsExternalAddress;
}
|
// Verifies the USE_NODE_NAME_AS_EXTERNAL_ADDRESS property is propagated
// into KubernetesConfig.
@Test
public void kubernetesApiNodeNameAsExternalAddress() {
    // given
    Map<String, Comparable> properties = createProperties();
    properties.put(KubernetesProperties.USE_NODE_NAME_AS_EXTERNAL_ADDRESS.key(), true);
    // when
    KubernetesConfig config = new KubernetesConfig(properties);
    // then
    assertTrue(config.isUseNodeNameAsExternalAddress());
}
|
/**
 * Verifies the request signature for the matched rule, optionally including the request
 * body in the signature, then continues the plugin chain on success.
 *
 * Two paths: when the rule does not require body signing, the headers-only verification
 * runs immediately; otherwise the request body is read (and re-written so downstream
 * plugins can still consume it) and verified together with the signature.
 */
@Override
protected Mono<Void> doExecute(final ServerWebExchange exchange, final ShenyuPluginChain chain, final SelectorData selectorData, final RuleData rule) {
    SignRuleHandler ruleHandler = SignPluginDataHandler.CACHED_HANDLE.get().obtainHandle(CacheKeyUtils.INST.getKey(rule));
    // No handler configured, or body signing disabled: verify headers only.
    if (ObjectUtils.isEmpty(ruleHandler) || !ruleHandler.getSignRequestBody()) {
        VerifyResult result = signService.signatureVerify(exchange);
        if (result.isFailed()) {
            return WebFluxResultUtils.failedResult(ShenyuResultEnum.SIGN_IS_NOT_PASS.getCode(),
                result.getReason(), exchange);
        }
        return chain.execute(exchange);
    }
    // Body signing enabled: read and re-write the body, verifying it along the way.
    // A failed verification is thrown as ResponsiveException and mapped back to a
    // failure response in onErrorResume; other errors propagate unchanged.
    return ServerWebExchangeUtils.rewriteRequestBody(exchange, messageReaders, body -> {
        VerifyResult result = signVerifyWithBody(body, exchange);
        if (result.isSuccess()) {
            return Mono.just(body);
        }
        throw new ResponsiveException(ShenyuResultEnum.SIGN_IS_NOT_PASS.getCode(), result.getReason(), exchange);
    }).flatMap(chain::execute)
        .onErrorResume(error -> {
            if (error instanceof ResponsiveException) {
                return WebFluxResultUtils.failedResult((ResponsiveException) error);
            }
            return Mono.error(error);
        });
}
|
// Verifies the headers-only path with a failing signature: the plugin writes a failure
// result and the reactive flow still completes (no error is propagated).
@Test
public void testSignPluginSimple2() {
    this.exchange = MockServerWebExchange.from(MockServerHttpRequest.get("localhost").build());
    when(signService.signatureVerify(exchange)).thenReturn(VerifyResult.fail(""));
    RuleData data = mock(RuleData.class);
    SelectorData selectorData = mock(SelectorData.class);
    when(chain.execute(exchange)).thenReturn(Mono.empty());
    StepVerifier.create(signPlugin.doExecute(exchange, chain, selectorData, data)).expectSubscription().verifyComplete();
}
|
/**
 * Rate-limits sampling to at most {@code maxSamplesPerPeriod} per period of
 * {@code periodLengthInNanos}.
 *
 * The current period index is derived from the clock outside the lock; all counter
 * state is read and written under {@code synchronized(this)}, so concurrent callers
 * see a consistent (period, count) pair.
 *
 * @return true if this call is within the per-period budget
 */
@Override
public boolean shouldSample() {
    long now = nanoClock.nanoTimeNow();
    // Integer division buckets time into fixed-length periods.
    long period = now / periodLengthInNanos;
    synchronized (this) {
        if (period != currentSamplingPeriod) {
            // First call in a new period: reset the counter and accept the sample.
            currentSamplingPeriod = period;
            samplesInCurrentPeriod = 1;
            return true;
        }
        if (samplesInCurrentPeriod >= maxSamplesPerPeriod) {
            return false;
        }
        ++samplesInCurrentPeriod;
        return true;
    }
}
|
// Verifies the very first sample in a fresh period is always accepted,
// even with a budget of a single sample per period.
@Test
void first_sample_in_period_returns_true() {
    var clock = MockUtils.mockedClockReturning(1000L);
    var sampler = new MaxSamplesPerPeriod(clock, 1000L, 1L);
    assertTrue(sampler.shouldSample());
}
|
/**
 * Converts a {@code PCollection<Row>} into user-type elements of {@code clazz}.
 * Delegates to the general {@code to(clazz)} conversion, since converting from
 * rows is just a conversion whose input happens to be rows.
 */
public static <OutputT> PTransform<PCollection<Row>, PCollection<OutputT>> fromRows(
    Class<OutputT> clazz) {
    return to(clazz);
}
|
// Verifies end-to-end that rows carrying EXPECTED_SCHEMA1 convert back to POJO1
// instances equal to a default-constructed POJO1.
@Test
@Category(NeedsRunner.class)
public void testFromRows() {
    PCollection<POJO1> pojos =
        pipeline
            .apply(Create.of(EXPECTED_ROW1).withRowSchema(EXPECTED_SCHEMA1))
            .apply(Convert.fromRows(POJO1.class));
    PAssert.that(pojos).containsInAnyOrder(new POJO1());
    pipeline.run();
}
|
/**
 * Builds a {@code UClassIdent} from a fully qualified name, deriving the top-level
 * class path as everything up to and including the first component whose initial is
 * upper case (the conventional package/class boundary).
 */
@VisibleForTesting
public static UClassIdent create(String qualifiedName) {
    StringBuilder topLevel = new StringBuilder();
    for (String component : Splitter.on('.').split(qualifiedName)) {
        if (topLevel.length() > 0) {
            topLevel.append('.');
        }
        topLevel.append(component);
        // Stop once the first type name (upper-case initial) has been included.
        if (Character.isUpperCase(component.charAt(0))) {
            break;
        }
    }
    return create(topLevel.toString(), qualifiedName);
}
|
// Verifies a UClassIdent survives a serialize/deserialize round trip unchanged.
@Test
public void serialization() {
    SerializableTester.reserializeAndAssert(UClassIdent.create("java.math.BigInteger"));
}
|
/**
 * Records one running instance with the given status, lazily creating the stats map
 * and the rollup aggregate on first use, then folds {@code overview} into the rollup.
 */
private void addRunningOne(WorkflowInstance.Status status, WorkflowRollupOverview overview) {
    if (runningStats == null) {
        runningStats = new EnumMap<>(WorkflowInstance.Status.class);
    }
    // merge(status, 1, sum) == getOrDefault(status, 0) + 1
    runningStats.merge(status, 1L, Long::sum);
    if (runningRollup == null) {
        runningRollup = new WorkflowRollupOverview();
    }
    runningRollup.aggregate(overview);
}
|
// Verifies addOne against a fixture overview: running stats, rollup totals,
// per-status counts, and iteration-status membership all update as expected.
@Test
public void testAddRunningOne() throws Exception {
    ForeachStepOverview overview =
        loadObject(
            "fixtures/instances/sample-foreach-step-overview.json", ForeachStepOverview.class);
    WorkflowRollupOverview.CountReference ref = new WorkflowRollupOverview.CountReference();
    ref.setCnt(1);
    WorkflowRollupOverview rollupOverview =
        WorkflowRollupOverview.of(1, Collections.singletonMap(StepInstance.Status.RUNNING, ref));
    overview.addOne(80333, WorkflowInstance.Status.IN_PROGRESS, rollupOverview);
    assertEquals(
        207L, overview.getRunningStats().get(WorkflowInstance.Status.IN_PROGRESS).longValue());
    assertEquals(517L, overview.getRunningRollup().getTotalLeafCount());
    // The rollup aggregates the fixture's RUNNING count with the one added above.
    ref.setCnt(60);
    assertEquals(ref, overview.getRunningRollup().getOverview().get(StepInstance.Status.RUNNING));
    assertTrue(overview.statusExistInIterations(WorkflowInstance.Status.SUCCEEDED));
    assertFalse(overview.statusExistInIterations(WorkflowInstance.Status.FAILED));
    assertEquals(10116, overview.getRunningStatsCount(true));
    assertEquals(333, overview.getRunningStatsCount(false));
}
|
/** Boots the container by starting its components (see startComponents for the policy). */
@Override
public void bootup() {
    startComponents();
}
|
// Verifies bootup semantics: plain Startable components are NOT started at bootup
// (lazy), while @EagerStart-annotated ones are started exactly once. Nothing is stopped.
@Test
public void bootup_starts_components_lazily_unless_they_are_annotated_with_EagerStart() {
    DefaultStartable defaultStartable = new DefaultStartable();
    EagerStartable eagerStartable = new EagerStartable();
    TaskContainerImpl ceContainer = new TaskContainerImpl(parent, container -> {
        container.add(defaultStartable);
        container.add(eagerStartable);
    });
    ceContainer.bootup();
    assertThat(defaultStartable.startCalls).isZero();
    assertThat(defaultStartable.stopCalls).isZero();
    assertThat(eagerStartable.startCalls).isOne();
    assertThat(eagerStartable.stopCalls).isZero();
}
|
/**
 * Sets the protocols to exclude from the enabled set.
 * NOTE(review): the format appears to be a delimiter-separated list based on the
 * accompanying test — confirm the expected separator in the class docs.
 */
public void setExcludedProtocols(String protocols) {
    this.excludedProtocols = protocols;
}
|
// Verifies that excluding "A" from supported protocols {A, B} leaves only B enabled.
@Test
public void testSetExcludedProtocols() throws Exception {
    configurable.setSupportedProtocols(new String[] { "A", "B" });
    configuration.setExcludedProtocols("A");
    configuration.configure(configurable);
    assertTrue(Arrays.equals(new String[] { "B" },
        configurable.getEnabledProtocols()));
}
|
/**
 * Returns the canonical instance equal to {@code sample}, installing {@code sample}
 * itself when no equal element has been interned yet.
 */
@Override public E intern(E sample) {
    // Fast path: an equal element is already interned.
    E canonical = map.get(sample);
    if (canonical != null) {
        return canonical;
    }
    // Slow path: try to publish this sample; if another thread won the race,
    // putIfAbsent returns that thread's element and we adopt it instead.
    E raced = map.putIfAbsent(sample, sample);
    if (raced == null) {
        return sample;
    }
    return raced;
}
|
// Verifies weak-interner replacement: after the canonical instance is garbage
// collected, interning an equal instance installs and returns the new one.
@Test
public void intern_weak_replace() {
    var canonical = new Int(1);
    var other = new Int(1);
    Interner<Int> interner = Interner.newWeakInterner();
    assertThat(interner.intern(canonical)).isSameInstanceAs(canonical);
    // Drop the only strong reference and wait for the weak ref to clear.
    var signal = new WeakReference<>(canonical);
    canonical = null;
    GcFinalization.awaitClear(signal);
    assertThat(interner.intern(other)).isSameInstanceAs(other);
    checkSize(interner, 1);
    checkState(interner);
}
|
/** Sends {@code message} with the given headers to the default response topic. */
public void sendMessage(M message, MessageHeaders headers) {
    this.sendMessage(responseTopic, message, headers);
}
|
// Verifies that custom MessageHeaders are copied onto the outgoing JMS message as
// string properties by the captured MessagePostProcessor. Four properties are set in
// total (two custom headers plus, apparently, two standard ones); the custom headers
// land at captured indices 0 and 1 — the iteration order of the header map, not
// insertion order, determines which comes first.
@Test
public void testMessagePostProcessingHeaders() throws JMSException {
    AfnemersberichtAanDGL afnemersberichtAanDGL = new AfnemersberichtAanDGL();
    Map<String, Object> headers = new HashMap<>();
    headers.put("header1", "header1 value");
    headers.put("header2", "header2 value");
    MessageHeaders messageHeaders = new MessageHeaders(headers);
    afnemersberichtAanDGLSender.sendMessage(afnemersberichtAanDGL, messageHeaders);
    ArgumentCaptor<MessagePostProcessor> messagePostProcessorCaptor = ArgumentCaptor.forClass(MessagePostProcessor.class);
    Mockito.verify(jmsTemplateMock).convertAndSend(nullable(String.class), nullable(String.class), messagePostProcessorCaptor.capture());
    MessagePostProcessor value = messagePostProcessorCaptor.getValue();
    javax.jms.Message mockMessage = Mockito.mock(javax.jms.Message.class);
    value.postProcessMessage(mockMessage);
    ArgumentCaptor<String> keyCaptor = ArgumentCaptor.forClass(String.class);
    ArgumentCaptor<String> valueCaptor = ArgumentCaptor.forClass(String.class);
    Mockito.verify(mockMessage, times(4)).setStringProperty(keyCaptor.capture(), valueCaptor.capture());
    assertThat(keyCaptor.getAllValues().get(1), is("header1"));
    assertThat(valueCaptor.getAllValues().get(1), is("header1 value"));
    assertThat(keyCaptor.getAllValues().get(0), is("header2"));
    assertThat(valueCaptor.getAllValues().get(0), is("header2 value"));
}
|
/**
 * Parses a locale identifier such as "de", "en_US", "en-US", or "en_US.UTF-8"
 * into a {@link Locale}, tolerating BCP-47 dashes and trailing charset suffixes.
 */
public static Locale getLocale(String param) {
    // Strip any charset suffix such as ".UTF-8".
    final int dot = param.indexOf('.');
    String tag = (dot > 0) ? param.substring(0, dot) : param;
    // Normalize BCP-47 style separators ("en-US") to Java style ("en_US").
    tag = tag.replace("-", "_");
    final int sep = tag.indexOf("_");
    if (sep < 0) {
        // Language only, e.g. "de".
        return new Locale(tag);
    }
    // Language plus country, split at the first separator, e.g. "en" + "US".
    return new Locale(tag.substring(0, sep), tag.substring(sep + 1));
}
|
// Verifies getLocale handles plain languages, underscore and dash separators,
// and charset suffixes (".UTF-8").
@Test
public void testGetLocale() {
    assertEquals(Locale.GERMAN, Helper.getLocale("de"));
    assertEquals(Locale.GERMANY, Helper.getLocale("de_DE"));
    assertEquals(Locale.GERMANY, Helper.getLocale("de-DE"));
    assertEquals(Locale.ENGLISH, Helper.getLocale("en"));
    assertEquals(Locale.US, Helper.getLocale("en_US"));
    assertEquals(Locale.US, Helper.getLocale("en_US.UTF-8"));
}
|
/**
 * Computes the gap between two consecutive fire times of this job's schedule,
 * anchored at a fixed instant so the result is deterministic.
 */
public Duration durationBetweenRecurringJobInstances() {
    final Instant anchor = Instant.EPOCH.plusSeconds(3600);
    final Schedule schedule = ScheduleExpressionType.getSchedule(scheduleExpression);
    // Two successive occurrences after the anchor; their distance is the interval.
    final Instant firstRun = schedule.next(anchor, anchor, ZoneOffset.UTC);
    final Instant secondRun = schedule.next(anchor, firstRun, ZoneOffset.UTC);
    return between(firstRun, secondRun);
}
|
// Verifies the computed interval for every-second and every-five-seconds cron jobs.
@Test
void testDurationBetweenRecurringJobInstancesForCronJob() {
    RecurringJob recurringJob1 = aDefaultRecurringJob().withCronExpression("* * * * * *").build();
    assertThat(recurringJob1.durationBetweenRecurringJobInstances()).isEqualTo(ofSeconds(1));
    RecurringJob recurringJob2 = aDefaultRecurringJob().withCronExpression("*/5 * * * * *").build();
    assertThat(recurringJob2.durationBetweenRecurringJobInstances()).isEqualTo(ofSeconds(5));
}
|
/** CLI entry point: runs SqlLine against stdout/stderr with the given arguments. */
public static void main(String[] args) throws IOException {
    runSqlLine(args, null, System.out, System.err);
}
|
// Smoke test: main must not throw when invoked with no arguments.
@Test
public void testSqlLine_emptyArgs() throws Exception {
    BeamSqlLine.main(new String[] {});
}
|
/**
 * Drives one round of the state-updater handshake: hands new tasks to the updater,
 * surfaces its failures, and drains restored active tasks.
 *
 * @param now            current time in milliseconds, forwarded to restoration handling
 * @param offsetResetter invoked with partitions whose offsets must be reset
 * @return true once no active task is still restoring and no task is pending init
 */
public boolean checkStateUpdater(final long now,
                                 final java.util.function.Consumer<Set<TopicPartition>> offsetResetter) {
    addTasksToStateUpdater();
    if (stateUpdater.hasExceptionsAndFailedTasks()) {
        handleExceptionsFromStateUpdater();
    }
    if (stateUpdater.restoresActiveTasks()) {
        handleRestoredTasksFromStateUpdater(now, offsetResetter);
    }
    // Re-query restoresActiveTasks(): handling restored tasks above may have
    // changed the updater's state since the earlier check.
    return !stateUpdater.restoresActiveTasks()
        && !tasks.hasPendingTasksToInit();
}
|
// Verifies that a StreamsException reported by the state updater is rethrown from
// checkStateUpdater with the failing task's id attached.
@Test
public void shouldRethrowStreamsExceptionFromStateUpdater() {
    final StreamTask statefulTask = statefulTask(taskId00, taskId00ChangelogPartitions)
        .inState(State.RESTORING)
        .withInputPartitions(taskId00Partitions).build();
    final StreamsException exception = new StreamsException("boom!");
    final ExceptionAndTask exceptionAndTasks = new ExceptionAndTask(exception, statefulTask);
    when(stateUpdater.hasExceptionsAndFailedTasks()).thenReturn(true);
    when(stateUpdater.drainExceptionsAndFailedTasks()).thenReturn(Collections.singletonList(exceptionAndTasks));
    final TasksRegistry tasks = mock(TasksRegistry.class);
    final TaskManager taskManager = setUpTaskManager(ProcessingMode.AT_LEAST_ONCE, tasks, true);
    final StreamsException thrown = assertThrows(
        StreamsException.class,
        () -> taskManager.checkStateUpdater(time.milliseconds(), noOpResetter)
    );
    // The original exception instance is rethrown, enriched with the task id.
    assertEquals(exception, thrown);
    assertEquals(statefulTask.id(), thrown.taskId().get());
}
|
/**
 * Replaces elements of {@code buffer} starting at {@code index} with {@code values}.
 *
 * Behavior by case:
 * - empty {@code values}: the buffer is returned unchanged;
 * - empty/null {@code buffer}: {@code values} is returned;
 * - negative index: {@code values} is prepended;
 * - index past the end: {@code values} is appended;
 * - replacement fits: done in place, the same buffer is returned;
 * - replacement overruns the end: a new, longer array is allocated.
 *
 * @param buffer the original array (may be modified in place and returned)
 * @param index  position at which replacement starts
 * @param values replacement elements
 * @return the resulting array — not necessarily the same instance as {@code buffer}
 */
@SuppressWarnings({"unchecked"})
public static <T> T[] replace(T[] buffer, int index, T... values) {
    if (isEmpty(values)) {
        return buffer;
    }
    if (isEmpty(buffer)) {
        return values;
    }
    if (index < 0) {
        // Negative index: insert at the head.
        return insert(buffer, 0, values);
    }
    if (index >= buffer.length) {
        // Index beyond the end: append at the tail.
        return append(buffer, values);
    }
    if (buffer.length >= values.length + index) {
        System.arraycopy(values, 0, buffer, index, values.length);
        return buffer;
    }
    // Replacement extends past the original array: allocate a new one.
    int newArrayLength = index + values.length;
    final T[] result = newArray(buffer.getClass().getComponentType(), newArrayLength);
    System.arraycopy(buffer, 0, result, 0, index);
    System.arraycopy(values, 0, result, index, values.length);
    return result;
}
|
// Exercises every branch of ArrayUtil.replace: negative index (prepend), in-place
// replacement, overrunning replacement (new array), append at/past the end, and
// the null-argument shortcuts.
@Test
public void replaceTest() {
    String[] a = {"1", "2", "3", "4"};
    String[] b = {"a", "b", "c"};
    // Negative index (-1): prepend, returning a new array b + a.
    String[] result = ArrayUtil.replace(a, -1, b);
    assertArrayEquals(new String[]{"a", "b", "c", "1", "2", "3", "4"}, result);
    // Replace starting at index 0: fits in place, returns a.
    result = ArrayUtil.replace(ArrayUtil.clone(a), 0, b);
    assertArrayEquals(new String[]{"a", "b", "c", "4"}, result);
    // Replace starting at index 1 (from element "2"): still fits in place.
    result = ArrayUtil.replace(ArrayUtil.clone(a), 1, b);
    assertArrayEquals(new String[]{"1", "a", "b", "c"}, result);
    // Replace starting at index 2: overruns the end, a new array is allocated.
    result = ArrayUtil.replace(ArrayUtil.clone(a), 2, b);
    assertArrayEquals(new String[]{"1", "2", "a", "b", "c"}, result);
    // Replace starting at index 3: overruns the end as well.
    result = ArrayUtil.replace(ArrayUtil.clone(a), 3, b);
    assertArrayEquals(new String[]{"1", "2", "3", "a", "b", "c"}, result);
    // Index 4 equals the length: the arrays are concatenated.
    result = ArrayUtil.replace(ArrayUtil.clone(a), 4, b);
    assertArrayEquals(new String[]{"1", "2", "3", "4", "a", "b", "c"}, result);
    // Index past the length (5): also concatenation.
    result = ArrayUtil.replace(ArrayUtil.clone(a), 5, b);
    assertArrayEquals(new String[]{"1", "2", "3", "4", "a", "b", "c"}, result);
    String[] e = null;
    String[] f = {"a", "b", "c"};
    // Null buffer: the replacement array f is returned.
    result = ArrayUtil.replace(e, -1, f);
    assertArrayEquals(f, result);
    String[] g = {"a", "b", "c"};
    String[] h = null;
    // Null values: the original buffer g is returned.
    result = ArrayUtil.replace(g, 0, h);
    assertArrayEquals(g, result);
}
|
/**
 * Enforces fine-grained authorization for a REST endpoint.
 *
 * Endpoints annotated with {@code @Authorize} are checked per target: TABLE targets
 * resolve the table name from the request's path/query parameter named by
 * {@code paramName}; CLUSTER targets need no id. Unannotated endpoints fall back to
 * {@code defaultAccess}. Denials surface as 403; configuration problems and access
 * check failures as 500.
 *
 * @throws WebApplicationException 403 when access is denied, 500 on misconfiguration
 *         or when the access check itself fails
 */
public static void validateFineGrainedAuth(Method endpointMethod, UriInfo uriInfo, HttpHeaders httpHeaders,
                                           FineGrainedAccessControl accessControl) {
    if (endpointMethod.isAnnotationPresent(Authorize.class)) {
        final Authorize auth = endpointMethod.getAnnotation(Authorize.class);
        String targetId = null;
        // Message to use in the access denied exception
        String accessDeniedMsg;
        if (auth.targetType() == TargetType.TABLE) {
            // paramName is mandatory for table level authorization
            if (StringUtils.isEmpty(auth.paramName())) {
                throw new WebApplicationException(
                    "paramName not found for table level authorization in API: " + uriInfo.getRequestUri(),
                    Response.Status.INTERNAL_SERVER_ERROR);
            }
            // find the paramName in the path or query params
            targetId = findParam(auth.paramName(), uriInfo.getPathParameters(), uriInfo.getQueryParameters());
            if (StringUtils.isEmpty(targetId)) {
                throw new WebApplicationException(
                    "Could not find paramName " + auth.paramName() + " in path or query params of the API: "
                        + uriInfo.getRequestUri(), Response.Status.INTERNAL_SERVER_ERROR);
            }
            // Table name may contain type, hence get raw table name for checking access
            targetId = DatabaseUtils.translateTableName(TableNameBuilder.extractRawTableName(targetId), httpHeaders);
            accessDeniedMsg = "Access denied to " + auth.action() + " for table: " + targetId;
        } else if (auth.targetType() == TargetType.CLUSTER) {
            accessDeniedMsg = "Access denied to " + auth.action() + " in the cluster";
        } else {
            throw new WebApplicationException(
                "Unsupported targetType: " + auth.targetType() + " in API: " + uriInfo.getRequestUri(),
                Response.Status.INTERNAL_SERVER_ERROR);
        }
        boolean hasAccess;
        try {
            hasAccess = accessControl.hasAccess(httpHeaders, auth.targetType(), targetId, auth.action());
        } catch (Throwable t) {
            // catch and log Throwable for NoSuchMethodError which can happen when there are classpath conflicts
            // otherwise, grizzly will return a 500 without any logs or indication of what failed
            String errorMsg = String.format("Failed to check for access for target type %s and target ID %s with action %s",
                auth.targetType(), targetId, auth.action());
            LOGGER.error(errorMsg, t);
            throw new WebApplicationException(errorMsg, t, Response.Status.INTERNAL_SERVER_ERROR);
        }
        // Check for access now
        if (!hasAccess) {
            throw new WebApplicationException(accessDeniedMsg, Response.Status.FORBIDDEN);
        }
    } else if (!accessControl.defaultAccess(httpHeaders)) {
        throw new WebApplicationException("Access denied - default authorization failed", Response.Status.FORBIDDEN);
    }
}
|
// Verifies that an Error (NoSuchMethodError, e.g. from classpath conflicts) thrown by
// the access check is caught and converted into a 500 WebApplicationException with a
// descriptive message, instead of propagating silently.
@Test
public void testValidateFineGrainedAuthWithNoSuchMethodError() {
    FineGrainedAccessControl ac = Mockito.mock(FineGrainedAccessControl.class);
    Mockito.when(ac.hasAccess(Mockito.any(HttpHeaders.class), Mockito.any(), Mockito.any(), Mockito.any()))
        .thenThrow(new NoSuchMethodError("Method not found"));
    UriInfo mockUriInfo = Mockito.mock(UriInfo.class);
    HttpHeaders mockHttpHeaders = Mockito.mock(HttpHeaders.class);
    try {
        FineGrainedAuthUtils.validateFineGrainedAuth(getAnnotatedMethod(), mockUriInfo, mockHttpHeaders, ac);
        Assert.fail("Expected WebApplicationException");
    } catch (WebApplicationException e) {
        Assert.assertTrue(e.getMessage().contains("Failed to check for access"));
        Assert.assertEquals(e.getResponse().getStatus(), Response.Status.INTERNAL_SERVER_ERROR.getStatusCode());
    }
}
|
/** Returns the settings key controlling whether the CPE analyzer is enabled. */
@Override
protected String getAnalyzerEnabledSettingKey() {
    return Settings.KEYS.ANALYZER_CPE_ENABLED;
}
|
// Verifies the CPE analyzer reports its dedicated enabled-setting key.
@Test
public void testGetAnalyzerEnabledSettingKey() {
    CPEAnalyzer instance = new CPEAnalyzer();
    String expResult = Settings.KEYS.ANALYZER_CPE_ENABLED;
    String result = instance.getAnalyzerEnabledSettingKey();
    assertEquals(expResult, result);
}
|
/**
 * Validates a single quota key/value pair: the key must be a known quota config,
 * the value must be positive, and integral quota types must fit their numeric range.
 *
 * @return {@link ApiError#NONE} when valid, otherwise an INVALID_REQUEST error
 */
static ApiError validateQuotaKeyValue(
    Map<String, ConfigDef.ConfigKey> validKeys,
    String key,
    double value
) {
    // Ensure we have an allowed quota key
    ConfigDef.ConfigKey configKey = validKeys.get(key);
    if (configKey == null) {
        return new ApiError(Errors.INVALID_REQUEST, "Invalid configuration key " + key);
    }
    if (value <= 0.0) {
        return new ApiError(Errors.INVALID_REQUEST, "Quota " + key + " must be greater than 0");
    }
    // Ensure the quota value is valid
    switch (configKey.type()) {
        case DOUBLE:
            return ApiError.NONE;
        case SHORT:
            if (value > Short.MAX_VALUE) {
                return new ApiError(Errors.INVALID_REQUEST,
                    "Proposed value for " + key + " is too large for a SHORT.");
            }
            return getErrorForIntegralQuotaValue(value, key);
        case INT:
            if (value > Integer.MAX_VALUE) {
                return new ApiError(Errors.INVALID_REQUEST,
                    "Proposed value for " + key + " is too large for an INT.");
            }
            return getErrorForIntegralQuotaValue(value, key);
        case LONG: {
            // Fix: (double) Long.MAX_VALUE rounds UP to 2^63, so the former check
            // "value > Long.MAX_VALUE" accepted a double of exactly 2^63, which does
            // not fit in a long. Reject anything at or above 2^63 instead.
            if (value >= 0x1p63) {
                return new ApiError(Errors.INVALID_REQUEST,
                    "Proposed value for " + key + " is too large for a LONG.");
            }
            return getErrorForIntegralQuotaValue(value, key);
        }
        default:
            return new ApiError(Errors.UNKNOWN_SERVER_ERROR,
                "Unexpected config type " + configKey.type() + " should be Long or Double");
    }
}
|
// Verifies an unknown quota key is rejected with INVALID_REQUEST and the exact message.
@Test
public void testValidateQuotaKeyValueForUnknownQuota() {
    assertEquals(new ApiError(Errors.INVALID_REQUEST, "Invalid configuration key foobar"),
        ClientQuotaControlManager.validateQuotaKeyValue(
            VALID_CLIENT_ID_QUOTA_KEYS, "foobar", 1.0));
}
|
/**
 * Creates an XMLInputFactory hardened against XXE by disabling support for
 * external entities, so untrusted XML cannot reference local files or URLs.
 */
public static XMLInputFactory createSecureXMLInputFactory() {
    XMLInputFactory factory = XMLInputFactory.newInstance();
    factory.setProperty( IS_SUPPORTING_EXTERNAL_ENTITIES, Boolean.FALSE );
    return factory;
}
|
// Verifies the produced factory has external-entity support switched off.
@Test
public void secureXmlInputStream() {
    XMLInputFactory factory = XMLParserFactoryProducer.createSecureXMLInputFactory();
    assertEquals( false, factory.getProperty( IS_SUPPORTING_EXTERNAL_ENTITIES ) );
}
|
/**
 * Resolves the subscribers interested in the given reason: lists enabled
 * subscriptions for the reason type, matches each either by subject (when the
 * subscription declares one) or by its filter expression, and de-duplicates
 * subscribers by name.
 */
@Override
public Flux<Subscriber> resolve(Reason reason) {
    var reasonType = reason.getSpec().getReasonType();
    return subscriptionService.listByPerPage(reasonType)
        .filter(this::isNotDisabled)
        .filter(subscription -> {
            var interestReason = subscription.getSpec().getReason();
            // Subject match takes precedence; otherwise fall back to the expression.
            if (hasSubject(interestReason)) {
                return subjectMatch(subscription, reason.getSpec().getSubject());
            } else if (StringUtils.isNotBlank(interestReason.getExpression())) {
                return expressionMatch(subscription.getMetadata().getName(),
                    interestReason.getExpression(), reason);
            }
            // Neither subject nor expression: the subscription matches nothing.
            return false;
        })
        .map(subscription -> {
            var id = UserIdentity.of(subscription.getSpec().getSubscriber().getName());
            return new Subscriber(id, subscription.getMetadata().getName());
        })
        // The same subscriber may hold several matching subscriptions; emit once.
        .distinct(Subscriber::name);
}
|
// Verifies de-duplication: one subscriber with two matching subscriptions
// (one subject-matched, one expression-matched) is emitted exactly once.
@Test
void distinct() {
    // same subscriber to different subscriptions
    var subscriber = new Subscription.Subscriber();
    subscriber.setName("test");
    final var subscription1 = createSubscription(subscriber);
    subscription1.getMetadata().setName("sub-1");
    final var subscription2 = createSubscription(subscriber);
    subscription2.getMetadata().setName("sub-2");
    // sub-2 matches through its expression instead of a subject.
    subscription2.getSpec().getReason().setSubject(null);
    subscription2.getSpec().getReason().setExpression("props.owner == 'guqing'");
    when(subscriptionService.listByPerPage(anyString()))
        .thenReturn(Flux.just(subscription1, subscription2));
    var reason = new Reason();
    reason.setSpec(new Reason.Spec());
    reason.getSpec().setReasonType("new-comment-on-post");
    reason.getSpec().setSubject(new Reason.Subject());
    reason.getSpec().getSubject().setApiVersion("content.halo.run/v1alpha1");
    reason.getSpec().getSubject().setKind("Post");
    reason.getSpec().getSubject().setName("fake-post");
    var reasonAttributes = new ReasonAttributes();
    reasonAttributes.put("owner", "guqing");
    reason.getSpec().setAttributes(reasonAttributes);
    recipientResolver.resolve(reason)
        .as(StepVerifier::create)
        .expectNextCount(1)
        .verifyComplete();
    verify(subscriptionService).listByPerPage(anyString());
}
|
/**
 * Assigns partitions to group members, dispatching to the homogeneous or
 * heterogeneous strategy based on the group's subscription type. An empty
 * member set yields an empty assignment.
 */
@Override
public GroupAssignment assign(
    GroupSpec groupSpec,
    SubscribedTopicDescriber subscribedTopicDescriber
) throws PartitionAssignorException {
    if (groupSpec.memberIds().isEmpty()) {
        return new GroupAssignment(Collections.emptyMap());
    }
    return groupSpec.subscriptionType().equals(HOMOGENEOUS)
        ? assignHomogenous(groupSpec, subscribedTopicDescriber)
        : assignHeterogeneous(groupSpec, subscribedTopicDescriber);
}
|
// Verifies that a member subscribed to no topics receives an empty assignment,
// even though topic metadata exists in the cluster.
@Test
public void testAssignWithNoSubscribedTopic() {
    SubscribedTopicDescriberImpl subscribedTopicMetadata = new SubscribedTopicDescriberImpl(
        Collections.singletonMap(
            TOPIC_1_UUID,
            new TopicMetadata(
                TOPIC_1_UUID,
                TOPIC_1_NAME,
                3,
                Collections.emptyMap()
            )
        )
    );
    // A single member with an empty subscription set.
    Map<String, MemberSubscriptionAndAssignmentImpl> members = Collections.singletonMap(
        MEMBER_A,
        new MemberSubscriptionAndAssignmentImpl(
            Optional.empty(),
            Optional.empty(),
            Collections.emptySet(),
            Assignment.EMPTY
        )
    );
    GroupSpec groupSpec = new GroupSpecImpl(
        members,
        HOMOGENEOUS,
        Collections.emptyMap()
    );
    GroupAssignment groupAssignment = assignor.assign(
        groupSpec,
        subscribedTopicMetadata
    );
    assertEquals(Collections.emptyMap(), groupAssignment.members());
}
|
/**
 * Parses a feature identifier into a URI, accepting classpath URIs, plain file
 * paths (with either path separator), Windows drive-letter paths, and general URIs.
 * The guard order matters: separator normalization and the Windows drive check must
 * run before the generic URI probe, or "C:/..." would be mistaken for a scheme.
 *
 * @throws IllegalArgumentException if the identifier is empty
 */
public static URI parse(String featureIdentifier) {
    requireNonNull(featureIdentifier, "featureIdentifier may not be null");
    if (featureIdentifier.isEmpty()) {
        throw new IllegalArgumentException("featureIdentifier may not be empty");
    }
    // Legacy from the Cucumber Eclipse plugin
    // Older versions of Cucumber allowed it.
    if (CLASSPATH_SCHEME_PREFIX.equals(featureIdentifier)) {
        return rootPackageUri();
    }
    if (nonStandardPathSeparatorInUse(featureIdentifier)) {
        String standardized = replaceNonStandardPathSeparator(featureIdentifier);
        return parseAssumeFileScheme(standardized);
    }
    if (isWindowsOS() && pathContainsWindowsDrivePattern(featureIdentifier)) {
        return parseAssumeFileScheme(featureIdentifier);
    }
    if (probablyURI(featureIdentifier)) {
        return parseProbableURI(featureIdentifier);
    }
    return parseAssumeFileScheme(featureIdentifier);
}
|
// Verifies the legacy "classpath:/" identifier resolves to the root package URI.
@Test
void can_parse_root_package() {
    URI uri = FeaturePath.parse("classpath:/");
    assertAll(
        () -> assertThat(uri.getScheme(), is("classpath")),
        () -> assertThat(uri.getSchemeSpecificPart(), is("/")));
}
|
/**
 * Convenience overload: resolves the preference key from a string resource id,
 * then delegates to the string-key variant.
 */
public Preference<String> getString(@StringRes int prefKey, @StringRes int defaultValue) {
    return getString(mResources.getString(prefKey), defaultValue);
}
|
// Verifies the configuration-version upgrade: legacy ordered quick-text keys are
// converted into per-addon "quick_text_<uuid>" entries plus an order key, while
// preserving the original ordering string.
@Test
public void testConvertQuickKey() {
    SharedPrefsHelper.setPrefsValue(
        "settings_key_ordered_active_quick_text_keys",
        "623e21f5-9200-4c0b-b4c7-9691129d7f1f,1057806d-4f6e-42aa-8dfd-eea57995c2ee");
    SharedPrefsHelper.setPrefsValue(RxSharedPrefs.CONFIGURATION_VERSION, 10);
    SharedPreferences preferences =
        PreferenceManager.getDefaultSharedPreferences(getApplicationContext());
    // Before the upgrade runs, no per-addon keys exist.
    Assert.assertFalse(preferences.contains("quick_text_1057806d-4f6e-42aa-8dfd-eea57995c2ee"));
    Assert.assertFalse(preferences.contains("quick_text_623e21f5-9200-4c0b-b4c7-9691129d7f1f"));
    // Constructing RxSharedPrefs triggers the conversion.
    new RxSharedPrefs(getApplicationContext(), this::testRestoreFunction);
    Assert.assertTrue(preferences.contains("quick_text_1057806d-4f6e-42aa-8dfd-eea57995c2ee"));
    Assert.assertTrue(preferences.contains("quick_text_623e21f5-9200-4c0b-b4c7-9691129d7f1f"));
    Assert.assertEquals(
        "623e21f5-9200-4c0b-b4c7-9691129d7f1f,1057806d-4f6e-42aa-8dfd-eea57995c2ee",
        preferences.getString("quick_text_AddOnsFactory_order_key", ""));
}
|
/**
 * Decodes a UTF-8 byte array into a String.
 *
 * @throws IOException if the bytes are not well-formed UTF-8 (e.g. truncated sequences)
 */
public static String fromBytes(byte[] bytes) throws IOException {
    DataInputBuffer in = new DataInputBuffer();
    in.reset(bytes, 0, bytes.length);
    // A UTF-8 string has at most as many chars as bytes; pre-size accordingly.
    StringBuilder sb = new StringBuilder(bytes.length);
    readChars(in, sb, bytes.length);
    return sb.toString();
}
|
// Verifies that a truncated multi-byte sequence is rejected with a
// UTFDataFormatException naming the offending bytes.
@Test
public void testInvalidUTF8Truncated() throws Exception {
    // Truncated CAT FACE character -- this is a 4-byte sequence, but we
    // only have the first three bytes.
    byte[] truncated = new byte[] {
        (byte)0xF0, (byte)0x9F, (byte)0x90 };
    try {
        UTF8.fromBytes(truncated);
        fail("did not throw an exception");
    } catch (UTFDataFormatException utfde) {
        GenericTestUtils.assertExceptionContains(
            "Truncated UTF8 at f09f90", utfde);
    }
}
|
/**
 * Returns the registered shutdown-aware plugins, reading the backing list under
 * the shared read lock so it is consistent with concurrent registrations.
 */
@Override
public Collection<ShutdownAwarePlugin> getShutdownAwarePluginList() {
    return mainLock.applyWithReadLock(shutdownAwarePluginList::getPlugins);
}
|
// Verifies a registered shutdown-aware plugin shows up in the returned list.
@Test
public void testGetShutdownAwarePluginList() {
    manager.register(new TestShutdownAwarePlugin());
    Assert.assertEquals(1, manager.getShutdownAwarePluginList().size());
}
|
/**
 * Converts a YAML user configuration into a {@code ShardingSphereUser}.
 * The YAML "user" field packs username and hostname together; it is unpacked first.
 *
 * @return the user, or null when the YAML section is absent
 */
@Override
public ShardingSphereUser swapToObject(final YamlUserConfiguration yamlConfig) {
    if (yamlConfig == null) {
        return null;
    }
    final Grantee grantee = convertYamlUserToGrantee(yamlConfig.getUser());
    final String username = grantee.getUsername();
    final String hostname = grantee.getHostname();
    return new ShardingSphereUser(username, yamlConfig.getPassword(), hostname, yamlConfig.getAuthenticationMethodName(), yamlConfig.isAdmin());
}
|
// Verifies "user@host" is split into username and hostname and the password is kept.
@Test
void assertSwapToObject() {
    YamlUserConfiguration user = new YamlUserConfiguration();
    user.setUser("foo_user@127.0.0.1");
    user.setPassword("foo_pwd");
    ShardingSphereUser actual = new YamlUserSwapper().swapToObject(user);
    assertNotNull(actual);
    assertThat(actual.getGrantee().getUsername(), is("foo_user"));
    assertThat(actual.getGrantee().getHostname(), is("127.0.0.1"));
    assertThat(actual.getPassword(), is("foo_pwd"));
}
|
/**
 * Opens the given jar as a file system and counts severe license issues:
 * missing/invalid NOTICE, missing/invalid LICENSE, stray license files outside
 * META-INF and files with incompatible licenses. Empty test jars are exempt.
 *
 * @param file path to the jar to check
 * @return the number of severe issues found (0 means clean)
 */
@VisibleForTesting
static int checkJar(Path file) throws Exception {
    final URI uri = file.toUri();
    final URI jarUri = new URI("jar:file", uri.getHost(), uri.getPath(), uri.getFragment());
    int severeIssues = 0;
    try (final FileSystem jarFs = FileSystems.newFileSystem(jarUri, Collections.emptyMap())) {
        final Path root = jarFs.getPath("/");
        if (isTestJarAndEmpty(file, root)) {
            return 0;
        }
        if (!noticeFileExistsAndIsValid(jarFs.getPath("META-INF", "NOTICE"), file)) {
            severeIssues++;
        }
        if (!licenseFileExistsAndIsValid(jarFs.getPath("META-INF", "LICENSE"), file)) {
            severeIssues++;
        }
        severeIssues += getNumLicenseFilesOutsideMetaInfDirectory(file, root);
        severeIssues += getFilesWithIncompatibleLicenses(file, root);
    }
    return severeIssues;
}
|
@Test
void testRejectedOnMissingNoticeFile(@TempDir Path tempDir) throws Exception {
    // A jar with a valid LICENSE but no NOTICE must be flagged with exactly
    // one severe issue.
    Path jar = createJar(tempDir, Entry.fileEntry(VALID_LICENSE_CONTENTS, VALID_LICENSE_PATH));
    assertThat(JarFileChecker.checkJar(jar)).isEqualTo(1);
}
|
/**
 * Returns the {@link ResourceMethod} this response envelope corresponds to.
 */
public abstract ResourceMethod getResourceMethod();
|
@Test(dataProvider = "envelopeResourceMethodDataProvider")
public void testEnvelopeResourceMethodType(RestLiResponseEnvelope responseEnvelope, ResourceMethod resourceMethod)
{
  // Every envelope must report the resource method it was built for.
  ResourceMethod actual = responseEnvelope.getResourceMethod();
  Assert.assertEquals(actual, resourceMethod);
}
|
/**
 * Resolves a schema by name: primitives first, then the fully-qualified name,
 * then the simple name; unknown names yield an unresolved placeholder.
 *
 * @param name simple or primitive type name
 * @param namespace namespace used to build the fully-qualified name
 * @return the resolved schema, or an unresolved placeholder schema
 */
public Schema find(String name, String namespace) {
  Schema.Type primitive = PRIMITIVES.get(name);
  if (primitive != null) {
    return Schema.create(primitive);
  }
  String fullName = fullName(name, namespace);
  // Prefer the fully-qualified lookup; fall back to the bare name.
  Schema found = getNamedSchema(fullName);
  if (found == null) {
    found = getNamedSchema(name);
  }
  return found == null ? SchemaResolver.unresolvedSchema(fullName) : found;
}
|
@Test
public void validateSchemaRetrievalBySimpleName() {
  // Looking up by (name, namespace) must return the very same schema instance.
  Schema found = fooBarBaz.find(fooRecord.getName(), fooRecord.getNamespace());
  assertSame(fooRecord, found);
}
|
@Override
boolean isCacheable() {
    // The null getter is never cached.
    return false;
}
|
@Test
public void test_isCacheable() {
    // The shared NULL_GETTER instance must never report itself as cacheable.
    assertFalse(NullGetter.NULL_GETTER.isCacheable());
}
|
/**
 * Builds the timestamp extraction policy for a query from the optional
 * TIMESTAMP column declared in the WITH clause.
 *
 * @param ksqlConfig config used to pick the default extractor when no column is set
 * @param schema the logical schema the timestamp column must belong to
 * @param timestampColumn the TIMESTAMP column (and optional format) from the WITH clause
 * @return the extraction policy matching the column's SQL type
 * @throws KsqlException if the column is missing from the schema, a STRING column
 *     lacks a timestamp format, a format is given for a non-STRING column, or the
 *     column type is not STRING, BIGINT or TIMESTAMP
 */
public static TimestampExtractionPolicy create(
    final KsqlConfig ksqlConfig,
    final LogicalSchema schema,
    final Optional<TimestampColumn> timestampColumn
) {
  // No explicit column: fall back to record metadata timestamps.
  if (!timestampColumn.isPresent()) {
    return new MetadataTimestampExtractionPolicy(getDefaultTimestampExtractor(ksqlConfig));
  }
  final ColumnName col = timestampColumn.get().getColumn();
  final Optional<String> timestampFormat = timestampColumn.get().getFormat();
  final Column column = schema.findColumn(col)
      .orElseThrow(() -> new KsqlException(
          "The TIMESTAMP column set in the WITH clause does not exist in the schema: '"
          + col.toString(FormatOptions.noEscape()) + "'"));
  final SqlBaseType tsColumnType = column.type().baseType();
  // STRING columns require an explicit parse format.
  if (tsColumnType == SqlBaseType.STRING) {
    final String format = timestampFormat.orElseThrow(() -> new KsqlException(
        "A String timestamp field has been specified without"
        + " also specifying the "
        + CommonCreateConfigs.TIMESTAMP_FORMAT_PROPERTY.toLowerCase()));
    return new StringTimestampExtractionPolicy(col, format);
  }
  // A format only makes sense for STRING columns; reject it for anything else.
  // (This check must stay after the STRING branch above.)
  if (timestampFormat.isPresent()) {
    throw new KsqlException("'" + CommonCreateConfigs.TIMESTAMP_FORMAT_PROPERTY
        + "' set in the WITH clause can only be used "
        + "when the timestamp column is of type STRING.");
  }
  if (tsColumnType == SqlBaseType.BIGINT) {
    return new LongColumnTimestampExtractionPolicy(col);
  }
  if (tsColumnType == SqlBaseType.TIMESTAMP) {
    return new TimestampColumnTimestampExtractionPolicy(col);
  }
  throw new KsqlException(
      "Timestamp column, " + col + ", should be LONG(INT64), TIMESTAMP,"
      + " or a String with a "
      + CommonCreateConfigs.TIMESTAMP_FORMAT_PROPERTY.toLowerCase()
      + " specified.");
}
|
@Test
public void shouldFailIfCantFindTimestampField() {
  // Given: a timestamp column name that is not part of the schema.
  TimestampColumn missingColumn = new TimestampColumn(ColumnName.of("whateva"), Optional.empty());

  // When/Then: policy creation must be rejected.
  assertThrows(
      KsqlException.class,
      () -> TimestampExtractionPolicyFactory.create(
          ksqlConfig,
          schemaBuilder2.build(),
          Optional.of(missingColumn)
      )
  );
}
|
/**
 * Removes the shard registered under the given id.
 *
 * @param shardId id of the shard to remove
 * @return {@code true} if a shard was registered under the id and has been
 *         removed, {@code false} if no such shard existed
 */
public boolean removeShardById(final int shardId) {
    // Map#remove returns the previous mapping (or null), so a single lookup
    // replaces the original containsKey + remove double lookup. Assumes the
    // map never stores null shards, which addNewShard-style registration implies.
    return shardMap.remove(shardId) != null;
}
|
@Test
void testRemoveShardById() {
    try {
        // Add one shard, remove it, then inspect the private map via reflection.
        shardManager.addNewShard(new Shard(1));
        boolean removed = shardManager.removeShardById(1);
        var shardMapField = ShardManager.class.getDeclaredField("shardMap");
        shardMapField.setAccessible(true);
        var internalMap = (Map<Integer, Shard>) shardMapField.get(shardManager);
        assertTrue(removed);
        assertEquals(0, internalMap.size());
    } catch (IllegalAccessException | NoSuchFieldException e) {
        fail("Fail to modify field access.");
    }
}
|
/**
 * Decodes a response body into the requested JSON type. 404/204 responses are
 * mapped to empty containers (or null for String); empty bodies decode to null.
 */
@Override
public Object decode(Response response, Type type) throws IOException, DecodeException {
  if (response.status() == 404 || response.status() == 204) {
    if (JSONObject.class.isAssignableFrom((Class<?>) type)) {
      return new JSONObject();
    }
    if (JSONArray.class.isAssignableFrom((Class<?>) type)) {
      return new JSONArray();
    }
    if (String.class.equals(type)) {
      return null;
    }
    throw new DecodeException(response.status(),
        format("%s is not a type supported by this decoder.", type), response.request());
  }
  if (response.body() == null) {
    return null;
  }
  try (Reader reader = response.body().asReader(response.charset())) {
    Reader bodyReader = reader.markSupported() ? reader : new BufferedReader(reader);
    // Peek one character to detect an empty body without consuming it.
    bodyReader.mark(1);
    if (bodyReader.read() == -1) {
      return null; // Empty body
    }
    bodyReader.reset();
    return decodeBody(response, type, bodyReader);
  } catch (JSONException jsonException) {
    // Unwrap IOExceptions the JSON library tunnelled through JSONException.
    if (jsonException.getCause() instanceof IOException) {
      throw (IOException) jsonException.getCause();
    }
    throw new DecodeException(response.status(), jsonException.getMessage(), response.request(),
        jsonException);
  }
}
|
@Test
void decodesExtendedArray() throws IOException {
  // A 200 response with a JSON array body must decode into a JSONArray
  // subtype equivalent to the expected fixture.
  String json = "[{\"a\":\"b\",\"c\":1},123]";
  Response response = Response.builder()
      .status(200)
      .reason("OK")
      .headers(Collections.emptyMap())
      .body(json, UTF_8)
      .request(request)
      .build();
  Object decoded = new JsonDecoder().decode(response, ExtendedJSONArray.class);
  assertThat(jsonArray.similar(decoded)).isTrue();
}
|
/**
 * Serializes the given config to XML and writes it to {@code output}.
 * Merged configurations (non-local origin) are rejected; unless skipped, the
 * config is preprocessed and validated first, and the produced DOM is checked
 * against the XSD before being written.
 *
 * @param configForEdit the config to serialize; must have a local origin
 * @param output destination stream for the XML
 * @param skipPreprocessingAndValidation when true, skips preprocess/validate
 * @throws GoConfigInvalidException if the config contains merged partials
 */
public void write(CruiseConfig configForEdit, OutputStream output, boolean skipPreprocessingAndValidation) throws Exception {
    LOGGER.debug("[Serializing Config] Starting to write. Validation skipped? {}", skipPreprocessingAndValidation);
    MagicalGoConfigXmlLoader loader = new MagicalGoConfigXmlLoader(configCache, registry);
    // Only locally-originated configs may be saved; merged partials are transient.
    if (!configForEdit.getOrigin().isLocal()) {
        throw new GoConfigInvalidException(configForEdit, "Attempted to save merged configuration with partials");
    }
    if (!skipPreprocessingAndValidation) {
        loader.preprocessAndValidate(configForEdit);
        LOGGER.debug("[Serializing Config] Done with cruise config validators.");
    }
    // Build the DOM, validate it, then stream it out.
    Document document = createEmptyCruiseConfigDocument();
    write(configForEdit, document.getRootElement(), configCache, registry);
    LOGGER.debug("[Serializing Config] XSD and DOM validation.");
    verifyXsdValid(document);
    MagicalGoConfigXmlLoader.validateDom(document.getRootElement(), registry);
    LOGGER.info("[Serializing Config] Generating config partial.");
    XmlUtils.writeXml(document, output);
    LOGGER.debug("[Serializing Config] Finished writing config partial.");
}
|
@Test
public void shouldNotThrowUpWhenTfsWorkspaceIsNotSpecified() {
    // A TFS material without a workspace name must still serialize cleanly.
    CruiseConfig cruiseConfig = GoConfigMother.configWithPipelines("tfs_pipeline");
    cruiseConfig.initializeServer();
    PipelineConfig pipeline = cruiseConfig.pipelineConfigByName(new CaseInsensitiveString("tfs_pipeline"));
    pipeline.materialConfigs().clear();
    pipeline.addMaterialConfig(tfs(new GoCipher(), new UrlArgument("http://tfs.com"), "username", "CORPORATE", "password", "$/project_path"));
    try {
        xmlWriter.write(cruiseConfig, output, false);
    } catch (Exception e) {
        fail("should not fail as workspace name is not mandatory anymore " + e);
    }
}
|
/**
 * Extracts the external web-service resources (URL, headers, parameters,
 * method and body) referenced by a single row passing through a REST step.
 *
 * @param step the REST step being inspected
 * @param rowMeta metadata describing the row layout
 * @param row the row values
 * @return the resources referenced by the row; empty if no URL is resolvable
 */
@Override
public Collection<IExternalResourceInfo> getResourcesFromRow( Rest step, RowMetaInterface rowMeta, Object[] row ) {
  Set<IExternalResourceInfo> resources = new HashSet<>();
  // Prefer the live step meta; fall back to the one attached to the step meta.
  RestMeta meta = (RestMeta) step.getStepMetaInterface();
  if ( meta == null ) {
    meta = (RestMeta) step.getStepMeta().getStepMetaInterface();
  }
  if ( meta != null ) {
    String url;
    String method;
    String body;
    try {
      // The URL either comes from a row field or is fixed in the step config.
      if ( meta.isUrlInField() ) {
        url = rowMeta.getString( row, meta.getUrlField(), null );
      } else {
        url = meta.getUrl();
      }
      if ( StringUtils.isNotEmpty( url ) ) {
        WebServiceResourceInfo resourceInfo = createResourceInfo( url, meta );
        // Header names are configured statically; their values come from row fields.
        if ( ArrayUtils.isNotEmpty( meta.getHeaderField() ) ) {
          for ( int i = 0; i < meta.getHeaderField().length; i++ ) {
            String field = meta.getHeaderField()[ i ];
            String label = meta.getHeaderName()[ i ];
            resourceInfo.addHeader( label, rowMeta.getString( row, field, null ) );
          }
        }
        // Same scheme for query/request parameters.
        if ( ArrayUtils.isNotEmpty( meta.getParameterField() ) ) {
          for ( int i = 0; i < meta.getParameterField().length; i++ ) {
            String field = meta.getParameterField()[ i ];
            String label = meta.getParameterName()[ i ];
            resourceInfo.addParameter( label, rowMeta.getString( row, field, null ) );
          }
        }
        // HTTP method may be dynamic (from a row field) rather than fixed.
        if ( meta.isDynamicMethod() ) {
          method = rowMeta.getString( row, meta.getMethodFieldName(), null );
          resourceInfo.setMethod( method );
        }
        if ( StringUtils.isNotEmpty( meta.getBodyField() ) ) {
          body = rowMeta.getString( row, meta.getBodyField(), null );
          resourceInfo.setBody( body );
        }
        resources.add( resourceInfo );
      }
    } catch ( KettleValueException e ) {
      // could not find a url on this row
      log.debug( e.getMessage(), e );
    }
  }
  return resources;
}
|
@Test
public void testGetResourcesFromRow_fieldsForMethodAndBody() throws Exception {
  // URL, method and body are all read from row fields; no headers/parameters.
  when( meta.isUrlInField() ).thenReturn( true );
  when( meta.getUrlField() ).thenReturn( "url" );
  when( meta.getHeaderField() ).thenReturn( null );
  when( meta.getParameterField() ).thenReturn( null );
  when( meta.isDynamicMethod() ).thenReturn( true );
  when( meta.getMethodFieldName() ).thenReturn( "method" );
  when( meta.getBodyField() ).thenReturn( "body" );
  when( rmi.getString( row, "method", null ) ).thenReturn( row[ 2 ].toString() );
  when( rmi.getString( row, "body", null ) ).thenReturn( row[ 2 ].toString() );

  Collection<IExternalResourceInfo> resources = consumer.getResourcesFromRow( step, rmi, row );
  assertEquals( 1, resources.size() );
  IExternalResourceInfo info = resources.iterator().next();
  assertEquals( row[ 0 ], info.getName() );
  assertNotNull( info.getAttributes() );
}
|
/**
 * Parses a FEEL range literal such as {@code "[1..10)"} into a {@link Range}.
 * Brackets select the boundary kind ('[' / ']' closed, '(' / ')' and the
 * FEEL-style reversed ']' / '[' open); the two endpoints are parsed and
 * evaluated as FEEL literals and must be of equivalent types.
 *
 * @param from the range literal; must be non-blank and well-formed
 * @return the parsed range, or an InvalidParametersEvent error result
 */
public FEELFnResult<Range> invoke(@ParameterName("from") String from) {
    if (from == null || from.isEmpty() || from.isBlank()) {
        return FEELFnResult.ofError(new InvalidParametersEvent(FEELEvent.Severity.ERROR, "from", "cannot be null"));
    }
    // Leading '(' or ']' means an open start; '[' means closed.
    Range.RangeBoundary startBoundary;
    if (from.startsWith("(") || from.startsWith("]")) {
        startBoundary = RangeBoundary.OPEN;
    } else if (from.startsWith("[")) {
        startBoundary = RangeBoundary.CLOSED;
    } else {
        return FEELFnResult.ofError(new InvalidParametersEvent(FEELEvent.Severity.ERROR, "from", "does not start with a valid character"));
    }
    // Trailing ')' or '[' means an open end; ']' means closed.
    Range.RangeBoundary endBoundary;
    if (from.endsWith(")") || from.endsWith("[")) {
        endBoundary = RangeBoundary.OPEN;
    } else if (from.endsWith("]")) {
        endBoundary = RangeBoundary.CLOSED;
    } else {
        return FEELFnResult.ofError(new InvalidParametersEvent(FEELEvent.Severity.ERROR, "from", "does not end with a valid character"));
    }
    // The two endpoints are separated by the ".." token.
    String[] split = from.split("\\.\\.");
    if (split.length != 2) {
        return FEELFnResult.ofError(new InvalidParametersEvent(FEELEvent.Severity.ERROR, "from", "does not include two literals separated by `..` two dots characters"));
    }
    // Strip the surrounding bracket character from each endpoint literal.
    String leftString = split[0].substring(1);
    String rightString = split[1].substring(0, split[1].length() - 1);
    if ((leftString.isEmpty() || leftString.isBlank()) && (rightString.isEmpty() || rightString.isBlank())) {
        return FEELFnResult.ofError(new InvalidParametersEvent(FEELEvent.Severity.ERROR, "from", "at least one endpoint must not be null"));
    }
    BaseNode leftNode = parse(leftString);
    if (!nodeIsAllowed(leftNode)) {
        return FEELFnResult.ofError(new InvalidParametersEvent(FEELEvent.Severity.ERROR, "from", "left endpoint is not a recognised valid literal"));
    }
    BaseNode rightNode = parse(rightString);
    if (!nodeIsAllowed(rightNode)) {
        return FEELFnResult.ofError(new InvalidParametersEvent(FEELEvent.Severity.ERROR, "from", "right endpoint is not a recognised valid literal"));
    }
    // NOTE(review): if evaluate() can return null and nodeValueIsAllowed(null)
    // is false, left.getClass()/right.getClass() below would NPE while building
    // the error message — confirm evaluate() never yields null here.
    Object left = leftNode.evaluate(getStubbed());
    if (!nodeValueIsAllowed(left)) {
        return FEELFnResult.ofError(new InvalidParametersEvent(FEELEvent.Severity.ERROR, "from", "left endpoint is not a valid value " + left.getClass()));
    }
    Object right = rightNode.evaluate(getStubbed());
    if (!nodeValueIsAllowed(right)) {
        return FEELFnResult.ofError(new InvalidParametersEvent(FEELEvent.Severity.ERROR, "from", "right endpoint is not a valid value " + right.getClass()));
    }
    if (!nodesReturnsSameType(left, right)) {
        return FEELFnResult.ofError(new InvalidParametersEvent(FEELEvent.Severity.ERROR, "from", "endpoints must be of equivalent types"));
    }
    return FEELFnResult.ofResult(new RangeImpl(startBoundary, (Comparable) left, (Comparable) right, endBoundary));
}
|
@Test
void invokeNull() {
    // null, blank, empty and bracket-only inputs must all produce parameter errors.
    for (String input : Arrays.asList(null, " ", "", "[..]")) {
        FunctionTestUtil.assertResultError(rangeFunction.invoke(input), InvalidParametersEvent.class, input);
    }
}
|
/**
 * Creates the {@link SegmentCommitter} used to commit a completed realtime
 * segment. The segment is uploaded directly to the deep store (PinotFS) when
 * serverUploadToDeepStore is enabled or a peer download scheme is configured;
 * otherwise it is uploaded to the controller.
 *
 * @param params segment completion request parameters
 * @param controllerVipUrl controller VIP used to build the commit upload URL
 * @return a split segment committer wired with the chosen uploader
 * @throws URISyntaxException if an upload URI cannot be built
 */
public SegmentCommitter createSegmentCommitter(SegmentCompletionProtocol.Request.Params params,
    String controllerVipUrl)
    throws URISyntaxException {
  boolean uploadToFs = _streamConfig.isServerUploadToDeepStore();
  String peerSegmentDownloadScheme = _tableConfig.getValidationConfig().getPeerSegmentDownloadScheme();
  String segmentStoreUri = _indexLoadingConfig.getSegmentStoreURI();
  SegmentUploader segmentUploader;
  if (uploadToFs || peerSegmentDownloadScheme != null) {
    // TODO: peer scheme non-null check exists for backwards compatibility. remove check once users have migrated
    segmentUploader = new PinotFSSegmentUploader(segmentStoreUri,
        ServerSegmentCompletionProtocolHandler.getSegmentUploadRequestTimeoutMs(), _serverMetrics);
  } else {
    // Default path: upload the completed segment to the controller.
    segmentUploader = new Server2ControllerSegmentUploader(_logger,
        _protocolHandler.getFileUploadDownloadClient(),
        _protocolHandler.getSegmentCommitUploadURL(params, controllerVipUrl), params.getSegmentName(),
        ServerSegmentCompletionProtocolHandler.getSegmentUploadRequestTimeoutMs(), _serverMetrics,
        _protocolHandler.getAuthProvider(), _tableConfig.getTableName());
  }
  return new SplitSegmentCommitter(_logger, _protocolHandler, params, segmentUploader, peerSegmentDownloadScheme);
}
|
// Verifies that the factory picks the PinotFS (deep-store) uploader in both
// trigger cases: serverUploadToDeepStore=true, and a non-null peer segment
// download scheme with serverUploadToDeepStore=false.
@Test(description = "use upload to deepstore when either serverUploadToDeepStore is set or peer segment download "
    + "scheme is non-null")
public void testUploadToDeepStoreConfig()
    throws URISyntaxException {
  ServerSegmentCompletionProtocolHandler protocolHandler =
      new ServerSegmentCompletionProtocolHandler(Mockito.mock(ServerMetrics.class), "test_REALTIME");
  String controllerVipUrl = "http://localhost:1234";
  SegmentCompletionProtocol.Request.Params requestParams = new SegmentCompletionProtocol.Request.Params();

  // Case 1: no peer segment download scheme, serverUploadToDeepStore = true.
  Map<String, String> streamConfigMap = new HashMap<>(getMinimumStreamConfigMap());
  streamConfigMap.put(StreamConfigProperties.SERVER_UPLOAD_TO_DEEPSTORE, "true");
  TableConfig config = createRealtimeTableConfig("testDeepStoreConfig", streamConfigMap).build();
  IndexLoadingConfig indexLoadingConfig = Mockito.mock(IndexLoadingConfig.class);
  Mockito.when(indexLoadingConfig.getSegmentStoreURI()).thenReturn("file:///path/to/segment/store.txt");
  SegmentCommitterFactory factory = new SegmentCommitterFactory(Mockito.mock(Logger.class), protocolHandler, config,
      indexLoadingConfig, Mockito.mock(ServerMetrics.class));
  SegmentCommitter committer = factory.createSegmentCommitter(requestParams, controllerVipUrl);
  Assert.assertNotNull(committer);
  Assert.assertTrue(committer instanceof SplitSegmentCommitter);
  Assert.assertTrue(((SplitSegmentCommitter) committer).getSegmentUploader() instanceof PinotFSSegmentUploader);

  // Case 2: peer segment download scheme is set, serverUploadToDeepStore = false
  // (kept for backwards compatibility).
  Map<String, String> streamConfigMap1 = new HashMap<>(getMinimumStreamConfigMap());
  streamConfigMap1.put(StreamConfigProperties.SERVER_UPLOAD_TO_DEEPSTORE, "false");
  TableConfig config1 = createRealtimeTableConfig("testDeepStoreConfig", streamConfigMap1)
      .setPeerSegmentDownloadScheme("http")
      .build();
  factory = new SegmentCommitterFactory(Mockito.mock(Logger.class), protocolHandler, config1,
      indexLoadingConfig, Mockito.mock(ServerMetrics.class));
  committer = factory.createSegmentCommitter(requestParams, controllerVipUrl);
  Assert.assertNotNull(committer);
  Assert.assertTrue(committer instanceof SplitSegmentCommitter);
  Assert.assertTrue(((SplitSegmentCommitter) committer).getSegmentUploader() instanceof PinotFSSegmentUploader);
}
|
/**
 * Entries are equal when they are of the same class and share the same key;
 * the value does not participate in equality.
 */
@Override
public boolean equals(Object o) {
    if (this == o) {
        return true;
    }
    if (o == null || getClass() != o.getClass()) {
        return false;
    }
    QueryEntry that = (QueryEntry) o;
    return key.equals(that.key);
}
|
@Test
public void test_equality_differentKey() {
    // Same value but different keys: the entries must not be equal.
    QueryableEntry first = createEntry("dataKey", "dataValue");
    QueryableEntry second = createEntry("dataKeyOther", "dataValue");
    assertFalse(first.equals(second));
}
|
/**
 * Removes the device key with the given identifier from the store.
 *
 * @param deviceKeyId identifier of the key to remove; must not be null
 */
@Override
public void removeKey(DeviceKeyId deviceKeyId) {
    checkNotNull(deviceKeyId, "Device key identifier cannot be null");
    // Deletion is delegated entirely to the backing store.
    store.deleteDeviceKey(deviceKeyId);
}
|
@Test
public void testRemoveKey() {
    // Create and register a community-name device key.
    DeviceKeyId deviceKeyId = DeviceKeyId.deviceKeyId(deviceKeyIdValue);
    DeviceKey deviceKey = DeviceKey.createDeviceKeyUsingCommunityName(deviceKeyId,
            deviceKeyLabel, deviceKeySnmpName);
    manager.addKey(deviceKey);

    // Remove it again and verify it can no longer be looked up.
    manager.removeKey(deviceKeyId);
    deviceKey = manager.getDeviceKey(deviceKeyId);
    assertNull("The device key set should be empty.", deviceKey);

    // The add followed by the remove must have been observed, in that order.
    validateEvents(DeviceKeyEvent.Type.DEVICE_KEY_ADDED, DeviceKeyEvent.Type.DEVICE_KEY_REMOVED);
}
|
/**
 * Starts the appender: validates the configured port, remote host and queue
 * size, resolves the remote host eagerly, then schedules the background task
 * that connects the socket and dispatches events. Does nothing if already
 * started; any configuration error prevents startup.
 */
public void start() {
    if (isStarted()) return;
    int errorCount = 0;
    if (port <= 0) {
        errorCount++;
        // Fix: a space was missing between "appender" and the appender name,
        // producing messages like "...for appendermyAppender For more...".
        addError("No port was configured for appender "
                + name
                + " For more information, please visit http://logback.qos.ch/codes.html#socket_no_port");
    }
    if (remoteHost == null) {
        errorCount++;
        addError("No remote host was configured for appender "
                + name
                + " For more information, please visit http://logback.qos.ch/codes.html#socket_no_host");
    }
    if (queueSize == 0) {
        addWarn("Queue size of zero is deprecated, use a size of one to indicate synchronous processing");
    }
    if (queueSize < 0) {
        errorCount++;
        addError("Queue size must be greater than zero");
    }
    if (errorCount == 0) {
        try {
            // Resolve eagerly so a bad host name fails at startup, not per event.
            address = InetAddress.getByName(remoteHost);
        } catch (UnknownHostException ex) {
            addError("unknown host: " + remoteHost);
            errorCount++;
        }
    }
    if (errorCount == 0) {
        deque = queueFactory.newLinkedBlockingDeque(queueSize);
        peerId = "remote peer " + remoteHost + ":" + port + ": ";
        connector = createConnector(address, port, 0, reconnectionDelay.getMilliseconds());
        // Connect and dispatch on a background task so start() never blocks.
        task = getContext().getScheduledExecutorService().submit(new Runnable() {
            public void run() {
                connectSocketAndDispatchEvents();
            }
        });
        super.start();
    }
}
|
@Test
public void shutsDownOnInterruptWhileWaitingForSocketConnection() throws Exception {
    // given: the connector is interrupted while trying to connect
    doThrow(new InterruptedException()).when(socketConnector).call();

    // when
    appender.start();

    // then: the connection attempt was made (and the appender shut down)
    verify(socketConnector, timeout(TIMEOUT)).call();
}
|
/**
 * Returns the token at {@code index} after splitting {@code input} by the
 * whole {@code delimiter}. Negative indices count from the end. Out-of-range
 * indices yield the literal string "null".
 */
@ScalarFunction
public static String splitPart(String input, String delimiter, int index) {
  String[] tokens = StringUtils.splitByWholeSeparator(input, delimiter);
  // Normalize a negative index to its from-the-end equivalent.
  int effectiveIndex = index < 0 ? tokens.length + index : index;
  if (effectiveIndex >= 0 && effectiveIndex < tokens.length) {
    return tokens[effectiveIndex];
  }
  return "null";
}
|
// Exercises both overloads: (input, delimiter, index) and the limit-aware
// (input, delimiter, limit, index) variant, against data-provider cases.
@Test(dataProvider = "splitPartTestCases")
public void testSplitPart(String input, String delimiter, int index, int limit, String expectedToken,
    String expectedTokenWithLimitCounts) {
  assertEquals(StringFunctions.splitPart(input, delimiter, index), expectedToken);
  assertEquals(StringFunctions.splitPart(input, delimiter, limit, index), expectedTokenWithLimitCounts);
}
|
/**
 * Registers a permissive CORS mapping for all paths: any origin, any method.
 */
@Override
public void addCorsMappings(final CorsRegistry registry) {
    // NOTE(review): the allowedHeaders() arguments below mix header *names*
    // with header *values* ("POST, GET, OPTIONS, PUT, DELETE" and
    // "Origin, X-Requested-With, ..." are values, not names). allowedHeaders()
    // expects request header names only, so this looks like response-header
    // configuration pasted into the wrong call — confirm intent before
    // changing, as the companion test pins this exact configuration.
    registry.addMapping("/**")
            .allowedHeaders("Access-Control-Allow-Origin",
                    "*",
                    "Access-Control-Allow-Methods",
                    "POST, GET, OPTIONS, PUT, DELETE",
                    "Access-Control-Allow-Headers",
                    "Origin, X-Requested-With, Content-Type, Accept")
            .allowedOrigins("*")
            .allowedMethods("*");
}
|
@Test
public void testAddCorsMappings() {
    // The registry configured by WebConfiguration must serialize identically
    // to the expected reference registry.
    CorsRegistry actual = new CorsRegistry();
    new WebConfiguration().addCorsMappings(actual);
    assertEquals(getCorsConfigurationsString(actual), getCorsConfigurationsString(corsRegistryJSONStringExtendBuild()));
}
|
/**
 * Sets the status checker name for the protocol under construction.
 *
 * @param status status checker name
 * @return this builder, for chaining
 */
public ProtocolBuilder status(String status) {
    this.status = status;
    return getThis();
}
|
@Test
void status() {
    // The configured status checker name must survive the build.
    ProtocolBuilder builder = new ProtocolBuilder().status("mockstatuschecker");
    Assertions.assertEquals("mockstatuschecker", builder.build().getStatus());
}
|
/**
 * Sets the file this analysis error relates to. May be called at most once.
 *
 * @param inputFile the file on which the error occurred; must not be null
 * @return this, for chaining
 */
@Override
public NewAnalysisError onFile(InputFile inputFile) {
    checkArgument(inputFile != null, "Cannot use a inputFile that is null");
    checkState(this.inputFile == null, "onFile() already called");
    this.inputFile = inputFile;
    return this;
}
|
@Test
public void test_save() {
    // Saving must hand the error to the storage exactly once, and nothing else.
    DefaultAnalysisError error = new DefaultAnalysisError(storage);
    error.onFile(inputFile).save();
    Mockito.verify(storage).store(error);
    Mockito.verifyNoMoreInteractions(storage);
}
|
/**
 * Returns the writer for the given source, creating the replay file,
 * registering it as a result file and caching the writer on first use.
 *
 * @param source key identifying which replay file the caller writes to
 * @return a cached or newly opened writer for the source's replay file
 * @throws KettleException if the replay file or its writer cannot be created
 */
Writer getWriter( Object source ) throws KettleException {
  try {
    // Reuse an already opened writer for this source, if any.
    Writer outputStreamWriter = writers.get( source );
    if ( outputStreamWriter != null ) {
      return outputStreamWriter;
    }
    FileObject file =
        getReplayFilename( destinationDirectory, processingFilename, dateString, fileExtension, source );
    // Register the replay file with the step so it shows up in the result.
    ResultFile resultFile =
        new ResultFile( ResultFile.FILE_TYPE_GENERAL, file, baseStep.getTransMeta().getName(), baseStep
            .getStepname() );
    baseStep.addResultFile( resultFile );
    try {
      // Blank encoding falls through to the platform default writer encoding.
      if ( isBlank( encoding ) ) {
        outputStreamWriter = new OutputStreamWriter( KettleVFS.getOutputStream( file, false ) );
      } else {
        outputStreamWriter = new OutputStreamWriter( KettleVFS.getOutputStream( file, false ), encoding );
      }
    } catch ( Exception e ) {
      throw new KettleException( BaseMessages.getString(
          PKG, "AbstractFileErrorHandler.Exception.CouldNotCreateFileErrorHandlerForFile" )
          + file.getName().getURI(), e );
    }
    // Cache the writer so subsequent rows for this source reuse it.
    writers.put( source, outputStreamWriter );
    return outputStreamWriter;
  } catch ( KettleFileException e ) {
    throw new KettleException( BaseMessages.getString(
        PKG, "AbstractFileErrorHandler.Exception.CouldNotCreateFileErrorHandlerForFile" ), e );
  }
}
|
@Test
public void getWriterEmptyEncoding() throws Exception {
  // An empty encoding must fall back to the default writer encoding.
  Object source = Mockito.mock( Object.class );
  setupErrorHandler( "", source );
  OutputStreamWriter writer = (OutputStreamWriter) abstractFileErrorHandler.getWriter( source );
  assertEquals( DEFAULT_ENCODING, writer.getEncoding() );
}
|
/**
 * Returns the element at the given index of the backing array.
 *
 * @param index position in the backing {@code elements} array
 * @return the value stored at {@code index}
 */
@Override
public double get(int index) {
    return elements[index];
}
|
@Test
public void serialization431Test() throws URISyntaxException, IOException {
    // A dense vector serialized by version 4.3.1 must deserialize into a
    // vector equal to the freshly generated one.
    Path vectorPath = Paths.get(DenseVectorTest.class.getResource("dense-vector-431.tribuo").toURI());
    try (InputStream stream = Files.newInputStream(vectorPath)) {
        Tensor deserialized = Tensor.deserialize(TensorProto.parseFrom(stream));
        assertEquals(generateVectorA(), deserialized);
    }
}
|
@Override
public void run() {
    try {
        // Run the health check task through the interceptor chain.
        interceptorChain.doInterceptor(task);
    } catch (Exception e) {
        // Best effort: a failed check is logged, never propagated to the scheduler.
        Loggers.SRV_LOG.info("Interceptor health check task {} failed", task.getTaskId(), e);
    }
}
|
@Test
void testRunHealthyInstanceWithTimeoutFromMetadata() throws InterruptedException {
    // Instance registered as healthy with a last-beat timestamp of "now".
    InstancePublishInfo instance = injectInstance(true, System.currentTimeMillis());
    Service service = Service.newService(NAMESPACE, GROUP_NAME, SERVICE_NAME);
    // Per-instance metadata overrides the heartbeat timeout down to 1000 ms.
    InstanceMetadata metadata = new InstanceMetadata();
    metadata.getExtendData().put(PreservedMetadataKeys.HEART_BEAT_TIMEOUT, 1000L);
    when(namingMetadataManager.getInstanceMetadata(service, instance.getMetadataId())).thenReturn(Optional.of(metadata));
    when(globalConfig.isExpireInstance()).thenReturn(true);
    // Sleep past the 1 s timeout so the check sees the beat as stale.
    TimeUnit.SECONDS.sleep(1);
    taskWrapper.run();
    // The instance must be kept (not expired) but marked unhealthy.
    assertFalse(client.getAllInstancePublishInfo().isEmpty());
    assertFalse(client.getInstancePublishInfo(Service.newService(NAMESPACE, GROUP_NAME, SERVICE_NAME)).isHealthy());
}
|
/**
 * Matches the message against all stream rules and returns the streams it
 * belongs to. AND-type streams are dropped (and blacklisted) as soon as one
 * of their rules fails; OR-type streams are blacklisted once matched so no
 * further rules are evaluated for them. Matched streams flagged with
 * removeMatchesFromDefaultStream also remove the default stream from the
 * message.
 *
 * @param message the message to route
 * @return the streams the message was routed to
 */
public List<Stream> match(Message message) {
    final Set<Stream> result = Sets.newHashSet();
    final Set<String> blackList = Sets.newHashSet();
    for (final Rule rule : rulesList) {
        // Skip rules of streams already decided (failed AND or matched OR).
        if (blackList.contains(rule.getStreamId())) {
            continue;
        }
        final StreamRule streamRule = rule.getStreamRule();
        final StreamRuleType streamRuleType = streamRule.getType();
        final Stream.MatchingType matchingType = rule.getMatchingType();
        // A missing field fails the rule outright (unless the rule type does
        // not require the field to be present).
        if (!ruleTypesNotNeedingFieldPresence.contains(streamRuleType)
                && !message.hasField(streamRule.getField())) {
            if (matchingType == Stream.MatchingType.AND) {
                result.remove(rule.getStream());
                // blacklist stream because it can't match anymore
                blackList.add(rule.getStreamId());
            }
            continue;
        }
        final Stream stream;
        // Regex rules are evaluated with a timeout to bound pathological patterns.
        if (streamRuleType != StreamRuleType.REGEX) {
            stream = rule.match(message);
        } else {
            stream = rule.matchWithTimeOut(message, streamProcessingTimeout, TimeUnit.MILLISECONDS);
        }
        if (stream == null) {
            if (matchingType == Stream.MatchingType.AND) {
                result.remove(rule.getStream());
                // blacklist stream because it can't match anymore
                blackList.add(rule.getStreamId());
            }
        } else {
            result.add(stream);
            if (matchingType == Stream.MatchingType.OR) {
                // blacklist stream because it is already matched
                blackList.add(rule.getStreamId());
            }
        }
    }
    // Remove the default stream at most once, no matter how many matched
    // streams request it.
    final Stream defaultStream = defaultStreamProvider.get();
    boolean alreadyRemovedDefaultStream = false;
    for (Stream stream : result) {
        if (stream.getRemoveMatchesFromDefaultStream()) {
            if (alreadyRemovedDefaultStream || message.removeStream(defaultStream)) {
                alreadyRemovedDefaultStream = true;
                if (LOG.isTraceEnabled()) {
                    LOG.trace("Successfully removed default stream <{}> from message <{}>", defaultStream.getId(), message.getId());
                }
            } else {
                // A previously executed message processor (or Illuminate) has likely already removed the
                // default stream from the message. Now, the message has matched a stream in the Graylog
                // MessageFilterChain, and the matching stream is also set to remove the default stream.
                // This is usually from user-defined stream rules, and is generally not a problem.
                cannotRemoveDefaultMeter.inc();
                if (LOG.isTraceEnabled()) {
                    LOG.trace("Couldn't remove default stream <{}> from message <{}>", defaultStream.getId(), message.getId());
                }
            }
        }
    }
    return ImmutableList.copyOf(result);
}
|
// An inverted CONTAINS rule matches when the field is absent or does NOT
// contain the value, and stops matching once the value appears in the field.
@Test
public void testInvertedContainsMatch() throws Exception {
    final StreamMock stream = getStreamMock("test");
    final StreamRuleMock rule = new StreamRuleMock(
        ImmutableMap.<String, Object>builder()
            .put("_id", new ObjectId())
            .put("field", "testfield")
            .put("inverted", true)
            .put("value", "testvalue")
            .put("type", StreamRuleType.CONTAINS.toInteger())
            .put("stream_id", stream.getId())
            .build()
    );
    stream.setStreamRules(Lists.newArrayList(rule));
    final StreamRouterEngine engine = newEngine(Lists.newArrayList(stream));
    final Message message = getMessage();
    // Without the field
    assertEquals(Lists.newArrayList(stream), engine.match(message));
    // Without the matching value in the field
    message.addField("testfield", "no-foobar");
    assertEquals(Lists.newArrayList(stream), engine.match(message));
    // With matching value in the field.
    message.addField("testfield", "hello testvalue");
    assertTrue(engine.match(message).isEmpty());
}
|
/**
 * Transforms one decoded row into the reused result. Rows that carry a
 * MULTIPLE_RECORDS_KEY collection are unrolled and each record is processed
 * individually; otherwise the row itself is processed.
 *
 * @param decodedRow the decoded input row
 * @param reusedResult result holder, reset before processing
 */
public void processRow(GenericRow decodedRow, Result reusedResult)
    throws Exception {
  reusedResult.reset();
  if (_complexTypeTransformer != null) {
    // TODO: consolidate complex type transformer into composite type transformer
    decodedRow = _complexTypeTransformer.transform(decodedRow);
  }
  Object multipleRecords = decodedRow.getValue(GenericRow.MULTIPLE_RECORDS_KEY);
  if (multipleRecords == null) {
    processPlainRow(decodedRow, reusedResult);
  } else {
    for (GenericRow row : (Collection<GenericRow>) multipleRecords) {
      processPlainRow(row, reusedResult);
    }
  }
}
|
@Test
public void testSingleRowFailure()
    throws Exception {
  TableConfig config = createTestTableConfig();
  Schema schema = Fixtures.createSchema();
  TransformPipeline pipeline = new TransformPipeline(config, schema);
  GenericRow invalidRow = Fixtures.createInvalidSingleRow(9527);
  TransformPipeline.Result result = new TransformPipeline.Result();
  boolean threw = false;
  try {
    pipeline.processRow(invalidRow, result);
  } catch (Exception ex) {
    threw = true;
  }
  // The invalid row must fail the pipeline and record neither output nor skips.
  Assert.assertTrue(threw);
  Assert.assertNotNull(result);
  Assert.assertEquals(result.getTransformedRows().size(), 0);
  Assert.assertEquals(result.getSkippedRowCount(), 0);
}
|
/**
 * Delegates directly to the wrapped SLF4J logger.
 */
@Override
public boolean isDebugEnabled() {
    return logger.isDebugEnabled();
}
|
@Test
public void testIsDebugEnabled() {
    // The wrapper must delegate isDebugEnabled() to the underlying SLF4J logger.
    Logger delegate = mock(Logger.class);
    when(delegate.getName()).thenReturn("foo");
    when(delegate.isDebugEnabled()).thenReturn(true);

    InternalLogger wrapper = new Slf4JLogger(delegate);
    assertTrue(wrapper.isDebugEnabled());
    verify(delegate).getName();
    verify(delegate).isDebugEnabled();
}
|
/**
 * Computes the in-degree of every vertex in the graph.
 *
 * @return a map from each vertex to the number of edges pointing at it
 */
public Map<V, Integer> inDegree() {
    // Start every known vertex at zero so sources appear with in-degree 0.
    Map<V, Integer> degrees = new HashMap<>();
    for (V vertex : neighbors.keySet()) {
        degrees.put(vertex, 0);
    }
    // Each edge source -> target adds one to the target's in-degree.
    for (V source : neighbors.keySet()) {
        for (V target : neighbors.get(source)) {
            degrees.put(target, degrees.get(target) + 1);
        }
    }
    return degrees;
}
|
@Test
void inDegree() {
    // A and D are sources (in-degree 0); every other vertex has one incoming edge.
    Map<Character, Integer> expected = new HashMap<>(7);
    expected.put('A', 0);
    expected.put('B', 1);
    expected.put('C', 1);
    expected.put('D', 0);
    expected.put('E', 1);
    expected.put('F', 1);
    expected.put('G', 1);
    assertEquals(expected, graph.inDegree());
}
|
/**
 * Returns the literal value of the expression at the given position:
 * parameter markers are resolved from the parameter list, literal segments
 * return their literal, anything else yields empty.
 *
 * @param index position of the value expression
 * @return the resolved value, possibly empty
 */
public Optional<Object> getLiteralValue(final int index) {
    ExpressionSegment expression = valueExpressions.get(index);
    if (expression instanceof ParameterMarkerExpressionSegment) {
        ParameterMarkerExpressionSegment marker = (ParameterMarkerExpressionSegment) expression;
        return Optional.ofNullable(parameters.get(getParameterIndex(marker)));
    }
    if (expression instanceof LiteralExpressionSegment) {
        return Optional.ofNullable(((LiteralExpressionSegment) expression).getLiterals());
    }
    return Optional.empty();
}
|
@Test
void assertGetLiteralValueWithParameterMarker() {
    // A parameter marker must be resolved against the supplied parameter list.
    Collection<ExpressionSegment> assignments = makeParameterMarkerExpressionSegment();
    InsertValueContext context = new InsertValueContext(assignments, Collections.singletonList("test"), 0);
    Optional<Object> actual = context.getLiteralValue(0);
    assertTrue(actual.isPresent());
    assertThat(actual.get(), is("test"));
}
|
/**
 * The mask is empty when no bits are set.
 */
@Override
public boolean isEmpty() {
    return mask == 0;
}
|
@Test
public void testIsEmpty() {
    // Only the all-zero mask is empty.
    assertThat(new LongBitMask(0L).isEmpty()).isTrue();
    assertThat(new LongBitMask(1L).isEmpty()).isFalse();
}
|
/**
 * Encodes the latitude/longitude pair into a spatial key.
 * Note the axis mapping: the longitude becomes x and the latitude becomes y.
 */
public final long encodeLatLon(double lat, double lon) {
    return encode(x(lon), y(lat));
}
|
@Test
public void testEdgeCases() {
    // 4x4 grid over an asymmetric bounding box -> 4-bit keys.
    double minLon = -1, maxLon = 1.6;
    double minLat = -1, maxLat = 0.5;
    int parts = 4;
    int bits = (int) (Math.log(parts * parts) / Math.log(2));
    SpatialKeyAlgo spatialKeyAlgo = new SpatialKeyAlgo(bits, new BBox(minLon, maxLon, minLat, maxLat));
    // Points exactly on a cell border must land in the upper/right cell;
    // points just below/left of the border in the lower/left cell.
    // lat border 0.125
    assertEquals(11, spatialKeyAlgo.encodeLatLon(0.125, -0.2));
    assertEquals(9, spatialKeyAlgo.encodeLatLon(0.124, -0.2));
    // lon border -0.35
    assertEquals(11, spatialKeyAlgo.encodeLatLon(0.2, -0.35));
    assertEquals(10, spatialKeyAlgo.encodeLatLon(0.2, -0.351));
}
|
/**
 * Builds a circuit breaker configuration from the given Commons Configuration.
 *
 * @param configuration source configuration holding the circuit breaker properties
 * @return populated {@link CommonsConfigurationCircuitBreakerConfiguration}
 * @throws ConfigParseException if the subset properties cannot be read or parsed
 */
public static CommonsConfigurationCircuitBreakerConfiguration of(final Configuration configuration) throws ConfigParseException {
    final CommonsConfigurationCircuitBreakerConfiguration result = new CommonsConfigurationCircuitBreakerConfiguration();
    try {
        // Shared named configs and per-instance overrides live under separate prefixes.
        result.getConfigs().putAll(result.getProperties(configuration.subset(CIRCUITBREAKER_CONFIGS_PREFIX)));
        result.getInstances().putAll(result.getProperties(configuration.subset(CIRCUITBREAKER_INSTANCES_PREFIX)));
        return result;
    } catch (Exception ex) {
        // Wrap anything thrown during property extraction in the domain exception.
        throw new ConfigParseException("Error creating circuitbreaker configuration", ex);
    }
}
|
@Test
public void testFromYamlFile() throws ConfigurationException {
    // Load the YAML resilience config and verify both shared configs and instances.
    Configuration yamlConfig = CommonsConfigurationUtil.getConfiguration(YAMLConfiguration.class, TestConstants.RESILIENCE_CONFIG_YAML_FILE_NAME);
    CommonsConfigurationCircuitBreakerConfiguration circuitBreakerConfig = CommonsConfigurationCircuitBreakerConfiguration.of(yamlConfig);
    assertConfigs(circuitBreakerConfig.getConfigs());
    assertInstances(circuitBreakerConfig.getInstances());
}
|
/**
 * Convenience overload that delegates to {@code fence(fromSvc, null)}.
 *
 * @param fromSvc the service target to fence
 * @return whatever the two-argument overload returns for a null second argument
 */
public boolean fence(HAServiceTarget fromSvc) {
    return fence(fromSvc, null);
}
|
@Test
public void testShortNameSshWithPort() throws BadFencingConfigurationException {
    // An sshfence spec carrying only a port (no user/host) must fail to fence.
    NodeFencer portOnlyFencer = setupFencer("sshfence(:123)");
    assertFalse(portOnlyFencer.fence(MOCK_TARGET));
}
|
@Override
String getFileName(double lat, double lon) {
    // Look up the continent folder for the tile containing this coordinate.
    int intKey = calcIntKey(lat, lon);
    String area = areas.get(intKey);
    if (area == null)
        return null;
    // Tile names use the absolute degrees of the rounded-down corner,
    // e.g. "N49E011" or "S10W046"; latitude is padded to 2 digits, longitude to 3.
    int latDegrees = Math.abs(down(lat));
    int lonDegrees = Math.abs(down(lon));
    StringBuilder name = new StringBuilder(area).append('/');
    name.append(lat >= 0 ? 'N' : 'S');
    if (latDegrees < 10)
        name.append('0');
    name.append(latDegrees);
    name.append(lon >= 0 ? 'E' : 'W');
    if (lonDegrees < 10)
        name.append("00");
    else if (lonDegrees < 100)
        name.append('0');
    name.append(lonDegrees);
    return name.toString();
}
|
@Test
public void testGetFileString() {
    // Positive coordinates map straight to the containing one-degree tile.
    assertEquals("Eurasia/N49E011", instance.getFileName(49, 11));
    // Negative fractions round down (towards more negative degrees) for the corner.
    assertEquals("Eurasia/N52W002", instance.getFileName(52.268157, -1.230469));
    assertEquals("Africa/S06E034", instance.getFileName(-5.965754, 34.804687));
    assertEquals("Australia/S29E131", instance.getFileName(-28.304381, 131.484375));
    // Exact negative integers keep their own tile ...
    assertEquals("South_America/S09W045", instance.getFileName(-9, -45));
    // ... while any negative fraction pushes to the next tile south/west.
    assertEquals("South_America/S10W046", instance.getFileName(-9.1, -45.1));
    assertEquals("South_America/S10W045", instance.getFileName(-9.6, -45));
    assertEquals("South_America/S28W071", instance.getFileName(-28, -71));
    assertEquals("South_America/S29W072", instance.getFileName(-28.88316, -71.070557));
}
|
@Override
public void endInput(int inputId) throws Exception {
    // Notify the selection handler first, then propagate end-of-input to the
    // operator this input feeds inside the chain.
    inputSelectionHandler.endInput(inputId);
    final InputSpec spec = inputSpecMap.get(inputId);
    spec.getOutput().endOperatorInput(spec.getOutputOpInputId());
}
|
// Drives a three-input operator chain (two aggs feeding join1, join1 and an
// external input feeding join2) and checks element routing plus end-of-input
// propagation order. NOTE(review): assumes the wrapper topology built by
// createMultipleInputStreamOperator matches the agg1/agg2 -> join1 -> join2
// layout read off the wrappers below — confirm against the factory.
@Test
public void testProcess() throws Exception {
    TestingBatchMultipleInputStreamOperator op = createMultipleInputStreamOperator();
    List<StreamElement> outputData = op.getOutputData();
    TestingTwoInputStreamOperator joinOp2 =
        (TestingTwoInputStreamOperator) op.getTailWrapper().getStreamOperator();
    TableOperatorWrapper<?> joinWrapper1 = op.getTailWrapper().getInputWrappers().get(0);
    TestingTwoInputStreamOperator joinOp1 =
        (TestingTwoInputStreamOperator) joinWrapper1.getStreamOperator();
    TableOperatorWrapper<?> aggWrapper1 = joinWrapper1.getInputWrappers().get(0);
    TestingOneInputStreamOperator aggOp1 =
        (TestingOneInputStreamOperator) aggWrapper1.getStreamOperator();
    TableOperatorWrapper<?> aggWrapper2 = joinWrapper1.getInputWrappers().get(1);
    TestingOneInputStreamOperator aggOp2 =
        (TestingOneInputStreamOperator) aggWrapper2.getStreamOperator();
    List<Input> inputs = op.getInputs();
    assertThat(inputs).hasSize(3);
    Input input1 = inputs.get(0);
    Input input2 = inputs.get(1);
    Input input3 = inputs.get(2);
    assertThat(input1).isInstanceOf(OneInput.class);
    assertThat(input2).isInstanceOf(OneInput.class);
    assertThat(input3).isInstanceOf(SecondInputOfTwoInput.class);
    // Nothing has been processed yet: all operators idle, no output.
    assertThat(joinOp2.getCurrentElement1()).isNull();
    assertThat(joinOp2.getCurrentElement2()).isNull();
    assertThat(joinOp1.getCurrentElement1()).isNull();
    assertThat(joinOp1.getCurrentElement2()).isNull();
    assertThat(aggOp1.getCurrentElement()).isNull();
    assertThat(aggOp2.getCurrentElement()).isNull();
    assertThat(outputData).isEmpty();
    // process first input (input id is 3)
    StreamRecord<RowData> element1 =
        new StreamRecord<>(GenericRowData.of(StringData.fromString("123")), 456);
    input3.processElement(element1);
    // Input 3 feeds join2's second side directly.
    assertThat(joinOp2.getCurrentElement2()).isEqualTo(element1);
    assertThat(joinOp2.getCurrentElement1()).isNull();
    assertThat(outputData).isEmpty();
    // finish first input
    assertThat(joinOp2.getEndInputs()).isEmpty();
    op.endInput(3);
    assertThat(outputData).isEmpty();
    assertThat(joinOp2.getEndInputs()).containsExactly(2);
    // process second input (input id is 1)
    StreamRecord<RowData> element2 =
        new StreamRecord<>(GenericRowData.of(StringData.fromString("124")), 457);
    input1.processElement(element2);
    // Input 1 reaches agg1 only; the joins see nothing until agg1 finishes.
    assertThat(aggOp1.getCurrentElement()).isEqualTo(element2);
    assertThat(joinOp1.getCurrentElement1()).isNull();
    assertThat(joinOp2.getCurrentElement1()).isNull();
    assertThat(outputData).isEmpty();
    // finish second input
    assertThat(joinOp1.getEndInputs()).isEmpty();
    op.endInput(1);
    // Ending agg1 flushes its buffered element into join1's first side.
    assertThat(joinOp1.getEndInputs()).containsExactly(1);
    assertThat(joinOp2.getEndInputs()).containsExactly(2);
    assertThat(joinOp1.getCurrentElement1()).isEqualTo(element2);
    assertThat(outputData).isEmpty();
    // process third input (input id is 2)
    StreamRecord<RowData> element3 =
        new StreamRecord<>(GenericRowData.of(StringData.fromString("125")), 458);
    input2.processElement(element3);
    assertThat(aggOp2.getCurrentElement()).isEqualTo(element3);
    assertThat(joinOp1.getCurrentElement2()).isNull();
    assertThat(joinOp2.getCurrentElement1()).isNull();
    assertThat(outputData).isEmpty();
    // finish third input
    assertThat(joinOp1.getEndInputs()).containsExactly(1);
    op.endInput(2);
    // Ending agg2 completes join1, which in turn completes join2's first side;
    // only now does output appear.
    assertThat(joinOp1.getEndInputs()).isEqualTo(Arrays.asList(1, 2));
    assertThat(joinOp2.getEndInputs()).isEqualTo(Arrays.asList(2, 1));
    assertThat(joinOp1.getCurrentElement2()).isEqualTo(element3);
    assertThat(outputData).hasSize(3);
}
|
/**
 * {@inheritDoc}
 *
 * @deprecated retained for source compatibility with the deprecated
 *             value-only transformer API; adapts the supplier to the
 *             key-aware variant and delegates.
 */
@Override
@Deprecated
public <VR> KStream<K, VR> flatTransformValues(final org.apache.kafka.streams.kstream.ValueTransformerSupplier<? super V, Iterable<VR>> valueTransformerSupplier,
                                               final String... stateStoreNames) {
    Objects.requireNonNull(valueTransformerSupplier, "valueTransformerSupplier can't be null");
    // No explicit name was supplied by the caller, so pass an empty name and
    // let an internal processor name be generated.
    return doFlatTransformValues(
        toValueTransformerWithKeySupplier(valueTransformerSupplier),
        NamedInternal.empty(),
        stateStoreNames);
}
|
@Test
@SuppressWarnings("deprecation")
public void shouldNotAllowNullStoreNameOnFlatTransformValuesWithFlatValueSupplierAndNamed() {
    // A null entry inside the varargs store names (not a null array) must be rejected.
    final NullPointerException npe = assertThrows(
        NullPointerException.class,
        () -> testStream.flatTransformValues(
            flatValueTransformerSupplier,
            Named.as("flatValueTransformer"),
            (String) null));
    assertThat(npe.getMessage(), equalTo("stateStoreNames can't contain `null` as store name"));
}
|
/**
 * Accepts exactly one uploaded file, validates that it is a jar by extension,
 * and moves it into the jar directory under a UUID-prefixed name.
 *
 * @param request the upload request carrying the multipart file
 * @param gateway unused here
 * @return future completing with the stored jar's normalized path
 * @throws RestHandlerException if the request does not contain exactly one file
 */
@Override
@VisibleForTesting
public CompletableFuture<JarUploadResponseBody> handleRequest(
        @Nonnull final HandlerRequest<EmptyRequestBody> request,
        @Nonnull final RestfulGateway gateway)
        throws RestHandlerException {
    final Collection<File> uploadedFiles = request.getUploadedFiles();
    if (uploadedFiles.size() != 1) {
        throw new RestHandlerException(
                "Exactly 1 file must be sent, received " + uploadedFiles.size() + '.',
                HttpResponseStatus.BAD_REQUEST);
    }
    final Path fileUpload = uploadedFiles.iterator().next().toPath();
    return CompletableFuture.supplyAsync(
            () -> {
                // Guard clause: anything but a *.jar is rejected up front.
                if (!fileUpload.getFileName().toString().endsWith(".jar")) {
                    throw new CompletionException(
                            new RestHandlerException(
                                    "Only Jar files are allowed.",
                                    HttpResponseStatus.BAD_REQUEST));
                }
                // UUID prefix avoids collisions between uploads with the same name.
                final Path destination =
                        jarDir.resolve(UUID.randomUUID() + "_" + fileUpload.getFileName());
                try {
                    Files.move(fileUpload, destination);
                } catch (IOException e) {
                    throw new CompletionException(
                            new RestHandlerException(
                                    String.format(
                                            "Could not move uploaded jar file [%s] to [%s].",
                                            fileUpload, destination),
                                    HttpResponseStatus.INTERNAL_SERVER_ERROR,
                                    e));
                }
                return new JarUploadResponseBody(destination.normalize().toString());
            },
            executor);
}
|
@Test
void testRejectNonJarFiles() throws Exception {
    // Upload a file without a .jar extension and expect a BAD_REQUEST rejection.
    final Path uploadedFile = Files.createFile(jarDir.resolve("katrin.png"));
    final HandlerRequest<EmptyRequestBody> request = createRequest(uploadedFile);
    assertThatThrownBy(
            () -> jarUploadHandler.handleRequest(request, mockDispatcherGateway).get())
        .satisfies(
            e -> {
                // The handler wraps its RestHandlerException in a CompletionException;
                // unwrap before inspecting.
                final Throwable throwable =
                    ExceptionUtils.stripCompletionException(e.getCause());
                assertThat(throwable).isInstanceOf(RestHandlerException.class);
                final RestHandlerException restHandlerException =
                    (RestHandlerException) throwable;
                assertThat(restHandlerException.getHttpResponseStatus())
                    .isEqualTo(HttpResponseStatus.BAD_REQUEST);
            });
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.