focal_method (string, lengths 13–60.9k) | test_case (string, lengths 25–109k)
|---|---|
/**
 * Reads {@code configProp} from the configuration and coerces it to an int.
 * Integer values are returned as-is, String values are parsed, and anything
 * else (including an absent property or an unparsable string) falls back to
 * {@code defaultValue}.
 *
 * @param conf         configuration to read from
 * @param configProp   property key to look up
 * @param defaultValue value returned when the property is missing or invalid
 * @return the resolved int value
 */
static int getConfigValueAsInt(ServiceConfiguration conf, String configProp, int defaultValue) {
    Object value = conf.getProperty(configProp);
    if (value instanceof Integer) {
        log.info("Configuration for [{}] is [{}]", configProp, value);
        return (Integer) value;
    } else if (value instanceof String) {
        try {
            return Integer.parseInt((String) value);
        } catch (NumberFormatException numberFormatException) {
            // Bug fix: the message previously said "long" although this method parses an int.
            log.error("Expected configuration for [{}] to be an int, but got [{}]. Using default value: [{}]",
                    configProp, value, defaultValue, numberFormatException);
            return defaultValue;
        }
    } else {
        log.info("Configuration for [{}] is using the default value: [{}]", configProp, defaultValue);
        return defaultValue;
    }
}
|
@Test
public void testGetConfigValueAsIntegerWorks() {
    // Arrange: back the configuration with a string-typed property value.
    Properties properties = new Properties();
    properties.setProperty("prop1", "1234");
    ServiceConfiguration configuration = new ServiceConfiguration();
    configuration.setProperties(properties);
    // Act & assert: the string is parsed to 1234 instead of the default (9).
    assertEquals(1234, ConfigUtils.getConfigValueAsInt(configuration, "prop1", 9));
}
|
/**
 * Formats a byte count using SI units (powers of 1000), e.g. 1234 -> "1.2 kB".
 * Values in the open interval (-1000, 1000) are reported verbatim as
 * "N byte(s)"; larger magnitudes are scaled through k, M, G, T, P, E.
 *
 * @param bytes the byte count (may be negative)
 * @return human-readable representation
 */
public static String humanReadableByteCountSI(long bytes) {
    if (-1000 < bytes && bytes < 1000) {
        // Bug fix: -1 used to render as "-1 bytes"; treat both 1 and -1 as singular.
        if (bytes == 1 || bytes == -1) {
            return bytes + " byte";
        }
        return bytes + " bytes";
    }
    CharacterIterator ci = new StringCharacterIterator("kMGTPE");
    // 999_950 is the rounding threshold: at or above it, "%.1f" would print
    // "1000.0", so the value must be promoted to the next unit first.
    while (bytes <= -999_950 || bytes >= 999_950) {
        bytes /= 1000;
        ci.next();
    }
    return String.format(Locale.ENGLISH, "%.1f %cB", bytes / 1000.0, ci.current());
}
|
@Test
public void humanReadableByteCountSI_returns_kbs() {
    // Values in [1_000, 999_949] render with the "kB" suffix, one decimal place.
    assertThat(FileUtils.humanReadableByteCountSI(1_000)).isEqualTo("1.0 kB");
    assertThat(FileUtils.humanReadableByteCountSI(1_234)).isEqualTo("1.2 kB");
    assertThat(FileUtils.humanReadableByteCountSI(9_999)).isEqualTo("10.0 kB");
    assertThat(FileUtils.humanReadableByteCountSI(999_949)).isEqualTo("999.9 kB");
}
|
// Exposes the shared parser implementation (ParserImpl.INSTANCE) behind the
// Parser interface; callers never construct a parser themselves.
public static Parser parser() {
    return ParserImpl.INSTANCE;
}
|
@Test
void testReaderInputValidation() {
    // A null Reader must be rejected with a NullPointerException.
    assertThrows(NullPointerException.class,
            () -> HostsFileEntriesProvider.parser().parse((Reader) null));
}
|
// Plain accessor for the configured media type string (e.g. "JSON").
@Override
public String getMediaType() {
    return mediaType;
}
|
@Test
public void getMediaType() {
    // The getter must return exactly what the setter stored.
    underTest.setMediaType("JSON");
    assertThat(underTest.getMediaType()).isEqualTo("JSON");
}
|
/**
 * Replaces ACL entries scope by scope: any scope (ACCESS or DEFAULT) that
 * appears in the spec has its entries replaced wholesale, while scopes absent
 * from the spec keep their existing entries. MASK entries are collected
 * separately so masks can be recalculated afterwards.
 *
 * @param existingAcl the current ACL entries
 * @param inAclSpec   the replacement ACL spec
 * @return the new, validated ACL entry list
 * @throws AclException if the resulting ACL is invalid
 */
public static List<AclEntry> replaceAclEntries(List<AclEntry> existingAcl,
    List<AclEntry> inAclSpec) throws AclException {
    ValidatedAclSpec aclSpec = new ValidatedAclSpec(inAclSpec);
    ArrayList<AclEntry> aclBuilder = Lists.newArrayListWithCapacity(MAX_ENTRIES);
    // Replacement is done separately for each scope: access and default.
    EnumMap<AclEntryScope, AclEntry> providedMask =
        Maps.newEnumMap(AclEntryScope.class);
    EnumSet<AclEntryScope> maskDirty = EnumSet.noneOf(AclEntryScope.class);
    EnumSet<AclEntryScope> scopeDirty = EnumSet.noneOf(AclEntryScope.class);
    for (AclEntry aclSpecEntry: aclSpec) {
        // Mark the scope as replaced; existing entries in it are dropped below.
        scopeDirty.add(aclSpecEntry.getScope());
        if (aclSpecEntry.getType() == MASK) {
            // Caller-provided masks are remembered and flagged as explicit.
            providedMask.put(aclSpecEntry.getScope(), aclSpecEntry);
            maskDirty.add(aclSpecEntry.getScope());
        } else {
            aclBuilder.add(aclSpecEntry);
        }
    }
    // Copy existing entries if the scope was not replaced.
    for (AclEntry existingEntry: existingAcl) {
        if (!scopeDirty.contains(existingEntry.getScope())) {
            if (existingEntry.getType() == MASK) {
                providedMask.put(existingEntry.getScope(), existingEntry);
            } else {
                aclBuilder.add(existingEntry);
            }
        }
    }
    // Helpers fill in default-scope entries and recompute masks for any scope
    // whose entries changed but had no explicit mask.
    copyDefaultsIfNeeded(aclBuilder);
    calculateMasks(aclBuilder, providedMask, maskDirty, scopeDirty);
    return buildAndValidateAcl(aclBuilder);
}
|
@Test
public void testReplaceAclEntriesAccessMaskCalculated() throws AclException {
    // The spec supplies no MASK entry, so replaceAclEntries must compute one:
    // the union of the group entry (READ) and named-user entries (READ, READ_WRITE)
    // yields MASK READ_WRITE in the expected result.
    List<AclEntry> existing = new ImmutableList.Builder<AclEntry>()
        .add(aclEntry(ACCESS, USER, ALL))
        .add(aclEntry(ACCESS, GROUP, READ))
        .add(aclEntry(ACCESS, OTHER, READ))
        .build();
    List<AclEntry> aclSpec = Lists.newArrayList(
        aclEntry(ACCESS, USER, ALL),
        aclEntry(ACCESS, USER, "bruce", READ),
        aclEntry(ACCESS, USER, "diana", READ_WRITE),
        aclEntry(ACCESS, GROUP, READ),
        aclEntry(ACCESS, OTHER, READ));
    List<AclEntry> expected = new ImmutableList.Builder<AclEntry>()
        .add(aclEntry(ACCESS, USER, ALL))
        .add(aclEntry(ACCESS, USER, "bruce", READ))
        .add(aclEntry(ACCESS, USER, "diana", READ_WRITE))
        .add(aclEntry(ACCESS, GROUP, READ))
        .add(aclEntry(ACCESS, MASK, READ_WRITE))
        .add(aclEntry(ACCESS, OTHER, READ))
        .build();
    assertEquals(expected, replaceAclEntries(existing, aclSpec));
}
|
// This registration type intentionally has no separate management registration.
@Override
protected PolarisRegistration getManagementRegistration() {
    return null;
}
|
@Test
public void testGetManagementRegistration() {
    // Pins the documented contract: no management registration is exposed.
    assertThat(polarisAutoServiceRegistration.getManagementRegistration()).isNull();
}
|
// Validates a Bitbucket server URL by issuing a GET against its repos REST
// endpoint and parsing the response; doGet is expected to raise on failure
// (NOTE(review): error behavior lives in doGet, not visible here).
public void validateUrl(String serverUrl) {
    HttpUrl url = buildUrl(serverUrl, "/rest/api/1.0/repos");
    doGet("", url, body -> buildGson().fromJson(body, RepositoryList.class));
}
|
@Test
public void validate_url_success() {
    // A 200 response with a parsable repos body must complete without throwing.
    server.enqueue(new MockResponse().setResponseCode(200)
        .setBody(REPOS_BODY));
    underTest.validateUrl(server.url("/").toString());
}
|
/**
 * Maps an IRIS column type definition to a SeaTunnel {@link Column}.
 * The incoming data type name is upper-cased and dispatched through a switch;
 * unknown types raise a convert error. Lengths/precisions are normalized for
 * char/binary, decimal and large-object types as commented per case below.
 */
@Override
public Column convert(BasicTypeDefine typeDefine) {
    Long typeDefineLength = typeDefine.getLength();
    PhysicalColumn.PhysicalColumnBuilder builder =
        PhysicalColumn.builder()
            .name(typeDefine.getName())
            .sourceType(typeDefine.getColumnType())
            .columnLength(typeDefineLength)
            .scale(typeDefine.getScale())
            .nullable(typeDefine.isNullable())
            .defaultValue(typeDefine.getDefaultValue())
            .comment(typeDefine.getComment());
    String irisDataType = typeDefine.getDataType().toUpperCase();
    // Char/binary columns with a missing or non-positive length default to 1.
    long charOrBinaryLength =
        Objects.nonNull(typeDefineLength) && typeDefineLength > 0 ? typeDefineLength : 1;
    switch (irisDataType) {
        case IRIS_NULL:
            builder.dataType(BasicType.VOID_TYPE);
            break;
        case IRIS_BIT:
            builder.dataType(BasicType.BOOLEAN_TYPE);
            break;
        // Exact-numeric family: all map to DecimalType.
        case IRIS_NUMERIC:
        case IRIS_MONEY:
        case IRIS_SMALLMONEY:
        case IRIS_NUMBER:
        case IRIS_DEC:
        case IRIS_DECIMAL:
            DecimalType decimalType;
            if (typeDefine.getPrecision() != null && typeDefine.getPrecision() > 0) {
                decimalType =
                    new DecimalType(
                        typeDefine.getPrecision().intValue(), typeDefine.getScale());
            } else {
                // No usable precision declared: fall back to converter defaults.
                decimalType = new DecimalType(DEFAULT_PRECISION, DEFAULT_SCALE);
            }
            builder.dataType(decimalType);
            // Column length/scale are re-derived from the resolved decimal type.
            builder.columnLength(Long.valueOf(decimalType.getPrecision()));
            builder.scale(decimalType.getScale());
            break;
        case IRIS_INT:
        case IRIS_INTEGER:
        case IRIS_MEDIUMINT:
            builder.dataType(BasicType.INT_TYPE);
            break;
        case IRIS_ROWVERSION:
        case IRIS_BIGINT:
        case IRIS_SERIAL:
            builder.dataType(BasicType.LONG_TYPE);
            break;
        case IRIS_TINYINT:
            builder.dataType(BasicType.BYTE_TYPE);
            break;
        case IRIS_SMALLINT:
            builder.dataType(BasicType.SHORT_TYPE);
            break;
        case IRIS_FLOAT:
            builder.dataType(BasicType.FLOAT_TYPE);
            break;
        case IRIS_DOUBLE:
        case IRIS_REAL:
        case IRIS_DOUBLE_PRECISION:
            builder.dataType(BasicType.DOUBLE_TYPE);
            break;
        // Fixed/variable-length character types, including GUID-like types that
        // IRIS reports as character data.
        case IRIS_CHAR:
        case IRIS_CHAR_VARYING:
        case IRIS_CHARACTER_VARYING:
        case IRIS_NATIONAL_CHAR:
        case IRIS_NATIONAL_CHAR_VARYING:
        case IRIS_NATIONAL_CHARACTER:
        case IRIS_NATIONAL_CHARACTER_VARYING:
        case IRIS_NATIONAL_VARCHAR:
        case IRIS_NCHAR:
        case IRIS_SYSNAME:
        case IRIS_VARCHAR2:
        case IRIS_VARCHAR:
        case IRIS_NVARCHAR:
        case IRIS_UNIQUEIDENTIFIER:
        case IRIS_GUID:
        case IRIS_CHARACTER:
            builder.dataType(BasicType.STRING_TYPE);
            builder.columnLength(charOrBinaryLength);
            break;
        // Unbounded text types: advertise the maximum representable length.
        case IRIS_NTEXT:
        case IRIS_CLOB:
        case IRIS_LONG_VARCHAR:
        case IRIS_LONG:
        case IRIS_LONGTEXT:
        case IRIS_MEDIUMTEXT:
        case IRIS_TEXT:
        case IRIS_LONGVARCHAR:
            builder.dataType(BasicType.STRING_TYPE);
            builder.columnLength(Long.valueOf(Integer.MAX_VALUE));
            break;
        case IRIS_DATE:
            builder.dataType(LocalTimeType.LOCAL_DATE_TYPE);
            break;
        case IRIS_TIME:
            builder.dataType(LocalTimeType.LOCAL_TIME_TYPE);
            break;
        case IRIS_DATETIME:
        case IRIS_DATETIME2:
        case IRIS_SMALLDATETIME:
        case IRIS_TIMESTAMP:
        case IRIS_TIMESTAMP2:
        case IRIS_POSIXTIME:
            builder.dataType(LocalTimeType.LOCAL_DATE_TIME_TYPE);
            break;
        // Bounded binary types.
        case IRIS_BINARY:
        case IRIS_BINARY_VARYING:
        case IRIS_RAW:
        case IRIS_VARBINARY:
            builder.dataType(PrimitiveByteArrayType.INSTANCE);
            builder.columnLength(charOrBinaryLength);
            break;
        // Unbounded binary (BLOB-like) types.
        case IRIS_LONGVARBINARY:
        case IRIS_BLOB:
        case IRIS_IMAGE:
        case IRIS_LONG_BINARY:
        case IRIS_LONG_RAW:
            builder.dataType(PrimitiveByteArrayType.INSTANCE);
            builder.columnLength(Long.valueOf(Integer.MAX_VALUE));
            break;
        default:
            throw CommonError.convertToSeaTunnelTypeError(
                DatabaseIdentifier.IRIS, irisDataType, typeDefine.getName());
    }
    return builder.build();
}
|
@Test
public void testConvertTinyint() {
    // An IRIS "tinyint" column must convert to SeaTunnel's BYTE type, keeping
    // the original name and source column type.
    BasicTypeDefine<Object> tinyintDefine =
        BasicTypeDefine.builder()
            .name("test")
            .columnType("tinyint")
            .dataType("tinyint")
            .build();
    Column converted = IrisTypeConverter.INSTANCE.convert(tinyintDefine);
    Assertions.assertEquals(tinyintDefine.getName(), converted.getName());
    Assertions.assertEquals(BasicType.BYTE_TYPE, converted.getDataType());
    Assertions.assertEquals(tinyintDefine.getColumnType(), converted.getSourceType());
}
|
// Renders the point in InfluxDB line-protocol shape:
//   measurement[,tag1=v1,...] value=<value> <timestamp>
// The tag section (and its leading comma) is omitted when there are no tags.
@Override
public String toString() {
    StringBuilder line = new StringBuilder(measurement);
    String renderedTags = map2String(tags);
    if (renderedTags.length() > 0) {
        line.append(',').append(renderedTags);
    }
    return line.append(" value=").append(value)
            .append(' ').append(timestamp)
            .toString();
}
|
@Test
public void testToString() {
    // LinkedHashMap preserves insertion order, so the rendered tag order is
    // deterministic: key1,key2,key3.
    Map<String, String> tags = new LinkedHashMap<>();
    tags.put("key1", "value1");
    tags.put("key2", "value2");
    tags.put("key3", "value3");
    InfluxDbPoint point = new InfluxDbPoint("counter", tags, 1234567890, "123");
    Assert.assertEquals("counter,key1=value1,key2=value2,key3=value3 value=123 1234567890", point.toString());
}
|
// An evidence matches when its source, confidence, name (case-insensitive),
// and value all match. Short-circuit order is preserved: cheap checks run
// before valueMatches.
public boolean matches(Evidence evidence) {
    return sourceMatches(evidence)
        && confidenceMatches(evidence)
        && name.equalsIgnoreCase(evidence.getName())
        && valueMatches(evidence);
}
|
@Test
public void testExactMatching() throws Exception {
    // A non-regex matcher pinned to HIGHEST confidence must accept only the
    // evidence with that exact confidence and reject all others.
    final EvidenceMatcher exactMatcherHighest = new EvidenceMatcher("source", "name", "value", false, Confidence.HIGHEST);
    assertTrue("exact matcher should match EVIDENCE_HIGHEST", exactMatcherHighest.matches(EVIDENCE_HIGHEST));
    assertFalse("exact matcher should not match EVIDENCE_HIGH", exactMatcherHighest.matches(EVIDENCE_HIGH));
    assertFalse("exact matcher should not match EVIDENCE_MEDIUM", exactMatcherHighest.matches(EVIDENCE_MEDIUM));
    assertFalse("exact matcher should not match EVIDENCE_MEDIUM_SECOND_SOURCE", exactMatcherHighest.matches(EVIDENCE_MEDIUM_SECOND_SOURCE));
    assertFalse("exact matcher should not match EVIDENCE_LOW", exactMatcherHighest.matches(EVIDENCE_LOW));
}
|
/**
 * Adds every element of {@code c} to this queue via {@link #add}, which may
 * throw (e.g. IllegalStateException when capacity is exceeded — see tests).
 *
 * Bug fix: the original returned {@code true} unconditionally, violating the
 * {@link java.util.Collection#addAll} contract for an empty argument; we now
 * report whether this collection actually changed.
 *
 * @param c elements to add
 * @return true if at least one element was added
 */
@Override
public boolean addAll(Collection<? extends E> c) {
    boolean modified = false;
    for (E e : c) {
        add(e);
        modified = true;
    }
    return modified;
}
|
// Adding six elements exceeds the queue's capacity; add() is expected to
// throw IllegalStateException partway through addAll.
@Test(expected = IllegalStateException.class)
public void testAddAll_whenOverCapacity_thenThrowException() {
    queue.addAll(asList(1, 2, 3, 4, 5, 6));
}
|
/**
 * Creates the ZooKeeper node at {@code key} (creating parents as needed) or
 * overwrites its data if it already exists. A null/empty value is stored as
 * an empty UTF-8 byte array. Any Curator failure is wrapped in ShenyuException.
 */
public void createOrUpdate(final String key, final String value, final CreateMode mode) {
    String val = StringUtils.isEmpty(value) ? "" : value;
    try {
        client.create().orSetData().creatingParentsIfNeeded().withMode(mode).forPath(key, val.getBytes(StandardCharsets.UTF_8));
    } catch (Exception e) {
        // Wrap checked Curator exceptions in the project's unchecked type.
        throw new ShenyuException(e);
    }
}
|
@Test
void createOrUpdate() throws Exception {
    // Before the Curator fluent chain is stubbed, create() returns null and the
    // resulting failure must surface as ShenyuException.
    assertThrows(ShenyuException.class, () ->
        client.createOrUpdate("/test", "hello", CreateMode.PERSISTENT));
    // Stub the whole create().orSetData().creatingParentsIfNeeded().withMode().forPath() chain.
    CreateBuilder createBuilder = mock(CreateBuilder.class);
    when(curatorFramework.create()).thenReturn(createBuilder);
    CreateBuilder2 createBuilder2 = mock(CreateBuilder2.class);
    when(createBuilder.orSetData()).thenReturn(createBuilder2);
    ProtectACLCreateModeStatPathAndBytesable protectACLCreateModeStatPathAndBytesable = mock(ProtectACLCreateModeStatPathAndBytesable.class);
    when(createBuilder2.creatingParentsIfNeeded()).thenReturn(protectACLCreateModeStatPathAndBytesable);
    ACLBackgroundPathAndBytesable pathAndBytesable = mock(ACLBackgroundPathAndBytesable.class);
    when(protectACLCreateModeStatPathAndBytesable.withMode(any())).thenReturn(pathAndBytesable);
    when(pathAndBytesable.forPath(anyString(), any(byte[].class))).thenReturn(null);
    // With the chain stubbed, all variants (empty key, null/Object values —
    // presumably exercising overloads; confirm against the client API) succeed.
    client.createOrUpdate("/test", "hello", CreateMode.PERSISTENT);
    client.createOrUpdate("", "hello", CreateMode.PERSISTENT);
    client.createOrUpdate("", (Object) null, CreateMode.PERSISTENT);
    client.createOrUpdate("", new Object(), CreateMode.PERSISTENT);
}
|
// Opens a blob file for writing under the key's directory.
// NOTE(review): the second ctor argument is hard-coded to true (presumably a
// write/overwrite flag) while `create` is forwarded — confirm against the
// HdfsBlobStoreFile constructor signature.
public BlobStoreFile write(String key, boolean create) throws IOException {
    return new HdfsBlobStoreFile(getKeyDir(key), true, create, hadoopConf);
}
|
@Test
public void testGetFileLength() throws Exception {
    Map<String, Object> conf = new HashMap<>();
    String validKey = "validkeyBasic";
    String testString = "testingblob";
    try (TestHdfsBlobStoreImpl hbs = new TestHdfsBlobStoreImpl(blobDir, conf, DFS_CLUSTER_EXTENSION.getHadoopConf())) {
        BlobStoreFile pfile = hbs.write(validKey, false);
        // Adding metadata to avoid null pointer exception
        SettableBlobMeta meta = new SettableBlobMeta();
        meta.set_replication_factor(1);
        pfile.setMetadata(meta);
        // Write a known payload, then verify the reported file length matches
        // the UTF-8 byte count of what was written.
        try (OutputStream ios = pfile.getOutputStream()) {
            ios.write(testString.getBytes(StandardCharsets.UTF_8));
        }
        assertEquals(testString.getBytes(StandardCharsets.UTF_8).length, pfile.getFileLength());
    }
}
|
/**
 * Validates that {@code argument} is neither null nor zero-length and returns
 * it unchanged. Note: whitespace-only strings are accepted — only emptiness is
 * checked, not blankness.
 *
 * @throws IllegalArgumentException with {@code errorMessage} on null/empty input
 */
@Nonnull
public static String checkHasText(String argument, String errorMessage) {
    if (argument != null && !argument.isEmpty()) {
        return argument;
    }
    throw new IllegalArgumentException(errorMessage);
}
|
@Test
public void checkHasText() {
    // NOTE(review): these calls take a boolean, so this is presumably a local
    // (String, boolean expectSuccess) helper that wraps the focal
    // checkHasText(String, String) — confirm the helper exists in this class.
    checkHasText(null, false);
    checkHasText("", false);
    checkHasText("foobar", true);
}
|
/**
 * Builds a command packet from the payload. Simple-protocol packets skip the
 * already-consumed type byte and decode directly; extended-protocol packets are
 * aggregated, since the client may pipeline several messages in one buffer.
 */
public static PostgreSQLCommandPacket newInstance(final PostgreSQLCommandPacketType commandPacketType, final PostgreSQLPacketPayload payload) {
    if (!PostgreSQLCommandPacketType.isExtendedProtocolPacketType(commandPacketType)) {
        // Skip the 1-byte message type; the packet parser expects to start at the body.
        payload.getByteBuf().skipBytes(1);
        return getPostgreSQLCommandPacket(commandPacketType, payload);
    }
    List<PostgreSQLCommandPacket> result = new ArrayList<>();
    while (payload.hasCompletePacket()) {
        // Each message: 1-byte type, then a 4-byte length that counts itself
        // plus the body — so readSlice(length) consumes exactly one message.
        PostgreSQLCommandPacketType type = PostgreSQLCommandPacketType.valueOf(payload.readInt1());
        int length = payload.getByteBuf().getInt(payload.getByteBuf().readerIndex());
        PostgreSQLPacketPayload slicedPayload = new PostgreSQLPacketPayload(payload.getByteBuf().readSlice(length), payload.getCharset());
        result.add(getPostgreSQLCommandPacket(type, slicedPayload));
    }
    return new PostgreSQLAggregatedCommandPacket(result);
}
|
@Test
void assertNewInstanceWithExecuteComPacket() {
    // EXECUTE is an extended-protocol type, so the factory must return the
    // aggregated wrapper rather than a single packet.
    assertThat(PostgreSQLCommandPacketFactory.newInstance(PostgreSQLCommandPacketType.EXECUTE_COMMAND, payload), instanceOf(PostgreSQLAggregatedCommandPacket.class));
}
|
// Stores the raw cipher-suite spec string; parsing/splitting happens later
// (the test passes a comma-separated list with stray whitespace).
public void setIncludedCipherSuites(String cipherSuites) {
    this.includedCipherSuites = cipherSuites;
}
|
@Test
public void testSetIncludedCipherSuites() throws Exception {
    // The comma-separated spec (with surrounding whitespace) must be trimmed
    // and applied during configure(), enabling exactly A, B, C, D.
    configurable.setSupportedCipherSuites(new String[] { "A", "B", "C", "D" });
    configuration.setIncludedCipherSuites("A,B ,C, D");
    configuration.configure(configurable);
    assertTrue(Arrays.equals(new String[] { "A", "B", "C", "D" }, configurable.getEnabledCipherSuites()));
}
|
// Decides section visibility: ALWAYS sections are unconditionally visible;
// otherwise the section shows only when its trigger option id was selected.
public boolean isVisibleBySelectedOptionIds(Collection<Long> selectedOptionIds) {
    if (visibleType == VisibleType.ALWAYS) {
        return true;
    }
    return selectedOptionIds.contains(onSelectedOptionId);
}
|
@Test
void 조건_옵션을_선택하면_섹션이_보인다() {
    // given: a CONDITIONAL section whose trigger option id is presumably the
    // 1L ctor argument — confirm against the Section constructor
    Section section = new Section(VisibleType.CONDITIONAL, List.of(), 1L, "섹션명", "말머리", 1);
    // when: the trigger option (1L) is among the selected ids
    boolean actual = section.isVisibleBySelectedOptionIds(List.of(1L, 2L, 3L));
    // then: the section is visible
    assertThat(actual).isTrue();
}
|
// Adapter: unwraps the server context to its RpcInvocation and delegates the
// key-value extraction to the shared superclass logic.
@Override
public KeyValues getLowCardinalityKeyValues(DubboServerContext context) {
    return super.getLowCardinalityKeyValues(context.getInvocation());
}
|
@Test
void testGetLowCardinalityKeyValues() throws NoSuchFieldException, IllegalAccessException {
    // Build an invocation carrying method/service identity, wrap it in a
    // server context, and verify the convention extracts rpc.* tags from it.
    RpcInvocation invocation = new RpcInvocation();
    invocation.setMethodName("testMethod");
    invocation.setAttachment("interface", "com.example.TestService");
    invocation.setTargetServiceUniqueName("targetServiceName1");
    Invoker<?> invoker = ObservationConventionUtils.getMockInvokerWithUrl();
    invocation.setInvoker(invoker);
    DubboServerContext context = new DubboServerContext(invoker, invocation);
    KeyValues keyValues = dubboServerObservationConvention.getLowCardinalityKeyValues(context);
    Assertions.assertEquals("testMethod", ObservationConventionUtils.getValueForKey(keyValues, "rpc.method"));
    Assertions.assertEquals(
        "targetServiceName1", ObservationConventionUtils.getValueForKey(keyValues, "rpc.service"));
    Assertions.assertEquals("apache_dubbo", ObservationConventionUtils.getValueForKey(keyValues, "rpc.system"));
}
|
// Empties the table: drops every row and resets the tracked key bounds.
public void removeAll() {
    rows.clear();
    firstKey = null;
    lastKey = null;
}
|
@Test
public void testRemoveAll() {
    Table table = new Table(TWO_COLUMN_TABLE);
    // Removing from an empty table reports no change.
    assertFalse(table.removeAt(ROW_1));
    table.put(1, ROW_1);
    table.put(2, ROW_2);
    table.put(3, ROW_3);
    // removeAll must leave the table completely empty.
    table.removeAll();
    assertEquals(0, table.count());
}
|
// Wraps the actual filtering (doApply) in an execution-time metric; the
// try-with-resources timer context records the duration even on exceptions.
@Override
public FilteredMessage apply(Message msg) {
    try (var ignored = executionTime.time()) {
        return doApply(msg);
    }
}
|
@Test
void applyWithNoFilterAndOneDestination(MessageFactory messageFactory) {
    // With no filter rules and a single "indexer" destination, the message must
    // pass through unchanged and be routed only to that destination.
    final var filter = createFilter(Map.of(), Set.of("indexer"));
    final var message = messageFactory.createMessage("msg", "src", Tools.nowUTC());
    message.addStream(defaultStream);
    final var filteredMessage = filter.apply(message);
    assertThat(filteredMessage.message()).isEqualTo(ImmutableMessage.wrap(message));
    assertThat(filteredMessage.destinations().keySet()).containsExactlyInAnyOrder("indexer");
    assertThat(filteredMessage.destinations().get("indexer")).containsExactlyInAnyOrder(defaultStream);
}
|
/**
 * Returns whether {@code link} is a full-string match of the timecode link
 * pattern; null is never a timecode link.
 *
 * Improvement: use the already-compiled {@code TIMECODE_LINK_REGEX} via
 * {@link java.util.regex.Matcher#matches()} instead of {@code String#matches},
 * which recompiled the pattern on every call.
 */
public static boolean isTimecodeLink(String link) {
    return link != null && TIMECODE_LINK_REGEX.matcher(link).matches();
}
|
@Test
public void testIsTimecodeLink() {
    // Null and non-matching schemes/hosts/paths are rejected; only
    // antennapod://timecode/<digits> with a purely numeric suffix matches.
    assertFalse(ShownotesCleaner.isTimecodeLink(null));
    assertFalse(ShownotesCleaner.isTimecodeLink("http://antennapod/timecode/123123"));
    assertFalse(ShownotesCleaner.isTimecodeLink("antennapod://timecode/"));
    assertFalse(ShownotesCleaner.isTimecodeLink("antennapod://123123"));
    assertFalse(ShownotesCleaner.isTimecodeLink("antennapod://timecode/123123a"));
    assertTrue(ShownotesCleaner.isTimecodeLink("antennapod://timecode/123"));
    assertTrue(ShownotesCleaner.isTimecodeLink("antennapod://timecode/1"));
}
|
// Builds a PostgreSQL error-response packet: if the cause chain contains a
// native ServerErrorMessage, use it directly; otherwise transform the cause
// into a generic SQLException first.
public static PostgreSQLErrorResponsePacket newInstance(final Exception cause) {
    Optional<ServerErrorMessage> serverErrorMessage = findServerErrorMessage(cause);
    return serverErrorMessage.map(PostgreSQLErrorPacketFactory::createErrorResponsePacket)
        .orElseGet(() -> createErrorResponsePacket(SQLExceptionTransformEngine.toSQLException(cause, DATABASE_TYPE)));
}
|
@Test
void assertPSQLExceptionWithServerErrorMessageNotNull() throws ReflectiveOperationException {
    // When the PSQLException carries a ServerErrorMessage, its fields must be
    // copied into the response packet verbatim (position serialized as text).
    ServerErrorMessage serverErrorMessage = mock(ServerErrorMessage.class);
    when(serverErrorMessage.getSeverity()).thenReturn(PostgreSQLMessageSeverityLevel.FATAL);
    when(serverErrorMessage.getSQLState()).thenReturn("sqlState");
    when(serverErrorMessage.getMessage()).thenReturn("message");
    when(serverErrorMessage.getPosition()).thenReturn(1);
    PostgreSQLErrorResponsePacket actual = PostgreSQLErrorPacketFactory.newInstance(new PSQLException(serverErrorMessage));
    // Reflectively read the packet's private field map to assert its content.
    Map<Character, String> fields = (Map<Character, String>) Plugins.getMemberAccessor().get(PostgreSQLErrorResponsePacket.class.getDeclaredField("fields"), actual);
    assertThat(fields.get(PostgreSQLErrorResponsePacket.FIELD_TYPE_SEVERITY), is(PostgreSQLMessageSeverityLevel.FATAL));
    assertThat(fields.get(PostgreSQLErrorResponsePacket.FIELD_TYPE_CODE), is("sqlState"));
    assertThat(fields.get(PostgreSQLErrorResponsePacket.FIELD_TYPE_MESSAGE), is("message"));
    assertThat(fields.get(PostgreSQLErrorResponsePacket.FIELD_TYPE_POSITION), is("1"));
}
|
/**
 * Prepares and executes each parsed statement in order, collecting any
 * resulting entities. Session variables are substituted during prepare only
 * when variable substitution is enabled for the session.
 *
 * @param securityContext   caller's security context
 * @param statements        parsed statements to run
 * @param sessionProperties per-session properties (variables, flags)
 * @return entities produced by the executed statements (may be empty)
 */
public KsqlEntityList execute(
    final KsqlSecurityContext securityContext,
    final List<ParsedStatement> statements,
    final SessionProperties sessionProperties
) {
    final KsqlEntityList entities = new KsqlEntityList();
    for (final ParsedStatement parsed : statements) {
        final PreparedStatement<?> prepared = ksqlEngine.prepare(
            parsed,
            // Substitute session variables only when the feature is enabled.
            (isVariableSubstitutionEnabled(sessionProperties)
                ? sessionProperties.getSessionVariables()
                : Collections.emptyMap())
        );
        // Not every statement yields an entity; only present results are added.
        executeStatement(
            securityContext,
            prepared,
            sessionProperties,
            entities
        ).ifPresent(entities::add);
    }
    return entities;
}
|
@Test
public void shouldDefaultToDistributor() {
    // Given: a handler with no custom executors registered.
    givenRequestHandler(ImmutableMap.of());
    // When: the stream SQL is parsed and executed.
    final List<ParsedStatement> statements = KSQL_PARSER.parse(SOME_STREAM_SQL);
    final KsqlEntityList entities = handler.execute(securityContext, statements, sessionProperties);
    // Then: each statement falls through to the distributor (invoked twice,
    // presumably one call per statement in SOME_STREAM_SQL — confirm fixture).
    assertThat(entities, contains(entity));
    verify(distributor, times(2))
        .execute(argThat(is(configured(
            preparedStatement(instanceOf(CreateStream.class)),
            ImmutableMap.of(),
            ksqlConfig))),
            eq(ksqlEngine),
            eq(securityContext)
        );
}
|
/**
 * Issues a GET to {@code path} with optional headers and returns true only
 * when the response code is 200 and the body is non-blank. Connect and read
 * timeouts are both 2 seconds; any exception yields false.
 *
 * Fixes: the BufferedReader was never closed (leak) — it is now managed by
 * try-with-resources — and the single-threaded StringBuffer is replaced by
 * StringBuilder. Behavior is otherwise unchanged (body is still read before
 * the response-code check, and the connection is always disconnected).
 *
 * @param path    absolute URL to request
 * @param headers request headers to set (may be null/empty)
 * @return true on HTTP 200 with a non-blank body, false otherwise
 */
public static boolean get(String path, Map<String, String> headers) {
    HttpURLConnection conn = null;
    try {
        URL url = new URL(path);
        conn = (HttpURLConnection) url.openConnection();
        conn.setRequestMethod("GET");
        conn.setConnectTimeout((int) TimeUnit.SECONDS.toMillis(2));
        conn.setReadTimeout((int) TimeUnit.SECONDS.toMillis(2));
        if (!CollectionUtils.isEmpty(headers)) {
            headers.forEach(conn::setRequestProperty);
        }
        StringBuilder buffer = new StringBuilder();
        // try-with-resources: the original leaked this reader on every call.
        try (BufferedReader reader = new BufferedReader(new InputStreamReader(conn.getInputStream()))) {
            String str;
            while ((str = reader.readLine()) != null) {
                buffer.append(str);
            }
        }
        String responseBody = buffer.toString();
        if (conn.getResponseCode() == 200 && StringUtils.hasText(responseBody)) {
            LOGGER.debug("exec get request, url: {} success, response data: {}", url, responseBody);
            return true;
        }
    }
    catch (Exception e) {
        // getInputStream throws for 4xx/5xx responses; those land here too.
        LOGGER.error("exec get request, url: {} failed!", path, e);
        return false;
    }
    finally {
        if (null != conn) {
            conn.disconnect();
        }
    }
    return false;
}
|
@Test
public void testGet() {
    // Reachable endpoint with a 200/non-blank body -> true; error endpoint and
    // unreachable port -> false (exception path).
    assertThat(OkHttpUtil.get("http://localhost:" + port + "/test", Maps.newHashMap("key", "value"))).isTrue();
    assertThat(OkHttpUtil.checkUrl("localhost", port, "/test", Maps.newHashMap("key", "value"))).isTrue();
    assertThat(OkHttpUtil.checkUrl("localhost", port, "test", Maps.newHashMap("key", "value"))).isTrue();
    assertThat(OkHttpUtil.get("http://localhost:" + port + "/error", Maps.newHashMap("key", "value"))).isFalse();
    assertThat(OkHttpUtil.get("http://localhost:55555/error", Maps.newHashMap("key", "value"))).isFalse();
}
|
// Looks up user credentials by activation token.
// NOTE(review): tenantId is accepted for interface compatibility but not used
// in the underlying repository query — confirm this is intentional.
@Override
public UserCredentials findByActivateToken(TenantId tenantId, String activateToken) {
    return DaoUtil.getData(userCredentialsRepository.findByActivateToken(activateToken));
}
|
@Test
public void testFindByActivateToken() {
    // The credentials stored under ACTIVATE_TOKEN must be found and carry the
    // same id as the fixture record.
    UserCredentials found = userCredentialsDao.findByActivateToken(SYSTEM_TENANT_ID, ACTIVATE_TOKEN);
    assertNotNull(found);
    assertEquals(neededUserCredentials.getId(), found.getId());
}
|
/**
 * Atomically removes the account's confirmed username: updates the account
 * record, places a temporary hold on the released username hash, and (when the
 * hold limit is exceeded) releases the oldest hold — all in one DynamoDB
 * transaction. On success the in-memory account object is mutated to match.
 * Version conflicts and transaction conflicts surface as
 * ContestedOptimisticLockException so callers can retry.
 *
 * @param account the account whose username hash should be cleared
 * @return future completing when the transaction finishes
 */
public CompletableFuture<Void> clearUsernameHash(final Account account) {
    if (account.getUsernameHash().isEmpty()) {
        // no username to clear
        return CompletableFuture.completedFuture(null);
    }
    final byte[] usernameHash = account.getUsernameHash().get();
    final Timer.Sample sample = Timer.start();
    // Work on a clone so the caller's object stays untouched until commit.
    final Account updatedAccount = AccountUtil.cloneAccountAsNotStale(account);
    updatedAccount.setUsernameHash(null);
    updatedAccount.setUsernameLinkDetails(null, null);
    final Instant now = clock.instant();
    final Optional<byte[]> holdToRemove = addToHolds(updatedAccount, usernameHash, now);
    final List<TransactWriteItem> items = new ArrayList<>();
    // 0: remove the username from the account object, conditioned on account version
    items.add(UpdateAccountSpec.forAccount(accountsTableName, updatedAccount).transactItem());
    // 1: Un-confirm our username, adding a temporary hold for the old username to stop others from claiming it
    items.add(holdUsernameTransactItem(updatedAccount.getUuid(), usernameHash, now));
    // 2?: Adding that hold may have caused our account to exceed our maximum holds. Release an old hold
    holdToRemove.ifPresent(oldHold -> items.add(releaseHoldIfAllowedTransactItem(updatedAccount.getUuid(), oldHold, now)));
    return asyncClient.transactWriteItems(TransactWriteItemsRequest.builder().transactItems(items).build())
        .thenAccept(ignored -> {
            // Commit succeeded: reflect the change on the caller's instance.
            account.setUsernameHash(null);
            account.setUsernameLinkDetails(null, null);
            account.setVersion(account.getVersion() + 1);
            account.setUsernameHolds(updatedAccount.getUsernameHolds());
        })
        .exceptionally(ExceptionUtils.exceptionallyHandler(TransactionCanceledException.class, e -> {
            if (conditionalCheckFailed(e.cancellationReasons().get(0)) // Account version conflict
                // When we looked at the holds on our account, we thought we still held the corresponding username
                // reservation. But it turned out that someone else has taken the reservation since. This means that the
                // TTL on the hold must have just expired, so if we retry we should see that our hold is expired, and we
                // won't try to remove it again.
                || (e.cancellationReasons().size() > 2 && conditionalCheckFailed(e.cancellationReasons().get(2)))
                // concurrent update on any table
                || e.cancellationReasons().stream().anyMatch(Accounts::isTransactionConflict)) {
                throw new ContestedOptimisticLockException();
            } else {
                throw ExceptionUtils.wrap(e);
            }
        }))
        .whenComplete((ignored, throwable) -> sample.stop(CLEAR_USERNAME_HASH_TIMER));
}
|
@Test
void testClearUsernameNoUsername() {
    // An account with no username must hit the early-return path and complete
    // without touching storage or throwing.
    final Account account = generateAccount("+18005551234", UUID.randomUUID(), UUID.randomUUID());
    createAccount(account);
    assertThatNoException().isThrownBy(() -> accounts.clearUsernameHash(account).join());
}
|
// Promotes every pending reduce container request into the scheduled set,
// then empties the pending backlog.
@Private
public void scheduleAllReduces() {
    pendingReduces.forEach(scheduledRequests::addReduce);
    pendingReduces.clear();
}
|
@Test
public void testUnsupportedReduceContainerRequirement() throws Exception {
    // A reduce request exceeding the cluster's max container capability must be
    // rejected, so no reduce ask ever reaches the RM-side mock scheduler.
    final Resource maxContainerSupported = Resource.newInstance(1, 1);
    final ApplicationId appId = ApplicationId.newInstance(1, 1);
    final ApplicationAttemptId appAttemptId = ApplicationAttemptId.newInstance(
        appId, 1);
    final JobId jobId =
        MRBuilderUtils.newJobId(appAttemptId.getApplicationId(), 0);
    final MockScheduler mockScheduler = new MockScheduler(appAttemptId);
    final Configuration conf = new Configuration();
    // Allocator stub: no real RM registration, scheduling goes to the mock.
    final MyContainerAllocator allocator = new MyContainerAllocator(null,
        conf, appAttemptId, mock(Job.class), SystemClock.getInstance()) {
        @Override
        protected void register() {
        }
        @Override
        protected ApplicationMasterProtocol createSchedulerProxy() {
            return mockScheduler;
        }
        @Override
        protected Resource getMaxContainerCapability() {
            return maxContainerSupported;
        }
    };
    // Request 10 MB more memory than the maximum supported container size.
    final int memory = (int) (maxContainerSupported.getMemorySize() + 10);
    ContainerRequestEvent reduceRequestEvt = createRequest(jobId, 0,
        Resource.newInstance(memory,
            maxContainerSupported.getVirtualCores()),
        new String[0], false, true);
    allocator.sendRequests(Arrays.asList(reduceRequestEvt));
    // Reducer container requests are added to the pending queue upon request,
    // schedule all reducers here so that we can observe if reducer requests
    // are accepted by RMContainerAllocator on RM side.
    allocator.scheduleAllReduces();
    allocator.schedule();
    Assert.assertEquals(0, mockScheduler.lastAnyAskReduce);
}
|
/**
 * Returns a flattened view of this context's variables merged over the parent
 * chain. Parent variables are inserted first, so entries defined locally
 * shadow parent entries with the same key. The returned map is a fresh copy;
 * mutating it does not affect the context.
 */
@Override
public Map<String, Object> getVariables() {
    final Map<String, Object> collectedVariables = new HashMap<>();
    if (parent != null) {
        collectedVariables.putAll(parent.getVariables());
    }
    // Idiom fix: putAll replaces the element-wise variables.forEach(put) —
    // identical semantics, clearer intent.
    collectedVariables.putAll(variables);
    return collectedVariables;
}
|
@Test
public void testGetVariables() {
    // A child with no variables of its own must still see the parent's one
    // variable through the merged view.
    ProcessContextImpl context = new ProcessContextImpl();
    ProcessContextImpl parentContext = new ProcessContextImpl();
    parentContext.setVariable("key", "value");
    context.setParent(parentContext);
    Assertions.assertEquals(1, context.getVariables().size());
}
|
/**
 * Verifies a distro client against local state. Succeeds (and refreshes the
 * client's beat) when the client exists and either the remote revision matches
 * the local one or the remote sent revision 0 (old-version peers always verify
 * with zero). Returns false for unknown clients or revision mismatches.
 */
@Override
public boolean verifyClient(DistroClientVerifyInfo verifyData) {
    String clientId = verifyData.getClientId();
    IpPortBasedClient client = clients.get(clientId);
    if (null != client) {
        // remote node of old version will always verify with zero revision
        if (0 == verifyData.getRevision() || client.getRevision() == verifyData.getRevision()) {
            // Successful verification doubles as a heartbeat update.
            NamingExecuteTaskDispatcher.getInstance()
                .dispatchAndExecuteTask(clientId, new ClientBeatUpdateTask(client));
            return true;
        } else {
            Loggers.DISTRO.info("[DISTRO-VERIFY-FAILED] IpPortBasedClient[{}] revision local={}, remote={}",
                client.getClientId(), client.getRevision(), verifyData.getRevision());
        }
    }
    return false;
}
|
@Test
void testVerifyClient0() {
    // Revision 0 (old-version peer) must verify successfully for known clients.
    assertTrue(ephemeralIpPortClientManager.verifyClient(new DistroClientVerifyInfo(ephemeralIpPortId, 0)));
    assertTrue(ephemeralIpPortClientManager.verifyClient(new DistroClientVerifyInfo(syncedClientId, 0)));
}
|
/**
 * Returns a usable partition-request client for the given connection, creating
 * one if necessary. Concurrent callers race via putIfAbsent on a future; the
 * winner connects, losers wait on the future. The loop retries until a client
 * passes validation (reference count incremented), disposing or evicting
 * stale clients along the way.
 *
 * @throws IOException          if the connection cannot be established
 * @throws InterruptedException if interrupted while connecting
 */
NettyPartitionRequestClient createPartitionRequestClient(ConnectionID connectionId)
    throws IOException, InterruptedException {
    // We map the input ConnectionID to a new value to restrict the number of tcp connections
    connectionId =
        new ConnectionID(
            connectionId.getResourceID(),
            connectionId.getAddress(),
            connectionId.getConnectionIndex() % maxNumberOfConnections);
    while (true) {
        final CompletableFuture<NettyPartitionRequestClient> newClientFuture =
            new CompletableFuture<>();
        final CompletableFuture<NettyPartitionRequestClient> clientFuture =
            clients.putIfAbsent(connectionId, newClientFuture);
        final NettyPartitionRequestClient client;
        if (clientFuture == null) {
            // This thread won the race: establish the connection itself.
            try {
                client = connectWithRetries(connectionId);
            } catch (Throwable e) {
                // Propagate the failure to waiters, then remove our future so a
                // later call can retry with a fresh connection attempt.
                newClientFuture.completeExceptionally(
                    new IOException("Could not create Netty client.", e));
                clients.remove(connectionId, newClientFuture);
                throw e;
            }
            newClientFuture.complete(client);
        } else {
            // Another thread is (or was) connecting: wait for its result.
            try {
                client = clientFuture.get();
            } catch (ExecutionException e) {
                ExceptionUtils.rethrowIOException(ExceptionUtils.stripExecutionException(e));
                return null;
            }
        }
        // Make sure to increment the reference count before handing a client
        // out to ensure correct bookkeeping for channel closing.
        if (client.validateClientAndIncrementReferenceCounter()) {
            return client;
        } else if (client.canBeDisposed()) {
            client.closeConnection();
        } else {
            destroyPartitionRequestClient(connectionId, client);
        }
    }
}
|
@TestTemplate
void testNettyClientConnectRetry() throws Exception {
    NettyTestUtil.NettyServerAndClient serverAndClient = createNettyServerAndClient();
    // The unstable client fails the first 2 connect attempts; with 2 retries
    // configured the factory must still end up with a working connection.
    UnstableNettyClient unstableNettyClient =
        new UnstableNettyClient(serverAndClient.client(), 2);
    PartitionRequestClientFactory factory =
        new PartitionRequestClientFactory(
            unstableNettyClient, 2, 1, connectionReuseEnabled);
    factory.createPartitionRequestClient(serverAndClient.getConnectionID(RESOURCE_ID, 0));
    shutdown(serverAndClient);
}
|
// Blocks indefinitely (timeout -1) until a semaphore lease can be acquired,
// then wraps the resulting lock node path in a Lease handle.
public Lease acquire() throws Exception {
    String path = internals.attemptLock(-1, null, null);
    return makeLease(path);
}
|
@Test
public void testSimple2() throws Exception {
    // Exhaust all MAX_LEASES leases, verify the next acquire times out, then
    // release one lease and verify acquisition succeeds again.
    final int MAX_LEASES = 3;
    Timing timing = new Timing();
    List<Lease> leases = Lists.newArrayList();
    CuratorFramework client = CuratorFrameworkFactory.newClient(
        server.getConnectString(), timing.session(), timing.connection(), new RetryOneTime(1));
    client.start();
    try {
        for (int i = 0; i < MAX_LEASES; ++i) {
            InterProcessSemaphoreV2 semaphore = new InterProcessSemaphoreV2(client, "/test", MAX_LEASES);
            Lease lease = semaphore.acquire(timing.forWaiting().seconds(), TimeUnit.SECONDS);
            assertNotNull(lease);
            leases.add(lease);
        }
        // All leases taken: a further acquire must time out and return null.
        InterProcessSemaphoreV2 semaphore = new InterProcessSemaphoreV2(client, "/test", MAX_LEASES);
        Lease lease = semaphore.acquire(timing.forWaiting().seconds(), TimeUnit.SECONDS);
        assertNull(lease);
        // Freeing one lease makes room for exactly one more acquisition.
        leases.remove(0).close();
        assertNotNull(semaphore.acquire(timing.forWaiting().seconds(), TimeUnit.SECONDS));
    } finally {
        for (Lease l : leases) {
            CloseableUtils.closeQuietly(l);
        }
        TestCleanState.closeAndTestClean(client);
    }
}
|
/**
 * Asynchronously fetches a single message at the given topic/queue/offset from
 * a remote broker.
 *
 * <p>The returned triple carries: the message (or null), a human-readable
 * error description, and a boolean "need retry" flag for the caller.
 */
protected CompletableFuture<Triple<MessageExt, String, Boolean>> getMessageFromRemoteAsync(String topic, long offset, int queueId, String brokerName) {
    try {
        String brokerAddr = this.brokerController.getTopicRouteInfoManager().findBrokerAddressInSubscribe(brokerName, MixAll.MASTER_ID, false);
        if (null == brokerAddr) {
            // The route may be stale: refresh from the name server and retry the lookup once.
            this.brokerController.getTopicRouteInfoManager().updateTopicRouteInfoFromNameServer(topic, true, false);
            brokerAddr = this.brokerController.getTopicRouteInfoManager().findBrokerAddressInSubscribe(brokerName, MixAll.MASTER_ID, false);
            if (null == brokerAddr) {
                LOG.warn("can't find broker address for topic {}, {}", topic, brokerName);
                return CompletableFuture.completedFuture(Triple.of(null, "brokerAddress not found", true)); // maybe offline temporarily, so need retry
            }
        }
        // Pull exactly one message (maxNums = 1) from the resolved broker address.
        return this.brokerController.getBrokerOuterAPI().pullMessageFromSpecificBrokerAsync(brokerName,
                brokerAddr, this.innerConsumerGroupName, topic, queueId, offset, 1, DEFAULT_PULL_TIMEOUT_MILLIS)
            .thenApply(pullResult -> {
                if (pullResult.getLeft() != null
                    && PullStatus.FOUND.equals(pullResult.getLeft().getPullStatus())
                    && CollectionUtils.isNotEmpty(pullResult.getLeft().getMsgFoundList())) {
                    // Success: first found message, empty error text, no retry.
                    return Triple.of(pullResult.getLeft().getMsgFoundList().get(0), "", false);
                }
                // Propagate the remote error description and retry flag unchanged.
                return Triple.of(null, pullResult.getMiddle(), pullResult.getRight());
            });
    } catch (Exception e) {
        // Fall through to the generic retryable failure below.
        LOG.error("Get message from remote failed. {}, {}, {}, {}", topic, offset, queueId, brokerName, e);
    }
    return CompletableFuture.completedFuture(Triple.of(null, "Get message from remote failed", true)); // need retry
}
|
/**
 * When the remote pull returns FOUND with one message, the resulting triple
 * must carry that message, an empty error description, and retry == false.
 */
@Test
public void getMessageFromRemoteAsyncTest_message_found() throws Exception {
    // Stub a successful pull containing exactly one message.
    PullResult pullResult = new PullResult(PullStatus.FOUND, 1, 1, 1, Arrays.asList(new MessageExt()));
    when(brokerOuterAPI.pullMessageFromSpecificBrokerAsync(anyString(), anyString(), anyString(), anyString(), anyInt(), anyLong(), anyInt(), anyLong()))
        .thenReturn(CompletableFuture.completedFuture(Triple.of(pullResult, "", false))); // right value is ignored
    Triple<MessageExt, String, Boolean> rst = escapeBridge.getMessageFromRemoteAsync(TEST_TOPIC, 1, DEFAULT_QUEUE_ID, BROKER_NAME).join();
    Assert.assertNotNull(rst.getLeft());
    Assert.assertTrue(StringUtils.isEmpty(rst.getMiddle()));
    Assert.assertFalse(rst.getRight()); // no retry
}
|
/**
 * Removes the mapping with the given name from the relations storage and
 * notifies all registered table listeners on success.
 *
 * @param name     name of the mapping to remove
 * @param ifExists when {@code true}, a missing mapping is silently ignored;
 *                 when {@code false}, a missing mapping raises an error
 * @throws QueryException if the mapping does not exist and {@code ifExists} is false
 */
public void removeMapping(String name, boolean ifExists) {
    boolean removed = relationsStorage.removeMapping(name) != null;
    if (removed) {
        listeners.forEach(TableListener::onTableChanged);
        return;
    }
    if (!ifExists) {
        throw QueryException.error("Mapping does not exist: " + name);
    }
}
|
/**
 * Removing a non-existing mapping with {@code ifExists == true} must succeed
 * silently and must not notify any table listener.
 */
@Test
public void when_removesNonExistingMappingWithIfExists_then_succeeds() {
    // given: storage reports that no mapping with this name was present
    String mappingName = "name";
    given(relationsStorage.removeMapping(mappingName)).willReturn(null);

    // when: removal is requested with ifExists == true
    catalog.removeMapping(mappingName, true);

    // then: no exception and listeners are never touched
    verifyNoInteractions(listener);
}
|
/**
 * Selects the configured capture group from the given text.
 *
 * @param text the text to run the regex against
 * @return the content of capture group {@code group}; NOTE(review): null/absent
 *         semantics depend on selectGroup's contract — confirm with that method
 */
@Override
public String select(String text) {
    // selectGroup(text) yields an indexed collection of captured groups;
    // 'group' picks the one this selector was configured with.
    return selectGroup(text).get(group);
}
|
/**
 * Ensures zero-width assertions (lookahead and negative lookahead) in a
 * pattern are honoured by {@code RegexSelector#select(String)}.
 */
@Test
public void testRegexWithZeroWidthAssertions() {
    // Lookahead for '?' combined with a negative lookahead excluding "?yy".
    String lookaheadRegex = "^.*(?=\\?)(?!\\?yy)";
    String lookaheadSource = "hello world?xx?yy";
    Assertions.assertThat(new RegexSelector(lookaheadRegex).select(lookaheadSource))
            .isEqualTo("hello world");

    // Three digits that must not be followed by another digit.
    String digitsRegex = "\\d{3}(?!\\d)";
    String digitsSource = "123456asdf";
    Assertions.assertThat(new RegexSelector(digitsRegex).select(digitsSource))
            .isEqualTo("456");
}
|
public abstract byte[] encode(MutableSpan input);
|
/**
 * Serializing a shared server span must emit {@code "kind":"SERVER"} and
 * {@code "shared":true} in the JSON V2 representation.
 */
@Test void span_shared_JSON_V2() {
    // 'span' aliases clientSpan, so the mutations below affect the object that is encoded.
    MutableSpan span = clientSpan;
    span.kind(Kind.SERVER);
    span.setShared();
    assertThat(new String(encoder.encode(clientSpan), UTF_8))
        .isEqualTo(
            "{\"traceId\":\"7180c278b62e8f6a216a2aea45d08fc9\",\"parentId\":\"6b221d5bc9e6496c\",\"id\":\"5b4185666d50f68b\",\"kind\":\"SERVER\",\"name\":\"get\",\"timestamp\":1472470996199000,\"duration\":207000,\"localEndpoint\":{\"serviceName\":\"frontend\",\"ipv4\":\"127.0.0.1\"},\"remoteEndpoint\":{\"serviceName\":\"backend\",\"ipv4\":\"192.168.99.101\",\"port\":9000},\"annotations\":[{\"timestamp\":1472470996238000,\"value\":\"foo\"},{\"timestamp\":1472470996403000,\"value\":\"bar\"}],\"tags\":{\"clnt/finagle.version\":\"6.45.0\",\"http.path\":\"/api\"},\"shared\":true}");
}
|
/**
 * Parses a GraphQL HTTP POST body (JSON) into its operationName / query /
 * variables components.
 *
 * @param postData raw POST body bytes
 * @param contentEncoding charset of the body; when empty, falls back to the
 *                        default URL argument encoding
 * @return the extracted GraphQL request parameters
 * @throws JsonProcessingException if the variables object cannot be re-serialized
 * @throws UnsupportedEncodingException if the charset name is not supported
 * @throws IllegalArgumentException if the body is not valid JSON, lacks a valid
 *                                  query, or has non-object variables
 */
public static GraphQLRequestParams toGraphQLRequestParams(byte[] postData, final String contentEncoding)
        throws JsonProcessingException, UnsupportedEncodingException {
    final String encoding = StringUtils.isNotEmpty(contentEncoding) ? contentEncoding
            : EncoderCache.URL_ARGUMENT_ENCODING;
    ObjectNode data;
    try (InputStreamReader reader = new InputStreamReader(new ByteArrayInputStream(postData), encoding)) {
        data = OBJECT_MAPPER.readValue(reader, ObjectNode.class);
    } catch (IOException e) {
        throw new IllegalArgumentException("Invalid json data: " + e.getLocalizedMessage(), e);
    }
    String operationName = null;
    String query;
    String variables = null;
    // 'operationName' is optional.
    final JsonNode operationNameNode = data.has(OPERATION_NAME_FIELD) ? data.get(OPERATION_NAME_FIELD) : null;
    if (operationNameNode != null) {
        operationName = getJsonNodeTextContent(operationNameNode, true);
    }
    // 'query' is mandatory.
    if (!data.has(QUERY_FIELD)) {
        throw new IllegalArgumentException("Not a valid GraphQL query.");
    }
    final JsonNode queryNode = data.get(QUERY_FIELD);
    query = getJsonNodeTextContent(queryNode, false);
    final String trimmedQuery = StringUtils.trim(query);
    // NOTE(review): only "query"/"mutation" prefixes are accepted here; anonymous
    // operations ("{ ... }") and "subscription" are rejected — confirm intended.
    if (!StringUtils.startsWith(trimmedQuery, QUERY_FIELD) && !StringUtils.startsWith(trimmedQuery, "mutation")) {
        throw new IllegalArgumentException("Not a valid GraphQL query.");
    }
    // 'variables', when present and non-null, must be a JSON object; it is
    // re-serialized to a compact JSON string.
    final JsonNode variablesNode = data.has(VARIABLES_FIELD) ? data.get(VARIABLES_FIELD) : null;
    if (variablesNode != null) {
        final JsonNodeType nodeType = variablesNode.getNodeType();
        if (nodeType != JsonNodeType.NULL) {
            if (nodeType == JsonNodeType.OBJECT) {
                variables = OBJECT_MAPPER.writeValueAsString(variablesNode);
            } else {
                throw new IllegalArgumentException("Not a valid object node for GraphQL variables.");
            }
        }
    }
    return new GraphQLRequestParams(operationName, query, variables);
}
|
/**
 * Parses two POST bodies: the shared fixture (no operationName) and an inline
 * body carrying operationName, variables, and query, verifying each component.
 */
@Test
void testToGraphQLRequestParamsWithPostData() throws Exception {
    // Fixture body: operationName absent, query and variables present.
    GraphQLRequestParams params = GraphQLRequestParamUtils
            .toGraphQLRequestParams(EXPECTED_POST_BODY.getBytes(StandardCharsets.UTF_8), null);
    assertNull(params.getOperationName());
    assertEquals(QUERY.trim(), params.getQuery());
    assertEquals(EXPECTED_VARIABLES_GET_PARAM_VALUE, params.getVariables());
    // Inline body: all three components present; variables re-serialized compactly.
    params = GraphQLRequestParamUtils.toGraphQLRequestParams(
            "{\"operationName\":\"op1\",\"variables\":{\"id\":123},\"query\":\"query { droid { id }}\"}"
                    .getBytes(StandardCharsets.UTF_8),
            null);
    assertEquals("op1", params.getOperationName());
    assertEquals("query { droid { id }}", params.getQuery());
    assertEquals("{\"id\":123}", params.getVariables());
}
|
/**
 * Builds the full event signature hash: the Keccak-256 (SHA-3) digest of the
 * canonical event signature string, returned as a 0x-prefixed hex string.
 *
 * @param methodSignature canonical event signature, e.g. {@code "Notify(uint256,uint256)"}
 * @return 0x-prefixed hex encoding of the 32-byte hash
 */
public static String buildEventSignature(String methodSignature) {
    // Fix: the no-arg getBytes() depends on the platform default charset, which
    // could alter the hash on exotic platforms. Signatures are ASCII, so UTF-8
    // yields identical bytes on all common platforms while being deterministic.
    byte[] input = methodSignature.getBytes(java.nio.charset.StandardCharsets.UTF_8);
    byte[] hash = Hash.sha3(input);
    return Numeric.toHexString(hash);
}
|
/**
 * Known-answer test: event signature hashes for two canonical event
 * declarations must match the expected Keccak-256 digests.
 */
@Test
public void testBuildEventSignature() {
    assertEquals(
            EventEncoder.buildEventSignature("Deposit(address,hash256,uint256)"),
            ("0x50cb9fe53daa9737b786ab3646f04d0150dc50ef4e75f59509d83667ad5adb20"));
    assertEquals(
            EventEncoder.buildEventSignature("Notify(uint256,uint256)"),
            ("0x71e71a8458267085d5ab16980fd5f114d2d37f232479c245d523ce8d23ca40ed"));
}
|
/**
 * Entry point invoked by Flink for each UDTF call: converts the Flink argument
 * values into Hive objects (unless all converters are identity) and forwards
 * them to the wrapped Hive function.
 *
 * @param args the caller's arguments; may arrive as a single array (see below)
 * @throws HiveException if the wrapped Hive function fails
 */
public void eval(Object... args) throws HiveException {
    // When the parameter is (Integer, Array[Double]), Flink calls udf.eval(Integer,
    // Array[Double]), which is not a problem.
    // But when the parameter is a single array, Flink calls udf.eval(Array[Double]),
    // at this point java's var-args will cast Array[Double] to Array[Object] and let it be
    // Object... args, So we need wrap it.
    if (isArgsSingleArray) {
        args = new Object[] {args};
    }
    checkArgument(args.length == conversions.length);
    // Skip conversion entirely when every converter is the identity.
    if (!allIdentityConverter) {
        // Note: converts in place, mutating the (local) varargs array.
        for (int i = 0; i < args.length; i++) {
            args[i] = conversions[i].toHiveObject(args[i]);
        }
    }
    function.process(args);
}
|
/**
 * Exercises the inline UDTF with an array-of-rows argument, checking that the
 * struct elements are emitted as individual rows.
 */
@Test
public void testStruct() throws Exception {
    Object[] constantArgs = new Object[] {null};
    // Single argument: ARRAY<ROW<INT, DOUBLE>>.
    DataType[] dataTypes =
            new DataType[] {
                DataTypes.ARRAY(
                        DataTypes.ROW(
                                DataTypes.FIELD("1", DataTypes.INT()),
                                DataTypes.FIELD("2", DataTypes.DOUBLE())))
            };
    HiveGenericUDTF udf = init(GenericUDTFInline.class, constantArgs, dataTypes);
    // A single Row[] argument triggers the single-array varargs wrapping in eval().
    udf.eval(new Row[] {Row.of(1, 2.2d), Row.of(3, 4.4d)});
    assertThat(collector.result).isEqualTo(Arrays.asList(Row.of(1, 2.2), Row.of(3, 4.4)));
}
|
/**
 * Shows a preview popup for the given key, positioned by the configured
 * position calculator relative to the parent view's window location.
 */
@Override
public void showPreviewForKey(
    Keyboard.Key key, Drawable icon, View parentView, PreviewPopupTheme previewPopupTheme) {
  // Resolve (or create) the popup assigned to this key.
  KeyPreview preview = getPopupForKey(key, parentView, previewPopupTheme);
  Point position =
      mPositionCalculator.calculatePositionForPreview(
          key, previewPopupTheme, getLocationInWindow(parentView));
  preview.showPreviewForKey(key, icon, position);
}
|
/**
 * Keys flagged with {@code showPreview == false} must never cause a popup
 * window to be created.
 */
@Test
public void testNoPopupForNoPreview() {
    KeyPreviewsManager underTest =
            new KeyPreviewsManager(getApplicationContext(), mPositionCalculator, 3);
    // Sanity: no popup exists before any preview request.
    PopupWindow createdPopupWindow = getLatestCreatedPopupWindow();
    Assert.assertNull(createdPopupWindow);
    // Requesting a preview for a no-preview key must still create nothing.
    mTestKeys[0].showPreview = false;
    underTest.showPreviewForKey(mTestKeys[0], "y", mKeyboardView, mTheme);
    createdPopupWindow = getLatestCreatedPopupWindow();
    Assert.assertNull(createdPopupWindow);
}
|
/**
 * Applies the mining field's missing-value treatment to the request data.
 *
 * <p>When no treatment method is declared, RETURN_INVALID is assumed. For the
 * replacement-based treatments, the declared replacement (if any) is converted
 * to the field's data type and added to the request.
 *
 * @param miningField the mining field whose value is missing
 * @param requestData request being evaluated; may receive a replacement value
 * @throws KiePMMLInputDataException if the treatment is RETURN_INVALID
 * @throws KiePMMLException on an unknown treatment method
 */
static void manageMissingValues(final KiePMMLMiningField miningField, final PMMLRequestData requestData) {
    MISSING_VALUE_TREATMENT_METHOD missingValueTreatmentMethod =
            miningField.getMissingValueTreatmentMethod() != null ?
                    miningField.getMissingValueTreatmentMethod()
                    : MISSING_VALUE_TREATMENT_METHOD.RETURN_INVALID;
    switch (missingValueTreatmentMethod) {
        case RETURN_INVALID:
            throw new KiePMMLInputDataException("Missing required value for " + miningField.getName());
        case AS_IS:
        case AS_MEAN:
        case AS_MODE:
        case AS_MEDIAN:
        case AS_VALUE:
            String missingValueReplacement = miningField.getMissingValueReplacement();
            if (missingValueReplacement != null) {
                // Convert the textual replacement into the field's actual data type.
                Object requiredValue =
                        miningField.getDataType().getActualValue(missingValueReplacement);
                requestData.addRequestParam(miningField.getName(), requiredValue);
            }
            break;
        default:
            // Bug fix: the message previously said "INVALID_VALUE_TREATMENT_METHOD",
            // but this switch dispatches on the *missing*-value treatment method.
            throw new KiePMMLException("Unmanaged MISSING_VALUE_TREATMENT_METHOD " + missingValueTreatmentMethod);
    }
}
|
/**
 * A missing value on a field whose treatment is RETURN_INVALID must raise a
 * KiePMML exception.
 */
@Test
void manageMissingValuesReturnInvalid() {
    assertThatExceptionOfType(KiePMMLException.class).isThrownBy(() -> {
        KiePMMLMiningField miningField = KiePMMLMiningField.builder("FIELD", null)
                .withDataType(DATA_TYPE.STRING)
                .withMissingValueTreatmentMethod(MISSING_VALUE_TREATMENT_METHOD.RETURN_INVALID)
                .build();
        // Empty request data => the field's value is missing.
        PreProcess.manageMissingValues(miningField, new PMMLRequestData());
    });
}
|
/**
 * Parses a connector configuration file, trying three formats in order:
 * (1) a flat JSON map of string configs, (2) a JSON object shaped like a
 * {@link CreateConnectorRequest}, (3) a Java properties file. In every case
 * the mandatory 'name' config is validated and, when needed, propagated.
 *
 * @param filePath path to the configuration file
 * @return the request to create the connector
 * @throws IOException if the file cannot be read as any supported format
 * @throws ConnectException if 'name' is missing or inconsistent
 */
CreateConnectorRequest parseConnectorConfigurationFile(String filePath) throws IOException {
    ObjectMapper objectMapper = new ObjectMapper();
    File connectorConfigurationFile = Paths.get(filePath).toFile();
    // Attempt 1: flat JSON map {"name": "...", "connector.class": "...", ...}.
    try {
        Map<String, String> connectorConfigs = objectMapper.readValue(
                connectorConfigurationFile,
                new TypeReference<Map<String, String>>() { });
        if (!connectorConfigs.containsKey(NAME_CONFIG)) {
            throw new ConnectException("Connector configuration at '" + filePath + "' is missing the mandatory '" + NAME_CONFIG + "' "
                    + "configuration");
        }
        return new CreateConnectorRequest(connectorConfigs.get(NAME_CONFIG), connectorConfigs, null);
    } catch (StreamReadException | DatabindException e) {
        // Not a flat map (e.g. nested "config" object) — fall through to attempt 2.
        log.debug("Could not parse connector configuration file '{}' into a Map with String keys and values", filePath);
    }
    // Attempt 2: JSON in CreateConnectorRequest shape {"name": ..., "config": {...}}.
    try {
        // Tolerate extra top-level fields in the request JSON.
        objectMapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
        CreateConnectorRequest createConnectorRequest = objectMapper.readValue(connectorConfigurationFile,
                new TypeReference<CreateConnectorRequest>() { });
        if (createConnectorRequest.config().containsKey(NAME_CONFIG)) {
            // If 'name' appears in both places, the two must agree.
            if (!createConnectorRequest.config().get(NAME_CONFIG).equals(createConnectorRequest.name())) {
                throw new ConnectException("Connector name configuration in 'config' doesn't match the one specified in 'name' at '" + filePath
                        + "'");
            }
        } else {
            createConnectorRequest.config().put(NAME_CONFIG, createConnectorRequest.name());
        }
        return createConnectorRequest;
    } catch (StreamReadException | DatabindException e) {
        // Not JSON at all — fall through to attempt 3.
        log.debug("Could not parse connector configuration file '{}' into an object of type {}",
                filePath, CreateConnectorRequest.class.getSimpleName());
    }
    // Attempt 3: legacy Java properties file.
    Map<String, String> connectorConfigs = Utils.propsToStringMap(Utils.loadProps(filePath));
    if (!connectorConfigs.containsKey(NAME_CONFIG)) {
        throw new ConnectException("Connector configuration at '" + filePath + "' is missing the mandatory '" + NAME_CONFIG + "' "
                + "configuration");
    }
    return new CreateConnectorRequest(connectorConfigs.get(NAME_CONFIG), connectorConfigs, null);
}
|
/**
 * A JSON file shaped like a CreateConnectorRequest but containing an unknown
 * extra field must still parse, with the unknown field ignored.
 */
@Test
public void testParseJsonFileWithCreateConnectorRequestWithUnknownField() throws Exception {
    // Build a request JSON with an extra, unrecognized top-level field.
    Map<String, Object> request = new HashMap<>();
    request.put("name", CONNECTOR_NAME);
    request.put("config", CONNECTOR_CONFIG);
    request.put("unknown-field", "random-value");
    try (FileWriter writer = new FileWriter(connectorConfigurationFile)) {
        writer.write(new ObjectMapper().writeValueAsString(request));
    }

    CreateConnectorRequest actual =
            connectStandalone.parseConnectorConfigurationFile(connectorConfigurationFile.getAbsolutePath());
    assertEquals(new CreateConnectorRequest(CONNECTOR_NAME, CONNECTOR_CONFIG, null), actual);
}
|
/**
 * Incremental SPDY frame decoder: consumes as many complete frames (or frame
 * fragments, for DATA payloads and header blocks) as the buffer allows and
 * reports them to the delegate.
 *
 * <p>Decoder state ({@code state}, {@code streamId}, {@code flags},
 * {@code length}, {@code numSettings}) persists across calls, so a frame split
 * over several network reads is reassembled transparently. Each {@code case}
 * returns early when the buffer does not yet hold enough bytes for that state,
 * leaving the state untouched so the next call resumes where this one stopped.
 *
 * @param buffer incoming bytes; consumed as far as complete units allow
 */
public void decode(ByteBuf buffer) {
    boolean last;
    int statusCode;
    while (true) {
        switch(state) {
            case READ_COMMON_HEADER:
                // Every frame starts with a fixed-size common header.
                if (buffer.readableBytes() < SPDY_HEADER_SIZE) {
                    return;
                }
                int frameOffset = buffer.readerIndex();
                int flagsOffset = frameOffset + SPDY_HEADER_FLAGS_OFFSET;
                int lengthOffset = frameOffset + SPDY_HEADER_LENGTH_OFFSET;
                buffer.skipBytes(SPDY_HEADER_SIZE);
                // High bit of the first byte distinguishes control from data frames.
                boolean control = (buffer.getByte(frameOffset) & 0x80) != 0;
                int version;
                int type;
                if (control) {
                    // Decode control frame common header
                    version = getUnsignedShort(buffer, frameOffset) & 0x7FFF;
                    type = getUnsignedShort(buffer, frameOffset + SPDY_HEADER_TYPE_OFFSET);
                    streamId = 0; // Default to session Stream-ID
                } else {
                    // Decode data frame common header
                    version = spdyVersion; // Default to expected version
                    type = SPDY_DATA_FRAME;
                    streamId = getUnsignedInt(buffer, frameOffset);
                }
                flags = buffer.getByte(flagsOffset);
                length = getUnsignedMedium(buffer, lengthOffset);
                // Check version first then validity
                if (version != spdyVersion) {
                    state = State.FRAME_ERROR;
                    delegate.readFrameError("Invalid SPDY Version");
                } else if (!isValidFrameHeader(streamId, type, flags, length)) {
                    state = State.FRAME_ERROR;
                    delegate.readFrameError("Invalid Frame Error");
                } else {
                    state = getNextState(type, length);
                }
                break;
            case READ_DATA_FRAME:
                // Zero-length DATA frame: report immediately with an empty payload.
                if (length == 0) {
                    state = State.READ_COMMON_HEADER;
                    delegate.readDataFrame(streamId, hasFlag(flags, SPDY_DATA_FLAG_FIN), Unpooled.buffer(0));
                    break;
                }
                // Generate data frames that do not exceed maxChunkSize
                int dataLength = Math.min(maxChunkSize, length);
                // Wait until entire frame is readable
                if (buffer.readableBytes() < dataLength) {
                    return;
                }
                ByteBuf data = buffer.alloc().buffer(dataLength);
                data.writeBytes(buffer, dataLength);
                length -= dataLength;
                if (length == 0) {
                    state = State.READ_COMMON_HEADER;
                }
                // FIN is only reported on the final chunk of the frame.
                last = length == 0 && hasFlag(flags, SPDY_DATA_FLAG_FIN);
                delegate.readDataFrame(streamId, last, data);
                break;
            case READ_SYN_STREAM_FRAME:
                // Fixed 10-byte SYN_STREAM body precedes the header block.
                if (buffer.readableBytes() < 10) {
                    return;
                }
                int offset = buffer.readerIndex();
                streamId = getUnsignedInt(buffer, offset);
                int associatedToStreamId = getUnsignedInt(buffer, offset + 4);
                // Priority lives in the top 3 bits of the 9th byte.
                byte priority = (byte) (buffer.getByte(offset + 8) >> 5 & 0x07);
                last = hasFlag(flags, SPDY_FLAG_FIN);
                boolean unidirectional = hasFlag(flags, SPDY_FLAG_UNIDIRECTIONAL);
                buffer.skipBytes(10);
                length -= 10;
                if (streamId == 0) {
                    state = State.FRAME_ERROR;
                    delegate.readFrameError("Invalid SYN_STREAM Frame");
                } else {
                    state = State.READ_HEADER_BLOCK;
                    delegate.readSynStreamFrame(streamId, associatedToStreamId, priority, last, unidirectional);
                }
                break;
            case READ_SYN_REPLY_FRAME:
                if (buffer.readableBytes() < 4) {
                    return;
                }
                streamId = getUnsignedInt(buffer, buffer.readerIndex());
                last = hasFlag(flags, SPDY_FLAG_FIN);
                buffer.skipBytes(4);
                length -= 4;
                if (streamId == 0) {
                    state = State.FRAME_ERROR;
                    delegate.readFrameError("Invalid SYN_REPLY Frame");
                } else {
                    state = State.READ_HEADER_BLOCK;
                    delegate.readSynReplyFrame(streamId, last);
                }
                break;
            case READ_RST_STREAM_FRAME:
                if (buffer.readableBytes() < 8) {
                    return;
                }
                streamId = getUnsignedInt(buffer, buffer.readerIndex());
                statusCode = getSignedInt(buffer, buffer.readerIndex() + 4);
                buffer.skipBytes(8);
                // Both the stream id and the status code must be non-zero.
                if (streamId == 0 || statusCode == 0) {
                    state = State.FRAME_ERROR;
                    delegate.readFrameError("Invalid RST_STREAM Frame");
                } else {
                    state = State.READ_COMMON_HEADER;
                    delegate.readRstStreamFrame(streamId, statusCode);
                }
                break;
            case READ_SETTINGS_FRAME:
                if (buffer.readableBytes() < 4) {
                    return;
                }
                boolean clear = hasFlag(flags, SPDY_SETTINGS_CLEAR);
                numSettings = getUnsignedInt(buffer, buffer.readerIndex());
                buffer.skipBytes(4);
                length -= 4;
                // Validate frame length against number of entries. Each ID/Value entry is 8 bytes.
                if ((length & 0x07) != 0 || length >> 3 != numSettings) {
                    state = State.FRAME_ERROR;
                    delegate.readFrameError("Invalid SETTINGS Frame");
                } else {
                    state = State.READ_SETTING;
                    delegate.readSettingsFrame(clear);
                }
                break;
            case READ_SETTING:
                // All entries consumed: signal the end of the SETTINGS frame.
                if (numSettings == 0) {
                    state = State.READ_COMMON_HEADER;
                    delegate.readSettingsEnd();
                    break;
                }
                if (buffer.readableBytes() < 8) {
                    return;
                }
                byte settingsFlags = buffer.getByte(buffer.readerIndex());
                int id = getUnsignedMedium(buffer, buffer.readerIndex() + 1);
                int value = getSignedInt(buffer, buffer.readerIndex() + 4);
                boolean persistValue = hasFlag(settingsFlags, SPDY_SETTINGS_PERSIST_VALUE);
                boolean persisted = hasFlag(settingsFlags, SPDY_SETTINGS_PERSISTED);
                buffer.skipBytes(8);
                --numSettings;
                delegate.readSetting(id, value, persistValue, persisted);
                break;
            case READ_PING_FRAME:
                if (buffer.readableBytes() < 4) {
                    return;
                }
                int pingId = getSignedInt(buffer, buffer.readerIndex());
                buffer.skipBytes(4);
                state = State.READ_COMMON_HEADER;
                delegate.readPingFrame(pingId);
                break;
            case READ_GOAWAY_FRAME:
                if (buffer.readableBytes() < 8) {
                    return;
                }
                int lastGoodStreamId = getUnsignedInt(buffer, buffer.readerIndex());
                statusCode = getSignedInt(buffer, buffer.readerIndex() + 4);
                buffer.skipBytes(8);
                state = State.READ_COMMON_HEADER;
                delegate.readGoAwayFrame(lastGoodStreamId, statusCode);
                break;
            case READ_HEADERS_FRAME:
                if (buffer.readableBytes() < 4) {
                    return;
                }
                streamId = getUnsignedInt(buffer, buffer.readerIndex());
                last = hasFlag(flags, SPDY_FLAG_FIN);
                buffer.skipBytes(4);
                length -= 4;
                if (streamId == 0) {
                    state = State.FRAME_ERROR;
                    delegate.readFrameError("Invalid HEADERS Frame");
                } else {
                    state = State.READ_HEADER_BLOCK;
                    delegate.readHeadersFrame(streamId, last);
                }
                break;
            case READ_WINDOW_UPDATE_FRAME:
                if (buffer.readableBytes() < 8) {
                    return;
                }
                streamId = getUnsignedInt(buffer, buffer.readerIndex());
                int deltaWindowSize = getUnsignedInt(buffer, buffer.readerIndex() + 4);
                buffer.skipBytes(8);
                // A zero window delta is illegal; streamId 0 means session-level update.
                if (deltaWindowSize == 0) {
                    state = State.FRAME_ERROR;
                    delegate.readFrameError("Invalid WINDOW_UPDATE Frame");
                } else {
                    state = State.READ_COMMON_HEADER;
                    delegate.readWindowUpdateFrame(streamId, deltaWindowSize);
                }
                break;
            case READ_HEADER_BLOCK:
                // Header block fully consumed: signal the end and resume header scanning.
                if (length == 0) {
                    state = State.READ_COMMON_HEADER;
                    delegate.readHeaderBlockEnd();
                    break;
                }
                if (!buffer.isReadable()) {
                    return;
                }
                // Forward whatever compressed header bytes are available (streaming).
                int compressedBytes = Math.min(buffer.readableBytes(), length);
                ByteBuf headerBlock = buffer.alloc().buffer(compressedBytes);
                headerBlock.writeBytes(buffer, compressedBytes);
                length -= compressedBytes;
                delegate.readHeaderBlock(headerBlock);
                break;
            case DISCARD_FRAME:
                // Skip the remaining bytes of an ignored frame, possibly over several calls.
                int numBytes = Math.min(buffer.readableBytes(), length);
                buffer.skipBytes(numBytes);
                length -= numBytes;
                if (length == 0) {
                    state = State.READ_COMMON_HEADER;
                    break;
                }
                return;
            case FRAME_ERROR:
                // After a framing error the connection state is unrecoverable:
                // drain and ignore everything that follows.
                buffer.skipBytes(buffer.readableBytes());
                return;
            default:
                throw new Error("Shouldn't reach here.");
        }
    }
}
|
/**
 * A SETTINGS frame entry flagged FLAG_SETTINGS_PERSISTED must be surfaced via
 * {@code readSetting(..., persisted = true)}, followed by
 * {@code readSettingsEnd()}, and the whole frame must be consumed.
 */
@Test
public void testSpdySettingsPersistedValues() throws Exception {
    short type = 4;
    byte flags = 0;
    int numSettings = 1;
    // SETTINGS body: 4-byte entry count + 8 bytes per ID/value entry.
    int length = 8 * numSettings + 4;
    byte idFlags = 0x02; // FLAG_SETTINGS_PERSISTED
    int id = RANDOM.nextInt() & 0x00FFFFFF;
    int value = RANDOM.nextInt();

    ByteBuf buf = Unpooled.buffer(SPDY_HEADER_SIZE + length);
    encodeControlFrameHeader(buf, type, flags, length);
    buf.writeInt(numSettings);
    for (int i = 0; i < numSettings; i++) {
        buf.writeByte(idFlags);
        buf.writeMedium(id);
        buf.writeInt(value);
    }

    decoder.decode(buf);
    verify(delegate).readSettingsFrame(false);
    verify(delegate, times(numSettings)).readSetting(id, value, false, true);
    // Bug fix: the test previously *invoked* delegate.readSettingsEnd() directly
    // on the mock before decode() (a meaningless no-op that pollutes the mock's
    // invocation record) instead of verifying that decode() produced the call.
    verify(delegate).readSettingsEnd();
    assertFalse(buf.isReadable());
    buf.release();
}
|
/**
 * Filters the invoker list by tag-routing rules (when applicable) and then
 * delegates the possibly-reduced list to the parent handler.
 */
@Override
public Object handle(String targetService, List<Object> invokers, Object invocation, Map<String, String> queryMap,
        String serviceInterface) {
    // Nothing to route: hand back the original list untouched.
    if (!shouldHandle(invokers)) {
        return invokers;
    }
    List<Object> matched = getTargetInvokersByRules(invokers, targetService);
    return super.handle(targetService, matched, invocation, queryMap, serviceInterface);
}
|
/**
 * With the AZ tag-match trigger-threshold policy rule active, routing must
 * keep only the invokers matching the caller's zone ("az1"): 2 of 3 here.
 */
@Test
public void testGetTargetInvokerByTagRulesWithPolicySceneOne() {
    // initialize the routing rule
    RuleInitializationUtils.initAZTagMatchTriggerThresholdPolicyRule();
    // Three invokers: two in az1, one in az2.
    List<Object> invokers = new ArrayList<>();
    ApacheInvoker<Object> invoker1 = new ApacheInvoker<>("1.0.0", "az1");
    invokers.add(invoker1);
    ApacheInvoker<Object> invoker2 = new ApacheInvoker<>("1.0.1", "az2");
    invokers.add(invoker2);
    ApacheInvoker<Object> invoker3 = new ApacheInvoker<>("1.0.1", "az1");
    invokers.add(invoker3);
    Invocation invocation = new ApacheInvocation();
    // The caller advertises zone az1.
    Map<String, String> queryMap = new HashMap<>();
    queryMap.put("zone", "az1");
    queryMap.put("interface", "io.sermant.foo.FooTest");
    Map<String, String> parameters = new HashMap<>();
    parameters.putIfAbsent(RouterConstant.META_ZONE_KEY, "az1");
    DubboCache.INSTANCE.setParameters(parameters);
    DubboCache.INSTANCE.putApplication("io.sermant.foo.FooTest", "foo");
    List<Object> targetInvokers = (List<Object>) tagRouteHandler.handle(
            DubboCache.INSTANCE.getApplication("io.sermant.foo.FooTest")
            , invokers, invocation, queryMap, "io.sermant.foo.FooTest");
    // Only the two az1 invokers survive the tag filter.
    Assert.assertEquals(2, targetInvokers.size());
    // Reset shared routing state so later tests are unaffected.
    ConfigCache.getLabel(RouterConstant.DUBBO_CACHE_NAME).resetRouteRule(Collections.emptyMap());
}
|
/**
 * Converts a string into a {@link Blob} using the project's default charset.
 *
 * @param str the source string, may be null
 * @return a blob holding the encoded bytes, or null when {@code str} is null
 * @throws ShouldNeverHappenException if blob construction unexpectedly fails
 */
public static Blob string2blob(String str) {
    if (str != null) {
        try {
            return new SerialBlob(str.getBytes(Constants.DEFAULT_CHARSET));
        } catch (Exception e) {
            // SerialBlob construction from an in-memory byte[] is not expected to fail.
            throw new ShouldNeverHappenException(e);
        }
    }
    return null;
}
|
/**
 * string2blob must map null to null and otherwise produce a SerialBlob of the
 * string's bytes in the default charset.
 */
@Test
public void testString2blob() throws SQLException {
    assertNull(BlobUtils.string2blob(null));
    assertThat(BlobUtils.string2blob("123abc")).isEqualTo(
            new SerialBlob("123abc".getBytes(Constants.DEFAULT_CHARSET)));
}
|
/**
 * Merges an ACL spec into an existing ACL: entries in the spec replace
 * matching existing entries (by scope/type/name key), and spec entries with no
 * match are added. Mask entries are tracked separately per scope so masks can
 * be recalculated afterwards; the final ACL is validated before returning.
 *
 * @param existingAcl the current, ordered ACL entries
 * @param inAclSpec   the entries to merge in
 * @return the merged, validated ACL
 * @throws AclException if the resulting ACL is invalid
 */
public static List<AclEntry> mergeAclEntries(List<AclEntry> existingAcl,
    List<AclEntry> inAclSpec) throws AclException {
  ValidatedAclSpec aclSpec = new ValidatedAclSpec(inAclSpec);
  ArrayList<AclEntry> aclBuilder = Lists.newArrayListWithCapacity(MAX_ENTRIES);
  // Spec entries that replaced an existing entry (so they are not re-added below).
  List<AclEntry> foundAclSpecEntries =
      Lists.newArrayListWithCapacity(MAX_ENTRIES);
  // Last-seen mask entry per scope (from spec or existing ACL).
  EnumMap<AclEntryScope, AclEntry> providedMask =
      Maps.newEnumMap(AclEntryScope.class);
  // Scopes whose mask was explicitly supplied by the spec.
  EnumSet<AclEntryScope> maskDirty = EnumSet.noneOf(AclEntryScope.class);
  // Scopes touched at all by the spec (mask may need recalculation).
  EnumSet<AclEntryScope> scopeDirty = EnumSet.noneOf(AclEntryScope.class);
  for (AclEntry existingEntry: existingAcl) {
    AclEntry aclSpecEntry = aclSpec.findByKey(existingEntry);
    if (aclSpecEntry != null) {
      // Spec overrides this existing entry.
      foundAclSpecEntries.add(aclSpecEntry);
      scopeDirty.add(aclSpecEntry.getScope());
      if (aclSpecEntry.getType() == MASK) {
        providedMask.put(aclSpecEntry.getScope(), aclSpecEntry);
        maskDirty.add(aclSpecEntry.getScope());
      } else {
        aclBuilder.add(aclSpecEntry);
      }
    } else {
      // Keep the existing entry; masks are held aside for recalculation.
      if (existingEntry.getType() == MASK) {
        providedMask.put(existingEntry.getScope(), existingEntry);
      } else {
        aclBuilder.add(existingEntry);
      }
    }
  }
  // ACL spec entries that were not replacements are new additions.
  for (AclEntry newEntry: aclSpec) {
    if (Collections.binarySearch(foundAclSpecEntries, newEntry,
        ACL_ENTRY_COMPARATOR) < 0) {
      scopeDirty.add(newEntry.getScope());
      if (newEntry.getType() == MASK) {
        providedMask.put(newEntry.getScope(), newEntry);
        maskDirty.add(newEntry.getScope());
      } else {
        aclBuilder.add(newEntry);
      }
    }
  }
  copyDefaultsIfNeeded(aclBuilder);
  calculateMasks(aclBuilder, providedMask, maskDirty, scopeDirty);
  return buildAndValidateAcl(aclBuilder);
}
|
/**
 * Merging default-scope named-user entries must recalculate the default mask
 * (union of group + named entries => ALL) rather than keep the stale READ mask.
 */
@Test
public void testMergeAclEntriesDefaultMaskCalculated() throws AclException {
  List<AclEntry> existing = new ImmutableList.Builder<AclEntry>()
    .add(aclEntry(ACCESS, USER, ALL))
    .add(aclEntry(ACCESS, GROUP, READ))
    .add(aclEntry(ACCESS, OTHER, READ))
    .add(aclEntry(DEFAULT, USER, ALL))
    .add(aclEntry(DEFAULT, USER, "bruce", READ))
    .add(aclEntry(DEFAULT, GROUP, READ))
    .add(aclEntry(DEFAULT, MASK, READ))
    .add(aclEntry(DEFAULT, OTHER, NONE))
    .build();
  // Spec: widen bruce to READ_WRITE and add diana with READ_EXECUTE.
  List<AclEntry> aclSpec = Lists.newArrayList(
    aclEntry(DEFAULT, USER, "bruce", READ_WRITE),
    aclEntry(DEFAULT, USER, "diana", READ_EXECUTE));
  // Expected: default mask becomes ALL (READ_WRITE | READ_EXECUTE | READ).
  List<AclEntry> expected = new ImmutableList.Builder<AclEntry>()
    .add(aclEntry(ACCESS, USER, ALL))
    .add(aclEntry(ACCESS, GROUP, READ))
    .add(aclEntry(ACCESS, OTHER, READ))
    .add(aclEntry(DEFAULT, USER, ALL))
    .add(aclEntry(DEFAULT, USER, "bruce", READ_WRITE))
    .add(aclEntry(DEFAULT, USER, "diana", READ_EXECUTE))
    .add(aclEntry(DEFAULT, GROUP, READ))
    .add(aclEntry(DEFAULT, MASK, ALL))
    .add(aclEntry(DEFAULT, OTHER, NONE))
    .build();
  assertEquals(expected, mergeAclEntries(existing, aclSpec));
}
|
/**
 * Fills in the generated {@code evaluateNode...} method body for the given
 * node: its predicate, nested-node function references, score, optional score
 * distributions, and optional missing-value penalty.
 *
 * @param toPopulate   holder of the generated source being built
 * @param nodeNamesDTO naming/metadata for the node being generated
 * @param fields       fields available to predicate compilation
 * @param isRoot       whether this node is the tree root (uses the root body)
 * @throws KiePMMLInternalException if the target method has no body
 */
static void populateEvaluateNode(final JavaParserDTO toPopulate,
                                 final NodeNamesDTO nodeNamesDTO,
                                 final List<Field<?>> fields,
                                 final boolean isRoot) {
    String nodeClassName = nodeNamesDTO.nodeClassName;
    // Root nodes write into the pre-resolved root body; nested nodes look up
    // their generated method by class name.
    final BlockStmt evaluateNodeBody = isRoot ? toPopulate.evaluateRootNodeBody :
            toPopulate.getEvaluateNestedNodeMethodDeclaration(nodeClassName).getBody()
                    .orElseThrow(() -> new KiePMMLInternalException(String.format(MISSING_BODY_TEMPLATE,
                                                                                  EVALUATE_NODE + nodeClassName)));
    // set 'predicate'
    populateEvaluateNodeWithPredicate(evaluateNodeBody, nodeNamesDTO.node.getPredicate(), fields);
    // set 'nodeFunctions'
    final List<String> nestedNodesFullClasses = nodeNamesDTO.getNestedNodesFullClassNames(toPopulate.packageName);
    populateEvaluateNodeWithNodeFunctions(evaluateNodeBody, nestedNodesFullClasses);
    // set 'score'
    populateEvaluateNodeWithScore(evaluateNodeBody, nodeNamesDTO.node.getScore());
    // set 'scoreDistributions'
    if (nodeNamesDTO.node.hasScoreDistributions()) {
        populateEvaluateNodeWithScoreDistributions(evaluateNodeBody, nodeNamesDTO.node.getScoreDistributions());
    }
    // set 'missingValuePenalty'
    if (nodeNamesDTO.missingValuePenalty != null) {
        populateEvaluateNodeWithMissingValuePenalty(evaluateNodeBody, nodeNamesDTO.missingValuePenalty);
    }
}
|
/**
 * Exercises populateEvaluateNode for both a leaf (non-root) node and a
 * populated root node, verifying the generated method bodies each time.
 */
@Test
void populateEvaluateNode() {
    final String packageName = "packageName";
    // empty node
    boolean isRoot = false;
    KiePMMLNodeFactory.NodeNamesDTO nodeNamesDTO = new KiePMMLNodeFactory.NodeNamesDTO(nodeLeaf,
                                                                                       createNodeClassName(),
                                                                                       "PARENTNODECLASS",
                                                                                       1.0);
    KiePMMLNodeFactory.JavaParserDTO toPopulate = new KiePMMLNodeFactory.JavaParserDTO(nodeNamesDTO, packageName);
    KiePMMLNodeFactory.populateEvaluateNode(toPopulate,
                                            nodeNamesDTO,
                                            getFieldsFromDataDictionaryAndDerivedFields(dataDictionary2,
                                                                                        derivedFields2),
                                            isRoot);
    commonVerifyEvaluateNode(toPopulate, nodeNamesDTO, isRoot);
    // populated node
    isRoot = true;
    // Root nodes have no parent class name.
    nodeNamesDTO = new KiePMMLNodeFactory.NodeNamesDTO(nodeRoot, createNodeClassName(), null, 1.0);
    toPopulate = new KiePMMLNodeFactory.JavaParserDTO(nodeNamesDTO, packageName);
    KiePMMLNodeFactory.populateEvaluateNode(toPopulate,
                                            nodeNamesDTO,
                                            getFieldsFromDataDictionaryAndDerivedFields(dataDictionary2,
                                                                                        derivedFields2),
                                            isRoot);
    commonVerifyEvaluateNode(toPopulate, nodeNamesDTO, isRoot);
}
|
/**
 * Returns true if the script matches the canonical pay-to-pubkey-hash form:
 * {@code DUP HASH160 <20-byte hash> EQUALVERIFY CHECKSIG}.
 *
 * @param script the script to inspect
 * @return whether the script is a standard P2PKH output script
 */
public static boolean isP2PKH(Script script) {
    List<ScriptChunk> chunks = script.chunks();
    if (chunks.size() != 5)
        return false;
    byte[] pubKeyHash = chunks.get(2).data;
    // Check each chunk in the on-wire order of the script.
    return chunks.get(0).equalsOpCode(OP_DUP)
            && chunks.get(1).equalsOpCode(OP_HASH160)
            && pubKeyHash != null
            && pubKeyHash.length == LegacyAddress.LENGTH
            && chunks.get(3).equalsOpCode(OP_EQUALVERIFY)
            && chunks.get(4).equalsOpCode(OP_CHECKSIG);
}
|
/** The output script built for a key must match the P2PKH pattern. */
@Test
public void testCreateP2PKHOutputScript() {
    Script outputScript = ScriptBuilder.createP2PKHOutputScript(keys.get(0));
    assertTrue(ScriptPattern.isP2PKH(outputScript));
}
|
/**
 * Serializes the given snapshot ref to its JSON representation
 * (non-pretty-printed).
 *
 * @param ref the snapshot ref to serialize
 * @return the JSON string
 */
public static String toJson(SnapshotRef ref) {
    return toJson(ref, false);
}
|
/**
 * A branch ref with every optional retention field set must serialize all of
 * them in the expected JSON layout.
 */
@Test
public void testBranchToJsonAllFields() {
    String json =
        "{\"snapshot-id\":1,\"type\":\"branch\",\"min-snapshots-to-keep\":2,"
            + "\"max-snapshot-age-ms\":3,\"max-ref-age-ms\":4}";
    SnapshotRef ref =
        SnapshotRef.branchBuilder(1L)
            .minSnapshotsToKeep(2)
            .maxSnapshotAgeMs(3L)
            .maxRefAgeMs(4L)
            .build();
    assertThat(SnapshotRefParser.toJson(ref))
        .as("Should be able to serialize branch with all fields")
        .isEqualTo(json);
}
|
/**
 * Returns the distinct partition keys surviving predicate pruning for an
 * Iceberg table at the given snapshot version.
 *
 * <p>Relies on {@code triggerIcebergPlanFilesIfNeeded} having populated the
 * split-task cache for this (db, table, snapshot, predicate) key; each file
 * scan task contributes its partition values, de-duplicated across files.
 *
 * @throws StarRocksConnectorException if split tasks are missing or a
 *         partition key cannot be constructed
 */
@Override
public List<PartitionKey> getPrunedPartitions(Table table, ScalarOperator predicate, long limit, TableVersionRange version) {
    IcebergTable icebergTable = (IcebergTable) table;
    String dbName = icebergTable.getRemoteDbName();
    String tableName = icebergTable.getRemoteTableName();
    // No snapshot => empty table => no partitions.
    if (version.end().isEmpty()) {
        return new ArrayList<>();
    }

    PredicateSearchKey key = PredicateSearchKey.of(dbName, tableName, version.end().get(), predicate);
    // Ensure file planning has run and cached split tasks for this key.
    triggerIcebergPlanFilesIfNeeded(key, icebergTable, predicate, limit);

    List<PartitionKey> partitionKeys = new ArrayList<>();
    List<FileScanTask> icebergSplitTasks = splitTasks.get(key);
    if (icebergSplitTasks == null) {
        throw new StarRocksConnectorException("Missing iceberg split task for table:[{}.{}]. predicate:[{}]",
                dbName, tableName, predicate);
    }

    Set<List<String>> scannedPartitions = new HashSet<>();
    PartitionSpec spec = icebergTable.getNativeTable().spec();
    List<Column> partitionColumns = icebergTable.getPartitionColumnsIncludeTransformed();
    boolean existPartitionTransformedEvolution = ((IcebergTable) table).hasPartitionTransformedEvolution();
    for (FileScanTask fileScanTask : icebergSplitTasks) {
        org.apache.iceberg.PartitionData partitionData = (org.apache.iceberg.PartitionData) fileScanTask.file().partition();
        List<String> values = PartitionUtil.getIcebergPartitionValues(
                spec, partitionData, existPartitionTransformedEvolution);

        if (values.size() != partitionColumns.size()) {
            // ban partition evolution and non-identify column.
            continue;
        }

        // De-duplicate: many files share the same partition values.
        if (scannedPartitions.contains(values)) {
            continue;
        } else {
            scannedPartitions.add(values);
        }

        try {
            List<com.starrocks.catalog.Type> srTypes = new ArrayList<>();
            for (PartitionField partitionField : spec.fields()) {
                // Void transforms contribute no partition value.
                if (partitionField.transform().isVoid()) {
                    continue;
                }

                if (!partitionField.transform().isIdentity()) {
                    Type sourceType = spec.schema().findType(partitionField.sourceId());
                    Type resultType = partitionField.transform().getResultType(sourceType);
                    // Transformed date partitions are stored as integer ordinals.
                    if (resultType == Types.DateType.get()) {
                        resultType = Types.IntegerType.get();
                    }
                    srTypes.add(fromIcebergType(resultType));
                    continue;
                }

                srTypes.add(icebergTable.getColumn(icebergTable.getPartitionSourceName(spec.schema(),
                        partitionField)).getType());
            }

            // With transform evolution, the declared partition columns are authoritative.
            if (existPartitionTransformedEvolution) {
                srTypes = partitionColumns.stream()
                        .map(Column::getType)
                        .collect(Collectors.toList());
            }

            partitionKeys.add(createPartitionKeyWithType(values, srTypes, table.getType()));
        } catch (Exception e) {
            LOG.error("create partition key failed.", e);
            throw new StarRocksConnectorException(e.getMessage());
        }
    }

    return partitionKeys;
}
|
/**
 * A day-transformed date partition must be pruned/surfaced as its integer
 * day ordinal (19660) rather than as a date string.
 */
@Test
public void testDateDayPartitionPrune() {
    IcebergHiveCatalog icebergHiveCatalog = new IcebergHiveCatalog(CATALOG_NAME, new Configuration(), DEFAULT_CONFIG);
    List<Column> columns = Lists.newArrayList(new Column("k1", INT), new Column("dt", DATE));
    IcebergMetadata metadata = new IcebergMetadata(CATALOG_NAME, HDFS_ENVIRONMENT, icebergHiveCatalog,
            Executors.newSingleThreadExecutor(), Executors.newSingleThreadExecutor(), null);
    IcebergTable icebergTable = new IcebergTable(1, "srTableName", CATALOG_NAME, "resource_name", "db_name",
            "table_name", "", columns, mockedNativeTableF, Maps.newHashMap());

    // Partition value 19660 = days since epoch for the partition's date.
    org.apache.iceberg.PartitionKey partitionKey = new org.apache.iceberg.PartitionKey(SPEC_F, SCHEMA_F);
    partitionKey.set(0, 19660);
    DataFile tsDataFiles = DataFiles.builder(SPEC_F)
            .withPath("/path/to/data-f.parquet")
            .withFileSizeInBytes(20)
            .withPartition(partitionKey)
            .withRecordCount(2)
            .build();
    mockedNativeTableF.newAppend().appendFile(tsDataFiles).commit();
    mockedNativeTableF.refresh();
    TableVersionRange version = TableVersionRange.withEnd(Optional.of(
            mockedNativeTableF.currentSnapshot().snapshotId()));
    // No predicate, no limit: prune everything reachable at this snapshot.
    List<PartitionKey> partitionKeys = metadata.getPrunedPartitions(icebergTable, null, -1, version);
    Assert.assertEquals("19660", partitionKeys.get(0).getKeys().get(0).getStringValue());
}
|
/**
 * Converts a scanner-report measure into a core {@code Measure}, dispatching on the
 * metric's declared value type.
 *
 * @param batchMeasure the raw measure from the scanner report; may be {@code null}
 * @param metric the metric describing how the raw value must be interpreted; never {@code null}
 * @return the converted measure, or {@code Optional.empty()} when {@code batchMeasure} is {@code null}
 * @throws IllegalArgumentException if the metric's value type is not handled
 */
public Optional<Measure> toMeasure(@Nullable ScannerReport.Measure batchMeasure, Metric metric) {
    Objects.requireNonNull(metric);
    if (batchMeasure == null) {
      return Optional.empty();
    }
    Measure.NewMeasureBuilder builder = Measure.newMeasureBuilder();
    switch (metric.getType().getValueType()) {
      case INT:
        return toIntegerMeasure(builder, batchMeasure);
      case LONG:
        return toLongMeasure(builder, batchMeasure);
      case DOUBLE:
        return toDoubleMeasure(builder, batchMeasure);
      case BOOLEAN:
        return toBooleanMeasure(builder, batchMeasure);
      case STRING:
        return toStringMeasure(builder, batchMeasure);
      case LEVEL:
        return toLevelMeasure(builder, batchMeasure);
      case NO_VALUE:
        // NO_VALUE metrics carry no payload; only the builder is needed.
        return toNoValueMeasure(builder);
      default:
        throw new IllegalArgumentException("Unsupported Measure.ValueType " + metric.getType().getValueType());
    }
}
|
// An empty scanner measure for a LEVEL metric must still yield a present Measure,
// but one whose value type degrades to NO_VALUE.
@Test
public void toMeasure_returns_no_value_if_dto_has_no_string_value_for_LEVEL_Metric() {
    Optional<Measure> measure = underTest.toMeasure(EMPTY_BATCH_MEASURE, SOME_LEVEL_METRIC);
    assertThat(measure).isPresent();
    assertThat(measure.get().getValueType()).isEqualTo(Measure.ValueType.NO_VALUE);
}
|
/**
 * Looks up the select-list matching the given query values in the hash index.
 *
 * <p>Thread-safe without locks: the whole probe is retried whenever
 * {@code hashStateVolatile} changes mid-read (optimistic concurrency), so a
 * concurrent index rebuild can never yield a torn result.
 *
 * @param query one value per indexed match field; nulls are not supported
 * @return the matches, or {@code null} when no record matches the query
 * @throws IllegalStateException if the index was never initialized
 * @throws IllegalArgumentException if any query value is {@code null}
 */
public HollowHashIndexResult findMatches(Object... query) {
    if (hashStateVolatile == null) {
        throw new IllegalStateException(this + " wasn't initialized");
    }
    // Combine the per-field hash codes into a single bucket hash.
    int hashCode = 0;
    for(int i=0;i<query.length;i++) {
        if(query[i] == null)
            throw new IllegalArgumentException("querying by null unsupported; i=" + i);
        hashCode ^= HashCodes.hashInt(keyHashCode(query[i], i));
    }
    HollowHashIndexResult result;
    HollowHashIndexState hashState;
    do {
        result = null;
        // Snapshot the state once per attempt; re-probe if it changed underneath us.
        hashState = hashStateVolatile;
        long bucket = hashCode & hashState.getMatchHashMask();
        long hashBucketBit = bucket * hashState.getBitsPerMatchHashEntry();
        // A zero first traverser field marks an empty bucket (open addressing).
        boolean bucketIsEmpty = hashState.getMatchHashTable().getElementValue(hashBucketBit, hashState.getBitsPerTraverserField()[0]) == 0;
        while (!bucketIsEmpty) {
            if (matchIsEqual(hashState.getMatchHashTable(), hashBucketBit, query)) {
                // Bucket entry layout: [match key][select size][select pointer].
                int selectSize = (int) hashState.getMatchHashTable().getElementValue(hashBucketBit + hashState.getBitsPerMatchHashKey(), hashState.getBitsPerSelectTableSize());
                long selectBucketPointer = hashState.getMatchHashTable().getElementValue(hashBucketBit + hashState.getBitsPerMatchHashKey() + hashState.getBitsPerSelectTableSize(), hashState.getBitsPerSelectTablePointer());
                result = new HollowHashIndexResult(hashState, selectBucketPointer, selectSize);
                break;
            }
            // Linear probing to the next bucket.
            bucket = (bucket + 1) & hashState.getMatchHashMask();
            hashBucketBit = bucket * hashState.getBitsPerMatchHashEntry();
            bucketIsEmpty = hashState.getMatchHashTable().getElementValue(hashBucketBit, hashState.getBitsPerTraverserField()[0]) == 0;
        }
    } while (hashState != hashStateVolatile);
    return result;
}
|
// Records whose indexed reference path resolves to null must simply be absent
// from the index rather than break indexing or lookups.
@Test
public void testIndexingReferenceTypeFieldWithNullValues() throws Exception {
    mapper.add(new TypeC(null));
    mapper.add(new TypeC(new TypeD(null)));
    mapper.add(new TypeC(new TypeD("one")));
    roundTripSnapshot();
    HollowHashIndex index = new HollowHashIndex(readStateEngine, "TypeC", "", "cd.d1.value");
    Assert.assertNull(index.findMatches("none"));
    // Ordinal 2 is the only record whose cd.d1.value is "one".
    assertIteratorContainsAll(index.findMatches("one").iterator(), 2);
}
|
@Override
public void start() {
    // Resolve the configured data directory and make sure it exists before
    // handing it to the embedded database server.
    File dbHome = new File(getRequiredSetting(PATH_DATA.getKey()));
    if (!dbHome.exists() && !dbHome.mkdirs() && !dbHome.exists()) {
        // mkdirs() returns false both on failure and when another process created
        // the directory concurrently, hence the re-check before failing.
        throw new IllegalStateException("Unable to create database directory: " + dbHome);
    }
    startServer(dbHome);
}
|
// Starting the embedded H2 server must open a JDBC listener on the configured
// port and bind it to the loopback address only.
@Test
public void start_creates_db_and_adds_tcp_listener() throws IOException {
    int port = NetworkUtilsImpl.INSTANCE.getNextLoopbackAvailablePort();
    settings
      .setProperty(PATH_DATA.getKey(), temporaryFolder.newFolder().getAbsolutePath())
      .setProperty(JDBC_URL.getKey(), "jdbc url")
      .setProperty(JDBC_EMBEDDED_PORT.getKey(), "" + port)
      .setProperty(JDBC_USERNAME.getKey(), "foo")
      .setProperty(JDBC_PASSWORD.getKey(), "bar");
    underTest.start();
    checkDbIsUp(port, "foo", "bar");
    // H2 listens on loopback address only
    verify(system2).setProperty("h2.bindAddress", LOOPBACK_ADDRESS);
}
|
/**
 * Wraps the given keyed state with a latency-tracking decorator when latency
 * tracking is enabled; otherwise returns the state unchanged.
 */
public static <K, N, V, S extends State>
        InternalKvState<K, N, ?> createStateAndWrapWithLatencyTrackingIfEnabled(
                InternalKvState<K, N, ?> kvState,
                StateDescriptor<S, V> stateDescriptor,
                LatencyTrackingStateConfig latencyTrackingStateConfig)
                throws Exception {
    // Fast path: no tracking requested, hand the state back untouched.
    if (!latencyTrackingStateConfig.isEnabled()) {
        return kvState;
    }
    return new LatencyTrackingStateFactory<>(kvState, stateDescriptor, latencyTrackingStateConfig)
            .createState();
}
|
// A ValueState must be wrapped in LatencyTrackingValueState only when latency
// tracking is enabled; otherwise the original state instance is returned as-is.
@TestTemplate
@SuppressWarnings("unchecked")
<K, N> void testTrackValueState() throws Exception {
    InternalValueState<K, N, String> valueState = mock(InternalValueState.class);
    ValueStateDescriptor<String> valueStateDescriptor =
            new ValueStateDescriptor<>("value", String.class);
    InternalKvState<K, ?, ?> latencyTrackingState =
            LatencyTrackingStateFactory.createStateAndWrapWithLatencyTrackingIfEnabled(
                    valueState, valueStateDescriptor, getLatencyTrackingStateConfig());
    if (enableLatencyTracking) {
        assertThat(latencyTrackingState).isInstanceOf(LatencyTrackingValueState.class);
    } else {
        assertThat(latencyTrackingState).isEqualTo(valueState);
    }
}
|
/**
 * Returns the quality gate explicitly associated with the project when one
 * exists, otherwise falls back to the instance-wide default gate.
 */
@Override
public QualityGate findEffectiveQualityGate(Project project) {
    return findQualityGate(project).orElseGet(this::findDefaultQualityGate);
}
|
// With no project-specific gate, the default gate must be resolved and returned,
// even when some of its conditions reference metrics that no longer exist.
@Test
public void findDefaultQualityGate_by_property_found() {
    QualityGateDto qualityGateDto = new QualityGateDto();
    qualityGateDto.setUuid(QUALITY_GATE_DTO.getUuid());
    qualityGateDto.setName(QUALITY_GATE_DTO.getName());
    when(qualityGateDao.selectDefault(any())).thenReturn(qualityGateDto);
    when(qualityGateConditionDao.selectForQualityGate(any(), eq(SOME_UUID))).thenReturn(ImmutableList.of(CONDITION_1, CONDITION_2));
    // METRIC_UUID_1 is missing on purpose: its condition should be silently dropped.
    when(metricRepository.getOptionalByUuid(METRIC_UUID_1)).thenReturn(Optional.empty());
    when(metricRepository.getOptionalByUuid(METRIC_UUID_2)).thenReturn(Optional.of(METRIC_2));
    QualityGate result = underTest.findEffectiveQualityGate(mock(Project.class));
    assertThat(result).isNotNull();
    assertThat(result.getUuid()).isEqualTo(QUALITY_GATE_DTO.getUuid());
    assertThat(result.getName()).isEqualTo(QUALITY_GATE_DTO.getName());
}
|
/**
 * Log probability mass of observing {@code k} successes; returns negative
 * infinity for any {@code k} outside the distribution's support.
 */
@Override
public double logp(int k) {
    // Support of the hypergeometric distribution: max(0, m+n-N) <= k <= min(m, n).
    final int lower = Math.max(0, m + n - N);
    final int upper = Math.min(m, n);
    if (k < lower || k > upper) {
        return Double.NEGATIVE_INFINITY;
    }
    // log C(m,k) + log C(N-m, n-k) - log C(N, n)
    return lchoose(m, k) + lchoose(N - m, n - k) - lchoose(N, n);
}
|
// Spot-checks logp against reference values (computed externally, e.g. with R's
// dhyper) across the tails and the mode of HyperGeometric(N=100, m=30, n=70).
@Test
public void testLogP() {
    System.out.println("logP");
    HyperGeometricDistribution instance = new HyperGeometricDistribution(100, 30, 70);
    instance.rand();
    assertEquals(Math.log(3.404564e-26), instance.logp(0), 1E-5);
    assertEquals(Math.log(7.149584e-23), instance.logp(1), 1E-5);
    assertEquals(Math.log(3.576579e-20), instance.logp(2), 1E-5);
    assertEquals(Math.log(0.1655920), instance.logp(20), 1E-5);
    assertEquals(Math.log(0.1877461), instance.logp(21), 1E-5);
    assertEquals(Math.log(0.00041413), instance.logp(28), 1E-5);
    assertEquals(Math.log(4.136376e-05), instance.logp(29), 1E-5);
    assertEquals(Math.log(1.884349e-06), instance.logp(30), 1E-5);
}
|
/**
 * Serializes a gRPC {@code Request} plus its metadata into a transport {@code Payload}.
 *
 * @param request the request to serialize; its JSON form becomes the payload body
 * @param meta request metadata; when non-null the request headers and type name are
 *             copied into the payload metadata
 * @return the assembled payload
 */
public static Payload convert(Request request, RequestMeta meta) {
    //meta.
    Payload.Builder payloadBuilder = Payload.newBuilder();
    Metadata.Builder metaBuilder = Metadata.newBuilder();
    // NOTE(review): the null check is on `meta` but the data copied comes from
    // `request` — presumably intentional (meta acts as an "include headers" flag),
    // but worth confirming against the callers.
    if (meta != null) {
        metaBuilder.putAllHeaders(request.getHeaders()).setType(request.getClass().getSimpleName());
    }
    // Client IP is always attached, regardless of meta.
    metaBuilder.setClientIp(NetUtils.localIP());
    payloadBuilder.setMetadata(metaBuilder.build());
    // request body .
    byte[] jsonBytes = convertRequestToByte(request);
    return payloadBuilder.setBody(Any.newBuilder().setValue(UnsafeByteOperations.unsafeWrap(jsonBytes))).build();
}
|
// Converting a response must record the response's simple class name as the
// payload metadata type, so the receiver can route it back to the right class.
@Test
void testConvertResponse() {
    Payload convert = GrpcUtils.convert(response);
    assertEquals(response.getClass().getSimpleName(), convert.getMetadata().getType());
}
|
/**
 * Converts a scanner-report measure into a core {@code Measure}, dispatching on the
 * metric's declared value type.
 *
 * @param batchMeasure the raw measure from the scanner report; may be {@code null}
 * @param metric the metric describing how the raw value must be interpreted; never {@code null}
 * @return the converted measure, or {@code Optional.empty()} when {@code batchMeasure} is {@code null}
 * @throws IllegalArgumentException if the metric's value type is not handled
 */
public Optional<Measure> toMeasure(@Nullable ScannerReport.Measure batchMeasure, Metric metric) {
    Objects.requireNonNull(metric);
    if (batchMeasure == null) {
      return Optional.empty();
    }
    Measure.NewMeasureBuilder builder = Measure.newMeasureBuilder();
    switch (metric.getType().getValueType()) {
      case INT:
        return toIntegerMeasure(builder, batchMeasure);
      case LONG:
        return toLongMeasure(builder, batchMeasure);
      case DOUBLE:
        return toDoubleMeasure(builder, batchMeasure);
      case BOOLEAN:
        return toBooleanMeasure(builder, batchMeasure);
      case STRING:
        return toStringMeasure(builder, batchMeasure);
      case LEVEL:
        return toLevelMeasure(builder, batchMeasure);
      case NO_VALUE:
        // NO_VALUE metrics carry no payload; only the builder is needed.
        return toNoValueMeasure(builder);
      default:
        throw new IllegalArgumentException("Unsupported Measure.ValueType " + metric.getType().getValueType());
    }
}
|
// An empty scanner measure for a BOOLEAN metric must still yield a present Measure,
// but one whose value type degrades to NO_VALUE.
@Test
public void toMeasure_returns_no_value_if_dto_has_no_value_for_Boolean_metric() {
    Optional<Measure> measure = underTest.toMeasure(EMPTY_BATCH_MEASURE, SOME_BOOLEAN_METRIC);
    assertThat(measure).isPresent();
    assertThat(measure.get().getValueType()).isEqualTo(Measure.ValueType.NO_VALUE);
}
|
/**
 * Handles the {@code eth_getCode} operation: fetches the contract bytecode at the
 * requested address/block and stores it in the message body unless the node
 * returned an error.
 */
@InvokeOnHeader(Web3jConstants.ETH_GET_CODE)
void ethGetCode(Message message) throws IOException {
    // Header values win over the endpoint configuration defaults.
    DefaultBlockParameter blockParameter
            = toDefaultBlockParameter(message.getHeader(Web3jConstants.AT_BLOCK, configuration::getAtBlock, String.class));
    String contractAddress = message.getHeader(Web3jConstants.ADDRESS, configuration::getAddress, String.class);
    Request<?, EthGetCode> ethRequest = web3j.ethGetCode(contractAddress, blockParameter);
    setRequestId(message, ethRequest);
    EthGetCode ethResponse = ethRequest.send();
    // checkForError populates the exchange with failure details when true.
    if (!checkForError(message, ethResponse)) {
        message.setBody(ethResponse.getCode());
    }
}
|
// A successful eth_getCode call must copy the returned bytecode string into the
// exchange body.
@Test
public void ethGetCodeTest() throws Exception {
    EthGetCode response = Mockito.mock(EthGetCode.class);
    Mockito.when(mockWeb3j.ethGetCode(any(), any())).thenReturn(request);
    Mockito.when(request.send()).thenReturn(response);
    Mockito.when(response.getCode()).thenReturn("test");
    Exchange exchange = createExchangeWithBodyAndHeader(null, OPERATION, Web3jConstants.ETH_GET_CODE);
    template.send(exchange);
    String body = exchange.getIn().getBody(String.class);
    assertEquals("test", body);
}
|
/**
 * Keeps the service directory's mark-file link consistent with where the mark file
 * actually lives. When the mark file resides inside {@code serviceDir} any stale
 * link file is removed; otherwise a link file is (re)written containing the
 * canonical path of the mark file's parent directory, US-ASCII encoded.
 *
 * @param serviceDir   directory in which the link file is maintained
 * @param actualFile   the mark file whose location is being recorded
 * @param linkFilename name of the link file inside {@code serviceDir}
 * @throws IllegalArgumentException if either directory cannot be canonicalized
 * @throws RuntimeException if the link file cannot be deleted or written
 */
public static void ensureMarkFileLink(final File serviceDir, final File actualFile, final String linkFilename)
{
    final String serviceDirPath;
    final String markFileParentPath;
    try
    {
        serviceDirPath = serviceDir.getCanonicalPath();
    }
    catch (final IOException ex)
    {
        // Fixed copy-paste: the failing path is serviceDir, not an "archiveDir".
        throw new IllegalArgumentException("failed to resolve canonical path for serviceDir=" + serviceDir);
    }
    try
    {
        markFileParentPath = actualFile.getParentFile().getCanonicalPath();
    }
    catch (final IOException ex)
    {
        throw new IllegalArgumentException(
            "failed to resolve canonical path for markFile parent dir of " + actualFile);
    }
    final Path linkFile = new File(serviceDirPath, linkFilename).toPath();
    if (serviceDirPath.equals(markFileParentPath))
    {
        // Mark file lives in the service dir itself: a link would be stale, remove it.
        try
        {
            Files.deleteIfExists(linkFile);
        }
        catch (final IOException ex)
        {
            throw new RuntimeException("failed to remove old link file", ex);
        }
    }
    else
    {
        // Mark file lives elsewhere: record its parent dir so tooling can find it.
        try
        {
            Files.write(
                linkFile,
                markFileParentPath.getBytes(US_ASCII),
                StandardOpenOption.CREATE,
                StandardOpenOption.WRITE,
                StandardOpenOption.TRUNCATE_EXISTING);
        }
        catch (final IOException ex)
        {
            throw new RuntimeException("failed to create link for mark file directory", ex);
        }
    }
}
|
// When the mark file lives outside the service directory, a link file must be
// created in the service directory containing exactly the mark file's parent path.
@Test
void shouldCreateLinkFileIfFileInDifferentLocation() throws IOException
{
    final String linkFilename = "markfile.lnk";
    final File markFileLocation = new File(alternativeDirectory, "markfile.dat");
    MarkFile.ensureMarkFileLink(serviceDirectory, markFileLocation, linkFilename);
    final File linkFileLocation = new File(serviceDirectory, linkFilename);
    assertTrue(linkFileLocation.exists());
    // The link file holds a single line: the canonical parent dir of the mark file.
    final List<String> strings = Files.readAllLines(linkFileLocation.toPath());
    assertEquals(1, strings.size());
    assertEquals(markFileLocation.getCanonicalFile().getParent(), strings.get(0));
}
|
/**
 * Compares numeric precision of the three Num implementations by running the same
 * randomized bar series through each and printing the relative error of
 * DoubleNum and 32-digit DecimalNum against a 256-digit DecimalNum reference.
 */
public static void main(String[] args) {
    BaseBarSeriesBuilder barSeriesBuilder = new BaseBarSeriesBuilder();
    BarSeries seriesD = barSeriesBuilder.withName("Sample Series Double ")
            .withNumTypeOf(DoubleNum::valueOf)
            .build();
    BarSeries seriesP = barSeriesBuilder.withName("Sample Series DecimalNum 32")
            .withNumTypeOf(DecimalNum::valueOf)
            .build();
    BarSeries seriesPH = barSeriesBuilder.withName("Sample Series DecimalNum 256")
            .withNumTypeOf(number -> DecimalNum.valueOf(number.toString(), 256))
            .build();
    // Feed all three series the identical random bars so results are comparable.
    int[] randoms = new Random().ints(NUMBARS, 80, 100).toArray();
    for (int i = 0; i < randoms.length; i++) {
        ZonedDateTime date = ZonedDateTime.now().minusSeconds(NUMBARS - i);
        seriesD.addBar(date, randoms[i], randoms[i] + 21, randoms[i] - 21, randoms[i] - 5);
        seriesP.addBar(date, randoms[i], randoms[i] + 21, randoms[i] - 21, randoms[i] - 5);
        seriesPH.addBar(date, randoms[i], randoms[i] + 21, randoms[i] - 21, randoms[i] - 5);
    }
    // Lift every result into 256-digit precision before computing relative errors.
    Num doubleResult = DecimalNum.valueOf(test(seriesD).toString(), 256);
    Num decimal32Result = DecimalNum.valueOf(test(seriesP).toString(), 256);
    Num reference = DecimalNum.valueOf(test(seriesPH).toString(), 256);
    System.out.println(seriesD.getName() + " error: "
            + doubleResult.minus(reference).dividedBy(reference).multipliedBy(DecimalNum.valueOf(100)));
    System.out.println(seriesP.getName() + " error: "
            + decimal32Result.minus(reference).dividedBy(reference).multipliedBy(DecimalNum.valueOf(100)));
}
|
// Smoke test: runs the comparison program end-to-end; fails only on exceptions.
@Test
public void test() {
    CompareNumTypes.main(null);
}
|
/**
 * Orders wrappers primarily by their type; only two NORMAL wrappers are compared
 * by value, with a null value ordered before any non-null value.
 */
@Override
public int compareTo(final ComparableWrapper other) {
    final boolean bothNormal =
            Objects.equals(type, Type.NORMAL) && Objects.equals(other.type, Type.NORMAL);
    if (!bothNormal) {
        // At least one side is special (e.g. MIN/MAX sentinel): type order decides.
        return type.compareTo(other.type);
    }
    final Comparator<Comparable> nullsFirstOrder = Comparator.nullsFirst(Comparable::compareTo);
    return nullsFirstOrder.compare(value, other.value);
}
|
// Exercises the full ordering contract: reflexivity, null-first ordering for
// NORMAL values, and MIN/MAX sentinels sorting below/above everything else.
@Test
void compareTo() {
    // Reflexivity: every wrapper compares equal to itself.
    assertThat(theNull.compareTo(theNull)).isEqualTo(0);
    assertThat(one.compareTo(one)).isEqualTo(0);
    assertThat(ten.compareTo(ten)).isEqualTo(0);
    assertThat(min.compareTo(min)).isEqualTo(0);
    assertThat(max.compareTo(max)).isEqualTo(0);
    assertThat(one.compareTo(theNull) > 0).isTrue();
    assertThat(one.compareTo(ten) < 0).isTrue();
    assertThat(one.compareTo(min) > 0).isTrue();
    assertThat(one.compareTo(max) < 0).isTrue();
    assertThat(ten.compareTo(theNull) > 0).isTrue();
    assertThat(ten.compareTo(one) > 0).isTrue();
    assertThat(ten.compareTo(min) > 0).isTrue();
    assertThat(ten.compareTo(max) < 0).isTrue();
    // MIN sorts below every other wrapper, including null.
    assertThat(min.compareTo(theNull) < 0).isTrue();
    assertThat(min.compareTo(one) < 0).isTrue();
    assertThat(min.compareTo(ten) < 0).isTrue();
    assertThat(min.compareTo(max) < 0).isTrue();
    // MAX sorts above every other wrapper.
    assertThat(max.compareTo(theNull) > 0).isTrue();
    assertThat(max.compareTo(one) > 0).isTrue();
    assertThat(max.compareTo(ten) > 0).isTrue();
    assertThat(max.compareTo(min) > 0).isTrue();
}
|
/**
 * Two MessageSubjects are equal iff they are of exactly the same class and
 * carry equal values. Uses getClass() (not instanceof) so subclasses never
 * compare equal to the base type.
 */
@Override
public boolean equals(Object obj) {
    if (this == obj) {
        return true;
    }
    if (obj == null || getClass() != obj.getClass()) {
        return false;
    }
    final MessageSubject other = (MessageSubject) obj;
    return Objects.equals(value, other.value);
}
|
// Uses Guava's EqualsTester to verify the equals/hashCode contract across
// equal and unequal subject groups.
@Test
public void testEquals() {
    new EqualsTester()
            .addEqualityGroup(subject1, sameAsSubject1)
            .addEqualityGroup(subject2)
            .addEqualityGroup(subject3)
            .testEquals();
}
|
/**
 * Determines whether the given token is a reserved KSQL word: it must fail to
 * parse as a non-reserved word AND appear in the reserved-word vocabulary.
 */
public static boolean isReserved(final String token) {
    final SqlBaseLexer lexer = new SqlBaseLexer(
        new CaseInsensitiveStream(CharStreams.fromString(token)));
    final CommonTokenStream tokens = new CommonTokenStream(lexer);
    final SqlBaseParser parser = new SqlBaseParser(tokens);
    // Suppress console noise; we only care whether parsing succeeds.
    parser.removeErrorListeners();
    final SqlBaseParser.NonReservedContext parsed = parser.nonReserved();
    if (parsed.exception == null) {
        // Token parsed cleanly under the nonReserved rule of SqlBase.g4,
        // so by definition it is not reserved.
        return false;
    }
    final Set<String> reservedWords = ParserKeywordValidatorUtil.getKsqlReservedWords();
    return reservedWords.contains(token.toLowerCase());
}
|
// Non-reserved keywords, arbitrary identifiers, and any casing thereof must all
// be reported as not reserved.
@Test
public void shouldNotBeReserved() {
    // Given:
    final String[] keywords = new String[]{
        "source", // non-reserved keyword
        "sink",   // non-reserved keyword
        "MAP",    //upper case
        "Array",  //case insensitive
        "ASSERT",
        "foo",
        "bAR"
    };
    // Then:
    for (final String keyword : keywords) {
        assertEquals(false, ParserUtil.isReserved(keyword));
    }
}
|
/**
 * Consumes the "add user word" menu action when the fragment is attached to
 * the settings activity; every other selection is delegated to the superclass.
 */
@Override
public boolean onOptionsItemSelected(@NonNull MenuItem item) {
    final MainSettingsActivity activity = (MainSettingsActivity) getActivity();
    // Only handle the action while attached; otherwise fall through to super.
    if (activity != null && item.getItemId() == R.id.add_user_word) {
        createEmptyItemForAdd();
        return true;
    }
    return super.onOptionsItemSelected(item);
}
|
// Selecting "add word" twice from the empty state must not stack two editor rows:
// the list still shows exactly one row, now of the editing view type.
@Test
public void testTwiceAddNewWordFromMenuAtEmptyState() {
    UserDictionaryEditorFragment fragment = startEditorFragment();
    RecyclerView wordsRecyclerView = fragment.getView().findViewById(R.id.words_recycler_view);
    Assert.assertNotNull(wordsRecyclerView);
    Assert.assertEquals(1 /*empty view*/, wordsRecyclerView.getAdapter().getItemCount());
    Assert.assertEquals(
        R.id.word_editor_view_type_empty_view_row,
        wordsRecyclerView.getAdapter().getItemViewType(0));
    final MenuItem menuItem = Mockito.mock(MenuItem.class);
    Mockito.doReturn(R.id.add_user_word).when(menuItem).getItemId();
    // Trigger the add action twice in a row.
    fragment.onOptionsItemSelected(menuItem);
    TestRxSchedulers.drainAllTasks();
    fragment.onOptionsItemSelected(menuItem);
    TestRxSchedulers.drainAllTasks();
    Assert.assertEquals(1, wordsRecyclerView.getAdapter().getItemCount());
    Assert.assertEquals(
        R.id.word_editor_view_type_editing_row, wordsRecyclerView.getAdapter().getItemViewType(0));
}
|
/**
 * Adds two quantities, converting the addend into this quantity's unit first
 * when the units differ; the result is expressed in this quantity's unit.
 */
public Quantity<U> add(Quantity<U> second) {
    // Same unit: use the raw value; otherwise convert the addend into our unit.
    final double addend = (unit == second.unit) ? second.value : second.in(unit).value;
    return new Quantity<U>(value + addend, unit);
}
|
// Adding two quantities that share a unit must sum the values and keep the unit.
@Test
public void addQuantitiesInSameUnits() throws Exception {
    Quantity<Metrics> first = new Quantity<Metrics>(1, Metrics.m);
    Quantity<Metrics> second = new Quantity<Metrics>(2, Metrics.m);
    assertThat(first.add(second)).isEqualTo(new Quantity<Metrics>(3, Metrics.m));
}
|
/**
 * Applies this extractor to the message: evaluates the extraction condition,
 * runs the extraction, writes the resulting field(s), optionally cuts the
 * matched text out of the source field, and finally runs converters.
 * All phases are timed via the extractor's metric timers.
 */
public void runExtractor(Message msg) {
    try(final Timer.Context ignored = completeTimer.time()) {
        final String field;
        try (final Timer.Context ignored2 = conditionTimer.time()) {
            // We can only work on Strings.
            if (!(msg.getField(sourceField) instanceof String)) {
                conditionMissesCounter.inc();
                return;
            }
            field = (String) msg.getField(sourceField);
            // Decide if to extract at all.
            if (conditionType.equals(ConditionType.STRING)) {
                if (field.contains(conditionValue)) {
                    conditionHitsCounter.inc();
                } else {
                    conditionMissesCounter.inc();
                    return;
                }
            } else if (conditionType.equals(ConditionType.REGEX)) {
                if (regexConditionPattern.matcher(field).find()) {
                    conditionHitsCounter.inc();
                } else {
                    conditionMissesCounter.inc();
                    return;
                }
            }
        }
        try (final Timer.Context ignored2 = executionTimer.time()) {
            Result[] results;
            try {
                results = run(field);
            } catch (ExtractorException e) {
                // Record the failure on the message instead of aborting processing.
                final String error = "Could not apply extractor <" + getTitle() + " (" + getId() + ")>";
                msg.addProcessingError(new Message.ProcessingError(
                        ProcessingFailureCause.ExtractorException, error, ExceptionUtils.getRootCauseMessage(e)));
                return;
            }
            // Bail out when nothing was extracted or any extracted value is null.
            if (results == null || results.length == 0 || Arrays.stream(results).anyMatch(result -> result.getValue() == null)) {
                return;
            } else if (results.length == 1 && results[0].target == null) {
                // results[0].target is null if this extractor cannot produce multiple fields use targetField in that case
                msg.addField(targetField, results[0].getValue());
            } else {
                for (final Result result : results) {
                    msg.addField(result.getTarget(), result.getValue());
                }
            }
            // Remove original from message?
            if (cursorStrategy.equals(CursorStrategy.CUT) && !targetField.equals(sourceField) && !Message.RESERVED_FIELDS.contains(sourceField) && results[0].beginIndex != -1) {
                final StringBuilder sb = new StringBuilder(field);
                final List<Result> reverseList = Arrays.stream(results)
                        .sorted(Comparator.<Result>comparingInt(result -> result.endIndex).reversed())
                        .collect(Collectors.toList());
                // remove all from reverse so that the indices still match
                for (final Result result : reverseList) {
                    sb.delete(result.getBeginIndex(), result.getEndIndex());
                }
                final String builtString = sb.toString();
                final String finalResult = builtString.trim().isEmpty() ? "fullyCutByExtractor" : builtString;
                msg.removeField(sourceField);
                // TODO don't add an empty field back, or rather don't add fullyCutByExtractor
                msg.addField(sourceField, finalResult);
            }
            runConverters(msg);
        }
    }
}
|
// Converters only operate on String field values: a converter that would rewrite
// the value must be skipped when the extractor produced a non-String (123).
@Test
public void testConvertersWithNonStringFieldValue() throws Exception {
    final Converter converter = new TestConverter.Builder()
            .callback(new Function<Object, Object>() {
                @Nullable
                @Override
                public Object apply(Object input) {
                    return "converted";
                }
            })
            .build();
    final TestExtractor extractor = new TestExtractor.Builder()
            .converters(Lists.newArrayList(converter))
            .callback(new Callable<Result[]>() {
                @Override
                public Result[] call() throws Exception {
                    // Extractor yields an Integer value for field "target".
                    return new Result[] {
                            new Result(123, "target", -1, -1)
                    };
                }
            })
            .build();
    final Message msg = createMessage("message");
    extractor.runExtractor(msg);
    // Only string values will be converted.
    assertThat(msg.getField("target")).isEqualTo(123);
}
|
/**
 * Creates an instance of the given class via its (implicit or explicit) no-arg
 * constructor, wrapping any reflective failure in an InstantiationException.
 */
public static <T> T instantiateClassDefConstructor(Class<T> clazz) {
    //if constructor present then it should have a no arg constructor
    //if not present then default constructor is already their
    Objects.requireNonNull(clazz, "class to instantiate should not be null");
    final boolean hasPublicConstructors = clazz.getConstructors().length > 0;
    final boolean hasNoArgConstructor =
        Arrays.stream(clazz.getConstructors()).anyMatch(c -> c.getParameterCount() == 0);
    if (hasPublicConstructors && !hasNoArgConstructor) {
        throw new InstantiationException(
            "Default constructor is required to create instance of public class: " + clazz
                .getName());
    }
    try {
        return clazz.getConstructor().newInstance();
    } catch (Exception e) {
        throw new InstantiationException(INSTANTIATION_ERROR_PREFIX + clazz.getName(), e);
    }
}
|
// A class exposing a default constructor must be instantiable via reflection.
@Test
public void shouldInstantiateClassWithDefaultConstructor2() {
    assertThat(ClassUtils.instantiateClassDefConstructor(DefaultConstructor2.class)).isNotNull();
}
|
/**
 * Tests an attribute value against this LIKE-style condition.
 *
 * @param attributeValue the value to test; {@code null} never matches
 * @return whether the value satisfies the condition
 * @throws IllegalStateException if the condition type is unknown
 */
@Override
public boolean match(String attributeValue) {
    if (attributeValue == null) {
        return false;
    }
    switch (type) {
        case Equals:
            return attributeValue.equals(value);
        case StartsWith:
            // length == -1 means "no exact-length constraint"; otherwise the
            // candidate must have exactly the expected length as well.
            return (length == -1 || length == attributeValue.length()) && attributeValue.startsWith(value);
        case EndsWith:
            return (length == -1 || length == attributeValue.length()) && attributeValue.endsWith(value);
        case Contains:
            return attributeValue.contains(value);
        case Regexp:
            // Full-string match, not a substring search.
            return regexPattern.matcher(attributeValue).matches();
        default:
            throw new IllegalStateException("Unexpected type " + type);
    }
}
|
// "%ab%" is a plain contains-check: "ab" must appear contiguously somewhere.
@Test
public void testDegeneratedContains() {
    LikeCondition likeCondition = new LikeCondition("%ab%");
    assertTrue(likeCondition.match("ab"));
    assertTrue(likeCondition.match("xabxx"));
    assertFalse(likeCondition.match("axb"));
}
|
/**
 * Builds the set of descriptive URLs for a file served through this CDN
 * distribution: always the origin URL, plus the plain/SSL/streaming/iOS-streaming
 * CDN URLs when the distribution provides them, plus any CNAME URLs.
 */
@Override
public DescriptiveUrlBag toUrl(final Path file) {
    final DescriptiveUrlBag list = new DescriptiveUrlBag();
    list.add(new DescriptiveUrl(this.toUrl(file, distribution.getOrigin()), DescriptiveUrl.Type.origin,
        MessageFormat.format(LocaleFactory.localizedString("{0} {1} URL"),
            distribution.getName(),
            LocaleFactory.localizedString("Origin", "Info"))));
    // The four CDN endpoints only differ by URI and a qualifier in the help text.
    if(distribution.getUrl() != null) {
        list.add(new DescriptiveUrl(this.toUrl(file, distribution.getUrl()), DescriptiveUrl.Type.cdn,
            this.cdnHelpText(null)));
    }
    if(distribution.getSslUrl() != null) {
        list.add(new DescriptiveUrl(this.toUrl(file, distribution.getSslUrl()), DescriptiveUrl.Type.cdn,
            this.cdnHelpText("SSL")));
    }
    if(distribution.getStreamingUrl() != null) {
        list.add(new DescriptiveUrl(this.toUrl(file, distribution.getStreamingUrl()), DescriptiveUrl.Type.cdn,
            this.cdnHelpText("Streaming")));
    }
    if(distribution.getiOSstreamingUrl() != null) {
        list.add(new DescriptiveUrl(this.toUrl(file, distribution.getiOSstreamingUrl()), DescriptiveUrl.Type.cdn,
            this.cdnHelpText("iOS Streaming")));
    }
    list.addAll(this.toCnameUrl(file));
    return list;
}

/**
 * Localized help text for a CDN URL, e.g. "n Download URL" or "n Download URL (SSL)".
 *
 * @param qualifier optional suffix such as "SSL"; null for the plain CDN URL
 */
private String cdnHelpText(final String qualifier) {
    final String help = MessageFormat.format(LocaleFactory.localizedString("{0} {1} URL"),
        distribution.getName(),
        LocaleFactory.localizedString(distribution.getMethod().toString(), "S3"));
    if(null == qualifier) {
        return help;
    }
    return String.format("%s (%s)", help, qualifier);
}
|
// The origin URL must combine the distribution origin with the file path below
// the container, and carry the localized "Origin" help text.
@Test
public void testDownload() {
    final Distribution distribution = new Distribution(Distribution.DOWNLOAD, "n", URI.create("https://test.cyberduck.ch.s3.amazonaws.com"), true);
    final DescriptiveUrl url = new DistributionUrlProvider(distribution).toUrl(
        new Path("/test.cyberduck.ch/p/f", EnumSet.of(Path.Type.file))).find(DescriptiveUrl.Type.origin);
    assertEquals("https://test.cyberduck.ch.s3.amazonaws.com/p/f", url.getUrl());
    assertEquals("n Origin URL", url.getHelp());
}
|
/**
 * Submits a herder request that transitions the named connector to STOPPED.
 * Leader-only: writing the empty task config set and the target state to the
 * config topic must happen on the leader. Completion (or failure) is reported
 * through the supplied callback.
 */
@Override
public void stopConnector(final String connName, final Callback<Void> callback) {
    log.trace("Submitting request to transition connector {} to STOPPED state", connName);
    addRequest(
            () -> {
                if (!configState.contains(connName))
                    throw new NotFoundException("Unknown connector " + connName);
                // We only allow the leader to handle this request since it involves writing task configs to the config topic
                if (!isLeader()) {
                    callback.onCompletion(new NotLeaderException("Only the leader can transition connectors to the STOPPED state.", leaderUrl()), null);
                    return null;
                }
                // We write the task configs first since, if we fail between then and writing the target state, the
                // cluster is still kept in a healthy state. A RUNNING connector with zero tasks is acceptable (although,
                // if the connector is reassigned during the ensuing rebalance, it is likely that it will immediately generate
                // a non-empty set of task configs). A STOPPED connector with a non-empty set of tasks is less acceptable
                // and likely to confuse users.
                writeTaskConfigs(connName, Collections.emptyList());
                String stageDescription = "writing the STOPPED target stage for connector " + connName + " to the config topic";
                try (TickThreadStage stage = new TickThreadStage(stageDescription)) {
                    configBackingStore.putTargetState(connName, TargetState.STOPPED);
                }
                // Force a read of the new target state for the connector
                if (!refreshConfigSnapshot(workerSyncTimeoutMs)) {
                    // Best effort: the state was written; a stale local snapshot only delays pickup.
                    log.warn("Failed to read to end of config topic after writing the STOPPED target state for connector {}", connName);
                }
                callback.onCompletion(null, null);
                return null;
            },
            forwardErrorAndTickThreadStages(callback)
    );
}
|
// As leader, a stop request must write empty task configs plus the STOPPED
// target state to the config store and complete the callback within the tick.
@Test
public void testStopConnector() throws Exception {
    when(herder.connectorType(anyMap())).thenReturn(ConnectorType.SOURCE);
    when(member.memberId()).thenReturn("leader");
    when(member.currentProtocolVersion()).thenReturn(CONNECT_PROTOCOL_V0);
    // join as leader
    expectRebalance(1, Collections.emptyList(), singletonList(TASK0), true);
    expectConfigRefreshAndSnapshot(SNAPSHOT);
    when(statusBackingStore.connectors()).thenReturn(Collections.emptySet());
    expectMemberPoll();
    when(worker.startSourceTask(eq(TASK0), any(), any(), any(), eq(herder), eq(TargetState.STARTED))).thenReturn(true);
    herder.tick(); // join
    // handle stop request
    expectMemberEnsureActive();
    expectConfigRefreshAndSnapshot(SNAPSHOT);
    doNothing().when(configBackingStore).putTaskConfigs(CONN1, Collections.emptyList());
    doNothing().when(configBackingStore).putTargetState(CONN1, TargetState.STOPPED);
    FutureCallback<Void> cb = new FutureCallback<>();
    herder.stopConnector(CONN1, cb); // external request
    herder.tick(); // continue
    assertTrue(cb.isDone(), "Callback should already have been invoked by herder");
    cb.get(0, TimeUnit.MILLISECONDS);
    verifyNoMoreInteractions(worker, member, configBackingStore, statusBackingStore);
}
|
/**
 * Decides whether the given address is allowed to join: null is never trusted,
 * an empty whitelist trusts everyone, otherwise the host must match one of the
 * configured trusted interfaces.
 */
@Override
public boolean isTrusted(Address address) {
    if (address == null) {
        return false;
    }
    // No configured interfaces means no restriction at all.
    if (trustedInterfaces.isEmpty()) {
        return true;
    }
    String host = address.getHost();
    if (!matchAnyInterface(host, trustedInterfaces)) {
        if (logger.isFineEnabled()) {
            logger.fine(
                    "Address %s doesn't match any trusted interface", host);
        }
        return false;
    }
    return true;
}
|
// With 127.0.0.2 whitelisted, a join message from 127.0.0.1 must be rejected.
@Test
public void givenInterfaceIsConfigured_whenMessageWithNonMatchingHost_thenDoNotTrust() throws UnknownHostException {
    AddressCheckerImpl joinMessageTrustChecker = new AddressCheckerImpl(singleton("127.0.0.2"), logger);
    Address address = createAddress("127.0.0.1");
    assertFalse(joinMessageTrustChecker.isTrusted(address));
}
|
/**
 * Returns a serializable function that renders Beam {@code Row}s of the given
 * schema as JSON strings, reusing the schema-bound ObjectMapper held by RowToJsonFn.
 */
public static SimpleFunction<Row, String> getRowToJsonStringsFunction(Schema beamSchema) {
    return new RowToJsonFn<String>(beamSchema) {
        @Override
        public String apply(Row input) {
            return RowJsonUtils.rowToJson(objectMapper, input);
        }
    };
}
|
// Every test-case row must serialize to its expected JSON string representation.
@Test
public void testGetRowToJsonStringsFunction() {
    for (TestCase<? extends RowEncodable> caze : testCases) {
        String expected = caze.jsonString;
        String actual = JsonUtils.getRowToJsonStringsFunction(caze.row.getSchema()).apply(caze.row);
        assertJsonEquals(caze.userT.toString(), expected, actual);
    }
}
|
/**
 * Static factory: builds an IPv4 prefix from a 32-bit integer address and a
 * prefix length (number of leading significant bits).
 */
public static Ip4Prefix valueOf(int address, int prefixLength) {
    return new Ip4Prefix(Ip4Address.valueOf(address), prefixLength);
}
|
// Containment semantics: a prefix contains another iff the other's network falls
// entirely inside it (equal prefixes contain each other; longer masks never
// contain shorter ones; 0.0.0.0/0 contains everything).
@Test
public void testContainsIpPrefixIPv4() {
    Ip4Prefix ipPrefix;
    // A /24 contains itself and any /32 inside it, but not wider or sibling nets.
    ipPrefix = Ip4Prefix.valueOf("1.2.0.0/24");
    assertTrue(ipPrefix.contains(Ip4Prefix.valueOf("1.2.0.0/24")));
    assertTrue(ipPrefix.contains(Ip4Prefix.valueOf("1.2.0.0/32")));
    assertTrue(ipPrefix.contains(Ip4Prefix.valueOf("1.2.0.4/32")));
    assertFalse(ipPrefix.contains(Ip4Prefix.valueOf("1.2.0.0/16")));
    assertFalse(ipPrefix.contains(Ip4Prefix.valueOf("1.3.0.0/24")));
    assertFalse(ipPrefix.contains(Ip4Prefix.valueOf("0.0.0.0/16")));
    assertFalse(ipPrefix.contains(Ip4Prefix.valueOf("0.0.0.0/0")));
    assertFalse(ipPrefix.contains(Ip4Prefix.valueOf("255.255.255.255/32")));
    // A /32 contains only the identical /32.
    ipPrefix = Ip4Prefix.valueOf("1.2.0.0/32");
    assertFalse(ipPrefix.contains(Ip4Prefix.valueOf("1.2.0.0/24")));
    assertTrue(ipPrefix.contains(Ip4Prefix.valueOf("1.2.0.0/32")));
    assertFalse(ipPrefix.contains(Ip4Prefix.valueOf("1.2.0.4/32")));
    assertFalse(ipPrefix.contains(Ip4Prefix.valueOf("1.2.0.0/16")));
    assertFalse(ipPrefix.contains(Ip4Prefix.valueOf("1.3.0.0/24")));
    assertFalse(ipPrefix.contains(Ip4Prefix.valueOf("0.0.0.0/16")));
    assertFalse(ipPrefix.contains(Ip4Prefix.valueOf("0.0.0.0/0")));
    assertFalse(ipPrefix.contains(Ip4Prefix.valueOf("255.255.255.255/32")));
    // The default route contains every prefix.
    ipPrefix = Ip4Prefix.valueOf("0.0.0.0/0");
    assertTrue(ipPrefix.contains(Ip4Prefix.valueOf("1.2.0.0/24")));
    assertTrue(ipPrefix.contains(Ip4Prefix.valueOf("1.2.0.0/32")));
    assertTrue(ipPrefix.contains(Ip4Prefix.valueOf("1.2.0.4/32")));
    assertTrue(ipPrefix.contains(Ip4Prefix.valueOf("1.2.0.0/16")));
    assertTrue(ipPrefix.contains(Ip4Prefix.valueOf("1.3.0.0/24")));
    assertTrue(ipPrefix.contains(Ip4Prefix.valueOf("0.0.0.0/16")));
    assertTrue(ipPrefix.contains(Ip4Prefix.valueOf("0.0.0.0/0")));
    assertTrue(ipPrefix.contains(Ip4Prefix.valueOf("255.255.255.255/32")));
    // A host route at the top of the address space contains only itself.
    ipPrefix = Ip4Prefix.valueOf("255.255.255.255/32");
    assertFalse(ipPrefix.contains(Ip4Prefix.valueOf("1.2.0.0/24")));
    assertFalse(ipPrefix.contains(Ip4Prefix.valueOf("1.2.0.0/32")));
    assertFalse(ipPrefix.contains(Ip4Prefix.valueOf("1.2.0.4/32")));
    assertFalse(ipPrefix.contains(Ip4Prefix.valueOf("1.2.0.0/16")));
    assertFalse(ipPrefix.contains(Ip4Prefix.valueOf("1.3.0.0/24")));
    assertFalse(ipPrefix.contains(Ip4Prefix.valueOf("0.0.0.0/16")));
    assertFalse(ipPrefix.contains(Ip4Prefix.valueOf("0.0.0.0/0")));
    assertTrue(ipPrefix.contains(Ip4Prefix.valueOf("255.255.255.255/32")));
}
|
/**
 * Determines the number of write-behind I/O buffers to use for a hash table
 * backed by {@code numBuffers} memory buffers. The count grows with
 * log4(numBuffers) and is capped at 6; for very small buffer counts the
 * formula can yield values below 1 (callers must tolerate that — behavior
 * preserved from the original implementation).
 *
 * @param numBuffers total number of memory buffers available
 * @return the number of write-behind buffers, at most 6
 */
@VisibleForTesting
static int getNumWriteBehindBuffers(int numBuffers) {
    // log4(numBuffers) - 1.5, truncated toward zero.
    int numIOBufs = (int) (Math.log(numBuffers) / Math.log(4) - 1.5);
    // Idiomatic cap instead of a hand-rolled ternary.
    return Math.min(numIOBufs, 6);
}
|
@Test
public void testIOBufferCountComputation() {
    // Each row is {numBuffers, expectedWriteBehindBuffers}. The thresholds
    // sit at 128, 512, 2048, 8192 and 32768, and the result is capped at 6.
    int[][] expectations = {
        {32, 1}, {33, 1}, {40, 1}, {64, 1}, {127, 1},
        {128, 2}, {129, 2}, {511, 2},
        {512, 3}, {513, 3}, {2047, 3},
        {2048, 4}, {2049, 4}, {8191, 4},
        {8192, 5}, {8193, 5}, {32767, 5},
        {32768, 6}, {Integer.MAX_VALUE, 6}
    };
    for (int[] row : expectations) {
        assertThat(BinaryHashTable.getNumWriteBehindBuffers(row[0])).isEqualTo(row[1]);
    }
}
|
/**
 * Generates the SQL column definition for a field in a Hypersonic (HSQLDB)
 * CREATE/ALTER TABLE statement.
 *
 * @param v the value metadata describing the field
 * @param tk name of the technical key field (gets an identity BIGINT)
 * @param pk name of the primary key field (gets an identity BIGINT)
 * @param useAutoinc unused in this dialect
 * @param addFieldName whether to prefix the definition with the field name
 * @param addCr whether to append a carriage return
 * @return the SQL fragment for this column
 */
@Override
public String getFieldDefinition( ValueMetaInterface v, String tk, String pk, boolean useAutoinc,
  boolean addFieldName, boolean addCr ) {
  StringBuilder retval = new StringBuilder( 128 );
  String fieldname = v.getName();
  int length = v.getLength();
  int precision = v.getPrecision();
  if ( addFieldName ) {
    retval.append( fieldname ).append( ' ' );
  }
  int type = v.getType();
  switch ( type ) {
    case ValueMetaInterface.TYPE_TIMESTAMP:
    case ValueMetaInterface.TYPE_DATE:
      retval.append( "TIMESTAMP" );
      break;
    case ValueMetaInterface.TYPE_BOOLEAN:
      // Fall back to CHAR(1) flags when the dialect has no native BOOLEAN.
      if ( supportsBooleanDataType() ) {
        retval.append( "BOOLEAN" );
      } else {
        retval.append( "CHAR(1)" );
      }
      break;
    case ValueMetaInterface.TYPE_NUMBER:
    case ValueMetaInterface.TYPE_INTEGER:
    case ValueMetaInterface.TYPE_BIGNUMBER:
      if ( fieldname.equalsIgnoreCase( tk ) || // Technical key
        fieldname.equalsIgnoreCase( pk ) // Primary key
      ) {
        // Key columns become auto-incrementing identity primary keys.
        retval.append( "BIGINT GENERATED BY DEFAULT AS IDENTITY(START WITH 0, INCREMENT BY 1) PRIMARY KEY" );
      } else {
        if ( length > 0 ) {
          // Fractional precision, or more digits than a BIGINT holds: NUMERIC.
          if ( precision > 0 || length > 18 ) {
            retval.append( "NUMERIC(" ).append( length ).append( ", " ).append( precision ).append( ')' );
          } else {
            // Integer-valued: pick the smallest integer type that fits.
            if ( length > 9 ) {
              retval.append( "BIGINT" );
            } else {
              if ( length < 5 ) {
                retval.append( "SMALLINT" );
              } else {
                retval.append( "INTEGER" );
              }
            }
          }
        } else {
          // No length information: use a floating-point column.
          retval.append( "DOUBLE PRECISION" );
        }
      }
      break;
    case ValueMetaInterface.TYPE_STRING:
      if ( length >= DatabaseMeta.CLOB_LENGTH ) {
        retval.append( "LONGVARCHAR" );
      } else {
        // NOTE(review): when no length is set this emits a bare "VARCHAR()";
        // the accompanying unit test (PDI-11461) expects exactly that, so the
        // behavior is intentional here.
        retval.append( "VARCHAR" );
        if ( length > 0 ) {
          retval.append( '(' ).append( length );
        } else {
          retval.append( '(' ); // Maybe use some default DB String length?
        }
        retval.append( ')' );
      }
      break;
    default:
      retval.append( " UNKNOWN" );
      break;
  }
  if ( addCr ) {
    retval.append( Const.CR );
  }
  return retval.toString();
}
|
@Test
public void testGetFieldDefinition() throws Exception {
    ValueMetaInterface stringMeta = new ValueMetaString();

    // No length set: the definition degenerates to "VARCHAR()" (PDI-11461).
    String actual = hypersonicDatabaseMeta.getFieldDefinition( stringMeta, null, null, false, false, false );
    assertEquals( "Check PDI-11461 without length", "VARCHAR()", actual );

    // Just below the CLOB threshold a sized VARCHAR is produced.
    stringMeta.setLength( DatabaseMeta.CLOB_LENGTH - 1 );
    actual = hypersonicDatabaseMeta.getFieldDefinition( stringMeta, null, null, false, false, false );
    assertEquals( "Check PDI-11461 with length",
      "VARCHAR(" + ( DatabaseMeta.CLOB_LENGTH - 1 ) + ")", actual );

    // At the threshold the column switches to LONGVARCHAR.
    stringMeta.setLength( DatabaseMeta.CLOB_LENGTH );
    actual = hypersonicDatabaseMeta.getFieldDefinition( stringMeta, null, null, false, false, false );
    assertEquals( "Check PDI-11461 with clob/text length", "LONGVARCHAR", actual );
}
|
/** Returns the value of the {@code isAsync} flag for this instance. */
@Override
public boolean isAsync() {
    return this.isAsync;
}
|
@Test(dataProvider = "caches")
@CacheSpec(population = Population.EMPTY)
public void cacheFactory_loadFactory(
    BoundedLocalCache<Int, Int> cache, CacheContext context) throws Throwable {
  // Looking up a factory by the cache's class name must build an instance of
  // exactly the same implementation class.
  var factory1 = LocalCacheFactory.loadFactory(cache.getClass().getSimpleName());
  var other = factory1.newInstance(context.caffeine(), /* cacheLoader */ null, context.isAsync());
  assertThat(other.getClass()).isEqualTo(cache.getClass());
  // The loaded factory is recorded in the FACTORIES map under the class name...
  assertThat(LocalCacheFactory.FACTORIES).containsEntry(
      cache.getClass().getSimpleName(), factory1);
  // ...so a second lookup returns the identical cached instance.
  var factory2 = LocalCacheFactory.loadFactory(cache.getClass().getSimpleName());
  assertThat(factory2).isSameInstanceAs(factory1);
}
|
/**
 * Creates a {@link Getter} that extracts the given reflective field, chained
 * to the supplied parent getter and reducer/modifier suffix.
 */
public static Getter newFieldGetter(Object object, Getter parent, Field field, String modifier) throws Exception {
    // The field's value is read reflectively; the resolved type information
    // is then wrapped in a FieldGetter bound to the same parent and modifier.
    return newGetter(object, parent, modifier, field.getType(),
            target -> field.get(target),
            (type, elementType) -> new FieldGetter(parent, field, modifier, type, elementType));
}
|
@Test
public void newFieldGetter_whenExtractingFromEmpty_Array_AndReducerSuffixInNotEmpty_thenReturnNullGetter()
        throws Exception {
    // Outer object whose inner array exists but is empty.
    OuterObject object = OuterObject.emptyInner("name");
    // Extract over the empty array using the "[any]" reducer suffix.
    Getter getter = GetterFactory.newFieldGetter(object, null, innersArrayField, "[any]");
    // NOTE(review): the method name says "thenReturnNullGetter" but the body
    // asserts the getter's return type instead — confirm name matches intent.
    Class<?> returnType = getter.getReturnType();
    assertEquals(InnerObject.class, returnType);
}
|
/**
 * REST endpoint returning general cluster information, served as JSON or XML
 * with an explicit UTF-8 charset.
 */
@GET
@Produces({ MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8,
    MediaType.APPLICATION_XML + "; " + JettyUtils.UTF_8 })
@Override
public ClusterInfo get() {
    // Plain delegation; the response body is built by getClusterInfo().
    return getClusterInfo();
}
|
@Test
public void testClusterSlash() throws JSONException, Exception {
    WebResource r = resource();
    // test with trailing "/" to make sure acts same as without slash
    ClientResponse response = r.path("ws").path("v1").path("cluster/")
        .accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
    // The endpoint must honor the JSON Accept header and declare UTF-8.
    assertEquals(MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8,
        response.getType().toString());
    JSONObject json = response.getEntity(JSONObject.class);
    verifyClusterInfo(json);
}
|
/**
 * Validates an update request against an existing function configuration and
 * returns the merged result. Identity fields (tenant, namespace, name) and
 * immutable runtime properties (output serde/schema, processing guarantees,
 * ordering, runtime, auto-ack, subscription name, input topic set) must not
 * change; everything else present in {@code newConfig} overrides the
 * corresponding value in the copy of {@code existingConfig}.
 *
 * NOTE(review): this method mutates {@code newConfig} (it normalizes and
 * fills its input-specs map) as a side effect — confirm callers do not reuse
 * the instance afterwards. Also, {@code existingConfig.getInputSpecs()} is
 * read without a null guard below; verify upstream guarantees it is non-null.
 *
 * @param existingConfig the currently stored configuration
 * @param newConfig the requested update
 * @return a merged configuration based on a copy of {@code existingConfig}
 * @throws IllegalArgumentException if an immutable field would change
 */
public static FunctionConfig validateUpdate(FunctionConfig existingConfig, FunctionConfig newConfig) {
    // Work on a copy so existingConfig itself is never modified.
    FunctionConfig mergedConfig = existingConfig.toBuilder().build();
    // Identity fields can never change.
    if (!existingConfig.getTenant().equals(newConfig.getTenant())) {
        throw new IllegalArgumentException("Tenants differ");
    }
    if (!existingConfig.getNamespace().equals(newConfig.getNamespace())) {
        throw new IllegalArgumentException("Namespaces differ");
    }
    if (!existingConfig.getName().equals(newConfig.getName())) {
        throw new IllegalArgumentException("Function Names differ");
    }
    if (!StringUtils.isEmpty(newConfig.getClassName())) {
        mergedConfig.setClassName(newConfig.getClassName());
    }
    if (!StringUtils.isEmpty(newConfig.getJar())) {
        mergedConfig.setJar(newConfig.getJar());
    }
    // Normalize the various input declarations (inputs, topics pattern,
    // custom serde/schema inputs) into newConfig's input-specs map.
    if (newConfig.getInputSpecs() == null) {
        newConfig.setInputSpecs(new HashMap<>());
    }
    if (mergedConfig.getInputSpecs() == null) {
        mergedConfig.setInputSpecs(new HashMap<>());
    }
    if (newConfig.getInputs() != null) {
        newConfig.getInputs().forEach((topicName -> {
            newConfig.getInputSpecs().put(topicName,
                ConsumerConfig.builder().isRegexPattern(false).build());
        }));
    }
    if (newConfig.getTopicsPattern() != null && !newConfig.getTopicsPattern().isEmpty()) {
        newConfig.getInputSpecs().put(newConfig.getTopicsPattern(),
            ConsumerConfig.builder()
                .isRegexPattern(true)
                .build());
    }
    if (newConfig.getCustomSerdeInputs() != null) {
        newConfig.getCustomSerdeInputs().forEach((topicName, serdeClassName) -> {
            newConfig.getInputSpecs().put(topicName,
                ConsumerConfig.builder()
                    .serdeClassName(serdeClassName)
                    .isRegexPattern(false)
                    .build());
        });
    }
    if (newConfig.getCustomSchemaInputs() != null) {
        newConfig.getCustomSchemaInputs().forEach((topicName, schemaClassname) -> {
            newConfig.getInputSpecs().put(topicName,
                ConsumerConfig.builder()
                    .schemaType(schemaClassname)
                    .isRegexPattern(false)
                    .build());
        });
    }
    // Input topics may only be re-configured, never added or switched between
    // literal and regex form.
    if (!newConfig.getInputSpecs().isEmpty()) {
        newConfig.getInputSpecs().forEach((topicName, consumerConfig) -> {
            if (!existingConfig.getInputSpecs().containsKey(topicName)) {
                throw new IllegalArgumentException("Input Topics cannot be altered");
            }
            if (consumerConfig.isRegexPattern() != existingConfig.getInputSpecs().get(topicName).isRegexPattern()) {
                throw new IllegalArgumentException(
                    "isRegexPattern for input topic " + topicName + " cannot be altered");
            }
            mergedConfig.getInputSpecs().put(topicName, consumerConfig);
        });
    }
    // Immutable runtime properties: reject any attempted change.
    if (!StringUtils.isEmpty(newConfig.getOutputSerdeClassName()) && !newConfig.getOutputSerdeClassName()
        .equals(existingConfig.getOutputSerdeClassName())) {
        throw new IllegalArgumentException("Output Serde mismatch");
    }
    if (!StringUtils.isEmpty(newConfig.getOutputSchemaType()) && !newConfig.getOutputSchemaType()
        .equals(existingConfig.getOutputSchemaType())) {
        throw new IllegalArgumentException("Output Schema mismatch");
    }
    if (!StringUtils.isEmpty(newConfig.getLogTopic())) {
        mergedConfig.setLogTopic(newConfig.getLogTopic());
    }
    if (newConfig.getProcessingGuarantees() != null && !newConfig.getProcessingGuarantees()
        .equals(existingConfig.getProcessingGuarantees())) {
        throw new IllegalArgumentException("Processing Guarantees cannot be altered");
    }
    if (newConfig.getRetainOrdering() != null && !newConfig.getRetainOrdering()
        .equals(existingConfig.getRetainOrdering())) {
        throw new IllegalArgumentException("Retain Ordering cannot be altered");
    }
    if (newConfig.getRetainKeyOrdering() != null && !newConfig.getRetainKeyOrdering()
        .equals(existingConfig.getRetainKeyOrdering())) {
        throw new IllegalArgumentException("Retain Key Ordering cannot be altered");
    }
    if (!StringUtils.isEmpty(newConfig.getOutput())) {
        mergedConfig.setOutput(newConfig.getOutput());
    }
    if (newConfig.getUserConfig() != null) {
        mergedConfig.setUserConfig(newConfig.getUserConfig());
    }
    if (newConfig.getSecrets() != null) {
        mergedConfig.setSecrets(newConfig.getSecrets());
    }
    if (newConfig.getRuntime() != null && !newConfig.getRuntime().equals(existingConfig.getRuntime())) {
        throw new IllegalArgumentException("Runtime cannot be altered");
    }
    if (newConfig.getAutoAck() != null && !newConfig.getAutoAck().equals(existingConfig.getAutoAck())) {
        throw new IllegalArgumentException("AutoAck cannot be altered");
    }
    if (newConfig.getMaxMessageRetries() != null) {
        mergedConfig.setMaxMessageRetries(newConfig.getMaxMessageRetries());
    }
    if (!StringUtils.isEmpty(newConfig.getDeadLetterTopic())) {
        mergedConfig.setDeadLetterTopic(newConfig.getDeadLetterTopic());
    }
    if (!StringUtils.isEmpty(newConfig.getSubName()) && !newConfig.getSubName()
        .equals(existingConfig.getSubName())) {
        throw new IllegalArgumentException("Subscription Name cannot be altered");
    }
    if (newConfig.getParallelism() != null) {
        mergedConfig.setParallelism(newConfig.getParallelism());
    }
    if (newConfig.getResources() != null) {
        mergedConfig
            .setResources(ResourceConfigUtils.merge(existingConfig.getResources(), newConfig.getResources()));
    }
    if (newConfig.getWindowConfig() != null) {
        mergedConfig.setWindowConfig(newConfig.getWindowConfig());
    }
    if (newConfig.getTimeoutMs() != null) {
        mergedConfig.setTimeoutMs(newConfig.getTimeoutMs());
    }
    if (newConfig.getCleanupSubscription() != null) {
        mergedConfig.setCleanupSubscription(newConfig.getCleanupSubscription());
    }
    if (!StringUtils.isEmpty(newConfig.getRuntimeFlags())) {
        mergedConfig.setRuntimeFlags(newConfig.getRuntimeFlags());
    }
    if (!StringUtils.isEmpty(newConfig.getCustomRuntimeOptions())) {
        mergedConfig.setCustomRuntimeOptions(newConfig.getCustomRuntimeOptions());
    }
    if (newConfig.getProducerConfig() != null) {
        mergedConfig.setProducerConfig(newConfig.getProducerConfig());
    }
    return mergedConfig;
}
|
@Test(expectedExceptions = IllegalArgumentException.class, expectedExceptionsMessageRegExp = "Tenants differ")
public void testMergeDifferentTenant() {
    // Updating a function with a different tenant must be rejected.
    FunctionConfig existingConfig = createFunctionConfig();
    FunctionConfig updatedConfig = createUpdatedFunctionConfig("tenant", "Different");
    FunctionConfigUtils.validateUpdate(existingConfig, updatedConfig);
}
|
/**
 * Runs every known {@link Rule} pattern against the given log text and
 * collects a {@link Result} for each rule whose pattern matches.
 *
 * NOTE(review): the method name "anaylze" is a typo for "analyze"; it is
 * kept unchanged for API compatibility.
 */
public static Set<Result> anaylze(String log) {
    Set<Result> detected = new HashSet<>();
    for (Rule rule : Rule.values()) {
        Matcher matcher = rule.pattern.matcher(log);
        if (!matcher.find()) {
            continue;
        }
        detected.add(new Result(rule, log, matcher));
    }
    return detected;
}
|
@Test
public void outOfMemoryJVM1() throws IOException {
    // Analyzing the "out_of_memory2" fixture must yield an OUT_OF_MEMORY result.
    // NOTE(review): `result` is unused and no explicit assertion follows —
    // presumably findResultByRule fails the test when the rule is absent;
    // confirm it asserts internally.
    CrashReportAnalyzer.Result result = findResultByRule(
        CrashReportAnalyzer.anaylze(loadLog("/logs/out_of_memory2.txt")),
        CrashReportAnalyzer.Rule.OUT_OF_MEMORY);
}
|
/**
 * Builds a human-readable audit message for this batch dict change:
 * the comma-joined dict names followed by the lower-cased event type.
 */
@Override
public String buildContext() {
    final Collection<?> changedDicts = (Collection<?>) getSource();
    final String dictNames = changedDicts
            .stream()
            .map(source -> ((ShenyuDictDO) source).getDictName())
            .collect(Collectors.joining(","));
    return String.format("the shenyu dict[%s] is %s", dictNames, StringUtils.lowerCase(getType().getType().toString()));
}
|
@Test
public void batchChangeDictContextTest() {
    // A batch event over two dicts must list both names, comma separated,
    // followed by the lower-cased event type.
    BatchDictChangedEvent batchDictChangedEvent =
        new BatchDictChangedEvent(Arrays.asList(one, two), null, EventTypeEnum.DICT_UPDATE, "test-operator");
    String context = String.format("the shenyu dict[%s] is %s", "one,two", EventTypeEnum.DICT_UPDATE.getType().toString().toLowerCase());
    assertEquals(context, batchDictChangedEvent.buildContext());
}
|
/**
 * Coerces {@code valueToCoerce} toward {@code requiredType}, delegating the
 * actual conversion to {@code actualCoerceValue}. A null type or null value
 * is passed through unchanged.
 */
public static Object coerceValue(DMNType requiredType, Object valueToCoerce) {
    if (requiredType == null || valueToCoerce == null) {
        return valueToCoerce;
    }
    return actualCoerceValue(requiredType, valueToCoerce);
}
|
@Test
void coerceValueCollectionToArrayConverted() {
    // Coercing a singleton collection to a simple (string) type must unwrap
    // it to its single element.
    Object item = "TESTED_OBJECT";
    Object value = Collections.singleton(item);
    DMNType requiredType = new SimpleTypeImpl("http://www.omg.org/spec/DMN/20180521/FEEL/",
                                              "string",
                                              null,
                                              false,
                                              null,
                                              null,
                                              null,
                                              BuiltInType.STRING);
    Object retrieved = CoerceUtil.coerceValue(requiredType, value);
    assertNotNull(retrieved);
    assertEquals(item, retrieved);
}
|
/**
 * Walks the converter chain starting at {@code headTokenConverter} and
 * returns the first {@link DateTokenConverter} flagged as primary, or
 * {@code null} when the chain contains none.
 */
public DateTokenConverter<Object> getPrimaryDateTokenConverter() {
    for (Converter<Object> current = headTokenConverter; current != null; current = current.getNext()) {
        if (current instanceof DateTokenConverter) {
            DateTokenConverter<Object> candidate = (DateTokenConverter<Object>) current;
            // Only primary converters qualify; auxiliary ones are skipped.
            if (candidate.isPrimary()) {
                return candidate;
            }
        }
    }
    return null;
}
|
@Test
public void settingTimeZoneOptionHasAnEffect() {
    // The time zone passed as an option inside %d{...} must be propagated to
    // the primary date token converter.
    TimeZone tz = TimeZone.getTimeZone("Australia/Perth");
    FileNamePattern fnp = new FileNamePattern("%d{hh, " + tz.getID() + "}", context);
    assertEquals(tz, fnp.getPrimaryDateTokenConverter().getTimeZone());
}
|
/**
 * Synchronous variant of {@code containsEachAsync(Collection)}: blocks on the
 * async call and returns its result.
 */
@Override
public List<V> containsEach(Collection<V> c) {
    return get(containsEachAsync(c));
}
|
@Test
public void testContainsEach() {
    RSet<Integer> set = redisson.getSet("list", IntegerCodec.INSTANCE);
    set.add(0);
    set.add(1);
    // An empty query yields an empty result.
    assertThat(set.containsEach(Collections.emptySet())).isEmpty();
    // Only the queried elements present in the set are returned.
    assertThat(set.containsEach(Arrays.asList(0, 1)))
        .hasSize(2)
        .containsOnly(0, 1);
    assertThat(set.containsEach(Arrays.asList(0, 1, 2)))
        .hasSize(2)
        .containsOnly(0, 1);
    // NOTE(review): duplicates in the query are echoed in the result (0
    // appears twice, hence size 3) — confirm this duplication is the
    // intended contract.
    assertThat(set.containsEach(Arrays.asList(0, 1, 0, 2)))
        .hasSize(3)
        .containsOnly(0, 1, 0);
    // No overlap at all yields an empty result.
    assertThat(set.containsEach(Arrays.asList(2, 3, 4)))
        .hasSize(0);
}
|
/**
 * Deep equality check used for JSON-mapped values. Fast paths, in order:
 * identity, nulls, strings, collections/maps, then same-class values that
 * actually override equals(). Anything else is compared by serializing both
 * values and comparing the parsed JSON trees.
 */
@Override
public boolean areEqual(Object one, Object another) {
    if (one == another) {
        return true;
    }
    if (one == null || another == null) {
        return false;
    }
    if (one instanceof String && another instanceof String) {
        return one.equals(another);
    }
    if ((one instanceof Collection && another instanceof Collection) ||
        (one instanceof Map && another instanceof Map)) {
        return Objects.equals(one, another);
    }
    if (one.getClass().equals(another.getClass())) {
        // Only trust equals() when the class declares its own override; the
        // inherited Object.equals is identity, already handled above.
        Method equalsMethod = ReflectionUtils.getMethodOrNull(one.getClass(), "equals", Object.class);
        if (equalsMethod != null && !Object.class.equals(equalsMethod.getDeclaringClass())) {
            return one.equals(another);
        }
    }
    // Fallback: compare the parsed JSON trees of both values' serialized forms.
    return objectMapperWrapper.toJsonNode(objectMapperWrapper.toString(one)).equals(
        objectMapperWrapper.toJsonNode(objectMapperWrapper.toString(another))
    );
}
|
@Test
public void testClassesWithoutEqualsMethodShouldEqualAsJsonNodes() {
    // Two distinct instances with identical state and no equals() override
    // must still compare equal via their JSON representations.
    JsonTypeDescriptor jsonTypeDescriptor = new JsonTypeDescriptor();
    FormWithoutEqualsMethod left = new FormWithoutEqualsMethod("value1");
    FormWithoutEqualsMethod right = new FormWithoutEqualsMethod("value1");
    assertTrue(jsonTypeDescriptor.areEqual(left, right));
}
|
/**
 * Collects every address bound to every network interface on this machine
 * into a single unmodifiable list.
 *
 * @throws SocketException if the interfaces cannot be enumerated
 */
public List<InetAddress> getNetworkInterfaceAddresses() throws SocketException {
    var interfaces = Collections.list(NetworkInterface.getNetworkInterfaces());
    return interfaces
            .stream()
            .flatMap(networkInterface -> Collections.list(networkInterface.getInetAddresses()).stream())
            .toList();
}
|
@Test
public void itGetsListOfNetworkInterfaceAddresses() throws SocketException {
    // Smoke test: any machine running the suite should expose at least one
    // interface address (e.g. loopback).
    assertThat(underTest.getNetworkInterfaceAddresses())
        .isInstanceOf(List.class)
        .hasSizeGreaterThan(0);
}
|
/**
 * Stub implementation: always returns {@code null}, ignoring the requested
 * service name and group.
 */
@Override
public Service queryService(String serviceName, String groupName) throws NacosException {
    return null;
}
|
@Test
void testQueryService() throws Exception {
    // Expect null: the implementation under test answers queries with null.
    Service service = client.queryService(SERVICE_NAME, GROUP_NAME);
    assertNull(service);
}
|
/**
 * Plans the next batch of splits. The table is refreshed first so planning
 * sees the latest metadata; without a previous position an initial discovery
 * runs, otherwise discovery continues incrementally from that position.
 */
@Override
public ContinuousEnumerationResult planSplits(IcebergEnumeratorPosition lastPosition) {
    table.refresh();
    if (lastPosition == null) {
        return discoverInitialSplits();
    }
    return discoverIncrementalSplits(lastPosition);
}
|
@Test
public void testIncrementalFromLatestSnapshotWithEmptyTable() throws Exception {
    ScanContext scanContext =
        ScanContext.builder()
            .startingStrategy(StreamingStartingStrategy.INCREMENTAL_FROM_LATEST_SNAPSHOT)
            .splitSize(1L)
            .build();
    ContinuousSplitPlannerImpl splitPlanner =
        new ContinuousSplitPlannerImpl(TABLE_RESOURCE.tableLoader().clone(), scanContext, null);
    // First planning pass on an empty table: no splits, no starting position.
    ContinuousEnumerationResult emptyTableInitialDiscoveryResult = splitPlanner.planSplits(null);
    assertThat(emptyTableInitialDiscoveryResult.splits()).isEmpty();
    assertThat(emptyTableInitialDiscoveryResult.fromPosition()).isNull();
    assertThat(emptyTableInitialDiscoveryResult.toPosition().isEmpty()).isTrue();
    assertThat(emptyTableInitialDiscoveryResult.toPosition().snapshotTimestampMs()).isNull();
    // Second pass resuming from the empty position stays empty on both ends.
    ContinuousEnumerationResult emptyTableSecondDiscoveryResult =
        splitPlanner.planSplits(emptyTableInitialDiscoveryResult.toPosition());
    assertThat(emptyTableSecondDiscoveryResult.splits()).isEmpty();
    assertThat(emptyTableSecondDiscoveryResult.fromPosition().isEmpty()).isTrue();
    assertThat(emptyTableSecondDiscoveryResult.fromPosition().snapshotTimestampMs()).isNull();
    assertThat(emptyTableSecondDiscoveryResult.toPosition().isEmpty()).isTrue();
    assertThat(emptyTableSecondDiscoveryResult.toPosition().snapshotTimestampMs()).isNull();
    // latest mode should discover both snapshots, as latest position is marked by when job starts
    appendTwoSnapshots();
    ContinuousEnumerationResult afterTwoSnapshotsAppended =
        splitPlanner.planSplits(emptyTableSecondDiscoveryResult.toPosition());
    assertThat(afterTwoSnapshotsAppended.splits()).hasSize(2);
    // next 3 snapshots
    IcebergEnumeratorPosition lastPosition = afterTwoSnapshotsAppended.toPosition();
    for (int i = 0; i < 3; ++i) {
        lastPosition = verifyOneCycle(splitPlanner, lastPosition).lastPosition;
    }
}
|
/**
 * Creates a CSV parser producing {@link Row}s for the given schema, after
 * validating that the format is usable on its own and compatible with the
 * schema. Rows pass through unchanged (identity from-row function).
 */
public static CsvIOParse<Row> parseRows(Schema schema, CSVFormat csvFormat) {
    CsvIOParseHelpers.validateCsvFormat(csvFormat);
    CsvIOParseHelpers.validateCsvFormatWithSchema(csvFormat, schema);
    CsvIOParseConfiguration.Builder<Row> configBuilder = CsvIOParseConfiguration.builder();
    configBuilder
        .setCsvFormat(csvFormat)
        .setSchema(schema)
        .setCoder(RowCoder.of(schema))
        .setFromRowFn(row -> row);
    return CsvIOParse.<Row>builder().setConfigBuilder(configBuilder).build();
}
|
@Test
public void givenInvalidCsvFormat_throws() {
    // NOTE(review): the pipeline below is never populated; creating and
    // running an empty pipeline looks unnecessary for this negative test —
    // confirm whether it can be removed.
    Pipeline pipeline = Pipeline.create();
    // Allowing duplicate header names makes the format invalid for schema
    // mapping, so parseRows must reject it.
    CSVFormat csvFormat =
        CSVFormat.DEFAULT
            .withHeader("a_string", "an_integer", "a_double")
            .withAllowDuplicateHeaderNames(true);
    Schema schema =
        Schema.builder()
            .addStringField("a_string")
            .addInt32Field("an_integer")
            .addDoubleField("a_double")
            .build();
    assertThrows(IllegalArgumentException.class, () -> CsvIO.parseRows(schema, csvFormat));
    pipeline.run();
}
|
/**
 * Loads the config with the given id, or returns {@code null} when no id was
 * supplied.
 *
 * @param id the config id, may be null
 * @return the config, or null when {@code id} is null
 * @throws exception CONFIG_NOT_EXISTS when the id resolves to no config
 */
@VisibleForTesting
public ConfigDO validateConfigExists(Long id) {
    if (id == null) {
        return null;
    }
    ConfigDO config = configMapper.selectById(id);
    if (config != null) {
        return config;
    }
    throw exception(CONFIG_NOT_EXISTS);
}
|
@Test
public void testValidateConfigExist_notExists() {
    // A random, non-existent id must raise CONFIG_NOT_EXISTS.
    assertServiceException(() -> configService.validateConfigExists(randomLongId()), CONFIG_NOT_EXISTS);
}
|
/**
 * Fetches a single execution by id, scoped to the caller's resolved tenant.
 * Returns {@code null} when no execution matches.
 */
@ExecuteOn(TaskExecutors.IO)
@Get(uri = "/{executionId}")
@Operation(tags = {"Executions"}, summary = "Get an execution")
public Execution get(
    @Parameter(description = "The execution id") @PathVariable String executionId
) {
    return executionRepository
        .findById(tenantService.resolveTenant(), executionId)
        .orElse(null);
}
|
@Test
void get() {
    // Trigger a flow first so there is a known execution to fetch.
    Execution result = triggerInputsFlowExecution(false);
    // Get the triggered execution by execution id
    Execution foundExecution = client.retrieve(
        GET("/api/v1/executions/" + result.getId()),
        Execution.class
    ).block();
    // The fetched execution mirrors the triggered one.
    assertThat(foundExecution, is(notNullValue()));
    assertThat(foundExecution.getId(), is(result.getId()));
    assertThat(foundExecution.getNamespace(), is(result.getNamespace()));
}
|
/**
 * Validates that the given expression holds.
 *
 * @param expression   the condition to validate
 * @param errorMessage message for the exception raised on failure
 * @throws IllegalArgumentException if {@code expression} is {@code false}
 */
public static void checkTrue(boolean expression, String errorMessage) {
    if (expression) {
        return;
    }
    throw new IllegalArgumentException(errorMessage);
}
|
@Test
public void test_checkTrue_whenTrue() {
    // A true expression must pass silently, without throwing.
    checkTrue(true, "must be true");
}
|
/**
 * Builds key/value metadata for a JSON mapping: every resolved field becomes
 * a non-hidden table field keyed by its query path, and a default OBJECT
 * field is added when the key/value side would otherwise be missing.
 */
@Override
public KvMetadata resolveMetadata(
    boolean isKey,
    List<MappingField> resolvedFields,
    Map<String, String> options,
    InternalSerializationService serializationService
) {
    Map<QueryPath, MappingField> fieldsByPath = extractFields(resolvedFields, isKey);
    List<TableField> tableFields = new ArrayList<>();
    for (Entry<QueryPath, MappingField> fieldEntry : fieldsByPath.entrySet()) {
        MappingField mappingField = fieldEntry.getValue();
        tableFields.add(new MapTableField(
            mappingField.name(), mappingField.type(), false, fieldEntry.getKey()));
    }
    maybeAddDefaultField(isKey, resolvedFields, tableFields, QueryDataType.OBJECT);
    return new KvMetadata(
        tableFields,
        JsonQueryTargetDescriptor.INSTANCE,
        JsonUpsertTargetDescriptor.INSTANCE
    );
}
|
@Test
@Parameters({
    "true",
    "false"
})
public void when_noKeyOrThisPrefixInExternalName_then_usesValue(boolean key) {
    // Resolve metadata for one mapped field whose external name carries no
    // __key/this prefix.
    KvMetadata metadata = INSTANCE.resolveMetadata(
        key,
        singletonList(field("field", QueryDataType.INT, "extField")),
        emptyMap(),
        null
    );
    // Key side: only the implicit hidden __key field is produced.
    // Value side: the mapped field plus the implicit hidden "this" field.
    assertThat(metadata.getFields()).containsExactly(
        key
            ? new MapTableField[]{
                new MapTableField("__key", QueryDataType.OBJECT, true, QueryPath.KEY_PATH)
            }
            : new MapTableField[]{
                new MapTableField("field", QueryDataType.INT, false, new QueryPath("extField", false)),
                new MapTableField("this", QueryDataType.OBJECT, true, QueryPath.VALUE_PATH)
            });
}
|
/**
 * Parses an application id string into an {@code ApplicationId}.
 *
 * @param recordFactory ignored; retained only for source compatibility
 * @param applicationIdStr the id string to parse
 * @deprecated use {@code ApplicationId.fromString(String)} directly
 */
@Public
@Deprecated
public static ApplicationId toApplicationId(RecordFactory recordFactory,
    String applicationIdStr) {
  // Straight delegation; the recordFactory argument is not used.
  return ApplicationId.fromString(applicationIdStr);
}
|
@Test
@SuppressWarnings("deprecation")
void testApplicationId() {
    // A malformed application id string must be rejected.
    assertThrows(IllegalArgumentException.class, () -> {
        ConverterUtils.toApplicationId("application_1423221031460");
    });
}
|
/**
 * Matches {@code value} against a glob-style {@code pattern} where '?'
 * matches exactly one character and '*' matches zero or more characters; all
 * other characters must match literally. When the pattern contains at least
 * one '*', matching proceeds in three phases: the literal prefix before the
 * first '*', the literal suffix after the last '*', and then each
 * '*'-delimited inner fragment searched left-to-right in the remaining value.
 *
 * @param pattern the glob pattern
 * @param value the string to test
 * @return true if the value matches the pattern
 */
protected boolean matches(String pattern, String value) {
    char[] patArr = pattern.toCharArray();
    char[] valArr = value.toCharArray();
    int patIndex = 0;
    int patEndIndex = patArr.length - 1;
    int valIndex = 0;
    int valEndIndex = valArr.length - 1;
    char ch;
    boolean patternContainsStar = false;
    for (char patternChar : patArr) {
        if (patternChar == '*') {
            patternContainsStar = true;
            break;
        }
    }
    if (!patternContainsStar) {
        // No '*'s, so we make a shortcut
        if (patEndIndex != valEndIndex) {
            return false; // Pattern and string do not have the same size
        }
        for (int i = 0; i <= patEndIndex; i++) {
            ch = patArr[i];
            if (ch != '?') {
                if (ch != valArr[i]) {
                    return false;// Character mismatch
                }
            }
        }
        return true; // String matches against pattern
    }
    // Process characters before first star
    while ((ch = patArr[patIndex]) != '*' && valIndex <= valEndIndex) {
        if (ch != '?') {
            if (ch != valArr[valIndex]) {
                return false;// Character mismatch
            }
        }
        patIndex++;
        valIndex++;
    }
    if (valIndex > valEndIndex) {
        // All characters in the value are used. Check if only '*'s remain
        // in the pattern. If so, we succeeded. Otherwise failure.
        for (int i = patIndex; i <= patEndIndex; i++) {
            if (patArr[i] != '*') {
                return false;
            }
        }
        return true;
    }
    // Process characters after last star
    while ((ch = patArr[patEndIndex]) != '*' && valIndex <= valEndIndex) {
        if (ch != '?') {
            if (ch != valArr[valEndIndex]) {
                return false;// Character mismatch
            }
        }
        patEndIndex--;
        valEndIndex--;
    }
    if (valIndex > valEndIndex) {
        // All characters in the value are used. Check if only '*'s remain
        // in the pattern. If so, we succeeded. Otherwise failure.
        for (int i = patIndex; i <= patEndIndex; i++) {
            if (patArr[i] != '*') {
                return false;
            }
        }
        return true;
    }
    // process pattern between stars. patIndex and patEndIndex always point to a '*'.
    while (patIndex != patEndIndex && valIndex <= valEndIndex) {
        // Locate the next '*' after patIndex to delimit the inner fragment.
        int innerPatternIndex = -1;
        for (int i = patIndex + 1; i <= patEndIndex; i++) {
            if (patArr[i] == '*') {
                innerPatternIndex = i;
                break;
            }
        }
        if (innerPatternIndex == patIndex + 1) {
            // Two stars next to each other, skip the first one.
            patIndex++;
            continue;
        }
        // Find the pattern between patIndex & innerPatternIndex in the value between
        // valIndex and valEndIndex
        int innerPatternLength = (innerPatternIndex - patIndex - 1);
        int innerValueLength = (valEndIndex - valIndex + 1);
        int foundIndex = -1;
        innerValueLoop:
        for (int i = 0; i <= innerValueLength - innerPatternLength; i++) {
            for (int j = 0; j < innerPatternLength; j++) {
                ch = patArr[patIndex + j + 1];
                if (ch != '?') {
                    if (ch != valArr[valIndex + i + j]) {
                        continue innerValueLoop;
                    }
                }
            }
            foundIndex = valIndex + i;
            break;
        }
        if (foundIndex == -1) {
            return false;
        }
        // Consume the matched fragment and continue after it.
        patIndex = innerPatternIndex;
        valIndex = foundIndex + innerPatternLength;
    }
    // All characters in the string are used. Check if only '*'s are left
    // in the pattern. If so, we succeeded. Otherwise failure.
    for (int i = patIndex; i <= patEndIndex; i++) {
        if (patArr[i] != '*') {
            return false;
        }
    }
    return true;
}
|
@Test
public void testMatches() {
    // Literal matching (no wildcards): sizes and characters must agree.
    assertMatch("x", "x");
    assertNoMatch("x", "y");
    assertMatch("xx", "xx");
    assertNoMatch("xy", "xz");
    // '?' matches exactly one arbitrary character.
    assertMatch("?", "x");
    assertMatch("x?", "xy");
    assertMatch("?y", "xy");
    assertMatch("x?z", "xyz");
    // '*' matches zero or more characters, at either end or in the middle.
    assertMatch("*", "x");
    assertMatch("x*", "x");
    assertMatch("x*", "xy");
    assertMatch("xy*", "xy");
    assertMatch("xy*", "xyz");
    assertMatch("*x", "x");
    assertNoMatch("*x", "y");
    assertMatch("*x", "wx");
    assertNoMatch("*x", "wz");
    assertMatch("*x", "vwx");
    assertMatch("x*z", "xz");
    assertMatch("x*z", "xyz");
    assertMatch("x*z", "xyyz");
    // Combined '*' and '?' fragments, including adjacent stars.
    assertNoMatch("ab*t?z", "abz");
    assertNoMatch("ab*d*yz", "abcdz");
    assertMatch("ab**cd**ef*yz", "abcdefyz");
    assertMatch("a*c?*z", "abcxyz");
    assertMatch("a*cd*z", "abcdxyz");
    // Colon-separated values: the pattern only needs to cover the first part
    // when no separator appears in the pattern itself.
    assertMatch("*", "x:x");
    assertMatch("*", "x:x:x");
    assertMatch("x", "x:y");
    assertMatch("x", "x:y:z");
    assertMatch("foo?armat*", "foobarmatches");
    assertMatch("f*", "f");
    assertNoMatch("foo", "f");
    assertMatch("fo*b", "foob");
    assertNoMatch("fo*b*r", "fooba");
    assertNoMatch("foo*", "f");
    assertMatch("t*k?ou", "thankyou");
    assertMatch("he*l*world", "helloworld");
    assertNoMatch("foo", "foob");
    // Realistic JMX/ActiveMQ object-name patterns.
    assertMatch("*:ActiveMQ.Advisory", "foo:ActiveMQ.Advisory");
    assertNoMatch("*:ActiveMQ.Advisory", "foo:ActiveMQ.Advisory.");
    assertMatch("*:ActiveMQ.Advisory*", "foo:ActiveMQ.Advisory");
    assertMatch("*:ActiveMQ.Advisory*", "foo:ActiveMQ.Advisory.");
    assertMatch("*:ActiveMQ.Advisory.*", "foo:ActiveMQ.Advisory.Connection");
    assertMatch("*:ActiveMQ.Advisory*:read", "foo:ActiveMQ.Advisory.Connection:read");
    assertNoMatch("*:ActiveMQ.Advisory*:read", "foo:ActiveMQ.Advisory.Connection:write");
    assertMatch("*:ActiveMQ.Advisory*:*", "foo:ActiveMQ.Advisory.Connection:read");
    assertMatch("*:ActiveMQ.Advisory*:*", "foo:ActiveMQ.Advisory.");
    assertMatch("topic", "topic:TEST:*");
    assertNoMatch("*:ActiveMQ*", "topic:TEST:*");
    assertMatch("topic:ActiveMQ.Advisory*", "topic:ActiveMQ.Advisory.Connection:create");
    assertMatch("foo?ar", "foobar");
    assertMatch("queue:*:read,write", "queue:testqueue:read");
    assertMatch("queue:*:read,write", "queue:test*:read,write");
    assertNoMatch("queue:*:read,write", "queue:*:read,write,delete");
}
|
/**
 * Submits an application through the federation router. Validates the
 * request, then retries submission (capped at the number of active
 * sub-clusters) while tracking failed sub-clusters in a blacklist; updates
 * router metrics and writes audit log entries for every outcome.
 *
 * @param newApp the submission context; must carry a well-formed application id
 * @param hsr the originating HTTP request
 * @return 200-style response on success, BAD_REQUEST for invalid input,
 *         SERVICE_UNAVAILABLE when all retries fail
 */
@Override
public Response submitApplication(ApplicationSubmissionContextInfo newApp, HttpServletRequest hsr)
    throws AuthorizationException, IOException, InterruptedException {
  long startTime = clock.getTime();
  // We verify the parameters to ensure that newApp is not empty and
  // that the format of applicationId is correct.
  if (newApp == null || newApp.getApplicationId() == null) {
    routerMetrics.incrAppsFailedSubmitted();
    String errMsg = "Missing ApplicationSubmissionContextInfo or "
        + "applicationSubmissionContext information.";
    RouterAuditLogger.logFailure(getUser().getShortUserName(), SUBMIT_NEW_APP, UNKNOWN,
        TARGET_WEB_SERVICE, errMsg);
    return Response.status(Status.BAD_REQUEST).entity(errMsg).build();
  }
  try {
    String applicationId = newApp.getApplicationId();
    RouterServerUtil.validateApplicationId(applicationId);
  } catch (IllegalArgumentException e) {
    // Malformed application id: reject before contacting any sub-cluster.
    routerMetrics.incrAppsFailedSubmitted();
    RouterAuditLogger.logFailure(getUser().getShortUserName(), SUBMIT_NEW_APP, UNKNOWN,
        TARGET_WEB_SERVICE, e.getMessage());
    return Response.status(Status.BAD_REQUEST).entity(e.getLocalizedMessage()).build();
  }
  // Sub-clusters that already failed this submission; shared across retries.
  List<SubClusterId> blackList = new ArrayList<>();
  try {
    int activeSubClustersCount = federationFacade.getActiveSubClustersCount();
    int actualRetryNums = Math.min(activeSubClustersCount, numSubmitRetries);
    Response response = ((FederationActionRetry<Response>) (retryCount) ->
        invokeSubmitApplication(newApp, blackList, hsr, retryCount)).
        runWithRetries(actualRetryNums, submitIntervalTime);
    if (response != null) {
      long stopTime = clock.getTime();
      routerMetrics.succeededAppsSubmitted(stopTime - startTime);
      return response;
    }
  } catch (Exception e) {
    routerMetrics.incrAppsFailedSubmitted();
    RouterAuditLogger.logFailure(getUser().getShortUserName(), SUBMIT_NEW_APP, UNKNOWN,
        TARGET_WEB_SERVICE, e.getMessage());
    return Response.status(Status.SERVICE_UNAVAILABLE).entity(e.getLocalizedMessage()).build();
  }
  // Retries exhausted without a response: report the submission as failed.
  routerMetrics.incrAppsFailedSubmitted();
  String errMsg = String.format("Application %s with appId %s failed to be submitted.",
      newApp.getApplicationName(), newApp.getApplicationId());
  LOG.error(errMsg);
  RouterAuditLogger.logFailure(getUser().getShortUserName(), SUBMIT_NEW_APP, UNKNOWN,
      TARGET_WEB_SERVICE, errMsg);
  return Response.status(Status.SERVICE_UNAVAILABLE).entity(errMsg).build();
}
|
@Test
public void testSubmitApplicationWrongFormat() throws IOException, InterruptedException {
    // A malformed application id must be rejected with BAD_REQUEST before
    // any sub-cluster is contacted.
    ApplicationSubmissionContextInfo submissionContext = new ApplicationSubmissionContextInfo();
    submissionContext.setApplicationId("Application_wrong_id");
    Response response = interceptor.submitApplication(submissionContext, null);
    Assert.assertEquals(BAD_REQUEST, response.getStatus());
}
|
/**
 * Scans the given classloader's classpath (without descending into nested
 * jars) and returns the absolute paths of the entries found.
 */
@Override
public List<String> detect(ClassLoader classLoader) {
    return classGraph
        .disableNestedJarScanning()
        .addClassLoader(classLoader)
        .scan(1)
        .getClasspathFiles()
        .stream()
        .map(File::getAbsolutePath)
        .collect(Collectors.toList());
}
|
@Test
public void shouldNotDetectClassPathResourceThatIsNotAFile() throws Exception {
    // A classloader entry that is an HTTP URL, not a local file, must not
    // appear among the detected classpath resources.
    String url = "http://www.example.com/all-the-secrets.jar";
    ClassLoader classLoader = new URLClassLoader(new URL[] {new URL(url)});
    ClasspathScanningResourcesDetector detector =
        new ClasspathScanningResourcesDetector(new ClassGraph());
    List<String> result = detector.detect(classLoader);
    assertThat(result, not(hasItem(containsString(url))));
}
|
/**
 * Returns an unmodifiable, lazily-translated view mapping topic ids to topic
 * names, backed by {@code topicsById}.
 */
public Map<Uuid, String> topicIdToNameView() {
    return new TranslatedValueMapView<>(topicsById, TopicImage::name);
}
|
@Test
public void testTopicIdToNameView() {
    Map<Uuid, String> map = IMAGE1.topicIdToNameView();
    // Known topic ids resolve to their names...
    assertTrue(map.containsKey(FOO_UUID));
    assertEquals("foo", map.get(FOO_UUID));
    assertTrue(map.containsKey(BAR_UUID));
    assertEquals("bar", map.get(BAR_UUID));
    // ...and unknown ids are absent.
    assertFalse(map.containsKey(BAZ_UUID));
    assertNull(map.get(BAZ_UUID));
    // Iterating the values view yields exactly the known names.
    HashSet<String> names = new HashSet<>();
    map.values().iterator().forEachRemaining(names::add);
    HashSet<String> expectedNames = new HashSet<>(Arrays.asList("foo", "bar"));
    assertEquals(expectedNames, names);
    // The view is unmodifiable: mutation attempts must fail.
    assertThrows(UnsupportedOperationException.class, () -> map.remove(FOO_UUID));
    assertThrows(UnsupportedOperationException.class, () -> map.put(FOO_UUID, "bar"));
}
|
@Override
public boolean addClass(final Class<?> stepClass) {
    // Registration is idempotent: a class already on record is skipped, and
    // the method always reports success.
    if (!stepClasses.contains(stepClass)) {
        checkNoComponentAnnotations(stepClass);
        // Only one glue class may carry @CucumberContextConfiguration; record
        // it so the Spring context can later be built from it.
        if (hasCucumberContextConfiguration(stepClass)) {
            checkOnlyOneClassHasCucumberContextConfiguration(stepClass);
            withCucumberContextConfiguration = stepClass;
        }
        stepClasses.add(stepClass);
    }
    return true;
}
|
@Test
void shouldFailIfMultipleClassesWithSpringAnnotationsAreFound() {
    final ObjectFactory factory = new SpringFactory();
    factory.addClass(WithSpringAnnotations.class);

    // Registering a second @CucumberContextConfiguration class must fail fast
    // with a message naming both offending classes.
    CucumberBackendException thrown = assertThrows(
        CucumberBackendException.class,
        () -> factory.addClass(BellyStepDefinitions.class));

    assertThat(thrown.getMessage(), startsWith(
        "Glue class class io.cucumber.spring.contextconfig.BellyStepDefinitions and class io.cucumber.spring.SpringFactoryTest$WithSpringAnnotations are both (meta-)annotated with @CucumberContextConfiguration.\n"
            +
            "Please ensure only one class configures the spring context"));
}
|
@Override
public InetSocketAddress resolveHost() {
    // Read the field once into a local so every check and index computation
    // below operates on the same list instance.
    List<InetSocketAddress> list = addressList;
    checkState(list != null, "No service url is provided yet");
    // Templated form: the failure message is only formatted when the check
    // actually fails, instead of concatenating the string on every call.
    checkState(!list.isEmpty(), "No hosts found for service url : %s", serviceUrl);
    if (list.size() == 1) {
        return list.get(0);
    }
    // Round-robin selection: atomically advance the shared index and pick the
    // successor of the value we observed. The outer modulo guards against the
    // list having shrunk since the stored index was last wrapped.
    int originalIndex = CURRENT_INDEX_UPDATER.getAndUpdate(this, last -> (last + 1) % list.size());
    return list.get((originalIndex + 1) % list.size());
}
|
// Resolving before any service URL has been provided must fail with
// IllegalStateException (the resolver has no address list to pick from).
@Test(expectedExceptions = IllegalStateException.class)
public void testResolveBeforeUpdateServiceUrl() {
    resolver.resolveHost();
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.