focal_method (string, length 13–60.9k) | test_case (string, length 25–109k)
|---|---|
@Override
public MaintenanceDomain decode(ObjectNode json, CodecContext context) {
    // Decodes a JSON payload of the form {"md": {...}} into a MaintenanceDomain.
    // Returns null for a missing or non-object payload.
    if (json == null || !json.isObject()) {
        return null;
    }
    JsonNode mdNode = json.get(MD);
    // NOTE(review): mdNode is not null-checked; a payload without an "md"
    // member would NPE on the next line — confirm callers guarantee it.
    String mdName = nullIsIllegal(mdNode.get(MD_NAME), "mdName is required").asText();
    // Name type defaults to CHARACTERSTRING when not supplied.
    String mdNameType = MdId.MdNameType.CHARACTERSTRING.name();
    if (mdNode.get(MD_NAME_TYPE) != null) {
        mdNameType = mdNode.get(MD_NAME_TYPE).asText();
    }
    try {
        MdId mdId = MdMaNameUtil.parseMdName(mdNameType, mdName);
        MaintenanceDomain.MdBuilder builder = DefaultMaintenanceDomain.builder(mdId);
        // Optional maintenance-domain level.
        JsonNode mdLevelNode = mdNode.get(MD_LEVEL);
        if (mdLevelNode != null) {
            MdLevel mdLevel = MdLevel.valueOf(mdLevelNode.asText());
            builder = builder.mdLevel(mdLevel);
        }
        // Optional numeric id, narrowed to short.
        JsonNode mdNumericIdNode = mdNode.get(MD_NUMERIC_ID);
        if (mdNumericIdNode != null) {
            short mdNumericId = (short) mdNumericIdNode.asInt();
            builder = builder.mdNumericId(mdNumericId);
        }
        return builder.build();
    } catch (CfmConfigException e) {
        // Surface configuration problems as an unchecked argument error.
        throw new IllegalArgumentException(e);
    }
}
|
@Test
public void testDecodeMd2() throws IOException {
    // Decode an MD given with an explicit DOMAINNAME name type and verify
    // that unspecified attributes fall back to their defaults.
    String mdString = "{\"md\": { \"mdName\": \"test.opennetworking.org\"," +
            "\"mdNameType\": \"DOMAINNAME\"}}";
    InputStream input = new ByteArrayInputStream(
            mdString.getBytes(StandardCharsets.UTF_8));
    JsonNode cfg = mapper.readTree(input);
    MaintenanceDomain mdDecode1 = context
            .codec(MaintenanceDomain.class).decode((ObjectNode) cfg, context);
    assertEquals(MDID2_DOMAIN, mdDecode1.mdId());
    // Level and numeric id were not supplied, so defaults apply.
    assertEquals(MaintenanceDomain.MdLevel.LEVEL0, mdDecode1.mdLevel());
    assertEquals(0, mdDecode1.mdNumericId());
}
|
@Override
public void loginFailure(HttpRequest request, AuthenticationException e) {
    // Logs a failed login attempt at DEBUG level with cause, auth
    // method/provider, caller IPs and the (flood-protected) login.
    checkRequest(request);
    requireNonNull(e, "AuthenticationException can't be null");
    // Cheap early-out: skip building the log arguments when DEBUG is off.
    if (!LOGGER.isDebugEnabled()) {
        return;
    }
    Source source = e.getSource();
    LOGGER.debug("login failure [cause|{}][method|{}][provider|{}|{}][IP|{}|{}][login|{}]",
        emptyIfNull(e.getMessage()),
        source.getMethod(), source.getProvider(), source.getProviderName(),
        request.getRemoteAddr(), getAllIps(request),
        preventLogFlood(emptyIfNull(e.getLogin())));
}
|
@Test
public void login_failure_logs_X_Forwarded_For_header_from_request_and_supports_multiple_headers() {
    // All X-Forwarded-For values from multiple headers should be joined into
    // the logged IP list, after the remote address.
    AuthenticationException exception = newBuilder()
        .setSource(Source.realm(Method.EXTERNAL, "bar"))
        .setMessage("Boom!")
        .setLogin("foo")
        .build();
    HttpRequest request = mockRequest("1.2.3.4", List.of("2.3.4.5", "6.5.4.3"), List.of("9.5.6.7"), List.of("6.3.2.4"));
    underTest.loginFailure(request, exception);
    verifyLog("login failure [cause|Boom!][method|EXTERNAL][provider|REALM|bar][IP|1.2.3.4|2.3.4.5,6.5.4.3,9.5.6.7,6.3.2.4][login|foo]",
        Set.of("logout", "login success"));
}
|
@Override
public InputStream read(final Path file, final TransferStatus status, final ConnectionCallback callback) throws BackgroundException {
    // Opens a download stream for the given file, honoring version ids and
    // resumed (ranged) transfers. Service/IO failures are mapped to
    // BackgroundException subtypes.
    try {
        // Upload placeholders have no content to read.
        if(file.getType().contains(Path.Type.upload)) {
            return new NullInputStream(0L);
        }
        final HttpRange range = HttpRange.withStatus(status);
        final RequestEntityRestStorageService client = session.getClient();
        final Map<String, Object> requestHeaders = new HashMap<>();
        final Map<String, String> requestParameters = new HashMap<>();
        if(file.attributes().getVersionId() != null) {
            // Request the specific object version.
            requestParameters.put("versionId", file.attributes().getVersionId());
        }
        if(status.isAppend()) {
            // Resume: request only the remaining byte range. An unknown end
            // means an open-ended "bytes=start-" range.
            final String header;
            if(TransferStatus.UNKNOWN_LENGTH == range.getEnd()) {
                header = String.format("bytes=%d-", range.getStart());
            }
            else {
                header = String.format("bytes=%d-%d", range.getStart(), range.getEnd());
            }
            if(log.isDebugEnabled()) {
                log.debug(String.format("Add range header %s for file %s", header, file));
            }
            requestHeaders.put(HttpHeaders.RANGE, header);
        }
        final Path bucket = containerService.getContainer(file);
        // Both 200 (full content) and 206 (partial content) are acceptable.
        final HttpResponse response = client.performRestGet(bucket.isRoot() ? StringUtils.EMPTY : bucket.getName(),
            containerService.getKey(file), requestParameters, requestHeaders, new int[]{HttpStatus.SC_PARTIAL_CONTENT, HttpStatus.SC_OK});
        // Stream wrapper releases the HTTP connection when fully read/closed.
        return new HttpMethodReleaseInputStream(response, status);
    }
    catch(ServiceException e) {
        throw new S3ExceptionMappingService().map("Download {0} failed", e, file);
    }
    catch(IOException e) {
        throw new DefaultIOExceptionMappingService().map("Download {0} failed", e, file);
    }
}
|
@Test
public void testCloudFront() throws Exception {
    // Write a random object through S3, then read it back through the
    // CloudFront session and verify the full byte count is received.
    final Path container = new Path("test-eu-central-1-cyberduck", EnumSet.of(Path.Type.directory, Path.Type.volume));
    final String name = new AlphanumericRandomStringService().random();
    final Path file = new Path(container, name, EnumSet.of(Path.Type.file));
    final byte[] content = RandomUtils.nextBytes(2018);
    final TransferStatus status = new TransferStatus().withLength(content.length);
    status.setAcl(Acl.CANNED_PUBLIC_READ);
    status.setChecksum(new SHA256ChecksumCompute().compute(new ByteArrayInputStream(content), status));
    final OutputStream out = new S3WriteFeature(session, new S3AccessControlListFeature(session)).write(file, status, new DisabledConnectionCallback());
    new StreamCopier(new TransferStatus(), new TransferStatus()).transfer(new ByteArrayInputStream(content), out);
    final CountingInputStream in = new CountingInputStream(new S3ReadFeature(cloudfront).read(
        new Path(file.getName(), EnumSet.of(Path.Type.file)), status, new DisabledConnectionCallback()));
    final BytecountStreamListener count = new BytecountStreamListener();
    new StreamCopier(status, status).withListener(count).transfer(in, NullOutputStream.NULL_OUTPUT_STREAM);
    assertEquals(content.length, count.getRecv());
    // Clean up the uploaded object.
    new S3DefaultDeleteFeature(session).delete(Collections.singletonList(file), new DisabledLoginCallback(), new Delete.DisabledCallback());
}
|
@Override
public Neighbor<double[], E> nearest(double[] q) {
    // Scans the candidate set for the query and returns the closest key,
    // or null when no candidate other than the query itself exists.
    int best = -1;
    double bestDist = Double.MAX_VALUE;
    for (int candidate : getCandidates(q)) {
        double[] key = keys.get(candidate);
        // Reference comparison on purpose: skip the query object itself.
        if (q == key) {
            continue;
        }
        double d = MathEx.distance(q, key);
        if (d < bestDist) {
            best = candidate;
            bestDist = d;
        }
    }
    if (best == -1) {
        return null;
    }
    return new Neighbor<>(keys.get(best), data.get(best), best, bestDist);
}
|
@Test
public void testNearest() {
    System.out.println("nearest");
    // recall: LSH found the exact nearest neighbor; hit: LSH found anything.
    int recall = 0;
    double error = 0.0;
    int hit = 0;
    for (double[] xi : testx) {
        Neighbor neighbor = lsh.nearest(xi);
        if (neighbor != null) {
            hit++;
            // Compare against the exhaustive (naive) search as ground truth.
            Neighbor truth = naive.nearest(xi);
            if (neighbor.index == truth.index) {
                recall++;
            } else {
                // Relative distance error accumulated only on misses.
                error += Math.abs(neighbor.distance - truth.distance) / truth.distance;
            }
        }
    }
    // Average the error over the misses (hit - recall of them).
    error /= (hit - recall);
    assertEquals(1154, recall);
    assertEquals(2007, hit);
    assertEquals(0.1305, error, 1E-4);
    System.out.format("recall is %.2f%%%n", 100.0 * recall / testx.length);
    System.out.format("error when miss is %.2f%%%n", 100.0 * error);
    System.out.format("null rate is %.2f%%%n", 100.0 - 100.0 * hit / testx.length);
}
|
public String getMetricsPrefix() {
    // Prefer the current property key; fall back to the deprecated key,
    // which itself defaults to the built-in metrics prefix.
    String prefix = getProperty(CommonConstants.Minion.CONFIG_OF_METRICS_PREFIX_KEY);
    if (prefix != null) {
        return prefix;
    }
    return getProperty(CommonConstants.Minion.DEPRECATED_CONFIG_OF_METRICS_PREFIX_KEY,
        CommonConstants.Minion.CONFIG_OF_METRICS_PREFIX);
}
|
@Test
public void testDeprecatedConfigs()
    throws ConfigurationException {
    // Verifies that deprecated (un-prefixed) and new (pinot.minion-prefixed)
    // config keys are both honored, and that values resolve correctly.
    // Check configs with old names that have no pinot.minion prefix.
    String[] cfgKeys = new String[]{
        CommonConstants.Minion.DEPRECATED_PREFIX_OF_CONFIG_OF_PINOT_FS_FACTORY,
        CommonConstants.Minion.DEPRECATED_PREFIX_OF_CONFIG_OF_SEGMENT_FETCHER_FACTORY,
        CommonConstants.Minion.DEPRECATED_PREFIX_OF_CONFIG_OF_SEGMENT_UPLOADER,
        CommonConstants.Minion.DEPRECATED_PREFIX_OF_CONFIG_OF_PINOT_CRYPTER
    };
    PropertiesConfiguration config = CommonsConfigurationUtils.fromPath(
        PropertiesConfiguration.class.getClassLoader().getResource("pinot-configuration-old-minion.properties")
            .getFile());
    PinotConfiguration rawCfg = new PinotConfiguration(config);
    final MinionConf oldConfig = new MinionConf(rawCfg.toMap());
    Assert.assertEquals(oldConfig.getMetricsPrefix(), "pinot.minion.old.custom.metrics.");
    // Old-style file: keys exist un-prefixed, not under pinot.minion.
    for (String cfgKey : cfgKeys) {
        Assert.assertFalse(oldConfig.subset(cfgKey).isEmpty(), cfgKey);
        Assert.assertTrue(oldConfig.subset("pinot.minion." + cfgKey).isEmpty(), cfgKey);
    }
    // Check configs with new names that have the pinot.minion prefix.
    config = CommonsConfigurationUtils.fromPath(
        PropertiesConfiguration.class.getClassLoader().getResource("pinot-configuration-new-minion.properties")
            .getFile());
    rawCfg = new PinotConfiguration(config);
    final MinionConf newConfig = new MinionConf(rawCfg.toMap());
    // New-style file: keys exist only under the pinot.minion prefix.
    for (String cfgKey : cfgKeys) {
        Assert.assertTrue(newConfig.subset(cfgKey).isEmpty(), cfgKey);
        Assert.assertFalse(newConfig.subset("pinot.minion." + cfgKey).isEmpty(), cfgKey);
    }
    // Check the config values.
    Assert.assertEquals(newConfig.getMetricsPrefix(), "pinot.minion.new.custom.metrics.");
    PinotConfiguration subcfg = newConfig.subset(CommonConstants.Minion.PREFIX_OF_CONFIG_OF_PINOT_FS_FACTORY);
    Assert.assertEquals(subcfg.subset("class").getProperty("s3"), "org.apache.pinot.plugin.filesystem.S3PinotFS");
    subcfg = newConfig.subset(CommonConstants.Minion.PREFIX_OF_CONFIG_OF_SEGMENT_FETCHER_FACTORY);
    Assert.assertEquals(subcfg.getProperty("protocols"), "file,http,s3");
    subcfg = newConfig.subset(CommonConstants.Minion.PREFIX_OF_CONFIG_OF_SEGMENT_UPLOADER);
    Assert.assertEquals(subcfg.subset("https").getProperty("enabled"), "true");
    subcfg = newConfig.subset(CommonConstants.Minion.PREFIX_OF_CONFIG_OF_PINOT_CRYPTER);
    Assert.assertEquals(subcfg.subset("class").getProperty("nooppinotcrypter"),
        "org.apache.pinot.core.crypt.NoOpPinotCrypter");
}
|
/**
 * Parses simple {@code key=value} lines from the reader into the given map.
 * Lines starting with {@code //} or {@code #} are comments; blank lines and
 * lines without {@code =} are skipped (the latter with a warning). Keys and
 * values are trimmed. The reader is always closed.
 *
 * @param map       destination for the parsed entries
 * @param tmpReader source of the property lines; closed by this method
 * @throws IOException if reading fails
 */
static void loadProperties(Map<String, String> map, Reader tmpReader) throws IOException {
    // try-with-resources replaces the manual finally/close and also closes
    // the reader when readLine() throws.
    try (BufferedReader reader = new BufferedReader(tmpReader)) {
        String line;
        while ((line = reader.readLine()) != null) {
            if (line.startsWith("//") || line.startsWith("#")) {
                continue;
            }
            if (Helper.isEmpty(line)) {
                continue;
            }
            // Split on the first '=' only; values may contain '='.
            int index = line.indexOf('=');
            if (index < 0) {
                LOGGER.warn("Skipping configuration at line:" + line);
                continue;
            }
            String field = line.substring(0, index);
            String value = line.substring(index + 1);
            map.put(field.trim(), value.trim());
        }
    }
}
|
@Test
public void testLoadProperties() throws IOException {
    // Keys and values should be trimmed; whitespace around '=' is tolerated.
    Map<String, String> props = new HashMap<>();
    Reader source = new StringReader("blup=test\n blup2 = xy");
    StorableProperties.loadProperties(props, source);
    assertEquals("test", props.get("blup"));
    assertEquals("xy", props.get("blup2"));
}
|
/**
 * Reflectively writes {@code newValue} into the named field of {@code object},
 * searching the class hierarchy and forcing accessibility. Any reflective
 * failure is rethrown as an unchecked {@link RuntimeException}.
 */
public static void setProtectedFieldValue(String protectedField, Object object, Object newValue) {
    try {
        // acegi would silently fail to write to final fields.
        // FieldUtils.writeField(Object, field, true) only sets accessible on *non* public fields
        // and then fails with IllegalAccessException (even if you make the field accessible in the interim!)
        // For backwards compatibility we need to use a few steps.
        Field field = org.apache.commons.lang.reflect.FieldUtils.getField(object.getClass(), protectedField, true);
        field.setAccessible(true);
        field.set(object, newValue);
    } catch (Exception x) {
        throw new RuntimeException(x);
    }
}
|
@Issue("JENKINS-64390")
@Test
public void setProtectedFieldValue_Should_fail_silently_to_set_public_final_fields_in_InnerClass() {
    // Writing to a public final field of an inner class must not throw, but
    // the write does not take effect: the field keeps its original value.
    InnerClassWithPublicFinalField sut = new InnerClassWithPublicFinalField();
    FieldUtils.setProtectedFieldValue("myField", sut, "test");
    assertEquals("original", sut.getMyField());
}
|
@Udf(description = "Returns the sine of an INT value")
public Double sin(
    @UdfParameter(
        value = "value",
        description = "The value in radians to get the sine of."
    ) final Integer value
) {
    // Widen to Double (propagating null) and delegate to the Double overload.
    final Double radians = (value == null) ? null : value.doubleValue();
    return sin(radians);
}
|
@Test
public void shouldHandleZero() {
    // sin(0) must be ~0 regardless of the numeric overload used.
    assertThat(udf.sin(0.0), closeTo(0.0, 0.000000000000001));
    assertThat(udf.sin(0), closeTo(0.0, 0.000000000000001));
    assertThat(udf.sin(0L), closeTo(0.0, 0.000000000000001));
}
|
/**
 * Creates (or refreshes) the file {@code fileName} under directory {@code path}.
 *
 * @throws IOException if the file cannot be created or updated
 */
public static void touch(String path, String fileName) throws IOException {
    final File target = Paths.get(path, fileName).toFile();
    FileUtils.touch(target);
}
|
@Test
void testTouchWithFileName() throws IOException {
    // Pick a random, non-existing file under the temp directory.
    final File target = Paths.get(TMP_PATH, UUID.randomUUID().toString()).toFile();
    assertFalse(target.exists());
    // touch() should create the file given its parent dir and name.
    DiskUtils.touch(target.getParent(), target.getName());
    assertTrue(target.exists());
    // Clean up on JVM exit.
    target.deleteOnExit();
}
|
@Override
@Transactional(rollbackFor = Exception.class)
public void updateFileConfigMaster(Long id) {
    // Validate that the config exists
    validateFileConfigExists(id);
    // Demote all other configs to non-master
    fileConfigMapper.updateBatch(new FileConfigDO().setMaster(false));
    // Promote the given config to master
    fileConfigMapper.updateById(new FileConfigDO().setId(id).setMaster(true));
    // Clear the cache so the new master takes effect
    clearCache(null, true);
}
|
@Test
public void testUpdateFileConfigMaster_notExists() {
    // Call with a random id and assert the not-exists service exception
    assertServiceException(() -> fileConfigService.updateFileConfigMaster(randomLongId()), FILE_CONFIG_NOT_EXISTS);
}
|
@Override
public boolean match(String attributeValue) {
    // Evaluates this LIKE-style condition against the value; null never matches.
    if (attributeValue == null) {
        return false;
    }
    switch (type) {
        case Equals:
            return attributeValue.equals(value);
        case StartsWith:
            // length == -1 means "any length"; otherwise the pattern fixed
            // the total length (e.g. '_' wildcards), which must match exactly.
            return (length == -1 || length == attributeValue.length()) && attributeValue.startsWith(value);
        case EndsWith:
            return (length == -1 || length == attributeValue.length()) && attributeValue.endsWith(value);
        case Contains:
            return attributeValue.contains(value);
        case Regexp:
            // Full match against the precompiled pattern.
            return regexPattern.matcher(attributeValue).matches();
        default:
            throw new IllegalStateException("Unexpected type " + type);
    }
}
|
@Test
public void testDegeneratedEndsWith() {
    // "%ab": any (possibly empty) prefix followed by "ab".
    LikeCondition anyPrefix = new LikeCondition("%ab");
    assertTrue(anyPrefix.match("ab"));
    assertTrue(anyPrefix.match("xab"));
    assertTrue(anyPrefix.match("xxab"));
    assertFalse(anyPrefix.match("abx"));
    assertFalse(anyPrefix.match("axb"));
    // "_ab": exactly one leading character followed by "ab".
    LikeCondition oneCharPrefix = new LikeCondition("_ab");
    assertTrue(oneCharPrefix.match("cab"));
    assertFalse(oneCharPrefix.match("ab"));
    assertFalse(oneCharPrefix.match("xxab"));
    assertFalse(oneCharPrefix.match("abc"));
    assertFalse(oneCharPrefix.match("xabc"));
}
|
@Override
public void init(DatabaseMetaData metaData) throws SQLException {
    // Reject unsupported database and JDBC driver versions up front.
    checkDbVersion(metaData, MIN_SUPPORTED_VERSION);
    checkDriverVersion(metaData);
}
|
@Test
void test_driver_versions() throws Exception {
    // Driver versions >= 12.1 are accepted; older drivers must be rejected
    // with an explicit MessageException.
    DatabaseMetaData metadata = newMetadata( 19, 2, "18.3.0.0.0");
    underTest.init(metadata);
    metadata = newMetadata(19, 2, "12.2.0.1.0");
    underTest.init(metadata);
    // no error
    metadata = newMetadata(19, 2, "12.1.0.2.0");
    underTest.init(metadata);
    // no error
    metadata = newMetadata(19, 2, "12.1.0.1.0");
    underTest.init(metadata);
    // no error
    metadata = newMetadata(19, 2, "12.0.2");
    underTest.init(metadata);
    // no error
    metadata = newMetadata(19, 2, "11.1.0.2");
    try {
        underTest.init(metadata);
        fail();
    } catch (MessageException e) {
        assertThat(e).hasMessage("Unsupported Oracle driver version: 11.1.0.2. Minimal supported version is 12.1.");
    }
}
|
/** Returns the scheduler backing this service. */
public ScheduledExecutorService getScheduledExecutorService() {
    return scheduledExecutorService;
}
|
@Test
public void testGetScheduledExecutorService() {
    // The getter must expose the executor the service was constructed with.
    assertEquals(executorService, pullMessageService.getScheduledExecutorService());
}
|
@Override
public PathLocation getDestinationForPath(final String path)
    throws IOException {
    // Resolves a client path to its remote destination(s), consulting the
    // location cache when enabled and rewriting trash paths back to the
    // original client path in the returned destinations.
    verifyMountTable();
    PathLocation res;
    readLock.lock();
    try {
        if (this.locationCache == null) {
            // Caching disabled: resolve directly.
            res = lookupLocation(processTrashPath(path));
        } else {
            // Loader invoked only on a cache miss; the miss counter is
            // incremented inside it, the access counter on every lookup.
            Callable<? extends PathLocation> meh = (Callable<PathLocation>) () -> {
                this.getLocCacheMiss().increment();
                return lookupLocation(processTrashPath(path));
            };
            res = this.locationCache.get(processTrashPath(path), meh);
            this.getLocCacheAccess().increment();
        }
        if (isTrashPath(path)) {
            // Rebuild destinations against the original (trash) path so the
            // caller sees locations for the path it actually asked about.
            List<RemoteLocation> remoteLocations = new ArrayList<>();
            for (RemoteLocation remoteLocation : res.getDestinations()) {
                remoteLocations.add(new RemoteLocation(remoteLocation, path));
            }
            return new PathLocation(path, remoteLocations,
                res.getDestinationOrder());
        } else {
            return res;
        }
    } catch (ExecutionException e) {
        // Unwrap the cache-loader failure, preserving IOExceptions as-is.
        Throwable cause = e.getCause();
        final IOException ioe;
        if (cause instanceof IOException) {
            ioe = (IOException) cause;
        } else {
            ioe = new IOException(cause);
        }
        throw ioe;
    } finally {
        readLock.unlock();
    }
}
|
// NOTE(review): method name has a typo ("Muiltiple"); left unchanged to keep
// the test's identity stable.
@Test
public void testMuiltipleDestinations() throws IOException {
    // Resolving a path mounted to multiple destinations must fail with a
    // descriptive IOException.
    try {
        mountTable.getDestinationForPath("/multi");
        fail("The getDestinationForPath call should fail.");
    } catch (IOException ioe) {
        GenericTestUtils.assertExceptionContains(
            "MountTableResolver should not resolve multiple destinations", ioe);
    }
}
|
@Override
public Long dbSize(RedisClusterNode node) {
    // Routes the DBSIZE command to the specified cluster node.
    return execute(node, RedisCommands.DBSIZE);
}
|
@Test
public void testDbSize() {
    // A freshly-started master should report an empty database.
    RedisClusterNode master = getFirstMaster();
    Long size = connection.dbSize(master);
    assertThat(size).isZero();
}
|
/**
 * Entry point of the builder chain; the class argument only pins the type
 * parameter {@code K} for inference and is otherwise unused.
 */
@SuppressWarnings("unused") // Required for automatic type inference
public static <K> Builder0<K> forClass(final Class<K> type) {
    return new Builder0<>();
}
|
@SuppressFBWarnings("RV_RETURN_VALUE_IGNORED_NO_SIDE_EFFECT")
@Test
public void shouldWorkWithSuppliersR2() {
    // Handlers registered via Supplier must be resolved and invoked with the
    // declared two-arg signature and return type.
    // Given:
    handlerMapR2 = HandlerMaps.forClass(BaseType.class)
        .withArgTypes(String.class, Integer.class)
        .withReturnType(Number.class)
        .put(LeafTypeA.class, () -> handlerR2_1)
        .build();
    // When:
    handlerMapR2.get(LeafTypeA.class).handle("A", 2, LEAF_A);
    // Then:
    verify(handlerR2_1).handle("A", 2, LEAF_A);
}
|
/**
 * Returns a predicate that accepts files whose extracted timestamp falls
 * inside the configured interval. Extraction or intersection failures are
 * treated as "no match" (logged at DEBUG).
 */
@Override
public Predicate<FileInfo> get() {
    // Fix: lambda parameter was named "FileInfo", shadowing the type name;
    // renamed to lowerCamelCase per convention.
    return fileInfo -> {
        try {
            Long time = mGetter.apply(fileInfo);
            // A point in time is modeled as the degenerate interval [time, time + 1).
            Interval interval = Interval.between(time, time + 1);
            return mInterval.intersect(interval).isValid();
        } catch (RuntimeException e) {
            // Best-effort filter: failures exclude the file rather than propagate.
            LOG.debug("Failed to filter: ", e);
            return false;
        }
    };
}
|
@Test
public void testDatePredicateInterval() throws ParseException {
    // The interval is inclusive of the start date and exclusive of the end
    // date's own timestamp (last accepted instant is end - 1 ms).
    FileFilter filter =
        FileFilter.newBuilder().setName("lastModifiedDate").setValue("2020/01/01, 2023/09/14")
            .build();
    FileInfo info = new FileInfo();
    info.setLastModificationTimeMs(mDateFormat.parse("2021/09/15").getTime());
    assertTrue(FilePredicate.create(filter).get().test(info));
    // Before the interval start.
    info.setLastModificationTimeMs(mDateFormat.parse("2019/09/05").getTime());
    assertFalse(FilePredicate.create(filter).get().test(info));
    // After the interval end.
    Date d = mDateFormat.parse("2023/09/16");
    info.setLastModificationTimeMs(d.getTime());
    assertFalse(FilePredicate.create(filter).get().test(info));
    // Exactly on the start boundary: included.
    info.setLastModificationTimeMs(mDateFormat.parse("2020/01/01").getTime());
    assertTrue(FilePredicate.create(filter).get().test(info));
    // One millisecond before the end boundary: included.
    info.setLastModificationTimeMs(mDateFormat.parse("2023/09/14").getTime() - 1);
    assertTrue(FilePredicate.create(filter).get().test(info));
    info.setLastModificationTimeMs(mDateFormat.parse("2023/09/13").getTime());
    assertTrue(FilePredicate.create(filter).get().test(info));
    // Exactly on the end boundary and after: excluded.
    info.setLastModificationTimeMs(mDateFormat.parse("2023/09/14").getTime());
    assertFalse(FilePredicate.create(filter).get().test(info));
    info.setLastModificationTimeMs(mDateFormat.parse("2023/09/14").getTime() + 1);
    assertFalse(FilePredicate.create(filter).get().test(info));
}
|
@Override
public void start() {
    // We request a split only if we did not get splits during the checkpoint restore.
    // Otherwise, reader restarts will keep requesting more and more splits.
    if (getNumberOfCurrentlyAssignedSplits() == 0) {
        requestSplit(Collections.emptyList());
    }
}
|
@Test
public void testReaderMetrics() throws Exception {
    // Runs two split fetch cycles through the reader and verifies the
    // per-split metrics accumulate across fetches.
    TestingReaderOutput<RowData> readerOutput = new TestingReaderOutput<>();
    TestingMetricGroup metricGroup = new TestingMetricGroup();
    TestingReaderContext readerContext = new TestingReaderContext(new Configuration(), metricGroup);
    IcebergSourceReader reader = createReader(metricGroup, readerContext, null);
    reader.start();
    testOneSplitFetcher(reader, readerOutput, metricGroup, 1);
    testOneSplitFetcher(reader, readerOutput, metricGroup, 2);
}
|
/**
 * Imports a version-1.3 configuration document from the streaming JSON
 * reader. Known top-level members are dispatched to their readers; unknown
 * members are offered to registered extensions and otherwise skipped. After
 * reading, deferred object references (and extension references) are fixed
 * up and the temporary id maps are cleared.
 */
@Override
public void importData(JsonReader reader) throws IOException {
    logger.info("Reading configuration for 1.3");
    // this *HAS* to start as an object
    reader.beginObject();
    while (reader.hasNext()) {
        JsonToken tok = reader.peek();
        switch (tok) {
            case NAME:
                String name = reader.nextName();
                // find out which member it is
                if (name.equals(CLIENTS)) {
                    readClients(reader);
                } else if (name.equals(GRANTS)) {
                    readGrants(reader);
                } else if (name.equals(WHITELISTEDSITES)) {
                    readWhitelistedSites(reader);
                } else if (name.equals(BLACKLISTEDSITES)) {
                    readBlacklistedSites(reader);
                } else if (name.equals(AUTHENTICATIONHOLDERS)) {
                    readAuthenticationHolders(reader);
                } else if (name.equals(ACCESSTOKENS)) {
                    readAccessTokens(reader);
                } else if (name.equals(REFRESHTOKENS)) {
                    readRefreshTokens(reader);
                } else if (name.equals(SYSTEMSCOPES)) {
                    readSystemScopes(reader);
                } else {
                    // Unknown member: let extensions claim it.
                    boolean processed = false;
                    for (MITREidDataServiceExtension extension : extensions) {
                        if (extension.supportsVersion(THIS_VERSION)) {
                            processed = extension.importExtensionData(name, reader);
                            if (processed) {
                                // if the extension processed data, break out of this inner loop
                                // (only the first extension to claim an extension point gets it)
                                break;
                            }
                        }
                    }
                    if (!processed) {
                        // unknown token, skip it
                        reader.skipValue();
                    }
                }
                break;
            case END_OBJECT:
                // the object ended, we're done here
                reader.endObject();
                continue;
            default:
                logger.debug("Found unexpected entry");
                reader.skipValue();
                continue;
        }
    }
    // Resolve forward references recorded while reading.
    fixObjectReferences();
    for (MITREidDataServiceExtension extension : extensions) {
        if (extension.supportsVersion(THIS_VERSION)) {
            extension.fixExtensionObjectReferences(maps);
            break;
        }
    }
    maps.clearAll();
}
|
@Test
public void testFixRefreshTokenAuthHolderReferencesOnImport() throws IOException, ParseException {
    // Verifies that on import, refresh tokens referencing authentication
    // holders by their *old* ids are re-linked to the holders' *new*
    // repository-assigned ids (356, 357) after fixObjectReferences().
    // --- Fixture: two clients, auth holders and refresh tokens ---
    String expiration1 = "2014-09-10T22:49:44.090+00:00";
    Date expirationDate1 = formatter.parse(expiration1, Locale.ENGLISH);
    ClientDetailsEntity mockedClient1 = mock(ClientDetailsEntity.class);
    when(mockedClient1.getClientId()).thenReturn("mocked_client_1");
    OAuth2Request req1 = new OAuth2Request(new HashMap<String, String>(), "client1", new ArrayList<GrantedAuthority>(),
        true, new HashSet<String>(), new HashSet<String>(), "http://foo.com",
        new HashSet<String>(), null);
    Authentication mockAuth1 = mock(Authentication.class, withSettings().serializable());
    OAuth2Authentication auth1 = new OAuth2Authentication(req1, mockAuth1);
    AuthenticationHolderEntity holder1 = new AuthenticationHolderEntity();
    holder1.setId(1L);
    holder1.setAuthentication(auth1);
    OAuth2RefreshTokenEntity token1 = new OAuth2RefreshTokenEntity();
    token1.setId(1L);
    token1.setClient(mockedClient1);
    token1.setExpiration(expirationDate1);
    token1.setJwt(JWTParser.parse("eyJhbGciOiJub25lIn0.eyJqdGkiOiJmOTg4OWQyOS0xMTk1LTQ4ODEtODgwZC1lZjVlYzAwY2Y4NDIifQ."));
    token1.setAuthenticationHolder(holder1);
    String expiration2 = "2015-01-07T18:31:50.079+00:00";
    Date expirationDate2 = formatter.parse(expiration2, Locale.ENGLISH);
    ClientDetailsEntity mockedClient2 = mock(ClientDetailsEntity.class);
    when(mockedClient2.getClientId()).thenReturn("mocked_client_2");
    OAuth2Request req2 = new OAuth2Request(new HashMap<String, String>(), "client2", new ArrayList<GrantedAuthority>(),
        true, new HashSet<String>(), new HashSet<String>(), "http://bar.com",
        new HashSet<String>(), null);
    Authentication mockAuth2 = mock(Authentication.class, withSettings().serializable());
    OAuth2Authentication auth2 = new OAuth2Authentication(req2, mockAuth2);
    AuthenticationHolderEntity holder2 = new AuthenticationHolderEntity();
    holder2.setId(2L);
    holder2.setAuthentication(auth2);
    OAuth2RefreshTokenEntity token2 = new OAuth2RefreshTokenEntity();
    token2.setId(2L);
    token2.setClient(mockedClient2);
    token2.setExpiration(expirationDate2);
    token2.setJwt(JWTParser.parse("eyJhbGciOiJub25lIn0.eyJqdGkiOiJlYmEyYjc3My0xNjAzLTRmNDAtOWQ3MS1hMGIxZDg1OWE2MDAifQ."));
    token2.setAuthenticationHolder(holder2);
    // --- Import document: holders with ids 1/2, tokens referencing them ---
    String configJson = "{" +
        "\"" + MITREidDataService.SYSTEMSCOPES + "\": [], " +
        "\"" + MITREidDataService.ACCESSTOKENS + "\": [], " +
        "\"" + MITREidDataService.CLIENTS + "\": [], " +
        "\"" + MITREidDataService.GRANTS + "\": [], " +
        "\"" + MITREidDataService.WHITELISTEDSITES + "\": [], " +
        "\"" + MITREidDataService.BLACKLISTEDSITES + "\": [], " +
        "\"" + MITREidDataService.AUTHENTICATIONHOLDERS + "\": [" +
        "{\"id\":1,\"authentication\":{\"authorizationRequest\":{\"clientId\":\"client1\",\"redirectUri\":\"http://foo.com\"},"
        + "\"userAuthentication\":null}}," +
        "{\"id\":2,\"authentication\":{\"authorizationRequest\":{\"clientId\":\"client2\",\"redirectUri\":\"http://bar.com\"},"
        + "\"userAuthentication\":null}}" +
        " ]," +
        "\"" + MITREidDataService.REFRESHTOKENS + "\": [" +
        "{\"id\":1,\"clientId\":\"mocked_client_1\",\"expiration\":\"2014-09-10T22:49:44.090+00:00\","
        + "\"authenticationHolderId\":1,\"value\":\"eyJhbGciOiJub25lIn0.eyJqdGkiOiJmOTg4OWQyOS0xMTk1LTQ4ODEtODgwZC1lZjVlYzAwY2Y4NDIifQ.\"}," +
        "{\"id\":2,\"clientId\":\"mocked_client_2\",\"expiration\":\"2015-01-07T18:31:50.079+00:00\","
        + "\"authenticationHolderId\":2,\"value\":\"eyJhbGciOiJub25lIn0.eyJqdGkiOiJlYmEyYjc3My0xNjAzLTRmNDAtOWQ3MS1hMGIxZDg1OWE2MDAifQ.\"}" +
        " ]" +
        "}";
    logger.debug(configJson);
    JsonReader reader = new JsonReader(new StringReader(configJson));
    // --- In-memory fakes for the repositories; new ids start at 343/356 ---
    final Map<Long, OAuth2RefreshTokenEntity> fakeRefreshTokenTable = new HashMap<>();
    final Map<Long, AuthenticationHolderEntity> fakeAuthHolderTable = new HashMap<>();
    when(tokenRepository.saveRefreshToken(isA(OAuth2RefreshTokenEntity.class))).thenAnswer(new Answer<OAuth2RefreshTokenEntity>() {
        Long id = 343L;
        @Override
        public OAuth2RefreshTokenEntity answer(InvocationOnMock invocation) throws Throwable {
            OAuth2RefreshTokenEntity _token = (OAuth2RefreshTokenEntity) invocation.getArguments()[0];
            if(_token.getId() == null) {
                _token.setId(id++);
            }
            fakeRefreshTokenTable.put(_token.getId(), _token);
            return _token;
        }
    });
    when(tokenRepository.getRefreshTokenById(anyLong())).thenAnswer(new Answer<OAuth2RefreshTokenEntity>() {
        @Override
        public OAuth2RefreshTokenEntity answer(InvocationOnMock invocation) throws Throwable {
            Long _id = (Long) invocation.getArguments()[0];
            return fakeRefreshTokenTable.get(_id);
        }
    });
    when(clientRepository.getClientByClientId(anyString())).thenAnswer(new Answer<ClientDetailsEntity>() {
        @Override
        public ClientDetailsEntity answer(InvocationOnMock invocation) throws Throwable {
            String _clientId = (String) invocation.getArguments()[0];
            ClientDetailsEntity _client = mock(ClientDetailsEntity.class);
            when(_client.getClientId()).thenReturn(_clientId);
            return _client;
        }
    });
    when(authHolderRepository.save(isA(AuthenticationHolderEntity.class))).thenAnswer(new Answer<AuthenticationHolderEntity>() {
        Long id = 356L;
        @Override
        public AuthenticationHolderEntity answer(InvocationOnMock invocation) throws Throwable {
            AuthenticationHolderEntity _holder = (AuthenticationHolderEntity) invocation.getArguments()[0];
            if(_holder.getId() == null) {
                _holder.setId(id++);
            }
            fakeAuthHolderTable.put(_holder.getId(), _holder);
            return _holder;
        }
    });
    when(authHolderRepository.getById(anyLong())).thenAnswer(new Answer<AuthenticationHolderEntity>() {
        @Override
        public AuthenticationHolderEntity answer(InvocationOnMock invocation) throws Throwable {
            Long _id = (Long) invocation.getArguments()[0];
            return fakeAuthHolderTable.get(_id);
        }
    });
    // --- Exercise and verify the re-linked holder ids ---
    dataService.importData(reader);
    List<OAuth2RefreshTokenEntity> savedRefreshTokens = new ArrayList(fakeRefreshTokenTable.values()); //capturedRefreshTokens.getAllValues();
    Collections.sort(savedRefreshTokens, new refreshTokenIdComparator());
    assertThat(savedRefreshTokens.get(0).getAuthenticationHolder().getId(), equalTo(356L));
    assertThat(savedRefreshTokens.get(1).getAuthenticationHolder().getId(), equalTo(357L));
}
|
/**
 * Parses a resource configuration string with no memory cap
 * (delegates with {@link Long#MAX_VALUE} as the limit).
 */
public static ConfigurableResource parseResourceConfigValue(String value)
    throws AllocationConfigurationException {
    return parseResourceConfigValue(value, Long.MAX_VALUE);
}
|
@Test
public void testDuplicateMemoryDefinitionPercentage() throws Exception {
    // Arms the expected-exception helper for a duplicate "memory" percentage,
    // then parses a value that defines memory twice (50% and 80%).
    expectInvalidResourcePercentage("memory");
    parseResourceConfigValue("50% 80% memory, 100%cpu");
}
|
/** Issues a store-store/load-store fence (release semantics) via Unsafe. */
@SuppressWarnings("removal") // Since JDK 22
public static void releaseFence()
{
    UnsafeAccess.UNSAFE.storeFence();
}
|
@Test
void releaseFence()
{
    // Smoke test: the fence must execute without throwing.
    MemoryAccess.releaseFence();
}
|
/**
 * Returns a {@link Permutor} that reorders a list of feed items according to
 * {@code sortOrder}. Comparator-based orders sort in place; RANDOM shuffles;
 * the SMART_SHUFFLE orders delegate to {@code smartShuffle}.
 *
 * @throws IllegalArgumentException for a sort order with no implementation
 */
@NonNull
public static Permutor<FeedItem> getPermutor(@NonNull SortOrder sortOrder) {
    Comparator<FeedItem> comparator = null;
    Permutor<FeedItem> permutor = null;
    switch (sortOrder) {
        case EPISODE_TITLE_A_Z:
            comparator = (f1, f2) -> itemTitle(f1).compareTo(itemTitle(f2));
            break;
        case EPISODE_TITLE_Z_A:
            comparator = (f1, f2) -> itemTitle(f2).compareTo(itemTitle(f1));
            break;
        case DATE_OLD_NEW:
            comparator = (f1, f2) -> pubDate(f1).compareTo(pubDate(f2));
            break;
        case DATE_NEW_OLD:
            comparator = (f1, f2) -> pubDate(f2).compareTo(pubDate(f1));
            break;
        case DURATION_SHORT_LONG:
            comparator = (f1, f2) -> Integer.compare(duration(f1), duration(f2));
            break;
        case DURATION_LONG_SHORT:
            comparator = (f1, f2) -> Integer.compare(duration(f2), duration(f1));
            break;
        case EPISODE_FILENAME_A_Z:
            comparator = (f1, f2) -> itemLink(f1).compareTo(itemLink(f2));
            break;
        case EPISODE_FILENAME_Z_A:
            comparator = (f1, f2) -> itemLink(f2).compareTo(itemLink(f1));
            break;
        case FEED_TITLE_A_Z:
            comparator = (f1, f2) -> feedTitle(f1).compareTo(feedTitle(f2));
            break;
        case FEED_TITLE_Z_A:
            comparator = (f1, f2) -> feedTitle(f2).compareTo(feedTitle(f1));
            break;
        case RANDOM:
            permutor = Collections::shuffle;
            break;
        case SMART_SHUFFLE_OLD_NEW:
            permutor = (queue) -> smartShuffle(queue, true);
            break;
        case SMART_SHUFFLE_NEW_OLD:
            permutor = (queue) -> smartShuffle(queue, false);
            break;
        case SIZE_SMALL_LARGE:
            comparator = (f1, f2) -> Long.compare(size(f1), size(f2));
            break;
        case SIZE_LARGE_SMALL:
            comparator = (f1, f2) -> Long.compare(size(f2), size(f1));
            break;
        case COMPLETION_DATE_NEW_OLD:
            // NOTE(review): assumes getMedia() and its completion date are
            // non-null for items being sorted — confirm with callers.
            comparator = (f1, f2) -> f2.getMedia().getPlaybackCompletionDate()
                .compareTo(f1.getMedia().getPlaybackCompletionDate());
            break;
        default:
            throw new IllegalArgumentException("Permutor not implemented");
    }
    if (comparator != null) {
        // Wrap the chosen comparator into a sorting permutor.
        final Comparator<FeedItem> comparator2 = comparator;
        permutor = (queue) -> Collections.sort(queue, comparator2);
    }
    return permutor;
}
|
@Test
public void testEnsureNonNullPermutors() {
    // Every declared SortOrder must map to a usable permutor.
    for (SortOrder sortOrder : SortOrder.values()) {
        assertNotNull("The permutor for SortOrder " + sortOrder + " is unexpectedly null",
            FeedItemPermutors.getPermutor(sortOrder));
    }
}
|
/**
 * Splits the filter string on the criteria separator and parses each piece
 * into a {@link Criterion}; an empty filter yields an empty list.
 */
public static List<Criterion> parse(String filter) {
    return StreamSupport.stream(CRITERIA_SPLITTER.split(filter).spliterator(), false)
        .map(FilterParser::parseCriterion)
        .toList();
}
|
@Test
public void accept_empty_query() {
    // An empty filter string parses to no criteria rather than failing.
    List<Criterion> criterion = FilterParser.parse("");
    assertThat(criterion).isEmpty();
}
|
/**
 * Writes the key/value/timestamp into whichever store backs this wrapper.
 * Timestamped stores always report the write as latest; versioned stores
 * return their own put code.
 *
 * @throws IllegalStateException if neither store was initialized
 */
public long put(final K key, final V value, final long timestamp) {
    if (timestampedStore != null) {
        timestampedStore.put(key, ValueAndTimestamp.make(value, timestamp));
        return PUT_RETURN_CODE_IS_LATEST;
    }
    if (versionedStore != null) {
        return versionedStore.put(key, value, timestamp);
    }
    throw new IllegalStateException("KeyValueStoreWrapper must be initialized with either timestamped or versioned store");
}
|
@Test
public void shouldPutNullToTimestampedStore() {
    // A null value forwarded to a timestamped store is stored as plain null
    // (ValueAndTimestamp.make(null, ts) collapses to null).
    givenWrapperWithTimestampedStore();
    final long putReturnCode = wrapper.put(KEY, null, VALUE_AND_TIMESTAMP.timestamp());
    assertThat(putReturnCode, equalTo(PUT_RETURN_CODE_IS_LATEST));
    verify(timestampedStore).put(KEY, null);
}
|
/** Appends a new paragraph at the end of the note and returns it. */
public Paragraph addNewParagraph(AuthenticationInfo authenticationInfo) {
    return insertNewParagraph(paragraphs.size(), authenticationInfo);
}
|
@Test
void addParagraphWithEmptyReplNameTest() {
    // A freshly added paragraph starts with no text.
    Note note = new Note("test", "", interpreterFactory, interpreterSettingManager,
        paragraphJobListener, credentials, noteEventListener, zConf, noteParser);
    Paragraph p = note.addNewParagraph(AuthenticationInfo.ANONYMOUS);
    assertNull(p.getText());
}
|
/** Returns the dependencies originating from this class (delegated). */
@PublicAPI(usage = ACCESS)
public Set<Dependency> getDirectDependenciesFromSelf() {
    return javaClassDependencies.getDirectDependenciesFromClass();
}
|
@Test
public void direct_dependencies_from_self_by_annotation() {
    // Annotations on the class, its fields, constructors, methods and method
    // parameters — plus annotation members — must all appear as outgoing
    // dependencies (annotations carry no line number, hence 0).
    JavaClass javaClass = importClasses(ClassWithAnnotationDependencies.class)
        .get(ClassWithAnnotationDependencies.class);
    assertThat(javaClass.getDirectDependenciesFromSelf())
        .areAtLeastOne(annotationTypeDependency()
            .from(ClassWithAnnotationDependencies.class)
            .to(OnClass.class)
            .inLineNumber(0))
        .areAtLeastOne(annotationTypeDependency()
            .from(ClassWithAnnotationDependencies.class)
            .to(OnField.class)
            .inLineNumber(0))
        .areAtLeastOne(annotationTypeDependency()
            .from(ClassWithAnnotationDependencies.class)
            .to(OnConstructor.class)
            .inLineNumber(0))
        .areAtLeastOne(annotationTypeDependency()
            .from(ClassWithAnnotationDependencies.class)
            .to(OnMethod.class)
            .inLineNumber(0))
        .areAtLeastOne(annotationTypeDependency()
            .from(ClassWithAnnotationDependencies.class)
            .to(OnMethodParam.class)
            .inLineNumber(0))
        .areAtLeastOne(annotationMemberOfTypeDependency()
            .from(ClassWithAnnotationDependencies.class)
            .to(WithType.class)
            .inLineNumber(0))
        .areAtLeastOne(annotationMemberOfTypeDependency()
            .from(ClassWithAnnotationDependencies.class)
            .to(B.class)
            .inLineNumber(0));
}
|
/**
 * Forwards the nourishment update to the underlying giant model.
 *
 * @param nourishment the new nourishment state
 */
public void setNourishment(Nourishment nourishment) {
    this.giant.setNourishment(nourishment);
}
|
@Test
void testSetNourishment() {
    final var model = mock(GiantModel.class);
    final var view = mock(GiantView.class);
    final var controller = new GiantController(model, view);
    // The controller must not touch model or view during construction.
    verifyNoMoreInteractions(model, view);
    for (final var nourishment : Nourishment.values()) {
        controller.setNourishment(nourishment);
        // Each setter call is forwarded to the model only, never to the view.
        verify(model).setNourishment(nourishment);
        verifyNoMoreInteractions(view);
    }
    controller.getNourishment();
    //noinspection ResultOfMethodCallIgnored
    verify(model).getNourishment();
    verifyNoMoreInteractions(model, view);
}
|
/**
 * Starts the presenter: registers itself with the view, then opens the view.
 */
public void start() {
    // Wire the view back to this presenter before making it visible.
    view.setPresenter(this);
    view.open();
}
|
@Test
void wiring() {
    // start() must both register the presenter with the view and open the view.
    presenter.start();
    assertNotNull(stub.getPresenter());
    assertTrue(stub.isOpened());
}
|
/**
 * Parses a Set-Cookie header value into a {@code Cookie}.
 *
 * <p>The first name/value pair becomes the cookie itself; subsequent pairs
 * (separated by ';') are treated as attributes of that cookie. Scanning stops
 * at a ',' because multiple cookies per Set-Cookie header are deprecated.
 *
 * @param header the Set-Cookie header value; must not be {@code null}
 * @return the decoded cookie, or {@code null} if the header is empty or the
 *         name/value pair could not be parsed
 */
public Cookie decode(String header) {
    final int headerLen = checkNotNull(header, "header").length();
    if (headerLen == 0) {
        return null;
    }
    CookieBuilder cookieBuilder = null;
    // Scan name/value pairs until the header ends or a ',' terminates parsing.
    loop: for (int i = 0;;) {
        // Skip spaces and separators.
        for (;;) {
            if (i == headerLen) {
                break loop;
            }
            char c = header.charAt(i);
            if (c == ',') {
                // Having multiple cookies in a single Set-Cookie header is
                // deprecated, modern browsers only parse the first one
                break loop;
            } else if (c == '\t' || c == '\n' || c == 0x0b || c == '\f'
                    || c == '\r' || c == ' ' || c == ';') {
                i++;
                continue;
            }
            break;
        }
        // Locate the bounds of the next NAME[=VALUE] pair.
        int nameBegin = i;
        int nameEnd;
        int valueBegin;
        int valueEnd;
        for (;;) {
            char curChar = header.charAt(i);
            if (curChar == ';') {
                // NAME; (no value till ';')
                nameEnd = i;
                valueBegin = valueEnd = -1;
                break;
            } else if (curChar == '=') {
                // NAME=VALUE
                nameEnd = i;
                i++;
                if (i == headerLen) {
                    // NAME= (empty value, i.e. nothing after '=')
                    valueBegin = valueEnd = 0;
                    break;
                }
                valueBegin = i;
                // NAME=VALUE;
                int semiPos = header.indexOf(';', i);
                valueEnd = i = semiPos > 0 ? semiPos : headerLen;
                break;
            } else {
                i++;
            }
            if (i == headerLen) {
                // NAME (no value till the end of string)
                nameEnd = headerLen;
                valueBegin = valueEnd = -1;
                break;
            }
        }
        if (valueEnd > 0 && header.charAt(valueEnd - 1) == ',') {
            // old multiple cookies separator, skipping it
            valueEnd--;
        }
        if (cookieBuilder == null) {
            // cookie name-value pair
            DefaultCookie cookie = initCookie(header, nameBegin, nameEnd, valueBegin, valueEnd);
            if (cookie == null) {
                return null;
            }
            cookieBuilder = new CookieBuilder(cookie, header);
        } else {
            // cookie attribute
            cookieBuilder.appendAttribute(nameBegin, nameEnd, valueBegin, valueEnd);
        }
    }
    return cookieBuilder != null ? cookieBuilder.cookie() : null;
}
|
@Test
public void testDecodingComplexCookie() {
    // A cookie with many attributes, including RFC 2965-era ones (version,
    // commentURL, port, discard) that modern decoders accept but may ignore.
    String c1 = "myCookie=myValue;max-age=50;path=/apathsomewhere;"
        + "domain=.adomainsomewhere;secure;comment=this is a comment;version=2;"
        + "commentURL=\"http://aurl.com\";port='80,8080';discard;";
    Cookie cookie = ClientCookieDecoder.STRICT.decode(c1);
    assertNotNull(cookie);
    assertEquals("myValue", cookie.value());
    assertEquals(".adomainsomewhere", cookie.domain());
    assertEquals(50, cookie.maxAge());
    assertEquals("/apathsomewhere", cookie.path());
    assertTrue(cookie.isSecure());
}
|
/**
 * Builds the weather query URL for the location taken from the configuration.
 *
 * @return the query URL string
 * @throws Exception if building the query fails
 */
public String getQuery() throws Exception {
    return getQuery(weatherConfiguration.getLocation());
}
|
@Test
public void testZipQuery() throws Exception {
    // A zip-based configuration should produce a "?zip=..." query URL.
    WeatherConfiguration weatherConfiguration = new WeatherConfiguration();
    weatherConfiguration.setZip("2493CJ,nl");
    weatherConfiguration.setMode(WeatherMode.XML);
    weatherConfiguration.setLanguage(WeatherLanguage.nl);
    weatherConfiguration.setAppid(APPID);
    WeatherQuery weatherQuery = new WeatherQuery(weatherConfiguration);
    // NOTE(review): the geo-location provider is set after the query object is
    // created — presumably the query reads it lazily; confirm.
    weatherConfiguration.setGeoLocationProvider(geoLocationProvider);
    String query = weatherQuery.getQuery();
    assertThat(query, is(
        "http://api.openweathermap.org/data/2.5/weather?zip=2493CJ,nl&lang=nl&mode=xml&APPID=9162755b2efa555823cfe0451d7fff38"));
}
|
/**
 * Converts a raw scenario value into an instance of the class named by {@code className}.
 *
 * <p>The literal string {@code "null"} is mapped to {@code null}. Non-String inputs
 * must already be instances of the requested type. String inputs are parsed for the
 * natively supported types (String, BigDecimal/BigInteger, primitives and their
 * wrappers, java.time local date/time types, and enums).
 *
 * @param className   fully qualified name of the target type
 * @param cleanValue  the raw value ("null", a String to parse, or an instance)
 * @param classLoader loader used to resolve {@code className}
 * @return the converted value, possibly {@code null} for non-primitive targets
 * @throws IllegalArgumentException if the value cannot be converted or the type
 *                                  is not natively supported
 */
public static Object convertValue(String className, Object cleanValue, ClassLoader classLoader) {
    // "null" string is converted to null
    cleanValue = "null".equals(cleanValue) ? null : cleanValue;
    if (!isPrimitive(className) && cleanValue == null) {
        return null;
    }
    Class<?> clazz = loadClass(className, classLoader);
    // if it is not a String, it has to be an instance of the desired type
    if (!(cleanValue instanceof String)) {
        if (clazz.isInstance(cleanValue)) {
            return cleanValue;
        }
        throw new IllegalArgumentException("Object " + cleanValue
                + " is not a String or an instance of " + className);
    }
    String value = (String) cleanValue;
    try {
        if (clazz.isAssignableFrom(String.class)) {
            return value;
        } else if (clazz.isAssignableFrom(BigDecimal.class)) {
            return parseBigDecimal(value);
        } else if (clazz.isAssignableFrom(BigInteger.class)) {
            return parseBigInteger(value);
        } else if (clazz.isAssignableFrom(Boolean.class) || clazz.isAssignableFrom(boolean.class)) {
            return parseBoolean(value);
        } else if (clazz.isAssignableFrom(Byte.class) || clazz.isAssignableFrom(byte.class)) {
            return Byte.parseByte(value);
        } else if (clazz.isAssignableFrom(Character.class) || clazz.isAssignableFrom(char.class)) {
            return parseChar(value);
        } else if (clazz.isAssignableFrom(Double.class) || clazz.isAssignableFrom(double.class)) {
            return Double.parseDouble(cleanStringForNumberParsing(value));
        } else if (clazz.isAssignableFrom(Float.class) || clazz.isAssignableFrom(float.class)) {
            return Float.parseFloat(cleanStringForNumberParsing(value));
        } else if (clazz.isAssignableFrom(Integer.class) || clazz.isAssignableFrom(int.class)) {
            return Integer.parseInt(cleanStringForNumberParsing(value));
        } else if (clazz.isAssignableFrom(LocalDate.class)) {
            return LocalDate.parse(value, DateTimeFormatter.ISO_LOCAL_DATE);
        } else if (clazz.isAssignableFrom(LocalDateTime.class)) {
            return LocalDateTime.parse(value, DateTimeFormatter.ISO_LOCAL_DATE_TIME);
        } else if (clazz.isAssignableFrom(LocalTime.class)) {
            return LocalTime.parse(value, DateTimeFormatter.ISO_LOCAL_TIME);
        } else if (clazz.isAssignableFrom(Long.class) || clazz.isAssignableFrom(long.class)) {
            return Long.parseLong(cleanStringForNumberParsing(value));
        } else if (clazz.isAssignableFrom(Short.class) || clazz.isAssignableFrom(short.class)) {
            return Short.parseShort(cleanStringForNumberParsing(value));
        } else if (Enum.class.isAssignableFrom(clazz)) {
            // Raw-typed valueOf is unavoidable here: the concrete enum type is only
            // known at runtime.
            return Enum.valueOf(((Class<? extends Enum>) clazz), value);
        }
    } catch (RuntimeException e) {
        // Preserve the original exception as the cause so parsing failures stay debuggable.
        throw new IllegalArgumentException("Impossible to parse '"
                + value + "' as " + className + " ["
                + e.getMessage() + "]", e);
    }
    throw new IllegalArgumentException("Class " + className
            + " is not natively supported. Please use an MVEL expression" +
            " to use it.");
}
|
@Test
public void convertValue_manyCases() {
    // One representative conversion per natively supported type, including
    // grouping separators for numbers, d/f suffixes for floating point,
    // ISO-8601 date/time strings, enums, and a null passthrough.
    assertThat(convertValue(String.class.getCanonicalName(), "Test", classLoader)).isEqualTo("Test");
    assertThat(convertValue(BigDecimal.class.getCanonicalName(), "13.33", classLoader)).isEqualTo(BigDecimal.valueOf(13.33));
    assertThat(convertValue(BigDecimal.class.getCanonicalName(), "13", classLoader)).isEqualTo(BigDecimal.valueOf(13));
    assertThat(convertValue(BigDecimal.class.getCanonicalName(), "1,232,113.33", classLoader)).isEqualTo(BigDecimal.valueOf(1232113.33));
    assertThat(convertValue(BigDecimal.class.getCanonicalName(), "1232113.33", classLoader)).isEqualTo(BigDecimal.valueOf(1232113.33));
    assertThat(convertValue(BigInteger.class.getCanonicalName(), "13.33", classLoader)).isEqualTo(BigInteger.valueOf(13));
    assertThat(convertValue(BigInteger.class.getCanonicalName(), "13", classLoader)).isEqualTo(BigInteger.valueOf(13));
    assertThat(convertValue(BigInteger.class.getCanonicalName(), "1,232,113.33", classLoader)).isEqualTo(BigInteger.valueOf(1232113));
    assertThat(convertValue(BigInteger.class.getCanonicalName(), "1232113", classLoader)).isEqualTo(BigInteger.valueOf(1232113));
    assertThat(convertValue(boolean.class.getCanonicalName(), "false", classLoader)).isEqualTo(false);
    assertThat(convertValue(Boolean.class.getCanonicalName(), "true", classLoader)).isEqualTo(true);
    assertThat(convertValue(int.class.getCanonicalName(), "1", classLoader)).isEqualTo(1);
    assertThat(convertValue(Integer.class.getCanonicalName(), "1", classLoader)).isEqualTo(1);
    assertThat(convertValue(long.class.getCanonicalName(), "1", classLoader)).isEqualTo(1L);
    assertThat(convertValue(Long.class.getCanonicalName(), "1", classLoader)).isEqualTo(1L);
    assertThat(convertValue(double.class.getCanonicalName(), "1", classLoader)).isEqualTo(1.0d);
    assertThat(convertValue(Double.class.getCanonicalName(), "1", classLoader)).isEqualTo(1.0d);
    assertThat(convertValue(float.class.getCanonicalName(), "1", classLoader)).isEqualTo(1.0f);
    assertThat(convertValue(Float.class.getCanonicalName(), "1", classLoader)).isEqualTo(1.0f);
    assertThat(convertValue(double.class.getCanonicalName(), "1.0", classLoader)).isEqualTo(1.0d);
    assertThat(convertValue(Double.class.getCanonicalName(), "1.0", classLoader)).isEqualTo(1.0d);
    assertThat(convertValue(float.class.getCanonicalName(), "1.0", classLoader)).isEqualTo(1.0f);
    assertThat(convertValue(Float.class.getCanonicalName(), "1.0", classLoader)).isEqualTo(1.0f);
    assertThat(convertValue(double.class.getCanonicalName(), "1.0d", classLoader)).isEqualTo(1.0d);
    assertThat(convertValue(Double.class.getCanonicalName(), "1.0d", classLoader)).isEqualTo(1.0d);
    assertThat(convertValue(float.class.getCanonicalName(), "1.0f", classLoader)).isEqualTo(1.0f);
    assertThat(convertValue(Float.class.getCanonicalName(), "1.0f", classLoader)).isEqualTo(1.0f);
    assertThat(convertValue(double.class.getCanonicalName(), "1.0D", classLoader)).isEqualTo(1.0d);
    assertThat(convertValue(Double.class.getCanonicalName(), "1.0D", classLoader)).isEqualTo(1.0d);
    assertThat(convertValue(float.class.getCanonicalName(), "1.0F", classLoader)).isEqualTo(1.0f);
    assertThat(convertValue(Float.class.getCanonicalName(), "1.0F", classLoader)).isEqualTo(1.0f);
    assertThat(convertValue(char.class.getCanonicalName(), "a", classLoader)).isEqualTo('a');
    assertThat(convertValue(Character.class.getCanonicalName(), "a", classLoader)).isEqualTo('a');
    assertThat(convertValue(short.class.getCanonicalName(), "1", classLoader)).isEqualTo((short) 1);
    assertThat(convertValue(Short.class.getCanonicalName(), "1", classLoader)).isEqualTo((short) 1);
    assertThat(convertValue(byte.class.getCanonicalName(), Byte.toString("0".getBytes()[0]), classLoader)).isEqualTo("0".getBytes()[0]);
    assertThat(convertValue(Byte.class.getCanonicalName(), Byte.toString("0".getBytes()[0]), classLoader)).isEqualTo("0".getBytes()[0]);
    assertThat(convertValue(LocalDate.class.getCanonicalName(), "2018-05-20", classLoader)).isEqualTo(LocalDate.of(2018, 5, 20));
    assertThat(convertValue(LocalDateTime.class.getCanonicalName(), "2017-02-18T10:30", classLoader)).isEqualTo(LocalDateTime.of(2017, 2, 18, 10, 30));
    assertThat(convertValue(LocalDateTime.class.getCanonicalName(), "1982-04-04T00:20", classLoader)).isEqualTo(LocalDateTime.of(1982, 4, 4, 0, 20, 0));
    assertThat(convertValue(LocalDateTime.class.getCanonicalName(), "1982-10-13T02:09:00.999999999", classLoader)).isEqualTo(LocalDateTime.of(1982, 10, 13, 2, 9, 0, 999999999));
    assertThat(convertValue(LocalTime.class.getCanonicalName(), "01:09:00", classLoader)).isEqualTo(LocalTime.of(1, 9, 0));
    assertThat(convertValue(LocalTime.class.getCanonicalName(), "04:59:07.009999999", classLoader)).isEqualTo(LocalTime.of(4, 59, 07, 9999999));
    assertThat(convertValue(LocalTime.class.getCanonicalName(), "23:45", classLoader)).isEqualTo(LocalTime.of(23, 45));
    assertThat(convertValue(LocalTime.class.getCanonicalName(), "01:09:00", classLoader)).isEqualTo(LocalTime.of(1, 9, 0));
    assertThat(convertValue(LocalTime.class.getCanonicalName(), "04:59:07.009999999", classLoader)).isEqualTo(LocalTime.of(4, 59, 07, 9999999));
    assertThat(convertValue(EnumTest.class.getCanonicalName(), "FIRST", classLoader)).isEqualTo(EnumTest.FIRST);
    assertThat(convertValue(Float.class.getCanonicalName(), null, classLoader)).isNull();
}
|
/**
 * Creates a new kubevirt node, generating integration/tunnel bridge device IDs
 * when the node does not already carry them, and rejecting duplicates.
 *
 * @param node the node to create; must not be null
 */
@Override
public void createNode(KubevirtNode node) {
    checkNotNull(node, ERR_NULL_NODE);

    // Assign a generated integration-bridge DPID when none was provided.
    KubevirtNode intNode = node;
    if (node.intgBridge() == null) {
        String deviceIdStr = genDpidFromName(INTEGRATION_BRIDGE + "-" + node.hostname());
        checkNotNull(deviceIdStr, ERR_NULL_DEVICE_ID);
        intNode = node.updateIntgBridge(DeviceId.deviceId(deviceIdStr));
    }
    checkArgument(!hasIntgBridge(intNode.intgBridge(), intNode.hostname()),
            NOT_DUPLICATED_MSG, intNode.intgBridge());

    // Likewise for the tunnel bridge.
    KubevirtNode tunNode = intNode;
    if (node.tunBridge() == null) {
        String deviceIdStr = genDpidFromName(TUNNEL_BRIDGE + "-" + node.hostname());
        checkNotNull(deviceIdStr, ERR_NULL_DEVICE_ID);
        tunNode = intNode.updateTunBridge(DeviceId.deviceId(deviceIdStr));
    }
    checkArgument(!hasTunBridge(tunNode.tunBridge(), tunNode.hostname()),
            NOT_DUPLICATED_MSG, tunNode.tunBridge());

    nodeStore.createNode(tunNode);
    log.info(String.format(MSG_NODE, tunNode.hostname(), MSG_CREATED));
}
|
@Test(expected = NullPointerException.class)
public void testCreateNullNode() {
    // Passing null must be rejected by the checkNotNull precondition.
    target.createNode(null);
}
|
/**
 * Generates a legal index name from the base string, replacing illegal
 * characters and applying the length limit and time-stamp format.
 *
 * @param baseString raw name to sanitize
 * @return the sanitized, time-stamped index name
 */
static String generateIndexName(String baseString) {
    return generateResourceId(
        baseString,
        ILLEGAL_INDEX_NAME_CHARS,
        REPLACE_INDEX_NAME_CHAR,
        MAX_INDEX_NAME_LENGTH,
        TIME_FORMAT);
}
|
@Test
public void testGenerateIndexNameShouldReplaceNullCharacter() {
    // Embedded NUL characters must be replaced, and the result is lower-cased
    // and suffixed with a date-time-based identifier.
    String testBaseString = "Test\0DB\0Name";
    String actual = generateIndexName(testBaseString);
    assertThat(actual).matches("test-db-name-\\d{8}-\\d{6}-\\d{6}");
}
|
/**
 * Resolves the Sentinel resource name for the invoked Dubbo interface.
 *
 * @param invoker the Dubbo invoker
 * @param prefix  resource-name prefix to prepend
 * @return the interface-level resource name
 */
@Override
String getInterfaceName(Invoker invoker, String prefix) {
    return DubboUtils.getInterfaceName(invoker, prefix);
}
|
@Test
public void testInterfaceLevelFollowControlAsync() throws InterruptedException {
    Invoker invoker = DubboTestUtil.getDefaultMockInvoker();
    Invocation invocation = DubboTestUtil.getDefaultMockInvocationOne();
    // Mark the invocation as async so the async flow-control path is exercised.
    when(invocation.getAttachment(ASYNC_KEY)).thenReturn(Boolean.TRUE.toString());
    initFlowRule(DubboUtils.getInterfaceName(invoker));
    Result result1 = invokeDubboRpc(false, invoker, invocation);
    assertEquals("normal", result1.getValue());
    // should fallback because the qps > 1
    Result result2 = invokeDubboRpc(false, invoker, invocation);
    assertEquals("fallback", result2.getValue());
    // sleeping 1000 ms to reset qps
    Thread.sleep(1000);
    Result result3 = invokeDubboRpc(false, invoker, invocation);
    assertEquals("normal", result3.getValue());
    verifyInvocationStructureForCallFinish(invoker, invocation);
}
|
/**
 * Builds the update requirements for replacing a view: the only precondition is
 * that the view's UUID is unchanged.
 *
 * @param base            current view metadata; must not be null
 * @param metadataUpdates updates being applied; must not be null
 * @return the list of requirements to validate before committing
 */
public static List<UpdateRequirement> forReplaceView(
    ViewMetadata base, List<MetadataUpdate> metadataUpdates) {
  Preconditions.checkArgument(null != base, "Invalid view metadata: null");
  Preconditions.checkArgument(null != metadataUpdates, "Invalid metadata updates: null");

  Builder builder = new Builder(null, false);
  builder.require(new UpdateRequirement.AssertViewUUID(base.uuid()));
  for (MetadataUpdate update : metadataUpdates) {
    builder.update(update);
  }
  return builder.build();
}
|
@Test
public void setCurrentViewVersion() {
    // Replacing a view with several AddViewVersion updates plus a
    // SetCurrentViewVersion must produce exactly one requirement: the
    // view-UUID assertion. Version changes themselves need no precondition.
    List<UpdateRequirement> requirements =
        UpdateRequirements.forReplaceView(
            viewMetadata,
            ImmutableList.of(
                new MetadataUpdate.AddViewVersion(
                    ImmutableViewVersion.builder()
                        .versionId(3)
                        .schemaId(1)
                        .timestampMillis(System.currentTimeMillis())
                        .defaultNamespace(Namespace.of("ns"))
                        .build()),
                new MetadataUpdate.AddViewVersion(
                    ImmutableViewVersion.builder()
                        .versionId(2)
                        .schemaId(1)
                        .timestampMillis(System.currentTimeMillis())
                        .defaultNamespace(Namespace.of("ns"))
                        .build()),
                new MetadataUpdate.AddViewVersion(
                    ImmutableViewVersion.builder()
                        .versionId(1)
                        .schemaId(1)
                        .timestampMillis(System.currentTimeMillis())
                        .defaultNamespace(Namespace.of("ns"))
                        .build()),
                new MetadataUpdate.SetCurrentViewVersion(2)));
    requirements.forEach(req -> req.validate(viewMetadata));
    assertThat(requirements)
        .hasSize(1)
        .hasOnlyElementsOfTypes(UpdateRequirement.AssertViewUUID.class);
    assertViewUUID(requirements);
}
|
/**
 * Returns the configured rate-limit quotas.
 *
 * @return the list of limit quotas
 */
public List<LimitQuota> getRateLimit() {
    return rateLimit;
}
|
@Test
public void testRateLimit() {
    List<LimitQuota> limitQuotaList = limitConfig.getRateLimit();
    // JUnit's assertEquals takes the expected value first; the original call
    // had the arguments swapped, which garbles the failure message.
    Assert.assertEquals(2, limitQuotaList.size());
}
|
/**
 * Handles an extension deletion event: if this watcher is still active and the
 * delete matcher accepts the extension, the extension's name is enqueued for
 * immediate reconciliation.
 *
 * @param extension the deleted extension
 */
@Override
public void onDelete(Extension extension) {
    // Ignore events after disposal or those the delete matcher rejects.
    if (isDisposed() || !matchers.onDeleteMatcher().match(extension)) {
        return;
    }
    // TODO filter the event
    queue.addImmediately(new Request(extension.getMetadata().getName()));
}
|
@Test
void shouldDeleteExtensionWhenDeletePredicateAlwaysFalse() {
    // With a default matcher that rejects the event, nothing may be enqueued.
    var type = GroupVersionKind.fromAPIVersionAndKind("v1alpha1", "User");
    when(matchers.onDeleteMatcher()).thenReturn(
        DefaultExtensionMatcher.builder(client, type).build());
    watcher.onDelete(createFake("fake-name"));
    // The matcher is consulted exactly once and the queue stays untouched.
    verify(matchers, times(1)).onDeleteMatcher();
    verify(queue, times(0)).add(any());
    verify(queue, times(0)).addImmediately(any());
}
|
/**
 * Returns the generated payload string, logging its configuration and whether
 * it relies on the callback server.
 *
 * @return the payload string
 */
public final String getPayload() {
    final String callbackUsage = this.attributes.getUsesCallbackServer() ? "does" : "does not";
    logger.atInfo().log(
        "%s generated payload `%s`, %s use the callback server",
        this.config, this.payload, callbackUsage);
    return this.payload;
}
|
@Test
public void getPayload_returnsPayloadString() {
    // The getter must return the exact string the payload was constructed with.
    Validator validator = (unused) -> false;
    Payload payload = new Payload("my-payload", validator, PAYLOAD_ATTRIBUTES, CONFIG);
    assertEquals("my-payload", payload.getPayload());
}
|
/**
 * Retrieves all delay measurements for the given MEP by delegating to the
 * device's {@code SoamDmProgrammable} behaviour.
 *
 * @param mdName maintenance domain id
 * @param maName maintenance association id
 * @param mepId  MEP id
 * @return the delay measurement entries reported by the device
 * @throws CfmConfigException  if the MEP or its device does not exist, or the
 *                             device does not support SoamDmProgrammable
 * @throws SoamConfigException if the device-side retrieval fails
 */
@Override
public Collection<DelayMeasurementEntry> getAllDms(
        MdId mdName, MaIdShort maName, MepId mepId)
        throws CfmConfigException, SoamConfigException {
    MepEntry mep = cfmMepService.getMep(mdName, maName, mepId);
    if (mep == null || mep.deviceId() == null) {
        throw new CfmConfigException("MEP :"
                + mdName + "/" + maName + "/" + mepId + " does not exist");
    } else if (deviceService.getDevice(mep.deviceId()) == null) {
        throw new CfmConfigException("Device " + mep.deviceId() + " from MEP :"
                + mdName + "/" + maName + "/" + mepId + " does not exist");
    } else if (!deviceService.getDevice(mep.deviceId()).is(SoamDmProgrammable.class)) {
        throw new CfmConfigException("Device " + mep.deviceId() + " from MEP :"
                + mdName + "/" + maName + "/" + mepId +
                " does not implement SoamDmProgrammable");
    }
    log.debug("Retrieving DMs for MD {}, MA {}, MEP {} on Device {}",
            mdName, maName, mepId, mep.deviceId());
    // NOTE(review): deviceService.getDevice(...) is called three times for the
    // same id; consider hoisting the lookup into a local variable.
    return deviceService.getDevice(mep.deviceId())
            .as(SoamDmProgrammable.class).getAllDms(mdName, maName, mepId);
}
|
@Test
public void testGetAllDms() throws CfmConfigException, SoamConfigException {
    // Wire up EasyMock expectations: the device, MEP and driver lookups all
    // resolve, so the programmable behaviour path is exercised end to end.
    expect(deviceService.getDevice(DEVICE_ID1)).andReturn(device1).anyTimes();
    replay(deviceService);
    expect(mepService.getMep(MDNAME1, MANAME1, MEPID1)).andReturn(mep1).anyTimes();
    replay(mepService);
    expect(driverService.getDriver(DEVICE_ID1)).andReturn(testDriver).anyTimes();
    replay(driverService);
    Collection<DelayMeasurementEntry> dmEntries =
        soamManager.getAllDms(MDNAME1, MANAME1, MEPID1);
    assertNotNull(dmEntries);
    assertEquals(1, dmEntries.size());
}
|
/**
 * Creates a batch execution environment from the pipeline options, defaulting
 * the files-to-stage list to empty when not set.
 *
 * @param options the Flink pipeline options
 * @return the configured batch execution environment
 */
public static ExecutionEnvironment createBatchExecutionEnvironment(FlinkPipelineOptions options) {
    return createBatchExecutionEnvironment(
        options,
        MoreObjects.firstNonNull(options.getFilesToStage(), Collections.emptyList()),
        options.getFlinkConfDir());
}
|
@Test
public void shouldInferParallelismFromEnvironmentBatch() throws IOException {
    // With no explicit parallelism, the value must be read from the extracted
    // Flink configuration directory (which sets parallelism to 23).
    String flinkConfDir = extractFlinkConfig();
    FlinkPipelineOptions options = getDefaultPipelineOptions();
    options.setRunner(TestFlinkRunner.class);
    options.setFlinkMaster("host:80");
    ExecutionEnvironment bev =
        FlinkExecutionEnvironments.createBatchExecutionEnvironment(
            options, Collections.emptyList(), flinkConfDir);
    assertThat(options.getParallelism(), is(23));
    assertThat(bev.getParallelism(), is(23));
}
|
/**
 * CLI entry point for the Global Policy Generator: "-format-policy-store"
 * formats the policy state store, unrecognized flags print usage, and with at
 * most one remaining argument the GPG daemon is started.
 */
public static void main(String[] argv) {
    try {
        YarnConfiguration conf = new YarnConfiguration();
        GenericOptionsParser hParser = new GenericOptionsParser(conf, argv);
        argv = hParser.getRemainingArgs();
        // NOTE(review): a lone "-format-policy-store" argument (length == 1)
        // falls through to startGPG below; confirm whether "> 1" should be ">= 1".
        if (argv.length > 1) {
            if (argv[0].equals("-format-policy-store")) {
                handFormatPolicyStateStore(conf);
            } else {
                printUsage(System.err);
            }
        } else {
            startGPG(argv, conf);
        }
    } catch (Throwable t) {
        // Any startup failure is fatal; log it and exit with an error status.
        LOG.error("Error starting global policy generator", t);
        System.exit(-1);
    }
}
|
@Test
public void testGPGCLI() {
    // Capture stdout/stderr so the usage text printed by main() can be asserted.
    ByteArrayOutputStream dataOut = new ByteArrayOutputStream();
    ByteArrayOutputStream dataErr = new ByteArrayOutputStream();
    PrintStream originalOut = System.out;
    PrintStream originalErr = System.err;
    System.setOut(new PrintStream(dataOut));
    System.setErr(new PrintStream(dataErr));
    try {
        GlobalPolicyGenerator.main(new String[]{"-help", "-format-policy-store"});
        assertTrue(dataErr.toString().contains(
            "Usage: yarn gpg [-format-policy-store]"));
    } finally {
        // Restore the global streams so later tests (and the test runner's own
        // output) are not silently redirected.
        System.setOut(originalOut);
        System.setErr(originalErr);
    }
}
|
/**
 * Restores a batch of changelog records into the versioned store via the
 * restore write buffer, dropping records older than the grace period.
 *
 * @param records the changelog records to restore, in offset order
 */
void restoreBatch(final Collection<ConsumerRecord<byte[], byte[]>> records) {
    // compute the observed stream time at the end of the restore batch, in order to speed up
    // restore by not bothering to read from/write to segments which will have expired by the
    // time the restoration process is complete.
    long endOfBatchStreamTime = observedStreamTime;
    for (final ConsumerRecord<byte[], byte[]> record : records) {
        endOfBatchStreamTime = Math.max(endOfBatchStreamTime, record.timestamp());
    }
    final VersionedStoreClient<?> restoreClient = restoreWriteBuffer.getClient();
    // note: there is increased risk for hitting an out-of-memory during this restore loop,
    // compared to for non-versioned key-value stores, because this versioned store
    // implementation stores multiple records (for the same key) together in a single RocksDB
    // "segment" entry -- restoring a single changelog entry could require loading multiple
    // records into memory. how high this memory amplification will be is very much dependent
    // on the specific workload and the value of the "segment interval" parameter.
    synchronized (position) {
        for (final ConsumerRecord<byte[], byte[]> record : records) {
            if (record.timestamp() < observedStreamTime - gracePeriod) {
                // record is older than grace period and was therefore never written to the store
                continue;
            }
            // advance observed stream time as usual, for use in deciding whether records have
            // exceeded the store's grace period and should be dropped.
            observedStreamTime = Math.max(observedStreamTime, record.timestamp());
            ChangelogRecordDeserializationHelper.applyChecksAndUpdatePosition(
                record,
                consistencyEnabled,
                position
            );
            // put records to write buffer
            doPut(
                restoreClient,
                endOfBatchStreamTime,
                new Bytes(record.key()),
                record.value(),
                record.timestamp()
            );
        }
        try {
            // Flush once per batch rather than per record.
            restoreWriteBuffer.flush();
        } catch (final RocksDBException e) {
            throw new ProcessorStateException("Error restoring batch to store " + name, e);
        }
    }
}
|
@Test
public void shouldRestoreWithNulls() {
    // Interleave tombstones (null values) with real values across the segment
    // boundary, then verify that point-in-time lookups see exactly the value
    // (or null) that was live at each queried timestamp.
    final List<DataRecord> records = new ArrayList<>();
    records.add(new DataRecord("k", null, SEGMENT_INTERVAL + 20));
    records.add(new DataRecord("k", null, SEGMENT_INTERVAL - 1));
    records.add(new DataRecord("k", null, SEGMENT_INTERVAL + 1));
    records.add(new DataRecord("k", null, SEGMENT_INTERVAL - 10));
    records.add(new DataRecord("k", null, SEGMENT_INTERVAL + 10));
    records.add(new DataRecord("k", "vp5", SEGMENT_INTERVAL + 5));
    records.add(new DataRecord("k", "vn5", SEGMENT_INTERVAL - 5));
    records.add(new DataRecord("k", "vn6", SEGMENT_INTERVAL - 6));
    store.restoreBatch(getChangelogRecords(records));
    // Latest value is a tombstone.
    verifyGetNullFromStore("k");
    verifyTimestampedGetNullFromStore("k", SEGMENT_INTERVAL + 30);
    verifyTimestampedGetNullFromStore("k", SEGMENT_INTERVAL + 15);
    verifyTimestampedGetValueFromStore("k", SEGMENT_INTERVAL + 6, "vp5", SEGMENT_INTERVAL + 5, SEGMENT_INTERVAL + 10);
    verifyTimestampedGetNullFromStore("k", SEGMENT_INTERVAL + 2);
    verifyTimestampedGetNullFromStore("k", SEGMENT_INTERVAL);
    verifyTimestampedGetNullFromStore("k", SEGMENT_INTERVAL - 1);
    verifyTimestampedGetValueFromStore("k", SEGMENT_INTERVAL - 5, "vn5", SEGMENT_INTERVAL - 5, SEGMENT_INTERVAL - 1);
    verifyTimestampedGetValueFromStore("k", SEGMENT_INTERVAL - 6, "vn6", SEGMENT_INTERVAL - 6, SEGMENT_INTERVAL - 5);
    verifyTimestampedGetNullFromStore("k", SEGMENT_INTERVAL - 8);
}
|
/**
 * Creates an IP prefix from an IPv4 address given as an integer and a prefix length.
 *
 * @param address      the IPv4 address as a 32-bit integer
 * @param prefixLength the prefix length in bits
 * @return the IP prefix
 */
public static IpPrefix valueOf(int address, int prefixLength) {
    return new IpPrefix(IpAddress.valueOf(address), prefixLength);
}
|
@Test(expected = NullPointerException.class)
public void testInvalidValueOfNullArrayIPv6() {
    // A null byte array must be rejected with a NullPointerException.
    final byte[] value = null;
    IpPrefix.valueOf(IpAddress.Version.INET6, value, 120);
}
|
/**
 * Runs the given function inside a transaction using the default isolation level.
 *
 * @param func the transactional work to execute
 * @return this Db instance
 * @throws SQLException if the transaction fails
 */
public Db tx(VoidFunc1<Db> func) throws SQLException {
    return tx(null, func);
}
|
@Test
@Disabled
public void txTest() throws SQLException {
    // Disabled: requires a live database. Exercises insert/update/delete
    // inside a single transaction.
    Db.use().tx(db -> {
        db.insert(Entity.create("user").set("name", "unitTestUser2"));
        db.update(Entity.create().set("age", 79), Entity.create("user").set("name", "unitTestUser2"));
        db.del("user", "name", "unitTestUser2");
    });
}
|
/**
 * Splits a registry address string into individual addresses and parses each
 * into a URL with the given default parameters applied.
 *
 * @param address  one or more addresses separated by the registry split pattern
 * @param defaults default parameters merged into each parsed URL
 * @return the parsed URLs, one per address
 * @throws IllegalArgumentException if {@code address} is empty
 */
public static List<URL> parseURLs(String address, Map<String, String> defaults) {
    if (StringUtils.isEmpty(address)) {
        throw new IllegalArgumentException("Address is not allowed to be empty, please re-enter.");
    }
    String[] addresses = REGISTRY_SPLIT_PATTERN.split(address);
    // NOTE(review): Pattern.split never returns null and yields at least one
    // element for non-empty input, so this branch appears unreachable.
    if (addresses == null || addresses.length == 0) {
        throw new IllegalArgumentException(
                "Addresses is not allowed to be empty, please re-enter."); // here won't be empty
    }
    List<URL> registries = new ArrayList<>();
    for (String addr : addresses) {
        registries.add(parseURL(addr, defaults));
    }
    return registries;
}
|
@Test
void testParseUrls() {
    // Three '|'-separated hosts with shared default parameters: each parsed URL
    // gets the default port appended to its host.
    String addresses = "192.168.0.1|192.168.0.2|192.168.0.3";
    Map<String, String> parameters = new HashMap<String, String>();
    parameters.put("username", "root");
    parameters.put("password", "alibaba");
    parameters.put("port", "10000");
    parameters.put("protocol", "dubbo");
    List<URL> urls = UrlUtils.parseURLs(addresses, parameters);
    assertEquals("192.168.0.1" + ":10000", urls.get(0).getAddress());
    assertEquals("192.168.0.2" + ":10000", urls.get(1).getAddress());
}
|
/**
 * Deletes the selector data permission described by the request body.
 *
 * @param dataPermissionDTO the data permission to remove; validated, non-null
 * @return a success result carrying the number of deleted rows
 */
@DeleteMapping("/selector")
public ShenyuAdminResult deleteSelector(@RequestBody @Valid @NotNull final DataPermissionDTO dataPermissionDTO) {
    return ShenyuAdminResult.success(ShenyuResultMessage.DELETE_SUCCESS, dataPermissionService.deleteSelector(dataPermissionDTO));
}
|
@Test
public void deleteSelector() throws Exception {
    // The controller should report DELETE_SUCCESS and echo the service's row count.
    DataPermissionDTO dataPermissionDTO = new DataPermissionDTO();
    dataPermissionDTO.setDataId("testDataId");
    dataPermissionDTO.setUserId("testUserId");
    given(this.dataPermissionService.deleteSelector(dataPermissionDTO)).willReturn(1);
    this.mockMvc.perform(MockMvcRequestBuilders.delete("/data-permission/selector")
            .contentType(MediaType.APPLICATION_JSON)
            .content(GsonUtils.getInstance().toJson(dataPermissionDTO)))
            .andExpect(status().isOk())
            .andExpect(jsonPath("$.message", is(ShenyuResultMessage.DELETE_SUCCESS)))
            .andExpect(jsonPath("$.data", is(1)))
            .andReturn();
}
|
/**
 * Parses a hexadecimal string into its byte representation.
 *
 * @param string the hex-encoded string
 * @return the decoded bytes
 */
public static byte[] parseHex(String string) {
    return hexFormat.parseHex(string);
}
|
@Test
@Parameters(method = "hexStringToBytesVectors")
public void parseHexValidUppercase(String hexString, byte[] expectedBytes) {
    // Each parameterized vector pairs a hex string with its expected bytes.
    byte[] actual = ByteUtils.parseHex(hexString);
    assertArrayEquals("incorrect hex formatted string", expectedBytes, actual);
}
|
/**
 * Fails when the entity has background tasks pending or in progress, since
 * visibility must not change while tasks are running.
 *
 * @throws IllegalStateException if any pending or in-progress task exists
 */
@VisibleForTesting
void checkNoPendingTasks(DbSession dbSession, EntityDto entityDto) {
    //This check likely can be removed when we remove the column 'private' from components table in SONAR-20126.
    checkState(countPendingTask(dbSession, entityDto.getKey()) == 0, "Component visibility can't be changed as long as it has background task(s) pending or in progress");
}
|
@Test
void checkNoPendingTasks_whenEntityNotFound_throwsIae() {
    // When the entity cannot be resolved, the pending-task count lookup fails
    // with an IllegalStateException naming the missing key.
    EntityDto entityDto = mockEntityDto();
    when(dbClient.entityDao().selectByKey(dbSession, entityDto.getKey())).thenReturn(Optional.empty());
    assertThatIllegalStateException()
        .isThrownBy(() -> visibilityService.checkNoPendingTasks(dbSession, entityDto))
        .withMessage("Can't find entity entityKey");
}
|
/**
 * Updates an existing configuration entry after validating that it exists and
 * that its key remains unique.
 *
 * @param updateReqVO the update request
 */
@Override
public void updateConfig(ConfigSaveReqVO updateReqVO) {
    // Validate that the config being updated exists
    validateConfigExists(updateReqVO.getId());
    // Validate that the config key is unique
    validateConfigKeyUnique(updateReqVO.getId(), updateReqVO.getKey());
    // Update the config record
    ConfigDO updateObj = ConfigConvert.INSTANCE.convert(updateReqVO);
    configMapper.updateById(updateObj);
}
|
@Test
public void testUpdateConfig_success() {
    // Mock data: insert an existing record first
    ConfigDO dbConfig = randomConfigDO();
    configMapper.insert(dbConfig);
    // Prepare the update request, targeting the inserted record's id
    ConfigSaveReqVO reqVO = randomPojo(ConfigSaveReqVO.class, o -> {
        o.setId(dbConfig.getId()); // id of the record to update
    });
    // Invoke the update
    configService.updateConfig(reqVO);
    // Verify the record was updated correctly by re-reading it
    ConfigDO config = configMapper.selectById(reqVO.getId());
    assertPojoEquals(reqVO, config);
}
|
/**
 * Resolves a possibly-relative URL against a base URL.
 *
 * <p>Handles protocol-relative ("//..."), domain-relative ("/..."),
 * query/fragment-relative ("?...", "#...") and segment-relative references.
 * URLs already containing "://" are returned unchanged.
 *
 * @param baseURL     the absolute URL to resolve against
 * @param relativeURL the URL to make absolute
 * @return the absolute URL
 */
public static String toAbsolute(String baseURL, String relativeURL) {
    String relURL = relativeURL;
    // Relative to protocol
    if (relURL.startsWith("//")) {
        return StringUtils.substringBefore(baseURL, "//") + "//"
                + StringUtils.substringAfter(relURL, "//");
    }
    // Relative to domain name
    if (relURL.startsWith("/")) {
        return getRoot(baseURL) + relURL;
    }
    // Relative to full page URL minus ? or #
    if (relURL.startsWith("?") || relURL.startsWith("#")) {
        // this is a relative url and should have the full page base
        return baseURL.replaceFirst("(.*?)([\\?\\#])(.*)", "$1") + relURL;
    }
    // Relative to last directory/segment
    if (!relURL.contains("://")) {
        String base = baseURL.replaceFirst("(.*?)([\\?\\#])(.*)", "$1");
        // Strip the last path segment, but keep "scheme://host" intact.
        if (StringUtils.countMatches(base, '/') > 2) {
            base = base.replaceFirst("(.*/)(.*)", "$1");
        }
        if (base.endsWith("/")) {
            // This is a URL relative to the last URL segment
            relURL = base + relURL;
        } else {
            relURL = base + "/" + relURL;
        }
    }
    // Not detected as relative, so return as is
    return relURL;
}
|
@Test
public void testToAbsoluteRelativeToLastDirectory() {
    // A bare file name resolves against the last directory of the base URL.
    s = "g.html";
    t = "https://www.example.com/a/b/g.html";
    assertEquals(t, HttpURL.toAbsolute(absURL, s));
}
|
/**
 * Computes the number of sectors needed to hold {@code requestSize} bytes of
 * data plus the estimated HFS metadata overhead (allocation bitmap, extents
 * overflow, journal, catalog, hot files, and quota files).
 *
 * @param requestSize requested data size in bytes, as a decimal string
 * @param sectorSize  sector size in bytes, as a decimal string
 * @return the required number of sectors, rounded up
 */
public static long getNumSector(String requestSize, String sectorSize) {
    // Primitive doubles avoid the pointless autoboxing of the boxed-Double version.
    double memSize = Double.parseDouble(requestSize);
    double sectorBytes = Double.parseDouble(sectorSize);
    double nSectors = memSize / sectorBytes;
    double memSizeKB = memSize / 1024;
    double memSizeGB = memSize / (1024 * 1024 * 1024);
    double memSize100GB = memSizeGB / 100;
    // allocation bitmap file: one bit per sector
    double allocBitmapSize = nSectors / 8;
    // extend overflow file: 4MB, plus 4MB per 100GB
    double extOverflowFileSize = memSize100GB * 1024 * 1024 * 4;
    // journal file: 8MB, plus 8MB per 100GB
    double journalFileSize = memSize100GB * 1024 * 1024 * 8;
    // catalog file: 10bytes per KB
    double catalogFileSize = memSizeKB * 10;
    // hot files: 5bytes per KB
    double hotFileSize = memSizeKB * 5;
    // quota users file and quota groups file
    double quotaUsersFileSize = (memSizeGB * 256 + 1) * 64;
    double quotaGroupsFileSize = (memSizeGB * 32 + 1) * 64;
    double metadataSize = allocBitmapSize + extOverflowFileSize + journalFileSize
            + catalogFileSize + hotFileSize + quotaUsersFileSize + quotaGroupsFileSize;
    double allocSize = memSize + metadataSize;
    double numSectors = allocSize / sectorBytes;
    // Round up. (The leftover debug System.out.println was removed.)
    return (long) numSectors + 1;
}
|
@Test
public void getSectorTest512GB() {
    // 512GB volume with 512-byte sectors: expected count includes metadata overhead.
    String testRequestSize = "549755813888"; // 512GB
    String testSectorSize = "512";
    long result = HFSUtils.getNumSector(testRequestSize, testSectorSize);
    assertEquals(1089876870L, result);
}
|
/**
 * Delegates {@code deleteAll} to the wrapped cache writer, recording the
 * call latency in {@code deleteAllProbe}.
 *
 * @param collection keys to delete
 * @throws CacheWriterException if the delegate fails
 */
@Override
public void deleteAll(Collection<?> collection) throws CacheWriterException {
    long startNanos = Timer.nanos();
    try {
        delegate.get().deleteAll(collection);
    } finally {
        // Record elapsed time even when the delegate throws.
        deleteAllProbe.recordValue(Timer.nanosElapsed(startNanos));
    }
}
|
@Test
public void deleteAll() {
    // Even an empty key collection must be forwarded to the delegate,
    // and the latency probe must record exactly one invocation.
    Collection<Object> keys = new LinkedList<>();
    cacheWriter.deleteAll(keys);
    verify(delegate).deleteAll(keys);
    assertProbeCalledOnce("deleteAll");
}
|
/** Returns the entry count by blocking on the async counter map's size operation. */
@Override
public int size() {
    return complete(asyncCounterMap.size());
}
|
@Test
public void testConstruction() {
    // A freshly created atomic counter map must start empty.
    assertThat(atomicCounterMap.size(), is(0));
}
|
/** Returns the name this instance was configured with. */
@Override
public String getName() {
    return _name;
}
|
@Test
public void testSha512TransformFunction() {
    // sha512(bytesColumn) must resolve to the scalar-function wrapper.
    ExpressionContext expression = RequestContextUtils.getExpression(String.format("sha512(%s)", BYTES_SV_COLUMN));
    TransformFunction transformFunction = TransformFunctionFactory.get(expression, _dataSourceMap);
    assertTrue(transformFunction instanceof ScalarTransformFunctionWrapper);
    assertEquals(transformFunction.getName(), "sha512");
    // Expected output: hex-encoded SHA-512 digest of each input byte array.
    String[] expectedValues = new String[NUM_ROWS];
    for (int i = 0; i < NUM_ROWS; i++) {
        expectedValues[i] = DigestUtils.sha512Hex(_bytesSVValues[i]);
    }
    testTransformFunction(transformFunction, expectedValues);
}
|
/**
 * SQL scalar function {@code bing_tile_children}: decodes the encoded tile,
 * collects its children and returns them as a BIGINT block of encoded tiles.
 * Invalid tiles surface as {@code INVALID_FUNCTION_ARGUMENT}.
 */
@Description("Return the children for a Bing tile")
@ScalarFunction("bing_tile_children")
@SqlType("array(" + BingTileType.NAME + ")")
public static Block bingTileChildren(@SqlType(BingTileType.NAME) long input)
{
    BingTile parent = BingTile.decode(input);
    try {
        List<BingTile> children = parent.findChildren();
        BlockBuilder builder = BIGINT.createBlockBuilder(null, children.size());
        for (BingTile child : children) {
            BIGINT.writeLong(builder, child.encode());
        }
        return builder.build();
    }
    catch (IllegalArgumentException e) {
        // Translate domain errors into the engine's standard error code.
        throw new PrestoException(INVALID_FUNCTION_ARGUMENT, e.getMessage(), e);
    }
}
|
@Test
public void testBingTileChildren()
{
    // Immediate children (no explicit target zoom).
    assertBingTileChildren("0", OptionalInt.empty(), ImmutableList.of("00", "01", "02", "03"));
    // All descendants at an explicit target zoom of 3.
    assertBingTileChildren("0", OptionalInt.of(3), ImmutableList.of(
        "000", "001", "002", "003",
        "010", "011", "012", "013",
        "020", "021", "022", "023",
        "030", "031", "032", "033"));
    // Target zoom below current zoom, or above the maximum, must fail.
    assertInvalidFunction("bing_tile_children(bing_tile('0'), 0)", "newZoom must be greater than or equal to current zoom 1: 0");
    assertInvalidFunction(format("bing_tile_children(bing_tile('0'), %s)", MAX_ZOOM_LEVEL + 1), format("newZoom must be less than or equal to %s: %s", MAX_ZOOM_LEVEL, MAX_ZOOM_LEVEL + 1));
}
|
/**
 * Validates the consumer's key-shared metadata and, on success, registers
 * it with this selector. Assignment failures are surfaced as a failed
 * future (CompletionException wrapping ConsumerAssignException) rather
 * than a thrown exception.
 */
@Override
public synchronized CompletableFuture<Void> addConsumer(Consumer consumer) {
    return validateKeySharedMeta(consumer).thenRun(() -> {
        try {
            internalAddConsumer(consumer);
        } catch (BrokerServiceException.ConsumerAssignException e) {
            // Re-throw wrapped so the returned future completes exceptionally.
            throw FutureUtil.wrapToCompletionException(e);
        }
    });
}
|
@Test
public void testEmptyRanges() {
    // A STICKY consumer with no hash ranges must be rejected with a
    // ConsumerAssignException delivered through the returned future.
    HashRangeExclusiveStickyKeyConsumerSelector selector = new HashRangeExclusiveStickyKeyConsumerSelector(10);
    Consumer consumer = mock(Consumer.class);
    KeySharedMeta keySharedMeta = new KeySharedMeta()
        .setKeySharedMode(KeySharedMode.STICKY);
    when(consumer.getKeySharedMeta()).thenReturn(keySharedMeta);
    try {
        selector.addConsumer(consumer).get();
        Assert.fail("Should have failed");
    } catch (InterruptedException e) {
        throw new RuntimeException(e);
    } catch (ExecutionException e) {
        // The failure must be the assignment exception, not something else.
        Assert.assertTrue(e.getCause() instanceof BrokerServiceException.ConsumerAssignException);
    }
}
|
/**
 * Registers the URI list for the given selector, clearing any pending
 * fallback entry first. On failure the request is stored as a fallback so
 * it can be retried later, and an empty string is returned.
 *
 * @param selectorName name of the selector being registered
 * @param uriList      URIs to register for the selector
 * @return the registration result, or {@code ""} if registration failed
 */
@Override
public String registerURI(final String selectorName, final List<URIRegisterDTO> uriList) {
    String result;
    String key = key(selectorName);
    try {
        this.removeFallBack(key);
        result = this.doRegisterURI(selectorName, uriList);
        logger.info("Register success: {},{}", selectorName, uriList);
    } catch (Exception ex) {
        // Fix: pass the exception as the last argument so SLF4J logs the
        // full stack trace instead of only the (possibly null) message.
        logger.warn("Register exception: cause:{}", ex.getMessage(), ex);
        result = "";
        // Queue the request for retry via the fallback mechanism.
        this.addFallback(key, new FallbackHolder(selectorName, uriList));
    }
    return result;
}
|
@Test
public void testRegisterURI() {
    // Happy path: the mock returns its doRegisterURI result unchanged.
    MockFallbackShenyuClientRegisterService mockFallbackShenyuClientRegisterService = new MockFallbackShenyuClientRegisterService();
    assertEquals("doRegisterURI", mockFallbackShenyuClientRegisterService.registerURI("Selector_Name", new ArrayList<>()));
    // Failure path: an internal exception yields an empty result string.
    MockFallbackShenyuClientRegisterServiceException mockFallbackShenyuClientRegisterServiceException = new MockFallbackShenyuClientRegisterServiceException();
    assertEquals(StringUtils.EMPTY, mockFallbackShenyuClientRegisterServiceException.registerURI("Selector_Name", new ArrayList<>()));
}
|
/**
 * Local-API endpoint that pushes an app-auth payload to every registered
 * subscriber. Always reports success, even when no subscribers exist.
 */
@PostMapping("/auth/saveOrUpdate")
public Mono<String> saveOrUpdate(@RequestBody final AppAuthData appAuthData) {
    if (CollectionUtils.isEmpty(subscribers)) {
        // Nothing to notify; still report success to the caller.
        return Mono.just(Constants.SUCCESS);
    }
    LOG.info("saveOrUpdate apache shenyu local app auth");
    subscribers.forEach(authDataSubscriber -> authDataSubscriber.onSubscribe(appAuthData));
    return Mono.just(Constants.SUCCESS);
}
|
@Test
public void testSaveOrUpdate() throws Exception {
    // With subscribers present: HTTP 200 and every subscriber notified.
    final MockHttpServletResponse response = this.mockMvc.perform(MockMvcRequestBuilders.post("/shenyu/auth/saveOrUpdate")
        .contentType(MediaType.APPLICATION_JSON)
        .content(GsonUtils.getInstance().toJson(appAuthData)))
        .andReturn().getResponse();
    assertThat(response.getStatus()).isEqualTo(HttpStatus.OK.value());
    subscribers.forEach(subscriber -> verify(subscriber).onSubscribe(appAuthData));
    // With no subscribers configured: still HTTP 200 (endpoint is best-effort).
    final MockHttpServletResponse responseError = this.mockMvcSubscribersNull.perform(MockMvcRequestBuilders.post("/shenyu/auth/saveOrUpdate")
        .contentType(MediaType.APPLICATION_JSON)
        .content(GsonUtils.getInstance().toJson(appAuthData)))
        .andReturn().getResponse();
    assertThat(responseError.getStatus()).isEqualTo(HttpStatus.OK.value());
}
|
/**
 * Static factory: starts a new {@link Select} clause for the given field.
 *
 * @param fieldName the field (or projection, e.g. {@code "*"}) to select
 * @return a fresh {@code Select} builder
 */
public static Select select(String fieldName) {
    return new Select(fieldName);
}
|
@Test
void double_numeric_operations() {
    // Each comparison operator on double operands must render into the
    // expected YQL fragment; inRange renders as range(field, lo, hi).
    String q = Q.select("*")
        .from("sd1")
        .where("f1").le(1.1D)
        .and("f2").lt(2.2D)
        .and("f3").ge(3.3D)
        .and("f4").gt(4.4D)
        .and("f5").eq(5.5D)
        .and("f6").inRange(6.6D, 7.7D)
        .build();
    assertEquals(q, "yql=select * from sd1 where f1 <= 1.1 and f2 < 2.2 and f3 >= 3.3 and f4 > 4.4 and f5 = 5.5 and range(f6, 6.6, 7.7)");
}
|
/**
 * Static factory for an {@link OrderedEventProcessor} backed by the given
 * ordered-processing handler (returns the AutoValue-generated implementation).
 *
 * @param handler the handler driving ordering, state and result emission
 */
public static <
    EventTypeT,
    EventKeyTypeT,
    ResultTypeT,
    StateTypeT extends MutableState<EventTypeT, ResultTypeT>>
    OrderedEventProcessor<EventTypeT, EventKeyTypeT, ResultTypeT, StateTypeT> create(
        OrderedProcessingHandler<EventTypeT, EventKeyTypeT, StateTypeT, ResultTypeT> handler) {
    return new AutoValue_OrderedEventProcessor<>(handler);
}
|
@Test
public void testWindowedProcessing() throws CannotProvideCoderException {
    // End-to-end check that ordered processing is scoped per window:
    // sequence numbers restart at 0 in each fixed 5s window, and per-key
    // results/statuses accumulate only within their own window.
    Instant base = new Instant(0);
    TestStream<Event> values =
        TestStream.create(streamingPipeline.getCoderRegistry().getCoder(Event.class))
            .advanceWatermarkTo(base)
            .addElements(
                // Start of first window
                TimestampedValue.of(
                    Event.create(0, "id-1", "a"), base.plus(Duration.standardSeconds(1))),
                TimestampedValue.of(
                    Event.create(1, "id-1", "b"), base.plus(Duration.standardSeconds(2))),
                TimestampedValue.of(
                    Event.create(0, "id-2", "x"), base.plus(Duration.standardSeconds(1))),
                TimestampedValue.of(
                    Event.create(1, "id-2", "y"), base.plus(Duration.standardSeconds(2))),
                TimestampedValue.of(
                    Event.create(2, "id-2", "z"), base.plus(Duration.standardSeconds(2))),
                // Start of second window. Numbering must start with 0 again.
                TimestampedValue.of(
                    Event.create(0, "id-1", "c"), base.plus(Duration.standardSeconds(10))),
                TimestampedValue.of(
                    Event.create(1, "id-1", "d"), base.plus(Duration.standardSeconds(11))))
            .advanceWatermarkToInfinity();
    Pipeline pipeline = streamingPipeline;
    PCollection<Event> rawInput = pipeline.apply("Create Streaming Events", values);
    PCollection<KV<String, KV<Long, String>>> input =
        rawInput.apply("To KV", ParDo.of(new MapEventsToKV()));
    input = input.apply("Window input", Window.into(FixedWindows.of(Duration.standardSeconds(5))));
    // Handler emits on every element and a status update per event.
    StringBufferOrderedProcessingHandler handler =
        new StringBufferOrderedProcessingHandler(
            EMISSION_FREQUENCY_ON_EVERY_ELEMENT, INITIAL_SEQUENCE_OF_0);
    handler.setMaxOutputElementsPerBundle(LARGE_MAX_RESULTS_PER_OUTPUT);
    handler.setStatusUpdateFrequency(null);
    handler.setProduceStatusUpdateOnEveryEvent(true);
    OrderedEventProcessor<String, String, String, StringBuilderState> orderedEventProcessor =
        OrderedEventProcessor.create(handler);
    OrderedEventProcessorResult<String, String, String> processingResult =
        input.apply("Process Events", orderedEventProcessor);
    IntervalWindow window1 = new IntervalWindow(base, base.plus(Duration.standardSeconds(5)));
    // Window 1: per-key prefixes accumulate ("a", "ab"; "x", "xy", "xyz").
    PAssert.that("Output matches in window 1", processingResult.output())
        .inWindow(window1)
        .containsInAnyOrder(
            KV.of("id-1", "a"),
            KV.of("id-1", "ab"),
            KV.of("id-2", "x"),
            KV.of("id-2", "xy"),
            KV.of("id-2", "xyz"));
    IntervalWindow window2 =
        new IntervalWindow(
            base.plus(Duration.standardSeconds(10)), base.plus(Duration.standardSeconds(15)));
    // Window 2: accumulation restarts fresh ("c", "cd").
    PAssert.that("Output matches in window 2", processingResult.output())
        .inWindow(window2)
        .containsInAnyOrder(KV.of("id-1", "c"), KV.of("id-1", "cd"));
    PAssert.that("Statuses match in window 1", processingResult.processingStatuses())
        .inWindow(window1)
        .containsInAnyOrder(
            KV.of("id-1", OrderedProcessingStatus.create(0L, 0, null, null, 1, 1, 0, false)),
            KV.of("id-1", OrderedProcessingStatus.create(1L, 0, null, null, 2, 2, 0, false)),
            KV.of("id-2", OrderedProcessingStatus.create(0L, 0, null, null, 1, 1, 0, false)),
            KV.of("id-2", OrderedProcessingStatus.create(1L, 0, null, null, 2, 2, 0, false)),
            KV.of("id-2", OrderedProcessingStatus.create(2L, 0, null, null, 3, 3, 0, false)));
    PAssert.that("Statuses match in window 2", processingResult.processingStatuses())
        .inWindow(window2)
        .containsInAnyOrder(
            KV.of("id-1", OrderedProcessingStatus.create(0L, 0, null, null, 1, 1, 0, false)),
            KV.of("id-1", OrderedProcessingStatus.create(1L, 0, null, null, 2, 2, 0, false)));
    // Nothing should land in the dead-letter output.
    PAssert.that("Unprocessed events match", processingResult.unprocessedEvents())
        .containsInAnyOrder(NO_EXPECTED_DLQ_EVENTS);
    pipeline.run();
}
|
/** Sends the request through the underlying API client and returns its typed response. */
public <T extends BaseRequest<T, R>, R extends BaseResponse> R execute(BaseRequest<T, R> request) {
    return api.send(request);
}
|
@Test
public void sendSticker() {
    // Animated sticker sent by file, with an emoji attached.
    Message message = bot.execute(new SendSticker(chatId, stickerFileAnim).emoji("\uD83D\uDE00")).message();
    MessageTest.checkMessage(message);
    StickerTest.check(message.sticker(), false, true);
    assertTrue(message.sticker().isAnimated());
    assertEquals("\uD83D\uDE00", message.sticker().emoji());
    assertEquals(Sticker.Type.regular, message.sticker().type());
    // Static sticker sent by file id, with a thumbnail file.
    message = bot.execute(new SendSticker(chatId, stickerId).thumbnail(thumbFile)).message();
    MessageTest.checkMessage(message);
    StickerTest.check(message.sticker(), true, false);
    assertFalse(message.sticker().isAnimated());
    // Sticker sent from raw bytes, with a byte-array thumbnail.
    message = bot.execute(new SendSticker(chatId, imageBytes).thumbnail(thumbBytes)).message();
    MessageTest.checkMessage(message);
    StickerTest.check(message.sticker(), false, true);
}
|
/** Convenience overload: runs the computation on the model's visible undirected graph. */
@Override
public void execute(GraphModel graphModel) {
    Graph graph = graphModel.getUndirectedGraphVisible();
    execute(graph);
}
|
@Test
public void testColumnCreation() {
    // Running Modularity must add its result column to the node table,
    // even on a minimal (single-node, edgeless) graph.
    GraphModel graphModel = GraphGenerator.generateNullUndirectedGraph(1);
    Modularity h = new Modularity();
    h.execute(graphModel);
    Assert.assertTrue(graphModel.getNodeTable().hasColumn(Modularity.MODULARITY_CLASS));
}
|
/**
 * Copies {@code bytes[start, end)} into a freshly allocated unpooled
 * {@link MemorySegment}.
 *
 * @param bytes source array
 * @param start inclusive start index
 * @param end   exclusive end index
 * @return a new segment of size {@code end - start} holding the copied range
 * @throws IllegalArgumentException if the indices are out of order or exceed the array
 */
public static MemorySegment wrapCopy(byte[] bytes, int start, int end)
        throws IllegalArgumentException {
    // Improvement: give the precondition failures descriptive messages so
    // callers can see the offending indices instead of a bare exception.
    checkArgument(end >= start, "End index %s must not be smaller than start index %s.", end, start);
    checkArgument(end <= bytes.length, "End index %s exceeds array length %s.", end, bytes.length);
    MemorySegment copy = allocateUnpooledSegment(end - start);
    copy.put(0, bytes, start, copy.size());
    return copy;
}
|
@Test
void testWrapCopyEmpty() {
    // Copying an empty range must not throw. The test makes no assertion
    // on the result; NOTE(review): consider asserting the returned
    // segment's size is 0 to strengthen this test.
    MemorySegmentFactory.wrapCopy(new byte[0], 0, 0);
}
|
/**
 * Updates the compression-ratio estimate for the topic/type pair from an
 * observed ratio: deteriorate by a large step when the observation is
 * worse, improve by a small step when it is better (never overshooting the
 * observation), and return the resulting estimate.
 *
 * @param topic         topic whose estimate is updated
 * @param type          compression type whose slot is updated
 * @param observedRatio ratio observed for the latest batch
 * @return the updated estimate for this topic and compression type
 */
public static float updateEstimation(String topic, CompressionType type, float observedRatio) {
    float[] compressionRatioForTopic = getAndCreateEstimationIfAbsent(topic);
    synchronized (compressionRatioForTopic) {
        // Fix: read the current estimate (and return the result) inside the
        // synchronized block. The original read it before acquiring the
        // lock and re-read it after releasing, so a concurrent update could
        // be based on — and return — a stale value.
        float currentEstimation = compressionRatioForTopic[type.id];
        if (observedRatio > currentEstimation) {
            compressionRatioForTopic[type.id] = Math.max(currentEstimation + COMPRESSION_RATIO_DETERIORATE_STEP, observedRatio);
        } else if (observedRatio < currentEstimation) {
            compressionRatioForTopic[type.id] = Math.max(currentEstimation - COMPRESSION_RATIO_IMPROVING_STEP, observedRatio);
        }
        return compressionRatioForTopic[type.id];
    }
}
|
@Test
public void testUpdateEstimation() {
    // Simple value pair: a seeded current estimate and the ratio observed next.
    class EstimationsObservedRatios {
        final float currentEstimation;
        final float observedRatio;
        EstimationsObservedRatios(float currentEstimation, float observedRatio) {
            this.currentEstimation = currentEstimation;
            this.observedRatio = observedRatio;
        }
    }
    // If currentEstimation is smaller than observedRatio, the updatedCompressionRatio is currentEstimation plus
    // COMPRESSION_RATIO_DETERIORATE_STEP 0.05, otherwise currentEstimation minus COMPRESSION_RATIO_IMPROVING_STEP
    // 0.005. There are four cases,and updatedCompressionRatio shouldn't smaller than observedRatio in all of cases.
    // Refer to non test code for more details.
    List<EstimationsObservedRatios> estimationsObservedRatios = Arrays.asList(
        new EstimationsObservedRatios(0.8f, 0.84f),
        new EstimationsObservedRatios(0.6f, 0.7f),
        new EstimationsObservedRatios(0.6f, 0.4f),
        new EstimationsObservedRatios(0.004f, 0.001f));
    for (EstimationsObservedRatios estimationsObservedRatio : estimationsObservedRatios) {
        String topic = "tp";
        CompressionRatioEstimator.setEstimation(topic, CompressionType.ZSTD, estimationsObservedRatio.currentEstimation);
        float updatedCompressionRatio = CompressionRatioEstimator.updateEstimation(topic, CompressionType.ZSTD, estimationsObservedRatio.observedRatio);
        // Invariant: the estimate never drops below what was actually observed.
        assertTrue(updatedCompressionRatio >= estimationsObservedRatio.observedRatio);
    }
}
|
/**
 * Tags the given span with a value parsed from {@code input}. No-op spans
 * are skipped entirely (nothing is parsed or recorded).
 *
 * @throws NullPointerException if {@code input} or {@code span} is null
 */
public final void tag(I input, ScopedSpan span) {
    if (input == null) {
        throw new NullPointerException("input == null");
    }
    if (span == null) {
        throw new NullPointerException("span == null");
    }
    if (span.isNoop()) {
        return; // recording is disabled for this span
    }
    tag(span, input, span.context());
}
|
@Test
public void tag_mutableSpan_threadSafe() throws InterruptedException {
    // Hammer the span with 1000 concurrent distinct tags; afterwards every
    // tag must be present, proving tagging a MutableSpan is thread-safe.
    int numThreads = 1000;
    ExecutorService service = Executors.newFixedThreadPool(numThreads);
    try {
        for (int i = 0; i < numThreads; i++) {
            String val = String.valueOf(i);
            // Each task writes a unique key so lost updates are detectable.
            Tag<Object> tag = new Tag<Object>("key" + i) {
                @Override protected String parseValue(Object input, TraceContext context) {
                    return val;
                }
            };
            service.submit(() -> tag.tag(input, context, mutableSpan));
        }
    } finally {
        service.shutdown();
        service.awaitTermination(1, TimeUnit.MINUTES);
    }
    assertThat(mutableSpan.tagCount()).isEqualTo(numThreads);
}
|
/** Asserts that the multimap under test contains {@code key} in its key set. */
public final void containsKey(@Nullable Object key) {
    check("keySet()").that(checkNotNull(actual).keySet()).contains(key);
}
|
@Test
public void containsKey() {
    // A single-entry multimap must report its only key as present.
    ImmutableMultimap<String, String> subject = ImmutableMultimap.of("kurt", "kluever");
    assertThat(subject).containsKey("kurt");
}
|
/**
 * Log probability mass of a Bernoulli outcome: log(q) for k == 0,
 * log(p) for k == 1, and negative infinity for any other value
 * (impossible outcomes have probability zero).
 */
@Override
public double logp(int k) {
    switch (k) {
        case 0:
            return Math.log(q);
        case 1:
            return Math.log(p);
        default:
            return Double.NEGATIVE_INFINITY;
    }
}
|
@Test
public void testLogP() {
    System.out.println("logP");
    // Bernoulli(p = 0.3): logp(0) = log(1 - p), logp(1) = log(p).
    BernoulliDistribution instance = new BernoulliDistribution(0.3);
    instance.rand();
    assertEquals(Math.log(0.7), instance.logp(0), 1E-7);
    assertEquals(Math.log(0.3), instance.logp(1), 1E-7);
}
|
/**
 * Blocking wrapper over {@code retainAllAsync}; per the Collection
 * contract, returns true if the set was modified.
 */
@Override
public boolean retainAll(Collection<?> c) {
    return get(retainAllAsync(c));
}
|
@Test
public void testRetainAll() {
    // Populate a large sorted set, then retain only two members; scores of
    // the survivors must be preserved.
    RScoredSortedSet<Integer> set = redisson.getScoredSortedSet("simple");
    for (int i = 0; i < 20000; i++) {
        set.add(i*10, i);
    }
    assertThat(set.retainAll(Arrays.asList(1, 2))).isTrue();
    assertThat(set).containsExactly(1, 2);
    assertThat(set.size()).isEqualTo(2);
    assertThat(set.getScore(1)).isEqualTo(10);
    assertThat(set.getScore(2)).isEqualTo(20);
}
|
/** Adds a batching strategy to this support's strategy registry. */
public void registerStrategy(BatchingStrategy<?, ?, ?> strategy) {
    _strategies.add(strategy);
}
|
@Test
public void testClassifyFailure() {
    // The group classifier (key -> key / key) throws for key 0, so that
    // task must fall back to its recover() value; key 1 classifies fine
    // and executes as a singleton (no batch is formed).
    RecordingStrategy<Integer, Integer, String> strategy =
        new RecordingStrategy<>((key, promise) -> promise.done(String.valueOf(key)), key -> key / key);
    _batchingSupport.registerStrategy(strategy);
    Task<String> task = Task.par(strategy.batchable(0).recover(e -> "failed"), strategy.batchable(1).recover(e -> "failed"))
        .map("concat", (s0, s1) -> s0 + s1);
    String result = runAndWait("TestBatchingSupport.testClassifyFailure", task);
    assertEquals(result, "failed1");
    assertEquals(strategy.getExecutedBatches().size(), 0);
    assertEquals(strategy.getExecutedSingletons().size(), 1);
}
|
/**
 * Two results are equal when their records, response, replay records,
 * atomicity flag and append future are all equal (null-safe comparisons).
 */
@Override
public boolean equals(Object o) {
    if (this == o) return true;
    if (o == null || getClass() != o.getClass()) return false;
    CoordinatorResult<?, ?> other = (CoordinatorResult<?, ?>) o;
    // Same field order and short-circuiting as the expanded form.
    return Objects.equals(records, other.records)
        && Objects.equals(response, other.response)
        && Objects.equals(replayRecords, other.replayRecords)
        && Objects.equals(isAtomic, other.isAtomic)
        && Objects.equals(appendFuture, other.appendFuture);
}
|
@Test
public void testEquals() {
    // Two results built from identical records and responses must be equal.
    CoordinatorResult<String, CoordinatorRecord> result1 = new CoordinatorResult<>(Collections.emptyList(), "response");
    CoordinatorResult<String, CoordinatorRecord> result2 = new CoordinatorResult<>(Collections.emptyList(), "response");
    assertEquals(result1, result2);
}
|
/**
 * Combines the given filter parts into a single LDAP filter using the
 * supplied boolean operator, e.g. {@code joinFilter('|', [foo, bar])}
 * yields {@code "(|(foo)(bar))"}.
 *
 * @param operator LDAP boolean operator, typically '&amp;' or '|'
 * @param parts    individual filter expressions, without surrounding parentheses
 * @return the combined filter string
 */
public static String joinFilter(char operator, List<String> parts)
{
    final StringBuilder filter = new StringBuilder();
    filter.append('(').append(operator);
    // Wrap each part in its own parentheses before appending.
    parts.forEach(part -> filter.append('(').append(part).append(')'));
    return filter.append(')').toString();
}
|
@Test
public void testJoinFilter() throws Exception
{
    // Setup fixture: two plain filter parts.
    final List<String> parts = new ArrayList<>();
    parts.add("foo");
    parts.add("bar");
    // Execute system under test with the OR operator.
    final String result = LdapManager.joinFilter('|', parts);
    // Verify: each part is parenthesized inside one (|...) group.
    assertNotNull(result);
    assertEquals("(|(foo)(bar))", result);
}
|
/**
 * Parses a duration string of the form {@code "<digits>[unit]"} (e.g.
 * "100", "10 s", "565731µs") into a {@link Duration}. The unit label is
 * case-insensitive; when absent, milliseconds are assumed.
 *
 * @param text the string to parse; must start with digits
 * @return the parsed duration
 * @throws NullPointerException     if {@code text} is null
 * @throws IllegalArgumentException if the string is blank, the number
 *         cannot be represented, the unit is unrecognized, or the value
 *         overflows a Duration
 * @throws NumberFormatException    if the string does not start with a number
 */
public static Duration parseDuration(String text) {
    checkNotNull(text);
    final String trimmed = text.trim();
    checkArgument(!trimmed.isEmpty(), "argument is an empty- or whitespace-only string");
    final int len = trimmed.length();
    int pos = 0;
    char current;
    // Scan the leading run of ASCII digits; everything after is the unit label.
    while (pos < len && (current = trimmed.charAt(pos)) >= '0' && current <= '9') {
        pos++;
    }
    final String number = trimmed.substring(0, pos);
    // Locale.US keeps lower-casing of the unit label locale-independent.
    final String unitLabel = trimmed.substring(pos).trim().toLowerCase(Locale.US);
    if (number.isEmpty()) {
        throw new NumberFormatException("text does not start with a number");
    }
    final BigInteger value;
    try {
        value = new BigInteger(number); // this throws a NumberFormatException
    } catch (NumberFormatException e) {
        throw new IllegalArgumentException(
            "The value '" + number + "' cannot be represented as an integer number.", e);
    }
    final ChronoUnit unit;
    if (unitLabel.isEmpty()) {
        // No unit given: default to milliseconds.
        unit = ChronoUnit.MILLIS;
    } else {
        unit = LABEL_TO_UNIT_MAP.get(unitLabel);
    }
    if (unit == null) {
        throw new IllegalArgumentException(
            "Time interval unit label '"
                + unitLabel
                + "' does not match any of the recognized units: "
                + TimeUnit.getAllUnits());
    }
    try {
        return convertBigIntToDuration(value, unit);
    } catch (ArithmeticException e) {
        // BigInteger magnitude exceeded what Duration can hold.
        throw new IllegalArgumentException(
            "The value '"
                + number
                + "' cannot be represented as java.time.Duration (numeric overflow).",
            e);
    }
}
|
@Test
void testParseDurationMicros() {
    // Every accepted microsecond label must parse to the same nanosecond
    // value; trailing whitespace before the unit is also allowed.
    assertThat(TimeUtils.parseDuration("565731µs").getNano()).isEqualTo(565731 * 1000L);
    assertThat(TimeUtils.parseDuration("565731micro").getNano()).isEqualTo(565731 * 1000L);
    assertThat(TimeUtils.parseDuration("565731micros").getNano()).isEqualTo(565731 * 1000L);
    assertThat(TimeUtils.parseDuration("565731microsecond").getNano())
        .isEqualTo(565731 * 1000L);
    assertThat(TimeUtils.parseDuration("565731microseconds").getNano())
        .isEqualTo(565731 * 1000L);
    assertThat(TimeUtils.parseDuration("565731 µs").getNano()).isEqualTo(565731 * 1000L);
}
|
/**
 * Indexes the given numeric predictors by field name, mapping each to its
 * evaluation function. Throws if two predictors share a field name
 * (Collectors.toMap duplicate-key behavior).
 */
static Map<String, SerializableFunction<Double, Double>> getNumericPredictorsMap(final List<NumericPredictor> numericPredictors) {
    return numericPredictors.stream()
        .collect(Collectors.toMap(
            NumericPredictor::getField,
            KiePMMLRegressionTableFactory::getNumericPredictorEntry));
}
|
@Test
void getNumericPredictorsMap() {
    // Three predictors with distinct field names must yield a map with one
    // entry per predictor.
    final List<NumericPredictor> numericPredictors = IntStream.range(0, 3).mapToObj(index -> {
        String predictorName = "predictorName-" + index;
        double coefficient = 1.23 * index;
        return PMMLModelTestUtils.getNumericPredictor(predictorName, index, coefficient);
    }).collect(Collectors.toList());
    Map<String, SerializableFunction<Double, Double>> retrieved =
        KiePMMLRegressionTableFactory.getNumericPredictorsMap(numericPredictors);
    assertThat(retrieved).hasSameSizeAs(numericPredictors);
}
|
/**
 * Persists the given house table through the HTS REST endpoint, retrying
 * on IllegalStateException via the retry template, mapping the response
 * entity back to a HouseTable, and blocking up to
 * REQUEST_TIMEOUT_SECONDS for the result. HTTP errors are translated by
 * {@code handleHtsHttpError}.
 */
@edu.umd.cs.findbugs.annotations.SuppressFBWarnings(
    value = "NP_NULL_ON_SOME_PATH_FROM_RETURN_VALUE",
    justification = "Handled in switchIfEmpty")
@Override
public HouseTable save(HouseTable entity) {
    CreateUpdateEntityRequestBodyUserTable requestBody =
        new CreateUpdateEntityRequestBodyUserTable().entity(houseTableMapper.toUserTable(entity));
    return getHtsRetryTemplate(Arrays.asList(IllegalStateException.class))
        .execute(
            context ->
                apiInstance
                    .putUserTable(requestBody)
                    .map(EntityResponseBodyUserTable::getEntity)
                    .map(houseTableMapper::toHouseTable)
                    .onErrorResume(this::handleHtsHttpError)
                    .block(Duration.ofSeconds(REQUEST_TIMEOUT_SECONDS)));
}
|
@Test
public void testRepoSave() {
    // Enqueue a canned 201 response on the mock HTS server, then verify
    // the saved entity round-trips every field.
    EntityResponseBodyUserTable putResponse = new EntityResponseBodyUserTable();
    putResponse.entity(houseTableMapper.toUserTable(HOUSE_TABLE));
    mockHtsServer.enqueue(
        new MockResponse()
            .setResponseCode(201)
            .setBody((new Gson()).toJson(putResponse))
            .addHeader("Content-Type", "application/json"));
    HouseTable result = htsRepo.save(HOUSE_TABLE);
    Assertions.assertEquals(result.getTableId(), HOUSE_TABLE.getTableId());
    Assertions.assertEquals(result.getDatabaseId(), HOUSE_TABLE.getDatabaseId());
    Assertions.assertEquals(result.getTableLocation(), HOUSE_TABLE.getTableLocation());
    Assertions.assertEquals(result.getTableVersion(), HOUSE_TABLE.getTableVersion());
    Assertions.assertEquals(result.getStorageType(), HOUSE_TABLE.getStorageType());
}
|
/**
 * Deliberately discards the given throwable. Marks call sites where an
 * exception is intentionally ignored; the unused-parameter warning
 * (Sonar java:S1172) is suppressed on purpose.
 */
@SuppressWarnings("java:S1172")
public static void ignore(Throwable t) {
}
|
@Test
public void test_ignore() {
    // Smoke test: the no-op must accept any throwable without side effects.
    ignore(new Exception());
}
|
/** Static factory for an empty {@code ReadAll} transform (AutoValue-generated builder). */
public static <T> ReadAll<T> readAll() {
    return new AutoValue_CassandraIO_ReadAll.Builder<T>().build();
}
|
@Test
public void testReadAllRingRange() {
    // Build one ring range per department key, split across two Read specs,
    // and verify readAll() fetches the expected row counts for all three.
    RingRange physRR =
        fromEncodedKey(
            cluster.getMetadata(), TypeCodec.varchar().serialize("phys", ProtocolVersion.V3));
    RingRange mathRR =
        fromEncodedKey(
            cluster.getMetadata(), TypeCodec.varchar().serialize("math", ProtocolVersion.V3));
    RingRange logicRR =
        fromEncodedKey(
            cluster.getMetadata(), TypeCodec.varchar().serialize("logic", ProtocolVersion.V3));
    PCollection<Scientist> output =
        pipeline
            .apply(Create.of(getReadWithRingRange(physRR), getReadWithRingRange(mathRR, logicRR)))
            .apply(
                CassandraIO.<Scientist>readAll().withCoder(SerializableCoder.of(Scientist.class)));
    // Key each scientist by department for per-department counting.
    PCollection<KV<String, Integer>> mapped =
        output.apply(
            MapElements.via(
                new SimpleFunction<Scientist, KV<String, Integer>>() {
                    @Override
                    public KV<String, Integer> apply(Scientist scientist) {
                        return KV.of(scientist.department, scientist.id);
                    }
                }));
    PAssert.that(mapped.apply("Count occurrences per department", Count.perKey()))
        .satisfies(
            input -> {
                HashMap<String, Long> map = new HashMap<>();
                for (KV<String, Long> element : input) {
                    map.put(element.getKey(), element.getValue());
                }
                assertEquals(3, map.size()); // do we have all three departments
                assertEquals(10L, (long) map.get("phys"));
                assertEquals(4L, (long) map.get("math"));
                assertEquals(2L, (long) map.get("logic"));
                return null;
            });
    pipeline.run();
}
|
/**
 * Watches the given key prefix in etcd and forwards each non-ignored
 * change event to the listener.
 *
 * @param key                      key prefix to watch
 * @param dataChangedEventListener callback for dispatched change events
 */
@Override
public void watch(final String key, final DataChangedEventListener dataChangedEventListener) {
    Watch.Listener listener = Watch.listener(response -> {
        for (WatchEvent each : response.getEvents()) {
            Type type = getEventChangedType(each);
            if (Type.IGNORED != type) {
                dispatchEvent(dataChangedEventListener, each, type);
            }
        }
    });
    ByteSequence prefix = ByteSequence.from(key, StandardCharsets.UTF_8);
    // NOTE(review): ByteSequence.from is not expected to return null here,
    // so this check looks redundant — confirm before removing.
    Preconditions.checkNotNull(prefix, "prefix should not be null");
    // Watch the whole range covered by the prefix: [prefix, prefixEnd).
    client.getWatchClient().watch(prefix,
        WatchOption.newBuilder().withRange(OptionsUtil.prefixEndOf(prefix)).build(), listener);
}
|
@Test
void assertWatchDelete() {
    // Stub the etcd watch client so registering a watcher immediately
    // delivers a DELETE event, then verify the repository wired a watch.
    doAnswer(invocationOnMock -> {
        Watch.Listener listener = (Watch.Listener) invocationOnMock.getArguments()[2];
        listener.onNext(buildWatchResponse(WatchEvent.EventType.DELETE));
        return mock(Watch.Watcher.class);
    }).when(watch).watch(any(ByteSequence.class), any(WatchOption.class), any(Watch.Listener.class));
    repository.watch("key1", event -> {
    });
    verify(watch).watch(any(ByteSequence.class), any(WatchOption.class), any(Watch.Listener.class));
}
|
/**
 * Builds a payload serializer for the Thrift class and protocol named in
 * {@code tableParams}, after verifying the class's inferred schema matches
 * the given Beam schema.
 */
@Override
public PayloadSerializer getSerializer(Schema schema, Map<String, Object> tableParams) {
    Class<? extends TBase> thriftClass = getMessageClass(tableParams);
    TProtocolFactory protocolFactory = getProtocolFactory(tableParams);
    inferAndVerifySchema(thriftClass, schema);
    return getPayloadSerializer(schema, protocolFactory, thriftClass);
}
|
@Test
public void serialize() throws Exception {
    // Serialize a Row with the compact protocol, then thrift-deserialize
    // the bytes and confirm the round trip reproduces the message.
    byte[] bytes =
        provider
            .getSerializer(
                SHUFFLED_SCHEMA,
                ImmutableMap.of(
                    "thriftClass", TestThriftMessage.class.getName(),
                    "thriftProtocolFactoryClass", TCompactProtocol.Factory.class.getName()))
            .serialize(ROW);
    TestThriftMessage result = new TestThriftMessage();
    new TDeserializer(new TCompactProtocol.Factory()).deserialize(result, bytes);
    assertEquals(MESSAGE, result);
}
|
/** Static factory for a new, empty {@link Builder}. */
public static Builder builder() {
    return new Builder();
}
|
@Test
// Test cases that are JSON that can be created via the Builder
public void testRoundTripSerDe() throws JsonProcessingException {
    // Full payload: namespace plus a non-empty properties map.
    String fullJson =
        "{\"namespace\":[\"accounting\",\"tax\"],\"properties\":{\"owner\":\"Hank\"}}";
    GetNamespaceResponse fullValue =
        GetNamespaceResponse.builder().withNamespace(NAMESPACE).setProperties(PROPERTIES).build();
    assertRoundTripSerializesEquallyFrom(fullJson, fullValue);
    // Properties omitted and explicitly-empty properties must serialize identically.
    String emptyProps = "{\"namespace\":[\"accounting\",\"tax\"],\"properties\":{}}";
    assertRoundTripSerializesEquallyFrom(
        emptyProps, GetNamespaceResponse.builder().withNamespace(NAMESPACE).build());
    assertRoundTripSerializesEquallyFrom(
        emptyProps,
        GetNamespaceResponse.builder()
            .withNamespace(NAMESPACE)
            .setProperties(EMPTY_PROPERTIES)
            .build());
}
|
/** Returns the RPC type this decorator handles: the gRPC type name. */
@Override
public String rpcType() {
    return RpcTypeEnum.GRPC.getName();
}
|
@Test
public void testRpcType() {
    // NOTE(review): the decorator returns RpcTypeEnum.GRPC.getName() but is
    // compared against PluginEnum.GRPC.getName() — this assumes the two
    // enums use the same name string for gRPC; confirm they stay in sync.
    assertEquals(grpcShenyuContextDecorator.rpcType(), PluginEnum.GRPC.getName());
}
|
/**
 * Resolves the delete granularity for this write: the write option takes
 * precedence over the table property, which falls back to the default.
 */
public DeleteGranularity deleteGranularity() {
    String valueAsString =
        confParser
            .stringConf()
            .option(SparkWriteOptions.DELETE_GRANULARITY)
            .tableProperty(TableProperties.DELETE_GRANULARITY)
            .defaultValue(TableProperties.DELETE_GRANULARITY_DEFAULT)
            .parse();
    return DeleteGranularity.fromString(valueAsString);
}
|
@Test
public void testDeleteGranularityDefault() {
    // With no write option and no table property set, the configured
    // default granularity (PARTITION) must be returned.
    Table table = validationCatalog.loadTable(tableIdent);
    SparkWriteConf writeConf = new SparkWriteConf(spark, table, ImmutableMap.of());
    DeleteGranularity value = writeConf.deleteGranularity();
    assertThat(value).isEqualTo(DeleteGranularity.PARTITION);
}
|
/**
 * Assigns GPUs to the container, polling for devices that other containers
 * may still be releasing. Retries once per second until allocation
 * succeeds or the wait budget is exhausted, then fails.
 *
 * @param container the container requesting GPUs
 * @return the successful allocation (never null)
 * @throws ResourceHandlerException if no GPUs became available in time
 */
public GpuAllocation assignGpus(Container container)
    throws ResourceHandlerException {
    GpuAllocation allocation = internalAssignGpus(container);
    // Wait for a maximum of waitPeriodForResource seconds if no
    // available GPU are there which are yet to be released.
    // NOTE(review): timeWaiting accumulates WAIT_MS_PER_LOOP per iteration
    // while the comment speaks of seconds — confirm waitPeriodForResource
    // is expressed in the same unit as WAIT_MS_PER_LOOP.
    int timeWaiting = 0;
    while (allocation == null) {
        if (timeWaiting >= waitPeriodForResource) {
            break;
        }
        // Sleep for 1 sec to ensure there are some free GPU devices which are
        // getting released.
        try {
            LOG.info("Container : " + container.getContainerId()
                + " is waiting for free GPU devices.");
            Thread.sleep(WAIT_MS_PER_LOOP);
            timeWaiting += WAIT_MS_PER_LOOP;
            allocation = internalAssignGpus(container);
        } catch (InterruptedException e) {
            // On any interrupt, break the loop and continue execution.
            Thread.currentThread().interrupt();
            LOG.warn("Interrupted while waiting for available GPU");
            break;
        }
    }
    if(allocation == null) {
        String message = "Could not get valid GPU device for container '" +
            container.getContainerId()
            + "' as some other containers might not releasing GPUs.";
        LOG.warn(message);
        throw new ResourceHandlerException(message);
    }
    return allocation;
}
|
@Test
public void testRequestZeroGpu() throws ResourceHandlerException {
    // A container that requests zero GPUs must succeed immediately with an
    // empty allocation (no waiting, no exception).
    addGpus(new GpuDevice(1, 1));
    Container container = createMockContainer(0, 5L);
    GpuAllocation allocation =
        testSubject.assignGpus(container);
    assertNoAllocation(allocation);
}
|
/**
 * Computes the offset a lite pull consumer should start from for the given
 * queue, honoring the configured ConsumeFromWhere policy. A stored offset
 * (>= 0) always wins; -1 ("no offset yet") falls back per policy; any
 * other stored value yields -1.
 *
 * @param mq the message queue to compute the starting offset for
 * @return the starting offset, or -1 if it cannot be determined
 * @throws MQClientException if a broker offset lookup fails
 */
@Override
public long computePullFromWhereWithException(MessageQueue mq) throws MQClientException {
    ConsumeFromWhere consumeFromWhere = litePullConsumerImpl.getDefaultLitePullConsumer().getConsumeFromWhere();
    long result = -1;
    switch (consumeFromWhere) {
        case CONSUME_FROM_LAST_OFFSET: {
            long lastOffset = litePullConsumerImpl.getOffsetStore().readOffset(mq, ReadOffsetType.MEMORY_FIRST_THEN_STORE);
            if (lastOffset >= 0) {
                result = lastOffset;
            } else if (-1 == lastOffset) {
                if (mq.getTopic().startsWith(MixAll.RETRY_GROUP_TOPIC_PREFIX)) { // First start, no offset
                    result = 0L;
                } else {
                    // Normal topic: start from the broker's current max offset.
                    try {
                        result = this.mQClientFactory.getMQAdminImpl().maxOffset(mq);
                    } catch (MQClientException e) {
                        log.warn("Compute consume offset from last offset exception, mq={}, exception={}", mq, e);
                        throw e;
                    }
                }
            } else {
                result = -1;
            }
            break;
        }
        case CONSUME_FROM_FIRST_OFFSET: {
            long lastOffset = litePullConsumerImpl.getOffsetStore().readOffset(mq, ReadOffsetType.MEMORY_FIRST_THEN_STORE);
            if (lastOffset >= 0) {
                result = lastOffset;
            } else if (-1 == lastOffset) {
                // No stored offset: start from the beginning of the queue.
                result = 0L;
            } else {
                result = -1;
            }
            break;
        }
        case CONSUME_FROM_TIMESTAMP: {
            long lastOffset = litePullConsumerImpl.getOffsetStore().readOffset(mq, ReadOffsetType.MEMORY_FIRST_THEN_STORE);
            if (lastOffset >= 0) {
                result = lastOffset;
            } else if (-1 == lastOffset) {
                if (mq.getTopic().startsWith(MixAll.RETRY_GROUP_TOPIC_PREFIX)) {
                    // Retry topics ignore the timestamp and use the max offset.
                    try {
                        result = this.mQClientFactory.getMQAdminImpl().maxOffset(mq);
                    } catch (MQClientException e) {
                        log.warn("Compute consume offset from last offset exception, mq={}, exception={}", mq, e);
                        throw e;
                    }
                } else {
                    // Look up the offset closest to the configured consume timestamp.
                    try {
                        long timestamp = UtilAll.parseDate(this.litePullConsumerImpl.getDefaultLitePullConsumer().getConsumeTimestamp(),
                            UtilAll.YYYYMMDDHHMMSS).getTime();
                        result = this.mQClientFactory.getMQAdminImpl().searchOffset(mq, timestamp);
                    } catch (MQClientException e) {
                        log.warn("Compute consume offset from last offset exception, mq={}, exception={}", mq, e);
                        throw e;
                    }
                }
            } else {
                result = -1;
            }
            break;
        }
    }
    return result;
}
|
@Test
public void testComputePullFromWhereWithException_ne_minus1() throws MQClientException {
    // For every policy: a stored offset of 0 is returned as-is, and any
    // stored value other than >= 0 or -1 (here -2) yields -1.
    for (ConsumeFromWhere where : new ConsumeFromWhere[]{
        ConsumeFromWhere.CONSUME_FROM_LAST_OFFSET,
        ConsumeFromWhere.CONSUME_FROM_FIRST_OFFSET,
        ConsumeFromWhere.CONSUME_FROM_TIMESTAMP}) {
        consumer.setConsumeFromWhere(where);
        when(offsetStore.readOffset(any(MessageQueue.class), any(ReadOffsetType.class))).thenReturn(0L);
        assertEquals(0, rebalanceImpl.computePullFromWhereWithException(mq));
        when(offsetStore.readOffset(any(MessageQueue.class), any(ReadOffsetType.class))).thenReturn(-2L);
        assertEquals(-1, rebalanceImpl.computePullFromWhereWithException(mq));
    }
}
|
/**
 * OOM-handling loop: while the memory cgroup reports an under_oom state,
 * kills containers one at a time. Gives up with a YarnRuntimeException if
 * no killable container can be found while the cgroup is still under OOM.
 */
@Override
public void run() {
    try {
        // We kill containers until the kernel reports the OOM situation resolved
        // Note: If the kernel has a delay this may kill more than necessary
        while (true) {
            String status = cgroups.getCGroupParam(
                CGroupsHandler.CGroupController.MEMORY,
                "",
                CGROUP_PARAM_MEMORY_OOM_CONTROL);
            if (!status.contains(CGroupsHandler.UNDER_OOM)) {
                // OOM condition cleared; nothing more to do.
                break;
            }
            boolean containerKilled = killContainer();
            if (!containerKilled) {
                // This can happen, if SIGKILL did not clean up
                // non-PGID or containers or containers launched by other users
                // or if a process was put to the root YARN cgroup.
                throw new YarnRuntimeException(
                    "Could not find any containers but CGroups " +
                        "reserved for containers ran out of memory. " +
                        "I am giving up");
            }
        }
    } catch (ResourceHandlerException ex) {
        // Reading the cgroup can fail during shutdown; log and exit quietly.
        LOG.warn("Could not fetch OOM status. " +
            "This is expected at shutdown. Exiting.", ex);
    }
}
|
/**
 * With the cgroup reported under OOM exactly once, the handler must kill
 * exactly one container: c2, whose memory usage (11 MB) is over its limit,
 * identified by its pid 1235. No other container receives a signal.
 */
@Test
public void testKillOneOverLimitOpportunisticContainerUponOOM()
    throws Exception {
    ConcurrentHashMap<ContainerId, Container> containers =
        new ConcurrentHashMap<>();
    int currentContainerId = 0;
    Container c1 = createContainer(currentContainerId++, false, 2, true);
    containers.put(c1.getContainerId(), c1);
    Container c2 = createContainer(currentContainerId++, false, 1, true);
    containers.put(c2.getContainerId(), c2);
    Container c3 = createContainer(currentContainerId++, true, 1, true);
    containers.put(c3.getContainerId(), c3);
    ContainerExecutor ex = createContainerExecutor(containers);
    Context context = mock(Context.class);
    when(context.getContainers()).thenReturn(containers);
    when(context.getContainerExecutor()).thenReturn(ex);
    CGroupsHandler cGroupsHandler = mock(CGroupsHandler.class);
    // First poll reports under_oom, second reports it resolved, so the
    // handler's kill loop runs exactly one iteration.
    when(cGroupsHandler.getCGroupParam(
        CGroupsHandler.CGroupController.MEMORY,
        "",
        CGROUP_PARAM_MEMORY_OOM_CONTROL))
        .thenReturn("under_oom 1")
        .thenReturn("under_oom 0");
    when(cGroupsHandler.getCGroupParam(CGroupsHandler.CGroupController.MEMORY,
        c1.getContainerId().toString(), CGROUP_PROCS_FILE))
        .thenReturn("1234").thenReturn("");
    when(cGroupsHandler.getCGroupParam(CGroupsHandler.CGroupController.MEMORY,
        c1.getContainerId().toString(), CGROUP_PARAM_MEMORY_USAGE_BYTES))
        .thenReturn(getMB(9));
    when(cGroupsHandler.getCGroupParam(CGroupsHandler.CGroupController.MEMORY,
        c1.getContainerId().toString(), CGROUP_PARAM_MEMORY_MEMSW_USAGE_BYTES))
        .thenReturn(getMB(9));
    // container c2 is out of its limit
    when(cGroupsHandler.getCGroupParam(CGroupsHandler.CGroupController.MEMORY,
        c2.getContainerId().toString(), CGROUP_PROCS_FILE))
        .thenReturn("1235").thenReturn("");
    when(cGroupsHandler.getCGroupParam(CGroupsHandler.CGroupController.MEMORY,
        c2.getContainerId().toString(), CGROUP_PARAM_MEMORY_USAGE_BYTES))
        .thenReturn(getMB(11));
    when(cGroupsHandler.getCGroupParam(CGroupsHandler.CGroupController.MEMORY,
        c2.getContainerId().toString(), CGROUP_PARAM_MEMORY_MEMSW_USAGE_BYTES))
        .thenReturn(getMB(11));
    when(cGroupsHandler.getCGroupParam(CGroupsHandler.CGroupController.MEMORY,
        c3.getContainerId().toString(), CGROUP_PROCS_FILE))
        .thenReturn("1236").thenReturn("");
    when(cGroupsHandler.getCGroupParam(CGroupsHandler.CGroupController.MEMORY,
        c3.getContainerId().toString(), CGROUP_PARAM_MEMORY_USAGE_BYTES))
        .thenReturn(getMB(9));
    when(cGroupsHandler.getCGroupParam(CGroupsHandler.CGroupController.MEMORY,
        c3.getContainerId().toString(), CGROUP_PARAM_MEMORY_MEMSW_USAGE_BYTES))
        .thenReturn(getMB(9));
    DefaultOOMHandler handler =
        new DefaultOOMHandler(context, false) {
            @Override
            protected CGroupsHandler getCGroupsHandler() {
                return cGroupsHandler;
            }
        };
    handler.run();
    // Exactly one kill: the over-limit container c2 (pid 1235).
    verify(ex, times(1)).signalContainer(
        new ContainerSignalContext.Builder()
            .setPid("1235")
            .setContainer(c2)
            .setSignal(ContainerExecutor.Signal.KILL)
            .build()
    );
    verify(ex, times(1)).signalContainer(any());
}
|
/**
 * Exposes this buffer's memory manager so white-box tests can drive and
 * inspect memory accounting directly.
 */
@VisibleForTesting
OutputBufferMemoryManager getMemoryManager()
{
    return this.memoryManager;
}
|
/**
 * Exercises the interaction between buffer-space limits and memory-pool
 * availability in BroadcastOutputBuffer: after memory becomes available
 * again, the blocked future must stay unresolved while the buffer itself is
 * still full, and resolve only once both memory and buffer space free up.
 */
@Test
public void testSharedBufferBlocking2()
{
    // start with a complete future
    SettableFuture<?> blockedFuture = SettableFuture.create();
    blockedFuture.set(null);
    MockMemoryReservationHandler reservationHandler = new MockMemoryReservationHandler(blockedFuture);
    AggregatedMemoryContext memoryContext = newRootAggregatedMemoryContext(reservationHandler, 0L);
    Page page = createPage(1);
    long pageSize = PAGES_SERDE.serialize(page).getRetainedSizeInBytes();
    // create a buffer that can only hold two pages
    BroadcastOutputBuffer buffer = createBroadcastBuffer(createInitialEmptyOutputBuffers(BROADCAST), new DataSize(pageSize * 2, BYTE), memoryContext, directExecutor());
    OutputBufferMemoryManager memoryManager = buffer.getMemoryManager();
    // add two pages to fill up the buffer (memory is available)
    addPage(buffer, page);
    addPage(buffer, page);
    // fill up the memory pool
    blockedFuture = SettableFuture.create();
    reservationHandler.updateBlockedFuture(blockedFuture);
    // allocate one more byte to make the buffer full
    memoryManager.updateMemoryUsage(1L);
    // more memory is available
    blockedFuture.set(null);
    memoryManager.onMemoryAvailable();
    // memoryManager should still return a blocked future as the buffer is still full
    assertFalse(memoryManager.getBufferBlockedFuture().isDone(), "buffer should be blocked");
    // remove all pages from the memory manager and the 1 byte that we added above
    memoryManager.updateMemoryUsage(-pageSize * 2 - 1);
    // now we have both buffer space and memory available, so memoryManager shouldn't be blocked
    assertTrue(memoryManager.getBufferBlockedFuture().isDone(), "buffer shouldn't be blocked");
    // we should be able to add two pages after more memory is available
    addPage(buffer, page);
    addPage(buffer, page);
    // the buffer is full now
    enqueuePage(buffer, page);
}
|
/**
 * Creates a new flat-map processor instance on every call.
 */
@Override
public Processor<KIn, VIn, KOut, VOut> get() {
    final KStreamFlatMapProcessor processor = new KStreamFlatMapProcessor();
    return processor;
}
|
/**
 * flatMap with a mapper that, for input key k, emits k records keyed
 * "k*10+i" (i in [0,k)) with the original value. Inputs 0..3 therefore
 * produce 6 records total; key 0 produces none.
 */
@Test
public void testFlatMap() {
    final StreamsBuilder builder = new StreamsBuilder();
    final String topicName = "topic";
    final KeyValueMapper<Number, Object, Iterable<KeyValue<String, String>>> mapper =
        (key, value) -> {
            final ArrayList<KeyValue<String, String>> result = new ArrayList<>();
            for (int i = 0; i < key.intValue(); i++) {
                result.add(KeyValue.pair(Integer.toString(key.intValue() * 10 + i), value.toString()));
            }
            return result;
        };
    final int[] expectedKeys = {0, 1, 2, 3};
    final KStream<Integer, String> stream;
    final MockApiProcessorSupplier<String, String, Void, Void> supplier = new MockApiProcessorSupplier<>();
    stream = builder.stream(topicName, Consumed.with(Serdes.Integer(), Serdes.String()));
    stream.flatMap(mapper).process(supplier);
    try (final TopologyTestDriver driver = new TopologyTestDriver(builder.build(), props)) {
        final TestInputTopic<Integer, String> inputTopic =
            driver.createInputTopic(topicName, new IntegerSerializer(), new StringSerializer(), Instant.ofEpochMilli(0), Duration.ZERO);
        for (final int expectedKey : expectedKeys) {
            inputTopic.pipeInput(expectedKey, "V" + expectedKey);
        }
    }
    assertEquals(6, supplier.theCapturedProcessor().processed().size());
    // Key 0 emits nothing, key 1 emits "10", key 2 emits "20","21",
    // key 3 emits "30","31","32" — all at timestamp 0.
    final KeyValueTimestamp[] expected = {new KeyValueTimestamp<>("10", "V1", 0),
        new KeyValueTimestamp<>("20", "V2", 0),
        new KeyValueTimestamp<>("21", "V2", 0),
        new KeyValueTimestamp<>("30", "V3", 0),
        new KeyValueTimestamp<>("31", "V3", 0),
        new KeyValueTimestamp<>("32", "V3", 0)};
    for (int i = 0; i < expected.length; i++) {
        assertEquals(expected[i], supplier.theCapturedProcessor().processed().get(i));
    }
}
|
/**
 * Incremental SPDY frame decoder. Consumes as much of the buffer as the
 * current decode state allows, invoking delegate callbacks for each decoded
 * element. Whenever the buffer does not yet hold enough bytes for the
 * current state, the method returns and resumes from the saved state on the
 * next call with more data.
 */
public void decode(ByteBuf buffer) {
    boolean last;
    int statusCode;
    while (true) {
        switch(state) {
            case READ_COMMON_HEADER:
                if (buffer.readableBytes() < SPDY_HEADER_SIZE) {
                    return;
                }
                int frameOffset = buffer.readerIndex();
                int flagsOffset = frameOffset + SPDY_HEADER_FLAGS_OFFSET;
                int lengthOffset = frameOffset + SPDY_HEADER_LENGTH_OFFSET;
                buffer.skipBytes(SPDY_HEADER_SIZE);
                // High bit of the first header byte distinguishes control
                // frames from data frames.
                boolean control = (buffer.getByte(frameOffset) & 0x80) != 0;
                int version;
                int type;
                if (control) {
                    // Decode control frame common header
                    version = getUnsignedShort(buffer, frameOffset) & 0x7FFF;
                    type = getUnsignedShort(buffer, frameOffset + SPDY_HEADER_TYPE_OFFSET);
                    streamId = 0; // Default to session Stream-ID
                } else {
                    // Decode data frame common header
                    version = spdyVersion; // Default to expected version
                    type = SPDY_DATA_FRAME;
                    streamId = getUnsignedInt(buffer, frameOffset);
                }
                flags = buffer.getByte(flagsOffset);
                length = getUnsignedMedium(buffer, lengthOffset);
                // Check version first then validity
                if (version != spdyVersion) {
                    state = State.FRAME_ERROR;
                    delegate.readFrameError("Invalid SPDY Version");
                } else if (!isValidFrameHeader(streamId, type, flags, length)) {
                    state = State.FRAME_ERROR;
                    delegate.readFrameError("Invalid Frame Error");
                } else {
                    state = getNextState(type, length);
                }
                break;
            case READ_DATA_FRAME:
                if (length == 0) {
                    state = State.READ_COMMON_HEADER;
                    delegate.readDataFrame(streamId, hasFlag(flags, SPDY_DATA_FLAG_FIN), Unpooled.buffer(0));
                    break;
                }
                // Generate data frames that do not exceed maxChunkSize
                int dataLength = Math.min(maxChunkSize, length);
                // Wait until entire frame is readable
                if (buffer.readableBytes() < dataLength) {
                    return;
                }
                ByteBuf data = buffer.alloc().buffer(dataLength);
                data.writeBytes(buffer, dataLength);
                length -= dataLength;
                if (length == 0) {
                    state = State.READ_COMMON_HEADER;
                }
                // FIN is reported only on the final chunk of the frame.
                last = length == 0 && hasFlag(flags, SPDY_DATA_FLAG_FIN);
                delegate.readDataFrame(streamId, last, data);
                break;
            case READ_SYN_STREAM_FRAME:
                if (buffer.readableBytes() < 10) {
                    return;
                }
                int offset = buffer.readerIndex();
                streamId = getUnsignedInt(buffer, offset);
                int associatedToStreamId = getUnsignedInt(buffer, offset + 4);
                // Priority is the top 3 bits of the 9th byte.
                byte priority = (byte) (buffer.getByte(offset + 8) >> 5 & 0x07);
                last = hasFlag(flags, SPDY_FLAG_FIN);
                boolean unidirectional = hasFlag(flags, SPDY_FLAG_UNIDIRECTIONAL);
                buffer.skipBytes(10);
                length -= 10;
                if (streamId == 0) {
                    state = State.FRAME_ERROR;
                    delegate.readFrameError("Invalid SYN_STREAM Frame");
                } else {
                    state = State.READ_HEADER_BLOCK;
                    delegate.readSynStreamFrame(streamId, associatedToStreamId, priority, last, unidirectional);
                }
                break;
            case READ_SYN_REPLY_FRAME:
                if (buffer.readableBytes() < 4) {
                    return;
                }
                streamId = getUnsignedInt(buffer, buffer.readerIndex());
                last = hasFlag(flags, SPDY_FLAG_FIN);
                buffer.skipBytes(4);
                length -= 4;
                if (streamId == 0) {
                    state = State.FRAME_ERROR;
                    delegate.readFrameError("Invalid SYN_REPLY Frame");
                } else {
                    state = State.READ_HEADER_BLOCK;
                    delegate.readSynReplyFrame(streamId, last);
                }
                break;
            case READ_RST_STREAM_FRAME:
                if (buffer.readableBytes() < 8) {
                    return;
                }
                streamId = getUnsignedInt(buffer, buffer.readerIndex());
                statusCode = getSignedInt(buffer, buffer.readerIndex() + 4);
                buffer.skipBytes(8);
                if (streamId == 0 || statusCode == 0) {
                    state = State.FRAME_ERROR;
                    delegate.readFrameError("Invalid RST_STREAM Frame");
                } else {
                    state = State.READ_COMMON_HEADER;
                    delegate.readRstStreamFrame(streamId, statusCode);
                }
                break;
            case READ_SETTINGS_FRAME:
                if (buffer.readableBytes() < 4) {
                    return;
                }
                boolean clear = hasFlag(flags, SPDY_SETTINGS_CLEAR);
                numSettings = getUnsignedInt(buffer, buffer.readerIndex());
                buffer.skipBytes(4);
                length -= 4;
                // Validate frame length against number of entries. Each ID/Value entry is 8 bytes.
                if ((length & 0x07) != 0 || length >> 3 != numSettings) {
                    state = State.FRAME_ERROR;
                    delegate.readFrameError("Invalid SETTINGS Frame");
                } else {
                    state = State.READ_SETTING;
                    delegate.readSettingsFrame(clear);
                }
                break;
            case READ_SETTING:
                if (numSettings == 0) {
                    state = State.READ_COMMON_HEADER;
                    delegate.readSettingsEnd();
                    break;
                }
                // Each setting entry is 8 bytes: flags byte, 3-byte id,
                // 4-byte value.
                if (buffer.readableBytes() < 8) {
                    return;
                }
                byte settingsFlags = buffer.getByte(buffer.readerIndex());
                int id = getUnsignedMedium(buffer, buffer.readerIndex() + 1);
                int value = getSignedInt(buffer, buffer.readerIndex() + 4);
                boolean persistValue = hasFlag(settingsFlags, SPDY_SETTINGS_PERSIST_VALUE);
                boolean persisted = hasFlag(settingsFlags, SPDY_SETTINGS_PERSISTED);
                buffer.skipBytes(8);
                --numSettings;
                delegate.readSetting(id, value, persistValue, persisted);
                break;
            case READ_PING_FRAME:
                if (buffer.readableBytes() < 4) {
                    return;
                }
                int pingId = getSignedInt(buffer, buffer.readerIndex());
                buffer.skipBytes(4);
                state = State.READ_COMMON_HEADER;
                delegate.readPingFrame(pingId);
                break;
            case READ_GOAWAY_FRAME:
                if (buffer.readableBytes() < 8) {
                    return;
                }
                int lastGoodStreamId = getUnsignedInt(buffer, buffer.readerIndex());
                statusCode = getSignedInt(buffer, buffer.readerIndex() + 4);
                buffer.skipBytes(8);
                state = State.READ_COMMON_HEADER;
                delegate.readGoAwayFrame(lastGoodStreamId, statusCode);
                break;
            case READ_HEADERS_FRAME:
                if (buffer.readableBytes() < 4) {
                    return;
                }
                streamId = getUnsignedInt(buffer, buffer.readerIndex());
                last = hasFlag(flags, SPDY_FLAG_FIN);
                buffer.skipBytes(4);
                length -= 4;
                if (streamId == 0) {
                    state = State.FRAME_ERROR;
                    delegate.readFrameError("Invalid HEADERS Frame");
                } else {
                    state = State.READ_HEADER_BLOCK;
                    delegate.readHeadersFrame(streamId, last);
                }
                break;
            case READ_WINDOW_UPDATE_FRAME:
                if (buffer.readableBytes() < 8) {
                    return;
                }
                streamId = getUnsignedInt(buffer, buffer.readerIndex());
                int deltaWindowSize = getUnsignedInt(buffer, buffer.readerIndex() + 4);
                buffer.skipBytes(8);
                if (deltaWindowSize == 0) {
                    state = State.FRAME_ERROR;
                    delegate.readFrameError("Invalid WINDOW_UPDATE Frame");
                } else {
                    state = State.READ_COMMON_HEADER;
                    delegate.readWindowUpdateFrame(streamId, deltaWindowSize);
                }
                break;
            case READ_HEADER_BLOCK:
                if (length == 0) {
                    state = State.READ_COMMON_HEADER;
                    delegate.readHeaderBlockEnd();
                    break;
                }
                if (!buffer.isReadable()) {
                    return;
                }
                // Deliver the compressed header block incrementally, in
                // whatever chunks are currently readable.
                int compressedBytes = Math.min(buffer.readableBytes(), length);
                ByteBuf headerBlock = buffer.alloc().buffer(compressedBytes);
                headerBlock.writeBytes(buffer, compressedBytes);
                length -= compressedBytes;
                delegate.readHeaderBlock(headerBlock);
                break;
            case DISCARD_FRAME:
                // Skip over the payload of a frame we are not processing.
                int numBytes = Math.min(buffer.readableBytes(), length);
                buffer.skipBytes(numBytes);
                length -= numBytes;
                if (length == 0) {
                    state = State.READ_COMMON_HEADER;
                    break;
                }
                return;
            case FRAME_ERROR:
                // After a framing error, drain everything remaining.
                buffer.skipBytes(buffer.readableBytes());
                return;
            default:
                throw new Error("Shouldn't reach here.");
        }
    }
}
|
/**
 * A HEADERS control frame (type 8) with a zero-length payload is invalid:
 * the decoder must report a frame error and consume the whole input.
 */
@Test
public void testInvalidSpdyHeadersFrameLength() throws Exception {
    short frameType = 8;
    byte frameFlags = 0;
    int frameLength = 0; // invalid length
    ByteBuf frame = Unpooled.buffer(SPDY_HEADER_SIZE + frameLength);
    encodeControlFrameHeader(frame, frameType, frameFlags, frameLength);
    decoder.decode(frame);
    verify(delegate).readFrameError(anyString());
    assertFalse(frame.isReadable());
    frame.release();
}
|
/**
 * Wires up the Jetty server: event listener from mbeans(), plain HTTP
 * connector, the deployment manager's context collection, the web app
 * context with custom error pages and gzip, and stop-at-shutdown.
 *
 * @throws Exception if any configuration step fails
 */
@Override
public void configure() throws Exception {
    server.addEventListener(mbeans());
    server.addConnector(plainConnector());
    // Contexts managed by the deployment manager live in this collection.
    ContextHandlerCollection handlers = new ContextHandlerCollection();
    deploymentManager.setContexts(handlers);
    webAppContext = createWebAppContext();
    JettyCustomErrorPageHandler errorHandler = new JettyCustomErrorPageHandler();
    webAppContext.setErrorHandler(errorHandler);
    webAppContext.insertHandler(gzipHandler());
    // Registering the handler and manager as beans ties their lifecycle
    // to the server's.
    server.addBean(errorHandler);
    server.addBean(deploymentManager);
    HandlerCollection serverLevelHandlers = new HandlerCollection();
    serverLevelHandlers.setHandlers(new Handler[]{handlers});
    server.setHandler(serverLevelHandlers);
    performCustomConfiguration();
    server.setStopAtShutdown(true);
}
|
/** configure() must register the server to stop on JVM shutdown. */
@Test
public void shouldSetStopAtShutdown() throws Exception {
    jettyServer.configure();
    verify(server).setStopAtShutdown(true);
}
|
/**
 * Produces the final rewritten SQL unit: builds the SQL text from the
 * rewrite context's tokens, then runs it through the SQL translator for the
 * storage type (falling back to the protocol type when no storage units are
 * registered).
 *
 * @param sqlRewriteContext rewrite context carrying the SQL, tokens and parameters
 * @param queryContext query context passed through to the translator
 * @return generic rewrite result wrapping the translated SQL and parameters
 */
public GenericSQLRewriteResult rewrite(final SQLRewriteContext sqlRewriteContext, final QueryContext queryContext) {
    DatabaseType protocolType = database.getProtocolType();
    Map<String, StorageUnit> storageUnits = database.getResourceMetaData().getStorageUnits();
    // Use the first storage unit's type; assumes all units share one storage
    // type — TODO confirm against resource metadata invariants.
    DatabaseType storageType = storageUnits.isEmpty() ? protocolType : storageUnits.values().iterator().next().getStorageType();
    SQLTranslatorContext sqlTranslatorContext = translatorRule.translate(new DefaultSQLBuilder(sqlRewriteContext.getSql(), sqlRewriteContext.getSqlTokens()).toSQL(),
        sqlRewriteContext.getParameterBuilder().getParameters(), queryContext, storageType, database, globalRuleMetaData);
    return new GenericSQLRewriteResult(new SQLRewriteUnit(sqlTranslatorContext.getSql(), sqlTranslatorContext.getParameters()));
}
|
/**
 * With the default translator rule, a plain "SELECT 1" and its empty
 * parameter list must pass through the rewrite engine unchanged.
 */
@Test
void assertRewrite() {
    DatabaseType databaseType = mock(DatabaseType.class);
    SQLTranslatorRule rule = new SQLTranslatorRule(new DefaultSQLTranslatorRuleConfigurationBuilder().build());
    ShardingSphereDatabase database = mock(ShardingSphereDatabase.class, RETURNS_DEEP_STUBS);
    when(database.getProtocolType()).thenReturn(databaseType);
    Map<String, StorageUnit> storageUnits = mockStorageUnits(databaseType);
    when(database.getResourceMetaData().getStorageUnits()).thenReturn(storageUnits);
    CommonSQLStatementContext sqlStatementContext = mock(CommonSQLStatementContext.class);
    when(sqlStatementContext.getDatabaseType()).thenReturn(databaseType);
    QueryContext queryContext = mock(QueryContext.class, RETURNS_DEEP_STUBS);
    when(queryContext.getSqlStatementContext()).thenReturn(sqlStatementContext);
    GenericSQLRewriteResult actual = new GenericSQLRewriteEngine(rule, database, mock(RuleMetaData.class))
        .rewrite(new SQLRewriteContext(database, sqlStatementContext, "SELECT 1", Collections.emptyList(), mock(ConnectionContext.class),
            new HintValueContext()), queryContext);
    assertThat(actual.getSqlRewriteUnit().getSql(), is("SELECT 1"));
    assertThat(actual.getSqlRewriteUnit().getParameters(), is(Collections.emptyList()));
}
|
/**
 * Wraps an {@link Iterable} as a RemoteIterator. Each call obtains a fresh
 * iterator from the iterable, so this can be invoked more than once on the
 * same source.
 *
 * @param <T> element type
 * @param source iterable to wrap
 * @return a remote iterator over the iterable's elements
 */
public static <T> RemoteIterator<T> remoteIteratorFromIterable(
    Iterable<T> source) {
    return new WrappedJavaIterator<>(source.iterator());
}
|
/**
 * A Java Iterable wrapped as a RemoteIterator must yield all elements,
 * expose a useful toString and statistics, survive close() without closing
 * the underlying iterable, and support creating a second iterator from the
 * same iterable.
 */
@Test
public void testJavaIterableSupport() throws Throwable {
    CountdownIterable countdown = new CountdownIterable(100);
    RemoteIterator<Integer> it = remoteIteratorFromIterable(
        countdown);
    verifyInvoked(it, 100, c -> counter++);
    assertStringValueContains(it, "CountdownIterator");
    extractStatistics(it);
    // close the iterator
    close(it);
    // closing the wrapper must not close the wrapped iterable
    countdown.assertCloseCount(0);
    // and a new iterator can be crated
    verifyInvoked(remoteIteratorFromIterable(countdown),
        100, c -> counter++);
}
|
/**
 * Builds the timestamp extraction policy for a source.
 *
 * <p>With no TIMESTAMP column configured, record metadata timestamps are
 * used. Otherwise the named column must exist in the schema and be of type
 * STRING (requiring a timestamp_format), BIGINT, or TIMESTAMP.
 *
 * @param ksqlConfig config used to pick the default metadata extractor
 * @param schema logical schema the timestamp column is resolved against
 * @param timestampColumn optional column/format configured in the WITH clause
 * @return the policy matching the column's type
 * @throws KsqlException if the column is missing from the schema, a STRING
 *         column has no format, a format is supplied for a non-STRING
 *         column, or the column type is unsupported
 */
public static TimestampExtractionPolicy create(
    final KsqlConfig ksqlConfig,
    final LogicalSchema schema,
    final Optional<TimestampColumn> timestampColumn
) {
    if (!timestampColumn.isPresent()) {
        // No explicit column: fall back to record metadata timestamps.
        return new MetadataTimestampExtractionPolicy(getDefaultTimestampExtractor(ksqlConfig));
    }
    final ColumnName col = timestampColumn.get().getColumn();
    final Optional<String> timestampFormat = timestampColumn.get().getFormat();
    final Column column = schema.findColumn(col)
        .orElseThrow(() -> new KsqlException(
            "The TIMESTAMP column set in the WITH clause does not exist in the schema: '"
                + col.toString(FormatOptions.noEscape()) + "'"));
    final SqlBaseType tsColumnType = column.type().baseType();
    if (tsColumnType == SqlBaseType.STRING) {
        // String timestamps are only parseable with an explicit format.
        final String format = timestampFormat.orElseThrow(() -> new KsqlException(
            "A String timestamp field has been specified without"
                + " also specifying the "
                + CommonCreateConfigs.TIMESTAMP_FORMAT_PROPERTY.toLowerCase()));
        return new StringTimestampExtractionPolicy(col, format);
    }
    if (timestampFormat.isPresent()) {
        // A format only makes sense for STRING columns.
        throw new KsqlException("'" + CommonCreateConfigs.TIMESTAMP_FORMAT_PROPERTY
            + "' set in the WITH clause can only be used "
            + "when the timestamp column is of type STRING.");
    }
    if (tsColumnType == SqlBaseType.BIGINT) {
        return new LongColumnTimestampExtractionPolicy(col);
    }
    if (tsColumnType == SqlBaseType.TIMESTAMP) {
        return new TimestampColumnTimestampExtractionPolicy(col);
    }
    throw new KsqlException(
        "Timestamp column, " + col + ", should be LONG(INT64), TIMESTAMP,"
            + " or a String with a "
            + CommonCreateConfigs.TIMESTAMP_FORMAT_PROPERTY.toLowerCase()
            + " specified.");
}
|
/**
 * A BIGINT timestamp column with no format must yield a
 * LongColumnTimestampExtractionPolicy bound to that column.
 */
@Test
public void shouldCreateLongTimestampPolicyWhenTimestampFieldIsOfTypeLong() {
    // Given:
    final String timestamp = "timestamp";
    final LogicalSchema schema = schemaBuilder2
        .valueColumn(ColumnName.of(timestamp.toUpperCase()), SqlTypes.BIGINT)
        .build();
    // When:
    final TimestampExtractionPolicy result = TimestampExtractionPolicyFactory
        .create(
            ksqlConfig,
            schema,
            Optional.of(
                new TimestampColumn(
                    ColumnName.of(timestamp.toUpperCase()),
                    Optional.empty()
                )
            )
        );
    // Then:
    assertThat(result, instanceOf(LongColumnTimestampExtractionPolicy.class));
    assertThat(result.getTimestampField(),
        equalTo(ColumnName.of(timestamp.toUpperCase())));
}
|
/**
 * Resolves the Drive file id for a path. Resolution order:
 * 1) id already present in the path's attributes;
 * 2) well-known virtual roots (root, My Drive, Shared, Shared Drives)
 *    map to the fixed root folder id;
 * 3) the inherited id cache;
 * 4) for direct children of the Shared Drives folder, the team drive
 *    listing;
 * 5) otherwise, a search listing of the parent folder, ignoring trashed
 *    duplicates.
 *
 * @param file path to resolve
 * @return the Drive file id
 * @throws NotfoundException if no matching file can be found
 * @throws BackgroundException on listing failures
 */
@Override
public String getFileId(final Path file) throws BackgroundException {
    if(StringUtils.isNotBlank(file.attributes().getFileId())) {
        return file.attributes().getFileId();
    }
    if(file.isRoot()
        || new SimplePathPredicate(file).test(DriveHomeFinderService.MYDRIVE_FOLDER)
        || new SimplePathPredicate(file).test(DriveHomeFinderService.SHARED_FOLDER_NAME)
        || new SimplePathPredicate(file).test(DriveHomeFinderService.SHARED_DRIVES_NAME)) {
        return DriveHomeFinderService.ROOT_FOLDER_ID;
    }
    final String cached = super.getFileId(file);
    if(cached != null) {
        if(log.isDebugEnabled()) {
            log.debug(String.format("Return cached fileid %s for file %s", cached, file));
        }
        return cached;
    }
    if(new SimplePathPredicate(DriveHomeFinderService.SHARED_DRIVES_NAME).test(file.getParent())) {
        // Direct child of the Shared Drives folder: resolve via team drives.
        final Path found = new DriveTeamDrivesListService(session, this).list(file.getParent(),
            new DisabledListProgressListener()).find(new SimplePathPredicate(file)
        );
        if(null == found) {
            throw new NotfoundException(file.getAbsolute());
        }
        return this.cache(file, found.attributes().getFileId());
    }
    final Path query;
    if(file.isPlaceholder()) {
        // Placeholders are searched by base name without the extension —
        // presumably exported document shortcuts; confirm against callers.
        query = new Path(file.getParent(), FilenameUtils.removeExtension(file.getName()), file.getType(), file.attributes());
    }
    else {
        query = file;
    }
    final AttributedList<Path> list = new FileidDriveListService(session, this, query).list(file.getParent(), new DisabledListProgressListener());
    // Prefer non-trashed entries when multiple files share the name.
    final Path found = list.filter(new IgnoreTrashedComparator()).find(new SimplePathPredicate(file));
    if(null == found) {
        throw new NotfoundException(file.getAbsolute());
    }
    return this.cache(file, found.attributes().getFileId());
}
|
/**
 * A filename containing a backslash must still be resolvable to a file id
 * after creation (integration test against a live session).
 */
@Test
public void testGetFileidBackslashCharacter() throws Exception {
    final Path test = new Path(DriveHomeFinderService.MYDRIVE_FOLDER, String.format("%s\\", new AlphanumericRandomStringService().random()), EnumSet.of(Path.Type.file));
    final DriveFileIdProvider fileid = new DriveFileIdProvider(session);
    new DriveTouchFeature(session, fileid).touch(test, new TransferStatus());
    assertNotNull(fileid.getFileId(test));
    new DriveDeleteFeature(session, fileid).delete(Collections.singletonList(test), new DisabledLoginCallback(), new Delete.DisabledCallback());
}
|
/**
 * Maps a protobuf NamenodeRoleProto onto the corresponding NamenodeRole,
 * returning {@code null} for any unrecognized proto value.
 */
public static NamenodeRole convert(NamenodeRoleProto role) {
    switch (role) {
        case NAMENODE:
            return NamenodeRole.NAMENODE;
        case BACKUP:
            return NamenodeRole.BACKUP;
        case CHECKPOINT:
            return NamenodeRole.CHECKPOINT;
        default:
            return null;
    }
}
|
/**
 * Round-trips a NAME_NODE StorageInfo through its protobuf form and checks
 * that all fields survive the conversion.
 */
@Test
public void testConvertStoragInfo() {
    StorageInfo original = getStorageInfo(NodeType.NAME_NODE);
    StorageInfoProto proto = PBHelper.convert(original);
    StorageInfo restored = PBHelper.convert(proto, NodeType.NAME_NODE);
    assertEquals(original.getClusterID(), restored.getClusterID());
    assertEquals(original.getCTime(), restored.getCTime());
    assertEquals(original.getLayoutVersion(), restored.getLayoutVersion());
    assertEquals(original.getNamespaceID(), restored.getNamespaceID());
}
|
/**
 * Returns the instance id generator named in this instance's metadata,
 * falling back to the default generator when the key is absent.
 */
public String getInstanceIdGenerator() {
    final String generator = getMetaDataByKeyWithDefault(
        PreservedMetadataKeys.INSTANCE_ID_GENERATOR,
        Constants.DEFAULT_INSTANCE_ID_GENERATOR);
    return generator;
}
|
/**
 * Without metadata the default id generator is returned; an explicit
 * metadata entry overrides it.
 */
@Test
void testGetInstanceIdGenerator() {
    Instance inst = new Instance();
    assertEquals(Constants.DEFAULT_INSTANCE_ID_GENERATOR, inst.getInstanceIdGenerator());
    inst.addMetadata(PreservedMetadataKeys.INSTANCE_ID_GENERATOR, "test");
    assertEquals("test", inst.getInstanceIdGenerator());
}
|
/**
 * Creates a table with the given column families, delegating to the
 * three-argument overload with a default duration of one hour.
 *
 * @param tableId id of the table to create
 * @param columnFamilies column families the table is created with
 * @throws BigtableResourceManagerException if table creation fails
 */
public synchronized void createTable(String tableId, Iterable<String> columnFamilies)
    throws BigtableResourceManagerException {
    final Duration oneHour = Duration.ofHours(1);
    createTable(tableId, columnFamilies, oneHour);
}
|
/**
 * A RuntimeException thrown by the table admin client during creation must
 * surface as a BigtableResourceManagerException.
 */
@Test
public void testCreateTableShouldThrowErrorWhenTableAdminClientFailsToCreateTable() {
    // Table does not exist yet, so createTable proceeds to the client call.
    when(bigtableResourceManagerClientFactory.bigtableTableAdminClient().exists(anyString()))
        .thenReturn(false);
    when(bigtableResourceManagerClientFactory.bigtableTableAdminClient().createTable(any()))
        .thenThrow(RuntimeException.class);
    assertThrows(
        BigtableResourceManagerException.class,
        () -> testManager.createTable(TABLE_ID, ImmutableList.of("cf1")));
}
|
/**
 * Combines a host description with overrides from the basic host
 * configuration. Configured locations, auxiliary locations and IP addresses
 * (when present) replace those from the description; other description
 * fields are carried over unchanged. Annotations are merged via the
 * annotation-combining overload.
 *
 * @param cfg basic host configuration, may be null
 * @param descr host description to enrich, may be null
 * @return combined description, or {@code descr} when either input is null
 */
public static HostDescription combine(BasicHostConfig cfg,
                                      HostDescription descr) {
    if (cfg == null || descr == null) {
        return descr;
    }

    Set<HostLocation> locations = descr.locations();
    Set<HostLocation> cfgLocations = cfg.locations();
    if (cfgLocations != null) {
        locations = restampLocations(cfgLocations);
    }

    Set<HostLocation> auxLocations = descr.auxLocations();
    Set<HostLocation> cfgAuxLocations = cfg.auxLocations();
    if (cfgAuxLocations != null) {
        auxLocations = restampLocations(cfgAuxLocations);
    }

    Set<IpAddress> ipAddresses = descr.ipAddress();
    Set<IpAddress> cfgIpAddresses = cfg.ipAddresses();
    if (cfgIpAddresses != null) {
        ipAddresses = cfgIpAddresses;
    }

    SparseAnnotations sa = combine(cfg, descr.annotations());
    return new DefaultHostDescription(descr.hwAddress(), descr.vlan(),
                                      locations, auxLocations, ipAddresses,
                                      descr.innerVlan(), descr.tpid(),
                                      descr.configured(), sa);
}

// Re-creates each configured location with a fresh wall-clock timestamp,
// preserving the original per-element System.currentTimeMillis() behavior.
private static Set<HostLocation> restampLocations(Set<HostLocation> locations) {
    return locations.stream()
        .map(location -> new HostLocation(location, System.currentTimeMillis()))
        .collect(Collectors.toSet());
}
|
/**
 * The combined description must carry the name, longitude and latitude
 * annotations taken from the basic host configuration.
 */
@Test
public void testDescOps() {
    HostDescription desc = BasicHostOperator.combine(BHC, HOST);
    assertEquals(NAME, desc.annotations().value(AnnotationKeys.NAME));
    assertEquals(String.valueOf(LON), desc.annotations().value(AnnotationKeys.LONGITUDE));
    assertEquals(String.valueOf(LAT), desc.annotations().value(AnnotationKeys.LATITUDE));
}
|
/** Returns the error code. */
public int getErrCode() {
    return this.errCode;
}
|
/**
 * The (code, message) constructor must retain the code, format the message
 * from both values, and leave the cause unset.
 */
@Test
void testConstructorWithErrorCodeAndMsg() {
    NacosRuntimeException ex = new NacosRuntimeException(NacosException.INVALID_PARAM, "test");
    assertEquals(NacosException.INVALID_PARAM, ex.getErrCode());
    assertEquals("errCode: 400, errMsg: test ", ex.getMessage());
    assertNull(ex.getCause());
}
|
/**
 * Validates that the range [index, index + length) fits within a buffer of
 * the given capacity.
 *
 * @param capacity total capacity, must be non-negative
 * @param index starting index, must be non-negative
 * @param length range length, must be non-negative
 * @throws IndexOutOfBoundsException if any argument is negative or the
 *         range does not fit within the capacity
 */
public static void boundsCheck(int capacity, int index, int length) {
    final boolean negativeArgument = capacity < 0 || index < 0 || length < 0;
    // "index > capacity - length" is the overflow-safe form of
    // "index + length > capacity" once all values are non-negative.
    if (negativeArgument || index > capacity - length) {
        throw new IndexOutOfBoundsException(String.format("index=%d, length=%d, capacity=%d", index, length, capacity));
    }
}
|
// A negative length must be rejected regardless of capacity and index.
@Test(expected = IndexOutOfBoundsException.class)
public void boundsCheck_whenLengthSmallerThanZero() {
    ArrayUtils.boundsCheck(100, 0, -1);
}
|
/**
 * Succeeds when at least one of the given config params is present and
 * valid; fails with an error listing all of them when none are. An empty
 * parameter list trivially succeeds.
 *
 * @param config config to inspect
 * @param params candidate parameter keys
 * @return success when any param is valid, error otherwise
 */
public static CheckResult checkAtLeastOneExists(Config config, String... params) {
    if (params.length == 0) {
        return CheckResult.success();
    }
    List<String> absentParams = new LinkedList<>();
    for (String param : params) {
        if (!isValidParam(config, param)) {
            absentParams.add(param);
        }
    }
    if (absentParams.size() < params.length) {
        // At least one parameter was valid.
        return CheckResult.success();
    }
    String errorMsg =
        String.format(
            "please specify at least one config of [%s] as non-empty",
            String.join(",", absentParams));
    return CheckResult.error(errorMsg);
}
|
/**
 * One valid key among the candidates yields success; when every candidate
 * is missing, the error message lists them in the order supplied.
 */
@Test
public void testCheckAtLeastOneExists() {
    Config config = getConfig();
    // k0 exists, so the check passes even though k3/k4 do not.
    CheckResult checkResult = checkAtLeastOneExists(config, "k0", "k3", "k4");
    Assertions.assertTrue(checkResult.isSuccess());
    String errorMsg = "please specify at least one config of [%s] as non-empty";
    // Neither k3 nor k2 exists: the error lists both.
    checkResult = checkAtLeastOneExists(config, "k3", "k2");
    Assertions.assertEquals(String.format(errorMsg, "k3,k2"), checkResult.getMsg());
}
|
/**
 * Registers (or replaces) the block handler method for the given resource
 * class and resource name.
 *
 * @param clazz resource class, must not be null
 * @param name resource name, must not be blank
 * @param method handler method to register
 * @throws IllegalArgumentException if clazz is null or name is blank
 * @deprecated retained for backward compatibility
 */
@Deprecated
static void updateBlockHandlerFor(Class<?> clazz, String name, Method method) {
    if (StringUtil.isBlank(name) || clazz == null) {
        throw new IllegalArgumentException("Bad argument");
    }
    String key = getKey(clazz, name);
    BLOCK_HANDLER_MAP.put(key, MethodWrapper.wrap(method));
}
|
// A blank resource name must be rejected with IllegalArgumentException.
@Test(expected = IllegalArgumentException.class)
public void testUpdateFallbackBadArgument() {
    ResourceMetadataRegistry.updateBlockHandlerFor(String.class, "", new Class[0], String.class.getMethods()[0]);
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.