Dataset columns:

| focal_method (string, length 13–60.9k) | test_case (string, length 25–109k) |
|---|---|
/**
 * Converts {@code originalObject} to an instance compatible with {@code expectedClass}.
 * <p>
 * The object is returned untouched when it is null, already assignable, or only
 * differs by primitive boxing; any type converts to String via toString(); otherwise
 * conversion is dispatched on the source class name to the convertFromXxx helpers.
 *
 * @param expectedClass  target type requested by the caller
 * @param originalObject value to convert; may be null
 * @return the converted value, or null when {@code originalObject} is null
 * @throws KiePMMLException when no conversion from the source type exists
 */
public static Object convert(Class<?> expectedClass, Object originalObject) {
    if (originalObject == null) {
        return null;
    }
    Class<?> currentClass = originalObject.getClass();
    if (expectedClass.isAssignableFrom(currentClass)) {
        return originalObject;
    }
    if (PrimitiveBoxedUtils.areSameWithBoxing(expectedClass, currentClass)) {
        // Primitive/boxed pair: no cast/transformation needed.
        return originalObject;
    }
    if (expectedClass == String.class) {
        return originalObject.toString();
    }
    // getClass().getName() never yields primitive names ("int", "double", "float"),
    // so the former primitive case labels were unreachable and have been removed.
    Object toReturn;
    switch (currentClass.getName()) {
        case "java.lang.String":
            toReturn = convertFromString(expectedClass, (String) originalObject);
            break;
        case "java.lang.Integer":
            toReturn = convertFromInteger(expectedClass, (Integer) originalObject);
            break;
        case "java.lang.Double":
            toReturn = convertFromDouble(expectedClass, (Double) originalObject);
            break;
        case "java.lang.Float":
            toReturn = convertFromFloat(expectedClass, (Float) originalObject);
            break;
        default:
            throw new KiePMMLException(String.format(FAILED_CONVERSION, originalObject,
                                                     expectedClass.getName()));
    }
    return toReturn;
}
|
@Test
void convertUnconvertibleFromDouble() {
    // Every entry in the fixture map must make ConverterTypeUtil.convert
    // fail with a KiePMMLException.
    UNCONVERTIBLE_FROM_DOUBLE.forEach((source, target) -> {
        Class<?> targetClass = target.getClass();
        Exception thrown = null;
        try {
            ConverterTypeUtil.convert(targetClass, source);
        } catch (Exception e) {
            thrown = e;
        }
        if (thrown == null) {
            fail(String.format("Expecting KiePMMLException for %s %s", source, target));
        }
        assertThat(thrown.getClass()).isEqualTo(KiePMMLException.class);
    });
}
|
/**
 * Exposes every configured entry as an unmodifiable view of the backing map's
 * values; callers cannot mutate the configuration through it.
 */
public Collection<ConfigEntry> entries() {
    Collection<ConfigEntry> values = entries.values();
    return Collections.unmodifiableCollection(values);
}
|
@Test
public void shouldGetAllEntries() {
    // Both registered entries are visible...
    assertTrue(config.entries().contains(E1));
    assertTrue(config.entries().contains(E2));
    // ...and nothing else is.
    assertEquals(2, config.entries().size());
}
|
/**
 * One-shot initializer for the uuid set; the input is defensively copied.
 *
 * @throws NullPointerException  when {@code uuids} is null
 * @throws IllegalStateException when the uuids were already initialized
 */
public void setUuids(Set<String> uuids) {
    requireNonNull(uuids, "Uuids cannot be null");
    checkState(this.uuids == null, "Uuids have already been initialized");
    Set<String> defensiveCopy = new HashSet<>(uuids);
    this.uuids = defensiveCopy;
}
|
/** A null argument must fail fast with the exact guard message from setUuids. */
@Test
public void fail_with_NPE_when_setting_null_uuids() {
assertThatThrownBy(() -> sut.setUuids(null))
.isInstanceOf(NullPointerException.class)
.hasMessage("Uuids cannot be null");
}
|
/**
 * Creates a JDBC connector by bootstrapping a Guice injector from three
 * sources: engine services taken from the ConnectorContext, the shared
 * JdbcModule for this catalog, and the driver-specific module.
 *
 * @throws RuntimeException wrapping any checked initialization failure
 */
@Override
public Connector create(String catalogName, Map<String, String> requiredConfig, ConnectorContext context)
{
requireNonNull(requiredConfig, "requiredConfig is null");
// Run Bootstrap/Guice under this factory's class loader so plugin-local
// classes and SPI lookups resolve against the right loader.
try (ThreadContextClassLoader ignored = new ThreadContextClassLoader(classLoader)) {
Bootstrap app = new Bootstrap(
binder -> {
// Bridge engine-provided services into the connector's injector.
binder.bind(TypeManager.class).toInstance(context.getTypeManager());
binder.bind(FunctionMetadataManager.class).toInstance(context.getFunctionMetadataManager());
binder.bind(StandardFunctionResolution.class).toInstance(context.getStandardFunctionResolution());
binder.bind(RowExpressionService.class).toInstance(context.getRowExpressionService());
},
new JdbcModule(catalogName),
module);
Injector injector = app
.doNotInitializeLogging()
.setRequiredConfigurationProperties(requiredConfig)
.initialize();
return injector.getInstance(JdbcConnector.class);
}
catch (Exception e) {
// Unchecked exceptions propagate as-is; checked ones are wrapped.
throwIfUnchecked(e);
throw new RuntimeException(e);
}
}
|
@Test
public void test()
{
    // A factory wired with the in-memory H2 module must produce a working connector.
    ClassLoader loader = getClass().getClassLoader();
    JdbcConnectorFactory connectorFactory =
            new JdbcConnectorFactory("test", new TestingH2JdbcModule(), loader);
    connectorFactory.create("test", TestingH2JdbcModule.createProperties(), new TestingConnectorContext());
}
|
/**
 * Computes the SHA-1 fingerprint of a certificate as a lower-case hex string.
 * Returns null (after logging) when the certificate cannot be encoded or the
 * digest algorithm is unavailable.
 */
public static String getCertFingerPrint(Certificate cert) {
    byte[] digest = null;
    try {
        MessageDigest sha1 = MessageDigest.getInstance("SHA-1");
        digest = sha1.digest(cert.getEncoded());
    } catch (Exception e) {
        logger.error("Exception:", e);
    }
    return digest == null ? null : bytesToHex(digest).toLowerCase();
}
|
@Test
public void testGetCertFingerPrintSecondary() throws Exception {
    X509Certificate cert = null;
    try (InputStream is = Config.getInstance().getInputStreamFromFile("secondary.crt")) {
        cert = (X509Certificate) CertificateFactory.getInstance("X.509").generateCertificate(is);
    } catch (Exception e) {
        e.printStackTrace();
    }
    // Known-good fingerprint of the checked-in secondary certificate.
    String fp = FingerPrintUtil.getCertFingerPrint(cert);
    Assert.assertEquals("0775dcf9193095e791307a115c192cc897753499", fp);
}
|
/**
 * Refreshes the table, then plans splits: an initial discovery pass when no
 * prior position exists, otherwise an incremental pass from that position.
 */
@Override
public ContinuousEnumerationResult planSplits(IcebergEnumeratorPosition lastPosition) {
    table.refresh();
    if (lastPosition == null) {
        return discoverInitialSplits();
    }
    return discoverIncrementalSplits(lastPosition);
}
|
/**
 * With includeColumnStats enabled, both the initial table scan and every
 * incremental cycle must carry full column statistics (3 columns) on each
 * discovered split.
 */
@Test
public void testTableScanAllStats() throws Exception {
appendTwoSnapshots();
ScanContext scanContext =
ScanContext.builder()
.includeColumnStats(true)
.startingStrategy(StreamingStartingStrategy.TABLE_SCAN_THEN_INCREMENTAL)
.build();
ContinuousSplitPlannerImpl splitPlanner =
new ContinuousSplitPlannerImpl(TABLE_RESOURCE.tableLoader().clone(), scanContext, null);
// null position -> initial table-scan discovery.
ContinuousEnumerationResult initialResult = splitPlanner.planSplits(null);
assertThat(initialResult.splits()).hasSize(1);
IcebergSourceSplit split = Iterables.getOnlyElement(initialResult.splits());
assertThat(split.task().files()).hasSize(2);
verifyStatCount(split, 3);
IcebergEnumeratorPosition lastPosition = initialResult.toPosition();
// Subsequent incremental cycles must keep reporting full stats.
for (int i = 0; i < 3; ++i) {
CycleResult result = verifyOneCycle(splitPlanner, lastPosition);
verifyStatCount(result.split, 3);
lastPosition = result.lastPosition;
}
}
|
/**
 * Resolves a SpEL expression against the invoked method and its arguments.
 * <p>
 * Empty input is returned unchanged; ${...} placeholders are delegated to the
 * string value resolver; #method and @bean expressions are evaluated against a
 * method-based SpEL context; anything else is returned verbatim.
 */
@Override
public String resolve(Method method, Object[] arguments, String spelExpression) {
    if (StringUtils.isEmpty(spelExpression)) {
        return spelExpression;
    }
    if (spelExpression.matches(PLACEHOLDER_SPEL_REGEX) && stringValueResolver != null) {
        // ${...} placeholders come from the environment, not SpEL.
        return stringValueResolver.resolveStringValue(spelExpression);
    }
    if (spelExpression.matches(METHOD_SPEL_REGEX)) {
        return evaluateSpel(method, arguments, spelExpression, false);
    }
    if (spelExpression.matches(BEAN_SPEL_REGEX)) {
        // Bean references additionally need a BeanResolver on the context.
        return evaluateSpel(method, arguments, spelExpression, true);
    }
    return spelExpression;
}

/**
 * Evaluates {@code spelExpression} in a method-based context; wires the bean
 * resolver when {@code withBeanResolver} is set. Extracted to remove the
 * previously duplicated METHOD/BEAN branches.
 */
private String evaluateSpel(Method method, Object[] arguments, String spelExpression, boolean withBeanResolver) {
    SpelRootObject rootObject = new SpelRootObject(method, arguments);
    MethodBasedEvaluationContext evaluationContext =
            new MethodBasedEvaluationContext(rootObject, method, arguments, parameterNameDiscoverer);
    if (withBeanResolver) {
        evaluationContext.setBeanResolver(new BeanFactoryResolver(this.beanFactory));
    }
    Object evaluated = expressionParser.parseExpression(spelExpression).getValue(evaluationContext);
    return (String) evaluated;
}
|
@Test
public void placeholderSpelTest() throws Exception {
    // A ${property:default} placeholder with no matching property resolves to its default.
    DefaultSpelResolverTest target = new DefaultSpelResolverTest();
    Method testMethod = target.getClass().getMethod("testMethod", String.class);
    String resolved = sut.resolve(testMethod, new Object[]{}, "${missingProperty:default}");
    assertThat(resolved).isEqualTo("default");
}
|
/**
 * Probes whether a usable keytool binary exists at {@code keytoolPath} by
 * running "keytool -help" and inspecting the exit status. Returns false on
 * any I/O, interruption, or timeout failure.
 */
private static boolean checkKeytool(String keytoolPath) {
final SystemCommand nativeCommand = new SystemCommand(null, null);
final List<String> arguments = new ArrayList<>();
arguments.add(keytoolPath);
// Cap the JVM heap of the spawned keytool process.
arguments.add("-J-Xmx128m");
arguments.add("-help"); // $NON-NLS-1$
try {
int status = nativeCommand.run(arguments);
if (log.isDebugEnabled()) {
log.debug("checkKeyTool:status=" + status);
log.debug(nativeCommand.getOutResult());
}
/*
 * Some implementations of keytool return status 1 for -help
 * MacOS/Java 7 returns 2 if it cannot find keytool
 */
return status == 0 || status == 1; // TODO this is rather fragile
} catch (IOException ioe) {
log.info("Exception checking for keytool existence, will return false, try another way.");
log.debug("Exception is: ", ioe);
return false;
} catch (InterruptedException e) { // NOSONAR
// Restore the interrupt flag so callers can observe the interruption.
log.error("Command was interrupted\n" + nativeCommand.getOutResult(), e);
Thread.currentThread().interrupt();
return false;
} catch (TimeoutException e) { // NOSONAR
log.info("Timeout reached while checking for keytool existence, will return false, try another way.", e);
return false;
}
}
|
/**
 * Running a non-existent executable must raise an IOException from
 * SystemCommand.run; if the command somehow runs, a keytool-like success
 * status (0 or 1) is a failure of this test.
 */
@Test
public void testCheckKeytool() throws Exception {
    SystemCommand sc = new SystemCommand(null, null);
    List<String> arguments = new ArrayList<>();
    arguments.add("xyzqwas"); // should not exist
    Assertions.assertThrows(IOException.class, () -> {
        int status = sc.run(arguments);
        if (status == 0 || status == 1) {
            // Bug fix: the original message said "should produce", inverting its own check.
            fail("Missing executable should not produce exit code 0 or 1. Actual code is " + status);
        }
    });
}
|
/**
 * Parses all test results under {@code xmlPath} and, when routes are found,
 * generates the HTML coverage reports under {@code htmlPath}.
 *
 * @return a human-readable summary of what was generated
 * @throws IOException if parsing or report generation fails
 */
public String generateReport(MavenProject project, final File xmlPath, final File htmlPath) throws IOException {
    parseAllTestResults(xmlPath);
    if (testResults.isEmpty()) {
        // Bug fix: this string was previously returned raw, so the "%n" was
        // emitted literally instead of as a newline.
        return String.format("No routes found. No HTML reports were generated%n");
    }
    gatherBestRouteCoverages();
    squashDuplicateRoutes();
    generateRouteStatistics(project.getName(), htmlPath);
    generateEipStatistics();
    generateHtml(htmlPath);
    return String.format("Generated HTML reports for %d routes%n%n", routeStatisticMap.size());
}
|
@Test
public void testGenerateReport() throws IOException {
    // Ensure the output directory exists before generating reports.
    File htmlPath = htmlPath();
    Path htmlDir = Paths.get(htmlPath.getPath());
    if (!Files.exists(htmlDir)) {
        Files.createDirectories(htmlDir);
    }
    MavenProject mavenProject = new MavenProject();
    mavenProject.setName(RESOURCES);
    processor.generateReport(mavenProject, xmlPath(), htmlPath);
    // The generated index page is the marker that reports were written.
    assertTrue(Files.exists(Paths.get(indexPath().getPath())));
}
|
/**
 * Reads the configured split ratio, falling back (with a warning) to the
 * default when the configured value is not positive.
 */
private static int getSplitRatio(Configuration conf) {
  final int configured = conf.getInt(
      DistCpConstants.CONF_LABEL_SPLIT_RATIO,
      DistCpConstants.SPLIT_RATIO_DEFAULT);
  if (configured > 0) {
    return configured;
  }
  LOG.warn(DistCpConstants.CONF_LABEL_SPLIT_RATIO +
      " should be positive. Fall back to default value: "
      + DistCpConstants.SPLIT_RATIO_DEFAULT);
  return DistCpConstants.SPLIT_RATIO_DEFAULT;
}
|
/**
 * Exercises getSplitRatio: the two-argument heuristic overload, the
 * configuration overload with invalid (negative) values that must fall back
 * to the heuristic, and a valid configured ratio that must win outright.
 */
@Test
public void testGetSplitRatio() throws Exception {
Assert.assertEquals(1, DynamicInputFormat.getSplitRatio(1, 1000000000));
Assert.assertEquals(2, DynamicInputFormat.getSplitRatio(11000000, 10));
Assert.assertEquals(4, DynamicInputFormat.getSplitRatio(30, 700));
Assert.assertEquals(2, DynamicInputFormat.getSplitRatio(30, 200));
// Tests with negative value configuration
Configuration conf = new Configuration();
conf.setInt(DistCpConstants.CONF_LABEL_MAX_CHUNKS_TOLERABLE, -1);
conf.setInt(DistCpConstants.CONF_LABEL_MAX_CHUNKS_IDEAL, -1);
conf.setInt(DistCpConstants.CONF_LABEL_MIN_RECORDS_PER_CHUNK, -1);
conf.setInt(DistCpConstants.CONF_LABEL_SPLIT_RATIO, -1);
// Negative settings are ignored, so results match the heuristic above.
Assert.assertEquals(1,
DynamicInputFormat.getSplitRatio(1, 1000000000, conf));
Assert.assertEquals(2,
DynamicInputFormat.getSplitRatio(11000000, 10, conf));
Assert.assertEquals(4, DynamicInputFormat.getSplitRatio(30, 700, conf));
Assert.assertEquals(2, DynamicInputFormat.getSplitRatio(30, 200, conf));
// Tests with valid configuration
conf.setInt(DistCpConstants.CONF_LABEL_MAX_CHUNKS_TOLERABLE, 100);
conf.setInt(DistCpConstants.CONF_LABEL_MAX_CHUNKS_IDEAL, 30);
conf.setInt(DistCpConstants.CONF_LABEL_MIN_RECORDS_PER_CHUNK, 10);
conf.setInt(DistCpConstants.CONF_LABEL_SPLIT_RATIO, 53);
Assert.assertEquals(53, DynamicInputFormat.getSplitRatio(3, 200, conf));
}
|
/**
 * Creates a copy of {@code props} in which every source field index is shifted
 * by the per-input offsets ({@code offset1} for input 0, {@code offset2} for
 * input 1). Target field indexes are left untouched.
 *
 * @param props           properties to copy
 * @param numInputFields1 number of fields on the first input
 * @param numInputFields2 number of fields on the second input
 * @param offset1         shift applied to first-input source indexes
 * @param offset2         shift applied to second-input source indexes
 * @return a new, offset DualInputSemanticProperties instance
 */
public static DualInputSemanticProperties addSourceFieldOffsets(
        DualInputSemanticProperties props,
        int numInputFields1,
        int numInputFields2,
        int offset1,
        int offset2) {
    DualInputSemanticProperties offsetProps = new DualInputSemanticProperties();
    offsetReadFields(props, offsetProps, 0, offset1);
    offsetReadFields(props, offsetProps, 1, offset2);
    offsetForwardedFields(props, offsetProps, 0, numInputFields1, offset1);
    offsetForwardedFields(props, offsetProps, 1, numInputFields2, offset2);
    return offsetProps;
}

/** Copies the read fields of {@code input}, shifting each index by {@code offset}. */
private static void offsetReadFields(
        DualInputSemanticProperties props,
        DualInputSemanticProperties offsetProps,
        int input,
        int offset) {
    if (props.getReadFields(input) != null) {
        FieldSet offsetReadFields = new FieldSet();
        for (int r : props.getReadFields(input)) {
            offsetReadFields = offsetReadFields.addField(r + offset);
        }
        offsetProps.addReadFields(input, offsetReadFields);
    }
}

/**
 * Copies the forwarded-field mappings of {@code input}, shifting each source
 * index by {@code offset} while keeping target indexes unchanged.
 */
private static void offsetForwardedFields(
        DualInputSemanticProperties props,
        DualInputSemanticProperties offsetProps,
        int input,
        int numInputFields,
        int offset) {
    for (int s = 0; s < numInputFields; s++) {
        FieldSet targetFields = props.getForwardingTargetFields(input, s);
        for (int t : targetFields) {
            offsetProps.addForwardedField(input, s + offset, t);
        }
    }
}
|
/**
 * Verifies that addSourceFieldOffsets shifts source indexes of forwarded and
 * read fields by the per-input offsets (1 for input 0, 2 for input 1) while
 * target indexes stay unchanged; also checks it tolerates empty properties.
 */
@Test
void testAddSourceFieldOffsets() {
DualInputSemanticProperties semProps = new DualInputSemanticProperties();
semProps.addForwardedField(0, 0, 1);
semProps.addForwardedField(0, 3, 3);
semProps.addForwardedField(1, 1, 2);
semProps.addForwardedField(1, 1, 4);
semProps.addReadFields(0, new FieldSet(1, 2));
semProps.addReadFields(1, new FieldSet(0, 3, 4));
DualInputSemanticProperties offsetProps =
SemanticPropUtil.addSourceFieldOffsets(semProps, 4, 3, 1, 2);
// Input 0 sources shifted by 1: 0->1 and 3->4; targets 1 and 3 unchanged.
assertThat(offsetProps.getForwardingTargetFields(0, 0)).isEmpty();
assertThat(offsetProps.getForwardingTargetFields(0, 1)).containsExactly(1);
assertThat(offsetProps.getForwardingTargetFields(0, 2)).isEmpty();
assertThat(offsetProps.getForwardingTargetFields(0, 3)).isEmpty();
assertThat(offsetProps.getForwardingTargetFields(0, 4)).containsExactly(3);
// Input 1 source 1 shifted by 2 to index 3, carrying both targets.
assertThat(offsetProps.getForwardingTargetFields(1, 0)).isEmpty();
assertThat(offsetProps.getForwardingTargetFields(1, 1)).isEmpty();
assertThat(offsetProps.getForwardingTargetFields(1, 2)).isEmpty();
assertThat(offsetProps.getForwardingTargetFields(1, 3)).containsExactly(4, 2);
// Read fields shifted by 1 (input 0) and 2 (input 1).
assertThat(offsetProps.getReadFields(0)).containsExactly(2, 3);
assertThat(offsetProps.getReadFields(1)).containsExactly(2, 5, 6);
// Empty properties must not throw.
semProps = new DualInputSemanticProperties();
SemanticPropUtil.addSourceFieldOffsets(semProps, 4, 3, 2, 2);
}
|
/**
 * Converts a numeric or string value to {@code float}.
 *
 * @throws IllegalArgumentException if the value is null or of an unsupported type
 * @throws NumberFormatException    if a String value is not a parseable float
 */
protected float convertFloat(Object value) {
    if (value == null) {
        // Bug fix: previously fell through to value.getClass() and threw a
        // bare NPE while building the error message.
        throw new IllegalArgumentException("Cannot convert to float: null");
    }
    if (value instanceof Number) {
        return ((Number) value).floatValue();
    }
    if (value instanceof String) {
        return Float.parseFloat((String) value);
    }
    throw new IllegalArgumentException("Cannot convert to float: " + value.getClass().getName());
}
|
@Test
public void testFloatConversion() {
    Table table = mock(Table.class);
    when(table.schema()).thenReturn(SIMPLE_SCHEMA);
    RecordConverter converter = new RecordConverter(table, config);
    float expectedFloat = 123f;
    // Strings, ints, longs, doubles, and floats must all converge to the same float.
    for (Object input : ImmutableList.of("123", 123, 123L, 123d, expectedFloat)) {
        assertThat(converter.convertFloat(input)).isEqualTo(expectedFloat);
    }
}
|
/**
 * Publishes the posted meta data to every registered subscriber; when no
 * subscribers exist this is a no-op that still reports SUCCESS.
 */
@PostMapping("/meta/saveOrUpdate")
public Mono<String> saveOrUpdate(@RequestBody final MetaData metaData) {
    if (!CollectionUtils.isEmpty(subscribers)) {
        LOG.info("saveOrUpdate apache shenyu local meta data");
        subscribers.forEach(subscriber -> subscriber.onSubscribe(metaData));
    }
    return Mono.just(Constants.SUCCESS);
}
|
/**
 * Posting meta data must return 200 and fan the payload out to every
 * subscriber; with no subscribers registered the endpoint still returns 200.
 */
@Test
public void testSaveOrUpdate() throws Exception {
final MockHttpServletResponse response = this.mockMvc.perform(MockMvcRequestBuilders.post("/shenyu/meta/saveOrUpdate")
.contentType(MediaType.APPLICATION_JSON)
.content(GsonUtils.getInstance().toJson(metaData)))
.andReturn().getResponse();
assertThat(response.getStatus()).isEqualTo(HttpStatus.OK.value());
// Each registered subscriber must have been notified exactly once.
subscribers.forEach(subscriber -> verify(subscriber).onSubscribe(metaData));
// Empty-subscriber deployment: still a successful (no-op) request.
final MockHttpServletResponse subNullResponse = this.mockMvcSubscribersNull.perform(MockMvcRequestBuilders.post("/shenyu/meta/saveOrUpdate")
.contentType(MediaType.APPLICATION_JSON)
.content(GsonUtils.getInstance().toJson(metaData)))
.andReturn().getResponse();
assertThat(subNullResponse.getStatus()).isEqualTo(HttpStatus.OK.value());
}
|
/**
 * Downloads a remote artifact and imports the service definitions it contains.
 * Responds 201 with the first imported service's name:version, 400 on import
 * errors, 500 on download errors, and 204 when the url is empty or nothing
 * was imported.
 */
@PostMapping(value = "/artifact/download")
public ResponseEntity<String> importArtifact(@RequestParam(value = "url", required = true) String url,
@RequestParam(value = "mainArtifact", defaultValue = "true") boolean mainArtifact,
@RequestParam(value = "secretName", required = false) String secretName) {
if (!url.isEmpty()) {
List<Service> services = null;
Secret secret = null;
if (secretName != null) {
// Optional secret used to authenticate the remote download.
secret = secretRepository.findByName(secretName).stream().findFirst().orElse(null);
log.debug("Secret {} was requested. Have we found it? {}", secretName, (secret != null));
}
try {
// Download remote to local file before import.
HTTPDownloader.FileAndHeaders fileAndHeaders = HTTPDownloader.handleHTTPDownloadToFileAndHeaders(url,
secret, true);
File localFile = fileAndHeaders.getLocalFile();
// Now try importing services.
services = serviceService.importServiceDefinition(localFile,
new ReferenceResolver(url, secret, true,
RelativeReferenceURLBuilderFactory
.getRelativeReferenceURLBuilder(fileAndHeaders.getResponseHeaders())),
new ArtifactInfo(url, mainArtifact));
} catch (IOException ioe) {
log.error("Exception while retrieving remote item " + url, ioe);
return new ResponseEntity<>("Exception while retrieving remote item", HttpStatus.INTERNAL_SERVER_ERROR);
} catch (MockRepositoryImportException mrie) {
// Import-level failures surface the importer's message to the caller.
return new ResponseEntity<>(mrie.getMessage(), HttpStatus.BAD_REQUEST);
}
if (services != null && !services.isEmpty()) {
return new ResponseEntity<>(
"{\"name\": \"" + services.get(0).getName() + ":" + services.get(0).getVersion() + "\"}",
HttpStatus.CREATED);
}
}
return new ResponseEntity<>(HttpStatus.NO_CONTENT);
}
|
@Test
@DisplayName("Should return 500 when there is an error retrieving remote item")
void shouldReturnInternalServerError() throws MockRepositoryImportException {
    // arrange: a URL that points at a non-existent artifact
    String wrongUrl = "https://raw.githubusercontent.com/microcks/microcks/master/samples/wrong-openapi.yaml";
    // act
    ResponseEntity<String> response = sut.importArtifact(wrongUrl, false, null);
    // assert
    SoftAssertions.assertSoftly(softly -> {
        softly.assertThat(response.getStatusCode()).isEqualTo(HttpStatus.INTERNAL_SERVER_ERROR);
        softly.assertThat(response.getBody()).contains("Exception while retrieving remote item");
    });
}
|
/**
 * Parses an ACL permission string (PUB, SUB, PUB|SUB, SUB|PUB) into the
 * corresponding permission bits. Null, DENY, and unknown tokens all map to DENY.
 */
public static byte parsePermFromString(String permString) {
    if (permString == null) {
        return Permission.DENY;
    }
    final String normalized = permString.trim();
    switch (normalized) {
        case AclConstants.PUB:
            return Permission.PUB;
        case AclConstants.SUB:
            return Permission.SUB;
        case AclConstants.PUB_SUB:
        case AclConstants.SUB_PUB:
            return Permission.PUB | Permission.SUB;
        default:
            // AclConstants.DENY and any unrecognized token both deny access.
            return Permission.DENY;
    }
}
|
/**
 * Covers every parse path of parsePermFromString, including null and unknown
 * tokens. Fix: JUnit's assertEquals takes (expected, actual); the original
 * had the arguments swapped, producing misleading failure messages.
 */
@Test
public void fromStringGetPermissionTest() {
    byte perm = Permission.parsePermFromString("PUB");
    Assert.assertEquals(Permission.PUB, perm);
    perm = Permission.parsePermFromString("SUB");
    Assert.assertEquals(Permission.SUB, perm);
    perm = Permission.parsePermFromString("PUB|SUB");
    Assert.assertEquals(Permission.PUB | Permission.SUB, perm);
    perm = Permission.parsePermFromString("SUB|PUB");
    Assert.assertEquals(Permission.PUB | Permission.SUB, perm);
    perm = Permission.parsePermFromString("DENY");
    Assert.assertEquals(Permission.DENY, perm);
    perm = Permission.parsePermFromString("1");
    Assert.assertEquals(Permission.DENY, perm);
    perm = Permission.parsePermFromString(null);
    Assert.assertEquals(Permission.DENY, perm);
}
|
/**
 * Writes the least-significant byte of {@code b} into the buffer at its
 * current position.
 */
public static void putIntByteBuffer(ByteBuffer buf, int b) {
    // Casting int to byte keeps exactly the low 8 bits, same as (b & 0xFF).
    byte lowByte = (byte) b;
    buf.put(lowByte);
}
|
@Test
public void putIntByteBuffer() {
    // Inputs paired with the low byte we expect to be written.
    int[] inputs = {0x00, 0x12, 0x1234, 0x123456, 0x12345678};
    byte[] expected = {(byte) 0x00, (byte) 0x12, (byte) 0x34, (byte) 0x56, (byte) 0x78};
    for (int i = 0; i < inputs.length; i++) {
        ByteBuffer buf = ByteBuffer.allocate(1);
        BufferUtils.putIntByteBuffer(buf, inputs[i]);
        assertEquals(expected[i], buf.get(0));
    }
}
|
/** Returns the shared, preconfigured Ion ObjectMapper (ION_MAPPER). */
public static ObjectMapper ofIon() {
return ION_MAPPER;
}
|
@Test
@DefaultTimeZone("Europe/Athens")
void ion() throws IOException {
    ObjectMapper ionMapper = JacksonMapper.ofIon();
    Pojo original = pojo();
    String serialized = ionMapper.writeValueAsString(original);
    // Ion serialization keeps explicit nulls rather than dropping the field.
    assertThat(serialized, containsString("nullable:null"));
    Pojo roundTripped = ionMapper.readValue(serialized, Pojo.class);
    test(original, roundTripped);
}
|
/**
 * Throws the supplied exception when {@code value} is null or empty;
 * otherwise returns normally.
 *
 * @throws T when the check fails
 */
public static <T extends Throwable> void checkNotEmpty(final String value, final Supplier<T> exceptionSupplierIfUnexpected) throws T {
    if (!Strings.isNullOrEmpty(value)) {
        return;
    }
    throw exceptionSupplierIfUnexpected.get();
}
|
/** An empty collection must trigger the supplied SQLException from checkNotEmpty. */
@Test
void assertCheckNotEmptyWithCollectionToThrowsException() {
assertThrows(SQLException.class, () -> ShardingSpherePreconditions.checkNotEmpty(Collections.emptyList(), SQLException::new));
}
|
/**
 * Merges a Thrift profile tree into this profile.
 */
public void update(final TRuntimeProfileTree thriftProfile) {
    Reference<Integer> nodeIndex = new Reference<>(0);
    update(thriftProfile.nodes, nodeIndex, false);
    // Every node of the flattened tree must have been consumed exactly once.
    Preconditions.checkState(nodeIndex.getRef().equals(thriftProfile.nodes.size()));
}
|
/**
 * Builds a four-node Thrift profile tree (ROOT -> {A -> A_SON, B}) in
 * pre-order list form, merges it into a RuntimeProfile via update(), then
 * exercises time computation and pretty-printing. The node list order and
 * each num_children value must agree for update() to succeed.
 */
@Test
public void testUpdate() {
RuntimeProfile profile = new RuntimeProfile("REAL_ROOT");
/* the profile tree
 * ROOT(time=5s info[key=value])
 * A(time=2s) B(time=1s info[BInfo1=BValu1;BInfo2=BValue2])
 * A_SON(time=10ms counter[counterA1=1; counterA2=2; counterA1Son=3])
 */
TRuntimeProfileTree tprofileTree = new TRuntimeProfileTree();
TRuntimeProfileNode tnodeRoot = new TRuntimeProfileNode();
TRuntimeProfileNode tnodeA = new TRuntimeProfileNode();
TRuntimeProfileNode tnodeB = new TRuntimeProfileNode();
TRuntimeProfileNode tnodeASon = new TRuntimeProfileNode();
tnodeRoot.num_children = 2;
tnodeA.num_children = 1;
tnodeASon.num_children = 0;
tnodeB.num_children = 0;
// Pre-order: ROOT, A, A_SON, then B.
tprofileTree.addToNodes(tnodeRoot);
tprofileTree.addToNodes(tnodeA);
tprofileTree.addToNodes(tnodeASon);
tprofileTree.addToNodes(tnodeB);
tnodeRoot.info_strings = new HashMap<String, String>();
tnodeRoot.info_strings.put("key", "value");
tnodeRoot.info_strings_display_order = new ArrayList<String>();
tnodeRoot.info_strings_display_order.add("key");
tnodeRoot.counters = Lists.newArrayList();
tnodeA.counters = Lists.newArrayList();
tnodeB.counters = Lists.newArrayList();
tnodeASon.counters = Lists.newArrayList();
tnodeRoot.counters.add(new TCounter("TotalTime", TUnit.TIME_NS, 3000000000L));
tnodeA.counters.add(new TCounter("TotalTime", TUnit.TIME_NS, 1000000000L));
tnodeB.counters.add(new TCounter("TotalTime", TUnit.TIME_NS, 1000000000L));
tnodeASon.counters.add(new TCounter("TotalTime", TUnit.TIME_NS, 10000000));
tnodeASon.counters.add(new TCounter("counterA1", TUnit.UNIT, 1));
tnodeASon.counters.add(new TCounter("counterA2", TUnit.BYTES, 1234567L));
tnodeASon.counters.add(new TCounter("counterA1Son", TUnit.UNIT, 3));
// Counter hierarchy: counterA1/counterA2 at top level, counterA1Son under counterA1.
tnodeASon.child_counters_map = Maps.newHashMap();
Set<String> set1 = Sets.newHashSet();
set1.add("counterA1");
set1.add("counterA2");
tnodeASon.child_counters_map.put("", set1);
Set<String> set2 = Sets.newHashSet();
set2.add("counterA1Son");
tnodeASon.child_counters_map.put("counterA1", set2);
tnodeB.info_strings = Maps.newHashMap();
tnodeB.info_strings_display_order = Lists.newArrayList();
tnodeB.info_strings.put("BInfo1", "BValue1");
tnodeB.info_strings.put("BInfo2", "BValue2");
tnodeB.info_strings_display_order.add("BInfo2");
tnodeB.info_strings_display_order.add("BInfo1");
tnodeRoot.indent = true;
tnodeA.indent = true;
tnodeB.indent = true;
tnodeASon.indent = true;
tnodeRoot.name = "ROOT";
tnodeA.name = "A";
tnodeB.name = "B";
tnodeASon.name = "ASON";
profile.update(tprofileTree);
// Smoke-check that the merged profile can compute times and print itself.
StringBuilder builder = new StringBuilder();
profile.computeTimeInProfile();
profile.prettyPrint(builder, "");
}
|
/**
 * URN identifying this schema transform. It is used as a lookup key
 * (e.g. in ManagedTransformConstants.MAPPINGS), so it must stay stable.
 */
@Override
public @UnknownKeyFor @NonNull @Initialized String identifier() {
return "beam:schematransform:org.apache.beam:kafka_write:v1";
}
|
@Test
public void testManagedMappings() {
    KafkaWriteSchemaTransformProvider provider = new KafkaWriteSchemaTransformProvider();
    Map<String, String> mapping = ManagedTransformConstants.MAPPINGS.get(provider.identifier());
    assertNotNull(mapping);
    // Every mapped parameter name must exist in the provider's configuration schema.
    List<String> schemaFieldNames = provider.configurationSchema().getFieldNames();
    mapping.values().forEach(paramName -> assertTrue(schemaFieldNames.contains(paramName)));
}
|
/**
 * Builds the AWS Signature V4 Authorization header for a request described by
 * its attributes, headers, body, method, and timestamp.
 */
String authHeader(Map<String, String> attributes, Map<String, String> headers, String body,
                  AwsCredentials credentials, String timestamp, String httpMethod) {
    String accessKey = credentials.getAccessKey();
    String credentialScope = credentialScopeEcs(timestamp);
    String headerList = signedHeaders(headers);
    String signature = sign(attributes, headers, body, credentials, timestamp, httpMethod);
    return buildAuthHeader(accessKey, credentialScope, headerList, signature);
}
|
/**
 * Signs a canonical EC2 DescribeInstances request with the well-known AWS
 * example credentials and checks the full Authorization header, including the
 * precomputed signature, byte for byte.
 * NOTE(review): the focal authHeader builds its scope via credentialScopeEcs —
 * confirm that scope matches the "ec2" service used here.
 */
@Test
public void authHeaderEc2() {
// given
String timestamp = "20141106T111126Z";
Map<String, String> attributes = new HashMap<>();
attributes.put("Action", "DescribeInstances");
attributes.put("Version", "2016-11-15");
Map<String, String> headers = new HashMap<>();
headers.put("X-Amz-Date", timestamp);
headers.put("Host", "ec2.eu-central-1.amazonaws.com");
String body = "";
AwsCredentials credentials = AwsCredentials.builder()
.setAccessKey("AKIDEXAMPLE")
.setSecretKey("wJalrXUtnFEMI/K7MDENG+bPxRfiCYEXAMPLEKEY")
.build();
AwsRequestSigner requestSigner = new AwsRequestSigner("eu-central-1", "ec2");
// when
String authHeader = requestSigner.authHeader(attributes, headers, body, credentials, timestamp, "POST");
// then
String expectedAuthHeader = "AWS4-HMAC-SHA256 "
+ "Credential=AKIDEXAMPLE/20141106/eu-central-1/ec2/aws4_request, "
+ "SignedHeaders=host;x-amz-date, "
+ "Signature=cedc903f54260b232ced76caf4a72f061565a51cc583a17da87b1132522f5893";
assertEquals(expectedAuthHeader, authHeader);
}
|
/**
 * Renders a collection as "[a, b, ...]" while guarding against cyclic
 * structures: null yields the literal "null", an empty collection "[]", and a
 * self-referencing element prints a "(this Type)" marker instead of recursing.
 */
public static String toString(final Collection<?> col) {
    if (col == null) {
        return "null";
    }
    if (col.isEmpty()) {
        return "[]";
    }
    return CycleDependencyHandler.wrap(col, o -> {
        StringBuilder sb = new StringBuilder(32);
        sb.append("[");
        boolean first = true;
        for (Object obj : col) {
            if (!first) {
                sb.append(", ");
            }
            first = false;
            if (obj == col) {
                // Self-reference: emit a marker instead of recursing forever.
                sb.append("(this ").append(obj.getClass().getSimpleName()).append(")");
            } else {
                sb.append(StringUtils.toString(obj));
            }
        }
        sb.append("]");
        return sb.toString();
    });
}
|
@Test
public void testMapToString() {
    Assertions.assertEquals("null", CollectionUtils.toString((Map<Object, Object>) null));
    Assertions.assertEquals("{}", CollectionUtils.toString(new HashMap<>()));
    Map<Object, Object> filledMap = new HashMap<>();
    filledMap.put("aaa", "111");
    filledMap.put("bbb", "222");
    filledMap.put("self", filledMap);
    // A self-referencing value is rendered as "(this HashMap)" instead of recursing.
    Assertions.assertEquals("{\"aaa\"->\"111\", \"bbb\"->\"222\", \"self\"->(this HashMap)}", CollectionUtils.toString(filledMap));
}
|
/**
 * Caches the JwtRuleHandle parsed from the rule's handle JSON; rules without
 * a handle are ignored.
 */
@Override
public void handlerRule(final RuleData ruleData) {
    String rawHandle = ruleData.getHandle();
    if (rawHandle == null) {
        return;
    }
    JwtRuleHandle jwtRuleHandle = JwtRuleHandle.newInstance(rawHandle);
    CACHED_HANDLE.get().cachedHandle(CacheKeyUtils.INST.getKey(ruleData), jwtRuleHandle);
}
|
@Test
public void testHandlerRule() {
    final String handleJson = "{\"converter\":[{\"jwtVal\":\"sub\",\"headerVal\":\"id\"}]}";
    RuleData ruleData = new RuleData();
    ruleData.setId("jwtRule");
    ruleData.setSelectorId("jwt");
    ruleData.setHandle(handleJson);
    jwtPluginDataHandlerUnderTest.handlerRule(ruleData);
    // The cached handle must round-trip back to the original JSON.
    String cachedJson = JwtPluginDataHandler.CACHED_HANDLE.get().obtainHandle(CacheKeyUtils.INST.getKey(ruleData)).toJson();
    assertEquals(handleJson, cachedJson);
}
|
/** Returns the shared, statically-built rule node path (INSTANCE). */
@Override
public RuleNodePath getRuleNodePath() {
return INSTANCE;
}
|
@Test
void assertNew() {
    RuleNodePath actual = new EncryptRuleNodePathProvider().getRuleNodePath();
    // Exactly two named item groups: encryptors and tables.
    assertThat(actual.getNamedItems().size(), is(2));
    assertTrue(actual.getNamedItems().containsKey(EncryptRuleNodePathProvider.ENCRYPTORS));
    assertTrue(actual.getNamedItems().containsKey(EncryptRuleNodePathProvider.TABLES));
    // No unique items, and the root carries the encrypt rule type.
    assertTrue(actual.getUniqueItems().isEmpty());
    assertThat(actual.getRoot().getRuleType(), is(EncryptRuleNodePathProvider.RULE_TYPE));
}
|
/**
 * Removes from the job's database list every connection that is neither
 * declared private nor actually used by the job. A null private-database set
 * means "no filtering".
 *
 * @return the same (possibly mutated) jobMeta, for call chaining
 */
@VisibleForTesting
JobMeta filterPrivateDatabases( JobMeta jobMeta ) {
  Set<String> privateDatabases = jobMeta.getPrivateDatabases();
  if ( privateDatabases != null ) {
    // keep only private databases and connections the job actually uses
    // (removeIf replaces the previous explicit iterator-remove loop)
    jobMeta.getDatabases().removeIf( databaseMeta ->
      !privateDatabases.contains( databaseMeta.getName() )
        && !jobMeta.isDatabaseConnectionUsed( databaseMeta ) );
  }
  return jobMeta;
}
|
@Test
public void filterPrivateDatabasesNoPrivateDatabaseTest() {
    // An empty private-database set with no used connections filters everything out.
    JobMeta jobMeta = new JobMeta( );
    jobMeta.setDatabases( getDummyDatabases() );
    jobMeta.setPrivateDatabases( new HashSet<>( ) );
    IUnifiedRepository purMock = mock( IUnifiedRepository.class );
    StreamToJobNodeConverter converter = new StreamToJobNodeConverter( purMock );
    assertEquals( 0, converter.filterPrivateDatabases( jobMeta ).getDatabases().size() );
}
|
/**
 * Parses a size property value that is either a plain number of bytes or a
 * number with a k/K, m/M, or g/G binary-unit (1024-based) suffix.
 *
 * @param propertyName  name used only in error messages
 * @param propertyValue value to parse, e.g. "4096", "64k", "2g"
 * @return the size in bytes
 * @throws NumberFormatException if the suffix is unknown or the scaled value
 *         would overflow a long
 */
public static long parseSize(final String propertyName, final String propertyValue)
{
final int lengthMinusSuffix = propertyValue.length() - 1;
final char lastCharacter = propertyValue.charAt(lengthMinusSuffix);
if (Character.isDigit(lastCharacter))
{
// No suffix: the whole string is a plain byte count.
return Long.parseLong(propertyValue);
}
final long value = AsciiEncoding.parseLongAscii(propertyValue, 0, lengthMinusSuffix);
switch (lastCharacter)
{
case 'k':
case 'K':
// Guard: value * 1024 must not overflow a long.
if (value > MAX_K_VALUE)
{
throw new NumberFormatException(propertyName + " would overflow a long: " + propertyValue);
}
return value * 1024;
case 'm':
case 'M':
// Guard: value * 1024^2 must not overflow a long.
if (value > MAX_M_VALUE)
{
throw new NumberFormatException(propertyName + " would overflow a long: " + propertyValue);
}
return value * 1024 * 1024;
case 'g':
case 'G':
// Guard: value * 1024^3 must not overflow a long.
if (value > MAX_G_VALUE)
{
throw new NumberFormatException(propertyName + " would overflow a long: " + propertyValue);
}
return value * 1024 * 1024 * 1024;
default:
throw new NumberFormatException(
propertyName + ": " + propertyValue + " should end with: k, m, or g.");
}
}
|
@Test
void shouldThrowWhenParseSizeOverflows()
{
    // 8589934592 GiB scaled by 1024^3 overflows a signed 64-bit byte count.
    final String overflowingValue = 8589934592L + "g";
    assertThrows(NumberFormatException.class, () -> parseSize("", overflowingValue));
}
|
/**
 * Recursively flattens nested maps into a single-level map whose keys are the
 * nesting path joined with {@code separator}. Pass an empty {@code parentKey}
 * for the top-level call.
 */
public static Map<String, Object> flatten(Map<String, Object> originalMap, String parentKey, String separator) {
    final Map<String, Object> result = new HashMap<>();
    originalMap.forEach((name, value) -> {
        final String path = parentKey.isEmpty() ? name : parentKey + separator + name;
        if (value instanceof Map) {
            @SuppressWarnings("unchecked")
            final Map<String, Object> nested = (Map<String, Object>) value;
            result.putAll(flatten(nested, path, separator));
        } else {
            result.put(path, value);
        }
    });
    return result;
}
|
@Test
public void flattenHandlesFlatMap() throws Exception {
    // A map with no nested maps must come back unchanged.
    final Map<String, Object> flat = ImmutableMap.of("foo", "bar", "baz", "qux");
    assertThat(MapUtils.flatten(flat, "", "_")).isEqualTo(flat);
}
|
@Override
public EncodedMessage transform(ActiveMQMessage message) throws Exception {
if (message == null) {
return null;
}
long messageFormat = 0;
Header header = null;
Properties properties = null;
Map<Symbol, Object> daMap = null;
Map<Symbol, Object> maMap = null;
Map<String,Object> apMap = null;
Map<Object, Object> footerMap = null;
Section body = convertBody(message);
if (message.isPersistent()) {
if (header == null) {
header = new Header();
}
header.setDurable(true);
}
byte priority = message.getPriority();
if (priority != Message.DEFAULT_PRIORITY) {
if (header == null) {
header = new Header();
}
header.setPriority(UnsignedByte.valueOf(priority));
}
String type = message.getType();
if (type != null) {
if (properties == null) {
properties = new Properties();
}
properties.setSubject(type);
}
MessageId messageId = message.getMessageId();
if (messageId != null) {
if (properties == null) {
properties = new Properties();
}
properties.setMessageId(getOriginalMessageId(message));
}
ActiveMQDestination destination = message.getDestination();
if (destination != null) {
if (properties == null) {
properties = new Properties();
}
properties.setTo(destination.getQualifiedName());
if (maMap == null) {
maMap = new HashMap<>();
}
maMap.put(JMS_DEST_TYPE_MSG_ANNOTATION, destinationType(destination));
}
ActiveMQDestination replyTo = message.getReplyTo();
if (replyTo != null) {
if (properties == null) {
properties = new Properties();
}
properties.setReplyTo(replyTo.getQualifiedName());
if (maMap == null) {
maMap = new HashMap<>();
}
maMap.put(JMS_REPLY_TO_TYPE_MSG_ANNOTATION, destinationType(replyTo));
}
String correlationId = message.getCorrelationId();
if (correlationId != null) {
if (properties == null) {
properties = new Properties();
}
try {
properties.setCorrelationId(AMQPMessageIdHelper.INSTANCE.toIdObject(correlationId));
} catch (AmqpProtocolException e) {
properties.setCorrelationId(correlationId);
}
}
long expiration = message.getExpiration();
if (expiration != 0) {
long ttl = expiration - System.currentTimeMillis();
if (ttl < 0) {
ttl = 1;
}
if (header == null) {
header = new Header();
}
header.setTtl(new UnsignedInteger((int) ttl));
if (properties == null) {
properties = new Properties();
}
properties.setAbsoluteExpiryTime(new Date(expiration));
}
long timeStamp = message.getTimestamp();
if (timeStamp != 0) {
if (properties == null) {
properties = new Properties();
}
properties.setCreationTime(new Date(timeStamp));
}
// JMSX Message Properties
int deliveryCount = message.getRedeliveryCounter();
if (deliveryCount > 0) {
if (header == null) {
header = new Header();
}
header.setDeliveryCount(UnsignedInteger.valueOf(deliveryCount));
}
String userId = message.getUserID();
if (userId != null) {
if (properties == null) {
properties = new Properties();
}
properties.setUserId(new Binary(userId.getBytes(StandardCharsets.UTF_8)));
}
String groupId = message.getGroupID();
if (groupId != null) {
if (properties == null) {
properties = new Properties();
}
properties.setGroupId(groupId);
}
int groupSequence = message.getGroupSequence();
if (groupSequence > 0) {
if (properties == null) {
properties = new Properties();
}
properties.setGroupSequence(UnsignedInteger.valueOf(groupSequence));
}
final Map<String, Object> entries;
try {
entries = message.getProperties();
} catch (IOException e) {
throw JMSExceptionSupport.create(e);
}
for (Map.Entry<String, Object> entry : entries.entrySet()) {
String key = entry.getKey();
Object value = entry.getValue();
if (key.startsWith(JMS_AMQP_PREFIX)) {
if (key.startsWith(NATIVE, JMS_AMQP_PREFIX_LENGTH)) {
// skip transformer appended properties
continue;
} else if (key.startsWith(ORIGINAL_ENCODING, JMS_AMQP_PREFIX_LENGTH)) {
// skip transformer appended properties
continue;
} else if (key.startsWith(MESSAGE_FORMAT, JMS_AMQP_PREFIX_LENGTH)) {
messageFormat = (long) TypeConversionSupport.convert(entry.getValue(), Long.class);
continue;
} else if (key.startsWith(HEADER, JMS_AMQP_PREFIX_LENGTH)) {
if (header == null) {
header = new Header();
}
continue;
} else if (key.startsWith(PROPERTIES, JMS_AMQP_PREFIX_LENGTH)) {
if (properties == null) {
properties = new Properties();
}
continue;
} else if (key.startsWith(MESSAGE_ANNOTATION_PREFIX, JMS_AMQP_PREFIX_LENGTH)) {
if (maMap == null) {
maMap = new HashMap<>();
}
String name = key.substring(JMS_AMQP_MESSAGE_ANNOTATION_PREFIX.length());
maMap.put(Symbol.valueOf(name), value);
continue;
} else if (key.startsWith(FIRST_ACQUIRER, JMS_AMQP_PREFIX_LENGTH)) {
if (header == null) {
header = new Header();
}
header.setFirstAcquirer((boolean) TypeConversionSupport.convert(value, Boolean.class));
continue;
} else if (key.startsWith(CONTENT_TYPE, JMS_AMQP_PREFIX_LENGTH)) {
if (properties == null) {
properties = new Properties();
}
properties.setContentType(Symbol.getSymbol((String) TypeConversionSupport.convert(value, String.class)));
continue;
} else if (key.startsWith(CONTENT_ENCODING, JMS_AMQP_PREFIX_LENGTH)) {
if (properties == null) {
properties = new Properties();
}
properties.setContentEncoding(Symbol.getSymbol((String) TypeConversionSupport.convert(value, String.class)));
continue;
} else if (key.startsWith(REPLYTO_GROUP_ID, JMS_AMQP_PREFIX_LENGTH)) {
if (properties == null) {
properties = new Properties();
}
properties.setReplyToGroupId((String) TypeConversionSupport.convert(value, String.class));
continue;
} else if (key.startsWith(DELIVERY_ANNOTATION_PREFIX, JMS_AMQP_PREFIX_LENGTH)) {
if (daMap == null) {
daMap = new HashMap<>();
}
String name = key.substring(JMS_AMQP_DELIVERY_ANNOTATION_PREFIX.length());
daMap.put(Symbol.valueOf(name), value);
continue;
} else if (key.startsWith(FOOTER_PREFIX, JMS_AMQP_PREFIX_LENGTH)) {
if (footerMap == null) {
footerMap = new HashMap<>();
}
String name = key.substring(JMS_AMQP_FOOTER_PREFIX.length());
footerMap.put(Symbol.valueOf(name), value);
continue;
}
} else if (key.startsWith(AMQ_SCHEDULED_MESSAGE_PREFIX )) {
// strip off the scheduled message properties
continue;
}
// The property didn't map into any other slot so we store it in the
// Application Properties section of the message.
if (apMap == null) {
apMap = new HashMap<>();
}
apMap.put(key, value);
int messageType = message.getDataStructureType();
if (messageType == CommandTypes.ACTIVEMQ_MESSAGE) {
// Type of command to recognize advisory message
Object data = message.getDataStructure();
if(data != null) {
apMap.put("ActiveMqDataStructureType", data.getClass().getSimpleName());
}
}
}
final AmqpWritableBuffer buffer = new AmqpWritableBuffer();
encoder.setByteBuffer(buffer);
if (header != null) {
encoder.writeObject(header);
}
if (daMap != null) {
encoder.writeObject(new DeliveryAnnotations(daMap));
}
if (maMap != null) {
encoder.writeObject(new MessageAnnotations(maMap));
}
if (properties != null) {
encoder.writeObject(properties);
}
if (apMap != null) {
encoder.writeObject(new ApplicationProperties(apMap));
}
if (body != null) {
encoder.writeObject(body);
}
if (footerMap != null) {
encoder.writeObject(new Footer(footerMap));
}
return new EncodedMessage(messageFormat, buffer.getArray(), 0, buffer.getArrayLength());
}
|
// Verifies that a compressed ActiveMQ text message whose original-encoding hint
// is AMQP_DATA is transformed into an AMQP message with a Data-section body
// holding the (decompressed) UTF-8 text.
@Test
public void testConvertCompressedTextMessageCreatesDataSectionBody() throws Exception {
String contentString = "myTextMessageContent";
// The 'true' flag requests a compressed message body from the helper.
ActiveMQTextMessage outbound = createTextMessage(contentString, true);
outbound.setShortProperty(JMS_AMQP_ORIGINAL_ENCODING, AMQP_DATA);
outbound.onSend();
outbound.storeContent();
JMSMappingOutboundTransformer transformer = new JMSMappingOutboundTransformer();
EncodedMessage encoded = transformer.transform(outbound);
assertNotNull(encoded);
Message amqp = encoded.decode();
assertNotNull(amqp.getBody());
assertTrue(amqp.getBody() instanceof Data);
assertTrue(((Data) amqp.getBody()).getValue() instanceof Binary);
Binary data = ((Data) amqp.getBody()).getValue();
// The transformer must have decompressed the payload back to the original text.
String contents = new String(data.getArray(), data.getArrayOffset(), data.getLength(), StandardCharsets.UTF_8);
assertEquals(contentString, contents);
}
|
@Override
public boolean isEmpty() {
// Emptiness is defined entirely by the backing data store.
return this.data.isEmpty();
}
|
// With every eviction/expiration/refresh feature disabled, none of the optional
// policy views should be present on the cache.
@CacheSpec(population = Population.EMPTY, refreshAfterWrite = Expire.DISABLED,
expireAfterAccess = Expire.DISABLED, expireAfterWrite = Expire.DISABLED,
maximumSize = Maximum.DISABLED, weigher = CacheWeigher.DISABLED,
keys = ReferenceType.STRONG, values = ReferenceType.STRONG)
@Test(dataProvider = "caches")
public void noPolicy(Cache<Integer, Integer> cache, CacheContext context) {
assertThat(cache.policy().eviction()).isEmpty();
assertThat(cache.policy().expireAfterWrite()).isEmpty();
assertThat(cache.policy().expireAfterAccess()).isEmpty();
assertThat(cache.policy().refreshAfterWrite()).isEmpty();
}
|
/**
 * Runs one step of the MySQL authentication handshake and, once the credential
 * exchange is complete, performs login and database authorization checks.
 * Throws an access-denied exception when either check fails.
 */
@Override
public AuthenticationResult authenticate(final ChannelHandlerContext context, final PacketPayload payload) {
AuthorityRule authorityRule = ProxyContext.getInstance().getContextManager().getMetaDataContexts().getMetaData().getGlobalRuleMetaData().getSingleRule(AuthorityRule.class);
if (connectionPhase == MySQLConnectionPhase.AUTH_PHASE_FAST_PATH) {
currentAuthResult = authenticatePhaseFastPath(context, payload, authorityRule);
// More handshake round-trips are required before credentials can be checked.
if (!currentAuthResult.isFinished()) {
return currentAuthResult;
}
} else if (connectionPhase == MySQLConnectionPhase.AUTHENTICATION_METHOD_MISMATCH) {
authenticateMismatchedMethod((MySQLPacketPayload) payload);
}
Grantee grantee = new Grantee(currentAuthResult.getUsername(), getHostAddress(context));
if (!login(authorityRule, grantee, authResponse)) {
throw new AccessDeniedException(currentAuthResult.getUsername(), grantee.getHostname(), 0 != authResponse.length);
}
if (!authorizeDatabase(authorityRule, grantee, currentAuthResult.getDatabase())) {
throw new DatabaseAccessDeniedException(currentAuthResult.getUsername(), grantee.getHostname(), currentAuthResult.getDatabase());
}
writeOKPacket(context);
return AuthenticationResultBuilder.finished(grantee.getUsername(), grantee.getHostname(), currentAuthResult.getDatabase());
}
|
// Exercises the AUTHENTICATION_METHOD_MISMATCH phase: the engine should read the
// switch-response bytes from the payload and store them as the auth response.
@Test
void assertAuthenticationSwitchResponse() {
setConnectionPhase(MySQLConnectionPhase.AUTHENTICATION_METHOD_MISMATCH);
MySQLPacketPayload payload = mock(MySQLPacketPayload.class);
Channel channel = mock(Channel.class);
ChannelHandlerContext channelHandlerContext = mock(ChannelHandlerContext.class);
when(payload.readStringEOFByBytes()).thenReturn(authResponse);
when(channel.remoteAddress()).thenReturn(new InetSocketAddress("localhost", 3307));
when(channelHandlerContext.channel()).thenReturn(channel);
setAuthenticationResult();
// Stub the authority rule so login and privilege checks succeed.
AuthorityRule rule = mock(AuthorityRule.class);
ShardingSphereUser user = new ShardingSphereUser("root", "", "127.0.0.1");
when(rule.findUser(user.getGrantee())).thenReturn(Optional.of(user));
ShardingSpherePrivileges privileges = mockPrivileges();
when(rule.findPrivileges(user.getGrantee())).thenReturn(Optional.of(privileges));
when(rule.getAuthenticatorType(any())).thenReturn("");
ContextManager contextManager = mockContextManager(rule);
when(ProxyContext.getInstance().getContextManager()).thenReturn(contextManager);
authenticationEngine.authenticate(channelHandlerContext, payload);
assertThat(getAuthResponse(), is(authResponse));
}
|
/**
 * Exposes the internal retry helper, primarily for tests.
 *
 * @deprecated access the {@code metadataReportRetry} mechanism through supported APIs instead.
 */
@Deprecated
protected MetadataReportRetry getMetadataReportRetry() {
return metadataReportRetry;
}
|
// Verifies that once the retry budget is exhausted, the retry task cancels its
// scheduled future and shuts down its executor.
@Test
void testRetryCancel() throws ClassNotFoundException {
String interfaceName = "org.apache.dubbo.metadata.store.RetryTestService";
String version = "1.0.0.retrycancel";
String group = null;
String application = "vic.retry";
URL storeUrl = URL.valueOf("retryReport://" + NetUtils.getLocalAddress().getHostName()
+ ":4444/org.apache.dubbo.TestServiceForRetryCancel?version=1.0.0.retrycancel&application=vic.retry");
RetryMetadataReport retryReport = new RetryMetadataReport(storeUrl, 2, applicationModel);
// Short period and small retry budget so the cancellation path triggers quickly.
retryReport.metadataReportRetry.retryPeriod = 150L;
retryReport.metadataReportRetry.retryTimesIfNonFail = 2;
retryReport.semaphore = new Semaphore(1);
ScheduledThreadPoolExecutor retryExecutor = (ScheduledThreadPoolExecutor)
retryReport.getMetadataReportRetry().getRetryExecutor();
long completedTaskCount = retryExecutor.getCompletedTaskCount();
storeProvider(retryReport, interfaceName, version, group, application);
// Wait for the assignment of retryScheduledFuture to complete
await().until(() -> retryReport.metadataReportRetry.retryScheduledFuture != null);
assertFalse(retryReport.metadataReportRetry.retryScheduledFuture.isCancelled());
assertFalse(retryReport.metadataReportRetry.retryExecutor.isShutdown());
retryReport.semaphore.release(2);
// Once enough retry attempts have run, the task should cancel and shut down.
await().until(() -> retryExecutor.getCompletedTaskCount() > completedTaskCount + 2);
await().untilAsserted(() -> assertTrue(retryReport.metadataReportRetry.retryScheduledFuture.isCancelled()));
await().untilAsserted(() -> assertTrue(retryReport.metadataReportRetry.retryExecutor.isShutdown()));
}
|
/**
 * Returns the completed value if available, otherwise {@code valueIfAbsent} without blocking.
 *
 * @param valueIfAbsent value returned when the future is not yet complete
 * @throws ExecutionException if the future completed exceptionally
 * @throws InterruptedException if the current thread was interrupted
 */
public abstract T getNow(T valueIfAbsent) throws InterruptedException, ExecutionException;
|
// A future completed with a CancellationException must report itself as cancelled,
// and getNow() must still return the fallback before completion.
@Test
public void testCompletingFuturesViaCancellation() throws Exception {
final KafkaFutureImpl<String> future = new KafkaFutureImpl<>();
CompleterThread<String> myThread = new CompleterThread<>(future, null,
new CancellationException("Ultimate efficiency achieved."));
assertIsNotCompleted(future);
assertEquals("I am ready", future.getNow("I am ready"));
myThread.start();
awaitAndAssertCancelled(future, "Ultimate efficiency achieved.");
assertIsCancelled(future);
myThread.join();
assertNull(myThread.testException);
}
|
/**
 * Looks up Titus output data for the step's external job id (when present) and
 * merges any stored output parameters into the step's runtime parameters.
 */
public void validateAndMergeOutputParams(StepRuntimeSummary runtimeSummary) {
extractExternalJobId(runtimeSummary)
.flatMap(jobId -> outputDataDao.getOutputDataForExternalJob(jobId, ExternalJobType.TITUS))
.ifPresent(
outputData ->
ParamsMergeHelper.mergeOutputDataParams(
runtimeSummary.getParams(), outputData.getParams()));
}
|
// When the runtime summary carries no external job id, the merge is a no-op and
// the params stay empty.
@Test
public void testMissingJobIdArtifact() {
outputDataManager.validateAndMergeOutputParams(runtimeSummary);
assertTrue(runtimeSummary.getParams().isEmpty());
}
|
/**
 * Converts this ACL resource into the equivalent Kafka {@code ResourcePattern}.
 *
 * <p>TOPIC, GROUP, and TRANSACTIONAL_ID resources keep their own name and honor the
 * PREFIX pattern type; CLUSTER resources always map to the literal "kafka-cluster".
 *
 * @return the Kafka resource pattern
 * @throws IllegalArgumentException if the resource type is unknown or the name is missing
 */
public ResourcePattern toKafkaResourcePattern() {
org.apache.kafka.common.resource.ResourceType kafkaType;
String kafkaName;
// PREFIX maps to PREFIXED for the name-carrying types; everything else stays LITERAL.
PatternType kafkaPattern =
AclResourcePatternType.PREFIX.equals(pattern) ? PatternType.PREFIXED : PatternType.LITERAL;
switch (type) {
case TOPIC:
kafkaType = org.apache.kafka.common.resource.ResourceType.TOPIC;
kafkaName = name;
break;
case GROUP:
kafkaType = org.apache.kafka.common.resource.ResourceType.GROUP;
kafkaName = name;
break;
case CLUSTER:
kafkaType = org.apache.kafka.common.resource.ResourceType.CLUSTER;
kafkaName = "kafka-cluster";
// Cluster resources never use prefixed matching, regardless of the configured pattern.
kafkaPattern = PatternType.LITERAL;
break;
case TRANSACTIONAL_ID:
kafkaType = org.apache.kafka.common.resource.ResourceType.TRANSACTIONAL_ID;
kafkaName = name;
break;
default:
throw new IllegalArgumentException("Invalid Acl resource type: " + type);
}
// 'name' may be null for name-carrying types; reject that here.
if (kafkaName == null) {
throw new IllegalArgumentException("Name is required for resource type: " + type);
}
return new ResourcePattern(kafkaType, kafkaName, kafkaPattern);
}
|
// Covers both literal and prefixed transactional-id conversions to Kafka patterns.
@Test
public void testToKafkaResourcePatternForTransactionalIdResource() {
// Regular transactionalId
SimpleAclRuleResource transactionalIdResourceRules = new SimpleAclRuleResource("my-transactionalId", SimpleAclRuleResourceType.TRANSACTIONAL_ID, null);
ResourcePattern expectedKafkaResourcePattern = new ResourcePattern(ResourceType.TRANSACTIONAL_ID, "my-transactionalId", PatternType.LITERAL);
assertThat(transactionalIdResourceRules.toKafkaResourcePattern(), is(expectedKafkaResourcePattern));
// Prefixed transactionalId
transactionalIdResourceRules = new SimpleAclRuleResource("my-", SimpleAclRuleResourceType.TRANSACTIONAL_ID, AclResourcePatternType.PREFIX);
expectedKafkaResourcePattern = new ResourcePattern(ResourceType.TRANSACTIONAL_ID, "my-", PatternType.PREFIXED);
assertThat(transactionalIdResourceRules.toKafkaResourcePattern(), is(expectedKafkaResourcePattern));
}
|
/**
 * Reads a portable array field, tolerating schema-incompatible or missing fields
 * by delegating through the incompatible-field fallback.
 *
 * @return the portable array, or {@code null} when the field is absent
 */
@Override
@Nullable
public Portable[] readPortableArray(@Nonnull String fieldName) throws IOException {
return readIncompatibleField(fieldName, PORTABLE_ARRAY, super::readPortableArray);
}
|
// Reading a non-existent portable-array field should yield null, not throw.
@Test
public void testReadPortableArray() throws Exception {
assertNull(reader.readPortableArray("NO SUCH FIELD"));
}
|
/**
 * Returns the type adapter for {@code type}, consulting the shared cache first and
 * otherwise resolving it through the registered factories.
 *
 * <p>Resolution for cyclic types is handled with a per-thread map of in-progress
 * lookups: a FutureTypeAdapter placeholder is installed before factories run so a
 * recursive request for the same type returns the placeholder instead of recursing
 * forever. Only the thread's initial (outermost) request publishes results to the
 * shared cache, to avoid publishing adapters that still reference unresolved ones.
 *
 * @throws IllegalArgumentException if no factory can handle {@code type}
 */
public <T> TypeAdapter<T> getAdapter(TypeToken<T> type) {
Objects.requireNonNull(type, "type must not be null");
TypeAdapter<?> cached = typeTokenCache.get(type);
if (cached != null) {
@SuppressWarnings("unchecked")
TypeAdapter<T> adapter = (TypeAdapter<T>) cached;
return adapter;
}
Map<TypeToken<?>, TypeAdapter<?>> threadCalls = threadLocalAdapterResults.get();
boolean isInitialAdapterRequest = false;
if (threadCalls == null) {
// First getAdapter call on this thread (or first after the previous outermost
// call finished): create the per-thread in-progress map.
threadCalls = new HashMap<>();
threadLocalAdapterResults.set(threadCalls);
isInitialAdapterRequest = true;
} else {
// the key and value type parameters always agree
@SuppressWarnings("unchecked")
TypeAdapter<T> ongoingCall = (TypeAdapter<T>) threadCalls.get(type);
if (ongoingCall != null) {
// Cyclic dependency: return the placeholder for the in-progress resolution.
return ongoingCall;
}
}
TypeAdapter<T> candidate = null;
try {
FutureTypeAdapter<T> call = new FutureTypeAdapter<>();
threadCalls.put(type, call);
// Factories are consulted in registration order; the first non-null wins.
for (TypeAdapterFactory factory : factories) {
candidate = factory.create(this, type);
if (candidate != null) {
call.setDelegate(candidate);
// Replace future adapter with actual adapter
threadCalls.put(type, candidate);
break;
}
}
} finally {
if (isInitialAdapterRequest) {
threadLocalAdapterResults.remove();
}
}
if (candidate == null) {
throw new IllegalArgumentException(
"GSON (" + GsonBuildConfig.VERSION + ") cannot handle " + type);
}
if (isInitialAdapterRequest) {
/*
* Publish resolved adapters to all threads
* Can only do this for the initial request because cyclic dependency TypeA -> TypeB -> TypeA
* would otherwise publish adapter for TypeB which uses not yet resolved adapter for TypeA
* See https://github.com/google/gson/issues/625
*/
typeTokenCache.putAll(threadCalls);
}
return candidate;
}
|
// Simulates a concurrent getAdapter call for the same type from inside a factory:
// the nested thread resolves its own adapter instance, so two DummyAdapter
// instances are created and both callers get a usable adapter.
@Test
public void testGetAdapter_Concurrency() {
class DummyAdapter<T> extends TypeAdapter<T> {
@Override
public void write(JsonWriter out, T value) throws IOException {
throw new AssertionError("not needed for this test");
}
@Override
public T read(JsonReader in) throws IOException {
throw new AssertionError("not needed for this test");
}
}
final AtomicInteger adapterInstancesCreated = new AtomicInteger(0);
final AtomicReference<TypeAdapter<?>> threadAdapter = new AtomicReference<>();
final Class<?> requestedType = Number.class;
Gson gson =
new GsonBuilder()
.registerTypeAdapterFactory(
new TypeAdapterFactory() {
private volatile boolean isFirstCall = true;
@Override
public <T> TypeAdapter<T> create(final Gson gson, TypeToken<T> type) {
if (isFirstCall) {
isFirstCall = false;
// Create a separate thread which requests an adapter for the same type
// This will cause this factory to return a different adapter instance than
// the one it is currently creating
Thread thread =
new Thread() {
@Override
public void run() {
threadAdapter.set(gson.getAdapter(requestedType));
}
};
thread.start();
try {
thread.join();
} catch (InterruptedException e) {
throw new RuntimeException(e);
}
}
// Create a new dummy adapter instance
adapterInstancesCreated.incrementAndGet();
return new DummyAdapter<>();
}
})
.create();
TypeAdapter<?> adapter = gson.getAdapter(requestedType);
// One instance for the outer request, one for the nested thread's request.
assertThat(adapterInstancesCreated.get()).isEqualTo(2);
assertThat(adapter).isInstanceOf(DummyAdapter.class);
assertThat(threadAdapter.get()).isInstanceOf(DummyAdapter.class);
}
|
/**
 * Converts an Iceberg schema to an Arrow schema by mapping each top-level
 * Iceberg column to its Arrow field equivalent.
 */
public static Schema convert(final org.apache.iceberg.Schema schema) {
ImmutableList.Builder<Field> arrowFields = ImmutableList.builder();
schema.columns().forEach(column -> arrowFields.add(convert(column)));
return new Schema(arrowFields.build());
}
|
// Converting a schema with nested map/list/timestamp types must yield one Arrow
// field per top-level Iceberg column.
@Test
public void convertComplex() {
Schema iceberg =
new Schema(
Types.NestedField.optional(
0, "m", MapType.ofOptional(1, 2, StringType.get(), LongType.get())),
Types.NestedField.required(
3,
"m2",
MapType.ofOptional(
4, 5, StringType.get(), ListType.ofOptional(6, TimestampType.withoutZone()))));
org.apache.arrow.vector.types.pojo.Schema arrow = ArrowSchemaUtil.convert(iceberg);
assertThat(arrow.getFields()).hasSameSizeAs(iceberg.columns());
}
|
/**
 * Returns the mail configuration, lazily creating a default one bound to this
 * endpoint's Camel context on first access.
 */
public MailConfiguration getConfiguration() {
if (configuration != null) {
return configuration;
}
configuration = new MailConfiguration(getCamelContext());
return configuration;
}
|
// The subject query option must be propagated into the endpoint configuration
// along with protocol, host, and port parsed from the URI.
@Test
public void testSMTPEndpointWithSubjectOption() {
MailEndpoint endpoint = checkEndpoint("smtp://myhost:25?subject=hello");
MailConfiguration config = endpoint.getConfiguration();
assertEquals("smtp", config.getProtocol(), "getProtocol()");
assertEquals("myhost", config.getHost(), "getHost()");
assertEquals(25, config.getPort(), "getPort()");
assertEquals("hello", config.getSubject(), "getSubject()");
assertFalse(config.isDebugMode());
}
|
Label(String spec) {
String[] labelParts = spec.trim().split("\\s*=\\s*");
if (labelParts.length != 2) {
throw new IllegalArgumentException(String.format("Invalid label specification: '%s'", spec));
}
this.key = labelParts[0];
this.value = labelParts[1];
}
|
// A simple "key=value" spec parses into its key and value parts.
@Test
public void label() {
// given
String label = "key=value";
// when
Label result = new Label(label);
// then
assertEquals("key", result.getKey());
assertEquals("value", result.getValue());
}
|
/**
 * Not supported by this file-system implementation.
 *
 * @throws UnsupportedOperationException always
 */
@Override
public long getUsableSpace() {
throw new UnsupportedOperationException("Not implemented");
}
|
// getUsableSpace is unimplemented and must throw UnsupportedOperationException.
@Test(expectedExceptions = UnsupportedOperationException.class)
public void testGetUsableSpace() {
fs.getFile("nonsuch.txt").getUsableSpace();
}
|
/**
 * Reports whether this JavaClass denotes the same type as the given reflection
 * class, i.e. whether both share the same fully qualified name.
 */
@PublicAPI(usage = ACCESS)
public boolean isEquivalentTo(Class<?> clazz) {
return clazz.getName().equals(getName());
}
|
// Equivalence holds only for the exact same type, not for supertypes.
@Test
public void JavaClass_is_equivalent_to_reflect_type() {
JavaClass list = importClassWithContext(List.class);
assertThat(list.isEquivalentTo(List.class)).as("JavaClass is List.class").isTrue();
assertThat(list.isEquivalentTo(Collection.class)).as("JavaClass is Collection.class").isFalse();
}
|
/**
 * Walks every ranked retry-reason category and returns the abbreviation of the
 * last category that matches the given exception/status/message, or null when
 * none matches. All categories are consulted so later matches override earlier ones.
 */
static String getAbbreviation(Exception ex,
Integer statusCode,
String storageErrorMessage) {
String lastMatch = null;
for (RetryReasonCategory category : rankedReasonCategories) {
String candidate = category.captureAndGetAbbreviation(ex, statusCode, storageErrorMessage);
if (candidate != null) {
lastMatch = candidate;
}
}
return lastMatch;
}
|
// HTTP 503 with the ingress-over-account-limit error message must map to the
// ingress-limit-breach abbreviation.
@Test
public void testIngressLimitRetryReason() {
Assertions.assertThat(RetryReason.getAbbreviation(null, HTTP_UNAVAILABLE, INGRESS_OVER_ACCOUNT_LIMIT.getErrorMessage())).isEqualTo(
INGRESS_LIMIT_BREACH_ABBREVIATION
);
}
|
/**
 * Reads the current gauge value by delegating to the underlying gauge source.
 */
@Override
public double read() {
return gaugeSource.read();
}
|
// A gauge created before its probe is registered must still pick up the probe's
// value once the owning object is registered.
@Test
public void whenProbeRegisteredAfterGauge() {
DoubleGauge gauge = metricsRegistry.newDoubleGauge("foo.doubleField");
SomeObject someObject = new SomeObject();
metricsRegistry.registerStaticMetrics(someObject, "foo");
assertEquals(someObject.doubleField, gauge.read(), 10E-6);
}
|
/**
 * Starts a handler-map builder keyed by subtypes of {@code type}. The class
 * argument exists only to pin the key type parameter {@code K} for inference.
 */
@SuppressWarnings("unused") // Required for automatic type inference
public static <K> Builder0<K> forClass(final Class<K> type) {
return new Builder0<>();
}
|
// Registering the same handler instance under two different key types is legal.
@Test
public void shouldNotThrowOnDuplicateHandler0() {
HandlerMaps.forClass(BaseType.class)
.put(LeafTypeA.class, handler0_1)
.put(LeafTypeB.class, handler0_1);
}
|
/**
 * Fetches the repository branches of a GitLab project via the REST API.
 * Non-JSON payloads become IllegalArgumentException; transport failures become
 * IllegalStateException after being logged.
 */
public List<GitLabBranch> getBranches(String gitlabUrl, String pat, Long gitlabProjectId) {
String branchesUrl = format("%s/projects/%s/repository/branches", gitlabUrl, gitlabProjectId);
LOG.debug("get branches : [{}]", branchesUrl);
Request request = new Request.Builder()
.url(branchesUrl)
.get()
.addHeader(PRIVATE_TOKEN, pat)
.build();
try (Response response = client.newCall(request).execute()) {
checkResponseIsSuccessful(response);
String payload = response.body().string();
LOG.trace("loading branches payload result : [{}]", payload);
GitLabBranch[] branches = new GsonBuilder().create().fromJson(payload, GitLabBranch[].class);
return Arrays.asList(branches);
} catch (JsonSyntaxException e) {
throw new IllegalArgumentException("Could not parse GitLab answer to retrieve project branches. Got a non-json payload as result.");
} catch (IOException e) {
logException(branchesUrl, e);
throw new IllegalStateException(e.getMessage(), e);
}
}
|
// With the mock server down, the IOException path must surface as an
// IllegalStateException carrying the connection-failure message.
@Test
public void get_branches_fail_if_exception() throws IOException {
server.shutdown();
String instanceUrl = gitlabUrl;
assertThatThrownBy(() -> underTest.getBranches(instanceUrl, "pat", 12345L))
.isInstanceOf(IllegalStateException.class)
.hasMessageContaining("Failed to connect to");
}
|
/**
 * Creates a reaper bound to the given engine, which it uses to run zombie tasks.
 */
public Reaper(Engine engine) {
_engine = engine;
}
|
// Submits COUNT zombies whose tasks each count down a latch; all must be reaped
// (executed) within the timeout.
@Test
public void testReaper()
throws InterruptedException {
final int COUNT = 10;
final List<Reaper.Zombie> zombies = new ArrayList<>();
final CountDownLatch latch = new CountDownLatch(COUNT);
for (int i = 0; i < COUNT; ++i) {
zombies.add(() -> Task.action("countDown", () -> latch.countDown()));
}
zombies.forEach(z -> _reaper.submit(z));
Assert.assertTrue(latch.await(10, TimeUnit.SECONDS));
}
|
/**
 * Applies the file-name pattern to the given integer (e.g. expanding %i tokens).
 */
public String convertInt(int i) {
return convert(i);
}
|
@Test
// test ways for dealing with flowing i converter, as in "foo%ix"
public void flowingI() {
// NOTE(review): both sub-blocks use the identical pattern "foo%i{}bar%i";
// presumably the second was meant to cover a different flowing form (e.g.
// "foo%ibar%i") — confirm against the original intent before changing.
{
FileNamePattern pp = new FileNamePattern("foo%i{}bar%i", context);
assertEquals("foo3bar3", pp.convertInt(3));
}
{
FileNamePattern pp = new FileNamePattern("foo%i{}bar%i", context);
assertEquals("foo3bar3", pp.convertInt(3));
}
}
|
/**
 * Opens the named resource as a UTF-8 character reader.
 *
 * @throws FileNotFoundException if the resource cannot be located
 */
public static Reader read(String fileName) throws FileNotFoundException {
return new InputStreamReader(readToStream(fileName), StandardCharsets.UTF_8);
}
|
// Missing resources must surface as FileNotFoundException.
@Test
public void shouldThrowWhenResourceNotFound() {
assertThrows(FileNotFoundException.class, () -> ResourceUtils.read("/not-existed"));
}
|
/**
 * Renders the quality gate as "QualityGateImpl{id='…', name='…', status=…, conditions=…}".
 */
@Override
public String toString() {
return new StringBuilder("QualityGateImpl{")
.append("id='").append(id).append('\'')
.append(", name='").append(name).append('\'')
.append(", status=").append(status)
.append(", conditions=").append(conditions)
.append('}')
.toString();
}
|
// Pins the exact toString format, including the rendered condition list.
@Test
public void verify_toString() {
when(condition.toString()).thenReturn("{Condition}");
assertThat(underTest)
.hasToString("QualityGateImpl{id='some id', name='some name', status=OK, conditions=[{Condition}]}");
}
|
/**
 * Deep-clones the given object, picking the cheapest safe strategy:
 * strings are returned as-is (immutable); collections/maps whose elements are not
 * Serializable are round-tripped through JSON with their concrete parametric type;
 * JsonNode uses its own deepCopy; Serializable objects use Java serialization,
 * falling back to JSON if serialization fails; everything else is JSON-cloned.
 * The order of these checks matters — do not reorder.
 */
@Override
public <T> T clone(T object) {
if (object instanceof String) {
return object;
} else if (object instanceof Collection) {
// Inspect one element to decide whether Java serialization is viable.
Object firstElement = findFirstNonNullElement((Collection) object);
if (firstElement != null && !(firstElement instanceof Serializable)) {
JavaType type = TypeFactory.defaultInstance().constructParametricType(object.getClass(), firstElement.getClass());
return objectMapperWrapper.fromBytes(objectMapperWrapper.toBytes(object), type);
}
} else if (object instanceof Map) {
Map.Entry firstEntry = this.findFirstNonNullEntry((Map) object);
if (firstEntry != null) {
Object key = firstEntry.getKey();
Object value = firstEntry.getValue();
if (!(key instanceof Serializable) || !(value instanceof Serializable)) {
JavaType type = TypeFactory.defaultInstance().constructParametricType(object.getClass(), key.getClass(), value.getClass());
return (T) objectMapperWrapper.fromBytes(objectMapperWrapper.toBytes(object), type);
}
}
} else if (object instanceof JsonNode) {
return (T) ((JsonNode) object).deepCopy();
}
if (object instanceof Serializable) {
try {
return (T) SerializationHelper.clone((Serializable) object);
} catch (SerializationException e) {
//it is possible that object itself implements java.io.Serializable, but underlying structure does not
//in this case we switch to the other JSON marshaling strategy which doesn't use the Java serialization
}
}
return jsonClone(object);
}
|
// Cloning a map with a null value must produce an equal but distinct map.
@Test
public void should_clone_map_of_serializable_key_and_value_with_null() {
Map<String, SerializableObject> original = new LinkedHashMap<>();
original.put("null", null);
original.put("key", new SerializableObject("value"));
Object cloned = serializer.clone(original);
assertEquals(original, cloned);
assertNotSame(original, cloned);
}
|
/**
 * REST endpoint that updates a project parameter's name and value, delegating
 * authorization and persistence to the project-parameter service.
 */
@Operation(summary = "updateProjectParameter", description = "UPDATE_PROJECT_PARAMETER_NOTES")
@Parameters({
@Parameter(name = "code", description = "PROJECT_PARAMETER_CODE", schema = @Schema(implementation = long.class, example = "123456")),
@Parameter(name = "projectParameterName", description = "PROJECT_PARAMETER_NAME", schema = @Schema(implementation = String.class)),
@Parameter(name = "projectParameterValue", description = "PROJECT_PARAMETER_VALUE", schema = @Schema(implementation = String.class)),
})
@PutMapping(value = "/{code}")
@ResponseStatus(HttpStatus.OK)
@ApiException(UPDATE_PROJECT_PARAMETER_ERROR)
public Result updateProjectParameter(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@Parameter(name = "projectCode", description = "PROJECT_CODE", required = true) @PathVariable long projectCode,
@PathVariable("code") Long code,
@RequestParam("projectParameterName") String projectParameterName,
@RequestParam(value = "projectParameterValue") String projectParameterValue) {
return projectParameterService.updateProjectParameter(loginUser, projectCode, code, projectParameterName,
projectParameterValue);
}
|
// With the service stubbed to succeed, the controller must return SUCCESS.
@Test
public void testUpdateProjectParameter() {
User loginUser = getGeneralUser();
Mockito.when(projectParameterService.updateProjectParameter(Mockito.any(), Mockito.anyLong(), Mockito.anyLong(),
Mockito.any(), Mockito.any())).thenReturn(getSuccessResult());
Result result = projectParameterController.updateProjectParameter(loginUser, 1, 1L, "key", "value");
Assertions.assertEquals(Status.SUCCESS.getCode(), result.getCode());
}
|
/**
 * Writes a "ConfigProperties" diagnostics section listing every configured
 * property in sorted key order. Reuses the instance-level key buffer to avoid
 * allocating on each run.
 */
@Override
public void run(DiagnosticsLogWriter writer) {
keyList.clear();
keyList.addAll(properties.keySet());
sort(keyList);
writer.startSection("ConfigProperties");
for (String propertyKey : keyList) {
writer.writeKeyValueEntry(propertyKey, properties.get(propertyKey));
}
writer.endSection();
}
|
// The diagnostics output must contain the configured property entry.
@Test
public void testRun() {
plugin.run(logWriter);
assertContains("property1=value1");
}
|
/**
 * Fetches the default service account's OAuth access token from the GCP
 * metadata endpoint and extracts the token from the response payload.
 */
String accessToken() {
String tokenUrl = String.format("%s/computeMetadata/v1/instance/service-accounts/default/token", endpoint);
return extractAccessToken(callGet(tokenUrl));
}
|
// The metadata API must request the token URL with the Google metadata header
// and extract the access token from the JSON response.
@Test
public void accessToken() {
// given
stubFor(get(urlEqualTo("/computeMetadata/v1/instance/service-accounts/default/token"))
.withHeader("Metadata-Flavor", equalTo("Google"))
.willReturn(aResponse().withStatus(HttpURLConnection.HTTP_OK).withBody(accessTokenResponse(ACCESS_TOKEN))));
// when
String result = gcpMetadataApi.accessToken();
// then
assertEquals(ACCESS_TOKEN, result);
}
|
/**
 * Queries Sentinel (SENTINEL SLAVES) for the replicas of the given master and
 * converts the raw attribute maps into RedisServer objects.
 */
@Override
public Collection<RedisServer> slaves(NamedNode master) {
List<Map<String, String>> slaves = connection.sync(StringCodec.INSTANCE, RedisCommands.SENTINEL_SLAVES, master.getName());
return toRedisServersList(slaves);
}
|
// The test topology is expected to expose two replicas for the first master.
@Test
public void testSlaves() {
Collection<RedisServer> masters = connection.masters();
Collection<RedisServer> slaves = connection.slaves(masters.iterator().next());
assertThat(slaves).hasSize(2);
}
|
/**
 * Builds a reactive Redis template from the given connection properties, using
 * the byte-array serialization context for cached values.
 */
public RedisCache(final RedisConfigProperties redisConfigProperties) {
this.redisTemplate = new ReactiveRedisTemplate<>(new RedisConnectionFactory(redisConfigProperties).getLettuceConnectionFactory(),
ShenyuRedisSerializationContext.bytesSerializationContext());
}
|
// Round-trips a value through the Redis cache: absent -> cached -> present -> readable.
// NOTE(review): the assertions run inside fire-and-forget subscribe() callbacks,
// so a failure may not be observed before the test method returns — consider
// blocking (e.g. StepVerifier/block) to make the assertions reliable.
@Test
public void testRedisCache() {
final String testKey = "testRedisCache";
final ICache cache = new RedisCache(getConfig());
cache.isExist(testKey).subscribe(v -> assertEquals(Boolean.FALSE, v));
cache.cacheData(testKey, testKey.getBytes(StandardCharsets.UTF_8), 1000)
.subscribe(v -> assertEquals(Boolean.TRUE, v));
cache.isExist(testKey).subscribe(s -> assertEquals(Boolean.TRUE, s));
cache.getData(testKey).subscribe(data -> assertEquals(testKey, new String(data, StandardCharsets.UTF_8)));
}
|
/**
 * Records, as a project-level measure, whether the analysis comes from
 * SonarQube 9.4 or greater (flag carried on the visitor path's current element).
 */
@Override
public void visitProject(Component project, Path<AnalysisFromSonarQube94Visitor.AnalysisFromSonarQube94> path) {
measureRepository.add(project, analysisFromSonarQube94Metric, Measure.newMeasureBuilder().create(path.current().sonarQube94OrGreater));
}
|
// Visiting a project must add a boolean measure reflecting the path's flag.
@Test
public void visitProject_createMeasureForMetric() {
Component project = builder(FILE).setUuid("uuid")
.setKey("dbKey")
.setName("name")
.setStatus(Component.Status.SAME)
.setReportAttributes(mock(ReportAttributes.class))
.build();
PathAwareVisitor.Path<AnalysisFromSonarQube94> path = mock(PathAwareVisitor.Path.class);
when(path.current()).thenReturn(new AnalysisFromSonarQube94());
underTest.visitProject(project, path);
Measure expectedMeasure = Measure.newMeasureBuilder().create(true);
verify(measureRepository).add(project, metric, expectedMeasure);
}
|
/**
 * A plugin is disabled when it is on the explicit disabled list, or when a
 * non-empty enabled list exists and the plugin is not on it (allow-list mode).
 */
@Override
public boolean isPluginDisabled(String pluginId) {
boolean explicitlyDisabled = disabledPlugins.contains(pluginId);
boolean excludedByEnabledList = !enabledPlugins.isEmpty() && !enabledPlugins.contains(pluginId);
return explicitlyDisabled || excludedByEnabledList;
}
|
// With only a disabled file present (no enabled list), exactly the listed
// plugin is disabled and all others remain enabled.
@Test
public void testIsPluginDisabledWithEnableEmpty() throws IOException {
createDisabledFile();
PluginStatusProvider statusProvider = new DefaultPluginStatusProvider(pluginsPath);
assertFalse(statusProvider.isPluginDisabled("plugin-1"));
assertTrue(statusProvider.isPluginDisabled("plugin-2"));
assertFalse(statusProvider.isPluginDisabled("plugin-3"));
}
|
/**
 * Reads a single byte from the file at the current position under the file's
 * read lock. Returns -1 and latches the finished flag at end of file; otherwise
 * advances the position and refreshes the file's last-access time.
 *
 * @throws IOException if the stream has been closed
 */
@Override
public synchronized int read() throws IOException {
checkNotClosed();
if (finished) {
return -1;
}
file.readLock().lock();
try {
int b = file.read(pos++); // it's ok for pos to go beyond size()
if (b == -1) {
finished = true;
} else {
file.setLastAccessTime(fileSystemState.now());
}
return b;
} finally {
file.readLock().unlock();
}
}
|
// Reading into array slices: the first read fills 6 bytes, the second returns
// only the 2 remaining bytes, leaving the rest of the buffer untouched.
@Test
public void testRead_partialArray_sliceSmaller() throws IOException {
JimfsInputStream in = newInputStream(1, 2, 3, 4, 5, 6, 7, 8);
byte[] bytes = new byte[12];
assertThat(in.read(bytes, 0, 6)).isEqualTo(6);
assertArrayEquals(bytes(1, 2, 3, 4, 5, 6, 0, 0, 0, 0, 0, 0), bytes);
assertThat(in.read(bytes, 6, 6)).isEqualTo(2);
assertArrayEquals(bytes(1, 2, 3, 4, 5, 6, 7, 8, 0, 0, 0, 0), bytes);
assertEmpty(in);
}
|
/**
 * Deserialization-failure callback: extracts the schema id from the failed
 * payload, fetches that schema from the source subject, and registers it under
 * the changelog subject. Each (id, source, changelog) combination is attempted
 * at most once — failures are remembered and never retried.
 */
@Override
public void onDeserializationFailure(
final String source,
final String changelog,
final byte[] data
) {
// NOTE: this only happens for values, we should never auto-register key schemas
final String sourceSubject = KsqlConstants.getSRSubject(source, false);
final String changelogSubject = KsqlConstants.getSRSubject(changelog, false);
// all schema registry events start with a magic byte 0x0 and then four bytes
// indicating the schema id - we extract that schema id from the data that failed
// to deserialize and then register it into the changelog subject
final int id = ByteBuffer.wrap(data, 1, Integer.BYTES).getInt();
final SchemaRegisterEvent event = new SchemaRegisterEvent(id, sourceSubject, changelogSubject);
try {
if (!failedAttempts.contains(event)) {
LOG.info("Trying to fetch & register schema id {} under subject {}", id, changelogSubject);
final ParsedSchema schema = srClient.getSchemaBySubjectAndId(sourceSubject, id);
srClient.register(changelogSubject, schema);
}
} catch (Exception e) {
// Best-effort: log and blocklist this event so we don't retry forever.
LOG.warn("Failed during deserialization callback for topic {}. "
+ "Will not try again to register id {} under subject {}.",
source,
id,
changelogSubject,
e
);
failedAttempts.add(event);
}
}
|
// A registration failure for one schema id must not block a later attempt for a
// different id: only the failed (id, subject) event is blacklisted.
@Test
public void shouldRegisterOtherSchemaIdIfFirstFails() throws IOException, RestClientException {
    // Given:
    when(srClient.getSchemaBySubjectAndId(KsqlConstants.getSRSubject(SOURCE, false), ID2)).thenReturn(schema2);
    when(srClient.getSchemaBySubjectAndId(KsqlConstants.getSRSubject(SOURCE, false), ID)).thenReturn(schema);
    when(srClient.register(KsqlConstants.getSRSubject(CHANGELOG, false), schema)).thenThrow(new KsqlException(""));
    final RegisterSchemaCallback call = new RegisterSchemaCallback(srClient);
    // When:
    call.onDeserializationFailure(SOURCE, CHANGELOG, SOME_DATA);
    call.onDeserializationFailure(SOURCE, CHANGELOG, OTHER_DATA);
    // Then:
    verify(srClient, times(1)).getSchemaBySubjectAndId(KsqlConstants.getSRSubject(SOURCE, false), ID2);
    verify(srClient).register(KsqlConstants.getSRSubject(CHANGELOG, false), schema2);
}
|
/**
 * Closes the given streams, logging (rather than propagating) any exceptions.
 *
 * @param streams the streams to close; a null array is a no-op
 */
public static void closeStreams(java.io.Closeable... streams) {
    // Guard against an explicitly-passed null array before delegating.
    if (streams == null) {
        return;
    }
    cleanupWithLogger(null, streams);
}
|
// closeStreams must swallow exceptions thrown by close() (IOException and even
// NullPointerException here), tolerate null entries, and accept an empty call.
@Test
public void testCloseStreams() throws IOException {
    File tmpFile = null;
    FileOutputStream fos;
    BufferedOutputStream bos;
    FileOutputStream nullStream = null;
    try {
        tmpFile = new File(GenericTestUtils.getTestDir(), "testCloseStreams.txt");
        // Stream whose close() always throws a checked exception.
        fos = new FileOutputStream(tmpFile) {
            @Override
            public void close() throws IOException {
                throw new IOException();
            }
        };
        // Stream whose close() throws an unchecked exception.
        bos = new BufferedOutputStream(
            new FileOutputStream(tmpFile)) {
            @Override
            public void close() {
                throw new NullPointerException();
            }
        };
        IOUtils.closeStreams(fos, bos, nullStream);
        IOUtils.closeStreams();
    } finally {
        FileUtils.deleteQuietly(tmpFile);
    }
}
|
/**
 * Fails unless the subject is the very same object as {@code expected}
 * (reference identity via {@code ==}, never {@code equals}).
 */
public final void isSameInstanceAs(@Nullable Object expected) {
    if (actual != expected) {
        failEqualityCheck(
            SAME_INSTANCE,
            expected,
            /*
             * Pass through *whether* the values are equal so that failEqualityCheck() can print that
             * information. But remove the description of the difference, which is always about
             * content, since people calling isSameInstanceAs() are explicitly not interested in
             * content, only object identity.
             */
            compareForEquality(expected).withoutDescription());
    }
}
|
// When the two objects have identical toString() output ("true") but different
// types, the failure message must disambiguate them by class name.
@Test
public void isSameInstanceAsFailureWithDifferentTypesAndSameToString() {
    Object a = "true";
    Object b = true;
    expectFailure.whenTesting().that(a).isSameInstanceAs(b);
    assertFailureKeys("expected specific instance", "an instance of", "but was", "an instance of");
    assertFailureValue("expected specific instance", "true");
    assertFailureValueIndexed("an instance of", 0, "java.lang.Boolean");
    assertFailureValue("but was", "(non-equal value with same string representation)");
    assertFailureValueIndexed("an instance of", 1, "java.lang.String");
}
|
/**
 * Validates that the given old password matches the stored password of the user.
 *
 * @throws com.baomidou... exception(USER_NOT_EXISTS) when the user does not exist
 *         and exception(USER_PASSWORD_FAILED) when the password does not match
 */
@VisibleForTesting
void validateOldPassword(Long id, String oldPassword) {
    final AdminUserDO user = userMapper.selectById(id);
    // Missing user is reported before any password comparison takes place.
    if (user == null) {
        throw exception(USER_NOT_EXISTS);
    }
    final boolean matches = isPasswordMatch(oldPassword, user.getPassword());
    if (!matches) {
        throw exception(USER_PASSWORD_FAILED);
    }
}
|
// A random (unknown) user id must surface USER_NOT_EXISTS, not a password error.
@Test
public void testValidateOldPassword_notExists() {
    assertServiceException(() -> userService.validateOldPassword(randomLongId(), randomString()),
        USER_NOT_EXISTS);
}
|
/**
 * Adds a JSR-303 {@code @Size} annotation to the field when the schema node
 * declares {@code minLength} and/or {@code maxLength}, annotations are enabled,
 * and the field's type supports length constraints. Returns the field unchanged
 * otherwise.
 */
@Override
public JFieldVar apply(String nodeName, JsonNode node, JsonNode parent, JFieldVar field, Schema currentSchema) {
    if (ruleFactory.getGenerationConfig().isIncludeJsr303Annotations()
            && (node.has("minLength") || node.has("maxLength"))
            && isApplicableType(field)) {
        // Choose the jakarta or legacy javax flavour of @Size based on configuration.
        final Class<? extends Annotation> sizeClass;
        if (ruleFactory.getGenerationConfig().isUseJakartaValidation()) {
            sizeClass = Size.class;
        } else {
            sizeClass = javax.validation.constraints.Size.class;
        }
        final JAnnotationUse annotation = field.annotate(sizeClass);
        if (node.has("minLength")) {
            annotation.param("min", node.get("minLength").asInt());
        }
        if (node.has("maxLength")) {
            annotation.param("max", node.get("maxLength").asInt());
        }
    }
    return field;
}
|
// When both minLength and maxLength are present, a single @Size annotation must
// carry both "min" and "max" params — but only for applicable field types.
@Test
public void testMaxAndMinLength() {
    when(config.isIncludeJsr303Annotations()).thenReturn(true);
    // Random bounds: the rule copies values verbatim, it does not validate ranges.
    final int minValue = new Random().nextInt();
    final int maxValue = new Random().nextInt();
    JsonNode maxSubNode = Mockito.mock(JsonNode.class);
    when(subNode.asInt()).thenReturn(minValue);
    when(maxSubNode.asInt()).thenReturn(maxValue);
    when(node.get("minLength")).thenReturn(subNode);
    when(node.get("maxLength")).thenReturn(maxSubNode);
    when(fieldVar.annotate(sizeClass)).thenReturn(annotation);
    when(node.has("minLength")).thenReturn(true);
    when(node.has("maxLength")).thenReturn(true);
    when(fieldVar.type().boxify().fullName()).thenReturn(fieldClass.getTypeName());
    JFieldVar result = rule.apply("node", node, null, fieldVar, null);
    assertSame(fieldVar, result);
    verify(fieldVar, times(isApplicable ? 1 : 0)).annotate(sizeClass);
    verify(annotation, times(isApplicable ? 1 : 0)).param("min", minValue);
    verify(annotation, times(isApplicable ? 1 : 0)).param("max", maxValue);
}
|
/**
 * Parses a unified diff from the given stream.
 *
 * <p>NOTE(review): the reader uses the platform default charset — confirm input
 * diffs are always ASCII/UTF-8 compatible before changing this.
 */
public static UnifiedDiff parseUnifiedDiff(InputStream stream) throws IOException, UnifiedDiffParserException {
    InputStreamReader streamReader = new InputStreamReader(stream);
    BufferedReader bufferedReader = new BufferedReader(streamReader);
    return new UnifiedDiffReader(bufferedReader).parse();
}
|
// Regression for issue 107: a diff whose file name contains spaces must still be
// parsed into a single file with one delta.
@Test
public void testParseIssue107_3() throws IOException {
    UnifiedDiff diff = UnifiedDiffReader.parseUnifiedDiff(
        UnifiedDiffReaderTest.class.getResourceAsStream("problem_diff_issue107_3.diff"));
    assertThat(diff.getFiles().size()).isEqualTo(1);
    UnifiedDiffFile file1 = diff.getFiles().get(0);
    assertThat(file1.getFromFile()).isEqualTo("Billion laughs attack.md");
    assertThat(file1.getPatch().getDeltas().size()).isEqualTo(1);
}
|
/**
 * Writes the collected analysis warnings into the scanner report.
 * Nothing is written when there are no warnings.
 */
@Override
public void publish(ScannerReportWriter writer) {
    final List<DefaultAnalysisWarnings.Message> warnings = defaultAnalysisWarnings.warnings();
    if (!warnings.isEmpty()) {
        // Convert every warning to its protobuf representation before writing.
        writer.writeAnalysisWarnings(warnings.stream()
            .map(AnalysisWarningsPublisher::toProtobufAnalysisWarning)
            .toList());
    }
}
|
// Duplicate warnings added via addUnique must be written once each, preserving
// insertion order in the serialized report.
@Test
public void publish_warnings() throws IOException {
    ScannerReportWriter writer = new ScannerReportWriter(fileStructure);
    String warning1 = "warning 1";
    String warning2 = "warning 2";
    analysisWarnings.addUnique(warning1);
    analysisWarnings.addUnique(warning1);
    analysisWarnings.addUnique(warning2);
    underTest.publish(writer);
    ScannerReportReader reader = new ScannerReportReader(fileStructure);
    List<ScannerReport.AnalysisWarning> warnings = Lists.newArrayList(reader.readAnalysisWarnings());
    assertThat(warnings)
        .extracting(ScannerReport.AnalysisWarning::getText)
        .containsExactly(warning1, warning2);
}
|
// Submits the action to the current continuation executor. CONTINUATION is
// presumably a thread-local-style holder — confirm against its declaration.
void doSubmit(final Runnable action) {
    CONTINUATION.get().submit(action);
}
|
// Two actions submitted to the same Continuations instance must run in
// submission order, as observed through the shared sequence counter.
@Test
public void testOnSuccessCalled() {
    final AtomicInteger sequence = new AtomicInteger(0);
    final AtomicInteger action = new AtomicInteger();
    final AtomicInteger onSuccess = new AtomicInteger();
    Continuations CONT = new Continuations();
    CONT.doSubmit(() -> {
        action.set(sequence.incrementAndGet());
    });
    CONT.doSubmit(() -> {
        onSuccess.set(sequence.incrementAndGet());
    });
    assertEquals(action.get(), 1);
    assertEquals(onSuccess.get(), 2);
}
|
/**
 * Classifies a throwable by its {@code @ThrowableAnnotation}; throwables
 * without the annotation default to {@link ThrowableType#RecoverableError}.
 */
public static ThrowableType getThrowableType(Throwable cause) {
    final ThrowableAnnotation annotation =
        cause.getClass().getAnnotation(ThrowableAnnotation.class);
    if (annotation == null) {
        return ThrowableType.RecoverableError;
    }
    return annotation.value();
}
|
// SuppressRestartsException is expected to carry the NonRecoverableError
// classification via its ThrowableAnnotation.
@Test
void testThrowableType_NonRecoverable() {
    assertThat(
            ThrowableClassifier.getThrowableType(
                    new SuppressRestartsException(new Exception(""))))
            .isEqualTo(ThrowableType.NonRecoverableError);
}
|
/**
 * Returns Athenz authentication data, serving a cached role token while it is
 * still valid and otherwise fetching a fresh one from ZTS.
 *
 * <p>Classic read-then-write lock pattern: the read lock covers the fast cached
 * path; on a miss the read lock is released and the write lock taken. Note that
 * between unlock and lock another thread may refresh the token first, so a
 * refresh can occasionally happen redundantly — harmless, just extra work.
 */
@Override
public AuthenticationDataProvider getAuthData() throws PulsarClientException {
    Lock readLock = cachedRoleTokenLock.readLock();
    readLock.lock();
    try {
        if (cachedRoleTokenIsValid()) {
            return new AuthenticationDataAthenz(roleToken,
                    isNotBlank(roleHeader) ? roleHeader : ZTSClient.getHeader());
        }
    } finally {
        readLock.unlock();
    }
    Lock writeLock = cachedRoleTokenLock.writeLock();
    writeLock.lock();
    try {
        // the following would set up the API call that requests tokens from the server
        // that can only be used if they are 10 minutes from expiration and last twenty
        // four hours
        RoleToken token = getZtsClient().getRoleToken(providerDomain, null, minValidity, maxValidity, false);
        roleToken = token.getToken();
        cachedRoleTokenTimestamp = System.nanoTime();
        return new AuthenticationDataAthenz(roleToken, isNotBlank(roleHeader) ? roleHeader : ZTSClient.getHeader());
    } catch (Throwable t) {
        // Wrap any failure (network, parsing, ...) in the client's checked exception type.
        throw new GettingAuthenticationDataException(t);
    } finally {
        writeLock.unlock();
    }
}
|
// The role token must carry the tenant principal both in the command data and in
// exactly one HTTP header (the ZTS role-token header).
@Test
public void testGetAuthData() throws Exception {
    com.yahoo.athenz.auth.token.RoleToken roleToken = new com.yahoo.athenz.auth.token.RoleToken(
            auth.getAuthData().getCommandData());
    assertEquals(roleToken.getPrincipal(), String.format("%s.%s", TENANT_DOMAIN, TENANT_SERVICE));
    int count = 0;
    for (Map.Entry<String, String> header : auth.getAuthData().getHttpHeaders()) {
        if (header.getKey().equals(ZTSClient.getHeader())) {
            com.yahoo.athenz.auth.token.RoleToken roleTokenFromHeader = new com.yahoo.athenz.auth.token.RoleToken(
                    header.getValue());
            assertEquals(roleTokenFromHeader.getPrincipal(), String.format("%s.%s", TENANT_DOMAIN, TENANT_SERVICE));
            count++;
        }
    }
    assertEquals(count, 1);
}
|
/**
 * Thrift endpoint for updating an immutable partition. Any internal failure is
 * converted into a RUNTIME_ERROR status on the result instead of propagating.
 */
@Override
public TImmutablePartitionResult updateImmutablePartition(TImmutablePartitionRequest request) throws TException {
    LOG.info("Receive update immutable partition: {}", request);
    TImmutablePartitionResult result;
    try {
        result = updateImmutablePartitionInternal(request);
    } catch (Throwable t) {
        LOG.warn(t.getMessage(), t);
        // Report the failure to the caller via the result status, tagged with the txn id.
        String message = String.format("txn_id=%d failed. %s", request.getTxn_id(), t.getMessage());
        TStatus errorStatus = new TStatus(RUNTIME_ERROR);
        errorStatus.setError_msgs(Lists.newArrayList(message));
        result = new TImmutablePartitionResult();
        result.setStatus(errorStatus);
    }
    LOG.info("Finish update immutable partition: {}", result);
    return result;
}
|
// Marking partitions immutable should create one new writable physical
// partition; repeating the same request is idempotent, and marking the enlarged
// set creates another.
@Test
public void testImmutablePartitionApi() throws TException {
    Database db = GlobalStateMgr.getCurrentState().getDb("test");
    OlapTable table = (OlapTable) db.getTable("site_access_auto");
    List<Long> partitionIds = table.getPhysicalPartitions().stream()
            .map(PhysicalPartition::getId).collect(Collectors.toList());
    FrontendServiceImpl impl = new FrontendServiceImpl(exeEnv);
    TImmutablePartitionRequest request = new TImmutablePartitionRequest();
    request.setDb_id(db.getId());
    request.setTable_id(table.getId());
    request.setPartition_ids(partitionIds);
    TImmutablePartitionResult partition = impl.updateImmutablePartition(request);
    Assert.assertEquals(partition.getStatus().getStatus_code(), TStatusCode.OK);
    Assert.assertEquals(2, table.getPhysicalPartitions().size());
    // Same request again: no additional partition is created.
    partition = impl.updateImmutablePartition(request);
    Assert.assertEquals(partition.getStatus().getStatus_code(), TStatusCode.OK);
    Assert.assertEquals(2, table.getPhysicalPartitions().size());
    partitionIds = table.getPhysicalPartitions().stream()
            .map(PhysicalPartition::getId).collect(Collectors.toList());
    request.setPartition_ids(partitionIds);
    partition = impl.updateImmutablePartition(request);
    Assert.assertEquals(partition.getStatus().getStatus_code(), TStatusCode.OK);
    Assert.assertEquals(3, table.getPhysicalPartitions().size());
}
|
// Builds the weather API query URL for the configured location (delegates to
// the location-aware overload).
public String getQuery() throws Exception {
    return getQuery(weatherConfiguration.getLocation());
}
|
// With explicit lat/lon set, the generated URL must use lat/lon parameters
// (not a place name) plus language, mode and APPID.
@Test
public void testLatLonQuery() throws Exception {
    WeatherConfiguration weatherConfiguration = new WeatherConfiguration();
    weatherConfiguration.setLon("4");
    weatherConfiguration.setLat("52");
    weatherConfiguration.setMode(WeatherMode.XML);
    weatherConfiguration.setLanguage(WeatherLanguage.nl);
    weatherConfiguration.setAppid(APPID);
    WeatherQuery weatherQuery = new WeatherQuery(weatherConfiguration);
    weatherConfiguration.setGeoLocationProvider(geoLocationProvider);
    String query = weatherQuery.getQuery();
    assertThat(query, is(
        "http://api.openweathermap.org/data/2.5/weather?lat=52&lon=4&lang=nl&mode=xml&APPID=9162755b2efa555823cfe0451d7fff38"));
}
|
// Entry point for placing an order: kicks off the shipping request chain.
void placeOrder(Order order) {
    sendShippingRequest(order);
}
|
// Places orders under progressively scaled time limits and checks an order id is
// always assigned.
// NOTE(review): paymentTime/queueTaskTime compound across iterations
// (x *= d each pass) and are never passed to the commander — confirm whether
// `paymentTime = timeLimits.paymentTime() * d` was intended.
@Test
void testPlaceOrder() throws Exception {
    long paymentTime = timeLimits.paymentTime();
    long queueTaskTime = timeLimits.queueTaskTime();
    for (double d = 0.1; d < 2; d = d + 0.1) {
        paymentTime *= d;
        queueTaskTime *= d;
        Commander c = buildCommanderObject(true);
        var order = new Order(new User("K", "J"), "pen", 1f);
        for (Order.MessageSent ms : Order.MessageSent.values()) {
            c.placeOrder(order);
            assertFalse(StringUtils.isBlank(order.id));
        }
    }
}
|
/**
 * Reads the distinct principal names from a keytab file, normalising any
 * backslash separators to forward slashes.
 *
 * @param keytabFileName path to the keytab file to load
 * @return the unique principal names (order unspecified)
 */
static final String[] getPrincipalNames(String keytabFileName) throws IOException {
    Keytab keytab = Keytab.loadKeytab(new File(keytabFileName));
    // A set deduplicates principals that appear under multiple key versions.
    Set<String> principals = new HashSet<>();
    for (PrincipalName entry : keytab.getPrincipals()) {
        principals.add(entry.getName().replace("\\", "/"));
    }
    return principals.toArray(new String[0]);
}
|
// Passing a null filter pattern to the two-argument overload must be rejected
// with an exception rather than silently matching nothing.
@Test
public void testGetPrincipalNamesMissingPattern() throws IOException {
    createKeyTab(testKeytab, new String[]{"test/testhost@testRealm"});
    try {
        KerberosUtil.getPrincipalNames(testKeytab, null);
        Assert.fail("Exception should have been thrown");
    } catch (Exception e) {
        //expects exception
    }
}
|
@Udf(description = "Returns the natural logarithm (base e) of an INT value.")
public Double ln(
    @UdfParameter(
        value = "value",
        description = "the value get the natual logarithm of."
    ) final Integer value
) {
    // Widen to Double (propagating null) and delegate to the DOUBLE overload.
    final Double widened = (value == null) ? null : Double.valueOf(value);
    return ln(widened);
}
|
// Every typed overload of ln() must propagate a null input as a null result.
@Test
public void shouldHandleNull() {
    assertThat(udf.ln((Integer)null), is(nullValue()));
    assertThat(udf.ln((Long)null), is(nullValue()));
    assertThat(udf.ln((Double)null), is(nullValue()));
}
|
/**
 * Validates an index-set configuration, returning the first violation found.
 * Prefix conflicts are only checked for brand-new configs (no id yet), never on
 * updates; mapping-change legality and the simple config checks always run.
 */
public Optional<Violation> validate(IndexSetConfig newConfig) {
    if (Strings.isNullOrEmpty(newConfig.id())) {
        final Violation prefixViolation = validatePrefix(newConfig);
        if (prefixViolation != null) {
            return Optional.of(prefixViolation);
        }
    }
    final Violation fieldMappingViolation = validateMappingChangesAreLegal(newConfig);
    if (fieldMappingViolation != null) {
        return Optional.of(fieldMappingViolation);
    }
    // Last check: null maps to "no violation".
    return Optional.ofNullable(validateSimpleIndexSetConfig(newConfig));
}
|
// Index prefixes starting with the reserved warm-tier keyword must be rejected
// with a message naming the keyword.
@Test
public void testWarmTierKeywordReserved() {
    IndexSetConfig config = testIndexSetConfig().toBuilder().indexPrefix("warm_").build();
    this.validator = new IndexSetValidator(indexSetRegistry, elasticsearchConfiguration, dataTieringOrchestrator, dataTieringChecker);
    assertThat(validator.validate(config)).hasValueSatisfying(v ->
            assertThat(v.message()).contains("contains reserved keyword 'warm_'!"));
}
|
/**
 * Resolves the client address for a request, honouring the first entry of
 * {@code X-Forwarded-For} only when the direct peer is a known proxy server
 * (otherwise the header could be spoofed by arbitrary clients).
 */
public static String getRemoteAddr(HttpServletRequest request) {
    final String remoteAddr = request.getRemoteAddr();
    final String proxyHeader = request.getHeader("X-Forwarded-For");
    if (proxyHeader == null || !ProxyServers.isProxyServer(remoteAddr)) {
        return remoteAddr;
    }
    // The left-most entry is the original client; later entries are intermediate proxies.
    final String clientAddr = proxyHeader.split(",")[0].trim();
    return clientAddr.isEmpty() ? remoteAddr : clientAddr;
}
|
// Without a proxy header the socket peer address is returned unchanged.
@Test
public void testRemoteAddr() {
    assertEquals(clientAddr, getRemoteAddr(clientAddr, null, false));
}
|
/**
 * Sanitises a name for use as a queue name: trims it and replaces every dot
 * (the queue hierarchy separator) with {@code DOT_REPLACEMENT}, logging when a
 * replacement happens.
 *
 * @param name the raw name
 * @return the trimmed, dot-free name
 */
protected static String cleanName(String name) {
  name = FairSchedulerUtilities.trimQueueName(name);
  if (!name.contains(DOT)) {
    return name;
  }
  // String.replace does a literal replacement; the previous replaceAll treated
  // the pattern as a regex and would mis-handle '\' or '$' in DOT_REPLACEMENT.
  String converted = name.replace(DOT, DOT_REPLACEMENT);
  LOG.warn("Name {} is converted to {} when it is used as a queue name.",
      name, converted);
  return converted;
}
|
// Exercises cleanName over dot placements: no dots (unchanged), single dot,
// multiple dots, consecutive dots, and a padded name where trimming must happen
// before replacement.
@Test
public void testCleanName() {
    // permutations of dot placements
    final String clean = "clean";
    final String dotted = "not.clean";
    final String multiDot = "more.un.clean";
    final String seqDot = "not..clean";
    final String unTrimmed = " .invalid. "; // not really a valid queue
    String cleaned = cleanName(clean);
    assertEquals("Name was changed and it should not", clean, cleaned);
    cleaned = cleanName(dotted);
    assertFalse("Cleaned name contains dots and it should not",
        cleaned.contains(DOT));
    cleaned = cleanName(multiDot);
    assertFalse("Cleaned name contains dots and it should not",
        cleaned.contains(DOT));
    // Two distinct replacement positions prove every dot was replaced, not just the first.
    assertNotEquals("Multi dot failed: wrong replacements found",
        cleaned.indexOf(DOT_REPLACEMENT),
        cleaned.lastIndexOf(DOT_REPLACEMENT));
    cleaned = cleanName(seqDot);
    assertFalse("Cleaned name contains dots and it should not",
        cleaned.contains(DOT));
    assertNotEquals("Sequential dot failed: wrong replacements found",
        cleaned.indexOf(DOT_REPLACEMENT),
        cleaned.lastIndexOf(DOT_REPLACEMENT));
    cleaned = cleanName(unTrimmed);
    assertTrue("Trimming start failed: space not removed or dot not replaced",
        cleaned.startsWith(DOT_REPLACEMENT));
    assertTrue("Trimming end failed: space not removed or dot not replaced",
        cleaned.endsWith(DOT_REPLACEMENT));
}
|
/**
 * Maps a permission request code to the Android permission strings it needs.
 * Notification permission only exists from Android 13 (Tiramisu); on older
 * devices the notification request resolves to an empty array.
 *
 * @throws IllegalArgumentException for an unknown request code
 */
@NonNull @VisibleForTesting
static String[] getPermissionsStrings(int requestCode) {
  if (requestCode == CONTACTS_PERMISSION_REQUEST_CODE) {
    return new String[] {Manifest.permission.READ_CONTACTS};
  }
  if (requestCode == NOTIFICATION_PERMISSION_REQUEST_CODE) {
    return Build.VERSION.SDK_INT >= Build.VERSION_CODES.TIRAMISU
        ? new String[] {Manifest.permission.POST_NOTIFICATIONS}
        : new String[0];
  }
  throw new IllegalArgumentException("Unknown request code " + requestCode);
}
|
// On an Android 12L (S_V2) device the notification request code must yield no
// permissions, since POST_NOTIFICATIONS only exists from Tiramisu.
@Test
@Config(sdk = S_V2)
public void testGetPermissionsStringsNotificationsOldDevice() {
    Assert.assertArrayEquals(
        new String[0],
        PermissionRequestHelper.getPermissionsStrings(
            PermissionRequestHelper.NOTIFICATION_PERMISSION_REQUEST_CODE));
}
|
/**
 * Returns whether the name matches at least one of the given domain patterns.
 * A {@code null} or empty pattern collection matches nothing.
 */
public static boolean matchAnyDomain(String name, Collection<String> patterns) {
    return patterns != null
            && patterns.stream().anyMatch(pattern -> matchDomain(name, pattern));
}
|
// Covers exact match, null pattern list, empty pattern list, and a non-matching pattern.
@Test
public void testMatchAnyDomain() {
    assertTrue(AddressUtil.matchAnyDomain("hazelcast.com", singletonList("hazelcast.com")));
    assertFalse(AddressUtil.matchAnyDomain("hazelcast.com", null));
    assertFalse(AddressUtil.matchAnyDomain("hazelcast.com", Collections.emptyList()));
    assertFalse(AddressUtil.matchAnyDomain("hazelcast.com", singletonList("abc.com")));
}
|
/**
 * Returns the mapped value, or {@code defaultValue} when the key is absent.
 *
 * <p>Uses a single {@code getOrDefault} lookup instead of the previous
 * {@code containsKey} + {@code get} pair. Semantics are identical, including an
 * explicit null mapping, which still returns {@code null} rather than the default.
 */
@Override
public V get(Object k) {
    return super.getOrDefault(k, defaultValue);
}
|
// Keys present in the map return their mapped value; absent keys fall back to
// the default (5) supplied at construction.
@Test
public void defaultToFive() {
    map = new DefaultHashMap<>(5);
    loadMap();
    assertEquals("missing 1", 1, (int) map.get(ONE));
    assertEquals("missing 2", 2, (int) map.get(TWO));
    assertEquals("three?", 5, (int) map.get(THREE));
    assertEquals("four?", 5, (int) map.get(FOUR));
}
|
/**
 * Returns whether the pending-merges picture has changed since the previous
 * snapshot. Only meaningful once every distributor has reported; until then the
 * answer is always {@code false}.
 */
public boolean statsHaveChanged() {
    if (!aggregatedStats.hasUpdatesFromAllDistributors()) {
        return false;
    }
    for (ContentNodeStats contentNodeStats : aggregatedStats.getStats()) {
        int nodeIndex = contentNodeStats.getNodeIndex();
        boolean currValue = mayHaveMergesPendingInGlobalSpace(nodeIndex);
        Boolean prevValue = prevMayHaveMergesPendingInGlobalSpace(nodeIndex);
        // A node with no previous sample, or whose pending flag flipped, is a change.
        if (prevValue == null || prevValue != currValue) {
            return true;
        }
    }
    return false;
}
|
// A node that is in sync now but has no entry in the previous stats snapshot
// must be treated as a change.
@Test
void stats_have_changed_if_in_sync_node_not_found_in_previous_stats() {
    Fixture f = Fixture.fromStats(stats().inSync(0));
    assertTrue(f.statsHaveChanged());
}
|
@Config("sql.forced-session-time-zone")
@ConfigDescription("User session time zone overriding value sent by client")
public SqlEnvironmentConfig setForcedSessionTimeZone(@Nullable String timeZoneId)
{
    // A null id clears the override; otherwise resolve the id to a TimeZoneKey.
    this.forcedSessionTimeZone = (timeZoneId == null)
            ? Optional.empty()
            : Optional.of(TimeZoneKey.getTimeZoneKey(timeZoneId));
    return this;
}
|
// The config property key must map onto the corresponding setter.
@Test
public void testExplicitPropertyMappings()
{
    Map<String, String> properties = new ImmutableMap.Builder<String, String>()
            .put("sql.forced-session-time-zone", "UTC")
            .build();
    SqlEnvironmentConfig expected = new SqlEnvironmentConfig()
            .setForcedSessionTimeZone("UTC");
    assertFullMapping(properties, expected);
}
|
/**
 * DNS lookup with an SSRF guard: when webhook validation is enabled, loopback,
 * wildcard and local-interface addresses are rejected so webhooks cannot be
 * pointed at the server itself or its network interfaces.
 */
@NotNull
@Override
public List<InetAddress> lookup(@NotNull String host) throws UnknownHostException {
    InetAddress address = InetAddress.getByName(host);
    if (configuration.getBoolean(SONAR_VALIDATE_WEBHOOKS_PROPERTY).orElse(SONAR_VALIDATE_WEBHOOKS_DEFAULT_VALUE)
        && (address.isLoopbackAddress() || address.isAnyLocalAddress() || isLocalAddress(address))) {
        throw new IllegalArgumentException("Invalid URL: loopback and wildcard addresses are not allowed for webhooks.");
    }
    return Collections.singletonList(address);
}
|
// An address that matches one of the host's own interface addresses must be
// rejected when webhook validation is on.
@Test
public void lookup_fail_on_192_168_1_21() throws UnknownHostException, SocketException {
    InetAddress inetAddress = InetAddress.getByName(HttpUrl.parse("https://192.168.1.21/").host());
    when(configuration.getBoolean(SONAR_VALIDATE_WEBHOOKS_PROPERTY))
        .thenReturn(Optional.of(true));
    when(networkInterfaceProvider.getNetworkInterfaceAddresses())
        .thenReturn(ImmutableList.of(inetAddress));
    Assertions.assertThatThrownBy(() -> underTest.lookup("192.168.1.21"))
        .hasMessageContaining(INVALID_URL)
        .isInstanceOf(IllegalArgumentException.class);
}
|
/**
 * Rejects a token that has already been invalidated: presence of the token id
 * in the invalid-token store triggers {@link TokenAlreadyInvalidatedException}.
 */
@Override
public void checkForInvalidityOfToken(String tokenId) {
    if (invalidTokenRepository.findByTokenId(tokenId).isPresent()) {
        throw new TokenAlreadyInvalidatedException(tokenId);
    }
}
|
// A token id found in the invalid-token repository must raise
// TokenAlreadyInvalidatedException carrying the token id.
@Test
void givenInvalidToken_whenCheckForInvalidityOfToken_thenThrowTokenAlreadyInvalidatedException() {
    // Given
    String tokenId = "invalidToken";
    InvalidTokenEntity invalidTokenEntity = InvalidTokenEntity.builder().tokenId(tokenId).build();
    // When
    when(invalidTokenRepository.findByTokenId(tokenId)).thenReturn(Optional.of(invalidTokenEntity));
    // Then
    assertThatThrownBy(() -> invalidTokenService.checkForInvalidityOfToken(tokenId))
            .isInstanceOf(TokenAlreadyInvalidatedException.class)
            .hasMessageContaining(tokenId);
    // Verify
    verify(invalidTokenRepository).findByTokenId(tokenId);
}
|
/**
 * Lists the tables of a data source, optionally filtered by substrings of the
 * table name and/or comment; an empty filter matches everything.
 */
@Override
public List<TableInfo> getTableList(Long dataSourceConfigId, String nameLike, String commentLike) {
    return getTableList0(dataSourceConfigId, null).stream()
            .filter(table -> StrUtil.isEmpty(nameLike) || table.getName().contains(nameLike))
            .filter(table -> StrUtil.isEmpty(commentLike) || table.getComment().contains(commentLike))
            .collect(Collectors.toList());
}
|
// Against an in-memory H2 data source, filtering by name "config" and comment
// "参数" must return exactly one matching table.
@Test
public void testGetTableList() {
    // 准备参数
    Long dataSourceConfigId = randomLongId();
    // mock 方法
    DataSourceConfigDO dataSourceConfig = new DataSourceConfigDO().setUsername("sa").setPassword("")
            .setUrl("jdbc:h2:mem:testdb");
    when(dataSourceConfigService.getDataSourceConfig(eq(dataSourceConfigId)))
            .thenReturn(dataSourceConfig);
    // 调用
    List<TableInfo> tables = databaseTableService.getTableList(dataSourceConfigId,
            "config", "参数");
    // 断言
    assertEquals(1, tables.size());
    assertTableInfo(tables.get(0));
}
|
/**
 * Writes a RST_STREAM frame by delegating to the lifecycle manager, which keeps
 * the connection's stream state consistent with the reset.
 */
@Override
public ChannelFuture writeRstStream(ChannelHandlerContext ctx, int streamId, long errorCode,
    ChannelPromise promise) {
    // Delegate to the lifecycle manager for proper updating of connection state.
    return lifecycleManager.resetStream(ctx, streamId, errorCode, promise);
}
|
// Resetting a stream the connection does not know about must not reach the
// frame writer.
@Test
public void rstStreamWriteForUnknownStreamShouldIgnore() throws Exception {
    ChannelPromise promise = newPromise();
    encoder.writeRstStream(ctx, 5, PROTOCOL_ERROR.code(), promise);
    verify(writer, never()).writeRstStream(eq(ctx), anyInt(), anyLong(), eq(promise));
}
|
// Factory for a NotFoundException describing a missing cluster; the message is
// a format string filled with the given appId and clusterName.
public static NotFoundException clusterNotFound(String appId, String clusterName) {
    return new NotFoundException("cluster not found for appId:%s clusterName:%s", appId, clusterName);
}
|
// The factory must interpolate appId and clusterName into the message.
@Test
public void testClusterNotFoundException() {
    NotFoundException exception = NotFoundException.clusterNotFound(appId, clusterName);
    assertEquals(exception.getMessage(), "cluster not found for appId:app-1001 clusterName:test");
}
|
/**
 * Updates a vtap, recomputing its TX/RX edge-device sets from the (possibly
 * changed) criterion before persisting via the store.
 */
@Override
public OpenstackVtap updateVtap(OpenstackVtap description) {
    checkNotNull(description, VTAP_DESC_NULL, "vtap");
    final Set<DeviceId> txDevices;
    if (description.type().isValid(Type.VTAP_TX)) {
        txDevices = getEdgeDevice(Type.VTAP_TX, description.vtapCriterion());
    } else {
        txDevices = ImmutableSet.of();
    }
    final Set<DeviceId> rxDevices;
    if (description.type().isValid(Type.VTAP_RX)) {
        rxDevices = getEdgeDevice(Type.VTAP_RX, description.vtapCriterion());
    } else {
        rxDevices = ImmutableSet.of();
    }
    DefaultOpenstackVtap vtap = DefaultOpenstackVtap.builder(description)
            .txDeviceIds(txDevices)
            .rxDeviceIds(rxDevices)
            .build();
    return store.updateVtap(vtap, true);
}
|
// Updating a vtap that was never created must return null rather than creating it.
@Test
public void testUpdateNotExistingVtap() {
    assertNull(target.updateVtap(VTAP_1));
}
|
// Boxed-Long overload: unboxes and delegates to the primitive variant
// (throws NPE if recordStart is null).
@Override
public boolean tryReturnRecordAt(boolean isAtSplitPoint, Long recordStart) {
    return tryReturnRecordAt(isAtSplitPoint, recordStart.longValue());
}
|
// Split-point records inside [100, 200) are accepted; 210 falls outside the
// range and must be rejected.
@Test
public void testTryReturnRecordSimpleSparse() throws Exception {
    OffsetRangeTracker tracker = new OffsetRangeTracker(100, 200);
    assertTrue(tracker.tryReturnRecordAt(true, 110));
    assertTrue(tracker.tryReturnRecordAt(true, 140));
    assertTrue(tracker.tryReturnRecordAt(true, 183));
    assertFalse(tracker.tryReturnRecordAt(true, 210));
}
|
/**
 * Decorates the context for the Spring Cloud plugin: the request path doubles
 * as both method and real URL, and the module is the metadata app name when
 * available, otherwise "&lt;plugin&gt;-&lt;rpcType&gt;".
 */
@Override
public ShenyuContext decorator(final ShenyuContext shenyuContext, final MetaData metaData) {
    final String path = shenyuContext.getPath();
    shenyuContext.setMethod(path);
    shenyuContext.setRealUrl(path);
    shenyuContext.setRpcType(RpcTypeEnum.SPRING_CLOUD.getName());
    final String fallback = String.format("%s-%s", PluginEnum.SPRING_CLOUD.getName(), shenyuContext.getRpcType());
    final String appName = (metaData == null) ? null : metaData.getAppName();
    shenyuContext.setModule(appName == null ? fallback : appName);
    return shenyuContext;
}
|
// With null metadata the module falls back to "springCloud-springCloud", while
// method/realUrl mirror the (null) path.
@Test
public void testDecorator() {
    MetaData metaData = null;
    ShenyuContext shenyuContext = new ShenyuContext();
    springCloudShenyuContextDecorator.decorator(shenyuContext, metaData);
    Assertions.assertNull(shenyuContext.getMethod());
    Assertions.assertNull(shenyuContext.getRealUrl());
    Assertions.assertEquals(shenyuContext.getRpcType(), "springCloud");
    Assertions.assertEquals(shenyuContext.getModule(), "springCloud-springCloud");
}
|
/**
 * Returns whether the current user has the given permission on this directory,
 * caching the ACL service answer per permission for the object's lifetime.
 *
 * <p>Replaces the deprecated {@code new Boolean(...)} constructor with
 * {@code Boolean.valueOf(...)} and collapses the triple map lookup into one
 * get plus a conditional put.
 */
@Override
public boolean hasAccess( RepositoryFilePermission perm ) throws KettleException {
    // Lazily create the per-permission cache on first use.
    if ( hasAccess == null ) {
      hasAccess = new HashMap<RepositoryFilePermission, Boolean>();
    }
    Boolean cached = hasAccess.get( perm );
    if ( cached == null ) {
      cached = Boolean.valueOf( aclService.hasAccess( getObjectId(), perm ) );
      hasAccess.put( perm, cached );
    }
    return cached.booleanValue();
}
|
// READ and WRITE answers from the ACL service must be surfaced independently.
@Test
public void testAccess() throws Exception {
    when( mockAclService.hasAccess( mockObjectId, RepositoryFilePermission.READ ) ).thenReturn( true );
    when( mockAclService.hasAccess( mockObjectId, RepositoryFilePermission.WRITE ) ).thenReturn( false );
    assertTrue( uiPurRepDir.hasAccess( RepositoryFilePermission.READ ) );
    assertFalse( uiPurRepDir.hasAccess( RepositoryFilePermission.WRITE ) );
}
|
/**
 * Creates a reactor bound to the given (non-null) ZeroMQ context, with empty
 * poller, timer and bookkeeping lists.
 */
public ZLoop(Context context)
{
    Objects.requireNonNull(context, "Context has to be supplied for ZLoop");
    this.context = context;
    this.pollers = new ArrayList<>();
    this.timers = new ArrayList<>();
    // Pollers cancelled mid-dispatch are parked here until it is safe to drop them.
    this.zombies = new ArrayList<>();
    this.newTimers = new ArrayList<>();
}
|
// End-to-end reactor check: a timer fires after 10ms and sends "PING"; the
// poller on the receiving socket reads it and stops the loop by returning -1.
@Test
public void testZLoop()
{
    int rc;
    // setUp() should create the context
    assert (ctx != null);
    ZLoop loop = new ZLoop(ctx);
    assert (loop != null);
    ZLoop.IZLoopHandler timerEvent = (loop12, item, arg) -> {
        ((Socket) arg).send("PING", 0);
        return 0;
    };
    ZLoop.IZLoopHandler socketEvent = (loop1, item, arg) -> {
        received = ((Socket) arg).recvStr(0);
        // Just end the reactor
        return -1;
    };
    // After 10 msecs, send a ping message to output
    loop.addTimer(10, 1, timerEvent, input);
    // When we get the ping message, end the reactor
    PollItem pollInput = new PollItem(output, Poller.POLLIN);
    rc = loop.addPoller(pollInput, socketEvent, output);
    Assert.assertEquals(0, rc);
    loop.start();
    loop.removePoller(pollInput);
    Assert.assertEquals("PING", received);
}
|
/**
 * Counts sorted-set members within a lexicographic range via ZLEXCOUNT.
 * Unbounded ends map to the Redis sentinels "-" (min) and "+" (max).
 */
@Override
public Long zLexCount(byte[] key, org.springframework.data.domain.Range range) {
    String min = value(range.getLowerBound(), "-");
    String max = value(range.getUpperBound(), "+");
    return read(key, StringCodec.INSTANCE, ZLEXCOUNT, key, min, max);
}
|
// With members "1","2","3", the closed lexicographic range ["1","2"] counts 2.
@Test
public void testZLexCount() {
    StringRedisTemplate redisTemplate = new StringRedisTemplate();
    redisTemplate.setConnectionFactory(new RedissonConnectionFactory(redisson));
    redisTemplate.afterPropertiesSet();
    redisTemplate.boundZSetOps("test").add("1", 10);
    redisTemplate.boundZSetOps("test").add("2", 20);
    redisTemplate.boundZSetOps("test").add("3", 30);
    Long size = redisTemplate.boundZSetOps("test").lexCount(Range.closed("1", "2"));
    assertThat(size).isEqualTo(2);
}
|
/**
 * Resolves a value-meta display name (case-insensitive) to its plugin type id.
 *
 * @param valueMetaName the display name, may be null
 * @return the plugin id, or {@link ValueMetaInterface#TYPE_NONE} when the name
 *         is null or unknown
 */
public static int getIdForValueMeta( String valueMetaName ) {
  // Null never matches: check once up front instead of on every loop iteration.
  if ( valueMetaName != null ) {
    for ( PluginInterface plugin : pluginRegistry.getPlugins( ValueMetaPluginType.class ) ) {
      if ( valueMetaName.equalsIgnoreCase( plugin.getName() ) ) {
        // getIds()[0] is the canonical id; parseInt avoids the Integer boxing
        // (and implicit unboxing) that Integer.valueOf incurred.
        return Integer.parseInt( plugin.getIds()[0] );
      }
    }
  }
  return ValueMetaInterface.TYPE_NONE;
}
|
// null, empty and "None" all resolve to TYPE_NONE; every built-in display name
// must resolve to its corresponding type constant.
@Test
public void testGetIdForValueMeta() {
    assertEquals( ValueMetaInterface.TYPE_NONE, ValueMetaFactory.getIdForValueMeta( null ) );
    assertEquals( ValueMetaInterface.TYPE_NONE, ValueMetaFactory.getIdForValueMeta( "" ) );
    assertEquals( ValueMetaInterface.TYPE_NONE, ValueMetaFactory.getIdForValueMeta( "None" ) );
    assertEquals( ValueMetaInterface.TYPE_NUMBER, ValueMetaFactory.getIdForValueMeta( "Number" ) );
    assertEquals( ValueMetaInterface.TYPE_STRING, ValueMetaFactory.getIdForValueMeta( "String" ) );
    assertEquals( ValueMetaInterface.TYPE_DATE, ValueMetaFactory.getIdForValueMeta( "Date" ) );
    assertEquals( ValueMetaInterface.TYPE_BOOLEAN, ValueMetaFactory.getIdForValueMeta( "Boolean" ) );
    assertEquals( ValueMetaInterface.TYPE_INTEGER, ValueMetaFactory.getIdForValueMeta( "Integer" ) );
    assertEquals( ValueMetaInterface.TYPE_BIGNUMBER, ValueMetaFactory.getIdForValueMeta( "BigNumber" ) );
    assertEquals( ValueMetaInterface.TYPE_SERIALIZABLE, ValueMetaFactory.getIdForValueMeta( "Serializable" ) );
    assertEquals( ValueMetaInterface.TYPE_BINARY, ValueMetaFactory.getIdForValueMeta( "Binary" ) );
    assertEquals( ValueMetaInterface.TYPE_TIMESTAMP, ValueMetaFactory.getIdForValueMeta( "Timestamp" ) );
    assertEquals( ValueMetaInterface.TYPE_INET, ValueMetaFactory.getIdForValueMeta( "Internet Address" ) );
}
|
@ScalarOperator(NOT_EQUAL)
@SqlType(StandardTypes.BOOLEAN)
@SqlNullable
public static Boolean notEqual(@SqlType(StandardTypes.BOOLEAN) boolean left, @SqlType(StandardTypes.BOOLEAN) boolean right)
{
    // Two booleans differ exactly when their XOR is true.
    return left ^ right;
}
|
// Full truth table of the SQL <> operator on booleans.
@Test
public void testNotEqual()
{
    assertFunction("true <> true", BOOLEAN, false);
    assertFunction("true <> false", BOOLEAN, true);
    assertFunction("false <> true", BOOLEAN, true);
    assertFunction("false <> false", BOOLEAN, false);
}
|
/**
 * Returns whether the array node is "empty": true when it has no elements or
 * every element is itself empty per {@code isNodeEmpty}.
 */
protected boolean isListEmpty(ArrayNode json) {
    for (int i = 0; i < json.size(); i++) {
        if (!isNodeEmpty(json.get(i))) {
            return false;
        }
    }
    return true;
}
|
// A list containing an object whose only field is an empty string still counts
// as empty.
@Test
public void isListEmpty_nodeWithEmptyField() {
    ArrayNode json = new ArrayNode(factory);
    ObjectNode nestedNode = new ObjectNode(factory);
    json.add(nestedNode);
    nestedNode.set("emptyField", new TextNode(""));
    assertThat(expressionEvaluator.isListEmpty(json)).isTrue();
}
|
/**
 * Parses a CVSSv2 vector string (e.g. {@code "AV:L/AC:L/Au:N/C:N/I:N/A:C"},
 * optionally with a leading component before the first '/') into a
 * {@link CvssV2} populated with the base metrics and the given base score.
 *
 * @param vectorString the CVSSv2 vector; a "CVSS:" prefix marks a v3.x vector
 *                     and is rejected
 * @param baseScore the externally supplied base score
 * @throws IllegalArgumentException for malformed vectors or missing base metrics
 */
public static CvssV2 vectorToCvssV2(String vectorString, Double baseScore) {
    if (vectorString.startsWith("CVSS:")) {
        throw new IllegalArgumentException("Not a valid CVSSv2 vector string: " + vectorString);
    }
    // Split the vector into "key:value" metric components.
    final String[] metricStrings = vectorString.substring(vectorString.indexOf('/') + 1).split("/");
    final HashMap<String, String> metrics = new HashMap<>();
    for (final String metricString : metricStrings) {
        final String[] metricKeyVal = metricString.split(":");
        if (metricKeyVal.length != 2) {
            throw new IllegalArgumentException(
                    String.format("Not a valid CVSSv2 vector string '%s', invalid metric component '%s'",
                            vectorString, metricString));
        }
        metrics.put(metricKeyVal[0], metricKeyVal[1]);
    }
    if (!metrics.keySet().containsAll(BASE_METRICS_V2)) {
        throw new IllegalArgumentException(
                String.format("Not a valid CVSSv2 vector string '%s'; missing one or more required Metrics;",
                        vectorString));
    }
    final String version = CvssV2Data.Version._2_0.value();
    //"AV:L/AC:L/Au:N/C:N/I:N/A:C"
    final CvssV2Data.AccessVectorType accessVector = CvssV2Data.AccessVectorType.fromValue(metrics.get("AV"));
    final CvssV2Data.AccessComplexityType attackComplexity = CvssV2Data.AccessComplexityType.fromValue(metrics.get("AC"));
    final CvssV2Data.AuthenticationType authentication = CvssV2Data.AuthenticationType.fromValue(metrics.get("Au"));
    final CvssV2Data.CiaType confidentialityImpact = CvssV2Data.CiaType.fromValue(metrics.get("C"));
    final CvssV2Data.CiaType integrityImpact = CvssV2Data.CiaType.fromValue(metrics.get("I"));
    final CvssV2Data.CiaType availabilityImpact = CvssV2Data.CiaType.fromValue(metrics.get("A"));
    final String baseSeverity = cvssV2ScoreToSeverity(baseScore);
    final CvssV2Data data = new CvssV2Data(version, vectorString, accessVector, attackComplexity,
            authentication, confidentialityImpact, integrityImpact, availabilityImpact, baseScore, baseSeverity,
            null, null, null, null, null, null, null, null, null, null);
    // Only the base-metric fields are populated; temporal/environmental stay null.
    return new CvssV2(null, null, data, baseSeverity, null, null, null, null, null, null, null);
}
|
@Test
public void testVectorToCvssV2() {
    final String vector = "/AV:L/AC:L/Au:N/C:N/I:N/A:C";
    final Double score = 1.0;
    final CvssV2 cvss = CvssUtil.vectorToCvssV2(vector, score);
    // Each single-letter metric must be mapped onto its typed enum counterpart.
    final CvssV2Data data = cvss.getCvssData();
    assertEquals(CvssV2Data.Version._2_0, data.getVersion());
    assertEquals(CvssV2Data.AccessVectorType.LOCAL, data.getAccessVector());
    assertEquals(CvssV2Data.AccessComplexityType.LOW, data.getAccessComplexity());
    assertEquals(CvssV2Data.AuthenticationType.NONE, data.getAuthentication());
    assertEquals(CvssV2Data.CiaType.NONE, data.getConfidentialityImpact());
    assertEquals(CvssV2Data.CiaType.NONE, data.getIntegrityImpact());
    assertEquals(CvssV2Data.CiaType.COMPLETE, data.getAvailabilityImpact());
    // The supplied base score of 1.0 maps to LOW severity.
    assertEquals("LOW", data.getBaseSeverity());
    assertEquals(1.0, data.getBaseScore(), 0);
}
|
/**
 * Skips up to {@code ns} characters, clamped to the remaining content; a negative
 * request may move the position backwards, but never before the start.
 */
@Override
public long skip(long ns) throws IOException {
    ensureOpen();
    final long remaining = mLimit - mPosition;
    if (remaining <= 0) {
        return 0;
    }
    // Clamp: no further forward than the end, no further backward than position 0.
    final long advance = Math.max(-mPosition, Math.min(remaining, ns));
    mPosition += advance;
    return advance;
}
|
@Test
void testSkipTooLong() throws IOException {
    final UnsafeStringReader reader = new UnsafeStringReader("abc");
    // The first skip exhausts the three-character backing string.
    reader.skip(10);
    // Skipping again past the end must report zero characters skipped.
    final long skipped = reader.skip(10);
    assertThat(skipped, is(0L));
}
|
/**
 * Starts this runner by registering it as a contender in the leader election;
 * leadership callbacks then drive the rest of the lifecycle.
 */
@Override
public void start() throws Exception {
    LOG.debug("Start leadership runner for job {}.", getJobID());
    leaderElection.startLeaderElection(this);
}
|
@Test
void testLeaderAddressOfOutdatedLeaderIsIgnored() throws Exception {
    // The leader address only becomes available once this future completes.
    final CompletableFuture<String> addressFuture = new CompletableFuture<>();
    final JobMasterServiceLeadershipRunner runner =
            newJobMasterServiceLeadershipRunnerBuilder()
                    .withSingleJobMasterServiceProcess(
                            TestingJobMasterServiceProcess.newBuilder()
                                    .setGetLeaderAddressFutureSupplier(() -> addressFuture)
                                    .build())
                    .build();
    runner.start();
    // Grant and immediately revoke leadership before the address arrives.
    final CompletableFuture<LeaderInformation> confirmedLeaderInformation =
            leaderElection.isLeader(UUID.randomUUID());
    leaderElection.notLeader();
    // Completing the address now refers to revoked leadership and must be ignored.
    addressFuture.complete("foobar");
    assertThatFuture(confirmedLeaderInformation).willNotCompleteWithin(Duration.ofMillis(5));
}
|
/**
 * Wraps a raw byte count in a {@link ValueLabel} tagged with the bytes unit.
 *
 * @param bytes number of bytes to format
 * @return the byte count paired with {@code BYTES_UNIT}
 */
public static ValueLabel formatBytes(long bytes) {
    return new ValueLabel(bytes, BYTES_UNIT);
}
|
@Test
public void formatMegaBytes() {
    // 3,000,000 bytes / 1024^2 ≈ 2.86, so the label is expected to scale with
    // binary megabytes (not the decimal 3.00 MB).
    vl = TopoUtils.formatBytes(3_000_000L);
    assertEquals(AM_WM, TopoUtils.Magnitude.MEGA, vl.magnitude());
    assertEquals(AM_WL, "2.86 MB", vl.toString());
}
|
/**
 * Returns the issue's tags as an immutable set; a null backing collection is
 * treated as "no tags" and yields an empty set, never {@code null}.
 */
@Override
public Set<String> tags() {
    return tags == null ? Set.of() : ImmutableSet.copyOf(tags);
}
|
@Test
void tags_whenNull_shouldReturnEmptySet() {
    // tags() must never return null; a null backing field yields an empty set.
    assertThat(issue.tags()).isEmpty();
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.