focal_method
stringlengths
13
60.9k
test_case
stringlengths
25
109k
/**
 * Builds a Wavefront-specific canary scope by copying every field from the
 * generic scope and deriving the granularity from the step size.
 */
@Override
public CanaryScope buildCanaryScope(CanaryScope scope) {
    final WavefrontCanaryScope wavefrontScope = new WavefrontCanaryScope();
    wavefrontScope.setScope(scope.getScope());
    wavefrontScope.setLocation(scope.getLocation());
    wavefrontScope.setStart(scope.getStart());
    wavefrontScope.setEnd(scope.getEnd());
    wavefrontScope.setStep(scope.getStep());
    // Granularity is not copied — it is computed from the step value.
    wavefrontScope.setGranularity(generateGranularity(scope.getStep()));
    wavefrontScope.setExtendedScopeParams(scope.getExtendedScopeParams());
    return wavefrontScope;
}
/**
 * Verifies that a step of {@code WavefrontCanaryScopeFactory.DAY} is
 * translated into the "d" granularity on the built Wavefront scope.
 */
@Test
public void testBuildCanaryScope_WithDayGranularity() {
    CanaryScope canaryScope = new CanaryScope(
            "scope", "location", Instant.now(), Instant.now(),
            WavefrontCanaryScopeFactory.DAY, null);
    CanaryScope generatedCanaryScope = queryBuilder.buildCanaryScope(canaryScope);
    WavefrontCanaryScope wavefrontCanaryScope = (WavefrontCanaryScope) generatedCanaryScope;
    assertThat(wavefrontCanaryScope.getGranularity()).isEqualTo("d");
}
/**
 * Runs time-based retention for the given index set: counts indices that
 * exceed the configured lifetime limits (excluding reopened indices and the
 * current write index) and, if any qualify, delegates to the retention
 * executor to perform the given action.
 *
 * @param indexSet   the index set to evaluate
 * @param config     lifetime configuration (min/max retention periods)
 * @param action     the retention action to run on aged-out indices
 * @param actionName human-readable name of the action, for logging
 */
public void retain(IndexSet indexSet, IndexLifetimeConfig config, RetentionExecutor.RetentionAction action, String actionName) {
    final Map<String, Set<String>> deflectorIndices = indexSet.getAllIndexAliases();
    // Account for DST and time zones in determining age
    final DateTime now = clock.nowUTC();
    final long cutoffSoft = now.minus(config.indexLifetimeMin()).getMillis();
    final long cutoffHard = now.minus(config.indexLifetimeMax()).getMillis();
    // An index is eligible only when it is not reopened, is not the current
    // write target, and its age exceeds the configured limits.
    final int removeCount = (int) deflectorIndices.keySet()
            .stream()
            .filter(indexName -> !indices.isReopened(indexName))
            .filter(indexName -> !hasCurrentWriteAlias(indexSet, deflectorIndices, indexName))
            .filter(indexName -> exceedsAgeLimit(indexName, cutoffSoft, cutoffHard))
            .count();
    if (LOG.isDebugEnabled()) {
        // Per-index diagnostic flags so operators can see why each index was
        // or was not selected for retention.
        var debug = deflectorIndices.keySet().stream()
                .collect(Collectors.toMap(k -> k, k -> Map.of(
                        "isReopened", indices.isReopened(k),
                        "hasCurrentWriteAlias", hasCurrentWriteAlias(indexSet, deflectorIndices, k),
                        "exceedsAgeLimit", exceedsAgeLimit(k, cutoffSoft, cutoffHard),
                        "closingDate", indices.indexClosingDate(k),
                        "creationDate", indices.indexCreationDate(k)
                )));
        Joiner.MapJoiner mapJoiner = Joiner.on("\n").withKeyValueSeparator("=");
        LOG.debug("Debug info retain for indexSet <{}>: (min {}, max {}) removeCount: {} details: <{}>",
                indexSet.getIndexPrefix(), config.indexLifetimeMin(), config.indexLifetimeMax(),
                removeCount, mapJoiner.join(debug));
    }
    if (removeCount > 0) {
        final String msg = "Running retention for " + removeCount + " aged-out indices.";
        LOG.info(msg);
        activityWriter.write(new Activity(msg, TimeBasedRetentionExecutor.class));
        retentionExecutor.runRetention(indexSet, removeCount, action, actionName);
    }
}
/**
 * When an index has no closing date, retention should fall back to the
 * creation date; an index created 17 days ago exceeds the 14/16-day limits
 * and must be retained (i.e. passed to the retention action).
 */
@Test
public void timeBasedMissingClosingDate() {
    when(indices.indexClosingDate("test_1")).thenReturn(Optional.empty());
    when(indices.indexCreationDate("test_1")).thenReturn(Optional.of(NOW.minusDays(17)));
    underTest.retain(indexSet, getIndexLifetimeConfig(14, 16), action, "action");
    verify(action, times(1)).retain(retainedIndexName.capture(), eq(indexSet));
    assertThat(retainedIndexName.getValue()).containsExactly("test_1");
}
/**
 * Compiles the given CEL filter code and evaluates it once against a
 * synthetic message built from the supplied test data. Returns a DTO holding
 * either the boolean result or an error string, distinguishing compilation
 * failures from execution failures.
 */
public static SmartFilterTestExecutionResultDTO execSmartFilterTest(SmartFilterTestExecutionDTO execData) {
    Predicate<TopicMessageDTO> predicate;
    try {
        predicate = MessageFilters.celScriptFilter(execData.getFilterCode());
    } catch (Exception e) {
        // Compilation phase failed — report without executing anything.
        log.info("Smart filter '{}' compilation error", execData.getFilterCode(), e);
        return new SmartFilterTestExecutionResultDTO()
                .error("Compilation error : " + e.getMessage());
    }
    try {
        // Build a one-off message from the request fields and run the filter.
        var result = predicate.test(
                new TopicMessageDTO()
                        .key(execData.getKey())
                        .content(execData.getValue())
                        .headers(execData.getHeaders())
                        .offset(execData.getOffset())
                        .partition(execData.getPartition())
                        .timestamp(
                                // Epoch millis are interpreted as UTC; absent -> null timestamp.
                                Optional.ofNullable(execData.getTimestampMs())
                                        .map(ts -> OffsetDateTime.ofInstant(Instant.ofEpochMilli(ts), ZoneOffset.UTC))
                                        .orElse(null))
        );
        return new SmartFilterTestExecutionResultDTO()
                .result(result);
    } catch (Exception e) {
        log.info("Smart filter {} execution error", execData, e);
        return new SmartFilterTestExecutionResultDTO()
                .error("Execution error : " + e.getMessage());
    }
}
/**
 * A filter that is not a valid boolean expression ("1/0") must surface as a
 * compilation error with no result value.
 */
@Test
void execSmartFilterTestCompilesToNonBooleanExpression() {
    var result = execSmartFilterTest(
            new SmartFilterTestExecutionDTO()
                    .filterCode("1/0")
    );
    assertThat(result.getResult()).isNull();
    assertThat(result.getError()).containsIgnoringCase("Compilation error");
}
/**
 * Extracts a {@code MaxWeightExcept} value for the edge from conditional OSM
 * weight tags. Two tagging styles are handled: an explicit
 * {@code maxweight:conditional=no/none @ <exception>} tag, and vehicle
 * restriction tags whose conditional weight matches the plain maxweight tags.
 */
public void handleWayTags(int edgeId, EdgeIntAccess edgeIntAccess, ReaderWay way, IntsRef relationFlags) {
    // tagging like maxweight:conditional=no/none @ destination/delivery/forestry/service
    String condValue = way.getTag("maxweight:conditional", "");
    if (!condValue.isEmpty()) {
        String[] values = condValue.split("@");
        if (values.length == 2) {
            String key = values[0].trim();
            String value = values[1].trim();
            if ("no".equals(key) || "none".equals(key)) {
                // Strip optional surrounding parentheses, e.g. "(destination)".
                if (value.startsWith("(") && value.endsWith(")"))
                    value = value.substring(1, value.length() - 1);
                mweEnc.setEnum(false, edgeId, edgeIntAccess, MaxWeightExcept.find(value));
                return;
            }
        }
    }
    // For tagging like vehicle:conditional=destination @ (weight>3.5) AND maxweight=3.5
    // For vehicle:conditional=no @ (weight>3.5) => NONE is used, which is consistent with max_weight being set to 3.5 in this case
    for (String restriction : HGV_RESTRICTIONS) {
        String value = way.getTag(restriction, "");
        int atIndex = value.indexOf("@");
        if (atIndex > 0) {
            double dec = OSMValueExtractor.conditionalWeightToTons(value);
            // set it only if the weight value is the same as in max_weight
            if (!Double.isNaN(dec) && (stringToTons(way.getTag("maxweight", "")) == dec
                    || stringToTons(way.getTag("maxweightrating:hgv", "")) == dec
                    || stringToTons(way.getTag("maxgcweight", "")) == dec)) {
                mweEnc.setEnum(false, edgeId, edgeIntAccess, MaxWeightExcept.find(value.substring(0, atIndex).trim()));
                break;
            }
        }
    }
}
/**
 * Covers the conditional-tag matrix: hgv:conditional "no" stores NONE, a
 * vehicle exception without a matching maxweight stays NONE, and exceptions
 * whose conditional weight matches maxweight are stored.
 */
@Test
public void testConditionalTags() {
    EdgeIntAccess edgeIntAccess = new ArrayEdgeIntAccess(1);
    ReaderWay readerWay = new ReaderWay(1);
    readerWay.setTag("highway", "primary");
    readerWay.setTag("hgv:conditional", "no @ (weight > 7.5)");
    parser.handleWayTags(edgeId, edgeIntAccess, readerWay, relFlags);
    assertEquals(MaxWeightExcept.NONE, mwEnc.getEnum(false, edgeId, edgeIntAccess));

    // weight=5 is missing
    edgeIntAccess = new ArrayEdgeIntAccess(1);
    readerWay.clearTags();
    readerWay.setTag("highway", "primary");
    readerWay.setTag("vehicle:conditional", "delivery @ (weight > 5)");
    parser.handleWayTags(edgeId, edgeIntAccess, readerWay, relFlags);
    assertEquals(MaxWeightExcept.NONE, mwEnc.getEnum(false, edgeId, edgeIntAccess));

    // exception weight matches maxweight -> DELIVERY is stored
    edgeIntAccess = new ArrayEdgeIntAccess(1);
    readerWay.clearTags();
    readerWay.setTag("highway", "primary");
    readerWay.setTag("vehicle:conditional", "delivery @ (weight > 7.5)");
    readerWay.setTag("maxweight", "7.5");
    parser.handleWayTags(edgeId, edgeIntAccess, readerWay, relFlags);
    assertEquals(MaxWeightExcept.DELIVERY, mwEnc.getEnum(false, edgeId, edgeIntAccess));

    // also accepts a maxweight-based condition when values agree
    edgeIntAccess = new ArrayEdgeIntAccess(1);
    readerWay.clearTags();
    readerWay.setTag("highway", "primary");
    readerWay.setTag("hgv:conditional", "destination @ (maxweight > 5)");
    readerWay.setTag("maxweight", "5");
    parser.handleWayTags(edgeId, edgeIntAccess, readerWay, relFlags);
    assertEquals(MaxWeightExcept.DESTINATION, mwEnc.getEnum(false, edgeId, edgeIntAccess));
}
/** Creates a fresh {@link JwtBuilder} for assembling a JWT token. */
public JwtBuilder jwtBuilder() {
    return new JwtBuilder();
}
/**
 * A 64-byte secret key should select the HS512 algorithm: the compacted
 * token must start with the HS512 header.
 */
@Test
void testParseWith64Key() {
    NacosJwtParser parser = new NacosJwtParser(encode("SecretKey012345678901234567SecretKey0123456789012345678901289012"));
    String token = parser.jwtBuilder().setUserName("nacos").setExpiredTime(100L).compact();
    assertTrue(token.startsWith(NacosSignatureAlgorithm.HS512.getHeader()));
}
/**
 * Submits the job graph over REST. Pipeline: (1) serialize the JobGraph to a
 * temp file off-thread, (2) assemble the multipart request (graph file, user
 * jars, local distributed-cache artifacts), (3) send with retries, then
 * (4) delete the temp file regardless of outcome. Failures surface as a
 * {@link JobSubmissionException}.
 */
@Override
public CompletableFuture<JobID> submitJob(@Nonnull JobGraph jobGraph) {
    // Step 1: serialize the JobGraph to a temporary file on the executor,
    // not on the caller's thread.
    CompletableFuture<java.nio.file.Path> jobGraphFileFuture =
            CompletableFuture.supplyAsync(
                    () -> {
                        try {
                            final java.nio.file.Path jobGraphFile =
                                    Files.createTempFile(
                                            "flink-jobgraph-" + jobGraph.getJobID(), ".bin");
                            try (ObjectOutputStream objectOut =
                                    new ObjectOutputStream(
                                            Files.newOutputStream(jobGraphFile))) {
                                objectOut.writeObject(jobGraph);
                            }
                            return jobGraphFile;
                        } catch (IOException e) {
                            throw new CompletionException(
                                    new FlinkException("Failed to serialize JobGraph.", e));
                        }
                    },
                    executorService);

    // Step 2: build the request body and collect all files to upload.
    CompletableFuture<Tuple2<JobSubmitRequestBody, Collection<FileUpload>>> requestFuture =
            jobGraphFileFuture.thenApply(
                    jobGraphFile -> {
                        List<String> jarFileNames = new ArrayList<>(8);
                        List<JobSubmitRequestBody.DistributedCacheFile> artifactFileNames =
                                new ArrayList<>(8);
                        Collection<FileUpload> filesToUpload = new ArrayList<>(8);
                        filesToUpload.add(
                                new FileUpload(
                                        jobGraphFile, RestConstants.CONTENT_TYPE_BINARY));
                        for (Path jar : jobGraph.getUserJars()) {
                            jarFileNames.add(jar.getName());
                            filesToUpload.add(
                                    new FileUpload(
                                            Paths.get(jar.toUri()),
                                            RestConstants.CONTENT_TYPE_JAR));
                        }
                        for (Map.Entry<String, DistributedCache.DistributedCacheEntry>
                                artifacts : jobGraph.getUserArtifacts().entrySet()) {
                            final Path artifactFilePath =
                                    new Path(artifacts.getValue().filePath);
                            try {
                                // Only local artifacts need to be uploaded.
                                if (!artifactFilePath.getFileSystem().isDistributedFS()) {
                                    artifactFileNames.add(
                                            new JobSubmitRequestBody.DistributedCacheFile(
                                                    artifacts.getKey(),
                                                    artifactFilePath.getName()));
                                    filesToUpload.add(
                                            new FileUpload(
                                                    Paths.get(artifactFilePath.getPath()),
                                                    RestConstants.CONTENT_TYPE_BINARY));
                                }
                            } catch (IOException e) {
                                throw new CompletionException(
                                        new FlinkException(
                                                "Failed to get the FileSystem of artifact "
                                                        + artifactFilePath
                                                        + ".",
                                                e));
                            }
                        }
                        final JobSubmitRequestBody requestBody =
                                new JobSubmitRequestBody(
                                        jobGraphFile.getFileName().toString(),
                                        jarFileNames,
                                        artifactFileNames);
                        return Tuple2.of(
                                requestBody,
                                Collections.unmodifiableCollection(filesToUpload));
                    });

    // Step 3: send the request, retrying on connection problems or
    // service-unavailable responses.
    final CompletableFuture<JobSubmitResponseBody> submissionFuture =
            requestFuture.thenCompose(
                    requestAndFileUploads -> {
                        LOG.info(
                                "Submitting job '{}' ({}).",
                                jobGraph.getName(),
                                jobGraph.getJobID());
                        return sendRetriableRequest(
                                JobSubmitHeaders.getInstance(),
                                EmptyMessageParameters.getInstance(),
                                requestAndFileUploads.f0,
                                requestAndFileUploads.f1,
                                isConnectionProblemOrServiceUnavailable(),
                                (receiver, error) -> {
                                    if (error != null) {
                                        LOG.warn(
                                                "Attempt to submit job '{}' ({}) to '{}' has failed.",
                                                jobGraph.getName(),
                                                jobGraph.getJobID(),
                                                receiver,
                                                error);
                                    } else {
                                        LOG.info(
                                                "Successfully submitted job '{}' ({}) to '{}'.",
                                                jobGraph.getName(),
                                                jobGraph.getJobID(),
                                                receiver);
                                    }
                                });
                    });

    // Step 4: always clean up the temp file, whether submission succeeded or not.
    submissionFuture
            .exceptionally(ignored -> null) // ignore errors
            .thenCompose(ignored -> jobGraphFileFuture)
            .thenAccept(
                    jobGraphFile -> {
                        try {
                            Files.delete(jobGraphFile);
                        } catch (IOException e) {
                            LOG.warn("Could not delete temporary file {}.", jobGraphFile, e);
                        }
                    });

    return submissionFuture
            .thenApply(ignore -> jobGraph.getJobID())
            .exceptionally(
                    (Throwable throwable) -> {
                        throw new CompletionException(
                                new JobSubmissionException(
                                        jobGraph.getJobID(),
                                        "Failed to submit JobGraph.",
                                        ExceptionUtils.stripCompletionException(throwable)));
                    });
}
/**
 * End-to-end smoke test: submitting a job graph without any user artifacts
 * must complete without error against a test REST endpoint.
 */
@Test
@Timeout(value = 120_000, unit = TimeUnit.MILLISECONDS)
void testJobSubmissionWithoutUserArtifact() throws Exception {
    try (final TestRestServerEndpoint restServerEndpoint = createRestServerEndpoint(new TestJobSubmitHandler())) {
        try (RestClusterClient<?> restClusterClient = createRestClusterClient(restServerEndpoint.getServerAddress().getPort())) {
            restClusterClient.submitJob(jobGraph).get();
        }
    }
}
/**
 * Returns the adjacency matrix backing this builder.
 * NOTE(review): this exposes the internal mutable array without a defensive
 * copy — callers share state with the builder. Confirm the aliasing is
 * intended before changing it.
 */
public boolean[][] getAdjacencyMatrix() {
    return adjacencyMatrix;
}
/**
 * Verifies that elimination candidates are ordered primarily by the number
 * of new edges required: x5 (1 edge, despite a higher weight) comes before
 * x1 (3 edges), which comes before x8 (6 edges).
 */
@Test
public void testPriorityQueueWithMinimalNewEdges() {
    Graph<BayesVariable> graph = new BayesNetwork();
    GraphNode x0 = addNode(graph);
    GraphNode x1 = addNode(graph);
    GraphNode x2 = addNode(graph);
    GraphNode x3 = addNode(graph);
    GraphNode x4 = addNode(graph);
    GraphNode x5 = addNode(graph);
    GraphNode x6 = addNode(graph);
    GraphNode x7 = addNode(graph);
    GraphNode x8 = addNode(graph);
    GraphNode x9 = addNode(graph);
    GraphNode x10 = addNode(graph);
    GraphNode x11 = addNode(graph);
    GraphNode x12 = addNode(graph);

    // 3 new edges
    connectParentToChildren(x2, x1);
    connectParentToChildren(x3, x1);
    connectParentToChildren(x4, x1);

    // 1 new edge
    // we give this a high weight, to show required new edges is compared first
    connectParentToChildren(x6, x5);
    connectParentToChildren(x7, x5);
    x5.setContent(new BayesVariable<String>("x5", x0.getId(), new String[]{"a", "b", "c"}, new double[][]{{0.1, 0.1, 0.1}}));
    x6.setContent(new BayesVariable<String>("x6", x0.getId(), new String[]{"a", "b", "c"}, new double[][]{{0.1, 0.1, 0.1}}));
    x7.setContent(new BayesVariable<String>("x7", x0.getId(), new String[]{"a", "b", "c"}, new double[][]{{0.1, 0.1, 0.1}}));

    // 6 new edges
    connectParentToChildren(x9, x8);
    connectParentToChildren(x10, x8);
    connectParentToChildren(x11, x8);
    connectParentToChildren(x12, x8);

    JunctionTreeBuilder jtBuilder = new JunctionTreeBuilder( graph );
    //jtBuilder.moralize(); // don't moralize, as we want to force a simpler construction for required edges, for the purposes of testing

    PriorityQueue<EliminationCandidate> p = new PriorityQueue<EliminationCandidate>(graph.size());
    EliminationCandidate elmCandVert = new EliminationCandidate(graph, jtBuilder.getAdjacencyMatrix(), x1);
    p.add( elmCandVert );
    elmCandVert = new EliminationCandidate(graph, jtBuilder.getAdjacencyMatrix(), x5);
    p.add( elmCandVert );
    elmCandVert = new EliminationCandidate(graph, jtBuilder.getAdjacencyMatrix(), x8);
    p.add( elmCandVert );

    // Candidates come off the queue in ascending order of required new edges.
    EliminationCandidate v = p.remove();
    int id = v.getV().getId();
    assertThat(id).isEqualTo(5);
    assertThat(v.getNewEdgesRequired()).isEqualTo(1);

    v = p.remove();
    id = v.getV().getId();
    assertThat(id).isEqualTo(1);
    assertThat(v.getNewEdgesRequired()).isEqualTo(3);

    v = p.remove();
    id = v.getV().getId();
    assertThat(id).isEqualTo(8);
    assertThat(v.getNewEdgesRequired()).isEqualTo(6);

    assertThat(p.size()).isEqualTo(0);
}
/**
 * Creates a disrupt context that delays by the given amount.
 *
 * @param delay the delay value; must be non-negative
 * @throws IllegalArgumentException if {@code delay} is negative
 */
public static DisruptContext delay(long delay) {
    if (delay < 0) {
        throw new IllegalArgumentException("Delay cannot be smaller than 0");
    }
    return new DelayDisruptContext(delay);
}
/** A negative delay must be rejected with an IllegalArgumentException. */
@Test(expectedExceptions = IllegalArgumentException.class)
public void testLatencyIllegal() {
    final long latency = -4200;
    DisruptContexts.delay(latency);
}
/**
 * Resolves a schema-evolution tag to the event types it covers: family tags
 * (e.g. "column") expand to several types, while a specific tag (e.g.
 * "add.column") resolves to a single type. Unknown tags yield an empty list.
 */
@VisibleForTesting
public static List<SchemaChangeEventType> resolveSchemaEvolutionTag(String tag) {
    List<SchemaChangeEventType> types =
            new ArrayList<>(Arrays.asList(SchemaChangeEventTypeFamily.ofTag(tag)));
    if (types.isEmpty()) {
        // It's a specified tag
        SchemaChangeEventType type = SchemaChangeEventType.ofTag(tag);
        if (type != null) {
            types.add(type);
        }
    }
    return types;
}
/**
 * Exhaustively checks tag resolution: family tags ("all", "column", "table",
 * "drop", ...) expand to their member types; fully-qualified tags resolve to
 * a single type.
 */
@Test
public void testResolveSchemaEvolutionTag() {
    assertThat(ChangeEventUtils.resolveSchemaEvolutionTag("all"))
            .isEqualTo(
                    Arrays.asList(
                            ADD_COLUMN, ALTER_COLUMN_TYPE, CREATE_TABLE, DROP_COLUMN,
                            DROP_TABLE, RENAME_COLUMN, TRUNCATE_TABLE));
    assertThat(ChangeEventUtils.resolveSchemaEvolutionTag("column"))
            .isEqualTo(
                    Arrays.asList(ADD_COLUMN, ALTER_COLUMN_TYPE, DROP_COLUMN, RENAME_COLUMN));
    assertThat(ChangeEventUtils.resolveSchemaEvolutionTag("table"))
            .isEqualTo(Arrays.asList(CREATE_TABLE, DROP_TABLE, TRUNCATE_TABLE));
    assertThat(ChangeEventUtils.resolveSchemaEvolutionTag("rename.column"))
            .isEqualTo(Collections.singletonList(RENAME_COLUMN));
    assertThat(ChangeEventUtils.resolveSchemaEvolutionTag("drop"))
            .isEqualTo(Arrays.asList(DROP_COLUMN, DROP_TABLE));
    assertThat(ChangeEventUtils.resolveSchemaEvolutionTag("drop.column"))
            .isEqualTo(Collections.singletonList(DROP_COLUMN));
    assertThat(ChangeEventUtils.resolveSchemaEvolutionTag("create"))
            .isEqualTo(Collections.singletonList(CREATE_TABLE));
    assertThat(ChangeEventUtils.resolveSchemaEvolutionTag("create.table"))
            .isEqualTo(Collections.singletonList(CREATE_TABLE));
    assertThat(ChangeEventUtils.resolveSchemaEvolutionTag("alter"))
            .isEqualTo(Collections.singletonList(ALTER_COLUMN_TYPE));
    assertThat(ChangeEventUtils.resolveSchemaEvolutionTag("alter.column.type"))
            .isEqualTo(Collections.singletonList(ALTER_COLUMN_TYPE));
    assertThat(ChangeEventUtils.resolveSchemaEvolutionTag("add"))
            .isEqualTo(Collections.singletonList(ADD_COLUMN));
    assertThat(ChangeEventUtils.resolveSchemaEvolutionTag("add.column"))
            .isEqualTo(Collections.singletonList(ADD_COLUMN));
}
/**
 * Static factory for a {@link GoConfigClassLoader} that will parse the given
 * XML element into an instance of {@code aClass}.
 */
public static <T> GoConfigClassLoader<T> classParser(Element e, Class<T> aClass, ConfigCache configCache, GoCipher goCipher,
                                                     final ConfigElementImplementationRegistry registry,
                                                     ConfigReferenceElements configReferenceElements) {
    return new GoConfigClassLoader<>(e, aClass, configCache, goCipher, registry, configReferenceElements);
}
/**
 * Parsing an attribute-aware config tag whose required attribute is absent
 * must fail with a descriptive RuntimeException.
 */
@Test
public void shouldErrorOutWhenAttributeAwareConfigTagHasAttributeWithBlankValue() {
    final Element element = new Element("example");
    final GoConfigClassLoader<ConfigWithAttributeAwareConfigTagAnnotation> loader =
            GoConfigClassLoader.classParser(element, ConfigWithAttributeAwareConfigTagAnnotation.class,
                    configCache, goCipher, registry, referenceElements);
    assertThatThrownBy(loader::parse)
            .isInstanceOf(RuntimeException.class)
            .hasMessageContaining("Expected attribute `type` to be present for \n\t<example />.");
}
/**
 * Initializes the handler and parses the configured non-browser user-agent
 * list into a lowercase array.
 * NOTE(review): the separator regex {@code \W*,\W*} also consumes any
 * non-word characters around each comma (spaces, punctuation) — confirm this
 * generous splitting is intended rather than plain "\s*,\s*".
 */
@Override
public void init(Properties config) throws ServletException {
    super.init(config);
    nonBrowserUserAgents = config.getProperty(
            NON_BROWSER_USER_AGENTS, NON_BROWSER_USER_AGENTS_DEFAULT)
            .split("\\W*,\\W*");
    // Normalize case once so later user-agent checks can be case-insensitive.
    for (int i = 0; i < nonBrowserUserAgents.length; i++) {
        nonBrowserUserAgents[i] = nonBrowserUserAgents[i].toLowerCase(Locale.ENGLISH);
    }
}
/**
 * Reconfigures the handler with a custom non-browser user-agent list and
 * re-runs the Kerberos request tests to confirm non-browser behavior still
 * applies.
 */
@Test(timeout=60000)
public void testNonDefaultNonBrowserUserAgentAsNonBrowser() throws Exception {
    if (handler != null) {
        // Drop the handler built by setUp so we can re-init with custom props.
        handler.destroy();
        handler = null;
    }
    handler = getNewAuthenticationHandler();
    Properties props = getDefaultProperties();
    props.setProperty("alt-kerberos.non-browser.user-agents", "foo, bar");
    try {
        handler.init(props);
    } catch (Exception ex) {
        handler = null;
        throw ex;
    }

    // Run the kerberos tests again
    testRequestWithoutAuthorization();
    testRequestWithInvalidAuthorization();
    testRequestWithAuthorization();
    testRequestWithInvalidKerberosAuthorization();
}
/**
 * Maps the AD session's authentication status to a follow-up action:
 * success starts a BVD session, a user cancel produces a redirect URL, and
 * anything else (including an unknown status) is rejected.
 *
 * @throws AdException if the status is unknown or not a successful/cancelled one
 */
public String checkAuthenticationStatus(AdSession adSession, SamlSession samlSession, String artifact)
        throws BvdException, SamlSessionException, UnsupportedEncodingException, AdException {
    AdAuthenticationStatus status = AdAuthenticationStatus.valueOfLabel(adSession.getAuthenticationStatus());
    if (status == null) {
        // Unrecognized status label — treat the same as a failed authentication.
        throw new AdException("No successful authentication");
    }
    return switch (status) {
        case STATUS_SUCCESS -> bvdClient.startBvdSession(
                adSession.getBsn(),
                "BSN",
                samlSession.getServiceEntityId(),
                LevelOfAssurance.map(String.valueOf(adSession.getAuthenticationLevel())),
                samlSession.getServiceUuid(),
                samlSession.getTransactionId());
        case STATUS_CANCELED -> assertionConsumerServiceUrlService.generateRedirectUrl(
                artifact, samlSession.getTransactionId(), samlSession.getHttpSessionId(), BvdStatus.CANCELLED);
        default -> throw new AdException("No successful authentication");
    };
}
/**
 * A session in STATUS_FAILED must be rejected with an AdException carrying
 * the standard "No successful authentication" message.
 */
@Test
public void checkAuthenticationStatusFailedTest() {
    AdSession adSession = new AdSession();
    adSession.setAuthenticationStatus(AdAuthenticationStatus.STATUS_FAILED.label);
    SamlSession samlSession1 = new SamlSession(1L);
    String artifact = "artifact";
    Exception result = assertThrows(AdException.class,
            () -> adService.checkAuthenticationStatus(adSession, samlSession1, artifact));
    // Fix: JUnit's assertEquals takes (expected, actual); the original had the
    // arguments reversed, which produces misleading failure messages.
    assertEquals("No successful authentication", result.getMessage());
}
/**
 * Returns whether all logs should also be written to the console, as
 * configured by the {@code LOG_CONSOLE} property (defaults to false).
 */
public boolean isAllLogsToConsoleEnabled(Props props) {
    return props.valueAsBoolean(LOG_CONSOLE.getKey(), false);
}
/** An explicit "false" for sonar.log.console must disable console logging. */
@Test
public void log_to_console_setting_disabled() {
    Properties properties = new Properties();
    properties.setProperty("sonar.log.console", "false");
    assertThat(underTest.isAllLogsToConsoleEnabled(new Props(properties))).isFalse();
}
/**
 * Asserts that the subject compares as equal (compareTo == 0) to the
 * expected BigDecimal, delegating to the shared comparison helper.
 */
@Override
public void isEquivalentAccordingToCompareTo(@Nullable BigDecimal expected) {
    compareValues(expected);
}
/** Sanity check: a value is compareTo-equivalent to itself. */
@Test
public void isEquivalentAccordingToCompareTo() {
    // make sure this still works
    assertThat(TEN).isEquivalentAccordingToCompareTo(TEN);
}
/**
 * Activates the app authenticator: validates the decoded PIN and user-app id,
 * records the PIN and sign-in time, enforces the app-switch feature flag, and
 * then either finishes a letter-based registration (PENDING flow) or activates
 * the app directly for other activation flows.
 */
@Override
public AppResponse process(Flow flow, ActivateAppRequest body) {
    String decodedPin = ChallengeService.decodeMaskedPin(appSession.getIv(), appAuthenticator.getSymmetricKey(), body.getMaskedPincode());
    // PIN must decode to exactly five digits, otherwise fail the flow.
    if ((decodedPin == null || !Pattern.compile("\\d{5}").matcher(decodedPin).matches())) {
        return flow.setFailedStateAndReturnNOK(appSession);
    } else if (!appAuthenticator.getUserAppId().equals(body.getUserAppId())){
        // User-app id mismatch — log remotely and fail.
        digidClient.remoteLog("754", Map.of(lowerUnderscore(ACCOUNT_ID) ,appAuthenticator.getAccountId()));
        return flow.setFailedStateAndReturnNOK(appSession);
    }

    appAuthenticator.setMaskedPin(decodedPin);
    appAuthenticator.setLastSignInAt(ZonedDateTime.now());

    // Feature flag: app activation can be globally disabled.
    if (!switchService.digidAppSwitchEnabled() ) {
        digidClient.remoteLog("824", Map.of(lowerUnderscore(ACCOUNT_ID), appAuthenticator.getAccountId()));
        throw new SwitchDisabledException();
    }

    if (flow instanceof RequestAccountAndAppFlow || flow instanceof ActivateAppWithPasswordLetterFlow) {
        // Letter-based flows finish registration remotely and stay PENDING
        // until the activation code arrives by post.
        Map<String, String> result = digidClient.finishRegistration(appSession.getRegistrationId(), appSession.getAccountId(), flow.getName());
        if (result.get(lowerUnderscore(STATUS)).equals("PENDING")
                && result.get(lowerUnderscore(ACTIVATION_CODE)) != null
                && result.get(lowerUnderscore(GELDIGHEIDSTERMIJN)) != null) {
            appAuthenticator.setStatus("pending");
            appAuthenticator.setActivationCode(result.get(lowerUnderscore(ACTIVATION_CODE)));
            appAuthenticator.setGeldigheidstermijn(result.get(lowerUnderscore(GELDIGHEIDSTERMIJN)));
            appAuthenticator.setRequestedAt(ZonedDateTime.now());
            return new StatusResponse("PENDING");
        } else {
            return new NokResponse();
        }
    } else {
        // All other flows activate the app immediately.
        return ((ActivationFlow) flow).activateApp(appAuthenticator, appSession);
    }
}
/** A masked pincode that does not decode to a 5-digit PIN yields a NOK response. */
@Test
void processNoDecodedPin(){
    mockedActivateAppRequest.setMaskedPincode("1234");
    when(mockedFlow.setFailedStateAndReturnNOK(any(AppSession.class))).thenReturn(new NokResponse());
    AppResponse appResponse = pincodeSet.process(mockedFlow, mockedActivateAppRequest);
    assertTrue(appResponse instanceof NokResponse);
}
/**
 * Computes the weighted moving average at {@code index}: the most recent bar
 * gets weight {@code loopLength}, the oldest gets weight 1, and the sum is
 * divided by the triangular number loopLength*(loopLength+1)/2.
 */
@Override
protected Num calculate(int index) {
    if (index == 0) {
        return indicator.getValue(0);
    }
    Num value = zero();
    // Use a shortened window at the start of the series.
    int loopLength = (index - barCount < 0) ? index + 1 : barCount;
    int actualIndex = index;
    for (int i = loopLength; i > 0; i--) {
        value = value.plus(numOf(i).multipliedBy(indicator.getValue(actualIndex)));
        actualIndex--;
    }
    // Integer division is exact: n*(n+1) is always even.
    return value.dividedBy(numOf((loopLength * (loopLength + 1)) / 2));
}
/**
 * WMA(3) over 1..6: the first two values use a shortened window; from index 2
 * on, each value is (1*x[i-2] + 2*x[i-1] + 3*x[i]) / 6.
 */
@Test
public void calculate() {
    MockBarSeries series = new MockBarSeries(numFunction, 1d, 2d, 3d, 4d, 5d, 6d);
    Indicator<Num> close = new ClosePriceIndicator(series);
    Indicator<Num> wmaIndicator = new WMAIndicator(close, 3);
    assertNumEquals(1, wmaIndicator.getValue(0));
    assertNumEquals(1.6667, wmaIndicator.getValue(1));
    assertNumEquals(2.3333, wmaIndicator.getValue(2));
    assertNumEquals(3.3333, wmaIndicator.getValue(3));
    assertNumEquals(4.3333, wmaIndicator.getValue(4));
    assertNumEquals(5.3333, wmaIndicator.getValue(5));
}
public static InetSocketAddress replaceUnresolvedNumericIp(InetSocketAddress inetSocketAddress) { requireNonNull(inetSocketAddress, "inetSocketAddress"); if (!inetSocketAddress.isUnresolved()) { return inetSocketAddress; } InetSocketAddress inetAddressForIpString = createForIpString( inetSocketAddress.getHostString(), inetSocketAddress.getPort()); if (inetAddressForIpString != null) { return inetAddressForIpString; } else { return inetSocketAddress; } }
/** Null input must be rejected with an NPE naming the parameter. */
@Test
void replaceUnresolvedNumericIpBadValues() {
    assertThatExceptionOfType(NullPointerException.class)
            .isThrownBy(() -> AddressUtils.replaceUnresolvedNumericIp(null))
            .withMessage("inetSocketAddress");
}
/**
 * Returns a supplier for a persistent, timestamped RocksDB key-value store
 * with the given name.
 *
 * @throws NullPointerException if {@code name} is null
 */
public static KeyValueBytesStoreSupplier persistentTimestampedKeyValueStore(final String name) {
    Objects.requireNonNull(name, "name cannot be null");
    // 'true' selects the timestamped store variant.
    return new RocksDBKeyValueBytesStoreSupplier(name, true);
}
/** The supplier must produce the timestamped RocksDB store implementation. */
@Test
public void shouldCreateRocksDbTimestampedStore() {
    assertThat(Stores.persistentTimestampedKeyValueStore("store").get(), instanceOf(RocksDBTimestampedStore.class));
}
/**
 * Detects the Android AAR/classes.jar pairing between two Java, non-virtual
 * dependencies: if one is an .aar and the other is the classes.jar extracted
 * from it, returns the .aar as the "main" dependency. Returns null when the
 * pair does not match this pattern.
 */
protected Dependency getMainAndroidDependency(Dependency dependency1, Dependency dependency2) {
    if (!dependency1.isVirtual() && !dependency2.isVirtual()
            && Ecosystem.JAVA.equals(dependency1.getEcosystem())
            && Ecosystem.JAVA.equals(dependency2.getEcosystem())) {
        final String name1 = dependency1.getActualFile().getName();
        final String name2 = dependency2.getActualFile().getName();
        // Symmetric check: either argument may be the .aar.
        if ("classes.jar".equals(name2)
                && "aar".equals(FileUtils.getFileExtension(name1))
                && dependency2.getFileName().contains(name1)) {
            return dependency1;
        }
        if ("classes.jar".equals(name1)
                && "aar".equals(FileUtils.getFileExtension(name2))
                && dependency1.getFileName().contains(name2)) {
            return dependency2;
        }
    }
    return null;
}
/**
 * Extracts classes.jar from a real .aar via the ArchiveAnalyzer and checks
 * that the merging analyzer picks the .aar as the main dependency.
 */
@Test
public void testGetMainAndroidDependency() throws Exception {
    ArchiveAnalyzer aa = null;
    try (Engine engine = new Engine(Engine.Mode.EVIDENCE_COLLECTION, getSettings())) {
        Dependency dependency1 = new Dependency(BaseTest.getResourceAsFile(this, "aar-1.0.0.aar"));
        dependency1.setEcosystem(Ecosystem.JAVA);
        aa = new ArchiveAnalyzer();
        aa.initialize(getSettings());
        aa.accept(dependency1.getActualFile());
        aa.prepareAnalyzer(engine);
        aa.analyze(dependency1, engine);
        // Find the classes.jar the archive analyzer extracted from the .aar.
        Dependency dependency2 = null;
        for (Dependency d : engine.getDependencies()) {
            if ("classes.jar".equals(d.getActualFile().getName())) {
                dependency2 = d;
                break;
            }
        }
        assertNotNull("classes.jar was not found", dependency2);
        dependency2.setEcosystem(Ecosystem.JAVA);
        DependencyMergingAnalyzer instance = new DependencyMergingAnalyzer();
        Dependency expResult = dependency1;
        Dependency result = instance.getMainAndroidDependency(dependency1, dependency2);
        assertEquals(expResult, result);
    } finally {
        if (aa != null) {
            aa.closeAnalyzer();
        }
    }
}
/**
 * Delegates to the wrapped store and, only when the key was absent (i.e. the
 * put took effect), logs the change. A null new value falls back to the
 * context timestamp since there is no embedded timestamp to extract.
 */
@Override
public byte[] putIfAbsent(final Bytes key, final byte[] valueAndTimestamp) {
    final byte[] previous = wrapped().putIfAbsent(key, valueAndTimestamp);
    if (previous == null) {
        // then it was absent
        log(key, rawValue(valueAndTimestamp), valueAndTimestamp == null ? context.timestamp() : timestamp(valueAndTimestamp));
    }
    return previous;
}
/** With no previous value, putIfAbsent must write through to the inner store. */
@Test
public void shouldWriteToInnerOnPutIfAbsentNoPreviousValue() {
    store.putIfAbsent(hi, rawThere);
    assertThat(root.get(hi), equalTo(rawThere));
}
/**
 * Returns the gRPC port offset for SDK clients. The value can be overridden
 * via the system property named by
 * {@code GrpcConstants.NACOS_SERVER_GRPC_PORT_OFFSET_KEY}; otherwise the SDK
 * default offset is used.
 */
@Override
public int rpcPortOffset() {
    final String configured = System.getProperty(
            GrpcConstants.NACOS_SERVER_GRPC_PORT_OFFSET_KEY,
            String.valueOf(Constants.SDK_GRPC_PORT_DEFAULT_OFFSET));
    return Integer.parseInt(configured);
}
/** Without the override system property, the SDK default offset (1000) applies. */
@Test
void testRpcPortOffsetDefault() {
    grpcSdkClient = new GrpcSdkClient("test");
    assertEquals(1000, grpcSdkClient.rpcPortOffset());
}
/**
 * Returns all paths between src and dst using the default (null) link
 * weigher; the cast disambiguates the overloaded superclass method.
 */
@Override
public Set<Path> getPaths(ElementId src, ElementId dst) {
    return super.getPaths(src, dst, (LinkWeigher) null);
}
/** A null source element must be rejected with a NullPointerException. */
@Test(expected = NullPointerException.class)
public void testGetPathsWithNullSrc() {
    VirtualNetwork vnet = setupEmptyVnet();
    PathService pathService = manager.get(vnet.id(), PathService.class);
    pathService.getPaths(null, DID3);
}
/**
 * String-based convenience overload: parses the topic name and delegates to
 * {@code isSystemTopic(TopicName)}.
 */
public boolean isSystemTopic(String topic) {
    return isSystemTopic(TopicName.get(topic));
}
/**
 * Exercises the system-topic classification: plain user topics are not
 * system topics; change-event, transaction, pending-ack, coordinator and
 * heartbeat health-check topics are.
 */
@Test
public void testIsSystemTopic() {
    BrokerService brokerService = pulsar.getBrokerService();
    // Ordinary user topics in various forms are not system topics.
    assertFalse(brokerService.isSystemTopic(TopicName.get("test")));
    assertFalse(brokerService.isSystemTopic(TopicName.get("public/default/test")));
    assertFalse(brokerService.isSystemTopic(TopicName.get("healthcheck")));
    assertFalse(brokerService.isSystemTopic(TopicName.get("public/default/healthcheck")));
    assertFalse(brokerService.isSystemTopic(TopicName.get("persistent://public/default/test")));
    assertFalse(brokerService.isSystemTopic(TopicName.get("non-persistent://public/default/test")));
    // Change-event topics (including partitions) are system topics.
    assertTrue(brokerService.isSystemTopic(TopicName.get("__change_events")));
    assertTrue(brokerService.isSystemTopic(TopicName.get("__change_events-partition-0")));
    assertTrue(brokerService.isSystemTopic(TopicName.get("__change_events-partition-1")));
    assertTrue(brokerService.isSystemTopic(TopicName.get("__transaction_buffer_snapshot")));
    assertTrue(brokerService.isSystemTopic(TopicName.get("__transaction_buffer_snapshot-partition-0")));
    assertTrue(brokerService.isSystemTopic(TopicName.get("__transaction_buffer_snapshot-partition-1")));
    // Pending-ack suffix marks a system topic regardless of the base name.
    assertTrue(brokerService.isSystemTopic(TopicName
            .get("topicxxx-partition-0-multiTopicsReader-f433329d68__transaction_pending_ack")));
    assertTrue(brokerService.isSystemTopic(
            TopicName.get("topicxxx-multiTopicsReader-f433329d68__transaction_pending_ack")));
    assertTrue(brokerService.isSystemTopic(TRANSACTION_COORDINATOR_ASSIGN));
    assertTrue(brokerService.isSystemTopic(TRANSACTION_COORDINATOR_LOG));
    // Health-check topics in heartbeat namespaces are system topics.
    NamespaceName heartbeatNamespaceV1 = NamespaceService
            .getHeartbeatNamespace(pulsar.getBrokerId(), pulsar.getConfig());
    NamespaceName heartbeatNamespaceV2 = NamespaceService
            .getHeartbeatNamespaceV2(pulsar.getBrokerId(), pulsar.getConfig());
    assertTrue(brokerService.isSystemTopic("persistent://" + heartbeatNamespaceV1.toString() + "/healthcheck"));
    assertTrue(brokerService.isSystemTopic(heartbeatNamespaceV2.toString() + "/healthcheck"));
}
/**
 * Schedules the periodic {@code checks} task on the executor with the
 * configured initial and repeat delays (in seconds).
 */
@Override
public void start() {
    executorService.scheduleAtFixedRate(this::checks, getInitialDelay(), getEnqueueDelay(), SECONDS);
}
/**
 * If acquiring the lock throws, the scheduled task must swallow the error
 * and not touch the database.
 */
@Test
public void doNothingIfExceptionIsThrown() {
    when(lockManager.tryLock(any(), anyInt())).thenThrow(new IllegalArgumentException("Oops"));
    underTest.start();
    executorService.runCommand();
    verifyNoInteractions(dbClient);
}
/**
 * Sets a variable-expiry policy. May be called at most once, and is mutually
 * exclusive with expireAfterAccess/expireAfterWrite — violations throw
 * IllegalStateException. Returns this builder, narrowed to the expiry's
 * key/value bounds.
 */
@CanIgnoreReturnValue
public <K1 extends K, V1 extends V> Caffeine<K1, V1> expireAfter(
        Expiry<? super K1, ? super V1> expiry) {
    requireNonNull(expiry);
    requireState(this.expiry == null, "Expiry was already set to %s", this.expiry);
    requireState(this.expireAfterAccessNanos == UNSET_INT,
            "Expiry may not be used with expiresAfterAccess");
    requireState(this.expireAfterWriteNanos == UNSET_INT,
            "Expiry may not be used with expiresAfterWrite");
    // Safe: the builder is covariant in practice; only the generic bounds narrow.
    @SuppressWarnings("unchecked")
    Caffeine<K1, V1> self = (Caffeine<K1, V1>) this;
    self.expiry = expiry;
    return self;
}
/** Setting a custom Expiry stores it on the builder and still allows build(). */
@Test
public void expireAfter() {
    var builder = Caffeine.newBuilder().expireAfter(expiry);
    assertThat(builder.expiry).isSameInstanceAs(expiry);
    assertThat(builder.build()).isNotNull();
}
/**
 * Injects schema information into CREATE SOURCE / CREATE AS SELECT
 * statements; all other statement types pass through untouched. KSQL
 * statement exceptions are rethrown as-is; other KSQL errors are wrapped
 * with the statement text for context.
 */
@SuppressWarnings("unchecked")
@Override
public <T extends Statement> ConfiguredStatement<T> inject(
        final ConfiguredStatement<T> statement
) {
    if (!(statement.getStatement() instanceof CreateSource)
            && !(statement.getStatement() instanceof CreateAsSelect)) {
        return statement;
    }
    try {
        if (statement.getStatement() instanceof CreateSource) {
            final ConfiguredStatement<CreateSource> createStatement =
                    (ConfiguredStatement<CreateSource>) statement;
            // If no injection applies, fall back to the original statement.
            return (ConfiguredStatement<T>) forCreateStatement(createStatement).orElse(createStatement);
        } else {
            final ConfiguredStatement<CreateAsSelect> createStatement =
                    (ConfiguredStatement<CreateAsSelect>) statement;
            return (ConfiguredStatement<T>) forCreateAsStatement(createStatement).orElse(
                    createStatement);
        }
    } catch (final KsqlStatementException e) {
        // Already carries statement context — propagate unchanged.
        throw e;
    } catch (final KsqlException e) {
        throw new KsqlStatementException(
                ErrorMessageUtil.buildErrorMessage(e),
                statement.getMaskedStatementText(),
                e.getCause());
    }
}
/**
 * The injector must not invent key/value schema ids when the original
 * statement did not specify any.
 */
@Test
public void shouldNotAddSchemaIdsIfNotPresentAlready() {
    // Given:
    givenKeyAndValueInferenceSupported();

    // When:
    final ConfiguredStatement<CreateStream> result = injector.inject(csStatement);

    // Then:
    assertFalse(result.getStatement().getProperties().getKeySchemaId().isPresent());
    assertFalse(result.getStatement().getProperties().getValueSchemaId().isPresent());
}
/**
 * Evaluates the expression against the given variable context with no extra
 * resources, delegating to the three-argument overload.
 */
public static Object eval(String expression, Map<String, Object> context) {
    return eval(expression, context, ListUtil.empty());
}
/**
 * A JEXL script (statement form with 'return') must evaluate against the
 * context: with a=1, "if(a>0){return 100;}" yields 100.
 */
@Test
public void jexlScriptTest(){
    final ExpressionEngine engine = new JexlEngine();
    final String exps2="if(a>0){return 100;}";
    final Map<String,Object> map2=new HashMap<>();
    map2.put("a", 1);
    final Object eval1 = engine.eval(exps2, map2, null);
    assertEquals(100, eval1);
}
/** Static factory: wraps the given Retry in a reactive operator. */
public static <T> RetryOperator<T> of(Retry retry) {
    return new RetryOperator<>(retry);
}
/**
 * An exception rejected by the retry predicate (HelloWorldException when only
 * IOException retries) must propagate after a single call, with metrics
 * recording one failed call without a retry attempt.
 */
@Test
public void doNotRetryFromPredicateUsingMono() {
    RetryConfig config = RetryConfig.custom()
            .retryOnException(t -> t instanceof IOException)
            .waitDuration(Duration.ofMillis(50))
            .maxAttempts(3).build();
    Retry retry = Retry.of("testName", config);
    given(helloWorldService.returnHelloWorld())
            .willThrow(new HelloWorldException());

    StepVerifier.create(Mono.fromCallable(helloWorldService::returnHelloWorld)
            .transformDeferred(RetryOperator.of(retry)))
            .expectSubscription()
            .expectError(HelloWorldException.class)
            .verify(Duration.ofSeconds(1));

    // Exactly one invocation — the predicate suppressed any retries.
    then(helloWorldService).should().returnHelloWorld();
    Retry.Metrics metrics = retry.getMetrics();
    assertThat(metrics.getNumberOfFailedCallsWithoutRetryAttempt()).isEqualTo(1);
    assertThat(metrics.getNumberOfFailedCallsWithRetryAttempt()).isZero();
}
@Override
public <T> Mono<T> run(Mono<T> toRun, Function<Throwable, Mono<T>> fallback) {
    // Guard the sequence with a Sentinel entry for this resource.
    final Mono<T> guarded = toRun.transform(
            new SentinelReactorTransformer<>(new EntryConfig(resourceName, entryType)));
    // With no fallback supplied, errors propagate to the subscriber unchanged.
    return fallback == null ? guarded : guarded.onErrorResume(fallback);
}
// A null entry in the rule list must be tolerated: values pass through the
// circuit breaker unchanged and no degrade rule gets registered.
@Test
public void testCreateWithNullRule() {
    String id = "testCreateReactiveCbWithNullRule";
    ReactiveSentinelCircuitBreaker cb = new ReactiveSentinelCircuitBreaker(id,
        Collections.singletonList(null));
    assertThat(Mono.just("foobar").transform(it -> cb.run(it)).block())
        .isEqualTo("foobar");
    assertThat(DegradeRuleManager.hasConfig(id)).isFalse();
}
/**
 * Convenience overload: resolves the exception-handling strategy for the given
 * log table without a triggering exception (delegates with a null exception).
 */
public static LogExceptionBehaviourInterface getExceptionStrategy( LogTableCoreInterface table ) {
    return getExceptionStrategy( table, null );
}
// Without a triggering exception, the factory must resolve to the suppressable
// strategy implementation for this log table.
@Test
public void testGetExceptionStrategyWithoutException() {
    LogExceptionBehaviourInterface exceptionStrategy =
        DatabaseLogExceptionFactory.getExceptionStrategy( logTable );
    String strategyName = exceptionStrategy.getClass().getName();
    assertEquals( SUPPRESSABLE, strategyName );
}
@Override @CacheEvict(value = RedisKeyConstants.PERMISSION_MENU_ID_LIST, allEntries = true) // allEntries 清空所有缓存,因为 permission 如果变更,涉及到新老两个 permission。直接清理,简单有效 public void updateMenu(MenuSaveVO updateReqVO) { // 校验更新的菜单是否存在 if (menuMapper.selectById(updateReqVO.getId()) == null) { throw exception(MENU_NOT_EXISTS); } // 校验父菜单存在 validateParentMenu(updateReqVO.getParentId(), updateReqVO.getId()); // 校验菜单(自己) validateMenu(updateReqVO.getParentId(), updateReqVO.getName(), updateReqVO.getId()); // 更新到数据库 MenuDO updateObj = BeanUtils.toBean(updateReqVO, MenuDO.class); initMenuProperty(updateObj); menuMapper.updateById(updateObj); }
@Test
public void testUpdateMenu_success() {
    // Mock data (build a parent/child menu pair)
    MenuDO sonMenuDO = createParentAndSonMenu();
    Long sonId = sonMenuDO.getId();
    // Prepare the request
    MenuSaveVO reqVO = randomPojo(MenuSaveVO.class, o -> {
        o.setId(sonId);
        o.setName("testSonName"); // change the name
        o.setParentId(sonMenuDO.getParentId());
        o.setType(MenuTypeEnum.MENU.getType());
    });
    // Invoke
    menuService.updateMenu(reqVO);
    // Verify the persisted record matches the request
    MenuDO dbMenu = menuMapper.selectById(sonId);
    assertPojoEquals(reqVO, dbMenu);
}
/**
 * Parses the JSON text and returns the value addressed by the given JSON
 * Pointer. Delegates directly to the wrapped parser implementation.
 */
public Value parseWithOffsetInJsonPointer(String json, String offsetInJsonPointer) {
    return this.delegate.parseWithOffsetInJsonPointer(json, offsetInJsonPointer);
}
// JSON Pointer "/a/b" must navigate the nested object and yield the integer 1.
@Test
public void testParseWithPointer1() throws Exception {
    final JsonParser parser = new JsonParser();
    final Value msgpackValue = parser.parseWithOffsetInJsonPointer("{\"a\": {\"b\": 1}}", "/a/b");
    assertEquals(1, msgpackValue.asIntegerValue().asInt());
}
/**
 * Reads the configuration value stored under the given key/group.
 * Delegates directly to the wrapped ZooKeeper client.
 */
public String getNode(String key, String group) {
    return zkClient.getConfig(key, group);
}
// Reading PARENT_PATH with a null group must return the stored node content.
@Test
public void testGetNode() {
    String result = zooKeeperBufferedClient.getNode(PARENT_PATH, null);
    Assert.assertEquals(NODE_CONTENT, result);
}
/** Static factory for the AutoValue-generated DLPReidentifyText builder. */
public static DLPReidentifyText.Builder newBuilder() {
    return new AutoValue_DLPReidentifyText.Builder();
}
// The builder must reject a configuration where header columns are provided
// but no column delimiter is set.
@Test
public void throwsExceptionWhenDelimiterIsNullAndHeadersAreSet() {
    PCollectionView<List<String>> header =
        testPipeline.apply(Create.of("header")).apply(View.asList());
    assertThrows(
        "Column delimiter should be set if headers are present.",
        IllegalArgumentException.class,
        () ->
            DLPReidentifyText.newBuilder()
                .setProjectId(PROJECT_ID)
                .setBatchSizeBytes(BATCH_SIZE_SMALL)
                .setReidentifyTemplateName(TEMPLATE_NAME)
                .setHeaderColumns(header)
                .build());
    testPipeline.run().waitUntilFinish();
}
@Override public void invoke(IN value, Context context) throws Exception { bufferLock.lock(); try { // TODO this implementation is not very effective, // optimize this with MemorySegment if needed ByteArrayOutputStream baos = new ByteArrayOutputStream(); DataOutputViewStreamWrapper wrapper = new DataOutputViewStreamWrapper(baos); serializer.serialize(value, wrapper); invokingRecordBytes = baos.size(); if (invokingRecordBytes > maxBytesPerBatch) { throw new RuntimeException( "Record size is too large for CollectSinkFunction. Record size is " + invokingRecordBytes + " bytes, " + "but max bytes per batch is only " + maxBytesPerBatch + " bytes. " + "Please consider increasing max bytes per batch value by setting " + CollectSinkOperatorFactory.MAX_BATCH_SIZE.key()); } if (currentBufferBytes + invokingRecordBytes > bufferSizeLimitBytes) { bufferCanAddNextResultCondition.await(); } buffer.add(baos.toByteArray()); currentBufferBytes += baos.size(); } finally { bufferLock.unlock(); } }
// Re-sending the same request offset must replay the same uncommitted data: the
// second request at offset 4 returns [4, 5] again instead of advancing.
@Test
void testDuplicatedToken() throws Exception {
    functionWrapper.openFunction();
    for (int i = 0; i < 6; i++) {
        functionWrapper.invoke(i);
    }
    String version = initializeVersion();

    CollectCoordinationResponse response;
    response = functionWrapper.sendRequestAndGetResponse(version, 0);
    assertResponseEquals(response, version, 0, Arrays.asList(0, 1, 2));
    response = functionWrapper.sendRequestAndGetResponse(version, 4);
    assertResponseEquals(response, version, 0, Arrays.asList(4, 5));
    // Duplicate request with the same offset must be idempotent.
    response = functionWrapper.sendRequestAndGetResponse(version, 4);
    assertResponseEquals(response, version, 0, Arrays.asList(4, 5));

    functionWrapper.closeFunctionNormally();
}
/**
 * Handles GET for a single tenant: extracts the tenant name from the path,
 * responds 404 (RestApiException.NotFound) when the tenant does not exist,
 * otherwise returns a TenantGetResponse.
 */
private HttpResponse getTenant(RestApi.RequestContext context) {
    TenantName name = TenantName.from(context.pathParameters().getStringOrThrow("tenant"));
    if ( ! tenantRepository.checkThatTenantExists(name))
        throw new RestApiException.NotFound("Tenant '" + name + "' was not found.");
    return new TenantGetResponse(name);
}
// Tenant names may mix upper/lower case letters, digits, '-' and '_'; a PUT
// with such a name must create the tenant successfully.
@Test
public void testTenantCreateWithAllPossibleCharactersInName() throws Exception {
    TenantName tenantName = TenantName.from("aB-9999_foo");
    assertNull(tenantRepository.getTenant(tenantName));
    assertResponse(PUT, "/application/v2/tenant/aB-9999_foo",
                   "{\"message\":\"Tenant " + tenantName + " created.\"}");
}
/**
 * Parses the request payload from the buffer. Unsupported ApiVersions requests
 * are special-cased: they are treated as an empty v0 ApiVersionsRequest and
 * the buffer is not parsed at all. Any failure while parsing is rethrown as
 * InvalidRequestException with apiKey/connection/principal context attached.
 */
public RequestAndSize parseRequest(ByteBuffer buffer) {
    if (isUnsupportedApiVersionsRequest()) {
        // Unsupported ApiVersion requests are treated as v0 requests and are not parsed
        ApiVersionsRequest apiVersionsRequest =
            new ApiVersionsRequest(new ApiVersionsRequestData(), (short) 0, header.apiVersion());
        return new RequestAndSize(apiVersionsRequest, 0);
    } else {
        ApiKeys apiKey = header.apiKey();
        try {
            short apiVersion = header.apiVersion();
            return AbstractRequest.parseRequest(apiKey, apiVersion, buffer);
        } catch (Throwable ex) {
            // Wrap with enough context to identify the offending connection.
            throw new InvalidRequestException("Error getting request for apiKey: " + apiKey +
                    ", apiVersion: " + header.apiVersion() +
                    ", connectionId: " + connectionId +
                    ", listenerName: " + listenerName +
                    ", principal: " + principal, ex);
        }
    }
}
// Corrupting the encoded topics-array length to a huge value must surface as an
// InvalidRequestException whose cause reports the impossible allocation size.
@Test
public void testInvalidRequestForImplicitHashCollection() throws UnknownHostException {
    short version = (short) 5; // choose a version with fixed length encoding, for simplicity
    ByteBuffer corruptBuffer = produceRequest(version);
    // corrupt the length of the topics array
    corruptBuffer.putInt(8, (Integer.MAX_VALUE - 1) / 2);
    RequestHeader header = new RequestHeader(ApiKeys.PRODUCE, version, "console-producer", 3);
    RequestContext context = new RequestContext(header, "0", InetAddress.getLocalHost(),
        KafkaPrincipal.ANONYMOUS, new ListenerName("ssl"), SecurityProtocol.SASL_SSL,
        ClientInformation.EMPTY, true);
    String msg = assertThrows(InvalidRequestException.class,
        () -> context.parseRequest(corruptBuffer)).getCause().getMessage();
    assertEquals("Tried to allocate a collection of size 1073741823, but there are only 17 bytes remaining.", msg);
}
/**
 * Publishes data to an arbitrary Camel URI by lazily creating (and caching,
 * keyed by URI) a bridge route from a freshly generated reactive-streams
 * endpoint to that URI.
 * NOTE(review): the computeIfAbsent mapping function registers a route as a
 * side effect; failures are wrapped in IllegalStateException.
 */
@Override
public Publisher<Exchange> to(String uri, Object data) {
    String streamName = requestedUriToStream.computeIfAbsent(uri, camelUri -> {
        try {
            String uuid = context.getUuidGenerator().generateUuid();
            context.addRoutes(new RouteBuilder() {
                @Override
                public void configure() throws Exception {
                    from("reactive-streams:" + uuid)
                            .to(camelUri);
                }
            });
            return uuid;
        } catch (Exception e) {
            throw new IllegalStateException("Unable to create requested reactive stream from direct URI: " + uri, e);
        }
    });
    return toStream(streamName, data);
}
// Sending 1..3 to the "bean:hello" endpoint must produce the three greeting
// strings; a latch bounds the asynchronous wait.
@Test
public void testToWithExchange() throws Exception {
    context.start();

    Set<String> values = Collections.synchronizedSet(new TreeSet<>());
    CountDownLatch latch = new CountDownLatch(3);

    Flux.just(1, 2, 3)
            .flatMap(e -> crs.to("bean:hello", e))
            .map(Exchange::getMessage)
            .map(e -> e.getBody(String.class))
            .doOnNext(values::add)
            .doOnNext(res -> latch.countDown())
            .subscribe();

    assertTrue(latch.await(2, TimeUnit.SECONDS));
    assertEquals(new TreeSet<>(Arrays.asList("Hello 1", "Hello 2", "Hello 3")), values);
}
/** True when this authorization's view configuration differs from an empty default. */
public boolean hasViewPermissionDefined() {
    final ViewConfig emptyViewConfig = new ViewConfig();
    return !viewConfig.equals(emptyViewConfig);
}
// An authorization built from an empty ViewConfig must report that no view
// permission is defined.
@Test
public void shouldReturnFalseIfViewPermissionNotDefined() {
    Authorization authorization = new Authorization(new ViewConfig());
    assertThat(authorization.hasViewPermissionDefined(), is(false));
}
/**
 * Operator lifecycle hook: materializes the executable stage from its payload,
 * wires user state, bundle factories, state/progress/finalization/checkpoint
 * handlers and bundle callbacks, then delegates to super.open() which builds
 * the wrapping DoFn runner from these dependencies.
 */
@Override
public void open() throws Exception {
    executableStage = ExecutableStage.fromPayload(payload);
    hasSdfProcessFn = hasSDF(executableStage);
    initializeUserState(executableStage, getKeyedStateBackend(), pipelineOptions);
    // TODO: Wire this into the distributed cache and make it pluggable.
    // TODO: Do we really want this layer of indirection when accessing the stage bundle factory?
    // It's a little strange because this operator is responsible for the lifetime of the stage
    // bundle "factory" (manager?) but not the job or Flink bundle factories. How do we make
    // ownership of the higher level "factories" explicit? Do we care?
    stageContext = contextFactory.get(jobInfo);
    stageBundleFactory = stageContext.getStageBundleFactory(executableStage);
    stateRequestHandler = getStateRequestHandler(executableStage);
    // Progress/completion both forward monitoring infos into the Flink metric
    // container when one is configured.
    progressHandler =
        new BundleProgressHandler() {
            @Override
            public void onProgress(ProcessBundleProgressResponse progress) {
                if (flinkMetricContainer != null) {
                    flinkMetricContainer.updateMetrics(stepName, progress.getMonitoringInfosList());
                }
            }

            @Override
            public void onCompleted(ProcessBundleResponse response) {
                if (flinkMetricContainer != null) {
                    flinkMetricContainer.updateMetrics(stepName, response.getMonitoringInfosList());
                }
            }
        };

    finalizationHandler =
        BundleFinalizationHandlers.inMemoryFinalizer(
            stageBundleFactory.getInstructionRequestHandler());
    checkpointHandler = getBundleCheckpointHandler(hasSdfProcessFn);
    // No event-time timer seen yet in either the current or the last bundle.
    minEventTimeTimerTimestampInCurrentBundle = Long.MAX_VALUE;
    minEventTimeTimerTimestampInLastBundle = Long.MAX_VALUE;

    super.setPreBundleCallback(this::preBundleStartCallback);
    super.setBundleFinishedCallback(this::finishBundleCallback);

    // This will call {@code createWrappingDoFnRunner} which needs the above dependencies.
    super.open();
}
// Every element processed by the operator must be forwarded, in order, to the
// remote bundle's input receiver — and nothing else.
@Test
public void expectedInputsAreSent() throws Exception {
    TupleTag<Integer> mainOutput = new TupleTag<>("main-output");
    DoFnOperator.MultiOutputOutputManagerFactory<Integer> outputManagerFactory =
        new DoFnOperator.MultiOutputOutputManagerFactory(
            mainOutput, VoidCoder.of(),
            new SerializablePipelineOptions(FlinkPipelineOptions.defaults()));
    ExecutableStageDoFnOperator<Integer, Integer> operator =
        getOperator(mainOutput, Collections.emptyList(), outputManagerFactory);

    @SuppressWarnings("unchecked")
    RemoteBundle bundle = Mockito.mock(RemoteBundle.class);
    when(stageBundleFactory.getBundle(any(), any(), any(), any(), any(), any())).thenReturn(bundle);

    @SuppressWarnings("unchecked")
    FnDataReceiver<WindowedValue<?>> receiver = Mockito.mock(FnDataReceiver.class);
    when(bundle.getInputReceivers()).thenReturn(ImmutableMap.of("input", receiver));

    WindowedValue<Integer> one = WindowedValue.valueInGlobalWindow(1);
    WindowedValue<Integer> two = WindowedValue.valueInGlobalWindow(2);
    WindowedValue<Integer> three = WindowedValue.valueInGlobalWindow(3);

    OneInputStreamOperatorTestHarness<WindowedValue<Integer>, WindowedValue<Integer>> testHarness =
        new OneInputStreamOperatorTestHarness<>(operator);

    testHarness.open();

    testHarness.processElement(new StreamRecord<>(one));
    testHarness.processElement(new StreamRecord<>(two));
    testHarness.processElement(new StreamRecord<>(three));

    verify(receiver).accept(one);
    verify(receiver).accept(two);
    verify(receiver).accept(three);
    verifyNoMoreInteractions(receiver);

    testHarness.close();
}
/**
 * Writes a null/not-null marker byte for the object. Returns true when the
 * object was null (nothing further to serialize), false otherwise.
 */
@Override
public boolean writeRefOrNull(MemoryBuffer buffer, Object obj) {
    final boolean isNull = (obj == null);
    buffer.writeByte(isNull ? Fury.NULL_FLAG : Fury.NOT_NULL_VALUE_FLAG);
    return isNull;
}
// NoRefResolver never tracks identity: null always yields true, any non-null
// object (even the same instance repeated) always yields false.
@Test
public void testWriteRefOrNull() {
    NoRefResolver referenceResolver = new NoRefResolver();
    MemoryBuffer buffer = MemoryBuffer.newHeapBuffer(32);
    assertTrue(referenceResolver.writeRefOrNull(buffer, null));
    assertFalse(referenceResolver.writeRefOrNull(buffer, new Object()));
    Object o = new Object();
    assertFalse(referenceResolver.writeRefOrNull(buffer, o));
    assertFalse(referenceResolver.writeRefOrNull(buffer, o));
    assertFalse(referenceResolver.writeNullFlag(buffer, o));
    assertTrue(referenceResolver.writeNullFlag(buffer, null));
}
/**
 * Builds a JAAS login configuration entry for keytab-based Kerberos login.
 * Option names differ between the IBM JDK and other JDKs; debug options are
 * appended last so they can override defaults.
 *
 * @param keytab path to the keytab file (must not be null)
 * @param principal Kerberos principal (must not be null)
 */
public static AppConfigurationEntry keytabEntry(String keytab, String principal) {
    checkNotNull(keytab, "keytab");
    checkNotNull(principal, "principal");

    final Map<String, String> options = new HashMap<>();
    if (IBM_JAVA) {
        // IBM JDK uses its own option names and a file URI for the keytab.
        options.put("useKeytab", prependFileUri(keytab));
        options.put("credsType", "both");
    } else {
        options.put("keyTab", keytab);
        options.put("doNotPrompt", "true");
        options.put("useKeyTab", "true");
        options.put("storeKey", "true");
    }
    options.put("principal", principal);
    options.put("refreshKrb5Config", "true");
    options.putAll(debugOptions);

    return new AppConfigurationEntry(
            getKrb5LoginModuleName(),
            AppConfigurationEntry.LoginModuleControlFlag.REQUIRED,
            options);
}
// Smoke test: building a keytab entry from a keytab path and principal must
// yield a non-null configuration entry.
@Test
public void testKeytabEntry() {
    String keytab = "user.keytab";
    String principal = "user";
    AppConfigurationEntry entry = KerberosUtils.keytabEntry(keytab, principal);
    assertNotNull(entry);
}
/**
 * Parses a SQL Server JDBC URL into its components. Returns null when the URL
 * has no "://" marker or no server name can be determined. The host section
 * ("host[:port][\instance]") overrides any serverName property; the database
 * comes from the databaseName/database property; all other properties are
 * rebuilt into the suffix.
 */
public static JdbcUrlUtil.UrlInfo parse(String url) {
    String serverName = "";
    Integer port = DEFAULT_PORT;
    String dbInstance = null;
    int hostIndex = url.indexOf("://");
    if (hostIndex <= 0) {
        return null;
    }

    // Everything after the first ';' is the property list, e.g.
    // "databaseName=myDB;encrypt=true".
    Map<String, String> props = Collections.emptyMap();
    String[] split = url.split(";", 2);
    if (split.length > 1) {
        props = parseQueryParams(split[1], ";");
        serverName = props.get("serverName");
        dbInstance = props.getOrDefault("databaseName", props.get("database"));
        if (props.containsKey("portNumber")) {
            String portNumber = props.get("portNumber");
            try {
                port = Integer.parseInt(portNumber);
            } catch (NumberFormatException e) {
                // Best effort: an unparsable portNumber keeps DEFAULT_PORT.
            }
        }
    }

    // Host part of the URL overrides the serverName property.
    String urlServerName = split[0].substring(hostIndex + 3);
    if (!urlServerName.isEmpty()) {
        serverName = urlServerName;
    }
    int portLoc = serverName.indexOf(":");
    if (portLoc > 1) {
        port = Integer.parseInt(serverName.substring(portLoc + 1));
        serverName = serverName.substring(0, portLoc);
    }
    // Strip a named-instance suffix ("host\instance").
    int instanceLoc = serverName.indexOf("\\");
    if (instanceLoc > 1) {
        serverName = serverName.substring(0, instanceLoc);
    }
    if (serverName.isEmpty()) {
        return null;
    }

    // Rebuild the property suffix without the database name entries.
    // Fix: Collectors.joining never returns null, so the former
    // Optional.ofNullable(suffix).orElse("") wrapper was dead code.
    String suffix = props.entrySet().stream()
            .filter(e -> !e.getKey().equals("databaseName") && !e.getKey().equals("database"))
            .map(e -> e.getKey() + "=" + e.getValue())
            .collect(Collectors.joining(";"));

    return new JdbcUrlUtil.UrlInfo(
            url,
            String.format("jdbc:sqlserver://%s:%s", serverName, port) + ";" + suffix,
            serverName,
            port,
            dbInstance,
            suffix);
}
// Full round trip: host/port extraction, database name isolation, and a suffix
// that excludes the databaseName property.
@Test
public void testParse() {
    String url =
        "jdbc:sqlserver://localhost:1433;databaseName=myDB;encrypt=true;trustServerCertificate=false;loginTimeout=30;";
    JdbcUrlUtil.UrlInfo urlInfo = SqlServerURLParser.parse(url);
    assertEquals("localhost", urlInfo.getHost());
    assertEquals(1433, urlInfo.getPort());
    assertEquals(url, urlInfo.getOrigin());
    assertEquals(
        "encrypt=true;trustServerCertificate=false;loginTimeout=30", urlInfo.getSuffix());
    assertEquals("myDB", urlInfo.getDefaultDatabase().get());
    assertEquals(
        "jdbc:sqlserver://localhost:1433;encrypt=true;trustServerCertificate=false;loginTimeout=30",
        urlInfo.getUrlWithoutDatabase());
}
/**
 * Returns every prefix of the cleaned path that could be a mount point,
 * ordered from shortest ("/a") to longest (the path itself). The root
 * separator itself is never a candidate, so "/" yields an empty list.
 *
 * @throws InvalidPathException if the path cannot be cleaned
 */
public static List<String> getPossibleMountPoints(String path) throws InvalidPathException {
    String basePath = cleanPath(path);
    List<String> paths = new ArrayList<>();
    if ((basePath != null) && !basePath.equals(AlluxioURI.SEPARATOR)) {
        paths.add(basePath);
        // Fix: walk up from the cleaned path rather than the raw input so that
        // trailing slashes or redundant separators in 'path' cannot produce a
        // parent chain inconsistent with the cleaned entry added above.
        String parent = getParent(basePath);
        while (!parent.equals(AlluxioURI.SEPARATOR)) {
            // Insert at the front to keep ancestors ordered shortest-first.
            paths.add(0, parent);
            parent = getParent(parent);
        }
    }
    return paths;
}
// Trailing slashes must not change the result: "/a/b/" produces the same
// ancestor chain as "/a/b", and the root itself yields an empty list.
@Test
public void getPossibleMountPointsNoException() throws InvalidPathException {
    ArrayList<String> paths = new ArrayList<>();
    assertEquals(paths, PathUtils.getPossibleMountPoints("/"));
    assertEquals(paths, PathUtils.getPossibleMountPoints("//"));

    paths.add("/a");
    assertEquals(paths, PathUtils.getPossibleMountPoints("/a"));
    assertEquals(paths, PathUtils.getPossibleMountPoints("/a/"));

    paths.add("/a/b");
    assertEquals(paths, PathUtils.getPossibleMountPoints("/a/b"));
    assertEquals(paths, PathUtils.getPossibleMountPoints("/a/b/"));

    paths.add("/a/b/c");
    assertEquals(paths, PathUtils.getPossibleMountPoints("/a/b/c"));
    assertEquals(paths, PathUtils.getPossibleMountPoints("/a/b/c/"));
}
/**
 * Creates a copy of the given record field with its schema replaced by
 * {@code fieldSchemaToReplace}. Aliases, default, doc, name, order, properties
 * and the optional flag are carried over from the original. The owning record
 * is NOT set on the copy; callers must invoke setRecord(...) themselves.
 */
public static RecordDataSchema.Field copyField(RecordDataSchema.Field originalField, DataSchema fieldSchemaToReplace) {
    RecordDataSchema.Field newField = new RecordDataSchema.Field(fieldSchemaToReplace);
    if (originalField.getAliases() != null) {
        newField.setAliases(originalField.getAliases(), new StringBuilder());
    }
    if (originalField.getDefault() != null) {
        newField.setDefault(originalField.getDefault());
    }
    if (originalField.getDoc() != null) {
        newField.setDoc(originalField.getDoc());
    }
    if (originalField.getName() != null) {
        newField.setName(originalField.getName(), new StringBuilder());
    }
    if (originalField.getOrder() != null) {
        newField.setOrder(originalField.getOrder());
    }
    if (originalField.getProperties() != null) {
        newField.setProperties(originalField.getProperties());
    }
    newField.setOptional(originalField.getOptional());
    return newField;
}
// Copying a field with its own schema (and then setting the record) must
// produce a field equal to the original.
@Test
public void testCopyField() throws Exception {
    RecordDataSchema fooSchema = (RecordDataSchema) TestUtil.dataSchemaFromString(fooSchemaText);
    RecordDataSchema.Field field = fooSchema.getField("intField");
    // Use old field to do the exact copy
    RecordDataSchema.Field newField = CopySchemaUtil.copyField(field, field.getType());
    newField.setRecord(field.getRecord());
    // Copy result should appear to be the same
    Assert.assertEquals(field, newField);
}
/**
 * Two pattern inspectors are redundant only when they inspect the same object
 * type AND their inspector lists are mutually redundant. Anything that is not
 * a PatternInspector is never redundant with this one.
 */
@Override
public boolean isRedundant(final Object other) {
    if (!(other instanceof PatternInspector)) {
        return false;
    }
    final PatternInspector that = (PatternInspector) other;
    final boolean sameObjectType = pattern.getObjectType()
            .getType()
            .equals(that.getPattern()
                            .getObjectType()
                            .getType());
    return sameObjectType && inspectorList.isRedundant(that.inspectorList);
}
// Redundancy must be symmetric for these two inspectors: a↔b both report true.
@Test
void testRedundancy01() throws Exception {
    assertThat(a.isRedundant(b)).isTrue();
    assertThat(b.isRedundant(a)).isTrue();
}
/** Delegates ring construction to the configured underlying ring factory. */
@Override
public Ring<T> createRing(Map<T, Integer> pointsMap) {
    return _ringFactory.createRing(pointsMap);
}
// The factory keeps stale host points until the unused-entry threshold (3) is
// reached, at which point it rebuilds the points map from the live hosts only.
@Test(groups = { "small", "back-end" })
public void testPointsCleanUp() throws URISyntaxException {
    Map<String, Integer> pointsMp = buildPointsMap(6);
    PointBasedConsistentHashRingFactory<String> ringFactory =
        new PointBasedConsistentHashRingFactory<>(new DegraderLoadBalancerStrategyConfig(1L));
    Ring<String> ring = ringFactory.createRing(pointsMp);
    assertNotNull(ring.get(1000));

    pointsMp.remove("http://test.linkedin.com:10001");
    pointsMp.remove("http://test.linkedin.com:10003");
    ring = ringFactory.createRing(pointsMp);
    assertNotNull(ring.get(1000));
    // factory should keep all the points -- the default MinUnusedEntry = 3
    Map<String, List<Point<String>>> pointsMap = ringFactory.getPointsMap();
    assertEquals(pointsMap.size(), 6);

    pointsMp.remove("http://test.linkedin.com:10004");
    pointsMp.remove("http://test.linkedin.com:10005");
    ring = ringFactory.createRing(pointsMp);
    assertNotNull(ring.get(1000));
    // factory should clean up and build new points because unused entry == 3
    pointsMap = ringFactory.getPointsMap();
    assertEquals(pointsMap.size(), 2);
}
/**
 * Returns the process-wide default FrameworkModel, creating it lazily.
 * Double-checked pattern: the first unsynchronized read is a fast path; the
 * instance is reset/created under globalLock.
 * NOTE(review): whether the defaultInstance field is volatile is not visible
 * here — the safety of the unsynchronized first read depends on it; confirm
 * at the field declaration.
 */
public static FrameworkModel defaultModel() {
    FrameworkModel instance = defaultInstance;
    if (instance == null) {
        synchronized (globalLock) {
            // Clears a destroyed default before the null check re-creates it.
            resetDefaultFrameworkModel();
            if (defaultInstance == null) {
                defaultInstance = new FrameworkModel();
            }
            instance = defaultInstance;
        }
    }
    Assert.notNull(instance, "Default FrameworkModel is null");
    return instance;
}
// The default model must be registered among all instances and carry the
// expected description derived from its internal id.
@Test
void testDefaultModel() {
    FrameworkModel frameworkModel = FrameworkModel.defaultModel();
    Assertions.assertTrue(FrameworkModel.getAllInstances().contains(frameworkModel));
    String desc = frameworkModel.getDesc();
    Assertions.assertEquals(desc, "Dubbo Framework[" + frameworkModel.getInternalId() + "]");
    frameworkModel.destroy();
}
/**
 * Initializes this SerDe: delegates to the wrapped JsonSerDe, derives the row
 * struct type, builds the HCatRecord object inspector, and extracts the HCat
 * schema. HCatException is wrapped in SerDeException per the interface
 * contract.
 */
@Override
public void initialize(Configuration configuration, Properties tableProperties, Properties partitionProperties)
    throws SerDeException {
    super.initialize(configuration, tableProperties, partitionProperties);
    jsonSerde.initialize(configuration, tableProperties, partitionProperties, false);
    StructTypeInfo rowTypeInfo = jsonSerde.getTypeInfo();
    cachedObjectInspector = HCatRecordObjectInspectorFactory.getHCatRecordObjectInspector(rowTypeInfo);
    try {
        schema = HCatSchemaUtils.getHCatSchema(rowTypeInfo).get(0).getStructSubSchema();
        LOG.debug("schema : {}", schema);
        LOG.debug("fields : {}", schema.getFieldNames());
    } catch (HCatException e) {
        throw new SerDeException(e);
    }
}
// JSON keys that differ from the column names only in case ("empId" vs
// "empid") must still be mapped onto the declared columns.
@Test
public void testUpperCaseKey() throws Exception {
    Configuration conf = new Configuration();
    Properties props = new Properties();
    props.put(serdeConstants.LIST_COLUMNS, "empid,name");
    props.put(serdeConstants.LIST_COLUMN_TYPES, "int,string");
    JsonSerDe rjsd = new JsonSerDe();
    rjsd.initialize(conf, props, null);

    Text text1 = new Text("{ \"empId\" : 123, \"name\" : \"John\" } ");
    Text text2 = new Text("{ \"empId\" : 456, \"name\" : \"Jane\" } ");
    HCatRecord expected1 = new DefaultHCatRecord(Arrays.<Object>asList(123, "John"));
    HCatRecord expected2 = new DefaultHCatRecord(Arrays.<Object>asList(456, "Jane"));

    assertTrue(HCatDataCheckUtil.recordsEqual((HCatRecord)rjsd.deserialize(text1), expected1));
    assertTrue(HCatDataCheckUtil.recordsEqual((HCatRecord)rjsd.deserialize(text2), expected2));
}
/** Static factory for the AutoValue-generated DLPReidentifyText builder. */
public static DLPReidentifyText.Builder newBuilder() {
    return new AutoValue_DLPReidentifyText.Builder();
}
// The builder must reject a batch size above the DLP payload limit.
@Test
public void throwsExceptionWhenBatchSizeIsTooLarge() {
    assertThrows(
        String.format(
            "Batch size is too large! It should be smaller or equal than %d.",
            DLPDeidentifyText.DLP_PAYLOAD_LIMIT_BYTES),
        IllegalArgumentException.class,
        () ->
            DLPReidentifyText.newBuilder()
                .setProjectId(PROJECT_ID)
                .setBatchSizeBytes(Integer.MAX_VALUE)
                .setReidentifyTemplateName(TEMPLATE_NAME)
                .setColumnDelimiter(DELIMITER)
                .build());
}
public void validate(OptionRule rule) { List<RequiredOption> requiredOptions = rule.getRequiredOptions(); for (RequiredOption requiredOption : requiredOptions) { validate(requiredOption); for (Option<?> option : requiredOption.getOptions()) { if (SingleChoiceOption.class.isAssignableFrom(option.getClass())) { // is required option and not match condition, skip validate if (isConditionOption(requiredOption) && !matchCondition( (RequiredOption.ConditionalRequiredOptions) requiredOption)) { continue; } validateSingleChoice(option); } } } for (Option option : rule.getOptionalOptions()) { if (SingleChoiceOption.class.isAssignableFrom(option.getClass())) { validateSingleChoice(option); } } }
// Conditional requirement: option.timestamp is required only while
// option.mode == TIMESTAMP; any other mode (or absence) must validate cleanly.
@Test
public void testSimpleConditionalRequiredOptionsWithDefaultValue() {
    OptionRule rule =
        OptionRule.builder()
            .optional(TEST_MODE)
            .conditional(TEST_MODE, OptionTest.TestMode.TIMESTAMP, TEST_TIMESTAMP)
            .build();
    Map<String, Object> config = new HashMap<>();
    Executable executable = () -> validate(config, rule);

    // Expression mismatch
    Assertions.assertDoesNotThrow(executable);

    // Expression match, and required options absent
    config.put(TEST_MODE.key(), "timestamp");
    assertEquals(
        "ErrorCode:[API-02], ErrorDescription:[Option item validate failed] - There are unconfigured options, the options('option.timestamp') are required"
            + " because ['option.mode' == TIMESTAMP] is true.",
        assertThrows(OptionValidationException.class, executable).getMessage());

    // Expression match, and required options all present
    config.put(TEST_TIMESTAMP.key(), "564231238596789");
    Assertions.assertDoesNotThrow(executable);

    // Expression mismatch
    config.put(TEST_MODE.key(), "EARLIEST");
    Assertions.assertDoesNotThrow(executable);
}
/**
 * Maps a sort-direction token to the enum: descending only when the token
 * matches DESC exactly; anything else (including null) defaults to ascending.
 */
public static SortDir sortDir(String s) {
    return DESC.equals(s) ? SortDir.DESC : SortDir.ASC;
}
// A null token must default to ascending order.
@Test
public void sortDirNull() {
    assertEquals("null sort dir", SortDir.ASC, TableModel.sortDir(null));
}
/**
 * Returns a StringSubject for the value of the fact with the given key,
 * without an index (delegates with null).
 */
public StringSubject factValue(String key) {
    return doFactValue(key, null);
}
// A wrong fact value at an explicit index must fail and describe the subject
// as "failure.factValue(foo, 0)".
@Test
public void factValueIntFailWrongValue() {
    expectFailureWhenTestingThat(fact("foo", "the foo")).factValue("foo", 0).isEqualTo("the bar");
    assertFailureValue("value of", "failure.factValue(foo, 0)");
}
/**
 * Creates a reply for the comment named in the path. Fills in IP address and
 * user agent from the request, forces hidden=false (gh-2951), and applies the
 * comment settings: rejects when commenting is disabled or restricted to
 * system users, and sets the approved flag from the review-required setting.
 * The whole chain is rate-limited per client IP; exceeding the limit maps to
 * RateLimitExceededException.
 */
Mono<ServerResponse> createReply(ServerRequest request) {
    String commentName = request.pathVariable("name");
    return request.bodyToMono(ReplyRequest.class)
        .flatMap(replyRequest -> {
            Reply reply = replyRequest.toReply();
            reply.getSpec().setIpAddress(IpAddressUtils.getIpAddress(request));
            reply.getSpec().setUserAgent(HaloUtils.userAgentFrom(request));
            // fix gh-2951
            reply.getSpec().setHidden(false);
            return environmentFetcher.fetchComment()
                .map(commentSetting -> {
                    if (isFalse(commentSetting.getEnable())) {
                        throw new AccessDeniedException(
                            "The comment function has been turned off.",
                            "problemDetail.comment.turnedOff", null);
                    }
                    if (checkReplyOwner(reply, commentSetting.getSystemUserOnly())) {
                        throw new AccessDeniedException("Allow only system users to comment.",
                            "problemDetail.comment.systemUsersOnly", null);
                    }
                    // Auto-approve unless new replies require review.
                    reply.getSpec()
                        .setApproved(isFalse(commentSetting.getRequireReviewForNew()));
                    return reply;
                })
                // No comment settings available: proceed with the reply as-is.
                .defaultIfEmpty(reply);
        })
        .flatMap(reply -> replyService.create(commentName, reply))
        .flatMap(comment -> ServerResponse.ok().bodyValue(comment))
        .transformDeferred(createIpBasedRateLimiter(request))
        .onErrorMap(RequestNotPermitted.class, RateLimitExceededException::new);
}
// Posting a reply must enrich it with IP address and user agent, leave the
// quote reply unset, and pass through the per-IP rate limiter.
@Test
void createReply() {
    when(replyService.create(any(), any())).thenReturn(Mono.empty());

    final ReplyRequest replyRequest = new ReplyRequest();
    replyRequest.setRaw("raw");
    replyRequest.setContent("content");
    replyRequest.setAllowNotification(true);

    when(rateLimiterRegistry.rateLimiter("comment-creation-from-ip-127.0.0.1",
        "comment-creation"))
        .thenReturn(RateLimiter.ofDefaults("comment-creation"));

    webTestClient.post()
        .uri("/comments/test-comment/reply")
        .header("X-Forwarded-For", "127.0.0.1")
        .bodyValue(replyRequest)
        .exchange()
        .expectStatus()
        .isOk();

    ArgumentCaptor<Reply> captor = ArgumentCaptor.forClass(Reply.class);
    verify(replyService, times(1)).create(eq("test-comment"), captor.capture());
    Reply value = captor.getValue();
    assertThat(value.getSpec().getIpAddress()).isNotNull();
    assertThat(value.getSpec().getUserAgent()).isNotNull();
    assertThat(value.getSpec().getQuoteReply()).isNull();

    verify(rateLimiterRegistry).rateLimiter("comment-creation-from-ip-127.0.0.1",
        "comment-creation");
}
/**
 * Blocking get: processes with no timeout, wrapping a Throwable result in
 * ExecutionException. The future is marked done in all cases via finally,
 * including on failure.
 */
@Override
public HttpResponse get() throws InterruptedException, ExecutionException {
    try {
        final Object result = process(0, null);
        if (result instanceof Throwable) {
            throw new ExecutionException((Throwable) result);
        }
        return (HttpResponse) result;
    } finally {
        // Mark completion even on failure so the future reports a terminal state.
        isDone = true;
    }
}
// A Throwable result must be surfaced as an ExecutionException from get().
@Test(expected = ExecutionException.class)
public void errGetTimeoutThrowable() throws ExecutionException, InterruptedException, TimeoutException {
    get(new Exception("wrong"), false);
}
/**
 * Resolves the billing project id with the following precedence: explicit
 * configuration wins; otherwise a ServiceAccountCredentials' embedded project
 * id; otherwise the environment default from BigQueryOptions.
 */
static String calculateBillingProjectId(Optional<String> configParentProjectId, Optional<Credentials> credentials) {
    if (configParentProjectId.isPresent()) {
        return configParentProjectId.get();
    }
    // All other credentials types (User, AppEngine, GCE, CloudShell, etc.) take it from the environment
    if (credentials.isPresent() && credentials.get() instanceof ServiceAccountCredentials) {
        return ((ServiceAccountCredentials) credentials.get()).getProjectId();
    }
    return BigQueryOptions.getDefaultProjectId();
}
// When both an explicit configuration and credentials are supplied, the
// configured project id must take precedence.
@Test
public void testBothConfigurationAndCredentials() throws Exception {
    String projectId =
        BigQueryConnectorModule.calculateBillingProjectId(Optional.of("pid"), credentials());
    assertThat(projectId).isEqualTo("pid");
}
/**
 * Creates a Growth transform from the given poll function and requirements.
 * Termination defaults to never; a null output-key function is a deliberate
 * signal that the identity mapping is used, allowing the output coder to be
 * reused as the key coder.
 */
public static <InputT, OutputT> Growth<InputT, OutputT, OutputT> growthOf(
    Growth.PollFn<InputT, OutputT> pollFn, Requirements requirements) {
    return new AutoValue_Watch_Growth.Builder<InputT, OutputT, OutputT>()
        .setTerminationPerInput(Growth.never())
        .setPollFn(Contextful.of(pollFn, requirements))
        // use null as a signal that this is the identity function and output coder can be
        // reused as key coder
        .setOutputKeyFn(null)
        .build();
}
// A single complete poll must fan out each input element into its two derived
// outputs, keyed by the originating element.
@Test
@Category({NeedsRunner.class, UsesUnboundedSplittableParDo.class})
public void testSinglePollMultipleInputs() {
    PCollection<KV<String, String>> res =
        p.apply(Create.of("a", "b"))
            .apply(
                Watch.growthOf(
                        new PollFn<String, String>() {
                            @Override
                            public PollResult<String> apply(String element, Context c)
                                throws Exception {
                                return PollResult.complete(
                                    Instant.now(), Arrays.asList(element + ".foo", element + ".bar"));
                            }
                        })
                    .withPollInterval(Duration.ZERO));

    PAssert.that(res)
        .containsInAnyOrder(
            Arrays.asList(
                KV.of("a", "a.foo"), KV.of("a", "a.bar"),
                KV.of("b", "b.foo"), KV.of("b", "b.bar")));

    p.run();
}
/**
 * Expands the knowledge-helper macros in the given rule source. Null or
 * blank input is returned unchanged.
 */
public static String fix(final String raw) {
    if (raw == null || raw.trim().isEmpty()) {
        return raw;
    }
    final MacroProcessor processor = new MacroProcessor();
    processor.setMacros(macros);
    return processor.parse(raw);
}
// Multiple action macros on one line — and repeated across lines — must each
// be prefixed with "drools." while preserving surrounding whitespace.
@Test
public void testAllActionsMushedTogether() {
    String result =
        KnowledgeHelperFixerTest.fixer.fix( "insert(myObject ); update(ourObject);\t retract(herObject);" );

    assertEqualsIgnoreWhitespace( "drools.insert(myObject ); drools.update(ourObject);\t drools.retract(herObject);",
                                  result );

    result =
        KnowledgeHelperFixerTest.fixer.fix( "insert( myObject ); update(ourObject);\t retract(herObject );\ninsert( myObject ); update(ourObject);\t retract( herObject );" );

    assertEqualsIgnoreWhitespace( "drools.insert( myObject ); drools.update(ourObject);\t drools.retract(herObject );\ndrools.insert( myObject ); drools.update(ourObject);\t drools.retract( herObject );",
                                  result );
}
/**
 * Merges the partition lists of a base table and a derived (LIKE) table.
 * Having partitions on both sides is only legal with EXCLUDING; otherwise the
 * derived partitions win when present, else the source partitions are kept.
 *
 * @throws ValidationException when both sides define partitions and the
 *     strategy is not EXCLUDING
 */
public List<String> mergePartitions(
        MergingStrategy mergingStrategy,
        List<String> sourcePartitions,
        List<String> derivedPartitions) {
    final boolean bothDefined =
            !sourcePartitions.isEmpty() && !derivedPartitions.isEmpty();
    if (bothDefined && mergingStrategy != MergingStrategy.EXCLUDING) {
        throw new ValidationException(
                "The base table already has partitions defined. You might want to specify "
                        + "EXCLUDING PARTITIONS.");
    }
    return derivedPartitions.isEmpty() ? sourcePartitions : derivedPartitions;
}
// INCLUDING with partitions on both sides must be rejected with a message
// pointing to EXCLUDING PARTITIONS.
@Test
void mergeIncludingPartitionsFailsOnDuplicate() {
    List<String> sourcePartitions = Arrays.asList("col3", "col4");
    List<String> derivedPartitions = Arrays.asList("col1", "col2");

    assertThatThrownBy(
            () ->
                util.mergePartitions(
                    MergingStrategy.INCLUDING, sourcePartitions, derivedPartitions))
        .isInstanceOf(ValidationException.class)
        .hasMessage(
            "The base table already has partitions defined. You might want "
                + "to specify EXCLUDING PARTITIONS.");
}
/** Encodes the criterion to JSON by delegating to the per-type encode helper. */
@Override
public ObjectNode encode(Criterion criterion, CodecContext context) {
    EncodeCriterionCodecHelper encoder = new EncodeCriterionCodecHelper(criterion, context);
    return encoder.encode();
}
// Encoding an eth-type criterion must produce JSON that round-trips back to
// the same criterion.
@Test
public void matchEthTypeTest() {
    Criterion criterion = Criteria.matchEthType((short) 0x8844);
    ObjectNode result = criterionCodec.encode(criterion, context);
    assertThat(result, matchesCriterion(criterion));
}
/**
 * Hash over both boundaries and both end points, using the conventional
 * 31-multiplier chain (null contributes 0). Arithmetically identical to the
 * unrolled field-by-field version.
 */
@Override
public int hashCode() {
    final Object[] components = {lowBoundary, highBoundary, lowEndPoint, highEndPoint};
    int result = 0;
    for (final Object component : components) {
        result = 31 * result + (component == null ? 0 : component.hashCode());
    }
    return result;
}
// Verifies hashCode consistency: equal ranges hash equally, and changing any of the
// four components (boundary types or end points) produces a different hash.
@Test void hashCodeTest() { final RangeImpl rangeImpl = new RangeImpl(Range.RangeBoundary.OPEN, 10, 15, Range.RangeBoundary.OPEN); assertThat(rangeImpl.hashCode()).isEqualTo(rangeImpl.hashCode()); RangeImpl rangeImpl2 = new RangeImpl(Range.RangeBoundary.OPEN, 10, 15, Range.RangeBoundary.OPEN); assertThat(rangeImpl2.hashCode()).isEqualTo(rangeImpl.hashCode()); rangeImpl2 = new RangeImpl(Range.RangeBoundary.OPEN, 10, 15, Range.RangeBoundary.CLOSED); assertThat(rangeImpl2).doesNotHaveSameHashCodeAs(rangeImpl); rangeImpl2 = new RangeImpl(Range.RangeBoundary.CLOSED, 10, 15, Range.RangeBoundary.OPEN); assertThat(rangeImpl2).doesNotHaveSameHashCodeAs(rangeImpl); rangeImpl2 = new RangeImpl(Range.RangeBoundary.CLOSED, 10, 15, Range.RangeBoundary.CLOSED); assertThat(rangeImpl2).doesNotHaveSameHashCodeAs(rangeImpl); rangeImpl2 = new RangeImpl(Range.RangeBoundary.CLOSED, 12, 15, Range.RangeBoundary.CLOSED); assertThat(rangeImpl2).doesNotHaveSameHashCodeAs(rangeImpl); rangeImpl2 = new RangeImpl(Range.RangeBoundary.CLOSED, 12, 17, Range.RangeBoundary.CLOSED); assertThat(rangeImpl2).doesNotHaveSameHashCodeAs(rangeImpl); }
// Loading single entries is deliberately unsupported by this cache store.
@Override public V load(K key) { throw new UnsupportedOperationException(); }
// Verifies that load() always throws UnsupportedOperationException.
@Test(expected = UnsupportedOperationException.class) public void load() { cacheStore.load("somekey"); }
// Builds, under the read lock, a map from each consumer to the list of hash ranges it
// owns. Walks the sorted hash ring; each entry's key is the inclusive upper bound of a
// range starting just after the previous entry's key (0 for the first).
@Override public Map<Consumer, List<Range>> getConsumerKeyHashRanges() { Map<Consumer, List<Range>> result = new LinkedHashMap<>(); rwLock.readLock().lock(); try { int start = 0; for (Map.Entry<Integer, List<Consumer>> entry: hashRing.entrySet()) { for (Consumer consumer: entry.getValue()) { result.computeIfAbsent(consumer, key -> new ArrayList<>()) .add(Range.of(start, entry.getKey())); } start = entry.getKey() + 1; } } finally { rwLock.readLock().unlock(); } return result; }
// Verifies that every added consumer appears in the hash-range map and that removing
// all consumers leaves the map empty (no stale range assignments remain).
@Test public void shouldRemoveConsumersFromConsumerKeyHashRanges() { ConsistentHashingStickyKeyConsumerSelector selector = new ConsistentHashingStickyKeyConsumerSelector(12); List<Consumer> consumers = IntStream.range(1, 100).mapToObj(i -> "consumer" + i) .map(consumerName -> { Consumer consumer = mock(Consumer.class); when(consumer.consumerName()).thenReturn(consumerName); return consumer; }).collect(Collectors.toList()); // when consumers are added consumers.forEach(selector::addConsumer); // then each consumer should have a range Assert.assertEquals(selector.getConsumerKeyHashRanges().size(), consumers.size()); // when consumers are removed consumers.forEach(selector::removeConsumer); // then there should be no mapping remaining Assert.assertEquals(selector.getConsumerKeyHashRanges().size(), 0); }
@Override public synchronized boolean tryReturnRecordAt( boolean isAtSplitPoint, @Nullable ShufflePosition groupStart) { if (lastGroupStart == null && !isAtSplitPoint) { throw new IllegalStateException( String.format("The first group [at %s] must be at a split point", groupStart.toString())); } if (this.startPosition != null && groupStart.compareTo(this.startPosition) < 0) { throw new IllegalStateException( String.format( "Trying to return record at %s which is before the starting position at %s", groupStart, this.startPosition)); } int comparedToLast = (lastGroupStart == null) ? 1 : groupStart.compareTo(this.lastGroupStart); if (comparedToLast < 0) { throw new IllegalStateException( String.format( "Trying to return group at %s which is before the last-returned group at %s", groupStart, this.lastGroupStart)); } if (isAtSplitPoint) { splitPointsSeen++; if (comparedToLast == 0) { throw new IllegalStateException( String.format( "Trying to return a group at a split point with same position as the " + "previous group: both at %s, last group was %s", groupStart, lastGroupWasAtSplitPoint ? "at a split point." : "not at a split point.")); } if (stopPosition != null && groupStart.compareTo(stopPosition) >= 0) { return false; } } else { checkState( comparedToLast == 0, // This case is not a violation of general RangeTracker semantics, but it is // contrary to how GroupingShuffleReader in particular works. Hitting it would // mean it's behaving unexpectedly. "Trying to return a group not at a split point, but with a different position " + "than the previous group: last group was %s at %s, current at %s", lastGroupWasAtSplitPoint ? "a split point" : "a non-split point", lastGroupStart, groupStart); } this.lastGroupStart = groupStart; this.lastGroupWasAtSplitPoint = isAtSplitPoint; return true; }
// Verifies that returning a group positioned before the tracker's start position
// raises an IllegalStateException.
@Test public void testTryReturnRecordBeforeStart() throws Exception { GroupingShuffleRangeTracker tracker = new GroupingShuffleRangeTracker(ofBytes(3, 0, 0), ofBytes(5, 0, 0)); expected.expect(IllegalStateException.class); tracker.tryReturnRecordAt(true, ofBytes(1, 2, 3)); }
/**
 * Stores the given setting, rejecting null keys and values with a descriptive NPE
 * and trimming surrounding whitespace from the value before storage.
 */
@Override
protected void set(String key, String value) {
    String checkedKey = requireNonNull(key, "key can't be null");
    String checkedValue = requireNonNull(value, "value can't be null");
    props.put(checkedKey, checkedValue.trim());
}
// Verifies that set() rejects a null key with a NullPointerException.
@Test public void set_throws_NPE_if_key_is_null() { MapSettings underTest = new MapSettings(); expectKeyNullNPE(() -> underTest.set(null, randomAlphanumeric(3))); }
// Tokenizes the input string via the wrapped tokenizer while recording the token count
// and the embedding latency (nanoseconds converted to milliseconds) in runtime metrics.
@Override public List<Integer> embed(String s, Context context) { var start = System.nanoTime(); var tokens = tokenizer.embed(s, context); runtime.sampleSequenceLength(tokens.size(), context); runtime.sampleEmbeddingLatency((System.nanoTime() - start)/1_000_000d, context); return tokens; }
// Verifies that the normalizing embedder yields unit-length float vectors (dot product with
// itself ~ 1.0) across dimensions, and that the int8 output of the plain embedder matches
// a fixed binarized value.
@Test public void testEmbedderWithNormalization() { String input = "This is a test"; var context = new Embedder.Context("schema.indexing"); Tensor result = normalizedEmbedder.embed(input, context, TensorType.fromSpec(("tensor<float>(x[8])"))); assertEquals(1.0, result.multiply(result).sum().asDouble(), 1e-3); result = normalizedEmbedder.embed(input, context, TensorType.fromSpec(("tensor<float>(x[16])"))); assertEquals(1.0, result.multiply(result).sum().asDouble(), 1e-3); Tensor binarizedResult = embedder.embed(input, context, TensorType.fromSpec(("tensor<int8>(x[2])"))); assertEquals("tensor<int8>(x[2]):[119, 44]", binarizedResult.toAbbreviatedString()); }
/**
 * Returns the topic for the given name if its creation future has already completed
 * successfully; otherwise (absent, pending, or failed) returns an empty Optional
 * without blocking.
 */
public Optional<Topic> getTopicReference(String topic) {
    CompletableFuture<Optional<Topic>> pending = topics.get(topic);
    boolean completedSuccessfully =
            pending != null && pending.isDone() && !pending.isCompletedExceptionally();
    return completedSuccessfully ? pending.join() : Optional.empty();
}
// Verifies that a topic's reported storageSize starts at 0 and becomes positive after
// messages are produced to it.
@Test public void testStatsOfStorageSizeWithSubscription() throws Exception { final String topicName = "persistent://prop/ns-abc/no-subscription"; Producer<byte[]> producer = pulsarClient.newProducer().topic(topicName).create(); PersistentTopic topicRef = (PersistentTopic) pulsar.getBrokerService().getTopicReference(topicName).get(); assertNotNull(topicRef); assertEquals(topicRef.getStats(false, false, false).storageSize, 0); for (int i = 0; i < 10; i++) { producer.send(new byte[10]); } assertTrue(topicRef.getStats(false, false, false).storageSize > 0); }
/**
 * Adds every sequence from the given set into this one, walking the other set's
 * linked list from head to tail. Overlap handling is delegated to add().
 */
public void merge(SequenceSet sequenceSet) {
    for (Sequence current = sequenceSet.getHead(); current != null; current = current.getNext()) {
        add(current);
    }
}
// Verifies that merging {0..100} with {50..150} collapses into the single range {0..150}.
@Test
public void testMerge() {
    SequenceSet set = new SequenceSet();
    set.add(new Sequence(0, 100));
    SequenceSet set2 = new SequenceSet();
    // Bug fix: the overlapping sequence must be added to set2 (the merge source),
    // not to set; previously merge() was invoked with an empty set2 and the
    // assertions passed without ever exercising merge().
    set2.add(new Sequence(50, 150));
    set.merge(set2);
    assertEquals(151, set.rangeSize());
    assertEquals(1, set.size());
}
// Convenience overload that parses a configuration string with the default flag enabled.
public static boolean parse(final String str, ResTable_config out) { return parse(str, out, true); }
// Verifies that the "xxhdpi" qualifier parses to the DENSITY_XXHIGH density constant.
@Test public void parse_density_xxhdpi() { ResTable_config config = new ResTable_config(); ConfigDescription.parse("xxhdpi", config); assertThat(config.density).isEqualTo(DENSITY_XXHIGH); }
/**
 * Transforms a tuple into its bag-of-words vector: tokenizes each configured text
 * column and, for every token present in the feature index, either flags (binary
 * mode) or increments the corresponding slot. Tokens outside the vocabulary are
 * ignored.
 */
@Override
public Tuple apply(Tuple x) {
    int[] counts = new int[featureIndex.size()];
    for (String column : columns) {
        String text = x.getString(column);
        for (String token : tokenizer.apply(text)) {
            Integer slot = featureIndex.get(token);
            if (slot == null) {
                continue; // out-of-vocabulary token
            }
            counts[slot] = binary ? 1 : counts[slot] + 1;
        }
    }
    return Tuple.of(counts, schema);
}
// Verifies BagOfWords over the movie-review corpus: every document yields a vector of the
// fixed feature length, and a known document's count for feature index 15 equals 1.
@Test public void testFeature() throws IOException { System.out.println("feature"); String[][] text = smile.util.Paths.getTestDataLines("text/movie.txt") .map(String::trim) .filter(line -> !line.isEmpty()) .map(line -> line.split("\\s+", 2)) .toArray(String[][]::new); String[] feature = { "outstanding", "wonderfully", "wasted", "lame", "awful", "poorly", "ridiculous", "waste", "worst", "bland", "unfunny", "stupid", "dull", "fantastic", "laughable", "mess", "pointless", "terrific", "memorable", "superb", "boring", "badly", "subtle", "terrible", "excellent", "perfectly", "masterpiece", "realistic", "flaws" }; BagOfWords bag = new BagOfWords(tokenizer, feature); int[][] x = new int[text.length][]; for (int i = 0; i < text.length; i++) { x[i] = bag.apply(text[i][1]); assertEquals(feature.length, x[i].length); } assertEquals(1, x[0][15]); }
public MediaType detect(InputStream input, Metadata metadata) { // Look for a type hint in the input metadata String hint = metadata.get(Metadata.CONTENT_TYPE); if (hint != null) { MediaType type = MediaType.parse(hint); if (type != null) { return type; } } return MediaType.OCTET_STREAM; }
// Verifies hint-based detection: valid content-type strings (any case, with whitespace or
// parameters) resolve to the matching type, while malformed, empty, or null hints fall back
// to application/octet-stream, and detect() never throws IOException.
@Test public void testDetect() { assertDetect(MediaType.TEXT_PLAIN, "text/plain"); assertDetect(MediaType.TEXT_PLAIN, "TEXT/PLAIN"); assertDetect(MediaType.TEXT_PLAIN, " text/\tplain\n"); assertDetect(TEXT_PLAIN_A_EQ_B, "text/plain; a=b"); assertDetect(TEXT_PLAIN_A_EQ_B, "\ttext/plain; a=b\n"); assertDetect(MediaType.OCTET_STREAM, "text\\plain"); // test also the zero input cases assertDetect(MediaType.OCTET_STREAM, ""); assertDetect(MediaType.OCTET_STREAM, null); try { assertEquals(MediaType.OCTET_STREAM, detector.detect(null, new Metadata())); } catch (IOException e) { fail("TypeDetector should never throw an IOException"); } }
// Confirms a meeting's final time slot. Resolves the meeting by UUID and the attendee by id,
// then validates in order: host permission, not already confirmed, meeting locked state, and
// that the requested time/date range fits the meeting, before persisting the confirmation.
@Transactional public MeetingConfirmResponse create(String uuid, long attendeeId, MeetingConfirmRequest request) { LocalDateTime startDateTime = request.toStartDateTime(); LocalDateTime endDateTime = request.toEndDateTime(); Meeting meeting = meetingRepository.findByUuid(uuid) .orElseThrow(() -> new MomoException(MeetingErrorCode.INVALID_UUID)); Attendee attendee = attendeeRepository.findByIdAndMeeting(attendeeId, meeting) .orElseThrow(() -> new MomoException(AttendeeErrorCode.INVALID_ATTENDEE)); validateHostPermission(attendee); validateNotAlreadyConfirmed(meeting); validateMeetingLocked(meeting); validateTimeRange(meeting, startDateTime, endDateTime); validateDateRange(meeting, startDateTime, endDateTime); ConfirmedMeeting confirmedMeeting = new ConfirmedMeeting(meeting, startDateTime, endDateTime); confirmedMeetingRepository.save(confirmedMeeting); return MeetingConfirmResponse.from(confirmedMeeting); }
// Verifies that confirming a schedule outside the meeting's available time window
// fails with INVALID_DATETIME_RANGE.
@DisplayName("약속에 포함되지 않은 시간의 일정을 확정 시 예외가 발생한다.") @Test void confirmScheduleThrowsExceptionWhen_InvalidTime() { MeetingConfirmRequest request = new MeetingConfirmRequest( today.getDate(), Timeslot.TIME_2200.startTime(), today.getDate(), Timeslot.TIME_2300.startTime() ); assertThatThrownBy(() -> meetingConfirmService.create(meeting.getUuid(), attendee.getId(), request)) .isInstanceOf(MomoException.class) .hasMessage(MeetingErrorCode.INVALID_DATETIME_RANGE.message()); }
// Returns the last file that can hold waitToWroteSize more bytes, optionally allocating a
// new one. Under the read lock it checks the current tail file; a full tail gets its
// remainder padded. If no usable file exists and createIfNecessary is set, it takes the
// write lock, re-checks that no other thread created a file in the meantime (retrying via
// the outer loop if so), and appends a pre-allocated empty file at the next file offset.
public AbstractFile getLastFile(final long logIndex, final int waitToWroteSize, final boolean createIfNecessary) { AbstractFile lastFile = null; while (true) { int fileCount = 0; this.readLock.lock(); try { if (!this.files.isEmpty()) { fileCount = this.files.size(); final AbstractFile file = this.files.get(fileCount - 1); if (waitToWroteSize <= 0 || !file.reachesFileEndBy(waitToWroteSize)) { lastFile = file; } else if (file.reachesFileEndBy(waitToWroteSize)) { // Reach file end , need to fill blank bytes in file end file.fillEmptyBytesInFileEnd(); } } } finally { this.readLock.unlock(); } // Try to get a new file if (lastFile == null && createIfNecessary) { this.writeLock.lock(); try { if (this.files.size() != fileCount) { // That means already create a new file , just continue and try again continue; } lastFile = this.allocateService.takeEmptyFile(); if (lastFile != null) { final long newFileOffset = (long) this.files.size() * (long) this.fileSize; lastFile.setFileFromOffset(newFileOffset); this.files.add(lastFile); this.swapOutFilesIfNecessary(); return lastFile; } else { continue; } } catch (final Exception e) { LOG.error("Error on create new abstract file , current logIndex:{}", logIndex); } finally { this.writeLock.unlock(); } } return lastFile; } }
// Verifies that after filling the first file, getLastFile() allocates a second file whose
// starting offset equals the configured index file size, and that indexes can be appended.
@Test public void writeDataToSecondFile() { writeDataToFirstFile(); // Try get last file again , this file is a new blank file (from allocator) final AbstractFile lastFile = this.fileManager.getLastFile(10, 10, true); assertEquals(lastFile.getFileFromOffset(), this.indexFileSize); // Write 5 index to second file , wrotePosition = 30 + 50 final IndexFile indexFile = (IndexFile) lastFile; for (int i = 10; i <= 15; i++) { indexFile.appendIndex(i, i, segmentIndex); } }
// Static factory for a fresh, empty Builder instance.
public static <T> Builder<T> builder() { return new Builder<>(); }
// Verifies that the KernelFunctionFromPrompt builder propagates name, description, input
// parameters (compared field-by-field due to metadata type differences), and the output
// variable into the built function's metadata; plugin name stays null.
@Test void testBuilder() { String pluginName = null; String functionName = "concat"; String description = "concatenate two strings"; List<InputVariable> parameters = Collections .singletonList(new InputVariable("string1", "java.lang.String", "first string to concatenate", null, true, null)); OutputVariable<?> returnParameter = new OutputVariable<>("java.lang.String", "concatenated strings"); KernelFunction<String> result = new KernelFunctionFromPrompt.Builder() .withName(functionName) .withDescription(description) .withInputParameters(parameters) .withOutputVariable(returnParameter) .withPromptTemplate((kernel, args, context) -> Mono.empty()) .build(); assertEquals(functionName, result.getName()); assertEquals(pluginName, result.getPluginName()); assertEquals(description, result.getDescription()); // TODO: This assert fails because getParameters is a List<KernelParameterMetadata<?>>, not an List<InputVariable> // This feels like it's broken. Until this is fixed, we can compare the types // assertEquals(parameters, result.getMetadata().getParameters()); assertEquals(parameters.size(), result.getMetadata().getParameters().size()); for (int i = 0; i < parameters.size(); i++) { assertEquals(parameters.get(i).getDescription(), result.getMetadata().getParameters().get(i).getDescription()); assertEquals(parameters.get(i).getName(), result.getMetadata().getParameters().get(i).getName()); assertEquals(parameters.get(i).getType(), result.getMetadata().getParameters().get(i).getType()); assertEquals(parameters.get(i).isRequired(), result.getMetadata().getParameters().get(i).isRequired()); } assertEquals(returnParameter, result.getMetadata().getOutputVariableType()); assertEquals(returnParameter.getType(), result.getMetadata().getOutputVariableType().getType()); assertEquals(returnParameter.getDescription(), result.getMetadata().getOutputVariableType().getDescription()); }
// Parses a string like "Sa-Su" or a single date token into a DateRange. Null/empty input
// and strings splitting into more than two '-'-separated parts yield null. A single token
// is parsed twice so "from" and "to" are independent instances. Construction failures
// (IllegalArgumentException from DateRange) are swallowed and reported as null.
DateRange getRange(String dateRangeString) throws ParseException { if (dateRangeString == null || dateRangeString.isEmpty()) return null; String[] dateArr = dateRangeString.split("-"); if (dateArr.length > 2 || dateArr.length < 1) return null; // throw new IllegalArgumentException("Only Strings containing two Date separated by a '-' or a single Date are allowed"); ParsedCalendar from = parseDateString(dateArr[0]); ParsedCalendar to; if (dateArr.length == 2) to = parseDateString(dateArr[1]); else // faster and safe? // to = new ParsedCalendar(from.parseType, (Calendar) from.parsedCalendar.clone()); to = parseDateString(dateArr[0]); try { return new DateRange(from, to); } catch (IllegalArgumentException ex) { return null; } }
// Verifies that "Sa-Su" parses as a day-only, reversed range (Sunday=7 precedes Monday=1 in
// the calendar ordering) and that membership checks match the Saturday/Sunday dates only.
@Test public void testParseReverseDateRangeDayOnly() throws ParseException { // This is reverse since Su=7 and Mo=1 // Note: If we use Locale.Germany or Locale.UK for calendar creation // then cal.set(DAY_OF_WEEK, 7) results in a "time in millis" after Saturday leading to reverse=false DateRange dateRange = dateRangeParser.getRange("Sa-Su"); assertTrue(dateRange.dayOnly); assertTrue(dateRange.reverse); assertFalse(dateRange.isInRange(getCalendar(2015, Calendar.DECEMBER, 25))); assertTrue(dateRange.isInRange(getCalendar(2015, Calendar.DECEMBER, 26))); assertTrue(dateRange.isInRange(getCalendar(2015, Calendar.DECEMBER, 27))); assertFalse(dateRange.isInRange(getCalendar(2015, Calendar.DECEMBER, 28))); }
/**
 * Resolves the client configuration for an issuer, preferring the statically
 * registered client and falling back to dynamic registration only when the
 * static service returns null.
 */
@Override
public RegisteredClient getClientConfiguration(ServerConfiguration issuer) {
    RegisteredClient staticClient = staticClientService.getClientConfiguration(issuer);
    return (staticClient != null)
            ? staticClient
            : dynamicClientService.getClientConfiguration(issuer);
}
// Verifies that when the static service supplies a client, it is returned and the
// dynamic service is never consulted.
@Test public void getClientConfiguration_useStatic() { Mockito.when(mockStaticService.getClientConfiguration(mockServerConfig)).thenReturn(mockClient); RegisteredClient result = hybridService.getClientConfiguration(mockServerConfig); Mockito.verify(mockStaticService).getClientConfiguration(mockServerConfig); Mockito.verify(mockDynamicService, Mockito.never()).getClientConfiguration(Matchers.any(ServerConfiguration.class)); assertEquals(mockClient, result); }
// Builds a Statement from the parse tree, first extracting the statement's sources.
public Statement buildStatement(final ParserRuleContext parseTree) { return build(Optional.of(getSources(parseTree)), parseTree); }
// Verifies that "SELECT T.*" parses into an AllColumns node qualified by the alias T.
@Test public void shouldHandleAliasQualifiedSelectStar() { // Given: final SingleStatementContext stmt = givenQuery("SELECT T.* FROM TEST1 T;"); // When: final Query result = (Query) builder.buildStatement(stmt); // Then: assertThat(result.getSelect(), is(new Select(ImmutableList.of(new AllColumns(Optional.of(SourceName.of("T"))))))); }
/**
 * Integer overload of the natural-logarithm UDF: widens the argument to Double and
 * delegates to the Double overload, preserving null pass-through.
 */
@Udf(description = "Returns the natural logarithm (base e) of an INT value.")
public Double ln(
    @UdfParameter(
        value = "value",
        description = "the value get the natual logarithm of."
    ) final Integer value
) {
    if (value == null) {
        return ln((Double) null);
    }
    return ln(value.doubleValue());
}
// Verifies ln(1) == 0 across the Integer, Long, and Double overloads.
@Test public void shouldHandlePositive() { assertThat(udf.ln(1), is(0.0)); assertThat(udf.ln(1L), is(0.0)); assertThat(udf.ln(1.0), is(0.0)); }
// Drops the member from the static-members index when it has an instance id;
// dynamic members (null instance id) are not tracked there.
private void removeStaticMember(ConsumerGroupMember oldMember) { if (oldMember.instanceId() != null) { staticMembers.remove(oldMember.instanceId()); } }
// Verifies that removing a static member clears both the membership and the
// instance-id lookup tables.
@Test public void testRemoveStaticMember() { ConsumerGroup consumerGroup = createConsumerGroup("foo"); ConsumerGroupMember member = new ConsumerGroupMember.Builder("member") .setSubscribedTopicNames(Arrays.asList("foo", "bar")) .setInstanceId("instance") .build(); consumerGroup.updateMember(member); assertTrue(consumerGroup.hasMember("member")); consumerGroup.removeMember("member"); assertFalse(consumerGroup.hasMember("member")); assertNull(consumerGroup.staticMember("instance")); assertNull(consumerGroup.staticMemberId("instance")); }
/**
 * Decodes a remember-me cookie value into its delimited tokens.
 *
 * <p>Re-pads the Base64 text to a multiple of 4 characters, Base64-decodes it
 * (raising InvalidCookieException on malformed input), splits on DELIMITER, and
 * URL-decodes each token as UTF-8.
 *
 * @param cookieValue the raw (possibly unpadded) Base64 cookie value
 * @return the decoded, URL-unescaped tokens
 * @throws InvalidCookieException if the value is not valid Base64
 */
protected String[] decodeCookie(String cookieValue) throws InvalidCookieException {
    int remainder = cookieValue.length() % 4;
    if (remainder != 0) {
        // Restore the '=' padding stripped from the transmitted value.
        cookieValue += "====".substring(remainder);
    }
    String cookieAsPlainText;
    try {
        cookieAsPlainText = new String(Base64.getDecoder().decode(cookieValue.getBytes()));
    }
    catch (IllegalArgumentException ex) {
        throw new InvalidCookieException(
                "Cookie token was not Base64 encoded; value was '" + cookieValue + "'");
    }
    String[] tokens = StringUtils.delimitedListToStringArray(cookieAsPlainText, DELIMITER);
    for (int i = 0; i < tokens.length; i++) {
        tokens[i] = URLDecoder.decode(tokens[i], StandardCharsets.UTF_8);
    }
    return tokens;
}
// Verifies decoding of cookie values whose length is not a multiple of 4 (requiring '='
// re-padding, see spring-security/gh-15127) as well as already-aligned values, checking
// the exact user/expiry/algorithm/signature tokens.
@Test void decodeCookieTest() { var cookieValue = "YWRtaW46MTcxODk2NDE3NDgwODpTSEE" + "tMjU2OmNkOTM0ZTAyZWQ4NGJmMzc1ZTA4MmE1OWU4YTA3NTNiMzA3ODg1MjZmYzA3Yjgy" + "YzVmY2Y3YmJiYzdjYzRkNWU"; // 123 % 4 = 3, so we need to add 1 '=' to make it a multiple of 4 for // spring-security/gh-15127 assertThat(cookieValue.length()).isEqualTo(123); var cookie = tokenBasedRememberMeServices.decodeCookie(cookieValue); assertThat(cookie).containsExactly("admin", "1718964174808", "SHA-256", "cd934e02ed84bf375e082a59e8a0753b30788526fc07b82c5fcf7bbbc7cc4d5e"); cookieValue = "ZmFrZS11c2VyOjE3MTY0MzUxODczMjM6U0hBLTI1NjoyOWYxYzdjY2JiNDg5NzQxMz" + "kyZDI3YmE1YzMwZjMwZDA1Yzc5ZWU2NjI4OWI2ZDZkYTViNDMxYmJhOTlhMGM3"; assertThat(cookieValue.length()).isEqualTo(128); cookie = tokenBasedRememberMeServices.decodeCookie(cookieValue); assertThat(cookie).containsExactly("fake-user", "1716435187323", "SHA-256", "29f1c7ccbb489741392d27ba5c30f30d05c79ee66289b6d6da5b431bba99a0c7"); }
// Assigns preferred executor locations to each task group by delegating to assign(),
// sharing a per-spec partition hash cache across groups so identical partitions map
// to the same executors.
public static String[][] assignExecutors( List<? extends ScanTaskGroup<?>> taskGroups, List<String> executorLocations) { Map<Integer, JavaHash<StructLike>> partitionHashes = Maps.newHashMap(); String[][] locations = new String[taskGroups.size()][]; for (int index = 0; index < taskGroups.size(); index++) { locations[index] = assign(taskGroups.get(index), executorLocations, partitionHashes); } return locations; }
// Verifies that task groups over unpartitioned tables receive no executor assignments:
// one (empty) location array per group, nothing else.
@TestTemplate public void testFileScanTaskWithUnpartitionedDeletes() { List<ScanTask> tasks1 = ImmutableList.of( new MockFileScanTask( mockDataFile(Row.of()), mockDeleteFiles(2, Row.of()), SCHEMA, PartitionSpec.unpartitioned()), new MockFileScanTask( mockDataFile(Row.of()), mockDeleteFiles(2, Row.of()), SCHEMA, PartitionSpec.unpartitioned()), new MockFileScanTask( mockDataFile(Row.of()), mockDeleteFiles(2, Row.of()), SCHEMA, PartitionSpec.unpartitioned())); ScanTaskGroup<ScanTask> taskGroup1 = new BaseScanTaskGroup<>(tasks1); List<ScanTask> tasks2 = ImmutableList.of( new MockFileScanTask( mockDataFile(null), mockDeleteFiles(2, null), SCHEMA, PartitionSpec.unpartitioned()), new MockFileScanTask( mockDataFile(null), mockDeleteFiles(2, null), SCHEMA, PartitionSpec.unpartitioned()), new MockFileScanTask( mockDataFile(null), mockDeleteFiles(2, null), SCHEMA, PartitionSpec.unpartitioned())); ScanTaskGroup<ScanTask> taskGroup2 = new BaseScanTaskGroup<>(tasks2); List<ScanTaskGroup<ScanTask>> taskGroups = ImmutableList.of(taskGroup1, taskGroup2); String[][] locations = SparkPlanningUtil.assignExecutors(taskGroups, EXECUTOR_LOCATIONS); // should not assign executors if the table is unpartitioned assertThat(locations.length).isEqualTo(2); assertThat(locations[0]).isEmpty(); assertThat(locations[1]).isEmpty(); }
// A lease is expired only when one was actually set (expiry > 0) and the current
// cluster clock time has passed it.
public boolean isLeaseExpired() { return lockExpiryTime > 0L && Clock.currentTimeMillis() > lockExpiryTime; }
// Verifies lease expiry: the NOT_LOCKED sentinel and a one-hour lease are not expired,
// while a 1ms lease eventually reports expired.
@Test public void testIsLeaseExpired() throws Exception { LockGuard stateLock = LockGuard.NOT_LOCKED; assertFalse(stateLock.isLeaseExpired()); Address endpoint = newAddress(); stateLock = new LockGuard(endpoint, TXN, TimeUnit.HOURS.toMillis(1)); assertFalse(stateLock.isLeaseExpired()); stateLock = new LockGuard(endpoint, TXN, 1); final LockGuard finalStateLock = stateLock; HazelcastTestSupport.assertTrueEventually(() -> assertTrue(finalStateLock.isLeaseExpired())); }
/**
 * Returns a readable name for a class: plain classes yield {@code Class.getName()},
 * while arrays yield the component's name followed by one "[]" pair per dimension
 * (e.g. {@code int[][][]} instead of the JVM's {@code [[[I}).
 */
public static String getName(Class<?> c) {
    if (!c.isArray()) {
        return c.getName();
    }
    StringBuilder brackets = new StringBuilder();
    Class<?> component = c;
    while (component.isArray()) {
        brackets.append("[]");
        component = component.getComponentType();
    }
    return component.getName() + brackets;
}
// Verifies getName() for a primitive, a multi-dimensional primitive array, and an
// object array.
@Test void testGetName() { // getName assertEquals("boolean", ReflectUtils.getName(boolean.class)); assertEquals("int[][][]", ReflectUtils.getName(int[][][].class)); assertEquals("java.lang.Object[][]", ReflectUtils.getName(Object[][].class)); }
// Splits the first argument on a separator (default ",", overridable by a non-empty third
// argument) and publishes JMeter variables: <prefix> holds the whole string, <prefix>_n the
// part count, and <prefix>_1..<prefix>_n the parts (empty parts become "?"). The stale
// <prefix>_{n+1} variable from a previous, longer split is removed.
@Override public String execute(SampleResult previousResult, Sampler currentSampler) throws InvalidVariableException { JMeterVariables vars = getVariables(); String stringToSplit = ((CompoundVariable) values[0]).execute(); String varNamePrefix = ((CompoundVariable) values[1]).execute().trim(); String splitString = ","; if (values.length > 2) { // Split string provided String newSplitString = ((CompoundVariable) values[2]).execute(); splitString = newSplitString.length() > 0 ? newSplitString : splitString; } log.debug("Split {} using {} into {}", stringToSplit, splitString, varNamePrefix); String[] parts = JOrphanUtils.split(stringToSplit, splitString, "?");// $NON-NLS-1$ vars.put(varNamePrefix, stringToSplit); vars.put(varNamePrefix + "_n", Integer.toString(parts.length));// $NON-NLS-1$ for (int i = 1; i <= parts.length; i++) { if (log.isDebugEnabled()){ log.debug(parts[i-1]); } vars.put(varNamePrefix + "_" + i, parts[i - 1]);// $NON-NLS-1$ } vars.remove(varNamePrefix + "_" + (parts.length+1)); return stringToSplit; }
// Verifies the split function across default/explicit separators, empty parts (replaced
// by "?"), trailing separators, variable-reference inputs, and removal of stale _N+1
// variables from earlier, longer splits.
@Test public void shouldSplitWithoutAnyArguments() throws Exception { String src = "a,b,c"; SplitFunction split; split = splitParams(src, "VAR1", null); assertEquals(src, split.execute()); assertEquals(src, vars.get("VAR1")); assertEquals("3", vars.get("VAR1_n")); assertEquals("a", vars.get("VAR1_1")); assertEquals("b", vars.get("VAR1_2")); assertEquals("c", vars.get("VAR1_3")); assertNull(vars.get("VAR1_4")); split = splitParams(src, "VAR1", ""); assertEquals(src, split.execute()); assertEquals(src, vars.get("VAR1")); assertEquals("3", vars.get("VAR1_n")); assertEquals("a", vars.get("VAR1_1")); assertEquals("b", vars.get("VAR1_2")); assertEquals("c", vars.get("VAR1_3")); assertNull(vars.get("VAR1_4")); split = splitParams(src, "VAR2", ","); assertEquals(src, split.execute()); assertEquals(src, vars.get("VAR2")); assertEquals("3", vars.get("VAR2_n")); assertEquals("a", vars.get("VAR2_1")); assertEquals("b", vars.get("VAR2_2")); assertEquals("c", vars.get("VAR2_3")); assertNull(vars.get("VAR2_4")); src = "a|b|c"; split = splitParams(src, "VAR3", "|"); assertEquals(src, split.execute()); assertEquals(src, vars.get("VAR3")); assertEquals("3", vars.get("VAR3_n")); assertEquals("a", vars.get("VAR3_1")); assertEquals("b", vars.get("VAR3_2")); assertEquals("c", vars.get("VAR3_3")); assertNull(vars.get("VAR3_4")); src = "a|b||"; split = splitParams(src, "VAR4", "|"); assertEquals(src, split.execute()); assertEquals(src, vars.get("VAR4")); assertEquals("4", vars.get("VAR4_n")); assertEquals("a", vars.get("VAR4_1")); assertEquals("b", vars.get("VAR4_2")); assertEquals("?", vars.get("VAR4_3")); assertNull(vars.get("VAR4_5")); src = "a,,c"; vars.put("VAR", src); split = splitParams("${VAR}", "VAR", null); assertEquals(src, split.execute()); assertEquals("3", vars.get("VAR_n")); assertEquals("a", vars.get("VAR_1")); assertEquals("?", vars.get("VAR_2")); assertEquals("c", vars.get("VAR_3")); assertNull(vars.get("VAR_4")); src = "a,b"; vars.put("VAR", src); split = 
splitParams("${VAR}", "VAR", null); assertEquals(src, split.execute()); assertEquals("2", vars.get("VAR_n")); assertEquals("a", vars.get("VAR_1")); assertEquals("b", vars.get("VAR_2")); assertNull(vars.get("VAR_3")); src = "a,,c,"; vars.put("VAR", src); split = splitParams("${VAR}", "VAR5", null); assertEquals(src, split.execute()); assertEquals("4", vars.get("VAR5_n")); assertEquals("a", vars.get("VAR5_1")); assertEquals("?", vars.get("VAR5_2")); assertEquals("c", vars.get("VAR5_3")); assertEquals("?", vars.get("VAR5_4")); assertNull(vars.get("VAR5_5")); }
// Applies highlighting data to the line being built. On the first
// RangeOffsetConverterException, records a ReadError for that line and logs at debug;
// once an error is latched, subsequent lines are skipped and the same error is returned.
@Override public Optional<ReadError> read(DbFileSources.Line.Builder lineBuilder) { if (readError == null) { try { processHighlightings(lineBuilder); } catch (RangeOffsetConverterException e) { readError = new ReadError(HIGHLIGHTING, lineBuilder.getLine()); LOG.debug(format("Inconsistency detected in Highlighting data. Highlighting will be ignored for file '%s'", file.getKey()), e); } } return Optional.ofNullable(readError); }
// Verifies that a RangeOffsetConverterException does not propagate: the reader returns the
// same ReadError (from the first failing line) for subsequent lines, writes no highlighting,
// and logs the inconsistency at debug level.
@Test public void not_fail_and_stop_processing_when_range_offset_converter_throw_RangeOffsetConverterException() { TextRange textRange1 = newTextRange(LINE_1, LINE_1); doThrow(RangeOffsetConverterException.class).when(rangeOffsetConverter).offsetToString(textRange1, LINE_1, DEFAULT_LINE_LENGTH); HighlightingLineReader highlightingLineReader = newReader(of( textRange1, HighlightingType.ANNOTATION, newSingleLineTextRangeWithExpectingLabel(LINE_2, RANGE_LABEL_1), HIGHLIGHTING_STRING)); LineReader.ReadError readErrorLine1 = new LineReader.ReadError(HIGHLIGHTING, LINE_1); assertThat(highlightingLineReader.read(line1)).contains(readErrorLine1); assertThat(highlightingLineReader.read(line2)).contains(readErrorLine1); assertNoHighlighting(); assertThat(logTester.logs(DEBUG)).isNotEmpty(); }
// Processes one GELF datagram. Chunked messages are fed to the chunk aggregator: an
// incomplete message yields an empty-but-valid result, aggregation errors yield an invalid
// result. Compressed or uncompressed complete messages are wrapped as-is; unsupported types
// are invalid.
@Nonnull @Override public Result addChunk(ByteBuf buffer) { final byte[] readable = new byte[buffer.readableBytes()]; buffer.readBytes(readable, buffer.readerIndex(), buffer.readableBytes()); final GELFMessage msg = new GELFMessage(readable); final ByteBuf aggregatedBuffer; switch (msg.getGELFType()) { case CHUNKED: try { chunkCounter.inc(); aggregatedBuffer = checkForCompletion(msg); if (aggregatedBuffer == null) { return VALID_EMPTY_RESULT; } } catch (IllegalArgumentException | IllegalStateException | IndexOutOfBoundsException e) { log.debug("Invalid gelf message chunk, dropping message.", e); return INVALID_RESULT; } break; case ZLIB: case GZIP: case UNCOMPRESSED: aggregatedBuffer = Unpooled.wrappedBuffer(readable); break; case UNSUPPORTED: return INVALID_RESULT; default: return INVALID_RESULT; } return new Result(aggregatedBuffer, true); }
// Verifies that a duplicate chunk (same message id and sequence) is counted as a duplicate,
// does not complete a message by itself, and both messages still complete once their
// remaining chunks arrive; metric counters reflect 2 completions, 5 chunks, 1 duplicate.
@Test public void duplicateChunk() { final byte[] messageId1 = generateMessageId(1); final byte[] messageId2 = generateMessageId(2); final ByteBuf chunk1 = createChunk(messageId1, (byte) 0, (byte) 2, new byte[16]); final ByteBuf chunk2 = createChunk(messageId1, (byte) 0, (byte) 2, new byte[16]); final ByteBuf chunk3 = createChunk(messageId2, (byte) 0, (byte) 2, new byte[16]); final ByteBuf chunk4 = createChunk(messageId1, (byte) 1, (byte) 2, new byte[16]); final ByteBuf chunk5 = createChunk(messageId2, (byte) 1, (byte) 2, new byte[16]); assertNull("message should not be complete", aggregator.addChunk(chunk1).getMessage()); assertNull("message should not be complete", aggregator.addChunk(chunk2).getMessage()); assertNull("message should not be complete", aggregator.addChunk(chunk3).getMessage()); assertNotNull("message 1 should be complete", aggregator.addChunk(chunk4).getMessage()); assertNotNull("message 2 should be complete", aggregator.addChunk(chunk5).getMessage()); assertEquals(2, counterValueNamed(metricRegistry, COMPLETE_MESSAGES)); assertEquals(5, counterValueNamed(metricRegistry, CHUNK_COUNTER)); assertEquals(0, counterValueNamed(metricRegistry, WAITING_MESSAGES)); assertEquals(0, counterValueNamed(metricRegistry, EXPIRED_CHUNKS)); assertEquals(0, counterValueNamed(metricRegistry, EXPIRED_MESSAGES)); assertEquals(1, counterValueNamed(metricRegistry, DUPLICATE_CHUNKS)); }
// Builds a comparable cache key from a comma-separated element spec. The key always starts
// with the metastore URIs and catalog name; "conf:<key>" elements add config values (sorted
// by key, duplicates rejected) and UGI/USER_NAME elements add the current user (ordered by
// enum ordinal, duplicates rejected), so equivalent specs produce equal keys regardless of
// element order. Unknown elements raise ValidationException.
@VisibleForTesting static Key extractKey(String cacheKeys, Configuration conf) { // generate key elements in a certain order, so that the Key instances are comparable List<Object> elements = Lists.newArrayList(); elements.add(conf.get(HiveConf.ConfVars.METASTORE_URIS.varname, "")); elements.add(conf.get(HiveCatalog.HIVE_CONF_CATALOG, "hive")); if (cacheKeys == null || cacheKeys.isEmpty()) { return Key.of(elements); } Set<KeyElementType> types = Sets.newTreeSet(Comparator.comparingInt(Enum::ordinal)); Map<String, String> confElements = Maps.newTreeMap(); for (String element : cacheKeys.split(",", -1)) { String trimmed = element.trim(); if (trimmed.toLowerCase(Locale.ROOT).startsWith(CONF_ELEMENT_PREFIX)) { String key = trimmed.substring(CONF_ELEMENT_PREFIX.length()); ValidationException.check( !confElements.containsKey(key), "Conf key element %s already specified", key); confElements.put(key, conf.get(key)); } else { KeyElementType type = KeyElementType.valueOf(trimmed.toUpperCase()); switch (type) { case UGI: case USER_NAME: ValidationException.check( !types.contains(type), "%s key element already specified", type.name()); types.add(type); break; default: throw new ValidationException("Unknown key element %s", trimmed); } } } for (KeyElementType type : types) { switch (type) { case UGI: try { elements.add(UserGroupInformation.getCurrentUser()); } catch (IOException e) { throw new UncheckedIOException(e); } break; case USER_NAME: try { elements.add(UserGroupInformation.getCurrentUser().getUserName()); } catch (IOException e) { throw new UncheckedIOException(e); } break; default: throw new RuntimeException("Unexpected key element " + type.name()); } } for (String key : confElements.keySet()) { elements.add(ConfElement.of(key, confElements.get(key))); } return Key.of(elements); }
@Test
public void testCacheKey() throws Exception {
  UserGroupInformation current = UserGroupInformation.getCurrentUser();
  // Two distinct UGI instances for the same user, plus one for another user.
  UserGroupInformation foo1 = UserGroupInformation.createProxyUser("foo", current);
  UserGroupInformation foo2 = UserGroupInformation.createProxyUser("foo", current);
  UserGroupInformation bar = UserGroupInformation.createProxyUser("bar", current);
  HiveConf hiveConf = HIVE_METASTORE_EXTENSION.hiveConf();

  // Element order in the spec string must not affect key equality.
  Key key1 =
      foo1.doAs(
          (PrivilegedAction<Key>)
              () -> CachedClientPool.extractKey("user_name,conf:key1", hiveConf));
  Key key2 =
      foo2.doAs(
          (PrivilegedAction<Key>)
              () -> CachedClientPool.extractKey("conf:key1,user_name", hiveConf));
  assertThat(key2).as("Key elements order shouldn't matter").isEqualTo(key1);

  // Different users must yield different keys.
  key1 = foo1.doAs((PrivilegedAction<Key>) () -> CachedClientPool.extractKey("ugi", hiveConf));
  key2 = bar.doAs((PrivilegedAction<Key>) () -> CachedClientPool.extractKey("ugi", hiveConf));
  assertThat(key2).as("Different users are not supposed to be equivalent").isNotEqualTo(key1);

  // With "ugi", the UGI instance identity (not just the user name) distinguishes keys.
  key2 = foo2.doAs((PrivilegedAction<Key>) () -> CachedClientPool.extractKey("ugi", hiveConf));
  assertThat(key2)
      .as("Different UGI instances are not supposed to be equivalent")
      .isNotEqualTo(key1);

  key1 = CachedClientPool.extractKey("ugi", hiveConf);
  key2 = CachedClientPool.extractKey("ugi,conf:key1", hiveConf);
  assertThat(key2)
      .as("Keys with different number of elements are not supposed to be equivalent")
      .isNotEqualTo(key1);

  Configuration conf1 = new Configuration(hiveConf);
  Configuration conf2 = new Configuration(hiveConf);

  // Same conf key, different values -> different keys.
  conf1.set("key1", "val");
  key1 = CachedClientPool.extractKey("conf:key1", conf1);
  key2 = CachedClientPool.extractKey("conf:key1", conf2);
  assertThat(key2)
      .as("Config with different values are not supposed to be equivalent")
      .isNotEqualTo(key1);

  conf2.set("key1", "val");
  conf2.set("key2", "val");
  // Different conf keys -> different keys, even with equal values.
  key2 = CachedClientPool.extractKey("conf:key2", conf2);
  assertThat(key2)
      .as("Config with different keys are not supposed to be equivalent")
      .isNotEqualTo(key1);

  key1 = CachedClientPool.extractKey("conf:key1,ugi", conf1);
  key2 = CachedClientPool.extractKey("ugi,conf:key1", conf2);
  assertThat(key2).as("Config with same key/value should be equivalent").isEqualTo(key1);

  conf1.set("key2", "val");
  // Whitespace around elements in the spec string is ignored.
  key1 = CachedClientPool.extractKey("conf:key2 ,conf:key1", conf1);
  key2 = CachedClientPool.extractKey("conf:key2,conf:key1", conf2);
  assertThat(key2).as("Config with same key/value should be equivalent").isEqualTo(key1);

  assertThatThrownBy(
          () -> CachedClientPool.extractKey("ugi,ugi", hiveConf),
          "Duplicate key elements should result in an error")
      .isInstanceOf(ValidationException.class)
      .hasMessageContaining("UGI key element already specified");

  // The "conf:" prefix is matched case-insensitively, so CONF:k1 duplicates conf:k1.
  assertThatThrownBy(
          () -> CachedClientPool.extractKey("conf:k1,conf:k2,CONF:k1", hiveConf),
          "Duplicate conf key elements should result in an error")
      .isInstanceOf(ValidationException.class)
      .hasMessageContaining("Conf key element k1 already specified");
}
@Override
public String getPrefix() {
    // Preference prefix: the DAVProtocol package name joined with the
    // upper-cased protocol type name, e.g. "ch.cyberduck.core.dav.DAV".
    final String packageName = DAVProtocol.class.getPackage().getName();
    final String typeName = StringUtils.upperCase(this.getType().name());
    return packageName + "." + typeName;
}
@Test
public void testPrefix() {
    // The preference prefix is the DAV package name followed by the protocol type.
    final DAVProtocol protocol = new DAVProtocol();
    assertEquals("ch.cyberduck.core.dav.DAV", protocol.getPrefix());
}
/**
 * ABI-encodes a single {@link Type} parameter by dispatching on its runtime type.
 *
 * <p>The order of the instanceof checks matters where the handled types are related
 * by inheritance — e.g. {@code DynamicStruct} is tested before the more general
 * {@code DynamicArray} branch so struct encoding takes precedence. Primitive
 * wrapper types are converted to their Solidity equivalent and re-encoded
 * recursively.
 *
 * @param parameter the value to encode
 * @return the hex-encoded representation (no {@code 0x} prefix, per the tests)
 * @throws UnsupportedOperationException if no encoder exists for the type
 */
@SuppressWarnings("unchecked")
public static String encode(Type parameter) {
    if (parameter instanceof NumericType) {
        return encodeNumeric(((NumericType) parameter));
    } else if (parameter instanceof Address) {
        return encodeAddress((Address) parameter);
    } else if (parameter instanceof Bool) {
        return encodeBool((Bool) parameter);
    } else if (parameter instanceof Bytes) {
        return encodeBytes((Bytes) parameter);
    } else if (parameter instanceof DynamicBytes) {
        return encodeDynamicBytes((DynamicBytes) parameter);
    } else if (parameter instanceof Utf8String) {
        return encodeString((Utf8String) parameter);
    } else if (parameter instanceof StaticArray) {
        // Static arrays of dynamic structs need offset-based encoding, unlike
        // arrays of purely static elements.
        if (DynamicStruct.class.isAssignableFrom(
                ((StaticArray) parameter).getComponentType())) {
            return encodeStaticArrayWithDynamicStruct((StaticArray) parameter);
        } else {
            return encodeArrayValues((StaticArray) parameter);
        }
    } else if (parameter instanceof DynamicStruct) {
        return encodeDynamicStruct((DynamicStruct) parameter);
    } else if (parameter instanceof DynamicArray) {
        return encodeDynamicArray((DynamicArray) parameter);
    } else if (parameter instanceof PrimitiveType) {
        // Convert e.g. Short/Int wrappers to the corresponding Solidity type
        // and encode that instead.
        return encode(((PrimitiveType) parameter).toSolidityType());
    } else {
        throw new UnsupportedOperationException(
                "Type cannot be encoded: " + parameter.getClass());
    }
}
@Test
public void testPrimitiveShort() {
    // Encoding of the primitive Short wrapper: a 32-byte two's-complement word.
    // Note: JUnit's assertEquals takes (expected, actual); the original test had
    // the arguments swapped, which produces misleading failure messages.
    assertEquals(
            "0000000000000000000000000000000000000000000000000000000000000000",
            encode(new Short((short) 0)));
    assertEquals(
            "ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff8000",
            encode(new Short(java.lang.Short.MIN_VALUE)));
    assertEquals(
            "0000000000000000000000000000000000000000000000000000000000007fff",
            encode(new Short(java.lang.Short.MAX_VALUE)));
}
/**
 * Zuul error-filter hook: records the throwable stored in the current request
 * context with Sentinel, then lets Zuul's normal error handling continue.
 *
 * @return always {@code null}; the filter result is unused
 */
@Override
public Object run() throws ZuulException {
    RequestContext ctx = RequestContext.getCurrentContext();
    Throwable throwable = ctx.getThrowable();
    if (throwable != null) {
        // BlockExceptions are Sentinel's own flow-control signals and must not
        // be traced as business errors.
        if (!BlockException.isBlockException(throwable)) {
            // Trace exception for each entry and exit entries in order.
            // The entries can be retrieved from the request context.
            SentinelEntryUtils.tryTraceExceptionThenExitFromCurrentContext(throwable);
            RecordLog.info("[SentinelZuulErrorFilter] Trace error cause", throwable.getCause());
        }
    }
    return null;
}
@Test
public void testRun() throws Exception {
    // With no throwable in the request context, the error filter should
    // complete without raising and always return null.
    SentinelZuulErrorFilter filter = new SentinelZuulErrorFilter();
    Assert.assertNull(filter.run());
}
/**
 * Formats the expression with default options; the supplied predicate never
 * matches, so — presumably — no identifiers are escaped (confirm against
 * FormatOptions).
 */
public static String formatExpression(final Expression expression) {
    final FormatOptions defaultOptions = FormatOptions.of(s -> false);
    return formatExpression(expression, defaultOptions);
}
@Test
public void shouldFormatSimpleCaseExpression() {
    // CASE 'operand' WHEN 'foo' THEN 1 END, with a single WHEN clause and no ELSE.
    final WhenClause whenClause = new WhenClause(new StringLiteral("foo"), new LongLiteral(1));
    final SimpleCaseExpression simpleCase = new SimpleCaseExpression(
        new StringLiteral("operand"),
        Collections.singletonList(whenClause),
        Optional.empty());

    final String formatted = ExpressionFormatter.formatExpression(simpleCase);

    assertThat(formatted, equalTo("(CASE 'operand' WHEN 'foo' THEN 1 END)"));
}
/**
 * Returns the future's value if it completed successfully, or {@code null} when
 * the future is absent, cancelled, or completed exceptionally.
 */
static @Nullable <V> V getWhenSuccessful(@Nullable CompletableFuture<V> future) {
    if (future == null) {
        return null;
    }
    try {
        return future.join();
    } catch (CancellationException | CompletionException e) {
        // A cancelled or failed future is treated as "no value".
        return null;
    }
}
@Test(dataProvider = "successful")
public void getWhenSuccessful_success(CompletableFuture<Integer> future) {
    // Every successfully completed future supplied by the provider holds 1.
    Integer value = Async.getWhenSuccessful(future);
    assertThat(value).isEqualTo(1);
}
/**
 * Wraps a plain {@link Predicate} in a {@link DescribedPredicate} carrying the
 * given human-readable description.
 */
@PublicAPI(usage = ACCESS)
public static <T> DescribedPredicate<T> describe(String description, Predicate<? super T> predicate) {
    DescribePredicate<T> described = new DescribePredicate<>(description, predicate);
    // Widen the type parameter for the caller's convenience.
    return described.forSubtype();
}
@Test
public void describe_works() {
    // Wrap a plain predicate with a description and check that both the
    // description and the predicate's behavior survive the wrapping.
    Predicate<Integer> evenCheck = number -> number % 2 == 0;
    assertThat(describe("is even", evenCheck))
        .accepts(8)
        .hasDescription("is even")
        .accepts(4)
        .rejects(5);
}
/**
 * Applies the given function once per partition. For a keyed operator there is
 * one partition per key in {@code keySet}; for a non-keyed operator there is
 * exactly one partition.
 */
@Override
public void applyToAllPartitions(ApplyPartitionFunction<OUT> applyPartitionFunction) throws Exception {
    if (isKeyed) {
        // Run the function once per key, switching the state manager's key
        // context so that state accessed inside the function is scoped to that key.
        for (Object key : keySet) {
            partitionedContext
                    .getStateManager()
                    .executeInKeyContext(
                            () -> {
                                try {
                                    applyPartitionFunction.apply(collector, partitionedContext);
                                } catch (Exception e) {
                                    // The key-context callback cannot propagate checked
                                    // exceptions, so wrap them.
                                    throw new RuntimeException(e);
                                }
                            },
                            key);
        }
    } else {
        // non-keyed operator has only one partition.
        applyPartitionFunction.apply(collector, partitionedContext);
    }
}
@Test
void testApplyToAllPartitions() throws Exception {
    // For a non-keyed context there is exactly one partition, so the function
    // must run once, collect once, and never enter a key context.
    AtomicInteger counter = new AtomicInteger(0);
    List<Integer> collectedData = new ArrayList<>();
    TestingTimestampCollector<Integer> collector =
            TestingTimestampCollector.<Integer>builder()
                    .setCollectConsumer(collectedData::add)
                    .build();
    // Completed only if the key-context switch callback fires; it must stay
    // incomplete for the non-keyed case.
    CompletableFuture<Void> cf = new CompletableFuture<>();
    StreamingRuntimeContext operatorRuntimeContext =
            ContextTestUtils.createStreamingRuntimeContext();
    DefaultRuntimeContext runtimeContext =
            new DefaultRuntimeContext(
                    operatorRuntimeContext.getJobInfo().getJobName(),
                    operatorRuntimeContext.getJobType(),
                    1,
                    2,
                    "mock-task",
                    operatorRuntimeContext.getMetricGroup());
    // The trailing `false` marks the context as non-keyed.
    DefaultNonPartitionedContext<Integer> nonPartitionedContext =
            new DefaultNonPartitionedContext<>(
                    runtimeContext,
                    new DefaultPartitionedContext(
                            runtimeContext,
                            Optional::empty,
                            (key) -> cf.complete(null),
                            UnsupportedProcessingTimeManager.INSTANCE,
                            ContextTestUtils.createStreamingRuntimeContext(),
                            new MockOperatorStateStore()),
                    collector,
                    false,
                    null);
    nonPartitionedContext.applyToAllPartitions(
            (out, ctx) -> {
                counter.incrementAndGet();
                out.collect(10);
            });
    // One partition -> one invocation, no key-context switch, one collected value.
    assertThat(counter.get()).isEqualTo(1);
    assertThat(cf).isNotCompleted();
    assertThat(collectedData).containsExactly(10);
}
/**
 * Matches every registered vulnerability-detector plugin against the services in
 * the given reconnaissance report and returns the matching results.
 */
public ImmutableList<PluginMatchingResult<VulnDetector>> getVulnDetectors(
    ReconnaissanceReport reconnaissanceReport) {
  return tsunamiPlugins.entrySet().stream()
      // Only consider plugins registered as vulnerability detectors.
      .filter(entry -> isVulnDetector(entry.getKey()))
      .map(entry -> matchAllVulnDetectors(entry.getKey(), entry.getValue(), reconnaissanceReport))
      // Streams::stream flattens the per-plugin results; presumably
      // matchAllVulnDetectors returns an Optional-like container whose empty
      // results are dropped here — confirm against its signature.
      .flatMap(Streams::stream)
      .collect(toImmutableList());
}
@Test
public void getVulnDetectors_whenSoftwareFilterHasMatchingService_returnsMatchedService() {
    // Three services: one with WordPress software, one with Jenkins software,
    // and one with no software information at all.
    NetworkService wordPressService =
        NetworkService.newBuilder()
            .setNetworkEndpoint(NetworkEndpointUtils.forIpAndPort("1.1.1.1", 80))
            .setTransportProtocol(TransportProtocol.TCP)
            .setServiceName("http")
            .setSoftware(Software.newBuilder().setName("WordPress"))
            .build();
    NetworkService jenkinsService =
        NetworkService.newBuilder()
            .setNetworkEndpoint(NetworkEndpointUtils.forIpAndPort("1.1.1.1", 443))
            .setTransportProtocol(TransportProtocol.TCP)
            .setServiceName("https")
            .setSoftware(Software.newBuilder().setName("Jenkins"))
            .build();
    NetworkService noNameService =
        NetworkService.newBuilder()
            .setNetworkEndpoint(NetworkEndpointUtils.forIpAndPort("1.1.1.1", 12345))
            .setTransportProtocol(TransportProtocol.TCP)
            .build();
    ReconnaissanceReport fakeReconnaissanceReport =
        ReconnaissanceReport.newBuilder()
            .setTargetInfo(TargetInfo.getDefaultInstance())
            .addNetworkServices(wordPressService)
            .addNetworkServices(jenkinsService)
            .addNetworkServices(noNameService)
            .build();
    PluginManager pluginManager =
        Guice.createInjector(
                new FakePortScannerBootstrapModule(),
                new FakeServiceFingerprinterBootstrapModule(),
                FakeSoftwareFilteringDetector.getModule())
            .getInstance(PluginManager.class);

    ImmutableList<PluginMatchingResult<VulnDetector>> vulnDetectors =
        pluginManager.getVulnDetectors(fakeReconnaissanceReport);

    // Only the software-filtering fake detector matches, and its software filter
    // excludes the WordPress service while keeping the Jenkins service and the
    // service with no software name.
    assertThat(vulnDetectors).hasSize(1);
    assertThat(vulnDetectors.get(0).tsunamiPlugin().getClass())
        .isEqualTo(FakeSoftwareFilteringDetector.class);
    assertThat(vulnDetectors.get(0).matchedServices())
        .containsExactly(jenkinsService, noNameService);
}
/**
 * Builds the scheduler's increment-allocation {@link Resource} from configuration.
 *
 * <p>Per-resource-type "increment-allocation" properties in "value [unit]" form
 * are parsed for every known resource type. For memory and vcores, such a
 * property overrides the legacy yarn.scheduler.increment-allocation-mb/-vcores
 * settings (logging a warning when both are set); otherwise the legacy setting
 * (or its default) is used.
 *
 * @throws IllegalArgumentException if a property value is not in "value [unit]" format
 */
public Resource getIncrementAllocation() {
  Long memory = null;
  Integer vCores = null;
  Map<String, Long> others = new HashMap<>();
  ResourceInformation[] resourceTypes = ResourceUtils.getResourceTypesArray();
  // Collect increment-allocation properties for all resource types, converting
  // each "value [unit]" string into the resource's default unit.
  for (int i=0; i < resourceTypes.length; ++i) {
    String name = resourceTypes[i].getName();
    String propertyKey = getAllocationIncrementPropKey(name);
    String propValue = get(propertyKey);
    if (propValue != null) {
      Matcher matcher = RESOURCE_REQUEST_VALUE_PATTERN.matcher(propValue);
      if (matcher.matches()) {
        long value = Long.parseLong(matcher.group(1));
        String unit = matcher.group(2);
        long valueInDefaultUnits = getValueInDefaultUnits(value, unit, name);
        others.put(name, valueInDefaultUnits);
      } else {
        throw new IllegalArgumentException("Property " + propertyKey +
            " is not in \"value [unit]\" format: " + propValue);
      }
    }
  }
  // Memory: the resource-type style property wins over the legacy
  // increment-allocation-mb property; warn when both are configured.
  if (others.containsKey(ResourceInformation.MEMORY_MB.getName())) {
    memory = others.get(ResourceInformation.MEMORY_MB.getName());
    if (get(RM_SCHEDULER_INCREMENT_ALLOCATION_MB) != null) {
      String overridingKey = getAllocationIncrementPropKey(
          ResourceInformation.MEMORY_MB.getName());
      LOG.warn("Configuration " + overridingKey + "=" + get(overridingKey)
          + " is overriding the " + RM_SCHEDULER_INCREMENT_ALLOCATION_MB
          + "=" + get(RM_SCHEDULER_INCREMENT_ALLOCATION_MB) + " property");
    }
    // Remove so memory is not duplicated in the "others" map of the Resource.
    others.remove(ResourceInformation.MEMORY_MB.getName());
  } else {
    memory = getLong(
        RM_SCHEDULER_INCREMENT_ALLOCATION_MB,
        DEFAULT_RM_SCHEDULER_INCREMENT_ALLOCATION_MB);
  }
  // vCores: same override logic as memory.
  if (others.containsKey(ResourceInformation.VCORES.getName())) {
    vCores = others.get(ResourceInformation.VCORES.getName()).intValue();
    if (get(RM_SCHEDULER_INCREMENT_ALLOCATION_VCORES) != null) {
      String overridingKey = getAllocationIncrementPropKey(
          ResourceInformation.VCORES.getName());
      LOG.warn("Configuration " + overridingKey + "=" + get(overridingKey)
          + " is overriding the " + RM_SCHEDULER_INCREMENT_ALLOCATION_VCORES
          + "=" + get(RM_SCHEDULER_INCREMENT_ALLOCATION_VCORES) + " property");
    }
    others.remove(ResourceInformation.VCORES.getName());
  } else {
    vCores = getInt(
        RM_SCHEDULER_INCREMENT_ALLOCATION_VCORES,
        DEFAULT_RM_SCHEDULER_INCREMENT_ALLOCATION_VCORES);
  }
  return Resource.newInstance(memory, vCores, others);
}
@Test
public void testAllocationIncrementVCoreWithUnit() throws Exception {
    // Configure the vcores increment via the resource-type style property with a
    // unit suffix: "1k" = 1000 vcores.
    Configuration conf = new Configuration();
    conf.set(YarnConfiguration.RESOURCE_TYPES + "." +
        ResourceInformation.VCORES.getName() +
        FairSchedulerConfiguration.INCREMENT_ALLOCATION, "1k");
    FairSchedulerConfiguration fsc = new FairSchedulerConfiguration(conf);
    Resource min = Resources.createResource(0L, 0);
    Resource max = Resources.createResource(Long.MAX_VALUE, Integer.MAX_VALUE);
    Resource increment = fsc.getIncrementAllocation();
    DominantResourceCalculator resourceCalculator = new DominantResourceCalculator();
    // 999 rounds up to the 1000-vcore increment.
    assertEquals(1000, resourceCalculator.normalize(
        Resources.createResource(0L, 999), min, max, increment)
        .getVirtualCores());
    // Exact multiples of the increment are unchanged.
    assertEquals(1000, resourceCalculator.normalize(
        Resources.createResource(0L, 1000), min, max, increment)
        .getVirtualCores());
    // Just above a multiple rounds up to the next one.
    assertEquals(2000, resourceCalculator.normalize(
        Resources.createResource(0L, 1001), min, max, increment)
        .getVirtualCores());
}
/**
 * Replaces the first occurrence of {@code search} in {@code source} with
 * {@code replace}. Unlike {@link String#replaceFirst(String, String)}, the
 * search string is treated as literal text, not a regular expression.
 *
 * @param source  string to search in
 * @param search  literal substring to find
 * @param replace replacement for the first match
 * @return the resulting string, or {@code source} unchanged if no match
 */
public static String replaceFirst(String source, String search, String replace) {
    final int start = source.indexOf(search);
    if (start == -1) {
        return source;
    }
    // The original special-cased start == 0, but substring(0, 0) is the empty
    // string, so the general form covers that case identically.
    return source.substring(0, start) + replace + source.substring(start + search.length());
}
@Test
public void testReplace5() {
    // The search string does not occur in the source, so the source must be
    // returned unchanged.
    final String result = JOrphanUtils.replaceFirst("abcdef", "alt=\"\" ", "");
    assertEquals("abcdef", result);
}
/**
 * Dispatches the attribute comparison to the comparator registered for the path
 * type; anything that is neither a directory nor a symbolic link is compared as
 * a regular file.
 */
@Override
public Comparison compare(final Path.Type type, final PathAttributes local, final PathAttributes remote) {
    switch(type) {
        case directory:
            this.trace(local, remote, directories);
            return directories.compare(type, local, remote);
        case symboliclink:
            return symlinks.compare(type, local, remote);
        default:
            this.trace(local, remote, files);
            return files.compare(type, local, remote);
    }
}

/**
 * Logs which comparator is about to be consulted for the given attributes.
 */
private void trace(final PathAttributes local, final PathAttributes remote, final Object comparator) {
    if(log.isDebugEnabled()) {
        log.debug(String.format("Compare local attributes %s with remote %s using %s", local, remote, comparator));
    }
}
@Test
public void testCompareFile() {
    final DefaultComparisonService c = new DefaultComparisonService(new TestProtocol());
    // Same ETag -> equal.
    assertEquals(Comparison.equal, c.compare(Path.Type.file, new PathAttributes().withETag("1"), new PathAttributes().withETag("1")));
    // Different ETag and no other attributes to fall back on -> unknown.
    assertEquals(Comparison.unknown, c.compare(Path.Type.file, new PathAttributes().withETag("1"), new PathAttributes().withETag("2")));
    // Different ETag but equal size -> treated as equal.
    assertEquals(Comparison.equal, c.compare(Path.Type.file, new PathAttributes().withETag("1").withSize(1000L), new PathAttributes().withETag("2").withSize(1000L)));
    // Equal size and modification date -> equal despite differing ETags.
    assertEquals(Comparison.equal, c.compare(Path.Type.file, new PathAttributes().withETag("1").withSize(1000L).withModificationDate(1680879106939L), new PathAttributes().withETag("2").withSize(1000L).withModificationDate(1680879106939L)));
    // Newer local modification date -> local side wins.
    assertEquals(Comparison.local, c.compare(Path.Type.file, new PathAttributes().withETag("1").withSize(1000L).withModificationDate(1680879107939L), new PathAttributes().withETag("2").withSize(1000L).withModificationDate(1680879106939L)));
}
/**
 * Returns a copy of the given context: a distinct instance that compares equal
 * to the input field-for-field. NOTE(review): "shallow" presumably means extra
 * state is shared rather than deep-copied — confirm in the implementation.
 */
public abstract TraceContext shallowCopy(TraceContext context);
@Test
void shallowCopy() {
    // Build a context with non-default fields, including extra state.
    TraceContext context = TraceContext.newBuilder().traceId(1).spanId(2).debug(true)
        .addExtra(1L).build();

    // The copy must be a different instance yet recursively equal to the original.
    assertThat(InternalPropagation.instance.shallowCopy(context))
        .isNotSameAs(context)
        .usingRecursiveComparison()
        .isEqualTo(context);
}
public static NameNodeConnector getNameNodeConnector(Configuration conf) throws InterruptedException { final Collection<URI> namenodes = DFSUtil.getInternalNsRpcUris(conf); final Path externalSPSPathId = HdfsServerConstants.MOVER_ID_PATH; String serverName = ExternalStoragePolicySatisfier.class.getSimpleName(); while (true) { try { final List<NameNodeConnector> nncs = NameNodeConnector .newNameNodeConnectors(namenodes, serverName, externalSPSPathId, conf, NameNodeConnector.DEFAULT_MAX_IDLE_ITERATIONS); return nncs.get(0); } catch (IOException e) { LOG.warn("Failed to connect with namenode", e); if (e.getMessage().equals("Another " + serverName + " is running.")) { ExitUtil.terminate(-1, "Exit immediately because another " + serverName + " is running"); } Thread.sleep(3000); // retry the connection after few secs } } }
// Verifies that a second SPS process exits immediately (instead of retrying
// forever) when another ExternalStoragePolicySatisfier already owns the mover path.
@Test(timeout = 300000)
public void testInfiniteStartWhenAnotherSPSRunning() throws Exception {
  try {
    // Create cluster and create mover path when get NameNodeConnector.
    createCluster(true);
    // Disable system exit for assert.
    ExitUtil.disableSystemExit();
    // Get NameNodeConnector one more time to simulate starting other sps process.
    // It should exit immediately when another sps is running.
    LambdaTestUtils.intercept(ExitUtil.ExitException.class,
        "Exit immediately because another ExternalStoragePolicySatisfier is running",
        () -> ExternalStoragePolicySatisfier.getNameNodeConnector(config));
  } finally {
    // Reset first exit exception to avoid AssertionError in MiniDFSCluster#shutdown.
    // This has no effect on functionality.
    ExitUtil.resetFirstExitException();
    shutdownCluster();
  }
}