focal_method
stringlengths
13
60.9k
test_case
stringlengths
25
109k
@Override protected Object getContent(ScmGetRequest request) { GithubScm.validateUserHasPushPermission(request.getApiUrl(), request.getCredentials().getPassword().getPlainText(), request.getOwner(), request.getRepo()); String url = String.format("%s/repos/%s/%s/contents/%s", request.getApiUrl(), request.getOwner(), request.getRepo(), request.getPath()); if(request.getBranch() != null){ //if branch is present fetch this file from branch url += "?ref="+request.getBranch(); } try { Map ghContent = HttpRequest.get(url) .withAuthorizationToken(request.getCredentials().getPassword().getPlainText()) .to(Map.class); if(ghContent == null){ throw new ServiceException.UnexpectedErrorException("Failed to load file: "+request.getPath()); } String base64Data = (String)ghContent.get("content"); // JENKINS-47887 - this content contains \n which breaks IE11 base64Data = base64Data == null ? null : base64Data.replace("\n", ""); return new GithubFile(new GitContent.Builder() .sha((String)ghContent.get("sha")) .name((String)ghContent.get("name")) .repo(request.getRepo()) .owner(request.getOwner()) .path(request.getPath()) .base64Data(base64Data) .build()); } catch (IOException e) { throw new ServiceException.UnexpectedErrorException(String.format("Failed to load file %s: %s", request.getPath(),e.getMessage()), e); } }
/** Org-folder content retrieval must return the expected Jenkinsfile metadata. */
@Test
public void getContentForOrgFolder() throws UnirestException {
    final String credentialId = createGithubCredential(user);
    final StaplerRequest staplerRequest = mockStapler();
    final MultiBranchProject mbp = mockMbp(credentialId, user, GithubScm.DOMAIN_NAME);

    final GithubFile file = (GithubFile) new GithubScmContentProvider().getContent(staplerRequest, mbp);

    assertEquals("Jenkinsfile", file.getContent().getName());
    assertEquals("e23b8ef5c2c4244889bf94db6c05cc08ea138aef", file.getContent().getSha());
    assertEquals("PR-demo", file.getContent().getRepo());
    assertEquals("cloudbeers", file.getContent().getOwner());
}
@Override public CompletableFuture<HeartbeatResponseData> heartbeat( RequestContext context, HeartbeatRequestData request ) { if (!isActive.get()) { return CompletableFuture.completedFuture(new HeartbeatResponseData() .setErrorCode(Errors.COORDINATOR_NOT_AVAILABLE.code()) ); } if (!isGroupIdNotEmpty(request.groupId())) { return CompletableFuture.completedFuture(new HeartbeatResponseData() .setErrorCode(Errors.INVALID_GROUP_ID.code()) ); } return runtime.scheduleWriteOperation( "classic-group-heartbeat", topicPartitionFor(request.groupId()), Duration.ofMillis(config.offsetCommitTimeoutMs()), coordinator -> coordinator.classicGroupHeartbeat(context, request) ).exceptionally(exception -> handleOperationException( "classic-group-heartbeat", request, exception, (error, __) -> { if (error == Errors.COORDINATOR_LOAD_IN_PROGRESS) { // The group is still loading, so blindly respond return new HeartbeatResponseData() .setErrorCode(Errors.NONE.code()); } else { return new HeartbeatResponseData() .setErrorCode(error.code()); } } )); }
/** heartbeat() should forward the scheduled write operation's response. */
@Test
public void testHeartbeat() throws Exception {
    final CoordinatorRuntime<GroupCoordinatorShard, CoordinatorRecord> runtime = mockRuntime();
    final GroupCoordinatorService service = new GroupCoordinatorService(
        new LogContext(),
        createConfig(),
        runtime,
        new GroupCoordinatorMetrics(),
        createConfigManager());
    final HeartbeatRequestData request = new HeartbeatRequestData().setGroupId("foo");

    service.startup(() -> 1);

    when(runtime.scheduleWriteOperation(
        ArgumentMatchers.eq("classic-group-heartbeat"),
        ArgumentMatchers.eq(new TopicPartition("__consumer_offsets", 0)),
        ArgumentMatchers.eq(Duration.ofMillis(5000)),
        ArgumentMatchers.any()
    )).thenReturn(CompletableFuture.completedFuture(new HeartbeatResponseData()));

    final CompletableFuture<HeartbeatResponseData> future =
        service.heartbeat(requestContext(ApiKeys.HEARTBEAT), request);

    assertTrue(future.isDone());
    assertEquals(new HeartbeatResponseData(), future.get());
}
/**
 * Reports whether exactly-once semantics can be provided for the given
 * connector configuration: supported only when the consumer is set up to
 * read committed records.
 */
@Override
public ExactlyOnceSupport exactlyOnceSupport(Map<String, String> props) {
    if (consumerUsesReadCommitted(props)) {
        return ExactlyOnceSupport.SUPPORTED;
    }
    return ExactlyOnceSupport.UNSUPPORTED;
}
@Test public void testExactlyOnceSupport() { String readCommitted = "read_committed"; String readUncommitted = "read_uncommitted"; String readGarbage = "read_garbage"; // Connector is configured correctly, but exactly-once can't be supported assertExactlyOnceSupport(null, null, false); assertExactlyOnceSupport(readUncommitted, null, false); assertExactlyOnceSupport(null, readUncommitted, false); assertExactlyOnceSupport(readUncommitted, readUncommitted, false); // Connector is configured correctly, and exactly-once can be supported assertExactlyOnceSupport(readCommitted, null, true); assertExactlyOnceSupport(null, readCommitted, true); assertExactlyOnceSupport(readUncommitted, readCommitted, true); assertExactlyOnceSupport(readCommitted, readCommitted, true); // Connector is configured incorrectly, but is able to react gracefully assertExactlyOnceSupport(readGarbage, null, false); assertExactlyOnceSupport(null, readGarbage, false); assertExactlyOnceSupport(readGarbage, readGarbage, false); assertExactlyOnceSupport(readCommitted, readGarbage, false); assertExactlyOnceSupport(readUncommitted, readGarbage, false); assertExactlyOnceSupport(readGarbage, readUncommitted, false); assertExactlyOnceSupport(readGarbage, readCommitted, true); }
/**
 * Copies every registered (address, callId) pair from this registry into
 * the supplied {@code liveOperations} accumulator.
 */
public void populate(LiveOperations liveOperations) {
    // assumes inner-map keys are the call ids — TODO confirm against registration path
    this.liveOperations.forEach((address, callIds) ->
            callIds.keySet().forEach(callId -> liveOperations.add(address, callId)));
}
@Test public void testPopulate() throws UnknownHostException { r.register(createOperation("1.2.3.4", 1234, 2223L)); r.register(createOperation("1.2.3.4", 1234, 2222L)); r.register(createOperation("1.2.3.3", 1234, 2222L)); CallsPerMember liveOperations = new CallsPerMember(new Address("1.2.3.3", 1234)); r.populate(liveOperations); Set<Address> addresses = liveOperations.addresses(); assertEquals(2, addresses.size()); assertTrue(addresses.contains(new Address("1.2.3.4", 1234))); assertTrue(addresses.contains(new Address("1.2.3.3", 1234))); long[] runningOperations = liveOperations.toOpControl(new Address("1.2.3.4", 1234)).runningOperations(); assertTrue(Arrays.equals(new long[]{2222, 2223}, runningOperations) || Arrays.equals(new long[]{2223, 2222}, runningOperations)); runningOperations = liveOperations.toOpControl(new Address("1.2.3.3", 1234)).runningOperations(); assertArrayEquals(new long[]{2222}, runningOperations); //callIds. }
/** Returns a new, empty {@code Position} backed by a fresh concurrent map. */
public static Position emptyPosition() { return new Position(new ConcurrentHashMap<>()); }
/** A position must never compare equal to null. */
@Test
public void shouldNotMatchNull() {
    final Position position = Position.emptyPosition();
    assertNotEquals(position, null);
}
@Udf public int instr(final String str, final String substring) { return instr(str, substring, 1); }
/** Start positions beyond either end of the string yield 0 (not found). */
@Test
public void shouldTruncateOutOfBoundIndexes() {
    assertThat(udf.instr("CORPORATE FLOOR", "OR", 100), is(0));
    assertThat(udf.instr("CORPORATE FLOOR", "OR", -100), is(0));
}
/** REST endpoint alias for {@link #getClusterInfo()}, served as JSON or XML. */
@GET
@Produces({ MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8,
    MediaType.APPLICATION_XML + "; " + JettyUtils.UTF_8 })
@Override
public ClusterInfo get() {
    return getClusterInfo();
}
/** GET /ws/v1/cluster/info with no Accept header must default to JSON. */
@Test
public void testInfoDefault() throws JSONException, Exception {
    final WebResource r = resource();
    final ClientResponse response = r.path("ws").path("v1").path("cluster")
        .path("info").get(ClientResponse.class);
    assertEquals(MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8,
        response.getType().toString());
    final JSONObject json = response.getEntity(JSONObject.class);
    verifyClusterInfo(json);
}
@Override public String begin(String applicationId, String transactionServiceGroup, String name, int timeout) throws TransactionException { GlobalSession session = GlobalSession.createGlobalSession(applicationId, transactionServiceGroup, name, timeout); MDC.put(RootContext.MDC_KEY_XID, session.getXid()); session.begin(); // transaction start event MetricsPublisher.postSessionDoingEvent(session, false); return session.getXid(); }
/** begin() must create a global session retrievable by its XID. */
@Test
public void beginTest() throws Exception {
    final String xid = core.begin(applicationId, txServiceGroup, txName, timeout);
    globalSession = SessionHolder.findGlobalSession(xid);
    Assertions.assertNotNull(globalSession);
}
@Subscribe public void onChatMessage(ChatMessage event) { if (event.getType() != ChatMessageType.SPAM && event.getType() != ChatMessageType.GAMEMESSAGE && event.getType() != ChatMessageType.MESBOX) { return; } final var msg = event.getMessage(); if (WOOD_CUT_PATTERN.matcher(msg).matches()) { if (session == null) { session = new WoodcuttingSession(); } session.setLastChopping(); session.incrementLogsCut(); } var matcher = ANIMA_BARK_PATTERN.matcher(msg); if (matcher.matches()) { if (session == null) { session = new WoodcuttingSession(); } session.setLastChopping(); int num = Integer.parseInt(matcher.group(1)); session.incrementBark(num); } if (msg.contains("A bird's nest falls out of the tree")) { if (clueTierSpawned == null || clueTierSpawned.ordinal() >= config.clueNestNotifyTier().ordinal()) { notifier.notify(config.showNestNotification(), "A bird nest has spawned!"); } // Clear the clue tier that has previously spawned clueTierSpawned = null; } if (msg.startsWith("The sapling seems to love")) { int ingredientNum = msg.contains("first") ? 1 : (msg.contains("second") ? 2 : (msg.contains("third") ? 3 : -1)); if (ingredientNum == -1) { log.debug("unable to find ingredient index from message: {}", msg); return; } GameObject ingredientObj = saplingIngredients.stream() .filter(obj -> msg.contains(client.getObjectDefinition(obj.getId()).getName().toLowerCase())) .findAny() .orElse(null); if (ingredientObj == null) { log.debug("unable to find ingredient from message: {}", msg); return; } saplingOrder[ingredientNum - 1] = ingredientObj; } if (msg.equals("There are no open, unpollinated flowers on this bush yet.") || msg.equals("The flowers on this bush have not yet opened enough to harvest pollen.") || msg.equals("<col=06600c>The bush is already fruiting and won't benefit from <col=06600c>any more pollen.</col>")) { if (activeFlowers.contains(lastInteractFlower)) { log.debug("Flowers reset"); activeFlowers.clear(); } } }
/** An anima-infused bark message must start a session and record the amount. */
@Test
public void testAnimaInfusedBark() {
    final var chatMessage = new ChatMessage(null, ChatMessageType.GAMEMESSAGE, "",
        "You've been awarded <col=0000b2>88 Anima-infused bark</col>.", "", 0);
    woodcuttingPlugin.onChatMessage(chatMessage);
    assertNotNull(woodcuttingPlugin.getSession());
    assertEquals(88, woodcuttingPlugin.getSession().getBark());
}
/** Extracts the effective predicate implied by the given plan node. */
public RowExpression extract(PlanNode node) {
    final Visitor visitor = new Visitor(domainTranslator, functionAndTypeManager);
    return node.accept(visitor, null);
}
/**
 * Inner join: predicates from both filtered inputs, the equi-join criteria
 * and the extra join filter must all be conjoined into the effective
 * predicate; the predicate over GV is dropped because GV is not an output
 * symbol of the join.
 */
@Test
public void testInnerJoin() {
    ImmutableList.Builder<EquiJoinClause> criteriaBuilder = ImmutableList.builder();
    criteriaBuilder.add(new EquiJoinClause(AV, DV));
    criteriaBuilder.add(new EquiJoinClause(BV, EV));
    List<EquiJoinClause> criteria = criteriaBuilder.build();
    Map<VariableReferenceExpression, ColumnHandle> leftAssignments =
        Maps.filterKeys(scanAssignments, Predicates.in(ImmutableList.of(AV, BV, CV)));
    TableScanNode leftScan = tableScanNode(leftAssignments);
    Map<VariableReferenceExpression, ColumnHandle> rightAssignments =
        Maps.filterKeys(scanAssignments, Predicates.in(ImmutableList.of(DV, EV, FV)));
    TableScanNode rightScan = tableScanNode(rightAssignments);
    FilterNode left = filter(leftScan, and(
        lessThan(BV, AV),
        lessThan(CV, bigintLiteral(10)),
        equals(GV, bigintLiteral(10))));
    FilterNode right = filter(rightScan, and(
        equals(DV, EV),
        lessThan(FV, bigintLiteral(100))));
    PlanNode node = new JoinNode(
        Optional.empty(),
        newId(),
        JoinType.INNER,
        left,
        right,
        criteria,
        ImmutableList.<VariableReferenceExpression>builder()
            .addAll(left.getOutputVariables())
            .addAll(right.getOutputVariables())
            .build(),
        Optional.of(lessThanOrEqual(BV, EV)),
        Optional.empty(),
        Optional.empty(),
        Optional.empty(),
        ImmutableMap.of());
    RowExpression effectivePredicate = effectivePredicateExtractor.extract(node);
    // All predicates having output symbol should be carried through
    assertEquals(normalizeConjuncts(effectivePredicate),
        normalizeConjuncts(lessThan(BV, AV),
            lessThan(CV, bigintLiteral(10)),
            equals(DV, EV),
            lessThan(FV, bigintLiteral(100)),
            equals(AV, DV),
            equals(BV, EV),
            lessThanOrEqual(BV, EV)));
}
public Set<String> getOnValues() { // we need a set as the field can appear multiple times Set<String> onValues = new LinkedHashSet<>(); List<String> exportValues = getExportValues(); if (!exportValues.isEmpty()) { onValues.addAll(exportValues); return onValues; } List<PDAnnotationWidget> widgets = this.getWidgets(); for (PDAnnotationWidget widget : widgets) { onValues.add(getOnValueForWidget(widget)); } return onValues; }
/** Both radio-button export values must be reported as "on" values. */
@Test
void retrieveAcrobatRadioButtonProperties() {
    final PDRadioButton radioButton = (PDRadioButton) acrobatAcroForm.getField("RadioButtonGroup");
    assertNotNull(radioButton);
    final Set<String> onValues = radioButton.getOnValues();
    assertEquals(2, onValues.size());
    assertTrue(onValues.contains("RadioButton01"));
    assertTrue(onValues.contains("RadioButton02"));
}
public static JsonObject build(JsonObject lockJson, JsonObject packageJson, MultiValuedMap<String, String> dependencyMap, boolean skipDevDependencies) { final JsonObjectBuilder payloadBuilder = Json.createObjectBuilder(); addProjectInfo(packageJson, payloadBuilder); // NPM Audit expects 'requires' to be an object containing key/value // pairs corresponding to the module name (key) and version (value). final JsonObjectBuilder requiresBuilder = Json.createObjectBuilder(); if (packageJson.containsKey("dependencies")) { packageJson.getJsonObject("dependencies").entrySet() .stream() .collect(Collectors.toMap( Map.Entry::getKey, Map.Entry::getValue, (oldValue, newValue) -> newValue, TreeMap::new)) .forEach((key, value) -> { if (NodePackageAnalyzer.shouldSkipDependency(key, ((JsonString) value).getString())) { return; } requiresBuilder.add(key, value); dependencyMap.put(key, value.toString()); }); } if (!skipDevDependencies && packageJson.containsKey("devDependencies")) { packageJson.getJsonObject("devDependencies").entrySet() .stream() .collect(Collectors.toMap( Map.Entry::getKey, Map.Entry::getValue, (oldValue, newValue) -> newValue, TreeMap::new)) .forEach((key, value) -> { if (NodePackageAnalyzer.shouldSkipDependency(key, ((JsonString) value).getString())) { return; } requiresBuilder.add(key, value); dependencyMap.put(key, value.toString()); }); } payloadBuilder.add("requires", requiresBuilder.build()); final JsonObjectBuilder dependenciesBuilder = Json.createObjectBuilder(); final int lockJsonVersion = lockJson.containsKey("lockfileVersion") ? 
lockJson.getInt("lockfileVersion") : 1; JsonObject dependencies = lockJson.getJsonObject("dependencies"); if (lockJsonVersion >= 2 && dependencies == null) { dependencies = lockJson.getJsonObject("packages"); } if (dependencies != null) { dependencies.forEach((k, value) -> { String key = k; final int indexOfNodeModule = key.lastIndexOf(NodePackageAnalyzer.NODE_MODULES_DIRNAME + "/"); if (indexOfNodeModule >= 0) { key = key.substring(indexOfNodeModule + NodePackageAnalyzer.NODE_MODULES_DIRNAME.length() + 1); } JsonObject dep = ((JsonObject) value); //After Version 3, dependencies can't be taken directly from package-lock.json if (lockJsonVersion > 2 && dep.containsKey("dependencies") && dep.get("dependencies") instanceof JsonObject) { JsonObjectBuilder depBuilder = Json.createObjectBuilder(dep); depBuilder.remove("dependencies"); depBuilder.add("requires", dep.get("dependencies")); dep = depBuilder.build(); } final String version = dep.getString("version", ""); final boolean isDev = dep.getBoolean("dev", false); if (skipDevDependencies && isDev) { return; } if (NodePackageAnalyzer.shouldSkipDependency(key, version)) { return; } dependencyMap.put(key, version); dependenciesBuilder.add(key, buildDependencies(dep, dependencyMap)); }); } payloadBuilder.add("dependencies", dependenciesBuilder.build()); addConstantElements(payloadBuilder); return payloadBuilder.build(); }
/**
 * The sanitizer must keep name/version, move dependencies into the
 * "requires" object (using a wildcard version for path-style keys), and
 * drop unrelated top-level fields such as lockfileVersion and random.
 * NOTE(review): this exercises the two-argument build(packageJson, map,
 * skipDev) overload, not the lock-file variant above — verify against
 * NpmPayloadBuilder's declared overloads.
 */
@Test
public void testSanitizer() {
    JsonObjectBuilder builder = Json.createObjectBuilder()
        .add("name", "my app")
        .add("version", "1.0.0")
        .add("random", "random")
        .add("lockfileVersion", 1)
        .add("requires", true)
        .add("dependencies", Json.createObjectBuilder()
            .add("abbrev", Json.createObjectBuilder()
                .add("version", "1.1.1")
                .add("resolved", "https://registry.npmjs.org/abbrev/-/abbrev-1.1.1.tgz")
                .add("integrity", "sha512-nne9/IiQ/hzIhY6pdDnbBtz7DjPTKrY00P/zvPSm5pOFkl6xuGrGnXn/VtTNNfNtAfZ9/1RtehkszU9qcTii0Q==")
                .add("dev", true)
            )
            .add("node_modules/jest-resolve", Json.createObjectBuilder()
                .add("dev", true)
                .add("optional", true)
                .add("peer", true))
        );
    JsonObject packageJson = builder.build();
    final MultiValuedMap<String, String> dependencyMap = new HashSetValuedHashMap<>();
    JsonObject sanitized = NpmPayloadBuilder.build(packageJson, dependencyMap, false);
    Assert.assertTrue(sanitized.containsKey("name"));
    Assert.assertTrue(sanitized.containsKey("version"));
    Assert.assertTrue(sanitized.containsKey("dependencies"));
    Assert.assertTrue(sanitized.containsKey("requires"));
    JsonObject dependencies = sanitized.getJsonObject("dependencies");
    Assert.assertTrue(dependencies.containsKey("node_modules/jest-resolve"));
    JsonObject requires = sanitized.getJsonObject("requires");
    Assert.assertTrue(requires.containsKey("abbrev"));
    Assert.assertEquals("^1.1.1", requires.getString("abbrev"));
    Assert.assertEquals("*", requires.getString("node_modules/jest-resolve"));
    Assert.assertFalse(sanitized.containsKey("lockfileVersion"));
    Assert.assertFalse(sanitized.containsKey("random"));
}
/** Returns the process-wide singleton coordinator instance. */
public static FileRewriteCoordinator get() {
    return INSTANCE;
}
/**
 * End-to-end bin-pack rewrite: four single-file appends are compacted into
 * two files via the task-set manager and rewrite coordinator, then the
 * snapshot summary and row count are verified.
 */
@TestTemplate
public void testBinPackRewrite() throws NoSuchTableException, IOException {
    sql("CREATE TABLE %s (id INT, data STRING) USING iceberg", tableName);
    Dataset<Row> df = newDF(1000);
    df.coalesce(1).writeTo(tableName).append();
    df.coalesce(1).writeTo(tableName).append();
    df.coalesce(1).writeTo(tableName).append();
    df.coalesce(1).writeTo(tableName).append();
    Table table = validationCatalog.loadTable(tableIdent);
    assertThat(table.snapshots()).as("Should produce 4 snapshots").hasSize(4);
    Dataset<Row> fileDF = spark.read().format("iceberg").load(tableName(tableIdent.name() + ".files"));
    List<Long> fileSizes = fileDF.select("file_size_in_bytes").as(Encoders.LONG()).collectAsList();
    long avgFileSize = fileSizes.stream().mapToLong(i -> i).sum() / fileSizes.size();
    try (CloseableIterable<FileScanTask> fileScanTasks = table.newScan().planFiles()) {
        String fileSetID = UUID.randomUUID().toString();
        ScanTaskSetManager taskSetManager = ScanTaskSetManager.get();
        taskSetManager.stageTasks(table, fileSetID, Lists.newArrayList(fileScanTasks));
        // read and pack original 4 files into 2 splits
        Dataset<Row> scanDF = spark
            .read()
            .format("iceberg")
            .option(SparkReadOptions.SCAN_TASK_SET_ID, fileSetID)
            .option(SparkReadOptions.SPLIT_SIZE, Long.toString(avgFileSize * 2))
            .option(SparkReadOptions.FILE_OPEN_COST, "0")
            .load(tableName);
        // write the packed data into new files where each split becomes a new file
        scanDF
            .writeTo(tableName)
            .option(SparkWriteOptions.REWRITTEN_FILE_SCAN_TASK_SET_ID, fileSetID)
            .append();
        // commit the rewrite
        FileRewriteCoordinator rewriteCoordinator = FileRewriteCoordinator.get();
        Set<DataFile> rewrittenFiles = taskSetManager.fetchTasks(table, fileSetID).stream()
            .map(t -> t.asFileScanTask().file())
            .collect(Collectors.toSet());
        Set<DataFile> addedFiles = rewriteCoordinator.fetchNewFiles(table, fileSetID);
        table.newRewrite().rewriteFiles(rewrittenFiles, addedFiles).commit();
    }
    table.refresh();
    Map<String, String> summary = table.currentSnapshot().summary();
    assertThat(summary.get("deleted-data-files"))
        .as("Deleted files count must match")
        .isEqualTo("4");
    assertThat(summary.get("added-data-files")).as("Added files count must match").isEqualTo("2");
    Object rowCount = scalarSql("SELECT count(*) FROM %s", tableName);
    assertThat(rowCount).as("Row count must match").isEqualTo(4000L);
}
/**
 * Fetches the state of the named minion task from the controller.
 *
 * @param taskName name of the task to query
 * @return the raw response body (the task state)
 * @throws HttpException when the controller responds with status >= 400
 * @throws IOException   on transport failure
 */
public String getTaskState(String taskName) throws IOException, HttpException {
    final HttpGet httpGet =
        createHttpGetRequest(MinionRequestURLBuilder.baseUrl(_controllerUrl).forTaskState(taskName));
    try (CloseableHttpResponse response = HTTP_CLIENT.execute(httpGet)) {
        final int statusCode = response.getCode();
        final String body = EntityUtils.toString(response.getEntity());
        if (statusCode >= 400) {
            throw new HttpException(String.format(
                "Unable to get state for task: %s. Error code %d, Error message: %s",
                taskName, statusCode, body));
        }
        return body;
    }
}
/** getTaskState should return the raw response body on HTTP 200. */
@Test
public void testTaskState() throws IOException, HttpException {
    final HttpServer httpServer = startServer(14204,
        "/tasks/task/Task_SegmentGenerationAndPushTask_1607470525615/state",
        createHandler(200, "\"COMPLETED\"", 0));
    final MinionClient minionClient = new MinionClient("http://localhost:14204", null);
    Assert.assertEquals(
        minionClient.getTaskState("Task_SegmentGenerationAndPushTask_1607470525615"),
        "\"COMPLETED\"");
    httpServer.stop(0);
}
/**
 * AES-encrypts {@code data} with {@code key} using the configured AES
 * algorithm and transformation.
 */
public static byte[] encryptAES(byte[] data, byte[] key) {
    final boolean encryptMode = true;
    return desTemplate(data, key, AES_Algorithm, AES_Transformation, encryptMode);
}
/** encryptAES must agree with its hex-string and base64 convenience variants. */
@Test
public void encryptAES() throws Exception {
    TestCase.assertTrue(Arrays.equals(
        bytesResAES,
        EncryptKit.encryptAES(bytesDataAES, bytesKeyAES)));
    Assert.assertEquals(
        resAES,
        EncryptKit.encryptAES2HexString(bytesDataAES, bytesKeyAES));
    TestCase.assertTrue(Arrays.equals(
        Base64.getEncoder().encode(bytesResAES),
        EncryptKit.encryptAES2Base64(bytesDataAES, bytesKeyAES)));
}
/** Returns a sink that appends items to the Hazelcast IList with the given name. */
@Nonnull
public static <T> Sink<T> list(@Nonnull String listName) {
    final String sinkName = "listSink(" + listName + ')';
    return fromProcessor(sinkName, writeListP(listName));
}
@Test public void when_writeToMultipleStagesToSingleSink_then_allItemsInSink() { // Given String secondSourceName = randomName(); List<Integer> input = sequence(itemCount); addToSrcList(input); hz().getList(secondSourceName).addAll(input); BatchStage<Entry<Object, Object>> firstSource = p.readFrom(Sources.list(srcName)); BatchStage<Entry<Object, Object>> secondSource = p.readFrom(Sources.list(secondSourceName)); // When p.writeTo(Sinks.list(sinkName), firstSource, secondSource); // Then execute(); assertEquals(itemCount * 2, sinkList.size()); }
/**
 * Recovers/loads the configured storage directories for the given namespace.
 *
 * @throws IOException when not a single directory could be loaded
 */
void recoverTransitionRead(DataNode datanode, NamespaceInfo nsInfo,
    Collection<StorageLocation> dataDirs, StartupOption startOpt) throws IOException {
    // addStorageLocations returns the successfully loaded locations; an
    // empty result means no usable storage directory remains.
    if (addStorageLocations(datanode, nsInfo, dataDirs, startOpt).isEmpty()) {
        throw new IOException("All specified directories have failed to load.");
    }
}
@Test public void testRecoverTransitionReadDoTransitionFailure() throws IOException { final int numLocations = 3; List<StorageLocation> locations = createStorageLocations(numLocations); // Prepare volumes storage.recoverTransitionRead(mockDN, nsInfo, locations, START_OPT); assertEquals(numLocations, storage.getNumStorageDirs()); // Reset DataStorage storage.unlockAll(); storage = new DataStorage(); // Trigger an exception from doTransition(). nsInfo.clusterID = "cluster1"; try { storage.recoverTransitionRead(mockDN, nsInfo, locations, START_OPT); fail("Expect to throw an exception from doTransition()"); } catch (IOException e) { GenericTestUtils.assertExceptionContains("All specified directories", e); } assertEquals(0, storage.getNumStorageDirs()); }
/**
 * Two distributions are equal when both the distribution map and the
 * original partition schemas match.
 */
@Override
public boolean equals( Object o ) {
    if ( this == o ) {
        return true;
    }
    if ( o == null || getClass() != o.getClass() ) {
        return false;
    }
    final SlaveStepCopyPartitionDistribution other = (SlaveStepCopyPartitionDistribution) o;
    return Objects.equals( distribution, other.distribution )
        && Objects.equals( originalPartitionSchemas, other.originalPartitionSchemas );
}
/** equals() must be reflexive: an instance equals itself. */
@Test
public void equalsSameInstanceTest() {
    Assert.assertTrue( slaveStep.equals( slaveStep ) );
}
/**
 * Returns the logger registered under {@code name}, creating and caching
 * it on first use so repeated lookups yield the same instance.
 *
 * @throws NullPointerException if {@code name} is null
 */
@Nonnull
@Override
public ILogger getLogger(@Nonnull String name) {
    checkNotNull(name, "name must not be null");
    return getOrPutIfAbsent(mapLoggers, name, loggerConstructor);
}
/** Logging an event through a service-provided logger must not throw. */
@Test
public void testLog_whenLogEvent_thenNothingHappens() {
    final ILogger logger = loggingService.getLogger("test");
    logger.log(logEvent);
}
/**
 * Generates credentials for the requested external service on behalf of the
 * authenticated device, subject to the external-service rate limiter.
 * Unknown service types yield INVALID_ARGUMENT.
 */
@Override
public Mono<GetExternalServiceCredentialsResponse> getExternalServiceCredentials(final GetExternalServiceCredentialsRequest request) {
    final ExternalServiceCredentialsGenerator credentialsGenerator =
        this.credentialsGeneratorByType.get(request.getExternalService());
    if (credentialsGenerator == null) {
        return Mono.error(Status.INVALID_ARGUMENT.asException());
    }

    final AuthenticatedDevice authenticatedDevice = AuthenticationUtil.requireAuthenticatedDevice();

    return rateLimiters.forDescriptor(RateLimiters.For.EXTERNAL_SERVICE_CREDENTIALS)
        .validateReactive(authenticatedDevice.accountIdentifier())
        .then(Mono.fromSupplier(() -> {
            final ExternalServiceCredentials credentials =
                credentialsGenerator.generateForUuid(authenticatedDevice.accountIdentifier());
            return GetExternalServiceCredentialsResponse.newBuilder()
                .setUsername(credentials.username())
                .setPassword(credentials.password())
                .build();
        }));
}
/** A request without an external service identifier is rejected as invalid. */
@Test
public void testInvalidRequest() throws Exception {
    assertStatusException(Status.INVALID_ARGUMENT,
        () -> authenticatedServiceStub().getExternalServiceCredentials(
            GetExternalServiceCredentialsRequest.newBuilder().build()));
}
/**
 * Sends a SITE command over the FTP control connection, echoing protocol
 * traffic to the transcript listener for the duration of the call.
 */
@Override
public void send(final String command, final ProgressListener progress, final TranscriptListener transcript)
    throws BackgroundException {
    if (log.isDebugEnabled()) {
        log.debug(String.format("Send command %s", command));
    }
    progress.message(command);
    final ProtocolCommandListener listener = new LoggingProtocolCommandListener(transcript);
    try {
        session.getClient().addProtocolCommandListener(listener);
        session.getClient().sendSiteCommand(command);
    }
    catch (IOException e) {
        throw new FTPExceptionMappingService().map(e);
    }
    finally {
        // Always detach the transcript listener, even on failure.
        session.getClient().removeProtocolCommandListener(listener);
    }
}
/** Sending HELP must report progress and produce transcript output. */
@Test
public void testSend() throws Exception {
    final StringBuilder t = new StringBuilder();
    new FTPCommandFeature(session).send("HELP", new ProgressListener() {
        @Override
        public void message(final String message) {
            assertEquals("HELP", message);
        }
    }, new TranscriptListener() {
        @Override
        public void log(final Type request, final String message) {
            switch (request) {
                case response:
                    t.append(message);
            }
        }
    });
    assertNotNull(t.toString());
}
/**
 * Starts the pool by creating one subscriber state per shard checkpoint.
 *
 * @throws IllegalStateException if two checkpoints share a shard id
 */
void start(Iterable<ShardCheckpoint> checkpoints) {
    LOG.info(
        "Pool {} - starting for stream {} consumer {}. Checkpoints = {}",
        poolId, read.getStreamName(), consumerArn, checkpoints);
    for (ShardCheckpoint checkpoint : checkpoints) {
        checkState(!state.containsKey(checkpoint.getShardId()),
            "Duplicate shard id %s", checkpoint.getShardId());
        state.put(
            checkpoint.getShardId(),
            new ShardState(initShardSubscriber(checkpoint), checkpoint, watermarkPolicyFactory));
    }
}
/**
 * After events without records, the pool must re-subscribe at the advanced
 * sequence numbers and ultimately deliver all staged records for both
 * shards with the expected checkpoints and subscribe request sequence.
 */
@Test
public void poolReSubscribesAndReadsRecords() throws Exception {
    kinesis = new EFOStubbedKinesisAsyncClient(10);
    kinesis.stubSubscribeToShard("shard-000", eventWithRecords(3));
    kinesis.stubSubscribeToShard("shard-000", eventWithRecords(3, 7));
    kinesis.stubSubscribeToShard("shard-000", eventsWithoutRecords(10, 3));
    kinesis.stubSubscribeToShard("shard-001", eventWithRecords(3));
    kinesis.stubSubscribeToShard("shard-001", eventWithRecords(3, 5));
    kinesis.stubSubscribeToShard("shard-001", eventsWithoutRecords(8, 3));
    KinesisReaderCheckpoint initialCheckpoint =
        initialLatestCheckpoint(ImmutableList.of("shard-000", "shard-001"));
    pool = new EFOShardSubscribersPool(readSpec, consumerArn, kinesis);
    pool.start(initialCheckpoint);
    PoolAssertion.assertPool(pool)
        .givesCheckPointedRecords(
            ShardAssertion.shard("shard-000")
                .gives(KinesisRecordView.generate("shard-000", 0, 10))
                .withLastCheckpointSequenceNumber(12),
            ShardAssertion.shard("shard-001")
                .gives(KinesisRecordView.generate("shard-001", 0, 8))
                .withLastCheckpointSequenceNumber(10));
    assertThat(kinesis.subscribeRequestsSeen())
        .containsExactlyInAnyOrder(
            subscribeLatest("shard-000"),
            subscribeLatest("shard-001"),
            subscribeAfterSeqNumber("shard-000", "2"),
            subscribeAfterSeqNumber("shard-001", "2"),
            subscribeAfterSeqNumber("shard-000", "9"),
            subscribeAfterSeqNumber("shard-001", "7"),
            subscribeAfterSeqNumber("shard-000", "12"),
            subscribeAfterSeqNumber("shard-001", "10"));
}
/**
 * Renders this SCRAM credential, redacting salt, keys and the iteration
 * count when the printer's redaction criteria require it.
 */
@Override
public void print(MetadataNodePrinter printer) {
    // Evaluate the redaction decision once. The original queried
    // shouldRedactScram() for every field, repeating work and risking a
    // mixed redacted/clear line if the criteria were ever non-constant.
    final boolean redact = printer.redactionCriteria().shouldRedactScram();
    StringBuilder bld = new StringBuilder();
    bld.append("ScramCredentialData");
    bld.append("(salt=");
    if (redact) {
        bld.append("[redacted]");
    } else {
        arrayToHex(data.salt(), bld);
    }
    bld.append(", storedKey=");
    if (redact) {
        bld.append("[redacted]");
    } else {
        arrayToHex(data.storedKey(), bld);
    }
    bld.append(", serverKey=");
    if (redact) {
        bld.append("[redacted]");
    } else {
        arrayToHex(data.serverKey(), bld);
    }
    bld.append(", iterations=");
    if (redact) {
        bld.append("[redacted]");
    } else {
        bld.append(data.iterations());
    }
    bld.append(")");
    printer.output(bld.toString());
}
/** With redaction disabled, all SCRAM fields must print in the clear. */
@Test
public void testPrintUnredacted() {
    final NodeStringifier stringifier = new NodeStringifier(Disabled.INSTANCE);
    new ScramCredentialDataNode(DATA).print(stringifier);
    assertEquals("ScramCredentialData("
            + "salt=4f1d6ea31e58c5ad3aaeb3266f55cce6, "
            + "storedKey=3cfa1c3421b512d1d1dfc3355138b4ad, "
            + "serverKey=2d9781209073e8d03aee3cbc63a1d4ca, "
            + "iterations=16)",
        stringifier.toString());
}
/**
 * Resolves class bytecode for the given name, consulting (in order) the
 * client cache, the parent classloader, and — in LOCAL_AND_CACHED_CLASSES
 * mode — the distributed cache. Returns null when the provider mode is OFF
 * and the client cache has no entry, or when no source yields the class.
 */
public ClassData getClassDataOrNull(String className) {
    final ClassData cached = loadBytecodesFromClientCache(className);
    if (cached != null) {
        return cached;
    }
    if (providerMode == UserCodeDeploymentConfig.ProviderMode.OFF) {
        return null;
    }
    ClassData classData = loadBytecodesFromParent(className);
    if (classData == null
            && providerMode == UserCodeDeploymentConfig.ProviderMode.LOCAL_AND_CACHED_CLASSES) {
        classData = loadBytecodesFromCache(className);
    }
    return classData;
}
/** LOCAL_AND_CACHED_CLASSES mode must serve classes found in the cache map. */
@Test
public void givenProviderModeSetToLOCAL_AND_CACHED_CLASSES_whenMapClassContainsClass_thenReturnIt() {
    final UserCodeDeploymentConfig.ProviderMode providerMode = LOCAL_AND_CACHED_CLASSES;
    final String className = "className";
    final ClassSource classSource = newMockClassSource();
    final ClassLoader parent = getClass().getClassLoader();
    final ClassDataProvider provider = createClassDataProvider(providerMode, className, classSource, parent);

    final ClassData classData = provider.getClassDataOrNull(className);
    assertNotNull(classData.getInnerClassDefinitions());
}
/**
 * Routes a function invocation result: failures are recorded and the record
 * is failed; non-null results are forwarded downstream; null results are
 * acked directly unless processing guarantees make acking wrong (MANUAL, or
 * ATMOST_ONCE with auto-ack where the framework already acked).
 */
@VisibleForTesting
void handleResult(Record srcRecord, JavaExecutionResult result) throws Exception {
    if (result.getUserException() != null) {
        Exception t = result.getUserException();
        log.warn("Encountered exception when processing message {}", srcRecord, t);
        stats.incrUserExceptions(t);
        srcRecord.fail();
    } else {
        if (result.getResult() != null) {
            sendOutputMessage(srcRecord, result.getResult());
        } else {
            org.apache.pulsar.functions.proto.Function.FunctionDetails functionDetails =
                instanceConfig.getFunctionDetails();
            // When function return null, needs to be acked directly.
            if (functionDetails.getProcessingGuarantees()
                != org.apache.pulsar.functions.proto.Function.ProcessingGuarantees.MANUAL) {
                // This condition has been automatically acked.
                // After waiting to remove the autoAck configuration,can be removing the judgment condition.
                if (!functionDetails.getAutoAck()
                    || functionDetails.getProcessingGuarantees()
                        != org.apache.pulsar.functions.proto.Function.ProcessingGuarantees.ATMOST_ONCE) {
                    srcRecord.ack();
                }
            }
        }
        // increment total successfully processed
        stats.incrTotalProcessedSuccessfully();
    }
}
@Test public void testFunctionResultNull() throws Exception { JavaExecutionResult javaExecutionResult = new JavaExecutionResult(); // ProcessingGuarantees == MANUAL, not need ack. Record record = mock(Record.class); getJavaInstanceRunnable(true, org.apache.pulsar.functions.proto.Function.ProcessingGuarantees.MANUAL) .handleResult(record, javaExecutionResult); verify(record, times(0)).ack(); // ProcessingGuarantees == ATMOST_ONCE and autoAck == true, not need ack clearInvocations(record); getJavaInstanceRunnable(true, org.apache.pulsar.functions.proto.Function.ProcessingGuarantees.ATMOST_ONCE) .handleResult(record, javaExecutionResult); verify(record, times(0)).ack(); // other case, need ack clearInvocations(record); getJavaInstanceRunnable(true, org.apache.pulsar.functions.proto.Function.ProcessingGuarantees.ATLEAST_ONCE) .handleResult(record, javaExecutionResult); verify(record, times(1)).ack(); clearInvocations(record); getJavaInstanceRunnable(true, org.apache.pulsar.functions.proto.Function.ProcessingGuarantees.EFFECTIVELY_ONCE) .handleResult(record, javaExecutionResult); verify(record, times(1)).ack(); }
/**
 * Compares the certificate entries of an existing Secret against the desired Secret.
 * <p>Returns {@code true} only when a key present in {@code current} also exists in
 * {@code desired} with a different non-null value (i.e. an existing certificate was
 * changed). Keys missing from {@code desired} (scale-down) or extra keys in
 * {@code desired} (scale-up) do not count as a difference.
 *
 * @param current the Secret currently stored
 * @param desired the Secret we want to end up with
 * @return true if any shared key has a differing value, false otherwise
 */
public static boolean doExistingCertificatesDiffer(Secret current, Secret desired) {
    Map<String, String> currentData = current.getData();
    Map<String, String> desiredData = desired.getData();
    if (currentData == null) {
        // Nothing stored yet; treat as differing so the Secret gets (re)created.
        return true;
    }
    if (desiredData == null) {
        // Previously this dereferenced desiredData and threw an NPE. A missing data
        // section behaves like "no shared keys", which by the loop's semantics is
        // "no changed certificate".
        return false;
    }
    for (Map.Entry<String, String> entry : currentData.entrySet()) {
        String desiredValue = desiredData.get(entry.getKey());
        if (entry.getValue() != null && desiredValue != null && !entry.getValue().equals(desiredValue)) {
            return true;
        }
    }
    return false;
}
/**
 * Exercises doExistingCertificatesDiffer: identical secrets and pure scale-up/down
 * must report no difference; any changed value for a shared key must report one.
 */
@Test
public void testExistingCertificatesDiffer() {
    // Baseline: three broker cert/key pairs.
    Secret defaultSecret = new SecretBuilder()
            .withNewMetadata().withName("my-secret").endMetadata()
            .addToData("my-cluster-kafka-0.crt", "Certificate0").addToData("my-cluster-kafka-0.key", "Key0")
            .addToData("my-cluster-kafka-1.crt", "Certificate1").addToData("my-cluster-kafka-1.key", "Key1")
            .addToData("my-cluster-kafka-2.crt", "Certificate2").addToData("my-cluster-kafka-2.key", "Key2")
            .build();
    // Byte-for-byte identical content under a separate builder instance.
    Secret sameAsDefaultSecret = new SecretBuilder()
            .withNewMetadata().withName("my-secret").endMetadata()
            .addToData("my-cluster-kafka-0.crt", "Certificate0").addToData("my-cluster-kafka-0.key", "Key0")
            .addToData("my-cluster-kafka-1.crt", "Certificate1").addToData("my-cluster-kafka-1.key", "Key1")
            .addToData("my-cluster-kafka-2.crt", "Certificate2").addToData("my-cluster-kafka-2.key", "Key2")
            .build();
    // Fewer entries than current (scale-down) -- shared keys unchanged.
    Secret scaleDownSecret = new SecretBuilder()
            .withNewMetadata().withName("my-secret").endMetadata()
            .addToData("my-cluster-kafka-0.crt", "Certificate0").addToData("my-cluster-kafka-0.key", "Key0")
            .build();
    // More entries than current (scale-up) -- shared keys unchanged.
    Secret scaleUpSecret = new SecretBuilder()
            .withNewMetadata().withName("my-secret").endMetadata()
            .addToData("my-cluster-kafka-0.crt", "Certificate0").addToData("my-cluster-kafka-0.key", "Key0")
            .addToData("my-cluster-kafka-1.crt", "Certificate1").addToData("my-cluster-kafka-1.key", "Key1")
            .addToData("my-cluster-kafka-2.crt", "Certificate2").addToData("my-cluster-kafka-2.key", "Key2")
            .addToData("my-cluster-kafka-3.crt", "Certificate3").addToData("my-cluster-kafka-3.key", "Key3")
            .addToData("my-cluster-kafka-4.crt", "Certificate4").addToData("my-cluster-kafka-4.key", "Key4")
            .build();
    // One shared key (broker 1 key) changed.
    Secret changedSecret = new SecretBuilder()
            .withNewMetadata().withName("my-secret").endMetadata()
            .addToData("my-cluster-kafka-0.crt", "Certificate0").addToData("my-cluster-kafka-0.key", "Key0")
            .addToData("my-cluster-kafka-1.crt", "Certificate1").addToData("my-cluster-kafka-1.key", "NewKey1")
            .addToData("my-cluster-kafka-2.crt", "Certificate2").addToData("my-cluster-kafka-2.key", "Key2")
            .build();
    // Scale-up combined with a changed shared certificate (broker 2 crt).
    Secret changedScaleUpSecret = new SecretBuilder()
            .withNewMetadata().withName("my-secret").endMetadata()
            .addToData("my-cluster-kafka-0.crt", "Certificate0").addToData("my-cluster-kafka-0.key", "Key0")
            .addToData("my-cluster-kafka-1.crt", "Certificate1").addToData("my-cluster-kafka-1.key", "Key1")
            .addToData("my-cluster-kafka-2.crt", "NewCertificate2").addToData("my-cluster-kafka-2.key", "Key2")
            .addToData("my-cluster-kafka-3.crt", "Certificate3").addToData("my-cluster-kafka-3.key", "Key3")
            .addToData("my-cluster-kafka-4.crt", "Certificate4").addToData("my-cluster-kafka-4.key", "Key4")
            .build();
    // Scale-down combined with changed values for the remaining shared keys.
    Secret changedScaleDownSecret = new SecretBuilder()
            .withNewMetadata().withName("my-secret").endMetadata()
            .addToData("my-cluster-kafka-0.crt", "NewCertificate0").addToData("my-cluster-kafka-0.key", "NewKey0")
            .build();
    assertThat(CertUtils.doExistingCertificatesDiffer(defaultSecret, defaultSecret), is(false));
    assertThat(CertUtils.doExistingCertificatesDiffer(defaultSecret, sameAsDefaultSecret), is(false));
    assertThat(CertUtils.doExistingCertificatesDiffer(defaultSecret, scaleDownSecret), is(false));
    assertThat(CertUtils.doExistingCertificatesDiffer(defaultSecret, scaleUpSecret), is(false));
    assertThat(CertUtils.doExistingCertificatesDiffer(defaultSecret, changedSecret), is(true));
    assertThat(CertUtils.doExistingCertificatesDiffer(defaultSecret, changedScaleUpSecret), is(true));
    assertThat(CertUtils.doExistingCertificatesDiffer(defaultSecret, changedScaleDownSecret), is(true));
}
/**
 * Strokes the image outline with the given color and line width.
 * Convenience overload delegating to {@code stroke(Color, Stroke)} with a
 * {@link BasicStroke} of the requested width.
 *
 * @param color stroke color (semantics of null are defined by the delegate)
 * @param width stroke line width in pixels
 * @return this image object, for chaining
 */
public Img stroke(Color color, float width) {
    return stroke(color, new BasicStroke(width));
}
/**
 * Manual/visual check of {@code Img.stroke(Color, float)}. Disabled because it
 * reads and writes files on a local d:/test path and performs no assertions.
 */
@Test
@Disabled
public void strokeTest() {
    Img.from(FileUtil.file("d:/test/公章3.png"))
            .stroke(null, 2f)
            .write(FileUtil.file("d:/test/stroke_result.png"));
}
/**
 * Validates that a state condition computed by the caller holds.
 *
 * @param isValid outcome of the caller's state check
 * @param message detail message attached to the exception on failure
 * @throws IllegalStateException if {@code isValid} is false
 */
public static void checkState(boolean isValid, String message) throws IllegalStateException {
    if (isValid) {
        return;
    }
    throw new IllegalStateException(message);
}
/**
 * Exercises the varargs checkState overload with four format arguments:
 * no exception when valid, and a fully formatted message when invalid.
 */
@Test
public void testCheckStateWithMoreThanThreeParams() {
    try {
        Preconditions.checkState(true, "Test message %s %s %s %s", 12, null, "column", true);
    } catch (IllegalStateException e) {
        Assert.fail("Should not throw exception when isValid is true");
    }
    try {
        Preconditions.checkState(false, "Test message %s %s %s %s", 12, null, "column", true);
        Assert.fail("Should throw exception when isValid is false");
    } catch (IllegalStateException e) {
        Assert.assertEquals("Should format message", "Test message 12 null column true", e.getMessage());
    }
}
/**
 * Returns the first two bytes of this binary interpreted as a little-endian short.
 * Base implementation that always fails; concrete Binary backings are expected
 * to override it (see the backing-specific subclasses).
 *
 * @throws UnsupportedOperationException always, in this default implementation
 */
public short get2BytesLittleEndian() {
    throw new UnsupportedOperationException("Not implemented");
}
/**
 * Verifies get2BytesLittleEndian across the three Binary backings:
 * ByteBuffer-backed, byte-array-backed, and sliced byte-array-backed.
 * Bytes {0x01, 0x02} must decode to 0x0201 (little-endian).
 */
@Test
public void testGet2BytesLittleEndian() {
    // ByteBufferBackedBinary: get2BytesLittleEndian
    Binary b1 = Binary.fromConstantByteBuffer(ByteBuffer.wrap(new byte[] {0x01, 0x02}));
    assertEquals((short) 0x0201, b1.get2BytesLittleEndian());
    // ByteArrayBackedBinary: get2BytesLittleEndian
    Binary b2 = Binary.fromConstantByteArray(new byte[] {0x01, 0x02});
    assertEquals((short) 0x0201, b2.get2BytesLittleEndian());
    // ByteArraySliceBackedBinary: get2BytesLittleEndian
    Binary b3 = Binary.fromConstantByteArray(new byte[] {0x00, 0x01, 0x02, 0x03}, 1, 2);
    assertEquals((short) 0x0201, b3.get2BytesLittleEndian());
}
/**
 * Asserts that the subject contains at least one of the given elements.
 * Varargs convenience that accumulates the arguments and delegates to
 * {@code containsAnyIn}.
 *
 * @param first  first candidate element (may be null)
 * @param second second candidate element (may be null)
 * @param rest   further candidate elements (array and elements may be null)
 */
public final void containsAnyOf(
    @Nullable Object first, @Nullable Object second, @Nullable Object @Nullable ... rest) {
  containsAnyIn(accumulate(first, second, rest));
}
/**
 * Happy path: the subject [1, 2, 3] contains 1, so containsAnyOf(1, 5) passes.
 */
@Test
public void iterableContainsAnyOf() {
    assertThat(asList(1, 2, 3)).containsAnyOf(1, 5);
}
/**
 * Blocks on the given reactive API call and returns its value.
 * <p>On an {@link HttpException}, attempts to parse the HTTP error body as an
 * {@code OpenAiError} and rethrows it as a richer {@code OpenAiHttpException}
 * carrying the status code. If the body is absent or unparseable, the original
 * {@link HttpException} is rethrown unchanged.
 *
 * @param apiCall the call to execute synchronously
 * @return the call's result
 */
public static <T> T execute(Single<T> apiCall) {
    try {
        return apiCall.blockingGet();
    } catch (HttpException e) {
        try {
            // No response/body to parse -- nothing better than the raw exception.
            if (e.response() == null || e.response().errorBody() == null) {
                throw e;
            }
            String errorBody = e.response().errorBody().string();
            OpenAiError error = mapper.readValue(errorBody, OpenAiError.class);
            throw new OpenAiHttpException(error, e, e.code());
        } catch (IOException ex) {
            // couldn't parse OpenAI error
            throw e;
        }
    }
}
/**
 * Verifies that an HTTP 401 with a well-formed OpenAI error body is unwrapped
 * into an OpenAiHttpException exposing message, type, param, code and status.
 */
@Test
void executeParseHttpError() {
    String errorBody = "{\"error\":{\"message\":\"Invalid auth token\",\"type\":\"type\",\"param\":\"param\",\"code\":\"code\"}}";
    HttpException httpException = createException(errorBody, 401);
    Single<CompletionResult> single = Single.error(httpException);
    OpenAiHttpException exception =
            assertThrows(OpenAiHttpException.class, () -> OpenAiService.execute(single));
    assertEquals("Invalid auth token", exception.getMessage());
    assertEquals("type", exception.type);
    assertEquals("param", exception.param);
    assertEquals("code", exception.code);
    assertEquals(401, exception.statusCode);
}
/**
 * Recursively converts a Fair Scheduler queue and all of its descendants,
 * emitting the corresponding Capacity Scheduler settings for each queue
 * (children, AM share, parallel apps, allocations, preemption, capacities,
 * size-based weight, ordering policy, user limit factor).
 *
 * @param queue the FS queue subtree root to convert
 */
public void convertQueueHierarchy(FSQueue queue) {
    List<FSQueue> children = queue.getChildQueues();
    final String queueName = queue.getName();
    emitChildQueues(queueName, children);
    emitMaxAMShare(queueName, queue);
    emitMaxParallelApps(queueName, queue);
    emitMaxAllocations(queueName, queue);
    emitPreemptionDisabled(queueName, queue);
    emitChildCapacity(queue);
    emitMaximumCapacity(queueName, queue);
    emitSizeBasedWeight(queueName);
    emitOrderingPolicy(queueName, queue);
    checkMaxChildCapacitySetting(queue);
    emitDefaultUserLimitFactor(queueName, children);
    // Depth-first over the remaining hierarchy.
    for (FSQueue childQueue : children) {
        convertQueueHierarchy(childQueue);
    }
}
/**
 * After conversion, root.admins.alice carries its configured max-parallel-apps
 * of 2, while every other queue falls back to the default value.
 */
@Test
public void testQueueMaxParallelApps() {
    converter = builder.build();
    converter.convertQueueHierarchy(rootQueue);
    assertEquals("root.admins.alice max apps", 2,
            csConfig.getMaxParallelAppsForQueue(ADMINS_ALICE), 0);
    Set<String> remaining = Sets.difference(ALL_QUEUES, Sets.newHashSet("root.admins.alice"));
    for (String queue : remaining) {
        key = PREFIX + queue + ".max-parallel-apps";
        assertEquals("Key " + key + " has different value",
                DEFAULT_MAX_PARALLEL_APPLICATIONS,
                csConfig.getMaxParallelAppsForQueue(new QueuePath(queue)), 0);
    }
}
/**
 * Parses a single migration SQL statement, substituting the given variables,
 * and wraps it as a ParsedCommand. SET/UNSET-style commands (StatementType
 * STATEMENT) carry no AST; all other statements are built into an AST node.
 *
 * @param sql       the statement text (must be a supported statement type)
 * @param variables variable name/value pairs to substitute into the statement
 * @return the parsed command with substituted text and optional AST
 * @throws MigrationException if the statement cannot be parsed
 */
public static ParsedCommand parse(
    // CHECKSTYLE_RULES.ON: CyclomaticComplexity
    final String sql, final Map<String, String> variables) {
    validateSupportedStatementType(sql);
    final String substituted;
    try {
        substituted = VariableSubstitutor.substitute(KSQL_PARSER.parse(sql).get(0), variables);
    } catch (ParseFailedException e) {
        throw new MigrationException(String.format(
                "Failed to parse the statement. Statement: %s. Reason: %s",
                sql, e.getMessage()));
    }
    // Re-parse after substitution to obtain the statement context of the final text.
    final SqlBaseParser.SingleStatementContext statementContext = KSQL_PARSER.parse(substituted)
            .get(0).getStatement();
    final boolean isStatement = StatementType.get(statementContext.statement().getClass()) == StatementType.STATEMENT;
    return new ParsedCommand(substituted,
            isStatement ? Optional.empty() : Optional.of(new AstBuilder(TypeRegistry.EMPTY)
                    .buildStatement(statementContext)));
}
/**
 * SET/UNSET statements (mixed case) parse into two commands with SetProperty /
 * UnsetProperty ASTs, preserving original command text, property name and value.
 */
@Test
public void shouldParseSetUnsetStatements() {
    List<CommandParser.ParsedCommand> commands =
            parse("SeT 'foo.property'='bar';UnSET 'foo.property';");
    assertThat(commands.size(), is(2));
    assertThat(commands.get(0).getStatement().isPresent(), is (true));
    assertThat(commands.get(0).getStatement().get(), instanceOf(SetProperty.class));
    assertThat(commands.get(0).getCommand(), is("SeT 'foo.property'='bar';"));
    assertThat(((SetProperty)commands.get(0).getStatement().get()).getPropertyName(), is("foo.property"));
    assertThat(((SetProperty)commands.get(0).getStatement().get()).getPropertyValue(), is("bar"));
    assertThat(commands.get(1).getStatement().isPresent(), is (true));
    assertThat(commands.get(1).getStatement().get(), instanceOf(UnsetProperty.class));
    assertThat(commands.get(1).getCommand(), is("UnSET 'foo.property';"));
    assertThat(((UnsetProperty)commands.get(1).getStatement().get()).getPropertyName(), is("foo.property"));
}
/**
 * Adds a range partition to this feature range. Edge partitions are kept in
 * their own collection; all other partitions go into the general list.
 *
 * @param p the partition to add
 */
public void addPartition(RangePartition p) {
    if (!(p instanceof RangeEdgePartition)) {
        partitions.add(p);
        return;
    }
    edgePartitions.add((RangeEdgePartition) p);
}
/**
 * Adds plain and edge range partitions to a FeatureRange and checks the
 * rendered toString groups edge partitions before plain ones.
 */
@Test
void requireThatRangePartitionsCanBeAdded() {
    FeatureRange range = new FeatureRange("foo", 10L, 22L);
    range.addPartition(new RangePartition("foo=10-19"));
    range.addPartition(new RangePartition("foo", 0, 0x8000000000000000L, true));
    range.addPartition(new RangeEdgePartition("foo=20", 20, 0, 2));
    assertEquals("foo in [10..22 (foo=20+[..2],foo=10-19,foo=-9223372036854775808-0)]",
            range.toString());
}
/**
 * Converts an arbitrary Java object into its Ruby-runtime counterpart.
 * Null maps to Ruby nil; known classes use the cached converter from
 * {@code CONVERTER_MAP}; anything else goes through {@code fallbackConvert}.
 *
 * @param o the object to convert (may be null)
 * @return the converted Ruby-side value
 */
public static Object convert(final Object o) {
    if (o == null) {
        return RubyUtil.RUBY.getNil();
    }
    final Class<?> cls = o.getClass();
    final Valuefier.Converter converter = CONVERTER_MAP.get(cls);
    if (converter != null) {
        return converter.convert(o);
    }
    return fallbackConvert(o, cls);
}
/**
 * A ZonedDateTime converted through Valuefier yields a RubyTimestamp with the
 * same epoch-millisecond instant.
 */
@Test
public void testZonedDateTime() {
    ZonedDateTime zdt = ZonedDateTime.of(2022,4,4,5,6,13,123, ZoneId.of("Europe/London"));
    JrubyTimestampExtLibrary.RubyTimestamp result =
            (JrubyTimestampExtLibrary.RubyTimestamp) Valuefier.convert(zdt);
    assertEquals(zdt.toInstant().toEpochMilli(), result.getTimestamp().toEpochMilli());
}
/**
 * Performs the remote call for this retriever: opens the connection, reads and
 * deserializes the payload, and rethrows any Throwable that was serialized as
 * the result. Returns a mock result instead when mocking is enabled.
 *
 * @return the deserialized result of the call
 * @throws IOException on transport failure or when the remote side returned an exception
 */
<T> T call() throws IOException {
    if (shouldMock()) {
        // this generic must be kept for javac compilation in continuous integration
        return this.createMockResultOfCall();
    }
    final long start = System.currentTimeMillis();
    int dataLength = -1;
    try {
        final URLConnection connection = openConnection();
        // for translations (if we arrive from CollectorServlet.forwardActionAndUpdateData,
        // this makes the remote side answer with messages in the right language)
        connection.setRequestProperty("Accept-Language", I18N.getCurrentLocale().getLanguage());
        // Note: we deliberately do not handle http session cookies here,
        // since the monitoring filter is not supposed to create sessions
        // if (cookie != null) { connection.setRequestProperty("Cookie", cookie); }
        connection.connect();
        // final String setCookie = connection.getHeaderField("Set-Cookie");
        // if (setCookie != null) { cookie = setCookie; }
        final CounterInputStream counterInputStream = new CounterInputStream(
                connection.getInputStream());
        final T result;
        try {
            @SuppressWarnings("unchecked")
            final T tmp = (T) read(connection, counterInputStream);
            result = tmp;
        } finally {
            counterInputStream.close();
            dataLength = counterInputStream.getDataLength();
        }
        LOG.debug("read on " + url + " : " + result);
        // The remote side may serialize a Throwable as the payload: rethrow it
        // locally, converting checked exceptions to IOException.
        if (result instanceof RuntimeException) {
            throw (RuntimeException) result;
        } else if (result instanceof Error) {
            throw (Error) result;
        } else if (result instanceof IOException) {
            throw (IOException) result;
        } else if (result instanceof Exception) {
            throw createIOException((Exception) result);
        }
        return result;
    } catch (final ClassNotFoundException e) {
        throw createIOException(e);
    } finally {
        LOG.info("http call done in " + (System.currentTimeMillis() - start) + " ms with "
                + dataLength / 1024 + " KB read for " + url);
    }
}
/**
 * Round-trips a serialized Counter through LabradorRetriever.call() using a
 * temp file URL (mocking disabled), with and without extra headers.
 */
@Test
public void testCall() throws IOException {
    Utils.setProperty(Parameters.PARAMETER_SYSTEM_PREFIX + "mockLabradorRetriever", "false");
    final File file = File.createTempFile("test", ".ser");
    try {
        try (ObjectOutputStream output = new ObjectOutputStream(new FileOutputStream(file))) {
            output.writeObject(new Counter("http", null));
        }
        final URL url = file.toURI().toURL();
        final LabradorRetriever labradorRetriever = new LabradorRetriever(url);
        labradorRetriever.call();
        final Map<String, String> headers = Collections.emptyMap();
        final LabradorRetriever labradorRetriever2 = new LabradorRetriever(url, headers);
        labradorRetriever2.call();
    } finally {
        if (!file.delete()) {
            fail("file.delete");
        }
    }
}
/**
 * Builds the job name for a function, preferring an explicitly supplied job
 * name over the default derived from tenant/namespace/function-name.
 *
 * @param functionDetails function metadata supplying tenant, namespace and name
 * @param jobName         explicit job name override, or null for the default
 * @return the job name produced by the matching createJobName overload
 */
public static String createJobName(Function.FunctionDetails functionDetails, String jobName) {
    final String tenant = functionDetails.getTenant();
    final String namespace = functionDetails.getNamespace();
    final String name = functionDetails.getName();
    if (jobName == null) {
        return createJobName(tenant, namespace, name);
    }
    return createJobName(jobName, tenant, namespace, name);
}
/**
 * Umbrella test driving all createJobName scenarios: backward compatibility,
 * case/dot/invalid-character sanitization, collision handling, Kubernetes pod
 * name overrides, and the max-length truncation rule.
 */
@Test
public void testCreateJobName() throws Exception {
    verifyCreateJobNameWithBackwardCompatibility();
    verifyCreateJobNameWithUpperCaseFunctionName();
    verifyCreateJobNameWithDotFunctionName();
    verifyCreateJobNameWithDotAndUpperCaseFunctionName();
    verifyCreateJobNameWithInvalidMarksFunctionName();
    verifyCreateJobNameWithCollisionalFunctionName();
    verifyCreateJobNameWithCollisionalAndInvalidMarksFunctionName();
    verifyCreateJobNameWithOverriddenK8sPodNameNoCollisionWithSameName();
    verifyCreateJobNameWithOverriddenK8sPodName();
    verifyCreateJobNameWithOverriddenK8sPodNameWithInvalidMarks();
    verifyCreateJobNameWithNameOverMaxCharLimit();
}
/**
 * Generates the variable-declaration block for a KiePMMLInterval from the PMML
 * Interval model: clones the method template, renames the declared variable,
 * and fills the constructor arguments (left margin, right margin, closure).
 *
 * @param variableName name to give the generated interval variable
 * @param interval     source PMML interval (margins may be null)
 * @return the populated BlockStmt cloned from the template
 * @throws KiePMMLException if the template is missing the expected body,
 *         variable or initializer
 */
static BlockStmt getIntervalVariableDeclaration(final String variableName, final Interval interval) {
    final MethodDeclaration methodDeclaration =
            INTERVAL_TEMPLATE.getMethodsByName(GETKIEPMMLINTERVAL).get(0).clone();
    final BlockStmt toReturn =
            methodDeclaration.getBody().orElseThrow(() -> new KiePMMLException(String.format(MISSING_BODY_TEMPLATE, methodDeclaration)));
    final VariableDeclarator variableDeclarator =
            getVariableDeclarator(toReturn, INTERVAL).orElseThrow(() -> new KiePMMLException(String.format(MISSING_VARIABLE_IN_BODY, INTERVAL, toReturn)));
    variableDeclarator.setName(variableName);
    final ObjectCreationExpr objectCreationExpr = variableDeclarator.getInitializer()
            .orElseThrow(() -> new KiePMMLException(String.format(MISSING_VARIABLE_INITIALIZER_TEMPLATE, INTERVAL, toReturn)))
            .asObjectCreationExpr();
    final Expression leftMarginExpr = getExpressionForObject(interval.getLeftMargin());
    final Expression rightMarginExpr = getExpressionForObject(interval.getRightMargin());
    // The closure enum constant is referenced by fully-qualified name.
    final CLOSURE closure = CLOSURE.byName(interval.getClosure().value());
    final NameExpr closureExpr = new NameExpr(CLOSURE.class.getName() + "." + closure.name());
    objectCreationExpr.getArguments().set(0, leftMarginExpr);
    objectCreationExpr.getArguments().set(1, rightMarginExpr);
    objectCreationExpr.getArguments().set(2, closureExpr);
    return toReturn;
}
/**
 * Generates a declaration for an interval with a null right margin and checks
 * it matches the expected source template and compiles with the needed imports.
 */
@Test
void getIntervalVariableDeclaration() throws IOException {
    String variableName = "variableName";
    double leftMargin = 45.32;
    Interval interval = new Interval();
    interval.setLeftMargin(leftMargin);
    interval.setRightMargin(null);
    interval.setClosure(Interval.Closure.CLOSED_OPEN);
    BlockStmt retrieved = KiePMMLIntervalFactory.getIntervalVariableDeclaration(variableName, interval);
    String closureString = CLOSURE.class.getName() + "." + CLOSURE.byName(interval.getClosure().value()).name();
    String text = getFileContent(TEST_01_SOURCE);
    Statement expected = JavaParserUtils.parseBlock(String.format(text, variableName, leftMargin, closureString));
    assertThat(JavaParserUtils.equalsNode(expected, retrieved)).isTrue();
    List<Class<?>> imports = Arrays.asList(Collections.class, KiePMMLInterval.class);
    commonValidateCompilationWithImports(retrieved, imports);
}
/**
 * Renders this statistic in the conventional {@code EipStatistic{...}} debug
 * form, listing id, tested flag, total processing time, properties and the
 * child statistic map.
 */
@Override
public String toString() {
    final StringBuilder sb = new StringBuilder("EipStatistic{");
    sb.append("id='").append(id).append('\'');
    sb.append(", tested=").append(tested);
    sb.append(", totalProcessingTime=").append(totalProcessingTime);
    sb.append(", properties=").append(properties);
    sb.append(", childEipStatisticMap=").append(childEipStatisticMap);
    sb.append('}');
    return sb.toString();
}
/**
 * Smoke test: toString is non-null and carries the class-name prefix.
 */
@Test
public void testToString() {
    String toString = getInstance().toString();
    assertNotNull(toString);
    assertTrue(toString.contains("EipStatistic"));
}
/**
 * Prints the object using the default template variable.
 * Convenience overload delegating to {@code print(String, Object)} with
 * {@code TEMPLATE_VAR}.
 *
 * @param obj the value to print
 */
public static void print(Object obj) {
    print(TEMPLATE_VAR, obj);
}
/**
 * Visual check of ANSI color escape sequences on the console.
 * NOTE(review): this test prints only and asserts nothing -- it always passes.
 */
@Test
public void printColorTest(){
    System.out.print("\33[30;1m A \u001b[31;2m B \u001b[32;1m C \u001b[33;1m D \u001b[0m");
}
/**
 * Returns the entry-set view backing this map (the {@code set} field).
 */
@Override
public Set<Entry<K, V>> entrySet() {
    return set;
}
/**
 * The translated view's entry set reflects the underlying map's size and
 * contains entries whose values have been passed through the translator.
 */
@Test
public void testEntrySet() {
    Map<String, Integer> underlying = createTestMap();
    TranslatedValueMapView<String, String, Integer> view =
            new TranslatedValueMapView<>(underlying, v -> v.toString());
    assertEquals(3, view.entrySet().size());
    assertFalse(view.entrySet().isEmpty());
    // "foo" maps to 2 in the underlying map, translated to "2" here.
    assertTrue(view.entrySet().contains(new SimpleImmutableEntry<>("foo", "2")));
    assertFalse(view.entrySet().contains(new SimpleImmutableEntry<>("bar", "4")));
}
/**
 * Two-argument UDF overload: formats the timestamp in GMT by delegating to the
 * three-argument overload with an explicit "GMT" zone id.
 *
 * @param timestamp     the TIMESTAMP value to format (null handling is the delegate's)
 * @param formatPattern a java.time.format.DateTimeFormatter pattern
 * @return the formatted timestamp string
 */
@Udf(description = "Converts a TIMESTAMP value into the"
    + " string representation of the timestamp in the given format. Single quotes in the"
    + " timestamp format can be escaped with '', for example: 'yyyy-MM-dd''T''HH:mm:ssX'"
    + " The system default time zone is used when no time zone is explicitly provided."
    + " The format pattern should be in the format expected"
    + " by java.time.format.DateTimeFormatter")
public String formatTimestamp(
    @UdfParameter(
        description = "TIMESTAMP value.") final Timestamp timestamp,
    @UdfParameter(
        description = "The format pattern should be in the format expected by"
            + " java.time.format.DateTimeFormatter.") final String formatPattern) {
  return formatTimestamp(timestamp, formatPattern, ZoneId.of("GMT").getId());
}
/**
 * A null timestamp input yields a null formatted result rather than an error.
 */
@Test
public void shouldReturnNullOnNullDate() {
    // When:
    final String returnValue = udf.formatTimestamp(null, "yyyy-MM-dd");
    // Then:
    assertThat(returnValue, is(nullValue()));
}
/**
 * Renames a file on the iRODS server. If the target already exists (per the
 * transfer status) it is deleted first; a missing source raises
 * NotfoundException; Jargon failures are mapped to Cyberduck exceptions.
 *
 * @param file    the existing source path
 * @param renamed the destination path
 * @param status  transfer status; {@code isExists()} triggers target deletion
 * @return the destination path
 */
@Override
public Path move(final Path file, final Path renamed, final TransferStatus status,
                 final Delete.Callback callback, final ConnectionCallback connectionCallback) throws BackgroundException {
    try {
        final IRODSFileSystemAO fs = session.getClient();
        final IRODSFile s = fs.getIRODSFileFactory().instanceIRODSFile(file.getAbsolute());
        if(!s.exists()) {
            throw new NotfoundException(String.format("%s doesn't exist", file.getAbsolute()));
        }
        if(status.isExists()) {
            // Overwrite: remove the existing target before renaming onto it.
            delete.delete(Collections.singletonMap(renamed, status), connectionCallback, callback);
        }
        final IRODSFile d = fs.getIRODSFileFactory().instanceIRODSFile(renamed.getAbsolute());
        s.renameTo(d);
        return renamed;
    }
    catch(JargonException e) {
        throw new IRODSExceptionMappingService().map("Cannot rename {0}", e, file);
    }
}
/**
 * Integration test against a live iRODS endpoint: moving a file onto an
 * existing target (exists=true) replaces it -- source disappears, destination
 * remains -- followed by cleanup.
 */
@Test
public void testMoveFile() throws Exception {
    final ProtocolFactory factory = new ProtocolFactory(new HashSet<>(Collections.singleton(new IRODSProtocol())));
    final Profile profile = new ProfilePlistReader(factory).read(
            this.getClass().getResourceAsStream("/iRODS (iPlant Collaborative).cyberduckprofile"));
    final Host host = new Host(profile, profile.getDefaultHostname(), new Credentials(
            PROPERTIES.get("irods.key"), PROPERTIES.get("irods.secret")
    ));
    final IRODSSession session = new IRODSSession(host);
    session.open(new DisabledProxyFinder(), new DisabledHostKeyCallback(), new DisabledLoginCallback(), new DisabledCancelCallback());
    session.login(new DisabledLoginCallback(), new DisabledCancelCallback());
    final Path source = new Path(new IRODSHomeFinderService(session).find(),
            new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.file));
    final Path destination = new Path(new IRODSHomeFinderService(session).find(),
            new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.file));
    new IRODSTouchFeature(session).touch(source, new TransferStatus());
    new IRODSTouchFeature(session).touch(destination, new TransferStatus());
    new IRODSMoveFeature(session).move(source, destination,
            new TransferStatus().exists(true), new Delete.DisabledCallback(), new DisabledConnectionCallback());
    assertFalse(session.getFeature(Find.class).find(source));
    assertTrue(session.getFeature(Find.class).find(destination));
    session.getFeature(Delete.class).delete(Collections.singletonList(destination),
            new DisabledLoginCallback(), new Delete.DisabledCallback());
    assertFalse(session.getFeature(Find.class).find(destination));
    session.close();
}
/**
 * Returns the most recent cluster state bundle that has converged, or null if
 * no bundle has converged yet (see broadcaster activation tests).
 */
public ClusterStateBundle getLastClusterStateBundleConverged() {
    return lastClusterStateBundleConverged;
}
/**
 * Two-phase activation: no node receives an activateClusterStateVersion call
 * until every distributor has ACKed the state bundle, and the bundle is not
 * reported converged until activations themselves are ACKed.
 */
@Test
@SuppressWarnings("unchecked")
void activation_not_sent_before_all_distributors_have_acked_state_bundle() {
    var f = StateActivationFixture.withTwoPhaseEnabled();
    var cf = f.cf;
    f.expectSetSystemStateInvocationsToBothDistributors();
    f.simulateBroadcastTick(cf, 123);
    // Respond from distributor 0, but not yet from distributor 1
    respondToSetClusterStateBundle(cf.cluster.getNodeInfo(Node.ofDistributor(0)), f.stateBundle, f.d0Waiter.getValue());
    f.simulateBroadcastTick(cf, 123);
    // No activations should be sent yet
    cf.cluster().getNodeInfos().forEach(nodeInfo -> {
        verify(f.mockCommunicator, times(0)).activateClusterStateVersion(eq(123), eq(nodeInfo), any());
    });
    assertNull(f.broadcaster.getLastClusterStateBundleConverged());
    respondToSetClusterStateBundle(cf.cluster.getNodeInfo(Node.ofDistributor(1)), f.stateBundle, f.d1Waiter.getValue());
    f.simulateBroadcastTick(cf, 123);
    // Activation should now be sent to _all_ nodes (distributor and storage)
    cf.cluster().getNodeInfos().forEach(nodeInfo -> {
        verify(f.mockCommunicator).activateClusterStateVersion(eq(123), eq(nodeInfo), any());
    });
    // But not converged yet, as activations have not been ACKed
    assertNull(f.broadcaster.getLastClusterStateBundleConverged());
}
/**
 * Compares two paths by their group attribute (case-insensitive).
 * Paths without a group sort before paths with one, regardless of sort
 * direction; two group-less paths compare equal.
 *
 * @param p1 first path
 * @param p2 second path
 * @return negative/zero/positive per the Comparator contract
 */
@Override
protected int compareFirst(final Path p1, final Path p2) {
    if(StringUtils.isBlank(p1.attributes().getGroup()) && StringUtils.isBlank(p2.attributes().getGroup())) {
        return 0;
    }
    if(StringUtils.isBlank(p1.attributes().getGroup())) {
        return -1;
    }
    if(StringUtils.isBlank(p2.attributes().getGroup())) {
        return 1;
    }
    if(ascending) {
        return p1.attributes().getGroup().compareToIgnoreCase(p2.attributes().getGroup());
    }
    // Swap operands instead of negating the result: unary minus on a comparison
    // result is the classic comparator anti-pattern (breaks on Integer.MIN_VALUE).
    return p2.attributes().getGroup().compareToIgnoreCase(p1.attributes().getGroup());
}
/**
 * Group comparison: two group-less paths are equal; a path with a group sorts
 * after a group-less one (1), and vice versa (-1).
 */
@Test
public void testCompareFirst() {
    assertEquals(0, new GroupComparator(true).compareFirst(
            new Path("/a", EnumSet.of(Path.Type.file)), new Path("/b", EnumSet.of(Path.Type.file))));
    final Path p = new Path("/a", EnumSet.of(Path.Type.file));
    p.attributes().setGroup("g");
    assertEquals(1, new GroupComparator(true).compareFirst(p, new Path("/b", EnumSet.of(Path.Type.file))));
    assertEquals(-1, new GroupComparator(true).compareFirst(new Path("/b", EnumSet.of(Path.Type.file)), p));
}
/**
 * Returns the entropy of this distribution, as precomputed in the
 * {@code entropy} field.
 */
@Override
public double entropy() {
    return entropy;
}
/**
 * Bernoulli(0.3) entropy must equal -p*log2(p) - q*log2(q) within 1e-7.
 */
@Test
public void testEntropy() {
    System.out.println("entropy");
    BernoulliDistribution instance = new BernoulliDistribution(0.3);
    instance.rand();
    assertEquals(-0.3* MathEx.log2(0.3) - 0.7* MathEx.log2(0.7), instance.entropy(), 1E-7);
}
/**
 * Runs the Liquibase status report, writing pending-changeset information to
 * the configured output stream as UTF-8. Verbosity follows the optional
 * {@code verbose} flag in the namespace (absent means non-verbose).
 *
 * @param namespace parsed command-line arguments
 * @param liquibase the Liquibase instance to report on
 * @throws Exception propagated from Liquibase reporting
 */
@Override
@SuppressWarnings("UseOfSystemOutOrSystemErr")
public void run(Namespace namespace, Liquibase liquibase) throws Exception {
    final Boolean verbose = namespace.getBoolean("verbose");
    liquibase.reportStatus(verbose != null && verbose,
            getContext(namespace),
            new OutputStreamWriter(outputStream, StandardCharsets.UTF_8));
}
/**
 * Running the status command against a fresh migration setup reports exactly
 * three unapplied changesets in the captured output.
 */
@Test
void testRun() throws Exception {
    statusCommand.run(null, new Namespace(Collections.emptyMap()), MigrationTestSupport.createConfiguration());
    assertThat(baos.toString(UTF_8.name())).matches(
            "3 changesets have not been applied to \\S+\\R");
}
/**
 * Deep-clones the given value, choosing the cheapest safe strategy:
 * strings are immutable and returned as-is; collections/maps whose elements
 * are not Serializable go through JSON round-tripping with a reconstructed
 * parametric type; JsonNodes use deepCopy; Serializable objects use Java
 * serialization, falling back to JSON if the object graph is not actually
 * serializable; everything else is JSON-cloned.
 *
 * @param object the value to clone (runtime checks decide the strategy)
 * @return a deep copy of the value
 */
@Override
public <T> T clone(T object) {
    if (object instanceof String) {
        return object;
    } else if (object instanceof Collection) {
        // Inspect one element to decide whether Java serialization could work.
        Object firstElement = findFirstNonNullElement((Collection) object);
        if (firstElement != null && !(firstElement instanceof Serializable)) {
            JavaType type = TypeFactory.defaultInstance().constructParametricType(object.getClass(), firstElement.getClass());
            return objectMapperWrapper.fromBytes(objectMapperWrapper.toBytes(object), type);
        }
    } else if (object instanceof Map) {
        Map.Entry firstEntry = this.findFirstNonNullEntry((Map) object);
        if (firstEntry != null) {
            Object key = firstEntry.getKey();
            Object value = firstEntry.getValue();
            if (!(key instanceof Serializable) || !(value instanceof Serializable)) {
                JavaType type = TypeFactory.defaultInstance().constructParametricType(object.getClass(), key.getClass(), value.getClass());
                return (T) objectMapperWrapper.fromBytes(objectMapperWrapper.toBytes(object), type);
            }
        }
    } else if (object instanceof JsonNode) {
        return (T) ((JsonNode) object).deepCopy();
    }
    if (object instanceof Serializable) {
        try {
            return (T) SerializationHelper.clone((Serializable) object);
        } catch (SerializationException e) {
            //it is possible that object itself implements java.io.Serializable, but underlying structure does not
            //in this case we switch to the other JSON marshaling strategy which doesn't use the Java serialization
        }
    }
    return jsonClone(object);
}
/**
 * Cloning a Serializable object yields an equal but distinct instance.
 */
@Test
public void should_clone_serializable_object() {
    Object original = new SerializableObject("value");
    Object cloned = serializer.clone(original);
    assertEquals(original, cloned);
    assertNotSame(original, cloned);
}
/**
 * Maps each logical column to either its physical index in the table source's
 * produced type or, for time attributes, a stream/batch marker constant.
 * Rowtime/proctime columns are excluded from the physical-index computation
 * and replaced by the appropriate TimeIndicatorTypeInfo marker.
 *
 * @param tableSource    source providing produced data type and time attributes
 * @param logicalColumns logical schema columns, in order
 * @param streamMarkers  true for streaming markers, false for batch markers
 * @param nameRemapping  logical-to-physical field name remapping
 * @return one entry per logical column: physical index or time marker
 */
public static int[] computePhysicalIndicesOrTimeAttributeMarkers(
        TableSource<?> tableSource,
        List<TableColumn> logicalColumns,
        boolean streamMarkers,
        Function<String, String> nameRemapping) {
    Optional<String> proctimeAttribute = getProctimeAttribute(tableSource);
    List<String> rowtimeAttributes = getRowtimeAttributes(tableSource);
    // Time attributes have no physical counterpart; drop them before mapping.
    List<TableColumn> columnsWithoutTimeAttributes =
            logicalColumns.stream()
                    .filter(
                            col ->
                                    !rowtimeAttributes.contains(col.getName())
                                            && proctimeAttribute
                                                    .map(attr -> !attr.equals(col.getName()))
                                                    .orElse(true))
                    .collect(Collectors.toList());
    Map<TableColumn, Integer> columnsToPhysicalIndices =
            TypeMappingUtils.computePhysicalIndices(
                    columnsWithoutTimeAttributes.stream(),
                    tableSource.getProducedDataType(),
                    nameRemapping);
    return logicalColumns.stream()
            .mapToInt(
                    logicalColumn -> {
                        if (proctimeAttribute
                                .map(attr -> attr.equals(logicalColumn.getName()))
                                .orElse(false)) {
                            verifyTimeAttributeType(logicalColumn, "Proctime");
                            if (streamMarkers) {
                                return TimeIndicatorTypeInfo.PROCTIME_STREAM_MARKER;
                            } else {
                                return TimeIndicatorTypeInfo.PROCTIME_BATCH_MARKER;
                            }
                        } else if (rowtimeAttributes.contains(logicalColumn.getName())) {
                            verifyTimeAttributeType(logicalColumn, "Rowtime");
                            if (streamMarkers) {
                                return TimeIndicatorTypeInfo.ROWTIME_STREAM_MARKER;
                            } else {
                                return TimeIndicatorTypeInfo.ROWTIME_BATCH_MARKER;
                            }
                        } else {
                            return columnsToPhysicalIndices.get(logicalColumn);
                        }
                    })
            .toArray();
}
/**
 * A rowtime column whose logical type is TIME (not a timestamp type) must be
 * rejected with a ValidationException carrying the exact error message.
 */
@Test
void testWrongLogicalTypeForRowtimeAttribute() {
    TestTableSource tableSource =
            new TestTableSource(
                    DataTypes.BIGINT(), Collections.singletonList("rowtime"), "proctime");
    assertThatThrownBy(
                    () ->
                            TypeMappingUtils.computePhysicalIndicesOrTimeAttributeMarkers(
                                    tableSource,
                                    TableSchema.builder()
                                            .field("a", Types.LONG)
                                            .field("rowtime", Types.SQL_TIME)
                                            .field("proctime", Types.SQL_TIMESTAMP)
                                            .build()
                                            .getTableColumns(),
                                    false,
                                    Function.identity()))
            .isInstanceOf(ValidationException.class)
            .hasMessage(
                    "Rowtime field 'rowtime' has invalid type TIME(0). Rowtime attributes "
                            + "must be of a Timestamp family.");
}
/**
 * Lazily-initialized shared engine. Building a TemplateEngine and its resolver
 * is relatively expensive and the engine is thread-safe once configured (per
 * Thymeleaf documentation), so it is created once instead of per render call.
 */
private static final class EngineHolder {
    static final TemplateEngine ENGINE = createEngine();

    private static TemplateEngine createEngine() {
        TemplateEngine templateEngine = new TemplateEngine();
        ClassLoaderTemplateResolver resolver = new ClassLoaderTemplateResolver();
        resolver.setTemplateMode(TemplateMode.HTML);
        resolver.setCharacterEncoding("UTF-8");
        resolver.setPrefix("/templates/");
        resolver.setSuffix(".html");
        templateEngine.setTemplateResolver(resolver);
        return templateEngine;
    }
}

/**
 * Renders the named HTML template from the classpath {@code /templates/}
 * directory with the given model data.
 *
 * @param templateName template name without the {@code .html} suffix
 * @param data         model variables exposed to the template
 * @return the rendered HTML
 */
public static String render(String templateName, Map<String, Object> data) {
    Context context = new Context();
    data.forEach(context::setVariable);
    return EngineHolder.ENGINE.process(templateName, context);
}
/**
 * Rendering the "index" template with a single model variable produces the
 * expected HTML fixture.
 */
@Test
public void testRender() {
    String result = TemplateRenderer.render("index", Map.of("testValue", "testRender"));
    assertEquals(EXPECTED, result);
}
/**
 * Rewrites the given AST node by dispatching to the configured rewriter with
 * the supplied context.
 *
 * @param node    the node to rewrite
 * @param context rewrite context passed through to the rewriter
 * @return the rewritten node
 */
public AstNode rewrite(final AstNode node, final C context) {
    return rewriter.process(node, context);
}
/**
 * Rewriting a WindowExpression yields an equal WindowExpression with the same
 * location, name and (mocked) window expression.
 */
@Test
public void shouldRewriteWindowExpression() {
    // Given:
    final KsqlWindowExpression ksqlWindowExpression = mock(KsqlWindowExpression.class);
    final WindowExpression windowExpression =
            new WindowExpression(location, "name", ksqlWindowExpression);
    // When:
    final AstNode rewritten = rewriter.rewrite(windowExpression, context);
    // Then:
    assertThat(
            rewritten,
            equalTo(new WindowExpression(location, "name", ksqlWindowExpression))
    );
}
/**
 * Formats an "alerts" notification into a plain-text email message; returns
 * null for any other notification type so other templates can handle it.
 *
 * @param notification the notification to format
 * @return the email message, or null if this template does not apply
 */
@Override
@CheckForNull
public EmailMessage format(Notification notification) {
    if (!"alerts".equals(notification.getType())) {
        return null;
    }
    // Retrieve useful values
    String projectId = notification.getFieldValue("projectId");
    String projectKey = notification.getFieldValue("projectKey");
    String projectName = notification.getFieldValue("projectName");
    String projectVersion = notification.getFieldValue("projectVersion");
    String branchName = notification.getFieldValue("branch");
    String alertName = notification.getFieldValue("alertName");
    String alertText = notification.getFieldValue("alertText");
    String alertLevel = notification.getFieldValue("alertLevel");
    String ratingMetricsInOneString = notification.getFieldValue("ratingMetrics");
    boolean isNewAlert = Boolean.parseBoolean(notification.getFieldValue("isNewAlert"));
    String fullProjectName = computeFullProjectName(projectName, branchName);
    // Generate text
    String subject = generateSubject(fullProjectName, alertLevel, isNewAlert);
    String messageBody = generateMessageBody(projectName, projectKey, projectVersion, branchName,
            alertName, alertText, isNewAlert, ratingMetricsInOneString);
    // And finally return the email that will be sent
    return new EmailMessage()
            .setMessageId("alerts/" + projectId)
            .setSubject(subject)
            .setPlainTextMessage(messageBody);
}
/**
 * A new alert with two comma-separated threshold messages is rendered with one
 * bullet per threshold, and the subject marks it as a new quality gate event.
 */
@Test
public void shouldFormatNewAlertWithSeveralMessages() {
    Notification notification = createNotification("Failed", "violations > 4, coverage < 75%", "ERROR", "true");
    EmailMessage message = template.format(notification);
    assertThat(message.getMessageId(), is("alerts/45"));
    assertThat(message.getSubject(), is("New quality gate threshold reached on \"Foo\""));
    assertThat(message.getMessage(), is("" +
            "Project: Foo\n" +
            "Version: V1-SNAP\n" +
            "Quality gate status: Failed\n" +
            "\n" +
            "New quality gate thresholds:\n" +
            "  - violations > 4\n" +
            "  - coverage < 75%\n" +
            "\n" +
            "More details at: http://nemo.sonarsource.org/dashboard?id=org.sonar.foo:foo"));
}
/**
 * Deprecated transformValues overload taking a plain ValueTransformerSupplier.
 * Adapts the supplier to the with-key variant and delegates to the shared
 * implementation with no explicit name.
 *
 * @param valueTransformerSupplier supplier of the value transformer (non-null)
 * @param stateStoreNames          names of state stores the transformer uses
 * @return the transformed stream
 */
@Override
@Deprecated
public <VR> KStream<K, VR> transformValues(final org.apache.kafka.streams.kstream.ValueTransformerSupplier<? super V, ? extends VR> valueTransformerSupplier,
                                           final String... stateStoreNames) {
    Objects.requireNonNull(valueTransformerSupplier, "valueTransformerSupplier can't be null");
    return doTransformValues(
        toValueTransformerWithKeySupplier(valueTransformerSupplier),
        NamedInternal.empty(),
        stateStoreNames);
}
/**
 * Passing a null state-store-name array to transformValues must fail fast with
 * a NullPointerException carrying the expected message.
 */
@Test
@SuppressWarnings("deprecation")
public void shouldNotAllowNullStoreNamesOnTransformValuesWithValueTransformerWithKeySupplier() {
    final NullPointerException exception = assertThrows(
        NullPointerException.class,
        () -> testStream.transformValues(
            valueTransformerWithKeySupplier,
            (String[]) null));
    assertThat(exception.getMessage(), equalTo("stateStoreNames can't be a null array"));
}
@Override public final void writeRecord(OUT record) throws IOException { checkAsyncErrors(); tryAcquire(1); final CompletionStage<V> completionStage; try { completionStage = send(record); } catch (Throwable e) { semaphore.release(); throw e; } completionStage.whenComplete( (result, throwable) -> { if (throwable == null) { callback.onSuccess(result); } else { callback.onFailure(throwable); } }); }
@Test void testMaxConcurrentRequestsReached() throws Exception { try (TestOutputFormat testOutputFormat = createOpenedTestOutputFormat(Duration.ofMillis(1))) { CompletableFuture<Void> completableFuture = new CompletableFuture<>(); testOutputFormat.enqueueCompletableFuture(completableFuture); testOutputFormat.enqueueCompletableFuture(completableFuture); testOutputFormat.writeRecord("writeRecord #1"); // writing a second time while the first request is still not completed and the // outputFormat is set for maxConcurrentRequests=1 will fail assertThatThrownBy( () -> testOutputFormat.writeRecord("writeRecord #2"), "Sending value should have experienced a TimeoutException.") .hasCauseInstanceOf(TimeoutException.class); completableFuture.complete(null); } }
public static Locale localeFromString(String s) { if (!s.contains(LOBAR)) { return new Locale(s); } String[] items = s.split(LOBAR); return new Locale(items[0], items[1]); }
@Test public void localeFromStringItIT() { title("localeFromStringItIT"); locale = LionUtils.localeFromString("it_IT"); checkLanguageCountry(locale, "it", "IT"); }
@Override public void remove(String key) { if (key == null) { return; } Map<String, String> current = readWriteThreadLocalMap.get(); if (current != null) { current.remove(key); nullifyReadOnlyThreadLocalMap(); } }
@Test public void removeInexistentKey() { mdcAdapter.remove("abcdlw0"); }
int parseAndConvert(String[] args) throws Exception { Options opts = createOptions(); int retVal = 0; try { if (args.length == 0) { LOG.info("Missing command line arguments"); printHelp(opts); return 0; } CommandLine cliParser = new GnuParser().parse(opts, args); if (cliParser.hasOption(CliOption.HELP.shortSwitch)) { printHelp(opts); return 0; } FSConfigToCSConfigConverter converter = prepareAndGetConverter(cliParser); converter.convert(converterParams); String outputDir = converterParams.getOutputDirectory(); boolean skipVerification = cliParser.hasOption(CliOption.SKIP_VERIFICATION.shortSwitch); if (outputDir != null && !skipVerification) { validator.validateConvertedConfig( converterParams.getOutputDirectory()); } } catch (ParseException e) { String msg = "Options parsing failed: " + e.getMessage(); logAndStdErr(e, msg); printHelp(opts); retVal = -1; } catch (PreconditionException e) { String msg = "Cannot start FS config conversion due to the following" + " precondition error: " + e.getMessage(); handleException(e, msg); retVal = -1; } catch (UnsupportedPropertyException e) { String msg = "Unsupported property/setting encountered during FS config " + "conversion: " + e.getMessage(); handleException(e, msg); retVal = -1; } catch (ConversionException | IllegalArgumentException e) { String msg = "Fatal error during FS config conversion: " + e.getMessage(); handleException(e, msg); retVal = -1; } catch (VerificationException e) { Throwable cause = e.getCause(); String msg = "Verification failed: " + e.getCause().getMessage(); conversionOptions.handleVerificationFailure(cause, msg); retVal = -1; } conversionOptions.handleParsingFinished(); return retVal; }
@Test public void testValidationSkippedWhenOutputIsConsole() throws Exception { setupFSConfigConversionFiles(true); FSConfigToCSConfigArgumentHandler argumentHandler = new FSConfigToCSConfigArgumentHandler(conversionOptions, mockValidator); String[] args = getArgumentsAsArrayWithDefaults("-f", FSConfigConverterTestCommons.FS_ALLOC_FILE, "-s", "-p"); argumentHandler.parseAndConvert(args); verifyZeroInteractions(mockValidator); }
@Override public AppResponse process(Flow flow, ActivationUsernamePasswordRequest body) throws SharedServiceClientException { digidClient.remoteLog("1088", Map.of(lowerUnderscore(HIDDEN), true)); var result = digidClient.authenticate(body.getUsername(), body.getPassword()); if (result.get(lowerUnderscore(STATUS)).equals("NOK") && result.get(ERROR) != null ) { final var error = (String) result.get(ERROR); if (ERROR_DECEASED.equals(error)) { digidClient.remoteLog("1482", Map.of(lowerUnderscore(ACCOUNT_ID), result.get(lowerUnderscore(ACCOUNT_ID)), "hidden", true)); } else if (ERROR_NO_BSN.equals(error)) { digidClient.remoteLog("1074", Map.of(lowerUnderscore(ACCOUNT_ID), result.get(lowerUnderscore(ACCOUNT_ID)))); } else if (ERROR_ACCOUNT_BLOCKED.equals(error)) { return new PasswordConfirmedResponse((String) result.get(ERROR), result); } return new NokResponse((String) result.get(ERROR)); } return Optional.ofNullable(validateAmountOfApps(Long.valueOf((Integer) result.get(lowerUnderscore(ACCOUNT_ID))), body)) .orElseGet(() -> getActivationUsernamePasswordResponse(body, result)); }
@Test void nokResponseToManyAmountOfApps() throws SharedServiceClientException { when(sharedServiceClientMock.getSSConfigInt("Maximum_aantal_DigiD_apps_eindgebruiker")).thenReturn(5); AppAuthenticator oldApp = new AppAuthenticator(); oldApp.setDeviceName("test_device"); oldApp.setLastSignInAt(ZonedDateTime.now()); when(digidClientMock.authenticate(anyString(), anyString())).thenReturn(responseDigidClient); when(appAuthenticatorServiceMock.countByAccountIdAndInstanceIdNot(anyLong(), anyString())).thenReturn(6); when(appAuthenticatorServiceMock.findLeastRecentApp(anyLong())).thenReturn(oldApp); AppResponse result = passwordConfirmed.process(flow, request); assertTrue(result instanceof NokResponse); assertNotNull(result); NokResponse response = (NokResponse) result; assertNotNull(response); assertEquals("NOK", response.getStatus()); assertEquals("too_many_active", response.getError()); }
T getFunction(final List<SqlArgument> arguments) { // first try to get the candidates without any implicit casting Optional<T> candidate = findMatchingCandidate(arguments, false); if (candidate.isPresent()) { return candidate.get(); } else if (!supportsImplicitCasts) { throw createNoMatchingFunctionException(arguments); } // if none were found (candidate isn't present) try again with implicit casting candidate = findMatchingCandidate(arguments, true); if (candidate.isPresent()) { return candidate.get(); } throw createNoMatchingFunctionException(arguments); }
@Test public void shouldChooseVarArgsIfSpecificDoesntMatch() { // Given: givenFunctions( function(OTHER, -1, STRING), function(EXPECTED, 0, STRING_VARARGS) ); // When: final KsqlScalarFunction fun = udfIndex.getFunction(ImmutableList.of( SqlArgument.of(SqlTypes.STRING), SqlArgument.of(SqlTypes.STRING) )); // Then: assertThat(fun.name(), equalTo(EXPECTED)); }
@Override protected Object getTargetObject(boolean key) { Object targetObject; if (key) { // keyData is never null if (keyData.isPortable() || keyData.isJson() || keyData.isCompact()) { targetObject = keyData; } else { targetObject = getKey(); } } else { if (valueObject == null) { targetObject = getTargetObjectFromData(); } else { if (valueObject instanceof PortableGenericRecord || valueObject instanceof CompactGenericRecord) { // These two classes should be able to be handled by respective Getters // see PortableGetter and CompactGetter // We get into this branch when in memory format is Object and // - the cluster does not have PortableFactory configuration for Portable // - the cluster does not related classes for Compact targetObject = getValue(); } else if (valueObject instanceof Portable || serializationService.isCompactSerializable(valueObject)) { targetObject = getValueData(); } else { // Note that targetObject can be PortableGenericRecord // and it will be handled with PortableGetter for query. // We get PortableGenericRecord here when in-memory format is OBJECT and // the cluster does not have PortableFactory configuration for the object's factory ID targetObject = getValue(); } } } return targetObject; }
@Test(expected = NullPointerException.class) public void testGetTargetObject_givenInstanceIsNotInitialized_whenKeyFlagIsTrue_thenThrowNPE() { QueryableEntry entry = createEntry(); entry.getTargetObject(true); }
protected static boolean isBeanPropertyReadMethod(Method method) { return method != null && Modifier.isPublic(method.getModifiers()) && !Modifier.isStatic(method.getModifiers()) && method.getReturnType() != void.class && method.getDeclaringClass() != Object.class && method.getParameterTypes().length == 0 && (method.getName().startsWith("get") || method.getName().startsWith("is")) // 排除就叫get和is的方法 && (!"get".equals(method.getName()) && !"is".equals(method.getName())); }
@Test public void testIsBeanPropertyReadMethod() throws Exception { Assert.assertFalse(isBeanPropertyReadMethod(null)); Assert.assertTrue(isBeanPropertyReadMethod(TestReflect.class.getMethod("getS"))); Assert.assertFalse(isBeanPropertyReadMethod(TestReflect.class.getMethod("get"))); Assert.assertFalse(isBeanPropertyReadMethod(TestReflect.class.getMethod("is"))); Assert.assertFalse(isBeanPropertyReadMethod(TestReflect.class.getDeclaredMethod("get1"))); Assert.assertFalse(isBeanPropertyReadMethod(TestReflect.class.getMethod("get2"))); Assert.assertFalse(isBeanPropertyReadMethod(TestReflect.class.getMethod("get3"))); Assert.assertFalse(isBeanPropertyReadMethod(TestReflect.class.getMethod("get4", String.class))); Assert.assertFalse(isBeanPropertyReadMethod(TestReflect.class.getMethod("aget5"))); Assert.assertFalse(isBeanPropertyReadMethod(TestReflect.class.getMethod("ais5"))); }
@Override protected String getRootKey() { return Constants.HEADER_COS + mBucketName; }
@Test public void testGetRootKey() { Assert.assertEquals(Constants.HEADER_COS + BUCKET_NAME, mCOSUnderFileSystem.getRootKey()); }
@Override public boolean hasVariable(String name) { if (variables.containsKey(name)) { return true; } if (parent != null) { return parent.hasVariable(name); } return false; }
@Test public void testHasVariable() { ProcessContextImpl context = new ProcessContextImpl(); Assertions.assertFalse(context.hasVariable("key")); }
@Override public List<Connection> getConnections(final String databaseName, final String dataSourceName, final int connectionOffset, final int connectionSize, final ConnectionMode connectionMode) throws SQLException { Preconditions.checkNotNull(databaseName, "Current database name is null."); Collection<Connection> connections; String cacheKey = getKey(databaseName, dataSourceName); synchronized (cachedConnections) { connections = cachedConnections.get(cacheKey); } List<Connection> result; int maxConnectionSize = connectionOffset + connectionSize; if (connections.size() >= maxConnectionSize) { result = new ArrayList<>(connections).subList(connectionOffset, maxConnectionSize); } else if (connections.isEmpty()) { Collection<Connection> newConnections = createNewConnections(databaseName, dataSourceName, maxConnectionSize, connectionMode); result = new ArrayList<>(newConnections).subList(connectionOffset, maxConnectionSize); synchronized (cachedConnections) { cachedConnections.putAll(cacheKey, newConnections); } executeTransactionHooksAfterCreateConnections(result); } else { List<Connection> allConnections = new ArrayList<>(maxConnectionSize); allConnections.addAll(connections); List<Connection> newConnections = createNewConnections(databaseName, dataSourceName, maxConnectionSize - connections.size(), connectionMode); allConnections.addAll(newConnections); result = allConnections.subList(connectionOffset, maxConnectionSize); synchronized (cachedConnections) { cachedConnections.putAll(cacheKey, newConnections); } } return result; }
@Test void assertGetConnectionWithConnectionOffset() throws SQLException { when(backendDataSource.getConnections(anyString(), anyString(), eq(1), any())).thenReturn(MockConnectionUtils.mockNewConnections(1)); assertThat(databaseConnectionManager.getConnections(DefaultDatabase.LOGIC_NAME, "ds1", 0, 1, ConnectionMode.MEMORY_STRICTLY), is(databaseConnectionManager.getConnections(DefaultDatabase.LOGIC_NAME, "ds1", 0, 1, ConnectionMode.MEMORY_STRICTLY))); when(backendDataSource.getConnections(anyString(), anyString(), eq(1), any())).thenReturn(MockConnectionUtils.mockNewConnections(1)); assertThat(databaseConnectionManager.getConnections(DefaultDatabase.LOGIC_NAME, "ds1", 1, 1, ConnectionMode.MEMORY_STRICTLY), is(databaseConnectionManager.getConnections(DefaultDatabase.LOGIC_NAME, "ds1", 1, 1, ConnectionMode.MEMORY_STRICTLY))); when(backendDataSource.getConnections(anyString(), anyString(), eq(1), any())).thenReturn(MockConnectionUtils.mockNewConnections(1)); assertThat(databaseConnectionManager.getConnections(DefaultDatabase.LOGIC_NAME, "ds1", 0, 1, ConnectionMode.MEMORY_STRICTLY), not(databaseConnectionManager.getConnections(DefaultDatabase.LOGIC_NAME, "ds1", 1, 1, ConnectionMode.MEMORY_STRICTLY))); }
public String route(final ReadwriteSplittingDataSourceGroupRule rule) { return rule.getLoadBalancer().getTargetName(rule.getName(), getFilteredReadDataSources(rule)); }
@Test void assertRoute() { assertThat(new StandardReadwriteSplittingDataSourceRouter().route(rule), is("read_ds_0")); }
@Override public PageResult<GoViewProjectDO> getMyProjectPage(PageParam pageReqVO, Long userId) { return goViewProjectMapper.selectPage(pageReqVO, userId); }
@Test public void testGetMyGoViewProjectPage() { // mock 数据 GoViewProjectDO dbGoViewProject = randomPojo(GoViewProjectDO.class, o -> { // 等会查询到 o.setCreator("1"); }); goViewProjectMapper.insert(dbGoViewProject); // 测试 userId 不匹配 goViewProjectMapper.insert(cloneIgnoreId(dbGoViewProject, o -> o.setCreator("2"))); // 准备参数 PageParam reqVO = new PageParam(); Long userId = 1L; // 调用 PageResult<GoViewProjectDO> pageResult = goViewProjectService.getMyProjectPage(reqVO, userId); // 断言 assertEquals(1, pageResult.getTotal()); assertEquals(1, pageResult.getList().size()); assertPojoEquals(dbGoViewProject, pageResult.getList().get(0)); }
public static <T> CsvIOParse<T> parse(Class<T> klass, CSVFormat csvFormat) { CsvIOParseHelpers.validateCsvFormat(csvFormat); SchemaProvider provider = new DefaultSchema.DefaultSchemaProvider(); TypeDescriptor<T> type = TypeDescriptor.of(klass); Schema schema = checkStateNotNull( provider.schemaFor(type), "Illegal %s: Schema could not be generated from given %s class", Schema.class, klass); CsvIOParseHelpers.validateCsvFormatWithSchema(csvFormat, schema); SerializableFunction<Row, T> fromRowFn = checkStateNotNull( provider.fromRowFunction(type), "FromRowFn could not be generated from the given %s class", klass); SerializableFunction<T, Row> toRowFn = checkStateNotNull( provider.toRowFunction(type), "ToRowFn could not be generated from the given %s class", klass); SchemaCoder<T> coder = SchemaCoder.of(schema, type, toRowFn, fromRowFn); CsvIOParseConfiguration.Builder<T> builder = CsvIOParseConfiguration.builder(); builder.setCsvFormat(csvFormat).setSchema(schema).setCoder(coder).setFromRowFn(fromRowFn); return CsvIOParse.<T>builder().setConfigBuilder(builder).build(); }
@Test public void givenNonSchemaMappedClass_throws() { Pipeline pipeline = Pipeline.create(); CSVFormat csvFormat = CSVFormat.DEFAULT .withHeader("a_string", "an_integer", "a_double") .withAllowDuplicateHeaderNames(false); assertThrows( IllegalStateException.class, () -> CsvIO.parse(NonSchemaMappedPojo.class, csvFormat)); pipeline.run(); }
public RuleDescriptionSectionsGenerator getRuleDescriptionSectionsGenerator(RulesDefinition.Rule ruleDef) { Set<RuleDescriptionSectionsGenerator> generatorsFound = ruleDescriptionSectionsGenerators.stream() .filter(generator -> generator.isGeneratorForRule(ruleDef)) .collect(toSet()); checkState(generatorsFound.size() < 2, "More than one rule description section generator found for rule with key %s", ruleDef.key()); checkState(!generatorsFound.isEmpty(), "No rule description section generator found for rule with key %s", ruleDef.key()); return generatorsFound.iterator().next(); }
@Test public void getRuleDescriptionSectionsGenerator_returnsTheCorrectGenerator() { when(generator2.isGeneratorForRule(rule)).thenReturn(true); assertThat(resolver.getRuleDescriptionSectionsGenerator(rule)).isEqualTo(generator2); }
@Override public boolean isAddressReachable(String addr) { if (addr == null || addr.isEmpty()) { return false; } try { Channel channel = getAndCreateChannel(addr); return channel != null && channel.isActive(); } catch (Exception e) { LOGGER.warn("Get and create channel of {} failed", addr, e); return false; } }
@Test public void testIsAddressReachableFail() throws NoSuchFieldException, IllegalAccessException { Bootstrap bootstrap = spy(Bootstrap.class); Field field = NettyRemotingClient.class.getDeclaredField("bootstrap"); field.setAccessible(true); field.set(remotingClient, bootstrap); assertThat(remotingClient.isAddressReachable("0.0.0.0:8080")).isFalse(); verify(bootstrap).connect(eq("0.0.0.0"), eq(8080)); assertThat(remotingClient.isAddressReachable("[fe80::]:8080")).isFalse(); verify(bootstrap).connect(eq("[fe80::]"), eq(8080)); }
@Override public ConnectorStateInfo.TaskState taskStatus(ConnectorTaskId id) { TaskStatus status = statusBackingStore.get(id); if (status == null) throw new NotFoundException("No status found for task " + id); return new ConnectorStateInfo.TaskState(id.task(), status.state().toString(), status.workerId(), status.trace()); }
@Test public void testTaskStatus() { ConnectorTaskId taskId = new ConnectorTaskId(connectorName, 0); String workerId = "workerId"; AbstractHerder herder = testHerder(); final ArgumentCaptor<TaskStatus> taskStatusArgumentCaptor = ArgumentCaptor.forClass(TaskStatus.class); doNothing().when(statusStore).putSafe(taskStatusArgumentCaptor.capture()); when(statusStore.get(taskId)).thenAnswer(invocation -> taskStatusArgumentCaptor.getValue()); herder.onFailure(taskId, new RuntimeException()); ConnectorStateInfo.TaskState taskState = herder.taskStatus(taskId); assertEquals(workerId, taskState.workerId()); assertEquals("FAILED", taskState.state()); assertEquals(0, taskState.id()); assertNotNull(taskState.trace()); }
public static boolean isValidIpV6Address(String ip) { return isValidIpV6Address((CharSequence) ip); }
@Test public void testIsValidIpV6Address() { for (String host : validIpV6Hosts.keySet()) { assertTrue(isValidIpV6Address(host), host); if (host.charAt(0) != '[' && !host.contains("%")) { assertNotNull(getByName(host, true), host); String hostMod = '[' + host + ']'; assertTrue(isValidIpV6Address(hostMod), hostMod); hostMod = host + '%'; assertTrue(isValidIpV6Address(hostMod), hostMod); hostMod = host + "%eth1"; assertTrue(isValidIpV6Address(hostMod), hostMod); hostMod = '[' + host + "%]"; assertTrue(isValidIpV6Address(hostMod), hostMod); hostMod = '[' + host + "%1]"; assertTrue(isValidIpV6Address(hostMod), hostMod); hostMod = '[' + host + "]%"; assertFalse(isValidIpV6Address(hostMod), hostMod); hostMod = '[' + host + "]%1"; assertFalse(isValidIpV6Address(hostMod), hostMod); } } for (String host : invalidIpV6Hosts.keySet()) { assertFalse(isValidIpV6Address(host), host); assertNull(getByName(host), host); String hostMod = '[' + host + ']'; assertFalse(isValidIpV6Address(hostMod), hostMod); hostMod = host + '%'; assertFalse(isValidIpV6Address(hostMod), hostMod); hostMod = host + "%eth1"; assertFalse(isValidIpV6Address(hostMod), hostMod); hostMod = '[' + host + "%]"; assertFalse(isValidIpV6Address(hostMod), hostMod); hostMod = '[' + host + "%1]"; assertFalse(isValidIpV6Address(hostMod), hostMod); hostMod = '[' + host + "]%"; assertFalse(isValidIpV6Address(hostMod), hostMod); hostMod = '[' + host + "]%1"; assertFalse(isValidIpV6Address(hostMod), hostMod); hostMod = host + ']'; assertFalse(isValidIpV6Address(hostMod), hostMod); hostMod = '[' + host; assertFalse(isValidIpV6Address(hostMod), hostMod); } }
@Override public Object decode(Response response, Type type) throws IOException, DecodeException { if (response.status() == 404 || response.status() == 204) if (JSONObject.class.isAssignableFrom((Class<?>) type)) return new JSONObject(); else if (JSONArray.class.isAssignableFrom((Class<?>) type)) return new JSONArray(); else if (String.class.equals(type)) return null; else throw new DecodeException(response.status(), format("%s is not a type supported by this decoder.", type), response.request()); if (response.body() == null) return null; try (Reader reader = response.body().asReader(response.charset())) { Reader bodyReader = (reader.markSupported()) ? reader : new BufferedReader(reader); bodyReader.mark(1); if (bodyReader.read() == -1) { return null; // Empty body } bodyReader.reset(); return decodeBody(response, type, bodyReader); } catch (JSONException jsonException) { if (jsonException.getCause() != null && jsonException.getCause() instanceof IOException) { throw (IOException) jsonException.getCause(); } throw new DecodeException(response.status(), jsonException.getMessage(), response.request(), jsonException); } }
@Test void unknownTypeThrowsDecodeException() throws IOException { String json = "[{\"a\":\"b\",\"c\":1},123]"; Response response = Response.builder() .status(200) .reason("OK") .headers(Collections.emptyMap()) .body(json, UTF_8) .request(request) .build(); Exception exception = assertThrows(DecodeException.class, () -> new JsonDecoder().decode(response, Clock.class)); assertThat(exception.getMessage()) .isEqualTo("class java.time.Clock is not a type supported by this decoder."); }
public static String formatSql(final AstNode root) { final StringBuilder builder = new StringBuilder(); new Formatter(builder).process(root, 0); return StringUtils.stripEnd(builder.toString(), "\n"); }
@Test public void shouldFormatRightJoinWithoutJoinWindow() { final Join join = new Join(leftAlias, ImmutableList.of(new JoinedSource( Optional.empty(), rightAlias, JoinedSource.Type.RIGHT, criteria, Optional.empty()))); final String result = SqlFormatter.formatSql(join); final String expected = "`left` L\nRIGHT OUTER JOIN `right` R ON (('left.col0' = 'right.col0'))"; assertEquals(expected, result); }
@Override public int reacquireContainer(ContainerReacquisitionContext ctx) throws IOException, InterruptedException { try { if (numaResourceAllocator != null) { numaResourceAllocator.recoverNumaResource(ctx.getContainerId()); } return super.reacquireContainer(ctx); } finally { postComplete(ctx.getContainerId()); } }
@Test public void testReacquireContainer() throws Exception { @SuppressWarnings("unchecked") ConcurrentHashMap<ContainerId, Container> mockContainers = mock( ConcurrentHashMap.class); Context mockContext = mock(Context.class); NMStateStoreService mock = mock(NMStateStoreService.class); when(mockContext.getNMStateStore()).thenReturn(mock); ResourceMappings resourceMappings = new ResourceMappings(); AssignedResources assignedRscs = new AssignedResources(); when(mockContainer.getResource()) .thenReturn(Resource.newInstance(147434, 2)); ContainerId cid = ContainerId.fromString("container_1481156246874_0001_01_000001"); when(mockContainer.getContainerId()).thenReturn(cid); NumaResourceAllocation numaResourceAllocation = numaResourceAllocator.allocateNumaNodes(mockContainer); assignedRscs.updateAssignedResources(Arrays.asList(numaResourceAllocation)); resourceMappings.addAssignedResources("numa", assignedRscs); when(mockContainer.getResourceMappings()).thenReturn(resourceMappings); when(mockContainers.get(any())).thenReturn(mockContainer); when(mockContext.getContainers()).thenReturn(mockContainers); // recovered numa resources should be added to the used resources and // remaining will be available for further allocation. 
ContainerReacquisitionContext containerReacquisitionContext = new ContainerReacquisitionContext.Builder() .setContainerId(cid) .setUser("user") .setContainer(mockContainer) .build(); containerExecutor.reacquireContainer(containerReacquisitionContext); // reacquireContainer recovers all the numa resources , // that should be free to use next testAllocateNumaResource("container_1481156246874_0001_01_000001", Resource.newInstance(147434, 2), "0,1", "1"); when(mockContainer.getContainerId()).thenReturn( ContainerId.fromString("container_1481156246874_0001_01_000004")); when(mockContainer.getResource()) .thenReturn(Resource.newInstance(1024, 2)); // returns null since there are no sufficient resources available for the request Assert.assertNull(numaResourceAllocator.allocateNumaNodes(mockContainer)); }
public CompletableFuture<Map<TopicIdPartition, ShareAcknowledgeResponseData.PartitionData>> acknowledge( String memberId, String groupId, Map<TopicIdPartition, List<ShareAcknowledgementBatch>> acknowledgeTopics ) { log.trace("Acknowledge request for topicIdPartitions: {} with groupId: {}", acknowledgeTopics.keySet(), groupId); this.shareGroupMetrics.shareAcknowledgement(); Map<TopicIdPartition, CompletableFuture<Errors>> futures = new HashMap<>(); acknowledgeTopics.forEach((topicIdPartition, acknowledgePartitionBatches) -> { SharePartition sharePartition = partitionCacheMap.get(sharePartitionKey(groupId, topicIdPartition)); if (sharePartition != null) { CompletableFuture<Errors> future = sharePartition.acknowledge(memberId, acknowledgePartitionBatches).thenApply(throwable -> { if (throwable.isPresent()) { return Errors.forException(throwable.get()); } acknowledgePartitionBatches.forEach(batch -> { batch.acknowledgeTypes().forEach(this.shareGroupMetrics::recordAcknowledgement); }); return Errors.NONE; }); futures.put(topicIdPartition, future); } else { futures.put(topicIdPartition, CompletableFuture.completedFuture(Errors.UNKNOWN_TOPIC_OR_PARTITION)); } }); CompletableFuture<Void> allFutures = CompletableFuture.allOf( futures.values().toArray(new CompletableFuture[0])); return allFutures.thenApply(v -> { Map<TopicIdPartition, ShareAcknowledgeResponseData.PartitionData> result = new HashMap<>(); futures.forEach((topicIdPartition, future) -> result.put(topicIdPartition, new ShareAcknowledgeResponseData.PartitionData() .setPartitionIndex(topicIdPartition.partition()) .setErrorCode(future.join().code()))); return result; }); }
@Test public void testAcknowledgeSinglePartition() { String groupId = "grp"; String memberId = Uuid.randomUuid().toString(); TopicIdPartition tp = new TopicIdPartition(Uuid.randomUuid(), new TopicPartition("foo", 0)); SharePartition sp = mock(SharePartition.class); when(sp.acknowledge(ArgumentMatchers.eq(memberId), any())).thenReturn(CompletableFuture.completedFuture(Optional.empty())); Map<SharePartitionManager.SharePartitionKey, SharePartition> partitionCacheMap = new HashMap<>(); partitionCacheMap.put(new SharePartitionManager.SharePartitionKey(groupId, tp), sp); SharePartitionManager sharePartitionManager = SharePartitionManagerBuilder.builder() .withPartitionCacheMap(partitionCacheMap).build(); Map<TopicIdPartition, List<ShareAcknowledgementBatch>> acknowledgeTopics = new HashMap<>(); acknowledgeTopics.put(tp, Arrays.asList( new ShareAcknowledgementBatch(12, 20, Collections.singletonList((byte) 1)), new ShareAcknowledgementBatch(24, 56, Collections.singletonList((byte) 1)) )); CompletableFuture<Map<TopicIdPartition, ShareAcknowledgeResponseData.PartitionData>> resultFuture = sharePartitionManager.acknowledge(memberId, groupId, acknowledgeTopics); Map<TopicIdPartition, ShareAcknowledgeResponseData.PartitionData> result = resultFuture.join(); assertEquals(1, result.size()); assertTrue(result.containsKey(tp)); assertEquals(0, result.get(tp).partitionIndex()); assertEquals(Errors.NONE.code(), result.get(tp).errorCode()); }
@Override public int run(String[] argv) throws Exception { Options options = buildOptions(); if(argv.length == 0) { printHelp(); return 0; } // print help and exit with zero exit code if (argv.length == 1 && isHelpOption(argv[0])) { printHelp(); return 0; } CommandLineParser parser = new PosixParser(); CommandLine cmd; try { cmd = parser.parse(options, argv); } catch (ParseException e) { System.out.println( "Error parsing command-line options: " + e.getMessage()); printHelp(); return -1; } if (cmd.hasOption("h")) { // print help and exit with non zero exit code since // it is not expected to give help and other options together. printHelp(); return -1; } String inputFileName = cmd.getOptionValue("i"); String outputFileName = cmd.getOptionValue("o"); String processor = cmd.getOptionValue("p"); if(processor == null) { processor = defaultProcessor; } Flags flags = new Flags(); if (cmd.hasOption("r")) { flags.setRecoveryMode(); } if (cmd.hasOption("f")) { flags.setFixTxIds(); } if (cmd.hasOption("v")) { flags.setPrintToScreen(); } return go(inputFileName, outputFileName, processor, flags, null); }
@Test public void testOfflineEditsViewerHelpMessage() throws Throwable { final ByteArrayOutputStream bytes = new ByteArrayOutputStream(); final PrintStream out = new PrintStream(bytes); final PrintStream oldOut = System.out; try { System.setOut(out); int status = new OfflineEditsViewer().run(new String[] { "-h" }); assertTrue("" + "Exit code returned for help option is incorrect", status == 0); Assert.assertFalse( "Invalid Command error displayed when help option is passed.", bytes .toString().contains("Error parsing command-line options")); } finally { System.setOut(oldOut); IOUtils.closeStream(out); } }
@Override public int getStatusCode() { return this.response.getStatusLine().getStatusCode(); }
@Test void testGetStatusCode() { when(statusLine.getStatusCode()).thenReturn(200); assertEquals(200, clientHttpResponse.getStatusCode()); }
@Override public ImmutableList<String> computeEntrypoint(List<String> jvmFlags) { ImmutableList.Builder<String> entrypoint = ImmutableList.builder(); entrypoint.add("java"); entrypoint.addAll(jvmFlags); entrypoint.add("-jar"); entrypoint.add(JarLayers.APP_ROOT + "/" + jarPath.getFileName().toString()); return entrypoint.build(); }
@Test public void testComputeEntrypoint_jvmFlag() throws URISyntaxException { Path springBootJar = Paths.get(Resources.getResource(SPRING_BOOT_JAR).toURI()); SpringBootPackagedProcessor springBootProcessor = new SpringBootPackagedProcessor(springBootJar, JAR_JAVA_VERSION); ImmutableList<String> actualEntrypoint = springBootProcessor.computeEntrypoint(ImmutableList.of("-jvm-flag")); assertThat(actualEntrypoint) .isEqualTo(ImmutableList.of("java", "-jvm-flag", "-jar", "/app/springboot_sample.jar")); }
public SortedSet<Path> getLogDirs(Set<Path> logDirs, Predicate<String> predicate) { // we could also make this static, but not to do it due to mock TreeSet<Path> ret = new TreeSet<>(); for (Path logDir: logDirs) { String workerId = ""; try { Optional<Path> metaFile = getMetadataFileForWorkerLogDir(logDir); if (metaFile.isPresent()) { workerId = getWorkerIdFromMetadataFile(metaFile.get().toAbsolutePath().normalize()); if (workerId == null) { workerId = ""; } } } catch (IOException e) { LOG.warn("Error trying to find worker.yaml in {}", logDir, e); } if (predicate.test(workerId)) { ret.add(logDir); } } return ret; }
@Test public void testIdentifyWorkerLogDirs() throws Exception { try (TmpPath testDir = new TmpPath()) { Path port1Dir = Files.createDirectories(testDir.getFile().toPath().resolve("workers-artifacts/topo1/port1")); Path metaFile = Files.createFile(testDir.getFile().toPath().resolve("worker.yaml")); String expId = "id12345"; SortedSet<Path> expected = new TreeSet<>(); expected.add(port1Dir); SupervisorUtils mockedSupervisorUtils = mock(SupervisorUtils.class); SupervisorUtils.setInstance(mockedSupervisorUtils); Map<String, Object> stormConf = Utils.readStormConfig(); WorkerLogs workerLogs = new WorkerLogs(stormConf, port1Dir, new StormMetricsRegistry()) { @Override public Optional<Path> getMetadataFileForWorkerLogDir(Path logDir) { return Optional.of(metaFile); } @Override public String getWorkerIdFromMetadataFile(Path metaFile) { return expId; } }; when(mockedSupervisorUtils.readWorkerHeartbeatsImpl(anyMap())).thenReturn(null); assertEquals(expected, workerLogs.getLogDirs(Collections.singleton(port1Dir), (wid) -> true)); } finally { SupervisorUtils.resetInstance(); } }
@GET @Path(RMWSConsts.CHECK_USER_ACCESS_TO_QUEUE) @Produces({ MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8, MediaType.APPLICATION_XML + "; " + JettyUtils.UTF_8 }) public RMQueueAclInfo checkUserAccessToQueue( @PathParam(RMWSConsts.QUEUE) String queue, @QueryParam(RMWSConsts.USER) String username, @QueryParam(RMWSConsts.QUEUE_ACL_TYPE) @DefaultValue("SUBMIT_APPLICATIONS") String queueAclType, @Context HttpServletRequest hsr) throws AuthorizationException { initForReadableEndpoints(); // For the user who invokes this REST call, he/she should have admin access // to the queue. Otherwise, we will reject the call. UserGroupInformation callerUGI = getCallerUserGroupInformation(hsr, true); if (callerUGI != null && !this.rm.getResourceScheduler().checkAccess( callerUGI, QueueACL.ADMINISTER_QUEUE, queue)) { throw new ForbiddenException( "User=" + callerUGI.getUserName() + " doesn't haven access to queue=" + queue + " so it cannot check ACLs for other users."); } // Create UGI for the to-be-checked user. UserGroupInformation user = UserGroupInformation.createRemoteUser(username); if (user == null) { throw new ForbiddenException( "Failed to retrieve UserGroupInformation for user=" + username); } // Check if the specified queue acl is valid. QueueACL queueACL; try { queueACL = QueueACL.valueOf(queueAclType); } catch (IllegalArgumentException e) { throw new BadRequestException("Specified queueAclType=" + queueAclType + " is not a valid type, valid queue acl types={" + "SUBMIT_APPLICATIONS/ADMINISTER_QUEUE}"); } if (!this.rm.getResourceScheduler().checkAccess(user, queueACL, queue)) { return new RMQueueAclInfo(false, user.getUserName(), "User=" + username + " doesn't have access to queue=" + queue + " with acl-type=" + queueAclType); } return new RMQueueAclInfo(true, user.getUserName(), ""); }
@Test
public void testCheckUserAccessToQueue() throws Exception {
    ResourceManager mockRM = mock(ResourceManager.class);
    Configuration conf = new YarnConfiguration();

    // Inject a mock scheduler implementation.
    // Only admin user has ADMINISTER_QUEUE access.
    // For SUBMIT_APPLICATION ACL, both of admin/yarn user have access.
    ResourceScheduler mockScheduler = new FifoScheduler() {
        @Override
        public synchronized boolean checkAccess(UserGroupInformation callerUGI,
            QueueACL acl, String queueName) {
            if (acl == QueueACL.ADMINISTER_QUEUE) {
                if (callerUGI.getUserName().equals("admin")) {
                    return true;
                }
            } else {
                if (ImmutableSet.of("admin", "yarn").contains(callerUGI.getUserName())) {
                    return true;
                }
            }
            return false;
        }
    };

    when(mockRM.getResourceScheduler()).thenReturn(mockScheduler);
    RMWebServices webSvc = new RMWebServices(mockRM, conf, mock(HttpServletResponse.class));

    boolean caughtException = false;
    // Case 1: Only queue admin user can access other user's information
    HttpServletRequest mockHsr = mockHttpServletRequestByUserName("non-admin");
    try {
        webSvc.checkUserAccessToQueue("queue", "jack", QueueACL.SUBMIT_APPLICATIONS.name(), mockHsr);
    } catch (ForbiddenException e) {
        caughtException = true;
    }
    Assert.assertTrue(caughtException);

    // Case 2: request an unknown ACL causes BAD_REQUEST
    mockHsr = mockHttpServletRequestByUserName("admin");
    caughtException = false;
    try {
        webSvc.checkUserAccessToQueue("queue", "jack", "XYZ_ACL", mockHsr);
    } catch (BadRequestException e) {
        caughtException = true;
    }
    Assert.assertTrue(caughtException);

    // Case 3: get FORBIDDEN for rejected ACL
    mockHsr = mockHttpServletRequestByUserName("admin");
    Assert.assertFalse(webSvc.checkUserAccessToQueue("queue", "jack",
        QueueACL.SUBMIT_APPLICATIONS.name(), mockHsr).isAllowed());
    Assert.assertFalse(webSvc.checkUserAccessToQueue("queue", "jack",
        QueueACL.ADMINISTER_QUEUE.name(), mockHsr).isAllowed());

    // Case 4: get OK for listed ACLs
    mockHsr = mockHttpServletRequestByUserName("admin");
    Assert.assertTrue(webSvc.checkUserAccessToQueue("queue", "admin",
        QueueACL.SUBMIT_APPLICATIONS.name(), mockHsr).isAllowed());
    Assert.assertTrue(webSvc.checkUserAccessToQueue("queue", "admin",
        QueueACL.ADMINISTER_QUEUE.name(), mockHsr).isAllowed());

    // Case 5: get OK only for SUBMIT_APP acl for "yarn" user
    mockHsr = mockHttpServletRequestByUserName("admin");
    Assert.assertTrue(webSvc.checkUserAccessToQueue("queue", "yarn",
        QueueACL.SUBMIT_APPLICATIONS.name(), mockHsr).isAllowed());
    Assert.assertFalse(webSvc.checkUserAccessToQueue("queue", "yarn",
        QueueACL.ADMINISTER_QUEUE.name(), mockHsr).isAllowed());
}
/**
 * Renders the event's markers as a space-separated string.
 * Returns {@code EMPTY} when the event carries no markers.
 *
 * Fix: use an unsynchronized {@link StringBuilder} instead of
 * {@code StringBuffer} — the buffer is method-local, so synchronization
 * is pure overhead.
 */
public String convert(ILoggingEvent le) {
    List<Marker> markers = le.getMarkerList();
    if (markers == null || markers.isEmpty()) {
        return EMPTY;
    }
    int size = markers.size();
    // Common case: a single marker needs no buffer at all.
    if (size == 1) {
        return markers.get(0).toString();
    }
    StringBuilder buf = new StringBuilder(32);
    for (int i = 0; i < size; i++) {
        if (i != 0) {
            buf.append(' ');
        }
        buf.append(markers.get(i).toString());
    }
    return buf.toString();
}
@Test
public void testWithNullMarker() {
    // An event carrying no marker must render as the empty string.
    String rendered = converter.convert(createLoggingEvent(null));
    assertEquals("", rendered);
}
/**
 * Builds a JQL "key in (...)" clause from the given issue keys.
 * Each key is single-quoted and keys are comma-separated, e.g.
 * {@code key in ('JENKINS-123','FOO-1')}.
 */
static String constructJQLQuery(Collection<String> issueKeys) {
    StringBuilder jql = new StringBuilder("key in (");
    boolean first = true;
    for (String issueKey : issueKeys) {
        if (!first) {
            jql.append(",");
        }
        jql.append("'").append(issueKey).append("'");
        first = false;
    }
    return jql.append(")").toString();
}
@Test
public void constructJQLQuery() throws Exception {
    // A single key is quoted inside the "key in (...)" clause.
    String single = JiraSCMListener.constructJQLQuery(Collections.singletonList("JENKINS-123"));
    Assert.assertEquals("key in ('JENKINS-123')", single);

    // Multiple keys are comma-separated, each individually quoted.
    String multiple = JiraSCMListener.constructJQLQuery(
        Arrays.asList("JENKINS-123", "FOO-123", "VIVEK-123"));
    Assert.assertEquals("key in ('JENKINS-123','FOO-123','VIVEK-123')", multiple);
}
/**
 * Returns the elapsed time recorded for the given method, removing the
 * record afterwards (even if reading the elapsed time throws).
 */
public long getElapsedTimeAndClean(final TargetAdviceMethod method) {
    String recordKey = getKey(method);
    try {
        return getElapsedTime(recordKey);
    } finally {
        // Always drop the record so it cannot be read twice.
        clean(recordKey);
    }
}
@Test
void assertGetElapsedTimeAndCleanWithoutRecorded() {
    // With no time recorded for the method, the elapsed time defaults to zero.
    MethodTimeRecorder recorder = new MethodTimeRecorder(AgentAdvice.class);
    assertThat(recorder.getElapsedTimeAndClean(new TargetAdviceMethod("test")), is(0L));
}
/**
 * Applies a remote snapshot: deserializes the datum snapshot from the distro
 * payload and replays each client's sync data locally. Always reports success.
 */
@Override
public boolean processSnapshot(DistroData distroData) {
    Serializer serializer = ApplicationUtils.getBean(Serializer.class);
    ClientSyncDatumSnapshot snapshot =
            serializer.deserialize(distroData.getContent(), ClientSyncDatumSnapshot.class);
    for (ClientSyncData clientSyncData : snapshot.getClientSyncDataList()) {
        handlerClientSyncData(clientSyncData);
    }
    return true;
}
@Test
void testProcessSnapshot() {
    // Snapshot containing a single client's sync data.
    ClientSyncDatumSnapshot snapshot = new ClientSyncDatumSnapshot();
    snapshot.setClientSyncDataList(Collections.singletonList(clientSyncData));
    when(serializer.deserialize(any(), eq(ClientSyncDatumSnapshot.class))).thenReturn(snapshot);
    // Before processing: the client has no revision and no published services.
    assertEquals(0L, client.getRevision());
    assertEquals(0, client.getAllPublishedService().size());
    distroClientDataProcessor.processSnapshot(distroData);
    // Processing must register the client connection and apply the sync data.
    verify(clientManager).syncClientConnected(CLIENT_ID, clientSyncData.getAttributes());
    assertEquals(1L, client.getRevision());
    assertEquals(1, client.getAllPublishedService().size());
}
@Override
public void onStreamRequest(StreamRequest req, RequestContext requestContext,
    Map<String, String> wireAttrs, NextFilter<StreamRequest, StreamResponse> nextFilter) {
    // Delegate to the shared disruption logic, which inspects the request
    // context to decide whether to pass the request through or disrupt it.
    disruptRequest(req, requestContext, wireAttrs, nextFilter);
}
@Test
public void testStreamErrorDisrupt() throws Exception {
    // Mark the request context so the filter injects an error disruption.
    final RequestContext requestContext = new RequestContext();
    requestContext.putLocalAttr(DISRUPT_CONTEXT_KEY, DisruptContexts.error(REQUEST_LATENCY));
    final DisruptFilter filter = new DisruptFilter(_scheduler, _executor, REQUEST_TIMEOUT, _clock);
    final CountDownLatch latch = new CountDownLatch(1);
    final AtomicBoolean success = new AtomicBoolean(false);
    // Only an onError carrying a DisruptedException counts as success;
    // onRequest/onResponse still count the latch down so the test never hangs.
    final NextFilter<StreamRequest, StreamResponse> next = new NextFilter<StreamRequest, StreamResponse>() {
        @Override
        public void onRequest(StreamRequest restRequest, RequestContext requestContext, Map<String, String> wireAttrs) {
            latch.countDown();
        }

        @Override
        public void onResponse(StreamResponse restResponse, RequestContext requestContext, Map<String, String> wireAttrs) {
            latch.countDown();
        }

        @Override
        public void onError(Throwable ex, RequestContext requestContext, Map<String, String> wireAttrs) {
            success.set(ex instanceof DisruptedException);
            latch.countDown();
        }
    };
    filter.onStreamRequest(new StreamRequestBuilder(new URI(URI)).build(EntityStreams.emptyStream()), requestContext,
        Collections.emptyMap(), next);
    Assert.assertTrue(latch.await(TEST_TIMEOUT, TimeUnit.MILLISECONDS), "Missing NextFilter invocation");
    Assert.assertTrue(success.get(), "Unexpected method invocation");
}
/**
 * Builds a RunResponse for the given workflow instance, translating the raw
 * state code into a typed {@code Status}.
 */
public static RunResponse from(WorkflowInstance instance, int state) {
    // Resolve the status first; an unknown code fails before anything else.
    Status status = Status.fromCode(state);
    return RunResponse.builder()
        .status(status)
        .workflowId(instance.getWorkflowId())
        .workflowVersionId(instance.getWorkflowVersionId())
        .workflowInstanceId(instance.getWorkflowInstanceId())
        .workflowRunId(instance.getWorkflowRunId())
        .workflowUuid(instance.getWorkflowUuid())
        .timelineEvent(instance.getInitiator().getTimelineEvent())
        .build();
}
@Test
public void testBuildFromState() {
    // State 1 maps to WORKFLOW_RUN_CREATED.
    RunResponse res = RunResponse.from(instance, 1);
    Assert.assertEquals(RunResponse.Status.WORKFLOW_RUN_CREATED, res.getStatus());
    // State 0 maps to DUPLICATED.
    res = RunResponse.from(instance, 0);
    Assert.assertEquals(RunResponse.Status.DUPLICATED, res.getStatus());
    // State -1 maps to STOPPED.
    res = RunResponse.from(instance, -1);
    Assert.assertEquals(RunResponse.Status.STOPPED, res.getStatus());
    // Any unmapped code is rejected with an internal error.
    AssertHelper.assertThrows(
        "Invalid status code",
        MaestroInternalError.class,
        "Invalid status code value: 2",
        () -> RunResponse.from(instance, 2));
}
/**
 * Merges two runtime metrics into a new metric, leaving both inputs unmodified.
 * A null operand means "nothing to merge": the other metric is returned as-is.
 * Both metrics must share the same unit, otherwise an IllegalStateException
 * is thrown.
 */
public static RuntimeMetric merge(RuntimeMetric metric1, RuntimeMetric metric2) {
    if (metric1 == null || metric2 == null) {
        return metric1 == null ? metric2 : metric1;
    }
    checkState(metric1.getUnit() == metric2.getUnit(), "Two metrics to be merged must have the same unit type.");
    // Merge into a copy so neither input is mutated.
    RuntimeMetric merged = copyOf(metric1);
    merged.mergeWith(metric2);
    return merged;
}
@Test(expectedExceptions = {IllegalStateException.class})
public void testMergeWithConflictUnits() {
    // Merging metrics with different units (NANO vs BYTE) must fail fast.
    RuntimeMetric metric1 = new RuntimeMetric(TEST_METRIC_NAME, NANO, 5, 2, 4, 1);
    RuntimeMetric metric2 = new RuntimeMetric(TEST_METRIC_NAME, BYTE, 20, 2, 11, 9);
    RuntimeMetric.merge(metric1, metric2);
}
/**
 * Returns the default schema name: the dialect-defined default schema when
 * present, otherwise the lower-cased database name, or null when the database
 * name itself is null.
 * NOTE(review): toLowerCase() uses the default locale — confirm schema names
 * are expected to be ASCII here.
 */
public String getDefaultSchemaName(final String databaseName) {
    return dialectDatabaseMetaData.getDefaultSchema()
            .orElseGet(() -> databaseName == null ? null : databaseName.toLowerCase());
}
@Test
void assertGetDefaultSchemaNameWhenDatabaseTypeNotContainsDefaultSchemaAndNullDatabaseName() {
    // With no dialect default schema and a null database name, the result is null.
    DatabaseType databaseType = TypedSPILoader.getService(DatabaseType.class, "BRANCH");
    assertNull(new DatabaseTypeRegistry(databaseType).getDefaultSchemaName(null));
}
/**
 * Deprecated pass-through: delegates unchanged to the superclass
 * implementation. See the superclass for the actual serialization behavior.
 */
@Deprecated
@Override
public void toXML(Object obj, OutputStream out) {
    super.toXML(obj, out);
}
@Test
public void annotations() {
    // XStream annotations are only honored after processing; verify each path.
    assertEquals("not registered, so sorry", "<hudson.util.XStream2Test_-C1/>", Jenkins.XSTREAM2.toXML(new C1()));
    assertEquals("manually registered", "<C-2/>", Jenkins.XSTREAM2.toXML(new C2()));
    assertEquals("manually processed", "<C-3/>", Jenkins.XSTREAM2.toXML(new C3()));
    // An unprocessed alias cannot be resolved when reading.
    assertThrows(CannotResolveClassException.class, () -> Jenkins.XSTREAM2.fromXML("<C-4/>"));
    Jenkins.XSTREAM2.processAnnotations(C5.class);
    assertThat("can deserialize from annotations so long as the processing happened at some point",
        Jenkins.XSTREAM2.fromXML("<C-5/>"), instanceOf(C5.class));
}
/**
 * Parses the CLI arguments, runs the FS-to-CS configuration conversion and
 * (unless skipped) validates the converted configuration.
 *
 * @return 0 on success or when help was printed, -1 on any handled error
 *
 * Fix: the VerificationException handler dereferenced {@code e.getCause()}
 * without a null check, causing an NPE when the exception had no cause;
 * it now falls back to the exception's own message.
 */
int parseAndConvert(String[] args) throws Exception {
  Options opts = createOptions();
  int retVal = 0;

  try {
    if (args.length == 0) {
      LOG.info("Missing command line arguments");
      printHelp(opts);
      return 0;
    }

    CommandLine cliParser = new GnuParser().parse(opts, args);

    if (cliParser.hasOption(CliOption.HELP.shortSwitch)) {
      printHelp(opts);
      return 0;
    }

    FSConfigToCSConfigConverter converter = prepareAndGetConverter(cliParser);
    converter.convert(converterParams);

    // Verify the converted config unless verification is explicitly skipped
    // or no output directory was produced.
    String outputDir = converterParams.getOutputDirectory();
    boolean skipVerification = cliParser.hasOption(CliOption.SKIP_VERIFICATION.shortSwitch);
    if (outputDir != null && !skipVerification) {
      validator.validateConvertedConfig(converterParams.getOutputDirectory());
    }
  } catch (ParseException e) {
    String msg = "Options parsing failed: " + e.getMessage();
    logAndStdErr(e, msg);
    printHelp(opts);
    retVal = -1;
  } catch (PreconditionException e) {
    String msg = "Cannot start FS config conversion due to the following"
        + " precondition error: " + e.getMessage();
    handleException(e, msg);
    retVal = -1;
  } catch (UnsupportedPropertyException e) {
    String msg = "Unsupported property/setting encountered during FS config "
        + "conversion: " + e.getMessage();
    handleException(e, msg);
    retVal = -1;
  } catch (ConversionException | IllegalArgumentException e) {
    String msg = "Fatal error during FS config conversion: " + e.getMessage();
    handleException(e, msg);
    retVal = -1;
  } catch (VerificationException e) {
    Throwable cause = e.getCause();
    String detail = cause != null ? cause.getMessage() : e.getMessage();
    String msg = "Verification failed: " + detail;
    conversionOptions.handleVerificationFailure(cause, msg);
    retVal = -1;
  }

  // Always notify listeners that parsing has finished, regardless of outcome.
  conversionOptions.handleParsingFinished();
  return retVal;
}
@Test
public void testEmptyFairSchedulerXmlSpecified() throws Exception {
    // Conversion of an empty fair-scheduler.xml should complete without error.
    FSConfigConverterTestCommons.configureEmptyFairSchedulerXml();
    FSConfigConverterTestCommons.configureEmptyYarnSiteXml();
    FSConfigConverterTestCommons.configureDummyConversionRulesFile();
    String[] args = getArgumentsAsArrayWithDefaults("-f", FSConfigConverterTestCommons.FS_ALLOC_FILE);
    FSConfigToCSConfigArgumentHandler argumentHandler = createArgumentHandler();
    argumentHandler.parseAndConvert(args);
}
/**
 * Validates that every placeholder parameter found in the description template
 * has a binding in {@code params}, throwing IllegalArgumentException for the
 * first missing one.
 */
public static void assertParamsMatchWithDescription(
    String descriptionTemplate, Map<String, String> params) {
  getParams(descriptionTemplate)
      .forEach(
          param -> {
            // Guard clause: a bound parameter needs no further checking.
            if (params.containsKey(param)) {
              return;
            }
            throw new IllegalArgumentException(
                String.format(
                    "Param [%s] is not set in error message [%s]", param, descriptionTemplate));
          });
}
@Test
void testAssertParamsMatchWithDescription() {
    String description = "test description with param <key1>, <key2> and <key3>.";
    Map<String, String> params = new HashMap<>();
    params.put("key1", "value1");
    params.put("key2", "value2");
    params.put("key3", "value3");
    // All placeholders bound: no exception expected.
    ExceptionParamsUtil.assertParamsMatchWithDescription(description, params);
    // Removing one binding must trigger the validation failure.
    params.remove("key2");
    Assertions.assertThrows(
        IllegalArgumentException.class,
        () -> ExceptionParamsUtil.assertParamsMatchWithDescription(description, params));
}
/**
 * Maps a SARIF result to a new external issue: fixed type/clean-code defaults,
 * the driver as engine id, severities derived from the rule levels, and
 * locations copied from the result. Fails if the result has no rule id.
 */
NewExternalIssue mapResult(String driverName, @Nullable Result.Level ruleSeverity,
    @Nullable Result.Level ruleSeverityForNewTaxonomy, Result result) {
  NewExternalIssue issue = sensorContext.newExternalIssue();
  issue.type(DEFAULT_TYPE);
  issue.engineId(driverName);
  issue.severity(toSonarQubeSeverity(ruleSeverity));
  issue.ruleId(requireNonNull(result.getRuleId(), "No ruleId found for issue thrown by driver " + driverName));
  issue.cleanCodeAttribute(DEFAULT_CLEAN_CODE_ATTRIBUTE);
  issue.addImpact(DEFAULT_SOFTWARE_QUALITY, toSonarQubeImpactSeverity(ruleSeverityForNewTaxonomy));
  mapLocations(result, issue);
  return issue;
}
@Test
public void mapResult_whenRelatedLocationExists_createsSecondaryFileLocation_no_messages() {
    // A related location without a message should produce a secondary location
    // on which message() is never called.
    Location relatedLocationWithoutMessage = new Location();
    result.withRelatedLocations(Set.of(relatedLocationWithoutMessage));
    var newIssueLocationCall2 = mock(NewIssueLocation.class);
    // First newLocation() call serves the primary location, the second the related one.
    when(mockExternalIssue.newLocation()).thenReturn(newExternalIssueLocation, newIssueLocationCall2);
    NewExternalIssue newExternalIssue = resultMapper.mapResult(DRIVER_NAME, WARNING, WARNING, result);
    verify(newExternalIssue).addLocation(newIssueLocationCall2);
    verify(newIssueLocationCall2, never()).message(anyString());
}
@Override public boolean registerListener(Object listener) { if (listener instanceof HazelcastInstanceAware aware) { aware.setHazelcastInstance(node.hazelcastInstance); } if (listener instanceof ClusterVersionListener clusterVersionListener) { clusterVersionListeners.add(clusterVersionListener); // on registration, invoke once the listening method so version is properly initialized on the listener clusterVersionListener.onClusterVersionChange(getClusterOrNodeVersion()); return true; } return false; }
@Test
public void test_listenerHazelcastInstanceInjected_whenHazelcastInstanceAware() {
    // Registering a HazelcastInstanceAware listener must inject the instance.
    HazelcastInstanceAwareVersionListener listener = new HazelcastInstanceAwareVersionListener();
    boolean registered = nodeExtension.registerListener(listener);
    assertTrue(registered);
    assertEquals(hazelcastInstance, listener.getInstance());
}
/**
 * Returns the rewrite rules contributed by this optimizer: one for filter row
 * expressions and one for project row expressions.
 */
@Override
public Set<Rule<?>> rules() {
    return ImmutableSet.of(filterRowExpressionRewriteRule(), projectRowExpressionRewriteRule());
}
@Test
public void testSubscriptCast() {
    // With REMOVE_MAP_CAST enabled, a cast of the map operand in a subscript
    // expression should be removed and replaced by a cast on the key instead.
    tester().assertThat(
            ImmutableSet.<Rule<?>>builder().addAll(new SimplifyRowExpressions(getMetadata()).rules()).addAll(new RemoveMapCastRule(getFunctionManager()).rules()).build())
        .setSystemProperty(REMOVE_MAP_CAST, "true")
        .on(p -> {
            VariableReferenceExpression a = p.variable("a", DOUBLE);
            VariableReferenceExpression feature = p.variable("feature", createMapType(getFunctionManager(), INTEGER, DOUBLE));
            VariableReferenceExpression key = p.variable("key", BIGINT);
            // Project a = cast(feature as map<bigint, double>)[key].
            return p.project(
                assignment(a, p.rowExpression("cast(feature as map<bigint, double>)[key]")),
                p.values(feature, key));
        })
        .matches(
            project(
                ImmutableMap.of("a", expression("feature[cast(key as integer)]")),
                values("feature", "key")));
}
public void execute(final PrioritizableRunnable runnable) {
    // Enqueue first, then publish via the counter: the task must be visible in
    // the queue before the pending count signals work to the execution loop.
    _queue.add(runnable);
    // Guarantees that execution loop is scheduled only once to the underlying executor.
    // Also makes sure that all memory effects of last Runnable are visible to the next Runnable
    // in case value returned by decrementAndGet == 0.
    if (_pendingCount.getAndIncrement() == 0) {
        tryExecuteLoop();
    }
}
@Test(dataProvider = "draining")
public void testExecuteTwoStepPlan(boolean draining) throws InterruptedException {
    // Submitting from within a running task ("outer" schedules "inner") must
    // still execute the inner task, reject nothing, and deactivate exactly once.
    final LatchedRunnable inner = new LatchedRunnable();
    final Runnable outer = new Runnable() {
        @Override
        public void run() {
            _serialExecutor.execute(inner);
        }
    };
    _executorService.execute(outer);
    assertTrue(inner.await(5, TimeUnit.SECONDS));
    assertFalse(_rejectionHandler.wasExecuted());
    assertTrue(_capturingDeactivationListener.await(5, TimeUnit.SECONDS));
    assertEquals(_capturingDeactivationListener.getDeactivatedCount(), 1);
}
/**
 * Evaluates the compiled term against the given row.
 * Visibility widened for tests only.
 */
@VisibleForTesting
Object evaluate(final GenericRow row) {
    return term.getValue(new TermEvaluationContext(row));
}
@Test
public void shouldEvaluateUnaryArithmetic() {
    // Given: unary +/- applied to integer, decimal and double literals.
    final Expression expression1 = new ArithmeticUnaryExpression(
        Optional.empty(), Sign.PLUS, new IntegerLiteral(1)
    );
    final Expression expression2 = new ArithmeticUnaryExpression(
        Optional.empty(), Sign.MINUS, new IntegerLiteral(1)
    );
    final Expression expression3 = new ArithmeticUnaryExpression(
        Optional.empty(), Sign.MINUS, new DecimalLiteral(new BigDecimal("345.5"))
    );
    final Expression expression4 = new ArithmeticUnaryExpression(
        Optional.empty(), Sign.MINUS, new DoubleLiteral(45.5d)
    );

    // When:
    InterpretedExpression interpreter1 = interpreter(expression1);
    InterpretedExpression interpreter2 = interpreter(expression2);
    InterpretedExpression interpreter3 = interpreter(expression3);
    InterpretedExpression interpreter4 = interpreter(expression4);

    // Then: the sign is applied and each literal's numeric type is preserved.
    assertThat(interpreter1.evaluate(ROW), is(1));
    assertThat(interpreter2.evaluate(ROW), is(-1));
    assertThat(interpreter3.evaluate(ROW), is(new BigDecimal("-345.5")));
    assertThat(interpreter4.evaluate(ROW), is(-45.5d));
}
/**
 * Applies the default run configuration to the supplied execution configuration
 * (transformation or job) and clears any engine-related variables left over
 * from a previous run.
 */
@Override
public void execute( RunConfiguration runConfiguration, ExecutionConfiguration executionConfiguration,
                     AbstractMeta meta, VariableSpace variableSpace, Repository repository ) throws KettleException {
  DefaultRunConfiguration defaultRunConfiguration = (DefaultRunConfiguration) runConfiguration;

  // Configure whichever execution type was supplied.
  if ( executionConfiguration instanceof TransExecutionConfiguration ) {
    configureTransExecution( (TransExecutionConfiguration) executionConfiguration, defaultRunConfiguration,
      variableSpace, meta, repository );
  }
  if ( executionConfiguration instanceof JobExecutionConfiguration ) {
    configureJobExecution( (JobExecutionConfiguration) executionConfiguration, defaultRunConfiguration,
      variableSpace, meta, repository );
  }

  // Reset all engine-related variables.
  for ( String engineVariable : new String[] { "engine", "engine.remote", "engine.scheme", "engine.url" } ) {
    variableSpace.setVariable( engineVariable, null );
  }
}
@Test
public void testExecutePentahoTrans() throws Exception {
    // A "Pentaho engine" configuration is neither local nor remote execution.
    DefaultRunConfiguration config = new DefaultRunConfiguration();
    config.setName( "Default Configuration" );
    config.setLocal( false );
    config.setPentaho( true );
    config.setRemote( false );
    TransExecutionConfiguration executionConfiguration = new TransExecutionConfiguration();
    defaultRunConfigurationExecutor.execute( config, executionConfiguration, abstractMeta, variableSpace, null );
    assertFalse( executionConfiguration.isExecutingLocally() );
    assertFalse( executionConfiguration.isExecutingRemotely() );
}
/**
 * Fails unless the actual value is greater than or equal to {@code other}.
 * Both the actual value and {@code other} must be non-null.
 */
@SuppressWarnings("unchecked")
public final void isAtLeast(@Nullable T other) {
    // Casting a (possibly null) reference is safe; null-check before comparing.
    Comparable<Object> subject = (Comparable<Object>) checkNotNull(actual);
    if (subject.compareTo(checkNotNull(other)) < 0) {
        failWithActual("expected to be at least", other);
    }
}
@Test
public void isAtLeast() {
    // Strictly greater and equal values both satisfy isAtLeast.
    assertThat(4).isAtLeast(3);
    assertThat(4).isAtLeast(4);
    // A smaller value fails, and the failure message carries the bound.
    expectFailureWhenTestingThat(4).isAtLeast(5);
    assertFailureValue("expected to be at least", "5");
}
@Override
protected double maintain() {
    // Skip entirely while the node repository is unavailable.
    if ( ! nodeRepository().nodes().isWorking()) return 0.0;

    // Test environments report full success without doing any autoscaling work.
    if (nodeRepository().zone().environment().isTest()) return 1.0;

    int attempts = 0;
    int failures = 0;
    // Labeled loop so a shutdown aborts across both application and cluster levels.
    outer:
    for (var applicationNodes : activeNodesByApplication().entrySet()) {
        for (var clusterNodes : nodesByCluster(applicationNodes.getValue()).entrySet()) {
            if (shuttingDown()) break outer;
            attempts++;
            if ( ! autoscale(applicationNodes.getKey(), clusterNodes.getKey())) failures++;
        }
    }
    // Report success as a function of attempted vs failed autoscalings.
    return asSuccessFactorDeviation(attempts, failures);
}
@Test
public void empty_autoscaling_is_ignored() {
    // Regression test: once measurements go out of scope, maintenance must not
    // overwrite a previously computed peak load with an empty one.
    ApplicationId app1 = AutoscalingMaintainerTester.makeApplicationId("app1");
    ClusterSpec cluster1 = AutoscalingMaintainerTester.containerClusterSpec();
    NodeResources resources = new NodeResources(4, 4, 10, 1);
    ClusterResources min = new ClusterResources(2, 1, resources);
    ClusterResources max = new ClusterResources(20, 1, resources);
    var capacity = Capacity.from(min, max);
    var tester = new AutoscalingMaintainerTester(new MockDeployer.ApplicationContext(app1, cluster1, capacity));

    // Add a scaling event
    tester.deploy(app1, cluster1, capacity);
    tester.addMeasurements(1.0f, 0.3f, 0.3f, 0, 4, app1, cluster1.id());
    tester.maintainer().maintain();
    assertEquals("Scale up: " + tester.cluster(app1, cluster1).target().status(),
                 1,
                 tester.cluster(app1, cluster1).lastScalingEvent().get().generation());
    Load peak = tester.cluster(app1, cluster1).target().peak();
    assertNotEquals(Load.zero(), peak);

    // Old measurements go out of scope and no new ones are made
    tester.clock().advance(Duration.ofDays(1));
    tester.maintainer().maintain();
    Load newPeak = tester.cluster(app1, cluster1).target().peak();
    assertEquals("Old measurements are retained", peak, newPeak);
}
@Override
public void doSendSms(SmsSendMessage message) {
    // Look up the SMS client for the channel the message belongs to.
    SmsClient smsClient = smsChannelService.getSmsClient(message.getChannelId());
    Assert.notNull(smsClient, "短信客户端({}) 不存在", message.getChannelId());
    // Send the SMS and record the provider's response on the log entry.
    try {
        SmsSendRespDTO sendResponse = smsClient.sendSms(message.getLogId(), message.getMobile(),
                message.getApiTemplateId(), message.getTemplateParams());
        smsLogService.updateSmsSendResult(message.getLogId(),
                sendResponse.getSuccess(), sendResponse.getApiCode(), sendResponse.getApiMsg(),
                sendResponse.getApiRequestId(), sendResponse.getSerialNo());
    } catch (Throwable ex) {
        // Best-effort: any failure is captured on the log record instead of propagating.
        log.error("[doSendSms][发送短信异常,日志编号({})]", message.getLogId(), ex);
        smsLogService.updateSmsSendResult(message.getLogId(),
                false, "EXCEPTION", ExceptionUtil.getRootCauseMessage(ex), null, null);
    }
}
@Test
@SuppressWarnings("unchecked")
public void testDoSendSms() throws Throwable {
    // Prepare a random message to send.
    SmsSendMessage message = randomPojo(SmsSendMessage.class);
    // Mock SmsChannelService to return a spy client for the message's channel.
    SmsClient smsClient = spy(SmsClient.class);
    when(smsChannelService.getSmsClient(eq(message.getChannelId()))).thenReturn(smsClient);
    // Mock the client's send call to return a random provider response.
    SmsSendRespDTO sendResult = randomPojo(SmsSendRespDTO.class);
    when(smsClient.sendSms(eq(message.getLogId()), eq(message.getMobile()),
            eq(message.getApiTemplateId()), eq(message.getTemplateParams()))).thenReturn(sendResult);
    // Invoke the method under test.
    smsSendService.doSendSms(message);
    // Assert: the provider response must be recorded on the log entry.
    verify(smsLogService).updateSmsSendResult(eq(message.getLogId()),
            eq(sendResult.getSuccess()), eq(sendResult.getApiCode()), eq(sendResult.getApiMsg()),
            eq(sendResult.getApiRequestId()), eq(sendResult.getSerialNo()));
}