repo stringclasses 1k
values | file_url stringlengths 96 373 | file_path stringlengths 11 294 | content stringlengths 0 32.8k | language stringclasses 1
value | license stringclasses 6
values | commit_sha stringclasses 1k
values | retrieved_at stringdate 2026-01-04 14:45:56 2026-01-04 18:30:23 | truncated bool 2
classes |
|---|---|---|---|---|---|---|---|---|
Hyperfoil/Hyperfoil | https://github.com/Hyperfoil/Hyperfoil/blob/70ad9cad7ba105e88d7c62e7b65892ecd288f034/distribution/src/main/java/io/hyperfoil/example/DivideStep.java | distribution/src/main/java/io/hyperfoil/example/DivideStep.java | package io.hyperfoil.example;
import java.util.Collections;
import java.util.List;
import org.kohsuke.MetaInfServices;
import io.hyperfoil.api.config.BenchmarkDefinitionException;
import io.hyperfoil.api.config.InitFromParam;
import io.hyperfoil.api.config.Name;
import io.hyperfoil.api.config.Step;
import io.hyperfoil.api.config.StepBuilder;
import io.hyperfoil.api.session.IntAccess;
import io.hyperfoil.api.session.ReadAccess;
import io.hyperfoil.api.session.Session;
import io.hyperfoil.core.builders.BaseStepBuilder;
import io.hyperfoil.core.session.SessionFactory;
/**
* Example step for <a href="http://hyperfoil.io/quickstart/quickstart8">Custom steps tutorial</a>
*/
public class DivideStep implements Step {
   // All fields in a step are immutable, any state must be stored in the Session
   private final ReadAccess fromVar;
   private final IntAccess toVar;
   private final int divisor;

   public DivideStep(ReadAccess fromVar, IntAccess toVar, int divisor) {
      // Variables in session are not accessed directly using map lookup but
      // through the Access objects. This is necessary as the scenario can use
      // some simple expressions that are parsed when the scenario is built
      // (in this constructor), not at runtime.
      this.fromVar = fromVar;
      this.toVar = toVar;
      this.divisor = divisor;
   }

   @Override
   public boolean invoke(Session session) {
      // This step will block until the variable is set, rather than
      // throwing an error or defaulting the value.
      if (!fromVar.isSet(session)) {
         return false;
      }
      // Session can store either objects or integers. Using int variables is
      // more efficient as it prevents repeated boxing and unboxing.
      int value = fromVar.getInt(session);
      toVar.setInt(session, value / divisor);
      return true;
   }

   // Make this builder loadable as service
   @MetaInfServices(StepBuilder.class)
   // This is the step name that will be used in the YAML
   @Name("divide")
   public static class Builder extends BaseStepBuilder<Builder> implements InitFromParam<Builder> {
      // Contrary to the step fields in builder are mutable
      private String fromVar;
      private String toVar;
      private int divisor;

      // Let's permit a short-form definition that will store the result
      // in the same variable. Note that the javadoc @param is used to generate external documentation.
      /**
       * @param param Use myVar /= constant
       */
      @Override
      public Builder init(String param) {
         int divIndex = param.indexOf("/=");
         if (divIndex < 0) {
            throw new BenchmarkDefinitionException("Invalid inline definition: " + param);
         }
         try {
            divisor(Integer.parseInt(param.substring(divIndex + 2).trim()));
         } catch (NumberFormatException e) {
            throw new BenchmarkDefinitionException("Invalid inline definition: " + param, e);
         }
         String var = param.substring(0, divIndex).trim();
         return fromVar(var).toVar(var);
      }

      // All fields are set in fluent setters - this helps when the scenario
      // is defined through programmatic configuration.
      // When parsing YAML the methods are invoked through reflection;
      // the attribute name is used for the method lookup.

      /**
       * @param fromVar Name of the (integer) variable with the dividend.
       */
      public Builder fromVar(String fromVar) {
         this.fromVar = fromVar;
         return this;
      }

      /**
       * @param toVar Name of the variable where the result is stored.
       */
      public Builder toVar(String toVar) {
         this.toVar = toVar;
         return this;
      }

      // The parser can automatically convert primitive types and enums.
      public Builder divisor(int divisor) {
         this.divisor = divisor;
         return this;
      }

      @Override
      public List<Step> build() {
         // You can ignore the sequence parameter; this is used only in steps
         // that require access to the parent sequence at runtime.
         // Here is a good place to check that the attributes are sane;
         // report precisely what is wrong instead of one catch-all message.
         if (fromVar == null) {
            throw new BenchmarkDefinitionException("Missing required attribute 'fromVar'!");
         }
         if (toVar == null) {
            throw new BenchmarkDefinitionException("Missing required attribute 'toVar'!");
         }
         if (divisor == 0) {
            // int defaults to 0, so this covers both an unset and an explicit
            // zero divisor - either way the step would divide by zero.
            throw new BenchmarkDefinitionException("Attribute 'divisor' must be set to a non-zero value!");
         }
         // The builder has a bit more flexibility and it can create more than
         // one step at once.
         return Collections.singletonList(new DivideStep(
               SessionFactory.readAccess(fromVar), SessionFactory.intAccess(toVar), divisor));
      }
   }
}
| java | Apache-2.0 | 70ad9cad7ba105e88d7c62e7b65892ecd288f034 | 2026-01-05T02:38:03.557103Z | false |
Hyperfoil/Hyperfoil | https://github.com/Hyperfoil/Hyperfoil/blob/70ad9cad7ba105e88d7c62e7b65892ecd288f034/distribution/src/main/java/io/hyperfoil/schema/DocsGenerator.java | distribution/src/main/java/io/hyperfoil/schema/DocsGenerator.java | package io.hyperfoil.schema;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.PrintStream;
import java.lang.reflect.Method;
import java.lang.reflect.ParameterizedType;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.Stack;
import java.util.TreeMap;
import java.util.stream.Collectors;
import java.util.stream.IntStream;
import com.github.javaparser.JavaParser;
import com.github.javaparser.ParseResult;
import com.github.javaparser.Problem;
import com.github.javaparser.ast.CompilationUnit;
import com.github.javaparser.ast.Node;
import com.github.javaparser.ast.body.ClassOrInterfaceDeclaration;
import com.github.javaparser.ast.body.EnumConstantDeclaration;
import com.github.javaparser.ast.body.EnumDeclaration;
import com.github.javaparser.ast.body.MethodDeclaration;
import com.github.javaparser.ast.nodeTypes.NodeWithJavadoc;
import com.github.javaparser.ast.nodeTypes.NodeWithSimpleName;
import com.github.javaparser.ast.type.ClassOrInterfaceType;
import com.github.javaparser.ast.type.PrimitiveType;
import com.github.javaparser.ast.type.Type;
import com.github.javaparser.javadoc.Javadoc;
import com.github.javaparser.javadoc.JavadocBlockTag;
import io.hyperfoil.api.config.BaseSequenceBuilder;
import io.hyperfoil.api.config.InitFromParam;
import io.hyperfoil.api.config.ListBuilder;
import io.hyperfoil.api.config.MappingListBuilder;
import io.hyperfoil.api.config.PairBuilder;
import io.hyperfoil.api.config.PartialBuilder;
import io.hyperfoil.api.config.StepBuilder;
import io.hyperfoil.api.processor.Processor;
import io.hyperfoil.api.session.Action;
import io.hyperfoil.core.builders.BuilderInfo;
import io.hyperfoil.core.builders.ServiceLoadedBuilderProvider;
public class DocsGenerator extends BaseGenerator {
private static final Set<Class<?>> BLACKLIST = new HashSet<>(Arrays.asList(
BaseSequenceBuilder.class, ListBuilder.class, MappingListBuilder.class,
PairBuilder.class, PairBuilder.OfString.class, PairBuilder.OfDouble.class,
PartialBuilder.class));
private static final String NO_DESCRIPTION = "<font color=\"#606060\"><no description></font>";
private static final Docs EMPTY_DOCS = new Docs(null);
private final List<Path> sourceDirs;
private final Path output;
private final Map<String, Docs> steps = new TreeMap<>(String.CASE_INSENSITIVE_ORDER);
private final Map<Class<?>, Docs> docs = new HashMap<>();
private final Map<Docs, Class<?>> reverseTypes = new HashMap<>();
private final JavaParser parser = new JavaParser();
private final List<Docs> needsResolve = new ArrayList<>();
/**
 * Entry point: every argument except the last is a source directory used to
 * look up javadoc; the last argument is the output directory.
 */
public static void main(String[] args) {
   if (args.length == 0) {
      // Previously this returned silently; tell the user what is expected.
      System.err.println("Usage: DocsGenerator <sourceDir>... <outputDir>");
      return;
   }
   List<Path> sourceDirs = new ArrayList<>();
   for (int i = 0; i < args.length - 1; ++i) {
      sourceDirs.add(Paths.get(args[i]));
   }
   new DocsGenerator(sourceDirs, Paths.get(args[args.length - 1])).run();
}
/**
 * @param sourceDirs roots of the Java source trees used to look up javadoc
 * @param output directory where the generated markdown files are written
 */
private DocsGenerator(List<Path> sourceDirs, Path output) {
   this.sourceDirs = sourceDirs;
   this.output = output;
}
/**
 * Generates one markdown file per step, action and processor discovered via
 * the service-loader metadata, resolving descriptions from javadoc in the
 * configured source directories.
 */
private void run() {
   // Describe every service-loaded step builder; deprecated builders are
   // omitted from the documentation entirely.
   for (Map.Entry<String, BuilderInfo<?>> entry : ServiceLoadedBuilderProvider.builders(StepBuilder.class).entrySet()) {
      @SuppressWarnings("unchecked")
      Class<? extends StepBuilder<?>> newBuilder = (Class<? extends StepBuilder<?>>) entry.getValue().implClazz;
      if (newBuilder.isAnnotationPresent(Deprecated.class)) {
         continue;
      }
      ClassOrInterfaceDeclaration cd = findClass(newBuilder);
      if (cd != null) {
         String inlineParamDocs = findInlineParamDocs(cd);
         addStep(entry.getKey(), newBuilder, InitFromParam.class.isAssignableFrom(newBuilder), inlineParamDocs);
      }
   }
   // Lazily recorded params are merged only after all builders are known;
   // this breaks recursion between mutually referencing builders.
   needsResolve.forEach(Docs::resolveLazyParams);
   for (Map.Entry<Class<?>, Docs> entry : docs.entrySet()) {
      reverseTypes.put(entry.getValue(), entry.getKey());
   }
   File outputDir = output.toFile();
   if (outputDir.exists()) {
      if (!outputDir.isDirectory()) {
         // NOTE(review): only reports the problem and falls through; the
         // writes below then fail file-by-file.
         System.err.println("Output parameter " + output + " must be a folder");
      }
   } else {
      // NOTE(review): mkdirs() result is ignored; a failure surfaces later
      // as FileNotFoundException per generated file.
      outputDir.mkdirs();
   }
   // One markdown page per step name.
   for (Map.Entry<String, Docs> step : steps.entrySet()) {
      Path filePath = output.resolve("step_" + step.getKey() + ".md");
      try (PrintStream out = new PrintStream(new FileOutputStream(filePath.toFile()))) {
         printFrontMatter(out, step.getValue(), step.getKey());
         printDocs(step.getValue(), out);
      } catch (FileNotFoundException e) {
         System.err.printf("Cannot write file %s: %s%n", filePath, e);
      }
   }
   printRootType("action", Action.Builder.class);
   printRootType("processor", Processor.Builder.class);
}
/**
 * Writes one markdown file per implementation of the given root builder
 * type (i.e. one per action or processor).
 *
 * @param type filename prefix ("action" or "processor")
 * @param builderClazz service-loaded builder interface whose implementations
 *           were collected into {@link #docs}
 */
private void printRootType(String type, Class<?> builderClazz) {
   for (Map.Entry<String, List<Docs>> entry : docs.get(builderClazz).params.entrySet()) {
      Path filePath = output.resolve(type + "_" + entry.getKey() + ".md");
      try (PrintStream out = new PrintStream(new FileOutputStream(filePath.toFile()))) {
         // Only the first (preferred) alternative gets its own page.
         Docs docs = entry.getValue().iterator().next();
         printFrontMatter(out, docs, entry.getKey());
         printDocs(docs, out);
      } catch (FileNotFoundException e) {
         System.err.printf("Cannot write file %s: %s%n", filePath, e);
      }
   }
}
/**
 * Emits the Jekyll front-matter header ("---" delimited) with an optional
 * title (falling back to the docs' type) and description line.
 */
private void printFrontMatter(PrintStream out, Docs docs, String title) {
   out.println("---");
   String effectiveTitle = title;
   if (effectiveTitle == null && docs != null && docs.type != null) {
      effectiveTitle = docs.type;
   }
   if (effectiveTitle != null) {
      out.printf("title: \"%s\"%n", effectiveTitle);
   }
   if (docs != null && docs.ownerDescription != null) {
      // Escape double quotes so the YAML string stays valid.
      String escaped = docs.ownerDescription.replaceAll("\"", "\\\\\"");
      out.printf("description: \"%s\"%n", escaped);
   }
   out.println("---");
}
/**
 * Prints one bullet-list link to a generated page, appending the first line
 * of the owner description when one is available.
 */
private void printLink(String type, String name, Docs docs, PrintStream out) {
   String description = docs.ownerDescription;
   if (description == null) {
      out.printf("* [%s](./%s_%s.html)%n", name, type, name);
      return;
   }
   // Keep only the first line of a multi-line description.
   int endOfLine = description.indexOf('\n');
   String summary = endOfLine >= 0 ? description.substring(0, endOfLine) : description;
   out.printf("* [%s](./%s_%s.html): %s%n", name, type, name, summary);
}
/**
 * Renders a full docs page: the type description, an optional inline
 * definition table, the property table, and one "###" section per nested
 * builder type reachable from this page.
 */
private void printDocs(Docs docs, PrintStream out) {
   if (docs.typeDescription != null) {
      out.println(docs.typeDescription);
   }
   if (docs.inlineParam != null) {
      out.println();
      out.println("| Inline definition |\n| -------- |");
      out.printf("| %s |%n", docs.inlineParam);
      out.println();
   }
   if (!docs.params.isEmpty()) {
      // Breadth-first walk over nested (non-linked) param docs: 'children'
      // accumulates every nested builder together with a dotted path name
      // that is used for the anchor headings below.
      List<Tuple> children = new ArrayList<>();
      List<Tuple> processed = new ArrayList<>();
      Set<Docs> found = new HashSet<>();
      for (Map.Entry<String, List<Docs>> param : docs.params.entrySet()) {
         for (Docs d : param.getValue()) {
            if (d.link == null && !d.params.isEmpty() && found.add(d)) {
               processed.add(new Tuple(param.getKey(), d));
            }
         }
      }
      while (!processed.isEmpty()) {
         children.addAll(processed);
         List<Tuple> newChildren = new ArrayList<>();
         for (Tuple t : children) {
            for (Map.Entry<String, List<Docs>> param : t.docs.params.entrySet()) {
               for (Docs d : param.getValue()) {
                  // The 'found' set guarantees termination on cyclic docs.
                  if (d.link == null && !d.params.isEmpty() && found.add(d)) {
                     newChildren.add(new Tuple(t.name + "." + param.getKey(), d));
                  }
               }
            }
         }
         processed = newChildren;
      }
      Map<Docs, String> reverseLookup = new HashMap<>();
      for (Tuple t : children) {
         reverseLookup.put(t.docs, t.name);
      }
      out.println();
      out.println("| Property | Type | Description |\n| ------- | ------- | -------- |");
      for (Map.Entry<String, List<Docs>> param : docs.params.entrySet()) {
         printDocs(param.getKey(), param.getValue(), out, reverseLookup);
      }
      out.println();
      Collections.sort(children, Comparator.comparing(t -> t.name, String.CASE_INSENSITIVE_ORDER));
      for (Tuple t : children) {
         // Fix: the format string has a single %s; the second argument
         // (t.name) previously passed here was silently ignored by printf.
         out.printf("### %s%n%n", reverseLookup.get(t.docs));
         if (t.docs.typeDescription != null) {
            out.println(t.docs.typeDescription);
            out.println();
         }
         if (t.docs.inlineParam != null) {
            out.println();
            out.println("| Inline definition |\n| -------- |");
            out.printf("| %s |%n", t.docs.inlineParam);
            out.println();
         }
         out.println("| Property | Type | Description |\n| ------- | ------- | ------- |");
         for (Map.Entry<String, List<Docs>> param : t.docs.params.entrySet()) {
            printDocs(param.getKey(), param.getValue(), out, reverseLookup);
         }
         out.println();
      }
   }
}
/**
 * Prints the table rows for a single property; each alternative Docs entry
 * gets its own row, entries after the first are marked "(alternative)".
 */
private void printDocs(String name, List<Docs> options, PrintStream out, Map<Docs, String> reverseLookup) {
   int printed = 0;
   for (Docs d : options) {
      // Entries with neither a description nor params carry no information.
      if (d.ownerDescription == null && d.params.isEmpty()) {
         continue;
      }
      out.printf("| %s ", name);
      if (printed > 0) {
         out.print("(alternative)");
      }
      if (d.link != null) {
         // External link (e.g. to the steps/actions/processors index).
         out.printf("| [%s](%s) ", d.type, d.link);
      } else if (d.params.isEmpty()) {
         out.printf("| %s ", d.type);
      } else {
         // In-page anchor: mimic the anchor mangling of the markdown
         // renderer (lowercase, dashes for spaces, special chars stripped).
         out.printf("| [%s](#%s) ", d.type, reverseLookup.get(d)
               .replaceAll("<", "lt").replaceAll(">", "gt").replaceAll(" ", "-").replaceAll("[^a-zA-Z0-9-_]", "")
               .toLowerCase());
      }
      out.printf("| %s |%n", d.ownerDescription == null ? NO_DESCRIPTION : d.ownerDescription);
      ++printed;
   }
   // Nothing printable: still emit a placeholder row for the property.
   if (printed == 0) {
      out.printf("| %s | <unknown> | %s |%n", name, NO_DESCRIPTION);
   }
}
/**
 * Converts a small subset of javadoc markup to the markdown/HTML mix used
 * in the generated docs: pre-blocks become fenced code blocks, inline
 * code tags become HTML code elements and paragraph tags become blank lines.
 *
 * @param text raw javadoc text, may be null
 * @return the converted text, or null when the input was null
 */
// Package-private (rather than private) so the conversion can be unit-tested.
static String javadocToMarkdown(String text) {
   if (text == null) {
      return null;
   }
   // "<pre>", "</pre>" and "<p>" are fixed strings: plain replace() avoids
   // compiling a throwaway regex on every call. The {@code ...} pattern
   // genuinely needs a regex for its capture group.
   return text.replace("<pre>", "\n```")
         .replace("</pre>", "```\n")
         .replaceAll("\\{@code +([^}]*)\\}", "<code>$1</code>")
         .replace("<p>", "\n\n");
}
/**
 * Extracts the javadoc param description of the builder's
 * {@code init(String)} method - this documents the inline (scalar) form.
 */
private String findInlineParamDocs(ClassOrInterfaceDeclaration cd) {
   return cd.findFirst(MethodDeclaration.class, md -> matches(md, "init", String.class))
         .map(md -> javadocToMarkdown(getJavadocParams(md.getJavadoc()).get("param"))).orElse(null);
}
/**
 * Finds the source declaration corresponding to the reflective method by
 * matching name and parameter types.
 *
 * @return the matching declaration, or null when none matches
 */
private MethodDeclaration findMatching(List<MethodDeclaration> methods, Method method) {
   // Labeled continue: a single mismatching parameter skips straight to the
   // next candidate declaration.
   METHODS: for (MethodDeclaration m : methods) {
      int parameterCount = m.getParameters().size();
      if (m.getName().asString().equals(method.getName()) && parameterCount == method.getParameterCount()) {
         for (int i = 0; i < parameterCount; ++i) {
            if (!matches(m.getParameter(i).getType(), method.getParameters()[i].getType())) {
               continue METHODS;
            }
         }
         return m;
      }
   }
   return null;
}
/**
 * Best-effort match between a source-level type and a reflective class.
 */
private boolean matches(Type type, Class<?> clazz) {
   if (type instanceof PrimitiveType) {
      return ((PrimitiveType) type).getType().asString().equals(clazz.getName());
   } else if (type instanceof ClassOrInterfaceType) {
      ClassOrInterfaceType classType = (ClassOrInterfaceType) type;
      String fqName = fqName(classType);
      // Source types are usually written as simple names, so compare by
      // suffix. NOTE(review): endsWith can false-positive (e.g. "Sequence"
      // matches "CharSequence"); a proper fix would resolve imports.
      return clazz.getName().endsWith(fqName);
   }
   return false;
}
/**
 * Checks whether the declaration has the given name and exactly the given
 * parameter types (matched best-effort via {@link #matches(Type, Class)}).
 */
private boolean matches(MethodDeclaration declaration, String name, Class<?>... parameters) {
   if (!declaration.getName().asString().equals(name)) {
      return false;
   }
   if (declaration.getParameters().size() != parameters.length) {
      return false;
   }
   for (int index = 0; index < parameters.length; ++index) {
      boolean sameType = matches(declaration.getParameter(index).getType(), parameters[index]);
      if (!sameType) {
         return false;
      }
   }
   return true;
}
/**
 * Reconstructs the (possibly partially) qualified name exactly as written
 * in the source, e.g. "PairBuilder.OfString".
 */
private String fqName(ClassOrInterfaceType type) {
   return type.getScope().map(s -> fqName(s) + ".").orElse("") + type.getNameAsString();
}
/**
 * Locates and parses the source file of the top-level class enclosing the
 * given class, searching all configured source directories.
 *
 * @return the parsed compilation unit, or null when the source is missing
 *         or cannot be parsed
 */
private CompilationUnit findUnit(Class<?> clazz) {
   // Nested classes live in their top-level class' source file.
   while (clazz.getEnclosingClass() != null) {
      clazz = clazz.getEnclosingClass();
   }
   // Fix: the previous replaceAll("\\.", File.separator) passed the platform
   // separator as a regex *replacement string*; on Windows the "\" separator
   // is an illegal replacement escape and throws. A plain char replace is
   // correct on every platform (and does not compile a regex).
   String src = clazz.getName().replace('.', File.separatorChar) + ".java";
   File file = sourceDirs.stream().map(path -> path.resolve(src).toFile())
         .filter(f -> f.exists() && f.isFile()).findFirst().orElse(null);
   if (file != null) {
      try {
         ParseResult<CompilationUnit> result = parser.parse(file);
         if (result.isSuccessful()) {
            return result.getResult().orElseThrow(IllegalStateException::new);
         } else {
            System.err.printf("Cannot parse file %s:%n", file);
            for (Problem p : result.getProblems()) {
               System.err.println(p.getVerboseMessage());
            }
         }
      } catch (FileNotFoundException e) {
         System.err.printf("Cannot read file %s: %s%n", file, e.getMessage());
      }
   }
   // JDK sources are expectedly absent - do not warn about those.
   if (!clazz.getName().startsWith("java.")) {
      System.err.printf("Cannot find source code for %s%n", clazz);
   }
   return null;
}
/**
 * Looks up the class/interface declaration node for the builder's source,
 * or null when it cannot be found.
 */
private ClassOrInterfaceDeclaration findClass(Class<?> builder) {
   Node node = findClassOrEnum(builder, ClassOrInterfaceDeclaration.class);
   return node == null ? null : (ClassOrInterfaceDeclaration) node;
}
/**
 * Looks up the enum declaration node for the builder's source, or null when
 * it cannot be found.
 */
private EnumDeclaration findEnum(Class<?> builder) {
   Node node = findClassOrEnum(builder, EnumDeclaration.class);
   return node == null ? null : (EnumDeclaration) node;
}
/**
 * Finds the source AST node for the given class, descending through the
 * chain of enclosing classes from the outermost class inwards.
 *
 * @param type expected node type of the innermost (requested) declaration
 * @return the declaration node, or null when blacklisted or not found
 */
private <T extends Node & NodeWithSimpleName> Node findClassOrEnum(Class<?> builder, Class<T> type) {
   if (BLACKLIST.contains(builder)) {
      return null;
   }
   Node node = findUnit(builder);
   if (node == null) {
      return null;
   }
   // Push enclosing classes so we can walk from the outermost inwards.
   Stack<Class<?>> classes = new Stack<>();
   Class<?> clazz = builder;
   while (clazz != null) {
      classes.push(clazz);
      clazz = clazz.getEnclosingClass();
   }
   while (!classes.isEmpty()) {
      String simpleName = classes.pop().getSimpleName();
      if (classes.isEmpty()) {
         // Innermost level: match the requested node type (class or enum).
         node = node.findFirst(type, cd -> cd.getNameAsString().equals(simpleName)).orElse(null);
      } else {
         // Intermediate levels are always classes/interfaces.
         node = node.findFirst(ClassOrInterfaceDeclaration.class, cd -> cd.getNameAsString().equals(simpleName))
               .orElse(null);
      }
      if (node == null) {
         System.err.printf("Cannot describe builder %s%n", builder);
         return null;
      }
   }
   return node;
}
/**
 * Returns the markdown-converted javadoc description of the declaration,
 * or null when the declaration is null or has no javadoc.
 */
private String getJavadocDescription(NodeWithJavadoc<?> declaration) {
   return declaration == null ? null
         : declaration.getJavadoc()
               .map(javadoc -> trimEmptyLines(javadoc.getDescription().toText()))
               .map(DocsGenerator::javadocToMarkdown)
               .orElse(null);
}
/**
 * Joins javadoc lines into a single string: leading/trailing blank lines
 * are dropped, inner blank lines are prefixed with a br tag, lines inside
 * pre-blocks keep their newlines and all other lines are joined by a space.
 */
private String trimEmptyLines(String description) {
   String[] lines = description.split("\n");
   int firstLine = 0, lastLine = lines.length - 1;
   // Skip blank lines at the start...
   for (; firstLine < lines.length; ++firstLine) {
      if (!lines[firstLine].trim().isEmpty())
         break;
   }
   // ... and at the end.
   for (; lastLine >= firstLine; --lastLine) {
      if (!lines[lastLine].trim().isEmpty())
         break;
   }
   StringBuilder sb = new StringBuilder();
   boolean preformatted = false;
   for (int i = firstLine; i <= lastLine; ++i) {
      if (lines[i].contains("<pre>")) {
         preformatted = true;
      }
      if (lines[i].contains("</pre>")) {
         preformatted = false;
      }
      // Blank line in the middle forces an explicit line break.
      if (lines[i].trim().isEmpty()) {
         sb.append("<br>");
      }
      sb.append(lines[i]);
      if (preformatted) {
         // Preserve formatting inside pre-blocks.
         sb.append('\n');
      } else if (i != lastLine) {
         sb.append(" ");
      }
   }
   if (sb.length() == 0) {
      return "";
   }
   return sb.toString();
}
/**
 * Collects the javadoc parameter tags into a name-to-description map
 * (empty when there is no javadoc).
 * NOTE(review): Collectors.toMap has no merge function here, so duplicated
 * parameter tags on one method would throw IllegalStateException.
 */
private Map<String, String> getJavadocParams(Optional<Javadoc> maybeJavadoc) {
   return maybeJavadoc
         .map(javadoc -> javadoc.getBlockTags().stream()
               .filter(tag -> tag.getType() == JavadocBlockTag.Type.PARAM)
               .collect(Collectors.toMap(tag -> tag.getName().orElse("<unknown>"), tag -> tag.getContent().toText())))
         .orElse(Collections.emptyMap());
}
/**
 * Registers (or enriches) the docs for a named step.
 *
 * @param name YAML name of the step
 * @param builder builder implementation class
 * @param inline whether the builder supports the inline (scalar) form
 * @param inlineDocs javadoc of the inline parameter, may be null
 */
private void addStep(String name, Class<?> builder, boolean inline, String inlineDocs) {
   Docs step = steps.get(name);
   if (step == null) {
      step = describeBuilder(builder, false);
      step.ownerDescription = firstLine(step.typeDescription);
      steps.put(name, step);
   } else if (step.params.isEmpty()) {
      // The step could have been created from inline-param version in StepCatalog
      Docs docs = describeBuilder(builder, false);
      step.typeDescription = docs.typeDescription;
      step.params.putAll(docs.params);
      if (step.ownerDescription == null) {
         step.ownerDescription = firstLine(step.typeDescription);
      }
   }
   // Record inline-form docs only once.
   if (step.inlineParam == null && inline) {
      step.inlineParam = inlineDocs;
   }
}
/**
 * Returns the first line of the text: everything before the first newline,
 * br tag or paragraph tag, whichever comes first.
 *
 * @param text possibly multi-line text, may be null
 * @return the first line, or null when the input was null
 */
private String firstLine(String text) {
   if (text == null) {
      return null;
   }
   String trimmed = text.trim();
   int cut = IntStream.of(trimmed.indexOf('\n'), trimmed.indexOf("<br"), trimmed.indexOf("<p>"))
         .filter(index -> index >= 0).min().orElse(-1);
   if (cut < 0) {
      return trimmed;
   }
   return trimmed.substring(0, cut);
}
/**
 * Builds (and caches) the Docs tree for a builder class by combining its
 * source javadoc with reflectively discovered property methods.
 *
 * @param addParamsFromType also inspect the collection-like builder
 *           interfaces (Pair/Partial/List/MappingList) the type implements
 * @return the docs, or null when the source cannot be located
 */
private Docs describeBuilder(Class<?> builder, boolean addParamsFromType) {
   if (docs.containsKey(builder)) {
      return docs.get(builder);
   }
   if (builder == ServiceLoadedBuilderProvider.class) {
      throw new IllegalArgumentException();
   }
   ClassOrInterfaceDeclaration cd = findClass(builder);
   if (cd == null) {
      return null;
   }
   Map<Class<?>, List<MethodDeclaration>> methods = new HashMap<>();
   Docs docs = new Docs(null);
   docs.typeDescription = getJavadocDescription(cd);
   if (InitFromParam.class.isAssignableFrom(builder)) {
      docs.inlineParam = findInlineParamDocs(cd);
   }
   // Cache before recursing so cyclic builder references terminate.
   this.docs.put(builder, docs);
   if (BaseSequenceBuilder.class.isAssignableFrom(builder)) {
      return docs;
   }
   if (addParamsFromType) {
      addParamsFromBuilders(docs, builder, builder);
   }
   findProperties(builder, m -> {
      // Method declarations per declaring class are parsed lazily and cached.
      List<MethodDeclaration> mds = methods.computeIfAbsent(m.getDeclaringClass(), this::findAllMethods);
      Docs param = describeMethod(m.getDeclaringClass(), m, findMatching(mds, m));
      if (param != null) {
         docs.addParam(m.getName(), param);
      }
   });
   return docs;
}
/**
 * Collects the method declarations from the source of the given class and
 * of all its superclasses; classes without locatable sources are skipped.
 */
private List<MethodDeclaration> findAllMethods(Class<?> clazz) {
   List<MethodDeclaration> declarations = new ArrayList<>();
   for (Class<?> current = clazz; current != null; current = current.getSuperclass()) {
      ClassOrInterfaceDeclaration cd = findClass(current);
      if (cd != null) {
         declarations.addAll(cd.findAll(MethodDeclaration.class));
      }
   }
   return declarations;
}
/**
 * Describes a single property method: its description (from javadoc), the
 * value type, and - for nested builders - the nested parameters.
 *
 * @return the docs for this property, or null when there is nothing to document
 */
private Docs describeMethod(Class<?> builder, Method m, MethodDeclaration declaration) {
   StringBuilder description = declaration == null ? new StringBuilder()
         : declaration.getJavadoc()
               .map(javadoc -> new StringBuilder(trimEmptyLines(javadoc.getDescription().toText())))
               .orElse(new StringBuilder());
   // Return early to not recurse into self
   if (m.getReturnType().isAssignableFrom(builder)) {
      String type = "<none>";
      if (m.getParameterCount() == 0) {
         description.append("<br>Note: property does not have any value");
      } else if (m.getParameterCount() == 1) {
         Class<?> singleParam = m.getParameters()[0].getType();
         if (singleParam.isEnum()) {
            type = "enum";
            // List the enum constants (with their javadoc) as options.
            EnumDeclaration cd = findEnum(singleParam);
            if (cd != null) {
               List<EnumConstantDeclaration> constants = cd.findAll(EnumConstantDeclaration.class);
               if (constants != null) {
                  description.append("<br>Options:<ul>");
                  for (EnumConstantDeclaration c : constants) {
                     description.append("<li><code>").append(c.getNameAsString()).append("</code>");
                     String optionDescription = getJavadocDescription(c);
                     if (optionDescription != null) {
                        description.append(optionDescription);
                     }
                     description.append("</li>");
                  }
                  description.append("</ul>");
               }
            }
         } else {
            type = singleParam.getSimpleName();
         }
      }
      Docs docs = new Docs(description.isEmpty() ? null : description.toString());
      docs.type = type;
      return docs;
   }
   Docs param = new Docs(description.isEmpty() ? null : description.toString());
   // Properties returning a sequence builder hold a list of steps.
   if (BaseSequenceBuilder.class.isAssignableFrom(m.getReturnType())) {
      param.addParam("<list of steps>", EMPTY_DOCS);
      param.type = "<list of steps>";
      param.link = "index.html#steps";
   }
   addParamsFromBuilders(param, m.getReturnType(), m.getGenericReturnType());
   // Service-loaded sub-builders (actions, processors, ...) are linked to
   // their index page and resolved lazily.
   if (ServiceLoadedBuilderProvider.class.isAssignableFrom(m.getReturnType())) {
      ParameterizedType returnType = (ParameterizedType) m.getAnnotatedReturnType().getType();
      Class<?> builderClazz = getRawClass(returnType.getActualTypeArguments()[0]);
      setServiceLoaded(param, builderClazz);
   }
   // By convention nested builders are classes named *Builder.
   if (m.getReturnType().getName().endsWith("Builder")) {
      Docs inner = describeBuilder(m.getReturnType(), false);
      if (inner != null) {
         param.typeDescription = inner.typeDescription;
         param.inlineParam = inner.inlineParam;
         param.type = "Builder";
         // Defer merging the nested params until all builders are described.
         param.lazyParams.add(inner.params);
         needsResolve.add(param);
      }
   }
   if (param.params.isEmpty() && param.lazyParams.isEmpty()) {
      return null;
   } else {
      return param;
   }
}
/**
 * Marks the property as a service-loaded builder: sets the displayed type,
 * a link to the relevant index section where applicable, and defers the
 * resolution of the implementations' params.
 */
private void setServiceLoaded(Docs param, Class<?> builderClazz) {
   param.type = builderType(builderClazz);
   if (builderClazz == Action.Builder.class) {
      param.link = "index.html#actions";
   } else if (Processor.Builder.class.isAssignableFrom(builderClazz)) {
      param.link = "index.html#processors";
   }
   param.lazyParams.add(getServiceLoadedImplementations(builderClazz).params);
   needsResolve.add(param);
}
/**
 * Adds the synthetic params ({@code <any>}, {@code <list of ...>}) implied
 * by the collection-like builder interfaces the type implements.
 *
 * @param docs the docs node being populated
 * @param builder the raw builder type
 * @param genericType generic type, used to resolve the PairBuilder value type
 */
private void addParamsFromBuilders(Docs docs, Class<?> builder, java.lang.reflect.Type genericType) {
   // PairBuilder: arbitrary key mapped to a scalar value.
   if (PairBuilder.class.isAssignableFrom(builder)) {
      Docs inner = describeBuilder(builder, false);
      ClassOrInterfaceDeclaration cd = findClass(builder);
      if (cd != null) {
         inner.ownerDescription = firstLine(getMethodJavadoc(cd, "accept", 2));
      }
      docs.type = inner.type = getRawClass(getGenericParams(genericType, PairBuilder.class)[0]).getSimpleName();
      docs.addParam("<any>", inner);
   }
   // PartialBuilder: arbitrary key mapped to a nested builder.
   if (PartialBuilder.class.isAssignableFrom(builder)) {
      try {
         Method withKey = builder.getMethod("withKey", String.class);
         Class<?> innerBuilder = withKey.getReturnType();
         String ownerDescription = null;
         ClassOrInterfaceDeclaration cd = findClass(builder);
         if (cd != null) {
            ownerDescription = getMethodJavadoc(cd, "withKey", 1);
         }
         Docs inner;
         if (innerBuilder == ServiceLoadedBuilderProvider.class) {
            Class<?> implBuilder = getRawClass(
                  ((ParameterizedType) withKey.getGenericReturnType()).getActualTypeArguments()[0]);
            inner = new Docs(ownerDescription);
            setServiceLoaded(inner, implBuilder);
         } else {
            inner = describeBuilder(innerBuilder, false);
            inner.ownerDescription = ownerDescription;
         }
         docs.type = inner.type = "Builder";
         docs.addParam("<any>", inner);
      } catch (NoSuchMethodException e) {
         // withKey(String) is part of the PartialBuilder contract.
         throw new IllegalStateException(e);
      }
   }
   // ListBuilder: plain list of strings.
   if (ListBuilder.class.isAssignableFrom(builder)) {
      Docs inner = describeBuilder(builder, false);
      if (inner == null) {
         inner = new Docs(null);
      } else if (inner.ownerDescription == null) {
         inner.ownerDescription = firstLine(inner.typeDescription);
      }
      docs.type = inner.type = "<list of strings>";
      docs.addParam("<list of strings>", inner);
   }
   // MappingListBuilder: list of nested mappings, one builder per item.
   if (MappingListBuilder.class.isAssignableFrom(builder)) {
      try {
         Docs inner = describeBuilder(builder.getMethod("addItem").getReturnType(), false);
         ClassOrInterfaceDeclaration cd = findClass(builder);
         if (cd != null) {
            inner.ownerDescription = firstLine(getMethodJavadoc(cd, "addItem", 0));
         }
         docs.type = inner.type = "<list of builders>";
         docs.addParam("<list of mappings>", inner);
      } catch (NoSuchMethodException e) {
         throw new IllegalStateException(e);
      }
   }
}
/**
 * Returns the javadoc description of the first declared method with the
 * given name and parameter count, or null when there is none.
 */
private String getMethodJavadoc(ClassOrInterfaceDeclaration cd, String methodName, int paramCount) {
   return cd
         .findFirst(MethodDeclaration.class,
               md -> md.getNameAsString().equals(methodName) && md.getParameters().size() == paramCount)
         .map(this::getJavadocDescription).orElse(null);
}
/**
 * Human-readable type name for a builder class: "Processor.Builder" for any
 * processor builder, otherwise "Declaring.Simple" or just the simple name.
 */
private String builderType(Class<?> builderClazz) {
   if (Processor.Builder.class.isAssignableFrom(builderClazz)) {
      return "Processor.Builder";
   }
   Class<?> declaring = builderClazz.getDeclaringClass();
   String simple = builderClazz.getSimpleName();
   return declaring == null ? simple : declaring.getSimpleName() + "." + simple;
}
/**
 * Walks the generic type hierarchy to find the actual type arguments the
 * given type supplies to the interface {@code iface}.
 *
 * @return the type arguments, or null when {@code iface} is not reached
 */
private java.lang.reflect.Type[] getGenericParams(java.lang.reflect.Type type, Class<?> iface) {
   if (type == Object.class || type == null) {
      return null;
   } else if (type instanceof ParameterizedType) {
      Class<?> rawType = (Class<?>) ((ParameterizedType) type).getRawType();
      if (rawType == iface) {
         return ((ParameterizedType) type).getActualTypeArguments();
      }
      // Not the target interface yet: keep walking up from the raw class.
      return getGenericParams(rawType, iface);
   } else if (type instanceof Class<?>) {
      return getGenericParams((Class<?>) type, iface);
   } else {
      // Wildcards/type variables are not expected at this point.
      throw new UnsupportedOperationException(type.getTypeName());
   }
}
/**
 * Searches the superclass first, then all implemented interfaces, for the
 * type arguments supplied to {@code iface}.
 */
private java.lang.reflect.Type[] getGenericParams(Class<?> rawType, Class<?> iface) {
   java.lang.reflect.Type[] params = getGenericParams(rawType.getGenericSuperclass(), iface);
   if (params != null) {
      return params;
   }
   for (java.lang.reflect.Type i : rawType.getGenericInterfaces()) {
      params = getGenericParams(i, iface);
      if (params != null) {
         return params;
      }
   }
   return null;
}
/**
 * Builds (and caches) a Docs node whose params are all non-deprecated
 * implementations of a service-loaded builder interface.
 */
private Docs getServiceLoadedImplementations(Class<?> builderClazz) {
   Docs implementations = docs.get(builderClazz);
   if (implementations != null) {
      return implementations;
   }
   implementations = new Docs(null);
   // Cache before describing implementations to terminate recursion.
   docs.put(builderClazz, implementations);
   ClassOrInterfaceDeclaration fd = findClass(builderClazz);
   implementations.typeDescription = getJavadocDescription(fd);
   for (Map.Entry<String, BuilderInfo<?>> entry : ServiceLoadedBuilderProvider.builders(builderClazz).entrySet()) {
      Class<?> newBuilder = entry.getValue().implClazz;
      if (newBuilder.isAnnotationPresent(Deprecated.class)) {
         continue;
      }
      Docs docs = describeBuilder(newBuilder, true);
      if (docs == null) {
         continue;
      }
      docs.ownerDescription = firstLine(docs.typeDescription);
      docs.type = builderType(newBuilder);
      implementations.addParam(entry.getKey(), docs);
   }
   return implementations;
}
/**
 * Documentation node for a single builder or property: its descriptions,
 * nested parameters and an optional external link. Instances are used as
 * map/set keys relying on identity (no equals/hashCode override).
 */
private static class Docs {
   // Deterministic ordering for alternatives of the same property name:
   // fewest params first, then by the textual fields.
   private static final Comparator<? super Docs> DOCS_COMPARATOR = Comparator
         .<Docs, Integer> comparing(d -> d.params.size())
         .thenComparing(d -> d.inlineParam == null ? "" : d.inlineParam)
         .thenComparing(d -> d.typeDescription == null ? "" : d.typeDescription)
         .thenComparing(d -> d.ownerDescription == null ? "" : d.ownerDescription)
         .thenComparing(d -> d.type == null ? "" : d.type);
   // Description shown next to the property owning this node.
   String ownerDescription;
   // Description of the node's own type (class-level javadoc).
   String typeDescription;
   // Documentation of the InitFromParam inline form, if any.
   String inlineParam;
   // Property name -> alternative docs, sorted case-insensitively.
   Map<String, List<Docs>> params = new TreeMap<>(String.CASE_INSENSITIVE_ORDER);
   // External link target, when the property points to an index page.
   String link;
   // Displayed type name.
   String type;
   // Params merged in later via resolveLazyParams(), breaking recursion
   // while builders are still being described.
   List<Map<String, List<Docs>>> lazyParams = new ArrayList<>();

   private Docs(String ownerDescription) {
      this.ownerDescription = ownerDescription;
      this.typeDescription = ownerDescription;
   }

   public void addParam(String name, Docs docs) {
      // computeIfAbsent replaces the previous get/null-check/put sequence.
      List<Docs> options = params.computeIfAbsent(name, k -> new ArrayList<>());
      options.add(docs);
      options.sort(DOCS_COMPARATOR);
   }

   public void resolveLazyParams() {
      for (Map<String, List<Docs>> params : lazyParams) {
         for (Map.Entry<String, List<Docs>> param : params.entrySet()) {
            for (Docs d : param.getValue()) {
               addParam(param.getKey(), d);
            }
         }
      }
      lazyParams.clear();
   }
}
/** Pair of a dotted path name and the docs node it leads to. */
private static class Tuple {
   final String name;
   final Docs docs;

   private Tuple(String name, Docs docs) {
      this.name = name;
      this.docs = docs;
   }
}
}
| java | Apache-2.0 | 70ad9cad7ba105e88d7c62e7b65892ecd288f034 | 2026-01-05T02:38:03.557103Z | false |
Hyperfoil/Hyperfoil | https://github.com/Hyperfoil/Hyperfoil/blob/70ad9cad7ba105e88d7c62e7b65892ecd288f034/distribution/src/main/java/io/hyperfoil/schema/BaseGenerator.java | distribution/src/main/java/io/hyperfoil/schema/BaseGenerator.java | package io.hyperfoil.schema;
import java.lang.reflect.Field;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.lang.reflect.ParameterizedType;
import java.util.ArrayDeque;
import java.util.HashSet;
import java.util.Queue;
import java.util.Set;
import java.util.function.Consumer;
import java.util.regex.Pattern;
import io.hyperfoil.api.config.BaseSequenceBuilder;
import io.hyperfoil.api.config.Embed;
import io.hyperfoil.api.config.InitFromParam;
import io.hyperfoil.api.config.ListBuilder;
import io.hyperfoil.api.config.MappingListBuilder;
import io.hyperfoil.api.config.PartialBuilder;
import io.hyperfoil.impl.Util;
class BaseGenerator {
private static final Pattern END_REGEXP = Pattern.compile("^end(\\p{javaUpperCase}.*|$)");
/**
 * Resolves the raw {@code Class} behind a reflective type.
 *
 * @param type either a plain class or a parameterized type
 * @return the erased class
 * @throws IllegalStateException for any other kind of type
 */
static Class<?> getRawClass(java.lang.reflect.Type type) {
   if (type instanceof Class) {
      return (Class<?>) type;
   }
   if (type instanceof ParameterizedType) {
      java.lang.reflect.Type raw = ((ParameterizedType) type).getRawType();
      return (Class<?>) raw;
   }
   throw new IllegalStateException("Cannot analyze type " + type);
}
static boolean isMethodIgnored(Class<?> builder, Method m) {
if (Modifier.isStatic(m.getModifiers()) || m.isDefault() || m.isSynthetic() || m.isBridge()) {
return true;
} else if (END_REGEXP.matcher(m.getName()).matches()) {
return true;
} else if (m.getParameterCount() > 1) {
return true;
} else if (m.getParameterCount() == 1 && !Util.isParamConvertible(m.getParameters()[0].getType())) {
return true;
} else if (PartialBuilder.class.isAssignableFrom(builder) && m.getName().equals("withKey")
&& m.getParameterCount() == 1) {
return true;
} else if (ListBuilder.class.isAssignableFrom(builder) && m.getName().equals("nextItem") && m.getParameterCount() == 1) {
return true;
} else if (MappingListBuilder.class.isAssignableFrom(builder) && m.getName().equals("addItem")
&& m.getParameterCount() == 0) {
return true;
} else if (m.getName().equals("init") && m.getParameterCount() == 1 && m.getParameterTypes()[0] == String.class
&& InitFromParam.class.isAssignableFrom(builder)) {
return true;
} else if (m.getName().equals("copy") && m.getParameterCount() == 0) {
return true;
} else if (m.isAnnotationPresent(Deprecated.class)) {
return true;
} else if (m.getName().equals("rootSequence") && BaseSequenceBuilder.class.isAssignableFrom(m.getDeclaringClass())) {
return true;
} else if (m.isAnnotationPresent(Embed.class)) {
return true;
} else if (m.getName().equals("self") && m.getParameterCount() == 0) {
return true;
}
return false;
}
private static boolean isParamConvertible(Class<?> type) {
return type == Object.class || type == String.class || type == CharSequence.class || type.isPrimitive() || type.isEnum();
}
protected static void findProperties(Class<?> root, Consumer<Method> processProperty) {
Queue<Class<?>> todo = new ArrayDeque<>();
Set<Class<?>> visited = new HashSet<>();
todo.add(root);
while (!todo.isEmpty()) {
Class<?> b = todo.poll();
visited.add(b);
for (Method m : b.getMethods()) {
if (isMethodIgnored(b, m)) {
continue;
}
processProperty.accept(m);
}
for (Field f : b.getFields()) {
if (f.isAnnotationPresent(Embed.class)) {
if (!visited.contains(f.getType()) && !Modifier.isStatic(f.getModifiers())) {
todo.add(f.getType());
}
}
}
for (Method m : b.getMethods()) {
if (m.isAnnotationPresent(Embed.class)) {
if (!visited.contains(m.getReturnType()) && m.getParameterCount() == 0 && !Modifier.isStatic(m.getModifiers())) {
todo.add(m.getReturnType());
}
}
}
}
}
}
| java | Apache-2.0 | 70ad9cad7ba105e88d7c62e7b65892ecd288f034 | 2026-01-05T02:38:03.557103Z | false |
Hyperfoil/Hyperfoil | https://github.com/Hyperfoil/Hyperfoil/blob/70ad9cad7ba105e88d7c62e7b65892ecd288f034/distribution/src/main/java/io/hyperfoil/schema/Generator.java | distribution/src/main/java/io/hyperfoil/schema/Generator.java | package io.hyperfoil.schema;
import java.io.IOException;
import java.lang.reflect.Method;
import java.lang.reflect.ParameterizedType;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.Map;
import java.util.TreeMap;
import java.util.TreeSet;
import io.hyperfoil.api.config.BaseSequenceBuilder;
import io.hyperfoil.api.config.InitFromParam;
import io.hyperfoil.api.config.ListBuilder;
import io.hyperfoil.api.config.MappingListBuilder;
import io.hyperfoil.api.config.PairBuilder;
import io.hyperfoil.api.config.PartialBuilder;
import io.hyperfoil.api.config.StepBuilder;
import io.hyperfoil.core.builders.BuilderInfo;
import io.hyperfoil.core.builders.ServiceLoadedBuilderProvider;
import io.vertx.core.json.JsonArray;
import io.vertx.core.json.JsonObject;
public class Generator extends BaseGenerator {
private static final JsonObject TYPE_NULL = new JsonObject().put("type", "null");
private static final JsonObject TYPE_STRING = new JsonObject().put("type", "string");
private static final Comparator<JsonObject> JSON_COMPARATOR = new Comparator<JsonObject>() {
@Override
public int compare(JsonObject o1, JsonObject o2) {
TreeSet<String> keys = new TreeSet<>(o1.fieldNames());
keys.addAll(o2.fieldNames());
for (String key : keys) {
int result = compareItems(o1.getValue(key), o2.getValue(key));
if (result != 0) {
return result;
}
}
return 0;
}
private int compareItems(Object v1, Object v2) {
// Sorting nulls at end
if (v1 == null) {
return v2 == null ? 0 : 1;
} else if (v2 == null) {
return -1;
}
if (v1 instanceof JsonObject) {
if (v2 instanceof JsonObject) {
return this.compare((JsonObject) v1, (JsonObject) v2);
} else {
throw new IllegalArgumentException(v1 + ", " + v2);
}
} else if (v1 instanceof JsonArray) {
JsonArray a1 = (JsonArray) v1;
JsonArray a2 = (JsonArray) v2;
for (int i = 0; i < Math.min(a1.size(), a2.size()); ++i) {
int result = compareItems(a1.getValue(i), a2.getValue(i));
if (result != 0) {
return result;
}
}
return Integer.compare(a1.size(), a2.size());
} else if (v1 instanceof String) {
if (v2 instanceof String) {
return ((String) v1).compareTo((String) v2);
} else {
throw new IllegalArgumentException(v1 + ", " + v2);
}
} else if (v1 instanceof Integer) {
if (v2 instanceof Integer) {
return ((Integer) v1).compareTo((Integer) v2);
} else {
throw new IllegalArgumentException(v1 + ", " + v2);
}
} else if (v1 instanceof Boolean) {
if (v2 instanceof Boolean) {
boolean b1 = (boolean) v1;
if (b1 != (boolean) v2) {
return b1 ? 1 : -1;
}
} else {
throw new IllegalArgumentException(v1 + ", " + v2);
}
} else {
throw new IllegalArgumentException(String.valueOf(v1));
}
return 0;
}
};
private final Path input;
private final Path output;
private JsonObject definitions;
public static void main(String[] args) throws IOException {
new Generator(Paths.get(args[0]), Paths.get(args[1])).run();
}
private Generator(Path input, Path output) {
this.input = input;
this.output = output;
}
private void run() throws IOException {
String template = Files.readAllLines(input).stream()
.reduce(new StringBuilder(), StringBuilder::append, StringBuilder::append).toString();
JsonObject schema = new JsonObject(template);
JsonObject schemaDefinitions = schema.getJsonObject("definitions");
definitions = new JsonObject(new TreeMap<>());
JsonObject step = schemaDefinitions.getJsonObject("step");
JsonArray oneOf = step.getJsonArray("oneOf");
TreeMap<String, Object> sortedMap = new TreeMap<>();
sortedMap.putAll(oneOf.getJsonObject(0).getJsonObject("properties").getMap());
JsonObject builders = new JsonObject(sortedMap);
oneOf.getJsonObject(0).put("properties", builders);
JsonArray simpleBuilders = oneOf.getJsonObject(1).getJsonArray("enum");
simpleBuilders.clear();
for (Map.Entry<String, BuilderInfo<?>> entry : ServiceLoadedBuilderProvider.builders(StepBuilder.class).entrySet()) {
@SuppressWarnings("unchecked")
Class<StepBuilder<?>> implClazz = (Class<StepBuilder<?>>) entry.getValue().implClazz;
addBuilder(builders, simpleBuilders, entry.getKey(), implClazz, InitFromParam.class.isAssignableFrom(implClazz));
}
if (simpleBuilders.isEmpty()) {
oneOf.remove(1);
}
definitions.forEach(e -> schemaDefinitions.put(e.getKey(), e.getValue()));
Files.writeString(output, schema.encodePrettily());
}
private void addBuilder(JsonObject builders, JsonArray simpleBuilders, String name, Class<?> builder, boolean inline) {
JsonObject properties = new JsonObject(new TreeMap<>());
if (definitions.getJsonObject(builder.getName()) == null) {
JsonObject step = new JsonObject();
definitions.put(builder.getName(), step);
describeBuilder(builder, step, properties);
if (properties.isEmpty()) {
simpleBuilders.add(name);
}
}
JsonObject reference = new JsonObject().put("$ref", "#/definitions/" + builder.getName());
addProperty(builders, name, reference);
if (inline) {
addProperty(builders, name, TYPE_STRING);
}
}
private JsonObject describeBuilder(Class<?> builder) {
if (definitions.getJsonObject(builder.getName()) == null) {
JsonObject definition = new JsonObject();
definitions.put(builder.getName(), definition);
describeBuilder(builder, definition, new JsonObject(new TreeMap<>()));
}
return new JsonObject().put("$ref", "#/definitions/" + builder.getName());
}
private void describeBuilder(Class<?> builder, JsonObject definition, JsonObject properties) {
definition.put("type", "object");
definition.put("additionalProperties", false);
definition.put("properties", properties);
if (PartialBuilder.class.isAssignableFrom(builder)) {
try {
Method withKey = builder.getMethod("withKey", String.class);
Class<?> innerBuilder = withKey.getReturnType();
JsonObject propertyType;
if (ServiceLoadedBuilderProvider.class == innerBuilder) {
propertyType = getServiceLoadedImplementations(
getRawClass(((ParameterizedType) withKey.getGenericReturnType()).getActualTypeArguments()[0]));
} else {
propertyType = describeBuilder(innerBuilder);
}
definition.put("patternProperties", new JsonObject().put(".*", propertyType));
} catch (NoSuchMethodException e) {
throw new IllegalStateException(e);
}
}
findProperties(builder, m -> {
JsonObject property = describeMethod(m.getDeclaringClass(), m);
if (property != null) {
addProperty(properties, m.getName(), property);
}
});
}
private JsonObject describeMethod(Class<?> builder, Method m) {
if (m.getReturnType().isAssignableFrom(builder)) {
if (m.getParameterCount() == 0) {
return TYPE_NULL;
} else if (m.getParameterCount() == 1) {
return getType(m);
} else {
throw new IllegalStateException();
}
}
ArrayList<JsonObject> options = new ArrayList<>();
if (PairBuilder.class.isAssignableFrom(m.getReturnType())) {
// TODO: PairBuilder.valueType
JsonObject valueType = TYPE_STRING;
if (PartialBuilder.class.isAssignableFrom(m.getReturnType())) {
try {
Class<?> innerBuilder = m.getReturnType().getMethod("withKey", String.class).getReturnType();
valueType = new JsonObject().put("oneOf", new JsonArray().add(valueType).add(describeBuilder(innerBuilder)));
} catch (NoSuchMethodException e) {
throw new IllegalStateException(e);
}
}
JsonObject patternProperties = new JsonObject().put(".*", valueType);
JsonObject object = new JsonObject()
.put("type", "object")
.put("patternProperties", patternProperties);
JsonObject sequenceObject = new JsonObject()
.put("type", "object")
.put("minProperties", 1)
.put("maxProperties", 1)
.put("patternProperties", patternProperties);
options.add(object);
options.add(arrayOf(sequenceObject));
}
if (ListBuilder.class.isAssignableFrom(m.getReturnType())) {
options.add(new JsonObject()
.put("type", "array")
.put("additionalItems", false)
.put("items", TYPE_STRING));
}
if (MappingListBuilder.class.isAssignableFrom(m.getReturnType())) {
JsonObject item;
try {
item = describeBuilder(m.getReturnType().getMethod("addItem").getReturnType());
} catch (NoSuchMethodException e) {
throw new RuntimeException(e);
}
options.add(item);
options.add(arrayOf(item));
}
if (ServiceLoadedBuilderProvider.class.isAssignableFrom(m.getReturnType())) {
ParameterizedType type = (ParameterizedType) m.getAnnotatedReturnType().getType();
Class<?> builderClazz = getRawClass(type.getActualTypeArguments()[0]);
JsonObject discriminator = getServiceLoadedImplementations(builderClazz);
options.add(discriminator);
options.add(arrayOf(discriminator));
}
if (BaseSequenceBuilder.class.isAssignableFrom(m.getReturnType())) {
options.add(new JsonObject()
.put("type", "array")
.put("additionalItems", false)
.put("items", new JsonObject().put("$ref", "#/definitions/step")));
// return early to avoid reporting BaseSequenceBuilder
return optionsToObject(options);
}
if (m.getReturnType().getName().endsWith("Builder")) {
JsonObject builderReference = describeBuilder(m.getReturnType());
options.add(builderReference);
options.add(arrayOf(builderReference));
}
return optionsToObject(options);
}
private JsonObject optionsToObject(ArrayList<JsonObject> options) {
if (options.isEmpty()) {
return null;
} else if (options.size() == 1) {
return options.get(0);
} else {
return new JsonObject().put("oneOf", new JsonArray(options));
}
}
private JsonObject getType(Method m) {
Class<?> type = m.getParameters()[0].getType();
if (isIntegral(type)) {
return new JsonObject().put("type", "integer");
} else if (isFloat(type)) {
return new JsonObject().put("type", "number");
} else if (type == Boolean.class || type == boolean.class) {
return new JsonObject().put("type", "boolean");
} else if (type.isEnum()) {
return new JsonObject().put("enum", makeEnum(type));
} else {
return TYPE_STRING;
}
}
private JsonObject getServiceLoadedImplementations(Class<?> builderClazz) {
JsonObject implementations = new JsonObject();
JsonObject discriminator = new JsonObject()
.put("type", "object")
.put("additionalProperties", false)
.put("minProperties", 1)
.put("maxProperties", 1)
.put("properties", implementations);
for (Map.Entry<String, BuilderInfo<?>> entry : new TreeMap<>(ServiceLoadedBuilderProvider.builders(builderClazz))
.entrySet()) {
Class<?> implClazz = entry.getValue().implClazz;
JsonObject serviceLoadedProperty = describeBuilder(implClazz);
if (InitFromParam.class.isAssignableFrom(implClazz)) {
serviceLoadedProperty = new JsonObject()
.put("oneOf", new JsonArray().add(serviceLoadedProperty).add(TYPE_STRING));
}
addProperty(implementations, entry.getKey(), serviceLoadedProperty);
}
definitions.put(builderClazz.getName(), discriminator);
return new JsonObject().put("$ref", "#/definitions/" + builderClazz.getName());
}
private static void addProperty(JsonObject properties, String name, JsonObject newProperty) {
JsonObject existingProperty = properties.getJsonObject(name);
if (existingProperty == null) {
properties.put(name, newProperty);
return;
} else if (existingProperty.equals(newProperty)) {
return;
}
ArrayList<JsonObject> props = new ArrayList<>();
JsonArray existingOneOf = existingProperty.getJsonArray("oneOf");
if (existingOneOf == null) {
props.add(existingProperty);
} else {
existingOneOf.forEach(p -> props.add((JsonObject) p));
}
JsonArray newOneOf = newProperty.getJsonArray("oneOf");
if (newOneOf == null) {
props.add(newProperty);
} else {
newOneOf.forEach(p -> props.add((JsonObject) p));
}
props.sort(JSON_COMPARATOR);
properties.put(name, new JsonObject().put("oneOf", new JsonArray(props)));
}
private static JsonObject arrayOf(JsonObject sequenceObject) {
return new JsonObject()
.put("type", "array")
.put("minLength", 1)
.put("additionalItems", false)
.put("items", sequenceObject);
}
private static JsonArray makeEnum(Class<?> type) {
JsonArray array = new JsonArray();
for (Object e : type.getEnumConstants()) {
array.add(((Enum) e).name());
}
return array;
}
private static boolean isFloat(Class<?> type) {
return type == Double.class || type == double.class || type == Float.class || type == float.class;
}
private static boolean isIntegral(Class<?> type) {
return type == Integer.class || type == int.class || type == Long.class || type == long.class;
}
}
| java | Apache-2.0 | 70ad9cad7ba105e88d7c62e7b65892ecd288f034 | 2026-01-05T02:38:03.557103Z | false |
Hyperfoil/Hyperfoil | https://github.com/Hyperfoil/Hyperfoil/blob/70ad9cad7ba105e88d7c62e7b65892ecd288f034/k8s-deployer/src/main/java/io/hyperfoil/deploy/k8s/K8sDeployer.java | k8s-deployer/src/main/java/io/hyperfoil/deploy/k8s/K8sDeployer.java | package io.hyperfoil.deploy.k8s;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.StandardCopyOption;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.function.Consumer;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.kohsuke.MetaInfServices;
import io.fabric8.kubernetes.api.model.ConfigMapVolumeSource;
import io.fabric8.kubernetes.api.model.ContainerBuilder;
import io.fabric8.kubernetes.api.model.ContainerPort;
import io.fabric8.kubernetes.api.model.Pod;
import io.fabric8.kubernetes.api.model.PodBuilder;
import io.fabric8.kubernetes.api.model.PodSpecBuilder;
import io.fabric8.kubernetes.api.model.Quantity;
import io.fabric8.kubernetes.api.model.ResourceRequirements;
import io.fabric8.kubernetes.api.model.Toleration;
import io.fabric8.kubernetes.api.model.VolumeBuilder;
import io.fabric8.kubernetes.api.model.VolumeMountBuilder;
import io.fabric8.kubernetes.client.Config;
import io.fabric8.kubernetes.client.ConfigBuilder;
import io.fabric8.kubernetes.client.KubernetesClient;
import io.fabric8.kubernetes.client.KubernetesClientBuilder;
import io.fabric8.kubernetes.client.Watcher;
import io.fabric8.kubernetes.client.WatcherException;
import io.fabric8.kubernetes.client.dsl.PodResource;
import io.hyperfoil.api.Version;
import io.hyperfoil.api.config.Agent;
import io.hyperfoil.api.config.Benchmark;
import io.hyperfoil.api.deployment.DeployedAgent;
import io.hyperfoil.api.deployment.Deployer;
import io.hyperfoil.internal.Controller;
import io.hyperfoil.internal.Properties;
import io.vertx.core.AsyncResult;
import io.vertx.core.Future;
import io.vertx.core.Handler;
/**
* This deployer expects Hyperfoil to be deployed as Openshift/Kubernetes pod. In order to create one, run:
* <code>
* oc new-project hyperfoil
* oc apply -f hyperfoil.yaml
* </code>
* <p>
* If you want to use custom logging settings, create a configmap:
* <code>
* oc create cm log4j2 --from-file=log4j2-trace.xml=/path/to/log4j2-trace.xml
* </code>
* <p>
* This can be referenced as <code>log: log4j2/log4j2-trace.xml</code> in the agent properties.
* You can also mount the configmap to controller.
*/
public class K8sDeployer implements Deployer {
private static final Logger log = LogManager.getLogger(K8sDeployer.class);
private static final String API_SERVER = Properties.get("io.hyperfoil.deployer.k8s.apiserver",
"https://kubernetes.default.svc.cluster.local/");
private static final String DEFAULT_IMAGE = Properties.get("io.hyperfoil.deployer.k8s.defaultimage",
"quay.io/hyperfoil/hyperfoil:" + Version.VERSION);
private static final String CONTROLLER_POD_NAME = System.getenv("HOSTNAME");
private static final String APP;
private static final String NAMESPACE;
/**
* The <a href="https://kubernetes.io/docs/concepts/overview/working-with-objects/common-labels/">recommended
* labels for use in Kubernetes</a>.
*/
private static final String[] K8S_RECOMMENDED_LABELS = {
"app.kubernetes.io/name",
"app.kubernetes.io/instance",
"app.kubernetes.io/version",
"app.kubernetes.io/component",
"app.kubernetes.io/part-of",
"app.kubernetes.io/managed-by",
"app.kubernetes.io/created-by"
};
protected static final String POD_LABEL_PROPERTY_PREFIX = "pod.label.";
private KubernetesClient client;
static {
APP = Properties.get("io.hyperfoil.deployer.k8s.app", null);
NAMESPACE = getPropertyOrLoad("io.hyperfoil.deployer.k8s.namespace", "namespace");
}
private static String getPropertyOrLoad(String property, String file) {
String value = Properties.get(property, null);
if (value != null) {
return value;
}
String path = "/var/run/secrets/kubernetes.io/serviceaccount/" + file;
try {
return Files.readString(Paths.get(path));
} catch (IOException e) {
log.debug("Cannot load {} - not running as pod?", path, e);
return "<cannot load>";
}
}
private void ensureClient() {
synchronized (this) {
if (client == null) {
Config config = new ConfigBuilder()
.withMasterUrl(API_SERVER)
.withTrustCerts(true)
.build();
client = new KubernetesClientBuilder().withConfig(config).build();
}
}
}
@Override
public DeployedAgent start(Agent agent, String runId, Benchmark benchmark, Consumer<Throwable> exceptionHandler) {
ensureClient();
PodSpecBuilder spec = new PodSpecBuilder().withRestartPolicy("Never");
String serviceAccount = agent.properties.getOrDefault("pod-serviceaccount",
Properties.get("io.hyperfoil.deployer.k8s.pod.service-account", null));
if (serviceAccount != null) {
spec.withServiceAccount(serviceAccount);
}
List<String> command = new ArrayList<>();
command.add("java");
int threads = agent.threads() < 0 ? benchmark.defaultThreads() : agent.threads();
ResourceRequirements resourceRequirements = new ResourceRequirements();
Map<String, Quantity> podResourceRequests = new LinkedHashMap<>();
String cpuRequest = agent.properties.getOrDefault(
"pod-cpu",
Properties.get("io.hyperfoil.deployer.k8s.pod.cpu", null));
if (cpuRequest != null) {
podResourceRequests.put("cpu", new Quantity(cpuRequest));
}
String memoryRequest = agent.properties.getOrDefault(
"pod-memory",
Properties.get("io.hyperfoil.deployer.k8s.pod.memory", null));
if (memoryRequest != null) {
podResourceRequests.put("memory", new Quantity(memoryRequest));
}
String storageRequest = agent.properties.getOrDefault(
"pod-ephemeral-storage",
Properties.get("io.hyperfoil.deployer.k8s.pod.ephemeralstorage", null));
if (storageRequest != null) {
podResourceRequests.put("ephemeral-storage", new Quantity(storageRequest));
}
resourceRequirements.setRequests(podResourceRequests);
if (Boolean.parseBoolean(agent.properties.getOrDefault("pod-limits",
Properties.get("io.hyperfoil.deployer.k8s.pod.limits", "false")))) {
resourceRequirements.setLimits(podResourceRequests);
}
ContainerBuilder containerBuilder = new ContainerBuilder()
.withImage(agent.properties.getOrDefault("image", DEFAULT_IMAGE))
.withImagePullPolicy(agent.properties.getOrDefault("imagePullPolicy", "Always"))
.withName("hyperfoil-agent")
.withPorts(new ContainerPort(7800, null, null, "jgroups", "TCP"))
.withNewResourcesLike(resourceRequirements)
.endResources();
String node = agent.properties.get("node");
if (node != null) {
Map<String, String> nodeSelector = new HashMap<>();
for (String label : node.split(",", 0)) {
label = label.trim();
if (label.isEmpty()) {
continue;
} else if (label.contains("=")) {
String[] parts = node.split("=", 2);
nodeSelector.put(parts[0].trim(), parts[1].trim());
} else {
nodeSelector.put("kubernetes.io/hostname", label);
}
}
spec = spec.withNodeSelector(nodeSelector);
// Tolerate any taints if the node is set explicitly
spec = spec.withTolerations(new Toleration("", "", "Exists", null, null));
}
String logProperty = agent.properties.get("log");
if (logProperty != null) {
String configMap = logProperty;
String file = "log4j2.xml";
if (logProperty.contains("/")) {
int index = logProperty.indexOf("/");
configMap = logProperty.substring(0, index);
file = logProperty.substring(index + 1);
}
command.add("-D" + Properties.LOG4J2_CONFIGURATION_FILE + "=file:///etc/log4j2/" + file);
containerBuilder.withVolumeMounts(new VolumeMountBuilder()
.withName("log")
.withMountPath("/etc/log4j2")
.withReadOnly(true)
.build());
spec.withVolumes(new VolumeBuilder()
.withName("log")
.withConfigMap(new ConfigMapVolumeSource(null, null, configMap, false))
.build());
}
command.add("-Dvertx.logger-delegate-factory-class-name=io.vertx.core.logging.Log4j2LogDelegateFactory");
command.add("-D" + Properties.AGENT_NAME + "=" + agent.name);
command.add("-D" + Properties.RUN_ID + "=" + runId);
command.add("-D" + Properties.CONTROLLER_CLUSTER_IP + "=" + Properties.get(Properties.CONTROLLER_CLUSTER_IP, null));
command.add("-D" + Properties.CONTROLLER_CLUSTER_PORT + "=" + Properties.get(Properties.CONTROLLER_CLUSTER_PORT, null));
if (agent.properties.containsKey("extras")) {
command.addAll(Arrays.asList(agent.properties.get("extras").split(" ", 0)));
}
command.add("-cp");
command.add("/deployment/lib/*:/deployment/extensions/*");
command.add("io.hyperfoil.Hyperfoil$Agent");
// Keep the agent running after benchmark, e.g. to inspect logs
boolean stop = !"false".equalsIgnoreCase(agent.properties.getOrDefault("stop", "true"));
if (!stop) {
command.add("&&");
command.add("sleep");
command.add("86400");
}
containerBuilder = containerBuilder.withCommand(command);
spec = spec.withContainers(Collections.singletonList(containerBuilder.build()));
String podName = "agent-" + runId.toLowerCase() + "-" + agent.name.toLowerCase();
boolean fetchLogs = !"false".equalsIgnoreCase(agent.properties.getOrDefault("fetchLogs", "true"));
Path outputPath = null;
FileOutputStream output = null;
if (fetchLogs) {
// We're adding the finalizer to prevent destroying the pod completely before we finish reading logs.
outputPath = Controller.RUN_DIR.resolve(runId).resolve(podName + ".log");
try {
output = new FileOutputStream(outputPath.toFile());
} catch (FileNotFoundException e) {
log.error("Cannot write to {}", outputPath, e);
}
// We cannot start reading the logs right away because we'd only read an error message
// about the container being started - we'll defer it until all containers become ready.
}
Map<String, String> labels = new HashMap<>();
boolean usingRecommendedLabels = false;
for (String key : K8S_RECOMMENDED_LABELS) {
var slashIndex = key.indexOf('/');
var value = Properties.get("io.hyperfoil.deployer.k8s.label." + (key.substring(slashIndex + 1)), null);
if (value != null) {
usingRecommendedLabels = true;
labels.put(key, value);
}
}
if (usingRecommendedLabels) {
labels.putIfAbsent("app.kubernetes.io/name", "hyperfoil");
labels.putIfAbsent("app.kubernetes.io/version", Version.VERSION);
labels.putIfAbsent("app.kubernetes.io/component", "agent");
labels.putIfAbsent("app.kubernetes.io/managed-by", "hyperfoil");
labels.putIfAbsent("app.kubernetes.io/created-by", "hyperfoil");
} else {
labels.put("role", "agent");
if (APP != null) {
labels.put("app", APP);
}
}
agent.properties.forEach((k, v) -> {
if (k.startsWith(POD_LABEL_PROPERTY_PREFIX)) {
labels.put(k.substring(POD_LABEL_PROPERTY_PREFIX.length()), v);
}
});
// @formatter:off
Pod toCreate = new PodBuilder()
.withNewMetadata()
.withNamespace(NAMESPACE)
.withName(podName)
.withLabels(labels)
.endMetadata()
.withSpec(spec.build())
.build();
// @formatter:on
Pod created = client.pods().inNamespace(NAMESPACE).resource(toCreate).create();
K8sAgent k8sAgent = new K8sAgent(agent, client, created, stop, outputPath, output);
if (output != null) {
client.pods().inNamespace(NAMESPACE).withName(podName).watch(new AgentWatcher(podName, k8sAgent));
}
return k8sAgent;
}
@Override
public boolean hasControllerLog() {
return true;
}
@Override
public void downloadControllerLog(long offset, long maxLength, String destinationFile, Handler<AsyncResult<Void>> handler) {
downloadRunningLog(CONTROLLER_POD_NAME, offset, maxLength, destinationFile, handler);
}
@Override
public void downloadAgentLog(DeployedAgent deployedAgent, long offset, long maxLength, String destinationFile,
Handler<AsyncResult<Void>> handler) {
K8sAgent agent = (K8sAgent) deployedAgent;
ensureClient();
if (agent.outputPath != null) {
try (InputStream stream = new FileInputStream(agent.outputPath.toFile())) {
skipBytes(offset, stream);
Files.copy(new TruncatingInputStream(stream, maxLength), Paths.get(destinationFile),
StandardCopyOption.REPLACE_EXISTING);
handler.handle(Future.succeededFuture());
} catch (IOException e) {
handler.handle(Future.failedFuture(e));
}
} else {
downloadRunningLog(agent.pod.getMetadata().getName(), offset, maxLength, destinationFile, handler);
}
}
private void skipBytes(long offset, InputStream stream) throws IOException {
while (offset > 0) {
long skipped = stream.skip(offset);
if (skipped == 0) {
break;
}
offset -= skipped;
}
}
private void downloadRunningLog(String podName, long offset, long maxLength, String destinationFile,
Handler<AsyncResult<Void>> handler) {
ensureClient();
try {
PodResource podResource = client.pods().inNamespace(NAMESPACE).withName(podName);
try (InputStream stream = getLog(podResource)) {
skipBytes(offset, stream);
Files.copy(new TruncatingInputStream(stream, maxLength), Paths.get(destinationFile),
StandardCopyOption.REPLACE_EXISTING);
}
handler.handle(Future.succeededFuture());
} catch (IOException e) {
handler.handle(Future.failedFuture(e));
}
}
/**
* Gets a streaming InputStream for a pod's logs.
*/
private InputStream getLog(PodResource podResource) throws IOException {
return podResource.getLogInputStream();
}
@Override
public void close() {
client.close();
}
@MetaInfServices(Deployer.Factory.class)
public static class Factory implements Deployer.Factory {
@Override
public String name() {
return "k8s";
}
@Override
public K8sDeployer create() {
return new K8sDeployer();
}
}
private static class TruncatingInputStream extends InputStream {
private final InputStream stream;
private long remain;
public TruncatingInputStream(InputStream stream, long maxLength) {
this.stream = stream;
remain = maxLength;
}
@Override
public int read() throws IOException {
if (remain <= 0) {
return -1;
}
int b = stream.read();
if (b >= 0) {
remain--;
}
return b;
}
@Override
public int read(byte[] b, int off, int len) throws IOException {
if (remain <= 0) {
return -1;
}
int n = stream.read(b, off, Math.toIntExact(Math.min(len, remain)));
if (n > 0) {
remain -= n;
}
return n;
}
}
private class AgentWatcher implements Watcher<Pod> {
private final String podName;
private final K8sAgent agent;
AgentWatcher(String podName, K8sAgent agent) {
this.podName = podName;
this.agent = agent;
}
@Override
public void eventReceived(Action action, Pod resource) {
if (resource.getStatus().getConditions().stream()
.filter(c -> "Ready".equalsIgnoreCase(c.getType()))
.anyMatch(c -> "True".equalsIgnoreCase(c.getStatus()))) {
if (agent.logWatch != null) {
return;
}
agent.logWatch = client.pods().inNamespace(NAMESPACE).withName(podName).watchLog(agent.output);
}
}
@Override
public void onClose(WatcherException cause) {
}
}
}
| java | Apache-2.0 | 70ad9cad7ba105e88d7c62e7b65892ecd288f034 | 2026-01-05T02:38:03.557103Z | false |
Hyperfoil/Hyperfoil | https://github.com/Hyperfoil/Hyperfoil/blob/70ad9cad7ba105e88d7c62e7b65892ecd288f034/k8s-deployer/src/main/java/io/hyperfoil/deploy/k8s/K8sAgent.java | k8s-deployer/src/main/java/io/hyperfoil/deploy/k8s/K8sAgent.java | package io.hyperfoil.deploy.k8s;
import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.file.Path;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import io.fabric8.kubernetes.api.model.Pod;
import io.fabric8.kubernetes.client.KubernetesClient;
import io.fabric8.kubernetes.client.dsl.LogWatch;
import io.hyperfoil.api.config.Agent;
import io.hyperfoil.api.deployment.DeployedAgent;
class K8sAgent implements DeployedAgent {
private static final Logger log = LogManager.getLogger(K8sAgent.class);
final Agent def;
final KubernetesClient client;
final Pod pod;
final boolean stop;
final Path outputPath;
final FileOutputStream output;
LogWatch logWatch;
K8sAgent(Agent def, KubernetesClient client, Pod pod, boolean stop, Path outputPath, FileOutputStream output) {
this.def = def;
this.client = client;
this.pod = pod;
this.stop = stop;
this.outputPath = outputPath;
this.output = output;
}
@Override
public void stop() {
if (stop) {
client.pods().inNamespace(pod.getMetadata().getNamespace()).delete(pod);
}
if (logWatch != null) {
logWatch.close();
}
if (output != null) {
try {
output.close();
} catch (IOException e) {
log.error("Failed to close log output.", e);
}
}
}
}
| java | Apache-2.0 | 70ad9cad7ba105e88d7c62e7b65892ecd288f034 | 2026-01-05T02:38:03.557103Z | false |
Hyperfoil/Hyperfoil | https://github.com/Hyperfoil/Hyperfoil/blob/70ad9cad7ba105e88d7c62e7b65892ecd288f034/clustering/src/test/java/io/hyperfoil/controller/JsonWriterTest.java | clustering/src/test/java/io/hyperfoil/controller/JsonWriterTest.java | package io.hyperfoil.controller;
import static org.junit.jupiter.api.Assertions.assertEquals;
import java.io.StringWriter;
import org.junit.jupiter.api.Test;
import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import io.hyperfoil.api.config.Benchmark;
import io.hyperfoil.api.config.SLA;
import io.hyperfoil.api.statistics.StatisticsSnapshot;
import io.vertx.core.json.JsonObject;
public class JsonWriterTest {
   /**
    * Verifies that a failure serialized by {@link JsonWriter} carries the expected
    * percentile response times computed from the failure's histogram.
    */
   @Test
   public void shouldComputePercentileResponseTimeForFailure() throws Exception {
      Benchmark benchmark = Benchmark.forTesting();
      StatisticsStore store = new StatisticsStore(benchmark, f -> {
      });
      // Record a known set of latencies into a fresh snapshot.
      StatisticsSnapshot snapshot = new StatisticsSnapshot();
      long[] samples = { 100, 200, 300, 400, 500, 600, 700, 800, 900, 1000 };
      for (long sample : samples) {
         snapshot.histogram.recordValue(sample);
      }
      // Keep an independent copy so we can compute the expected percentiles later.
      var latencies = snapshot.histogram.copy();
      // Register a failure backed by that snapshot.
      store.addFailure(new SLA.Failure(null, "phase1", "metric1", snapshot, "cause"));
      // Serialize the store to JSON.
      StringWriter writer = new StringWriter();
      JsonFactory jsonFactory = new JsonFactory();
      jsonFactory.setCodec(new ObjectMapper());
      try (JsonGenerator generator = jsonFactory.createGenerator(writer)) {
         JsonWriter.writeArrayJsons(store, generator, new JsonObject());
      }
      // Parse the output and locate the percentile map of the first failure.
      JsonNode percentiles = new ObjectMapper().readTree(writer.toString())
            .path("failures").get(0).path("percentileResponseTime");
      String[] expectedNames = { "50.0", "90.0", "99.0", "99.9", "99.99" };
      assertEquals(expectedNames.length, percentiles.size());
      // Field order must match the canonical percentile order.
      var actualNames = percentiles.fieldNames();
      for (String expectedName : expectedNames) {
         assertEquals(expectedName, actualNames.next());
      }
      // Each serialized value must equal the value from the recorded histogram.
      for (String name : expectedNames) {
         double expectedValue = latencies.getValueAtPercentile(Double.parseDouble(name));
         assertEquals(expectedValue, percentiles.get(name).asDouble(), "Percentile " + name);
      }
   }
}
| java | Apache-2.0 | 70ad9cad7ba105e88d7c62e7b65892ecd288f034 | 2026-01-05T02:38:03.557103Z | false |
Hyperfoil/Hyperfoil | https://github.com/Hyperfoil/Hyperfoil/blob/70ad9cad7ba105e88d7c62e7b65892ecd288f034/clustering/src/test/java/io/hyperfoil/controller/CsvWriterTest.java | clustering/src/test/java/io/hyperfoil/controller/CsvWriterTest.java | package io.hyperfoil.controller;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.List;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.io.TempDir;
import io.hyperfoil.api.config.Benchmark;
public class CsvWriterTest {
   /**
    * Exporting an empty store must still emit total.csv whose first line is the
    * full, correctly ordered CSV header.
    */
   @Test
   public void testWriteCsvProducesCorrectHeader(@TempDir Path dir) throws IOException {
      StatisticsStore emptyStore = new StatisticsStore(Benchmark.forTesting(), f -> {
      });
      CsvWriter.writeCsv(dir, emptyStore);
      List<String> csvLines = Files.readAllLines(dir.resolve("total.csv"));
      assertFalse(csvLines.isEmpty());
      String expectedHeader = "Phase,Metric,Start,End,"
            + "Requests,Responses,Mean,StdDev,Min,"
            + "p50.0,p90.0,p99.0,p99.9,p99.99,Max"
            + ",ConnectionErrors,RequestTimeouts,InternalErrors,Invalid,BlockedTime"
            + ",MinSessions,MaxSessions";
      assertEquals(expectedHeader, csvLines.get(0));
   }
}
| java | Apache-2.0 | 70ad9cad7ba105e88d7c62e7b65892ecd288f034 | 2026-01-05T02:38:03.557103Z | false |
Hyperfoil/Hyperfoil | https://github.com/Hyperfoil/Hyperfoil/blob/70ad9cad7ba105e88d7c62e7b65892ecd288f034/clustering/src/main/java/io/hyperfoil/LocalController.java | clustering/src/main/java/io/hyperfoil/LocalController.java | package io.hyperfoil;
import java.nio.file.Path;
import java.util.concurrent.CompletableFuture;
import org.kohsuke.MetaInfServices;
import io.hyperfoil.clustering.Codecs;
import io.hyperfoil.clustering.ControllerVerticle;
import io.hyperfoil.internal.Controller;
import io.hyperfoil.internal.Properties;
import io.vertx.core.DeploymentOptions;
import io.vertx.core.Vertx;
import io.vertx.core.json.JsonObject;
/**
 * In-process controller implementation: runs a non-clustered Vert.x instance
 * with a {@link ControllerVerticle} deployed on an ephemeral port.
 */
public class LocalController implements Controller {
   private final Vertx vertx;
   private final String host;
   private final int port;

   public LocalController(Vertx vertx, String host, int port) {
      this.vertx = vertx;
      this.host = host;
      this.port = port;
   }

   @Override
   public String host() {
      return host;
   }

   @Override
   public int port() {
      return port;
   }

   /**
    * Shuts the embedded Vert.x down and blocks until the shutdown completes
    * (or rethrows its failure).
    */
   @Override
   public void stop() {
      CompletableFuture<Void> closed = new CompletableFuture<>();
      vertx.close(result -> {
         if (result.failed()) {
            closed.completeExceptionally(result.cause());
         } else {
            closed.complete(null);
         }
      });
      closed.join();
   }

   @MetaInfServices(Controller.Factory.class)
   public static class Factory implements Controller.Factory {
      /**
       * Boots a local controller bound to 127.0.0.1 on a random port and blocks
       * until the verticle is deployed.
       *
       * @param rootDir controller root directory; when null the default is used
       */
      @Override
      public Controller start(Path rootDir) {
         if (rootDir == null) {
            rootDir = Controller.DEFAULT_ROOT_DIR;
         } else {
            // TODO: setting property could break test suite but it's that easy to override all uses of Controller.ROOT_DIR
            System.setProperty(Properties.ROOT_DIR, rootDir.toFile().getAbsolutePath());
         }
         // Port 0 lets the controller pick an ephemeral port; we read it back below.
         JsonObject config = new JsonObject()
               .put(Properties.CONTROLLER_LOG, rootDir.resolve("hyperfoil.local.log").toFile().getAbsolutePath())
               .put(Properties.CONTROLLER_HOST, "127.0.0.1")
               .put(Properties.CONTROLLER_PORT, 0);
         Vertx vertx = Vertx.vertx();
         Codecs.register(vertx);
         Hyperfoil.ensureNettyResourceLeakDetection();
         CompletableFuture<Integer> actualPort = new CompletableFuture<>();
         ControllerVerticle controller = new ControllerVerticle();
         vertx.deployVerticle(controller, new DeploymentOptions().setConfig(config), event -> {
            if (event.failed()) {
               actualPort.completeExceptionally(event.cause());
            } else {
               actualPort.complete(controller.actualPort());
            }
         });
         // Block until deployment finishes so callers get a ready controller.
         return new LocalController(vertx, "127.0.0.1", actualPort.join());
      }
   }
}
| java | Apache-2.0 | 70ad9cad7ba105e88d7c62e7b65892ecd288f034 | 2026-01-05T02:38:03.557103Z | false |
Hyperfoil/Hyperfoil | https://github.com/Hyperfoil/Hyperfoil/blob/70ad9cad7ba105e88d7c62e7b65892ecd288f034/clustering/src/main/java/io/hyperfoil/Hyperfoil.java | clustering/src/main/java/io/hyperfoil/Hyperfoil.java | package io.hyperfoil;
import static io.hyperfoil.internal.Properties.CLUSTER_JGROUPS_STACK;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.net.InetAddress;
import java.net.NetworkInterface;
import java.net.SocketException;
import java.net.UnknownHostException;
import java.util.Arrays;
import java.util.BitSet;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.message.FormattedMessage;
import org.infinispan.commons.dataconversion.MediaType;
import org.infinispan.commons.util.FileLookupFactory;
import org.infinispan.configuration.parsing.ConfigurationBuilderHolder;
import org.infinispan.configuration.parsing.ParserRegistry;
import org.infinispan.factories.GlobalComponentRegistry;
import org.infinispan.manager.DefaultCacheManager;
import org.infinispan.remoting.transport.Transport;
import org.infinispan.remoting.transport.jgroups.JGroupsTransport;
import org.jgroups.JChannel;
import org.jgroups.protocols.TP;
import io.hyperfoil.api.Version;
import io.hyperfoil.clustering.AgentVerticle;
import io.hyperfoil.clustering.Codecs;
import io.hyperfoil.clustering.ControllerVerticle;
import io.hyperfoil.internal.Properties;
import io.netty.util.ResourceLeakDetector;
import io.vertx.core.DeploymentOptions;
import io.vertx.core.Future;
import io.vertx.core.Verticle;
import io.vertx.core.Vertx;
import io.vertx.core.VertxOptions;
import io.vertx.core.impl.VertxInternal;
import io.vertx.core.spi.cluster.ClusterManager;
import io.vertx.ext.cluster.infinispan.InfinispanClusterManager;
/**
 * Entry points for Hyperfoil deployments. {@link Agent} and {@link Controller} start
 * a clustered Vert.x node (Infinispan + JGroups transport), {@link Standalone} starts
 * a single non-clustered controller. Also hosts shared bootstrap helpers.
 */
public class Hyperfoil {
   static final Logger log = LogManager.getLogger(Hyperfoil.class);
   // Loopback addresses: a hostname resolving to one of these cannot be used for clustering.
   private static final Set<String> LOCALHOST_IPS = new HashSet<>(Arrays.asList("127.0.0.1", "::1", "[::1]"));

   /**
    * Starts a clustered Vert.x instance.
    * <p>
    * For a controller the clustering address is taken from {@link Properties#CONTROLLER_CLUSTER_IP}
    * (falling back to the local hostname). For an agent that property is mandatory and the agent
    * picks the local NIC whose address shares the longest prefix with the controller's address.
    *
    * @param isController true when this node is the controller, false for agent/auxiliary nodes
    * @return future completed with the clustered Vert.x instance, or failed on misconfiguration
    */
   public static Future<Vertx> clusteredVertx(boolean isController) {
      logVersion();
      Thread.setDefaultUncaughtExceptionHandler(Hyperfoil::defaultUncaughtExceptionHandler);
      log.info("Starting Vert.x...");
      VertxOptions options = new VertxOptions();
      try {
         String clusterIp = Properties.get(Properties.CONTROLLER_CLUSTER_IP, null);
         InetAddress address;
         if (isController) {
            if (clusterIp == null) {
               address = InetAddress.getLocalHost();
            } else {
               address = InetAddress.getByName(clusterIp);
            }
         } else {
            if (clusterIp == null) {
               return Future.failedFuture("Controller clustering IP was not set on agent/auxiliary node.");
            }
            InetAddress bestMatch = getAddressWithBestMatch(InetAddress.getByName(clusterIp));
            if (bestMatch != null) {
               address = bestMatch;
            } else {
               address = InetAddress.getLocalHost();
               log.warn("No match found between controller IP ({}) and local addresses, using address {}", clusterIp, address);
            }
         }
         String hostName = address.getHostName();
         String hostAddress = address.getHostAddress();
         log.info("Using host name {}/{}", hostName, hostAddress);
         // A hostname resolving to loopback would make the node unreachable from other machines.
         if (LOCALHOST_IPS.contains(hostAddress) && clusterIp == null) {
            log.error("This machine is configured to resolve its hostname to 127.0.0.1; this is " +
                  "an invalid configuration for clustering. Make sure `hostname -i` does not return 127.0.0.1 or ::1 " +
                  " or set -D{}=x.x.x.x to use different address. " +
                  "(if you set that to 127.0.0.1 you won't be able to connect from agents on other machines).",
                  Properties.CONTROLLER_CLUSTER_IP);
            return Future.failedFuture("Hostname resolves to 127.0.0.1");
         }
         // We are using numeric address because if this is running in a pod its hostname
         // wouldn't be resolvable even within the cluster/namespace.
         options.getEventBusOptions().setHost(hostAddress).setClusterPublicHost(hostAddress);
         // Do not override if it's manually set for some special reason
         if (System.getProperty("jgroups.tcp.address") == null) {
            System.setProperty("jgroups.tcp.address", hostAddress);
         }
         String clusterPort = Properties.get(Properties.CONTROLLER_CLUSTER_PORT, null);
         if (isController && clusterPort != null && System.getProperty("jgroups.tcp.port") == null) {
            System.setProperty("jgroups.tcp.port", clusterPort);
         }
         if (!isController) {
            // Agents use TCPPING to find the controller directly instead of multicast discovery.
            String initialHosts = clusterIp;
            if (clusterPort != null)
               initialHosts = String.format("%s[%s]", initialHosts, clusterPort);
            log.info("Starting agent with controller: {}", initialHosts);
            System.setProperty("jgroups.tcpping.initial_hosts", initialHosts);
            System.setProperty(CLUSTER_JGROUPS_STACK, "jgroups-tcp-agent.xml");
         }
      } catch (UnknownHostException e) {
         log.error("Cannot lookup hostname", e);
         return Future.failedFuture("Cannot lookup hostname");
      }
      DefaultCacheManager cacheManager = createCacheManager();
      populateProperties(cacheManager);
      return Vertx.builder()
            .with(options)
            .withClusterManager(new InfinispanClusterManager(cacheManager))
            .buildClustered()
            .onSuccess(vertx -> {
               Codecs.register(vertx);
               ensureNettyResourceLeakDetection();
            })
            .onFailure(error -> log.error("Cannot start Vert.x", error));
   }

   /**
    * Publishes the actual JGroups bind address/port as system properties so that
    * other components (and spawned agents) can discover this node.
    */
   private static void populateProperties(DefaultCacheManager dcm) {
      JGroupsTransport transport = (JGroupsTransport) GlobalComponentRegistry.componentOf(dcm, Transport.class);
      JChannel channel = transport.getChannel();
      TP tp = channel.getProtocolStack().getTransport();
      System.setProperty(Properties.CONTROLLER_CLUSTER_IP, tp.getBindAddress().getHostAddress());
      System.setProperty(Properties.CONTROLLER_CLUSTER_PORT, String.valueOf(tp.getBindPort()));
      log.info("Using {}:{} as clustering address", tp.getBindAddress().getHostAddress(), tp.getBindPort());
   }

   /**
    * Picks the local (non-loopback, up) interface address sharing the longest common
    * bit prefix with the controller's address, or null when none can be enumerated.
    * NOTE(review): BitSet.valueOf() orders bits LSB-first within each byte, so ties inside
    * the first differing byte are not resolved in network-prefix order — verify intended.
    */
   private static InetAddress getAddressWithBestMatch(InetAddress controllerAddress) {
      InetAddress address = null;
      try {
         List<InetAddress> allAddresses = Collections.list(NetworkInterface.getNetworkInterfaces()).stream().filter(nic -> {
            try {
               return !nic.isLoopback() && nic.isUp();
            } catch (SocketException e) {
               log.warn("Error enumerating NIC {}", nic, e);
               return false;
            }
         }).flatMap(nic -> Collections.list(nic.getInetAddresses()).stream()).collect(Collectors.toList());
         log.info("Agent must choose NIC with best subnet match to controller ({}/{}), available IPs: {} (loopback is ignored)",
               controllerAddress.getHostName(), controllerAddress.getHostAddress(), allAddresses);
         int longestMatch = -1;
         BitSet controllerBits = BitSet.valueOf(controllerAddress.getAddress());
         for (InetAddress a : allAddresses) {
            // IPv4 vs IPv6 addresses are incomparable; require the same length.
            if (a.getAddress().length != controllerAddress.getAddress().length) {
               log.debug("Ignoring {} as this has different address length", a);
               continue;
            }
            BitSet aBits = BitSet.valueOf(a.getAddress());
            int i = 0;
            while (i < aBits.length() && aBits.get(i) == controllerBits.get(i)) {
               ++i;
            }
            log.debug("{} and {} have common prefix {} bits", controllerAddress, a, i);
            if (i > longestMatch) {
               longestMatch = i;
               address = a;
            }
         }
      } catch (SocketException e) {
         log.warn("Error enumerating NICs", e);
      }
      return address;
   }

   /**
    * Builds the Infinispan cache manager from infinispan.xml on the classpath,
    * wiring the JGroups transport with current system properties. Exits the JVM
    * when the configuration cannot be loaded.
    */
   private static DefaultCacheManager createCacheManager() {
      try (InputStream stream = FileLookupFactory.newInstance().lookupFile("infinispan.xml",
            Thread.currentThread().getContextClassLoader())) {
         ConfigurationBuilderHolder holder = new ParserRegistry().parse(stream, MediaType.APPLICATION_XML);
         holder.getGlobalConfigurationBuilder().transport().defaultTransport()
               .withProperties(System.getProperties())
               .initialClusterSize(1);
         return new DefaultCacheManager(holder, true);
      } catch (IOException e) {
         // Log the cause as well - without it a misconfiguration is undiagnosable from the log.
         log.error("Cannot load Infinispan configuration", e);
         System.exit(1);
         return null;
      }
   }

   /** Deploys the given verticle class, exiting the JVM on deployment failure. */
   static void deploy(Vertx vertx, Class<? extends Verticle> verticleClass) {
      log.info("Deploying {}...", verticleClass.getSimpleName());
      vertx.deployVerticle(verticleClass, new DeploymentOptions(), event -> {
         if (event.succeeded()) {
            log.info("{} deployed.", verticleClass.getSimpleName());
         } else {
            log.error("Failed to deploy " + verticleClass.getSimpleName(), event.cause());
            System.exit(1);
         }
      });
   }

   /**
    * Re-enables Netty's resource leak detection (Vert.x disables it in VertxImpl's
    * static initializer), honoring io.netty.leakDetection.level when parseable.
    */
   static void ensureNettyResourceLeakDetection() {
      // Vert.x disables Netty's memory leak detection in VertxImpl static ctor - we need to revert that
      String leakDetectionLevel = System.getProperty("io.netty.leakDetection.level");
      if (leakDetectionLevel != null) {
         leakDetectionLevel = leakDetectionLevel.trim();
         for (ResourceLeakDetector.Level level : ResourceLeakDetector.Level.values()) {
            if (leakDetectionLevel.equalsIgnoreCase(level.name())
                  || leakDetectionLevel.equals(String.valueOf(level.ordinal()))) {
               ResourceLeakDetector.setLevel(level);
               return;
            }
         }
         log.warn("Cannot parse Netty leak detection level '{}', use one of: {}",
               leakDetectionLevel, ResourceLeakDetector.Level.values());
      }
      ResourceLeakDetector.setLevel(ResourceLeakDetector.Level.SIMPLE);
   }

   /**
    * Closes Vert.x and afterwards the underlying Infinispan cache manager.
    *
    * @return the Vert.x close future (completes before the cache manager is closed)
    */
   public static Future<Void> shutdownVertx(Vertx vertx) {
      ClusterManager clusterManager = ((VertxInternal) vertx).getClusterManager();
      DefaultCacheManager cacheManager = (DefaultCacheManager) ((InfinispanClusterManager) clusterManager).getCacheContainer();
      return vertx.close().onComplete(result -> {
         try {
            cacheManager.close();
         } catch (IOException e) {
            log.error("Failed to close Infinispan cache manager", e);
         }
      });
   }

   /** Clustered agent entry point. */
   public static class Agent extends Hyperfoil {
      public static void main(String[] args) {
         clusteredVertx(false)
               .onSuccess(vertx -> deploy(vertx, AgentVerticle.class))
               .onFailure(error -> System.exit(1));
      }
   }

   /** Clustered controller entry point. */
   public static class Controller extends Hyperfoil {
      public static void main(String[] args) {
         clusteredVertx(true)
               .onSuccess(vertx -> deploy(vertx, ControllerVerticle.class))
               .onFailure(error -> System.exit(1));
      }
   }

   /** Non-clustered (single JVM) controller entry point. */
   public static class Standalone extends Hyperfoil {
      public static void main(String[] args) {
         logVersion();
         Thread.setDefaultUncaughtExceptionHandler(Hyperfoil::defaultUncaughtExceptionHandler);
         log.info("Starting non-clustered Vert.x...");
         Vertx vertx = Vertx.vertx();
         ensureNettyResourceLeakDetection();
         Codecs.register(vertx);
         deploy(vertx, ControllerVerticle.class);
      }
   }

   private static void defaultUncaughtExceptionHandler(Thread thread, Throwable throwable) {
      log.error(new FormattedMessage("Uncaught exception in thread {}({})", thread.getName(), thread.getId()), throwable);
   }

   /** Logs JVM, Hyperfoil version and directory configuration at startup. */
   private static void logVersion() {
      log.info("Java: {} {} {} {} ({}), CWD {}",
            System.getProperty("java.vm.vendor", "<unknown VM vendor>"),
            System.getProperty("java.vm.name", "<unknown VM name>"),
            System.getProperty("java.version", "<unknown version>"),
            System.getProperty("java.vm.version", "<unknown VM version>"),
            System.getProperty("java.home", "<unknown Java home>"),
            System.getProperty("user.dir", "<unknown current dir>"));
      String path = new File(Hyperfoil.class.getProtectionDomain().getCodeSource().getLocation().getPath()).getParentFile()
            .getParent();
      log.info("Hyperfoil: {} ({})", Version.VERSION, Version.COMMIT_ID);
      log.info(" DISTRIBUTION: {}", path);
      log.info(" ROOT_DIR: {}", io.hyperfoil.internal.Controller.ROOT_DIR);
      log.info(" BENCHMARK_DIR: {}", io.hyperfoil.internal.Controller.BENCHMARK_DIR);
      log.info(" RUN_DIR: {}", io.hyperfoil.internal.Controller.RUN_DIR);
      log.info(" HOOKS_DIR: {}", io.hyperfoil.internal.Controller.HOOKS_DIR);
      System.getProperties().forEach((n, value) -> {
         String name = String.valueOf(n);
         if (name.startsWith("io.hyperfoil.") || name.startsWith("jgroups.")) {
            log.debug("System property {} = {}", name, value);
         }
      });
   }
}
| java | Apache-2.0 | 70ad9cad7ba105e88d7c62e7b65892ecd288f034 | 2026-01-05T02:38:03.557103Z | false |
Hyperfoil/Hyperfoil | https://github.com/Hyperfoil/Hyperfoil/blob/70ad9cad7ba105e88d7c62e7b65892ecd288f034/clustering/src/main/java/io/hyperfoil/controller/JsonLoader.java | clustering/src/main/java/io/hyperfoil/controller/JsonLoader.java | package io.hyperfoil.controller;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.SortedMap;
import java.util.TreeMap;
import org.HdrHistogram.Histogram;
import io.hyperfoil.api.config.SLA;
import io.hyperfoil.api.statistics.StatisticsSnapshot;
import io.hyperfoil.api.statistics.StatisticsSummary;
import io.hyperfoil.api.statistics.StatsExtension;
import io.vertx.core.json.JsonArray;
import io.vertx.core.json.JsonObject;
/**
 * Loads a run previously serialized by {@link JsonWriter} back into a {@link StatisticsStore}.
 * Histograms are reconstructed from the "linear" bucket list and are therefore approximate
 * (each sample is recorded at the bucket midpoint).
 */
public class JsonLoader {
   /**
    * Parses the JSON document and populates {@code store} with failures, per-phase stats,
    * session pool records, per-agent stats, connection pool stats and CPU usage.
    *
    * @param text  the full JSON document produced by {@link JsonWriter}
    * @param store the target store, mutated in place
    * @return the same {@code store}, for chaining
    * @throws IllegalArgumentException when the document's $schema is not recognized
    */
   public static StatisticsStore read(String text, StatisticsStore store) {
      JsonObject object = new JsonObject(text);
      String schema = object.getString("$schema");
      if (!JsonWriter.RUN_SCHEMA.equals(schema)) {
         throw new IllegalArgumentException("Schema " + schema + " is not recognized.");
      }
      for (Object item : object.getJsonArray("failures")) {
         JsonObject failure = (JsonObject) item;
         // Only the timestamps are restored; the failure's histogram data is not serialized back.
         StatisticsSnapshot snapshot = new StatisticsSnapshot();
         snapshot.histogram.setStartTimeStamp(failure.getLong("start"));
         snapshot.histogram.setEndTimeStamp(failure.getLong("end"));
         // ignoring percentiles
         store.addFailure(new SLA.Failure(null, failure.getString("phase"), failure.getString("metric"), snapshot,
               failure.getString("message")));
      }
      int dataCounter = 0;
      // TODO: there could be multiple Data per phase+metric but stepId is not in JSON
      Map<String, Map<String, Data>> dataMap = new HashMap<>();
      for (Object item : object.getJsonArray("stats")) {
         JsonObject stats = (JsonObject) item;
         // stepId is not serialized, so every Data is created with stepId 0 and no SLAs.
         Data data = new Data(store, stats.getString("name"), stats.getBoolean("isWarmup"), 0, stats.getString("metric"),
               Collections.emptyMap(), new SLA[0]);
         dataMap.computeIfAbsent(data.phase, p -> new HashMap<>()).putIfAbsent(data.metric, data);
         store.addData(dataCounter++, data.metric, data);
         loadSnapshot(stats.getJsonObject("total"), data.total);
         // We cannot use percentiles histogram since it always tells only upper bounds on the response time
         // and the results would be way of (at least for the first bucket)
         loadHistogram(stats.getJsonObject("histogram").getJsonArray("linear"), data.total.histogram);
         loadSeries(stats.getJsonArray("series"), data.series);
      }
      for (Object item : object.getJsonArray("sessions")) {
         JsonObject ss = (JsonObject) item;
         StatisticsStore.SessionPoolStats sps = new StatisticsStore.SessionPoolStats();
         store.sessionPoolStats.put(ss.getString("name"), sps);
         // Per-agent time series of min/max concurrent sessions.
         for (Object r : ss.getJsonArray("sessions")) {
            JsonObject record = (JsonObject) r;
            String agent = record.getString("agent");
            long timestamp = record.getLong("timestamp");
            int min = record.getInteger("minSessions");
            int max = record.getInteger("maxSessions");
            StatisticsStore.SessionPoolRecord ssRecord = new StatisticsStore.SessionPoolRecord(timestamp, min, max);
            sps.records.computeIfAbsent(agent, a -> new ArrayList<>()).add(ssRecord);
         }
      }
      for (Object item : object.getJsonArray("agents")) {
         JsonObject agent = (JsonObject) item;
         String name = agent.getString("name");
         for (Object s : agent.getJsonArray("stats")) {
            JsonObject stats = (JsonObject) s;
            String phase = stats.getString("name");
            String metric = stats.getString("metric");
            boolean isWarmup = stats.getBoolean("isWarmup");
            // Reuse the Data created from the global "stats" array when present.
            Data data = dataMap.computeIfAbsent(phase, p -> new HashMap<>())
                  .computeIfAbsent(metric,
                        m -> new Data(store, phase, isWarmup, 0, metric, Collections.emptyMap(), new SLA[0]));
            StatisticsSnapshot snapshot = new StatisticsSnapshot();
            loadSnapshot(stats.getJsonObject("total"), snapshot);
            loadHistogram(stats.getJsonObject("histogram").getJsonArray("linear"), snapshot.histogram);
            data.perAgent.put(name, snapshot);
            loadSeries(stats.getJsonArray("series"), data.agentSeries.computeIfAbsent(name, a -> new ArrayList<>()));
         }
      }
      // connections: target -> connection type -> agent -> list of (timestamp, min, max).
      for (var targetEntry : object.getJsonObject("connections")) {
         String target = targetEntry.getKey();
         var targetMap = store.connectionPoolStats.computeIfAbsent(target, t -> new HashMap<>());
         for (var typeEntry : (JsonObject) targetEntry.getValue()) {
            String type = typeEntry.getKey();
            var typeMap = targetMap.computeIfAbsent(type, t -> new HashMap<>());
            for (Object item : (JsonArray) typeEntry.getValue()) {
               JsonObject record = (JsonObject) item;
               List<StatisticsStore.ConnectionPoolStats> list = typeMap.computeIfAbsent(record.getString("agent"),
                     a -> new ArrayList<>());
               list.add(new StatisticsStore.ConnectionPoolStats(record.getLong("timestamp"), record.getInteger("min"),
                     record.getInteger("max")));
            }
         }
      }
      // agentCpu is optional (older runs may not contain it): phase -> agent -> usage string.
      JsonObject agentCpu = object.getJsonObject("agentCpu");
      if (agentCpu != null) {
         for (var phaseEntry : agentCpu) {
            HashMap<String, String> phaseData = new HashMap<>();
            store.cpuUsage.putIfAbsent(phaseEntry.getKey(), phaseData);
            for (var agentEntry : (JsonObject) phaseEntry.getValue()) {
               phaseData.put(agentEntry.getKey(), String.valueOf(agentEntry.getValue()));
            }
         }
      }
      return store;
   }

   /**
    * Restores timestamps and summary counters of one snapshot. Histogram contents are
    * loaded separately via {@link #loadHistogram(JsonArray, Histogram)}.
    */
   private static void loadSnapshot(JsonObject object, StatisticsSnapshot total) {
      total.histogram.setStartTimeStamp(object.getLong("start"));
      total.histogram.setEndTimeStamp(object.getLong("end"));
      JsonObject summary = object.getJsonObject("summary");
      total.requestCount = summary.getInteger("requestCount");
      total.responseCount = summary.getInteger("responseCount");
      total.invalid = summary.getInteger("invalid");
      total.connectionErrors = summary.getInteger("connectionErrors");
      total.requestTimeouts = summary.getInteger("requestTimeouts");
      total.internalErrors = summary.getInteger("internalErrors");
      total.blockedTime = summary.getLong("blockedTime");
      JsonObject extensions = object.getJsonObject("extensions");
      // NOTE(review): condition checks isEmpty() but the body is a TODO - presumably this
      // was meant to be !extensions.isEmpty() once extension loading is implemented; verify.
      if (extensions != null && extensions.isEmpty()) {
         // TODO: load extensions
      }
   }

   /**
    * Rebuilds a histogram from linear buckets, recording each bucket's count at its
    * arithmetic midpoint (an approximation of the original distribution).
    */
   private static void loadHistogram(JsonArray array, Histogram histogram) {
      for (Object item : array) {
         JsonObject bucket = (JsonObject) item;
         long from = bucket.getLong("from");
         long to = bucket.getLong("to");
         long count = bucket.getLong("count");
         // should we use arithmetic or geometric mean in here?
         long mid = (from + to) / 2;
         histogram.recordValueWithCount(mid, count);
      }
   }

   /** Restores the per-interval series of {@link StatisticsSummary} items. */
   private static void loadSeries(JsonArray array, List<StatisticsSummary> series) {
      for (Object item : array) {
         JsonObject object = (JsonObject) item;
         long startTime = object.getLong("startTime");
         long endTime = object.getLong("endTime");
         long minResponseTime = object.getLong("minResponseTime");
         long meanResponseTime = object.getLong("meanResponseTime");
         long stdDevResponseTime = object.getLong("stdDevResponseTime");
         long maxResponseTime = object.getLong("maxResponseTime");
         int requestCount = object.getInteger("requestCount");
         int responseCount = object.getInteger("responseCount");
         int invalid = object.getInteger("invalid");
         int connectionErrors = object.getInteger("connectionErrors");
         int requestTimeouts = object.getInteger("requestTimeouts");
         int internalErrors = object.getInteger("internalErrors");
         long blockedTime = object.getLong("blockedTime");
         SortedMap<String, StatsExtension> extensions = Collections.emptySortedMap(); // TODO
         SortedMap<Double, Long> percentiles = toMap(object.getJsonObject("percentileResponseTime"));
         series.add(new StatisticsSummary(startTime, endTime, minResponseTime, meanResponseTime, stdDevResponseTime,
               maxResponseTime, percentiles, requestCount, responseCount, invalid, connectionErrors, requestTimeouts,
               internalErrors, blockedTime, extensions));
      }
   }

   /** Converts a JSON object of stringified percentile -> value into a sorted map. */
   private static SortedMap<Double, Long> toMap(JsonObject object) {
      TreeMap<Double, Long> map = new TreeMap<>();
      for (var entry : object) {
         map.put(Double.parseDouble(entry.getKey()), Long.parseLong(String.valueOf(entry.getValue())));
      }
      return map;
   }
}
| java | Apache-2.0 | 70ad9cad7ba105e88d7c62e7b65892ecd288f034 | 2026-01-05T02:38:03.557103Z | false |
Hyperfoil/Hyperfoil | https://github.com/Hyperfoil/Hyperfoil/blob/70ad9cad7ba105e88d7c62e7b65892ecd288f034/clustering/src/main/java/io/hyperfoil/controller/Data.java | clustering/src/main/java/io/hyperfoil/controller/Data.java | package io.hyperfoil.controller;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import io.hyperfoil.api.config.SLA;
import io.hyperfoil.api.statistics.StatisticsSnapshot;
import io.hyperfoil.api.statistics.StatisticsSummary;
import io.netty.util.collection.IntObjectHashMap;
import io.netty.util.collection.IntObjectMap;
/**
 * Aggregated statistics for one phase/stepId/metric combination. Incoming per-agent
 * snapshots are accumulated into totals immediately; the per-interval time series is
 * compacted lazily with a delay (see {@link #MERGE_DELAY}) because late snapshots for
 * a given interval may still arrive.
 */
final class Data {
   private static final Logger log = LogManager.getLogger(Data.class);
   // When we receive snapshot with order #N we will attempt to compact agent snapshots #(N-60)
   // We are delaying this because the statistics for outlier may come with a significant delay
   private static final int MERGE_DELAY = 60;
   private final StatisticsStore statisticsStore;
   final String phase;
   final boolean isWarmup;
   final int stepId;
   final String metric;
   // for reporting
   final StatisticsSnapshot total = new StatisticsSnapshot();
   final Map<String, StatisticsSnapshot> perAgent = new HashMap<>();
   // Not-yet-compacted snapshots: agent -> sequenceId -> accumulated snapshot.
   final Map<String, IntObjectMap<StatisticsSnapshot>> lastStats = new HashMap<>();
   final List<StatisticsSummary> series = new ArrayList<>();
   final Map<String, List<StatisticsSummary>> agentSeries = new HashMap<>();
   // floating statistics for SLAs
   private final Map<SLA, StatisticsStore.Window> windowSlas;
   private final SLA[] totalSlas;
   // Highest sequenceId seen so far; drives delayed compaction in record().
   private int highestSequenceId = 0;
   private boolean completed;

   Data(StatisticsStore statisticsStore, String phase, boolean isWarmup, int stepId, String metric,
         Map<SLA, StatisticsStore.Window> periodSlas, SLA[] totalSlas) {
      this.statisticsStore = statisticsStore;
      this.phase = phase;
      this.isWarmup = isWarmup;
      this.stepId = stepId;
      this.metric = metric;
      this.windowSlas = periodSlas;
      this.totalSlas = totalSlas;
   }

   /**
    * Records one snapshot from an agent.
    *
    * @return false when the phase is already completed and the snapshot is dropped
    */
   boolean record(String agentName, StatisticsSnapshot stats) {
      if (completed) {
         log.warn("Ignoring statistics for completed {}/{}/{} (from {}, {} requests)", phase, stepId, metric, agentName,
               stats.requestCount);
         return false;
      }
      total.add(stats);
      perAgent.computeIfAbsent(agentName, a -> new StatisticsSnapshot()).add(stats);
      // Accumulate by sequenceId; multiple snapshots for the same id are merged in place.
      IntObjectMap<StatisticsSnapshot> partialSnapshots = lastStats.computeIfAbsent(agentName, a -> new IntObjectHashMap<>());
      StatisticsSnapshot partialSnapshot = partialSnapshots.get(stats.sequenceId);
      if (partialSnapshot == null) {
         partialSnapshots.put(stats.sequenceId, stats);
      } else {
         partialSnapshot.add(stats);
      }
      // Advance one id at a time so every intermediate id gets compacted exactly once,
      // each lagging MERGE_DELAY behind the newest observed sequenceId.
      while (stats.sequenceId > highestSequenceId) {
         ++highestSequenceId;
         int mergedSequenceId = highestSequenceId - MERGE_DELAY;
         if (mergedSequenceId < 0) {
            continue;
         }
         mergeSnapshots(mergedSequenceId);
      }
      return true;
   }

   /**
    * Compacts all agents' snapshots for one sequenceId into the global and per-agent
    * series (removing them from {@link #lastStats}) and feeds the SLA windows.
    */
   private void mergeSnapshots(int sequenceId) {
      StatisticsSnapshot sum = new StatisticsSnapshot();
      for (Map.Entry<String, IntObjectMap<StatisticsSnapshot>> entry : lastStats.entrySet()) {
         StatisticsSnapshot snapshot = entry.getValue().remove(sequenceId);
         if (snapshot != null) {
            sum.add(snapshot);
            agentSeries.computeIfAbsent(entry.getKey(), a -> new ArrayList<>())
                  .add(snapshot.summary(StatisticsStore.PERCENTILES));
         }
      }
      if (!sum.isEmpty()) {
         series.add(sum.summary(StatisticsStore.PERCENTILES));
      }
      for (Map.Entry<SLA, StatisticsStore.Window> entry : windowSlas.entrySet()) {
         SLA sla = entry.getKey();
         StatisticsStore.Window window = entry.getValue();
         window.add(sum);
         // If we haven't filled full window the SLA won't be validated
         SLA.Failure failure = sla.validate(phase, metric, window.current());
         if (window.isFull() && failure != null) {
            statisticsStore.addFailure(failure);
         }
      }
   }

   /**
    * Flushes all pending snapshots, sanity-checks that series totals match the grand
    * total, validates total SLAs and marks this data as completed (further records
    * are dropped).
    * NOTE(review): the lower bound re-merges sequenceId highestSequenceId - MERGE_DELAY,
    * which record() has already compacted; that pass finds no snapshots but still pushes
    * an empty sample into each SLA window - confirm this is intended.
    */
   void completePhase() {
      for (int i = Math.max(0, highestSequenceId - MERGE_DELAY); i <= highestSequenceId; ++i) {
         mergeSnapshots(i);
      }
      // Just sanity checks
      if (series.stream().mapToLong(ss -> ss.requestCount).sum() != total.requestCount) {
         log.error("We lost some data (series) in phase {} metric {}", phase, metric);
      }
      if (agentSeries.values().stream().flatMap(List::stream).mapToLong(ss -> ss.requestCount).sum() != total.requestCount) {
         log.error("We lost some data (agent series) in phase {} metric {}", phase, metric);
      }
      if (perAgent.values().stream().mapToLong(ss -> ss.requestCount).sum() != total.requestCount) {
         log.error("We lost some data (per agent) in phase {} metric {}", phase, metric);
      }
      log.trace("Validating failures for {}/{}", phase, metric);
      for (SLA sla : totalSlas) {
         SLA.Failure failure = sla.validate(phase, metric, total);
         if (failure != null) {
            statisticsStore.addFailure(failure);
         }
      }
      completed = true;
   }

   /** @return true once {@link #completePhase()} has run. */
   boolean isCompleted() {
      return completed;
   }
}
| java | Apache-2.0 | 70ad9cad7ba105e88d7c62e7b65892ecd288f034 | 2026-01-05T02:38:03.557103Z | false |
Hyperfoil/Hyperfoil | https://github.com/Hyperfoil/Hyperfoil/blob/70ad9cad7ba105e88d7c62e7b65892ecd288f034/clustering/src/main/java/io/hyperfoil/controller/WriterUtil.java | clustering/src/main/java/io/hyperfoil/controller/WriterUtil.java | package io.hyperfoil.controller;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.stream.Stream;
class WriterUtil {
   /**
    * Emits entries from all lists in lock-step (round-robin) order: one element from
    * each key per pass, in the map's key iteration order, until every list is drained.
    * Lists of different lengths are supported - exhausted keys are simply skipped.
    *
    * @param map      key -> list of items to interleave
    * @param consumer invoked with (key, item) for every element
    */
   static <T, E extends Throwable> void printInSync(Map<String, List<T>> map, ThrowingBiConsumer<String, T, E> consumer)
         throws E {
      String[] keys = map.keySet().toArray(new String[0]);
      @SuppressWarnings("unchecked")
      Iterator<T>[] cursors = new Iterator[keys.length];
      for (int i = 0; i < keys.length; ++i) {
         cursors[i] = map.get(keys[i]).iterator();
      }
      boolean progress = true;
      while (progress) {
         progress = false;
         for (int i = 0; i < keys.length; ++i) {
            if (cursors[i].hasNext()) {
               consumer.accept(keys[i], cursors[i].next());
               progress = true;
            }
         }
      }
   }

   /** BiConsumer variant whose accept() may throw a checked exception. */
   @FunctionalInterface
   interface ThrowingBiConsumer<A, B, E extends Throwable> {
      void accept(A a, B b) throws E;
   }
}
| java | Apache-2.0 | 70ad9cad7ba105e88d7c62e7b65892ecd288f034 | 2026-01-05T02:38:03.557103Z | false |
Hyperfoil/Hyperfoil | https://github.com/Hyperfoil/Hyperfoil/blob/70ad9cad7ba105e88d7c62e7b65892ecd288f034/clustering/src/main/java/io/hyperfoil/controller/CsvWriter.java | clustering/src/main/java/io/hyperfoil/controller/CsvWriter.java | package io.hyperfoil.controller;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.PrintStream;
import java.io.PrintWriter;
import java.nio.file.Path;
import java.util.Arrays;
import java.util.Comparator;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.stream.Stream;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import io.hyperfoil.api.config.SLA;
import io.hyperfoil.api.statistics.StatisticsSnapshot;
import io.hyperfoil.api.statistics.StatisticsSummary;
import io.hyperfoil.api.statistics.StatsExtension;
import io.hyperfoil.core.util.LowHigh;
public class CsvWriter {
   private static final Logger log = LogManager.getLogger(CsvWriter.class);

   /**
    * Dumps the contents of {@code store} as a set of CSV files under {@code dir}:
    * {@code total.csv} (aggregated per phase/metric), per-phase/metric histogram and
    * time-series files, per-agent aggregates, {@code failures.csv}, per-phase session
    * pool usage, per-endpoint connection pool usage and {@code agentCpu.csv}.
    *
    * NOTE(review): PrintWriter(String) uses the platform default charset — presumably
    * acceptable here, but confirm if these files must be UTF-8 regardless of locale.
    *
    * @throws IOException when the target directory cannot be created or a file cannot be written
    */
   public static void writeCsv(Path dir, StatisticsStore store) throws IOException {
      // Data completion is driven by a different feed than run completion; warn if stats lag behind.
      store.data.values().stream().flatMap(m -> m.values().stream()).filter(d -> !d.isCompleted()).findAny()
            .ifPresent(incomplete -> log.error("Phase {} metric {} was not completed!", incomplete.phase, incomplete.metric));
      File statsDir = dir.toFile();
      if (!statsDir.exists() && !statsDir.mkdirs()) {
         throw new IOException("Cannot create directory " + dir);
      }
      // Stable ordering: phase, then metric, then step id.
      Data[] sorted = store.data.values().stream().flatMap(map -> map.values().stream()).toArray(Data[]::new);
      Arrays.sort(sorted,
            Comparator.comparing((Data d) -> d.phase).thenComparing(d -> d.metric).thenComparingInt(d -> d.stepId));
      try (PrintWriter writer = new PrintWriter(dir + File.separator + "total.csv")) {
         writer.print("Phase,Metric,Start,End,");
         StatisticsSummary.printHeader(writer, StatisticsStore.PERCENTILES);
         // Extension columns are the union of all extension headers, sorted, so every row lines up.
         String[] extensionHeaders = getHeaders(Stream.of(sorted).map(d -> d.total.extensions));
         printExtensionHeaders(writer, extensionHeaders);
         writer.println(",MinSessions,MaxSessions");
         for (Data data : sorted) {
            writer.print(data.phase);
            writer.print(',');
            writer.print(data.metric);
            writer.print(',');
            writer.print(data.total.histogram.getStartTimeStamp());
            writer.print(',');
            writer.print(data.total.histogram.getEndTimeStamp());
            writer.print(',');
            data.total.summary(StatisticsStore.PERCENTILES).printTo(writer, extensionHeaders);
            StatisticsStore.SessionPoolStats sps = store.sessionPoolStats.get(data.phase);
            if (sps == null) {
               // No session data for this phase: keep the columns, leave them empty.
               writer.print(",,");
            } else {
               LowHigh minMax = sps.findMinMax();
               writer.print(',');
               writer.print(minMax.low);
               writer.print(',');
               writer.print(minMax.high);
            }
            writer.println();
         }
      }
      for (Data data : sorted) {
         String filePrefix = dir + File.separator + sanitize(data.phase) + "." + sanitize(data.metric) + "." + data.stepId;
         writeHistogramAndSeries(filePrefix, data.total, data.series);
      }
      String[] agents = store.data.values().stream()
            .flatMap(m -> m.values().stream())
            .flatMap(d -> d.perAgent.keySet().stream())
            .distinct().sorted().toArray(String[]::new);
      for (String agent : agents) {
         try (PrintWriter writer = new PrintWriter(dir + File.separator + "agent." + sanitize(agent) + ".csv")) {
            writer.print("Phase,Metric,Start,End,");
            StatisticsSummary.printHeader(writer, StatisticsStore.PERCENTILES);
            String[] extensionHeaders = getHeaders(
                  Stream.of(sorted).map(d -> d.perAgent.get(agent)).filter(Objects::nonNull).map(s -> s.extensions));
            printExtensionHeaders(writer, extensionHeaders);
            writer.println(",MinSessions,MaxSessions");
            for (Data data : sorted) {
               StatisticsSnapshot agentStats = data.perAgent.get(agent);
               if (agentStats == null) {
                  continue;
               }
               writer.print(data.phase);
               writer.print(',');
               writer.print(data.metric);
               writer.print(',');
               // Timestamps come from the merged total, not the per-agent snapshot.
               writer.print(data.total.histogram.getStartTimeStamp());
               writer.print(',');
               writer.print(data.total.histogram.getEndTimeStamp());
               writer.print(',');
               agentStats.summary(StatisticsStore.PERCENTILES).printTo(writer, extensionHeaders);
               StatisticsStore.SessionPoolStats sps = store.sessionPoolStats.get(data.phase);
               if (sps == null || sps.records.get(agent) == null) {
                  writer.print(",,");
               } else {
                  LowHigh lohi = sps.records.get(agent).stream().map(LowHigh.class::cast)
                        .reduce(LowHigh::combine).orElse(new LowHigh(0, 0));
                  writer.print(',');
                  writer.print(lohi.low);
                  writer.print(',');
                  writer.print(lohi.high);
               }
               writer.println();
            }
         }
         for (Data data : sorted) {
            String filePrefix = dir + File.separator + sanitize(data.phase) + "." + sanitize(data.metric) + "." + data.stepId
                  + ".agent." + agent;
            writeHistogramAndSeries(filePrefix, data.perAgent.get(agent), data.agentSeries.get(agent));
         }
      }
      try (PrintWriter writer = new PrintWriter(dir + File.separator + "failures.csv")) {
         writer.print("Phase,Metric,Message,Start,End,");
         StatisticsSummary.printHeader(writer, StatisticsStore.PERCENTILES);
         String[] extensionHeaders = getHeaders(store.failures.stream().map(f -> f.statistics().extensions));
         printExtensionHeaders(writer, extensionHeaders);
         writer.println();
         for (SLA.Failure failure : store.failures) {
            writer.print(failure.phase());
            writer.print(',');
            writer.print(failure.metric());
            // Message may contain commas; quote it.
            writer.print(",\"");
            writer.print(failure.message());
            writer.print("\",");
            StatisticsSummary summary = failure.statistics().summary(StatisticsStore.PERCENTILES);
            writer.print(summary.startTime);
            writer.print(',');
            writer.print(summary.endTime);
            writer.print(',');
            summary.printTo(writer, extensionHeaders);
            writer.println();
         }
      }
      for (Map.Entry<String, StatisticsStore.SessionPoolStats> entry : store.sessionPoolStats.entrySet()) {
         try (PrintWriter writer = new PrintWriter(dir + File.separator + sanitize(entry.getKey()) + ".sessions.csv")) {
            writer.println("Timestamp,Agent,MinSessions,MaxSessions");
            // Round-robin across agents so samples from the same collection period stay adjacent.
            WriterUtil.printInSync(entry.getValue().records, (agent, record) -> {
               writer.print(record.timestamp);
               writer.print(',');
               writer.print(agent);
               writer.print(',');
               writer.print(record.low);
               writer.print(',');
               writer.println(record.high);
            });
         }
      }
      for (var targetEntry : store.connectionPoolStats.entrySet()) {
         for (var typeEntry : targetEntry.getValue().entrySet()) {
            try (PrintWriter writer = new PrintWriter(dir + File.separator + sanitize(targetEntry.getKey()) + "."
                  + sanitize(typeEntry.getKey()) + ".connections.csv")) {
               writer.println("Timestamp,Agent,MinConnections,MaxConnections");
               WriterUtil.printInSync(typeEntry.getValue(), (agent, record) -> {
                  writer.print(record.timestamp);
                  writer.print(',');
                  writer.print(agent);
                  writer.print(',');
                  writer.print(record.low);
                  writer.print(',');
                  writer.println(record.high);
               });
            }
         }
      }
      try (PrintWriter writer = new PrintWriter(dir + File.separator + "agentCpu.csv")) {
         String[] cpuAgents = store.cpuUsage.values().stream().flatMap(agentMap -> agentMap.keySet().stream()).sorted()
               .distinct().toArray(String[]::new);
         // FIX: the header used to print agent names back-to-back without separators,
         // so it did not line up with the comma-separated data rows below.
         writer.print("phase");
         for (String cpuAgent : cpuAgents) {
            writer.print(',');
            writer.print(cpuAgent);
         }
         writer.println();
         for (var phaseEntry : store.cpuUsage.entrySet()) {
            writer.print(phaseEntry.getKey());
            for (String a : cpuAgents) {
               writer.print(',');
               String usage = phaseEntry.getValue().get(a);
               if (usage != null) {
                  writer.print(usage);
               }
            }
            writer.println();
         }
      }
   }

   /**
    * Replaces path separators in phase/metric/agent names so they are safe as file-name parts.
    * FIX: {@link String#replaceAll} interprets its first argument as a regex; on Windows
    * {@code File.separator} is a single backslash, which is a malformed pattern and would
    * throw {@link java.util.regex.PatternSyntaxException}. Use the literal {@code replace}.
    */
   private static String sanitize(String phase) {
      return phase.replace(File.separator, "_");
   }

   /**
    * Writes {@code <prefix>.histogram.csv} (HdrHistogram percentile distribution) and
    * {@code <prefix>.series.csv} (one summary row per collection period). Either part is
    * skipped when the corresponding input is {@code null}.
    */
   private static void writeHistogramAndSeries(String filePrefix, StatisticsSnapshot total, List<StatisticsSummary> series)
         throws FileNotFoundException {
      if (total != null) {
         try (PrintStream stream = new PrintStream(new FileOutputStream(filePrefix + ".histogram.csv"))) {
            // Scale by 1e6: histogram values are recorded in ns, output in ms — TODO confirm units.
            total.histogram.outputPercentileDistribution(stream, 5, 1000_000.0, true);
         }
      }
      if (series != null) {
         String[] extensionHeaders = getHeaders(series.stream().map(ss -> ss.extensions));
         try (PrintWriter writer = new PrintWriter(filePrefix + ".series.csv")) {
            writer.print("Start,End,");
            StatisticsSummary.printHeader(writer, StatisticsStore.PERCENTILES);
            printExtensionHeaders(writer, extensionHeaders);
            writer.println();
            for (StatisticsSummary summary : series) {
               writer.print(summary.startTime);
               writer.print(',');
               writer.print(summary.endTime);
               writer.print(',');
               summary.printTo(writer, extensionHeaders);
               writer.println();
            }
         }
      }
   }

   /** Appends one comma-prefixed column per extension header. */
   private static void printExtensionHeaders(PrintWriter writer, String[] extensionHeaders) {
      for (String header : extensionHeaders) {
         writer.print(',');
         writer.print(header);
      }
   }

   /**
    * Collects the sorted, distinct union of {@code <extensionKey>.<header>} column names
    * across all provided extension maps.
    */
   private static String[] getHeaders(Stream<? extends Map<String, StatsExtension>> extensions) {
      return extensions
            .flatMap(
                  ext -> ext.entrySet().stream().flatMap(c -> Stream.of(c.getValue().headers()).map(h -> c.getKey() + "." + h)))
            .sorted().distinct().toArray(String[]::new);
   }
}
| java | Apache-2.0 | 70ad9cad7ba105e88d7c62e7b65892ecd288f034 | 2026-01-05T02:38:03.557103Z | false |
Hyperfoil/Hyperfoil | https://github.com/Hyperfoil/Hyperfoil/blob/70ad9cad7ba105e88d7c62e7b65892ecd288f034/clustering/src/main/java/io/hyperfoil/controller/StatisticsStore.java | clustering/src/main/java/io/hyperfoil/controller/StatisticsStore.java | package io.hyperfoil.controller;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.OptionalInt;
import java.util.function.Consumer;
import java.util.function.Function;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import io.hyperfoil.api.config.Benchmark;
import io.hyperfoil.api.config.Phase;
import io.hyperfoil.api.config.SLA;
import io.hyperfoil.api.statistics.StatisticsSnapshot;
import io.hyperfoil.api.statistics.StatisticsSummary;
import io.hyperfoil.controller.model.Histogram;
import io.hyperfoil.controller.model.RequestStats;
import io.hyperfoil.core.util.LowHigh;
/**
 * Controller-side accumulator for per-phase/metric statistics reported by agents,
 * SLA failures, session/connection pool usage and agent CPU usage. Serialized to
 * CSV/JSON by {@code CsvWriter}/{@code JsonWriter}.
 */
public class StatisticsStore {
   // Percentiles included in every generated summary.
   static final double[] PERCENTILES = new double[] { 50, 90, 99, 99.9, 99.99 };
   // Order used by the summary endpoints: start time, then phase, then metric.
   private static final Comparator<RequestStats> REQUEST_STATS_COMPARATOR = Comparator
         .<RequestStats, Long> comparing(rs -> rs.summary.startTime)
         .thenComparing(rs -> rs.phase).thenComparing(rs -> rs.metric);

   private final Benchmark benchmark;
   // Keyed by (phaseId << 16) + stepId, then by metric name.
   // NOTE(review): assumes stepId fits into 16 bits — confirm against step id allocation.
   final Map<Integer, Map<String, Data>> data = new HashMap<>();
   private final Consumer<SLA.Failure> failureHandler;
   final List<SLA.Failure> failures = new ArrayList<>();
   // Cap on recorded failures; the handler is still invoked for failures beyond the cap.
   private final int maxFailures = 100;
   private final Map<Integer, SLA.Provider> slaProviders;
   // Per-phase session pool usage, keyed by agent address inside.
   final Map<String, SessionPoolStats> sessionPoolStats = new HashMap<>();
   // authority -> connection type -> agent address -> samples
   final Map<String, Map<String, Map<String, List<ConnectionPoolStats>>>> connectionPoolStats = new HashMap<>();
   // phase -> agent -> CPU usage (opaque string as reported by the agent)
   final Map<String, Map<String, String>> cpuUsage = new HashMap<>();

   public StatisticsStore(Benchmark benchmark, Consumer<SLA.Failure> failureHandler) {
      this.benchmark = benchmark;
      this.failureHandler = failureHandler;
      // Index SLA providers by step id; duplicate ids must refer to the same provider instance.
      this.slaProviders = benchmark.steps()
            .filter(SLA.Provider.class::isInstance).map(SLA.Provider.class::cast)
            .collect(Collectors.toMap(SLA.Provider::id, Function.identity(), (s1, s2) -> {
               if (s1 != s2) {
                  throw new IllegalStateException();
               }
               return s1;
            }));
   }

   /**
    * Records one statistics snapshot from an agent, lazily creating the {@code Data}
    * holder (with its SLA windows) on first sight of the phase/step/metric combination.
    *
    * @return the result of {@code Data.record} — semantics defined by {@code Data} (not visible here)
    */
   public boolean record(String agentName, int phaseId, int stepId, String metric, StatisticsSnapshot stats) {
      Map<String, Data> map = this.data.computeIfAbsent((phaseId << 16) + stepId, phaseStep -> new HashMap<>());
      Data data = map.get(metric);
      if (data == null) {
         long collectionPeriod = benchmark.statisticsCollectionPeriod();
         // Phase id is expected to exist in the benchmark definition.
         Phase phase = benchmark.phases().stream().filter(p -> p.id() == phaseId).findFirst().get();
         SLA[] sla;
         if (stepId != 0) {
            SLA.Provider slaProvider = slaProviders.get(stepId);
            sla = slaProvider == null ? null : slaProvider.sla();
         } else {
            // stepId 0 carries phase-level custom SLAs keyed by metric.
            sla = phase.customSlas.get(metric);
         }
         // SLAs with a positive window get a sliding-window ring sized in collection periods;
         // the rest are evaluated over the phase total.
         Map<SLA, Window> rings = sla == null ? Collections.emptyMap()
               : Stream.of(sla).filter(s -> s.window() > 0).collect(
                     Collectors.toMap(Function.identity(), s -> new Window((int) (s.window() / collectionPeriod))));
         SLA[] total = sla == null ? new SLA[0] : Stream.of(sla).filter(s -> s.window() <= 0).toArray(SLA[]::new);
         map.put(metric, data = new Data(this, phase.name, phase.isWarmup, stepId, metric, rings, total));
      }
      return data.record(agentName, stats);
   }

   /** Records an externally-reported failure with an empty snapshot carrying only the time range. */
   public void addFailure(String phase, String metric, long startTimestamp, long endTimestamp, String cause) {
      StatisticsSnapshot statistics = new StatisticsSnapshot();
      statistics.histogram.setStartTimeStamp(startTimestamp);
      statistics.histogram.setEndTimeStamp(endTimestamp);
      failures.add(new SLA.Failure(null, phase, metric, statistics, cause));
   }

   /** Marks all data belonging to the given phase as complete. */
   public void completePhase(String phase) {
      for (Map<String, Data> m : this.data.values()) {
         for (Data data : m.values()) {
            if (data.phase.equals(phase)) {
               data.completePhase();
            }
         }
      }
   }

   /**
    * Force-completes any data left incomplete (stats arriving after phase termination),
    * reporting each such case through {@code errorHandler}.
    */
   public void completeAll(Consumer<String> errorHandler) {
      for (Map<String, Data> m : this.data.values()) {
         for (Data data : m.values()) {
            if (!data.isCompleted()) {
               String message = String.format(
                     "Data for %s/%d/%s were not completed when the phase terminated - was the data received after that?",
                     data.phase, data.stepId, data.metric);
               errorHandler.accept(message);
               data.completePhase();
            }
         }
      }
   }

   // When there's only few requests during the phase we could use too short interval for throughput calculation.
   // We cannot do this in completePhase() because that's invoked from the STATS feed and the overall completion
   // is notified from the RESPONSE feed.
   public void adjustPhaseTimestamps(String phase, long start, long completion) {
      for (Map<String, Data> m : this.data.values()) {
         for (Data data : m.values()) {
            if (data.phase.equals(phase)) {
               // Widen the recorded interval to at least [start, completion].
               data.total.histogram.setStartTimeStamp(Math.min(start, data.total.histogram.getStartTimeStamp()));
               data.total.histogram.setEndTimeStamp(Math.max(completion, data.total.histogram.getEndTimeStamp()));
            }
         }
      }
   }

   /** @return true when no SLA failure has been recorded */
   public boolean validateSlas() {
      return failures.isEmpty();
   }

   /**
    * Summarizes the most recent completed collection period per phase/metric.
    * Uses the penultimate sequence id because the latest one is likely still being filled.
    * Entries older than {@code minValidTimestamp} are skipped.
    */
   public List<RequestStats> recentSummary(long minValidTimestamp) {
      ArrayList<RequestStats> result = new ArrayList<>();
      for (Map<String, Data> m : this.data.values()) {
         for (Data data : m.values()) {
            OptionalInt lastSequenceId = data.lastStats.values().stream()
                  .flatMapToInt(map -> map.keySet().stream().mapToInt(Integer::intValue)).max();
            if (lastSequenceId.isEmpty()) {
               continue;
            }
            // We'll use one id before the last one since the last one is likely not completed yet
            int penultimateId = lastSequenceId.getAsInt() - 1;
            StatisticsSnapshot sum = new StatisticsSnapshot();
            data.lastStats.values().stream().map(map -> map.get(penultimateId))
                  .filter(Objects::nonNull).forEach(sum::add);
            if (sum.isEmpty() || sum.histogram.getStartTimeStamp() < minValidTimestamp) {
               continue;
            }
            // Attach failures for this phase; a null failure metric matches any metric.
            List<String> failures = this.failures.stream()
                  .filter(f -> f.phase().equals(data.phase) && (f.metric() == null || f.metric().equals(data.metric)))
                  .map(SLA.Failure::message).collect(Collectors.toList());
            result.add(
                  new RequestStats(data.phase, data.stepId, data.metric, sum.summary(PERCENTILES), failures, data.isWarmup));
         }
      }
      result.sort(REQUEST_STATS_COMPARATOR);
      return result;
   }

   /** Summarizes the phase-total statistics for every phase/metric, sorted for display. */
   public List<RequestStats> totalSummary() {
      ArrayList<RequestStats> result = new ArrayList<>();
      for (Map<String, Data> m : this.data.values()) {
         for (Data data : m.values()) {
            StatisticsSummary last = data.total.summary(PERCENTILES);
            List<String> failures = this.failures.stream()
                  .filter(f -> f.phase().equals(data.phase) && (f.metric() == null || f.metric().equals(data.metric)))
                  .map(SLA.Failure::message).collect(Collectors.toList());
            result.add(new RequestStats(data.phase, data.stepId, data.metric, last, failures, data.isWarmup));
         }
      }
      result.sort(REQUEST_STATS_COMPARATOR);
      return result;
   }

   /** @return the total histogram for phase/step/metric, or null when unknown */
   public Histogram histogram(String phase, int stepId, String metric) {
      Data data = getData(phase, stepId, metric);
      if (data == null) {
         return null;
      }
      return HistogramConverter.convert(phase, metric, data.total.histogram);
   }

   /** @return the per-period summary series for phase/step/metric, or null when unknown */
   public List<StatisticsSummary> series(String phase, int stepId, String metric) {
      Data data = getData(phase, stepId, metric);
      if (data == null) {
         return null;
      }
      return data.series;
   }

   // Resolves the phase name to its id and looks up the packed (phaseId << 16) + stepId key.
   private Data getData(String phase, int stepId, String metric) {
      int phaseId = benchmark.phases().stream().filter(p -> p.name.equals(phase)).mapToInt(p -> p.id).findFirst().orElse(-1);
      Map<String, Data> phaseStepData = data.get((phaseId << 16) + stepId);
      if (phaseStepData == null) {
         return null;
      }
      return phaseStepData.get(metric);
   }

   /** Appends one session-pool usage sample for the given agent and phase. */
   public void recordSessionStats(String address, long timestamp, String phase, int minSessions, int maxSessions) {
      SessionPoolStats sps = this.sessionPoolStats.computeIfAbsent(phase, p -> new SessionPoolStats());
      sps.records.computeIfAbsent(address, a -> new ArrayList<>())
            .add(new SessionPoolRecord(timestamp, minSessions, maxSessions));
   }

   /** Last sample per agent, dropped when older than {@code minValidTimestamp}. */
   public Map<String, Map<String, LowHigh>> recentSessionPoolSummary(long minValidTimestamp) {
      return sessionPoolSummary(records -> {
         SessionPoolRecord record = records.get(records.size() - 1);
         return record.timestamp >= minValidTimestamp ? record : null;
      });
   }

   /** Overall min/max per agent across all samples. */
   public Map<String, Map<String, LowHigh>> totalSessionPoolSummary() {
      return sessionPoolSummary(records -> {
         int low = records.stream().mapToInt(r -> r.low).min().orElse(0);
         int high = records.stream().mapToInt(r -> r.high).max().orElse(0);
         return new LowHigh(low, high);
      });
   }

   // Common shape of the two summaries above: phase -> agent -> LowHigh, skipping
   // empty record lists and null results from the reduction function.
   private Map<String, Map<String, LowHigh>> sessionPoolSummary(Function<List<SessionPoolRecord>, LowHigh> function) {
      Map<String, Map<String, LowHigh>> result = new HashMap<>();
      for (Map.Entry<String, SessionPoolStats> phaseEntry : sessionPoolStats.entrySet()) {
         Map<String, LowHigh> addressSummary = new HashMap<>();
         for (Map.Entry<String, List<SessionPoolRecord>> addressEntry : phaseEntry.getValue().records.entrySet()) {
            List<SessionPoolRecord> records = addressEntry.getValue();
            if (records.isEmpty()) {
               continue;
            }
            LowHigh lohi = function.apply(records);
            if (lohi != null) {
               addressSummary.put(addressEntry.getKey(), lohi);
            }
         }
         if (!addressSummary.isEmpty()) {
            result.put(phaseEntry.getKey(), addressSummary);
         }
      }
      return result;
   }

   /**
    * Appends connection-pool samples reported by one agent.
    *
    * @param stats authority -> connection type -> min/max connections at this timestamp
    */
   public void recordConnectionStats(String address, long timestamp, Map<String, Map<String, LowHigh>> stats) {
      for (var byAuthority : stats.entrySet()) {
         for (var byType : byAuthority.getValue().entrySet()) {
            var authorityData = connectionPoolStats.computeIfAbsent(byAuthority.getKey(), a -> new HashMap<>());
            var typeData = authorityData.computeIfAbsent(byType.getKey(), t -> new HashMap<>());
            var agentData = typeData.computeIfAbsent(address, a -> new ArrayList<>());
            LowHigh value = byType.getValue();
            agentData.add(new ConnectionPoolStats(timestamp, value.low, value.high));
         }
      }
   }

   /**
    * Sums the most recent sample from each agent, per authority and type; an entry is
    * dropped entirely when any agent's last sample is older than ~5 seconds.
    */
   public Map<String, Map<String, LowHigh>> recentConnectionsSummary() {
      Map<String, Map<String, LowHigh>> summary = new HashMap<>();
      long minTimestamp = System.currentTimeMillis() - 5000;
      for (var byAuthority : connectionPoolStats.entrySet()) {
         for (var byType : byAuthority.getValue().entrySet()) {
            // we will simply take last range from every agent
            if (byType.getValue().values().stream().anyMatch(list -> list.get(list.size() - 1).timestamp < minTimestamp)) {
               // the results are too old, we will ignore this
               continue;
            }
            LowHigh sum = byType.getValue().values().stream()
                  .map(list -> (LowHigh) list.get(list.size() - 1))
                  .reduce(LowHigh::sum).orElse(null);
            if (sum != null) {
               summary.computeIfAbsent(byAuthority.getKey(), a -> new HashMap<>()).put(byType.getKey(), sum);
            }
         }
      }
      return summary;
   }

   /**
    * Combines per-agent sample series position-by-position: samples at the same index
    * are summed across agents, and those sums are folded into an overall min/max.
    */
   public Map<String, Map<String, LowHigh>> totalConnectionsSummary() {
      Map<String, Map<String, LowHigh>> summary = new HashMap<>();
      for (var byAuthority : connectionPoolStats.entrySet()) {
         for (var byType : byAuthority.getValue().entrySet()) {
            int maxSize = byType.getValue().values().stream().mapToInt(List::size).max().orElse(0);
            LowHigh total = null;
            for (int i = 0; i < maxSize; ++i) {
               int ii = i;
               total = LowHigh.combine(total, byType.getValue().values().stream()
                     .map(list -> ii < list.size() ? (LowHigh) list.get(ii) : null)
                     .reduce(LowHigh::sum).orElse(null));
            }
            if (total != null) {
               summary.computeIfAbsent(byAuthority.getKey(), a -> new HashMap<>()).put(byType.getKey(), total);
            }
         }
      }
      return summary;
   }

   /** Records CPU usage for an agent in a phase; the first report wins (putIfAbsent). */
   public void recordCpuUsage(String phase, String agentName, String usage) {
      cpuUsage.computeIfAbsent(phase, p -> new HashMap<>()).putIfAbsent(agentName, usage);
   }

   public Map<String, Map<String, String>> cpuUsage() {
      return cpuUsage;
   }

   /**
    * Fixed-size ring buffer of snapshots maintaining a running sum over the window:
    * adding a snapshot evicts (subtracts) the one it overwrites.
    */
   static final class Window {
      private final StatisticsSnapshot[] ring;
      private final StatisticsSnapshot sum = new StatisticsSnapshot();
      private int ptr = 0;

      Window(int size) {
         assert size > 0;
         ring = new StatisticsSnapshot[size];
      }

      void add(StatisticsSnapshot stats) {
         if (ring[ptr] != null) {
            // Evict the oldest snapshot from the running sum before overwriting it.
            sum.subtract(ring[ptr]);
         }
         ring[ptr] = stats;
         sum.add(stats);
         ptr = (ptr + 1) % ring.length;
      }

      public boolean isFull() {
         // The next slot is non-null only after the ring has wrapped around once.
         return ring[ptr] != null;
      }

      public StatisticsSnapshot current() {
         return sum;
      }
   }

   /** Records the failure (up to {@code maxFailures}) and always notifies the handler. */
   void addFailure(SLA.Failure failure) {
      if (failures.size() < maxFailures) {
         failures.add(failure);
      }
      failureHandler.accept(failure);
   }

   /** @return all data entries, sorted by phase, metric, step id */
   public List<Data> getData() {
      Data[] rtrn = data.values().stream().flatMap(map -> map.values().stream()).toArray(Data[]::new);
      Arrays.sort(rtrn,
            Comparator.comparing((Data data) -> data.phase).thenComparing(d -> d.metric).thenComparingInt(d -> d.stepId));
      return Arrays.asList(rtrn);
   }

   // Used when loading a previously serialized store; id is the packed phase/step key.
   void addData(int id, String metric, Data data) {
      this.data.computeIfAbsent(id, i -> new HashMap<>()).put(metric, data);
   }

   public List<SLA.Failure> getFailures() {
      return failures;
   }

   /** Per-phase session pool samples keyed by agent address. */
   static class SessionPoolStats {
      Map<String, List<SessionPoolRecord>> records = new HashMap<>();

      /**
       * Sums samples across agents position-by-position and returns the min/max of
       * those sums over time.
       * NOTE(review): with no records this returns LowHigh(Integer.MAX_VALUE, 0) — confirm
       * callers never hit that case.
       */
      LowHigh findMinMax() {
         int min = Integer.MAX_VALUE;
         int max = 0;
         List<Iterator<SessionPoolRecord>> iterators = records.values().stream()
               .map(List::iterator).collect(Collectors.toList());
         for (;;) {
            LowHigh combined = iterators.stream()
                  .filter(Iterator::hasNext).map(Iterator::next).map(LowHigh.class::cast)
                  .reduce(LowHigh::sum).orElse(null);
            if (combined == null) {
               break;
            }
            min = Math.min(min, combined.low);
            max = Math.max(max, combined.high);
         }
         return new LowHigh(min, max);
      }
   }

   /** One timestamped session pool min/max sample. */
   static class SessionPoolRecord extends LowHigh {
      final long timestamp;

      SessionPoolRecord(long timestamp, int min, int max) {
         super(min, max);
         this.timestamp = timestamp;
      }
   }

   /** One timestamped connection pool min/max sample. */
   static class ConnectionPoolStats extends LowHigh {
      final long timestamp;

      ConnectionPoolStats(long timestamp, int low, int high) {
         super(low, high);
         this.timestamp = timestamp;
      }
   }
}
| java | Apache-2.0 | 70ad9cad7ba105e88d7c62e7b65892ecd288f034 | 2026-01-05T02:38:03.557103Z | false |
Hyperfoil/Hyperfoil | https://github.com/Hyperfoil/Hyperfoil/blob/70ad9cad7ba105e88d7c62e7b65892ecd288f034/clustering/src/main/java/io/hyperfoil/controller/JsonWriter.java | clustering/src/main/java/io/hyperfoil/controller/JsonWriter.java | package io.hyperfoil.controller;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.function.Function;
import org.HdrHistogram.HistogramIterationValue;
import com.fasterxml.jackson.core.JsonGenerator;
import io.hyperfoil.api.Version;
import io.hyperfoil.api.config.SLA;
import io.hyperfoil.api.statistics.StatisticsSnapshot;
import io.hyperfoil.api.statistics.StatisticsSummary;
import io.hyperfoil.core.util.LowHigh;
import io.vertx.core.json.JsonObject;
public class JsonWriter {
static final String RUN_SCHEMA = "http://hyperfoil.io/run-schema/v3.0";
public static void writeArrayJsons(StatisticsStore store, JsonGenerator jGenerator, JsonObject info) throws IOException {
Data[] sorted = store.data.values().stream().flatMap(map -> map.values().stream()).toArray(Data[]::new);
Arrays.sort(sorted,
Comparator.comparing((Data data) -> data.phase).thenComparing(d -> d.metric).thenComparingInt(d -> d.stepId));
jGenerator.writeStartObject(); //root of object
if (info != null && !info.isEmpty()) {
jGenerator.writeFieldName("info");
jGenerator.writeRawValue(info.encode());
}
jGenerator.writeStringField("$schema", RUN_SCHEMA);
jGenerator.writeStringField("version", Version.VERSION);
jGenerator.writeStringField("commit", Version.COMMIT_ID);
jGenerator.writeFieldName("failures");
jGenerator.writeStartArray();
List<SLA.Failure> failures = store.getFailures();
for (SLA.Failure failure : failures) {
jGenerator.writeStartObject();
jGenerator.writeStringField("phase", failure.phase());
jGenerator.writeStringField("metric", failure.metric());
jGenerator.writeStringField("message", failure.message());
jGenerator.writeNumberField("start", failure.statistics().histogram.getStartTimeStamp());
jGenerator.writeNumberField("end", failure.statistics().histogram.getEndTimeStamp());
jGenerator.writeObjectField("percentileResponseTime",
failure.statistics().getPercentiles(StatisticsStore.PERCENTILES));
jGenerator.writeEndObject();
}
jGenerator.writeEndArray();
jGenerator.writeFieldName("stats");
jGenerator.writeStartArray(); //stats array
for (Data data : sorted) {
jGenerator.writeStartObject(); //entry
jGenerator.writeStringField("name", data.phase);
String[] split = parsePhaseName(data.phase, "");
jGenerator.writeStringField("phase", split[0]);
jGenerator.writeStringField("iteration", split[1]);
jGenerator.writeStringField("fork", split[2]);
jGenerator.writeStringField("metric", data.metric);
jGenerator.writeBooleanField("isWarmup", data.isWarmup);
jGenerator.writeFieldName("total");
long numFailures = failures.stream()
.filter(f -> f.phase().equals(data.phase) && (f.metric() == null || f.metric().equals(data.metric))).count();
StatisticsStore.SessionPoolStats sessionPoolStats = store.sessionPoolStats.get(data.phase);
LowHigh minMaxSessions = sessionPoolStats == null ? new LowHigh(0, 0) : sessionPoolStats.findMinMax();
writeTotalValue(jGenerator, data, d -> d.total, minMaxSessions, numFailures);
jGenerator.writeFieldName("histogram");
jGenerator.writeStartObject();
jGenerator.writeFieldName("percentiles");
histogramArray(jGenerator, data.total.histogram.percentiles(5).iterator(), 100);
jGenerator.writeFieldName("linear");
histogramArray(jGenerator, data.total.histogram.linearBucketValues(1_000_000).iterator(), 95);
jGenerator.writeEndObject(); //histogram
jGenerator.writeFieldName("series");
seriesArray(jGenerator, data.series);
jGenerator.writeEndObject(); //entry
}
jGenerator.writeEndArray(); //stats array
jGenerator.writeFieldName("sessions");
jGenerator.writeStartArray(); //phase sessions array
for (Data data : sorted) {
if (store.sessionPoolStats.containsKey(data.phase)) {
jGenerator.writeStartObject(); //session entry
jGenerator.writeStringField("name", data.phase);
String[] split = parsePhaseName(data.phase, "");
jGenerator.writeStringField("phase", split[0]);
jGenerator.writeStringField("iteration", split[1]);
jGenerator.writeStringField("fork", split[2]);
StatisticsStore.SessionPoolStats sps = store.sessionPoolStats.get(data.phase);
Map<String, List<StatisticsStore.SessionPoolRecord>> records = sps != null ? sps.records : Collections.emptyMap();
jGenerator.writeFieldName("sessions");
jGenerator.writeStartArray();
WriterUtil.printInSync(records, (agent, record) -> {
jGenerator.writeStartObject();
jGenerator.writeNumberField("timestamp", record.timestamp);
jGenerator.writeStringField("agent", agent);
jGenerator.writeNumberField("minSessions", record.low);
jGenerator.writeNumberField("maxSessions", record.high);
jGenerator.writeEndObject();
});
jGenerator.writeEndArray(); //sessions array
jGenerator.writeEndObject(); //phase session entry
}
}
jGenerator.writeEndArray();
String[] agents = store.data.values().stream()
.flatMap(m -> m.values().stream())
.flatMap(d -> d.perAgent.keySet().stream())
.distinct().sorted().toArray(String[]::new);
jGenerator.writeFieldName("agents");
jGenerator.writeStartArray(); //agents array
for (String agent : agents) {
jGenerator.writeStartObject(); //agent
jGenerator.writeStringField("name", agent);
jGenerator.writeFieldName("stats");
jGenerator.writeStartArray(); //agent stats array
for (Data data : sorted) {
if (data.perAgent.containsKey(agent)) {
jGenerator.writeStartObject(); // agent stats entry
jGenerator.writeStringField("name", data.phase);
String[] split = parsePhaseName(data.phase, "");
jGenerator.writeStringField("phase", split[0]);
jGenerator.writeStringField("iteration", split[1]);
jGenerator.writeStringField("fork", split[2]);
jGenerator.writeStringField("metric", data.metric);
jGenerator.writeBooleanField("isWarmup", data.isWarmup);
jGenerator.writeFieldName("total");
writeTotalValue(
jGenerator,
data,
d -> d.perAgent.get(agent),
store.sessionPoolStats.getOrDefault(data.phase, new StatisticsStore.SessionPoolStats()).records
.getOrDefault(agent, new ArrayList<>())
.stream()
.map(LowHigh.class::cast)
.reduce(LowHigh::combine)
.orElse(new LowHigh(0, 0)),
-1); // we don't track failures per agent
jGenerator.writeFieldName("histogram");
jGenerator.writeStartObject(); // histograms
jGenerator.writeFieldName("percentiles");
histogramArray(jGenerator, data.perAgent.get(agent).histogram.percentiles(5).iterator(), 100);
jGenerator.writeFieldName("linear");
histogramArray(jGenerator, data.perAgent.get(agent).histogram.linearBucketValues(1_000_000).iterator(), 95);
jGenerator.writeEndObject(); // histograms
jGenerator.writeFieldName("series");
seriesArray(jGenerator, data.agentSeries.get(agent));
jGenerator.writeEndObject(); // agent stats entry
}
}
jGenerator.writeEndArray(); //agent stats array
jGenerator.writeEndObject(); //agent
}
jGenerator.writeEndArray(); //agents array
jGenerator.writeObjectFieldStart("connections");
for (var targetEntry : store.connectionPoolStats.entrySet()) {
jGenerator.writeObjectFieldStart(targetEntry.getKey());
for (var typeEntry : targetEntry.getValue().entrySet()) {
jGenerator.writeArrayFieldStart(typeEntry.getKey());
WriterUtil.printInSync(typeEntry.getValue(), (agent, record) -> {
jGenerator.writeStartObject();
jGenerator.writeNumberField("timestamp", record.timestamp);
jGenerator.writeStringField("agent", agent);
jGenerator.writeNumberField("min", record.low);
jGenerator.writeNumberField("max", record.high);
jGenerator.writeEndObject();
});
jGenerator.writeEndArray(); // type
}
jGenerator.writeEndObject(); // endpoint
}
jGenerator.writeEndObject(); // connections
jGenerator.writeObjectFieldStart("agentCpu");
for (var phaseEntry : store.cpuUsage.entrySet()) {
jGenerator.writeObjectFieldStart(phaseEntry.getKey());
for (var agentEntry : phaseEntry.getValue().entrySet()) {
jGenerator.writeStringField(agentEntry.getKey(), agentEntry.getValue());
}
jGenerator.writeEndObject();
}
jGenerator.writeEndObject(); // agentCpu
jGenerator.writeEndObject(); //root of object
}
private static String[] parsePhaseName(String phase, String defaultName) {
String[] rtrn = new String[3];
if (phase.contains("/")) {
rtrn[0] = phase.substring(0, phase.indexOf("/"));
phase = phase.substring(phase.indexOf("/") + 1);
} else {
rtrn[0] = phase;
phase = "";
}
if (phase.isEmpty()) {
rtrn[1] = defaultName;
rtrn[2] = defaultName;
return rtrn;
}
if (phase.contains("/")) {
rtrn[1] = phase.substring(0, phase.indexOf("/"));
phase = phase.substring(phase.indexOf("/") + 1);
if (phase.isEmpty()) {
phase = defaultName;
}
rtrn[2] = phase;
} else {
//TODO determine if it is an iteration or fork
if (phase.matches("[0-9]+")) {
rtrn[1] = phase;
rtrn[2] = defaultName;
} else {
rtrn[1] = defaultName;
rtrn[2] = phase;
}
}
return rtrn;
}
private static void histogramArray(JsonGenerator jGenerator, Iterator<HistogramIterationValue> iter, double maxPercentile)
throws IOException {
jGenerator.writeStartArray(); //start histogram
double from = -1, to = -1, percentileTo = -1;
long total = 0;
HistogramIterationValue iterValue = null;
while (iter.hasNext()) {
iterValue = iter.next();
if (iterValue.getCountAddedInThisIterationStep() == 0) {
if (from < 0) {
from = iterValue.getValueIteratedFrom();
total = iterValue.getTotalCountToThisValue();
}
to = iterValue.getValueIteratedTo();
percentileTo = iterValue.getPercentileLevelIteratedTo();
} else {
if (from >= 0) {
writeBucket(jGenerator, from, to, percentileTo, 0, total);
from = -1;
}
writeBucket(jGenerator,
iterValue.getDoubleValueIteratedFrom(),
iterValue.getDoubleValueIteratedTo(),
iterValue.getPercentileLevelIteratedTo(),
iterValue.getCountAddedInThisIterationStep(),
iterValue.getTotalCountToThisValue());
}
if (iterValue.getPercentileLevelIteratedTo() > maxPercentile) {
break;
}
}
if (from >= 0) {
writeBucket(jGenerator, from, to, percentileTo, 0, total);
}
if (iterValue != null) {
from = iterValue.getDoubleValueIteratedTo();
total = iterValue.getTotalCountToThisValue();
while (iter.hasNext()) {
iterValue = iter.next();
}
if (iterValue.getTotalCountToThisValue() != total) {
writeBucket(jGenerator, from, iterValue.getDoubleValueIteratedTo(),
iterValue.getPercentileLevelIteratedTo(),
iterValue.getTotalCountToThisValue() - total,
iterValue.getTotalCountToThisValue());
}
}
jGenerator.writeEndArray(); //end histogram
}
private static void writeBucket(JsonGenerator jGenerator, double from, double to, double percentile, long count,
long totalCount) throws IOException {
jGenerator.writeStartObject();
jGenerator.writeNumberField("from", from);
jGenerator.writeNumberField("to", to);
jGenerator.writeNumberField("percentile", percentile / 100.0D);
jGenerator.writeNumberField("count", count);
jGenerator.writeNumberField("totalCount", totalCount);
jGenerator.writeEndObject();
}
private static void seriesArray(JsonGenerator jGenerator, List<StatisticsSummary> series) throws IOException {
jGenerator.writeStartArray(); //series
if (series != null) {
for (StatisticsSummary summary : series) {
jGenerator.writeObject(summary);
}
}
jGenerator.writeEndArray(); //end series
jGenerator.flush();
}
   /**
    * Writes one aggregated (whole-run) statistics object for a phase/metric pair:
    * identification fields, start/end timestamps, the percentile summary of the selected
    * snapshot, and optionally failure and session-pool min/max counts.
    *
    * @param selector       picks which snapshot of {@code data} to summarize
    * @param minMaxSessions session pool extremes; omitted from output when {@code null}
    * @param failures       failure count; omitted from output when negative
    */
   private static void writeTotalValue(JsonGenerator generator, Data data, Function<Data, StatisticsSnapshot> selector,
         LowHigh minMaxSessions, long failures) throws IOException {
      StatisticsSnapshot snapshot = selector.apply(data);
      generator.writeStartObject();
      generator.writeStringField("phase", data.phase);
      generator.writeStringField("metric", data.metric);
      // NOTE(review): timestamps are always taken from data.total even when the selector picks
      // a different snapshot for the summary - confirm this asymmetry is intentional.
      generator.writeNumberField("start", data.total.histogram.getStartTimeStamp());
      generator.writeNumberField("end", data.total.histogram.getEndTimeStamp());
      generator.writeObjectField("summary", snapshot.summary(StatisticsStore.PERCENTILES));
      if (failures >= 0) {
         generator.writeNumberField("failures", failures);
      }
      if (minMaxSessions != null) {
         generator.writeNumberField("minSessions", minMaxSessions.low);
         generator.writeNumberField("maxSessions", minMaxSessions.high);
      }
      generator.writeEndObject();
   }
}
| java | Apache-2.0 | 70ad9cad7ba105e88d7c62e7b65892ecd288f034 | 2026-01-05T02:38:03.557103Z | false |
Hyperfoil/Hyperfoil | https://github.com/Hyperfoil/Hyperfoil/blob/70ad9cad7ba105e88d7c62e7b65892ecd288f034/clustering/src/main/java/io/hyperfoil/deploy/ssh/SshDeployer.java | clustering/src/main/java/io/hyperfoil/deploy/ssh/SshDeployer.java | package io.hyperfoil.deploy.ssh;
import java.io.IOException;
import java.io.InputStream;
import java.net.MalformedURLException;
import java.net.URL;
import java.nio.file.Paths;
import java.security.GeneralSecurityException;
import java.util.function.Consumer;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.apache.sshd.client.SshClient;
import org.apache.sshd.client.future.AuthFuture;
import org.apache.sshd.client.future.ConnectFuture;
import org.apache.sshd.client.session.ClientSession;
import org.apache.sshd.common.PropertyResolverUtils;
import org.apache.sshd.common.util.GenericUtils;
import org.apache.sshd.common.util.io.resource.URLResource;
import org.apache.sshd.common.util.security.SecurityUtils;
import org.apache.sshd.core.CoreModuleProperties;
import org.kohsuke.MetaInfServices;
import io.hyperfoil.api.config.Agent;
import io.hyperfoil.api.config.Benchmark;
import io.hyperfoil.api.config.BenchmarkDefinitionException;
import io.hyperfoil.api.deployment.DeployedAgent;
import io.hyperfoil.api.deployment.Deployer;
import io.hyperfoil.api.deployment.DeploymentException;
import io.hyperfoil.internal.Controller;
import io.vertx.core.AsyncResult;
import io.vertx.core.Future;
import io.vertx.core.Handler;
/**
 * {@link Deployer} implementation that starts Hyperfoil agents on remote hosts over SSH.
 * Connection settings (host, port, user, key file, working directory, extra JVM args, cpu
 * pinning) come from the agent's inline config ({@code user@host:port}) and/or its properties;
 * explicit properties take precedence.
 */
public class SshDeployer implements Deployer {
   private static final Logger log = LogManager.getLogger(SshDeployer.class);
   // Milliseconds to wait for SSH authentication to complete.
   static final long TIMEOUT = 10000;

   private final SshClient client;

   private SshDeployer() {
      client = SshClient.setUpDefaultClient();
      // Sessions must stay open for the whole run; disable idle/read timeouts.
      PropertyResolverUtils.updateProperty(client, CoreModuleProperties.IDLE_TIMEOUT.getName(), Long.MAX_VALUE);
      PropertyResolverUtils.updateProperty(client, CoreModuleProperties.NIO2_READ_TIMEOUT.getName(), Long.MAX_VALUE);
      PropertyResolverUtils.updateProperty(client, CoreModuleProperties.NIO_WORKERS.getName(), 1);
      client.start();
      // NOTE(review): accepts any server key, i.e. host-key verification is disabled - confirm intended.
      client.setServerKeyVerifier((clientSession1, remoteAddress, serverKey) -> true);
   }

   /**
    * Deploys and starts a single agent.
    *
    * @return the deployed agent handle, or {@code null} when deployment failed
    *         (the failure is reported through {@code exceptionHandler})
    */
   @Override
   public DeployedAgent start(Agent agent, String runId, Benchmark benchmark, Consumer<Throwable> exceptionHandler) {
      String hostname = null, username = null;
      String sshKey = "id_rsa";
      int port = -1;
      String dir = null, extras = null, cpu = null;
      if (agent.inlineConfig != null) {
         URL url;
         try {
            // the scheme will be later ignored
            url = new URL("http://" + agent.inlineConfig);
         } catch (MalformedURLException e) {
            throw new BenchmarkDefinitionException("Failed to parse host:port", e);
         }
         hostname = url.getHost();
         port = url.getPort();
         username = url.getUserInfo();
      }
      if (agent.properties != null) {
         // Explicit properties override whatever was parsed from the inline config.
         hostname = agent.properties.getOrDefault("host", hostname);
         username = agent.properties.getOrDefault("user", username);
         sshKey = agent.properties.getOrDefault("sshKey", sshKey);
         String portString = agent.properties.get("port");
         if (portString != null) {
            try {
               port = Integer.parseInt(portString);
            } catch (NumberFormatException e) {
               // Logged only; the port falls back to the inline value or the default 22 below.
               log.error("Failed to parse port number for {}: {}", agent.name, portString);
            }
         }
         dir = agent.properties.get("dir");
         extras = agent.properties.get("extras");
         cpu = agent.properties.get("cpu");
      }
      // Fallbacks: agent name as host, port 22, local user name, controller root dir.
      if (hostname == null) {
         hostname = agent.name;
      }
      if (port < 0) {
         port = 22;
      }
      if (username == null) {
         username = System.getProperty("user.name");
      }
      if (dir == null) {
         dir = Controller.ROOT_DIR.toString();
      }
      try {
         SshDeployedAgent deployedAgent = new SshDeployedAgent(agent.name, runId, username, hostname, sshKey, port, dir, extras,
               cpu);
         ClientSession session = connectAndLogin(sshKey, username, hostname, port);
         deployedAgent.deploy(session, exceptionHandler);
         return deployedAgent;
      } catch (IOException | GeneralSecurityException e) {
         exceptionHandler.accept(new DeploymentException(
               "Cannot connect to agent " + agent.name + " at " + username + "@" + hostname + ":" + port, e));
         return null;
      } catch (DeploymentException e) {
         exceptionHandler.accept(e);
         return null;
      }
   }

   @Override
   public boolean hasControllerLog() {
      // The controller runs locally; there is no remote controller log to fetch via SSH.
      return false;
   }

   @Override
   public void downloadControllerLog(long offset, long maxLength, String destinationFile, Handler<AsyncResult<Void>> handler) {
      throw new UnsupportedOperationException();
   }

   /** Fetches a slice of the remote agent's log file into {@code destinationFile}. */
   @Override
   public void downloadAgentLog(DeployedAgent deployedAgent, long offset, long maxLength, String destinationFile,
         Handler<AsyncResult<Void>> handler) {
      SshDeployedAgent sshAgent = (SshDeployedAgent) deployedAgent;
      try {
         // A fresh session is opened for the download rather than reusing the deployment session.
         ClientSession session = connectAndLogin(sshAgent.sshKey, sshAgent.username, sshAgent.hostname, sshAgent.port);
         sshAgent.downloadLog(session, offset, maxLength, destinationFile, handler);
      } catch (IOException | DeploymentException | GeneralSecurityException e) {
         handler.handle(Future.failedFuture(e));
      }
   }

   /**
    * Opens an SSH session and authenticates using the public key identity loaded
    * from {@code ~/.ssh/<sshKey>}. Password-protected keys are not supported
    * (the password provider returns {@code null}).
    */
   private ClientSession connectAndLogin(String sshKey, String username, String hostname, int port)
         throws IOException, GeneralSecurityException, DeploymentException {
      ConnectFuture connect = client.connect(username, hostname, port).verify(15000);
      ClientSession session = connect.getSession();
      String userHome = System.getProperty("user.home");
      URLResource identity;
      identity = new URLResource(Paths.get(userHome, ".ssh", sshKey).toUri().toURL());
      try (InputStream inputStream = identity.openInputStream()) {
         // Only the first key pair from the identity file is used.
         session.addPublicKeyIdentity(GenericUtils.head(SecurityUtils.loadKeyPairIdentities(
               session,
               identity,
               inputStream,
               (s, resourceKey, retryIndex) -> null)));
      }
      AuthFuture auth = session.auth();
      if (!auth.await(TIMEOUT)) {
         throw new DeploymentException("Not authenticated within timeout", null);
      }
      if (!auth.isSuccess()) {
         throw new DeploymentException("Failed to authenticate", auth.getException());
      }
      return session;
   }

   @Override
   public void close() {
      client.stop();
   }

   /** Service-loader factory registering this deployer under the name "ssh". */
   @MetaInfServices(Deployer.Factory.class)
   public static class Factory implements Deployer.Factory {
      @Override
      public String name() {
         return "ssh";
      }

      @Override
      public SshDeployer create() {
         return new SshDeployer();
      }
   }
}
| java | Apache-2.0 | 70ad9cad7ba105e88d7c62e7b65892ecd288f034 | 2026-01-05T02:38:03.557103Z | false |
Hyperfoil/Hyperfoil | https://github.com/Hyperfoil/Hyperfoil/blob/70ad9cad7ba105e88d7c62e7b65892ecd288f034/clustering/src/main/java/io/hyperfoil/deploy/ssh/SshDeployedAgent.java | clustering/src/main/java/io/hyperfoil/deploy/ssh/SshDeployedAgent.java | package io.hyperfoil.deploy.ssh;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.io.PrintStream;
import java.nio.charset.StandardCharsets;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import java.util.function.Consumer;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.apache.sshd.client.channel.ChannelShell;
import org.apache.sshd.client.channel.ClientChannel;
import org.apache.sshd.client.future.OpenFuture;
import org.apache.sshd.client.session.ClientSession;
import org.apache.sshd.common.io.IoOutputStream;
import org.apache.sshd.common.io.IoReadFuture;
import org.apache.sshd.common.util.buffer.ByteArrayBuffer;
import org.apache.sshd.common.util.io.output.NullOutputStream;
import org.apache.sshd.scp.client.ScpClient;
import org.apache.sshd.scp.client.ScpClientCreator;
import org.apache.sshd.sftp.client.SftpClient;
import org.apache.sshd.sftp.client.SftpClientFactory;
import io.hyperfoil.api.BenchmarkExecutionException;
import io.hyperfoil.api.deployment.DeployedAgent;
import io.hyperfoil.api.deployment.DeploymentException;
import io.hyperfoil.internal.Properties;
import io.vertx.core.AsyncResult;
import io.vertx.core.Future;
import io.vertx.core.Handler;
/**
 * Handle for one agent process on a remote host, driven through an interactive SSH shell.
 * Deployment syncs the local classpath jars into {@code <dir>/agentlib} (skipping files whose
 * md5 already matches), then launches the agent JVM in the background with its output
 * redirected to {@code agent.<name>.log}. Command completion is detected by setting the shell
 * prompt to a unique sentinel string and scanning the output for it.
 */
public class SshDeployedAgent implements DeployedAgent {
   private static final Logger log = LogManager.getLogger(SshDeployedAgent.class);
   // Sentinel used as PS1 so command completion can be recognized in the shell output stream.
   private static final String PROMPT = "<_#%@_hyperfoil_@%#_>";
   private static final String DEBUG_ADDRESS = Properties.get(Properties.AGENT_DEBUG_PORT, null);
   private static final String DEBUG_SUSPEND = Properties.get(Properties.AGENT_DEBUG_SUSPEND, "n");
   // Remote subdirectory (under dir) where classpath jars and the log4j config are uploaded.
   private static final String AGENTLIB = "/agentlib";

   final String name;
   final String runId;
   final String username;
   final String hostname;
   final String sshKey;
   final int port;
   final String dir;
   final String extras;
   final String cpu;

   private ClientSession session;
   private ChannelShell shellChannel;
   private Consumer<Throwable> exceptionHandler;
   private ScpClient scpClient;
   // Writes commands into the remote shell's stdin (backed by the async channel input).
   private PrintStream commandStream;

   public SshDeployedAgent(String name, String runId, String username, String hostname, String sshKey, int port, String dir,
         String extras, String cpu) {
      this.name = name;
      this.runId = runId;
      this.username = username;
      this.hostname = hostname;
      this.sshKey = sshKey;
      this.port = port;
      this.dir = dir;
      this.extras = extras;
      this.cpu = cpu;
   }

   /** Closes the command stream, the shell channel and the SSH session (best-effort). */
   @Override
   public void stop() {
      log.info("Stopping agent {}", name);
      if (commandStream != null) {
         commandStream.close();
      }
      try {
         if (shellChannel != null) {
            shellChannel.close();
         }
      } catch (IOException e) {
         log.error("Failed closing shell", e);
      }
      try {
         session.close();
      } catch (IOException e) {
         log.error("Failed closing SSH session", e);
      }
   }

   /**
    * Opens a remote shell, syncs classpath jars via scp and starts the agent JVM.
    * Failures are reported through {@code exceptionHandler}; the method itself does not throw.
    */
   public void deploy(ClientSession session, Consumer<Throwable> exceptionHandler) {
      this.session = session;
      this.exceptionHandler = exceptionHandler;
      this.scpClient = ScpClientCreator.instance().createScpClient(session);

      try {
         this.shellChannel = session.createShellChannel();
         shellChannel.setStreaming(ClientChannel.Streaming.Async);
         // stderr is discarded; the agent's own output is redirected to a log file below.
         shellChannel.setErr(new NullOutputStream());
         OpenFuture open = shellChannel.open();
         if (!open.await(SshDeployer.TIMEOUT)) {
            exceptionHandler.accept(new DeploymentException("Shell not opened within timeout", null));
         }
         if (!open.isOpened()) {
            exceptionHandler.accept(new DeploymentException("Could not open shell", open.getException()));
         }
      } catch (IOException e) {
         exceptionHandler.accept(new DeploymentException("Failed to open shell", e));
      }

      IoOutputStream inStream = shellChannel.getAsyncIn();
      // Adapter turning the async SSH input into a blocking OutputStream for PrintStream use.
      commandStream = new PrintStream(new OutputStream() {
         final ByteArrayBuffer buffer = new ByteArrayBuffer();

         @Override
         public void write(byte[] b) throws IOException {
            buffer.clear(false);
            buffer.putRawBytes(b, 0, b.length);
            if (!inStream.writeBuffer(buffer).await()) {
               throw new IOException("Failed waiting for the write");
            }
         }

         @Override
         public void write(byte[] b, int off, int len) throws IOException {
            buffer.clear(false);
            buffer.putRawBytes(b, off, len);
            if (!inStream.writeBuffer(buffer).await()) {
               throw new IOException("Failed waiting for the write");
            }
         }

         @Override
         public void write(int b) throws IOException {
            buffer.clear(false);
            buffer.putByte((byte) b);
            if (!inStream.writeBuffer(buffer).await()) {
               throw new IOException("Failed waiting for the write");
            }
         }

         @Override
         public void close() throws IOException {
            inStream.close();
         }
      });

      // Install the sentinel prompt so runCommand() can detect command completion.
      runCommand("unset PROMPT_COMMAND; export PS1='" + PROMPT + "'", true);
      runCommand("mkdir -p " + dir + AGENTLIB, true);

      Map<String, String> remoteMd5 = getRemoteMd5();
      Map<String, String> localMd5 = getLocalMd5();
      if (localMd5 == null) {
         // Interrupted while computing checksums; abort deployment.
         return;
      }
      StringBuilder startAgentCommmand = new StringBuilder();
      if (cpu != null) {
         startAgentCommmand.append("taskset -c ").append(cpu).append(' ');
      }
      String java = Properties.get(Properties.AGENT_JAVA_EXECUTABLE, "java");
      startAgentCommmand.append(java).append(" -cp ");
      for (Map.Entry<String, String> entry : localMd5.entrySet()) {
         int lastSlash = entry.getKey().lastIndexOf("/");
         String filename = lastSlash < 0 ? entry.getKey() : entry.getKey().substring(lastSlash + 1);
         // remove() leaves only stale remote files behind for the cleanup below.
         String remoteChecksum = remoteMd5.remove(filename);
         if (!entry.getValue().equals(remoteChecksum)) {
            log.debug("MD5 mismatch {}/{}, copying {}", entry.getValue(), remoteChecksum, entry.getKey());
            try {
               scpClient.upload(entry.getKey(), dir + AGENTLIB + "/" + filename, ScpClient.Option.PreserveAttributes);
            } catch (IOException e) {
               exceptionHandler.accept(e);
               return;
            }
         }
         startAgentCommmand.append(dir).append(AGENTLIB).append('/').append(filename).append(':');
      }
      if (!remoteMd5.isEmpty()) {
         StringBuilder rmCommand = new StringBuilder();
         // Drop those files that are not on classpath
         rmCommand.append("rm --interactive=never ");
         for (Map.Entry<String, String> entry : remoteMd5.entrySet()) {
            rmCommand.append(' ').append(dir).append(AGENTLIB).append('/').append(entry.getKey());
         }
         runCommand(rmCommand.toString(), true);
      }

      // Optionally ship the controller's log4j2 config so the agent logs with the same setup.
      String log4jConfigurationFile = Properties.get(Properties.LOG4J2_CONFIGURATION_FILE, null);
      if (log4jConfigurationFile != null) {
         if (log4jConfigurationFile.startsWith("file://")) {
            log4jConfigurationFile = log4jConfigurationFile.substring("file://".length());
         }
         String filename = log4jConfigurationFile.substring(log4jConfigurationFile.lastIndexOf(File.separatorChar) + 1);
         try {
            String targetFile = dir + AGENTLIB + "/" + filename;
            scpClient.upload(log4jConfigurationFile, targetFile, ScpClient.Option.PreserveAttributes);
            startAgentCommmand.append(" -D").append(Properties.LOG4J2_CONFIGURATION_FILE)
                  .append("=file://").append(targetFile);
         } catch (IOException e) {
            log.error("Cannot copy log4j2 configuration file.", e);
         }
      }

      startAgentCommmand.append(" -Dvertx.logger-delegate-factory-class-name=io.vertx.core.logging.Log4j2LogDelegateFactory");
      startAgentCommmand.append(" -D").append(Properties.AGENT_NAME).append('=').append(name);
      startAgentCommmand.append(" -D").append(Properties.RUN_ID).append('=').append(runId);
      startAgentCommmand.append(" -D").append(Properties.CONTROLLER_CLUSTER_IP).append('=')
            .append(Properties.get(Properties.CONTROLLER_CLUSTER_IP, ""));
      startAgentCommmand.append(" -D").append(Properties.CONTROLLER_CLUSTER_PORT).append('=')
            .append(Properties.get(Properties.CONTROLLER_CLUSTER_PORT, ""));
      if (DEBUG_ADDRESS != null) {
         startAgentCommmand.append(" -agentlib:jdwp=transport=dt_socket,server=y,suspend=").append(DEBUG_SUSPEND)
               .append(",address=").append(DEBUG_ADDRESS);
      }
      if (extras != null) {
         startAgentCommmand.append(" ").append(extras);
      }
      // '\$' keeps the shell from expanding the inner-class separator; output goes to the agent log.
      startAgentCommmand.append(" io.hyperfoil.Hyperfoil\\$Agent &> ")
            .append(dir).append(File.separatorChar).append("agent.").append(name).append(".log");

      String startAgent = startAgentCommmand.toString();
      log.info("Starting agent {}", name);
      log.debug("Command: {}", startAgent);
      runCommand(startAgent, false);
      // The agent runs in the foreground of this shell: seeing the prompt again means it exited.
      onPrompt(new StringBuilder(), new ByteArrayBuffer(),
            () -> exceptionHandler.accept(new BenchmarkExecutionException(
                  "Agent process terminated prematurely. Hint: type 'log " + name + "' to see agent output.")));
   }

   /**
    * Asynchronously scans shell output until the sentinel prompt appears, then runs
    * {@code completion}. Only the last PROMPT-length tail of output is retained to bound memory.
    */
   private void onPrompt(StringBuilder sb, ByteArrayBuffer buffer, Runnable completion) {
      buffer.clear(false);
      shellChannel.getAsyncOut().read(buffer).addListener(future -> {
         byte[] buf = new byte[future.getRead()];
         future.getBuffer().getRawBytes(buf);
         String str = new String(buf, StandardCharsets.UTF_8);
         log.info("Read: {}", str);
         sb.append(str);
         if (sb.indexOf(PROMPT) >= 0) {
            completion.run();
         } else {
            // Keep only the tail; the prompt could still arrive split across reads.
            if (sb.length() >= PROMPT.length()) {
               sb.delete(0, sb.length() - PROMPT.length());
            }
            onPrompt(sb, buffer, completion);
         }
      });
   }

   /**
    * Sends a command to the remote shell. When {@code wait} is true, blocks until the sentinel
    * prompt reappears and returns the command's output (without the echoed command line);
    * otherwise returns {@code null} as soon as the echo line was consumed.
    * Errors are reported via {@code exceptionHandler} and yield {@code null}.
    */
   private String runCommand(String cmd, boolean wait) {
      log.trace("Running command {}", cmd);
      commandStream.println(cmd);
      // add one more empty command so that we get PROMPT on the line alone
      commandStream.println();
      commandStream.flush();
      StringBuilder lines = new StringBuilder();
      ByteArrayBuffer buffer = new ByteArrayBuffer();
      byte[] buf = new byte[buffer.capacity()];
      try {
         for (;;) {
            buffer.clear(false);
            IoReadFuture future = shellChannel.getAsyncOut().read(buffer);
            if (!future.await(10, TimeUnit.SECONDS)) {
               exceptionHandler.accept(new BenchmarkExecutionException("Timed out waiting for SSH output"));
               return null;
            }
            buffer.getRawBytes(buf, 0, future.getRead());
            String line = new String(buf, 0, future.getRead(), StandardCharsets.UTF_8);
            int newLine = line.indexOf('\n');
            if (newLine >= 0) {
               if (!wait) {
                  return null;
               }
               // first line should be echo of the command and we'll ignore that
               lines.append(line.substring(newLine + 1));
               break;
            }
         }
         for (;;) {
            // The prompt on its own line (from the extra empty command) marks end of output.
            int prompt = lines.lastIndexOf(PROMPT + "\r\n");
            if (prompt >= 0) {
               lines.delete(prompt, lines.length());
               return lines.toString();
            }
            buffer.clear(false);
            IoReadFuture future = shellChannel.getAsyncOut().read(buffer);
            if (!future.await(10, TimeUnit.SECONDS)) {
               exceptionHandler.accept(new BenchmarkExecutionException("Timed out waiting for SSH output"));
               return null;
            }
            buffer.getRawBytes(buf, 0, future.getRead());
            lines.append(new String(buf, 0, future.getRead(), StandardCharsets.UTF_8));
         }
      } catch (IOException e) {
         exceptionHandler.accept(new DeploymentException("Error reading from shell", e));
         return null;
      }
   }

   /**
    * Computes md5 checksums of the local classpath jars by invoking the local 'md5sum' binary.
    *
    * @return map of absolute jar path to checksum, or {@code null} if interrupted
    */
   private Map<String, String> getLocalMd5() {
      String classpath = System.getProperty("java.class.path");
      Map<String, String> md5map = new HashMap<>();
      for (String file : classpath.split(":")) {
         if (!file.endsWith(".jar")) {
            // ignore folders etc...
            continue;
         }
         try {
            Process process = new ProcessBuilder("md5sum", file).start();
            process.waitFor();
            try (BufferedReader reader = new BufferedReader(new InputStreamReader(process.getInputStream()))) {
               String line = reader.readLine();
               if (line == null) {
                  log.warn("No output for md5sum {}", file);
                  continue;
               }
               int space = line.indexOf(' ');
               if (space < 0) {
                  log.warn("Wrong output for md5sum {}: {}", file, line);
                  continue;
               }
               String checksum = line.substring(0, space);
               md5map.put(file, checksum);
            }
         } catch (IOException e) {
            log.info("Cannot get md5sum for {}", file, e);
         } catch (InterruptedException e) {
            log.info("Interrupted waiting for md5sum{}", file);
            Thread.currentThread().interrupt();
            return null;
         }
      }
      return md5map;
   }

   /**
    * Lists md5 checksums of files already present in the remote agentlib directory
    * by running 'md5sum' through the shell and parsing its output.
    *
    * @return map of bare file name to checksum (empty when the directory has no files)
    */
   private Map<String, String> getRemoteMd5() {
      String[] lines = runCommand("md5sum " + dir + AGENTLIB + "/*", true).split("\r*\n");
      Map<String, String> md5map = new HashMap<>();
      for (String line : lines) {
         if (line.isEmpty()) {
            continue;
         } else if (line.endsWith("No such file or directory")) {
            // Empty agentlib dir: the glob did not match anything.
            break;
         }
         int space = line.indexOf(' ');
         if (space < 0)
            break;
         String checksum = line.substring(0, space);
         int fileIndex = line.lastIndexOf('/');
         if (fileIndex < 0) {
            fileIndex = space;
         }
         String file = line.substring(fileIndex + 1).trim();
         md5map.put(file, checksum);
      }
      return md5map;
   }

   /**
    * Copies up to {@code maxLength} bytes of the remote agent log, starting at {@code offset},
    * into {@code destinationFile} via SFTP, and reports the result through {@code handler}.
    */
   public void downloadLog(ClientSession session, long offset, long maxLength, String destinationFile,
         Handler<AsyncResult<Void>> handler) {
      try (SftpClient sftpClient = SftpClientFactory.instance().createSftpClient(session)) {
         try (SftpClient.CloseableHandle handle = sftpClient.open(dir + File.separatorChar + "agent." + name + ".log")) {
            byte[] buffer = new byte[65536];
            try (FileOutputStream output = new FileOutputStream(destinationFile)) {
               long readOffset = offset;
               long totalRead = 0;
               for (;;) {
                  long remaining = maxLength - totalRead;
                  if (remaining <= 0) {
                     break;
                  }
                  int nread = sftpClient.read(handle, readOffset, buffer, 0,
                        Math.toIntExact(Math.min(remaining, buffer.length)));
                  if (nread < 0) {
                     // EOF reached before maxLength bytes were read.
                     break;
                  }
                  output.write(buffer, 0, nread);
                  readOffset += nread;
                  totalRead += nread;
               }
            }
         }
         handler.handle(Future.succeededFuture());
      } catch (IOException e) {
         handler.handle(Future.failedFuture(e));
      }
   }
}
| java | Apache-2.0 | 70ad9cad7ba105e88d7c62e7b65892ecd288f034 | 2026-01-05T02:38:03.557103Z | false |
Hyperfoil/Hyperfoil | https://github.com/Hyperfoil/Hyperfoil/blob/70ad9cad7ba105e88d7c62e7b65892ecd288f034/clustering/src/main/java/io/hyperfoil/clustering/SessionStatsSender.java | clustering/src/main/java/io/hyperfoil/clustering/SessionStatsSender.java | package io.hyperfoil.clustering;
import java.util.HashMap;
import java.util.Map;
import io.hyperfoil.clustering.messages.SessionStatsMessage;
import io.hyperfoil.core.impl.SessionStatsConsumer;
import io.hyperfoil.core.util.LowHigh;
import io.vertx.core.eventbus.EventBus;
/**
 * Accumulates per-phase session pool min/max counts and forwards them to the controller
 * as a single {@link SessionStatsMessage} over the event bus. Not thread-safe; intended
 * to be used from a single event-loop context.
 */
public class SessionStatsSender implements SessionStatsConsumer {
   private final EventBus eb;
   private final String address;
   private final String runId;
   // Lazily allocated; null means "nothing accumulated since the last send".
   private Map<String, LowHigh> sessionStats;

   public SessionStatsSender(EventBus eb, String address, String runId) {
      this.eb = eb;
      this.address = address;
      this.runId = runId;
   }

   /** Publishes the accumulated stats, if any, and resets the accumulator. */
   public void send() {
      if (sessionStats == null) {
         return;
      }
      eb.send(Feeds.STATS, new SessionStatsMessage(address, runId, System.currentTimeMillis(), sessionStats));
      sessionStats = null;
   }

   /** Records the session pool extremes observed for one phase since the last send. */
   @Override
   public void accept(String phase, int minSessions, int maxSessions) {
      Map<String, LowHigh> stats = sessionStats;
      if (stats == null) {
         stats = new HashMap<>();
         sessionStats = stats;
      }
      stats.put(phase, new LowHigh(minSessions, maxSessions));
   }
}
| java | Apache-2.0 | 70ad9cad7ba105e88d7c62e7b65892ecd288f034 | 2026-01-05T02:38:03.557103Z | false |
Hyperfoil/Hyperfoil | https://github.com/Hyperfoil/Hyperfoil/blob/70ad9cad7ba105e88d7c62e7b65892ecd288f034/clustering/src/main/java/io/hyperfoil/clustering/VersionConflictException.java | clustering/src/main/java/io/hyperfoil/clustering/VersionConflictException.java | package io.hyperfoil.clustering;
/**
 * Signals a version conflict (e.g. a concurrent modification detected by a version check).
 * Constructed with no message/cause and with suppression and stack trace capture disabled,
 * which makes instances cheap to create - presumably because this exception is used as a
 * control-flow signal rather than an error report.
 */
public class VersionConflictException extends Exception {
   public VersionConflictException() {
      // (message, cause, enableSuppression, writableStackTrace) - no stack trace is recorded.
      super(null, null, false, false);
   }
}
| java | Apache-2.0 | 70ad9cad7ba105e88d7c62e7b65892ecd288f034 | 2026-01-05T02:38:03.557103Z | false |
Hyperfoil/Hyperfoil | https://github.com/Hyperfoil/Hyperfoil/blob/70ad9cad7ba105e88d7c62e7b65892ecd288f034/clustering/src/main/java/io/hyperfoil/clustering/AgentVerticle.java | clustering/src/main/java/io/hyperfoil/clustering/AgentVerticle.java | package io.hyperfoil.clustering;
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.util.ArrayList;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import io.hyperfoil.Hyperfoil;
import io.hyperfoil.api.config.Benchmark;
import io.hyperfoil.api.session.PhaseInstance;
import io.hyperfoil.clustering.messages.AgentControlMessage;
import io.hyperfoil.clustering.messages.AgentHello;
import io.hyperfoil.clustering.messages.AgentReadyMessage;
import io.hyperfoil.clustering.messages.ErrorMessage;
import io.hyperfoil.clustering.messages.PhaseChangeMessage;
import io.hyperfoil.clustering.messages.PhaseControlMessage;
import io.hyperfoil.core.impl.SimulationRunner;
import io.hyperfoil.core.util.CountDown;
import io.hyperfoil.impl.Util;
import io.hyperfoil.internal.Properties;
import io.vertx.core.AbstractVerticle;
import io.vertx.core.Context;
import io.vertx.core.eventbus.EventBus;
import io.vertx.core.eventbus.Message;
import io.vertx.core.eventbus.MessageConsumer;
import io.vertx.core.eventbus.ReplyException;
import io.vertx.core.eventbus.ReplyFailure;
import io.vertx.core.impl.VertxInternal;
/**
 * Vert.x verticle running on each agent node. On start it registers with the controller via
 * periodic {@link AgentHello} messages, then reacts to {@link AgentControlMessage}s (initialize,
 * stop, list sessions/connections), relays phase control commands to the {@link SimulationRunner}
 * and periodically ships request/session/connection statistics back to the controller.
 */
public class AgentVerticle extends AbstractVerticle {
   private static Logger log = LogManager.getLogger(AgentVerticle.class);

   private String name;
   private String deploymentId;
   // Cluster node id; stays "in-vm" when not running clustered.
   private String nodeId = "in-vm";
   private String runId;
   private EventBus eb;

   private SimulationRunner runner;
   private MessageConsumer<Object> controlFeedConsumer;
   // Timer id of the periodic stats collection; -1 until a benchmark is initialized.
   private long statsTimerId = -1;
   private RequestStatsSender requestStatsSender;
   private CountDown statisticsCountDown;
   private SessionStatsSender sessionStatsSender;
   private ConnectionStatsSender connectionStatsSender;

   @Override
   public void start() {
      deploymentId = deploymentID();
      // Agent name resolution order: verticle config, system property, host name, deployment id.
      name = context.config().getString("name");
      if (name == null) {
         name = Properties.get(Properties.AGENT_NAME, null);
      }
      if (name == null) {
         try {
            name = InetAddress.getLocalHost().getHostName();
         } catch (UnknownHostException e) {
            log.debug("Cannot deduce name from host name", e);
            name = deploymentId;
         }
      }
      runId = context.config().getString("runId");
      if (runId == null) {
         runId = Properties.get(Properties.RUN_ID, null);
         if (runId == null) {
            throw new IllegalStateException("No run ID defined for this agent.");
         }
      }
      eb = vertx.eventBus();

      // Point-to-point control address for this agent instance.
      eb.consumer(deploymentId, message -> {
         try {
            AgentControlMessage controlMessage = (AgentControlMessage) message.body();
            if (controlMessage == null) {
               // A null body indicates a codec/version mismatch between agent and controller.
               message.fail(1, "Could not decode message body. Does this Agent have the same version as the Controller?");
               return;
            }
            handleAgentControlMessage(message, controlMessage);
         } catch (Throwable t) {
            log.error("Processing of message failed", t);
            message.fail(1, t.getMessage());
         }
      });

      if (vertx.isClustered()) {
         if (vertx instanceof VertxInternal) {
            nodeId = ((VertxInternal) vertx).getClusterManager().getNodeId();
         }
      }
      // Keep pinging the controller every second until it acknowledges the registration.
      vertx.setPeriodic(1000, timerId -> {
         eb.request(Feeds.DISCOVERY, new AgentHello(name, nodeId, deploymentId, runId), reply -> {
            log.trace("{} Pinging controller", deploymentId);
            if (reply.succeeded()) {
               log.info("{} Got reply from controller.", deploymentId);
               vertx.cancelTimer(timerId);
            } else {
               if (reply.cause() instanceof ReplyException) {
                  ReplyFailure replyFailure = ((ReplyException) reply.cause()).failureType();
                  if (replyFailure == ReplyFailure.RECIPIENT_FAILURE) {
                     log.error("{} Failed to register, already registered!", deploymentId);
                  } else {
                     log.info("{} Failed to register: {}", deploymentId, replyFailure);
                  }
               }
            }
         });
      });
   }

   /** Dispatches one controller command; replies to {@code message} with the outcome. */
   private void handleAgentControlMessage(Message<Object> message, AgentControlMessage controlMessage) {
      switch (controlMessage.command()) {
         case INITIALIZE:
            log.info("Initializing agent");
            try {
               initBenchmark(controlMessage.benchmark(), controlMessage.agentId());
               message.reply("OK");
            } catch (Throwable e) {
               log.error("Failed to initialize agent", e);
               message.fail(1, Util.explainCauses(e));
            }
            break;
         case STOP:
            // collect stats one last time before acknowledging termination
            log.info("Received agent reset");
            try {
               if (statsTimerId >= 0) {
                  vertx.cancelTimer(statsTimerId);
               }
               // Replies and shuts down/undeploys only after all pending stats were flushed.
               CountDown completion = new CountDown(result -> {
                  message.reply(result.succeeded() ? "OK" : result.cause());
                  if (vertx.isClustered()) {
                     // Give the message some time to be sent
                     vertx.setTimer(1000, id -> Hyperfoil.shutdownVertx(vertx));
                  } else {
                     vertx.undeploy(deploymentID());
                  }
               }, 1);
               if (runner != null) {
                  // TODO: why do we have to visit&send stats here?
                  runner.visitStatistics(requestStatsSender);
                  requestStatsSender.send(completion);
                  requestStatsSender.sendPhaseComplete(null, completion);
                  runner.shutdown();
               }
               if (controlFeedConsumer != null) {
                  controlFeedConsumer.unregister();
               }
               controlFeedConsumer = null;
               runner = null;
               requestStatsSender = null;
               if (statisticsCountDown != null) {
                  statisticsCountDown.setHandler(result -> completion.countDown());
                  statisticsCountDown.countDown();
               } else {
                  completion.countDown();
               }
            } catch (Throwable e) {
               log.error("Exception thrown when stopping the agent", e);
               message.reply(e);
            }
            break;
         case LIST_SESSIONS:
            log.debug("Listing sessions...");
            try {
               ArrayList<String> sessions = new ArrayList<>();
               boolean includeInactive = controlMessage.includeInactive();
               runner.visitSessions(s -> {
                  if (s.isActive() || includeInactive) {
                     sessions.add(s.toString());
                  }
               });
               message.reply(sessions);
            } catch (Throwable e) {
               log.error("Exception thrown when listing sessions", e);
               message.reply(e);
            }
            break;
         case LIST_CONNECTIONS:
            log.debug("Listing connections...");
            try {
               message.reply(runner.listConnections());
            } catch (Throwable e) {
               log.error("Exception thrown when listing connections", e);
               message.reply(e);
            }
            break;
      }
   }

   /** Subscribes to the broadcast phase-control feed and forwards commands to the runner. */
   private MessageConsumer<Object> listenOnControl() {
      return eb.consumer(Feeds.CONTROL, message -> {
         PhaseControlMessage controlMessage = (PhaseControlMessage) message.body();
         switch (controlMessage.command()) {
            case RUN:
               if (controlMessage.globalData() != null) {
                  runner.addGlobalData(controlMessage.globalData());
               }
               runner.startPhase(controlMessage.phase());
               break;
            case FINISH:
               runner.finishPhase(controlMessage.phase());
               break;
            case TRY_TERMINATE:
               runner.tryTerminatePhase(controlMessage.phase());
               break;
            case TERMINATE:
               runner.terminatePhase(controlMessage.phase());
               break;
         }
      });
   }

   @Override
   public void stop() {
      if (runner != null) {
         runner.shutdown();
      }
   }

   /**
    * Creates and initializes the {@link SimulationRunner} for a benchmark, wires up
    * stats senders and the phase-status listener, starts the periodic stats timer and
    * opens connections; reports readiness or failure to the controller on the RESPONSE feed.
    *
    * @throws IllegalStateException if another simulation is already running
    */
   private void initBenchmark(Benchmark benchmark, int agentId) {
      if (runner != null) {
         throw new IllegalStateException("Another simulation is running!");
      }

      Context context = vertx.getOrCreateContext();
      runner = new SimulationRunner(benchmark, runId, agentId,
            error -> eb.send(Feeds.RESPONSE, new ErrorMessage(deploymentId, runId, error, false)));
      controlFeedConsumer = listenOnControl();
      requestStatsSender = new RequestStatsSender(benchmark, eb, deploymentId, runId);
      statisticsCountDown = new CountDown(1);
      sessionStatsSender = new SessionStatsSender(eb, deploymentId, runId);
      connectionStatsSender = new ConnectionStatsSender(eb, deploymentId, runId);
      runner.setControllerListener((phase, status, sessionLimitExceeded, error, globalData) -> {
         log.debug("{} changed phase {} to {}", deploymentId, phase, status);
         log.debug("New global data is {}", globalData);
         String cpuUsage = runner.getCpuUsage(phase.name());
         eb.send(Feeds.RESPONSE, new PhaseChangeMessage(deploymentId, runId, phase.name(), status, sessionLimitExceeded,
               cpuUsage, error, globalData));
         if (status == PhaseInstance.Status.TERMINATED) {
            // Flush this phase's stats on the event loop before the controller sees it as complete.
            context.runOnContext(nil -> {
               if (runner != null) {
                  runner.visitStatistics(phase, requestStatsSender);
               }
               requestStatsSender.send(statisticsCountDown);
               requestStatsSender.sendPhaseComplete(phase, statisticsCountDown);
            });
         }
         return Util.COMPLETED_VOID_FUTURE;
      });
      runner.init();
      assert context.isEventLoopContext();
      statsTimerId = vertx.setPeriodic(benchmark.statisticsCollectionPeriod(), timerId -> {
         runner.visitStatistics(requestStatsSender);
         requestStatsSender.send(statisticsCountDown);
         runner.visitSessionPoolStats(sessionStatsSender);
         sessionStatsSender.send();
         runner.visitConnectionStats(connectionStatsSender);
         connectionStatsSender.send();
      });
      runner.openConnections(vertx::executeBlocking, result -> {
         if (result.succeeded()) {
            eb.send(Feeds.RESPONSE, new AgentReadyMessage(deploymentID(), runId));
         } else {
            eb.send(Feeds.RESPONSE, new ErrorMessage(deploymentID(), runId, result.cause(), true));
         }
      });
   }
}
| java | Apache-2.0 | 70ad9cad7ba105e88d7c62e7b65892ecd288f034 | 2026-01-05T02:38:03.557103Z | false |
Hyperfoil/Hyperfoil | https://github.com/Hyperfoil/Hyperfoil/blob/70ad9cad7ba105e88d7c62e7b65892ecd288f034/clustering/src/main/java/io/hyperfoil/clustering/ControllerVerticle.java | clustering/src/main/java/io/hyperfoil/clustering/ControllerVerticle.java | package io.hyperfoil.clustering;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.ServiceLoader;
import java.util.Set;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.function.BiConsumer;
import java.util.stream.Collectors;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.infinispan.commons.api.BasicCacheContainer;
import com.fasterxml.jackson.core.JsonEncoding;
import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.databind.ObjectMapper;
import io.hyperfoil.api.BenchmarkExecutionException;
import io.hyperfoil.api.config.Agent;
import io.hyperfoil.api.config.Benchmark;
import io.hyperfoil.api.config.BenchmarkSource;
import io.hyperfoil.api.config.Model;
import io.hyperfoil.api.config.Phase;
import io.hyperfoil.api.config.RunHook;
import io.hyperfoil.api.config.SessionLimitPolicy;
import io.hyperfoil.api.deployment.DeployedAgent;
import io.hyperfoil.api.deployment.Deployer;
import io.hyperfoil.api.session.GlobalData;
import io.hyperfoil.api.session.PhaseInstance;
import io.hyperfoil.clustering.messages.AgentControlMessage;
import io.hyperfoil.clustering.messages.AgentHello;
import io.hyperfoil.clustering.messages.AgentReadyMessage;
import io.hyperfoil.clustering.messages.AgentStatusMessage;
import io.hyperfoil.clustering.messages.AuxiliaryHello;
import io.hyperfoil.clustering.messages.ConnectionStatsMessage;
import io.hyperfoil.clustering.messages.DelayStatsCompletionMessage;
import io.hyperfoil.clustering.messages.ErrorMessage;
import io.hyperfoil.clustering.messages.PhaseChangeMessage;
import io.hyperfoil.clustering.messages.PhaseControlMessage;
import io.hyperfoil.clustering.messages.PhaseStatsCompleteMessage;
import io.hyperfoil.clustering.messages.RequestStatsMessage;
import io.hyperfoil.clustering.messages.SessionStatsMessage;
import io.hyperfoil.clustering.messages.StatsMessage;
import io.hyperfoil.clustering.util.PersistenceUtil;
import io.hyperfoil.controller.CsvWriter;
import io.hyperfoil.controller.JsonLoader;
import io.hyperfoil.controller.JsonWriter;
import io.hyperfoil.controller.StatisticsStore;
import io.hyperfoil.core.hooks.ExecRunHook;
import io.hyperfoil.core.parser.BenchmarkParser;
import io.hyperfoil.core.parser.ParserException;
import io.hyperfoil.core.util.CountDown;
import io.hyperfoil.core.util.LowHigh;
import io.hyperfoil.internal.Controller;
import io.hyperfoil.internal.Properties;
import io.vertx.core.AbstractVerticle;
import io.vertx.core.AsyncResult;
import io.vertx.core.DeploymentOptions;
import io.vertx.core.Future;
import io.vertx.core.Handler;
import io.vertx.core.Promise;
import io.vertx.core.eventbus.EventBus;
import io.vertx.core.eventbus.Message;
import io.vertx.core.eventbus.ReplyException;
import io.vertx.core.impl.VertxInternal;
import io.vertx.core.json.JsonArray;
import io.vertx.core.json.JsonObject;
import io.vertx.core.spi.cluster.ClusterManager;
import io.vertx.core.spi.cluster.NodeListener;
import io.vertx.ext.cluster.infinispan.InfinispanClusterManager;
public class ControllerVerticle extends AbstractVerticle implements NodeListener {
private static final Logger log = LogManager.getLogger(ControllerVerticle.class);
private static final int MAX_IN_MEMORY_RUNS = Properties.getInt(Properties.MAX_IN_MEMORY_RUNS, 20);
static final String DEFAULT_STATS_JSON = "all.json";
private EventBus eb;
private ControllerServer server;
private Deployer deployer;
private final AtomicInteger runIds = new AtomicInteger();
private final Map<String, Benchmark> benchmarks = new HashMap<>();
private final Map<String, BenchmarkSource> templates = new HashMap<>();
private long timerId = -1;
Map<String, Run> runs = new HashMap<>();
/**
 * Controller bootstrap: starts the REST server, reloads runs persisted in
 * {@code Controller.RUN_DIR}, prepares the pre/post hook directories and
 * subscribes to the three event-bus feeds (discovery, agent responses and
 * statistics). The {@code future} completes only after both the HTTP server
 * and the stored benchmarks are loaded (hence the CountDown of 2).
 */
@Override
public void start(Promise<Void> future) {
   log.info("Starting in directory {}...", Controller.ROOT_DIR);
   CountDown startCountDown = new CountDown(future, 2);
   server = new ControllerServer(this, startCountDown);
   vertx.exceptionHandler(throwable -> log.error("Uncaught error: ", throwable));
   if (Files.exists(Controller.RUN_DIR)) {
      // Re-register runs persisted by previous controller instances.
      // FIX: the stream returned by Files.list() must be closed, otherwise the
      // directory handle leaks (see Files.list javadoc); use try-with-resources.
      try (java.util.stream.Stream<Path> runDirs = Files.list(Controller.RUN_DIR)) {
         runDirs.forEach(this::updateRuns);
      } catch (IOException e) {
         log.error("Could not list run dir contents", e);
      } catch (Exception e) {
         log.error("Cannot load previous runs from {}", Controller.RUN_DIR, e);
      }
   }
   //noinspection ResultOfMethodCallIgnored
   Controller.HOOKS_DIR.resolve("pre").toFile().mkdirs();
   //noinspection ResultOfMethodCallIgnored
   Controller.HOOKS_DIR.resolve("post").toFile().mkdirs();
   eb = vertx.eventBus();
   // Discovery feed: agents and auxiliary nodes (e.g. CLI) announce themselves.
   eb.consumer(Feeds.DISCOVERY, message -> {
      if (message.body() instanceof AgentHello) {
         handleAgentHello(message, (AgentHello) message.body());
      } else if (message.body() instanceof AuxiliaryHello) {
         AuxiliaryHello hello = (AuxiliaryHello) message.body();
         log.info("Noticed auxiliary {} (node {}, {})", hello.name(), hello.nodeId(), hello.deploymentId());
         String nodeId = ((VertxInternal) vertx).getClusterManager().getNodeId();
         message.reply(nodeId);
      } else {
         log.error("Unknown message on discovery feed! {}", message.body());
      }
   });
   // Response feed: agents report lifecycle events (phase changes, errors, readiness).
   eb.consumer(Feeds.RESPONSE, message -> {
      AgentStatusMessage msg = (AgentStatusMessage) message.body();
      Run run = runs.get(msg.runId());
      if (run == null) {
         log.error("No run {}", msg.runId());
         return;
      }
      AgentInfo agent = run.agents.stream().filter(a -> a.deploymentId.equals(msg.senderId())).findAny().orElse(null);
      if (agent == null) {
         log.error("No agent {} in run {}", msg.senderId(), run.id);
         return;
      }
      if (msg instanceof PhaseChangeMessage) {
         handlePhaseChange(run, agent, (PhaseChangeMessage) msg);
      } else if (msg instanceof ErrorMessage) {
         ErrorMessage errorMessage = (ErrorMessage) msg;
         run.errors.add(new Run.Error(agent, errorMessage.error()));
         if (errorMessage.isFatal()) {
            agent.status = AgentInfo.Status.FAILED;
            stopSimulation(run);
         }
      } else if (msg instanceof AgentReadyMessage) {
         agent.status = AgentInfo.Status.READY;
         // Start only once the last agent becomes ready.
         if (run.agents.stream().allMatch(a -> a.status == AgentInfo.Status.READY)) {
            startSimulation(run);
         }
      } else {
         log.error("Unexpected type of message: {}", msg);
      }
   });
   // Stats feed: agents stream statistics snapshots and completion markers.
   eb.consumer(Feeds.STATS, message -> {
      if (!(message.body() instanceof StatsMessage)) {
         log.error("Unknown message type: {}", message.body());
         return;
      }
      StatsMessage statsMessage = (StatsMessage) message.body();
      Run run = runs.get(statsMessage.runId);
      if (run != null) {
         String agentName = run.agents.stream()
               .filter(ai -> ai.deploymentId.equals(statsMessage.address))
               .map(ai -> ai.name).findFirst().orElse("<unknown>");
         if (statsMessage instanceof RequestStatsMessage) {
            RequestStatsMessage rsm = (RequestStatsMessage) statsMessage;
            String phase = run.phase(rsm.phaseId);
            if (rsm.statistics != null) {
               log.debug("Run {}: Received stats from {}({}): {}/{}/{}:{} ({} requests)",
                     rsm.runId, agentName, rsm.address, phase, rsm.stepId, rsm.metric,
                     rsm.statistics.sequenceId, rsm.statistics.requestCount);
               boolean added = run.statisticsStore().record(agentName, rsm.phaseId, rsm.stepId, rsm.metric, rsm.statistics);
               if (!added) {
                  // warning already logged
                  String errorMessage = String.format(
                        "Received statistics for %s/%d/%s:%d with %d requests but the statistics are already completed; these statistics won't be reported.",
                        phase, rsm.stepId, rsm.metric, rsm.statistics.sequenceId, rsm.statistics.requestCount);
                  run.errors.add(new Run.Error(null, new BenchmarkExecutionException(errorMessage)));
               }
            }
         } else if (statsMessage instanceof PhaseStatsCompleteMessage) {
            PhaseStatsCompleteMessage pscm = (PhaseStatsCompleteMessage) statsMessage;
            log.debug("Run {}: Received stats completion for phase {} from {}", run.id, pscm.phase, pscm.address);
            AgentInfo agent = run.agents.stream().filter(a -> a.deploymentId.equals(pscm.address)).findFirst().orElse(null);
            if (agent == null) {
               log.error("Run {}: Cannot find agent {}", run.id, pscm.address);
            } else {
               PhaseInstance.Status prevStatus = agent.phases.put(pscm.phase, PhaseInstance.Status.STATS_COMPLETE);
               if (prevStatus == PhaseInstance.Status.STATS_COMPLETE) {
                  // TODO: the stats might be completed both regularly and when the agent receives STOP
                  log.info("Run {}: stats for phase {} are already completed, ignoring.", run.id, pscm.phase);
               } else if (run.agents.stream().map(a -> a.phases.get(pscm.phase))
                     .allMatch(s -> s == PhaseInstance.Status.STATS_COMPLETE)) {
                  // This agent was the last one: the phase's statistics can be finalized.
                  ControllerPhase controllerPhase = run.phases.get(pscm.phase);
                  if (controllerPhase != null) {
                     tryCompletePhase(run, pscm.phase, controllerPhase);
                  } else {
                     log.error("Run {}: Cannot find phase {}!", run.id, pscm.phase);
                  }
               }
            }
         } else if (statsMessage instanceof SessionStatsMessage) {
            SessionStatsMessage sessionStatsMessage = (SessionStatsMessage) statsMessage;
            log.trace("Run {}: Received session pool stats from {}", sessionStatsMessage.runId, sessionStatsMessage.address);
            for (Map.Entry<String, LowHigh> entry : sessionStatsMessage.sessionStats.entrySet()) {
               run.statisticsStore().recordSessionStats(agentName, sessionStatsMessage.timestamp,
                     entry.getKey(), entry.getValue().low, entry.getValue().high);
            }
         } else if (statsMessage instanceof ConnectionStatsMessage) {
            ConnectionStatsMessage connectionStatsMessage = (ConnectionStatsMessage) statsMessage;
            log.trace("Run {}: Received connection stats from {}", connectionStatsMessage.runId,
                  connectionStatsMessage.address);
            run.statisticsStore().recordConnectionStats(agentName, connectionStatsMessage.timestamp,
                  connectionStatsMessage.stats);
         } else if (statsMessage instanceof DelayStatsCompletionMessage) {
            // An agent asks to postpone finalizing a phase's statistics.
            DelayStatsCompletionMessage delayStatsCompletionMessage = (DelayStatsCompletionMessage) statsMessage;
            String phase = run.phase(delayStatsCompletionMessage.phaseId);
            log.trace("Run {}: Received request for extension from {} for phase {} by {} ms",
                  delayStatsCompletionMessage.runId, delayStatsCompletionMessage.address,
                  phase, delayStatsCompletionMessage.delay);
            ControllerPhase controllerPhase = run.phases.get(phase);
            controllerPhase.delayStatsCompletionUntil(System.currentTimeMillis() + delayStatsCompletionMessage.delay);
         }
      } else {
         log.error("Unknown run {}", statsMessage.runId);
      }
      message.reply("OK");
   });
   if (vertx.isClustered()) {
      // Clustered mode requires a deployer implementation to start remote agents.
      for (Deployer.Factory deployerFactory : ServiceLoader.load(Deployer.Factory.class)) {
         log.debug("Found deployer {}", deployerFactory.name());
         if (Controller.DEPLOYER.equals(deployerFactory.name())) {
            deployer = deployerFactory.create();
            break;
         }
      }
      if (deployer == null) {
         throw new IllegalStateException(
               "Hyperfoil is running in clustered mode but it couldn't load deployer '" + Controller.DEPLOYER + "'");
      }
      if (vertx instanceof VertxInternal) {
         ClusterManager clusterManager = ((VertxInternal) vertx).getClusterManager();
         clusterManager.nodeListener(this);
      }
   }
   if (!Controller.BENCHMARK_DIR.toFile().exists() && !Controller.BENCHMARK_DIR.toFile().mkdirs()) {
      log.error("Failed to create benchmark directory: {}", Controller.BENCHMARK_DIR);
   }
   // The increment/countDown pair keeps the future pending until loadBenchmarks
   // finishes its asynchronous work.
   startCountDown.increment();
   loadBenchmarks(startCountDown);
   startCountDown.countDown();
}
/**
 * Finalizes statistics of {@code phase} and validates its SLAs, unless an agent
 * requested a completion delay — in that case a timer re-invokes this method
 * once the delay elapses.
 */
private void tryCompletePhase(Run run, String phase, ControllerPhase controllerPhase) {
   Long delayUntil = controllerPhase.delayStatsCompletionUntil();
   long remaining = delayUntil == null ? -1 : delayUntil - System.currentTimeMillis();
   if (remaining > 0) {
      // completion postponed; retry after the requested delay
      log.info("Run {}: all agents completed stats for phase {} but delaying for {} ms", run.id, phase, remaining);
      vertx.setTimer(remaining, ignored -> tryCompletePhase(run, phase, controllerPhase));
      return;
   }
   log.info("Run {}: completing stats for phase {}", run.id, phase);
   run.statisticsStore().completePhase(phase);
   if (run.statisticsStore().validateSlas()) {
      return;
   }
   log.info("SLA validation failed for {}", phase);
   controllerPhase.setFailed();
   if (run.benchmark.failurePolicy() == Benchmark.FailurePolicy.CANCEL) {
      failNotStartedPhases(run, controllerPhase);
   }
}
/**
 * Registers an agent announcing itself on the discovery feed. The reply is
 * "Registered" on success, "Ignoring" for duplicate hellos, and a failure for
 * unknown runs/agents. Once the last agent registers, initialization begins.
 */
private void handleAgentHello(Message<Object> message, AgentHello hello) {
   String runId = hello.runId();
   Run run = runs.get(runId);
   if (run == null) {
      log.error("Unknown run ID {}", runId);
      message.fail(1, "Unknown run ID");
      return;
   }
   AgentInfo registering = null;
   for (AgentInfo candidate : run.agents) {
      if (candidate.name.equals(hello.name())) {
         registering = candidate;
         break;
      }
   }
   if (registering == null) {
      log.error("Unknown agent {} ({}/{})", hello.name(), hello.nodeId(), hello.deploymentId());
      message.fail(1, "Unknown agent");
      return;
   }
   if (registering.status != AgentInfo.Status.STARTING) {
      // duplicate or late hello — the agent is already past registration
      log.info("Ignoring message, {} is not starting", registering.name);
      message.reply("Ignoring");
      return;
   }
   log.debug("Registering agent {} ({}/{})", hello.name(), hello.nodeId(), hello.deploymentId());
   registering.nodeId = hello.nodeId();
   registering.deploymentId = hello.deploymentId();
   registering.status = AgentInfo.Status.REGISTERED;
   message.reply("Registered");
   boolean someStillStarting = run.agents.stream().anyMatch(a -> a.status == AgentInfo.Status.STARTING);
   if (!someStillStarting) {
      handleAgentsStarted(run);
   } else {
      log.debug("Waiting for registration from agents {}",
            run.agents.stream().filter(a -> a.status == AgentInfo.Status.STARTING).collect(Collectors.toList()));
   }
}
/**
 * Processes a phase status update reported by a single agent: records CPU
 * usage, applies the session-limit policy, records agent-reported errors,
 * merges the agent's global data and then re-aggregates the phase status
 * across all agents.
 */
private void handlePhaseChange(Run run, AgentInfo agent, PhaseChangeMessage phaseChange) {
   String phase = phaseChange.phase();
   log.debug("{} Received phase change from {}: {} is {} (session limit exceeded={}, CPU usage={} errors={})", run.id,
         phaseChange.senderId(), phase, phaseChange.status(), phaseChange.sessionLimitExceeded(),
         phaseChange.cpuUsage(), phaseChange.getError());
   // Track per-agent status; tryProgressStatus() below aggregates these.
   agent.phases.put(phase, phaseChange.status());
   ControllerPhase controllerPhase = run.phases.get(phase);
   if (phaseChange.cpuUsage() != null) {
      run.statisticsStore().recordCpuUsage(phaseChange.phase(), agent.name, phaseChange.cpuUsage());
   }
   if (phaseChange.sessionLimitExceeded()) {
      // Only open-model phases carry a session limit policy; closed models get null.
      Phase def = controllerPhase.definition();
      SessionLimitPolicy sessionLimitPolicy = def.model instanceof Model.OpenModel
            ? ((Model.OpenModel) def.model).sessionLimitPolicy
            : null;
      if (sessionLimitPolicy == SessionLimitPolicy.CONTINUE) {
         log.warn("{} Phase {} session limit exceeded, continuing due to policy {}", run.id, def.name, sessionLimitPolicy);
         // We must not record this as a failure as StatisticsStore.validateSlas() would cancel the benchmark
      } else {
         run.statisticsStore().addFailure(def.name, null, controllerPhase.absoluteStartTime(), System.currentTimeMillis(),
               "Exceeded session limit");
         log.info("{} Failing phase due to exceeded session limit.", run.id);
         controllerPhase.setFailed();
      }
   }
   if (phaseChange.getError() != null) {
      log.error("{} Failing phase {} as agent {} reports error: {}", run.id,
            controllerPhase.definition().name, agent.name, phaseChange.getError().getMessage());
      controllerPhase.setFailed();
      run.errors.add(new Run.Error(agent, phaseChange.getError()));
   }
   controllerPhase.addGlobalData(phaseChange.globalData());
   tryProgressStatus(run, phase);
   // Re-evaluate phase transitions immediately instead of waiting for the next timer tick.
   runSimulation(run);
}
@Override
public void nodeAdded(String nodeID) {
   // Intentionally a no-op: a newly joined cluster member only becomes relevant
   // once it announces itself on the discovery feed (see the DISCOVERY consumer
   // registered in start()), so there is nothing to do here.
}
/**
 * A cluster member disappeared: every unfinished run that had an agent on that
 * node fails its agent, is killed and its simulation stopped.
 */
@Override
public void nodeLeft(String nodeID) {
   for (Run run : runs.values()) {
      if (run.terminateTime.future().isComplete()) {
         // run already finished, nothing to tear down
         continue;
      }
      run.agents.stream()
            .filter(a -> Objects.equals(a.nodeId, nodeID))
            .findFirst()
            .ifPresent(agent -> {
               agent.status = AgentInfo.Status.FAILED;
               run.errors.add(new Run.Error(agent, new BenchmarkExecutionException("Agent unexpectedly left the cluster.")));
               kill(run, result -> {
                  /* used version of checkstyle does not implement allowEmptyLambdas */
               });
               stopSimulation(run);
            });
   }
}
/**
 * Registers a previously persisted run (one {@code RUN_DIR} subdirectory) in
 * {@link #runs}. Only directories named with four uppercase hex digits are
 * considered. Loading is cheap: only {@code info.json} is parsed here, the full
 * statistics are loaded lazily via {@code statsSupplier}.
 */
private void updateRuns(Path runDir) {
   File file = runDir.toFile();
   // Run IDs are always four uppercase hex digits (see createRun()).
   if (!file.getName().matches("[0-9A-F][0-9A-F][0-9A-F][0-9A-F]")) {
      return;
   }
   String runId = file.getName();
   int id = Integer.parseInt(runId, 16);
   // Keep the ID counter ahead of all persisted runs to avoid collisions.
   if (id >= runIds.get()) {
      runIds.set(id + 1);
   }
   Path infoFile = runDir.resolve("info.json");
   JsonObject info = new JsonObject();
   if (infoFile.toFile().exists() && infoFile.toFile().isFile()) {
      try {
         info = new JsonObject(Files.readString(infoFile));
      } catch (Exception e) {
         // FIX: pass the exception to the logger — it was previously dropped,
         // making corrupted info.json files impossible to diagnose from logs.
         log.error("Cannot read info for run {}", runId, e);
         return;
      }
   }
   String name = info.getString("benchmark", "<unknown>");
   JsonObject paramsObject = info.getJsonObject("params");
   Map<String, String> templateParams = paramsObject == null ? Collections.emptyMap()
         : paramsObject.getMap().entrySet().stream()
               .collect(Collectors.toMap(Map.Entry::getKey, entry -> String.valueOf(entry.getValue())));
   // A placeholder benchmark: the run only needs name and template params here.
   Benchmark benchmark = Benchmark.empty(name, templateParams);
   Run run = new Run(runId, runDir, benchmark);
   run.statsSupplier = () -> loadStats(runDir.resolve(DEFAULT_STATS_JSON), benchmark);
   run.completed = true;
   run.startTime = info.getLong("startTime", 0L);
   run.terminateTime.complete(info.getLong("terminateTime", 0L));
   run.description = info.getString("description");
   JsonArray errors = info.getJsonArray("errors");
   if (errors != null) {
      run.errors.addAll(errors.stream()
            .map(JsonObject.class::cast)
            .map(e -> new Run.Error(new AgentInfo(e.getString("agent"), -1), new Throwable(e.getString("msg"))))
            .collect(Collectors.toList()));
   }
   run.cancelled = info.getBoolean("cancelled", Boolean.FALSE);
   runs.put(runId, run);
}
/**
 * Loads the persisted statistics JSON of a finished run into a fresh
 * {@link StatisticsStore}. Returns {@code null} (after logging) when the file
 * is missing, not a regular file, unreadable or unparseable.
 */
private StatisticsStore loadStats(Path jsonPath, Benchmark benchmark) {
   File statsJson = jsonPath.toFile();
   boolean readable = statsJson.exists() && statsJson.isFile() && statsJson.canRead();
   if (!readable) {
      log.error("Cannot load stats from {}", jsonPath);
      return null;
   }
   log.info("Loading stats from {}", jsonPath);
   // SLA failures were already reported while the run executed; ignore them on reload.
   StatisticsStore store = new StatisticsStore(benchmark, f -> {
   });
   try {
      String json = Files.readString(jsonPath, StandardCharsets.UTF_8);
      JsonLoader.read(json, store);
      return store;
   } catch (Exception e) {
      log.error("Cannot load stats from {}", jsonPath, e);
      return null;
   }
}
/**
 * Shuts the controller down: closes the deployer (if any) and stops the HTTP
 * server, completing {@code stopFuture}.
 */
@Override
public void stop(Promise<Void> stopFuture) throws Exception {
   // FIX: if deployer.close() throws, server.stop(stopFuture) was never called
   // and the stop future stayed incomplete forever; the finally block
   // guarantees the server shutdown (and future completion) always happens.
   try {
      if (deployer != null) {
         deployer.close();
      }
   } finally {
      server.stop(stopFuture);
   }
}
/**
 * Aggregates per-agent statuses of {@code phase} into their minimum (the least
 * progressed one) and advances the controller-side phase accordingly. Does
 * nothing while any agent has not yet reported a status for the phase.
 */
private void tryProgressStatus(Run run, String phase) {
   PhaseInstance.Status minStatus = PhaseInstance.Status.TERMINATED;
   for (AgentInfo a : run.agents) {
      PhaseInstance.Status status = a.phases.get(phase);
      if (status == null) {
         // The status is not defined on one of the nodes, so we can't progress it.
         return;
      } else if (status.ordinal() < minStatus.ordinal()) {
         minStatus = status;
      }
   }
   ControllerPhase controllerPhase = run.phases.get(phase);
   if (controllerPhase == null) {
      log.error("Cannot find phase {} in run {}", phase, run.id);
      return;
   }
   // Statuses below RUNNING intentionally fall through the switch without action.
   switch (minStatus) {
      case RUNNING:
         controllerPhase.status(run.id, ControllerPhase.Status.RUNNING);
         break;
      case FINISHED:
         controllerPhase.status(run.id, ControllerPhase.Status.FINISHED);
         break;
      case TERMINATED:
      case STATS_COMPLETE: // not sure if this can happen
         controllerPhase.status(run.id, ControllerPhase.Status.TERMINATED);
         controllerPhase.absoluteCompletionTime(System.currentTimeMillis());
         // Global data produced by the completed phase becomes visible to later phases.
         run.newGlobalData.putAll(run.phases.get(phase).completeGlobalData());
         break;
   }
   if (controllerPhase.isFailed()) {
      failNotStartedPhases(run, controllerPhase);
   }
}
/**
 * Cancels every phase that has not started yet so a failed run winds down
 * quickly instead of executing the remaining phases.
 */
private void failNotStartedPhases(Run run, ControllerPhase controllerPhase) {
   log.info("Phase {} failed, cancelling other phases...", controllerPhase.definition().name());
   run.phases.values().stream()
         .filter(p -> p.status() == ControllerPhase.Status.NOT_STARTED)
         .forEach(p -> p.status(run.id, ControllerPhase.Status.CANCELLED));
}
/**
 * Allocates a new run ID (four uppercase hex digits), creates the run
 * directory, registers the run and persists the benchmark source (if present).
 */
Run createRun(Benchmark benchmark, String description) {
   // Evict oldest loaded runs first so this new one fits under the limit.
   ensureMaxInMemoryRuns();
   String runId = String.format("%04X", runIds.getAndIncrement());
   Path runDir = Controller.RUN_DIR.resolve(runId);
   //noinspection ResultOfMethodCallIgnored
   runDir.toFile().mkdirs();
   Run newRun = new Run(runId, runDir, benchmark);
   newRun.initStore(new StatisticsStore(benchmark, failure -> log.warn("Failed verify SLA(s) for {}/{}: {}",
         failure.phase(), failure.metric(), failure.message())));
   newRun.description = description;
   runs.put(newRun.id, newRun);
   BenchmarkSource source = newRun.benchmark.source();
   if (source != null) {
      PersistenceUtil.store(source, newRun.dir);
   }
   return newRun;
}
/**
 * Called before creating a new run: when the new run would push the number of
 * loaded runs over MAX_IN_MEMORY_RUNS, unloads the oldest loaded runs and
 * arranges for their statistics to be reloaded lazily from disk when needed.
 */
private void ensureMaxInMemoryRuns() {
   List<Run> loaded = runs.values().stream().filter(Run::isLoaded)
         .sorted(Comparator.comparing(r -> r.id)).collect(Collectors.toList());
   int excess = loaded.size() + 1 - MAX_IN_MEMORY_RUNS;
   if (excess <= 0) {
      return;
   }
   for (Run victim : loaded.subList(0, Math.min(excess, loaded.size()))) {
      log.info("Unloading run {}", victim.id);
      victim.unload();
      victim.statsSupplier = () -> {
         Path statsPath = Controller.RUN_DIR.resolve(victim.id).resolve(DEFAULT_STATS_JSON);
         return loadStats(statsPath, victim.benchmark);
      };
   }
}
/**
 * Deploys the agents for {@code run} (or a single in-VM agent in non-clustered
 * mode) and arms the deployment timeout.
 *
 * @return {@code null} on success, or a user-facing error message when the run
 *         cannot start — in that case the run is immediately marked completed.
 */
@SuppressWarnings("deprecation") // Uses a deprecated executeBlocking call that should be addressed later. This is tracked in https://github.com/Hyperfoil/Hyperfoil/issues/493
String startBenchmark(Run run) {
   // Collect names of agents still busy in other unfinished runs.
   // FIX: the inner loop used to iterate run.agents (the NEW run, whose agent
   // list is still empty at this point) instead of r.agents, so activeAgents
   // stayed empty and the conflict check below could never trigger.
   Set<String> activeAgents = new HashSet<>();
   for (Run r : runs.values()) {
      if (!r.terminateTime.future().isComplete()) {
         for (AgentInfo agent : r.agents) {
            activeAgents.add(agent.name);
         }
      }
   }
   for (Agent agent : run.benchmark.agents()) {
      if (activeAgents.contains(agent.name)) {
         long currentTime = System.currentTimeMillis();
         run.startTime = currentTime;
         run.terminateTime.complete(currentTime);
         run.completed = true;
         return "Agent " + agent + " is already used; try starting the benchmark later";
      }
   }
   if (run.benchmark.agents().length == 0) {
      if (vertx.isClustered()) {
         long currentTime = System.currentTimeMillis();
         run.startTime = currentTime;
         run.terminateTime.complete(currentTime);
         run.completed = true;
         return "Server is started in clustered mode; benchmarks must define agents.";
      } else {
         // Non-clustered controller: run a single agent within this JVM.
         run.agents.add(new AgentInfo("in-vm", 0));
         JsonObject config = new JsonObject().put("runId", run.id).put("name", "in-vm");
         vertx.deployVerticle(AgentVerticle.class, new DeploymentOptions().setConfig(config));
      }
   } else {
      if (!vertx.isClustered()) {
         return "Server is not started as clustered and does not accept benchmarks with agents defined.";
      }
      log.info("Starting agents for run {}", run.id);
      int agentCounter = 0;
      for (Agent agent : run.benchmark.agents()) {
         AgentInfo agentInfo = new AgentInfo(agent.name, agentCounter++);
         run.agents.add(agentInfo);
         log.debug("Starting agent {}", agent.name);
         // Deployment runs on a worker thread; any failure stops the whole run.
         vertx.executeBlocking(
               future -> agentInfo.deployedAgent = deployer.start(agent, run.id, run.benchmark, exception -> {
                  if (agentInfo.status.ordinal() < AgentInfo.Status.STOPPING.ordinal()) {
                     run.errors.add(
                           new Run.Error(agentInfo, new BenchmarkExecutionException("Failed to deploy agent", exception)));
                     log.error("Failed to deploy agent {}", agent.name, exception);
                     vertx.runOnContext(nil -> stopSimulation(run));
                  }
               }), false, result -> {
                  if (result.failed()) {
                     run.errors.add(new Run.Error(agentInfo,
                           new BenchmarkExecutionException("Failed to start agent", result.cause())));
                     log.error("Failed to start agent {}", agent.name, result.cause());
                     vertx.runOnContext(nil -> stopSimulation(run));
                  }
               });
      }
   }
   // If the agents fail to register within the timeout, abort the run.
   run.deployTimerId = vertx.setTimer(Controller.DEPLOY_TIMEOUT, id -> {
      log.error("{} Deployment timed out.", run.id);
      run.errors.add(new Run.Error(null, new BenchmarkExecutionException("Deployment timed out.")));
      stopSimulation(run);
   });
   return null;
}
/**
 * All agents registered: cancels the deployment timeout and sends each
 * REGISTERED agent the INITIALIZE command carrying the benchmark. Any
 * initialization failure marks the agent FAILED and stops the whole run.
 */
private void handleAgentsStarted(Run run) {
   vertx.cancelTimer(run.deployTimerId);
   log.info("Starting benchmark {} - run {}", run.benchmark.name(), run.id);
   for (AgentInfo agent : run.agents) {
      if (agent.status != AgentInfo.Status.REGISTERED) {
         log.error("{} Agent {}({}) already initializing, status is {}!", run.id, agent.name, agent.deploymentId,
               agent.status);
      } else {
         eb.request(agent.deploymentId,
               new AgentControlMessage(AgentControlMessage.Command.INITIALIZE, agent.id, run.benchmark), reply -> {
                  Throwable cause;
                  if (reply.failed()) {
                     // transport-level failure (e.g. timeout) on this node
                     cause = reply.cause();
                     log.error("{} Agent {}({}) failed to initialize", run.id, agent.name, agent.deploymentId);
                     log.error("Failure thrown on the controller (this node): ", cause);
                  } else {
                     Message<Object> message = reply.result();
                     // The agent signals its own initialization errors by replying
                     // with a ReplyException in the message body.
                     if (message.body() instanceof ReplyException) {
                        String msg = ((ReplyException) message.body()).getMessage();
                        log.error("{} Agent {}({}) failed to initialize", run.id, agent.name, agent.deploymentId);
                        log.error("Failure thrown on the agent node (see agent log for details): {}", msg);
                        cause = new BenchmarkExecutionException(msg);
                     } else {
                        // success — nothing further to do for this agent
                        log.debug("{} Agent {}({}) was initialized.", run.id, agent.name, agent.deploymentId);
                        return;
                     }
                  }
                  agent.status = AgentInfo.Status.FAILED;
                  run.errors.add(new Run.Error(agent, cause));
                  stopSimulation(run);
               });
      }
   }
}
/**
 * Runs pre-hooks (shared hooks loaded from disk plus benchmark-defined ones,
 * sorted) on a worker thread; when all pass, initializes per-phase bookkeeping
 * on the event loop and starts driving the simulation. A failing hook records
 * an error and stops the run.
 */
@SuppressWarnings("deprecation") // Uses a deprecated executeBlocking call that should be addressed later. This is tracked in https://github.com/Hyperfoil/Hyperfoil/issues/493
private void startSimulation(Run run) {
   vertx.executeBlocking(future -> {
      // combine shared and benchmark-private hooks
      List<RunHook> hooks = loadHooks("pre");
      hooks.addAll(run.benchmark.preHooks());
      Collections.sort(hooks);
      for (RunHook hook : hooks) {
         StringBuilder sb = new StringBuilder();
         boolean success = hook.run(getRunProperties(run), sb::append);
         run.hookResults.add(new Run.RunHookOutput(hook.name(), sb.toString()));
         if (!success) {
            run.errors.add(
                  new Run.Error(null, new BenchmarkExecutionException("Execution of run hook " + hook.name() + " failed.")));
            future.fail("Execution of pre-hook " + hook.name() + " failed.");
            // FIX: previously this was a 'break' that fell through to
            // future.complete() below — completing an already-failed Vert.x
            // promise throws IllegalStateException. Return instead.
            return;
         }
      }
      future.complete();
   }, result -> {
      if (result.succeeded()) {
         vertx.runOnContext(nil -> {
            assert run.startTime == Long.MIN_VALUE;
            run.startTime = System.currentTimeMillis();
            for (Phase phase : run.benchmark.phases()) {
               run.phases.put(phase.name(), new ControllerPhase(phase));
            }
            runSimulation(run);
         });
      } else {
         log.error("{} Failed to start the simulation", run.id, result.cause());
         stopSimulation(run);
      }
   });
}
private void runSimulation(Run run) {
if (timerId >= 0) {
vertx.cancelTimer(timerId);
timerId = -1;
}
long now = System.currentTimeMillis();
for (ControllerPhase phase : run.phases.values()) {
if (phase.status() == ControllerPhase.Status.RUNNING
&& phase.absoluteStartTime() + phase.definition().duration() <= now) {
eb.publish(Feeds.CONTROL,
new PhaseControlMessage(PhaseControlMessage.Command.FINISH, phase.definition().name, null));
phase.status(run.id, ControllerPhase.Status.FINISHING);
}
if (phase.status() == ControllerPhase.Status.FINISHED) {
if (phase.definition().maxDuration() >= 0 && phase.absoluteStartTime() + phase.definition().maxDuration() <= now) {
eb.publish(Feeds.CONTROL,
new PhaseControlMessage(PhaseControlMessage.Command.TERMINATE, phase.definition().name, null));
phase.status(run.id, ControllerPhase.Status.TERMINATING);
} else if (phase.definition().terminateAfterStrict().stream().map(run.phases::get)
.allMatch(p -> p.status().isTerminated())) {
eb.publish(Feeds.CONTROL,
| java | Apache-2.0 | 70ad9cad7ba105e88d7c62e7b65892ecd288f034 | 2026-01-05T02:38:03.557103Z | true |
Hyperfoil/Hyperfoil | https://github.com/Hyperfoil/Hyperfoil/blob/70ad9cad7ba105e88d7c62e7b65892ecd288f034/clustering/src/main/java/io/hyperfoil/clustering/OutputStreamAdapter.java | clustering/src/main/java/io/hyperfoil/clustering/OutputStreamAdapter.java | package io.hyperfoil.clustering;
import java.io.OutputStream;
import io.netty.buffer.Unpooled;
import io.vertx.core.buffer.Buffer;
import io.vertx.core.streams.WriteStream;
/**
 * Adapts a Vert.x {@link WriteStream} to a blocking-style {@link OutputStream}.
 * <p>
 * FIX: the {@code byte[]} variants previously wrapped the caller's array via
 * {@code Unpooled.wrappedBuffer()}. A {@code WriteStream} may queue the buffer
 * and consume it asynchronously, while the {@code OutputStream} contract lets
 * the caller reuse the array as soon as {@code write()} returns (e.g.
 * {@code BufferedOutputStream} recycles its internal buffer), which could
 * corrupt data in flight. The bytes are now copied into the buffer.
 */
public class OutputStreamAdapter extends OutputStream {
   private final WriteStream<Buffer> stream;

   public OutputStreamAdapter(WriteStream<Buffer> stream) {
      this.stream = stream;
   }

   @Override
   public void write(byte[] b) {
      // copy: the stream may consume the buffer after this method returns
      stream.write(Buffer.buffer().appendBytes(b));
   }

   @Override
   public void write(byte[] b, int off, int len) {
      // copy only the requested slice
      stream.write(Buffer.buffer().appendBytes(b, off, len));
   }

   @Override
   public void close() {
      // Ends the underlying write stream; no further writes are allowed.
      stream.end();
   }

   @Override
   public void write(int b) {
      Buffer buffer = Buffer.buffer(1);
      buffer.appendByte((byte) b);
      stream.write(buffer);
   }
}
| java | Apache-2.0 | 70ad9cad7ba105e88d7c62e7b65892ecd288f034 | 2026-01-05T02:38:03.557103Z | false |
Hyperfoil/Hyperfoil | https://github.com/Hyperfoil/Hyperfoil/blob/70ad9cad7ba105e88d7c62e7b65892ecd288f034/clustering/src/main/java/io/hyperfoil/clustering/RequestStatsSender.java | clustering/src/main/java/io/hyperfoil/clustering/RequestStatsSender.java | package io.hyperfoil.clustering;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import io.hyperfoil.api.config.Benchmark;
import io.hyperfoil.api.config.Phase;
import io.hyperfoil.api.statistics.StatisticsSnapshot;
import io.hyperfoil.clustering.messages.PhaseStatsCompleteMessage;
import io.hyperfoil.clustering.messages.RequestStatsMessage;
import io.hyperfoil.core.impl.statistics.StatisticsCollector;
import io.hyperfoil.core.util.CountDown;
import io.vertx.core.eventbus.EventBus;
/**
 * Pushes per-phase/step/metric statistics snapshots collected on an agent to
 * the controller over the event bus, plus the "phase stats complete" markers.
 */
public class RequestStatsSender extends StatisticsCollector {
   private static final Logger log = LogManager.getLogger(RequestStatsSender.class);
   private final String address;
   private final String runId;
   private final EventBus eb;
   private final StatisticsConsumer sendStats = this::sendStats;

   public RequestStatsSender(Benchmark benchmark, EventBus eb, String address, String runId) {
      super(benchmark);
      this.eb = eb;
      this.address = address;
      this.runId = runId;
   }

   /**
    * Sends all collected snapshots; {@code completion} tracks outstanding replies.
    */
   public void send(CountDown completion) {
      visitStatistics(sendStats, completion);
   }

   private void sendStats(Phase phase, int stepId, String metric, StatisticsSnapshot statistics, CountDown countDown) {
      if (statistics.histogram.getEndTimeStamp() < statistics.histogram.getStartTimeStamp()) {
         // histogram interval is empty/invalid — nothing to report
         return;
      }
      log.debug("Sending stats for {} {}/{}, id {}: {} requests, {} responses", phase.name(), stepId, metric,
            statistics.sequenceId, statistics.requestCount, statistics.responseCount);
      // On clustered eventbus, ObjectCodec is not called synchronously so we *must* do a copy here.
      // (on a local eventbus we'd have to do a copy in transform() anyway)
      StatisticsSnapshot detached = statistics.clone();
      countDown.increment();
      eb.request(Feeds.STATS, new RequestStatsMessage(address, runId, phase.id(), stepId, metric, detached),
            reply -> countDown.countDown());
   }

   /**
    * Signals that no more statistics will arrive for the given phase
    * (or for all phases when {@code phase} is {@code null}).
    */
   public void sendPhaseComplete(Phase phase, CountDown countDown) {
      // The upper 16 bits of each aggregated key select the phase (phases[key >> 16]).
      for (int phaseAndStepId : aggregated.keySet()) {
         int phaseId = phaseAndStepId >> 16;
         if (phase != null && phase != phases[phaseId]) {
            continue;
         }
         countDown.increment();
         eb.request(Feeds.STATS, new RequestStatsMessage(address, runId, phaseId, -1, null, null),
               reply -> countDown.countDown());
      }
      if (phase == null) {
         // TODO: it would be better to not send this for those phases that are already complete
         for (Phase p : phases) {
            eb.request(Feeds.STATS, new PhaseStatsCompleteMessage(address, runId, p.name()));
         }
      } else {
         eb.request(Feeds.STATS, new PhaseStatsCompleteMessage(address, runId, phase.name()));
      }
   }
}
| java | Apache-2.0 | 70ad9cad7ba105e88d7c62e7b65892ecd288f034 | 2026-01-05T02:38:03.557103Z | false |
Hyperfoil/Hyperfoil | https://github.com/Hyperfoil/Hyperfoil/blob/70ad9cad7ba105e88d7c62e7b65892ecd288f034/clustering/src/main/java/io/hyperfoil/clustering/ControllerServer.java | clustering/src/main/java/io/hyperfoil/clustering/ControllerServer.java | package io.hyperfoil.clustering;
import java.io.BufferedReader;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.PrintStream;
import java.io.UnsupportedEncodingException;
import java.net.InetAddress;
import java.net.URI;
import java.net.URISyntaxException;
import java.net.URLEncoder;
import java.net.UnknownHostException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.security.SecureRandom;
import java.util.ArrayList;
import java.util.Base64;
import java.util.Collections;
import java.util.Comparator;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.UUID;
import java.util.concurrent.ThreadLocalRandom;
import java.util.function.BinaryOperator;
import java.util.function.Consumer;
import java.util.function.Function;
import java.util.function.Predicate;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.infinispan.commons.util.FileLookupFactory;
import io.hyperfoil.api.Version;
import io.hyperfoil.api.config.Benchmark;
import io.hyperfoil.api.config.BenchmarkData;
import io.hyperfoil.api.config.BenchmarkDefinitionException;
import io.hyperfoil.api.config.BenchmarkSource;
import io.hyperfoil.api.config.Model;
import io.hyperfoil.api.statistics.StatisticsSummary;
import io.hyperfoil.clustering.util.PersistedBenchmarkData;
import io.hyperfoil.clustering.webcli.WebCLI;
import io.hyperfoil.controller.ApiService;
import io.hyperfoil.controller.Client;
import io.hyperfoil.controller.StatisticsStore;
import io.hyperfoil.controller.model.Histogram;
import io.hyperfoil.controller.model.RequestStats;
import io.hyperfoil.controller.router.ApiRouter;
import io.hyperfoil.core.impl.LocalBenchmarkData;
import io.hyperfoil.core.impl.ProvidedBenchmarkData;
import io.hyperfoil.core.parser.BenchmarkParser;
import io.hyperfoil.core.parser.ParserException;
import io.hyperfoil.core.print.YamlVisitor;
import io.hyperfoil.core.util.CountDown;
import io.hyperfoil.core.util.LowHigh;
import io.hyperfoil.impl.Util;
import io.hyperfoil.internal.Controller;
import io.hyperfoil.internal.Properties;
import io.netty.handler.codec.http.HttpResponseStatus;
import io.vertx.core.AsyncResult;
import io.vertx.core.CompositeFuture;
import io.vertx.core.Future;
import io.vertx.core.Handler;
import io.vertx.core.Promise;
import io.vertx.core.buffer.Buffer;
import io.vertx.core.http.HttpHeaders;
import io.vertx.core.http.HttpServer;
import io.vertx.core.http.HttpServerOptions;
import io.vertx.core.http.HttpServerResponse;
import io.vertx.core.impl.NoStackTraceThrowable;
import io.vertx.core.json.Json;
import io.vertx.core.json.JsonArray;
import io.vertx.core.json.JsonObject;
import io.vertx.core.net.JksOptions;
import io.vertx.core.net.PemKeyCertOptions;
import io.vertx.ext.web.FileUpload;
import io.vertx.ext.web.Router;
import io.vertx.ext.web.RoutingContext;
import io.vertx.ext.web.handler.FaviconHandler;
import io.vertx.ext.web.handler.StaticHandler;
class ControllerServer implements ApiService {
private static final Logger log = LogManager.getLogger(ControllerServer.class);
private static final String MIME_TYPE_JSON = "application/json";
private static final String MIME_TYPE_SERIALIZED = "application/java-serialized-object";
private static final String MIME_TYPE_TEXT_PLAIN = "text/plain";
private static final String MIME_TYPE_YAML = "text/vnd.yaml";
private static final String KEYSTORE_PATH = Properties.get(Properties.CONTROLLER_KEYSTORE_PATH, null);
private static final String KEYSTORE_PASSWORD = Properties.get(Properties.CONTROLLER_KEYSTORE_PASSWORD, null);
private static final String PEM_KEYS = Properties.get(Properties.CONTROLLER_PEM_KEYS, null);
private static final String PEM_CERTS = Properties.get(Properties.CONTROLLER_PEM_CERTS, null);
private static final String CONTROLLER_PASSWORD = Properties.get(Properties.CONTROLLER_PASSWORD, null);
private static final boolean CONTROLLER_SECURED_VIA_PROXY = Properties.getBoolean(Properties.CONTROLLER_SECURED_VIA_PROXY);
private static final String CONTROLLER_EXTERNAL_URI = Properties.get(Properties.CONTROLLER_EXTERNAL_URI, null);
private static final String TRIGGER_URL = Properties.get(Properties.TRIGGER_URL, null);
private static final String BEARER_TOKEN;
private static final Comparator<ControllerPhase> PHASE_COMPARATOR = Comparator
.<ControllerPhase, Long> comparing(ControllerPhase::absoluteStartTime).thenComparing(p -> p.definition().name);
private static final BinaryOperator<Run> LAST_RUN_OPERATOR = (r1, r2) -> r1.id.compareTo(r2.id) > 0 ? r1 : r2;
private static final String DATAKEY = "[/**DATAKEY**/]";
static {
byte[] token = new byte[48];
new SecureRandom().nextBytes(token);
BEARER_TOKEN = Base64.getEncoder().encodeToString(token);
}
final ControllerVerticle controller;
HttpServer httpServer;
String baseURL;
/**
 * Builds and starts the controller's HTTP endpoint: configures optional TLS
 * (JKS keystore or PEM key/cert pairs), optional basic-auth protection, the
 * static web UI, the REST API router and the web-CLI websocket handler.
 * {@code countDown} is completed (or failed) once the listen attempt finishes.
 */
ControllerServer(ControllerVerticle controller, CountDown countDown) {
   this.controller = controller;
   HttpServerOptions options = new HttpServerOptions();
   // TLS configuration: a JKS keystore takes precedence over PEM files.
   if (KEYSTORE_PATH != null) {
      options.setSsl(true).setUseAlpn(true).setKeyCertOptions(
            new JksOptions().setPath(KEYSTORE_PATH).setPassword(KEYSTORE_PASSWORD));
   } else if (PEM_CERTS != null || PEM_KEYS != null) {
      PemKeyCertOptions pem = new PemKeyCertOptions();
      // Both properties accept comma-separated lists of file paths.
      if (PEM_CERTS != null) {
         for (String certPath : PEM_CERTS.split(",")) {
            pem.addCertPath(certPath.trim());
         }
      }
      if (PEM_KEYS != null) {
         for (String keyPath : PEM_KEYS.split(",")) {
            pem.addKeyPath(keyPath.trim());
         }
      }
      options.setSsl(true).setUseAlpn(true).setKeyCertOptions(pem);
   }
   Router router = Router.router(controller.getVertx());
   if (CONTROLLER_PASSWORD != null) {
      // Refuse to send basic-auth credentials over plaintext unless the user
      // explicitly declares that a TLS-terminating proxy sits in front.
      if (!options.isSsl() && !CONTROLLER_SECURED_VIA_PROXY) {
         throw new IllegalStateException("Server uses basic authentication scheme (" + Properties.CONTROLLER_PASSWORD +
               " is set) but it does not use TLS connections. If the confidentiality is guaranteed by a proxy set -D" +
               Properties.CONTROLLER_SECURED_VIA_PROXY + "=true.");
      }
      log.info("Server is protected using a password.");
      router.route().handler(new BasicAuthHandler());
   }
   StaticHandler staticHandler = StaticHandler.create().setCachingEnabled(true);
   router.route("/").handler(staticHandler);
   router.route("/web/*").handler(staticHandler);
   router.route("/favicon.ico").handler(FaviconHandler.create(controller.getVertx(), "webroot/favicon.ico"));
   // ApiRouter registers all REST endpoints on the router as a side effect.
   new ApiRouter(this, router);
   // System properties override the verticle config; defaults: 0.0.0.0:8090.
   String controllerHost = Properties.get(Properties.CONTROLLER_HOST,
         controller.getConfig().getString(Properties.CONTROLLER_HOST, "0.0.0.0"));
   int controllerPort = Properties.getInt(Properties.CONTROLLER_PORT,
         controller.getConfig().getInteger(Properties.CONTROLLER_PORT, 8090));
   WebCLI webCLI = new WebCLI(controller.getVertx());
   httpServer = controller.getVertx().createHttpServer(options).requestHandler(router)
         .webSocketHandler(webCLI)
         .listen(controllerPort, controllerHost, serverResult -> {
            if (serverResult.succeeded()) {
               String host = controllerHost;
               // Can't advertise 0.0.0.0 as a routable address; fall back to
               // the local host name (or "localhost" if that cannot be resolved).
               if (host.equals("0.0.0.0")) {
                  try {
                     host = InetAddress.getLocalHost().getHostName();
                  } catch (UnknownHostException e) {
                     host = "localhost";
                  }
               }
               // The external URI property wins over the derived base URL, e.g.
               // when the controller is reached through a proxy or route.
               if (CONTROLLER_EXTERNAL_URI == null) {
                  baseURL = (options.isSsl() ? "https://" : "http://") + host + ":" + serverResult.result().actualPort();
               } else {
                  baseURL = CONTROLLER_EXTERNAL_URI;
               }
               webCLI.setConnectionOptions(host, serverResult.result().actualPort(), options.isSsl());
               log.info("Hyperfoil controller listening on {}", baseURL);
            }
            countDown.handle(serverResult.mapEmpty());
         });
}
/**
 * Shuts down the HTTP server and propagates the outcome to {@code stopFuture}.
 * The previous version completed the promise successfully even when the close
 * failed; {@code Promise} is a {@code Handler<AsyncResult<Void>>}, so passing
 * it directly forwards both success and the failure cause.
 */
void stop(Promise<Void> stopFuture) {
   httpServer.close(stopFuture);
}
/**
 * Serves the OpenAPI definition bundled on the classpath as {@code openapi.yaml}.
 * Responds with a plain-text placeholder when the resource is missing and 500
 * when it cannot be read. The {@code x-epoch-millis} header carries the
 * controller's current time.
 */
@Override
public void openApi(RoutingContext ctx) {
   // try-with-resources: the original leaked the classloader resource stream.
   // A null resource is legal in try-with-resources and is simply not closed.
   try (InputStream stream = ApiService.class.getClassLoader().getResourceAsStream("openapi.yaml")) {
      Buffer payload;
      String contentType;
      if (stream == null) {
         payload = Buffer.buffer("API definition not available");
         contentType = MIME_TYPE_TEXT_PLAIN;
      } else {
         payload = Buffer.buffer(Util.toString(stream));
         contentType = MIME_TYPE_YAML;
      }
      ctx.response()
            .putHeader(HttpHeaders.CONTENT_TYPE.toString(), contentType)
            .putHeader("x-epoch-millis", String.valueOf(System.currentTimeMillis()))
            .end(payload);
   } catch (IOException e) {
      log.error("Cannot read OpenAPI definition", e);
      ctx.response().setStatusCode(HttpResponseStatus.INTERNAL_SERVER_ERROR.code())
            .setStatusMessage("Cannot read OpenAPI definition.").end();
   }
}
/** Ends the response with {@code entity} encoded as JSON, pretty-printed on demand. */
private void respondWithJson(RoutingContext ctx, boolean pretty, Object entity) {
   ctx.response()
         .putHeader(HttpHeaders.CONTENT_TYPE, MIME_TYPE_JSON)
         .end(pretty ? Json.encodePrettily(entity) : Json.encode(entity));
}

/** Ends the response with a pre-built {@link JsonObject}, always pretty-printed. */
private void respondWithJson(RoutingContext ctx, JsonObject entity) {
   ctx.response()
         .putHeader(HttpHeaders.CONTENT_TYPE, MIME_TYPE_JSON)
         .end(entity.encodePrettily());
}
/** Responds with the JSON list of benchmarks known to the controller. */
@Override
public void listBenchmarks(RoutingContext ctx) {
   respondWithJson(ctx, true, controller.getBenchmarks());
}

/** Responds with the JSON list of benchmark templates known to the controller. */
@Override
public void listTemplates(RoutingContext ctx) {
   respondWithJson(ctx, true, controller.getTemplates());
}

/** JSON uploads are delegated to the YAML handler unchanged. */
@Override
public void addBenchmark$application_json(RoutingContext ctx, String ifMatch, String storedFilesBenchmark) {
   addBenchmark$text_vnd_yaml(ctx, ifMatch, storedFilesBenchmark);
}
/**
 * Parses {@code source} and registers it either as a template (when the parsed
 * source still contains template parameters) or, after resolving it with no
 * parameters, as a concrete benchmark. Responds 400 when source or data is
 * missing.
 *
 * @param prevVersion presumably the If-Match version used for optimistic
 *                    concurrency; a conflict surfaces as 409 via sendReply —
 *                    TODO confirm against controller.addTemplate/addBenchmark.
 */
private void addBenchmarkAndReply(RoutingContext ctx, String source, BenchmarkData data, String prevVersion)
      throws ParserException {
   if (source == null || data == null) {
      ctx.response().setStatusCode(HttpResponseStatus.BAD_REQUEST.code()).end("Cannot read benchmark.");
      return;
   }
   BenchmarkSource benchmarkSource = BenchmarkParser.instance().createSource(source, data);
   if (benchmarkSource.isTemplate()) {
      Future<Void> future = controller.addTemplate(benchmarkSource, prevVersion);
      sendReply(ctx, future, benchmarkSource.name);
   } else {
      Benchmark benchmark = BenchmarkParser.instance().buildBenchmark(benchmarkSource, Collections.emptyMap());
      addBenchmarkAndReply(ctx, benchmark, prevVersion);
   }
}
/**
 * Common reply for benchmark/template registration: 204 with a Location header
 * pointing at the stored benchmark on success, 409 when the stored version
 * changed concurrently (VersionConflictException), 500 on any other failure.
 */
private void sendReply(RoutingContext ctx, Future<Void> future, String name) {
   String location = baseURL + "/benchmark/" + encode(name);
   future.onSuccess(nil -> {
      ctx.response()
            .setStatusCode(HttpResponseStatus.NO_CONTENT.code())
            .putHeader(HttpHeaders.LOCATION, location).end();
   }).onFailure(throwable -> {
      if (throwable instanceof VersionConflictException) {
         ctx.response().setStatusCode(HttpResponseStatus.CONFLICT.code()).end();
      } else {
         ctx.response().setStatusCode(HttpResponseStatus.INTERNAL_SERVER_ERROR.code()).end();
      }
   });
}
/**
 * Validates that the benchmark's agent configuration matches the controller's
 * deployment mode (clustered controllers require agents, standalone forbids
 * them), then registers the benchmark and replies via {@link #sendReply}.
 */
private void addBenchmarkAndReply(RoutingContext ctx, Benchmark benchmark, String prevVersion) {
   if (benchmark == null) {
      ctx.response().setStatusCode(HttpResponseStatus.BAD_REQUEST.code()).end("Cannot read benchmark.");
      return;
   }
   if (benchmark.agents().length == 0 && controller.getVertx().isClustered()) {
      ctx.response().setStatusCode(HttpResponseStatus.BAD_REQUEST.code())
            .end("Hyperfoil controller is clustered but the benchmark does not define any agents.");
      return;
   } else if (benchmark.agents().length != 0 && !controller.getVertx().isClustered()) {
      ctx.response().setStatusCode(HttpResponseStatus.BAD_REQUEST.code())
            .end("Hyperfoil runs in standalone mode but the benchmark defines agents for clustering");
      return;
   }
   sendReply(ctx, controller.addBenchmark(benchmark, prevVersion), benchmark.name());
}
/**
 * URL-encodes {@code string} as UTF-8 (used when building Location headers).
 * Uses the Java 10+ {@code URLEncoder.encode(String, Charset)} overload — the
 * file already relies on Java 11 APIs (String.lines, Files.readString) — which
 * removes the impossible UnsupportedEncodingException path of the old
 * charset-name overload.
 */
private static String encode(String string) {
   return URLEncoder.encode(string, StandardCharsets.UTF_8);
}
/**
 * Registers a benchmark from a file already present on the controller host.
 * The request body is a text/uri-list naming exactly one file: URI (or bare
 * path) which must resolve under the directory configured by the
 * {@code LOAD_DIR} property; everything else is rejected with 400 (or 503
 * when local loading is disabled).
 */
@Override
public void addBenchmark$text_uri_list(RoutingContext ctx, String ifMatch, String storedFilesBenchmark) {
   var loadDirProperty = Properties.get(Properties.LOAD_DIR, null);
   if (loadDirProperty == null) {
      log.error("Loading controller local benchmarks is not enabled, set the {} property to enable.",
            Properties.LOAD_DIR);
      ctx.response().setStatusCode(HttpResponseStatus.SERVICE_UNAVAILABLE.code())
            .end("Loading controller local benchmarks is not enabled.");
      return;
   }
   var loadDirPath = Paths.get(loadDirProperty).toAbsolutePath();
   String body = ctx.getBodyAsString();
   if (body == null || body.isEmpty()) {
      log.error("Benchmark is empty, load failed.");
      ctx.response().setStatusCode(HttpResponseStatus.BAD_REQUEST.code()).end("Benchmark is empty.");
      return;
   }
   // text/uri-list: blank lines and lines starting with '#' are comments and
   // are ignored; unparseable URIs are silently dropped as well.
   var uris = body.lines()
         .map(String::trim)
         .filter(Predicate.not(String::isEmpty))
         .filter(Predicate.not(l -> l.startsWith("#")))
         .flatMap(l -> {
            try {
               return Stream.of(new URI(l));
            } catch (URISyntaxException e) {
               return Stream.empty();
            }
         })
         .collect(Collectors.toList());
   if (uris.isEmpty()) {
      log.error("No Benchmark URIs specified, load failed.");
      ctx.response().setStatusCode(HttpResponseStatus.BAD_REQUEST.code()).end("No Benchmark URIs specified.");
      return;
   }
   if (uris.size() > 1) {
      log.error("Multiple Benchmark URIs specified, load failed.");
      ctx.response().setStatusCode(HttpResponseStatus.BAD_REQUEST.code()).end("Multiple Benchmark URIs specified.");
      return;
   }
   var uri = uris.get(0);
   // Only file: URIs (or scheme-less plain paths) are supported.
   if (uri.getScheme() != null && !"file".equals(uri.getScheme())) {
      log.error("Unsupported URI scheme of {} specified, load failed.", uri.getScheme());
      ctx.response().setStatusCode(HttpResponseStatus.BAD_REQUEST.code())
            .end(uri.getScheme() + " scheme URIs are not supported.");
      return;
   }
   var localPath = (uri.getScheme() == null ? Paths.get(uri.getPath()) : Paths.get(uri)).toAbsolutePath();
   // Path-containment check prevents escaping the configured load directory.
   if (!localPath.startsWith(loadDirPath) || !Files.isRegularFile(localPath)) {
      log.error("Unknown controller local benchmark {}.", localPath);
      ctx.response().setStatusCode(HttpResponseStatus.BAD_REQUEST.code()).end("Unknown controller local benchmark.");
      return;
   }
   try {
      String source = Files.readString(localPath);
      BenchmarkData data = new LocalBenchmarkData(localPath);
      if (storedFilesBenchmark != null) {
         // sanitize to prevent directory escape via the benchmark name
         storedFilesBenchmark = BenchmarkData.sanitize(storedFilesBenchmark);
         data = new PersistedBenchmarkData(Controller.BENCHMARK_DIR.resolve(storedFilesBenchmark + ".data"));
      }
      addBenchmarkAndReply(ctx, source, data, ifMatch);
   } catch (ParserException | BenchmarkDefinitionException | IOException e) {
      respondParsingError(ctx, e);
   }
}
/**
 * Registers a benchmark from a YAML request body. When
 * {@code storedFilesBenchmark} is set, external files referenced by the
 * benchmark are resolved from that benchmark's previously stored data
 * directory instead of the (empty) upload.
 */
@Override
public void addBenchmark$text_vnd_yaml(RoutingContext ctx, String ifMatch, String storedFilesBenchmark) {
   String source = ctx.getBodyAsString();
   if (source == null || source.isEmpty()) {
      log.error("Benchmark is empty, upload failed.");
      ctx.response().setStatusCode(HttpResponseStatus.BAD_REQUEST.code()).end("Benchmark is empty.");
      return;
   }
   try {
      BenchmarkData data = BenchmarkData.EMPTY;
      if (storedFilesBenchmark != null) {
         // sanitize to prevent directory escape via the benchmark name
         storedFilesBenchmark = BenchmarkData.sanitize(storedFilesBenchmark);
         data = new PersistedBenchmarkData(Controller.BENCHMARK_DIR.resolve(storedFilesBenchmark + ".data"));
      }
      addBenchmarkAndReply(ctx, source, data, ifMatch);
   } catch (ParserException | BenchmarkDefinitionException e) {
      respondParsingError(ctx, e);
   }
}
/** Logs a parse/definition failure and replies 400 with the chained causes. */
private void respondParsingError(RoutingContext ctx, Exception e) {
   log.error("Failed to read benchmark", e);
   ctx.response().setStatusCode(HttpResponseStatus.BAD_REQUEST.code())
         .end("Cannot read benchmark: " + Util.explainCauses(e));
}
/**
 * Registers a benchmark uploaded as a Java-serialized object (used by the CLI).
 * Deserialization failures are usually a CLI/controller version mismatch, so
 * the 400 response includes the stack trace truncated at the Util frame to
 * help diagnose which class failed to deserialize.
 */
@Override
public void addBenchmark$application_java_serialized_object(RoutingContext ctx, String ifMatch,
      String storedFilesBenchmark) {
   // Serialized benchmarks already carry their data files, so stored data
   // cannot be combined with this upload form.
   if (storedFilesBenchmark != null) {
      log.warn("Ignoring parameter useStoredData for serialized benchmark upload.");
   }
   byte[] bytes = ctx.getBody().getBytes();
   try {
      Benchmark benchmark = Util.deserialize(bytes);
      addBenchmarkAndReply(ctx, benchmark, ifMatch);
   } catch (IOException | ClassNotFoundException e) {
      log.error("Failed to deserialize", e);
      StringBuilder message = new StringBuilder(
            "Cannot read benchmark - the controller (server) version and CLI version are probably not in sync.\n");
      message.append("This partial stack-track might help you diagnose the problematic part:\n---\n");
      for (StackTraceElement ste : e.getStackTrace()) {
         message.append(ste).append('\n');
         if (ste.getClassName().equals(Util.class.getName())) {
            break;
         }
      }
      message.append("---\n");
      ctx.response().setStatusCode(HttpResponseStatus.BAD_REQUEST.code()).end(message.toString());
   }
}
/**
 * Handles multi-part benchmark upload: the part named {@code benchmark} is the
 * YAML source, every other part is an attached data file. When
 * {@code storedFilesBenchmark} is set, uploaded files are persisted into that
 * benchmark's data directory and the benchmark subsequently reads its files
 * from there.
 */
@Override
public void addBenchmark$multipart_form_data(RoutingContext ctx, String ifMatch, String storedFilesBenchmark) {
   String source = null;
   BenchmarkData data = new ProvidedBenchmarkData();
   for (FileUpload upload : ctx.fileUploads()) {
      byte[] bytes;
      try {
         bytes = Files.readAllBytes(Paths.get(upload.uploadedFileName()));
      } catch (IOException e) {
         log.error("Cannot read uploaded file {}", upload.uploadedFileName(), e);
         ctx.response().setStatusCode(HttpResponseStatus.INTERNAL_SERVER_ERROR.code()).end();
         return;
      }
      if (upload.name().equals("benchmark")) {
         try {
            source = new String(bytes, upload.charSet());
         } catch (UnsupportedEncodingException e) {
            // Client declared an unknown charset: fall back to UTF-8.
            source = new String(bytes, StandardCharsets.UTF_8);
         }
      } else {
         ((ProvidedBenchmarkData) data).files.put(upload.fileName(), bytes);
      }
   }
   if (source == null) {
      ctx.response().setStatusCode(HttpResponseStatus.BAD_REQUEST.code())
            .end("Multi-part definition missing benchmark=source-file.yaml");
      return;
   }
   try {
      if (storedFilesBenchmark != null) {
         // sanitize to prevent directory escape
         storedFilesBenchmark = BenchmarkData.sanitize(storedFilesBenchmark);
         Path dataDirPath = Controller.BENCHMARK_DIR.resolve(storedFilesBenchmark + ".data");
         log.info("Trying to use stored files from {}, adding files from request: {}", dataDirPath, data.files().keySet());
         if (!data.files().isEmpty()) {
            File dataDir = dataDirPath.toFile();
            //noinspection ResultOfMethodCallIgnored
            dataDir.mkdirs();
            // NOTE(review): if mkdirs() fails the uploaded files are silently
            // not persisted — verify this best-effort behavior is intended.
            if (dataDir.exists() && dataDir.isDirectory()) {
               try {
                  PersistedBenchmarkData.store(data.files(), dataDirPath);
               } catch (IOException e) {
                  ctx.response().setStatusCode(HttpResponseStatus.INTERNAL_SERVER_ERROR.code())
                        .end("Failed to store benchmark files.");
                  // FIX: the original fell through here after ending the
                  // response and then tried to write a second response for the
                  // same request in addBenchmarkAndReply.
                  return;
               }
            }
         }
         data = new PersistedBenchmarkData(dataDirPath);
      }
      addBenchmarkAndReply(ctx, source, data, ifMatch);
   } catch (ParserException | BenchmarkDefinitionException e) {
      respondParsingError(ctx, e);
   }
}
/**
 * Returns the YAML source of a benchmark, falling back to templates when no
 * benchmark of that name exists; 404 when neither is found.
 */
@Override
public void getBenchmark$text_vnd_yaml(RoutingContext ctx, String name) {
   Benchmark benchmark = controller.getBenchmark(name);
   if (benchmark == null) {
      BenchmarkSource template = controller.getTemplate(name);
      if (template == null) {
         ctx.response().setStatusCode(HttpResponseStatus.NOT_FOUND.code()).end("No benchmark or template '" + name + "'.");
      } else {
         sendYamlBenchmark(ctx, template, template.version);
      }
   } else {
      sendYamlBenchmark(ctx, benchmark.source(), benchmark.version());
   }
}
/**
 * Sends the preserved YAML source with its version as the ETag header; each
 * attached data file name is advertised through an {@code x-file} header.
 * Replies 406 when the benchmark did not preserve its original source.
 */
private void sendYamlBenchmark(RoutingContext ctx, BenchmarkSource source, String version) {
   if (source == null) {
      ctx.response()
            .setStatusCode(HttpResponseStatus.NOT_ACCEPTABLE.code())
            .end("Benchmark does not preserve the original source.");
   } else {
      HttpServerResponse response = ctx.response()
            .putHeader(HttpHeaders.CONTENT_TYPE, "text/vnd.yaml; charset=UTF-8")
            .putHeader(HttpHeaders.ETAG.toString(), version);
      source.data.files().keySet().forEach(file -> response.putHeader("x-file", file));
      response.end(source.yaml);
   }
}
/** Returns the benchmark as a Java-serialized object (consumed by the CLI). */
@Override
public void getBenchmark$application_java_serialized_object(RoutingContext ctx, String name) {
   withBenchmark(ctx, name, benchmark -> sendSerializedBenchmark(ctx, benchmark));
}
/**
 * Deletes the named benchmark: 204 on success, 404 when it does not exist,
 * 500 on internal failure. Uses the HttpResponseStatus constants used
 * throughout this class instead of magic status literals, logs the failure
 * (previously silent), and avoids ending the response with a null message.
 */
@Override
public void deleteBenchmark(RoutingContext ctx, String name) {
   try {
      if (controller.deleteBenchmark(name)) {
         ctx.response().setStatusCode(HttpResponseStatus.NO_CONTENT.code()).end();
      } else {
         ctx.response().setStatusCode(HttpResponseStatus.NOT_FOUND.code()).end("Could not find benchmark " + name);
      }
   } catch (Throwable t) {
      log.error("Failed to delete benchmark {}", name, t);
      // Throwable.getMessage() may be null; fall back to the class description.
      String message = t.getMessage() != null ? t.getMessage() : t.toString();
      ctx.response().setStatusCode(HttpResponseStatus.INTERNAL_SERVER_ERROR.code()).end(message);
   }
}
/**
 * Serializes the benchmark and ends the response with the raw bytes; 500 when
 * serialization fails.
 */
private void sendSerializedBenchmark(RoutingContext ctx, Benchmark benchmark) {
   try {
      byte[] bytes = Util.serialize(benchmark);
      ctx.response()
            .putHeader(HttpHeaders.CONTENT_TYPE, MIME_TYPE_SERIALIZED)
            .end(Buffer.buffer(bytes));
   } catch (IOException e) {
      log.error("Failed to serialize", e);
      ctx.response().setStatusCode(HttpResponseStatus.INTERNAL_SERVER_ERROR.code()).end("Error encoding benchmark.");
   }
}
/**
 * Starts a run of the named benchmark (or a template resolved with
 * {@code templateParam} key=value pairs). When a trigger URL is configured —
 * either on the benchmark or globally — and the request did not come from the
 * CI job itself ({@code xTriggerJob} absent), a run is pre-created and the
 * caller is redirected (301) to the CI trigger with BENCHMARK/RUN_ID query
 * parameters; the CI job is then expected to call back with the run id.
 * Replies 202 with the run info on success, 403 when the run cannot start or
 * was already started, 404 when the benchmark is unknown.
 */
@Override
public void startBenchmark(RoutingContext ctx, String name, String desc, String xTriggerJob, String runId,
      List<String> templateParam) {
   Benchmark benchmark = controller.getBenchmark(name);
   if (benchmark == null) {
      BenchmarkSource template = controller.getTemplate(name);
      if (template == null) {
         ctx.response().setStatusCode(HttpResponseStatus.NOT_FOUND.code()).end("Benchmark not found");
         return;
      }
      benchmark = templateToBenchmark(ctx, template, templateParam);
      if (benchmark == null) {
         // templateToBenchmark already sent the error response.
         return;
      }
   }
   // Benchmark-level trigger URL overrides the global property.
   String triggerUrl = benchmark.triggerUrl() != null ? benchmark.triggerUrl() : TRIGGER_URL;
   if (triggerUrl != null) {
      if (xTriggerJob == null) {
         Run run = controller.createRun(benchmark, desc);
         // Make sure query parameters can be appended to the trigger URL.
         if (!triggerUrl.endsWith("&") && !triggerUrl.endsWith("?")) {
            if (triggerUrl.contains("?")) {
               triggerUrl = triggerUrl + "&";
            } else {
               triggerUrl = triggerUrl + "?";
            }
         }
         ctx.response()
               .setStatusCode(HttpResponseStatus.MOVED_PERMANENTLY.code())
               .putHeader(HttpHeaders.LOCATION, triggerUrl + "BENCHMARK=" + name + "&RUN_ID=" + run.id)
               .putHeader("x-run-id", run.id)
               .end("This controller is configured to trigger jobs through CI instance.");
         return;
      }
   }
   Run run;
   if (runId == null) {
      run = controller.createRun(benchmark, desc);
   } else {
      // Resuming a pre-created run (CI flow): it must exist and not be started.
      run = controller.run(runId);
      if (run == null || run.startTime != Long.MIN_VALUE) {
         ctx.response().setStatusCode(HttpResponseStatus.FORBIDDEN.code()).end("Run already started");
         return;
      }
   }
   String error = controller.startBenchmark(run);
   if (error == null) {
      ctx.response().setStatusCode(HttpResponseStatus.ACCEPTED.code())
            .putHeader(HttpHeaders.LOCATION, baseURL + "/run/" + run.id)
            .putHeader(HttpHeaders.CONTENT_TYPE, MIME_TYPE_JSON)
            .end(Json.encodePrettily(runInfo(run, false)));
   } else {
      ctx.response()
            .setStatusCode(HttpResponseStatus.FORBIDDEN.code()).end(error);
   }
}
/**
 * Resolves a template into a concrete benchmark using key=value pairs from
 * {@code templateParam} (a bare key maps to the empty string). Parameters with
 * defaults may be omitted; missing mandatory parameters, missing external
 * files and parse errors all produce a 400 response and a {@code null} return
 * (callers must not reply again when null is returned).
 */
private Benchmark templateToBenchmark(RoutingContext ctx, BenchmarkSource template, List<String> templateParam) {
   Map<String, String> paramMap = new HashMap<>();
   for (String item : templateParam) {
      int index = item.indexOf("=");
      if (index < 0) {
         paramMap.put(item, "");
      } else {
         paramMap.put(item.substring(0, index), item.substring(index + 1));
      }
   }
   // Mandatory parameters are those declared without a default value.
   List<String> missingParams = template.paramsWithDefaults.entrySet().stream()
         .filter(entry -> entry.getValue() == null).map(Map.Entry::getKey)
         .filter(param -> !paramMap.containsKey(param)).collect(Collectors.toList());
   if (missingParams.isEmpty()) {
      try {
         return BenchmarkParser.instance().buildBenchmark(template, paramMap);
      } catch (BenchmarkData.MissingFileException e) {
         ctx.response().setStatusCode(HttpResponseStatus.BAD_REQUEST.code())
               .end("This benchmark is a template; external files are not uploaded for templates and the run command must append them when the benchmark is first run.");
         return null;
      } catch (ParserException | BenchmarkDefinitionException e) {
         ctx.response().setStatusCode(HttpResponseStatus.BAD_REQUEST.code()).end(Util.explainCauses(e));
         return null;
      }
   } else {
      ctx.response().setStatusCode(HttpResponseStatus.BAD_REQUEST.code()).end(
            "Benchmark " + template.name + " is missing these mandatory parameters: " + missingParams);
      return null;
   }
}
/**
 * Responds with the benchmark's structure: for templates, its parameters (with
 * defaults) and — when {@code templateParam} values are supplied — the YAML
 * rendering of the resolved benchmark; for concrete benchmarks, the rendering
 * alone. Collections longer than {@code maxCollectionSize} are truncated in
 * the rendering. Fixes the 404 message which was missing the space before the
 * quoted name ("template'" vs. "template '" used elsewhere in this class).
 */
@Override
public void getBenchmarkStructure(RoutingContext ctx, String name, int maxCollectionSize, List<String> templateParam) {
   Benchmark benchmark = controller.getBenchmark(name);
   if (benchmark == null) {
      BenchmarkSource template = controller.getTemplate(name);
      if (template == null) {
         ctx.response().setStatusCode(HttpResponseStatus.NOT_FOUND.code()).end("No benchmark or template '" + name + "'.");
      } else {
         String content = null;
         if (!templateParam.isEmpty()) {
            benchmark = templateToBenchmark(ctx, template, templateParam);
            if (benchmark == null) {
               // templateToBenchmark already sent the error response.
               return;
            } else {
               content = createStructure(maxCollectionSize, benchmark);
            }
         }
         respondWithJson(ctx, false, new Client.BenchmarkStructure(template.paramsWithDefaults, content));
      }
   } else {
      String content = createStructure(maxCollectionSize, benchmark);
      respondWithJson(ctx, false, new Client.BenchmarkStructure(Collections.emptyMap(), content));
   }
}
/**
 * Streams all data files attached to a benchmark or template as a single
 * multipart/form-data response with a random boundary; each file becomes a
 * part named "file" carrying its filename. 404 when neither a benchmark nor a
 * template of that name exists.
 */
@Override
public void getBenchmarkFiles(RoutingContext ctx, String name) {
   Benchmark benchmark = controller.getBenchmark(name);
   Map<String, byte[]> files;
   if (benchmark == null) {
      BenchmarkSource template = controller.getTemplate(name);
      if (template == null) {
         ctx.response().setStatusCode(HttpResponseStatus.NOT_FOUND.code()).end("No benchmark or template '" + name + "'");
         return;
      } else {
         files = template.data.files();
      }
   } else {
      files = benchmark.files();
   }
   // Random (non-secret) boundary; UUID format cannot collide with file bytes
   // in practice.
   ThreadLocalRandom random = ThreadLocalRandom.current();
   String boundary = new UUID(random.nextLong(), random.nextLong()).toString();
   HttpServerResponse response = ctx.response();
   response.putHeader(HttpHeaders.CONTENT_TYPE, "multipart/form-data; boundary=\"" + boundary + "\"");
   // Chunked: total length is unknown up front.
   response.setChunked(true);
   response.write("--" + boundary);
   // NOTE(review): RFC 2046 multipart grammar requires CRLF line breaks; this
   // writes bare '\n' — verify all consumers of this endpoint tolerate that.
   for (var file : files.entrySet()) {
      response.write("\n");
      response.write(HttpHeaders.CONTENT_TYPE + ": application/octet-stream\n");
      response.write(HttpHeaders.CONTENT_LENGTH + ": " + file.getValue().length + "\n");
      response.write(HttpHeaders.CONTENT_DISPOSITION + ": form-data; name=\"file\"; filename=\"" + file.getKey() + "\"\n\n");
      response.write(Buffer.buffer(file.getValue()));
      response.write("\n--" + boundary);
   }
   // Closing delimiter: the trailing "--" completes "--boundary--".
   response.write("--");
   response.end();
}
/**
 * Renders the benchmark structure to a YAML string, truncating collections
 * longer than {@code maxCollectionSize}.
 */
private String createStructure(int maxCollectionSize, Benchmark benchmark) {
   ByteArrayOutputStream buffer = new ByteArrayOutputStream();
   try (PrintStream printer = new PrintStream(buffer, true, StandardCharsets.UTF_8)) {
      new YamlVisitor(printer, maxCollectionSize).walk(benchmark);
   }
   return buffer.toString(StandardCharsets.UTF_8);
}
/**
 * Lists all runs as JSON. With {@code details} each entry carries the full run
 * info (minus phases); otherwise only the id and the cancelled/completed/
 * persisted flags are populated.
 */
@Override
public void listRuns(RoutingContext ctx, boolean details) {
   io.hyperfoil.controller.model.Run[] runs = controller.runs().stream()
         .map(r -> details ? runInfo(r, false)
               : new io.hyperfoil.controller.model.Run(r.id, null, null, null, r.cancelled,
                     r.completed, r.persisted, null, null, null, null))
         .toArray(io.hyperfoil.controller.model.Run[]::new);
   respondWithJson(ctx, true, runs);
}
/** Responds with the detailed JSON info (including phases) of a single run. */
@Override
public void getRun(RoutingContext ctx, String runId) {
   withRun(ctx, runId, run -> respondWithJson(ctx, true, runInfo(run, true)));
}

/** Responds with the per-agent CPU usage collected for a run's statistics. */
@Override
public void agentCpu(RoutingContext ctx, String runId) {
   withStats(ctx, runId, run -> respondWithJson(ctx, false, run.statisticsStore().cpuUsage()));
}
private io.hyperfoil.controller.model.Run runInfo(Run run, boolean reportPhases) {
String benchmark = null;
if (run.benchmark != null) {
benchmark = run.benchmark.name();
}
Date started = null, terminated = null;
if (run.startTime > Long.MIN_VALUE) {
started = new Date(run.startTime);
}
if (run.terminateTime.future().isComplete()) {
terminated = new Date(run.terminateTime.future().result());
}
List<io.hyperfoil.controller.model.Phase> phases = null;
if (reportPhases) {
long now = System.currentTimeMillis();
phases = run.phases.values().stream()
.filter(p -> !(p.definition().model instanceof Model.Noop))
.sorted(PHASE_COMPARATOR)
.map(phase -> {
Date phaseStarted = null, phaseTerminated = null;
StringBuilder remaining = null;
StringBuilder totalDuration = null;
if (phase.absoluteStartTime() > Long.MIN_VALUE) {
phaseStarted = new Date(phase.absoluteStartTime());
if (!phase.status().isTerminated()) {
| java | Apache-2.0 | 70ad9cad7ba105e88d7c62e7b65892ecd288f034 | 2026-01-05T02:38:03.557103Z | true |
Hyperfoil/Hyperfoil | https://github.com/Hyperfoil/Hyperfoil/blob/70ad9cad7ba105e88d7c62e7b65892ecd288f034/clustering/src/main/java/io/hyperfoil/clustering/Codecs.java | clustering/src/main/java/io/hyperfoil/clustering/Codecs.java | package io.hyperfoil.clustering;
import java.util.ArrayList;
import io.hyperfoil.clustering.messages.AgentControlMessage;
import io.hyperfoil.clustering.messages.AgentHello;
import io.hyperfoil.clustering.messages.AgentReadyMessage;
import io.hyperfoil.clustering.messages.AuxiliaryHello;
import io.hyperfoil.clustering.messages.ConnectionStatsMessage;
import io.hyperfoil.clustering.messages.DelayStatsCompletionMessage;
import io.hyperfoil.clustering.messages.ErrorMessage;
import io.hyperfoil.clustering.messages.ObjectCodec;
import io.hyperfoil.clustering.messages.PhaseChangeMessage;
import io.hyperfoil.clustering.messages.PhaseControlMessage;
import io.hyperfoil.clustering.messages.PhaseStatsCompleteMessage;
import io.hyperfoil.clustering.messages.RequestStatsMessage;
import io.hyperfoil.clustering.messages.SessionStatsMessage;
import io.vertx.core.Vertx;
import io.vertx.core.eventbus.EventBus;
/**
 * Registers the event-bus codecs for every message type exchanged between the
 * controller, agents and auxiliary nodes. Note: Vert.x allows only one default
 * codec per class, so this should run once per Vert.x instance.
 */
public final class Codecs {
   // Utility class; no instances.
   private Codecs() {
   }

   /** Registers a default codec for each clustered message type on the event bus. */
   public static void register(Vertx vertx) {
      EventBus eb = vertx.eventBus();
      eb.registerDefaultCodec(AgentHello.class, new AgentHello.Codec());
      eb.registerDefaultCodec(AgentControlMessage.class, new AgentControlMessage.Codec());
      eb.registerDefaultCodec(AgentReadyMessage.class, new AgentReadyMessage.Codec());
      eb.registerDefaultCodec(ArrayList.class, new ObjectCodec.ArrayList());
      eb.registerDefaultCodec(AuxiliaryHello.class, new AuxiliaryHello.Codec());
      eb.registerDefaultCodec(ConnectionStatsMessage.class, new ConnectionStatsMessage.Codec());
      eb.registerDefaultCodec(DelayStatsCompletionMessage.class, new DelayStatsCompletionMessage.Codec());
      eb.registerDefaultCodec(ErrorMessage.class, new ErrorMessage.Codec());
      eb.registerDefaultCodec(PhaseChangeMessage.class, new PhaseChangeMessage.Codec());
      eb.registerDefaultCodec(PhaseControlMessage.class, new PhaseControlMessage.Codec());
      eb.registerDefaultCodec(PhaseStatsCompleteMessage.class, new PhaseStatsCompleteMessage.Codec());
      eb.registerDefaultCodec(RequestStatsMessage.class, new RequestStatsMessage.Codec());
      eb.registerDefaultCodec(SessionStatsMessage.class, new SessionStatsMessage.Codec());
   }
}
| java | Apache-2.0 | 70ad9cad7ba105e88d7c62e7b65892ecd288f034 | 2026-01-05T02:38:03.557103Z | false |
Hyperfoil/Hyperfoil | https://github.com/Hyperfoil/Hyperfoil/blob/70ad9cad7ba105e88d7c62e7b65892ecd288f034/clustering/src/main/java/io/hyperfoil/clustering/ConnectionStatsSender.java | clustering/src/main/java/io/hyperfoil/clustering/ConnectionStatsSender.java | package io.hyperfoil.clustering;
import java.util.HashMap;
import java.util.Map;
import io.hyperfoil.clustering.messages.ConnectionStatsMessage;
import io.hyperfoil.core.impl.ConnectionStatsConsumer;
import io.hyperfoil.core.util.LowHigh;
import io.vertx.core.eventbus.EventBus;
/**
 * Accumulates per-authority, per-tag connection count ranges and flushes them
 * to the stats feed on the event bus when {@link #send()} is called.
 */
public class ConnectionStatsSender implements ConnectionStatsConsumer {
   private final EventBus eventBus;
   private final String address;
   private final String runId;
   // authority -> tag -> accumulated (low, high) connection counts
   private Map<String, Map<String, LowHigh>> stats = new HashMap<>();

   public ConnectionStatsSender(EventBus eb, String address, String runId) {
      this.eventBus = eb;
      this.address = address;
      this.runId = runId;
   }

   /** Publishes the accumulated stats and resets the accumulator. */
   public void send() {
      eventBus.send(Feeds.STATS, new ConnectionStatsMessage(address, runId, System.currentTimeMillis(), stats));
      // The event bus may deliver asynchronously, so start a fresh map rather
      // than clearing (and thereby mutating) the one that was just sent.
      stats = new HashMap<>();
   }

   /** Adds one observation of [min, max] connections for the authority/tag pair. */
   @Override
   public void accept(String authority, String tag, int min, int max) {
      Map<String, LowHigh> perTag = stats.computeIfAbsent(authority, ignored -> new HashMap<>());
      LowHigh range = perTag.computeIfAbsent(tag, ignored -> new LowHigh());
      range.low += min;
      range.high += max;
   }
}
| java | Apache-2.0 | 70ad9cad7ba105e88d7c62e7b65892ecd288f034 | 2026-01-05T02:38:03.557103Z | false |
Hyperfoil/Hyperfoil | https://github.com/Hyperfoil/Hyperfoil/blob/70ad9cad7ba105e88d7c62e7b65892ecd288f034/clustering/src/main/java/io/hyperfoil/clustering/BaseAuxiliaryVerticle.java | clustering/src/main/java/io/hyperfoil/clustering/BaseAuxiliaryVerticle.java | package io.hyperfoil.clustering;
import java.util.Objects;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import io.hyperfoil.Hyperfoil;
import io.hyperfoil.clustering.messages.AuxiliaryHello;
import io.vertx.core.AbstractVerticle;
import io.vertx.core.impl.VertxInternal;
import io.vertx.core.spi.cluster.NodeListener;
/**
 * Base verticle for auxiliary (non-agent, non-controller) cluster members.
 * On start it verifies it actually clustered with the controller, then
 * announces itself on the discovery feed every second until the controller
 * acknowledges. When the controller node later leaves the cluster, the whole
 * Vert.x instance is shut down so the container can restart cleanly.
 */
public class BaseAuxiliaryVerticle extends AbstractVerticle implements NodeListener {
   protected final Logger log = LogManager.getLogger(getClass());
   // This member's cluster node id; stays "unknown" when not clustered.
   private String nodeId = "unknown";
   // The controller's node id, learned from the registration reply body.
   private String controllerNodeId;
   private int registrationAttempt = 0;

   @Override
   public void start() {
      if (vertx.isClustered()) {
         if (vertx instanceof VertxInternal) {
            VertxInternal internal = (VertxInternal) this.vertx;
            // Fewer than two nodes means the controller is not in this cluster.
            if (internal.getClusterManager().getNodes().size() < 2) {
               log.info("Did not cluster with Hyperfoil Controller, shutting down.");
               Hyperfoil.shutdownVertx(vertx);
               return;
            }
            nodeId = internal.getClusterManager().getNodeId();
            internal.getClusterManager().nodeListener(this);
         }
      }
      // Retry registration every second until the controller replies.
      vertx.setPeriodic(1000, timerId -> {
         vertx.eventBus().request(Feeds.DISCOVERY, new AuxiliaryHello("CE Receiver", nodeId, deploymentID()), response -> {
            if (response.succeeded()) {
               log.info("Successfully registered at controller {}!", response.result().body());
               vertx.cancelTimer(timerId);
               controllerNodeId = (String) response.result().body();
               onRegistered();
            } else {
               // Log only the first 10 failures to avoid flooding the log.
               if (registrationAttempt++ < 10) {
                  log.info("Auxiliary registration failed (attempt {})", registrationAttempt);
                  if (registrationAttempt == 10) {
                     log.info("Suspending registration failure logs.");
                  }
               }
            }
         });
      });
   }

   @Override
   public void nodeAdded(String nodeID) {
      // No action needed when a node joins.
   }

   @Override
   public void nodeLeft(String nodeID) {
      if (Objects.equals(nodeID, controllerNodeId)) {
         // Since we assume running in Openshift/Kubernetes we will let the container restart.
         // Otherwise we would have to start a new cluster (that means a new Vert.x) which
         // is more complicated.
         log.info("Controller left the cluster, shutting down...");
         Hyperfoil.shutdownVertx(vertx);
      }
   }

   /** Hook for subclasses; invoked once the controller acknowledged registration. */
   public void onRegistered() {
   }
}
| java | Apache-2.0 | 70ad9cad7ba105e88d7c62e7b65892ecd288f034 | 2026-01-05T02:38:03.557103Z | false |
Hyperfoil/Hyperfoil | https://github.com/Hyperfoil/Hyperfoil/blob/70ad9cad7ba105e88d7c62e7b65892ecd288f034/clustering/src/main/java/io/hyperfoil/clustering/AgentInfo.java | clustering/src/main/java/io/hyperfoil/clustering/AgentInfo.java | package io.hyperfoil.clustering;
import java.util.HashMap;
import java.util.Map;
import io.hyperfoil.api.deployment.DeployedAgent;
import io.hyperfoil.api.session.PhaseInstance;
/**
 * Controller-side bookkeeping for a single agent: its identity, deployment handles
 * and the last known status of each phase running on it. Fields are package-visible
 * because the controller mutates them directly.
 */
class AgentInfo {
   // Logical agent name from the benchmark definition.
   final String name;
   // Numeric agent id, unique within a run.
   final int id;
   // Cluster node ID once the agent said hello.
   String nodeId;
   // Vert.x deployment ID of the agent verticle.
   String deploymentId;
   Status status = Status.STARTING;
   // Last reported status per phase name, as seen from this agent.
   Map<String, PhaseInstance.Status> phases = new HashMap<>();
   // Handle to the deployed process/pod, used for undeployment.
   DeployedAgent deployedAgent;

   AgentInfo(String name, int id) {
      this.name = name;
      this.id = id;
   }

   @Override
   public String toString() {
      return name + " (" + nodeId + "/" + deploymentId + ")";
   }

   /** Lifecycle of an agent as tracked by the controller. */
   public enum Status {
      STARTING,
      REGISTERED,
      READY,
      STOPPING,
      STOPPED,
      FAILED;
   }
}
| java | Apache-2.0 | 70ad9cad7ba105e88d7c62e7b65892ecd288f034 | 2026-01-05T02:38:03.557103Z | false |
Hyperfoil/Hyperfoil | https://github.com/Hyperfoil/Hyperfoil/blob/70ad9cad7ba105e88d7c62e7b65892ecd288f034/clustering/src/main/java/io/hyperfoil/clustering/Run.java | clustering/src/main/java/io/hyperfoil/clustering/Run.java | package io.hyperfoil.clustering;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.function.Supplier;
import io.hyperfoil.api.config.Benchmark;
import io.hyperfoil.api.config.Phase;
import io.hyperfoil.api.session.GlobalData;
import io.hyperfoil.controller.StatisticsStore;
import io.hyperfoil.impl.Util;
import io.vertx.core.Promise;
/**
 * Controller-side state of a single benchmark run: phases, agents, errors,
 * timing and (lazily loaded) statistics.
 */
class Run {
   // Unique run identifier assigned by the controller.
   final String id;
   // Directory where this run's artifacts are persisted.
   final Path dir;
   Benchmark benchmark;
   // Controller view of each phase, keyed by phase name.
   final Map<String, ControllerPhase> phases = new HashMap<>();
   final List<AgentInfo> agents = new ArrayList<>();
   // Phases indexed by numeric phase id, see phase(int).
   final Phase[] phasesById;
   final List<Error> errors = new ArrayList<>();
   final List<RunHookOutput> hookResults = new ArrayList<>();
   long deployTimerId;
   String description;
   // Absolute start timestamp; Long.MIN_VALUE until the run actually starts.
   long startTime = Long.MIN_VALUE;
   // Completed with the termination timestamp when the run ends.
   Promise<Long> terminateTime = Promise.promise();
   boolean cancelled;
   boolean completed;
   // set to true once the all.json and related files are persisted in the filesystem
   boolean persisted;
   // Lazy loader for the statistics store, see statisticsStore().
   Supplier<StatisticsStore> statsSupplier;
   private StatisticsStore statisticsStore;
   // Global data gathered from completed phases, to be passed to subsequent phases.
   Map<String, GlobalData.Element> newGlobalData = new HashMap<>();

   Run(String id, Path dir, Benchmark benchmark) {
      this.id = id;
      this.dir = dir;
      this.benchmark = benchmark;
      this.phasesById = benchmark.phasesById();
   }

   void initStore(StatisticsStore store) {
      this.statisticsStore = store;
   }

   /**
    * Earliest future moment some phase needs attention: the next scheduled start,
    * the next nominal finish of a running phase, or the next maxDuration-based
    * forced termination. Returns Long.MAX_VALUE when nothing is pending.
    */
   long nextTimestamp() {
      long nextPhaseStart = phases.values().stream()
            .filter(phase -> phase.status() == ControllerPhase.Status.NOT_STARTED && phase.definition().startTime() >= 0)
            .mapToLong(phase -> startTime + phase.definition().startTime()).min().orElse(Long.MAX_VALUE);
      long nextPhaseFinish = phases.values().stream()
            .filter(phase -> phase.status() == ControllerPhase.Status.RUNNING)
            .mapToLong(phase -> phase.absoluteStartTime() + phase.definition().duration()).min().orElse(Long.MAX_VALUE);
      long nextPhaseTerminate = phases.values().stream()
            .filter(phase -> (phase.status() == ControllerPhase.Status.RUNNING
                  || phase.status() == ControllerPhase.Status.FINISHED) && phase.definition().maxDuration() >= 0)
            .mapToLong(phase -> phase.absoluteStartTime() + phase.definition().maxDuration()).min().orElse(Long.MAX_VALUE);
      return Math.min(Math.min(nextPhaseStart, nextPhaseFinish), nextPhaseTerminate);
   }

   /**
    * Phases eligible to start right now: not yet started, past their scheduled
    * start time, all startAfter dependencies finished, all startAfterStrict
    * dependencies terminated, and any startWithDelay reference phase started
    * long enough ago.
    */
   ControllerPhase[] getAvailablePhases() {
      return phases.values().stream().filter(phase -> phase.status() == ControllerPhase.Status.NOT_STARTED &&
            startTime + phase.definition().startTime() <= System.currentTimeMillis() &&
            phase.definition().startAfter().stream().allMatch(dep -> phases.get(dep).status().isFinished()) &&
            phase.definition().startAfterStrict().stream().allMatch(dep -> phases.get(dep).status().isTerminated()) &&
            (phase.definition().startWithDelay() == null ||
                  phases.get(phase.definition().startWithDelay().phase).status().isStarted() &&
                        phases.get(phase.definition().startWithDelay().phase).absoluteStartTime()
                              + phase.definition().startWithDelay().delay <= System.currentTimeMillis()))
            .toArray(ControllerPhase[]::new);
   }

   /** Maps a numeric phase id back to the phase name, or null when out of range. */
   public String phase(int phaseId) {
      if (phaseId < phasesById.length) {
         return phasesById[phaseId].name();
      } else {
         return null;
      }
   }

   /** Returns the statistics store, loading it lazily via statsSupplier if needed. */
   public StatisticsStore statisticsStore() {
      if (statisticsStore != null) {
         return statisticsStore;
      } else if (statsSupplier != null) {
         return statisticsStore = statsSupplier.get();
      } else {
         return null;
      }
   }

   public boolean isLoaded() {
      return statisticsStore != null;
   }

   /** Drops the in-memory statistics store (it can be re-loaded via the supplier). */
   public void unload() {
      statisticsStore = null;
   }

   /** An error reported during the run, optionally attributed to a specific agent. */
   public static class Error {
      public final AgentInfo agent;
      public final Throwable error;

      Error(AgentInfo agent, Throwable error) {
         this.agent = agent;
         this.error = error;
      }

      @Override
      public String toString() {
         return (agent == null ? "" : agent.name + ": ") + Util.explainCauses(error);
      }
   }

   /** Captured output of a named run hook. */
   public static class RunHookOutput {
      public final String name;
      public final String output;

      RunHookOutput(String name, String output) {
         this.name = name;
         this.output = output;
      }
   }
}
| java | Apache-2.0 | 70ad9cad7ba105e88d7c62e7b65892ecd288f034 | 2026-01-05T02:38:03.557103Z | false |
Hyperfoil/Hyperfoil | https://github.com/Hyperfoil/Hyperfoil/blob/70ad9cad7ba105e88d7c62e7b65892ecd288f034/clustering/src/main/java/io/hyperfoil/clustering/ControllerPhase.java | clustering/src/main/java/io/hyperfoil/clustering/ControllerPhase.java | package io.hyperfoil.clustering;
import java.util.HashMap;
import java.util.Map;
import java.util.stream.Collectors;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import io.hyperfoil.api.config.Phase;
import io.hyperfoil.api.session.GlobalData;
/**
 * Controller-side state of a single phase: its definition, current status,
 * timing, failure flag and accumulated global data from agents.
 */
public class ControllerPhase {
   private static final Logger log = LogManager.getLogger(ControllerPhase.class);
   private final Phase definition;
   private Status status = Status.NOT_STARTED;
   // Long.MIN_VALUE until the phase actually starts / completes.
   private long absoluteStartTime = Long.MIN_VALUE;
   private long absoluteCompletionTime = Long.MIN_VALUE;
   private boolean failed;
   // When non-null, stats for this phase must not be considered complete before this time.
   private Long delayStatsCompletionUntil = null;
   // Per-key accumulators merging global data elements received from agents.
   private Map<String, GlobalData.Accumulator> globalData = new HashMap<>();

   public ControllerPhase(Phase definition) {
      this.definition = definition;
   }

   public Phase definition() {
      return definition;
   }

   public Status status() {
      return status;
   }

   public long absoluteStartTime() {
      return absoluteStartTime;
   }

   /**
    * Advances the status. The ordinal comparison makes transitions monotonic:
    * a stale or out-of-order update can never move the phase backwards.
    */
   public void status(String runId, Status status) {
      if (this.status.ordinal() < status.ordinal()) {
         log.info("{} {} changing status {} to {}", runId, definition.name, this.status, status);
         this.status = status;
      }
   }

   public void absoluteStartTime(long time) {
      absoluteStartTime = time;
   }

   public long absoluteCompletionTime() {
      return absoluteCompletionTime;
   }

   public void absoluteCompletionTime(long absoluteCompletionTime) {
      this.absoluteCompletionTime = absoluteCompletionTime;
   }

   public Long delayStatsCompletionUntil() {
      return delayStatsCompletionUntil;
   }

   public void setFailed() {
      this.failed = true;
   }

   public boolean isFailed() {
      return failed;
   }

   /** Extends the stats-completion delay; multiple requests keep the latest deadline. */
   public void delayStatsCompletionUntil(long time) {
      delayStatsCompletionUntil = delayStatsCompletionUntil == null ? time : Math.max(time, delayStatsCompletionUntil);
   }

   /** Merges one agent's global data elements into the per-key accumulators. */
   public void addGlobalData(Map<String, GlobalData.Element> data) {
      if (data == null) {
         return;
      }
      for (var entry : data.entrySet()) {
         log.debug("Received global data {} -> {}", entry.getKey(), entry.getValue());
         GlobalData.Accumulator accumulator = globalData.get(entry.getKey());
         if (accumulator == null) {
            globalData.put(entry.getKey(), accumulator = entry.getValue().newAccumulator());
         }
         accumulator.add(entry.getValue());
      }
   }

   /** Completes all accumulators, producing the final global data for this phase. */
   public Map<String, GlobalData.Element> completeGlobalData() {
      return globalData.entrySet().stream().collect(Collectors.toMap(Map.Entry::getKey, entry -> entry.getValue().complete()));
   }

   /** Phase lifecycle; helper predicates rely on the declaration order (ordinals). */
   enum Status {
      NOT_STARTED,
      STARTING,
      RUNNING,
      FINISHING,
      FINISHED,
      TERMINATING,
      TERMINATED,
      CANCELLED;

      public boolean isFinished() {
         return ordinal() >= FINISHED.ordinal();
      }

      public boolean isTerminated() {
         return ordinal() >= TERMINATED.ordinal();
      }

      public boolean isStarted() {
         return ordinal() >= RUNNING.ordinal();
      }
   }
}
| java | Apache-2.0 | 70ad9cad7ba105e88d7c62e7b65892ecd288f034 | 2026-01-05T02:38:03.557103Z | false |
Hyperfoil/Hyperfoil | https://github.com/Hyperfoil/Hyperfoil/blob/70ad9cad7ba105e88d7c62e7b65892ecd288f034/clustering/src/main/java/io/hyperfoil/clustering/Feeds.java | clustering/src/main/java/io/hyperfoil/clustering/Feeds.java | package io.hyperfoil.clustering;
/**
 * Event-bus addresses ("feeds") used for communication between the controller,
 * agents and auxiliary verticles.
 */
public final class Feeds {
   public static final String DISCOVERY = "discovery-feed";
   public static final String CONTROL = "control-feed";
   public static final String RESPONSE = "response-feed";
   public static final String STATS = "stats-feed";

   // Constant holder; prevent instantiation.
   private Feeds() {
   }
}
| java | Apache-2.0 | 70ad9cad7ba105e88d7c62e7b65892ecd288f034 | 2026-01-05T02:38:03.557103Z | false |
Hyperfoil/Hyperfoil | https://github.com/Hyperfoil/Hyperfoil/blob/70ad9cad7ba105e88d7c62e7b65892ecd288f034/clustering/src/main/java/io/hyperfoil/clustering/Zipper.java | clustering/src/main/java/io/hyperfoil/clustering/Zipper.java | package io.hyperfoil.clustering;
import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Arrays;
import java.util.LinkedList;
import java.util.Queue;
import java.util.zip.ZipEntry;
import java.util.zip.ZipOutputStream;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import io.vertx.core.http.HttpHeaders;
import io.vertx.core.http.HttpServerResponse;
/**
 * Streams the contents of a directory (recursively) as a ZIP archive into a
 * chunked HTTP response, respecting the response's write-queue backpressure:
 * writing pauses when the queue is full and resumes from the drain handler.
 */
public class Zipper {
   private static final Logger log = LogManager.getLogger(Zipper.class);
   private final HttpServerResponse response;
   private final ZipOutputStream zipStream;
   // Pending files/directories; directories are expanded lazily as they are polled.
   private final Queue<File> files = new LinkedList<>();
   private final Path dir;

   public Zipper(HttpServerResponse response, Path dir) {
      this.response = response;
      this.zipStream = new ZipOutputStream(new OutputStreamAdapter(response));
      this.dir = dir;
      // NOTE(review): assumes 'dir' exists and is readable; listFiles() would return null otherwise.
      files.addAll(Arrays.asList(dir.toFile().listFiles()));
      response.putHeader(HttpHeaders.CONTENT_TYPE, "application/zip");
      response.setChunked(true);
      response.drainHandler(nil -> run());
   }

   /** Writes queued files until the queue is drained or the response back-pressures. */
   public void run() {
      if (response.closed()) {
         return;
      }
      while (!response.writeQueueFull()) {
         File file = files.poll();
         if (file == null) {
            // Everything written: close the ZIP (flushes the central directory)
            // and end the response whether or not the close succeeded.
            try {
               zipStream.close();
            } catch (IOException e) {
               log.error("Failed closing zip stream", e);
            } finally {
               response.end();
            }
            // BUGFIX: we must stop here; the original fell through and
            // dereferenced the null 'file' below, throwing NullPointerException.
            return;
         }
         if (file.isDirectory()) {
            files.addAll(Arrays.asList(file.listFiles()));
         } else {
            Path path = file.toPath();
            try {
               // Entry names are relative to the root directory being zipped.
               zipStream.putNextEntry(new ZipEntry(dir.relativize(path).toString()));
               zipStream.write(Files.readAllBytes(path));
               zipStream.closeEntry();
            } catch (IOException e) {
               log.error("Failed writing file {}", path, e);
               response.end();
               return;
            }
         }
      }
   }
}
| java | Apache-2.0 | 70ad9cad7ba105e88d7c62e7b65892ecd288f034 | 2026-01-05T02:38:03.557103Z | false |
Hyperfoil/Hyperfoil | https://github.com/Hyperfoil/Hyperfoil/blob/70ad9cad7ba105e88d7c62e7b65892ecd288f034/clustering/src/main/java/io/hyperfoil/clustering/util/PersistedBenchmarkData.java | clustering/src/main/java/io/hyperfoil/clustering/util/PersistedBenchmarkData.java | package io.hyperfoil.clustering.util;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Collections;
import java.util.Map;
import java.util.stream.Collectors;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import io.hyperfoil.api.config.BenchmarkData;
/**
 * {@link BenchmarkData} implementation backed by files persisted in a single
 * directory; file names are sanitized before being resolved against the directory.
 */
public class PersistedBenchmarkData implements BenchmarkData {
   private static final Logger log = LogManager.getLogger(PersistedBenchmarkData.class);
   private final Path dir;

   /**
    * Writes all given files into {@code dir}, sanitizing each name.
    *
    * @throws IOException when any file cannot be written
    */
   public static void store(Map<String, byte[]> files, Path dir) throws IOException {
      for (Map.Entry<String, byte[]> entry : files.entrySet()) {
         Files.write(dir.resolve(BenchmarkData.sanitize(entry.getKey())), entry.getValue());
      }
   }

   public PersistedBenchmarkData(Path dir) {
      this.dir = dir;
   }

   @Override
   public InputStream readFile(String file) {
      String sanitized = BenchmarkData.sanitize(file);
      try {
         return new FileInputStream(dir.resolve(sanitized).toFile());
      } catch (FileNotFoundException e) {
         throw new MissingFileException("Cannot load file " + file + "(" + sanitized + ") from directory " + dir, file, e);
      }
   }

   @Override
   public Map<String, byte[]> files() {
      if (!dir.toFile().exists() || !dir.toFile().isDirectory()) {
         return Collections.emptyMap();
      }
      // Files.list() opens a directory handle that must be closed explicitly;
      // the original leaked it. Fully-qualified Stream avoids adding an import.
      try (java.util.stream.Stream<Path> listing = Files.list(dir)) {
         return listing.collect(Collectors.toMap(path -> path.getFileName().toString(), (Path path) -> {
            try {
               return Files.readAllBytes(path);
            } catch (IOException e) {
               log.error("Cannot read file " + path, e);
               throw new RuntimeException(e);
            }
         }));
      } catch (IOException e) {
         throw new RuntimeException("Cannot list directory " + dir, e);
      }
   }
}
| java | Apache-2.0 | 70ad9cad7ba105e88d7c62e7b65892ecd288f034 | 2026-01-05T02:38:03.557103Z | false |
Hyperfoil/Hyperfoil | https://github.com/Hyperfoil/Hyperfoil/blob/70ad9cad7ba105e88d7c62e7b65892ecd288f034/clustering/src/main/java/io/hyperfoil/clustering/util/PersistenceUtil.java | clustering/src/main/java/io/hyperfoil/clustering/util/PersistenceUtil.java | package io.hyperfoil.clustering.util;
import java.io.File;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Map;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.message.FormattedMessage;
import io.hyperfoil.api.config.BenchmarkData;
import io.hyperfoil.api.config.BenchmarkSource;
import io.hyperfoil.core.parser.BenchmarkParser;
import io.hyperfoil.core.parser.ParserException;
/**
 * Persists, loads and deletes benchmark sources on disk. Each benchmark is stored
 * as {@code <name>.yaml} with its extra files in a sibling {@code <name>.data} directory.
 */
public class PersistenceUtil {
   private static final Logger log = LogManager.getLogger(PersistenceUtil.class);

   /** Stores the benchmark YAML and its data files under {@code dir}; failures are logged, not thrown. */
   public static void store(BenchmarkSource source, Path dir) {
      if (!dir.toFile().exists()) {
         if (!dir.toFile().mkdirs()) {
            log.error("Failed to create directory {}", dir);
         }
      }
      String name = BenchmarkData.sanitize(source.name);
      Path path = dir.resolve(name + ".yaml");
      try {
         Files.write(path, source.yaml.getBytes(StandardCharsets.UTF_8));
         log.info("Stored benchmark '{}' in {}", source.name, path);
      } catch (IOException e) {
         log.error(new FormattedMessage("Failed to persist benchmark {} to {}", source.name, path), e);
      }
      Path dataDirPath = dir.resolve(name + ".data");
      File dataDir = dataDirPath.toFile();
      Map<String, byte[]> files = source.data.files();
      if (dataDir.exists()) {
         if (!dataDir.isDirectory()) {
            // A stale regular file occupies the data dir name; replace it with a directory.
            if (!dataDir.delete() || !dataDir.mkdir()) {
               log.error("Couldn't delete/create data dir {}", dataDir);
               return;
            }
         }
         // Make sure the directory is empty
         //noinspection ConstantConditions
         for (File file : dataDir.listFiles()) {
            // BUGFIX: the condition was inverted — it warned when deletion SUCCEEDED
            // and stayed silent when the stale file could not be removed.
            if (!file.delete()) {
               log.warn("Could not delete old file {}", file);
            }
         }
         if (files.isEmpty()) {
            // No data to store: remove the now-empty directory altogether.
            //noinspection ResultOfMethodCallIgnored
            dataDir.delete();
         }
      } else if (!files.isEmpty()) {
         if (!dataDir.mkdir()) {
            log.error("Couldn't create data dir {}", dataDir);
            return;
         }
      }
      try {
         PersistedBenchmarkData.store(files, dataDirPath);
      } catch (IOException e) {
         log.error("Couldn't persist files for benchmark " + source.name, e);
      }
   }

   /**
    * Loads a benchmark source from a persisted {@code .yaml} file, attaching the
    * matching {@code .data} directory when present. Returns {@code null} for
    * unreadable/unparsable files and for non-benchmark entries.
    */
   public static BenchmarkSource load(Path file) {
      String filename = file.getFileName().toString();
      if (filename.endsWith(".yaml")) {
         BenchmarkData data = BenchmarkData.EMPTY;
         String dataDirName = filename.substring(0, filename.length() - 5) + ".data";
         Path dataDirPath = file.getParent().resolve(dataDirName);
         File dataDir = dataDirPath.toFile();
         if (dataDir.exists()) {
            if (dataDir.isDirectory()) {
               data = new PersistedBenchmarkData(dataDirPath);
            } else {
               log.error("Expected data dir {} to be a directory!", dataDirName);
            }
         }
         try {
            BenchmarkSource source = BenchmarkParser.instance().createSource(Files.readString(file), data);
            log.info("Loaded benchmark from {}", file);
            return source;
         } catch (IOException e) {
            log.error("Cannot read file " + file, e);
         } catch (ParserException e) {
            log.error("Cannot parse file " + file, e);
         }
      } else if (filename.endsWith(".serialized")) {
         log.debug("Serialized benchmarks are not used anymore, ignoring {}", filename);
      } else if (file.toFile().isDirectory() && filename.endsWith(".data")) {
         log.debug("Ignoring directory {}", filename);
      } else {
         log.warn("Unknown benchmark file format: {}", file);
      }
      return null;
   }

   /**
    * Deletes all persisted artifacts of the named benchmark (YAML, legacy
    * serialized form and the data directory). Returns false when anything
    * could not be removed.
    */
   public static boolean delete(String name, Path dir) {
      name = BenchmarkData.sanitize(name);
      if (dir.resolve(name + ".yaml").toFile().delete()) {
         log.debug("Benchmark YAML was deleted");
      }
      if (dir.resolve(name + ".serialized").toFile().delete()) {
         log.debug("Serialized benchmark was deleted");
      }
      File dataDir = dir.resolve(name + ".data").toFile();
      if (dataDir.exists()) {
         if (dataDir.isDirectory()) {
            for (File file : dataDir.listFiles()) {
               if (!file.delete()) {
                  log.warn("Could not delete file {}", file);
                  return false;
               }
            }
         }
         if (!dataDir.delete()) {
            log.error("Couldn't delete data dir {}", dataDir);
            return false;
         }
      }
      return true;
   }
}
| java | Apache-2.0 | 70ad9cad7ba105e88d7c62e7b65892ecd288f034 | 2026-01-05T02:38:03.557103Z | false |
Hyperfoil/Hyperfoil | https://github.com/Hyperfoil/Hyperfoil/blob/70ad9cad7ba105e88d7c62e7b65892ecd288f034/clustering/src/main/java/io/hyperfoil/clustering/messages/SessionStatsMessage.java | clustering/src/main/java/io/hyperfoil/clustering/messages/SessionStatsMessage.java | package io.hyperfoil.clustering.messages;
import java.util.Map;
import io.hyperfoil.core.util.LowHigh;
/**
 * Periodic agent-to-controller message carrying per-phase min/max counts of
 * active sessions observed in the reported interval.
 */
public class SessionStatsMessage extends StatsMessage {
   // Timestamp (ms) of the reported interval.
   public final long timestamp;
   // Min/max active sessions per phase name.
   public final Map<String, LowHigh> sessionStats;

   public SessionStatsMessage(String address, String runId, long timestamp, Map<String, LowHigh> sessionStats) {
      super(address, runId);
      this.timestamp = timestamp;
      this.sessionStats = sessionStats;
   }

   public static class Codec extends ObjectCodec<SessionStatsMessage> {
   }
}
| java | Apache-2.0 | 70ad9cad7ba105e88d7c62e7b65892ecd288f034 | 2026-01-05T02:38:03.557103Z | false |
Hyperfoil/Hyperfoil | https://github.com/Hyperfoil/Hyperfoil/blob/70ad9cad7ba105e88d7c62e7b65892ecd288f034/clustering/src/main/java/io/hyperfoil/clustering/messages/AgentStatusMessage.java | clustering/src/main/java/io/hyperfoil/clustering/messages/AgentStatusMessage.java | package io.hyperfoil.clustering.messages;
import java.io.Serializable;
/**
 * Base class for status messages sent from an agent to the controller,
 * identifying the sending agent and the run they belong to.
 */
public abstract class AgentStatusMessage implements Serializable {
   // Event-bus/deployment identifier of the sending agent.
   protected final String senderId;
   protected final String runId;

   public AgentStatusMessage(String senderId, String runId) {
      this.senderId = senderId;
      this.runId = runId;
   }

   public String senderId() {
      return senderId;
   }

   public String runId() {
      return runId;
   }
}
| java | Apache-2.0 | 70ad9cad7ba105e88d7c62e7b65892ecd288f034 | 2026-01-05T02:38:03.557103Z | false |
Hyperfoil/Hyperfoil | https://github.com/Hyperfoil/Hyperfoil/blob/70ad9cad7ba105e88d7c62e7b65892ecd288f034/clustering/src/main/java/io/hyperfoil/clustering/messages/ConnectionStatsMessage.java | clustering/src/main/java/io/hyperfoil/clustering/messages/ConnectionStatsMessage.java | package io.hyperfoil.clustering.messages;
import java.util.Map;
import io.hyperfoil.core.util.LowHigh;
/**
 * Periodic agent-to-controller message with min/max connection counts,
 * grouped by target authority and then by connection type.
 */
public class ConnectionStatsMessage extends StatsMessage {
   // Timestamp (ms) of the reported interval.
   public final long timestamp;
   // target -> (connection type -> min/max count)
   public final Map<String, Map<String, LowHigh>> stats;

   public ConnectionStatsMessage(String address, String runId, long timestamp, Map<String, Map<String, LowHigh>> stats) {
      super(address, runId);
      this.timestamp = timestamp;
      this.stats = stats;
   }

   public static class Codec extends ObjectCodec<ConnectionStatsMessage> {
   }
}
| java | Apache-2.0 | 70ad9cad7ba105e88d7c62e7b65892ecd288f034 | 2026-01-05T02:38:03.557103Z | false |
Hyperfoil/Hyperfoil | https://github.com/Hyperfoil/Hyperfoil/blob/70ad9cad7ba105e88d7c62e7b65892ecd288f034/clustering/src/main/java/io/hyperfoil/clustering/messages/PhaseControlMessage.java | clustering/src/main/java/io/hyperfoil/clustering/messages/PhaseControlMessage.java | package io.hyperfoil.clustering.messages;
import java.io.Serializable;
import java.util.Map;
import io.hyperfoil.api.session.GlobalData;
/**
 * Controller-to-agent command driving the lifecycle of a single phase,
 * optionally carrying global data gathered from previously completed phases.
 */
public class PhaseControlMessage implements Serializable {
   private final Command command;
   // Name of the phase the command applies to.
   private final String phase;
   // Global data made available to the phase; may be null/empty.
   private final Map<String, GlobalData.Element> globalData;

   public PhaseControlMessage(Command command, String phase, Map<String, GlobalData.Element> globalData) {
      this.command = command;
      this.phase = phase;
      this.globalData = globalData;
   }

   public Command command() {
      return command;
   }

   public String phase() {
      return phase;
   }

   public Map<String, GlobalData.Element> globalData() {
      return globalData;
   }

   public enum Command {
      RUN,
      FINISH,
      TRY_TERMINATE,
      TERMINATE
   }

   public static class Codec extends ObjectCodec<PhaseControlMessage> {
   }
}
| java | Apache-2.0 | 70ad9cad7ba105e88d7c62e7b65892ecd288f034 | 2026-01-05T02:38:03.557103Z | false |
Hyperfoil/Hyperfoil | https://github.com/Hyperfoil/Hyperfoil/blob/70ad9cad7ba105e88d7c62e7b65892ecd288f034/clustering/src/main/java/io/hyperfoil/clustering/messages/StatsMessage.java | clustering/src/main/java/io/hyperfoil/clustering/messages/StatsMessage.java | package io.hyperfoil.clustering.messages;
import java.io.Serializable;
/**
 * Base class for statistics messages sent from agents, tagged with the
 * originating agent's address and the run identifier.
 */
public abstract class StatsMessage implements Serializable {
   public final String runId;
   // Address of the sending agent.
   public final String address;

   public StatsMessage(String address, String runId) {
      this.runId = runId;
      this.address = address;
   }
}
| java | Apache-2.0 | 70ad9cad7ba105e88d7c62e7b65892ecd288f034 | 2026-01-05T02:38:03.557103Z | false |
Hyperfoil/Hyperfoil | https://github.com/Hyperfoil/Hyperfoil/blob/70ad9cad7ba105e88d7c62e7b65892ecd288f034/clustering/src/main/java/io/hyperfoil/clustering/messages/AgentHello.java | clustering/src/main/java/io/hyperfoil/clustering/messages/AgentHello.java | package io.hyperfoil.clustering.messages;
import java.io.Serializable;
/**
 * First message an agent sends to the controller after startup, announcing its
 * identity (agent name, cluster node, deployment) and the run it belongs to.
 */
public class AgentHello implements Serializable {
   private final String name;
   private final String nodeId;
   private final String deploymentId;
   private final String runId;

   public AgentHello(String name, String nodeId, String deploymentId, String runId) {
      this.name = name;
      this.nodeId = nodeId;
      this.deploymentId = deploymentId;
      this.runId = runId;
   }

   public String name() {
      return name;
   }

   public String nodeId() {
      return nodeId;
   }

   public String deploymentId() {
      return deploymentId;
   }

   public String runId() {
      return runId;
   }

   public static class Codec extends ObjectCodec<AgentHello> {
   }
}
| java | Apache-2.0 | 70ad9cad7ba105e88d7c62e7b65892ecd288f034 | 2026-01-05T02:38:03.557103Z | false |
Hyperfoil/Hyperfoil | https://github.com/Hyperfoil/Hyperfoil/blob/70ad9cad7ba105e88d7c62e7b65892ecd288f034/clustering/src/main/java/io/hyperfoil/clustering/messages/PhaseChangeMessage.java | clustering/src/main/java/io/hyperfoil/clustering/messages/PhaseChangeMessage.java | /*
* JBoss, Home of Professional Open Source
* Copyright 2018 Red Hat Inc. and/or its affiliates and other contributors
* as indicated by the @authors tag. All rights reserved.
* See the copyright.txt in the distribution for a
* full listing of individual contributors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.hyperfoil.clustering.messages;
import java.util.Map;
import io.hyperfoil.api.session.GlobalData;
import io.hyperfoil.api.session.PhaseInstance;
import io.hyperfoil.impl.Util;
/**
 * Agent-to-controller notification that a phase changed status, including
 * CPU usage, any error, whether the session limit was exceeded and global
 * data produced by the phase.
 */
public class PhaseChangeMessage extends AgentStatusMessage {
   private final String phase;
   private final PhaseInstance.Status status;
   // True when the agent hit its configured session limit during this phase.
   private final boolean sessionLimitExceeded;
   // Human-readable CPU usage report; may be null.
   private final String cpuUsage;
   // Error that caused the status change, or null.
   private final Throwable error;
   private final Map<String, GlobalData.Element> globalData;

   public PhaseChangeMessage(String senderId, String runId, String phase, PhaseInstance.Status status,
         boolean sessionLimitExceeded, String cpuUsage, Throwable error, Map<String, GlobalData.Element> globalData) {
      super(senderId, runId);
      this.phase = phase;
      this.status = status;
      this.sessionLimitExceeded = sessionLimitExceeded;
      this.cpuUsage = cpuUsage;
      this.error = error;
      this.globalData = globalData;
   }

   @Override
   public String toString() {
      final StringBuilder sb = new StringBuilder("PhaseChangeMessage{");
      sb.append("senderId='").append(senderId).append('\'');
      sb.append(", phase=").append(phase);
      sb.append(", status=").append(status);
      sb.append(", cpuUsage=").append(cpuUsage);
      sb.append(", error=").append(Util.explainCauses(error));
      sb.append(", globalData=").append(globalData);
      sb.append('}');
      return sb.toString();
   }

   public String phase() {
      return phase;
   }

   public PhaseInstance.Status status() {
      return status;
   }

   public boolean sessionLimitExceeded() {
      return sessionLimitExceeded;
   }

   public Throwable getError() {
      return error;
   }

   public String cpuUsage() {
      return cpuUsage;
   }

   public Map<String, GlobalData.Element> globalData() {
      return globalData;
   }

   public static class Codec extends ObjectCodec<PhaseChangeMessage> {
   }
}
| java | Apache-2.0 | 70ad9cad7ba105e88d7c62e7b65892ecd288f034 | 2026-01-05T02:38:03.557103Z | false |
Hyperfoil/Hyperfoil | https://github.com/Hyperfoil/Hyperfoil/blob/70ad9cad7ba105e88d7c62e7b65892ecd288f034/clustering/src/main/java/io/hyperfoil/clustering/messages/AgentReadyMessage.java | clustering/src/main/java/io/hyperfoil/clustering/messages/AgentReadyMessage.java | package io.hyperfoil.clustering.messages;
/**
 * Agent-to-controller signal that the agent finished initialization and is
 * ready to run phases; carries no payload beyond the sender/run identity.
 */
public class AgentReadyMessage extends AgentStatusMessage {
   public AgentReadyMessage(String senderId, String runId) {
      super(senderId, runId);
   }

   public static class Codec extends ObjectCodec<AgentReadyMessage> {
   }
}
| java | Apache-2.0 | 70ad9cad7ba105e88d7c62e7b65892ecd288f034 | 2026-01-05T02:38:03.557103Z | false |
Hyperfoil/Hyperfoil | https://github.com/Hyperfoil/Hyperfoil/blob/70ad9cad7ba105e88d7c62e7b65892ecd288f034/clustering/src/main/java/io/hyperfoil/clustering/messages/AgentControlMessage.java | clustering/src/main/java/io/hyperfoil/clustering/messages/AgentControlMessage.java | package io.hyperfoil.clustering.messages;
import java.io.Serializable;
import io.hyperfoil.api.config.Benchmark;
/**
 * Controller-to-agent control command. The {@code param} payload is
 * command-specific: a {@code Benchmark} for {@link Command#INITIALIZE},
 * a {@code Boolean} (include inactive) for the LIST_* commands.
 */
public class AgentControlMessage implements Serializable {
   // Made final: this message is immutable like its sibling message classes.
   private final Command command;
   private final int agentId;
   private final Object param;

   public AgentControlMessage(Command command, int agentId, Object param) {
      this.command = command;
      this.agentId = agentId;
      this.param = param;
   }

   public Command command() {
      return command;
   }

   /** Payload accessor for {@link Command#INITIALIZE}. */
   public Benchmark benchmark() {
      return (Benchmark) param;
   }

   /** Payload accessor for {@link Command#LIST_SESSIONS}/{@link Command#LIST_CONNECTIONS}. */
   public boolean includeInactive() {
      return (Boolean) param;
   }

   public int agentId() {
      return agentId;
   }

   public enum Command {
      INITIALIZE,
      STOP,
      LIST_SESSIONS,
      LIST_CONNECTIONS
   }

   public static class Codec extends ObjectCodec<AgentControlMessage> {
   }
}
| java | Apache-2.0 | 70ad9cad7ba105e88d7c62e7b65892ecd288f034 | 2026-01-05T02:38:03.557103Z | false |
Hyperfoil/Hyperfoil | https://github.com/Hyperfoil/Hyperfoil/blob/70ad9cad7ba105e88d7c62e7b65892ecd288f034/clustering/src/main/java/io/hyperfoil/clustering/messages/DelayStatsCompletionMessage.java | clustering/src/main/java/io/hyperfoil/clustering/messages/DelayStatsCompletionMessage.java | package io.hyperfoil.clustering.messages;
/**
 * Agent request asking the controller to postpone marking a phase's statistics
 * as complete by the given delay (in milliseconds).
 */
public class DelayStatsCompletionMessage extends StatsMessage {
   // Numeric id of the phase whose stats completion should be delayed.
   public final int phaseId;
   // Requested delay in milliseconds.
   public final long delay;

   public DelayStatsCompletionMessage(String address, String runId, int phaseId, long delay) {
      super(address, runId);
      this.phaseId = phaseId;
      this.delay = delay;
   }

   public static class Codec extends ObjectCodec<DelayStatsCompletionMessage> {
   }
}
| java | Apache-2.0 | 70ad9cad7ba105e88d7c62e7b65892ecd288f034 | 2026-01-05T02:38:03.557103Z | false |
Hyperfoil/Hyperfoil | https://github.com/Hyperfoil/Hyperfoil/blob/70ad9cad7ba105e88d7c62e7b65892ecd288f034/clustering/src/main/java/io/hyperfoil/clustering/messages/PhaseStatsCompleteMessage.java | clustering/src/main/java/io/hyperfoil/clustering/messages/PhaseStatsCompleteMessage.java | package io.hyperfoil.clustering.messages;
import java.util.Objects;
/**
 * Agent notification that no more statistics will be sent for the given phase.
 */
public class PhaseStatsCompleteMessage extends StatsMessage {
   public final String phase;

   public PhaseStatsCompleteMessage(String address, String runId, String phase) {
      super(address, runId);
      // Phase name must always be present; fail fast otherwise.
      this.phase = Objects.requireNonNull(phase);
   }

   public static class Codec extends ObjectCodec<PhaseStatsCompleteMessage> {
   }
}
| java | Apache-2.0 | 70ad9cad7ba105e88d7c62e7b65892ecd288f034 | 2026-01-05T02:38:03.557103Z | false |
Hyperfoil/Hyperfoil | https://github.com/Hyperfoil/Hyperfoil/blob/70ad9cad7ba105e88d7c62e7b65892ecd288f034/clustering/src/main/java/io/hyperfoil/clustering/messages/RequestStatsMessage.java | clustering/src/main/java/io/hyperfoil/clustering/messages/RequestStatsMessage.java | package io.hyperfoil.clustering.messages;
import io.hyperfoil.api.statistics.StatisticsSnapshot;
/**
 * Agent-to-controller message carrying one statistics snapshot for a
 * (phase, step, metric) combination.
 */
public class RequestStatsMessage extends StatsMessage {
   public final int phaseId;
   public final int stepId;
   public final String metric;
   // Snapshot of request statistics gathered since the previous message.
   public final StatisticsSnapshot statistics;

   public RequestStatsMessage(String address, String runId, int phaseId, int stepId, String metric,
         StatisticsSnapshot statistics) {
      super(address, runId);
      this.phaseId = phaseId;
      this.stepId = stepId;
      this.metric = metric;
      this.statistics = statistics;
   }

   public static class Codec extends ObjectCodec<RequestStatsMessage> {
   }
}
| java | Apache-2.0 | 70ad9cad7ba105e88d7c62e7b65892ecd288f034 | 2026-01-05T02:38:03.557103Z | false |
Hyperfoil/Hyperfoil | https://github.com/Hyperfoil/Hyperfoil/blob/70ad9cad7ba105e88d7c62e7b65892ecd288f034/clustering/src/main/java/io/hyperfoil/clustering/messages/ObjectCodec.java | clustering/src/main/java/io/hyperfoil/clustering/messages/ObjectCodec.java | /*
* JBoss, Home of Professional Open Source
* Copyright 2018 Red Hat Inc. and/or its affiliates and other contributors
* as indicated by the @authors tag. All rights reserved.
* See the copyright.txt in the distribution for a
* full listing of individual contributors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.hyperfoil.clustering.messages;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.ObjectInput;
import java.io.ObjectInputStream;
import java.io.ObjectOutput;
import java.io.ObjectOutputStream;
import org.infinispan.commons.io.LazyByteArrayOutputStream;
import io.vertx.core.buffer.Buffer;
import io.vertx.core.eventbus.MessageCodec;
import io.vertx.core.eventbus.impl.codecs.ByteArrayMessageCodec;
/**
 * Generic Vert.x event-bus codec using Java serialization. The payload is a
 * length-prefixed serialized object.
 * <p>
 * NOTE(review): Java-native deserialization assumes all cluster members are
 * trusted; do not expose this codec to untrusted input.
 */
public class ObjectCodec<T> implements MessageCodec<T, T> {
   @Override
   public void encodeToWire(Buffer buffer, T object) {
      LazyByteArrayOutputStream bos = new LazyByteArrayOutputStream();
      // try-with-resources replaces the manual finally/close; closing the
      // ObjectOutputStream also closes the underlying byte stream.
      try (ObjectOutputStream out = new ObjectOutputStream(bos)) {
         out.writeObject(object);
         out.flush();
         // Length prefix followed by the raw serialized bytes.
         buffer.appendInt(bos.size());
         buffer.appendBytes(bos.getRawBuffer(), 0, bos.size());
      } catch (IOException e) {
         e.printStackTrace();
      }
   }

   @Override
   public T decodeFromWire(int position, Buffer buffer) {
      // Reuse the built-in byte[] codec to strip the length prefix.
      byte[] bytes = new ByteArrayMessageCodec().decodeFromWire(position, buffer);
      try (ObjectInputStream in = new ObjectInputStream(new ByteArrayInputStream(bytes))) {
         @SuppressWarnings("unchecked")
         T object = (T) in.readObject();
         return object;
      } catch (IOException | ClassNotFoundException e) {
         e.printStackTrace();
      }
      // Preserve original behavior: decoding failures yield null.
      return null;
   }

   @Override
   public T transform(T object) {
      // We cannot protect the sender against mutation in codec because the encodeToWire is not called
      // synchronously even if the eventBus.send() is invoked from event loop.
      return object;
   }

   @Override
   public String name() {
      // Each codec must have a unique name.
      // This is used to identify a codec when sending a message and for unregistering codecs.
      return this.getClass().getName();
   }

   @Override
   public byte systemCodecID() {
      // Always -1
      return -1;
   }

   /** ArrayList codec that snapshots the list on transform to guard against sender-side mutation. */
   public static class ArrayList extends ObjectCodec<java.util.ArrayList> {
      @SuppressWarnings("unchecked")
      @Override
      public java.util.ArrayList transform(java.util.ArrayList object) {
         return new java.util.ArrayList(object);
      }
   }
}
| java | Apache-2.0 | 70ad9cad7ba105e88d7c62e7b65892ecd288f034 | 2026-01-05T02:38:03.557103Z | false |
Hyperfoil/Hyperfoil | https://github.com/Hyperfoil/Hyperfoil/blob/70ad9cad7ba105e88d7c62e7b65892ecd288f034/clustering/src/main/java/io/hyperfoil/clustering/messages/AuxiliaryHello.java | clustering/src/main/java/io/hyperfoil/clustering/messages/AuxiliaryHello.java | package io.hyperfoil.clustering.messages;
import java.io.Serializable;
/**
 * Immutable hello message identifying an auxiliary component by name, cluster node id and
 * deployment id. Presumably sent when the component registers with the controller —
 * TODO confirm against the sender.
 */
public class AuxiliaryHello implements Serializable {
   // All fields are final; instances are safe to share across threads.
   private final String name;
   private final String nodeId;
   private final String deploymentId;

   /**
    * @param name         name of the auxiliary component.
    * @param nodeId       id of the cluster node the component runs on.
    * @param deploymentId deployment id of the component.
    */
   public AuxiliaryHello(String name, String nodeId, String deploymentId) {
      this.name = name;
      this.nodeId = nodeId;
      this.deploymentId = deploymentId;
   }

   /** @return name of the auxiliary component. */
   public String name() {
      return name;
   }

   /** @return id of the cluster node the component runs on. */
   public String nodeId() {
      return nodeId;
   }

   /** @return deployment id of the component. */
   public String deploymentId() {
      return deploymentId;
   }

   /** Event-bus codec; inherits Java-serialization encoding from {@code ObjectCodec}. */
   public static class Codec extends ObjectCodec<AuxiliaryHello> {
   }
}
| java | Apache-2.0 | 70ad9cad7ba105e88d7c62e7b65892ecd288f034 | 2026-01-05T02:38:03.557103Z | false |
Hyperfoil/Hyperfoil | https://github.com/Hyperfoil/Hyperfoil/blob/70ad9cad7ba105e88d7c62e7b65892ecd288f034/clustering/src/main/java/io/hyperfoil/clustering/messages/ErrorMessage.java | clustering/src/main/java/io/hyperfoil/clustering/messages/ErrorMessage.java | package io.hyperfoil.clustering.messages;
/**
 * Agent status message reporting an error, optionally marking it as fatal.
 */
public class ErrorMessage extends AgentStatusMessage {
   private final Throwable error;
   private final boolean fatal;

   /**
    * @param senderId id of the agent reporting the error (forwarded to the superclass).
    * @param runId    id of the run the error occurred in (forwarded to the superclass).
    * @param error    the reported throwable.
    * @param fatal    whether the error is fatal.
    */
   public ErrorMessage(String senderId, String runId, Throwable error, boolean fatal) {
      super(senderId, runId);
      this.error = error;
      this.fatal = fatal;
   }

   /** @return the reported throwable. */
   public Throwable error() {
      return error;
   }

   /** @return {@code true} if the error is fatal. */
   public boolean isFatal() {
      return fatal;
   }

   /** Event-bus codec; inherits Java-serialization encoding from {@code ObjectCodec}. */
   public static class Codec extends ObjectCodec<ErrorMessage> {
   }
}
| java | Apache-2.0 | 70ad9cad7ba105e88d7c62e7b65892ecd288f034 | 2026-01-05T02:38:03.557103Z | false |
Hyperfoil/Hyperfoil | https://github.com/Hyperfoil/Hyperfoil/blob/70ad9cad7ba105e88d7c62e7b65892ecd288f034/clustering/src/main/java/io/hyperfoil/clustering/webcli/WebUpload.java | clustering/src/main/java/io/hyperfoil/clustering/webcli/WebUpload.java | package io.hyperfoil.clustering.webcli;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.CountDownLatch;
import org.aesh.command.CommandDefinition;
import org.aesh.command.CommandException;
import org.aesh.command.CommandResult;
import org.aesh.command.option.Argument;
import io.hyperfoil.api.config.BenchmarkData;
import io.hyperfoil.api.config.BenchmarkSource;
import io.hyperfoil.cli.commands.BaseUploadCommand;
import io.hyperfoil.cli.context.HyperfoilCommandInvocation;
import io.hyperfoil.core.impl.ProvidedBenchmarkData;
@CommandDefinition(name = "upload", description = "Uploads benchmark definition to Hyperfoil Controller server")
public class WebUpload extends BaseUploadCommand {
   // Kept only so 'upload <file>' typed out of habit does not fail to parse; the value is unused.
   @Argument(description = "Argument ignored (provided only for compatibility).")
   String dummy;

   /**
    * Runs the WebCLI upload handshake: emits the upload magic marker, blocks on a latch until
    * the browser either supplies a URL / benchmark content or cancels, then parses and
    * registers the benchmark on the controller.
    */
   @Override
   public CommandResult execute(HyperfoilCommandInvocation invocation) throws CommandException {
      if (dummy != null && !dummy.isEmpty()) {
         invocation.println("Argument '" + dummy + "' ignored: you must open file dialogue in WebCLI using the button below.");
      }
      WebCliContext context = (WebCliContext) invocation.context();
      String updatedSource = null;
      String benchmarkUrl = null;
      CountDownLatch latch;
      // The latch is published on the context; the websocket handler counts it down when the
      // browser responds (see WebCLI message handling).
      synchronized (context) {
         latch = context.latch = new CountDownLatch(1);
      }
      // Magic marker: tells the WebCLI frontend to open the file-upload dialogue.
      invocation.println("__HYPERFOIL_UPLOAD_MAGIC__");
      try {
         latch.await();
      } catch (InterruptedException e1) {
         // interruption is cancel
      }
      try {
         synchronized (context) {
            context.latch = null;
            if (context.uploadUrl != null) {
               // Browser provided a URL to fetch the benchmark from.
               benchmarkUrl = context.uploadUrl;
            } else if (context.editBenchmark != null) {
               // Browser streamed the benchmark source directly.
               updatedSource = context.editBenchmark.toString();
            } else {
               // Neither field set => the latch was released by a cancel/interrupt.
               invocation.println("Upload cancelled.");
               return CommandResult.FAILURE;
            }
         }
      } finally {
         // NOTE(review): these resets happen outside the synchronized block, unlike the reads
         // above — presumably safe because the browser only writes while a latch is armed;
         // verify against the websocket handler.
         context.editBenchmark = null;
         context.uploadUrl = null;
      }
      // Pre-load any extra files named on the command line; each load may itself prompt the
      // browser and can be cancelled by interrupt.
      Map<String, byte[]> extraData = new HashMap<>();
      if (extraFiles != null) {
         for (String extraFile : extraFiles) {
            try {
               extraData.put(extraFile, context.loadFile(invocation, extraFile));
            } catch (InterruptedException e) {
               invocation.println("Benchmark upload cancelled.");
               return CommandResult.FAILURE;
            }
         }
      }
      BenchmarkSource source;
      if (updatedSource != null) {
         source = loadProvided(invocation, context, updatedSource, extraData);
      } else {
         source = loadFromUrl(invocation, benchmarkUrl, extraData);
      }
      if (source == null) {
         return CommandResult.FAILURE;
      }
      context.setServerBenchmark(context.client().register(source.yaml, source.data.files(), null, null));
      invocation.println("Benchmark " + source.name + " uploaded.");
      return CommandResult.SUCCESS;
   }

   /**
    * Parses benchmark source supplied by the browser, repeatedly asking the browser for any
    * file the benchmark references but which was not uploaded yet.
    *
    * @return the parsed source, or {@code null} if the user cancelled a file upload or parsing failed.
    * @throws CommandException propagated from {@code loadBenchmarkSource}.
    */
   private BenchmarkSource loadProvided(HyperfoilCommandInvocation invocation, WebCliContext context, String updatedSource,
         Map<String, byte[]> extraData) throws CommandException {
      ProvidedBenchmarkData data = new ProvidedBenchmarkData(extraData);
      for (;;) {
         try {
            return loadBenchmarkSource(invocation, updatedSource, data);
         } catch (CommandException e) {
            throw e;
         } catch (BenchmarkData.MissingFileException e) {
            // Benchmark references a file we don't have yet: fetch it from the browser and retry.
            try {
               data.files().put(e.file, context.loadFile(invocation, e.file));
            } catch (InterruptedException interruptedException) {
               invocation.println("Benchmark upload cancelled.");
               return null;
            }
         } catch (Exception e) {
            logError(invocation, e);
            return null;
         }
      }
   }
}
| java | Apache-2.0 | 70ad9cad7ba105e88d7c62e7b65892ecd288f034 | 2026-01-05T02:38:03.557103Z | false |
Hyperfoil/Hyperfoil | https://github.com/Hyperfoil/Hyperfoil/blob/70ad9cad7ba105e88d7c62e7b65892ecd288f034/clustering/src/main/java/io/hyperfoil/clustering/webcli/WebReport.java | clustering/src/main/java/io/hyperfoil/clustering/webcli/WebReport.java | package io.hyperfoil.clustering.webcli;
import java.nio.charset.StandardCharsets;
import org.aesh.command.CommandDefinition;
import org.aesh.command.CommandException;
import org.aesh.command.CommandResult;
import org.aesh.command.option.Option;
import io.hyperfoil.cli.commands.BaseRunIdCommand;
import io.hyperfoil.cli.context.HyperfoilCommandInvocation;
import io.hyperfoil.controller.Client;
import io.vertx.core.buffer.Buffer;
@CommandDefinition(name = "report", description = "Generate HTML report")
/**
 * WebCLI variant of the report command: fetches the HTML report for a run from the controller
 * and streams it to the browser using the direct-download websocket protocol.
 */
public class WebReport extends BaseRunIdCommand {
   @Option(shortName = 's', description = "Other file (in given run) to use as report input.")
   protected String source;

   @Override
   public CommandResult execute(HyperfoilCommandInvocation invocation) throws CommandException {
      Client.RunRef run = getRunRef(invocation);
      // Fetch the report before emitting any download markers so a client failure
      // surfaces as a plain error instead of a half-started download.
      byte[] reportBytes = run.report(source);
      String html = new String(reportBytes, StandardCharsets.UTF_8);
      invocation.println("Creating report...");
      // The magic marker plus file name tell the WebCLI frontend to save the following
      // binary frame as a file download.
      invocation.println("__HYPERFOIL_DIRECT_DOWNLOAD_MAGIC__");
      invocation.println(run.id() + ".html");
      WebCliContext context = (WebCliContext) invocation.context();
      context.sendBinaryMessage(Buffer.buffer(html));
      invocation.println("__HYPERFOIL_DIRECT_DOWNLOAD_END__");
      return CommandResult.SUCCESS;
   }
}
| java | Apache-2.0 | 70ad9cad7ba105e88d7c62e7b65892ecd288f034 | 2026-01-05T02:38:03.557103Z | false |
Hyperfoil/Hyperfoil | https://github.com/Hyperfoil/Hyperfoil/blob/70ad9cad7ba105e88d7c62e7b65892ecd288f034/clustering/src/main/java/io/hyperfoil/clustering/webcli/WebExport.java | clustering/src/main/java/io/hyperfoil/clustering/webcli/WebExport.java | package io.hyperfoil.clustering.webcli;
import org.aesh.command.CommandDefinition;
import org.aesh.command.CommandException;
import org.aesh.command.CommandResult;
import io.hyperfoil.cli.commands.BaseExportCommand;
import io.hyperfoil.cli.context.HyperfoilCommandInvocation;
import io.hyperfoil.controller.Client;
@CommandDefinition(name = "export", description = "Export run statistics.")
/**
 * WebCLI variant of the export command. Instead of writing statistics to a local file it emits
 * a magic marker instructing the WebCLI frontend to download them from the controller's REST
 * endpoint.
 */
public class WebExport extends BaseExportCommand {
   @Override
   public CommandResult execute(HyperfoilCommandInvocation invocation) throws CommandException {
      ensureConnection(invocation);
      Client.RunRef runRef = getRunRef(invocation);
      invocation.println("Sending exported statistics...");
      // Lowercase with a fixed locale: the format string becomes part of the REST path, and
      // the default-locale toLowerCase() would mangle it under e.g. a Turkish locale
      // ("CSV" -> "csv" with a dotless i).
      invocation.println(
            "__HYPERFOIL_DOWNLOAD_MAGIC__ /run/" + runRef.id() + "/stats/all/"
                  + format.toLowerCase(java.util.Locale.ROOT) + " " + runRef.id());
      return CommandResult.SUCCESS;
   }
}
| java | Apache-2.0 | 70ad9cad7ba105e88d7c62e7b65892ecd288f034 | 2026-01-05T02:38:03.557103Z | false |
Hyperfoil/Hyperfoil | https://github.com/Hyperfoil/Hyperfoil/blob/70ad9cad7ba105e88d7c62e7b65892ecd288f034/clustering/src/main/java/io/hyperfoil/clustering/webcli/Plot.java | clustering/src/main/java/io/hyperfoil/clustering/webcli/Plot.java | package io.hyperfoil.clustering.webcli;
import java.util.List;
import org.aesh.command.CommandDefinition;
import org.aesh.command.CommandException;
import org.aesh.command.CommandResult;
import org.aesh.command.option.Arguments;
import org.aesh.terminal.utils.ANSI;
import io.hyperfoil.cli.commands.ServerCommand;
import io.hyperfoil.cli.context.HyperfoilCommandInvocation;
import io.hyperfoil.controller.Client;
@CommandDefinition(name = "plot", description = "Display chart for metric/connections/sessions")
/**
 * WebCLI command rendering charts (metric details, histograms, percentiles, connection and
 * session pools) by emitting a raw-HTML iframe pointing at the run's report endpoint.
 */
public class Plot extends ServerCommand {
   @Arguments(description = "Run plot (without args) to see detailed help.")
   private List<String> args;

   @Override
   public CommandResult execute(HyperfoilCommandInvocation invocation) throws CommandException {
      if (args == null || args.isEmpty()) {
         invocation.println("Missing arguments, use " + ANSI.BOLD + "plot <type> [<name>] [<run id>]" + ANSI.RESET + "");
         // Fixed typo: "on of" -> "one of".
         invocation.println("where type is one of: metric, histogram, percentiles, connections (or conns), sessions");
         invocation.println("Examples:");
         invocation.println("  plot metric foo                 Show metric 'foo' in all phases (details report)");
         invocation.println("  plot histogram my-phase foo     Show histogram of metric 'foo' in phase 'my-phase'");
         invocation.println("  plot percentiles my-phase foo   Show percentiles of metric 'foo' in phase 'my-phase'");
         invocation.println("  plot connections                Show connection pool utilization charts");
         invocation.println("  plot sessions my-phase          Show session pool charts for phase 'my-phase'");
         invocation.println("<run id> as the last argument is required only if you don't have any run in current context.");
         return CommandResult.FAILURE;
      }
      // Locale-independent lowercasing: the type is a protocol keyword, not localized text.
      switch (args.get(0).toLowerCase(java.util.Locale.ROOT)) {
         case "m":
         case "metric":
            if (args.size() < 2) {
               invocation.println("Missing name of metric. Type 'stats' to show available metrics.");
            } else {
               plotMetric(invocation, args.get(1));
            }
            break;
         case "h":
         case "histo":
         case "histogram":
            if (args.size() < 3) {
               invocation.println("Missing name of phase and metric. Type 'stats' to show available metrics.");
            } else {
               plotHistogram(invocation, args.get(1), args.get(2));
            }
            break;
         case "p":
         case "percentiles":
            if (args.size() < 3) {
               invocation.println("Missing name of phase and metric. Type 'stats' to show available metrics.");
            } else {
               plotPercentiles(invocation, args.get(1), args.get(2));
            }
            break;
         case "c":
         case "conns":
         case "connections":
            plotConnections(invocation);
            break;
         case "s":
         case "sessions":
            if (args.size() < 2) {
               plotSessions(invocation, null);
            } else {
               plotSessions(invocation, args.get(1));
            }
            break;
         default:
            // Previously an unknown type silently returned SUCCESS; report it instead.
            invocation.println("Unknown plot type '" + args.get(0) + "'; run 'plot' without arguments for usage.");
            return CommandResult.FAILURE;
      }
      return CommandResult.SUCCESS;
   }

   // Chart for a single metric across all phases.
   private void plotMetric(HyperfoilCommandInvocation invocation, String metric) throws CommandException {
      Client.RunRef runRef = getRunRef(invocation, 2);
      plotIframe(invocation, runRef, "/details/" + metric);
   }

   // Histogram for one metric within one phase.
   private void plotHistogram(HyperfoilCommandInvocation invocation, String phase, String metric) throws CommandException {
      Client.RunRef runRef = getRunRef(invocation, 3);
      plotIframe(invocation, runRef, "/histogram/" + phase + "/" + metric);
   }

   // Percentile chart for one metric within one phase.
   private void plotPercentiles(HyperfoilCommandInvocation invocation, String phase, String metric) throws CommandException {
      Client.RunRef runRef = getRunRef(invocation, 3);
      plotIframe(invocation, runRef, "/percentiles/" + phase + "/" + metric);
   }

   // Connection pool utilization charts for the whole run.
   private void plotConnections(HyperfoilCommandInvocation invocation) throws CommandException {
      Client.RunRef runRef = getRunRef(invocation, 1);
      plotIframe(invocation, runRef, "/connections");
   }

   // Session pool charts, optionally restricted to one phase.
   private void plotSessions(HyperfoilCommandInvocation invocation, String phase) throws CommandException {
      Client.RunRef runRef = getRunRef(invocation, phase == null ? 1 : 2);
      plotIframe(invocation, runRef, "/sessions" + (phase == null ? "" : "/" + phase));
   }

   /**
    * Resolves the run to plot: the optional trailing argument at {@code runArgIndex} wins,
    * otherwise the run from the current context is used.
    *
    * @throws CommandException if neither an argument nor a context run is available.
    */
   private Client.RunRef getRunRef(HyperfoilCommandInvocation invocation, int runArgIndex) throws CommandException {
      Client.RunRef runRef;
      if (args.size() > runArgIndex && !args.get(runArgIndex).isEmpty()) {
         runRef = invocation.context().client().run(args.get(runArgIndex));
      } else {
         runRef = invocation.context().serverRun();
         if (runRef == null) {
            failMissingRunId(invocation);
         }
      }
      return runRef;
   }

   // Emits the raw-HTML markers the WebCLI frontend renders as an inline, collapsible iframe.
   private void plotIframe(HyperfoilCommandInvocation invocation, Client.RunRef runRef, String path) {
      invocation.println("__HYPERFOIL_RAW_HTML_START__" +
            "<iframe onload=\"resizeFrame(this)\" class=\"plot\" src=\"/run/" + runRef.id() + "/report?unwrap=true#" + path
            + "\"></iframe>" +
            "<button class=\"plottoggle hfbutton\" onclick=\"togglePlot(this)\">Collapse</button>" +
            "__HYPERFOIL_RAW_HTML_END__");
   }
}
| java | Apache-2.0 | 70ad9cad7ba105e88d7c62e7b65892ecd288f034 | 2026-01-05T02:38:03.557103Z | false |
Hyperfoil/Hyperfoil | https://github.com/Hyperfoil/Hyperfoil/blob/70ad9cad7ba105e88d7c62e7b65892ecd288f034/clustering/src/main/java/io/hyperfoil/clustering/webcli/WebEdit.java | clustering/src/main/java/io/hyperfoil/clustering/webcli/WebEdit.java | package io.hyperfoil.clustering.webcli;
import java.io.IOException;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.CountDownLatch;
import org.aesh.command.CommandDefinition;
import org.aesh.command.CommandException;
import org.aesh.command.CommandResult;
import io.hyperfoil.api.config.Benchmark;
import io.hyperfoil.api.config.BenchmarkData;
import io.hyperfoil.api.config.BenchmarkDefinitionException;
import io.hyperfoil.api.config.BenchmarkSource;
import io.hyperfoil.cli.commands.BaseEditCommand;
import io.hyperfoil.cli.context.HyperfoilCommandInvocation;
import io.hyperfoil.controller.Client;
import io.hyperfoil.core.impl.ProvidedBenchmarkData;
import io.hyperfoil.core.parser.BenchmarkParser;
import io.hyperfoil.core.parser.ParserException;
import io.hyperfoil.impl.Util;
@CommandDefinition(name = "edit", description = "Edit benchmark definition.")
public class WebEdit extends BaseEditCommand {
   /**
    * Runs the WebCLI edit loop: sends the current benchmark source to the browser editor,
    * re-parses the edited text (fetching any referenced files from the browser on demand),
    * retries on parse errors, and finally publishes the file list back to the frontend.
    */
   @Override
   public CommandResult execute(HyperfoilCommandInvocation invocation) throws CommandException {
      Client.BenchmarkRef benchmarkRef = ensureBenchmark(invocation);
      Client.BenchmarkSource source = ensureSource(invocation, benchmarkRef);
      WebCliContext context = (WebCliContext) invocation.context();
      // Pre-load extra files named on the command line; each load prompts the browser
      // and may be cancelled by interrupt.
      Map<String, byte[]> extraData = new HashMap<>();
      if (extraFiles != null) {
         for (String extraFile : extraFiles) {
            try {
               extraData.put(extraFile, context.loadFile(invocation, extraFile));
            } catch (InterruptedException e) {
               invocation.println("Benchmark upload cancelled.");
               return CommandResult.FAILURE;
            }
         }
      }
      ProvidedBenchmarkData filesData = new ProvidedBenchmarkData(extraData);
      String updatedSource = source.source;
      String updatedName;
      // Edit/parse loop: repeats until the edited source parses (and, for non-templates,
      // builds and serializes) successfully, or the user gives up.
      for (;;) {
         updatedSource = edit(invocation, context, benchmarkRef, updatedSource);
         if (updatedSource == null) {
            return CommandResult.FAILURE;
         }
         try {
            BenchmarkSource newSource = BenchmarkParser.instance().createSource(updatedSource, filesData);
            updatedName = newSource.name;
            if (!newSource.isTemplate()) {
               Benchmark benchmark;
               // Build loop: each MissingFileException asks the browser for one file and retries.
               for (;;) {
                  try {
                     benchmark = BenchmarkParser.instance().buildBenchmark(newSource, Collections.emptyMap());
                     break;
                  } catch (BenchmarkData.MissingFileException e) {
                     try {
                        if (source.files.contains(BenchmarkData.sanitize(e.file))) {
                           // File already exists server-side; let the user decide whether to replace it.
                           invocation.print("Re-upload file " + e.file + "? [y/N] ");
                           switch (invocation.inputLine().trim().toLowerCase()) {
                              case "y":
                              case "yes":
                                 filesData.files.put(e.file, context.loadFile(invocation, e.file));
                                 break;
                              default:
                                 // Marking it ignored stops buildBenchmark from asking again.
                                 filesData.ignoredFiles.add(e.file);
                           }
                        } else {
                           filesData.files.put(e.file, context.loadFile(invocation, e.file));
                        }
                     } catch (InterruptedException interruptedException) {
                        invocation.println("Edits cancelled.");
                        return CommandResult.FAILURE;
                     }
                  }
               }
               try {
                  // Sanity check: the controller will need to serialize the benchmark to agents.
                  Util.serialize(benchmark);
               } catch (IOException e) {
                  invocation.error("Benchmark is not serializable.", e);
                  return CommandResult.FAILURE;
               }
            }
            break;
         } catch (ParserException | BenchmarkDefinitionException e) {
            invocation.error(e);
            invocation.print("Retry edits? [Y/n] ");
            try {
               switch (invocation.inputLine().trim().toLowerCase()) {
                  case "n":
                  case "no":
                     return CommandResult.FAILURE;
               }
            } catch (InterruptedException ie) {
               invocation.println("Edits cancelled.");
               return CommandResult.FAILURE;
            }
            // Drop files gathered for the failed parse and start over.
            filesData = new ProvidedBenchmarkData(extraData);
         }
      }
      String prevVersion = source.version;
      if (!updatedName.equals(benchmarkRef.name())) {
         invocation.println(
               "NOTE: Renamed benchmark " + benchmarkRef.name() + " to " + updatedName + "; old benchmark won't be deleted.");
         // A rename is effectively a new benchmark, so no previous version to compare against.
         prevVersion = null;
      }
      CountDownLatch latch;
      synchronized (context) {
         latch = context.latch = new CountDownLatch(1);
      }
      // Protocol: marker, benchmark, previous version (may be blank), uploaded file names, end marker.
      invocation.println("__HYPERFOIL_BENCHMARK_FILE_LIST__");
      // NOTE(review): 'benchmark' is declared inside the !isTemplate() branch above and appears
      // to be out of scope here as shown — verify against the full file (possibly meant updatedName).
      invocation.println(benchmark);
      invocation.println(prevVersion == null ? "" : prevVersion);
      for (String file : filesData.files.keySet()) {
         invocation.println(file);
      }
      invocation.println("__HYPERFOIL_BENCHMARK_END_OF_FILES__");
      try {
         latch.await();
      } catch (InterruptedException e) {
      }
      context.latch = null;
      return CommandResult.SUCCESS;
   }

   /**
    * Ships {@code source} to the browser editor and blocks until the browser sends back the
    * edited text or the edit is cancelled.
    *
    * @return the edited source, or {@code null} when the user cancelled.
    */
   private String edit(HyperfoilCommandInvocation invocation, WebCliContext context, Client.BenchmarkRef benchmarkRef,
         String source) {
      CountDownLatch latch;
      synchronized (context) {
         latch = context.latch = new CountDownLatch(1);
      }
      // Magic marker: tells the WebCLI frontend to open the editor with the following content.
      invocation.println("__HYPERFOIL_EDIT_MAGIC__" + benchmarkRef.name());
      invocation.println(source);
      try {
         latch.await();
      } catch (InterruptedException e) {
         // interruption is cancel
      }
      synchronized (context) {
         context.latch = null;
         if (context.editBenchmark == null) {
            invocation.println("Edits cancelled.");
            return null;
         }
         source = context.editBenchmark.toString();
         context.editBenchmark = null;
      }
      return source;
   }
}
| java | Apache-2.0 | 70ad9cad7ba105e88d7c62e7b65892ecd288f034 | 2026-01-05T02:38:03.557103Z | false |
Hyperfoil/Hyperfoil | https://github.com/Hyperfoil/Hyperfoil/blob/70ad9cad7ba105e88d7c62e7b65892ecd288f034/clustering/src/main/java/io/hyperfoil/clustering/webcli/WebsocketOutputStream.java | clustering/src/main/java/io/hyperfoil/clustering/webcli/WebsocketOutputStream.java | package io.hyperfoil.clustering.webcli;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.nio.charset.StandardCharsets;
import java.util.concurrent.Callable;
import java.util.concurrent.ScheduledFuture;
import java.util.concurrent.TimeUnit;
import io.vertx.core.buffer.Buffer;
import io.vertx.core.http.ServerWebSocket;
/**
 * {@link OutputStream} that batches CLI output and ships it to the browser as websocket text
 * frames. Bytes accumulate in an in-memory buffer and are flushed either explicitly or by a
 * 10 ms timer, so many small writes coalesce into a single frame.
 * <p>
 * All public methods are synchronized on this instance; the scheduled {@link #call()} runs on
 * the shared {@code WebCLI.SCHEDULED_EXECUTOR} timer thread.
 */
class WebsocketOutputStream extends OutputStream implements Callable<Void> {
   private ServerWebSocket webSocket;
   private final ByteArrayOutputStream bytes = new ByteArrayOutputStream();
   // Pending timer-triggered flush; null when none is scheduled. Guarded by 'this'.
   private ScheduledFuture<Void> future;

   WebsocketOutputStream(ServerWebSocket webSocket) {
      this.webSocket = webSocket;
   }

   /** Re-binds the stream to a new websocket after the client reconnects. */
   public synchronized void reattach(ServerWebSocket webSocket) {
      this.webSocket = webSocket;
   }

   private void checkCommand(byte[] b, int off, int len) {
      // Commands like __HYPERFOIL_SOMETHING_MAGIC__ start with two underscores.
      // We will flush beforehand to let scripts detect commands at the start of frame
      if (len >= 2 && b[off] == '_' && b[off + 1] == '_') {
         flush();
      }
   }

   @Override
   public synchronized void write(byte[] b) throws IOException {
      checkCommand(b, 0, b.length);
      bytes.write(b);
      scheduleSendTextFrame();
   }

   @Override
   public synchronized void write(byte[] b, int off, int len) {
      // Bug fix: inspect the region actually being written (off/len), not the start of the
      // backing array — the previous checkCommand(b, 0, b.length) looked at the wrong bytes.
      checkCommand(b, off, len);
      bytes.write(b, off, len);
      scheduleSendTextFrame();
   }

   @Override
   public synchronized void write(int b) {
      bytes.write(b);
      scheduleSendTextFrame();
   }

   /** Cancels any pending timer flush and sends the buffered bytes immediately. */
   @Override
   public synchronized void flush() {
      if (future != null) {
         future.cancel(false);
      }
      call();
   }

   // Arms the 10 ms coalescing timer unless one is already pending. Callers hold the monitor.
   private void scheduleSendTextFrame() {
      if (future == null) {
         future = WebCLI.SCHEDULED_EXECUTOR.schedule(this, 10, TimeUnit.MILLISECONDS);
      }
   }

   /** Timer callback: sends the buffered bytes as one UTF-8 text frame and resets the buffer. */
   @Override
   public synchronized Void call() {
      webSocket.writeTextMessage(bytes.toString(StandardCharsets.UTF_8));
      bytes.reset();
      future = null;
      return null;
   }

   public synchronized void writeSingleBinary(Buffer buffer) {
      // We need to flush output to keep ordering of text and binary frames
      flush();
      webSocket.writeBinaryMessage(buffer);
   }

   public synchronized void writeSingleText(String text) {
      // We need to flush output to keep ordering of text and binary frames
      flush();
      webSocket.writeTextMessage(text);
   }
}
| java | Apache-2.0 | 70ad9cad7ba105e88d7c62e7b65892ecd288f034 | 2026-01-05T02:38:03.557103Z | false |
Hyperfoil/Hyperfoil | https://github.com/Hyperfoil/Hyperfoil/blob/70ad9cad7ba105e88d7c62e7b65892ecd288f034/clustering/src/main/java/io/hyperfoil/clustering/webcli/WebCLI.java | clustering/src/main/java/io/hyperfoil/clustering/webcli/WebCLI.java | package io.hyperfoil.clustering.webcli;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.io.PipedInputStream;
import java.io.PipedOutputStream;
import java.io.PrintStream;
import java.lang.reflect.Field;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.ScheduledFuture;
import java.util.concurrent.TimeUnit;
import org.aesh.AeshConsoleRunner;
import org.aesh.command.Command;
import org.aesh.command.registry.CommandRegistryException;
import org.aesh.readline.ReadlineConsole;
import org.aesh.readline.terminal.impl.ExternalTerminal;
import org.aesh.readline.terminal.impl.LineDisciplineTerminal;
import org.aesh.readline.tty.terminal.TerminalConnection;
import org.aesh.terminal.tty.Signal;
import org.aesh.terminal.tty.Size;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.message.FormattedMessage;
import io.hyperfoil.cli.HyperfoilCli;
import io.hyperfoil.cli.commands.Connect;
import io.hyperfoil.cli.commands.Edit;
import io.hyperfoil.cli.commands.Exit;
import io.hyperfoil.cli.commands.Export;
import io.hyperfoil.cli.commands.Report;
import io.hyperfoil.cli.commands.Run;
import io.hyperfoil.cli.commands.StartLocal;
import io.hyperfoil.cli.commands.Upload;
import io.hyperfoil.client.RestClient;
import io.hyperfoil.impl.Util;
import io.vertx.core.Handler;
import io.vertx.core.Vertx;
import io.vertx.core.http.ServerWebSocket;
public class WebCLI extends HyperfoilCli implements Handler<ServerWebSocket> {
private static final Logger log = LogManager.getLogger(WebCLI.class);
private static final String EDITS_BEGIN = "__HYPERFOIL_EDITS_BEGIN__\n";
private static final String EDITS_END = "__HYPERFOIL_EDITS_END__\n";
private static final String INTERRUPT_SIGNAL = "__HYPERFOIL_INTERRUPT_SIGNAL__";
private static final String AUTH_TOKEN = "__HYPERFOIL_AUTH_TOKEN__";
private static final String UPLOAD_URL = "__HYPERFOIL_UPLOAD_URL__";
private static final String SET_BENCHMARK = "__HYPERFOIL_SET_BENCHMARK__";
private static final String SET_TERM_SIZE = "__HYPERFOIL_SET_TERM_SIZE__";
private static final String SEND_NOTIFICATIONS = "__HYPERFOIL_SEND_NOTIFICATIONS__";
private static final String FILE_TRANSFER = "__HYPERFOIL_FILE_TRANSFER__";
private static final long SESSION_TIMEOUT = 60000;
static final ScheduledExecutorService SCHEDULED_EXECUTOR = Executors.newScheduledThreadPool(1,
Util.daemonThreadFactory("webcli-timer"));
private final Vertx vertx;
private final ConcurrentMap<String, WebCliContext> contextMap = new ConcurrentHashMap<>();
private final ConcurrentMap<String, ClosedContext> closedRunners = new ConcurrentHashMap<>();
private String hostname = "localhost";
private int port = 8090;
private boolean ssl = false;
public WebCLI(Vertx vertx) {
this.vertx = vertx;
}
@Override
public void handle(ServerWebSocket webSocket) {
String sessionId = webSocket.query();
if (sessionId == null || sessionId.isEmpty()) {
throw new IllegalStateException();
}
ClosedContext closed = closedRunners.remove(sessionId);
if (closed != null) {
closed.future.cancel(false);
}
WebCliContext context = contextMap.compute(sessionId, (sid, existing) -> {
if (existing == null) {
return createNewContext(webSocket);
} else {
existing.reattach(webSocket);
return existing;
}
});
webSocket.closeHandler(nil -> {
if (context.runCompletionFuture != null) {
context.runCompletionFuture.cancel(false);
}
ScheduledFuture<?> future = SCHEDULED_EXECUTOR.schedule(() -> {
ClosedContext closedContext = closedRunners.get(context.sessionId);
if (closedContext != null && closedContext.closed <= System.currentTimeMillis() - SESSION_TIMEOUT) {
closedContext.context.runner.stop();
contextMap.remove(context.sessionId);
closedRunners.remove(context.sessionId);
}
}, SESSION_TIMEOUT, TimeUnit.MILLISECONDS);
closedRunners.put(context.sessionId, new ClosedContext(System.currentTimeMillis(), context, future));
});
webSocket.textMessageHandler(msg -> {
synchronized (context) {
if (context.editBenchmark != null) {
int editsEnd = msg.indexOf(EDITS_END);
if (editsEnd >= 0) {
context.editBenchmark.append(msg, 0, editsEnd);
context.latch.countDown();
} else {
context.editBenchmark.append(msg);
}
return;
} else if (msg.equals(INTERRUPT_SIGNAL)) {
if (context.latch != null) {
context.latch.countDown();
} else {
TerminalConnection connection = getConnection(context.runner);
if (connection != null) {
connection.getTerminal().raise(Signal.INT);
}
}
return;
} else if (msg.startsWith(EDITS_BEGIN)) {
context.editBenchmark = new StringBuilder();
context.editBenchmark.append(msg.substring(EDITS_BEGIN.length()));
return;
} else if (msg.startsWith(AUTH_TOKEN)) {
context.client().setToken(msg.substring(AUTH_TOKEN.length()));
return;
} else if (msg.startsWith(SET_BENCHMARK)) {
context.setServerBenchmark(context.client().benchmark(msg.substring(SET_BENCHMARK.length())));
return;
} else if (msg.startsWith(SET_TERM_SIZE)) {
setTermSize(context, msg.substring(SET_TERM_SIZE.length()));
return;
} else if (msg.startsWith(SEND_NOTIFICATIONS)) {
context.startNotifications();
return;
} else if (msg.startsWith(FILE_TRANSFER)) {
try {
synchronized (context) {
context.binaryLength = Integer.parseInt(msg.substring(FILE_TRANSFER.length()));
// we don't initalize binaryContent here to prevent risk of receiving 'empty' file on interrupt.
}
} catch (NumberFormatException e) {
context.outputStream.writeSingleText("Failed to parse file transfer length: closing.");
webSocket.close();
}
return;
} else if (msg.startsWith(UPLOAD_URL)) {
context.uploadUrl = msg.substring(UPLOAD_URL.length());
context.latch.countDown();
return;
}
}
try {
context.inputStream.write(msg);
context.inputStream.flush();
} catch (IOException e) {
log.error(new FormattedMessage("Failed to write '{}' to Aesh input", msg), e);
webSocket.close();
}
});
webSocket.binaryMessageHandler(buffer -> {
try {
byte[] bytes = buffer.getBytes();
synchronized (context) {
if (context.binaryContent == null) {
context.binaryContent = new ByteArrayOutputStream();
}
context.binaryContent.write(bytes);
context.binaryLength -= bytes.length;
}
if (context.binaryLength == 0) {
synchronized (context) {
context.latch.countDown();
}
} else if (context.binaryLength < 0) {
log.error("Expected binary input underflow");
context.outputStream.writeSingleText("ERROR: Expected binary input underflow.");
webSocket.close();
}
} catch (IOException e) {
log.error("Failed to append bytes", e);
}
});
}
private void setTermSize(WebCliContext context, String value) {
String[] dimensions = value.split("x");
if (dimensions.length == 2) {
try {
int width = Integer.parseInt(dimensions[0]);
int height = Integer.parseInt(dimensions[1]);
TerminalConnection connection = getConnection(context.runner);
if (connection != null) {
ExternalTerminal terminal = (ExternalTerminal) connection.getTerminal();
Field f = LineDisciplineTerminal.class.getDeclaredField("size");
f.setAccessible(true);
f.set(terminal, new Size(width, height));
}
} catch (NumberFormatException | NoSuchFieldException | IllegalAccessException e) {
// ignore
}
}
}
private WebCliContext createNewContext(ServerWebSocket webSocket) {
PipedOutputStream pos = new PipedOutputStream();
PipedInputStream pis;
try {
pis = new PipedInputStream(pos);
} catch (IOException e) {
log.error("Failed to create input stream", e);
webSocket.close();
throw new IllegalStateException(e);
}
OutputStreamWriter inputStream = new OutputStreamWriter(pos);
WebsocketOutputStream stream = new WebsocketOutputStream(webSocket);
WebCliContext ctx = new WebCliContext(vertx, inputStream, stream, webSocket);
ctx.setClient(new RestClient(vertx, hostname, port, ssl, true, null));
ctx.setOnline(true);
try {
var settingsBuilder = settingsBuilder(ctx, new WebCliCommandInvocation.Provider(ctx));
settingsBuilder.inputStream(pis)
.persistHistory(false)
.historySize(Integer.MAX_VALUE)
.outputStreamError(new PrintStream(stream))
.outputStream(new PrintStream(stream));
ctx.runner = configureRunner(ctx, settingsBuilder.build(), null);
} catch (CommandRegistryException e) {
throw new IllegalStateException(e);
}
webSocket.writeTextMessage("__HYPERFOIL_SESSION_START__\n");
webSocket.writeTextMessage("Welcome to Hyperfoil! Type 'help' for commands overview.\n");
Thread cliThread = new Thread(ctx.runner::start, "webcli-" + webSocket.remoteAddress());
cliThread.setDaemon(true);
cliThread.start();
return ctx;
}
private TerminalConnection getConnection(AeshConsoleRunner runner) {
try {
Field consoleField = AeshConsoleRunner.class.getDeclaredField("console");
consoleField.setAccessible(true);
ReadlineConsole console = (ReadlineConsole) consoleField.get(runner);
if (console == null) {
return null;
}
Field connectionField = ReadlineConsole.class.getDeclaredField("connection");
connectionField.setAccessible(true);
return (TerminalConnection) connectionField.get(console);
} catch (NoSuchFieldException | IllegalAccessException e) {
return null;
}
}
/**
 * Tailors the command set for the web CLI: commands that rely on a local
 * filesystem or terminal are dropped and web-aware variants are added.
 */
@Override
protected List<Class<? extends Command>> getCommands() {
   ArrayList<Class<? extends Command>> commands = new ArrayList<>(super.getCommands());
   // These commands assume a local environment and make no sense in a browser.
   for (Class<? extends Command> unsupported : List.of(
         Connect.class, Edit.class, Exit.class, Export.class,
         Report.class, Run.class, StartLocal.class, Upload.class)) {
      commands.remove(unsupported);
   }
   // Web-specific replacements and additions.
   commands.addAll(List.of(
         Plot.class, WebEdit.class, WebExport.class,
         WebReport.class, WebRun.class, WebUpload.class));
   return commands;
}
/**
 * Records the controller endpoint that newly created sessions will connect to.
 *
 * @param hostname controller host
 * @param port controller port
 * @param ssl whether to use TLS when connecting
 */
public void setConnectionOptions(String hostname, int port, boolean ssl) {
   this.ssl = ssl;
   this.port = port;
   this.hostname = hostname;
}
// Bookkeeping for a session whose websocket has been closed: remembers when it
// was closed and the scheduled task that will eventually discard the context
// unless the client reattaches first.
private static class ClosedContext {
   // Close timestamp; presumably milliseconds from the caller's clock — confirm at call site.
   final long closed;
   // The CLI session kept alive for a possible reconnect.
   final WebCliContext context;
   // Scheduled cleanup task; expected to be cancelled if the session is resumed.
   final ScheduledFuture<?> future;
   private ClosedContext(long closed, WebCliContext context, ScheduledFuture<?> future) {
      this.closed = closed;
      this.context = context;
      this.future = future;
   }
}
}
| java | Apache-2.0 | 70ad9cad7ba105e88d7c62e7b65892ecd288f034 | 2026-01-05T02:38:03.557103Z | false |
Hyperfoil/Hyperfoil | https://github.com/Hyperfoil/Hyperfoil/blob/70ad9cad7ba105e88d7c62e7b65892ecd288f034/clustering/src/main/java/io/hyperfoil/clustering/webcli/WebCliContext.java | clustering/src/main/java/io/hyperfoil/clustering/webcli/WebCliContext.java | package io.hyperfoil.clustering.webcli;
import java.io.ByteArrayOutputStream;
import java.io.OutputStreamWriter;
import java.time.Duration;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ScheduledFuture;
import java.util.concurrent.TimeUnit;
import org.aesh.AeshConsoleRunner;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import io.hyperfoil.cli.Pager;
import io.hyperfoil.cli.context.HyperfoilCliContext;
import io.hyperfoil.cli.context.HyperfoilCommandInvocation;
import io.hyperfoil.client.RestClient;
import io.hyperfoil.controller.model.Run;
import io.vertx.core.Vertx;
import io.vertx.core.buffer.Buffer;
import io.vertx.core.http.ServerWebSocket;
/**
 * CLI context for a single web CLI session. Bridges the websocket transport
 * (browser terminal) with the Aesh console: input typed in the browser is fed
 * through {@link #inputStream}, console output travels back through
 * {@link #outputStream}. Also polls the controller for completed runs so the
 * browser can show notifications.
 */
class WebCliContext extends HyperfoilCliContext {
   private static final Logger log = LogManager.getLogger(WebCliContext.class);

   final String sessionId;
   // Writer feeding the console's piped input stream.
   final OutputStreamWriter inputStream;
   final WebsocketOutputStream outputStream;
   // Periodic poll for finished runs; (re)scheduled in startNotifications().
   ScheduledFuture<?> runCompletionFuture;
   ServerWebSocket webSocket;
   AeshConsoleRunner runner;
   // Handshake latch used by file uploads and the pager; guarded by 'this'.
   CountDownLatch latch;
   StringBuilder editBenchmark;
   String uploadUrl;
   int binaryLength;
   ByteArrayOutputStream binaryContent;
   // Last run id seen by the poller and last run id the user was notified
   // about. Both are guarded by 'this'.
   String prevId = null;
   String notifiedId = null;

   WebCliContext(Vertx vertx, OutputStreamWriter inputStream, WebsocketOutputStream outputStream, ServerWebSocket webSocket) {
      super(vertx, true);
      this.sessionId = webSocket.query();
      this.inputStream = inputStream;
      this.webSocket = webSocket;
      this.outputStream = outputStream;
   }

   /**
    * Requests the content of {@code file} from the browser and blocks until it
    * arrives: another thread is expected to fill {@link #binaryContent} and
    * count down the latch installed here.
    *
    * @throws InterruptedException when the wait is interrupted or the upload
    *         was cancelled on the browser side (no content arrived)
    */
   public byte[] loadFile(HyperfoilCommandInvocation invocation, String file) throws InterruptedException {
      CountDownLatch latch;
      synchronized (this) {
         latch = this.latch = new CountDownLatch(1);
      }
      // Magic marker interpreted by the web client as a file-upload request.
      invocation.println("__HYPERFOIL_LOAD_FILE__" + file);
      latch.await();
      synchronized (this) {
         this.latch = null;
         if (binaryContent == null) {
            // Upload cancelled: signal the caller without a dedicated exception type.
            throw new InterruptedException();
         }
         byte[] bytes = binaryContent.toByteArray();
         binaryContent = null;
         invocation.println("File " + file + " uploaded.");
         return bytes;
      }
   }

   /** Binds this session to a new websocket after the browser reconnected. */
   public void reattach(ServerWebSocket webSocket) {
      this.webSocket = webSocket;
      outputStream.reattach(webSocket);
   }

   @Override
   public String interruptKey() {
      return "Escape";
   }

   @Override
   public synchronized Pager createPager(String pager) {
      // The latch is awaited by WebPager.open() and released elsewhere when
      // the user dismisses the pager.
      latch = new CountDownLatch(1);
      return new WebPager();
   }

   public void sendBinaryMessage(Buffer buffer) {
      outputStream.writeSingleBinary(buffer);
   }

   /** Starts (or restarts) the periodic poll for completed runs. */
   void startNotifications() {
      if (runCompletionFuture != null) {
         runCompletionFuture.cancel(false);
      }
      runCompletionFuture = executor().scheduleAtFixedRate(this::checkRunCompletion, 0, 15, TimeUnit.SECONDS);
   }

   private void checkRunCompletion() {
      RestClient client = client();
      if (client == null) {
         return;
      }
      Run current = client.run("last").get();
      String fetchRun = null;
      synchronized (this) {
         if (notifiedId == null) {
            // First poll: just remember the current run, don't notify retroactively.
            prevId = this.notifiedId = current.id;
            return;
         }
         if (current.completed && this.notifiedId.compareTo(current.id) < 0) {
            notifyRunCompleted(current);
         } else if (!prevId.equals(current.id) && this.notifiedId.compareTo(prevId) < 0) {
            // A newer run started before we noticed the previous one finishing;
            // fetch the previous run's final state outside the lock below.
            fetchRun = prevId;
         }
         // FIX: update prevId under the same lock that guards all its other
         // accesses; this write previously happened outside the synchronized
         // block, racing with concurrent readers.
         prevId = current.id;
      }
      if (fetchRun != null) {
         notifyRunCompleted(client.run(fetchRun).get());
      }
   }

   /**
    * Sends a notification about a completed run to the browser, at most once
    * per run. The code assumes that newer run ids compare lexicographically
    * greater than older ones.
    */
   @Override
   public synchronized void notifyRunCompleted(Run run) {
      if (notifiedId == null || notifiedId.compareTo(run.id) < 0) {
         notifiedId = run.id;
      } else {
         // Already notified about this run (or a newer one).
         return;
      }
      StringBuilder sb = new StringBuilder("__HYPERFOIL_NOTIFICATION__");
      // title
      sb.append("Run ").append(run.id).append(" (").append(run.benchmark).append(") has finished").append('\n');
      // body
      if (run.cancelled) {
         sb.append("The run was cancelled.\n");
      }
      if (run.started != null && run.terminated != null) {
         // Duration.toString() yields e.g. "PT1H2M3S": strip the "PT" prefix
         // and put a space after every unit but the last.
         String prettyDuration = Duration.between(run.started.toInstant(), run.terminated.toInstant())
               .toString().substring(2).replaceAll("(\\d[HMS])(?!$)", "$1 ").toLowerCase();
         sb.append("Total duration: ").append(prettyDuration).append('\n');
      }
      if (run.errors != null && !run.errors.isEmpty()) {
         sb.append("Errors (").append(run.errors.size()).append("):\n");
         run.errors.stream().limit(10).forEach(e -> sb.append(e).append('\n'));
         if (run.errors.size() > 10) {
            sb.append("... (further errors omitted)");
         }
      }
      outputStream.writeSingleText(sb.toString());
   }
}
| java | Apache-2.0 | 70ad9cad7ba105e88d7c62e7b65892ecd288f034 | 2026-01-05T02:38:03.557103Z | false |
Hyperfoil/Hyperfoil | https://github.com/Hyperfoil/Hyperfoil/blob/70ad9cad7ba105e88d7c62e7b65892ecd288f034/clustering/src/main/java/io/hyperfoil/clustering/webcli/WebRun.java | clustering/src/main/java/io/hyperfoil/clustering/webcli/WebRun.java | package io.hyperfoil.clustering.webcli;
import org.aesh.command.CommandDefinition;
import io.hyperfoil.cli.commands.Run;
import io.hyperfoil.cli.context.HyperfoilCommandInvocation;
import io.hyperfoil.core.impl.ProvidedBenchmarkData;
/**
 * Web CLI variant of the {@code run} command: when a benchmark references a
 * file that is not available on the controller, the file is requested from
 * the browser instead of the local filesystem.
 */
@CommandDefinition(name = "run", description = "Starts benchmark on Hyperfoil Controller server")
public class WebRun extends Run {
   @Override
   protected boolean onMissingFile(HyperfoilCommandInvocation invocation, String file, ProvidedBenchmarkData data) {
      WebCliContext context = (WebCliContext) invocation.context();
      try {
         // Blocks until the browser uploads the file (or the user cancels).
         data.files.put(file, context.loadFile(invocation, file));
         return true;
      } catch (InterruptedException e) {
         invocation.warn("Cancelled, not running anything.");
         return false;
      }
   }
}
| java | Apache-2.0 | 70ad9cad7ba105e88d7c62e7b65892ecd288f034 | 2026-01-05T02:38:03.557103Z | false |
Hyperfoil/Hyperfoil | https://github.com/Hyperfoil/Hyperfoil/blob/70ad9cad7ba105e88d7c62e7b65892ecd288f034/clustering/src/main/java/io/hyperfoil/clustering/webcli/WebCliCommandInvocation.java | clustering/src/main/java/io/hyperfoil/clustering/webcli/WebCliCommandInvocation.java | package io.hyperfoil.clustering.webcli;
import java.util.concurrent.TimeUnit;
import org.aesh.command.invocation.CommandInvocation;
import org.aesh.command.invocation.CommandInvocationProvider;
import org.aesh.readline.action.KeyAction;
import io.hyperfoil.cli.context.HyperfoilCommandInvocation;
/**
 * {@link HyperfoilCommandInvocation} variant for the web CLI. Wraps timed
 * input requests with magic markers so the browser terminal knows when to
 * capture a single keypress.
 */
public class WebCliCommandInvocation extends HyperfoilCommandInvocation {
   // Markers understood by the web client; toggle single-key input capture.
   private static final String TIMED_INPUT_ON = "__HYPERFOIL_TIMED_INPUT__\n";
   private static final String TIMED_INPUT_OFF = "__HYPERFOIL_TIMED_INPUT_OFF__\n";

   public WebCliCommandInvocation(WebCliContext context, CommandInvocation commandInvocation) {
      super(context, commandInvocation);
   }

   @Override
   public WebCliContext context() {
      return (WebCliContext) super.context();
   }

   @Override
   public KeyAction input(long timeout, TimeUnit unit) throws InterruptedException {
      WebCliContext ctx = context();
      ctx.outputStream.writeSingleText(TIMED_INPUT_ON);
      try {
         return super.input(timeout, unit);
      } finally {
         // Always switch capture off, even when the wait is interrupted.
         ctx.outputStream.writeSingleText(TIMED_INPUT_OFF);
      }
   }

   /** Produces {@link WebCliCommandInvocation}s bound to a single session. */
   public static class Provider implements CommandInvocationProvider<WebCliCommandInvocation> {
      private final WebCliContext context;

      public Provider(WebCliContext context) {
         this.context = context;
      }

      @Override
      public WebCliCommandInvocation enhanceCommandInvocation(CommandInvocation invocation) {
         return new WebCliCommandInvocation(context, invocation);
      }
   }
}
| java | Apache-2.0 | 70ad9cad7ba105e88d7c62e7b65892ecd288f034 | 2026-01-05T02:38:03.557103Z | false |
Hyperfoil/Hyperfoil | https://github.com/Hyperfoil/Hyperfoil/blob/70ad9cad7ba105e88d7c62e7b65892ecd288f034/clustering/src/main/java/io/hyperfoil/clustering/webcli/WebPager.java | clustering/src/main/java/io/hyperfoil/clustering/webcli/WebPager.java | package io.hyperfoil.clustering.webcli;
import java.io.File;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.util.concurrent.CountDownLatch;
import io.hyperfoil.cli.Pager;
import io.hyperfoil.cli.context.HyperfoilCommandInvocation;
/**
 * Pager implementation for the web CLI: instead of spawning an external pager
 * process it sends the content to the browser (prefixed with a magic marker)
 * and blocks until the user dismisses the pager on the client side.
 */
class WebPager implements Pager {
   @Override
   public void open(HyperfoilCommandInvocation invocation, String text, String prefix, String suffix) {
      // Marker telling the web client to render the following text in a pager.
      invocation.println("__HYPERFOIL_PAGER_MAGIC__");
      invocation.println(text);
      try {
         WebCliContext context = (WebCliContext) invocation.context();
         CountDownLatch latch;
         synchronized (context) {
            // NOTE(review): assumes the latch was installed by
            // WebCliContext.createPager() before this runs — a null latch
            // here would NPE below; confirm the call order.
            latch = context.latch;
         }
         // Counted down elsewhere when the browser closes the pager.
         latch.await();
         synchronized (context) {
            context.latch = null;
         }
      } catch (InterruptedException e) {
         // interruption is okay
      }
   }

   @Override
   public void open(HyperfoilCommandInvocation invocation, File file) {
      try {
         open(invocation, Files.readString(file.toPath(), StandardCharsets.UTF_8), null, null);
      } catch (IOException e) {
         invocation.error("Cannot open file " + file.getName());
      }
   }
}
| java | Apache-2.0 | 70ad9cad7ba105e88d7c62e7b65892ecd288f034 | 2026-01-05T02:38:03.557103Z | false |
Hyperfoil/Hyperfoil | https://github.com/Hyperfoil/Hyperfoil/blob/70ad9cad7ba105e88d7c62e7b65892ecd288f034/core/src/test/java/io/hyperfoil/core/VertxBaseTest.java | core/src/test/java/io/hyperfoil/core/VertxBaseTest.java | package io.hyperfoil.core;
import java.util.ArrayList;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.extension.ExtendWith;
import io.vertx.core.Vertx;
import io.vertx.junit5.VertxExtension;
/**
 * Base class for tests that need a Vert.x instance managed by the
 * {@link VertxExtension} plus a simple per-test cleanup queue.
 */
@ExtendWith(VertxExtension.class)
public class VertxBaseTest {
   // Injected by VertxExtension via before(). FIX: previously this field was
   // eagerly initialized with Vertx.vertx(), creating an instance (and its
   // event-loop threads) that was immediately overwritten and never closed.
   protected Vertx vertx;
   // Tasks registered by tests to run after each test method.
   protected ArrayList<Runnable> cleanup = new ArrayList<>();

   @BeforeEach
   public void before(Vertx vertx) {
      this.vertx = vertx;
   }

   @AfterEach
   public void cleanup() {
      cleanup.forEach(Runnable::run);
      cleanup.clear();
   }
}
| java | Apache-2.0 | 70ad9cad7ba105e88d7c62e7b65892ecd288f034 | 2026-01-05T02:38:03.557103Z | false |
Hyperfoil/Hyperfoil | https://github.com/Hyperfoil/Hyperfoil/blob/70ad9cad7ba105e88d7c62e7b65892ecd288f034/core/src/test/java/io/hyperfoil/core/util/RandomConcurrentSetTest.java | core/src/test/java/io/hyperfoil/core/util/RandomConcurrentSetTest.java | package io.hyperfoil.core.util;
import static org.assertj.core.api.Assertions.assertThat;
import java.util.BitSet;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.ThreadLocalRandom;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicReference;
import org.junit.jupiter.api.Test;
/**
 * Stress test for {@link RandomConcurrentSet}: several threads concurrently
 * insert fresh values while others repeatedly fetch and put back existing
 * ones; at the end the set must contain each value in [0, MAX) exactly once.
 */
public class RandomConcurrentSetTest {
   public static final int ALLOCATING_THREADS = 3;
   public static final int REUSING_THREADS = 3;
   public static final int MAX = 100000;
   ExecutorService executor = Executors.newFixedThreadPool(ALLOCATING_THREADS + REUSING_THREADS);
   // Next value to insert; doubles as the termination condition for all workers.
   AtomicInteger counter = new AtomicInteger();
   CountDownLatch latch = new CountDownLatch(ALLOCATING_THREADS + REUSING_THREADS);
   // First failure observed by any worker thread.
   AtomicReference<Throwable> error = new AtomicReference<>();

   @Test
   public void testMultiThreaded() throws Exception {
      RandomConcurrentSet<Integer> set = new RandomConcurrentSet<>(16, 16, 16);
      for (int i = 0; i < ALLOCATING_THREADS; ++i) {
         executor.submit(() -> runAllocator(set));
      }
      for (int i = 0; i < REUSING_THREADS; ++i) {
         executor.submit(() -> runReusing(set));
      }
      // FIX: the timeout result used to be ignored, so a hung worker let the
      // test continue and fail later with a confusing assertion.
      assertThat(latch.await(60, TimeUnit.SECONDS)).as("workers did not finish in time").isTrue();
      if (error.get() != null) {
         throw new AssertionError(error.get());
      }
      // Verify every value is present exactly once.
      BitSet bitSet = new BitSet(MAX);
      AtomicInteger values = new AtomicInteger();
      set.readAll(value -> {
         assertThat(value).isLessThan(MAX);
         assertThat(bitSet.get(value)).as("duplicit value %d", value).isFalse();
         bitSet.set(value);
         values.incrementAndGet();
      });
      for (int i = 0; i < MAX; ++i) {
         assertThat(bitSet.get(i)).as("missing value %d", i).isTrue();
      }
      assertThat(values.get()).isEqualTo(MAX);
   }

   // Fetches random values and returns them, simulating reuse of pooled items.
   private void runReusing(RandomConcurrentSet<Integer> set) {
      ThreadLocalRandom random = ThreadLocalRandom.current();
      try {
         for (;;) {
            if (counter.get() >= MAX) {
               return;
            }
            Integer value = set.fetch();
            if (value == null) {
               Thread.yield();
            } else {
               if (random.nextBoolean()) {
                  Thread.yield();
               }
               set.put(value);
            }
         }
      } catch (Throwable t) {
         // Keep only the first error; later ones are likely consequences.
         error.compareAndSet(null, t);
      } finally {
         latch.countDown();
      }
   }

   // Inserts each value in [0, MAX) exactly once across all allocator threads.
   private void runAllocator(RandomConcurrentSet<Integer> set) {
      ThreadLocalRandom random = ThreadLocalRandom.current();
      try {
         for (;;) {
            int value = counter.getAndIncrement();
            if (value >= MAX) {
               return;
            }
            set.put(value);
            if (random.nextBoolean()) {
               Thread.yield();
            }
         }
      } catch (Throwable t) {
         error.compareAndSet(null, t);
      } finally {
         latch.countDown();
      }
   }
}
| java | Apache-2.0 | 70ad9cad7ba105e88d7c62e7b65892ecd288f034 | 2026-01-05T02:38:03.557103Z | false |
Hyperfoil/Hyperfoil | https://github.com/Hyperfoil/Hyperfoil/blob/70ad9cad7ba105e88d7c62e7b65892ecd288f034/core/src/test/java/io/hyperfoil/core/util/NoOpClientProvider.java | core/src/test/java/io/hyperfoil/core/util/NoOpClientProvider.java | package io.hyperfoil.core.util;
// Intentionally empty. NOTE(review): presumably a no-op stand-in discovered by
// name or via service loading in tests — confirm its discovery mechanism
// before removing.
public class NoOpClientProvider {
}
| java | Apache-2.0 | 70ad9cad7ba105e88d7c62e7b65892ecd288f034 | 2026-01-05T02:38:03.557103Z | false |
Hyperfoil/Hyperfoil | https://github.com/Hyperfoil/Hyperfoil/blob/70ad9cad7ba105e88d7c62e7b65892ecd288f034/core/src/test/java/io/hyperfoil/core/builder/InvalidBenchmarkTest.java | core/src/test/java/io/hyperfoil/core/builder/InvalidBenchmarkTest.java | package io.hyperfoil.core.builder;
import static org.junit.jupiter.api.Assertions.assertThrows;
import org.junit.jupiter.api.Test;
import io.hyperfoil.api.config.BenchmarkBuilder;
import io.hyperfoil.api.config.BenchmarkDefinitionException;
import io.hyperfoil.api.config.PhaseBuilder;
import io.hyperfoil.api.config.PhaseReferenceDelay;
import io.hyperfoil.api.config.RelativeIteration;
import io.hyperfoil.core.builders.StepCatalog;
/**
 * Verifies that invalid phase and variable configurations are rejected at
 * build time with a descriptive {@link BenchmarkDefinitionException}.
 */
public class InvalidBenchmarkTest {
   @Test
   public void testMissingPhase() {
      BenchmarkBuilder builder = BenchmarkBuilder.builder();
      initPhase(builder.addPhase("foo").always(1).startAfter("bar"));
      expectBuildFailure(builder, " is not defined");
   }

   @Test
   public void testDeadlock() {
      BenchmarkBuilder builder = BenchmarkBuilder.builder();
      initPhase(builder.addPhase("foo").always(1).startAfter("bar"));
      initPhase(builder.addPhase("bar").always(1).startAfterStrict("goo"));
      initPhase(builder.addPhase("goo").always(1).startAfter("foo"));
      expectBuildFailure(builder, "Phase dependencies contain cycle");
   }

   @Test
   public void testVariableNotWritten() {
      BenchmarkBuilder builder = BenchmarkBuilder.builder();
      builder.addPhase("test").atOnce(1).scenario().initialSequence("test")
            .step(StepCatalog.SC).log("Blabla: ${foo}");
      expectBuildFailure(builder, "Variable 'foo' is read but it is never written to");
   }

   @Test
   public void testMissingPhaseOnStartWith() {
      BenchmarkBuilder builder = BenchmarkBuilder.builder();
      initPhase(builder.addPhase("foo").always(1).startWith(new PhaseReferenceDelay("bar", RelativeIteration.NONE, null, 10)));
      expectBuildFailure(builder, " is not defined");
   }

   @Test
   public void testSimultaneousStartAfterAndStartWith() {
      BenchmarkBuilder builder = BenchmarkBuilder.builder();
      initPhase(builder.addPhase("foo").always(1).startAfter("bar").startWith("bar"));
      initPhase(builder.addPhase("bar").always(1));
      expectBuildFailure(builder, "has both startWith and one of startAfter, startAfterStrict and startTime set.");
   }

   @Test
   public void testSimultaneousStartAfterStrictAndStartWith() {
      BenchmarkBuilder builder = BenchmarkBuilder.builder();
      initPhase(builder.addPhase("foo").always(1).startAfterStrict("bar").startWith("bar"));
      initPhase(builder.addPhase("bar").always(1));
      expectBuildFailure(builder, "has both startWith and one of startAfter, startAfterStrict and startTime set.");
   }

   @Test
   public void testSimultaneousStartTimeAndStartWith() {
      BenchmarkBuilder builder = BenchmarkBuilder.builder();
      initPhase(builder.addPhase("foo").always(1).startTime(10).startWith("bar"));
      initPhase(builder.addPhase("bar").always(1));
      expectBuildFailure(builder, "has both startWith and one of startAfter, startAfterStrict and startTime set.");
   }

   @Test
   public void testStartWithDeadlock() {
      BenchmarkBuilder builder = BenchmarkBuilder.builder();
      initPhase(builder.addPhase("foo").always(1).startWith("bar"));
      initPhase(builder.addPhase("bar").always(1).startWith("goo"));
      initPhase(builder.addPhase("goo").always(1).startWith("foo"));
      expectBuildFailure(builder, "Phase dependencies contain cycle");
   }

   @Test
   public void testMixedDeadlock() {
      BenchmarkBuilder builder = BenchmarkBuilder.builder();
      initPhase(builder.addPhase("foo").always(1).startAfter("bar"));
      initPhase(builder.addPhase("bar").always(1).startAfterStrict("goo"));
      initPhase(builder.addPhase("goo").always(1).startWith("foo"));
      expectBuildFailure(builder, "Phase dependencies contain cycle");
   }

   // Asserts that building fails with a BenchmarkDefinitionException whose
   // message contains the given fragment.
   private void expectBuildFailure(BenchmarkBuilder builder, String expectedMessage) {
      Exception thrown = assertThrows(BenchmarkDefinitionException.class, builder::build);
      assertExceptionMessageContains(thrown, expectedMessage);
   }

   // Gives the phase a minimal valid duration and scenario so that only the
   // property under test causes the failure.
   private void initPhase(PhaseBuilder<?> p) {
      p.duration(1).scenario().initialSequence("x").step(s -> true).endSequence().endScenario();
   }

   private void assertExceptionMessageContains(Exception exception, String expectedMessage) {
      if (!exception.getMessage().contains(expectedMessage)) {
         throw new AssertionError(
               "Expected message to contain \"" + expectedMessage + "\" but was \"" + exception.getMessage() + "\"");
      }
   }
}
| java | Apache-2.0 | 70ad9cad7ba105e88d7c62e7b65892ecd288f034 | 2026-01-05T02:38:03.557103Z | false |
Hyperfoil/Hyperfoil | https://github.com/Hyperfoil/Hyperfoil/blob/70ad9cad7ba105e88d7c62e7b65892ecd288f034/core/src/test/java/io/hyperfoil/core/builder/StepCopyTest.java | core/src/test/java/io/hyperfoil/core/builder/StepCopyTest.java | package io.hyperfoil.core.builder;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.function.Consumer;
import org.junit.jupiter.api.Test;
import io.hyperfoil.api.config.BenchmarkBuilder;
import io.hyperfoil.api.config.Locator;
import io.hyperfoil.api.config.ScenarioBuilder;
import io.hyperfoil.api.config.SequenceBuilder;
import io.hyperfoil.api.config.Step;
import io.hyperfoil.api.config.StepBuilder;
import io.hyperfoil.core.builders.StepCatalog;
import io.hyperfoil.core.steps.NoopStep;
/**
 * Checks that step builders are invoked again when a sequence is copied and
 * that {@link Locator} points at the right place during each build.
 */
public class StepCopyTest {
   @Test
   public void testLoop() {
      AtomicInteger invoked = new AtomicInteger();
      test(seq -> seq.step(StepCatalog.SC).loop("foo", 2).steps()
            .step(new NoopStep())
            .stepBuilder(new TestStepBuilder(invoked)).endSequence());
      // Built once for "first" and once for its copy "second".
      assert invoked.get() == 2;
   }

   @Test
   public void testStopwatch() {
      test(seq -> seq.step(StepCatalog.SC).stopwatch()
            .step(new NoopStep()).step(new NoopStep()));
   }

   // Builds a benchmark with two sequences where "second" is a copy of
   // "first"; building triggers the step builders once per sequence.
   private void test(Consumer<SequenceBuilder> sequenceCustomizer) {
      BenchmarkBuilder benchmark = BenchmarkBuilder.builder();
      ScenarioBuilder scenario = benchmark.addPhase("test").atOnce(1).users(1).duration(1).scenario();
      SequenceBuilder original = scenario.initialSequence("first");
      sequenceCustomizer.accept(original);
      scenario.sequence("second", original);
      benchmark.build();
   }

   public static class TestStepBuilder implements StepBuilder {
      private final AtomicInteger invoked;

      public TestStepBuilder(AtomicInteger invoked) {
         this.invoked = invoked;
      }

      // Copy constructor used when the enclosing sequence is duplicated.
      public TestStepBuilder(TestStepBuilder other) {
         this.invoked = other.invoked;
      }

      @Override
      public List<Step> build() {
         Locator locator = Locator.current();
         assert locator.step() == this;
         int counter = invoked.incrementAndGet();
         String expectedSequence = counter == 1 ? "first" : "second";
         assert expectedSequence.equals(locator.sequence().name());
         assert locator.scenario() != null;
         assert locator.benchmark() != null;
         return Collections.singletonList(new NoopStep());
      }
   }
}
| java | Apache-2.0 | 70ad9cad7ba105e88d7c62e7b65892ecd288f034 | 2026-01-05T02:38:03.557103Z | false |
Hyperfoil/Hyperfoil | https://github.com/Hyperfoil/Hyperfoil/blob/70ad9cad7ba105e88d7c62e7b65892ecd288f034/core/src/test/java/io/hyperfoil/core/test/TestClock.java | core/src/test/java/io/hyperfoil/core/test/TestClock.java | package io.hyperfoil.core.test;
import java.time.Clock;
import java.time.Instant;
import java.time.ZoneId;
/**
 * Deterministic {@link Clock} for tests: time stands still until explicitly
 * moved forward via {@link #advance(long)}.
 */
public class TestClock extends Clock {
   // Current "now"; starts at real wall-clock time and only moves on advance().
   private Instant now = Instant.now();

   @Override
   public ZoneId getZone() {
      return ZoneId.systemDefault();
   }

   @Override
   public Clock withZone(ZoneId zone) {
      // Zone changes are not needed by any test.
      throw new UnsupportedOperationException();
   }

   @Override
   public Instant instant() {
      return now;
   }

   /** Moves the clock forward by the given number of milliseconds. */
   public void advance(long millis) {
      now = now.plusMillis(millis);
   }
}
| java | Apache-2.0 | 70ad9cad7ba105e88d7c62e7b65892ecd288f034 | 2026-01-05T02:38:03.557103Z | false |
Hyperfoil/Hyperfoil | https://github.com/Hyperfoil/Hyperfoil/blob/70ad9cad7ba105e88d7c62e7b65892ecd288f034/core/src/test/java/io/hyperfoil/core/test/TestUtil.java | core/src/test/java/io/hyperfoil/core/test/TestUtil.java | package io.hyperfoil.core.test;
import java.io.InputStream;
import java.util.Arrays;
import java.util.Collections;
import java.util.Map;
import java.util.concurrent.ThreadLocalRandom;
import java.util.stream.Collectors;
import io.hyperfoil.api.config.BenchmarkBuilder;
import io.hyperfoil.api.config.BenchmarkData;
import io.hyperfoil.api.config.Locator;
import io.hyperfoil.api.session.AccessVisitor;
import io.hyperfoil.api.session.ReadAccess;
import io.hyperfoil.api.session.Session;
/**
 * Shared helpers for tests: classpath-backed {@link BenchmarkData}, a mock
 * {@link Locator} and small utilities for random data and access resolution.
 */
public class TestUtil {
   // Serves files from the test classpath; fails fast when a resource is missing.
   private static final BenchmarkData TESTING_DATA = new BenchmarkData() {
      @Override
      public InputStream readFile(String file) {
         InputStream stream = getClass().getClassLoader().getResourceAsStream(file);
         if (stream == null) {
            throw new MissingFileException("Cannot load file " + file + " from current classloader.");
         }
         return stream;
      }

      @Override
      public Map<String, byte[]> files() {
         return Collections.emptyMap();
      }
   };

   // Minimal locator whose benchmark() returns a fresh builder backed by TESTING_DATA.
   private static final Locator TESTING_MOCK = new Locator.Abstract() {
      @Override
      public BenchmarkBuilder benchmark() {
         return new BenchmarkBuilder(null, Collections.emptyMap()).data(TESTING_DATA);
      }

      @Override
      public String locationMessage() {
         throw new UnsupportedOperationException();
      }
   };

   /** Produces a random lowercase a-z string of length 0 (inclusive) to maxLength (exclusive). */
   public static String randomString(ThreadLocalRandom rand, int maxLength) {
      int length = rand.nextInt(maxLength);
      StringBuilder sb = new StringBuilder(length);
      for (int i = 0; i < length; ++i) {
         sb.append((char) rand.nextInt('a', 'z' + 1));
      }
      return sb.toString();
   }

   public static Locator locator() {
      return TESTING_MOCK;
   }

   public static BenchmarkData benchmarkData() {
      return TESTING_DATA;
   }

   /**
    * Re-binds read accesses found on {@code object} to the variable indices
    * already assigned within the session's scenario.
    */
   public static void resolveAccess(Session session, Object object) {
      AccessVisitor scenarioVisitor = new AccessVisitor();
      scenarioVisitor.visit(session.phase().definition().scenario);
      Map<Object, Integer> indexByKey = Arrays.stream(scenarioVisitor.reads())
            .collect(Collectors.toMap(ReadAccess::key, ReadAccess::index));
      AccessVisitor objectVisitor = new AccessVisitor();
      objectVisitor.visit(object);
      for (ReadAccess access : objectVisitor.reads()) {
         access.setIndex(indexByKey.get(access.key()));
      }
   }
}
| java | Apache-2.0 | 70ad9cad7ba105e88d7c62e7b65892ecd288f034 | 2026-01-05T02:38:03.557103Z | false |
Hyperfoil/Hyperfoil | https://github.com/Hyperfoil/Hyperfoil/blob/70ad9cad7ba105e88d7c62e7b65892ecd288f034/core/src/test/java/io/hyperfoil/core/test/CustomExecutorRunner.java | core/src/test/java/io/hyperfoil/core/test/CustomExecutorRunner.java | package io.hyperfoil.core.test;
import java.lang.reflect.Method;
import java.util.Objects;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Future;
import org.junit.jupiter.api.extension.BeforeEachCallback;
import org.junit.jupiter.api.extension.ExtensionContext;
import org.junit.jupiter.api.extension.InvocationInterceptor;
import org.junit.jupiter.api.extension.ReflectiveInvocationContext;
/**
 * JUnit 5 extension that runs each test method on a caller-provided executor
 * (typically a single event-loop thread) instead of the JUnit worker thread.
 * The executor must be assigned to {@link #TEST_EVENT_EXECUTOR} before tests
 * run; it is shut down after all tests of the class complete.
 */
public class CustomExecutorRunner implements BeforeEachCallback, InvocationInterceptor {
   public static ExecutorService TEST_EVENT_EXECUTOR;

   @Override
   public void beforeEach(ExtensionContext extensionContext) throws Exception {
      Objects.requireNonNull(TEST_EVENT_EXECUTOR, "Executor is not set");
   }

   @Override
   public void interceptTestMethod(Invocation<Void> invocation, ReflectiveInvocationContext<Method> invocationContext,
         ExtensionContext extensionContext) throws Throwable {
      // FIX: capture the original failure so assertion errors keep their type;
      // previously failures were double-wrapped (RuntimeException inside
      // ExecutionException), obscuring the actual test failure.
      Throwable[] failure = new Throwable[1];
      Future<?> future = TEST_EVENT_EXECUTOR.submit(() -> {
         try {
            invocation.proceed();
         } catch (Throwable t) {
            failure[0] = t;
         }
      });
      // Wait for the test to complete; get() also publishes failure[0]
      // (happens-before) to this thread.
      future.get();
      if (failure[0] != null) {
         throw failure[0];
      }
   }

   @Override
   public void interceptAfterAllMethod(Invocation<Void> invocation, ReflectiveInvocationContext<Method> invocationContext,
         ExtensionContext extensionContext) throws Throwable {
      // FIX: the InvocationInterceptor contract requires every interceptor to
      // call proceed() (or skip) exactly once; the original implementation
      // never proceeded, which JUnit reports as an error.
      try {
         invocation.proceed();
      } finally {
         var executor = TEST_EVENT_EXECUTOR;
         if (executor != null) {
            executor.shutdown();
         }
      }
   }
}
| java | Apache-2.0 | 70ad9cad7ba105e88d7c62e7b65892ecd288f034 | 2026-01-05T02:38:03.557103Z | false |
Hyperfoil/Hyperfoil | https://github.com/Hyperfoil/Hyperfoil/blob/70ad9cad7ba105e88d7c62e7b65892ecd288f034/core/src/test/java/io/hyperfoil/core/impl/PoolTest.java | core/src/test/java/io/hyperfoil/core/impl/PoolTest.java | package io.hyperfoil.core.impl;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertThrows;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.junit.jupiter.api.Assertions.fail;
import java.util.ArrayDeque;
import java.util.HashSet;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.function.Supplier;
import org.junit.jupiter.api.Test;
import io.hyperfoil.api.collection.ElasticPool;
/**
 * Behavioral contract tests for {@link ElasticPool} implementations covering
 * acquire/release within the reserved capacity and the usage statistics.
 * Subclasses supply the concrete pool and the item factory.
 */
public abstract class PoolTest<T> {
   // Creates the pool under test; the supplier is used to populate the reserved capacity.
   protected abstract ElasticPool<T> createPoolWith(Supplier<T> initSupplier);

   /**
    * The pooled items factory must create a whole new instance each time, to work
    */
   protected abstract T createNewItem();

   @Test
   public void acquireReleaseWithinReservedCapacity() {
      final int reservedCapacity = 10;
      // Counts how many times the init supplier was invoked.
      final var reservedItemsCounter = new AtomicInteger();
      final var pool = createPoolWith(() -> {
         reservedItemsCounter.incrementAndGet();
         return createNewItem();
      });
      // No items are created before reserve() is called.
      assertEquals(0, reservedItemsCounter.get());
      pool.reserve(reservedCapacity);
      final var reservedItems = new HashSet<T>(reservedCapacity);
      for (int i = 0; i < reservedCapacity; i++) {
         final var acquired = pool.acquire();
         assertNotNull(acquired);
         // Each acquire must hand out a distinct item.
         assertTrue(reservedItems.add(acquired));
         // No items are created beyond the reservation.
         assertEquals(reservedCapacity, reservedItemsCounter.get());
         assertEquals(i + 1, pool.maxUsed());
         assertEquals(0, pool.minUsed());
      }
      assertEquals(reservedCapacity, reservedItems.size());
      for (final T acquired : reservedItems) {
         pool.release(acquired);
         assertEquals(reservedCapacity, reservedItemsCounter.get());
         // min/max are watermarks, not the current usage, so they don't move here.
         assertEquals(reservedCapacity, pool.maxUsed());
         assertEquals(0, pool.minUsed());
      }
      final var reservedItemsReacquired = new HashSet<T>(reservedCapacity);
      for (int i = 0; i < reservedCapacity; i++) {
         final var acquired = pool.acquire();
         assertNotNull(acquired);
         assertTrue(reservedItemsReacquired.add(acquired));
         assertEquals(reservedCapacity, reservedItemsCounter.get());
         assertEquals(reservedCapacity, pool.maxUsed());
         assertEquals(0, pool.minUsed());
      }
      // Re-acquisition must reuse exactly the previously released items.
      assertEquals(reservedItems, reservedItemsReacquired);
   }

   @Test
   public void cannotAcquireAcquireWithoutReservingFirst() {
      final var pool = createPoolWith(() -> {
         fail("Init supplier should not be called");
         return null;
      });
      assertThrows(Exception.class, pool::acquire);
   }

   @Test
   public void cannotReleaseWithoutReservingFirst() {
      final var pool = createPoolWith(() -> {
         fail("Init supplier should not be called");
         return null;
      });
      assertThrows(Exception.class, () -> pool.release(createNewItem()));
   }

   @Test
   public void cannotReleaseANullItem() {
      final var pool = createPoolWith(this::createNewItem);
      pool.reserve(1);
      // ignore what's acquired
      assertNotNull(pool.acquire());
      assertThrows(Exception.class, () -> pool.release(null));
   }

   @Test
   public void reserveBelowExistingReservedCapacityShouldReuseWholeCapacity() {
      final int reservedCapacity = 10;
      final var reservedItemsCounter = new AtomicInteger();
      final var pool = createPoolWith(() -> {
         reservedItemsCounter.incrementAndGet();
         return createNewItem();
      });
      assertEquals(0, reservedItemsCounter.get());
      pool.reserve(reservedCapacity);
      assertEquals(reservedCapacity, reservedItemsCounter.get());
      // Shrinking the reservation must not discard already created items.
      pool.reserve(0);
      for (int i = 0; i < reservedCapacity; i++) {
         assertNotNull(pool.acquire());
      }
   }

   @Test
   public void statsShouldReflectPoolUsage() {
      final int reservedCapacity = 10;
      final var pool = createPoolWith(this::createNewItem);
      pool.reserve(reservedCapacity);
      var reservedItems = new ArrayDeque<T>(reservedCapacity);
      for (int i = 0; i < reservedCapacity; i++) {
         reservedItems.add(pool.acquire());
         assertEquals(0, pool.minUsed());
         assertEquals(i + 1, pool.maxUsed());
      }
      // After a reset both watermarks collapse to the current usage.
      pool.resetStats();
      assertEquals(reservedCapacity, pool.maxUsed());
      assertEquals(reservedCapacity, pool.minUsed());
      for (int i = 0; i < reservedCapacity; i++) {
         var toRelease = reservedItems.poll();
         pool.release(toRelease);
         assertEquals(reservedCapacity - (i + 1), pool.minUsed());
         assertEquals(reservedCapacity, pool.maxUsed());
      }
   }
}
| java | Apache-2.0 | 70ad9cad7ba105e88d7c62e7b65892ecd288f034 | 2026-01-05T02:38:03.557103Z | false |
Hyperfoil/Hyperfoil | https://github.com/Hyperfoil/Hyperfoil/blob/70ad9cad7ba105e88d7c62e7b65892ecd288f034/core/src/test/java/io/hyperfoil/core/impl/ElasticPoolTest.java | core/src/test/java/io/hyperfoil/core/impl/ElasticPoolTest.java | package io.hyperfoil.core.impl;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.junit.jupiter.api.Assertions.fail;
import java.util.HashSet;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.function.Supplier;
import org.junit.jupiter.api.Test;
import io.hyperfoil.api.collection.ElasticPool;
public abstract class ElasticPoolTest<T> extends PoolTest<T> {
// Creates the pool under test with separate suppliers for the reserved
// capacity (init) and for allocations past it (depletion).
protected abstract ElasticPool<T> createPoolWith(Supplier<T> initSupplier, Supplier<T> depletionSupplier);

@Override
protected ElasticPool<T> createPoolWith(final Supplier<T> initSupplier) {
   // The base-class tests stay within the reserved capacity, so the
   // depletion supplier must never be invoked there.
   return createPoolWith(initSupplier, () -> {
      fail("Depleted supplier should not be called");
      return null;
   });
}
@Test
public void acquireReleseBeyondReservedCapacity() {
final int reservedCapacity = 10;
final var reservedItemsCounter = new AtomicInteger();
final var depletedItemsCounter = new AtomicInteger();
final var pool = createPoolWith(() -> {
reservedItemsCounter.incrementAndGet();
return createNewItem();
}, () -> {
depletedItemsCounter.incrementAndGet();
return createNewItem();
});
assertEquals(0, reservedItemsCounter.get());
assertEquals(0, depletedItemsCounter.get());
pool.reserve(reservedCapacity);
final var acquiredItems = new HashSet<T>(reservedCapacity);
for (int i = 0; i < reservedCapacity; i++) {
final var acquired = pool.acquire();
assertNotNull(acquired);
assertTrue(acquiredItems.add(acquired));
assertEquals(reservedCapacity, reservedItemsCounter.get());
assertEquals(0, depletedItemsCounter.get());
assertEquals(i + 1, pool.maxUsed());
assertEquals(0, pool.minUsed());
}
final int depletedCapacity = 10;
final var depletedItems = new HashSet<T>(depletedCapacity);
for (int i = 0; i < depletedCapacity; i++) {
final T depleted = pool.acquire();
assertNotNull(depleted);
assertFalse(acquiredItems.contains(depleted));
assertTrue(depletedItems.add(depleted));
assertEquals(reservedCapacity, reservedItemsCounter.get());
assertEquals(i + 1, depletedItemsCounter.get());
assertEquals(reservedCapacity + (i + 1), pool.maxUsed());
assertEquals(0, pool.minUsed());
}
assertEquals(depletedCapacity, depletedItems.size());
// release the depleted items
for (final T depleted : depletedItems) {
pool.release(depleted);
assertEquals(reservedCapacity, reservedItemsCounter.get());
assertEquals(depletedCapacity, depletedItemsCounter.get());
assertEquals(reservedCapacity + depletedCapacity, pool.maxUsed());
assertEquals(0, pool.minUsed());
}
// acquiring it again should just use the depleted items
final var depletedItemsReacquired = new HashSet<T>(depletedCapacity);
for (int i = 0; i < depletedCapacity; i++) {
final var depleted = pool.acquire();
assertNotNull(depleted);
assertFalse(acquiredItems.contains(depleted));
assertTrue(depletedItemsReacquired.add(depleted));
assertEquals(reservedCapacity, reservedItemsCounter.get());
assertEquals(depletedCapacity, depletedItemsCounter.get());
assertEquals(reservedCapacity + depletedCapacity, pool.maxUsed());
assertEquals(0, pool.minUsed());
}
// verify that the reacquired items are the same
assertEquals(depletedItems, depletedItemsReacquired);
}
}
| java | Apache-2.0 | 70ad9cad7ba105e88d7c62e7b65892ecd288f034 | 2026-01-05T02:38:03.557103Z | false |
Hyperfoil/Hyperfoil | https://github.com/Hyperfoil/Hyperfoil/blob/70ad9cad7ba105e88d7c62e7b65892ecd288f034/core/src/test/java/io/hyperfoil/core/impl/LockBasedElasticPoolTest.java | core/src/test/java/io/hyperfoil/core/impl/LockBasedElasticPoolTest.java | package io.hyperfoil.core.impl;
import java.util.function.Supplier;
import io.hyperfoil.api.collection.ElasticPool;
/**
 * Runs the shared {@link ElasticPoolTest} contract against {@link LockBasedElasticPool},
 * using plain {@link Object} instances as pooled items.
 */
public class LockBasedElasticPoolTest extends ElasticPoolTest<Object> {
   @Override
   protected ElasticPool<Object> createPoolWith(Supplier<Object> initSupplier, Supplier<Object> depletionSupplier) {
      return new LockBasedElasticPool<>(initSupplier, depletionSupplier);
   }

   @Override
   protected Object createNewItem() {
      // identity-distinct items are all the contract tests need
      return new Object();
   }
}
| java | Apache-2.0 | 70ad9cad7ba105e88d7c62e7b65892ecd288f034 | 2026-01-05T02:38:03.557103Z | false |
Hyperfoil/Hyperfoil | https://github.com/Hyperfoil/Hyperfoil/blob/70ad9cad7ba105e88d7c62e7b65892ecd288f034/core/src/test/java/io/hyperfoil/core/impl/EventExecutorSessionPoolTest.java | core/src/test/java/io/hyperfoil/core/impl/EventExecutorSessionPoolTest.java | package io.hyperfoil.core.impl;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertNull;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.junit.jupiter.api.Assertions.fail;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
import java.util.HashSet;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.function.Supplier;
import java.util.stream.StreamSupport;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import io.hyperfoil.api.collection.ElasticPool;
import io.hyperfoil.api.session.Session;
import io.hyperfoil.core.test.CustomExecutorRunner;
import io.netty.channel.DefaultEventLoop;
import io.netty.util.concurrent.DefaultEventExecutorGroup;
import io.netty.util.concurrent.EventExecutor;
@ExtendWith(CustomExecutorRunner.class)
public class EventExecutorSessionPoolTest extends PoolTest<Session> {
private static EventExecutor[] executors;
private int nextEventExecutor = 0;
@BeforeAll
public static void configureRunnerExecutor() {
final var eventExecutors = new DefaultEventExecutorGroup(11);
executors = StreamSupport.stream(eventExecutors.spliterator(), false)
.map(EventExecutor.class::cast).toArray(EventExecutor[]::new);
CustomExecutorRunner.TEST_EVENT_EXECUTOR = eventExecutors;
}
@Override
protected ElasticPool<Session> createPoolWith(final Supplier<Session> initSupplier) {
return new AffinityAwareSessionPool(executors, initSupplier);
}
@Override
protected Session createNewItem() {
final Session session = mock(Session.class);
final var eventExecutor = executors[nextEventExecutor];
when(session.executor()).thenReturn(eventExecutor);
when(session.agentThreadId()).thenReturn(nextEventExecutor);
nextEventExecutor++;
if (nextEventExecutor >= executors.length) {
nextEventExecutor = 0;
}
return session;
}
@Test
public void reserveBeyondReservedCapacityReuseAndExtendTheCapacity() {
final int reservedCapacity = 10;
final var reservedItemsCounter = new AtomicInteger();
final var pool = createPoolWith(() -> {
reservedItemsCounter.incrementAndGet();
return createNewItem();
});
assertEquals(0, reservedItemsCounter.get());
pool.reserve(reservedCapacity);
final var reservedItems = new HashSet<Session>(reservedCapacity);
for (int i = 0; i < reservedCapacity; i++) {
final var acquired = pool.acquire();
assertTrue(reservedItems.add(acquired));
assertNotNull(acquired);
}
// release all reserved items
for (final Session acquired : reservedItems) {
pool.release(acquired);
}
assertEquals(reservedCapacity, reservedItemsCounter.get());
final int additionalCapacity = 10;
pool.reserve(reservedCapacity + additionalCapacity);
assertEquals(reservedCapacity + additionalCapacity, reservedItemsCounter.get());
final var newReservedItems = new HashSet<Session>(reservedCapacity + additionalCapacity);
for (int i = 0; i < reservedCapacity + additionalCapacity; i++) {
final var acquired = pool.acquire();
assertNotNull(acquired);
assertTrue(newReservedItems.add(acquired));
}
assertTrue(newReservedItems.containsAll(reservedItems));
assertNull(pool.acquire());
}
@Test
public void acquireReleaseWithinReservedCapacityFromAlienThread() {
var error = new CompletableFuture<>();
Thread alienThread = new Thread(() -> {
try {
super.acquireReleaseWithinReservedCapacity();
error.complete(null);
} catch (Throwable t) {
error.completeExceptionally(t);
}
});
alienThread.start();
assertNull(error.join());
}
@Test
public void acquireReleaseWithinReservedCapacityFromAlienEventExecutor() {
var eventLoop = new DefaultEventLoop();
try {
var testResult = eventLoop.submit(super::acquireReleaseWithinReservedCapacity);
// capture the exception if any
try {
testResult.get();
} catch (Throwable e) {
fail(e.getMessage());
}
} finally {
eventLoop.shutdownGracefully();
}
}
}
| java | Apache-2.0 | 70ad9cad7ba105e88d7c62e7b65892ecd288f034 | 2026-01-05T02:38:03.557103Z | false |
Hyperfoil/Hyperfoil | https://github.com/Hyperfoil/Hyperfoil/blob/70ad9cad7ba105e88d7c62e7b65892ecd288f034/core/src/test/java/io/hyperfoil/core/impl/rate/ConstantRateGeneratorTest.java | core/src/test/java/io/hyperfoil/core/impl/rate/ConstantRateGeneratorTest.java | package io.hyperfoil.core.impl.rate;
import static org.junit.jupiter.api.Assertions.assertEquals;
/**
 * Constant-rate generator: at 1000 users/s consecutive fire times must be exactly 1 ms apart.
 */
public class ConstantRateGeneratorTest extends RateGeneratorTest {
   @Override
   int samples() {
      return 1000;
   }

   @Override
   RateGenerator newUserGenerator() {
      // 1000 users per second => one fire time every millisecond
      return RateGenerator.constantRate(1000);
   }

   @Override
   void assertSamplesWithoutSkew(final double[] fireTimesMs, final long totalUsers) {
      // every consecutive pair is exactly 1 ms apart ...
      for (int idx = 1; idx < fireTimesMs.length; idx++) {
         assertEquals(fireTimesMs[idx - 1] + 1.0, fireTimesMs[idx], 0.0);
      }
      // ... so the whole run spans exactly samples - 1 = 999 ms
      assertEquals(999, fireTimesMs[fireTimesMs.length - 1] - fireTimesMs[0], 0.0);
   }
}
| java | Apache-2.0 | 70ad9cad7ba105e88d7c62e7b65892ecd288f034 | 2026-01-05T02:38:03.557103Z | false |
Hyperfoil/Hyperfoil | https://github.com/Hyperfoil/Hyperfoil/blob/70ad9cad7ba105e88d7c62e7b65892ecd288f034/core/src/test/java/io/hyperfoil/core/impl/rate/FireTimesCounter.java | core/src/test/java/io/hyperfoil/core/impl/rate/FireTimesCounter.java | package io.hyperfoil.core.impl.rate;
final class FireTimesCounter implements FireTimeListener {
public long fireTimes;
FireTimesCounter() {
fireTimes = 0;
}
@Override
public void onFireTime() {
fireTimes++;
}
} | java | Apache-2.0 | 70ad9cad7ba105e88d7c62e7b65892ecd288f034 | 2026-01-05T02:38:03.557103Z | false |
Hyperfoil/Hyperfoil | https://github.com/Hyperfoil/Hyperfoil/blob/70ad9cad7ba105e88d7c62e7b65892ecd288f034/core/src/test/java/io/hyperfoil/core/impl/rate/RateGeneratorTest.java | core/src/test/java/io/hyperfoil/core/impl/rate/RateGeneratorTest.java | package io.hyperfoil.core.impl.rate;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.fail;
import org.apache.commons.math3.distribution.ExponentialDistribution;
import org.apache.commons.math3.random.JDKRandomGenerator;
import org.apache.commons.math3.stat.inference.KolmogorovSmirnovTest;
import org.junit.jupiter.api.Disabled;
import org.junit.jupiter.api.Test;
/**
 * Base class for {@link RateGenerator} tests: subclasses supply a generator and the statistical
 * assertions appropriate for its rate shape.
 */
public abstract class RateGeneratorTest {
   /**
    * Turns absolute fire times into the deltas between consecutive samples.
    * Requires {@code samples.length >= 1}, matching how the tests call it.
    *
    * @param samples absolute fire times in milliseconds, ascending
    * @return array of {@code samples.length - 1} inter-arrival intervals
    */
   public static double[] computeInterArrivalTimes(final double[] samples) {
      final double[] interArrivalTimes = new double[samples.length - 1];
      for (int i = 1; i < samples.length; i++) {
         // the original kept a separate index j that always equaled i - 1
         interArrivalTimes[i - 1] = samples[i] - samples[i - 1];
      }
      return interArrivalTimes;
   }

   /**
    * Fails if a Kolmogorov-Smirnov test rejects (p-value below 0.05) the hypothesis that
    * {@code data} follows an exponential distribution with the given {@code mean}.
    */
   public static void kolmogorovSmirnovTestVsExpDistr(final double[] data, final int seed, final double mean) {
      final KolmogorovSmirnovTest ksTest = new KolmogorovSmirnovTest(new JDKRandomGenerator(seed));
      final ExponentialDistribution expDistribution = new ExponentialDistribution(mean);
      final double pValue = ksTest.kolmogorovSmirnovTest(expDistribution, data);
      if (pValue < 0.05) {
         fail("The generated fire times do not follow the expected exponential distribution. p-value: " + pValue);
      }
   }

   /** Number of fire-time samples to draw in {@link #testFireTimesDistributionWithoutSkew()}. */
   abstract int samples();

   /** A fresh generator under test. */
   abstract RateGenerator newUserGenerator();

   /** Distribution assertions over {@code samples()} consecutive fire times. */
   abstract void assertSamplesWithoutSkew(double[] fireTimesMs, long totalUsers);

   @Test
   public void testNoFireTimesOnCreation() {
      assertEquals(0, newUserGenerator().fireTimes());
   }

   @Test
   @Disabled("This test fail due to lastComputedFireTimeMs() uses Math.ceil() and can skew the results")
   public void testFireTimesDistributionWithoutSkew() {
      final int samples = samples();
      final var fireTimeSamples = new double[samples];
      final var userGenerator = newUserGenerator();
      final var fireTimesCounter = new FireTimesCounter();
      for (int i = 0; i < samples; i++) {
         final long fireTimesBefore = userGenerator.fireTimes();
         fireTimesCounter.fireTimes = 0;
         final var nextFireTimeMs = userGenerator.computeNextFireTime(userGenerator.lastComputedFireTimeMs(), fireTimesCounter);
         final long fireTimesAfter = userGenerator.fireTimes();
         // each step must produce exactly one fire event, reported both via the listener
         // and via the generator's own counter
         assertEquals(1, fireTimesCounter.fireTimes);
         assertEquals(1, fireTimesAfter - fireTimesBefore);
         assertEquals(nextFireTimeMs, userGenerator.lastComputedFireTimeMs(), 0.0);
         fireTimeSamples[i] = nextFireTimeMs;
      }
      assertEquals(samples(), userGenerator.fireTimes());
      assertSamplesWithoutSkew(fireTimeSamples, userGenerator.fireTimes());
   }
}
| java | Apache-2.0 | 70ad9cad7ba105e88d7c62e7b65892ecd288f034 | 2026-01-05T02:38:03.557103Z | false |
Hyperfoil/Hyperfoil | https://github.com/Hyperfoil/Hyperfoil/blob/70ad9cad7ba105e88d7c62e7b65892ecd288f034/core/src/test/java/io/hyperfoil/core/impl/rate/RampRateRateGeneratorTest.java | core/src/test/java/io/hyperfoil/core/impl/rate/RampRateRateGeneratorTest.java | package io.hyperfoil.core.impl.rate;
import static org.junit.jupiter.api.Assertions.assertEquals;
import org.junit.jupiter.api.Test;
/**
 * Tests for a ramp-rate generator going linearly from 1 to 10 users/s over 10 s.
 */
public class RampRateRateGeneratorTest extends RateGeneratorTest {
   // Linear interpolation of the rate between initialRate and targetRate at currentTime.
   private static double computeRateAtTime(final double initialRate, final double targetRate, final long duration,
         final double currentTime) {
      return initialRate + (targetRate - initialRate) * (currentTime / duration);
   }
   @Override
   int samples() {
      // this is using the math series sum formula to calculate the total number of users i.e. sum(1, m) = m * (1 + m) / 2
      // total_users := 10 * (1 + 10) / 2 = 55
      return 55;
   }
   @Override
   RateGenerator newUserGenerator() {
      return RateGenerator.rampRate(1, 10, 10_000);
   }
   @Test
   public void divisionByZeroTest() {
      // A "ramp" with equal start and target rate degenerates to a constant rate; the
      // implementation must not divide by the zero rate difference.
      final var generator = RateGenerator.rampRate(10, 10, 10_000);
      final var missingFireTimeCounter = new FireTimesCounter();
      generator.computeNextFireTime(9999, missingFireTimeCounter);
      // 10 users/s over ~10 s => 100 fire times reported at once
      assertEquals(100, missingFireTimeCounter.fireTimes);
   }
   @Test
   public void slowStartTest() {
      // Jumping straight to the end of the ramp must report all 55 missed fire times at once.
      final var generator = newUserGenerator();
      final var missingFireTimeCounter = new FireTimesCounter();
      generator.computeNextFireTime(9999, missingFireTimeCounter);
      assertEquals(samples(), missingFireTimeCounter.fireTimes);
   }
   @Override
   void assertSamplesWithoutSkew(final double[] samples, final long totalUsers) {
      // compute inter-arrival times
      final double[] interArrivalTimes = computeInterArrivalTimes(samples);
      // compute fire times on intervals
      final double[] fireTimesOnIntervals = new double[interArrivalTimes.length];
      double elapsedTime = 0;
      for (int i = 0; i < interArrivalTimes.length; i++) {
         // instantaneous rate per millisecond at the start of this interval
         final double rpMs = computeRateAtTime(1.0 / 1000, 10.0 / 1000, 10_000, elapsedTime);
         fireTimesOnIntervals[i] = interArrivalTimes[i] * rpMs;
         elapsedTime += interArrivalTimes[i];
      }
      // we expect each of them to be 1.0
      for (final var fireTime : fireTimesOnIntervals) {
         assertEquals(1.0, fireTime, 0.0);
      }
   }
}
| java | Apache-2.0 | 70ad9cad7ba105e88d7c62e7b65892ecd288f034 | 2026-01-05T02:38:03.557103Z | false |
Hyperfoil/Hyperfoil | https://github.com/Hyperfoil/Hyperfoil/blob/70ad9cad7ba105e88d7c62e7b65892ecd288f034/core/src/test/java/io/hyperfoil/core/impl/rate/PoissonConstantRateGeneratorTest.java | core/src/test/java/io/hyperfoil/core/impl/rate/PoissonConstantRateGeneratorTest.java | package io.hyperfoil.core.impl.rate;
import java.util.Random;
/**
 * Tests a Poisson process with a constant rate of 1000 users/s: inter-arrival times must be
 * exponentially distributed.
 */
public class PoissonConstantRateGeneratorTest extends RateGeneratorTest {
   // Fixed seed keeps both the generator and the K-S test deterministic across runs.
   private static final int SEED = 0;
   @Override
   int samples() {
      return 1_000;
   }
   @Override
   RateGenerator newUserGenerator() {
      // seeded Random makes the Poisson arrival sequence reproducible
      // (the original comment claiming nextDouble() is forced to 0.5 was incorrect)
      return RateGenerator.poissonConstantRate(new Random(SEED), 1000);
   }
   @Override
   public void assertSamplesWithoutSkew(final double[] samples, final long totalUsers) {
      // Perform K-S test
      final double[] interArrivalTimes = computeInterArrivalTimes(samples);
      // it is important to use the same SEED here!
      kolmogorovSmirnovTestVsExpDistr(interArrivalTimes, SEED, 1.0);
   }
}
| java | Apache-2.0 | 70ad9cad7ba105e88d7c62e7b65892ecd288f034 | 2026-01-05T02:38:03.557103Z | false |
Hyperfoil/Hyperfoil | https://github.com/Hyperfoil/Hyperfoil/blob/70ad9cad7ba105e88d7c62e7b65892ecd288f034/core/src/test/java/io/hyperfoil/core/impl/rate/DecreasingRumpRateGeneratorTest.java | core/src/test/java/io/hyperfoil/core/impl/rate/DecreasingRumpRateGeneratorTest.java | package io.hyperfoil.core.impl.rate;
import static org.junit.jupiter.api.Assertions.assertEquals;
/**
 * Exercises a ramp that decreases from 10 down to 1 users/s over 10 s.
 * NOTE(review): the class name has a typo ("Rump" -> "Ramp"); renaming requires renaming the
 * source file as well, so it is left unchanged here.
 */
public class DecreasingRumpRateGeneratorTest extends RateGeneratorTest {
   @Override
   int samples() {
      // this is using the math series sum formula to calculate the total number of users i.e. sum(1, m) = m * (1 + m) / 2
      // total_users := 10 * (1 + 10) / 2 = 55
      return 55;
   }

   @Override
   RateGenerator newUserGenerator() {
      return RateGenerator.rampRate(10, 1, 10_000);
   }

   @Override
   void assertSamplesWithoutSkew(final double[] fireTimesMs, final long totalUsers) {
      // the very last user must fire exactly at the end of the ramp
      assertEquals(10_000, fireTimesMs[fireTimesMs.length - 1], 0.0);
   }
}
| java | Apache-2.0 | 70ad9cad7ba105e88d7c62e7b65892ecd288f034 | 2026-01-05T02:38:03.557103Z | false |
Hyperfoil/Hyperfoil | https://github.com/Hyperfoil/Hyperfoil/blob/70ad9cad7ba105e88d7c62e7b65892ecd288f034/core/src/test/java/io/hyperfoil/core/impl/rate/PoissonRampRateGeneratorTest.java | core/src/test/java/io/hyperfoil/core/impl/rate/PoissonRampRateGeneratorTest.java | package io.hyperfoil.core.impl.rate;
import java.util.Random;
/**
 * Tests a Poisson process whose rate ramps linearly from 1 to 10 users/s over 10 s.
 */
public class PoissonRampRateGeneratorTest extends RateGeneratorTest {
   // Fixed seed keeps both the generator and the K-S test deterministic across runs.
   private static final int SEED = 0;
   // Linear interpolation of the rate between initialRate and targetRate at currentTime.
   private static double computeRateAtTime(final double initialRate, final double targetRate, final long duration,
         final double currentTime) {
      return initialRate + (targetRate - initialRate) * (currentTime / duration);
   }
   @Override
   int samples() {
      return 1000;
   }
   @Override
   RateGenerator newUserGenerator() {
      return RateGenerator.poissonRampRate(new Random(SEED), 1, 10, 10000);
   }
   @Override
   void assertSamplesWithoutSkew(final double[] samples, final long totalUsers) {
      final double[] interArrivalTimes = computeInterArrivalTimes(samples);
      // Normalize each interval by the instantaneous rate (per ms) at its start: for an
      // inhomogeneous Poisson process the normalized intervals should be Exp(1)-distributed.
      final double[] fireTimesOnIntervals = new double[interArrivalTimes.length];
      double elapsedTime = 0;
      for (int i = 0; i < interArrivalTimes.length; i++) {
         final double rpMs = computeRateAtTime(0.001, 0.01, 10000, elapsedTime);
         fireTimesOnIntervals[i] = interArrivalTimes[i] * rpMs;
         elapsedTime += interArrivalTimes[i];
      }
      // fireTimesOnIntervals should follow an exponential distribution with lambda = 1
      kolmogorovSmirnovTestVsExpDistr(fireTimesOnIntervals, SEED, 1.0);
   }
}
| java | Apache-2.0 | 70ad9cad7ba105e88d7c62e7b65892ecd288f034 | 2026-01-05T02:38:03.557103Z | false |
Hyperfoil/Hyperfoil | https://github.com/Hyperfoil/Hyperfoil/blob/70ad9cad7ba105e88d7c62e7b65892ecd288f034/core/src/test/java/io/hyperfoil/core/generators/RandomUUIDStepTest.java | core/src/test/java/io/hyperfoil/core/generators/RandomUUIDStepTest.java | package io.hyperfoil.core.generators;
import static org.junit.jupiter.api.Assertions.assertTrue;
import java.util.List;
import org.junit.jupiter.api.Test;
import io.hyperfoil.api.config.Locator;
import io.hyperfoil.api.config.Step;
import io.hyperfoil.api.session.ObjectAccess;
import io.hyperfoil.api.session.Session;
import io.hyperfoil.core.session.SessionFactory;
import io.hyperfoil.core.test.TestUtil;
/** Verifies that {@link RandomUUIDStep} writes a well-formed UUID string into the target variable. */
public class RandomUUIDStepTest {
   @Test
   public void testStringGenerator() {
      // try several variable names to make sure the configured target var is honored
      String[] vars = new String[] { "first", "second", "third", "fourth" };
      for (String varName : vars) {
         // builder
         RandomUUIDStep.Builder builder = new RandomUUIDStep.Builder();
         // was builder.init(String.format("%s", varName)) - the format call was a no-op
         builder.init(varName);
         // session
         Locator.push(TestUtil.locator());
         List<Step> steps = builder.build();
         ObjectAccess access = SessionFactory.objectAccess(varName);
         Locator.pop();
         Session session = SessionFactory.forTesting(access);
         // assert
         // renamed local from randomStringStep: this is a UUID step, not a random-string step
         RandomUUIDStep randomUUIDStep = (RandomUUIDStep) steps.get(0);
         TestUtil.resolveAccess(session, randomUUIDStep);
         randomUUIDStep.invoke(session);
         String value = access.getObject(session).toString();
         assertTrue(value.matches("[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}"));
      }
   }
}
| java | Apache-2.0 | 70ad9cad7ba105e88d7c62e7b65892ecd288f034 | 2026-01-05T02:38:03.557103Z | false |
Hyperfoil/Hyperfoil | https://github.com/Hyperfoil/Hyperfoil/blob/70ad9cad7ba105e88d7c62e7b65892ecd288f034/core/src/test/java/io/hyperfoil/core/generators/TimestampStepTest.java | core/src/test/java/io/hyperfoil/core/generators/TimestampStepTest.java | package io.hyperfoil.core.generators;
import static org.assertj.core.api.Assertions.assertThat;
import org.junit.jupiter.api.Test;
import io.hyperfoil.api.config.Locator;
import io.hyperfoil.api.session.ObjectAccess;
import io.hyperfoil.api.session.Session;
import io.hyperfoil.core.session.SessionFactory;
import io.hyperfoil.core.steps.TimestampStep;
import io.hyperfoil.core.test.TestUtil;
/** Verifies that {@link TimestampStep} formats the current time into a session variable. */
public class TimestampStepTest {
   @Test
   public void testTimestamp() {
      // TimestampStep.Builder requires a Locator on the stack during build()
      Locator.push(TestUtil.locator());
      try {
         // "yyyyy" pads the year to 5 digits (e.g. "02025"), hence the "020xx" in the regex below
         TimestampStep step = (TimestampStep) new TimestampStep.Builder().toVar("foo").pattern("yyyyy.MMMMM.dd GGG hh:mm aaa")
               .build().get(0);
         ObjectAccess foo = SessionFactory.objectAccess("foo");
         Session session = SessionFactory.forTesting(foo);
         step.reserve(session);
         TestUtil.resolveAccess(session, step);
         assertThat(step.invoke(session)).isTrue();
         assertThat(foo.getObject(session)).matches(ts -> {
            String timestamp = (String) ts;
            // NOTE(review): this expectation only matches years 2000-2099
            return timestamp.matches("020[0-9][0-9].[a-zA-Z]+.[0-9][0-9] AD [0-9][0-9]:[0-9][0-9] [AP]M");
         });
      } finally {
         Locator.pop();
      }
   }
}
| java | Apache-2.0 | 70ad9cad7ba105e88d7c62e7b65892ecd288f034 | 2026-01-05T02:38:03.557103Z | false |
Hyperfoil/Hyperfoil | https://github.com/Hyperfoil/Hyperfoil/blob/70ad9cad7ba105e88d7c62e7b65892ecd288f034/core/src/test/java/io/hyperfoil/core/generators/RandomCsvRowStepTest.java | core/src/test/java/io/hyperfoil/core/generators/RandomCsvRowStepTest.java | package io.hyperfoil.core.generators;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.fail;
import static org.junit.jupiter.api.Assertions.assertArrayEquals;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertTrue;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.util.Arrays;
import java.util.List;
import org.junit.jupiter.api.Test;
import io.hyperfoil.api.config.Locator;
import io.hyperfoil.api.config.Step;
import io.hyperfoil.api.session.Session;
import io.hyperfoil.api.session.WriteAccess;
import io.hyperfoil.core.session.SessionFactory;
import io.hyperfoil.core.test.TestUtil;
/**
 * Tests for {@link RandomCsvRowStep}: CSV parsing (quotes, embedded separators and newlines),
 * column-to-variable mapping, custom row selection and Java serialization of the step.
 */
public class RandomCsvRowStepTest {
   // Expected parsed contents of data/testdata.csv.
   private static final String[][] DATA = new String[][] {
         { "one", "two two", "three, three", "four\"four" },
         { " five", "six ", "", "eight" },
         { "nine", "", "eleven", "twelve\n ends here" }
   };

   @Test
   public void testParseAllColumns() {
      Locator.push(TestUtil.locator());
      RandomCsvRowStep.Builder builder = new RandomCsvRowStep.Builder()
            .skipComments(true)
            .file("data/testdata.csv");
      var cols = builder.columns();
      for (int i = 0; i < 4; i++) {
         var pos = Integer.toString(i);
         cols.accept(pos, pos);
      }
      String[][] rows = ((RandomCsvRowStep) builder.build().get(0)).rows();
      Locator.pop();
      // expected value goes first (the arguments were swapped in the original)
      assertEquals(DATA.length, rows.length);
      for (int i = 0; i < DATA.length; i++) {
         assertArrayEquals(DATA[i], rows[i]);
      }
   }

   @Test
   public void testSelectDifferentColumns() {
      Locator.push(TestUtil.locator());
      // sequential selector: the step visits the rows in file order instead of randomly
      class MutableInt {
         int i = 0;
      }
      var mutableInt = new MutableInt();
      RandomCsvRowStep.Builder builder = new RandomCsvRowStep.Builder()
            .skipComments(true)
            .customSelector(limit -> mutableInt.i++)
            .file("data/testdata.csv");
      var cols = builder.columns();
      var vars = new String[4];
      for (int i = 0; i < 4; i++) {
         var pos = Integer.toString(i);
         vars[i] = pos;
         cols.accept(pos, pos);
      }
      var access = Arrays.stream(vars).map(SessionFactory::objectAccess).toArray(WriteAccess[]::new);
      var session = SessionFactory.forTesting(access);
      var steps = builder.build();
      Locator.pop();
      var csvRowStep = (RandomCsvRowStep) steps.get(0);
      TestUtil.resolveAccess(session, csvRowStep);
      // with the sequential selector each invoke() must yield the next row, in order
      for (String[] row : DATA) {
         assertTrue(csvRowStep.invoke(session));
         assertEquals(row.length, access.length);
         for (int i = 0; i < row.length; i++) {
            assertThat(access[i].getObject(session)).isEqualTo(row[i]);
         }
      }
   }

   @Test
   public void testAllColumns() {
      test(new String[] { "first", "second", "third", "fourth" });
   }

   @Test
   public void testTwoColumns() {
      // renamed from testTwoColums (typo); only columns 0 and 2 are mapped to variables
      test(new String[] { "first", null, "third", null });
   }

   @Test
   public void serializeAndDeserializeStepIgnoreCustomSelector() throws IOException, ClassNotFoundException {
      Locator.push(TestUtil.locator());
      RandomCsvRowStep.Builder builder = new RandomCsvRowStep.Builder()
            .skipComments(true)
            .customSelector(limit -> 0)
            .file("data/testdata.csv");
      var cols = builder.columns();
      var vars = new String[4];
      for (int i = 0; i < 4; i++) {
         var pos = Integer.toString(i);
         vars[i] = pos;
         cols.accept(pos, pos);
      }
      var steps = builder.build();
      Locator.pop();
      var step = steps.get(0);
      // round-trip the step through Java serialization
      final byte[] serializedBytes;
      try (var byteArrayStream = new ByteArrayOutputStream();
            var objectOutputStream = new ObjectOutputStream(byteArrayStream)) {
         objectOutputStream.writeObject(step);
         objectOutputStream.flush();
         serializedBytes = byteArrayStream.toByteArray();
      }
      try (var objectInputStream = new ObjectInputStream(new ByteArrayInputStream(serializedBytes))) {
         var deserializedStep = (Step) objectInputStream.readObject();
         assertThat(deserializedStep).isInstanceOf(step.getClass());
         // the custom selector must be dropped by serialization
         assertThat(((RandomCsvRowStep) deserializedStep).rowSelector()).isNull();
      }
   }

   /**
    * Maps the given variable names to CSV columns by index (null entries stay unmapped), invokes
    * the step several times and verifies each produced row matches one of the expected rows.
    */
   private void test(String[] vars) {
      Locator.push(TestUtil.locator());
      RandomCsvRowStep.Builder builder = new RandomCsvRowStep.Builder()
            .skipComments(true)
            .file("data/testdata.csv");
      for (int i = 0; i < vars.length; ++i) {
         if (vars[i] != null) {
            builder.columns().accept(String.valueOf(i), vars[i]);
         }
      }
      // presumably objectAccess(null) returns null for unmapped columns - hence the null check below
      WriteAccess[] access = Arrays.stream(vars).map(SessionFactory::objectAccess).toArray(WriteAccess[]::new);
      Session session = SessionFactory.forTesting(access);
      List<Step> steps = builder.build();
      Locator.pop();
      RandomCsvRowStep csvRowStep = (RandomCsvRowStep) steps.get(0);
      TestUtil.resolveAccess(session, csvRowStep);
      OUTER: for (int i = 0; i < 10; ++i) {
         // assert the result for consistency with testSelectDifferentColumns (was ignored)
         assertTrue(csvRowStep.invoke(session));
         Object first = access[0].getObject(session);
         for (String[] row : DATA) {
            if (row[0].equals(first)) {
               for (int j = 1; j < row.length; ++j) {
                  if (access[j] != null) {
                     assertThat(access[j].getObject(session)).isEqualTo(row[j]);
                  }
               }
               continue OUTER;
            }
         }
         fail("No match for row: %s", first);
      }
   }
}
| java | Apache-2.0 | 70ad9cad7ba105e88d7c62e7b65892ecd288f034 | 2026-01-05T02:38:03.557103Z | false |
Hyperfoil/Hyperfoil | https://github.com/Hyperfoil/Hyperfoil/blob/70ad9cad7ba105e88d7c62e7b65892ecd288f034/core/src/test/java/io/hyperfoil/core/generators/PatternTest.java | core/src/test/java/io/hyperfoil/core/generators/PatternTest.java | package io.hyperfoil.core.generators;
import static org.assertj.core.api.Assertions.assertThat;
import java.nio.charset.StandardCharsets;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import io.hyperfoil.api.config.Locator;
import io.hyperfoil.api.session.IntAccess;
import io.hyperfoil.api.session.ObjectAccess;
import io.hyperfoil.api.session.Session;
import io.hyperfoil.core.session.SessionFactory;
import io.hyperfoil.core.test.TestUtil;
import io.netty.buffer.ByteBuf;
import io.netty.buffer.ByteBufAllocator;
/**
 * Tests for {@link Pattern} interpolation: variable substitution, printf-style formatting,
 * URL-encoding, escaping and regex replacement. Every case is checked both through
 * {@link Pattern#apply} (String result) and {@link Pattern#accept} (ByteBuf result).
 */
public class PatternTest {
   @BeforeEach
   public void before() {
      // Pattern construction requires a Locator on the stack.
      Locator.push(TestUtil.locator());
   }
   @AfterEach
   public void after() {
      Locator.pop();
   }
   @Test
   public void testString() {
      Pattern pattern = new Pattern("foo${var}bar", false);
      Session session = setObject("var", "xx");
      test(pattern, session, "fooxxbar");
   }
   @Test
   public void testNonAscii() {
      // non-ASCII content must survive both the String and the UTF-8 ByteBuf paths
      Pattern pattern = new Pattern("foo${var}bar", false);
      Session session = setObject("var", "ěščř");
      test(pattern, session, "fooěščřbar");
   }
   @Test
   public void testInt() {
      // int variables are rendered through their decimal representation
      Pattern pattern = new Pattern("${var}bar", false);
      Session session = setInt("var", 42);
      test(pattern, session, "42bar");
   }
   @Test
   public void testFormat() {
      // printf-style conversion prefix applied to an int variable
      Pattern pattern = new Pattern("${%04X:var}bar", false);
      Session session = setInt("var", 42);
      test(pattern, session, "002Abar");
   }
   @Test
   public void testUrlEncode() {
      // explicit urlencode: transformation
      Pattern pattern = new Pattern("foo${urlencode:var}", false);
      Session session = setObject("var", " @+ěščř ");
      test(pattern, session, "foo+%40%2B%C4%9B%C5%A1%C4%8D%C5%99+");
   }
   @Test
   public void testUrlEncodeImplicit() {
      // second constructor arg turns URL-encoding on for all replacements
      Pattern pattern = new Pattern("foo${var}", true);
      Session session = setObject("var", " @+ěščř ");
      test(pattern, session, "foo+%40%2B%C4%9B%C5%A1%C4%8D%C5%99+");
   }
   @Test
   public void testEscape() {
      // extra '$' characters escape the expansion; the expected value pins the exact escaping rules
      Pattern pattern = new Pattern("foo${var}$${var}${var}$$${var}", false);
      Session session = setObject("var", "bar");
      test(pattern, session, "foobar${var}bar$${var}");
   }
   @Test
   public void testReplaceFirst() {
      // replace without a trailing flag substitutes only the first match
      Pattern pattern = new Pattern("foo${replace/[a-z]/X/:var}bar", false);
      Session session = setObject("var", "xyz");
      test(pattern, session, "fooXyzbar");
   }
   @Test
   public void testReplaceAll() {
      // trailing "g" flag switches to replace-all; the delimiter character is arbitrary
      Pattern pattern = new Pattern("foo${replace#[a-z]#X#g:var}bar", false);
      Session session = setObject("var", "xyz");
      test(pattern, session, "fooXXXbar");
   }
   // Creates a single-variable test session holding the given String value.
   private Session setObject(String name, String value) {
      ObjectAccess var = SessionFactory.objectAccess(name);
      Session session = SessionFactory.forTesting(var);
      var.setObject(session, value);
      return session;
   }
   // Creates a single-variable test session holding the given int value.
   private Session setInt(String name, int value) {
      IntAccess var = SessionFactory.intAccess(name);
      Session session = SessionFactory.forTesting(var);
      var.setInt(session, value);
      return session;
   }
   // Applies the pattern both as a String and into a ByteBuf and checks both against expected.
   private void test(Pattern pattern, Session session, String expected) {
      TestUtil.resolveAccess(session, pattern);
      String str = pattern.apply(session);
      assertThat(str).isEqualTo(expected);
      ByteBuf buf = ByteBufAllocator.DEFAULT.buffer();
      pattern.accept(session, buf);
      byte[] bytes = new byte[buf.readableBytes()];
      buf.readBytes(bytes);
      String bufString = new String(bytes, StandardCharsets.UTF_8);
      assertThat(bufString).isEqualTo(expected);
      buf.release();
   }
}
| java | Apache-2.0 | 70ad9cad7ba105e88d7c62e7b65892ecd288f034 | 2026-01-05T02:38:03.557103Z | false |
Hyperfoil/Hyperfoil | https://github.com/Hyperfoil/Hyperfoil/blob/70ad9cad7ba105e88d7c62e7b65892ecd288f034/core/src/test/java/io/hyperfoil/core/session/BaseBenchmarkParserTest.java | core/src/test/java/io/hyperfoil/core/session/BaseBenchmarkParserTest.java | package io.hyperfoil.core.session;
import static org.assertj.core.api.Assertions.assertThat;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Collections;
import java.util.Map;
import io.hyperfoil.api.config.Benchmark;
import io.hyperfoil.core.parser.BenchmarkParser;
import io.hyperfoil.core.parser.ParserException;
import io.hyperfoil.core.test.TestUtil;
import io.hyperfoil.impl.Util;
/**
 * Base class for parser tests: loads YAML benchmark definitions from the filesystem or the
 * classpath and asserts the resulting {@link Benchmark} is fully serializable.
 */
public abstract class BaseBenchmarkParserTest {
   /**
    * Loads a benchmark from {@code filePath} (filesystem first, classpath as fallback).
    *
    * @throws AssertionError wrapping any {@link IOException} or {@link ParserException}
    */
   protected Benchmark loadScenario(String filePath) {
      // try-with-resources replaces the original manual finally/close, which rethrew close()
      // failures as RuntimeException and could thereby mask the real exception
      try (InputStream stream = openScenario(filePath)) {
         Benchmark benchmark = loadBenchmark(stream);
         // Serialization here is solely for the purpose of asserting serializability for all the components
         byte[] bytes = Util.serialize(benchmark);
         assertThat(bytes).isNotNull();
         return benchmark;
      } catch (IOException | ParserException e) {
         throw new AssertionError(e);
      }
   }

   /** Opens the benchmark definition from the filesystem if present, otherwise from the classpath. */
   private InputStream openScenario(String filePath) throws IOException {
      if (Files.exists(Path.of(filePath))) {
         return new FileInputStream(filePath);
      }
      return getClass().getClassLoader().getResourceAsStream(filePath);
   }

   /** Parses the benchmark with no template arguments. */
   protected Benchmark loadBenchmark(InputStream config) throws IOException, ParserException {
      return BenchmarkParser.instance().buildBenchmark(config, TestUtil.benchmarkData(), Collections.emptyMap());
   }

   /** Parses the benchmark with the given template arguments. */
   protected Benchmark loadBenchmark(InputStream config, Map<String, String> arguments) throws IOException, ParserException {
      return BenchmarkParser.instance().buildBenchmark(config, TestUtil.benchmarkData(), arguments);
   }
}
| java | Apache-2.0 | 70ad9cad7ba105e88d7c62e7b65892ecd288f034 | 2026-01-05T02:38:03.557103Z | false |
Hyperfoil/Hyperfoil | https://github.com/Hyperfoil/Hyperfoil/blob/70ad9cad7ba105e88d7c62e7b65892ecd288f034/core/src/test/java/io/hyperfoil/core/session/ThreadDataTest.java | core/src/test/java/io/hyperfoil/core/session/ThreadDataTest.java | package io.hyperfoil.core.session;
import static org.assertj.core.api.Assertions.assertThat;
import java.util.concurrent.ThreadLocalRandom;
import org.junit.jupiter.api.Test;
import io.hyperfoil.api.session.ThreadData;
public class ThreadDataTest {
private static final String FOO = "foo";
private static final String NUMBER = "number";
@Test
public void testFlatData() {
ThreadData data = new ThreadDataImpl();
data.reserveMap(FOO, null, 3);
for (int i = 0; i < 10; ++i) {
ThreadData.SharedMap map = data.newMap(FOO);
map.put(FOO, "bar" + i);
map.put(NUMBER, i);
data.pushMap(FOO, map);
}
int sum = 0;
for (int i = 0; i < 10; ++i) {
ThreadData.SharedMap map = data.pullMap(FOO);
int number = (Integer) map.get(NUMBER);
sum += number;
data.releaseMap(FOO, map);
}
assertThat(sum).isEqualTo(45);
assertThat(data.pullMap(FOO)).isNull();
}
@Test
public void testIndexedData() {
ThreadData data = new ThreadDataImpl();
data.reserveMap(FOO, null, 5);
data.reserveMap(FOO, FOO, 2);
data.reserveMap(FOO, NUMBER, 3);
int count = 0;
for (int i = 0; i < 10; ++i) {
for (int j = 0; j <= i; ++j) {
ThreadData.SharedMap map = data.newMap(FOO);
map.put(FOO, "bar" + i);
map.put(NUMBER, j);
data.pushMap(FOO, map);
count++;
}
}
for (int i = 0; i < count; ++i) {
if (count % 5 == 0) {
ThreadData.SharedMap map = data.pullMap(FOO);
assertThat(map).isNotNull();
} else {
ThreadData.SharedMap map;
int j;
do {
j = ThreadLocalRandom.current().nextInt(10);
map = data.pullMap(FOO, NUMBER, j);
} while (map == null);
assertThat(map.get(NUMBER)).isEqualTo(j);
}
}
assertThat(data.pullMap(FOO)).isNull();
}
}
| java | Apache-2.0 | 70ad9cad7ba105e88d7c62e7b65892ecd288f034 | 2026-01-05T02:38:03.557103Z | false |
Hyperfoil/Hyperfoil | https://github.com/Hyperfoil/Hyperfoil/blob/70ad9cad7ba105e88d7c62e7b65892ecd288f034/core/src/test/java/io/hyperfoil/core/session/ExecutorsTest.java | core/src/test/java/io/hyperfoil/core/session/ExecutorsTest.java | package io.hyperfoil.core.session;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.fail;
import java.util.HashSet;
import java.util.Set;
import org.junit.jupiter.api.Test;
public class ExecutorsTest extends BaseScenarioTest {
public static final int CLIENT_THREADS = 3;
@Test
public void test() {
Set<Thread> threads = new HashSet<>();
parallelScenario(10).initialSequence("foo")
.step(s -> {
synchronized (threads) {
threads.add(Thread.currentThread());
}
try {
Thread.sleep(100);
} catch (InterruptedException e) {
fail("Cannot sleep for 100ms", e);
}
return true;
})
.endSequence();
runScenario();
assertThat(threads.size()).isEqualTo(CLIENT_THREADS);
}
@Override
protected int threads() {
return CLIENT_THREADS;
}
}
| java | Apache-2.0 | 70ad9cad7ba105e88d7c62e7b65892ecd288f034 | 2026-01-05T02:38:03.557103Z | false |
Hyperfoil/Hyperfoil | https://github.com/Hyperfoil/Hyperfoil/blob/70ad9cad7ba105e88d7c62e7b65892ecd288f034/core/src/test/java/io/hyperfoil/core/session/ManualLoopTest.java | core/src/test/java/io/hyperfoil/core/session/ManualLoopTest.java | package io.hyperfoil.core.session;
import static io.hyperfoil.core.builders.StepCatalog.SC;
import org.junit.jupiter.api.Test;
import io.hyperfoil.core.steps.RestartSequenceAction;
import io.hyperfoil.core.steps.SetIntAction;
public class ManualLoopTest extends BaseScenarioTest {
@Test
public void test() {
scenario()
.initialSequence("test")
.step(SC).breakSequence()
.condition().intCondition().fromVar("counter").equalTo().value(1).end().end()
.endStep()
.step(SC).action(new SetIntAction.Builder().var("counter").value(1))
.step(SC).action(new RestartSequenceAction.Builder());
runScenario();
}
}
| java | Apache-2.0 | 70ad9cad7ba105e88d7c62e7b65892ecd288f034 | 2026-01-05T02:38:03.557103Z | false |
Hyperfoil/Hyperfoil | https://github.com/Hyperfoil/Hyperfoil/blob/70ad9cad7ba105e88d7c62e7b65892ecd288f034/core/src/test/java/io/hyperfoil/core/session/BaseScenarioTest.java | core/src/test/java/io/hyperfoil/core/session/BaseScenarioTest.java | package io.hyperfoil.core.session;
import java.util.HashMap;
import java.util.Map;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.junit.jupiter.api.BeforeEach;
import io.hyperfoil.api.config.Benchmark;
import io.hyperfoil.api.config.BenchmarkBuilder;
import io.hyperfoil.api.config.Phase;
import io.hyperfoil.api.config.ScenarioBuilder;
import io.hyperfoil.api.statistics.StatisticsSnapshot;
import io.hyperfoil.core.impl.LocalSimulationRunner;
import io.hyperfoil.core.impl.statistics.StatisticsCollector;
import io.hyperfoil.core.util.CountDown;
public abstract class BaseScenarioTest extends BaseBenchmarkParserTest {
protected final Logger log = LogManager.getLogger(getClass());
protected BenchmarkBuilder benchmarkBuilder;
protected Map<String, StatisticsSnapshot> runScenario() {
return runScenario(benchmarkBuilder.build());
}
protected Map<String, StatisticsSnapshot> runScenario(Benchmark benchmark) {
TestStatistics statisticsConsumer = new TestStatistics();
LocalSimulationRunner runner = new LocalSimulationRunner(benchmark, statisticsConsumer, null, null);
runner.run();
return statisticsConsumer.stats();
}
@BeforeEach
public void before() {
benchmarkBuilder = BenchmarkBuilder.builder();
benchmarkBuilder.threads(threads());
}
protected ScenarioBuilder scenario() {
return scenario(1);
}
protected ScenarioBuilder scenario(int repeats) {
return benchmarkBuilder.addPhase("test").sequentially(repeats).scenario();
}
protected ScenarioBuilder parallelScenario(int concurrency) {
return benchmarkBuilder.addPhase("test").atOnce(concurrency).scenario();
}
protected int threads() {
return 3;
}
public class TestStatistics implements StatisticsCollector.StatisticsConsumer {
private final Map<String, StatisticsSnapshot> stats = new HashMap<>();
@Override
public void accept(Phase phase, int stepId, String metric, StatisticsSnapshot snapshot, CountDown countDown) {
log.debug("Adding stats for {}/{}/{} - #{}: {} requests {} responses", phase, stepId, metric,
snapshot.sequenceId, snapshot.requestCount, snapshot.responseCount);
stats.computeIfAbsent(metric, n -> new StatisticsSnapshot()).add(snapshot);
}
public Map<String, StatisticsSnapshot> stats() {
return stats;
}
}
}
| java | Apache-2.0 | 70ad9cad7ba105e88d7c62e7b65892ecd288f034 | 2026-01-05T02:38:03.557103Z | false |
Hyperfoil/Hyperfoil | https://github.com/Hyperfoil/Hyperfoil/blob/70ad9cad7ba105e88d7c62e7b65892ecd288f034/core/src/test/java/io/hyperfoil/core/session/JsonStepTest.java | core/src/test/java/io/hyperfoil/core/session/JsonStepTest.java | package io.hyperfoil.core.session;
import static org.assertj.core.api.Assertions.assertThat;
import java.nio.charset.StandardCharsets;
import org.junit.jupiter.api.Test;
import io.hyperfoil.api.session.ObjectAccess;
import io.hyperfoil.api.session.ReadAccess;
import io.hyperfoil.core.data.DataFormat;
import io.hyperfoil.core.steps.JsonStep;
public class JsonStepTest extends BaseScenarioTest {
@Test
public void test() {
scenario()
.initialSequence("test")
.step(() -> {
ObjectAccess json = SessionFactory.objectAccess("json");
return s1 -> {
json.setObject(s1, "{ \"foo\" : \"bar\\nbar\" }".getBytes(StandardCharsets.UTF_8));
return true;
};
})
.stepBuilder(new JsonStep.Builder()
.fromVar("json")
.query(".foo")
.storeShortcuts().format(DataFormat.STRING).toVar("output").end())
.step(() -> {
ReadAccess output = SessionFactory.readAccess("output");
return s -> {
assertThat(output.getObject(s)).isEqualTo("bar\nbar");
return true;
};
});
runScenario();
}
}
| java | Apache-2.0 | 70ad9cad7ba105e88d7c62e7b65892ecd288f034 | 2026-01-05T02:38:03.557103Z | false |
Hyperfoil/Hyperfoil | https://github.com/Hyperfoil/Hyperfoil/blob/70ad9cad7ba105e88d7c62e7b65892ecd288f034/core/src/test/java/io/hyperfoil/core/handlers/SearchHandlerTest.java | core/src/test/java/io/hyperfoil/core/handlers/SearchHandlerTest.java | package io.hyperfoil.core.handlers;
import java.nio.charset.StandardCharsets;
import org.junit.jupiter.api.Test;
import io.hyperfoil.api.session.ResourceUtilizer;
import io.hyperfoil.api.session.Session;
import io.hyperfoil.core.session.SessionFactory;
import io.netty.buffer.ByteBuf;
import io.netty.buffer.Unpooled;
public class SearchHandlerTest {
@Test
public void testSimple() {
ExpectProcessor processor = new ExpectProcessor();
processor.expect(6, 3, true);
SearchHandler handler = new SearchHandler("foo", "bar", processor);
runHandler(handler, processor, "yyyfooxxxbaryyy");
}
@Test
public void testStartEnd() {
ExpectProcessor processor = new ExpectProcessor();
processor.expect(3, 2, true);
SearchHandler handler = new SearchHandler("foo", "bar", processor);
runHandler(handler, processor, "fooxxbar");
}
@Test
public void testEmpty() {
ExpectProcessor processor = new ExpectProcessor();
processor.expect(3, 0, true);
SearchHandler handler = new SearchHandler("foo", "bar", processor);
runHandler(handler, processor, "foobar");
}
@Test
public void testNotEnding() {
ExpectProcessor processor = new ExpectProcessor();
SearchHandler handler = new SearchHandler("foo", "bar", processor);
runHandler(handler, processor, "fooxxx");
}
@Test
public void testGreedy() {
ExpectProcessor processor = new ExpectProcessor();
processor.expect(3, 6, true);
SearchHandler handler = new SearchHandler("foo", "bar", processor);
runHandler(handler, processor, "foofooxxxbar");
}
@Test
public void testSplitMany() {
ExpectProcessor processor = new ExpectProcessor();
processor.expect(1, 3, true);
processor.expect(0, 1, false);
processor.expect(0, 2, true);
SearchHandler handler = new SearchHandler("foo", "bar", processor);
runHandler(handler, processor, "fo", "oxxxb", "aryyyfoo", "x", "xxbar");
}
private void runHandler(SearchHandler handler, ExpectProcessor processor, String... text) {
Session session = SessionFactory.forTesting();
ResourceUtilizer.reserveForTesting(session, handler);
handler.before(session);
for (String t : text) {
ByteBuf data = Unpooled.wrappedBuffer(t.getBytes(StandardCharsets.UTF_8));
handler.process(session, data, data.readerIndex(), data.readableBytes(), false);
}
handler.after(session);
processor.validate();
}
}
| java | Apache-2.0 | 70ad9cad7ba105e88d7c62e7b65892ecd288f034 | 2026-01-05T02:38:03.557103Z | false |
Hyperfoil/Hyperfoil | https://github.com/Hyperfoil/Hyperfoil/blob/70ad9cad7ba105e88d7c62e7b65892ecd288f034/core/src/test/java/io/hyperfoil/core/handlers/JsonHandlerTest.java | core/src/test/java/io/hyperfoil/core/handlers/JsonHandlerTest.java | package io.hyperfoil.core.handlers;
import static org.assertj.core.api.Assertions.assertThat;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import org.junit.jupiter.api.Test;
import io.hyperfoil.api.processor.Processor;
import io.hyperfoil.api.processor.Transformer;
import io.hyperfoil.api.session.ResourceUtilizer;
import io.hyperfoil.api.session.Session;
import io.hyperfoil.core.handlers.json.JsonHandler;
import io.hyperfoil.core.handlers.json.JsonUnquotingTransformer;
import io.hyperfoil.core.session.SessionFactory;
import io.netty.buffer.ByteBuf;
import io.netty.buffer.Unpooled;
import io.vertx.core.json.Json;
import io.vertx.core.json.JsonArray;
import io.vertx.core.json.JsonObject;
public class JsonHandlerTest {
private static final byte[] ID418 = "418".getBytes(StandardCharsets.UTF_8);
private static final byte[] ID420 = "420".getBytes(StandardCharsets.UTF_8);
private static final byte[] ID450 = "450".getBytes(StandardCharsets.UTF_8);
private static final byte[] JSON = ("[\n" +
" { \"id\" : 418, \"product\" : \"Teapots\", \"units\" : 123 },\n" +
" { \"id\" : 420, \"product\" : \"Various herbs\", \"units\" : 321 },\n" +
" { \"id\" : 450, \"product\" : \"Magazines\", \"units\": 456 }\n" +
" ]").getBytes(StandardCharsets.UTF_8);
private static final byte[] ESCAPED = ("[\n" +
" { \"foo\" : \"\\nx\\bx\\f\\rx\\t\" },\n" +
" { \"foo\" : \"x\\u15dCx\\\"x\\/\\\\\" },\n" +
" ]").getBytes(StandardCharsets.UTF_8);
@Test
public void testFull() {
ExpectProcessor expect = new ExpectProcessor()
.expect(-1, 3, true)
.expect(-1, 3, true)
.expect(-1, 3, true);
JsonHandler handler = new JsonHandler(".[].id", false, null, expect);
Session session = SessionFactory.forTesting();
ResourceUtilizer.reserveForTesting(session, handler);
ByteBuf data = Unpooled.wrappedBuffer(JSON);
handler.before(session);
handler.process(session, data, data.readerIndex(), data.readableBytes(), true);
handler.after(session);
expect.validate();
}
@Test
public void testSplit() {
ExpectProcessor expect = new ExpectProcessor();
JsonHandler handler = new JsonHandler(".[].id", false, null, expect);
Session session = SessionFactory.forTesting();
ResourceUtilizer.reserveForTesting(session, handler);
for (int i = 0; i < JSON.length; ++i) {
ByteBuf data1 = Unpooled.wrappedBuffer(JSON, 0, i);
ByteBuf data2 = Unpooled.wrappedBuffer(JSON, i, JSON.length - i);
for (byte[] string : new byte[][] { ID418, ID420, ID450 }) {
if (contains(JSON, 0, i, string) || contains(JSON, i, JSON.length - i, string)) {
expect.expect(-1, 3, true);
} else {
expect.expect(-1, -1, false);
expect.expect(-1, -1, true);
}
}
handler.before(session);
handler.process(session, data1, data1.readerIndex(), data1.readableBytes(), false);
handler.process(session, data2, data2.readerIndex(), data2.readableBytes(), true);
handler.after(session);
expect.validate();
}
}
@Test
public void testSelectObject() {
ExpectProcessor expect = new ExpectProcessor()
.expect(9, 14, true);
JsonHandler handler = new JsonHandler(".foo", false, null, expect);
Session session = SessionFactory.forTesting();
ResourceUtilizer.reserveForTesting(session, handler);
ByteBuf data = Unpooled.wrappedBuffer("{ \"foo\": { \"bar\" : 42 }}".getBytes(StandardCharsets.UTF_8));
handler.before(session);
handler.process(session, data, data.readerIndex(), data.readableBytes(), true);
handler.after(session);
expect.validate();
}
@Test
public void testEscaped() {
List<String> unescapedItems = Arrays.asList("\nx\bx\f\rx\t", "x\u15dcx\"x/\\");
List<String> expectedStrings = new ArrayList<>(unescapedItems);
Processor expect = (Processor) (session, data, offset, length, isLastPart) -> {
byte[] bytes = new byte[length];
data.getBytes(offset, bytes);
String str = new String(bytes, StandardCharsets.UTF_8);
assertThat(str).isEqualTo(expectedStrings.remove(0));
};
JsonHandler handler = new JsonHandler(".[].foo", false, null, new JsonUnquotingTransformer(new DefragProcessor(expect)));
Session session = SessionFactory.forTesting();
ResourceUtilizer.reserveForTesting(session, handler);
for (int i = 0; i < ESCAPED.length; ++i) {
handleSplit(handler, session, ESCAPED, i);
assertThat(expectedStrings.isEmpty()).isTrue();
expectedStrings.addAll(unescapedItems);
}
}
@Test
public void testDelete() {
StringCollector stringCollector = new StringCollector();
JsonHandler handler = new JsonHandler(".[].product", true, null, new DefragProcessor(stringCollector));
Session session = SessionFactory.forTesting();
ResourceUtilizer.reserveForTesting(session, handler);
for (int i = 0; i < JSON.length; ++i) {
handleSplit(handler, session, JSON, i);
JsonArray array = (JsonArray) Json.decodeValue(stringCollector.str);
assertThat(array.size()).isEqualTo(3);
array.forEach(o -> {
JsonObject obj = (JsonObject) o;
assertThat(obj.getInteger("id")).isNotNull();
assertThat(obj.getInteger("units")).isNotNull();
});
}
}
@Test
public void testDeleteArrayItem() {
StringCollector stringCollector = new StringCollector();
JsonHandler handler = new JsonHandler(".[1]", true, null, new DefragProcessor(stringCollector));
Session session = SessionFactory.forTesting();
ResourceUtilizer.reserveForTesting(session, handler);
for (int i = 0; i < JSON.length; ++i) {
handleSplit(handler, session, JSON, i);
JsonArray array = (JsonArray) Json.decodeValue(stringCollector.str);
assertThat(array.size()).isEqualTo(2);
array.forEach(o -> {
JsonObject obj = (JsonObject) o;
assertThat(obj.getInteger("id")).isNotNull();
assertThat(obj.getString("product")).isNotBlank();
assertThat(obj.getInteger("units")).isNotNull();
});
}
}
@Test
public void testReplace() {
StringCollector stringCollector = new StringCollector();
JsonUnquotingTransformer replace = new JsonUnquotingTransformer(new ObscuringTransformer());
JsonHandler handler = new JsonHandler(".[].product", false, replace, new DefragProcessor(stringCollector));
Session session = SessionFactory.forTesting();
ResourceUtilizer.reserveForTesting(session, handler);
for (int i = 0; i < JSON.length; ++i) {
handleSplit(handler, session, JSON, i);
JsonArray array = (JsonArray) Json.decodeValue(stringCollector.str);
assertThat(array.size()).isEqualTo(3);
array.forEach(o -> {
JsonObject obj = (JsonObject) o;
assertThat(obj.getInteger("id")).isNotNull();
assertThat(obj.getInteger("units")).isNotNull();
});
assertThat(array.getJsonObject(0).getString("product")).isEqualTo("xxxxxxx");
assertThat(array.getJsonObject(1).getString("product")).isEqualTo("xxxxxxxxxxxxx");
assertThat(array.getJsonObject(2).getString("product")).isEqualTo("xxxxxxxxx");
}
}
@Test
public void testReplaceDeleting() {
StringCollector stringCollector = new StringCollector();
JsonHandler handler = new JsonHandler(".[1]", false, (Transformer) (session, in, offset, length, lastFragment, out) -> {
}, new DefragProcessor(stringCollector));
Session session = SessionFactory.forTesting();
ResourceUtilizer.reserveForTesting(session, handler);
for (int i = 0; i < JSON.length; ++i) {
handleSplit(handler, session, JSON, i);
JsonArray array = (JsonArray) Json.decodeValue(stringCollector.str);
assertThat(array.size()).isEqualTo(2);
array.forEach(o -> {
JsonObject obj = (JsonObject) o;
assertThat(obj.getInteger("id")).isNotNull();
assertThat(obj.getString("product")).isNotBlank();
assertThat(obj.getInteger("units")).isNotNull();
});
}
}
private void handleSplit(JsonHandler handler, Session session, byte[] json, int position) {
ByteBuf data1 = Unpooled.wrappedBuffer(json, 0, position);
ByteBuf data2 = Unpooled.wrappedBuffer(json, position, json.length - position);
handler.before(session);
handler.process(session, data1, data1.readerIndex(), data1.readableBytes(), false);
handler.process(session, data2, data2.readerIndex(), data2.readableBytes(), true);
handler.after(session);
}
private boolean contains(byte[] data, int offset, int length, byte[] string) {
OUTER: for (int i = 0; i <= length - string.length; ++i) {
for (int j = 0; j < string.length && i + j < length; ++j) {
if (string[j] != data[offset + i + j]) {
continue OUTER;
}
}
return true;
}
return false;
}
private static class StringCollector implements Processor {
private String str;
@Override
public void before(Session session) {
str = "This was never set";
}
@Override
public void process(Session session, ByteBuf data, int offset, int length, boolean isLastPart) {
byte[] bytes = new byte[length];
data.getBytes(offset, bytes);
str = new String(bytes, StandardCharsets.UTF_8);
}
}
private static class ObscuringTransformer
implements Transformer, ResourceUtilizer, Session.ResourceKey<ObscuringTransformer.Context> {
@Override
public void transform(Session session, ByteBuf in, int offset, int length, boolean lastFragment, ByteBuf out) {
Context ctx = session.getResource(this);
if (ctx.firstFragment) {
out.writeByte('"');
ctx.firstFragment = false;
}
for (int i = 0; i < length; ++i) {
out.writeByte('x');
}
if (lastFragment) {
out.writeByte('"');
ctx.firstFragment = true;
}
}
@Override
public void reserve(Session session) {
session.declareResource(this, Context::new);
}
public static class Context implements Session.Resource {
private boolean firstFragment = true;
}
}
}
| java | Apache-2.0 | 70ad9cad7ba105e88d7c62e7b65892ecd288f034 | 2026-01-05T02:38:03.557103Z | false |
Hyperfoil/Hyperfoil | https://github.com/Hyperfoil/Hyperfoil/blob/70ad9cad7ba105e88d7c62e7b65892ecd288f034/core/src/test/java/io/hyperfoil/core/handlers/ExpectProcessor.java | core/src/test/java/io/hyperfoil/core/handlers/ExpectProcessor.java | package io.hyperfoil.core.handlers;
import static org.assertj.core.api.Assertions.assertThat;
import java.util.Deque;
import java.util.LinkedList;
import java.util.function.Predicate;
import io.hyperfoil.api.processor.Processor;
import io.hyperfoil.api.session.Session;
import io.hyperfoil.impl.Util;
import io.netty.buffer.ByteBuf;
public class ExpectProcessor implements Processor {
int beforeCalled;
int afterCalled;
int invoked;
final Deque<Invocation> invocations = new LinkedList<>();
@Override
public void before(Session session) {
assertThat(beforeCalled).isEqualTo(0);
beforeCalled++;
}
@Override
public void process(Session session, ByteBuf data, int offset, int length, boolean isLastPart) {
Invocation invocation = invocations.pollFirst();
assertThat(invocation).isNotNull();
if (invocation.data != null) {
assertThat(data).withFailMessage(Util.toString(data, offset, length)).matches(invocation.data);
}
if (invocation.offset >= 0) {
assertThat(offset).as("Invocation #%d", invoked).isEqualTo(invocation.offset);
}
if (invocation.length >= 0) {
assertThat(length).as("Invocation #%d", invoked).isEqualTo(invocation.length);
}
assertThat(isLastPart).as("Invocation #%d", invoked).isEqualTo(invocation.isLastPart);
++invoked;
}
@Override
public void after(Session session) {
assertThat(afterCalled).isEqualTo(0);
afterCalled++;
}
public ExpectProcessor expect(int offset, int length, boolean isLastPart) {
invocations.add(new Invocation(null, offset, length, isLastPart));
return this;
}
public ExpectProcessor expect(ByteBuf data, int offset, int length, boolean isLastPart) {
invocations.add(new Invocation(data::equals, offset, length, isLastPart));
return this;
}
public ExpectProcessor expect(ByteBuf data) {
invocations.add(new Invocation(data::equals, -1, -1, true));
return this;
}
public void validate() {
assertThat(beforeCalled).isEqualTo(1);
assertThat(invocations).isEmpty();
assertThat(afterCalled).isEqualTo(1);
beforeCalled = 0;
afterCalled = 0;
invoked = 0;
}
private class Invocation {
final Predicate<ByteBuf> data;
final int offset;
final int length;
final boolean isLastPart;
private Invocation(Predicate<ByteBuf> data, int offset, int length, boolean isLastPart) {
this.data = data;
this.offset = offset;
this.length = length;
this.isLastPart = isLastPart;
}
}
}
| java | Apache-2.0 | 70ad9cad7ba105e88d7c62e7b65892ecd288f034 | 2026-01-05T02:38:03.557103Z | false |
Hyperfoil/Hyperfoil | https://github.com/Hyperfoil/Hyperfoil/blob/70ad9cad7ba105e88d7c62e7b65892ecd288f034/core/src/test/java/io/hyperfoil/core/handlers/ProcessorAssertion.java | core/src/test/java/io/hyperfoil/core/handlers/ProcessorAssertion.java | package io.hyperfoil.core.handlers;
import static org.junit.jupiter.api.Assertions.assertEquals;
import io.hyperfoil.api.processor.Processor;
import io.hyperfoil.api.session.Session;
import io.netty.buffer.ByteBuf;
import io.vertx.junit5.VertxTestContext;
public class ProcessorAssertion {
private final int assertInvocations;
private final boolean onlyLast;
private int actualInvocations;
public ProcessorAssertion(int assertInvocations, boolean onlyLast) {
this.assertInvocations = assertInvocations;
this.onlyLast = onlyLast;
}
public Processor.Builder processor(Processor.Builder delegate) {
return new Builder(delegate);
}
public void runAssertions(VertxTestContext ctx) {
assertEquals(assertInvocations, actualInvocations);
actualInvocations = 0;
}
private class Builder implements Processor.Builder {
private final Processor.Builder delegate;
private Builder(Processor.Builder delegate) {
this.delegate = delegate;
}
@Override
public Processor build(boolean fragmented) {
return new Instance(delegate.build(fragmented));
}
}
private class Instance extends Processor.BaseDelegating {
protected Instance(Processor delegate) {
super(delegate);
}
@Override
public void process(Session session, ByteBuf data, int offset, int length, boolean isLastPart) {
if (isLastPart || !onlyLast) {
actualInvocations++;
}
delegate.process(session, data, offset, length, isLastPart);
}
}
}
| java | Apache-2.0 | 70ad9cad7ba105e88d7c62e7b65892ecd288f034 | 2026-01-05T02:38:03.557103Z | false |
Hyperfoil/Hyperfoil | https://github.com/Hyperfoil/Hyperfoil/blob/70ad9cad7ba105e88d7c62e7b65892ecd288f034/core/src/test/java/io/hyperfoil/core/handlers/json/StreamQueueTest.java | core/src/test/java/io/hyperfoil/core/handlers/json/StreamQueueTest.java | package io.hyperfoil.core.handlers.json;
import static org.junit.jupiter.api.Assertions.assertArrayEquals;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertNull;
import java.util.ArrayList;
import java.util.IdentityHashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.function.Consumer;
import java.util.function.Function;
import java.util.function.Supplier;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.MethodSource;
import io.netty.buffer.Unpooled;
public class StreamQueueTest {
private static final int DATA_SIZE = 10;
private static IdentityHashMap<ByteStream, AtomicInteger> releaseCounters = new IdentityHashMap<>();
private static IdentityHashMap<ByteStream, AtomicInteger> retainCounters = new IdentityHashMap<>();
private static int generatedDataBytes;
private static List<byte[]> generatedData = new ArrayList<>();
@AfterEach
public void tearDown() {
releaseCounters.clear();
retainCounters.clear();
generatedDataBytes = 0;
generatedData.clear();
}
private static ByteStream trackRetain(ByteStream stream) {
retainCounters.computeIfAbsent(stream, s -> new AtomicInteger()).incrementAndGet();
return stream;
}
private static void trackReleaseUntil(ByteStream stream) {
releaseCounters.computeIfAbsent(stream, s -> new AtomicInteger()).incrementAndGet();
}
/**
* WARNING: this assumes that data generation and order by which data is appended is the same!
*/
private static byte[] generateData(int length) {
byte[] array = new byte[length];
int value = generatedDataBytes;
for (int i = 0; i < length; ++i) {
int nextValue = value + i;
byte positiveValue = (byte) (nextValue & 0x7F);
assert positiveValue >= 0;
array[i] = positiveValue;
}
generatedDataBytes += length;
generatedData.add(array);
return array;
}
private static byte[] dumpGeneratedData() {
byte[] allData = new byte[generatedDataBytes];
int written = 0;
for (byte[] array : generatedData) {
System.arraycopy(array, 0, allData, written, array.length);
written += array.length;
}
return allData;
}
private static void removeFirstGeneratedData() {
if (generatedData.isEmpty()) {
return;
}
var data = generatedData.remove(0);
generatedDataBytes -= data.length;
}
private static void removeLastGeneratedData() {
if (generatedData.isEmpty()) {
return;
}
var lastData = generatedData.remove(generatedData.size() - 1);
generatedDataBytes -= lastData.length;
}
static Supplier<ByteStream>[] byteStreamProvider() {
Function<ByteStream, ByteStream> retain = StreamQueueTest::trackRetain;
Consumer<ByteStream> release = StreamQueueTest::trackReleaseUntil;
return new Supplier[] {
() -> new ByteBufByteStream(retain, release).wrap(Unpooled.wrappedBuffer(generateData(DATA_SIZE)), 0, 10),
() -> new ByteArrayByteStream(retain, release).wrap(generateData(DATA_SIZE))
};
}
private static void assertRetain(ByteStream stream, int expectedCount) {
assertCount(stream, expectedCount, retainCounters);
}
private static void assertRelease(ByteStream stream, int expectedCount) {
assertCount(stream, expectedCount, releaseCounters);
}
private static void assertCount(ByteStream stream, int expectedCount, Map<ByteStream, AtomicInteger> countMap) {
var counter = countMap.get(stream);
if (expectedCount == 0) {
assertNull(counter);
} else {
assertNotNull(counter);
assertEquals(expectedCount, counter.get());
}
}
private static byte[] streamDataOf(StreamQueue streamQueue) {
byte[] streamData = new byte[streamQueue.bytes()];
int offset = streamQueue.firstAvailableIndex();
for (int i = 0; i < streamData.length; ++i) {
streamData[i] = (byte) streamQueue.getByte(offset);
offset++;
}
return streamData;
}
@ParameterizedTest
@MethodSource("byteStreamProvider")
public void appendShouldRetainStream(Supplier<ByteStream> streamFactory) {
var streamQueue = new StreamQueue(1);
ByteStream stream = streamFactory.get();
assertRetain(stream, 0);
streamQueue.append(stream);
assertRetain(stream, 1);
}
@ParameterizedTest
@MethodSource("byteStreamProvider")
public void appendShouldContainsTheOriginalData(Supplier<ByteStream> streamFactory) {
var streamQueue = new StreamQueue(1);
ByteStream stream = streamFactory.get();
assertRetain(stream, 0);
streamQueue.append(stream);
assertRetain(stream, 1);
assertEquals(1, streamQueue.parts());
assertEquals(DATA_SIZE, streamQueue.bytes());
assertArrayEquals(dumpGeneratedData(), streamDataOf(streamQueue));
}
@ParameterizedTest
@MethodSource("byteStreamProvider")
public void appendWithEnlargementShouldContainsTheOriginalData(Supplier<ByteStream> streamFactory) {
var streamQueue = new StreamQueue(1);
assertEquals(1, streamQueue.availableCapacityBeforeEnlargement());
streamQueue.append(streamFactory.get());
assertEquals(0, streamQueue.availableCapacityBeforeEnlargement());
streamQueue.append(streamFactory.get());
assertEquals(0, streamQueue.availableCapacityBeforeEnlargement());
assertEquals(2, streamQueue.parts());
assertEquals(DATA_SIZE * 2, streamQueue.bytes());
assertArrayEquals(dumpGeneratedData(), streamDataOf(streamQueue));
streamQueue.append(streamFactory.get());
assertEquals(1, streamQueue.availableCapacityBeforeEnlargement());
assertEquals(3, streamQueue.parts());
assertEquals(DATA_SIZE * 3, streamQueue.bytes());
assertArrayEquals(dumpGeneratedData(), streamDataOf(streamQueue));
}
@ParameterizedTest
@MethodSource("byteStreamProvider")
public void appendAndRemoveWithEnlargementShouldContainsTheRightData(Supplier<ByteStream> streamFactory) {
var streamQueue = new StreamQueue(1);
streamQueue.append(streamFactory.get());
streamQueue.append(streamFactory.get());
assertEquals(0, streamQueue.availableCapacityBeforeEnlargement());
streamQueue.releaseUntil(DATA_SIZE);
assertEquals(1, streamQueue.availableCapacityBeforeEnlargement());
removeFirstGeneratedData();
streamQueue.append(streamFactory.get());
assertEquals(0, streamQueue.availableCapacityBeforeEnlargement());
assertArrayEquals(dumpGeneratedData(), streamDataOf(streamQueue));
}
@ParameterizedTest
@MethodSource("byteStreamProvider")
public void removeIfEmptyShouldNotThrowErrors(Supplier<ByteStream> streamFactory) {
var streamQueue = new StreamQueue(1);
streamQueue.releaseUntil(1);
}
@ParameterizedTest
@MethodSource("byteStreamProvider")
public void removeBeyondExistingIndexesShouldNotRemoveLastAppended(Supplier<ByteStream> streamFactory) {
var streamQueue = new StreamQueue(2);
var firstPart = streamFactory.get();
streamQueue.append(firstPart);
var secondPart = streamFactory.get();
streamQueue.append(secondPart);
int beyondLastIndex = streamQueue.bytes();
assertEquals(-1, streamQueue.getByte(beyondLastIndex));
streamQueue.releaseUntil(3 * DATA_SIZE);
assertRelease(firstPart, 1);
assertRelease(secondPart, 0);
assertEquals(1, streamQueue.parts());
removeFirstGeneratedData();
assertArrayEquals(dumpGeneratedData(), streamDataOf(streamQueue));
}
@ParameterizedTest
@MethodSource("byteStreamProvider")
public void removeLastByteOfPartShouldNotRemoveIt(Supplier<ByteStream> streamFactory) {
var streamQueue = new StreamQueue(2);
var firstPart = streamFactory.get();
streamQueue.append(firstPart);
var secondPart = streamFactory.get();
streamQueue.append(secondPart);
streamQueue.releaseUntil(2 * DATA_SIZE);
assertRelease(firstPart, 1);
assertRelease(secondPart, 0);
assertEquals(1, streamQueue.parts());
removeFirstGeneratedData();
assertArrayEquals(dumpGeneratedData(), streamDataOf(streamQueue));
}
@ParameterizedTest
@MethodSource("byteStreamProvider")
public void appendShouldReturnTheFirstIndexOfEachPart(Supplier<ByteStream> streamFactory) {
   var streamQueue = new StreamQueue(2);
   var part = streamFactory.get();
   int index = streamQueue.append(part);
   // The first part starts at offset 0 of the logical stream.
   assertEquals(0, index);
   index = streamQueue.append(streamFactory.get());
   // JUnit's assertEquals takes (expected, actual); the original call had the
   // arguments swapped, which would produce a misleading failure message.
   // The second part must start right after the first, i.e. at offset DATA_SIZE.
   assertEquals(DATA_SIZE, index);
}
@ParameterizedTest
@MethodSource("byteStreamProvider")
public void resetShouldReleaseAllPartsAndResetLength(Supplier<ByteStream> streamFactory) {
   final var queue = new StreamQueue(2);
   final var onlyPart = streamFactory.get();
   queue.append(onlyPart);
   queue.reset();
   // After reset the queue holds no data, the part was released exactly once
   // and the next append starts again at index 0.
   assertArrayEquals(new byte[0], streamDataOf(queue));
   assertRelease(onlyPart, 1);
   assertEquals(0, queue.append(streamFactory.get()));
}
}
| java | Apache-2.0 | 70ad9cad7ba105e88d7c62e7b65892ecd288f034 | 2026-01-05T02:38:03.557103Z | false |
Hyperfoil/Hyperfoil | https://github.com/Hyperfoil/Hyperfoil/blob/70ad9cad7ba105e88d7c62e7b65892ecd288f034/core/src/test/java/io/hyperfoil/core/parser/TemplateIteratorTest.java | core/src/test/java/io/hyperfoil/core/parser/TemplateIteratorTest.java | package io.hyperfoil.core.parser;
import static org.assertj.core.api.Assertions.assertThat;
import java.io.IOException;
import java.io.StringReader;
import java.util.Collections;
import java.util.Iterator;
import java.util.Map;
import org.junit.jupiter.api.Test;
import org.yaml.snakeyaml.Yaml;
import org.yaml.snakeyaml.events.AliasEvent;
import org.yaml.snakeyaml.events.CollectionStartEvent;
import org.yaml.snakeyaml.events.Event;
import org.yaml.snakeyaml.events.ScalarEvent;
import io.hyperfoil.impl.Util;
public class TemplateIteratorTest {
   @Test
   public void testTemplate() throws IOException {
      testMatching("template/template.yaml", "template/expected.yaml",
            Map.of("FOO", "foo", "BAR", "bar", "C", "c", "LIST_OF_ITEMS", "x;y"));
   }

   @Test
   public void testAliasInAnchor() throws IOException {
      testMatching("template/aliasInAnchor.template.yaml", "template/aliasInAnchor.expected.yaml", Collections.emptyMap());
   }

   /**
    * Renders {@code templateResource} through {@link TemplateIterator} with the given
    * parameters and verifies that the resulting YAML event stream matches
    * {@code expectedResource} event-by-event, with all tags, anchors and aliases resolved.
    */
   private void testMatching(String templateResource, String expectedResource, Map<String, String> params) throws IOException {
      ClassLoader loader = getClass().getClassLoader();
      String template = Util.toString(loader.getResourceAsStream(templateResource));
      String expected = Util.toString(loader.getResourceAsStream(expectedResource));
      Yaml yaml = new Yaml();
      Iterator<Event> actualEvents = new DebugIterator<>(
            new TemplateIterator(yaml.parse(new StringReader(template)).iterator(), params));
      Iterator<Event> expectedEvents = yaml.parse(new StringReader(expected)).iterator();
      while (actualEvents.hasNext() && expectedEvents.hasNext()) {
         Event expectedEvent = expectedEvents.next();
         Event actualEvent = actualEvents.next();
         assertThat(actualEvent.getClass()).isEqualTo(expectedEvent.getClass());
         if (actualEvent instanceof ScalarEvent) {
            // Scalar values must match verbatim and must not carry a leftover template tag.
            assertThat(((ScalarEvent) actualEvent).getValue()).isEqualTo(((ScalarEvent) expectedEvent).getValue());
            assertThat(((ScalarEvent) actualEvent).getTag()).isNull();
         } else if (actualEvent instanceof CollectionStartEvent) {
            assertThat(((CollectionStartEvent) actualEvent).getTag()).isNull();
         }
         // Aliases should have been expanded by the template iterator.
         assertThat(actualEvent).isNotExactlyInstanceOf(AliasEvent.class);
      }
      // Both streams must be exhausted together — no trailing events on either side.
      assertThat(actualEvents.hasNext()).isFalse();
      assertThat(expectedEvents.hasNext()).isFalse();
   }
}
| java | Apache-2.0 | 70ad9cad7ba105e88d7c62e7b65892ecd288f034 | 2026-01-05T02:38:03.557103Z | false |
Hyperfoil/Hyperfoil | https://github.com/Hyperfoil/Hyperfoil/blob/70ad9cad7ba105e88d7c62e7b65892ecd288f034/core/src/main/java/io/hyperfoil/core/util/CountDown.java | core/src/main/java/io/hyperfoil/core/util/CountDown.java | package io.hyperfoil.core.util;
import io.vertx.core.AsyncResult;
import io.vertx.core.Future;
import io.vertx.core.Handler;
/**
 * A simple countdown latch for asynchronous composition: once {@link #countDown()}
 * has been invoked as many times as the initial value, the completion handler is
 * invoked with a succeeded future. When used as a {@link Handler} for async results,
 * the first failed result is forwarded to the completion handler immediately and
 * further counting is disabled.
 * <p>
 * NOTE(review): there is no synchronization here — presumably all calls happen on
 * a single Vert.x event-loop thread; confirm before using from multiple threads.
 */
public class CountDown implements Handler<AsyncResult<Void>> {
   private Handler<AsyncResult<Void>> handler;
   // Remaining number of countDown() calls; -1 is a sentinel meaning "already
   // completed or failed" so the handler cannot fire twice from countDown().
   private int value;

   public CountDown(Handler<AsyncResult<Void>> handler, int initialValue) {
      if (initialValue <= 0) {
         throw new IllegalArgumentException();
      }
      this.handler = handler;
      this.value = initialValue;
   }

   // The handler must be supplied later via setHandler(); counting down to zero
   // before that would cause a NullPointerException.
   public CountDown(int initialValue) {
      this(null, initialValue);
   }

   /**
    * Sets the completion handler exactly once.
    *
    * @throws IllegalStateException if a handler was already set
    * @throws IllegalArgumentException if the handler is null
    */
   public CountDown setHandler(Handler<AsyncResult<Void>> handler) {
      if (this.handler != null) {
         throw new IllegalStateException();
      } else if (handler == null) {
         throw new IllegalArgumentException();
      }
      this.handler = handler;
      return this;
   }

   /** Adds one more expected countDown() call; illegal after completion/failure. */
   public void increment() {
      if (value < 0) {
         throw new IllegalStateException();
      }
      ++value;
   }

   /** Decrements the counter and fires the handler (successfully) when it reaches zero. */
   public void countDown() {
      if (value <= 0) {
         throw new IllegalStateException();
      }
      if (--value == 0) {
         value = -1;
         handler.handle(Future.succeededFuture());
      }
   }

   /** Treats a successful result as one countDown(); forwards the first failure as-is. */
   @Override
   public void handle(AsyncResult<Void> event) {
      if (event.succeeded()) {
         countDown();
      } else {
         // Disable further counting and propagate the failure immediately.
         value = -1;
         handler.handle(event);
      }
   }
}
| java | Apache-2.0 | 70ad9cad7ba105e88d7c62e7b65892ecd288f034 | 2026-01-05T02:38:03.557103Z | false |
Hyperfoil/Hyperfoil | https://github.com/Hyperfoil/Hyperfoil/blob/70ad9cad7ba105e88d7c62e7b65892ecd288f034/core/src/main/java/io/hyperfoil/core/util/ConstantBytesGenerator.java | core/src/main/java/io/hyperfoil/core/util/ConstantBytesGenerator.java | package io.hyperfoil.core.util;
import io.hyperfoil.api.connection.Connection;
import io.hyperfoil.api.session.Session;
import io.hyperfoil.function.SerializableBiFunction;
import io.netty.buffer.ByteBuf;
import io.netty.buffer.Unpooled;
/**
 * Produces the request body from a fixed byte array captured at build time.
 */
public class ConstantBytesGenerator implements SerializableBiFunction<Session, Connection, ByteBuf> {
   private final byte[] data;

   public ConstantBytesGenerator(byte[] bytes) {
      this.data = bytes;
   }

   @Override
   public ByteBuf apply(Session session, Connection connection) {
      // A fresh wrapper is required on every call: writing to the socket mutates
      // the ByteBuf (reader/writer indices, refCnt) even though the backing array
      // stays constant. TODO: implement pooling of wrapping buffers.
      return Unpooled.wrappedBuffer(data);
   }
}
| java | Apache-2.0 | 70ad9cad7ba105e88d7c62e7b65892ecd288f034 | 2026-01-05T02:38:03.557103Z | false |
Hyperfoil/Hyperfoil | https://github.com/Hyperfoil/Hyperfoil/blob/70ad9cad7ba105e88d7c62e7b65892ecd288f034/core/src/main/java/io/hyperfoil/core/util/FromVarBytesGenerator.java | core/src/main/java/io/hyperfoil/core/util/FromVarBytesGenerator.java | package io.hyperfoil.core.util;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import io.hyperfoil.api.connection.Connection;
import io.hyperfoil.api.session.ReadAccess;
import io.hyperfoil.api.session.Session;
import io.hyperfoil.function.SerializableBiFunction;
import io.hyperfoil.impl.Util;
import io.netty.buffer.ByteBuf;
import io.netty.buffer.Unpooled;
/**
 * Produces the request body from a session variable holding a {@code ByteBuf},
 * {@code String} or {@code byte[]}; any other content is logged and yields null.
 */
public class FromVarBytesGenerator implements SerializableBiFunction<Session, Connection, ByteBuf> {
   private static final Logger log = LogManager.getLogger(FromVarBytesGenerator.class);
   private final ReadAccess fromVar;

   public FromVarBytesGenerator(ReadAccess fromVar) {
      this.fromVar = fromVar;
   }

   @Override
   public ByteBuf apply(Session session, Connection connection) {
      Object contents = fromVar.getObject(session);
      if (contents instanceof ByteBuf) {
         return (ByteBuf) contents;
      }
      if (contents instanceof String) {
         // Encode through the connection's allocator to size the buffer up front.
         String text = (String) contents;
         return Util.string2byteBuf(text, connection.context().alloc().buffer(text.length()));
      }
      if (contents instanceof byte[]) {
         return Unpooled.wrappedBuffer((byte[]) contents);
      }
      log.error("#{} Cannot encode contents of var {}: {}", session.uniqueId(), fromVar, contents);
      return null;
   }
}
| java | Apache-2.0 | 70ad9cad7ba105e88d7c62e7b65892ecd288f034 | 2026-01-05T02:38:03.557103Z | false |
Hyperfoil/Hyperfoil | https://github.com/Hyperfoil/Hyperfoil/blob/70ad9cad7ba105e88d7c62e7b65892ecd288f034/core/src/main/java/io/hyperfoil/core/util/CpuWatchdog.java | core/src/main/java/io/hyperfoil/core/util/CpuWatchdog.java | package io.hyperfoil.core.util;
import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.function.BooleanSupplier;
import java.util.function.Consumer;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import io.hyperfoil.api.BenchmarkExecutionException;
import io.hyperfoil.internal.Properties;
/**
 * Watches per-CPU utilization by periodically sampling {@code /proc/stat}.
 * If any single CPU's idle ratio drops below the configured threshold a warning
 * is logged, and — while {@code warmupTest} returns true — the run is failed
 * through {@code errorHandler}. The watchdog also records an aggregate CPU usage
 * summary per benchmark phase via {@link #notifyPhaseStart}/{@link #notifyPhaseEnd}.
 * Linux-only: it stays disabled when /proc/stat is not readable.
 */
public class CpuWatchdog implements Runnable {
   private static final Logger log = LogManager.getLogger(CpuWatchdog.class);
   private static final Path PROC_STAT = Path.of("/proc/stat");
   // Sampling period, in milliseconds.
   private static final long PERIOD = Properties.getLong(Properties.CPU_WATCHDOG_PERIOD, 5000);
   // Minimum fraction of time each CPU must remain idle (default 20%).
   private static final double IDLE_THRESHOLD = Double
         .parseDouble(Properties.get(Properties.CPU_WATCHDOG_IDLE_THRESHOLD, "0.2"));
   // On most architectures the tick is defined as 1/100 of second (10 ms)
   // where the value of 100 can be obtained using sysconf(_SC_CLK_TCK)
   private static final long TICK_NANOS = Properties.getLong("io.hyperfoil.clock.tick.nanos", 10_000_000);

   private final Consumer<Throwable> errorHandler;
   private final Thread thread;
   // Presumably returns true while CPU overload should abort the benchmark
   // (warmup validation) — TODO confirm against the caller.
   private final BooleanSupplier warmupTest;
   private final int nCpu;
   // Last sampled idle tick count per CPU; 0 = no sample yet,
   // Long.MAX_VALUE = sentinel set after an error was reported for that CPU.
   private final long[] idleTime;
   private volatile boolean running = true;
   // Timestamps (System.nanoTime) of the previous and current samples.
   private long lastTimestamp;
   private long now;
   // Phase name -> idle-counter snapshot taken at phase start; guarded by synchronized methods.
   private final Map<String, PhaseRecord> phaseStart = new HashMap<>();
   // Phase name -> human-readable usage summary computed at phase end.
   private final Map<String, String> phaseUsage = new HashMap<>();

   public CpuWatchdog(Consumer<Throwable> errorHandler, BooleanSupplier warmupTest) {
      this.errorHandler = errorHandler;
      this.warmupTest = warmupTest;
      File stat = PROC_STAT.toFile();
      if (!stat.exists() || !stat.isFile() || !stat.canRead()) {
         // Non-Linux (or restricted) environment: leave the watchdog disabled.
         log.warn("Not starting CPU watchdog as {} is not available (exists: {}, file: {}, readable: {})",
               PROC_STAT, stat.exists(), stat.isFile(), stat.canRead());
         thread = null;
         nCpu = 0;
         idleTime = null;
         return;
      }
      thread = new Thread(this, "cpu-watchdog");
      thread.setDaemon(true);
      // Count the per-CPU lines in /proc/stat to learn how many CPUs are present.
      AtomicInteger counter = new AtomicInteger();
      if (readProcStat(ignored -> counter.incrementAndGet())) {
         nCpu = counter.get();
      } else {
         nCpu = 0;
      }
      idleTime = new long[nCpu];
   }

   /** Sampling loop; exits on {@link #stop()}, on a read failure or when interrupted. */
   public void run() {
      if (nCpu <= 0) {
         log.error("Illegal number of CPUs");
         return;
      }
      lastTimestamp = System.nanoTime();
      now = lastTimestamp;
      while (running) {
         if (!readProcStat(this::processCpuLine)) {
            log.info("CPU watchdog is terminating.");
            return;
         }
         try {
            Thread.sleep(PERIOD);
         } catch (InterruptedException e) {
            log.error("CPU watchdog thread interrupted, terminating.", e);
            return;
         }
         lastTimestamp = now;
         now = System.nanoTime();
      }
   }

   /**
    * Feeds the space-split columns of each per-CPU line ("cpuN ...") from
    * /proc/stat to the consumer. Returns false when the file cannot be read
    * or a number fails to parse.
    */
   private boolean readProcStat(Consumer<String[]> consumer) {
      try {
         for (String line : Files.readAllLines(PROC_STAT)) {
            if (!line.startsWith("cpu"))
               continue;
            String[] parts = line.split(" ");
            // ignore overall stats
            if ("cpu".equals(parts[0]))
               continue;
            // weird format?
            if (parts.length < 5)
               continue;
            consumer.accept(parts);
         }
         return true;
      } catch (IOException e) {
         log.error("CPU watchdog cannot read {}", PROC_STAT, e);
         return false;
      } catch (NumberFormatException e) {
         log.error("CPU watchdog cannot parse stats.", e);
         return false;
      }
   }

   /** Evaluates a single "cpuN ..." sample against the idle threshold. */
   private void processCpuLine(String[] parts) {
      // parts[0] is "cpuN" — parse the digits after the 3-character "cpu" prefix.
      int cpuIndex = Integer.parseInt(parts[0], 3, parts[0].length(), 10);
      // Column 4 of /proc/stat holds the cumulative idle time (clock ticks).
      long idle = Long.parseLong(parts[4]);
      long prevIdle = idleTime[cpuIndex];
      // Skip CPUs without a previous sample (0), already-reported CPUs
      // (Long.MAX_VALUE) and zero-length intervals.
      if (prevIdle != 0 && prevIdle != Long.MAX_VALUE && lastTimestamp != now) {
         double idleRatio = (double) (TICK_NANOS * (idle - prevIdle)) / (now - lastTimestamp);
         if (idleRatio < IDLE_THRESHOLD) {
            String message = String.format("%s | CPU %d was used for %.0f%% which is more than the threshold of %.0f%%",
                  new SimpleDateFormat("HH:mm:ss.SSS").format(new Date()), cpuIndex, 100 * (1 - idleRatio),
                  100 * (1 - IDLE_THRESHOLD));
            log.warn(message);
            if (warmupTest.getAsBoolean()) {
               errorHandler.accept(new BenchmarkExecutionException(message));
               // Poison this CPU's slot (stored below) so it is not reported again.
               idle = Long.MAX_VALUE;
            }
         }
      }
      if (prevIdle != Long.MAX_VALUE) {
         idleTime[cpuIndex] = idle;
      }
   }

   /** Starts the watchdog thread if /proc/stat was readable at construction. */
   public void start() {
      if (thread != null) {
         thread.start();
      }
   }

   /** Requests the sampling loop to terminate after the current sleep. */
   public void stop() {
      running = false;
   }

   /** Snapshots per-CPU idle counters at the start of a phase (first call wins). */
   public synchronized void notifyPhaseStart(String name) {
      if (nCpu <= 0)
         return;
      PhaseRecord record = new PhaseRecord(System.nanoTime(), new long[nCpu]);
      if (readProcStat(parts -> {
         int cpuIndex = Integer.parseInt(parts[0], 3, parts[0].length(), 10);
         record.cpuIdle[cpuIndex] = Long.parseLong(parts[4]);
      })) {
         phaseStart.putIfAbsent(name, record);
      }
   }

   /** Computes and stores the phase's CPU usage summary (first call wins). */
   public synchronized void notifyPhaseEnd(String name) {
      if (nCpu <= 0) {
         return;
      }
      PhaseRecord start = phaseStart.get(name);
      if (start == null || phaseUsage.containsKey(name)) {
         return;
      }
      long now = System.nanoTime();
      // sum = total idle ticks across all CPUs, min = idle ticks of the busiest CPU.
      SumMin acc = new SumMin();
      if (readProcStat(parts -> {
         int cpuIndex = Integer.parseInt(parts[0], 3, parts[0].length(), 10);
         long idle = Long.parseLong(parts[4]);
         long diff = idle - start.cpuIdle[cpuIndex];
         acc.sum += diff;
         acc.min = Math.min(acc.min, diff);
      })) {
         double idleCores = (double) (TICK_NANOS * acc.sum) / (now - start.timestamp);
         double minIdleRatio = (double) (TICK_NANOS * acc.min) / (now - start.timestamp);
         phaseUsage.put(name, String.format("%.1f%% (%.1f/%d cores), 1 core max %.1f%%",
               100 - 100 * idleCores / nCpu, nCpu - idleCores, nCpu, 100 - 100 * minIdleRatio));
      }
   }

   // NOTE(review): unsynchronized read of a map that is written under synchronized
   // methods; presumably invoked after the phase has ended — verify visibility.
   public String getCpuUsage(String name) {
      return phaseUsage.get(name);
   }

   // Accumulator for total and minimum per-CPU idle-tick deltas.
   private static class SumMin {
      long sum;
      long min = Long.MAX_VALUE;
   }

   // Snapshot of wall-clock time and per-CPU idle counters at phase start.
   private static class PhaseRecord {
      final long timestamp;
      final long[] cpuIdle;

      private PhaseRecord(long timestamp, long[] cpuIdle) {
         this.timestamp = timestamp;
         this.cpuIdle = cpuIdle;
      }
   }
}
| java | Apache-2.0 | 70ad9cad7ba105e88d7c62e7b65892ecd288f034 | 2026-01-05T02:38:03.557103Z | false |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.