package us.abstracta.jmeter.javadsl.blazemeter;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.file.Files;
import java.time.Duration;
import java.time.Instant;
import java.util.List;
import java.util.concurrent.TimeoutException;
import org.apache.jmeter.samplers.SampleResult;
import org.apache.jorphan.collections.HashTree;
import org.apache.jorphan.collections.ListedHashTree;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import us.abstracta.jmeter.javadsl.blazemeter.api.Project;
import us.abstracta.jmeter.javadsl.blazemeter.api.Test;
import us.abstracta.jmeter.javadsl.blazemeter.api.TestConfig;
import us.abstracta.jmeter.javadsl.blazemeter.api.TestRun;
import us.abstracta.jmeter.javadsl.blazemeter.api.TestRunConfig;
import us.abstracta.jmeter.javadsl.blazemeter.api.TestRunRequestStats;
import us.abstracta.jmeter.javadsl.blazemeter.api.TestRunStatus;
import us.abstracta.jmeter.javadsl.blazemeter.api.TestRunSummaryStats.TestRunLabeledSummary;
import us.abstracta.jmeter.javadsl.core.BuildTreeContext;
import us.abstracta.jmeter.javadsl.core.DslJmeterEngine;
import us.abstracta.jmeter.javadsl.core.DslTestPlan;
import us.abstracta.jmeter.javadsl.core.TestPlanStats;
import us.abstracta.jmeter.javadsl.core.engines.JmeterEnvironment;
import us.abstracta.jmeter.javadsl.core.stats.CountMetricSummary;
import us.abstracta.jmeter.javadsl.core.stats.StatsSummary;
import us.abstracta.jmeter.javadsl.core.stats.TimeMetricSummary;
/**
* A {@link DslJmeterEngine} which allows running a {@link DslTestPlan} in BlazeMeter.
*
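* A minimal usage sketch (testPlan, threadGroup and httpSampler are statically imported element
* factories from the core jmeter-java-dsl JmeterDsl class; the URL, thread counts and environment
* variable name are just illustrative):
* <pre>{@code
* TestPlanStats stats = testPlan(
*     threadGroup(2, 10,
*         httpSampler("http://my.service")
*     )
* ).runIn(new BlazeMeterEngine(System.getenv("BZ_TOKEN")));
* }</pre>
*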
* @since 0.2
*/
public class BlazeMeterEngine implements DslJmeterEngine {
private static final Logger LOG = LoggerFactory.getLogger(BlazeMeterEngine.class);
private static final String BASE_URL = "https://a.blazemeter.com";
private static final Duration STATUS_POLL_PERIOD = Duration.ofSeconds(5);
private final BlazeMeterClient client;
private String testName = "jmeter-java-dsl";
private Long projectId;
private Duration testTimeout = Duration.ofHours(1);
private Duration availableDataTimeout = Duration.ofSeconds(30);
private Integer totalUsers;
private Duration rampUp;
private Integer iterations;
private Duration holdFor;
private Integer threadsPerEngine;
private boolean useDebugRun;
/**
* @param authToken is the authentication token to be used to access the BlazeMeter API.
*
* It must have the format {@code <Key ID>:<Key Secret>}.
*
* Check the BlazeMeter API keys documentation for instructions on how to generate them.
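*
* A minimal sketch, reading the token from an environment variable to avoid hardcoding secrets
* (the variable name is just an example):
* <pre>{@code
* new BlazeMeterEngine(System.getenv("BZ_TOKEN"));
* }</pre>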
*/
public BlazeMeterEngine(String authToken) {
client = new BlazeMeterClient(BASE_URL + "/api/v4/", authToken);
}
/**
* Sets the name of the BlazeMeter test to use.
*
* BlazeMeterEngine will search for a test with the given name in the given project (see {@link
* #projectId(long)}) and, if one exists, it will update it and use it to run the provided test
* plan. If a test with the given name does not exist, then a new one will be created to run the
* given test plan.
*
* When not specified, the test name defaults to "jmeter-java-dsl".
*
* @param testName specifies the name of the test to update or create in BlazeMeter.
* @return the modified instance for fluent API usage.
*/
public BlazeMeterEngine testName(String testName) {
this.testName = testName;
return this;
}
/**
* Specifies the ID of the BlazeMeter project in which to run the test.
*
* You can get the project ID by selecting the project in BlazeMeter and taking the number that
* appears right after "/projects" in the URL.
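* For example, in a URL like (illustrative)
* {@code https://a.blazemeter.com/app/#/accounts/123/workspaces/456/projects/789}, the project ID
* would be 789.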
*
* When no project ID is specified, the default one for the user (associated with the given
* authentication token) is used.
*
* @param projectId is the ID of the project to be used to run the test.
* @return the modified instance for fluent API usage.
*/
public BlazeMeterEngine projectId(long projectId) {
this.projectId = projectId;
return this;
}
/**
* Specifies a timeout for the entire test execution.
*
* If the timeout is reached, then the engine will throw a TimeoutException.
*
* It is strongly advised to set this timeout properly in each run, according to the expected test
* execution time plus some additional margin (to account for additional delays in BlazeMeter test
* setup and teardown).
*
* This timeout exists to avoid any potential problem with BlazeMeter execution going undetected by
* the client, and to avoid keeping the test running indefinitely until it is interrupted by a
* user. This is especially annoying when running tests in an automated fashion, for example in
* CI/CD.
*
* When not specified, the timeout is set to 1 hour.
*
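* A minimal sketch of setting a timeout with some margin over the configured load profile (the
* durations here are just illustrative):
* <pre>{@code
* new BlazeMeterEngine(System.getenv("BZ_TOKEN"))
*     .rampUpFor(Duration.ofMinutes(1))
*     .holdFor(Duration.ofMinutes(10))
*     .testTimeout(Duration.ofMinutes(20));
* }</pre>
*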
* @param testTimeout to be used as time limit for test execution. If execution takes more than
* this, then a TimeoutException will be thrown by the engine.
* @return the modified instance for fluent API usage.
*/
public BlazeMeterEngine testTimeout(Duration testTimeout) {
this.testTimeout = testTimeout;
return this;
}
/**
* Specifies a timeout for waiting for test data (metrics) to be available in BlazeMeter.
*
* After a test is marked as ENDED in BlazeMeter, it may take a few seconds for the associated
* final metrics to be available. In some cases, the test is marked as ENDED by BlazeMeter, but
* the data is never available. This usually happens when there is some problem running the test
* (for example, some internal problem with the BlazeMeter engine, a missing JMeter plugin, or some
* other JMeter error). This timeout makes sure that tests properly fail (throwing a
* TimeoutException) when they are marked as ENDED and no data is available after the given
* timeout, avoiding an unnecessary wait for the full test execution timeout.
*
* Usually there is no need to change this timeout, but the API provides this method in case you
* need to tune it.
*
* When not specified, this value will default to 30 seconds.
*
* @param availableDataTimeout maximum time to wait for data to be available after a test ends,
* before throwing a TimeoutException.
* @return the modified instance for fluent API usage.
*/
public BlazeMeterEngine availableDataTimeout(Duration availableDataTimeout) {
this.availableDataTimeout = availableDataTimeout;
return this;
}
/**
* Specifies the number of virtual users to use when running the test.
*
* This value overwrites any value specified in the JMeter test plan's thread groups.
*
* When not specified, the value from the last test run (with the same name) is used, or 1 if no
* value has ever been specified.
*
* @param totalUsers number of virtual users to run the test with.
* @return the modified instance for fluent API usage.
*/
public BlazeMeterEngine totalUsers(int totalUsers) {
this.totalUsers = totalUsers;
return this;
}
/**
* Sets the duration of time taken to start the specified total users.
*
* For example, if totalUsers is set to 10, rampUp is 1 minute and holdFor is 10 minutes, then it
* will take 1 minute to start the 10 users (starting them in a linear fashion: 1 user every 6
* seconds), which will then continue executing the test for 10 additional minutes.
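*
* That scenario could be configured, for instance, like this (a sketch only; the token is read
* from a hypothetical environment variable):
* <pre>{@code
* new BlazeMeterEngine(System.getenv("BZ_TOKEN"))
*     .totalUsers(10)
*     .rampUpFor(Duration.ofMinutes(1))
*     .holdFor(Duration.ofMinutes(10));
* }</pre>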
*
* This value overwrites any value specified in the JMeter test plan's thread groups.
*
* Take into consideration that BlazeMeter does not support specifying this value with a
* granularity finer than minutes, so if you use a finer-grained duration, it will be rounded up
* to whole minutes (e.g.: if you specify 61 seconds, this will be translated into 2 minutes).
*
* When not specified, the value from the last test run (with the same name) is used, or 0 if no
* value has ever been specified.
*
* @param rampUp duration that BlazeMeter will take to spin up all the virtual users.
* @return the modified instance for fluent API usage.
*/
public BlazeMeterEngine rampUpFor(Duration rampUp) {
this.rampUp = rampUp;
return this;
}
/**
* Specifies the number of iterations each virtual user will execute.
*
* If both iterations and holdFor are specified, then iterations are ignored and only holdFor is
* taken into consideration.
*
* When neither iterations nor holdFor is specified, then the last test run configuration is used,
* or the criteria specified in the JMeter test plan if no previous test run exists.
*
* When specified, this value overwrites any value specified in the JMeter test plan's thread
* groups.
*
* @param iterations number of iterations for each virtual user to execute.
* @return the modified instance for fluent API usage.
*/
public BlazeMeterEngine iterations(int iterations) {
this.iterations = iterations;
return this;
}
/**
* Specifies the duration of time to keep the virtual users running, after the rampUp period.
*
* If both iterations and holdFor are specified, then iterations are ignored and only holdFor is
* taken into consideration.
*
* When neither iterations nor holdFor is specified, then the last test run configuration is used,
* or the criteria specified in the JMeter test plan if no previous test run exists.
*
* Take into consideration that BlazeMeter does not support specifying this value with a
* granularity finer than minutes, so if you use a finer-grained duration, it will be rounded up
* to whole minutes (e.g.: if you specify 61 seconds, this will be translated into 2 minutes).
*
* When specified, this value overwrites any value specified in the JMeter test plan's thread
* groups.
*
* @param holdFor duration to keep virtual users running after the rampUp period.
* @return the modified instance for fluent API usage.
*/
public BlazeMeterEngine holdFor(Duration holdFor) {
this.holdFor = holdFor;
return this;
}
/**
* Specifies the number of threads/virtual users to use per BlazeMeter engine (host or
* container).
*
* It is always important to use as few resources as possible (which reduces costs) to generate
* the required load for the test. Too few resources might lead to misleading results, since the
* instances/engines running the test might saturate and not properly impose the expected load
* upon the system under test. Too many resources might lead to unnecessary expenses (wasted
* money).
*
* This setting, in conjunction with totalUsers, determines the number of engines BlazeMeter will
* use to run the test. For example, if you set totalUsers to 500 and threadsPerEngine to 100,
* then 5 engines will be used to run the test.
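*
* That scenario could be configured, for instance, like this (a sketch only):
* <pre>{@code
* new BlazeMeterEngine(System.getenv("BZ_TOKEN"))
*     .totalUsers(500)
*     .threadsPerEngine(100); // BlazeMeter will use 500 / 100 = 5 engines
* }</pre>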
*
* It is important to set this value appropriately, since different test plans may impose a
* different load on BlazeMeter engines. This in turn defines a different limit on the number of
* virtual users per engine that a test run requires to properly measure the performance of the
* system under test. This process is usually referred to as "calibration"; check the BlazeMeter
* documentation for more details about it.
*
* When not specified, the value of the last test run will be used, or the default one for your
* BlazeMeter billing plan if no previous test run exists.
*
* @param threadsPerEngine the number of threads/virtual users to execute per BlazeMeter engine.
* @return the modified instance for fluent API usage.
*/
public BlazeMeterEngine threadsPerEngine(int threadsPerEngine) {
this.threadsPerEngine = threadsPerEngine;
return this;
}
/**
* Specifies that the test run will use the BlazeMeter debug run feature, which does not consume
* credits but is limited to 10 threads and 5 minutes or 100 iterations.
*
* @return the modified instance for fluent API usage.
*/
public BlazeMeterEngine useDebugRun() {
this.useDebugRun = true;
return this;
}
@Override
public BlazeMeterTestPlanStats run(DslTestPlan testPlan)
throws IOException, InterruptedException, TimeoutException {
Project project = findProject();
/*
Create the file within a temporary directory, instead of using a plain temporary file, to control
the name of the file, which is later used by the BlazeMeter test.
*/
File jmxFile = Files.createTempDirectory("jmeter-dsl").resolve("test.jmx").toFile();
try {
saveTestPlanTo(testPlan, jmxFile);
Test test = client.findTestByName(testName, project).orElse(null);
TestConfig testConfig = buildTestConfig(project, jmxFile);
if (test != null) {
client.updateTest(test, testConfig);
LOG.info("Updated test {}", test.getUrl());
} else {
test = client.createTest(testConfig, project);
LOG.info("Created test {}", test.getUrl());
}
client.uploadTestFile(test, jmxFile);
TestRun testRun = client.startTest(test, buildTestRunConfig());
LOG.info("Started test run {}", testRun.getUrl());
awaitTestEnd(testRun);
return findTestPlanStats(testRun);
} finally {
if (jmxFile.delete()) {
jmxFile.getParentFile().delete();
}
}
}
private Project findProject() throws IOException {
String appBaseUrl = BASE_URL + "/app/#";
return projectId == null ? client.findDefaultProject(appBaseUrl)
: client.findProjectById(this.projectId, appBaseUrl);
}
private void saveTestPlanTo(DslTestPlan testPlan, File jmxFile) throws IOException {
JmeterEnvironment env = new JmeterEnvironment();
try (FileOutputStream output = new FileOutputStream(jmxFile.getPath())) {
HashTree tree = new ListedHashTree();
BuildTreeContext context = new BuildTreeContext();
context.buildTreeFor(testPlan, tree);
env.saveTree(tree, output);
context.getVisualizers().forEach((v, e) ->
LOG.warn(
"BlazeMeterEngine does not currently support displaying visualizers. Ignoring {}.",
v.getClass().getSimpleName())
);
}
}
private TestConfig buildTestConfig(Project project, File jmxFile) {
return new TestConfig()
.name(testName)
.projectId(project.getId())
.jmxFile(jmxFile)
.totalUsers(totalUsers)
.rampUp(rampUp)
.iterations(iterations)
.holdFor(holdFor)
.threadsPerEngine(threadsPerEngine);
}
private TestRunConfig buildTestRunConfig() {
TestRunConfig ret = new TestRunConfig();
if (useDebugRun) {
ret.debugRun();
}
return ret;
}
private void awaitTestEnd(TestRun testRun)
throws InterruptedException, IOException, TimeoutException {
TestRunStatus status = TestRunStatus.CREATED;
Instant testStart = Instant.now();
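// Poll the test run status until it reaches ENDED or the configured test timeout elapses.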
do {
Thread.sleep(STATUS_POLL_PERIOD.toMillis());
TestRunStatus newStatus = client.findTestRunStatus(testRun);
if (!status.equals(newStatus)) {
LOG.debug("Test run {} status changed to: {}", testRun.getUrl(), newStatus);
status = newStatus;
}
} while (!TestRunStatus.ENDED.equals(status) && !hasTimedOut(testStart, testTimeout));
if (!TestRunStatus.ENDED.equals(status)) {
throw buildTestTimeoutException(testRun);
} else if (!status.isDataAvailable()) {
awaitAvailableData(testRun, testStart);
}
}
private boolean hasTimedOut(Instant start, Duration timeout) {
return Duration.between(start, Instant.now()).compareTo(timeout) >= 0;
}
private TimeoutException buildTestTimeoutException(TestRun testRun) {
return new TimeoutException(String.format(
"Test %s didn't end after %s. "
+ "If the timeout is too short, you can change it with testTimeout() method.",
testRun.getUrl(), testTimeout));
}
private void awaitAvailableData(TestRun testRun, Instant testStart)
throws InterruptedException, IOException, TimeoutException {
TestRunStatus status;
Instant dataPollStart = Instant.now();
do {
Thread.sleep(STATUS_POLL_PERIOD.toMillis());
status = client.findTestRunStatus(testRun);
} while (!status.isDataAvailable() && !hasTimedOut(testStart, testTimeout) && !hasTimedOut(
dataPollStart, availableDataTimeout));
if (hasTimedOut(testStart, testTimeout)) {
throw buildTestTimeoutException(testRun);
} else if (!status.isDataAvailable()) {
throw new TimeoutException(String.format(
"Test %s ended, but no data is available after %s. "
+ "This is usually caused by some failure in BlazeMeter. "
+ "Check bzt.log and jmeter.out, and if everything looks good you might try "
+ "increasing this timeout with availableDataTimeout() method.", testRun.getUrl(),
availableDataTimeout));
}
}
private BlazeMeterTestPlanStats findTestPlanStats(TestRun testRun)
throws IOException {
TestRunLabeledSummary summary = client
.findTestRunSummaryStats(testRun).getSummary().get(0);
List<TestRunRequestStats> labeledStats = client
.findTestRunRequestStats(testRun);
return buildTestStats(summary, labeledStats);
}
private BlazeMeterTestPlanStats buildTestStats(TestRunLabeledSummary summary,
List<TestRunRequestStats> labeledStats) {
BlazeMeterTestPlanStats stats = new BlazeMeterTestPlanStats();
for (TestRunRequestStats labeledStat : labeledStats) {
BlazemeterStatsSummary labelStatsSummary = new BlazemeterStatsSummary(labeledStat, summary);
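// BlazeMeter reports aggregated metrics under the special "ALL" label, which maps to the
// overall test plan stats; any other label maps to per-label (sampler) stats.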
if ("ALL".equals(labeledStat.getLabelName())) {
stats.setOverallStats(labelStatsSummary);
stats.setStart(labelStatsSummary.firstTime);
stats.setEnd(labelStatsSummary.endTime);
} else {
stats.setLabeledStats(labeledStat.getLabelName(), labelStatsSummary);
}
}
return stats;
}
public static class BlazeMeterTestPlanStats extends TestPlanStats {
public BlazeMeterTestPlanStats() {
super(() -> null);
}
public void setLabeledStats(String label, StatsSummary stats) {
labeledStats.put(label, stats);
}
public void setOverallStats(StatsSummary stats) {
overallStats = stats;
}
}
public static class BlazemeterStatsSummary implements StatsSummary {
private final Instant firstTime;
private final Instant endTime;
private final CountMetricSummary samples = new CountMetricSummary();
private final CountMetricSummary errors = new CountMetricSummary();
private final CountMetricSummary receivedBytes = new CountMetricSummary();
private final BlazeMeterTimeMetricSummary sampleTime;
private BlazemeterStatsSummary(TestRunRequestStats labeledStat,
TestRunLabeledSummary summary) {
/*
These two values are approximations, since the BlazeMeter API does not provide such information
per label, and calculating it from result logs would incur significant additional time and
resource usage.
*/
firstTime = summary.getFirst();
endTime = summary.getLast();
long elapsedTimeMillis = labeledStat.getDuration();
samples.increment(labeledStat.getSamples(), elapsedTimeMillis);
errors.increment(labeledStat.getErrorsCount(), elapsedTimeMillis);
sampleTime = new BlazeMeterTimeMetricSummary(labeledStat.getMinResponseTime(),
labeledStat.getMaxResponseTime(), labeledStat.getAvgResponseTime(),
labeledStat.getMedianResponseTime(), labeledStat.getPerc90(), labeledStat.getPerc95(),
labeledStat.getPerc99());
// Similar comment as with firstTime and endTime: this is just an approximation.
receivedBytes.increment(Math.round(labeledStat.getAvgBytes() / 1000 * elapsedTimeMillis),
elapsedTimeMillis);
}
@Override
public void add(SampleResult result) {
}
@Override
public Instant firstTime() {
return firstTime;
}
@Override
public Instant endTime() {
return endTime;
}
@Override
public CountMetricSummary samples() {
return samples;
}
@Override
public CountMetricSummary errors() {
return errors;
}
public BlazeMeterTimeMetricSummary sampleTime() {
return sampleTime;
}
@Override
public CountMetricSummary receivedBytes() {
return receivedBytes;
}
@Override
public CountMetricSummary sentBytes() {
throw new UnsupportedOperationException(
"BlazeMeter API does not provide an efficient way to get this value.");
}
}
public static class BlazeMeterTimeMetricSummary implements TimeMetricSummary {
private final Duration min;
private final Duration max;
private final Duration mean;
private final Duration median;
private final Duration percentile90;
private final Duration percentile95;
private final Duration percentile99;
private BlazeMeterTimeMetricSummary(long min, long max, double mean, double median,
double percentile90, double percentile95, double percentile99) {
this.min = Duration.ofMillis(min);
this.max = Duration.ofMillis(max);
this.mean = double2Duration(mean);
this.median = double2Duration(median);
this.percentile90 = double2Duration(percentile90);
this.percentile95 = double2Duration(percentile95);
this.percentile99 = double2Duration(percentile99);
}
private Duration double2Duration(double millis) {
return Duration.ofMillis(Math.round(millis));
}
@Override
public Duration min() {
return min;
}
@Override
public Duration max() {
return max;
}
@Override
public Duration mean() {
return mean;
}
@Override
public Duration median() {
return median;
}
@Override
public Duration perc90() {
return percentile90;
}
@Override
public Duration perc95() {
return percentile95;
}
@Override
public Duration perc99() {
return percentile99;
}
}
}