package io.hyperfoil.tools.horreum.svc;
import static com.fasterxml.jackson.databind.node.JsonNodeFactory.instance;
import static io.hyperfoil.tools.horreum.entity.data.SchemaDAO.QUERY_1ST_LEVEL_BY_RUNID_TRANSFORMERID_SCHEMA_ID;
import static io.hyperfoil.tools.horreum.entity.data.SchemaDAO.QUERY_2ND_LEVEL_BY_RUNID_TRANSFORMERID_SCHEMA_ID;
import static io.hyperfoil.tools.horreum.entity.data.SchemaDAO.QUERY_TRANSFORMER_TARGETS;
import java.io.IOException;
import java.time.Instant;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;
import java.util.TreeMap;
import java.util.concurrent.ConcurrentHashMap;
import java.util.function.BiConsumer;
import java.util.stream.Collectors;
import java.util.stream.StreamSupport;
import jakarta.annotation.security.PermitAll;
import jakarta.annotation.security.RolesAllowed;
import jakarta.enterprise.context.ApplicationScoped;
import jakarta.inject.Inject;
import jakarta.persistence.EntityManager;
import jakarta.persistence.NoResultException;
import jakarta.persistence.PersistenceException;
import jakarta.persistence.Query;
import jakarta.persistence.TransactionRequiredException;
import jakarta.persistence.Tuple;
import jakarta.transaction.InvalidTransactionException;
import jakarta.transaction.SystemException;
import jakarta.transaction.Transaction;
import jakarta.transaction.TransactionManager;
import jakarta.transaction.Transactional;
import jakarta.ws.rs.WebApplicationException;
import jakarta.ws.rs.core.HttpHeaders;
import jakarta.ws.rs.core.MediaType;
import jakarta.ws.rs.core.Response;
import org.hibernate.ScrollMode;
import org.hibernate.ScrollableResults;
import org.hibernate.Session;
import org.hibernate.query.NativeQuery;
import org.hibernate.type.StandardBasicTypes;
import org.jboss.logging.Logger;
import org.jboss.resteasy.reactive.multipart.FileUpload;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.JsonNodeFactory;
import com.fasterxml.jackson.databind.node.ObjectNode;
import com.fasterxml.jackson.databind.node.TextNode;
import io.hyperfoil.tools.horreum.api.SortDirection;
import io.hyperfoil.tools.horreum.api.data.Access;
import io.hyperfoil.tools.horreum.api.data.Dataset;
import io.hyperfoil.tools.horreum.api.data.ExportedLabelValues;
import io.hyperfoil.tools.horreum.api.data.JsonpathValidation;
import io.hyperfoil.tools.horreum.api.data.Run;
import io.hyperfoil.tools.horreum.api.data.ValidationError;
import io.hyperfoil.tools.horreum.api.services.RunService;
import io.hyperfoil.tools.horreum.api.services.SchemaService;
import io.hyperfoil.tools.horreum.api.services.TestService;
import io.hyperfoil.tools.horreum.bus.AsyncEventChannels;
import io.hyperfoil.tools.horreum.datastore.BackendResolver;
import io.hyperfoil.tools.horreum.datastore.Datastore;
import io.hyperfoil.tools.horreum.datastore.DatastoreResponse;
import io.hyperfoil.tools.horreum.entity.PersistentLogDAO;
import io.hyperfoil.tools.horreum.entity.alerting.DataPointDAO;
import io.hyperfoil.tools.horreum.entity.alerting.TransformationLogDAO;
import io.hyperfoil.tools.horreum.entity.data.DatasetDAO;
import io.hyperfoil.tools.horreum.entity.data.RunDAO;
import io.hyperfoil.tools.horreum.entity.data.SchemaDAO;
import io.hyperfoil.tools.horreum.entity.data.TestDAO;
import io.hyperfoil.tools.horreum.entity.data.TransformerDAO;
import io.hyperfoil.tools.horreum.hibernate.JsonBinaryType;
import io.hyperfoil.tools.horreum.mapper.DatasetMapper;
import io.hyperfoil.tools.horreum.mapper.RunMapper;
import io.hyperfoil.tools.horreum.server.RoleManager;
import io.hyperfoil.tools.horreum.server.WithRoles;
import io.quarkus.narayana.jta.runtime.TransactionConfiguration;
import io.quarkus.runtime.Startup;
import io.quarkus.security.identity.SecurityIdentity;
@ApplicationScoped
@Startup
public class RunServiceImpl implements RunService {
private static final Logger log = Logger.getLogger(RunServiceImpl.class);
//@formatter:off
private static final String FIND_AUTOCOMPLETE = """
SELECT * FROM (
SELECT DISTINCT jsonb_object_keys(q) AS key
FROM run, jsonb_path_query(run.data, ? ::jsonpath) q
WHERE jsonb_typeof(q) = 'object') AS keys
WHERE keys.key LIKE CONCAT(?, '%');
""";
protected static final String FIND_RUNS_WITH_URI = """
SELECT id, testid
FROM run
WHERE NOT trashed
AND (data->>'$schema' = ?1
OR (CASE
WHEN jsonb_typeof(data) = 'object' THEN ?1 IN (SELECT values.value->>'$schema' FROM jsonb_each(data) as values)
WHEN jsonb_typeof(data) = 'array' THEN ?1 IN (SELECT jsonb_array_elements(data)->>'$schema')
ELSE false
END)
OR (metadata IS NOT NULL AND ?1 IN (SELECT jsonb_array_elements(metadata)->>'$schema'))
)
""";
//@formatter:on
private static final String[] CONDITION_SELECT_TERMINAL = { "==", "!=", "<>", "<", "<=", ">", ">=", " " };
private static final String CHANGE_ACCESS = "UPDATE run SET owner = ?, access = ? WHERE id = ?";
private static final String SCHEMA_USAGE = "COALESCE(jsonb_agg(jsonb_build_object(" +
"'id', schema.id, 'uri', rs.uri, 'name', schema.name, 'source', rs.source, " +
"'type', rs.type, 'key', rs.key, 'hasJsonSchema', schema.schema IS NOT NULL)), '[]')";
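// SCHEMA_USAGE aggregates one JSON object per run/schema link; an illustrative
// element (values assumed, not from this source):
// {"id": 1, "uri": "urn:my-schema:1.0", "name": "My schema", "source": 0,
//  "type": 1, "key": "config", "hasJsonSchema": true}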
@Inject
EntityManager em;
@Inject
SecurityIdentity identity;
@Inject
RoleManager roleManager;
@Inject
TransactionManager tm;
@Inject
SqlServiceImpl sqlService;
@Inject
TestServiceImpl testService;
@Inject
LabelValuesService labelValuesService;
@Inject
ObjectMapper mapper;
@Inject
ServiceMediator mediator;
@Inject
BackendResolver backendResolver;
@Inject
Session session;
private final ConcurrentHashMap transformations = new ConcurrentHashMap<>();
@Transactional
@WithRoles(extras = Roles.HORREUM_SYSTEM)
void onTestDeleted(int testId) {
log.debugf("Trashing runs for test (%d)", testId);
ScrollableResults<Integer> results = session.createNativeQuery("SELECT id FROM run WHERE testid = ?1", Integer.class)
.setParameter(1, testId)
.setReadOnly(true)
.setFetchSize(100)
.scroll(ScrollMode.FORWARD_ONLY);
while (results.next()) {
int id = results.get();
trashDueToTestDeleted(id);
}
}
// plain trash() does not have the right privileges, and @RolesAllowed would cause a ContextNotActiveException
@WithRoles(extras = Roles.HORREUM_SYSTEM)
@Transactional
void trashDueToTestDeleted(int id) {
trashInternal(id, true);
}
// We cannot run this without a transaction (to avoid timeout) because there is no request going on
// and the EM has to bind its lifecycle either to the current request or to a transaction.
@Transactional(Transactional.TxType.REQUIRES_NEW)
@TransactionConfiguration(timeout = 3600) // 1 hour, this may run a long time
@WithRoles(extras = Roles.HORREUM_SYSTEM)
void onNewOrUpdatedSchema(int schemaId) {
SchemaDAO schema = SchemaDAO.findById(schemaId);
if (schema == null) {
log.errorf("Cannot process schema add/update: cannot load schema %d", schemaId);
return;
}
processNewOrUpdatedSchema(schema);
}
@Transactional
void processNewOrUpdatedSchema(SchemaDAO schema) {
// we don't have to care about races with new runs
findRunsWithUri(schema.uri, (runId, testId) -> {
log.debugf("Recalculate Datasets for run %d - schema %d (%s) changed", runId, schema.id, schema.uri);
onNewOrUpdatedSchemaForRun(runId, schema.id);
});
}
void findRunsWithUri(String uri, BiConsumer<Integer, Integer> consumer) {
ScrollableResults<RunFromUri> results = session.createNativeQuery(FIND_RUNS_WITH_URI, Tuple.class).setParameter(1, uri)
.setTupleTransformer((tuple, aliases) -> {
RunFromUri r = new RunFromUri();
r.id = (int) tuple[0];
r.testId = (int) tuple[1];
return r;
})
.setFetchSize(100)
.scroll(ScrollMode.FORWARD_ONLY);
while (results.next()) {
RunFromUri r = results.get();
consumer.accept(r.id, r.testId);
}
}
@WithRoles(extras = Roles.HORREUM_SYSTEM)
@Transactional
void onNewOrUpdatedSchemaForRun(int runId, int schemaId) {
em.createNativeQuery("SELECT update_run_schemas(?1)::text").setParameter(1, runId).getSingleResult();
//clear validation error tables by schemaId
em.createNativeQuery("DELETE FROM dataset_validationerrors WHERE schema_id = ?1")
.setParameter(1, schemaId).executeUpdate();
em.createNativeQuery("DELETE FROM run_validationerrors WHERE schema_id = ?1")
.setParameter(1, schemaId).executeUpdate();
Util.registerTxSynchronization(tm, txStatus -> mediator.queueRunRecalculation(runId));
// transform(runId, true);
}
@PermitAll
@WithRoles
@Override
public RunExtended getRun(int id) {
RunExtended runExtended = null;
String extendedData = (String) Util.runQuery(em, "SELECT (to_jsonb(run) || jsonb_build_object(" +
"'schemas', (SELECT " + SCHEMA_USAGE
+ " FROM run_schemas rs JOIN schema ON rs.schemaid = schema.id WHERE runid = run.id), " +
"'testname', (SELECT name FROM test WHERE test.id = run.testid), " +
"'datasets', (SELECT jsonb_agg(id ORDER BY id) FROM dataset WHERE runid = run.id), " +
"'validationErrors', (SELECT jsonb_agg(jsonb_build_object('schemaId', schema_id, 'error', error)) FROM run_validationerrors WHERE run_id = ?1)"
+
"))::text FROM run WHERE id = ?1", id);
try {
runExtended = mapper.readValue(extendedData, RunExtended.class);
} catch (JsonProcessingException e) {
throw ServiceException.serverError("Could not retrieve extended run");
}
return runExtended;
}
@WithRoles
@Override
public RunSummary getRunSummary(int id) {
try {
Query query = em.createNativeQuery("SELECT run.id, run.start, run.stop, run.testid, " +
"run.owner, run.access, run.trashed, run.description, run.metadata IS NOT NULL as has_metadata, "
+
"(SELECT name FROM test WHERE test.id = run.testid) as testname, " +
"(SELECT " + SCHEMA_USAGE
+ " FROM run_schemas rs JOIN schema ON schema.id = rs.schemaid WHERE rs.runid = run.id) as schemas, " +
"(SELECT json_agg(id ORDER BY id) FROM dataset WHERE runid = run.id) as datasets, " +
"(SELECT jsonb_agg(jsonb_build_object('schemaId', schema_id, 'error', error)) AS errors FROM run_validationerrors WHERE run_id = ?1 GROUP BY run_id) AS validationErrors "
+
"FROM run where id = ?1").setParameter(1, id);
initTypes(query);
return createSummary((Object[]) query.getSingleResult());
} catch (NoResultException e) {
throw ServiceException.notFound("Run " + id + " not found");
}
}
@PermitAll
@WithRoles
@Override
public Object getData(int id, String schemaUri) {
if (schemaUri == null || schemaUri.isEmpty()) {
return Util.runQuery(em, "SELECT data#>>'{}' from run where id = ?", id);
} else {
String sqlQuery = "SELECT (CASE " +
"WHEN rs.type = 0 THEN run.data " +
"WHEN rs.type = 1 THEN run.data->rs.key " +
"ELSE run.data->(rs.key::integer) " +
"END)#>>'{}' FROM run JOIN run_schemas rs ON rs.runid = run.id WHERE id = ?1 AND rs.source = 0 AND rs.uri = ?2";
return Util.runQuery(em, sqlQuery, id, schemaUri);
}
}
//this is nearly identical to TestServiceImpl.labelValues (except the return object)
//this reads from the dataset table but provides data specific to the run...
@Override
public List<ExportedLabelValues> labelValues(int runId, String filter, String sort, String direction, int limit, int page,
List<String> include, List<String> exclude, boolean multiFilter) {
Run run = getRun(runId);
if (run == null) {
throw ServiceException.notFound("Cannot find run " + runId);
}
try {
return labelValuesService.labelValuesByRun(runId, filter, sort, direction, limit,
page, include, exclude, multiFilter);
} catch (IllegalArgumentException e) {
throw ServiceException.badRequest(e.getMessage());
}
}
@PermitAll
@WithRoles
@Override
public JsonNode getMetadata(int id, String schemaUri) {
String result;
if (schemaUri == null || schemaUri.isEmpty()) {
result = (String) Util.runQuery(em, "SELECT coalesce((metadata#>>'{}')::jsonb, '{}'::jsonb) from run where id = ?",
id);
} else {
String sqlQuery = "SELECT run.metadata->(rs.key::integer)#>>'{}' FROM run " +
"JOIN run_schemas rs ON rs.runid = run.id WHERE id = ?1 AND rs.source = 1 AND rs.uri = ?2";
result = (String) Util.runQuery(em, sqlQuery, id, schemaUri);
}
try {
return Util.OBJECT_MAPPER.readTree(result);
} catch (JsonProcessingException e) {
throw ServiceException.serverError(e.getMessage());
}
}
@RolesAllowed(Roles.TESTER)
@WithRoles
@Transactional
@Override
// TODO: it would be nicer to use @FormParams but fetchival on client side doesn't support that
public void updateAccess(int id, String owner, Access access) {
Query query = em.createNativeQuery(CHANGE_ACCESS);
query.setParameter(1, owner);
query.setParameter(2, access.ordinal());
query.setParameter(3, id);
if (query.executeUpdate() != 1) {
throw ServiceException.serverError("Access change failed (missing permissions?)");
}
}
@RolesAllowed(Roles.UPLOADER)
@WithRoles
@Transactional
@Override
public Response add(String testNameOrId, String owner, Access access, Run run) {
if (owner != null) {
run.owner = owner;
}
if (access != null) {
run.access = access;
}
log.debugf("About to add new run to test %s using owner", testNameOrId, owner);
if (testNameOrId == null || testNameOrId.isEmpty()) {
if (run.testid == null || run.testid == 0) {
return Response.status(Response.Status.BAD_REQUEST).entity("No test name or id provided").build();
} else
testNameOrId = run.testid.toString();
}
TestDAO test = testService.ensureTestExists(testNameOrId);
run.testid = test.id;
Integer runId = addAuthenticated(RunMapper.to(run), test);
return Response.status(Response.Status.OK).entity(String.valueOf(runId)).header(HttpHeaders.LOCATION, "/run/" + runId)
.build();
}
@Override
public Response addRunFromData(String start, String stop, String test, String owner, Access access, String schemaUri,
String description, String data) {
return addRunFromData(start, stop, test, owner, access, schemaUri, description, data, null);
}
@Override
public Response addRunFromData(String start, String stop, String test, String owner, Access access, String schemaUri,
String description, FileUpload data, FileUpload metadata) {
if (data == null) {
log.debugf("Failed to upload for test %s with description %s because of missing data.", test, description);
throw ServiceException.badRequest("No data!");
} else if (!MediaType.APPLICATION_JSON.equals(data.contentType())) {
log.debugf("Failed to upload for test %s with description %s because of wrong data content type: %s.", test,
description, data.contentType());
throw ServiceException
.badRequest("Part 'data' must use content-type: application/json, currently: " + data.contentType());
}
if (metadata != null && !MediaType.APPLICATION_JSON.equals(metadata.contentType())) {
log.debugf("Failed to upload for test %s with description %s because of wrong metadata content type: %s.", test,
description, metadata.contentType());
throw ServiceException.badRequest(
"Part 'metadata' must use content-type: application/json, currently: " + metadata.contentType());
}
JsonNode dataNode;
JsonNode metadataNode = null;
try {
dataNode = Util.OBJECT_MAPPER.readTree(data.uploadedFile().toFile());
if (metadata != null) {
metadataNode = Util.OBJECT_MAPPER.readTree(metadata.uploadedFile().toFile());
if (metadataNode.isArray()) {
for (JsonNode item : metadataNode) {
if (!item.isObject()) {
log.debugf(
"Failed to upload for test %s with description %s because of wrong item in metadata: %s.",
test, description, item);
throw ServiceException.badRequest("One of metadata elements is not an object!");
} else if (!item.has("$schema")) {
log.debugf(
"Failed to upload for test %s with description %s because of missing schema in metadata: %s.",
test, description, item);
throw ServiceException.badRequest("One of metadata elements is missing a schema!");
}
}
} else if (metadataNode.isObject()) {
if (!metadataNode.has("$schema")) {
log.debugf("Failed to upload for test %s with description %s because of missing schema in metadata.",
test, description);
throw ServiceException.badRequest("Metadata is missing schema!");
}
metadataNode = instance.arrayNode().add(metadataNode);
}
}
} catch (IOException e) {
log.error("Failed to read data/metadata from upload file", e);
throw ServiceException.badRequest("Provided data/metadata can't be read (JSON encoding problem?)");
}
return addRunFromData(start, stop, test, owner, access, schemaUri, description, dataNode.toString(), metadataNode);
}
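// A minimal upload sketch (illustrative only; host, test name and file names
// are assumptions, not taken from this source):
//
// curl 'http://localhost:8080/api/run/data?test=my-test&start=$.start&stop=$.stop' \
//     -F 'data=@run.json;type=application/json' \
//     -F 'metadata=@metadata.json;type=application/json'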
@RolesAllowed(Roles.UPLOADER)
@Transactional
@WithRoles
Response addRunFromData(String start, String stop, String test,
String owner, Access access,
String schemaUri, String description,
String stringData, JsonNode metadata) {
if (stringData == null) {
log.debugf("Failed to upload for test %s with description %s because of missing data.", test, description);
throw ServiceException.badRequest("No data!");
}
JsonNode data = null;
try {
data = Util.OBJECT_MAPPER.readValue(stringData, JsonNode.class);
} catch (JsonProcessingException e) {
throw ServiceException.badRequest("Could not map incoming data to JsonNode: " + e.getMessage());
}
Object foundTest = findIfNotSet(test, data);
String testNameOrId = foundTest == null ? null : foundTest.toString().trim();
if (testNameOrId == null || testNameOrId.isEmpty()) {
log.debugf("Failed to upload for test %s with description %s as the test cannot be identified.", test, description);
throw ServiceException.badRequest("Cannot identify test name.");
}
TestDAO testEntity = testService.ensureTestExists(testNameOrId);
Datastore datastore = backendResolver.getBackend(testEntity.backendConfig.type);
DatastoreResponse response = datastore.handleRun(data, metadata, testEntity.backendConfig,
Optional.ofNullable(schemaUri));
List<Integer> runIds = new ArrayList<>();
if (datastore.uploadType() == Datastore.UploadType.MUILTI
&& response.payload instanceof ArrayNode) {
if (response.payload.isEmpty()) {
return Response.status(Response.Status.NO_CONTENT).entity("Query returned no results").build();
}
//if the datastore returns more than 10 results, offload processing to the async queue - this might take a LOOONG time
if (response.payload.size() > 10) {
response.payload.forEach(jsonNode -> {
mediator.queueRunUpload(start, stop, test, owner, access, schemaUri, description, null, jsonNode,
testEntity);
});
} else { //process synchronously
response.payload.forEach(jsonNode -> {
runIds.add(getPersistRun(start, stop, test, owner, access, schemaUri, description, metadata, jsonNode,
testEntity));
});
}
} else {
runIds.add(getPersistRun(start, stop, test, owner, access, schemaUri, description, metadata, response.payload,
testEntity));
}
if (!runIds.isEmpty()) {
return Response.status(Response.Status.OK)
.entity(runIds.stream().map(String::valueOf).collect(Collectors.joining(", ")))
.build();
} else {
return Response.status(Response.Status.ACCEPTED).entity("More than 10 runs uploaded, processing asynchronously")
.build();
}
}
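// Response sketch (illustrative): the synchronous path answers 200 OK with the
// comma-separated run IDs, e.g. "101, 102"; the asynchronous path answers
// 202 ACCEPTED and the runs appear once the queued uploads are processed.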
@Transactional
void persistRun(ServiceMediator.RunUpload runUpload) {
runUpload.roles.add("horreum.system");
roleManager.setRoles(runUpload.roles.stream().collect(Collectors.joining(",")));
TestDAO testEntity = TestDAO.findById(runUpload.testId);
if (testEntity == null) {
log.errorf("Could not find Test (%d) for Run Upload", runUpload.testId);
return;
}
try {
Integer runID = getPersistRun(runUpload.start, runUpload.stop, runUpload.test,
runUpload.owner, runUpload.access, runUpload.schemaUri,
runUpload.description, runUpload.metaData, runUpload.payload, testEntity);
if (runID == null) {
log.errorf("Could not persist Run for Test: %d", testEntity.name);
}
} catch (ServiceException serviceException) {
log.errorf("Could not persist Run for Test: %d", testEntity.name, serviceException);
}
}
private Integer getPersistRun(String start, String stop, String test, String owner, Access access,
String schemaUri, String description, JsonNode metadata, JsonNode data, TestDAO testEntity) {
Object foundStart = findIfNotSet(start, data);
Object foundStop = findIfNotSet(stop, data);
Object foundDescription = findIfNotSet(description, data);
Instant startInstant = Util.toInstant(foundStart);
Instant stopInstant = Util.toInstant(foundStop);
if (startInstant == null) {
log.debugf("Failed to upload for test %s with description %s; cannot parse start time %s (%s)", test, description,
foundStart, start);
throw ServiceException.badRequest("Cannot parse start time from " + foundStart + " (" + start + ")");
} else if (stopInstant == null) {
log.debugf("Failed to upload for test %s with description %s; cannot parse start time %s (%s)", test, description,
foundStop, stop);
throw ServiceException.badRequest("Cannot parse stop time from " + foundStop + " (" + stop + ")");
}
if (schemaUri != null && !schemaUri.isEmpty()) {
if (data.isObject()) {
((ObjectNode) data).put("$schema", schemaUri);
} else if (data.isArray()) {
data.forEach(node -> {
if (node.isObject() && !node.hasNonNull("$schema")) {
((ObjectNode) node).put("$schema", schemaUri);
}
});
}
}
log.debugf("Creating new run for test %s(%d) with description %s", testEntity.name, testEntity.id, foundDescription);
RunDAO run = new RunDAO();
run.testid = testEntity.id;
run.start = startInstant;
run.stop = stopInstant;
run.description = foundDescription != null ? foundDescription.toString() : null;
run.data = data;
run.metadata = metadata;
run.owner = owner;
run.access = access;
return addAuthenticated(run, testEntity);
}
private Object findIfNotSet(String value, JsonNode data) {
if (value != null && !value.isEmpty()) {
if (value.startsWith("$.")) {
return Util.findJsonPath(data, value);
} else {
return value;
}
} else {
return null;
}
}
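// findIfNotSet examples (illustrative values): "2024-01-01T10:00:00Z" is
// returned verbatim, while "$.build.timestamp" is treated as a jsonpath and
// resolved against the uploaded JSON document.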
@WithRoles(extras = Roles.HORREUM_SYSTEM)
private Integer addAuthenticated(RunDAO run, TestDAO test) {
// ID will always be generated anew
run.id = null;
//if run.metadata is null on the client, it will be converted to a NullNode, not null...
if (run.metadata != null && run.metadata.isNull())
run.metadata = null;
if (run.owner == null) {
List<String> uploaders = identity.getRoles().stream().filter(role -> role.endsWith("-uploader"))
.collect(Collectors.toList());
if (uploaders.size() != 1) {
log.debugf("Failed to upload for test %s: no owner, available uploaders: %s", test.name, uploaders);
throw ServiceException.badRequest(
"Missing owner and cannot select single default owners; this user has these uploader roles: "
+ uploaders);
}
String uploader = uploaders.get(0);
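// Strip the 9-character "-uploader" suffix and append "-team"; e.g.
// (illustrative) the role "perf-uploader" yields the owner "perf-team".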
run.owner = uploader.substring(0, uploader.length() - 9) + "-team";
} else if (!Objects.equals(test.owner, run.owner) && !identity.getRoles().contains(run.owner)) {
log.debugf("Failed to upload for test %s: requested owner %s, available roles: %s", test.name, run.owner,
identity.getRoles());
throw ServiceException.badRequest("This user does not have permissions to upload run for owner=" + run.owner);
}
if (run.access == null) {
run.access = Access.PRIVATE;
}
log.debugf("Uploading with owner=%s and access=%s", run.owner, run.access);
try {
if (run.id == null) {
em.persist(run);
} else {
trashConnectedDatasets(run.id, run.testid);
em.merge(run);
}
em.flush();
} catch (Exception e) {
log.error("Failed to persist run.", e);
throw ServiceException.serverError("Failed to persist run");
}
log.debugf("Upload flushed, run ID %d", run.id);
mediator.newRun(RunMapper.from(run));
transform(run.id, false);
if (mediator.testMode())
Util.registerTxSynchronization(tm,
txStatus -> mediator.publishEvent(AsyncEventChannels.RUN_NEW, test.id, RunMapper.from(run)));
return run.id;
}
@PermitAll
@WithRoles
@Override
public List<String> autocomplete(String query) {
if (query == null || query.isEmpty()) {
return null;
}
String jsonpath = query.trim();
String incomplete = "";
if (jsonpath.endsWith(".")) {
jsonpath = jsonpath.substring(0, jsonpath.length() - 1);
} else {
int lastDot = jsonpath.lastIndexOf('.');
if (lastDot > 0) {
incomplete = jsonpath.substring(lastDot + 1);
jsonpath = jsonpath.substring(0, lastDot);
} else {
incomplete = jsonpath;
jsonpath = "$.**";
}
}
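// Normalization examples (illustrative): "build." -> jsonpath "build" with
// prefix ""; "build.ti" -> jsonpath "build" with prefix "ti"; a bare "ti" ->
// jsonpath "$.**" with prefix "ti". Paths not starting with '$' are prefixed
// with "$.**." further below.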
int conditionIndex = jsonpath.indexOf('@');
if (conditionIndex >= 0) {
int conditionSelectEnd = jsonpath.length();
for (String terminal : CONDITION_SELECT_TERMINAL) {
int ti = jsonpath.indexOf(terminal, conditionIndex + 1);
if (ti >= 0) {
conditionSelectEnd = Math.min(conditionSelectEnd, ti);
}
}
String conditionSelect = jsonpath.substring(conditionIndex + 1, conditionSelectEnd);
int queryIndex = jsonpath.indexOf('?');
if (queryIndex < 0) {
// This is a shortcut query '@.foo...'
jsonpath = "$.**" + conditionSelect;
} else if (queryIndex > conditionIndex) {
// Too complex query with multiple conditions
return Collections.emptyList();
} else {
while (queryIndex > 0 && Character.isWhitespace(jsonpath.charAt(queryIndex - 1))) {
--queryIndex;
}
jsonpath = jsonpath.substring(0, queryIndex) + conditionSelect;
}
}
if (!jsonpath.startsWith("$")) {
jsonpath = "$.**." + jsonpath;
}
try {
NativeQuery<String> findAutocomplete = session.createNativeQuery(FIND_AUTOCOMPLETE, String.class);
findAutocomplete.setParameter(1, jsonpath);
findAutocomplete.setParameter(2, incomplete);
List<String> results = findAutocomplete.getResultList();
return results.stream().map(option -> option.matches("^[a-zA-Z0-9_-]*$") ? option : "\"" + option + "\"")
.collect(Collectors.toList());
} catch (PersistenceException e) {
throw ServiceException.badRequest("Failed processing query '" + query + "':\n" + e.getLocalizedMessage());
}
}
@PermitAll
@WithRoles
@Override
public RunsSummary listAllRuns(String query, boolean matchAll, String roles, boolean trashed,
Integer limit, Integer page, String sort, SortDirection direction) {
StringBuilder sql = new StringBuilder("SELECT run.id, run.start, run.stop, run.testId, ")
.append("run.owner, run.access, run.trashed, run.description, ")
.append("run.metadata IS NOT NULL AS has_metadata, test.name AS testname, ")
.append("'[]'::jsonb AS schemas, '[]'::jsonb AS datasets, '[]'::jsonb AS validationErrors ")
.append("FROM run JOIN test ON test.id = run.testId WHERE ");
String[] queryParts;
boolean whereStarted = false;
if (query == null || query.isEmpty()) {
queryParts = new String[0];
} else {
query = query.trim();
if (query.startsWith("$") || query.startsWith("@")) {
queryParts = new String[] { query };
} else {
queryParts = query.split("([ \t\n,]+)|\\bOR\\b");
}
sql.append("(");
for (int i = 0; i < queryParts.length; ++i) {
if (i != 0) {
sql.append(matchAll ? " AND " : " OR ");
}
sql.append("jsonb_path_exists(data, ?").append(i + 1).append(" ::jsonpath)");
if (queryParts[i].startsWith("$")) {
// no change
} else if (queryParts[i].startsWith("@")) {
queryParts[i] = "$.** ? (" + queryParts[i] + ")";
} else {
queryParts[i] = "$.**." + queryParts[i];
}
}
sql.append(")");
whereStarted = true;
}
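// Query-part rewriting examples (illustrative): "foo" becomes "$.**.foo",
// "@.foo == 1" becomes "$.** ? (@.foo == 1)", and parts already starting
// with '$' are used as-is.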
whereStarted = Roles.addRolesSql(identity, "run", sql, roles, queryParts.length + 1, whereStarted ? " AND" : null)
|| whereStarted;
if (!trashed) {
if (whereStarted) {
sql.append(" AND ");
}
sql.append(" trashed = false ");
}
Util.addPaging(sql, limit, page, sort, direction);
NativeQuery