The Red Hat Global Social Innovation Program is partnering with Boston University and Smarta Byar to create a global, open research platform that allows researchers to study what social sustainability means, using a digital twin of Veberöd, Sweden as the test village, supported by Smarta Byar. The goal of the collaboration is to build the open source technological infrastructure so that researchers can collaborate effectively on the platform to study mobility, public health, sustainability, and other areas, and ultimately to better define the link between well-being and eco-smart cities.
package org.computate.smartvillage.enus.vertx;
import java.io.File;
import java.io.IOException;
import java.math.BigDecimal;
import java.nio.file.Paths;
import java.time.Duration;
import java.time.Instant;
import java.time.LocalDateTime;
import java.time.ZoneId;
import java.time.ZonedDateTime;
import java.time.format.DateTimeFormatter;
import java.util.HashMap;
import java.util.Map;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Optional;
import java.util.Set;
import java.util.Stack;
import java.util.concurrent.TimeUnit;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.exception.ExceptionUtils;
import org.computate.search.serialize.ComputateZonedDateTimeSerializer;
import org.computate.search.tool.TimeTool;
import org.computate.search.tool.XmlTool;
import org.computate.vertx.api.ApiCounter;
import org.computate.vertx.api.ApiRequest;
import org.computate.smartvillage.enus.config.ConfigKeys;
import org.computate.smartvillage.enus.request.SiteRequestEnUS;
import org.computate.smartvillage.enus.model.page.SitePage;
import org.computate.smartvillage.enus.model.page.reader.SitePageReader;
import org.computate.smartvillage.enus.model.htm.SiteHtm;
import org.computate.vertx.config.ComputateConfigKeys;
import org.computate.vertx.handlebars.AuthHelpers;
import org.computate.vertx.handlebars.DateHelpers;
import org.computate.vertx.handlebars.SiteHelpers;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.github.jknack.handlebars.Context;
import com.github.jknack.handlebars.Handlebars;
import com.github.jknack.handlebars.Jackson2Helper;
import com.github.jknack.handlebars.Template;
import com.github.jknack.handlebars.helper.ConditionalHelpers;
import com.github.jknack.handlebars.helper.StringHelpers;
import com.google.common.io.PatternFilenameFilter;
import io.vertx.config.yaml.YamlProcessor;
import io.vertx.core.AbstractVerticle;
import io.vertx.core.CompositeFuture;
import io.vertx.core.Future;
import io.vertx.core.MultiMap;
import io.vertx.core.Promise;
import io.vertx.core.WorkerExecutor;
import io.vertx.core.buffer.Buffer;
import io.vertx.core.eventbus.DeliveryOptions;
import io.vertx.core.json.JsonArray;
import io.vertx.core.json.JsonObject;
import io.vertx.ext.auth.authentication.TokenCredentials;
import io.vertx.ext.auth.authentication.UsernamePasswordCredentials;
import io.vertx.ext.jdbc.JDBCClient;
import io.vertx.ext.mail.MailClient;
import io.vertx.ext.mail.MailConfig;
import io.vertx.ext.web.client.WebClient;
import io.vertx.ext.web.client.predicate.ResponsePredicate;
import io.vertx.ext.web.templ.handlebars.HandlebarsTemplateEngine;
import io.vertx.ext.web.client.WebClientOptions;
import io.vertx.kafka.client.producer.KafkaProducer;
import io.vertx.pgclient.PgConnectOptions;
import io.vertx.pgclient.PgPool;
import io.vertx.sqlclient.Cursor;
import io.vertx.sqlclient.PoolOptions;
import io.vertx.sqlclient.Row;
import io.vertx.sqlclient.RowStream;
import io.vertx.sqlclient.SqlConnection;
import org.computate.smartvillage.enus.model.iotnode.IotNode;
import org.computate.smartvillage.enus.model.traffic.simulation.reader.TrafficFcdReader;
import org.computate.smartvillage.enus.model.traffic.time.step.TimeStep;
import org.computate.smartvillage.enus.model.iotnode.reader.IotNodeReader;
import org.computate.smartvillage.enus.camel.CamelIntegration;
import org.computate.smartvillage.enus.model.user.SiteUser;
import org.computate.smartvillage.enus.result.map.MapResult;
import org.computate.smartvillage.enus.model.traffic.bicycle.step.BicycleStep;
import org.computate.smartvillage.enus.model.traffic.simulation.TrafficSimulation;
import org.computate.smartvillage.enus.model.traffic.person.step.PersonStep;
import org.computate.smartvillage.enus.model.traffic.fiware.parkingaccess.ParkingAccess;
import org.computate.smartvillage.enus.model.traffic.fiware.smarttrafficlight.SmartTrafficLight;
import org.computate.smartvillage.enus.model.traffic.simulation.report.SimulationReport;
import org.computate.smartvillage.enus.model.traffic.fiware.trafficflowobserved.TrafficFlowObserved;
import org.computate.smartvillage.enus.model.traffic.fiware.crowdflowobserved.CrowdFlowObserved;
import org.computate.smartvillage.enus.model.traffic.light.TrafficLight;
import org.computate.smartvillage.enus.model.traffic.light.step.TrafficLightStep;
import org.computate.smartvillage.enus.model.traffic.vehicle.step.VehicleStep;
import org.computate.smartvillage.enus.result.iotnode.step.IotNodeStep;
/**
* A Vert.x worker verticle for the SmartaByar Smart Village platform. It configures the PostgreSQL
* client pool, web client, Handlebars template engine, shared worker executor, email, Kafka, and
* Camel, then schedules periodic data imports and refreshes records from the database into Solr.
*/
public class WorkerVerticle extends WorkerVerticleGen {
private static final Logger LOG = LoggerFactory.getLogger(WorkerVerticle.class);
public static final Integer FACET_LIMIT = 100;
public final DateTimeFormatter TIME_FORMAT = DateTimeFormatter.ofPattern("HH:mm:ss VV");
private KafkaProducer<String, String> kafkaProducer;
/**
* A PgPool for connecting to the PostgreSQL relational database.
**/
private PgPool pgPool;
public PgPool getPgPool() {
return pgPool;
}
public void setPgPool(PgPool pgPool) {
this.pgPool = pgPool;
}
private WebClient webClient;
WorkerExecutor workerExecutor;
Integer commitWithin;
HandlebarsTemplateEngine templateEngine;
Handlebars handlebars;
/**
* This is called by Vert.x when the verticle instance is deployed.
* Set up the startPromise to handle the configuration steps, the initial data import, and the data refresh.
**/
@Override()
public void start(Promise<Void> startPromise) throws Exception {
commitWithin = config().getInteger(ConfigKeys.SOLR_WORKER_COMMIT_WITHIN_MILLIS);
try {
configureData().onSuccess(a ->
configureWebClient().onSuccess(b ->
configureHandlebars().onSuccess(c ->
configureSharedWorkerExecutor().onSuccess(d ->
configureEmail().onSuccess(e ->
configureKafka().onSuccess(f ->
configureCamel().onSuccess(g ->
importData().onSuccess(h ->
refreshAllData().onSuccess(i -> {
startPromise.complete();
}).onFailure(ex -> startPromise.fail(ex))
).onFailure(ex -> startPromise.fail(ex))
).onFailure(ex -> startPromise.fail(ex))
).onFailure(ex -> startPromise.fail(ex))
).onFailure(ex -> startPromise.fail(ex))
).onFailure(ex -> startPromise.fail(ex))
).onFailure(ex -> startPromise.fail(ex))
).onFailure(ex -> startPromise.fail(ex))
).onFailure(ex -> startPromise.fail(ex));
} catch (Exception ex) {
LOG.error("Couldn't start verticle. ", ex);
}
}
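/*
 * Illustrative only, not part of the original source: a minimal sketch of how this worker verticle
 * might be deployed. The DeploymentOptions shown here are assumptions; the actual deployment is
 * handled elsewhere (for example by MainVerticle).
 *
 *   JsonObject siteConfig = ...; // the loaded site configuration
 *   DeploymentOptions options = new DeploymentOptions()
 *       .setConfig(siteConfig)
 *       .setWorker(true)
 *       .setInstances(1);
 *   vertx.deployVerticle(WorkerVerticle.class, options)
 *       .onSuccess(deploymentId -> LOG.info("WorkerVerticle deployed: {}", deploymentId))
 *       .onFailure(ex -> LOG.error("WorkerVerticle deployment failed. ", ex));
 */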
/**
* Val.Fail.enUS:Handlebars was not configured properly.
* Val.Complete.enUS:Handlebars was configured properly.
*/
private Future<Void> configureHandlebars() {
Promise<Void> promise = Promise.promise();
try {
templateEngine = HandlebarsTemplateEngine.create(vertx);
handlebars = (Handlebars)templateEngine.unwrap();
handlebars.registerHelpers(ConditionalHelpers.class);
handlebars.registerHelpers(StringHelpers.class);
handlebars.registerHelpers(AuthHelpers.class);
handlebars.registerHelpers(SiteHelpers.class);
handlebars.registerHelpers(DateHelpers.class);
handlebars.registerHelper("json", Jackson2Helper.INSTANCE);
LOG.info(configureHandlebarsComplete);
promise.complete();
} catch(Exception ex) {
LOG.error(configureHandlebarsFail, ex);
promise.fail(ex);
}
return promise.future();
}
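/*
 * Illustrative only: a minimal sketch of rendering a page with the template engine and helpers
 * registered above. The template path and context keys are hypothetical examples.
 *
 *   JsonObject ctx = new JsonObject().put("pageTitle", "Smart Village");
 *   templateEngine.render(ctx, "templates/en-us/HomePage", res -> {
 *     if(res.succeeded()) {
 *       Buffer html = res.result();
 *       // use the rendered HTML, for example to index a SitePage in Solr or answer an HTTP request
 *     } else {
 *       LOG.error("Rendering the template failed. ", res.cause());
 *     }
 *   });
 */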
/**
**/
private Future<Void> configureWebClient() {
Promise<Void> promise = Promise.promise();
try {
Boolean sslVerify = config().getBoolean(ConfigKeys.SSL_VERIFY);
webClient = WebClient.create(vertx, new WebClientOptions().setVerifyHost(sslVerify).setTrustAll(!sslVerify));
promise.complete();
} catch(Exception ex) {
LOG.error("Unable to configure site context. ", ex);
promise.fail(ex);
}
return promise.future();
}
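/*
 * Illustrative only: a minimal sketch of using the webClient configured above, for example to query
 * the Solr search engine. The host, port, and path are hypothetical; the real values come from the
 * site configuration.
 *
 *   webClient.get(8983, "solr", "/solr/smartvillage/select?q=*:*&rows=0")
 *       .expect(ResponsePredicate.SC_OK)
 *       .send()
 *       .onSuccess(response -> LOG.info("Solr responded: {}", response.bodyAsJsonObject()))
 *       .onFailure(ex -> LOG.error("The Solr query failed. ", ex));
 */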
/**
*
* Val.ConnectionError.enUS:Could not open the database client connection.
* Val.ConnectionSuccess.enUS:The database client connection was successful.
*
* Val.InitError.enUS:Could not initialize the database tables.
* Val.InitSuccess.enUS:The database tables were created successfully.
*
* Configure shared database connections across the cluster for massive scaling of the application.
* Return a promise that configures a shared database client connection.
* Load the database configuration into a shared PgPool for a scalable, clustered datasource connection pool.
* Initialize the database tables if they have not already been created.
**/
private Future<Void> configureData() {
Promise<Void> promise = Promise.promise();
try {
PgConnectOptions pgOptions = new PgConnectOptions();
Integer jdbcMaxPoolSize = config().getInteger(ConfigKeys.JDBC_MAX_POOL_SIZE, 1);
pgOptions.setPort(config().getInteger(ConfigKeys.JDBC_PORT));
pgOptions.setHost(config().getString(ConfigKeys.JDBC_HOST));
pgOptions.setDatabase(config().getString(ConfigKeys.JDBC_DATABASE));
pgOptions.setUser(config().getString(ConfigKeys.JDBC_USERNAME));
pgOptions.setPassword(config().getString(ConfigKeys.JDBC_PASSWORD));
pgOptions.setIdleTimeout(config().getInteger(ConfigKeys.JDBC_MAX_IDLE_TIME, 24));
pgOptions.setIdleTimeoutUnit(TimeUnit.HOURS);
pgOptions.setConnectTimeout(config().getInteger(ConfigKeys.JDBC_CONNECT_TIMEOUT, 5000));
PoolOptions poolOptions = new PoolOptions();
poolOptions.setMaxSize(jdbcMaxPoolSize);
poolOptions.setMaxWaitQueueSize(config().getInteger(ConfigKeys.JDBC_MAX_WAIT_QUEUE_SIZE, 10));
pgPool = PgPool.pool(vertx, pgOptions, poolOptions);
LOG.info(configureDataInitSuccess);
promise.complete();
} catch (Exception ex) {
LOG.error(configureDataInitError, ex);
promise.fail(ex);
}
return promise.future();
}
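/*
 * Illustrative only: a minimal sketch of querying PostgreSQL through the pgPool configured above.
 * The table name is a hypothetical example.
 *
 *   pgPool.query("SELECT count(pk) FROM SitePage")
 *       .execute()
 *       .onSuccess(rows -> LOG.info("SitePage rows: {}", rows.iterator().next().getLong(0)))
 *       .onFailure(ex -> LOG.error("The database query failed. ", ex));
 */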
/**
* Val.Fail.enUS:Could not configure the shared worker executor.
* Val.Complete.enUS:The shared worker executor "{}" was configured successfully.
*
* Configure a shared worker executor for running blocking tasks in the background.
* Return a promise that configures the shared worker executor.
**/
private Future<Void> configureSharedWorkerExecutor() {
Promise<Void> promise = Promise.promise();
try {
String name = "WorkerVerticle-WorkerExecutor";
Integer workerPoolSize = System.getenv(ConfigKeys.WORKER_POOL_SIZE) == null ? 5 : Integer.parseInt(System.getenv(ConfigKeys.WORKER_POOL_SIZE));
Long vertxMaxWorkerExecuteTime = config().getLong(ConfigKeys.VERTX_MAX_WORKER_EXECUTE_TIME);
workerExecutor = vertx.createSharedWorkerExecutor(name, workerPoolSize, vertxMaxWorkerExecuteTime, TimeUnit.SECONDS);
LOG.info(configureSharedWorkerExecutorComplete, name);
promise.complete();
} catch (Exception ex) {
LOG.error(configureSharedWorkerExecutorFail, ex);
promise.fail(ex);
}
return promise.future();
}
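/*
 * Illustrative only: a minimal sketch of running a blocking task on the shared worker executor
 * configured above, so long-running work stays off the event loop.
 *
 *   workerExecutor.executeBlocking(blockingPromise -> {
 *     // long-running work, such as importing a large file or running a simulation step
 *     blockingPromise.complete();
 *   }).onSuccess(a -> LOG.info("The blocking task completed."))
 *     .onFailure(ex -> LOG.error("The blocking task failed. ", ex));
 */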
/**
* Configure sending email.
* Val.Complete.enUS:Configure sending email succeeded.
* Val.Fail.enUS:Configure sending email failed.
**/
private Future<Void> configureEmail() {
Promise<Void> promise = Promise.promise();
try {
String emailHost = config().getString(ConfigKeys.EMAIL_HOST);
if(StringUtils.isNotBlank(emailHost)) {
MailConfig mailConfig = new MailConfig();
mailConfig.setHostname(emailHost);
mailConfig.setPort(config().getInteger(ConfigKeys.EMAIL_PORT));
mailConfig.setSsl(config().getBoolean(ConfigKeys.EMAIL_SSL));
mailConfig.setUsername(config().getString(ConfigKeys.EMAIL_USERNAME));
mailConfig.setPassword(config().getString(ConfigKeys.EMAIL_PASSWORD));
MailClient.createShared(vertx, mailConfig);
LOG.info(configureEmailComplete);
promise.complete();
} else {
LOG.info(configureEmailComplete);
promise.complete();
}
} catch (Exception ex) {
LOG.error(configureEmailFail, ex);
promise.fail(ex);
}
return promise.future();
}
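/*
 * Illustrative only: a minimal sketch of sending a message with the shared MailClient created above
 * when an email host is configured. The addresses and subject are hypothetical examples.
 *
 *   MailMessage message = new MailMessage()
 *       .setFrom("no-reply@example.com")
 *       .setTo("admin@example.com")
 *       .setSubject("Smart Village import report")
 *       .setText("The data import completed.");
 *   MailClient.createShared(vertx, mailConfig).sendMail(message)
 *       .onSuccess(result -> LOG.info("The email was sent."))
 *       .onFailure(ex -> LOG.error("Sending the email failed. ", ex));
 */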
/**
* Val.Success.enUS:The Kafka producer was initialized successfully.
**/
public Future<KafkaProducer<String, String>> configureKafka() {
Promise<KafkaProducer<String, String>> promise = Promise.promise();
try {
if(config().getBoolean(ConfigKeys.ENABLE_KAFKA)) {
Map<String, String> kafkaConfig = new HashMap<>();
kafkaConfig.put("bootstrap.servers", config().getString(ConfigKeys.KAFKA_BROKERS));
kafkaConfig.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
kafkaConfig.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");
kafkaConfig.put("acks", "1");
kafkaConfig.put("security.protocol", "SSL");
kafkaConfig.put("ssl.keystore.type", config().getString(ConfigKeys.KAFKA_SSL_KEYSTORE_TYPE));
kafkaConfig.put("ssl.keystore.location", config().getString(ConfigKeys.KAFKA_SSL_KEYSTORE_LOCATION));
kafkaConfig.put("ssl.keystore.password", config().getString(ConfigKeys.KAFKA_SSL_KEYSTORE_PASSWORD));
kafkaConfig.put("ssl.truststore.type", config().getString(ConfigKeys.KAFKA_SSL_TRUSTSTORE_TYPE));
kafkaConfig.put("ssl.truststore.location", config().getString(ConfigKeys.KAFKA_SSL_TRUSTSTORE_LOCATION));
kafkaConfig.put("ssl.truststore.password", config().getString(ConfigKeys.KAFKA_SSL_TRUSTSTORE_PASSWORD));
kafkaProducer = KafkaProducer.createShared(vertx, MainVerticle.SITE_NAME, kafkaConfig);
LOG.info(configureKafkaSuccess);
promise.complete(kafkaProducer);
} else {
LOG.info(configureKafkaSuccess);
promise.complete(null);
}
} catch(Exception ex) {
LOG.error("Unable to configure site context. ", ex);
promise.fail(ex);
}
return promise.future();
}
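/*
 * Illustrative only: a minimal sketch of publishing a record with the kafkaProducer created above
 * when Kafka is enabled. The topic name and payload are hypothetical examples.
 *
 *   KafkaProducerRecord<String, String> record = KafkaProducerRecord.create(
 *       "smartvillage-sitepage", new JsonObject().put("pk", 1L).encode());
 *   kafkaProducer.send(record)
 *       .onSuccess(metadata -> LOG.info("Sent the record to partition {}. ", metadata.getPartition()))
 *       .onFailure(ex -> LOG.error("Sending the Kafka record failed. ", ex));
 */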
/**
* Val.Fail.enUS:The Camel Component was not configured properly.
* Val.Complete.enUS:The Camel Component was configured properly.
*/
public Future<Void> configureCamel() {
Promise<Void> promise = Promise.promise();
CamelIntegration.configureCamel(vertx, config()).onSuccess(a -> {
LOG.info(configureCamelComplete);
promise.complete();
}).onFailure(ex -> {
LOG.error(configureCamelFail, ex);
promise.fail(ex);
});
return promise.future();
}
/**
* Val.Scheduling.enUS:Scheduling the %s import at %s
* Val.Skip.enUS:Skip importing %s data.
* Val.Fail.enUS:Scheduling the import of %s data failed.
*/
private Future<Void> importTimer(String classSimpleName) {
Promise<Void> promise = Promise.promise();
if(config().getBoolean(String.format("%s_%s", ConfigKeys.ENABLE_IMPORT_DATA, classSimpleName), true)) {
// Load the import start time and period configuration.
String importStartTime = config().getString(String.format("%s_%s", ConfigKeys.IMPORT_DATA_START_TIME, classSimpleName));
String importPeriod = config().getString(String.format("%s_%s", ConfigKeys.IMPORT_DATA_PERIOD, classSimpleName));
// Get the duration of the import period.
// Calculate the next start time, or the one after that if the next start is less than a minute away,
// to give the following code enough time to complete its calculations and ensure the import starts correctly.
Duration nextStartDuration = null;
ZonedDateTime nextStartTime = null;
if(importPeriod != null) {
Duration duration = TimeTool.parseNextDuration(importPeriod);
if(importStartTime == null) {
nextStartTime = Optional.of(ZonedDateTime.now(ZoneId.of(config().getString(ConfigKeys.SITE_ZONE))))
.map(t -> Duration.between(Instant.now(), t).toMinutes() < 1L ? t.plus(duration) : t).get();
} else {
nextStartTime = TimeTool.parseNextZonedTime(importStartTime);
}
// Get the time now for the import start time zone.
ZonedDateTime now = ZonedDateTime.now(nextStartTime.getZone());
BigDecimal[] divideAndRemainder = BigDecimal.valueOf(Duration.between(now, nextStartTime).toMillis())
.divideAndRemainder(BigDecimal.valueOf(duration.toMillis()));
nextStartDuration = Duration.between(now, nextStartTime);
if(divideAndRemainder[0].compareTo(BigDecimal.ONE) >= 0) {
nextStartDuration = Duration.ofMillis(divideAndRemainder[1].longValueExact());
nextStartTime = now.plus(nextStartDuration);
}
LOG.info(String.format(importTimerScheduling, classSimpleName, nextStartTime.format(TIME_FORMAT)));
}
ZonedDateTime nextStartTime2 = nextStartTime;
if(importStartTime == null) {
try {
vertx.setTimer(1, a -> {
workerExecutor.executeBlocking(promise2 -> {
importDataClass(classSimpleName, null).onSuccess(b -> {
promise2.complete();
}).onFailure(ex -> {
promise2.fail(ex);
});
});
});
promise.complete();
} catch(Exception ex) {
LOG.error(String.format(importTimerFail, classSimpleName), ex);
promise.fail(ex);
}
} else {
try {
vertx.setTimer(nextStartDuration.toMillis(), a -> {
workerExecutor.executeBlocking(promise2 -> {
importDataClass(classSimpleName, nextStartTime2).onSuccess(b -> {
promise2.complete();
}).onFailure(ex -> {
promise2.fail(ex);
});
});
});
promise.complete();
} catch(Exception ex) {
LOG.error(String.format(importTimerFail, classSimpleName), ex);
promise.fail(ex);
}
}
} else {
LOG.info(String.format(importTimerSkip, classSimpleName));
promise.complete();
}
return promise.future();
}
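/*
 * Illustrative worked example of the scheduling above (hypothetical values, assuming
 * TimeTool.parseNextZonedTime returns the next future occurrence of the configured time):
 * with a configured start time of 04:00, an import period of one hour, and the current time 10:15,
 * nextStartTime is first 04:00 tomorrow, Duration.between(now, nextStartTime) is 17 hours 45 minutes,
 * and divideAndRemainder by the one hour period gives quotient 17 and remainder 45 minutes.
 * Because the quotient is at least 1, the timer is set to the remainder, 45 minutes from now,
 * so the import runs at 11:00 and stays aligned to the hourly period anchored at 04:00.
 */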
/**
* Import initial data
* Val.Skip.enUS:The data import is disabled.
**/
private Future<Void> importData() {
Promise<Void> promise = Promise.promise();
if(config().getBoolean(ConfigKeys.ENABLE_IMPORT_DATA)) {
importTimer(IotNode.CLASS_SIMPLE_NAME).onSuccess(a -> {
importTimer(SitePage.CLASS_SIMPLE_NAME).onSuccess(b -> {
importTimer(TrafficSimulation.CLASS_SIMPLE_NAME).onSuccess(c -> {
promise.complete();
}).onFailure(ex -> promise.fail(ex));
}).onFailure(ex -> promise.fail(ex));
}).onFailure(ex -> promise.fail(ex));
}
else {
LOG.info(importDataSkip);
promise.complete();
}
return promise.future();
}
/**
* Description: Import initial data
* Val.Complete.enUS:Configuring the import of %s data completed.
* Val.Fail.enUS:Configuring the import of %s data failed.
*/
private Future<Void> importDataClass(String classSimpleName, ZonedDateTime startDateTime) {
Promise<Void> promise = Promise.promise();
if(IotNode.CLASS_SIMPLE_NAME.equals(classSimpleName)) {
SiteRequestEnUS siteRequest = new SiteRequestEnUS();
siteRequest.setConfig(config());
siteRequest.setWebClient(webClient);
siteRequest.initDeepSiteRequestEnUS(siteRequest);
IotNodeReader reader = new IotNodeReader();
reader.setVertx(vertx);
reader.setWorkerExecutor(workerExecutor);
reader.setTemplateEngine(templateEngine);
reader.setHandlebars(handlebars);
reader.initDeepForClass(siteRequest);
reader.importData().onComplete(a -> {
String importPeriod = config().getString(String.format("%s_%s", ConfigKeys.IMPORT_DATA_PERIOD, classSimpleName));
if(importPeriod != null && startDateTime != null) {
Duration duration = TimeTool.parseNextDuration(importPeriod);
ZonedDateTime nextStartTime = startDateTime.plus(duration);
LOG.info(String.format(importTimerScheduling, classSimpleName, nextStartTime.format(TIME_FORMAT)));
Duration nextStartDuration = Duration.between(Instant.now(), nextStartTime);
vertx.setTimer(nextStartDuration.toMillis(), b -> {
workerExecutor.executeBlocking(promise2 -> {
importDataClass(classSimpleName, nextStartTime).onSuccess(c -> {
promise2.complete();
}).onFailure(ex -> {
promise2.fail(ex);
});
});
});
promise.complete();
} else {
promise.complete();
}
});
} else if(SitePage.CLASS_SIMPLE_NAME.equals(classSimpleName)) {
SiteRequestEnUS siteRequest = new SiteRequestEnUS();
siteRequest.setConfig(config());
siteRequest.setWebClient(webClient);
siteRequest.initDeepSiteRequestEnUS(siteRequest);
SitePageReader reader = new SitePageReader();
reader.setVertx(vertx);
reader.setWorkerExecutor(workerExecutor);
reader.setTemplateEngine(templateEngine);
reader.setHandlebars(handlebars);
reader.setKafkaProducer(kafkaProducer);
reader.initDeepForClass(siteRequest);
reader.importDataSitePages().onComplete(a -> {
String importPeriod = config().getString(String.format("%s_%s", ConfigKeys.IMPORT_DATA_PERIOD, classSimpleName));
if(importPeriod != null && startDateTime != null) {
Duration duration = TimeTool.parseNextDuration(importPeriod);
ZonedDateTime nextStartTime = startDateTime.plus(duration);
LOG.info(String.format(importTimerScheduling, classSimpleName, nextStartTime.format(TIME_FORMAT)));
Duration nextStartDuration = Duration.between(Instant.now(), nextStartTime);
vertx.setTimer(nextStartDuration.toMillis(), b -> {
workerExecutor.executeBlocking(promise2 -> {
importDataClass(classSimpleName, nextStartTime).onSuccess(c -> {
promise2.complete();
}).onFailure(ex -> {
promise2.fail(ex);
});
});
});
promise.complete();
} else {
promise.complete();
}
});
} else if(TrafficSimulation.CLASS_SIMPLE_NAME.equals(classSimpleName)) {
SiteRequestEnUS siteRequest = new SiteRequestEnUS();
siteRequest.setConfig(config());
siteRequest.setWebClient(webClient);
siteRequest.initDeepSiteRequestEnUS(siteRequest);
TrafficFcdReader reader = new TrafficFcdReader(vertx, workerExecutor, siteRequest, config());
reader.initDeepForClass(siteRequest);
reader.importFcd().onComplete(a -> {
String importPeriod = config().getString(String.format("%s_%s", ConfigKeys.IMPORT_DATA_PERIOD, classSimpleName));
if(importPeriod != null && startDateTime != null) {
Duration duration = TimeTool.parseNextDuration(importPeriod);
ZonedDateTime nextStartTime = startDateTime.plus(duration);
LOG.info(String.format(importTimerScheduling, classSimpleName, nextStartTime.format(TIME_FORMAT)));
Duration nextStartDuration = Duration.between(Instant.now(), nextStartTime);
vertx.setTimer(nextStartDuration.toMillis(), b -> {
workerExecutor.executeBlocking(promise2 -> {
importDataClass(classSimpleName, nextStartTime).onSuccess(c -> {
promise2.complete();
}).onFailure(ex -> {
promise2.fail(ex);
});
});
});
promise.complete();
} else {
promise.complete();
}
});
}
return promise.future();
}
/**
* Val.Complete.enUS:Refresh all data completed.
* Val.Started.enUS:Refresh all data started.
* Val.Fail.enUS:Refresh all data failed.
* Val.Skip.enUS:Refresh all data skipped.
**/
private Future<Void> refreshAllData() {
Promise<Void> promise = Promise.promise();
try {
if(config().getBoolean(ConfigKeys.ENABLE_REFRESH_DATA, false)) {
LOG.info(refreshAllDataStarted);
refreshData(SiteUser.CLASS_SIMPLE_NAME).onSuccess(q -> {
refreshData(SiteHtm.CLASS_SIMPLE_NAME).onSuccess(q1 -> {
refreshData(SitePage.CLASS_SIMPLE_NAME).onSuccess(q2 -> {
refreshData(MapResult.CLASS_SIMPLE_NAME).onSuccess(q4 -> {
refreshData(IotNode.CLASS_SIMPLE_NAME).onSuccess(q5 -> {
refreshData(BicycleStep.CLASS_SIMPLE_NAME).onSuccess(q6 -> {
refreshData(TrafficSimulation.CLASS_SIMPLE_NAME).onSuccess(q7 -> {
refreshData(PersonStep.CLASS_SIMPLE_NAME).onSuccess(q8 -> {
refreshData(ParkingAccess.CLASS_SIMPLE_NAME).onSuccess(q9 -> {
refreshData(SmartTrafficLight.CLASS_SIMPLE_NAME).onSuccess(q10 -> {
refreshData(SimulationReport.CLASS_SIMPLE_NAME).onSuccess(q11 -> {
refreshData(TimeStep.CLASS_SIMPLE_NAME).onSuccess(q12 -> {
refreshData(TrafficFlowObserved.CLASS_SIMPLE_NAME).onSuccess(q13 -> {
refreshData(CrowdFlowObserved.CLASS_SIMPLE_NAME).onSuccess(q14 -> {
refreshData(TrafficLight.CLASS_SIMPLE_NAME).onSuccess(q15 -> {
refreshData(TrafficLightStep.CLASS_SIMPLE_NAME).onSuccess(q16 -> {
refreshData(VehicleStep.CLASS_SIMPLE_NAME).onSuccess(q17 -> {
refreshData(IotNodeStep.CLASS_SIMPLE_NAME).onSuccess(q18 -> {
LOG.info(refreshAllDataComplete);
promise.complete();
}).onFailure(ex -> promise.fail(ex));
}).onFailure(ex -> promise.fail(ex));
}).onFailure(ex -> promise.fail(ex));
}).onFailure(ex -> promise.fail(ex));
}).onFailure(ex -> promise.fail(ex));
}).onFailure(ex -> promise.fail(ex));
}).onFailure(ex -> promise.fail(ex));
}).onFailure(ex -> promise.fail(ex));
}).onFailure(ex -> promise.fail(ex));
}).onFailure(ex -> promise.fail(ex));
}).onFailure(ex -> promise.fail(ex));
}).onFailure(ex -> promise.fail(ex));
}).onFailure(ex -> promise.fail(ex));
}).onFailure(ex -> promise.fail(ex));
}).onFailure(ex -> promise.fail(ex));
}).onFailure(ex -> promise.fail(ex));
}).onFailure(ex -> promise.fail(ex));
}).onFailure(ex -> {
LOG.error(refreshAllDataFail, ex);
promise.fail(ex);
});
} else {
LOG.info(refreshAllDataSkip);
promise.complete();
}
} catch(Exception ex) {
LOG.error(refreshAllDataFail, ex);
promise.fail(ex);
}
return promise.future();
}
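/*
 * Illustrative only: refreshing is gated twice, first by ENABLE_REFRESH_DATA above and then by a
 * per-table key built in refreshData below as String.format("%s_%s", ConfigKeys.ENABLE_REFRESH_DATA, tableName).
 * Assuming the ConfigKeys constant values mirror their names, refreshing only IotNode records would
 * require both ENABLE_REFRESH_DATA and ENABLE_REFRESH_DATA_IotNode to be true.
 */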
/**
* Sync %s data from the database to Solr.
* Refresh %s data from the database to Solr.
* Val.Complete.enUS:%s refresh completed.
* Val.Started.enUS:%s data sync started.
* Val.Skip.enUS:%s refresh skipped.
* Val.Fail.enUS:%s refresh failed.
* Val.CounterResetFail.enUS:%s data sync failed to reset counter.
**/
private Future<Void> refreshData(String tableName) {
Promise<Void> promise = Promise.promise();
if(config().getBoolean(String.format("%s_%s", ConfigKeys.ENABLE_REFRESH_DATA, tableName), false)) {
vertx.setTimer(10000, timer -> {
try {
LOG.info(String.format(refreshDataStarted, tableName));
pgPool.withTransaction(sqlConnection -> {
Promise<Void> promise1 = Promise.promise();
sqlConnection.query(String.format("SELECT count(pk) FROM %s", tableName)).execute().onSuccess(countRowSet -> {
try {
Optional<Long> rowCountOptional = Optional.ofNullable(countRowSet.iterator().next()).map(row -> row.getLong(0));
if(rowCountOptional.isPresent()) {
Integer apiCounterResume = config().getInteger(ConfigKeys.API_COUNTER_RESUME);
Integer apiCounterFetch = config().getInteger(ConfigKeys.API_COUNTER_FETCH);
ApiCounter apiCounter = new ApiCounter();
SiteRequestEnUS siteRequest = new SiteRequestEnUS();
siteRequest.setConfig(config());
siteRequest.initDeepSiteRequestEnUS(siteRequest);
ApiRequest apiRequest = new ApiRequest();
apiRequest.setRows(apiCounterFetch.longValue());
apiRequest.setNumFound(rowCountOptional.get());
apiRequest.setNumPATCH(apiCounter.getQueueNum());
apiRequest.setCreated(ZonedDateTime.now(ZoneId.of(config().getString(ConfigKeys.SITE_ZONE))));
apiRequest.initDeepApiRequest(siteRequest);
vertx.eventBus().publish(String.format("websocket%s", tableName), JsonObject.mapFrom(apiRequest));
sqlConnection.prepare(String.format("SELECT pk FROM %s", tableName)).onSuccess(preparedStatement -> {
apiCounter.setQueueNum(0L);
apiCounter.setTotalNum(0L);
try {
RowStream<Row> stream = preparedStatement.createStream(apiCounterFetch);
stream.pause();
stream.fetch(apiCounterFetch);
stream.exceptionHandler(ex -> {
LOG.error(String.format(refreshDataFail, tableName), new RuntimeException(ex));
promise1.fail(ex);
});
stream.endHandler(v -> {
LOG.info(String.format(refreshDataComplete, tableName));
promise1.complete();
});
stream.handler(row -> {
apiCounter.incrementQueueNum();
try {
vertx.eventBus().request(
String.format("smartabyar-smartvillage-enUS-%s", tableName)
, new JsonObject().put(
"context"
, new JsonObject().put(
"params"
, new JsonObject()
.put("body", new JsonObject().put("pk", row.getLong(0).toString()))
.put("path", new JsonObject())
.put("cookie", new JsonObject())
.put("query", new JsonObject().put("q", "*:*").put("fq", new JsonArray().add("pk:" + row.getLong(0))).put("var", new JsonArray().add("refresh:false")))
)
)
, new DeliveryOptions().addHeader("action", String.format("patch%sFuture", tableName))).onSuccess(a -> {
apiCounter.incrementTotalNum();
apiCounter.decrementQueueNum();
if(apiCounter.getQueueNum().compareTo(apiCounterResume.longValue()) == 0) {
stream.fetch(apiCounterFetch);
apiRequest.setNumPATCH(apiCounter.getTotalNum());
apiRequest.setTimeRemaining(apiRequest.calculateTimeRemaining());
vertx.eventBus().publish(String.format("websocket%s", tableName), JsonObject.mapFrom(apiRequest));
}
}).onFailure(ex -> promise1.fail(ex));
} catch (Exception ex) {
LOG.error(String.format(refreshDataFail, tableName), ex);
promise1.fail(ex);
}
});
} catch (Exception ex) {
LOG.error(String.format(refreshDataFail, tableName), ex);
promise1.fail(ex);
}
}).onFailure(ex -> promise1.fail(ex));
} else {
promise1.complete();
}
} catch (Exception ex) {
LOG.error(String.format(refreshDataFail, tableName), ex);
promise1.fail(ex);
}
}).onFailure(ex -> promise1.fail(ex));
return promise1.future();
}).onSuccess(a -> {
promise.complete();
}).onFailure(ex -> promise.fail(ex));
} catch (Exception ex) {
LOG.error(String.format(refreshDataFail, tableName), ex);
promise.fail(ex);
}
});
} else {
LOG.info(String.format(refreshDataSkip, tableName));
promise.complete();
}
return promise.future();
}
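/*
 * Illustrative only: for a table named IotNode and a row with pk 123, the event bus request built
 * above is addressed to "smartabyar-smartvillage-enUS-IotNode" with the action header
 * "patchIotNodeFuture" and a body like:
 *
 *   {
 *     "context": {
 *       "params": {
 *         "body": { "pk": "123" },
 *         "path": {},
 *         "cookie": {},
 *         "query": { "q": "*:*", "fq": ["pk:123"], "var": ["refresh:false"] }
 *       }
 *     }
 *   }
 *
 * A matching consumer, registered elsewhere (for example by the API verticles), is expected to PATCH
 * that record from the database into Solr and reply when it finishes.
 */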
}