package com.turbospaces.spark;
import java.io.IOException;
import java.util.Objects;
import org.apache.spark.sql.catalyst.InternalRow;
import org.apache.spark.sql.connector.read.PartitionReader;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.cloud.service.ServiceInfo;
import com.turbospaces.boot.MockCloud;
import com.turbospaces.cfg.ApplicationConfig;
import com.turbospaces.cfg.ApplicationProperties;
import com.turbospaces.jdbc.DatasourceCreator;
import com.turbospaces.jdbc.JdbcPoolServiceConfig;
import com.zaxxer.hikari.HikariDataSource;
import io.ebean.Database;

/**
 * Base Spark {@link PartitionReader} that reads rows through an Ebean {@link Database}
 * backed by a single-connection Hikari pool created from the supplied {@link ServiceInfo}.
 */
public abstract class AbstractEbeanPartitionReader implements PartitionReader<InternalRow> {
    protected final Logger logger = LoggerFactory.getLogger(getClass());
    protected final HikariDataSource db;
    protected ApplicationProperties props;
    protected Database ebean;

    public AbstractEbeanPartitionReader(ServiceInfo si) throws Exception {
        ApplicationConfig cfg = MockCloud.newMock().build();
        props = new ApplicationProperties(cfg);

        // each reader handles a single partition, so one pooled connection is enough
        JdbcPoolServiceConfig jdbcCfg = new JdbcPoolServiceConfig(props, false);
        jdbcCfg.setMaxPoolSize(1);

        DatasourceCreator creator = new DatasourceCreator();
        db = creator.create(si, jdbcCfg);
    }

    @Override
    public void close() throws IOException {
        try {
            // shut down the Ebean database (if a subclass created one) before the pool
            if (Objects.nonNull(ebean)) {
                ebean.shutdown();
            }
        } finally {
            if (Objects.nonNull(db)) {
                db.close();
            }
        }
    }
}
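
/*
 * A minimal sketch (not part of the original source) of a concrete reader built on the
 * base class above. The "account" table, its "id"/"name" columns, and the class name are
 * illustrative assumptions; the Ebean calls (DatabaseConfig, DatabaseFactory.create,
 * sqlQuery, SqlRow) and the Spark row classes (GenericInternalRow, UTF8String) are standard
 * public API. A real reader would typically stream only its partition's slice of the data
 * rather than materialize the whole result with findList().
 */
class SampleAccountPartitionReader extends AbstractEbeanPartitionReader {
    private final java.util.Iterator<io.ebean.SqlRow> rows;
    private io.ebean.SqlRow current;

    SampleAccountPartitionReader(ServiceInfo si) throws Exception {
        super(si);

        // build an Ebean Database on top of the Hikari pool created by the parent class
        io.ebean.config.DatabaseConfig ebeanCfg = new io.ebean.config.DatabaseConfig();
        ebeanCfg.setName("spark-partition-reader");
        ebeanCfg.setDefaultServer(false);
        ebeanCfg.setRegister(false);
        ebeanCfg.setDataSource(db);
        ebean = io.ebean.DatabaseFactory.create(ebeanCfg);

        // hypothetical query; the table and columns are assumptions for this sketch
        rows = ebean.sqlQuery("select id, name from account").findList().iterator();
    }

    @Override
    public boolean next() {
        boolean hasNext = rows.hasNext();
        if (hasNext) {
            current = rows.next();
        }
        return hasNext;
    }

    @Override
    public InternalRow get() {
        // map the current SqlRow onto Spark's internal row representation
        return new org.apache.spark.sql.catalyst.expressions.GenericInternalRow(new Object[] {
                current.getLong("id"),
                org.apache.spark.unsafe.types.UTF8String.fromString(current.getString("name")) });
    }
}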