Skip to content

Commit

Permalink
Basic class structure for the data exporters, both continuous and integrated.
Browse files Browse the repository at this point in the history
  • Loading branch information
dbeaudoinfortin committed Jun 13, 2024
1 parent e65898b commit 67c0e48
Show file tree
Hide file tree
Showing 32 changed files with 627 additions and 168 deletions.
2 changes: 1 addition & 1 deletion pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@
<modelVersion>4.0.0</modelVersion>
<groupId>NAPSData</groupId>
<artifactId>naps_data</artifactId>
<version>0.0.6</version>
<version>0.0.7</version>
<name>NAPS Data Downloader</name>
<build>
<finalName>${project.artifactId}</finalName>
Expand Down
2 changes: 1 addition & 1 deletion src/main/java/com/dbf/excel/OldBIFFExcelSheet.java
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,7 @@
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.dbf.excel.Records.OldDimensionsRecord;
import com.dbf.excel.records.OldDimensionsRecord;

public class OldBIFFExcelSheet extends RawDataExcelSheet {

Expand Down
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
package com.dbf.excel.Records;
package com.dbf.excel.records;

import java.util.Map;
import java.util.function.Supplier;
Expand Down
117 changes: 117 additions & 0 deletions src/main/java/com/dbf/naps/data/db/DBOptions.java
Original file line number Diff line number Diff line change
@@ -0,0 +1,117 @@
package com.dbf.naps.data.db;

import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.DefaultParser;
import org.apache.commons.cli.ParseException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.dbf.naps.data.BaseOptions;

/**
 * Command-line options for connecting to the PostgreSQL database.
 * Parses host, port, database name, user, and password from the provided
 * arguments, falling back to sensible local-development defaults.
 */
public class DBOptions extends BaseOptions {

	private static final Logger log = LoggerFactory.getLogger(DBOptions.class);

	//Connection defaults; each may be overridden via the CLI options registered below.
	private String dbHost = "localhost";
	private int dbPort = 5432;
	private String dbName = "naps";
	private String dbUser = "postgres";
	private String dbPass = "password";

	static {
		getOptions().addOption("dbh","dbHost", true, "Hostname for the PostgreSQL database. Default: localhost");
		getOptions().addOption("dbt","dbPort", true, "Port for the PostgreSQL database. Default: 5432");
		getOptions().addOption("dbn","dbName", true, "Database name for the PostgreSQL database. Default: naps");
		getOptions().addOption("dbu","dbUser", true, "Database user name for the PostgreSQL database. Default: postgres");
		getOptions().addOption("dbp","dbPass", true, "Database password for the PostgreSQL database. Default: password");
	}

	/**
	 * Parses database connection settings from the command-line arguments.
	 *
	 * @param args raw command-line arguments
	 * @throws IllegalArgumentException if the arguments cannot be parsed or a value is invalid
	 */
	public DBOptions(String[] args) throws IllegalArgumentException {
		super(args);
		loadFromArgs(args);
	}

	private void loadFromArgs(String[] args) throws IllegalArgumentException {
		CommandLine cmd = null;
		try {
			cmd = (new DefaultParser()).parse(getOptions(), args);
		}
		catch(ParseException e) {
			//Wrap so callers only need to handle IllegalArgumentException, preserving the cause
			throw new IllegalArgumentException(e);
		}

		loadDBHost(cmd);
		loadDBPort(cmd);
		loadDBName(cmd);
		loadDBUser(cmd);
		loadDBPass(cmd);
	}

	//Parses and range-checks the port; non-numeric or out-of-range values are rejected.
	private void loadDBPort(CommandLine cmd) {
		if(cmd.hasOption("dbPort")) {
			try {
				dbPort = Integer.parseInt(cmd.getOptionValue("dbPort"));
			} catch (NumberFormatException e) {
				//Surface the documented exception type rather than a raw NumberFormatException
				throw new IllegalArgumentException("Invalid DB port number: " + cmd.getOptionValue("dbPort"), e);
			}
			if (dbPort < 0 || dbPort > 65535) {
				throw new IllegalArgumentException("DB port number: " + dbPort);
			}
			log.info("Using DB port number: {}", dbPort);
		} else {
			log.info("Using default DB port number: {}", dbPort);
		}
	}

	private void loadDBHost(CommandLine cmd) {
		if(cmd.hasOption("dbHost")) {
			dbHost = cmd.getOptionValue("dbHost");
			log.info("Using DB hostname: {}", dbHost);
		} else {
			log.info("Using default DB hostname: {}", dbHost);
		}
	}

	private void loadDBUser(CommandLine cmd) {
		if(cmd.hasOption("dbUser")) {
			dbUser = cmd.getOptionValue("dbUser");
			log.info("Using DB user name: {}", dbUser);
		} else {
			log.info("Using default DB user name: {}", dbUser);
		}
	}

	//Records whether a password was supplied without ever logging the secret itself.
	private void loadDBPass(CommandLine cmd) {
		if(cmd.hasOption("dbPass")) {
			dbPass = cmd.getOptionValue("dbPass");
			log.info("Using DB password: (hidden)");
		} else {
			log.info("Using default DB password: (hidden)");
		}
	}

	private void loadDBName(CommandLine cmd) {
		if(cmd.hasOption("dbName")) {
			dbName = cmd.getOptionValue("dbName");
			log.info("Using DB name: {}", dbName);
		} else {
			log.info("Using default DB name: {}", dbName);
		}
	}

	public String getDbHost() {
		return dbHost;
	}

	public int getDbPort() {
		return dbPort;
	}

	public String getDbName() {
		return dbName;
	}

	public String getDbUser() {
		return dbUser;
	}

	public String getDbPass() {
		return dbPass;
	}
}
76 changes: 76 additions & 0 deletions src/main/java/com/dbf/naps/data/db/NAPSDBAction.java
Original file line number Diff line number Diff line change
@@ -0,0 +1,76 @@
package com.dbf.naps.data.db;

import java.io.IOException;
import java.io.Reader;
import java.sql.Connection;
import java.sql.SQLException;
import java.util.List;

import org.apache.ibatis.io.Resources;
import org.apache.ibatis.jdbc.ScriptRunner;
import org.apache.ibatis.mapping.Environment;
import org.apache.ibatis.session.Configuration;
import org.apache.ibatis.session.SqlSessionFactory;
import org.apache.ibatis.session.SqlSessionFactoryBuilder;
import org.apache.ibatis.transaction.jdbc.JdbcTransactionFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.dbf.naps.data.NAPSActionBase;
import com.dbf.naps.data.db.mappers.DataMapper;
import com.zaxxer.hikari.HikariDataSource;

/**
 * Base class for actions that require database access. Configures a HikariCP
 * connection pool and a MyBatis {@link SqlSessionFactory}, registers the common
 * {@link DataMapper} plus any subclass-supplied mappers, and applies the bundled
 * schema script on startup.
 *
 * @param <O> the options type, which must expose the DB connection settings
 */
public abstract class NAPSDBAction<O extends DBOptions> extends NAPSActionBase<O> {

	private static final Logger log = LoggerFactory.getLogger(NAPSDBAction.class);

	private HikariDataSource dbDataSource;
	private SqlSessionFactory sqlSessionFactory;

	public NAPSDBAction(String[] args) {
		super(args);
	}

	/**
	 * Initializes the database layer; any failure is logged and rethrown as a
	 * RuntimeException with the original cause preserved.
	 */
	protected void run() {
		try
		{
			initDB();
		} catch (Throwable t) {
			log.error("Unexpected failure initializing the DB.", t);
			throw new RuntimeException(t);
		}
	}

	private void initDB() throws IOException, SQLException {
		dbDataSource = new HikariDataSource();
		dbDataSource.setUsername(getOptions().getDbUser());
		dbDataSource.setPassword(getOptions().getDbPass());
		//NOTE(review): dbName is applied as the schema while the JDBC URL carries no database path — confirm this is intended
		dbDataSource.setSchema(getOptions().getDbName());
		dbDataSource.setDriverClassName("org.postgresql.Driver");
		//One extra connection beyond the worker threads for housekeeping work
		dbDataSource.setMaximumPoolSize(getOptions().getThreadCount() + 1);
		dbDataSource.setMinimumIdle(getOptions().getThreadCount());
		dbDataSource.setJdbcUrl("jdbc:postgresql://" + getOptions().getDbHost() + ":" + getOptions().getDbPort() + "/");
		dbDataSource.setAutoCommit(true);

		JdbcTransactionFactory transactionFactory = new JdbcTransactionFactory();
		Environment environment = new Environment("local", transactionFactory, dbDataSource);

		Configuration configuration = new Configuration(environment);
		configuration.addMapper(DataMapper.class); //Common mapper shared by all DB actions

		for(Class<?> clazz : getDBMappers()) {
			configuration.addMapper(clazz);
		}

		sqlSessionFactory = new SqlSessionFactoryBuilder().build(configuration);

		//Close both the script reader and the pooled connection; previously the
		//connection handed to ScriptRunner was never returned to the pool.
		try (Reader reader = Resources.getResourceAsReader(NAPSDBAction.class.getClassLoader(),"schema/schema.sql");
				Connection connection = dbDataSource.getConnection()) {
			ScriptRunner scriptRunner = new ScriptRunner(connection);
			scriptRunner.runScript(reader);
		}
	}

	/**
	 * @return the additional MyBatis mapper interfaces this action requires
	 */
	protected abstract List<Class<?>> getDBMappers();

	public SqlSessionFactory getSqlSessionFactory() {
		return sqlSessionFactory;
	}
}
Original file line number Diff line number Diff line change
@@ -1,11 +1,13 @@
package com.dbf.naps.data.loader.continuous;
package com.dbf.naps.data.db.mappers;

import java.math.BigDecimal;
import java.util.Date;
import java.util.List;

import org.apache.ibatis.annotations.Insert;

import com.dbf.naps.data.records.ContinuousDataRecord;

public interface ContinuousDataMapper {

@Insert("INSERT into naps.continuous_data (site_id, pollutant_id, method_id, date_time, year, month, day, hour, day_of_week, data)"
Expand Down
Original file line number Diff line number Diff line change
@@ -1,12 +1,12 @@
package com.dbf.naps.data.loader;
package com.dbf.naps.data.db.mappers;

import java.math.BigDecimal;
import org.apache.ibatis.annotations.Insert;
import org.apache.ibatis.annotations.Options;
import org.apache.ibatis.annotations.Select;

import com.dbf.naps.data.loader.records.SampleRecord;
import com.dbf.naps.data.loader.records.SiteRecord;
import com.dbf.naps.data.records.SampleRecord;
import com.dbf.naps.data.records.SiteRecord;

public interface DataMapper {

Expand Down
Original file line number Diff line number Diff line change
@@ -1,9 +1,11 @@
package com.dbf.naps.data.loader.integrated;
package com.dbf.naps.data.db.mappers;

import java.util.List;

import org.apache.ibatis.annotations.Insert;

import com.dbf.naps.data.records.IntegratedDataRecord;

public interface IntegratedDataMapper {

@Insert("<script>"
Expand Down
Loading

0 comments on commit 67c0e48

Please sign in to comment.