Skip to content

Commit

Permalink
feat: add export_handlers module and enhance import/export functionality with path handling
Browse files Browse the repository at this point in the history
  • Loading branch information
Kremilly committed Nov 22, 2024
1 parent 08ecc48 commit 96012b8
Show file tree
Hide file tree
Showing 7 changed files with 207 additions and 137 deletions.
4 changes: 2 additions & 2 deletions src/dump_sync.rs
Original file line number Diff line number Diff line change
Expand Up @@ -38,7 +38,7 @@ impl DumpSync {
.expect("Invalid port");

UI::section_header("Importing dump to server", "info");
Dump::new(&host, port, &user, &password, &dbname, &backup_path, None).import();
Dump::new(&host, port, &user, &password, &dbname, &backup_path, None, &backup_path).import();
}

fn export(&self, options: ExportOptions) {
Expand Down Expand Up @@ -67,7 +67,7 @@ impl DumpSync {

UI::label("Press CTRL+C to exit the tool", "normal");
UI::section_header("Dumping the database", "info");
Dump::new(&host, port, &user, &password, &dbname, &backup_path, Some(interval)).export();
Dump::new(&host, port, &user, &password, &dbname, &backup_path, Some(interval), &backup_path).export();
}

pub fn init(&self) {
Expand Down
24 changes: 15 additions & 9 deletions src/engine/dump.rs
Original file line number Diff line number Diff line change
@@ -1,17 +1,17 @@
use std::{
thread,
process,
thread,
process,
time::Duration,

sync::{
Arc,

atomic::{
Ordering,
AtomicBool,
AtomicUsize,
},
},
Ordering
}
},
};

use crate::{
Expand All @@ -30,6 +30,7 @@ use crate::{

pub struct Dump {
port: u64,
path: String,
host: String,
user: String,
interval: u64,
Expand All @@ -50,15 +51,17 @@ impl Dump {
dbname: &str,
backup_path: &str,
interval: Option<u64>,
path: &str,
) -> Self {
Self {
host: host.to_string(),
port,
host: host.to_string(),
user: user.to_string(),
password: password.to_string(),
dbname: dbname.to_string(),
interval: interval.unwrap_or(3600),
password: password.to_string(),
dump_file_path: backup_path.to_string(),
interval: interval.unwrap_or(3600),
path: path.to_string(),
}
}

Expand Down Expand Up @@ -102,6 +105,7 @@ impl Dump {
let password_clone = self.password.clone();
let dbname_clone = self.dbname.clone();
let interval_clone = self.interval;
let path_clone = self.path.clone();

ctrlc::set_handler(move || {
running.store(false, Ordering::SeqCst);
Expand All @@ -114,6 +118,7 @@ impl Dump {
dbname: dbname_clone.clone(),
interval: interval_clone,
dump_file_path: dump_file_path_clone.clone(),
path: path_clone.clone(),
};

let dump_count = DUMP_COUNT.load(Ordering::SeqCst);
Expand All @@ -136,6 +141,7 @@ impl Dump {
&self.password,
&self.dbname,
&self.dump_file_path,
&self.path,
).dump().expect("Failed to import dump");
}

Expand Down
144 changes: 23 additions & 121 deletions src/engine/export.rs
Original file line number Diff line number Diff line change
@@ -1,53 +1,24 @@
use std::{
fs::File,
error::Error,

io::{
self,
Write,
BufWriter
},
};

use flate2::{
Compression,
write::GzEncoder
};

use mysql::{
*,
Row,
prelude::*
};

use crate::{
ui::success_alerts::SuccessAlerts,

utils::{
date::Date,
file::FileUtils
},
utils::file::FileUtils,
ui::success_alerts::SuccessAlerts,
helpers::export_handlers::ExportHandlers,

engine::{
configs::Configs,
connection::Connection
},
};

/// Output sink for the SQL dump: either a gzip-compressed stream or a plain
/// file, both buffered so each `writeln!` does not hit the OS directly.
enum Writer {
    // Gzip-compressed output, chosen when the `compress_data` export setting is on.
    Compressed(BufWriter<GzEncoder<File>>),
    // Plain buffered file output.
    Uncompressed(BufWriter<File>),
}

impl Writer {
    /// Borrow the underlying sink as `&mut dyn Write` so callers can emit
    /// SQL text without knowing whether compression is enabled.
    fn as_write(&mut self) -> &mut dyn Write {
        match self {
            // Both arms coerce the concrete BufWriter into the trait object.
            Writer::Compressed(w) => w,
            Writer::Uncompressed(w) => w,
        }
    }
}

pub struct Export {
pub host: String,
pub port: u16,
Expand All @@ -58,8 +29,15 @@ pub struct Export {
}

impl Export {

pub fn new(host: &str, port: u16, user: &str, password: &str, dbname: &str, dump_file_path: &str) -> Self {

pub fn new(
host: &str,
port: u16,
user: &str,
password: &str,
dbname: &str,
dump_file_path: &str
) -> Self {
Self {
host: host.to_string(),
port,
Expand All @@ -70,85 +48,6 @@ impl Export {
}
}

/// Build the dump sink for `file`: a gzip-compressed writer when
/// `compress_data` is set, otherwise a plain buffered writer.
fn create_writer(&self, file: File, compress_data: bool) -> Writer {
    match compress_data {
        true => Writer::Compressed(BufWriter::new(GzEncoder::new(file, Compression::default()))),
        false => Writer::Uncompressed(BufWriter::new(file)),
    }
}

/// Write the banner comment at the top of the dump: tool name/version,
/// database name, and export timestamp, followed by a separator line.
fn comments_header(&self, writer: &mut dyn Write) -> Result<(), Box<dyn Error>> {
    let tool = env!("CARGO_PKG_NAME");
    let version = env!("CARGO_PKG_VERSION");

    write!(writer, "-- Exporting using {} v.{}\n", tool, version)?;
    write!(writer, "-- Database backup: {}\n", self.dbname)?;
    write!(writer, "-- Export date and time: {}\n", Date::timestamp())?;
    write!(writer, "-- ---------------------------------------------------\n\n")?;

    Ok(())
}

/// Emit `CREATE DATABASE IF NOT EXISTS` + `USE` statements when the
/// `database_if_not_exists` export setting is enabled; otherwise write nothing.
fn write_create_new_database(&self, writer: &mut dyn Write) -> Result<(), Box<dyn Error>> {
    // Guard clause: setting disabled means the dump assumes an existing database.
    if !Configs.boolean("exports", "database_if_not_exists", true) {
        return Ok(());
    }

    writeln!(writer, "CREATE DATABASE IF NOT EXISTS `{}`;", self.dbname)?;
    writeln!(writer, "USE `{}`;", self.dbname)?;
    writeln!(writer, "-- ---------------------------------------------------\n")?;

    Ok(())
}

/// Emit INSERT statements for every row of `table`, honouring the
/// `dump_data` and `insert_ignore_into` export settings. Writes a marker
/// comment instead when the table is empty; writes nothing when
/// `dump_data` is off.
fn write_inserts_for_table(&self, table: &str, conn: &mut PooledConn, writer: &mut dyn Write) -> Result<(), Box<dyn Error>> {
    let dump_data = Configs.boolean("exports", "dump_data", true);
    let insert_ignore_into = Configs.boolean("exports", "insert_ignore_into", false);

    if dump_data {
        let rows: Vec<Row> = conn.query(format!("SELECT * FROM `{}`", table))?;

        if rows.is_empty() {
            writeln!(writer, "-- Table `{}` contains no data.", table)?;
        } else {
            for row in rows {
                // `row` is owned by the loop, so the previous `row.clone()`
                // before `unwrap()` was a needless full copy.
                let values: Vec<String> = row.unwrap().into_iter().map(|value| match value {
                    Value::NULL => "NULL".to_string(),
                    // Escape backslashes and single quotes so string data
                    // cannot terminate the SQL literal early — unescaped
                    // quotes previously produced invalid dump files.
                    Value::Bytes(bytes) => {
                        let text = String::from_utf8_lossy(&bytes)
                            .replace('\\', "\\\\")
                            .replace('\'', "''");
                        format!("'{}'", text)
                    },
                    Value::Int(int) => int.to_string(),
                    Value::UInt(uint) => uint.to_string(),
                    Value::Float(float) => float.to_string(),
                    // Other mysql value kinds (dates, decimals, …) are not
                    // handled here; they fall back to NULL as before.
                    _ => "NULL".to_string(),
                }).collect();

                let line = if insert_ignore_into {
                    format!("INSERT IGNORE INTO `{}` VALUES ({});", table, values.join(", "))
                } else {
                    format!("INSERT INTO `{}` VALUES ({});", table, values.join(", "))
                };

                writeln!(writer, "{}", line)?;
            }
        }
    }

    Ok(())
}

/// Write the structure of `table`: a header comment, an optional
/// `DROP TABLE IF EXISTS` (per the `drop_table_if_exists` setting), and the
/// server-reported `SHOW CREATE TABLE` statement.
///
/// Returns an error (instead of panicking, as the previous
/// `unwrap`/`expect` did) when the server yields no row or an unexpected
/// column layout.
fn write_structure_for_table(&self, table: &str, conn: &mut PooledConn, writer: &mut dyn Write) -> Result<(), Box<dyn Error>> {
    let drop_table_if_exists = Configs.boolean("exports", "drop_table_if_exists", false);

    writeln!(writer, "-- Exporting the table: `{}`", table)?;

    if drop_table_if_exists {
        writeln!(writer, "DROP TABLE IF EXISTS `{}`;", table)?;
    }

    // SHOW CREATE TABLE returns (table_name, create_statement); column 1 is
    // the CREATE TABLE text. Surface descriptive errors on malformed results.
    let row: Row = conn.query_first(format!("SHOW CREATE TABLE `{}`", table))?
        .ok_or_else(|| format!("SHOW CREATE TABLE `{}` returned no rows", table))?;
    let create_table: String = row.get(1)
        .ok_or_else(|| format!("missing CREATE TABLE column for `{}`", table))?;
    writeln!(writer, "{};\n", create_table)?;

    Ok(())
}

pub fn dump(&self) -> Result<(), Box<dyn Error>> {
let compress_data = Configs.boolean("exports", "compress_data", false);

Expand All @@ -158,6 +57,11 @@ impl Export {
self.dump_file_path.clone()
};

let export_handlers = ExportHandlers::new(
File::create(dump_file_path.clone())?,
&self.dbname
);

let pool = Connection {
host: self.host.clone(),
port: self.port,
Expand All @@ -166,14 +70,13 @@ impl Export {
dbname: Some(self.dbname.clone()),
}.create_pool()?;

FileUtils::create_path(&dump_file_path);
FileUtils::create_path(&dump_file_path.clone());

let mut conn = pool.get_conn()?;
let file = File::create(&dump_file_path)?;
let mut writer = self.create_writer(file, compress_data);
let mut writer = export_handlers.create_writer()?;

self.comments_header(writer.as_write())?;
self.write_create_new_database(writer.as_write())?;
export_handlers.comments_header(writer.as_write())?;
export_handlers.write_create_new_database(writer.as_write())?;

let tables: Vec<String> = conn.query("SHOW TABLES")?;
let ignore_tables = Configs.list("exports", "ignore_tables").unwrap_or_default();
Expand All @@ -184,13 +87,12 @@ impl Export {
continue;
}

self.write_structure_for_table(&table, &mut conn, writer.as_write())?;
self.write_inserts_for_table(&table, &mut conn, writer.as_write())?;
export_handlers.write_structure_for_table(&table, &mut conn, writer.as_write())?;
export_handlers.write_inserts_for_table(&table, &mut conn, writer.as_write())?;
writeln!(writer.as_write(), "-- End of table `{}`", table)?;
}

SuccessAlerts::dump(&dump_file_path);
io::stdout().flush().unwrap();

Ok(())
}
Expand Down
26 changes: 23 additions & 3 deletions src/engine/import.rs
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,11 @@ use std::{
fs::File,
error::Error,

path::{
Path,
PathBuf
},

io::{
Read,
BufReader,
Expand All @@ -31,22 +36,35 @@ pub struct Import {
user: String,
password: String,
dbname: String,
path: String,
dump_file_path: String,
}

impl Import {

pub fn new(host: &str, port: u16, user: &str, password: &str, dbname: &str, dump_file_path: &str) -> Self {
pub fn new(host: &str, port: u16, user: &str, password: &str, dbname: &str, dump_file_path: &str, path: &str) -> Self {
Self {
host: host.to_string(),
port,
user: user.to_string(),
password: password.to_string(),
dbname: dbname.to_string(),
path: path.to_string(),
dump_file_path: dump_file_path.to_string(),
}
}

/// Resolve the dump file location: an absolute `dump_file_path` is used
/// as-is; a relative one is resolved against the configured base
/// directory `self.path`.
fn complete_path(&self) -> Result<PathBuf, Box<dyn std::error::Error>> {
    let dump_file = Path::new(&self.dump_file_path);

    if dump_file.is_absolute() {
        Ok(dump_file.to_path_buf())
    } else {
        // Bug fix: the base directory must be the prefix. The previous code
        // did `dump_file_path.join(&self.path)`, which appended the base
        // directory onto the file name (and, since `Path::join` with an
        // absolute argument replaces the whole path, silently discarded the
        // file name whenever `path` was absolute).
        Ok(Path::new(&self.path).join(dump_file))
    }
}

pub fn dump(&self) -> Result<(), Box<dyn Error>> {
let pool = Connection {
host: self.host.clone(),
Expand All @@ -59,8 +77,10 @@ impl Import {
let mut conn = pool.get_conn()?;
let is_compressed = self.dump_file_path.ends_with(".sql.gz");

let file = self.complete_path()?;

let dump_content = if is_compressed {
let file = File::open(&self.dump_file_path)?;
let file = File::open(file)?;

let mut decoder = GzDecoder::new(BufReader::new(file));
let mut content = String::new();
Expand All @@ -79,7 +99,7 @@ impl Import {

for statement in dump_content.split(';') {
let trimmed = statement.trim();

if !trimmed.is_empty() {
match conn.query_drop(trimmed) {
Ok(_) => {
Expand Down
Loading

0 comments on commit 96012b8

Please sign in to comment.