diff --git a/src/database/__init__.py b/src/database/__init__.py
index 77a39b9..9add3ee 100644
--- a/src/database/__init__.py
+++ b/src/database/__init__.py
@@ -1,4 +1 @@
-from .databaseconnection import DatabaseConnection
-from .repoconn import RepoConn
-from .archiveconn import ArchiveConn
-from .statsconn import StatsConn
+from .borgdatabase import BorgDatabase
diff --git a/src/database/borgdatabase.py b/src/database/borgdatabase.py
new file mode 100644
index 0000000..d5f4d66
--- /dev/null
+++ b/src/database/borgdatabase.py
@@ -0,0 +1,40 @@
+from .connection import RepoConn, ArchiveConn, StatsConn, ErrorConn
+from pathlib import Path
+from src import borg
+import json
+
+
+class BorgDatabase(object):
+    def __init__(self, db_path: Path):
+        self.repo_name = "repo"
+        self.archive_name = "archive"
+        self.stats_name = "stats"
+        self.error_name = "error"
+
+        self.repo_conn = RepoConn(db_path, table_name=self.repo_name)
+        self.archive_conn = ArchiveConn(db_path, self.repo_name,
+                                        table_name=self.archive_name)
+        self.stats_conn = StatsConn(db_path, self.repo_name, self.archive_name,
+                                    table_name=self.stats_name)
+        self.error_conn = ErrorConn(db_path,
+                                    table_name=self.error_name)
+
+    def process_borg_output(self, borg_output: str):
+        borg_json = None
+        try:
+            borg_json = json.loads(borg_output)
+        except json.JSONDecodeError:
+            self.handle_borg_error(borg_output)
+        self.process_borg_json(borg_json)
+
+    def process_borg_json(self, borg_json: dict):
+        repo = borg.Repo.from_json(borg_json['repository'])
+        archive = borg.Archive.from_json(borg_json['archive'])
+        stats = borg.Stats.from_json(borg_json['archive']['stats'])
+
+        repo_id = self.repo_conn.insert(repo)
+        archive_id = self.archive_conn.insert(archive, repo_id)
+        self.stats_conn.insert(stats, repo_id, archive_id)
+
+    def handle_borg_error(self, borg_error: str):
+        pass
diff --git a/src/database/connection/__init__.py b/src/database/connection/__init__.py
new file mode 100644
index 0000000..dc86103
--- /dev/null
+++ b/src/database/connection/__init__.py
@@ -0,0 +1,5 @@
+from .databaseconnection import DatabaseConnection
+from .repoconn import RepoConn
+from .archiveconn import ArchiveConn
+from .statsconn import StatsConn
+from .errorconn import ErrorConn
diff --git a/src/database/archiveconn.py b/src/database/connection/archiveconn.py
similarity index 60%
rename from src/database/archiveconn.py
rename to src/database/connection/archiveconn.py
index 8287c68..6d14f41 100644
--- a/src/database/archiveconn.py
+++ b/src/database/connection/archiveconn.py
@@ -1,23 +1,28 @@
-from . import DatabaseConnection
+from .databaseconnection import DatabaseConnection
 
 
 class ArchiveConn(DatabaseConnection):
-    def __init__(self, db_path, table_name: str = "archive"):
+    def __init__(self, db_path, repo_table_name: str,
+                 table_name: str = "archive"):
+        self.repo_table_name = repo_table_name
         super().__init__(db_path, table_name)
 
+
     def _create_table(self):
         create_statement = f"create table if not exists {self._sql_table}(" \
                            f"archive_id INTEGER PRIMARY KEY," \
-                           f"fingerprint INTEGER NOT NULL UNIQUE," \
+                           f"fingerprint TEXT NOT NULL UNIQUE," \
                            f"repo_id INTEGER NOT NULL," \
-                           f"name TEXT NOT NULL UNIQUE," \
+                           f"name TEXT NOT NULL," \
                            f"start TEXT TIMESTAMP NULL," \
                            f"end TEXT TIMESTAMP NULL," \
-                           f"FOREIGN KEY (repo_id) REFERENCES repo (repo_id))"
+                           f"FOREIGN KEY (repo_id) REFERENCES" \
+                           f" {self.repo_table_name} (repo_id));"
         self.sql_execute(create_statement)
 
     def _exists(self, record):
-        return f"SELECT archive_id FROM {self._sql_table} WHERE fingerprint=?;", (record.fingerprint,)
+        return f"SELECT archive_id FROM {self._sql_table}" \
+               f" WHERE fingerprint=?;", (record.fingerprint,)
 
     def _insert(self, record, repo_id=None, archive_id=None) -> int:
         if repo_id is None:
@@ -27,7 +32,8 @@ class ArchiveConn(DatabaseConnection):
             statement = f"INSERT INTO {self._sql_table}"\
                         f" ('fingerprint', 'repo_id', 'name', 'start', 'end')"\
                         f" VALUES (?, ?, ?, ?, ?);"
-            args = (record.fingerprint, repo_id, record.name, record.start, record.end)
+            args = (record.fingerprint, repo_id, record.name,
+                    record.start, record.end)
             cursor.execute(statement, args)
             self.sql_commit()
             return cursor.lastrowid
diff --git a/src/database/databaseconnection.py b/src/database/connection/databaseconnection.py
similarity index 100%
rename from src/database/databaseconnection.py
rename to src/database/connection/databaseconnection.py
diff --git a/src/database/connection/errorconn.py b/src/database/connection/errorconn.py
new file mode 100644
index 0000000..09c00a9
--- /dev/null
+++ b/src/database/connection/errorconn.py
@@ -0,0 +1,30 @@
+from .databaseconnection import DatabaseConnection
+from datetime import datetime
+
+
+class ErrorConn(DatabaseConnection):
+    def __init__(self, db_path, table_name: str = "errors"):
+        super().__init__(db_path, table_name)
+
+    def _create_table(self):
+        create_statement = f"create table if not exists {self._sql_table}(" \
+                           f"error_id INTEGER PRIMARY KEY," \
+                           f"error TEXT NOT NULL," \
+                           f"time TIMESTAMP NOT NULL);"
+        self.sql_execute(create_statement)
+
+    def _exists(self, record):
+        return None, None
+
+    def _insert(self, record, repo_id=None, archive_id=None) -> int:
+        if repo_id is None or archive_id is None:
+            raise Exception("Repo and archive ids not supplied")
+        with self.sql_lock:
+            cursor = self.sql_cursor
+            statement = f"INSERT INTO {self._sql_table}"\
+                        f" ('error', 'time')"\
+                        f" VALUES (?, ?);"
+            args = (record.error, datetime.now())
+            cursor.execute(statement, args)
+            self.sql_commit()
+            return cursor.lastrowid
diff --git a/src/database/repoconn.py b/src/database/connection/repoconn.py
similarity index 92%
rename from src/database/repoconn.py
rename to src/database/connection/repoconn.py
index 4bd01e1..ca83dc7 100644
--- a/src/database/repoconn.py
+++ b/src/database/connection/repoconn.py
@@ -1,4 +1,4 @@
-from . import DatabaseConnection
+from .databaseconnection import DatabaseConnection
 
 
 class RepoConn(DatabaseConnection):
@@ -27,7 +27,7 @@ class RepoConn(DatabaseConnection):
     def _create_table(self):
         create_statement = f"create table if not exists {self._sql_table}(" \
                            f"repo_id INTEGER PRIMARY KEY," \
-                           f"fingerprint INTEGER NOT NULL UNIQUE," \
+                           f"fingerprint TEXT NOT NULL UNIQUE," \
                            f"location TEXT NOT NULL," \
                            f"last_modified TIMESTAMP NOT NULL)"
         self.sql_execute(create_statement)
diff --git a/src/database/statsconn.py b/src/database/connection/statsconn.py
similarity index 78%
rename from src/database/statsconn.py
rename to src/database/connection/statsconn.py
index 0110219..e383995 100644
--- a/src/database/statsconn.py
+++ b/src/database/connection/statsconn.py
@@ -1,8 +1,12 @@
-from . import DatabaseConnection
+from .databaseconnection import DatabaseConnection
 
 
 class StatsConn(DatabaseConnection):
-    def __init__(self, db_path, table_name: str = "stats"):
+    def __init__(self, db_path, repo_table: str, archive_table: str,
+                 table_name: str = "stats"):
+        self.repo_table = repo_table
+        self.archive_table = archive_table
+
         super().__init__(db_path, table_name)
 
     def _create_table(self):
@@ -14,8 +18,10 @@ class StatsConn(DatabaseConnection):
                            f"original_size INTEGER NOT NULL," \
                            f"compressed_size INTEGER NOT NULL," \
                            f"deduplicated_size INTEGER NOT NULL," \
-                           f"FOREIGN KEY (repo_id) REFERENCES repo (repo_id)," \
-                           f"FOREIGN KEY (archive_id) REFERENCES archive (archive_id))"
+                           f"FOREIGN KEY (repo_id) REFERENCES" \
+                           f" {self.repo_table} (repo_id)," \
+                           f"FOREIGN KEY (archive_id) REFERENCES" \
+                           f" {self.archive_table} (archive_id));"
         self.sql_execute(create_statement)
 
     def _exists(self, record):
diff --git a/src/main.py b/src/main.py
index b1cfb5d..c724d31 100644
--- a/src/main.py
+++ b/src/main.py
@@ -1,36 +1,17 @@
+from database import BorgDatabase
 from sys import stdin
 from os.path import realpath
 from pathlib import Path
-from database import RepoConn, ArchiveConn, StatsConn
-import json
-import borg
 
 
-def main(input_json: dict, path: Path):
+def main(borg_output: str, path: Path):
     db_path = path / 'borg.sqlite'
+    db = BorgDatabase(db_path)
 
-    repo = borg.Repo.from_json(input_json['repository'])
-    barchive = borg.Archive.from_json(input_json['archive'])
-    stats = borg.Stats.from_json(input_json['archive']['stats'])
-
-    repo_conn = RepoConn(db_path)
-    archive_conn = ArchiveConn(db_path)
-    stats_conn = StatsConn(db_path)
-
-    repo_id = repo_conn.insert(repo)
-    archive_id = archive_conn.insert(barchive, repo_id)
-    stat_id = stats_conn.insert(stats, repo_id, archive_id)
+    db.process_borg_output(borg_output)
 
 
 if __name__ == "__main__":
     path = Path(realpath(__file__)).parent.parent
-    input_text = stdin.readlines()
-
-    try:
-        input_json = json.loads(" ".join(input_text))
-    except:
-        # todo: output input_text somewhere
-        print("Error parsing json, output:")
-        print("\n".join(input_text))
-        exit(1)
-    main(input_json, path)
+    borg_output = "\n".join(stdin.readlines())
+    main(borg_output, path)
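
For reference, a minimal sketch of how the new BorgDatabase entry point might be driven end to end. The class, method, and module names come from the diff above; the field names inside the sample "repository" and "archive" objects are assumptions based on the new table columns and on borg's --json output, since the from_json() helpers in src/borg are not part of this change.

    import json
    from pathlib import Path

    from database import BorgDatabase  # same import path main.py uses

    # Hypothetical borg output: only the 'repository', 'archive' and nested
    # 'stats' keys are required by process_borg_json(); the inner field
    # names are assumed, not taken from the diff.
    sample_output = json.dumps({
        "repository": {"id": "abc123", "location": "/backups/repo",
                       "last_modified": "2019-06-01T12:00:00"},
        "archive": {"id": "def456", "name": "home-2019-06-01",
                    "start": "2019-06-01T12:00:00",
                    "end": "2019-06-01T12:05:00",
                    "stats": {"original_size": 1024,
                              "compressed_size": 512,
                              "deduplicated_size": 256}},
    })

    db = BorgDatabase(Path("/tmp") / "borg.sqlite")
    # Parses the JSON, then inserts the repo, archive and stats rows
    # through the RepoConn/ArchiveConn/StatsConn wrappers.
    db.process_borg_output(sample_output)

As in main.py, the raw text read from stdin is handed to process_borg_output(), which falls back to handle_borg_error() (currently a stub) when json.loads() raises JSONDecodeError.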