Separate sql logic from borg classes

parent 06331f4a83
commit 40ecdf04ef

src/database/__init__.py
@@ -1,4 +1,4 @@
 from .databaseconnection import DatabaseConnection
-from .repo import Repo
-from .archive import Archive
-from .stats import Stats
+from .repoconn import RepoConn
+from .archiveconn import ArchiveConn
+from .statsconn import StatsConn
src/database/archive.py (deleted)
@@ -1,45 +0,0 @@
-from . import DatabaseConnection, Repo
-from datetime import datetime
-
-
-class Archive(DatabaseConnection):
-    def __init__(self, db_path, repo: Repo, archive_json: dict, table_name: str = "archive"):
-        super().__init__(db_path, table_name)
-
-        self.uuid = archive_json['id']
-        self.repo_id = repo.primary_key
-        self.name = archive_json['name']
-        self.start = datetime.fromisoformat(archive_json['start'])
-        self.end = datetime.fromisoformat(archive_json['end'])
-
-        self.insert()
-
-    def _create_table(self):
-        create_statement = f"create table if not exists {self._sql_table}(" \
-                           f"archive_id INTEGER PRIMARY KEY," \
-                           f"uuid INTEGER NOT NULL UNIQUE," \
-                           f"repo_id INTEGER NOT NULL," \
-                           f"name TEXT NOT NULL UNIQUE," \
-                           f"start TEXT TIMESTAMP NULL," \
-                           f"end TEXT TIMESTAMP NULL," \
-                           f"FOREIGN KEY (repo_id) REFERENCES repo (repo_id))"
-        self.sql_execute(create_statement)
-
-    def _exists(self):
-        result = self.sql_execute_one(f"SELECT archive_id FROM {self._sql_table}"
-                                      f" WHERE uuid=?;", (self.uuid,))
-        if result is None:
-            return False, None
-        else:
-            return True, result[0]
-
-    def _insert(self) -> int:
-        with self.sql_lock:
-            cursor = self.sql_cursor
-            statement = f"INSERT INTO {self._sql_table}" \
-                        f" ('uuid', 'repo_id', 'name', 'start', 'end')" \
-                        f" VALUES (?, ?, ?, ?, ?);"
-            args = (self.uuid, self.repo_id, self.name, self.start, self.end)
-            cursor.execute(statement, args)
-            self.sql_commit()
-            return cursor.lastrowid
src/database/archiveconn.py (new file, 31 lines)
@@ -0,0 +1,31 @@
+from . import DatabaseConnection
+
+
+class ArchiveConn(DatabaseConnection):
+    def __init__(self, db_path, table_name: str = "archive"):
+        super().__init__(db_path, table_name)
+
+    def _create_table(self):
+        create_statement = f"create table if not exists {self._sql_table}(" \
+                           f"archive_id INTEGER PRIMARY KEY," \
+                           f"fingerprint INTEGER NOT NULL UNIQUE," \
+                           f"repo_id INTEGER NOT NULL," \
+                           f"name TEXT NOT NULL UNIQUE," \
+                           f"start TEXT TIMESTAMP NULL," \
+                           f"end TEXT TIMESTAMP NULL," \
+                           f"FOREIGN KEY (repo_id) REFERENCES repo (repo_id))"
+        self.sql_execute(create_statement)
+
+    def _exists(self, record):
+        return f"SELECT archive_id FROM {self._sql_table} WHERE fingerprint=?;", (record.fingerprint,)
+
+    def _insert(self, record, repo_id) -> int:
+        with self.sql_lock:
+            cursor = self.sql_cursor
+            statement = f"INSERT INTO {self._sql_table}" \
+                        f" ('fingerprint', 'repo_id', 'name', 'start', 'end')" \
+                        f" VALUES (?, ?, ?, ?, ?);"
+            args = (record.fingerprint, repo_id, record.name, record.start, record.end)
+            cursor.execute(statement, args)
+            self.sql_commit()
+            return cursor.lastrowid
src/database/databaseconnection.py
@@ -16,8 +16,6 @@ class DatabaseConnection(ABC):
         self._create_table()
         self.sql_commit()
 
-        self.primary_key = None
-
     @property
     def sql_lock(self):
         return self.__sql_lock
@@ -68,26 +66,35 @@ class DatabaseConnection(ABC):
     def sql_commit(self):
         self.__sql_database.commit()
 
-    def insert(self):
-        if self.exists():
-            raise Exception("Record exists")
-        elif self.primary_key is not None:
-            raise Exception("Primary key already set")
+    def insert(self, record, *args, **kwargs):
+        exists, primary_key = self.exists(record)
+        if exists:
+            self._update(record, primary_key)
+            return primary_key
         else:
-            self.primary_key = self._insert()
+            return self._insert(record, *args, **kwargs)
+
+    def _update(self, record, primary_key):
+        pass
 
     @abstractmethod
-    def _insert(self) -> int:
+    def _insert(self, record, *args, **kwargs) -> int:
         raise NotImplementedError
 
-    def exists(self) -> bool:
-        exists, primary_key = self._exists()
-        if exists:
-            self.primary_key = primary_key
-        return exists
+    def exists(self, record) -> (bool, int):
+        query, args = self._exists(record)
+
+        if query is None:
+            return False, None
+        else:
+            result = self.sql_execute_one(query, args)
+            if result is None:
+                return False, None
+            else:
+                return True, result[0]
 
     @abstractmethod
-    def _exists(self) -> (bool, list):
+    def _exists(self, record) -> (str, tuple):
         raise NotImplementedError
 
     @abstractmethod
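DatabaseConnection now owns the whole exists/insert/update flow and no longer keeps a primary_key of its own; subclasses just hand back SQL and receive plain record objects. Below is a self-contained sketch of that contract with toy names. The real class's sqlite helpers (sql_lock, sql_cursor, sql_execute_one) are not touched by this commit and are not shown in the diff, so the sketch uses plain sqlite3 instead; it is an illustration of the pattern, not the project's code.

import sqlite3
from abc import ABC, abstractmethod


class ConnBaseSketch(ABC):
    """Toy stand-in for DatabaseConnection: the base class decides insert vs. update,
    subclasses only supply SQL strings."""

    def __init__(self, db_path, table_name):
        self._sql_table = table_name
        self._db = sqlite3.connect(db_path)
        self._create_table()

    def exists(self, record):
        query, args = self._exists(record)
        if query is None:                      # subclass opted out of dedup (cf. StatsConn)
            return False, None
        row = self._db.execute(query, args).fetchone()
        return (False, None) if row is None else (True, row[0])

    def insert(self, record, *args, **kwargs):
        found, primary_key = self.exists(record)
        if found:
            self._update(record, primary_key)  # default is a no-op
            return primary_key
        return self._insert(record, *args, **kwargs)

    def _update(self, record, primary_key):
        pass

    @abstractmethod
    def _create_table(self): ...

    @abstractmethod
    def _exists(self, record): ...

    @abstractmethod
    def _insert(self, record, *args, **kwargs): ...


class NoteConn(ConnBaseSketch):
    """Minimal concrete subclass: one table, one unique column."""

    def _create_table(self):
        self._db.execute(f"CREATE TABLE IF NOT EXISTS {self._sql_table}"
                         "(note_id INTEGER PRIMARY KEY, body TEXT NOT NULL UNIQUE)")

    def _exists(self, record):
        return f"SELECT note_id FROM {self._sql_table} WHERE body=?;", (record,)

    def _insert(self, record):
        cur = self._db.execute(f"INSERT INTO {self._sql_table} (body) VALUES (?);", (record,))
        self._db.commit()
        return cur.lastrowid


conn = NoteConn(":memory:", "note")
assert conn.insert("hello") == conn.insert("hello")   # duplicate resolves to the same note_id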
src/database/repo.py (deleted)
@@ -1,48 +0,0 @@
-from . import DatabaseConnection
-from datetime import datetime
-
-
-class Repo(DatabaseConnection):
-    def __init__(self, db_path, repo_json: dict, table_name: str = 'repo'):
-        super(Repo, self).__init__(db_path, table_name)
-
-        self.uuid = repo_json['id']
-        self.location = repo_json['location']
-        self.last_modified = datetime.fromisoformat(repo_json['last_modified'])
-
-        if self.exists():
-            self._update()
-        else:
-            self.insert()
-
-    def _insert(self) -> int:
-        with self.sql_lock:
-            cursor = self.sql_cursor
-            statement = f"INSERT INTO {self._sql_table}" \
-                        f" ('uuid', 'location', 'last_modified')" \
-                        f" VALUES (?, ?, ?);"
-            args = (self.uuid, self.location, self.last_modified)
-            cursor.execute(statement, args)
-            self.sql_commit()
-            return cursor.lastrowid
-
-    def _update(self):
-        self.sql_execute(f"UPDATE {self._sql_table} SET location = ?, last_modified = ? WHERE repo_id = ?;",
-                         (self.location, self.last_modified, self.primary_key))
-        self.sql_commit()
-
-    def _exists(self):
-        result = self.sql_execute_one(f"SELECT repo_id FROM {self._sql_table}"
-                                      f" WHERE uuid=?;", (self.uuid,))
-        if result is None:
-            return False, None
-        else:
-            return True, result[0]
-
-    def _create_table(self):
-        create_statement = f"create table if not exists {self._sql_table}(" \
-                           f"repo_id INTEGER PRIMARY KEY," \
-                           f"uuid INTEGER NOT NULL UNIQUE," \
-                           f"location TEXT NOT NULL," \
-                           f"last_modified TIMESTAMP NOT NULL)"
-        self.sql_execute(create_statement)
src/database/repoconn.py (new file, 33 lines)
@@ -0,0 +1,33 @@
+from . import DatabaseConnection
+
+
+class RepoConn(DatabaseConnection):
+    def __init__(self, db_path, table_name: str = 'repo'):
+        super(RepoConn, self).__init__(db_path, table_name)
+
+    def _insert(self, record) -> int:
+        with self.sql_lock:
+            cursor = self.sql_cursor
+            statement = f"INSERT INTO {self._sql_table}" \
+                        f" ('fingerprint', 'location', 'last_modified')" \
+                        f" VALUES (?, ?, ?);"
+            args = (record.fingerprint, str(record.location), record.last_modified)
+            cursor.execute(statement, args)
+            self.sql_commit()
+            return cursor.lastrowid
+
+    def _update(self, record, primary_key):
+        self.sql_execute(f"UPDATE {self._sql_table} SET location = ?, last_modified = ? WHERE repo_id = ?;",
+                         (str(record.location), record.last_modified, primary_key))
+        self.sql_commit()
+
+    def _exists(self, record):
+        return f"SELECT repo_id FROM {self._sql_table} WHERE fingerprint=?;", (record.fingerprint,)
+
+    def _create_table(self):
+        create_statement = f"create table if not exists {self._sql_table}(" \
+                           f"repo_id INTEGER PRIMARY KEY," \
+                           f"fingerprint INTEGER NOT NULL UNIQUE," \
+                           f"location TEXT NOT NULL," \
+                           f"last_modified TIMESTAMP NOT NULL)"
+        self.sql_execute(create_statement)
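RepoConn keeps the old Repo upsert behaviour: a fingerprint already in the table triggers _update and returns the existing repo_id, anything else is inserted. A hedged usage sketch, assuming src/ is on sys.path and the unshown DatabaseConnection sqlite helpers behave as before; SimpleNamespace stands in for the borg.Repo record, and the attribute names are the ones RepoConn actually reads:

from types import SimpleNamespace

from database import RepoConn   # assumes running with src/ on sys.path

# stand-in for borg.Repo: only fingerprint / location / last_modified are read
repo = SimpleNamespace(fingerprint="abc123", location="/backups/repo",
                       last_modified="2020-01-01T00:00:00")

conn = RepoConn("/tmp/borg.sqlite")
first_id = conn.insert(repo)      # unknown fingerprint -> INSERT, new repo_id
repo.location = "/mnt/backups/repo"
second_id = conn.insert(repo)     # known fingerprint -> _update(), same repo_id
assert first_id == second_id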
src/database/stats.py (deleted)
@@ -1,44 +0,0 @@
-from . import DatabaseConnection, Repo, Archive
-
-
-class Stats(DatabaseConnection):
-    def __init__(self, db_path, repo: Repo, archive: Archive, stats_json: dict, table_name: str = "stats"):
-        super().__init__(db_path, table_name)
-
-        self.repo_id = repo.primary_key
-        self.archive_id = archive.primary_key
-        self.file_count = stats_json['nfiles']
-        self.original_size = stats_json['original_size']
-        self.compressed_size = stats_json['compressed_size']
-        self.deduplicated_size = stats_json['deduplicated_size']
-
-        self.insert()
-
-    def _create_table(self):
-        create_statement = f"create table if not exists {self._sql_table}(" \
-                           f"stat_id INTEGER PRIMARY KEY," \
-                           f"repo_id INTEGER NOT NULL," \
-                           f"archive_id INTEGER NOT NULL," \
-                           f"file_count INTEGER NOT NULL UNIQUE," \
-                           f"original_size INTEGER NOT NULL UNIQUE," \
-                           f"compressed_size INTEGER NOT NULL UNIQUE," \
-                           f"deduplicated_size INTEGER NOT NULL UNIQUE," \
-                           f"FOREIGN KEY (repo_id) REFERENCES repo (repo_id)," \
-                           f"FOREIGN KEY (archive_id) REFERENCES archive (archive_id))"
-        self.sql_execute(create_statement)
-
-    def _exists(self):
-        return False, None
-
-    def _insert(self) -> int:
-        with self.sql_lock:
-            cursor = self.sql_cursor
-            statement = f"INSERT INTO {self._sql_table}" \
-                        f" ('repo_id', 'archive_id', 'file_count', 'original_size'," \
-                        f"'compressed_size', 'deduplicated_size')" \
-                        f" VALUES (?, ?, ?, ?, ?, ?);"
-            args = (self.repo_id, self.archive_id, self.file_count, self.original_size,
-                    self.compressed_size, self.deduplicated_size)
-            cursor.execute(statement, args)
-            self.sql_commit()
-            return cursor.lastrowid
src/database/statsconn.py (new file, 35 lines)
@@ -0,0 +1,35 @@
+from . import DatabaseConnection
+
+
+class StatsConn(DatabaseConnection):
+    def __init__(self, db_path, table_name: str = "stats"):
+        super().__init__(db_path, table_name)
+
+    def _create_table(self):
+        create_statement = f"create table if not exists {self._sql_table}(" \
+                           f"stat_id INTEGER PRIMARY KEY," \
+                           f"repo_id INTEGER NOT NULL," \
+                           f"archive_id INTEGER NOT NULL," \
+                           f"file_count INTEGER NOT NULL," \
+                           f"original_size INTEGER NOT NULL," \
+                           f"compressed_size INTEGER NOT NULL," \
+                           f"deduplicated_size INTEGER NOT NULL," \
+                           f"FOREIGN KEY (repo_id) REFERENCES repo (repo_id)," \
+                           f"FOREIGN KEY (archive_id) REFERENCES archive (archive_id))"
+        self.sql_execute(create_statement)
+
+    def _exists(self, record):
+        return None, None
+
+    def _insert(self, record, repo_id, archive_id) -> int:
+        with self.sql_lock:
+            cursor = self.sql_cursor
+            statement = f"INSERT INTO {self._sql_table}" \
+                        f" ('repo_id', 'archive_id', 'file_count', 'original_size'," \
+                        f"'compressed_size', 'deduplicated_size')" \
+                        f" VALUES (?, ?, ?, ?, ?, ?);"
+            args = (repo_id, archive_id, record.file_count, record.original_size,
+                    record.compressed_size, record.deduplicated_size)
+            cursor.execute(statement, args)
+            self.sql_commit()
+            return cursor.lastrowid
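Because StatsConn._exists returns (None, None), the base class's exists() short-circuits to (False, None): stats rows are never deduplicated or updated, every insert appends a new row, and the old UNIQUE constraints on the size columns are dropped accordingly. A small sketch under the same path assumptions as the RepoConn example above:

from types import SimpleNamespace

from database import StatsConn

# stand-in for borg.Stats: only the four count/size attributes are read
stats = SimpleNamespace(file_count=42, original_size=1000,
                        compressed_size=600, deduplicated_size=300)
stats_conn = StatsConn("/tmp/borg.sqlite")
row_a = stats_conn.insert(stats, repo_id=1, archive_id=1)
row_b = stats_conn.insert(stats, repo_id=1, archive_id=1)
assert row_a != row_b    # identical stats still produce two distinct stat_id rows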
src/main.py
@@ -1,16 +1,25 @@
 from sys import stdin
 from os.path import realpath
 from pathlib import Path
-from database import Repo, Archive, Stats
+from database import RepoConn, ArchiveConn, StatsConn
 import json
+import borg
 
 
 def main(input_json: dict, path: Path):
     db_path = path / 'borg.sqlite'
 
-    repo = Repo(db_path, input_json['repository'])
-    archive = Archive(db_path, repo, input_json['archive'])
-    stats = Stats(db_path, repo, archive, input_json['archive']['stats'])
+    repo = borg.Repo.from_json(input_json['repository'])
+    barchive = borg.Archive.from_json(input_json['archive'])
+    stats = borg.Stats.from_json(input_json['archive']['stats'])
+
+    repo_conn = RepoConn(db_path)
+    archive_conn = ArchiveConn(db_path)
+    stats_conn = StatsConn(db_path)
+
+    repo_id = repo_conn.insert(repo)
+    archive_id = archive_conn.insert(barchive, repo_id)
+    stat_id = stats_conn.insert(stats, repo_id, archive_id)
 
 
 if __name__ == "__main__":
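main.py now depends on a borg module that is not part of this diff. Judging from the from_json constructors called above and the attributes the *Conn classes read, its data classes presumably look roughly like the hypothetical sketch below; the field names and JSON keys are taken from the deleted database classes, everything else is a guess.

from dataclasses import dataclass
from datetime import datetime


@dataclass
class Repo:
    fingerprint: str
    location: str
    last_modified: datetime

    @classmethod
    def from_json(cls, repo_json: dict) -> "Repo":
        return cls(fingerprint=repo_json['id'],
                   location=repo_json['location'],
                   last_modified=datetime.fromisoformat(repo_json['last_modified']))


@dataclass
class Archive:
    fingerprint: str
    name: str
    start: datetime
    end: datetime

    @classmethod
    def from_json(cls, archive_json: dict) -> "Archive":
        return cls(fingerprint=archive_json['id'],
                   name=archive_json['name'],
                   start=datetime.fromisoformat(archive_json['start']),
                   end=datetime.fromisoformat(archive_json['end']))


@dataclass
class Stats:
    file_count: int
    original_size: int
    compressed_size: int
    deduplicated_size: int

    @classmethod
    def from_json(cls, stats_json: dict) -> "Stats":
        return cls(file_count=stats_json['nfiles'],
                   original_size=stats_json['original_size'],
                   compressed_size=stats_json['compressed_size'],
                   deduplicated_size=stats_json['deduplicated_size'])

The input JSON itself is presumably the output of a borg command run with --json, read from stdin; its repository / archive / archive.stats keys match the ones consumed here.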