Move archive stats to archive table

- Remove all references to old stats
This commit is contained in:
George Lacey 2021-05-05 04:32:37 +01:00
parent a815cfb0b7
commit 0b5f18474a
9 changed files with 30 additions and 109 deletions

View File

@@ -1,5 +1,5 @@
from datetime import datetime from datetime import datetime
from borgmanager.database.object import Repo, Archive, Error, Stats from borgmanager.database.object import Repo, Archive, Error
import json import json
@@ -17,9 +17,8 @@ class OutputHandler(object):
def get_borg_info(self): def get_borg_info(self):
repo = Repo.from_json(self.borg_json['repository']) repo = Repo.from_json(self.borg_json['repository'])
archive = Archive.from_json(self.borg_json['archive']) archive = Archive.from_json(self.borg_json['archive'])
stats = Stats.from_json(self.borg_json['archive']['stats'])
return repo, archive, stats return repo, archive
def get_borg_error(self): def get_borg_error(self):
return Error(self.borg_output, datetime.now()) return Error(self.borg_output, datetime.now())

View File

@@ -1,4 +1,4 @@
from .connection import RepoConn, ArchiveConn, StatsConn, ErrorConn, LabelConn from .connection import RepoConn, ArchiveConn, ErrorConn, LabelConn
from .object.label import Label from .object.label import Label
from pathlib import Path from pathlib import Path
@@ -14,8 +14,6 @@ class BorgDatabase(object):
self.repo_conn = RepoConn(db_path, table_name=self.repo_name) self.repo_conn = RepoConn(db_path, table_name=self.repo_name)
self.archive_conn = ArchiveConn(db_path, self.repo_name, self.archive_conn = ArchiveConn(db_path, self.repo_name,
table_name=self.archive_name) table_name=self.archive_name)
self.stats_conn = StatsConn(db_path, self.repo_name, self.archive_name,
table_name=self.stats_name)
self.error_conn = ErrorConn(db_path, self.error_conn = ErrorConn(db_path,
label_table=self.label_name, label_table=self.label_name,
table_name=self.error_name) table_name=self.error_name)
@@ -25,11 +23,10 @@ class BorgDatabase(object):
# region INSERT # region INSERT
def insert_record(self, repo, archive, stats, label): def insert_record(self, repo, archive, label):
repo_id = self.repo_conn.insert(repo) repo_id = self.repo_conn.insert(repo)
label_id = self.insert_label(label, repo_id=repo_id) label_id = self.insert_label(label, repo_id=repo_id)
archive_id = self.archive_conn.insert(archive, repo_id=repo_id) archive_id = self.archive_conn.insert(archive, repo_id=repo_id)
self.stats_conn.insert(stats, repo_id=repo_id, archive_id=archive_id)
def insert_error(self, borg_error, label): def insert_error(self, borg_error, label):
label_id = self.insert_label(label) label_id = self.insert_label(label)
@@ -45,7 +42,4 @@ class BorgDatabase(object):
def get_repos(self): def get_repos(self):
return self.repo_conn.get_all() return self.repo_conn.get_all()
def get_repo_stats(self, repo):
return self.stats_conn.get_latest_stats(repo)
# endregion # endregion

View File

@@ -1,6 +1,5 @@
from .databaseconnection import DatabaseConnection from .databaseconnection import DatabaseConnection
from .repoconn import RepoConn from .repoconn import RepoConn
from .archiveconn import ArchiveConn from .archiveconn import ArchiveConn
from .statsconn import StatsConn
from .errorconn import ErrorConn from .errorconn import ErrorConn
from .labelconn import LabelConn from .labelconn import LabelConn

View File

@@ -15,6 +15,10 @@ class ArchiveConn(DatabaseConnection):
f"name TEXT NOT NULL," \ f"name TEXT NOT NULL," \
f"start TEXT TIMESTAMP NULL," \ f"start TEXT TIMESTAMP NULL," \
f"end TEXT TIMESTAMP NULL," \ f"end TEXT TIMESTAMP NULL," \
f"file_count INTEGER NOT NULL," \
f"original_size INTEGER NOT NULL," \
f"compressed_size INTEGER NOT NULL," \
f"deduplicated_size INTEGER NOT NULL," \
f"FOREIGN KEY (repo_id) REFERENCES" \ f"FOREIGN KEY (repo_id) REFERENCES" \
f" {self.repo_table_name} (id));" f" {self.repo_table_name} (id));"
self.sql_execute(create_statement) self.sql_execute(create_statement)
@@ -29,10 +33,12 @@
with self.sql_lock: with self.sql_lock:
cursor = self.sql_cursor cursor = self.sql_cursor
statement = f"INSERT INTO {self._sql_table}"\ statement = f"INSERT INTO {self._sql_table}"\
f" ('fingerprint', 'repo_id', 'name', 'start', 'end')"\ f" ('fingerprint', 'repo_id', 'name', 'start', 'end'," \
f" VALUES (?, ?, ?, ?, ?);" f"'file_count', 'original_size', 'compressed_size', 'deduplicated_size')"\
f" VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?);"
args = (record.fingerprint, repo_id, record.name, args = (record.fingerprint, repo_id, record.name,
record.start, record.end) record.start, record.end, record.file_count, record.original_size,
record.compressed_size, record.deduplicated_size)
cursor.execute(statement, args) cursor.execute(statement, args)
self.sql_commit() self.sql_commit()
return cursor.lastrowid return cursor.lastrowid

View File

@@ -1,59 +0,0 @@
from .databaseconnection import DatabaseConnection
class StatsConn(DatabaseConnection):
    """Database connection for the per-archive backup ``stats`` table.

    One row per archive: file count plus original/compressed/deduplicated
    sizes, with foreign keys into the repo and archive tables.

    NOTE(review): this entire file is DELETED by this commit (hunk is
    ``-1,59 +0,0``) — the four stat columns were folded into the archive
    table (see the ArchiveConn changes in the same commit). Shown here only
    as the deletion record.
    """

    def __init__(self, db_path, repo_table: str, archive_table: str,
                 table_name: str = "stats"):
        # Referenced table names are stored before super().__init__ because
        # _create_table() (run by the base class, presumably — confirm in
        # DatabaseConnection) interpolates them into the FOREIGN KEY clauses.
        self.repo_table = repo_table
        self.archive_table = archive_table
        super().__init__(db_path, table_name)

    # region INIT
    def _create_table(self):
        # Idempotent DDL ("if not exists"): one statistics row per archive.
        create_statement = f"create table if not exists {self._sql_table}(" \
                           f"id INTEGER PRIMARY KEY," \
                           f"repo_id INTEGER NOT NULL," \
                           f"archive_id INTEGER NOT NULL," \
                           f"file_count INTEGER NOT NULL," \
                           f"original_size INTEGER NOT NULL," \
                           f"compressed_size INTEGER NOT NULL," \
                           f"deduplicated_size INTEGER NOT NULL," \
                           f"FOREIGN KEY (repo_id) REFERENCES" \
                           f" {self.repo_table} (id)," \
                           f"FOREIGN KEY (archive_id) REFERENCES" \
                           f" {self.archive_table} (id));"
        self.sql_execute(create_statement)
    # endregion

    # region INSERT
    def _exists(self, record, repo_id=None, archive_id=None, label_id=None):
        # Stats rows are never deduplicated: always report "not found".
        return None, None

    def _insert(self, record, repo_id=None, archive_id=None, label_id=None) -> int:
        """Insert one stats row and return its new row id.

        :raises Exception: if either ``repo_id`` or ``archive_id`` is missing
            (both foreign keys are NOT NULL in the schema above).
        """
        if repo_id is None or archive_id is None:
            raise Exception("Repo and archive ids not supplied")
        # Lock serialises access to the shared cursor across threads.
        with self.sql_lock:
            cursor = self.sql_cursor
            statement = f"INSERT INTO {self._sql_table}"\
                        f" ('repo_id', 'archive_id', 'file_count', 'original_size'," \
                        f"'compressed_size', 'deduplicated_size')"\
                        f" VALUES (?, ?, ?, ?, ?, ?);"
            args = (repo_id, archive_id, record.file_count, record.original_size,
                    record.compressed_size, record.deduplicated_size)
            cursor.execute(statement, args)
            self.sql_commit()
        return cursor.lastrowid
    # endregion

    # region QUERY
    def get_latest_stats(self, repo):
        # Single-row lookup keyed on the repo's primary key.
        # NOTE(review): no ORDER BY, so "latest" depends on whichever row
        # sql_execute_one returns first — confirm against DatabaseConnection.
        key = repo.primary_key
        return self.sql_execute_one(f"SELECT * FROM {self._sql_table} WHERE repo_id=?;", (key,))
    # endregion

View File

@@ -1,6 +1,5 @@
from .dbobject import DBObject from .dbobject import DBObject
from .repo import Repo from .repo import Repo
from .archive import Archive from .archive import Archive
from .stats import Stats
from .error import Error from .error import Error
from .label import Label from .label import Label

View File

@@ -3,20 +3,32 @@ from datetime import datetime
class Archive(DBObject): class Archive(DBObject):
def __init__(self, fingerprint: str, name: str, start: datetime, end: datetime, primary_key=None): def __init__(self, fingerprint: str, name: str, start: datetime, end: datetime, file_count: int, original_size: int,
compressed_size: int, deduplicated_size: int, primary_key=None):
super(Archive, self).__init__(primary_key) super(Archive, self).__init__(primary_key)
self.fingerprint = fingerprint self.fingerprint = fingerprint
self.name = name self.name = name
self.start = start self.start = start
self.end = end self.end = end
self.file_count = file_count
self.original_size = original_size
self.compressed_size = compressed_size
self.deduplicated_size = deduplicated_size
@classmethod @classmethod
def from_json(cls, json: dict): def from_json(cls, json: dict):
uuid = json['id'] fingerprint = json['id']
name = json['name'] name = json['name']
start = datetime.fromisoformat(json['start']) start = datetime.fromisoformat(json['start'])
end = datetime.fromisoformat(json['end']) end = datetime.fromisoformat(json['end'])
return cls(uuid, name, start, end)
stats_json = json['stats']
file_count = stats_json['nfiles']
original_size = stats_json['original_size']
compressed_size = stats_json['compressed_size']
deduplicated_size = stats_json['deduplicated_size']
return cls(fingerprint, name, start, end, file_count, original_size, compressed_size, deduplicated_size)
@classmethod @classmethod
def from_sql(cls, sql: list): def from_sql(cls, sql: list):

View File

@@ -1,28 +0,0 @@
from . import DBObject
class Stats(DBObject):
    """Backup statistics for a single archive.

    NOTE(review): this entire file is DELETED by this commit (hunk is
    ``-1,28 +0,0``) — the four fields now live directly on ``Archive``.
    Shown here only as the deletion record.
    """

    def __init__(self, file_count: int, original_size: int, compressed_size: int, deduplicated_size: int,
                 primary_key=None):
        super(Stats, self).__init__(primary_key)
        self.file_count = file_count              # number of files in the archive
        self.original_size = original_size        # bytes before compression
        self.compressed_size = compressed_size    # bytes after compression
        self.deduplicated_size = deduplicated_size  # bytes unique to this archive

    @classmethod
    def from_json(cls, json: dict):
        """Build a Stats from a borg ``stats`` JSON object (no primary key)."""
        file_count = json['nfiles']
        original_size = json['original_size']
        compressed_size = json['compressed_size']
        deduplicated_size = json['deduplicated_size']
        return cls(file_count, original_size, compressed_size, deduplicated_size)

    @classmethod
    def from_sql(cls, sql: tuple):
        """Build a Stats from a stats-table row.

        Indices 1-2 are skipped — per the table DDL they are presumably the
        repo_id/archive_id foreign keys; confirm against StatsConn.
        """
        key = sql[0]
        filecount = sql[3]
        original_size = sql[4]
        compressed_size = sql[5]
        deduplicated_size = sql[6]
        return cls(filecount, original_size, compressed_size, deduplicated_size, key)

View File

@@ -1,4 +1,4 @@
from borgmanager.database.object import Repo, Stats from borgmanager.database.object import Repo
class Summary(object): class Summary(object):
@@ -14,10 +14,9 @@ class Summary(object):
return_string = "" return_string = ""
for line in repo_sql: for line in repo_sql:
repo = Repo.from_sql(line) repo = Repo.from_sql(line)
stats = Stats.from_sql(self.db.get_repo_stats(repo))
return_string += f"repo: {repo.location}\n" return_string += f"repo: {repo.location}\n"
return_string += f"last backup: {self.seconds_to_string(repo.seconds_since(), 'day', True)} ago\n" return_string += f"last backup: {self.seconds_to_string(repo.seconds_since(), 'day', True)} ago\n"
return_string += f"file count: {stats.file_count}\n" return_string += f"file count: {repo.file_count}\n"
return_string += "\n" return_string += "\n"
return return_string.strip() return return_string.strip()