Implement repo stats
- create cache table
- link cache to archive
commit 732fdd12ef
parent 0b5f18474a
@@ -1,5 +1,5 @@
 from datetime import datetime
-from borgmanager.database.object import Repo, Archive, Error
+from borgmanager.database.object import Repo, Archive, Error, Cache
 import json
 
 
@@ -17,8 +17,9 @@ class OutputHandler(object):
     def get_borg_info(self):
         repo = Repo.from_json(self.borg_json['repository'])
         archive = Archive.from_json(self.borg_json['archive'])
+        cache = Cache.from_json(self.borg_json['cache']['stats'])
 
-        return repo, archive
+        return repo, archive, cache
 
     def get_borg_error(self):
         return Error(self.borg_output, datetime.now())
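For reference (not part of the diff): the ['cache']['stats'] block read above corresponds to the cache-stats section borg emits with --json (for example from borg create --json or borg info --json). The key names below are taken straight from Cache.from_json further down; the numbers are invented.

    # Illustrative shape of self.borg_json; values are made up.
    borg_json = {
        "repository": {},   # fields omitted; consumed by Repo.from_json
        "archive": {},      # fields omitted; consumed by Archive.from_json
        "cache": {
            "stats": {      # consumed by Cache.from_json
                "total_chunks": 1200,
                "total_csize": 900000,
                "total_size": 1500000,
                "total_unique_chunks": 800,
                "unique_csize": 600000,
                "unique_size": 1000000,
            }
        },
    }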
@@ -1,4 +1,4 @@
-from .connection import RepoConn, ArchiveConn, ErrorConn, LabelConn
+from .connection import RepoConn, ArchiveConn, ErrorConn, LabelConn, CacheConn
 from .object.label import Label
 from pathlib import Path
 
@@ -7,12 +7,12 @@ class BorgDatabase(object):
     def __init__(self, db_path: Path):
         self.repo_name = "repo"
         self.archive_name = "archive"
-        self.stats_name = "stats"
+        self.cache_name = "cache"
         self.error_name = "error"
         self.label_name = "label"
 
         self.repo_conn = RepoConn(db_path, table_name=self.repo_name)
-        self.archive_conn = ArchiveConn(db_path, self.repo_name,
+        self.archive_conn = ArchiveConn(db_path, repo_table=self.repo_name,
                                         table_name=self.archive_name)
         self.error_conn = ErrorConn(db_path,
                                     label_table=self.label_name,
@@ -20,13 +20,17 @@ class BorgDatabase(object):
         self.label_conn = LabelConn(db_path,
                                     repo_table=self.repo_name,
                                     table_name=self.label_name)
+        self.cache_conn = CacheConn(db_path,
+                                    archive_table=self.archive_name,
+                                    table_name=self.cache_name)
 
     # region INSERT
 
-    def insert_record(self, repo, archive, label):
+    def insert_record(self, repo, archive, cache, label):
         repo_id = self.repo_conn.insert(repo)
-        label_id = self.insert_label(label, repo_id=repo_id)
+        self.insert_label(label, repo_id=repo_id)
         archive_id = self.archive_conn.insert(archive, repo_id=repo_id)
+        self.cache_conn.insert(cache, archive_id=archive_id)
 
     def insert_error(self, borg_error, label):
         label_id = self.insert_label(label)
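Read together with the OutputHandler hunk above, the extra tuple element flows straight into insert_record. A call-site sketch, assuming a handler instance and a Label value already exist elsewhere in the application (both are placeholders, not part of this commit):

    # Sketch only: handler and label are assumed to exist in the surrounding app.
    from pathlib import Path

    db = BorgDatabase(Path("borg-stats.db"))
    repo, archive, cache = handler.get_borg_info()
    db.insert_record(repo, archive, cache, label)
    # Inside insert_record: the repo insert yields repo_id, which keys the label
    # and archive inserts; the archive insert yields archive_id, which keys the
    # new cache row.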
@@ -3,3 +3,4 @@ from .repoconn import RepoConn
 from .archiveconn import ArchiveConn
 from .errorconn import ErrorConn
 from .labelconn import LabelConn
+from .cacheconn import CacheConn
@@ -2,9 +2,9 @@ from .databaseconnection import DatabaseConnection
 
 
 class ArchiveConn(DatabaseConnection):
-    def __init__(self, db_path, repo_table_name: str,
+    def __init__(self, db_path, repo_table: str,
                  table_name: str = "archive"):
-        self.repo_table_name = repo_table_name
+        self.repo_table = repo_table
         super().__init__(db_path, table_name)
 
     def _create_table(self):
@@ -20,7 +20,7 @@ class ArchiveConn(DatabaseConnection):
                            f"compressed_size INTEGER NOT NULL," \
                            f"deduplicated_size INTEGER NOT NULL," \
                            f"FOREIGN KEY (repo_id) REFERENCES" \
-                           f" {self.repo_table_name} (id));"
+                           f" {self.repo_table} (id));"
         self.sql_execute(create_statement)
 
     def _exists(self, record, repo_id=None, archive_id=None, label_id=None):
src/borgmanager/database/connection/cacheconn.py (new file, 40 lines)
@@ -0,0 +1,40 @@
+from .databaseconnection import DatabaseConnection
+
+
+class CacheConn(DatabaseConnection):
+    def __init__(self, db_path, archive_table: str, table_name: str = "cache"):
+        self.archive_table = archive_table
+        super().__init__(db_path, table_name)
+
+    def _create_table(self):
+        create_statement = f"create table if not exists {self._sql_table}(" \
+                           f"id INTEGER PRIMARY KEY," \
+                           f"archive_id INT NOT NULL," \
+                           f"total_chunks INT NOT NULL," \
+                           f"total_csize INT NOT NULL," \
+                           f"total_size INT NOT NULL," \
+                           f"total_unique_chunks INT NOT NULL," \
+                           f"unique_csize INT NOT NULL," \
+                           f"unique_size INT NOT NULL," \
+                           f"FOREIGN KEY (archive_id) REFERENCES" \
+                           f" {self.archive_table} (id));"
+        self.sql_execute(create_statement)
+
+    def _exists(self, record, repo_id=None, archive_id=None, label_id=None):
+        return None, None
+
+    def _insert(self, record, repo_id=None, archive_id=None, label_id=None) -> int:
+        if archive_id is None:
+            raise Exception("Archive ID not supplied")
+        else:
+            with self.sql_lock:
+                cursor = self.sql_cursor
+                statement = f"INSERT INTO {self._sql_table}" \
+                            f" ('archive_id', 'total_chunks', 'total_csize', 'total_size'," \
+                            f"'total_unique_chunks', 'unique_csize', 'unique_size')" \
+                            f" VALUES (?, ?, ?, ?, ?, ?, ?);"
+                args = (archive_id, record.total_chunks, record.total_csize, record.total_size,
+                        record.total_unique_chunks, record.unique_csize, record.unique_size)
+                cursor.execute(statement, args)
+                self.sql_commit()
+                return cursor.lastrowid
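As a standalone sanity check (independent of the project's DatabaseConnection base class, which this diff does not show), the schema and INSERT that CacheConn builds can be exercised with plain sqlite3. The archive table is reduced to a bare id column here and all numbers are made up:

    import sqlite3

    conn = sqlite3.connect(":memory:")
    conn.execute("PRAGMA foreign_keys = ON;")
    conn.execute("CREATE TABLE archive (id INTEGER PRIMARY KEY);")  # minimal stand-in
    conn.execute(
        "CREATE TABLE cache ("
        "id INTEGER PRIMARY KEY, archive_id INT NOT NULL,"
        "total_chunks INT NOT NULL, total_csize INT NOT NULL, total_size INT NOT NULL,"
        "total_unique_chunks INT NOT NULL, unique_csize INT NOT NULL, unique_size INT NOT NULL,"
        "FOREIGN KEY (archive_id) REFERENCES archive (id));"
    )
    archive_id = conn.execute("INSERT INTO archive DEFAULT VALUES;").lastrowid
    conn.execute(
        "INSERT INTO cache (archive_id, total_chunks, total_csize, total_size,"
        " total_unique_chunks, unique_csize, unique_size) VALUES (?, ?, ?, ?, ?, ?, ?);",
        (archive_id, 1200, 900000, 1500000, 800, 600000, 1000000),
    )
    print(conn.execute("SELECT total_size, unique_csize FROM cache WHERE archive_id = ?;",
                       (archive_id,)).fetchone())  # -> (1500000, 600000)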
@@ -3,3 +3,4 @@ from .repo import Repo
 from .archive import Archive
 from .error import Error
 from .label import Label
+from .cache import Cache
src/borgmanager/database/object/cache.py (new file, 27 lines)
@@ -0,0 +1,27 @@
+from . import DBObject
+
+
+class Cache(DBObject):
+    def __init__(self, total_chunks: int, total_csize: int, total_size: int, total_unique_chunks: int,
+                 unique_csize: int, unique_size: int, primary_key=None):
+        super(Cache, self).__init__(primary_key)
+        self.total_chunks = total_chunks
+        self.total_csize = total_csize
+        self.total_size = total_size
+        self.total_unique_chunks = total_unique_chunks
+        self.unique_csize = unique_csize
+        self.unique_size = unique_size
+
+    @classmethod
+    def from_json(cls, json: dict):
+        total_chunks = json['total_chunks']
+        total_csize = json['total_csize']
+        total_size = json['total_size']
+        total_unique_chunks = json['total_unique_chunks']
+        unique_csize = json['unique_csize']
+        unique_size = json['unique_size']
+        return cls(total_chunks, total_csize, total_size, total_unique_chunks, unique_csize, unique_size)
+
+    @classmethod
+    def from_sql(cls, sql: list):
+        pass
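from_sql is left as a stub in this commit. Purely as an illustration (not the author's code), a mapping consistent with the column order CacheConn._create_table defines (id, archive_id, total_chunks, total_csize, total_size, total_unique_chunks, unique_csize, unique_size) might look like:

    # Hypothetical sketch only; assumes a row selected in the column order above.
    @classmethod
    def from_sql(cls, sql: list):
        # sql[1] (archive_id) is dropped because Cache itself does not store it.
        return cls(total_chunks=sql[2], total_csize=sql[3], total_size=sql[4],
                   total_unique_chunks=sql[5], unique_csize=sql[6],
                   unique_size=sql[7], primary_key=sql[0])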