Parse borg JSON instead of raw output
- Create db connection interface
- Create repo and archive database classes

parent a663450c26
commit bb2e997705
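
For reference, the JSON this pipeline now expects on stdin has roughly the following shape. This is a sketch, not real borg output: only the keys the new code actually reads are shown, and every value is a made-up placeholder.

# Sketch of the expected stdin document; all values are hypothetical placeholders.
sample_input = {
    "repository": {
        "id": "0000cafe",                       # stored as Repo.uuid
        "location": "/backups/borg-repo",       # stored as Repo.location
        "last_modified": "2019-06-01T12:00:03"  # parsed with datetime.fromisoformat
    },
    "archive": {
        "id": "0000beef",                       # stored as Archive.uuid
        "name": "home-2019-06-01",              # stored as Archive.name
        "start": "2019-06-01T11:58:00",         # parsed with datetime.fromisoformat
        "end": "2019-06-01T12:00:03"
    }
}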
3  .gitignore  vendored

@@ -1,3 +1,6 @@
+# db files
+*.sqlite
+
 # Byte-compiled / optimized / DLL files
 __pycache__/
 *.py[cod]
32  src/database.py  (deleted; the old main.py imported it as src.database)

@@ -1,32 +0,0 @@
-from logentry import LogEntry
-import sqlite3
-
-
-class Database(object):
-    def __init__(self, path):
-        self.conn = sqlite3.connect(path)
-        self.table_name = "log"
-        self.create_log_table()
-
-    def __del__(self):
-        self.conn.close()
-
-    def commit(self):
-        self.conn.commit()
-
-    def create_log_table(self):
-        query = f"""CREATE TABLE IF NOT EXISTS {self.table_name} (
-                    logID integer PRIMARY KEY,
-                    name text NOT NULL,
-                    fingerprint text NOT NULL,
-                    start text NOT NULL,
-                    end text NOT NULL,
-                    filecount long NOT NULL);"""
-        self.conn.execute(query)
-        self.commit()
-
-    def insert(self, log_entry: LogEntry):
-        query = f"INSERT INTO {self.table_name} (name, fingerprint, start, end, filecount) VALUES(?,?,?,?,?)"
-        self.conn.execute(query, (log_entry.name, log_entry.fingerprint, log_entry.start_time, log_entry.end_time,
-                                  log_entry.file_count))
-        self.commit()
3  src/database/__init__.py  Normal file

@@ -0,0 +1,3 @@
+from .databaseconnection import DatabaseConnection
+from .repo import Repo
+from .archive import Archive
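
Note that the import order here is load-bearing: `repo.py` and `archive.py` pull their dependencies back out of this partially initialised package via `from . import ...`, so `databaseconnection` must be imported first, then `repo`, then `archive`.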
48  src/database/archive.py  Normal file

@@ -0,0 +1,48 @@
+from . import DatabaseConnection, Repo
+from datetime import datetime
+
+
+class Archive(DatabaseConnection):
+    def __init__(self, db_path, repo: Repo, archive_json: dict, table_name: str = "archive"):
+        super().__init__(db_path, table_name)
+
+        self.uuid = archive_json['id']
+        self.repo_id = repo.repo_id
+        self.name = archive_json['name']
+        print(archive_json['start'])
+        self.start = datetime.fromisoformat(archive_json['start'])
+        self.end = datetime.fromisoformat(archive_json['end'])
+
+        self.archive_id = self._insert()
+
+    def _create_table(self):
+        create_statement = f"create table if not exists {self._sql_table}(" \
+                           f"archive_id INTEGER PRIMARY KEY," \
+                           f"uuid INTEGER NOT NULL UNIQUE," \
+                           f"repo_id INTEGER NOT NULL," \
+                           f"name TEXT NOT NULL UNIQUE," \
+                           f"start TEXT TIMESTAMP NULL," \
+                           f"end TEXT TIMESTAMP NULL," \
+                           f"FOREIGN KEY (repo_id) REFERENCES repo (repo_id))"
+        self.sql_execute(create_statement)
+
+    def _exists(self):
+        result = self.sql_execute_one(f"SELECT archive_id FROM {self._sql_table}"
+                                      f" WHERE uuid=?;", (self.uuid,))
+        if result is None:
+            return None
+        else:
+            return result[0]
+
+    def _insert(self) -> int:
+        if self._exists():
+            raise Exception("archive with same uuid already exists")
+        with self.sql_lock:
+            cursor = self.sql_cursor
+            statement = f"INSERT INTO {self._sql_table}" \
+                        f" ('uuid', 'repo_id', 'name', 'start', 'end')" \
+                        f" VALUES (?, ?, ?, ?, ?);"
+            args = (self.uuid, self.repo_id, self.name, self.start, self.end)
+            cursor.execute(statement, args)
+            self.sql_commit()
+            return cursor.lastrowid
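
One subtlety worth knowing: with `detect_types=sqlite3.PARSE_DECLTYPES`, the sqlite3 module keys its converters off the first word of a column's declared type. `start TEXT TIMESTAMP` therefore resolves to `TEXT` and these columns come back as plain strings on read, whereas `repo.last_modified` (declared plain `TIMESTAMP` below) round-trips as a `datetime` object.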
77  src/database/databaseconnection.py  Normal file

@@ -0,0 +1,77 @@
+from abc import ABC, abstractmethod
+from threading import Lock
+import sqlite3
+
+
+class DatabaseConnection(ABC):
+    def __init__(self, db_path, table_name: str):
+        self.__sql_lock = Lock()
+
+        self.__sql_database = sqlite3.connect(db_path,
+                                              detect_types=sqlite3.PARSE_DECLTYPES | sqlite3.PARSE_COLNAMES,
+                                              check_same_thread=False)
+
+        self._sql_table = table_name
+
+        self._create_table()
+        self.sql_commit()
+
+    @property
+    def sql_lock(self):
+        return self.__sql_lock
+
+    @property
+    def sql_cursor(self):
+        return self.__sql_database.cursor()
+
+    def sql_execute(self, statement: str, args: tuple = None):
+        with self.__sql_lock:
+            cursor = self.sql_cursor
+            if args is None:
+                cursor.execute(statement)
+            else:
+                cursor.execute(statement, args)
+
+    def sql_execute_row_id(self, statement: str, args: tuple = None):
+        with self.__sql_lock:
+            cursor = self.sql_cursor
+            if args is None:
+                cursor.execute(statement)
+            else:
+                cursor.execute(statement, args)
+            cursor.execute(f"select last_insert_rowid() from {self._sql_table};")
+            row_id = cursor.fetchone()
+            return row_id[0]
+
+    def sql_execute_all(self, statement: str, args: tuple = None):
+        with self.__sql_lock:
+            cursor = self.sql_cursor
+            if args is None:
+                cursor.execute(statement)
+            else:
+                cursor.execute(statement, args)
+
+            return cursor.fetchall()
+
+    def sql_execute_one(self, statement: str, args: tuple = None):
+        with self.__sql_lock:
+            cursor = self.sql_cursor
+            if args is None:
+                cursor.execute(statement)
+            else:
+                cursor.execute(statement, args)
+
+            return cursor.fetchone()
+
+    def sql_commit(self):
+        self.__sql_database.commit()
+
+    @abstractmethod
+    def _create_table(self):
+        raise NotImplementedError
+
+    def stop(self):
+        with self.__sql_lock:
+            self.sql_commit()
+            self.__sql_database.close()
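
The subclass contract is: implement `_create_table` (the base `__init__` calls it after opening the connection), use the `sql_execute*` helpers for ordinary statements, and take `sql_lock` yourself only when you need the cursor afterwards, e.g. for `lastrowid`. A minimal sketch with a hypothetical `Note` table, not part of this commit:

# Hypothetical DatabaseConnection subclass, for illustration only.
class Note(DatabaseConnection):
    def __init__(self, db_path, text: str, table_name: str = "note"):
        super().__init__(db_path, table_name)  # opens the connection, creates the table
        self.note_id = self._insert(text)

    def _create_table(self):
        self.sql_execute(f"create table if not exists {self._sql_table}("
                         f"note_id INTEGER PRIMARY KEY, text TEXT NOT NULL)")

    def _insert(self, text: str) -> int:
        with self.sql_lock:  # hold the lock so lastrowid is read from our own cursor
            cursor = self.sql_cursor
            cursor.execute(f"INSERT INTO {self._sql_table} (text) VALUES (?);", (text,))
            self.sql_commit()
            return cursor.lastrowid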
51  src/database/repo.py  Normal file

@@ -0,0 +1,51 @@
+from . import DatabaseConnection
+from datetime import datetime
+
+
+class Repo(DatabaseConnection):
+    def __init__(self, db_path, repo_json: dict, table_name: str = 'repo'):
+        super(Repo, self).__init__(db_path, table_name)
+
+        self.repo_id = None
+        self.uuid = repo_json['id']
+        self.location = repo_json['location']
+        self.last_modified = datetime.fromisoformat(repo_json['last_modified'])
+
+        repo_id = self._exists()
+        if repo_id is None:
+            self.repo_id = self._insert()
+        else:
+            self.repo_id = repo_id
+            self._update()
+
+    def _insert(self) -> int:
+        with self.sql_lock:
+            cursor = self.sql_cursor
+            statement = f"INSERT INTO {self._sql_table}" \
+                        f" ('uuid', 'location', 'last_modified')" \
+                        f" VALUES (?, ?, ?);"
+            args = (self.uuid, self.location, self.last_modified)
+            cursor.execute(statement, args)
+            self.sql_commit()
+            return cursor.lastrowid
+
+    def _update(self):
+        self.sql_execute(f"UPDATE {self._sql_table} SET location = ?, last_modified = ? WHERE repo_id = ?;",
+                         (self.location, self.last_modified, self.repo_id))
+        self.sql_commit()
+
+    def _exists(self):
+        result = self.sql_execute_one(f"SELECT repo_id FROM {self._sql_table}"
+                                      f" WHERE uuid=?;", (self.uuid,))
+        if result is None:
+            return None
+        else:
+            return result[0]
+
+    def _create_table(self):
+        create_statement = f"create table if not exists {self._sql_table}(" \
+                           f"repo_id INTEGER PRIMARY KEY," \
+                           f"uuid INTEGER NOT NULL UNIQUE," \
+                           f"location TEXT NOT NULL," \
+                           f"last_modified TIMESTAMP NOT NULL)"
+        self.sql_execute(create_statement)
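
The two classes handle duplicates differently: `Repo.__init__` is effectively an upsert, updating `location` and `last_modified` in place when the uuid already exists, while `Archive._insert` raises on a duplicate uuid, since re-recording the same archive would indicate a logic error upstream.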
58  src/logentry.py  (deleted; superseded by the JSON parsing above)

@@ -1,58 +0,0 @@
-from datetime import datetime
-from math import floor
-import re
-
-
-class LogEntry(object):
-
-    def __init__(self, name, fingerprint, start_time, end_time, duration_string,
-                 file_count):
-        self.name = name
-        self.fingerprint = fingerprint
-        self.start_time = self.get_datetime(start_time)
-        self.end_time = self.get_datetime(end_time)
-        self.duration = self.get_duration(duration_string)
-        self.file_count = file_count
-
-    def print_to_file(self, filename):
-        with open(filename, "w") as file:
-            file.writelines([f"name: {self.name}",
-                             f"fingerprint: {self.fingerprint}",
-                             f"start: {self.start_time.date()} time: {self.start_time.time()}",
-                             f"end: {self.end_time.date()} time: {self.end_time.time()}",
-                             f"duration: {self.duration}",
-                             f"file_count: {self.file_count}"])
-
-    def get_duration(self, duration_string):
-        total_seconds = 0
-        time_strings = [('second', 1), ('minute', 60), ('hour', 3600), ('day', 86400)]
-        for ts, mult in time_strings:
-            total_seconds += self.get_time_unit_string(duration_string, ts, mult)
-
-        return floor(total_seconds)
-
-    @staticmethod
-    def get_time_unit_string(text: str, time_text: str, multiplier: int = 1):
-        substring = re.search(rf"((\d+)\.(\d+)|(\d+))\s({time_text}|{time_text}s)", text)
-        if substring is not None:
-            substring = substring.group().strip(f" {time_text}s")
-            return float(substring) * multiplier
-        else:
-            return 0
-
-    @staticmethod
-    def get_datetime(datetime_string):
-        date_string = re.search(r"....-..-..", datetime_string).group()
-        time_string = re.search(r"..:..:..", datetime_string).group()
-
-        year = int(date_string[0:4])
-        month = int(date_string[5:7])
-        day = int(date_string[8:10])
-
-        hour = int(time_string[0:2])
-        minute = int(time_string[3:5])
-        second = int(time_string[6:8])
-
-        converted_datetime = datetime(year, month, day, hour, minute, second)
-
-        return converted_datetime
49  src/main.py

@@ -1,39 +1,26 @@
 from sys import stdin
-from src.logentry import LogEntry
-from src.database import Database
+from os.path import realpath
+from pathlib import Path
+from database import Repo, Archive
+import json


-def main(input_lines: list):
-    raw_borg_output = input_lines
+def main(input_json: dict, path: Path):
+    db_path = path / 'borg.sqlite'

-    borg_log_entry = create_log_entry(raw_borg_output)
-    borg_log_entry.print_to_file("borg.txt")
-
-    database = Database("borg.db")
-    database.insert(borg_log_entry)
-
-
-def create_log_entry(raw_borg_output: list):
-    attributes = {"Archive name: ": "",
-                  "Archive fingerprint: ": "",
-                  "Time (start): ": "",
-                  "Time (end): ": "",
-                  "Duration: ": "",
-                  "Number of files: ": ""}
-
-    for line in raw_borg_output:
-        for key in attributes:
-            if line.startswith(key):
-                attributes[key] = line[len(key):].strip()
-
-    return LogEntry(attributes["Archive name: "],
-                    attributes["Archive fingerprint: "],
-                    attributes["Time (start): "],
-                    attributes["Time (end): "],
-                    attributes["Duration: "],
-                    attributes["Number of files: "])
+    repo = Repo(db_path, input_json['repository'])
+    log_entry = Archive(db_path, repo, input_json['archive'])


 if __name__ == "__main__":
+    path = Path(realpath(__file__)).parent.parent
     input_text = stdin.readlines()
-    main(input_text)
+
+    try:
+        input_json = json.loads(" ".join(input_text))
+    except json.JSONDecodeError:
+        # todo: output input_text somewhere
+        print("Error parsing json, output:")
+        print("\n".join(input_text))
+        exit(1)
+    main(input_json, path)
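
Since the entry point reads stdin, the script is presumably driven by borg's JSON mode. A hypothetical invocation (repository path, archive name, and source directory are all placeholders):

borg create --json /backups/borg-repo::home-{now} /home/user | python src/main.py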