1
0
Fork 0
This commit is contained in:
Arti Zirk 2021-11-06 20:02:09 +02:00
commit 56e34902bb
3 changed files with 220 additions and 0 deletions

140
.gitignore vendored Normal file
View File

@ -0,0 +1,140 @@
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class
# C extensions
*.so
# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST
# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec
# Installer logs
pip-log.txt
pip-delete-this-directory.txt
# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
*.py,cover
.hypothesis/
.pytest_cache/
cover/
# Translations
*.mo
*.pot
# Django stuff:
*.log
local_settings.py
db.sqlite3
db.sqlite3-journal
# Flask stuff:
instance/
.webassets-cache
# Scrapy stuff:
.scrapy
# Sphinx documentation
docs/_build/
# PyBuilder
.pybuilder/
target/
# Jupyter Notebook
.ipynb_checkpoints
# IPython
profile_default/
ipython_config.py
# pyenv
# For a library or package, you might want to ignore these files since the code is
# intended to run in multiple environments; otherwise, check them in:
# .python-version
# pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
# However, in case of collaboration, if having platform-specific dependencies or dependencies
# having no cross-platform support, pipenv may install dependencies that don't work, or not
# install all needed dependencies.
#Pipfile.lock
# PEP 582; used by e.g. github.com/David-OConnor/pyflow
__pypackages__/
# Celery stuff
celerybeat-schedule
celerybeat.pid
# SageMath parsed files
*.sage.py
# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/
# Spyder project settings
.spyderproject
.spyproject
# Rope project settings
.ropeproject
# mkdocs documentation
/site
# mypy
.mypy_cache/
.dmypy.json
dmypy.json
# Pyre type checker
.pyre/
# pytype static type analyzer
.pytype/
# Cython debug symbols
cython_debug/
.idea
smartmontools

2
README.md Normal file
View File

@ -0,0 +1,2 @@
# Load smartmontools attrlog files to PostgreSQL
For fun, and for pretty Grafana graphs

78
attrlogimport.py Executable file
View File

@ -0,0 +1,78 @@
#!/usr/bin/env python3
import os
import re
import sys
import psycopg
# Extracts the drive name from smartmontools attrlog filenames,
# e.g. "attrlog.WDC_WD40EFRX-XYZ.ata.csv" -> "WDC_WD40EFRX-XYZ".
drive_name_re = re.compile(r'attrlog\.(.*).ata.csv')
# Module-level connection shared by every function below.
# NOTE(review): opens at import time and assumes a reachable local
# PostgreSQL database named "smartmontools" — confirm deployment setup.
conn = psycopg.connect("dbname=smartmontools")
def create_tables():
    """Create the ``device`` and ``attrlog`` tables (one-time schema setup).

    Raises:
        psycopg.errors.DuplicateTable: if the tables already exist.
    """
    # psycopg3 opens a transaction implicitly on the first execute();
    # conn.transaction() commits on success and rolls back on error.
    # The original issued literal "START TRANSACTION"/"COMMIT" strings,
    # which left the connection stuck mid-transaction if a CREATE failed.
    with conn.transaction():
        with conn.cursor() as cur:
            # One row per physical drive, keyed by the name parsed from
            # the attrlog filename.
            cur.execute("""
            CREATE TABLE device (
                id serial primary key,
                name text
            )""")
            # One sample per (time, drive, SMART attribute id); the unique
            # constraint prevents double-importing the same log line.
            cur.execute("""
            CREATE TABLE attrlog (
                time timestamptz not null,
                device_id integer references device(id) not null,
                id smallint not null,
                norm smallint,
                raw bigint,
                unique (time, device_id, id)
            )
            """)
def create_or_find_device_id(name):
    """Return the ``device.id`` for *name*, inserting the row if missing."""
    with conn.cursor() as cur:
        cur.execute("SELECT id FROM device WHERE name = %s", (name,))
        found = cur.fetchone()
        if found is not None:
            return found[0]
        # Not seen before: insert and let PostgreSQL hand back the new id.
        cur.execute("INSERT INTO device (name) VALUES (%s) RETURNING id", (name,))
        return cur.fetchone()[0]
def parse_attrlog_file(filename, device_id=None):
    """Yield ``(time, attr_id, norm, raw, device_id)`` tuples from a
    smartmontools attrlog CSV file.

    Each attrlog line is a timestamp followed by repeating
    ``id;norm;raw`` triples, e.g.::

        2021-11-06 20:02:09;\t5;100;0;\t9;98;12345;

    The timestamp is suffixed with a fixed "+0200" UTC offset.
    NOTE(review): the hard-coded offset assumes the logs were written in
    that timezone — confirm before importing logs from other machines.

    Args:
        filename: path of the attrlog CSV file.
        device_id: opaque value copied into each yielded tuple.

    Yields:
        (str, int, int, int, device_id) tuples, one per attribute sample.
    """
    # with-block fixes the original's file-handle leak (open() never closed).
    with open(filename) as fd:
        # Measure the total size once so progress can be reported.
        file_size = fd.seek(0, os.SEEK_END)
        fd.seek(0)
        while fd.tell() != file_size:
            line = fd.readline()
            parts = [p for p in line.strip().split(";") if p.strip()]
            if not parts:
                # Tolerate blank lines instead of crashing on pop(0).
                continue
            dt = parts.pop(0) + "+0200"
            # Remaining fields come in (attribute id, normalized, raw) triples.
            while parts:
                attr_id = int(parts.pop(0))  # renamed from `id` (shadowed builtin)
                norm = int(parts.pop(0))
                raw = int(parts.pop(0))
                yield dt, attr_id, norm, raw, device_id
            # Cheap sampled progress indicator: only fires when the current
            # byte offset happens to be a multiple of 100.
            if fd.tell() % 100 == 0:
                print(f"{int((fd.tell() / file_size)*100):>5}%", end='\r')
        print()
def import_attrlog_file(filename):
    """Bulk-load one smartmontools attrlog CSV into the ``attrlog`` table.

    The drive name is parsed out of the filename
    ("attrlog.<name>.ata.csv") and resolved to a ``device.id``.

    Raises:
        ValueError: if *filename* does not look like an attrlog file.
    """
    match = drive_name_re.search(filename)
    if match is None:
        # Fail with a clear message instead of the original's opaque
        # AttributeError on NoneType when the filename doesn't match.
        raise ValueError(f"not an attrlog filename: {filename!r}")
    drive_name = match.group(1)
    print(drive_name)
    device_id = create_or_find_device_id(drive_name)
    with conn.cursor() as cur:
        cur.execute("START TRANSACTION")
        # COPY ... FROM STDIN is psycopg3's fast bulk-load path;
        # write_row() streams each parsed tuple without building a list.
        with cur.copy("copy attrlog (time, id, norm, raw, device_id) FROM STDIN") as copy:
            for row in parse_attrlog_file(filename, device_id):
                copy.write_row(row)
        cur.execute("COMMIT")
if __name__ == '__main__':
    # No arguments: one-time schema setup.
    # Otherwise: import every attrlog file named on the command line
    # (generalizes the original, which accepted exactly one file).
    if len(sys.argv) < 2:
        create_tables()
    else:
        for path in sys.argv[1:]:
            import_attrlog_file(path)