feb git sync

This commit is contained in:
2026-02-11 14:00:11 -08:00
parent 94c31f4be3
commit 978f640e96
13 changed files with 571 additions and 139 deletions

View File

@ -0,0 +1,94 @@
import redis
import subprocess
import re
import json
####################################################
### Redis Functions
####################################################
# Module-wide debug switch; when True the stat helpers print their payloads.
debug_output = False
# Redis client used to publish host stats. 172.17.0.1 is the docker bridge
# gateway (the host as seen from inside a container); redis.Redis connects
# lazily on first use.
r = redis.Redis(host='172.17.0.1', port=6379)
def update_stats_redis():
    """Collect current host stats and publish them on the 'host_stats' channel.

    The WebSocket server subscribes to that Redis channel and forwards the
    JSON payload to clients. Always returns True.
    """
    stats = get_host_stats(as_json=False)
    payload = json.dumps(stats)
    r.publish('host_stats', payload)
    if debug_output:
        print("=== Stats Redis Update ===")
        print(json.dumps(stats, indent=2))
    return True
####################################################
### Host Stats Function
####################################################
def get_host_stats(as_json=False):
    """Gather memory/CPU/network/time stats for this host via shell commands.

    Returns a single-element list holding one stats dict; the front end
    renders the keys in insertion order. With as_json=True a JSON string of
    that list is returned instead.

    Fix: the original returned ``jsonify(stats)`` for as_json=True, but Flask
    is not imported in this module, so that path raised NameError; it now
    uses json.dumps.
    """
    # Memory figures from free(1), already human-readable (-h).
    total_memory = run_shell("free -h | grep 'Mem:' | awk '{print $2}'", zero_only=True)
    used_memory = run_shell("free -h | grep 'Mem:' | awk '{print $3}'", zero_only=True)
    free_memory = run_shell("free -h | grep 'Mem:' | awk '{print $4}'", zero_only=True)
    # Everything after "load average: " in uptime's output ("age: " matches
    # the tail of "average: ").
    cpu_load = run_shell("uptime | grep -oP '(?<=age: ).*'", zero_only=True)
    # nano pi command
    #cpu_temp_command = "sensors | grep 'temp1:' | cut -d+ -f 2 | awk '{print $1}'"
    cpu_temp = run_shell("sensors | grep -e Sensor -e Package | cut -d+ -f 2 | awk '{print $1}'", zero_only=True)
    # Strip the degree sign sensors emits and re-append a plain " C" suffix.
    cpu_temp_stripped = re.sub(r'\u00b0C', '', cpu_temp)
    cpu_temp_fixed = f"{cpu_temp_stripped} C"
    # All IPv4 addresses except loopback and docker bridges, as "iface: addr".
    ip_addresses = run_shell("ip -o -4 ad | grep -v -e docker -e 127.0.0.1 | awk '{print $2\": \" $4}'", zero_only=True)
    time_now = run_shell("date +%r", zero_only=True)
    # Build the dict once (the original duplicated the whole literal for the
    # battery case); key order is what the HTML renders.
    entry = {
        "memory_total": total_memory,
        "memory_used": used_memory,
        "memory_free": free_memory,
        "cpu_load": cpu_load,
        "cpu_temp": cpu_temp_fixed,
        "ip_addresses": ip_addresses,
    }
    if check_for_battery():
        # Inserted before "time", matching the original key order.
        entry["battery_level"] = run_shell("acpi | grep Battery | awk {print'$3 \" \" $4'}", zero_only=True)
    entry["time"] = time_now
    stats = [entry]
    if debug_output:
        print("=== Current Host Stats ===")
        print(json.dumps(stats, indent=2))
    return json.dumps(stats) if as_json else stats
def check_for_battery():
    """Return True when acpi(1) reports an attached battery.

    Fix: on hosts with no battery (or no acpi binary) the pipeline
    "acpi | grep Battery" exits non-zero because grep matches nothing, so
    run_shell's check=True raised CalledProcessError and crashed the caller.
    The absence of a battery now simply yields False.
    """
    battery_check_command = "acpi | grep Battery | awk {print'$1'}"
    try:
        battery_check = run_shell(battery_check_command, zero_only=True)
    except subprocess.CalledProcessError:
        return False
    return battery_check == 'Battery'
# subroutine to run a command, return stdout as array unless zero_only then return [0]
def run_shell(cmd, zero_only=False):
    """Run a shell command and return stdout as a list of non-empty lines.

    With zero_only=True return only the first line; when the command produced
    no output, the empty list is returned regardless (preserving the original
    behaviour). Raises subprocess.CalledProcessError on non-zero exit.

    Fix: the bare ``except:`` that silently swallowed every exception around
    the indexing is replaced by an explicit emptiness check.
    """
    result = subprocess.run(cmd, shell=True, check=True,
                            stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    output = result.stdout.decode('utf-8')
    # Drop empty lines (including the trailing one from the final newline).
    output_lines = [line for line in output.split('\n') if line]
    if zero_only and output_lines:
        return output_lines[0]
    return output_lines

306
files/oop_code/SSDObject.py Normal file
View File

@ -0,0 +1,306 @@
###############################################################
# SSDObject.py
# ssd object class function created for ssd monitor
# functions for handling database and shell commands
# class definitions and other class helper functions
###############################################################
import sqlite3
import json, redis
import subprocess
# Path of the sqlite database holding one row per drive ever recorded.
db_path = '/opt/ssd_health/drive_records.db'
# Module-wide behaviour switches (flipped by hand while debugging).
debug_output = False  # print queries, results and scanned devices
show_records = False  # dump all stored drive records during init_db()
suppress_errors = False  # silence warning/error prints
push_redis = False  # publish drive updates to Redis from _update_db()
###############################################################
# Class Definition
#
# Most of the heavy lifting is done on object instantiation
# Object is instantiated with just the dev_id and flavor
# Drive data is collected on instantition and stored to database
###############################################################
class SSDObject:
    """One attached solid-state drive (SATA SSD or NVMe).

    Most of the heavy lifting happens on instantiation: smartctl is run
    against /dev/<dev_id>, the device is classified, serial/model/capacity/
    wear figures are extracted, and the record is immediately persisted via
    _update_db(). Raises TypeError when the device is absent, is a spinning
    HDD, or cannot be read.
    """

    def __str__(self) -> str:
        return f"""Drive at /dev/{self.dev_id} is a '{self.model}' {self.capacity} {self.flavor}, SN: {self.serial}"""

    def __init__(self, dev_id: str):
        self.dev_id = dev_id
        if not check_serial_attached(self.dev_id):
            raise TypeError(f"No device at /dev/{self.dev_id}")
        # Full smartctl JSON blob; everything below is derived from it.
        self._smart_data = return_smartctl(dev_id)
        self.flavor = self._get_flavor()
        if self.flavor == "HDD":
            raise TypeError("Unable to instantiate HDD")
        if self.flavor == "Error":
            raise TypeError("Unable to instantiate storage device")
        self.serial = self._smart_data['serial_number']
        self.model = self._smart_data['model_name']
        self.capacity_bytes = self._smart_data['user_capacity']['bytes']
        self.smart_status = self._smart_data['smart_status']['passed']
        self.capacity = self._get_human_capacity()
        self.sector_size = return_sector_size(self.dev_id)
        self.gb_written = self._get_gbw()
        self._update_db()

    def _get_flavor(self) -> str:
        """Classify the device as "SSD", "NVMe", "HDD" or "Error" from the smartctl JSON."""
        blob = json.dumps(self._smart_data)
        if "rotation_rate" in blob:
            # Fix: the original tested int(rate == 0) — a misplaced
            # parenthesis that only worked via bool coercion; compare the
            # rate itself.
            if int(self._smart_data['rotation_rate']) == 0:
                if not suppress_errors:
                    print(f"Warning - /dev/{self.dev_id} is a weird SSD with a rotation rate of 0")
                return "SSD"
            else:
                return "HDD"
        elif "NVMe" in blob:
            return "NVMe"
        elif "Solid State" in blob:
            return "SSD"
        elif "Unknown USB bridge" in blob:
            return "Error"

    def _get_human_capacity(self) -> str:
        """Render capacity_bytes as a human-readable string, e.g. "512 GiB".

        Fix: thresholds now use >= so an exact power (e.g. precisely 1 GiB)
        maps to "1 GiB" instead of "1024 MiB", and sizes under 1 KiB fall
        back to a byte count instead of returning None.
        """
        size = self.capacity_bytes
        factor = 1024
        units = [
            (factor ** 4, "TiB"),
            (factor ** 3, "GiB"),
            (factor ** 2, "MiB"),
            (factor ** 1, "KiB"),
        ]
        for thresh, suffix in units:
            if size >= thresh:
                return f"{size / thresh:.0f} {suffix}"
        return f"{size} B"

    def _get_gbw(self):
        """Return total GiB written: ATA device statistics for SATA SSDs,
        the NVMe health log for NVMe drives; '' for anything else.

        NOTE(review): per the NVMe spec data_units_written counts units of
        1000 * 512 bytes; multiplying by sector_size here may undercount —
        confirm intent.
        """
        result = ''
        gib_factor = 2 ** 30
        if self.flavor == "SSD":
            data_units_written = return_ls_written(self._smart_data)
            result = round(data_units_written * self.sector_size / gib_factor, 2)
        elif self.flavor == "NVMe":
            data_units_written = float(self._smart_data['nvme_smart_health_information_log']['data_units_written'])
            result = round(data_units_written * self.sector_size / gib_factor, 2)
        return result

    def _update_db(self):
        """Insert or update this drive's row, keyed by serial number.

        NOTE(review): the SQL is built with f-strings; values come from
        smartctl rather than users, but parameterized queries would still be
        safer if query_db ever grows that capability.
        """
        if push_redis:
            update_disk_redis()
        if self.exists():
            drive_query = f"""
            UPDATE drive_records SET gb_written = '{self.gb_written}', smart = '{self.smart_status}' WHERE serial = '{self.serial}';
            """
        else:
            drive_query = f"""
            INSERT INTO drive_records (serial, model, flavor, capacity, gb_written, smart)
            VALUES ('{self.serial}', '{self.model}', '{self.flavor}', '{self.capacity}', '{self.gb_written}', '{self.smart_status}');
            """
        query_db(drive_query)

    def exists(self) -> bool:
        """True when a row with this drive's serial is already in the database."""
        return check_serial_exists(self.serial)

    def attached(self) -> bool:
        """True when lsblk currently lists this drive's serial."""
        return check_serial_attached(self.serial)
########################################
# Other Helper Functions
########################################
# subroutine to run a command, return stdout as array unless zero_only then return [0]
def run_command(cmd, zero_only=False):
    """Run a shell command and return stdout as a list of non-empty lines.

    With zero_only=True return only the first line; when the command produced
    no output, the empty list is returned regardless (preserving the original
    behaviour). Raises subprocess.CalledProcessError on non-zero exit.

    Fix: the bare ``except:`` that silently swallowed every exception around
    the indexing is replaced by an explicit emptiness check.
    """
    result = subprocess.run(cmd, shell=True, check=True,
                            stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    output = result.stdout.decode('utf-8')
    # Drop empty lines (including the trailing one from the final newline).
    output_lines = [line for line in output.split('\n') if line]
    if zero_only and output_lines:
        return output_lines[0]
    return output_lines
def return_smartctl(drive_id):
    """Run smartctl -x against /dev/<drive_id> and return its parsed JSON output."""
    # "|| true" masks smartctl's non-zero warning exit bits so check=True
    # does not raise on drives that merely report warnings.
    cmd = f"/usr/sbin/smartctl --json -x /dev/{drive_id} || true"
    completed = subprocess.run(cmd, shell=True, check=True,
                               stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    return json.loads(completed.stdout.decode("utf-8"))
def return_sector_size(drive_id):
    """Return the drive's logical sector size in bytes, scraped from fdisk -l."""
    cmd = f"fdisk -l /dev/{drive_id} | grep 'Sector size' | awk '{{print $4}}'"
    completed = subprocess.run(cmd, shell=True, check=True,
                               stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    # int() tolerates the trailing newline in the command output.
    return int(completed.stdout.decode("utf-8"))
def return_ls_written(data):
    """Return the "Logical Sectors Written" counter from smartctl's ATA
    device statistics, or None when the entry is absent."""
    stats_pages = data.get("ata_device_statistics", {}).get("pages", [])
    for stats_page in stats_pages:
        for row in stats_page.get("table", []):
            if row.get("name") == "Logical Sectors Written":
                return row.get("value")
# Function to return all drive records in database
def get_all_drive_records():
    """Return every row of drive_records as a JSON array of dicts."""
    rows = query_db("SELECT * FROM drive_records")
    # Column positions match the CREATE TABLE statement in init_db().
    drives = [
        {
            'id': row[0],
            'serial': row[1],
            'model': row[2],
            'flavor': row[3],
            'capacity': row[4],
            'gb_written': round(float(row[5]), 2),
            'smart': row[6],
        }
        for row in rows
    ]
    return json.dumps(drives)
# return attached disks
def list_disk_and_serial():
    """Return attached sd* whole disks as [{'dev_id', 'serial', 'capacity'}, ...].

    Fixes: removed the busy-wait loop ("while 'disk' in run_command(...)"),
    which could never execute — the awk stage strips the TYPE column, so no
    output line ever equals the string 'disk' — and whose body called
    time.sleep although this module never imports time. Also fixed the awk
    typo "$2." (awk lexes "2." as the number 2, so it only worked by
    accident) to a plain $2.
    """
    devices = []
    # NAME,SERIAL,SIZE for every whole disk named sd*, joined as "name,serial,size".
    cmd = "lsblk -o NAME,SERIAL,SIZE,TYPE | grep sd | grep disk | awk '{print $1 \",\" $2 \",\" $3}'"
    try:
        devices = run_command(cmd, zero_only=False)
    except subprocess.CalledProcessError as e:
        print(f"An error occurred: {e.stderr.decode('utf-8')}")
    drives = []
    for device in devices:
        if debug_output:
            print(device)
        parts = device.split(',')
        drives.append({
            "dev_id": parts[0],
            "serial": parts[1],
            "capacity": parts[2],
        })
    if debug_output:
        print(drives)
    return drives
# Function to check if a serial number exists in the database
def check_serial_exists(serial):
    """True when drive_records already holds a row for this serial number."""
    # NOTE(review): serial is interpolated straight into the SQL; fine for
    # smartctl-sourced serials, unsafe for arbitrary caller input.
    lookup = f"SELECT * FROM drive_records WHERE serial='{serial}'"
    if debug_output:
        print(lookup)
    return bool(query_db(lookup))
def check_serial_attached(serial):
    """True when the given string (a serial, or a device name from __init__)
    appears anywhere in lsblk's NAME,SERIAL,SIZE,TYPE listing."""
    # "|| true" keeps run_command's check=True from raising when grep matches nothing.
    probe = f"lsblk -o NAME,SERIAL,SIZE,TYPE | grep {serial} || true"
    return bool(run_command(probe, zero_only=False))
# Function to run SQL Query
def query_db(sql_query):
    """Execute one SQL statement against db_path and return the fetched rows.

    Returns [] on any sqlite error (printed unless suppress_errors).

    Fix: the original called conn.close() inside a "with conn:" block; on
    exit the connection context manager then tried to commit the
    already-closed connection, raising ProgrammingError (a sqlite3.Error) —
    so every call fell through to the except branch and returned []. The
    connection is now closed exactly once, in a finally clause.
    """
    try:
        conn = sqlite3.connect(db_path)
        try:
            cursor = conn.cursor()
            if debug_output:
                print("Executing SQL query:", sql_query)
            cursor.execute(sql_query)
            rows = cursor.fetchall()
            if debug_output:
                print("Query Result:", rows)
            conn.commit()
            return rows
        finally:
            conn.close()
    except sqlite3.Error as e:
        if not suppress_errors:
            print("An error occurred:", e)
        return []
def init_db():
    """Ensure the drive_records table exists, creating it when missing.

    Fix: the error paths returned ``jsonify({...}), 4xx`` although Flask is
    never imported in this module, so any sqlite error became a NameError.
    Errors are now reported via print and a None return — the import-time
    caller ignores the return value anyway.
    """
    print("Checking Database...")
    db_check = "SELECT name FROM sqlite_master WHERE type='table' AND name='drive_records';"
    create_table_command = """
    CREATE TABLE drive_records (
        id INTEGER PRIMARY KEY,
        serial TEXT NOT NULL,
        model TEXT NOT NULL,
        flavor TEXT NOT NULL,
        capacity TEXT NOT NULL,
        gb_written TEXT NOT NULL,
        smart TEXT NOT NULL
    );
    """
    try:
        table_exists = bool(query_db(db_check))
        if debug_output:
            print(f"Database exists: {table_exists}")
        if table_exists:
            print("drive_records exists, skipping db init")
            if debug_output or show_records:
                # Optionally dump everything already stored.
                all_drives = json.loads(get_all_drive_records())
                print("--- Drive Records ---")
                for drive in all_drives:
                    print(f"{drive['model']} - SN: {drive['serial']}")
                print("--- End Records ---")
                print()
        else:
            print("drive_records does not exist, creating")
            try:
                result_init = query_db(create_table_command)
                if debug_output:
                    print(result_init)
                print("Database created - 201")
            except sqlite3.Error as e:
                if not suppress_errors:
                    print(f"error during table initialization: {e}")
                return None
    except sqlite3.Error as e:
        if not suppress_errors:
            print(f"error during table check: {e}")
        return None
####################################################
### Redis Functions
####################################################
# Redis client for publishing drive updates. 172.17.0.1 is the docker bridge
# gateway; redis.Redis connects lazily on first use.
r = redis.Redis(host='172.17.0.1', port=6379)
def update_disk_redis():
    """Publish the currently-attached drives, enriched with their stored DB
    details, on the 'attached_disks' Redis channel."""
    attached = list_disk_and_serial()
    known = json.loads(get_all_drive_records())
    merged = merge_active_with_details(attached, known)
    r.publish('attached_disks', json.dumps(merged))
    if debug_output:
        print("=== Active drives sent to Redis ===")
        print(json.dumps(merged, indent=2))
def merge_active_with_details(active, all_records):
    """Augment each active-drive dict in place with its DB record's extra fields.

    'id' and 'serial' are never copied; drives whose serial has no matching
    record are left untouched. Returns the (mutated) active list.
    """
    by_serial = {record['serial']: record for record in all_records}
    for drive in active:
        match = by_serial.get(drive['serial'])
        if match is None:
            continue
        drive.update({key: val for key, val in match.items()
                      if key not in ('id', 'serial')})
    return active
########################################
# Run init_db when Class file is imported
########################################
# NOTE: import-time side effect — importing this module touches the sqlite DB.
init_db()

99
files/oop_code/ssd_api.py Normal file
View File

@ -0,0 +1,99 @@
from flask import Flask, jsonify, request
import sqlite3
import redis, json, time
from SSDObject import *
from flask_apscheduler import APScheduler
from HostRedis import *
app = Flask(__name__)
# Module behaviour switches.
debug_output = False  # verbose prints in routes and the scanner job
secure_api = False  # when True, bind only to the docker bridge address
push_redis = True  # publish stats/disk updates to Redis
####################################################
### Flask Routes
####################################################
# Route to check if a serial number exists in the database
@app.route('/check', methods=['GET'])
def check():
    """GET /check?serial_lookup=SN — report whether the serial is in the DB."""
    serial_lookup = request.args.get('serial_lookup')
    if debug_output:
        print(f"Serial to check: {serial_lookup}")
    if not serial_lookup:
        return jsonify({'error': 'No serial number provided'}), 400
    return jsonify({
        'serial_number_exists': check_serial_exists(serial_lookup),
        'serial_lookup': serial_lookup,
    })
# Route to get all drive records in JSON format
@app.route('/drives', methods=['GET'])
def index():
    # get_all_drive_records() already returns a JSON string.
    return get_all_drive_records()
# Route to return active drives
@app.route('/list_active_drives', methods=['GET'])
def list_active_drives():
    # Scrapes lsblk on every request; no caching.
    return jsonify(list_disk_and_serial())
# host stats
@app.route('/host_stats', methods=['GET'])
def host_stats():
    # Optionally push the same stats to Redis before serving them over HTTP.
    if push_redis:
        update_stats_redis()
    return jsonify(get_host_stats())
# test route
@app.route('/test', methods=['GET'])
def test():
    # Lists every table name in the sqlite database.
    # NOTE(review): returning a bare list from a route relies on Flask >= 2.2
    # auto-JSON serialization — confirm the deployed Flask version.
    db_check = "SELECT name FROM sqlite_master WHERE type='table';"
    return query_db(db_check)
####################################################
### Flask Scheduler Handlers
####################################################
def ssd_scan():
    """Scheduler job: build an SSDObject for every /dev/sd? disk (which
    refreshes its DB row) and optionally push stats/disk info to Redis."""
    disks = run_command("ls -lo /dev/sd? | awk '{print $9}' | cut -d/ -f3",
                        zero_only=False)
    scanned = []
    for name in disks:
        if debug_output:
            print(name)
        try:
            # Instantiation raises TypeError for HDDs / unreadable devices.
            scanned.append(SSDObject(dev_id=name))
        except (TypeError, ValueError, KeyError, OSError) as e:
            print(f"Error - {e}")
    if debug_output:
        for obj in scanned:
            print(obj)
        print(list_disk_and_serial())
    if push_redis:
        update_stats_redis()
        update_disk_redis()
        time.sleep(0.2)
if __name__ == '__main__':
    # send immediate stats update to redis
    if push_redis:
        update_stats_redis()
    # Flask scheduler for scanner: run ssd_scan once per second.
    # NOTE(review): with debug=True, Flask's reloader can start a second
    # process and hence a second scheduler — confirm this is acceptable.
    scheduler = APScheduler()
    scheduler.add_job(id='ssd_check',
                      func=ssd_scan,
                      trigger='interval',
                      seconds=1)
    scheduler.init_app(app)
    scheduler.start()
    if secure_api:
        # "Secure" mode: listen only on the docker bridge address.
        app.run(debug=True, host='172.17.0.1', port=5000)
    else:
        # NOTE(review): debug=True on 0.0.0.0 exposes the Werkzeug debugger
        # to the whole network — disable debug outside development.
        app.run(debug=True, host='0.0.0.0', port=5000)