node.js dashboard works

This commit is contained in:
2025-11-30 22:17:14 -08:00
parent c6d51f2a49
commit ebbc5ac5cf
22 changed files with 829 additions and 196 deletions

View File

@ -18,6 +18,7 @@ function fetchSSDData() {
return json_decode($result, true); // Decode JSON as an associative array
}
?>
<!DOCTYPE html>
<html lang="en">
@ -28,60 +29,51 @@ function fetchSSDData() {
</head>
<body>
<div class="container">
<button onclick="window.location.reload();" class="title-button"><h1>SSD Health Dashboard</h1></button>
<button onclick="window.location.reload();" class="title-button"><h1>SSD Health Dashboard</h1></button><p>
This lists every disk ever scanned by this device.<br>
<?php
$i=0;
echo "For a live dashboard, please visit <a href=http://{$_SERVER['HTTP_HOST']}:3000/>this link</a>.<p>";
$ssdData = fetchSSDData(); // Fetch data from the API
echo '<div class="group-columns">';
foreach ($ssdData as $ssd):
if ($i % 2 == 0) {
echo '</div><div class="group-columns">';
}
echo <<<EOL
<div class="meter">
<table>
<tr><td align ="right">
Disk ID:
</td><td align ="left">
{$ssd['id']}
</td></tr><tr>
<tr><td align ="right">
Model String:
</td><td align ="left">
{$ssd['model']}
</td></tr><tr>
<tr><td align ="right">
Serial Number:
</td><td align ="left">
{$ssd['serial']}
</td></tr><tr>
<tr><td align ="right">
TB Written:
</td><td align ="left">
{$ssd['TBW']}
</td></tr><tr>
<tr><td align ="right">
Disk Capacity:
</td><td align ="left">
{$ssd['capacity']}
</td></tr><tr>
<tr><td align ="right">
Disk Flavor:
</td><td align ="left">
{$ssd['flavor']}
</td></tr><tr>
<tr><td align ="right">
SMART Result:
</td><td align ="left">
{$ssd['smart']}
</td></tr>
</table>
</div>
EOL;
$i++;
endforeach;
echo '</div>';
// Start the table
echo '<table class="ssd-list" style="border-collapse:collapse;width:100%;">';
// Table header (optional but handy)
echo '<thead>
<tr>
<th>Disk ID</th>
<th>Model String</th>
<th>Serial Number</th>
<th>TB Written</th>
<th>Disk Capacity</th>
<th>Disk Flavor</th>
<th>SMART Result</th>
</tr>
</thead>';
// Table body - one row per SSD
echo '<tbody>';
foreach ($ssdData as $ssd) {
// Escape the values so the page stays safe
$id = htmlspecialchars($ssd['id']);
$model = htmlspecialchars($ssd['model']);
$serial = htmlspecialchars($ssd['serial']);
$tbw = htmlspecialchars($ssd['TBW']);
$cap = htmlspecialchars($ssd['capacity']);
$flavor = htmlspecialchars($ssd['flavor']);
$smart = htmlspecialchars($ssd['smart']);
echo "<tr>
<td>{$id}</td>
<td>{$model}</td>
<td>{$serial}</td>
<td>{$tbw}</td>
<td>{$cap}</td>
<td>{$flavor}</td>
<td>{$smart}</td>
</tr>";
}
echo '</tbody></table>';
?>
</div>
</body>
</html>

View File

@ -24,8 +24,27 @@ body {
margin: 4px 2px;
cursor: pointer;
}
table, th, td {
border: 1px solid black;
border-collapse: collapse;
}
th, td {
padding: 10px;
}
.container {
max-width: 800px;
max-width: 950px;
margin: 0 auto;
padding: 20px;
background-color: #34495e; /* Darker background for container */
border-radius: 8px;
box-shadow: 0 2px 4px rgba(0, 0, 0, 0.3); /* Slightly darker shadow */
margin-top: 20px;
}
.container-small {
max-width: 550px;
margin: 0 auto;
padding: 20px;
background-color: #34495e; /* Darker background for container */

View File

@ -1,14 +1,95 @@
from flask import Flask, jsonify, request
import sqlite3
import json
import redis, json, time
import os
import subprocess
import re
app = Flask(__name__)
db_path = '/opt/ssd_health/drive_records.db'
debug_output = False
secure_api = True
####################################################
### Redis Functions
####################################################
r = redis.Redis(host='172.17.0.1', port=6379)
def update_disk_redis():
    """Publish the currently attached disks to the 'attached_disks' channel.

    Active rows are enriched with their matching drive_records fields
    before being serialized, so the WS dashboard gets one merged object
    per disk.
    """
    attached = get_active_drive_records(as_json=False)
    records = get_all_drive_records(as_json=False)
    payload = merge_active_with_details(attached, records)
    r.publish('attached_disks', json.dumps(payload))
    if debug_output:
        print("=== Active drives sent to Redis ===")
        print(json.dumps(payload, indent=2))
def update_stats_redis():
    """Publish current host stats to the 'host_stats' Redis channel.

    The WS server listening on that channel relays the payload to the
    browser dashboard. Always returns True.
    """
    stats = get_host_stats(as_json=False)
    r.publish('host_stats', json.dumps(stats))
    if debug_output:
        print("=== Stats Redis Update ===")
        print(json.dumps(stats, indent=2))
    return True
def merge_active_with_details(active, all_records):
    """Enrich each active-drive dict with fields from its DB record.

    Records are matched on the 'serial' key; the record's 'id' and
    'serial' fields are never copied over. Mutates and returns `active`.
    """
    details_by_serial = {record['serial']: record for record in all_records}
    for drive in active:
        match = details_by_serial.get(drive['serial'])
        if match is None:
            continue
        for key, value in match.items():
            if key not in ('id', 'serial'):
                drive[key] = value
    return active
####################################################
### Host Stats Function
####################################################
def get_host_stats(as_json=False):
    """Gather host memory, CPU, network and clock stats by shelling out.

    Returns a one-element list containing a stats dict, or a Flask JSON
    response when as_json=True. The key order here matters: the dashboard
    renders the fields in this order.
    """
    total_memory_command = "free -h | grep 'Mem:' | awk '{print $2}'"
    total_memory = run_command(total_memory_command, zero_only=True)
    used_memory_command = "free -h | grep 'Mem:' | awk '{print $3}'"
    used_memory = run_command(used_memory_command, zero_only=True)
    free_memory_command = "free -h | grep 'Mem:' | awk '{print $4}'"
    free_memory = run_command(free_memory_command, zero_only=True)
    # Everything after "load average: " in the uptime output
    cpu_load_command = "uptime | grep -oP '(?<=age: ).*'"
    cpu_load = run_command(cpu_load_command, zero_only=True)
    # nano pi command
    #cpu_temp_command = "sensors | grep 'temp1:' | cut -d+ -f 2 | awk '{print $1}'"
    cpu_temp_command = "sensors | grep Package | cut -d+ -f 2 | awk '{print $1}'"
    cpu_temp = run_command(cpu_temp_command, zero_only=True)
    # Strip the Unicode degree sign + C and re-append a plain " C"
    cpu_temp_stripped = re.sub(r'\u00b0C', '', cpu_temp)
    cpu_temp_fixed = f"{cpu_temp_stripped} C"
    # "iface: addr/prefix" pairs for ethernet/tunnel interfaces only
    ip_address_command = "ip -o -4 ad | grep -e eth -e tun | awk '{print $2\": \" $4}'"
    ip_addresses = run_command(ip_address_command, zero_only=True)
    time_now_command = "date +%r"
    time_now = run_command(time_now_command, zero_only=True)
    # Redis stores in this order, or at least the html renders it in this order
    stats = [{
        "memory_total": total_memory,
        "memory_used": used_memory,
        "memory_free": free_memory,
        "cpu_load": cpu_load,
        "cpu_temp": cpu_temp_fixed,
        "ip_addresses": ip_addresses,
        "time": time_now
    }]
    if debug_output:
        print("=== Current Host Stats ===")
        print(json.dumps(stats, indent=2))
    return jsonify(stats) if as_json else stats
####################################################
### db functions
####################################################
# init db function
def init_db():
print("Initializing DB")
print("Checking Database...")
db_check = "SELECT name FROM sqlite_master WHERE type='table' AND name='drive_records';"
create_table_command = """
CREATE TABLE drive_records (
@ -21,26 +102,37 @@ def init_db():
smart TEXT NOT NULL
);
"""
active_disks_command = """
CREATE TABLE active_disks (
id INTEGER PRIMARY KEY,
name TEXT,
serial TEXT,
size TEXT
);
"""
# this code deletes the db file if 0 bytes
if os.path.exists(db_path) and os.path.getsize(db_path) == 0:
try:
print("Database is 0 bytes, deleting.")
print("Database file exists and is 0 bytes, deleting.")
os.remove(db_path)
except Exception as e:
print(f"error during file deletion - 405: {e}")
return jsonify({'error during file deletion': e}), 405
try:
result = bool(query_db(db_check))
print(result)
# Check if any tables were found
if result:
print(result)
print("drive_records exists - 205")
else:
print("drive_records does not exist, creating")
try:
result_init = query_db(create_table_command)
result_active = query_db(active_disks_command)
print(result_init)
print("Database created - 201")
print(result_active)
print("Database created - 201")
except sqlite3.Error as e:
print(f"error during table initialization: {e}")
return jsonify({'error during table initialization - 401': e}), 401
@ -54,16 +146,56 @@ def query_db(sql_query):
try:
with sqlite3.connect(db_path) as conn:
cursor = conn.cursor()
print("Executing SQL query:", sql_query)
if debug_output:
print("Executing SQL query:", sql_query)
cursor.execute(sql_query)
rows = cursor.fetchall()
if debug_output:
print("Query Result:", rows)
return rows
except sqlite3.Error as e:
print("An error occurred:", e)
return []
# Near-duplicate of query_db(), kept deliberately: this variant (borrowed
# from the VM project) accepts bound query parameters, which query_db()
# does not.
def query_database(query_string, query_params=None):
    """Run an SQL statement (optionally parameterized) against the app DB.

    Unlike query_db(), this accepts bound parameters, keeping caller input
    out of the SQL text. Commits after execution so INSERT/UPDATE/DELETE
    statements persist, and always closes the connection — the original
    leaked the connection when execute() raised.

    Returns the fetched rows as a list of tuples.
    """
    if debug_output:
        print(query_string, query_params)
    # Connect to the SQLite database (or create it if it doesn't exist)
    conn = sqlite3.connect(db_path)
    try:
        cursor = conn.cursor()
        if query_params is not None:
            cursor.execute(query_string, query_params)
        else:
            cursor.execute(query_string)
        result = cursor.fetchall()
        if debug_output:
            print(result)
        # Commit so write statements persist before the connection closes
        conn.commit()
        return result
    finally:
        conn.close()
####################################################
### Other Helper Functions
####################################################
# Run a shell command and return its stdout as a list of non-empty lines;
# with zero_only=True return just the first line. Raises CalledProcessError
# on a non-zero exit status (check=True).
def run_command(cmd, zero_only=False):
    completed = subprocess.run(
        cmd,
        shell=True,
        check=True,
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE,
    )
    # Bytes -> str, then keep only non-empty lines
    text = completed.stdout.decode('utf-8')
    lines = [ln for ln in text.split('\n') if ln]
    return lines[0] if zero_only else lines
# Function to return all drive records in database
def get_all_drive_records():
def get_all_drive_records(as_json=True):
get_all_drives = "SELECT * FROM drive_records"
rows = query_db(get_all_drives)
drives = []
@ -78,24 +210,45 @@ def get_all_drive_records():
'smart': row[6]
}
drives.append(drive)
return jsonify(drives)
return jsonify(drives) if as_json else drives
# Return every row of the active_disks table as a list of dicts,
# or as a Flask JSON response when as_json=True (the default).
def get_active_drive_records(as_json=True):
    get_active_drives = "SELECT * FROM active_disks"
    columns = ('id', 'name', 'serial', 'size')
    drives = [dict(zip(columns, row)) for row in query_db(get_active_drives)]
    return jsonify(drives) if as_json else drives
# Function to check if a serial number exists in the database
def check_serial_exists(serial):
    """Return True when drive_records contains a row for `serial`.

    Uses a bound parameter via query_database() — the original built the
    SQL with an f-string, which was open to SQL injection through the
    serial value.
    """
    serial_check = "SELECT * FROM drive_records WHERE serial = ?"
    if debug_output:
        print(serial_check, serial)
    return bool(query_database(serial_check, (serial,)))
####################################################
### Flask Routes
####################################################
# Route to check if a serial number exists in the database
@app.route('/check', methods=['GET'])
def check():
    """GET /check?serial_lookup=<serial> -> JSON existence flag.

    Returns 400 when serial_lookup is missing or empty. The diff left the
    old unconditional print and old return alongside the new lines; this
    keeps the newer behavior (debug-gated print, serial echoed back).
    """
    serial_lookup = request.args.get('serial_lookup')
    if debug_output:
        print(f"Serial to check: {serial_lookup}")
    if not serial_lookup:
        return jsonify({'error': 'No serial number provided'}), 400
    exists = check_serial_exists(serial_lookup)
    # Echo the serial back so the caller can correlate the answer
    return jsonify({'serial_number_exists': exists, 'serial_lookup': serial_lookup})
# Route to get all drive records in JSON format
@app.route('/drives', methods=['GET'])
@ -115,7 +268,8 @@ def add_drive():
if None in [serial, model, flavor, capacity, TBW, smart]:
return jsonify({'error': 'Missing required query parameter(s)'}), 400
add_drive_query = f"INSERT INTO drive_records (serial, model, flavor, capacity, TBW, smart) VALUES ('{serial}', '{model}', '{flavor}', '{capacity}', '{TBW}', '{smart}'); "
print(add_drive_query)
if debug_output:
print(add_drive_query)
return jsonify(query_db(add_drive_query))
# Route to update drive in database
@ -128,18 +282,95 @@ def update_drive():
if None in [serial, TBW, smart]:
return jsonify({'error': 'Missing required query parameter(s)'}), 400
update_drive_query = f"UPDATE drive_records SET TBW = '{TBW}', smart = '{smart}' WHERE serial = '{serial}';"
print(update_drive_query)
if debug_output:
print(update_drive_query)
return jsonify(query_db(update_drive_query))
# Route to return active drives
@app.route('/list_active_drives', methods=['GET'])
def list_active_drives():
    """Return the active_disks table as a JSON response."""
    return get_active_drive_records(as_json=True)
# List whole 'sd*' disks as "name,serial,size" strings, e.g. "sda,ABC1,1.8T"
def list_disk_and_serial():
    """Return a sorted list of attached sd* disks from lsblk output.

    Each entry is "NAME,SERIAL,SIZE". On command failure the error is
    printed and an empty list is returned.
    """
    devices = []
    # NAME,SERIAL,SIZE for whole disks only (grep disk drops partitions).
    # The original awk printed "$2." — awk lexes '2.' as the number 2.0, so
    # it happened to mean field 2; use plain $2 to say what is meant.
    cmd = "lsblk -o NAME,SERIAL,SIZE,TYPE | grep sd | grep disk | awk '{print $1 \",\" $2 \",\" $3}'"
    # try to run the command, should not fail
    try:
        devices = run_command(cmd)
    except subprocess.CalledProcessError as e:
        print(f"An error occurred: {e.stderr.decode('utf-8')}")
    # Drop empty entries and return a stable, sorted list
    return sorted([item for item in devices if item])
# Route to refresh active drives
@app.route('/refresh_active_drives', methods=['GET'])
def refresh_active_drives(): # List of items to be inserted; each item is a tuple (name, serial, size)
    """Sync the active_disks table with the disks currently attached.

    Inserts newly attached disks (serial re-read via hdparm before
    storing — presumably because lsblk's SERIAL column is unreliable
    behind some adapters; TODO confirm) and deletes rows for disks no
    longer present. Publishes the updated list to Redis on each change.
    """
    current_items = list_disk_and_serial()
    # Loop through the list and insert items, checking for duplicates based on 'serial'
    for item in current_items:
        item = item.split(',')  # -> [name, serial, size]
        # Check if the serial already exists in the database
        existing_item = query_database('SELECT * FROM active_disks WHERE name = ?', (item[0],))
        if not existing_item:
            # If no duplicate is found, insert the new item
            if debug_output:
                print(f"Disk /dev/{item[0]} inserted, updating database")
            # Re-read the serial straight from the device before storing it
            verified_serial = run_command(f"hdparm -I /dev/{item[0]} | grep 'Serial\ Number' | cut -d: -f2 | awk '{{print $1}}' ", zero_only=True)
            if debug_output:
                print(f"Verified serial number through smartctl: {verified_serial}")
            item[1] = verified_serial
            query_database('INSERT INTO active_disks (name, serial, size) VALUES (?, ?, ?)', item)
            update_disk_redis()
    # Remove items from the database that are not in the current list of items
    # first grab all the disks in the database
    for row in query_database('SELECT name, serial FROM active_disks'):
        drive_object = ""
        drive_serial = ""
        # the drive is missing until proven present, let's see if it exists
        not_found = True
        # load the currently attached drives in another array
        for item in current_items:
            item = item.split(',')
            # this is where the drive is found, set this to false
            if row[0] == item[0]:
                drive_object = item[0]
                drive_serial = item[1]
                not_found = False
        # if the drive was not found in the above loop, it's missing, remove it and loop to the next record
        if not_found:
            # NOTE(review): drive_object/drive_serial are only assigned when a
            # disk IS found, so this debug line prints blanks for a missing
            # disk; row[0]/row[1] look like the intended values — confirm.
            target_name = row[0].split(',')  # row[0] has no comma; yields [name]
            if debug_output:
                print(f"Deleting disk /dev/{drive_object} - serial {drive_serial}")
            query_database('DELETE FROM active_disks WHERE name = ?', target_name)
            update_disk_redis()
    update_disk_redis()
    update_stats_redis()
    return jsonify({"function": "update_disk_database"})
# host stats
@app.route('/host_stats', methods=['GET'])
def host_stats():
    """Push fresh stats to the Redis channel, then return them as JSON."""
    update_stats_redis()
    current = get_host_stats(as_json=False)
    return jsonify(current)
# test route
@app.route('/test', methods=['GET'])
def test():
    """Diagnostic route: list every table name in the SQLite schema.

    The diff left the superseded drive_records-only query as a dead store
    above the real one; only the broader all-tables query is kept.
    """
    db_check = "SELECT name FROM sqlite_master WHERE type='table';"
    return query_db(db_check)
if __name__ == '__main__':
    result = init_db()
    print(result)
    # secure_api binds only to the docker bridge address; otherwise listen on
    # all interfaces. The diff left an unconditional app.run() above the
    # conditional, which would have started the server before the check ran.
    # NOTE(review): debug=True enables the Werkzeug debugger — do not expose
    # this on an untrusted network.
    if secure_api:
        app.run(debug=True, host='172.17.0.1', port=5000)
    else:
        app.run(debug=True, host='0.0.0.0', port=5000)

18
files/ws_node/Dockerfile Normal file
View File

@ -0,0 +1,18 @@
# Use an official Node runtime
FROM node:20-alpine

# Create app directory
WORKDIR /usr/src/app

# Install dependencies first so Docker caches this layer across source edits.
# npm's --only=production flag is deprecated; --omit=dev is the npm 8+ form.
COPY package.json .
RUN npm install --omit=dev

# Copy app source
COPY . .

# Expose the port that the app listens on
EXPOSE 3000

# Start the server
CMD ["node", "server.js"]

View File

@ -0,0 +1,13 @@
{
"name": "redis-table-demo",
"version": "1.0.0",
"main": "server.js",
"scripts": {
"start": "node server.js"
},
"dependencies": {
"express": "^4.18.2",
"socket.io": "^4.7.2",
"redis": "^4.6.7"
}
}

View File

@ -0,0 +1,90 @@
<!doctype html>
<html lang="en">
<head>
<meta charset="utf-8">
<title>Attached Disks - Live Table</title>
<link rel="stylesheet" href="styles.css">
</head>
<body>
<div class="container">
<h2>Attached Disks</h2>
<div id="disk_table" class="column">Connecting…</div>
</div>
<div class="container">
<h2>System Stats</h2>
<div id="host_stats" class="column">Connecting…</div>
</div>
<!-- Socket.IO client library -->
<script src="socket.io/socket.io.js"></script>
<script>
const socket = io();

// listen for redis updates, render and error handle
socket.on('attached_disks', renderDiskTable);
socket.on('host_stats', renderStatsTable);
socket.on('connect_error', err => {
  safeSetText('disk_table', `Could not connect to server - ${err.message}`);
  safeSetText('host_stats', `Could not connect to server - ${err.message}`);
});
// Socket.IO v3+ emits 'reconnect' on the Manager (socket.io), not on the
// Socket itself — socket.on('reconnect', ...) never fires with v4.
socket.io.on('reconnect', attempt => {
  safeSetText('disk_table', `Re-connected (attempt ${attempt})`);
  safeSetText('host_stats', `Re-connected (attempt ${attempt})`);
});
// Write txt into element #id; silently skip ids that are not in the DOM.
function safeSetText(id, txt) {
  const target = document.getElementById(id);
  if (target !== null) {
    target.textContent = txt;
  }
}
// Per-channel render entry points: each targets its own container and
// supplies the empty-state message for that panel.
function renderDiskTable(data) {
  renderGenericTable('disk_table', data, 'No Disks found');
}
function renderStatsTable(data) {
  renderGenericTable('host_stats', data, 'No Stats available');
}
// Fill containerId with a table built from data, or with emptyMsg when
// data is not a non-empty array.
function renderGenericTable(containerId, data, emptyMsg) {
  const host = document.getElementById(containerId);
  const hasRows = Array.isArray(data) && data.length > 0;
  if (!hasRows) {
    host.textContent = emptyMsg;
    return;
  }
  // Build the new table before clearing, so a render error leaves the
  // previous contents in place.
  const table = renderTable(data);
  host.innerHTML = '';
  host.appendChild(table);
}
// Build a <table> element from an array of objects.
// Column set and order are inferred from the keys of the first row.
function renderTable(data) {
  const cols = Object.keys(data[0]);
  const table = document.createElement('table');

  // Header: capitalised key names
  const headerRow = table.createTHead().insertRow();
  cols.forEach((col) => {
    const th = document.createElement('th');
    th.textContent = col.charAt(0).toUpperCase() + col.slice(1);
    headerRow.appendChild(th);
  });

  // Body: one row per item; textContent keeps untrusted values inert
  const tbody = table.createTBody();
  data.forEach((item) => {
    const tr = tbody.insertRow();
    cols.forEach((col) => {
      // Without ?? '', a row missing a key renders the string "undefined"
      tr.insertCell().textContent = item[col] ?? '';
    });
  });
  return table;
}
</script>
</body>
</html>

View File

@ -0,0 +1,111 @@
/* styles.css */
body {
font-family: Arial, sans-serif;
margin: 0;
padding: 0;
background-color: #2c3e50; /* Dark background color */
color: #bdc3c7; /* Dimmer text color */
}
.hidden-info {
display: none;
}
.title-button {
background-color: #34495e;
border: none;
color: white;
padding: 15px 32px;
text-align: center;
text-decoration: none;
display: inline-block;
font-size: 16px;
margin: 4px 2px;
cursor: pointer;
}
table, th, td {
border: 1px solid black;
border-collapse: collapse;
}
th, td {
padding: 10px;
}
.container {
max-width: 950px;
margin: 0 auto;
padding: 20px;
background-color: #34495e; /* Darker background for container */
border-radius: 8px;
box-shadow: 0 2px 4px rgba(0, 0, 0, 0.3); /* Slightly darker shadow */
margin-top: 20px;
}
.container-small {
max-width: 550px;
margin: 0 auto;
padding: 20px;
background-color: #34495e; /* Darker background for container */
border-radius: 8px;
box-shadow: 0 2px 4px rgba(0, 0, 0, 0.3); /* Slightly darker shadow */
margin-top: 20px;
}
h1, h2, h3, h4 {
color: #bdc3c7; /* Dimmer text color */
}
ul {
list-style-type: none;
padding: 0;
}
li {
margin-bottom: 10px;
color: #bdc3c7; /* Dimmer text color */
}
.group-columns {
display: flex;
}
.group-rows {
display: flex;
flex-wrap: wrap;
justify-content: flex-start; /* Left justification */
margin-top: 10px;
}
.group-column {
flex: 0 0 calc(33% - 10px); /* Adjust width of each column */
}
.column {
flex: 1;
padding: 0 10px; /* Adjust spacing between columns */
}
.subcolumn {
margin-left: 10px;
}
.grid {
display: flex;
flex-wrap: wrap;
justify-content: space-between;
margin-top: 5px;
}
.meter {
width: calc(90% - 5px);
max-width: calc(45% - 5px);
margin-bottom: 5px;
border: 1px solid #7f8c8d; /* Light border color */
border-radius: 5px;
padding: 5px;
text-align: center;
background-color: #2c3e50; /* Dark background for meter */
}

54
files/ws_node/server.js Normal file
View File

@ -0,0 +1,54 @@
// Express + Socket.IO server that relays Redis pub/sub messages
// ('attached_disks', 'host_stats') from the Flask API to browser clients.
const http = require('http');
const express = require('express');
const { createClient } = require('redis');
const { Server } = require('socket.io');

const app = express();
const server = http.createServer(app);
const io = new Server(server);

// Serve static files (index.html)
app.use(express.static('public'));

// ---------- Redis subscriber ----------
// Base client pointed at the docker bridge address; the subscription
// itself runs on a duplicate of this client (see below).
const redisClient = createClient({
  url: 'redis://172.17.0.1:6379'
});
redisClient.on('error', err => console.error('Redis error', err));
// Connect, subscribe to both dashboard channels, and fan every incoming
// message out to all connected websocket clients.
(async () => {
  await redisClient.connect();
  // Pub/sub needs a dedicated connection, so duplicate the base client
  const sub = redisClient.duplicate();
  await sub.connect();
  await sub.subscribe(
    ['attached_disks', 'host_stats'],
    (message, channel) => { // single handler for both channels
      // Each message is a JSON string published by the Flask app
      let payload;
      try {
        payload = JSON.parse(message);
      } catch (e) {
        console.error(`Failed to parse ${channel}`, e);
        return;
      }
      // Rebroadcast under the channel name; the page listens per-channel
      io.emit(channel, payload);
    }
  );
  sub.on('error', err => console.error('Subscriber error', err));
})().catch(err => {
  // The original IIFE's promise was floating: a failed Redis connection
  // surfaced as an unhandled rejection instead of a logged error.
  console.error('Redis subscriber setup failed', err);
});
// ---------- Socket.io ----------
// Connection logging only; all data flows via the Redis fan-out above.
io.on('connection', socket => {
  console.log('client connected:', socket.id);
  // Optional: send the current state on connect if you keep it cached
});

// ---------- Start ----------
// PORT can be overridden via the environment (e.g. docker run -e PORT=...)
const PORT = process.env.PORT || 3000;
server.listen(PORT, () => {
  console.log(`Server listening on http://localhost:${PORT}`);
});