cosmostat has working drive health dashboard
This commit is contained in:
66
files/docker/apis/StorageSummary/Helpers.py
Normal file
66
files/docker/apis/StorageSummary/Helpers.py
Normal file
@ -0,0 +1,66 @@
|
||||
|
||||
import base64, hashlib
|
||||
import subprocess
|
||||
import ipaddress
|
||||
from typing import Dict, Any, List
|
||||
|
||||
# pickle subroutines
|
||||
import pickle
|
||||
from pathlib import Path
|
||||
|
||||
print("Importing Helpers")
|
||||
# subnet helper app
def is_ip_in_subnets(ip, subnet):
    """Return True if *ip* falls inside *subnet*, False otherwise.

    Generalized from the original IPv4-only implementation: ip_address()
    and ip_network() auto-detect IPv4 vs IPv6, so IPv6 inputs now work.
    A version mismatch (e.g. IPv4 address vs IPv6 network) is simply
    "not contained" per ipaddress membership semantics, and any string
    that fails to parse yields False instead of raising.
    """
    try:
        # Membership test replaces the old if/return True/return False chain.
        return ipaddress.ip_address(ip) in ipaddress.ip_network(subnet)
    except ValueError:
        # Invalid IP or subnet string -- treat as "not in subnet".
        return False
|
||||
|
||||
# subroutine to run a command, return stdout as array unless zero_only then return [0]
def run_command(cmd, zero_only=False, use_shell=True, req_check=True):
    """Run *cmd* and return its stdout as a list of non-empty lines.

    Parameters:
        cmd: command to run -- a string when use_shell is True, a list
            of argv tokens otherwise.
        zero_only: when True, return just the first output line; if the
            command produced no output, the empty list is returned
            (matching the original fallback behavior).
        use_shell: passed through as subprocess shell=.
            NOTE(security): shell=True with untrusted input allows
            command injection; visible callers only pass fixed strings,
            but prefer use_shell=False with an argv list.
        req_check: passed through as check=; a non-zero exit status then
            raises subprocess.CalledProcessError.
    """
    # Run the command and capture the output.
    result = subprocess.run(cmd, shell=use_shell, check=req_check,
                            stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    # Decode the byte output and keep only non-empty lines.
    output = result.stdout.decode('utf-8')
    output_lines = [line for line in output.split('\n') if line]
    # Explicit emptiness test replaces the original bare `except:` that
    # silently caught everything, not just the IndexError it meant to.
    if zero_only and output_lines:
        return output_lines[0]
    return output_lines
|
||||
|
||||
def short_uuid(value: str, length=8):
    """Deterministic short identifier: first *length* hex chars of MD5(*value*).

    MD5 is used only as a stable, non-cryptographic fingerprint for
    naming clients -- not for security.
    """
    digest = hashlib.md5(value.encode('utf-8')).hexdigest()
    return digest[:length]
|
||||
|
||||
# test subroutine
def get_hostname():
    """Return this machine's hostname by shelling out to `hostname`."""
    return run_command("hostname", zero_only=True)
|
||||
|
||||
|
||||
# pickle helpers
|
||||
# Where the pickled state will live
|
||||
STATE_FILE = Path(__file__).parent / "storage_api_state.pkl"
|
||||
|
||||
def save_state(obj: object, path: Path | str = STATE_FILE) -> None:
|
||||
path = Path(path)
|
||||
path.parent.mkdir(parents=True, exist_ok=True)
|
||||
with path.open("wb") as f:
|
||||
pickle.dump(obj, f, protocol=pickle.HIGHEST_PROTOCOL)
|
||||
print("Pickle saved")
|
||||
|
||||
def load_state(path: Path | str = STATE_FILE) -> object | None:
|
||||
path = Path(path)
|
||||
if path.is_file():
|
||||
with path.open("rb") as f:
|
||||
return pickle.load(f)
|
||||
return None
|
||||
176
files/docker/apis/StorageSummary/Routes.py
Normal file
176
files/docker/apis/StorageSummary/Routes.py
Normal file
@ -0,0 +1,176 @@
|
||||
# flask routes for storage summary API
|
||||
|
||||
# import external libraries
|
||||
from flask import Flask, jsonify, request, Response, abort
|
||||
#from flask_apscheduler import APScheduler
|
||||
from typing import Dict, Union
|
||||
import json, time, redis, yaml, datetime
|
||||
import secrets, string
|
||||
import requests
|
||||
from requests import RequestException, Response
|
||||
|
||||
# import needed Class Libraries
|
||||
from Storage import *
|
||||
from Helpers import *
|
||||
# Restore the server state saved by a previous run; fall back to a
# fresh DriveHealthServer when no pickled state exists yet.
SummaryServer = load_state()
if SummaryServer is None:
    SummaryServer = DriveHealthServer(get_hostname())
    print("Created new SummaryServer")

# declare flask apps
app = Flask(__name__)
#scheduler = APScheduler()
|
||||
|
||||
|
||||
# Flask routes
|
||||
|
||||
# client update
@app.route('/storage_client_update', methods=['POST'])
def storage_client_update():
    """Accept a client's drive report (JSON POST) and add/update it."""
    payload = request.get_json(silent=False)
    if payload is None:
        abort(400, description="Request body must be valid JSON")
    # Record the caller's address so the client can be identified later.
    payload["IP Address"] = request.remote_addr
    # offload processing to helper
    return jsonify(client_update_helper(payload)), 200
|
||||
|
||||
# remove client
@app.route('/storage_client_delete', methods=['POST'])
def storage_client_delete():
    """Remove one or more clients named in the JSON POST body."""
    payload = request.get_json(silent=False)
    print(payload)
    if payload is None:
        abort(400, description="Request body must be valid JSON")
    removal_result = client_remove_helper(payload)
    print(removal_result)
    return jsonify(removal_result)
|
||||
|
||||
|
||||
# client details
@app.route('/client_details', methods=['GET'])
def client_details():
    """Return full per-drive details for every registered client."""
    return jsonify([client.get_details() for client in SummaryServer.clients])
|
||||
|
||||
# client summary
@app.route('/client_summary', methods=['GET'])
def client_summary():
    """Return the short summary for every registered client."""
    return jsonify([client.get_summary() for client in SummaryServer.clients])
|
||||
|
||||
# client brief summary
@app.route('/brief_summary', methods=['GET'])
def brief_summary():
    """Return one human-readable line per registered client."""
    lines = [
        f"{client.name} at {client.ip} - {len(client.drives)} drives"
        for client in SummaryServer.clients
    ]
    return jsonify({
        "message": "Brief Summary",
        "result": lines
    })
|
||||
|
||||
# test route
@app.route('/test', methods=['GET'])
def test_route():
    """Liveness check: greeting, hostname, and the server object's string form."""
    body = {
        "message": "Hello world!",
        "hostname": get_hostname(),
        "DriveHealthServer": f"{SummaryServer}"
    }
    return jsonify(body)
|
||||
|
||||
|
||||
# test route 2
# NOTE(review): duplicates /test exactly -- consider consolidating.
@app.route('/test_storage_summary', methods=['GET'])
def test_storage_summary():
    """Liveness check (same payload as /test)."""
    body = {
        "message": "Hello world!",
        "hostname": get_hostname(),
        "DriveHealthServer": f"{SummaryServer}"
    }
    return jsonify(body)
|
||||
|
||||
|
||||
|
||||
|
||||
# Route Helpers
|
||||
|
||||
# helper function for client_update route
# handles the submission data from the flask route
def client_update_helper(payload: dict):
    """Validate an update payload, then add/update the client it describes."""
    required_keys = {"hostname", "API_KEY", "drives", "IP Address"}
    # check json structure and API key
    checked = post_processor(payload, required_keys)
    # add or update the client
    return client_processor(checked)
|
||||
|
||||
# handle submission from remove route
def client_remove_helper(payload: dict):
    """Validate a removal payload, then purge the clients it names."""
    required_keys = {"remove_hosts", "API_KEY"}
    # check the submission data
    checked = post_processor(payload, required_keys)
    return SummaryServer.remove_client(checked["remove_hosts"])
|
||||
|
||||
# this function takes the raw POST input from client_update and makes sure it is valid and returns it if so
def post_processor(client_dict: dict, required_keys: set):
    """Validate a raw POST payload.

    Checks that every key in *required_keys* is present and that the
    submitted API key matches.  On success the payload is stamped with a
    "processed_at" epoch timestamp and returned; on failure an error
    dict {"message": ...} is returned -- the same convention the
    missing-keys path always used.

    BUG FIX: the original computed api_valid/payload_safe but never
    acted on them, so a wrong API key was silently accepted.
    """
    # TODO(review): hard-coded key -- load from config/env instead.
    api_key = "deadbeef"

    # check for keys
    missing = required_keys - client_dict.keys()
    if missing:
        return {
            "message": f"error - {missing} keys missing"
        }

    # check API key; compare_digest gives a constant-time comparison.
    if not secrets.compare_digest(str(client_dict["API_KEY"]), api_key):
        return {"message": "error - invalid API key"}

    # add a key to indicate this was processed
    client_dict["processed_at"] = time.time()
    return client_dict
|
||||
|
||||
# Main functions
|
||||
|
||||
# client processing function, add/update logic in Class Methods
def client_processor(client_dict: dict):
    """Add or update a client on the server, then persist state to disk."""
    outcome = SummaryServer.process_client_data(client_dict)
    save_state(SummaryServer)
    return outcome
|
||||
|
||||
def background_loop():
    """Placeholder for the scheduled background task (currently a no-op)."""
    return True
|
||||
|
||||
def run_main():
    """Start the Flask API; the APScheduler wiring below is kept but disabled."""
    # Flask scheduler for background loop, run if requested
    #scheduler.add_job(id='background_loop',
    #                  func=background_loop,
    #                  trigger='interval',
    #                  seconds=60)
    #scheduler.init_app(app)
    #scheduler.start()

    # Flask API
    background_loop()
    app.run(debug=False, host='0.0.0.0', port=5001)
|
||||
|
||||
178
files/docker/apis/StorageSummary/Storage.py
Normal file
178
files/docker/apis/StorageSummary/Storage.py
Normal file
@ -0,0 +1,178 @@
|
||||
# Class definitions for storage summary
|
||||
# Classes needed:
|
||||
### DriveHealthServer - this will be the list of remote server objects and functions for interacting with them
|
||||
### DriveHealthClient - this will be the remote client class where all the drives are
|
||||
|
||||
from typing import List, Mapping, Any, Sequence, Dict
|
||||
from Helpers import *
|
||||
print("Importing Storage Class")
|
||||
|
||||
#################################################################
|
||||
### DriveHealthServer Class
|
||||
### This is the server objext
|
||||
#################################################################
|
||||
|
||||
class DriveHealthServer:
|
||||
|
||||
# create server object for local cache of remote clients
|
||||
def __init__(self, hostname: str):
|
||||
# the system needs a name, should be equal to the uuid of the client
|
||||
self.name = hostname
|
||||
self.short_id = short_uuid(self.name)
|
||||
self.hostname = hostname
|
||||
# system contains an array of CosmostatClient Objects
|
||||
self.clients = []
|
||||
|
||||
def __str__(self):
|
||||
self_string = f"DriveHealthServer {self.name} - {self.short_id}"
|
||||
return self_string
|
||||
|
||||
def __repr__(self):
|
||||
self_string = f"DriveHealthServer {self.name} - {self.short_id}"
|
||||
|
||||
def __del__(self):
|
||||
print("Deleting Server")
|
||||
|
||||
# either add or update client
|
||||
def process_client_data(self, client_data: dict):
|
||||
result = None
|
||||
if self.check_for_uuid(self.calculate_uuid(client_data)):
|
||||
result = {
|
||||
"message": "Updating client.",
|
||||
"summary": self.update_client(client_data)
|
||||
}
|
||||
else:
|
||||
result = {
|
||||
"message": "Creating new client",
|
||||
"summary": self.add_client(client_data)
|
||||
}
|
||||
return result
|
||||
|
||||
def add_client(self, client_data: dict):
|
||||
new_client = DriveHealthClient(client_data)
|
||||
self.clients.append(new_client)
|
||||
return new_client.get_summary()
|
||||
|
||||
def update_client(self, client_data):
|
||||
result = self.get_client(self.calculate_uuid(client_data))
|
||||
result.update_client(client_data)
|
||||
return result.get_summary()
|
||||
|
||||
def get_client(self, client_uuid: str):
|
||||
result = None
|
||||
for client in self.clients:
|
||||
if client.short_id == client_uuid:
|
||||
result = client
|
||||
return result
|
||||
|
||||
def remove_client(self, client_uuid: str | list[str]):
|
||||
result = None
|
||||
old_clients = self.clients
|
||||
temp_clients = []
|
||||
purged_clients = []
|
||||
for client in old_clients:
|
||||
if client.short_id in client_uuid:
|
||||
purged_clients.append(client)
|
||||
else:
|
||||
temp_clients.append(client)
|
||||
self.clients = temp_clients
|
||||
result = {
|
||||
"message": "client removal complete",
|
||||
#"clients_removed": purged_clients,
|
||||
#"new_client_count": len(self.clients),
|
||||
#"old_client_count": len(old_clients)
|
||||
}
|
||||
return result
|
||||
|
||||
def check_for_uuid(self, uuid: str):
|
||||
result = False
|
||||
for client in self.clients:
|
||||
if client.short_id == uuid:
|
||||
result = True
|
||||
return result
|
||||
|
||||
# calculate uuid based on same parameters
|
||||
def calculate_uuid(self, client_data):
|
||||
unique_string = f"{client_data["hostname"]} - {client_data["IP Address"]}"
|
||||
return short_uuid(unique_string)
|
||||
|
||||
|
||||
#################################################################
### DriveHealthClient Class
### These are the actual remote clients
#################################################################

class DriveHealthClient:
    """One remote client machine and the latest drive report it submitted."""

    ############################################################
    # instantiate new DriveHealthClient
    ############################################################

    def __init__(self, client_data: dict):
        """Build a client from a validated payload.

        Expects keys "IP Address", "hostname", "processed_at" and
        "drives" (stamped/checked upstream by post_processor).
        """
        self.client_data = client_data
        self.ip = self.client_data["IP Address"]
        self.name = self.client_data["hostname"]
        self.data_timestamp = self.client_data["processed_at"]
        # uuid derives from hostname + IP, mirroring
        # DriveHealthServer.calculate_uuid so lookups agree.
        self._unique_string = f"{self.name} - {self.ip}"
        self.short_id = short_uuid(self._unique_string)
        self.drives = self.client_data["drives"]

    def __str__(self):
        return f"DriveHealthClient Server {self.name} - {self.short_id}"

    def __repr__(self):
        # BUG FIX: the original built this string but never returned it,
        # so repr() raised TypeError (__repr__ returned None).
        return f"DriveHealthClient Server {self.name} - {self.short_id}"

    def __del__(self):
        print("Deleting Client")

    def get_summary(self):
        """Short per-drive summary (serial/model/capacity) for this client."""
        drives_brief = [
            {
                "serial": drive["Serial Number"],
                "model": drive["Model"],
                "capacity": drive["Disk Size"]
            }
            for drive in self.drives
        ]
        return {
            "name": self.name,
            # NOTE(review): key says "hostname" but the value is the IP;
            # kept as-is because the dashboard may depend on this shape.
            "hostname": self.ip,
            "uuid": self.short_id,
            "drives": drives_brief
        }

    def get_details(self):
        """Full detail dict for every drive reported by this client."""
        drive_details = [
            {
                "disk_id": drive["Disk ID"],
                "serial": drive["Serial Number"],
                "health_status": drive["Health Status"],
                "model": drive["Model"],
                "capacity": drive["Disk Size"],
                "power_on_hours": drive["Power On Hours"],
                "power_on_count": drive["Power On Count"],
                "host_writes": drive["Host Writes"],
                "wear_level": drive["Wear Level Count"],
                "drive_letter": drive["Drive Letter"],
                "drive_interface": drive["Interface"],
                "transfer_mode": drive["Transfer Mode"]
            }
            for drive in self.drives
        ]
        return {
            "name": self.name,
            "ip": self.ip,
            "uuid": self.short_id,
            "timestamp": self.data_timestamp,
            "drives": drive_details
        }

    def update_client(self, client_data: dict):
        """Replace timestamp and drive list with a fresh report; returns None."""
        self.data_timestamp = client_data["processed_at"]
        self.drives = client_data["drives"]
        return None
|
||||
Binary file not shown.
Binary file not shown.
Binary file not shown.
13
files/docker/apis/StorageSummary/app.py
Normal file
13
files/docker/apis/StorageSummary/app.py
Normal file
@ -0,0 +1,13 @@
|
||||
# main function for storage API
|
||||
|
||||
# import class libraries
|
||||
from Routes import *
|
||||
|
||||
#######################################################################
|
||||
#######################################################################
|
||||
### Main Subroutine
|
||||
#######################################################################
|
||||
#######################################################################
|
||||
|
||||
# Entry point: start the Flask API defined in Routes.
if __name__ == '__main__':
    run_main()
|
||||
7
files/docker/apis/StorageSummary/requirements.txt
Normal file
7
files/docker/apis/StorageSummary/requirements.txt
Normal file
@ -0,0 +1,7 @@
|
||||
flask
|
||||
pytz
|
||||
requests
|
||||
opencv-python
|
||||
redis
|
||||
flask_apscheduler
|
||||
pyyaml
|
||||
Reference in New Issue
Block a user