Cosmostat Init Commit
This commit is contained in:
13
README.md
Normal file
13
README.md
Normal file
@ -0,0 +1,13 @@
|
|||||||
|
This is a local service that will keep a brief history of some system stats and broadcast simple system stats.
|
||||||
|
|
||||||
|
Plan is a python service for a local API, this API will have Objects for system components (like CPU, Storage, RAM, Info) which will inherit the properties of the component object which will have common things like name, summary, live info, historical info.
|
||||||
|
|
||||||
|
There will be a web server stack in docker with an isolated network: nginx, a Node.js server, and PHP on Apache — or whatever this evolves into.
|
||||||
|
|
||||||
|
I want the local web service to show a web-based summary for the current system that can be seen locally at whatever IP I pick
|
||||||
|
The API will start with just responding to the local requests
|
||||||
|
|
||||||
|
There will be a docker network named cosmostat-network at 192.168.37.0/24
|
||||||
|
|
||||||
|
I will create that network in init with a generic docker command so it's there for the API to bind to, since I want to build the API first after init. This has to be at the bottom because that's where all the data comes from. Build the API, build the web files, build docker.
|
||||||
|
|
||||||
59
defaults/main.yaml
Normal file
59
defaults/main.yaml
Normal file
@ -0,0 +1,59 @@
|
|||||||
|
---
|
||||||
|
|
||||||
|
# required system packages
|
||||||
|
cosmostat_packages:
|
||||||
|
- docker.io
|
||||||
|
- docker-compose
|
||||||
|
- python3
|
||||||
|
- python3-pip
|
||||||
|
- python3-dev
|
||||||
|
- python3-setuptools
|
||||||
|
- python3-venv
|
||||||
|
- lm-sensors
|
||||||
|
- jc
|
||||||
|
|
||||||
|
# python venv packages
|
||||||
|
cosmostat_venv_packages: |
|
||||||
|
flask
|
||||||
|
pytz
|
||||||
|
requests
|
||||||
|
opencv-python
|
||||||
|
redis
|
||||||
|
flask_apscheduler
|
||||||
|
pyyaml
|
||||||
|
|
||||||
|
# cosmostat sudoers file
|
||||||
|
cosmostat_sudoers_content: |
|
||||||
|
cosmos ALL=(root) NOPASSWD: /usr/bin/lshw
|
||||||
|
|
||||||
|
# subnet for service
|
||||||
|
docker_subnet: "192.168.37.0/24"
|
||||||
|
docker_gateway: "192.168.37.1"
|
||||||
|
|
||||||
|
# cosmostat service folder root
|
||||||
|
service_folder: "/opt/cosmos/cosmostat"
|
||||||
|
|
||||||
|
# cosmostat will run under cosmos user
|
||||||
|
service_user: "cosmos"
|
||||||
|
user_service_folder: "/home/{{ service_user }}/.config/systemd/user"
|
||||||
|
|
||||||
|
# api service variables
|
||||||
|
api_service_name: "cosmostat_api"
|
||||||
|
api_service_folder: "{{ service_folder }}/api"
|
||||||
|
venv_folder: "{{ service_folder }}/venv"
|
||||||
|
api_service_exe: "{{ venv_folder }}/bin/python -u {{ api_service_folder }}/app.py"
|
||||||
|
|
||||||
|
# dashboard vars
|
||||||
|
service_control_web_folder: "{{ service_folder }}/web"
|
||||||
|
|
||||||
|
# will skip init when true
|
||||||
|
quick_refresh: false
|
||||||
|
|
||||||
|
# cosmostat_settings
|
||||||
|
noisy_test: false
|
||||||
|
debug_output: true
|
||||||
|
secure_api: false
|
||||||
|
push_redis: true
|
||||||
|
run_background: true
|
||||||
|
log_output: true
|
||||||
|
...
|
||||||
298
files/api/Components.py
Normal file
298
files/api/Components.py
Normal file
@ -0,0 +1,298 @@
|
|||||||
|
# this class file is for the cosmostat service
|
||||||
|
import subprocess
|
||||||
|
from LinkedList import *
|
||||||
|
|
||||||
|
global_max_length = 500
|
||||||
|
|
||||||
|
class Component:
    """Base class for all monitored system components.

    Must be subclassed: the subclass __init__ is expected to set
    ``type``, ``model_string``, ``metric_name``, ``metric_value_command``
    and ``has_temp`` on ``self`` *before* calling ``super().__init__``.

    Samples are kept in a bounded ``ValueHistory`` linked list; a caller
    must invoke ``update_value`` (and ``update_temp_value`` when
    ``has_temp`` is true) periodically for history to accumulate.
    ``get_history`` renders the list as ``[{"timestamp": ..., "value": ...}, ...]``.
    """

    def __init__(self, name: str, model_string: str = None):
        # Fail instantiation early if the subclass did not provide the
        # required attributes. getattr with a None default keeps the
        # intended TypeError instead of an AttributeError when the
        # attribute was never assigned at all.
        if getattr(self, "model_string", None) is None:
            raise TypeError("Error - missing component model_string")
        if getattr(self, "metric_name", None) is None:
            raise TypeError("Error - missing component metric_name")
        if getattr(self, "metric_value_command", None) is None:
            raise TypeError("Error - missing component metric_value_command")
        if getattr(self, "type", None) is None:
            raise TypeError("Error - missing component type")
        if getattr(self, "has_temp", None) is None:
            raise TypeError("Error - missing temp data check")

        # Set up the bounded value history and take a first sample.
        self.history_max_length = global_max_length
        self.historical_data = ValueHistory(self.history_max_length)
        self.history_start = self.historical_data.get_first_timestamp()
        self.update_value()
        if self.current_value is None:
            raise TypeError("Error - failed to read value")

        # If the component reports temperature, keep a parallel history.
        if self.has_temp:
            self.temp_history_data = ValueHistory(self.history_max_length)
            self.temp_history_start = self.temp_history_data.get_first_timestamp()
            self.current_temp = self.temp_history_data.get_current_value()
        else:
            self.temp_history_data = None

        # Instantiate the remaining shared instance state.
        self.name = name
        self.current_value = self.historical_data.get_current_value()
        if self.has_temp:
            self.current_temp = self.temp_history_data.get_current_value()
        else:
            self.current_temp = None
        self.comment = f"This is a {self.type}, so we are measuring {self.metric_name}, currently at {self.current_value}"

        # if nothing failed, the object is ready
        self.status = "ready"

    def __str__(self):
        return (f"{self.__class__.__name__}: {self.name} "
                f"{self.model_string}")

    def __del__(self):
        print(f"Deleting {self.type} component - {self.model_string}")

    def get_info_key(self):
        """Return static identification data for this component."""
        result = {
            "name": self.name,
            "type": self.type,
            "model_string": self.model_string,
            "metric_name": self.metric_name
        }
        return result

    def get_summary_key(self):
        """Return the current metric reading plus identification data."""
        result = {
            "type": self.type,
            "current_value": self.current_value,
            "metric_name": self.metric_name,
            "model_string": self.model_string
        }
        return result

    def update_value(self):
        """Sample the metric command and append the reading to history."""
        self.current_value = run_command(self.metric_value_command, True)
        self.historical_data.add(self.current_value)

    def update_temp_value(self):
        """Sample the temperature command, if this component has one.

        Bug fixes vs. the original: ``has_temp`` is an instance
        attribute (it was an unqualified name — a NameError at runtime),
        and the temperature reading — not the metric value — is what
        gets appended to the temperature history.
        """
        if self.has_temp:
            self.current_temp = run_command(self.temp_value_command, True)
            self.temp_history_data.add(self.current_temp)
        else:
            return None

    def get_history(self, count: int = global_max_length):
        """Return up to *count* historical samples (plus temps if kept)."""
        if self.has_temp:
            result = {
                "value_metric": self.metric_name,
                "history_count": count,
                # reminder: this is the LinkedList ValueHistory.get_history
                "history_data": self.historical_data.get_history(count),
                "history_temp_data": self.temp_history_data.get_history(count)
            }
        else:
            result = {
                "value_metric": self.metric_name,
                "history_count": count,
                "history_data": self.historical_data.get_history(count)
            }
        return result
|
||||||
|
|
||||||
|
############################################################
|
||||||
|
# Component Class Types
|
||||||
|
# There needs to be one of these for each monitored thing
|
||||||
|
############################################################
|
||||||
|
# Need to add:
|
||||||
|
### temperatures
|
||||||
|
### network + VPN
|
||||||
|
### storage + ZFS
|
||||||
|
### video cards
|
||||||
|
### virtual machines
|
||||||
|
|
||||||
|
# CPU component class.
|
||||||
|
class CPU(Component):
    """CPU component; its metric is the one-minute load average."""

    def __init__(self, name: str, is_virtual: bool = False):
        # Component type tag consumed by the base class and API output.
        self.type = "CPU"

        # Temperature support is parked for now.
        self.has_temp = False
        # no temp if VM
        #self.has_temp = not is_virtual
        #self.temp_value_command = "acpi -V | jc --acpi -p | jq '.[] | select(.type==\"Thermal\") | .temperature '"

        self.model_string = self.get_model_string()

        # Metric definition plus an initial reading.
        self.metric_name = "1m_load"
        self.metric_value_command = "cat /proc/loadavg | awk '{print $1}'"
        self.current_value = run_command(self.metric_value_command, True)

        # Hand off to the shared Component initialisation.
        super().__init__(name, self.model_string)

    def get_model_string(self):
        """Return the CPU model name as reported by lscpu."""
        query = "lscpu --json | jq -r '.lscpu[] | select(.field==\"Model name:\") | .data'"
        return run_command(query, True)
|
||||||
|
|
||||||
|
# RAM component class.
|
||||||
|
class RAM(Component):
    """RAM component; its metric is used capacity in megabytes."""

    def __init__(self, name: str):
        # Component type tag consumed by the base class and API output.
        self.type = "RAM"
        self.has_temp = False
        self.model_string = self.get_model_string()

        # Metric definition plus an initial reading.
        self.metric_name = "used_capacity_mb"
        self.metric_value_command = "free -m | grep Mem | awk '{print $3}'"
        self.current_value = run_command(self.metric_value_command, True)

        # Hand off to the shared Component initialisation.
        super().__init__(name, self.model_string)

    def get_model_string(self):
        """Return total installed RAM formatted as 'Total Capacity: NGB'."""
        query = "sudo lshw -json -c memory | jq -r '.[] | select(.description==\"System Memory\").size' "
        total_bytes = float(run_command(query, True))
        total_gb = round(total_bytes / 1073741824, 2)
        return f"Total Capacity: {total_gb}GB"
|
||||||
|
|
||||||
|
############################################################
|
||||||
|
# System Class
|
||||||
|
# A system is build from components
|
||||||
|
############################################################
|
||||||
|
class System:
    """A named collection of Component objects plus system-level vars."""

    def __init__(self, name: str):
        # the system needs a name
        self.name = name
        # system is built of other component objects
        self.components = []
        # other system properties live in a plain dict
        self.sysvars = {}
        # determine virtualisation once here so it is not re-run per component
        self.sysvars["is_virtual"] = self.check_for_virtual()
        # Let's build a system
        self.add_component(CPU("CPU", self.sysvars["is_virtual"]))
        self.add_component(RAM("RAM"))
        # populate the remaining system values
        self.check_values()

    def add_component(self, component: Component):
        """Attach a component to this system."""
        self.components.append(component)

    def get_components(self, component_type: type = None):
        """Return all components, optionally filtered by class."""
        if component_type is None:
            return self.components
        return [c for c in self.components if isinstance(c, component_type)]

    def get_component_count(self):
        """Return the number of attached components."""
        return len(self.components)

    def __str__(self):
        components_str = "\n".join(f" - {c}" for c in self.components)
        return f"System: {self.name}\n{components_str}"

    def update_values(self):
        """Refresh system vars, then each component's metric."""
        self.check_values()
        for component in self.components:
            component.update_value()

    def check_for_virtual(self):
        """Return True when systemd-detect-virt reports virtualisation.

        Bug fix: systemd-detect-virt exits non-zero when it prints
        "none", which makes run_command (check=True) raise
        CalledProcessError on bare metal — treat that as "not virtual".
        """
        try:
            return run_command("systemd-detect-virt", True) != "none"
        except subprocess.CalledProcessError:
            return False

    def check_uptime(self):
        """Return the human-readable uptime from `uptime -p`."""
        return run_command("uptime -p", True)

    def check_timestamp(self):
        """Return the current local date/time string."""
        return run_command("date '+%D %r'", True)

    def check_values(self):
        """Recompute the sysvars dictionary entries."""
        self.sysvars["uptime"] = self.check_uptime()
        self.sysvars["name"] = self.name
        self.sysvars["component_count"] = self.get_component_count()
        self.sysvars["timestamp"] = self.check_timestamp()

    def get_sysvars(self):
        """Return a shallow copy of the sysvars dictionary."""
        return dict(self.sysvars)

    def get_sysvars_summary_keys(self):
        """Return sysvars shaped like component summary rows.

        NOTE(review): 'current_value' holds the variable *name* and
        'model_string' holds its value — this mirrors the original
        layout the dashboard consumes; confirm before reshaping.
        """
        result = []
        for sysvar, value in self.sysvars.items():
            result.append({
                "type": "System Class Variable",
                "current_value": sysvar,
                "metric_name": f"sysvar['{sysvar}']",
                "model_string": value
            })
        return result
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
############################################################
|
||||||
|
# Helper Functions
|
||||||
|
############################################################
|
||||||
|
|
||||||
|
|
||||||
|
# subroutine to run a command, return stdout as array unless zero_only then return [0]
|
||||||
|
def run_command(cmd, zero_only=False):
    """Run *cmd* in a shell and return its stdout.

    Returns a list of non-empty output lines, or just the first line
    when *zero_only* is true. When *zero_only* is true but the command
    produced no output, the empty list is returned — this preserves the
    original bare-except fallback without swallowing unrelated errors
    (the bare ``except:`` also caught KeyboardInterrupt/SystemExit).

    Raises subprocess.CalledProcessError when the command exits
    non-zero (check=True).
    """
    # Run the command and capture the output.
    result = subprocess.run(cmd, shell=True, check=True,
                            stdout=subprocess.PIPE, stderr=subprocess.PIPE)

    # Decode and split into non-empty lines.
    output = result.stdout.decode('utf-8')
    output_lines = [line for line in output.split('\n') if line]

    # Explicit emptiness check instead of catching IndexError.
    if zero_only and output_lines:
        return output_lines[0]
    return output_lines
|
||||||
98
files/api/LinkedList.py
Normal file
98
files/api/LinkedList.py
Normal file
@ -0,0 +1,98 @@
|
|||||||
|
|
||||||
|
##############################
|
||||||
|
# linked list classes
|
||||||
|
# written by the intern
|
||||||
|
##############################
|
||||||
|
|
||||||
|
import time
|
||||||
|
|
||||||
|
# single node in a singly linked list
|
||||||
|
class Node:
|
||||||
|
__slots__ = ("value", "next", "timestamp")
|
||||||
|
|
||||||
|
def __init__(self, value):
|
||||||
|
self.value = value
|
||||||
|
self.timestamp = time.time()
|
||||||
|
self.next = None
|
||||||
|
|
||||||
|
# small, bounded history implemented with a singly linked list
|
||||||
|
class ValueHistory:
|
||||||
|
def __init__(self, maxlen: int):
|
||||||
|
if maxlen <= 0:
|
||||||
|
raise ValueError("maxlen must be a positive integer")
|
||||||
|
self.maxlen = maxlen
|
||||||
|
self.head: Node | None = None # oldest entry
|
||||||
|
self.tail: Node | None = None # newest entry
|
||||||
|
self.size = 0
|
||||||
|
|
||||||
|
# Append a new value to the history, dropping the oldest if needed
|
||||||
|
def add(self, value):
|
||||||
|
new_node = Node(value)
|
||||||
|
|
||||||
|
# link it after the current tail
|
||||||
|
if self.tail is None: # empty list
|
||||||
|
self.head = self.tail = new_node
|
||||||
|
else:
|
||||||
|
self.tail.next = new_node
|
||||||
|
self.tail = new_node
|
||||||
|
|
||||||
|
self.size += 1
|
||||||
|
|
||||||
|
# 2. enforce the size bound
|
||||||
|
if self.size > self.maxlen:
|
||||||
|
# drop the head (oldest item)
|
||||||
|
assert self.head is not None # for the type checker
|
||||||
|
self.head = self.head.next
|
||||||
|
self.size -= 1
|
||||||
|
|
||||||
|
# If the list became empty, also reset tail
|
||||||
|
if self.head is None:
|
||||||
|
self.tail = None
|
||||||
|
|
||||||
|
# Return the history as a Python dict list (oldest → newest)
|
||||||
|
def get_history(self, count: int | None = None):
|
||||||
|
if count is None:
|
||||||
|
count = self.maxlen
|
||||||
|
out = []
|
||||||
|
cur = self.head
|
||||||
|
counter = 0
|
||||||
|
while cur is not None and counter < count:
|
||||||
|
counter += 1
|
||||||
|
out.append(
|
||||||
|
{
|
||||||
|
"timestamp": cur.timestamp,
|
||||||
|
"value": cur.value
|
||||||
|
}
|
||||||
|
)
|
||||||
|
cur = cur.next
|
||||||
|
return out
|
||||||
|
|
||||||
|
# Return oldest timestamp
|
||||||
|
def get_first_timestamp(self):
|
||||||
|
if self.head is not None:
|
||||||
|
return self.head.timestamp
|
||||||
|
else:
|
||||||
|
return time.time()
|
||||||
|
|
||||||
|
# Return current data
|
||||||
|
def get_current_value(self):
|
||||||
|
if self.tail is not None:
|
||||||
|
return self.tail.value
|
||||||
|
else:
|
||||||
|
return 0
|
||||||
|
|
||||||
|
# ------------------------------------------------------------------
|
||||||
|
# Convenience methods
|
||||||
|
# ------------------------------------------------------------------
|
||||||
|
def __len__(self):
|
||||||
|
return self.size
|
||||||
|
|
||||||
|
def __iter__(self):
|
||||||
|
"""Iterate over values from oldest to newest."""
|
||||||
|
cur = self.head
|
||||||
|
while cur is not None:
|
||||||
|
yield cur.value
|
||||||
|
cur = cur.next
|
||||||
|
|
||||||
|
def __repr__(self):
|
||||||
|
return f"BoundedHistory(maxlen={self.maxlen}, data={self.get()!r})"
|
||||||
237
files/api/app.py
Normal file
237
files/api/app.py
Normal file
@ -0,0 +1,237 @@
|
|||||||
|
from flask import Flask, jsonify, request
from flask_apscheduler import APScheduler
from typing import Dict, Union

import json, time, redis, yaml

from Components import *

# declare flask apps
app = Flask(__name__)
scheduler = APScheduler()


#######################################################################
### Settings Handler Functions
#######################################################################

# Default application settings; overridden below from the yaml file.
app_settings = {
    "noisy_test" : False,
    "debug_output" : False,
    "log_output" : False,
    "secure_api" : True,
    "push_redis" : False,
    "run_background" : True
}

# Load the role-generated settings file (fails fast if missing).
with open('cosmostat_settings.yaml', 'r') as f:
    print("Loading cosmostat_settings file")
    cosmostat_settings = yaml.safe_load(f)
    print("...Done")

# Apply any overrides present in the settings file.
print("Checking for system var overrides")
for setting_name in app_settings:
    if setting_name in cosmostat_settings:
        override = cosmostat_settings[setting_name]
        # NOTE: debug_output can itself be overridden mid-loop, which
        # affects whether the remaining overrides get printed.
        if app_settings["debug_output"]:
            print(f"{setting_name}: {override}")
        app_settings[setting_name] = override
print("...Done")
|
||||||
|
|
||||||
|
def docker_gateway_settings() -> str:
    """Return the docker gateway address from the settings file."""
    return cosmostat_settings["docker_gateway"]


def jenkins_user_settings() -> str:
    """Return the jenkins user that ran the deploy pipeline."""
    return cosmostat_settings["jenkins_user"]


def jenkins_hostname_settings() -> str:
    """Return the ansible_hostname captured at setup time."""
    return cosmostat_settings["ansible_hostname"]


def jenkins_inventory_generation_timestamp_settings() -> str:
    """Return the inventory_generation_timestamp from the settings."""
    return cosmostat_settings["inventory_generation_timestamp"]


def service_gateway_ip():
    """Return the bind address: the docker gateway when secure_api is
    set, otherwise all interfaces."""
    return docker_gateway_settings() if cosmostat_settings["secure_api"] else "0.0.0.0"
|
||||||
|
|
||||||
|
#######################################################################
|
||||||
|
### Redis Functions
|
||||||
|
#######################################################################
|
||||||
|
|
||||||
|
#######################################################################
### Redis Functions
#######################################################################

# Redis client – publishes stat updates for the dashboard.
r = redis.Redis(host=service_gateway_ip(), port=6379)


def update_redis_channel(redis_channel, data):
    """Publish *data* as JSON on *redis_channel*."""
    r.publish(redis_channel, json.dumps(data))
    if app_settings["noisy_test"]:
        print(f"{redis_channel} Redis Update")
        print(data)


def update_redis_server():
    """Push the full summary and component history to their channels."""
    update_redis_channel("host_stats", get_full_summary())
    update_redis_channel("history_stats", get_component_list())
|
||||||
|
|
||||||
|
#######################################################################
|
||||||
|
### Other Functions
|
||||||
|
#######################################################################
|
||||||
|
|
||||||
|
def get_component_summary():
    """Return the summary row of every component on the system."""
    return [component.get_summary_key() for component in cosmostat_system.components]
|
||||||
|
|
||||||
|
def get_full_summary():
    """Return component summaries followed by the system-variable rows."""
    summary = [component.get_summary_key() for component in cosmostat_system.components]
    summary.extend(cosmostat_system.get_sysvars_summary_keys())
    return summary
|
||||||
|
|
||||||
|
# This will instantiate a System object
|
||||||
|
def new_cosmos_system():
    """Build the System object for this host, optionally logging its parts."""
    built = System(f"{jenkins_hostname_settings()}")
    if app_settings["log_output"]:
        print(f"New system object name: {built.name}")
        for part in built.components:
            print(part)
    return built
|
||||||
|
|
||||||
|
def get_component_list(history_count = None):
    """Return info plus history for each component.

    When *history_count* is given, each component's history is limited
    to that many samples; otherwise the component's default applies.
    """
    rows = []
    for comp in cosmostat_system.components:
        if history_count is not None:
            hist = comp.get_history(history_count)
        else:
            hist = comp.get_history()
        rows.append({"info": comp.get_info_key(), "history": hist})
    return rows
|
||||||
|
|
||||||
|
def get_info():
    """Return static system info plus per-component identification.

    Served by the /info route.
    """
    device_summary = [
        {"info": component.get_info_key()}
        for component in cosmostat_system.components
    ]
    result = {
        "system_info": {
            "user": jenkins_user_settings(),
            "hostname": jenkins_hostname_settings(),
            "timestamp": jenkins_inventory_generation_timestamp_settings(),
            # bug fix: key was "component_count:" (stray trailing colon)
            "component_count": len(cosmostat_system.components),
            "object_name": cosmostat_system.name,
            "docker_gateway": docker_gateway_settings()
        },
        "device_summary": device_summary
    }
    return result
|
||||||
|
|
||||||
|
#def get_history_summary():
|
||||||
|
|
||||||
|
#######################################################################
|
||||||
|
### Flask Routes
|
||||||
|
#######################################################################
|
||||||
|
|
||||||
|
# full component list
|
||||||
|
# full component list, optionally limited with ?count=N
@app.route('/component_list', methods=['GET'])
def component_list():
    count = request.args.get('count', type=int)
    return jsonify(get_component_list(count))


# component summary
@app.route('/component_summary', methods=['GET'])
def component_summary():
    return jsonify(get_component_summary())


# full summary
@app.route('/full_summary', methods=['GET'])
def full_summary():
    return jsonify(get_full_summary())


# system info
@app.route('/info', methods=['GET'])
def info():
    return jsonify(get_info())


# test route
@app.route('/test', methods=['GET'])
def test():
    return jsonify(
        {
            # bug fix: key was "component_count:" (stray trailing colon)
            "component_count": len(cosmostat_system.components),
            "user": jenkins_user_settings(),
            "hostname": jenkins_hostname_settings()
        }
    )
|
||||||
|
|
||||||
|
#######################################################################
|
||||||
|
### Main Subroutine
|
||||||
|
#######################################################################
|
||||||
|
|
||||||
|
if __name__ == '__main__':

    # Periodic job: refresh stats and (optionally) push them to Redis.
    def background_loop():
        # Update all data on the System object
        cosmostat_system.update_values()

        if app_settings["push_redis"]:
            update_redis_server()

        if app_settings["noisy_test"]:
            print("Sorry about the mess...")
            print(f"Blame {jenkins_user_settings()}")

    # instantiate system
    cosmostat_system = new_cosmos_system()

    # send initial stats update to redis
    if app_settings["push_redis"]:
        update_redis_server()

    # Flask scheduler for scanner
    if app_settings["run_background"]:
        if app_settings["log_output"]:
            print("Loading flask background subroutine...")

        scheduler.add_job(id='background_loop', func=background_loop,
                          trigger='interval', seconds=1)
        scheduler.init_app(app)
        scheduler.start()

        if app_settings["log_output"]:
            print("...Done")
    else:
        if app_settings["log_output"]:
            print("Skipping flask background task")

    # Flask API
    # NOTE(review): debug=True enables the werkzeug reloader, which re-runs
    # this module and can double-start the scheduler — confirm intended.
    app.run(debug=True, host=service_gateway_ip(), port=5000)
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
40
files/proxy/nginx.conf
Normal file
40
files/proxy/nginx.conf
Normal file
@ -0,0 +1,40 @@
|
|||||||
|
# nginx.conf
|
||||||
|
# This file will be mounted into /etc/nginx/conf.d/default.conf inside the container
|
||||||
|
|
||||||
|
# Enable proxy buffers (optional but recommended)
|
||||||
|
proxy_buffering on;
|
||||||
|
proxy_buffers 16 16k;
|
||||||
|
proxy_buffer_size 32k;
|
||||||
|
|
||||||
|
server {
|
||||||
|
listen 80;
|
||||||
|
server_name localhost;
|
||||||
|
|
||||||
|
# --------------------------------------------------------------------
|
||||||
|
# Proxy everything under "/" to the Node backend
|
||||||
|
# --------------------------------------------------------------------
|
||||||
|
location / {
|
||||||
|
proxy_pass http://172.17.0.1:3000;
|
||||||
|
proxy_set_header Host $host;
|
||||||
|
proxy_set_header X-Real-IP $remote_addr;
|
||||||
|
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||||
|
}
|
||||||
|
|
||||||
|
# --------------------------------------------------------------------
|
||||||
|
# Proxy everything under "/history" to the Apache backend
|
||||||
|
# This is off for now
|
||||||
|
# --------------------------------------------------------------------
|
||||||
|
#location /history/ {
|
||||||
|
# proxy_pass http://172.17.0.1:8080/;
|
||||||
|
# proxy_set_header Host $host;
|
||||||
|
# proxy_set_header X-Real-IP $remote_addr;
|
||||||
|
# proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||||
|
#}
|
||||||
|
|
||||||
|
# Optional: If you want `/history` (without trailing slash) to be
|
||||||
|
# redirected to `/history/`:
|
||||||
|
#location = /history {
|
||||||
|
# return 301 /history/;
|
||||||
|
#}
|
||||||
|
}
|
||||||
|
|
||||||
34
files/web/html/index.html
Normal file
34
files/web/html/index.html
Normal file
@ -0,0 +1,34 @@
|
|||||||
|
<!doctype html>
|
||||||
|
<html lang="en">
|
||||||
|
<head>
|
||||||
|
<meta charset="utf-8">
|
||||||
|
<title>Matt-Cloud Cosmostat</title>
|
||||||
|
|
||||||
|
|
||||||
|
<link rel="stylesheet" href="src/styles.css">
|
||||||
|
</head>
|
||||||
|
<body>
|
||||||
|
<div class="container">
|
||||||
|
<h2>Matt-Cloud Cosmostat Dashboard</h2>
|
||||||
|
<p>This dashboard shows the local Matt-Cloud system stats.</p>
|
||||||
|
</div>
|
||||||
|
<div class="container">
|
||||||
|
<h2>System Stats</h2>
|
||||||
|
<div id="host_stats" class="column">Connecting…</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<!--
|
||||||
|
Here will go the graphs once i have all the stats first
|
||||||
|
<div class="container">
|
||||||
|
<h2>System Graphs</h2>
|
||||||
|
<div id="host_graphs" class="column">Connecting…</div>
|
||||||
|
</div>
|
||||||
|
-->
|
||||||
|
|
||||||
|
<!-- Socket.IO client library -->
|
||||||
|
<script src="socket.io/socket.io.js"></script>
|
||||||
|
<!-- matt-cloud redis script -->
|
||||||
|
<script src="src/redis.js"></script>
|
||||||
|
|
||||||
|
</body>
|
||||||
|
</html>
|
||||||
126
files/web/html/src/redis.js
Normal file
126
files/web/html/src/redis.js
Normal file
@ -0,0 +1,126 @@
|
|||||||
|
/* -------------------------------------------------------------
|
||||||
|
1. Socket‑IO connection & helper functions (unchanged)
|
||||||
|
------------------------------------------------------------- */
|
||||||
|
const socket = io();
|
||||||
|
|
||||||
|
socket.on('host_stats', renderStatsTable);
|
||||||
|
socket.on('connect_error', err => {
|
||||||
|
safeSetText('host_stats', `Could not connect to server - ${err.message}`);
|
||||||
|
});
|
||||||
|
socket.on('reconnect', attempt => {
|
||||||
|
safeSetText('host_stats', `Re‑connected (attempt ${attempt})`);
|
||||||
|
});
|
||||||
|
|
||||||
|
/* Write txt into the element with the given id, ignoring missing nodes. */
function safeSetText(id, txt) {
    const target = document.getElementById(id);
    if (target) {
        target.textContent = txt;
    }
}
|
||||||
|
|
||||||
|
/* -------------------------------------------------------------
|
||||||
|
2. Table rendering – the table remains a <table>
|
||||||
|
------------------------------------------------------------- */
|
||||||
|
/* Render the host_stats payload into its table container. */
function renderStatsTable(data) {
    renderGenericTable('host_stats', data, 'No Stats available');
}
|
||||||
|
|
||||||
|
/* Render an array of row objects as a table inside containerId,
   showing emptyMsg when there is nothing to display. */
function renderGenericTable(containerId, data, emptyMsg) {
    const container = document.getElementById(containerId);
    if (!Array.isArray(data) || !data.length) {
        container.textContent = emptyMsg;
        return;
    }

    /* Merge "System Class Variable" rows, then build the table
       from the merged data. */
    const merged = mergeSystemClassVariableRows(data);
    const table = buildTable(merged);

    container.innerHTML = '';
    container.appendChild(table);
}
|
||||||
|
|
||||||
|
/* -------------------------------------------------------------
|
||||||
|
3. Merge consecutive rows whose type === "System Class Variable"
|
||||||
|
------------------------------------------------------------- */
|
||||||
|
/**
 * Collapse each run of consecutive rows whose `type` (trimmed) equals
 * "System Class Variable" into one merged row. Every non-`type` column
 * of the merged row holds an array of the run's values, with
 * undefined/null entries dropped; the column set is taken from the
 * first row of the run. All other rows pass through unchanged.
 */
function mergeSystemClassVariableRows(data) {
    const isScv = row => row.type && row.type.trim() === 'System Class Variable';
    const out = [];
    let idx = 0;

    while (idx < data.length) {
        if (!isScv(data[idx])) {
            // Normal row – just copy it.
            out.push(data[idx]);
            idx += 1;
            continue;
        }

        // Gather the whole consecutive run of SCV rows.
        const run = [];
        do {
            run.push(data[idx]);
            idx += 1;
        } while (idx < data.length && isScv(data[idx]));

        // One merged object – keep each column as an array, not a joined string.
        const merged = { type: 'System Class Variable' };
        for (const col of Object.keys(run[0])) {
            if (col === 'type') continue;
            merged[col] = run
                .map(row => row[col])
                .filter(v => v !== undefined && v !== null);
        }
        out.push(merged);
    }

    return out;
}
|
||||||
|
|
||||||
|
/* -------------------------------------------------------------
|
||||||
|
4. Build an HTML table from an array of objects
|
||||||
|
------------------------------------------------------------- */
|
||||||
|
/**
 * Build an HTML <table> from an array of plain objects.
 * Column order comes from the keys of the first object. Array cell
 * values (merged "System Class Variable" columns) render as an <ol>;
 * everything else renders as text.
 */
function buildTable(data) {
    const columns = Object.keys(data[0]); // column order
    const table = document.createElement('table');

    // Header row mirrors the column names.
    const headerRow = table.createTHead().insertRow();
    for (const name of columns) {
        const th = document.createElement('th');
        th.textContent = name;
        headerRow.appendChild(th);
    }

    // One body row per record.
    const body = table.createTBody();
    for (const record of data) {
        const row = body.insertRow();
        for (const name of columns) {
            const cell = row.insertCell();
            const value = record[name];

            if (Array.isArray(value)) {
                // Array value → ordered list inside the cell.
                const list = document.createElement('ol');
                for (const entry of value) {
                    const item = document.createElement('li');
                    item.textContent = entry;
                    list.appendChild(item);
                }
                cell.appendChild(list);
            } else {
                cell.textContent = value; // normal text
            }
        }
    }

    return table;
}
|
||||||
120
files/web/html/src/styles.css
Normal file
120
files/web/html/src/styles.css
Normal file
@ -0,0 +1,120 @@
|
|||||||
|
/* styles.css */
|
||||||
|
|
||||||
|
body {
|
||||||
|
font-family: Arial, sans-serif;
|
||||||
|
margin: 0;
|
||||||
|
padding: 0;
|
||||||
|
background-color: #2c3e50; /* Dark background color */
|
||||||
|
color: #bdc3c7; /* Dimmer text color */
|
||||||
|
}
|
||||||
|
|
||||||
|
.hidden-info {
|
||||||
|
display: none;
|
||||||
|
}
|
||||||
|
|
||||||
|
.title-button {
|
||||||
|
background-color: #34495e;
|
||||||
|
border: none;
|
||||||
|
color: white;
|
||||||
|
padding: 15px 32px;
|
||||||
|
text-align: center;
|
||||||
|
text-decoration: none;
|
||||||
|
display: inline-block;
|
||||||
|
font-size: 16px;
|
||||||
|
margin: 4px 2px;
|
||||||
|
cursor: pointer;
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
table, th, td {
|
||||||
|
border: 1px solid black;
|
||||||
|
border-collapse: collapse;
|
||||||
|
}
|
||||||
|
th, td {
|
||||||
|
padding: 10px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.container {
|
||||||
|
max-width: 950px;
|
||||||
|
margin: 0 auto;
|
||||||
|
padding: 20px;
|
||||||
|
background-color: #34495e; /* Darker background for container */
|
||||||
|
border-radius: 8px;
|
||||||
|
box-shadow: 0 2px 4px rgba(0, 0, 0, 0.3); /* Slightly darker shadow */
|
||||||
|
margin-top: 20px;
|
||||||
|
}
|
||||||
|
.container-small {
|
||||||
|
max-width: 550px;
|
||||||
|
margin: 0 auto;
|
||||||
|
padding: 20px;
|
||||||
|
background-color: #34495e; /* Darker background for container */
|
||||||
|
border-radius: 8px;
|
||||||
|
box-shadow: 0 2px 4px rgba(0, 0, 0, 0.3); /* Slightly darker shadow */
|
||||||
|
margin-top: 20px;
|
||||||
|
}
|
||||||
|
|
||||||
|
h1, h2, h3, h4 {
|
||||||
|
color: #bdc3c7; /* Dimmer text color */
|
||||||
|
}
|
||||||
|
|
||||||
|
ul {
|
||||||
|
list-style-type: none;
|
||||||
|
padding: 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
li {
|
||||||
|
margin-bottom: 10px;
|
||||||
|
color: #bdc3c7; /* Dimmer text color */
|
||||||
|
}
|
||||||
|
|
||||||
|
.group-columns {
|
||||||
|
display: flex;
|
||||||
|
}
|
||||||
|
|
||||||
|
.group-rows {
|
||||||
|
display: flex;
|
||||||
|
flex-wrap: wrap;
|
||||||
|
justify-content: flex-start; /* Left justification */
|
||||||
|
margin-top: 10px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.group-column {
|
||||||
|
flex: 0 0 calc(33% - 10px); /* Adjust width of each column */
|
||||||
|
}
|
||||||
|
|
||||||
|
.column {
|
||||||
|
flex: 1;
|
||||||
|
padding: 0 10px; /* Adjust spacing between columns */
|
||||||
|
}
|
||||||
|
|
||||||
|
.subcolumn {
|
||||||
|
margin-left: 10px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.grid {
|
||||||
|
display: flex;
|
||||||
|
flex-wrap: wrap;
|
||||||
|
justify-content: space-between;
|
||||||
|
margin-top: 5px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.meter {
|
||||||
|
width: calc(90% - 5px);
|
||||||
|
max-width: calc(45% - 5px);
|
||||||
|
margin-bottom: 5px;
|
||||||
|
border: 1px solid #7f8c8d; /* Light border color */
|
||||||
|
border-radius: 5px;
|
||||||
|
padding: 5px;
|
||||||
|
text-align: center;
|
||||||
|
background-color: #2c3e50; /* Dark background for meter */
|
||||||
|
}
|
||||||
|
|
||||||
|
#host_stats td ol {
|
||||||
|
list-style: none; /* removes the numeric markers */
|
||||||
|
padding-left: 0; /* remove the default left indent */
|
||||||
|
margin-left: 0; /* remove the default left margin */
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
#host_stats td ol li:nth-child(odd) { background: #34495e; }
|
||||||
|
#host_stats td ol li:nth-child(even) { background: #3e5c78; }
|
||||||
32
files/web/html/test-2.html
Normal file
32
files/web/html/test-2.html
Normal file
@ -0,0 +1,32 @@
|
|||||||
|
<!doctype html>
|
||||||
|
<html lang="en">
|
||||||
|
<head>
|
||||||
|
<meta charset="utf-8">
|
||||||
|
<title>Matt-Cloud Cosmostat</title>
|
||||||
|
|
||||||
|
|
||||||
|
<link rel="stylesheet" href="src/styles.css">
|
||||||
|
</head>
|
||||||
|
<body>
|
||||||
|
<div class="container">
|
||||||
|
<h2>Matt-Cloud Cosmostat Dashboard</h2>
|
||||||
|
This dashboard shows the local Matt-Cloud system stats.<p>
|
||||||
|
</div>
|
||||||
|
<div class="container">
|
||||||
|
<h2>System Stats</h2>
|
||||||
|
<div id="host_stats" class="column">Connecting…</div>
|
||||||
|
</div>
|
||||||
|
<!--
|
||||||
|
Here will go the graphs once i have all the stats first
|
||||||
|
<div class="container">
|
||||||
|
<h2>System Graphs</h2>
|
||||||
|
<div id="host_graphs" class="column">Connecting…</div>
|
||||||
|
</div>
|
||||||
|
-->
|
||||||
|
|
||||||
|
<!-- Socket.IO client library -->
|
||||||
|
<script src="socket.io/socket.io.js"></script>
|
||||||
|
<!-- matt-cloud redis script -->
|
||||||
|
<script src="src/redis.js"></script>
|
||||||
|
</body>
|
||||||
|
</html>
|
||||||
181
files/web/html/test.html
Normal file
181
files/web/html/test.html
Normal file
@ -0,0 +1,181 @@
|
|||||||
|
<!doctype html>
|
||||||
|
<html lang="en">
|
||||||
|
<head>
|
||||||
|
<meta charset="utf-8">
|
||||||
|
<title>Matt-Cloud Cosmostat</title>
|
||||||
|
|
||||||
|
|
||||||
|
<link rel="stylesheet" href="styles.css">
|
||||||
|
</head>
|
||||||
|
<body>
|
||||||
|
<div class="container">
|
||||||
|
<h2>Matt-Cloud Cosmostat Dashboard</h2>
|
||||||
|
This dashboard shows the local Matt-Cloud system stats.<p>
|
||||||
|
</div>
|
||||||
|
<div class="container">
|
||||||
|
<h2>System Stats</h2>
|
||||||
|
<div id="host_stats" class="column">Connecting...</div>
|
||||||
|
</div>
|
||||||
|
<!--
|
||||||
|
Here will go the graphs once i have all the stats first
|
||||||
|
-->
|
||||||
|
<div class="container">
|
||||||
|
<h2>System Graphs</h2>
|
||||||
|
<div id="host_graphs" class="column">
|
||||||
|
<div id="history_graphs" class="container">
|
||||||
|
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<!-- Socket.IO client library -->
|
||||||
|
<script src="socket.io/socket.io.js"></script>
|
||||||
|
<script>
|
||||||
|
|
||||||
|
// Socket.IO wiring for the live stats table.
const socket = io();

// listen for redis updates, render and error handle
socket.on('host_stats', renderStatsTable);

socket.on('connect_error', err => {
    safeSetText('host_stats', `Could not connect to server - ${err.message}`);
});

socket.on('reconnect', attempt => {
    safeSetText('host_stats', `Re-connected (attempt ${attempt})`);
});

// Write plain text into #id, ignoring ids that are not on the page.
function safeSetText(id, txt) {
    const node = document.getElementById(id);
    if (node !== null) {
        node.textContent = txt;
    }
}

// table rendering functions
function renderStatsTable(data) {
    renderGenericTable('host_stats', data, 'No Stats available');
}

// Replace containerId's contents with a table of `data`, or show
// emptyMsg when the payload is not a non-empty array.
function renderGenericTable(containerId, data, emptyMsg) {
    const container = document.getElementById(containerId);
    if (!Array.isArray(data) || data.length === 0) {
        container.textContent = emptyMsg;
        return;
    }
    container.innerHTML = '';
    container.appendChild(renderTable(data));
}

// Build an HTML table; columns are inferred from the first object
// (order matters).
function renderTable(data) {
    const columns = Object.keys(data[0]);
    const table = document.createElement('table');

    // Header
    const headerRow = table.createTHead().insertRow();
    for (const col of columns) {
        const th = document.createElement('th');
        th.textContent = col;
        headerRow.appendChild(th);
    }

    // Body
    const body = table.createTBody();
    for (const record of data) {
        const row = body.insertRow();
        for (const col of columns) {
            row.insertCell().textContent = record[col];
        }
    }

    return table;
}
|
||||||
|
|
||||||
|
|
||||||
|
</script>
|
||||||
|
<script>
|
||||||
|
|
||||||
|
// ────────────────────── Globals ──────────────────────
// `let`, not `const`: renderHistoryGraphs reassigns this map on every
// payload — with `const` that assignment threw a TypeError and aborted
// rendering before any chart was drawn.
let chartInstances = {}; // {metricName: Chart}
const colorPalette = [
    '#ff6384', '#36a2eb', '#ffcd56', '#4bc0c0',
    '#9966ff', '#ff9f40', '#8e5ea2', '#3e95cd'
];

// ────────────────────── Socket.io ──────────────────────
// `socket` is created by the first inline script on this page.
socket.on('history_stats', renderHistoryGraphs);

// ────────────────────── Rendering ──────────────────────
// Rebuild one time-series line chart per component from a
// `history_stats` payload (array of {info, history} objects).
// NOTE(review): relies on a global `Chart` (Chart.js plus a date
// adapter for the `time` scale), but only socket.io is loaded on this
// page — confirm the Chart.js <script> tags get added.
function renderHistoryGraphs(components) {
    // 1️⃣ Sanity check – components is an array of objects
    if (!Array.isArray(components) || !components.length) {
        console.warn('history_stats payload is empty or malformed');
        return;
    }

    // 2️⃣ Clean up any old charts & canvases
    Object.values(chartInstances).forEach(ch => ch.destroy());
    chartInstances = {}; // reset map
    const container = document.getElementById('history_graphs');
    container.innerHTML = ''; // empty the container

    // 3️⃣ For each component create a canvas & a Chart
    components.forEach((comp, idx) => {
        const metricName = comp.info?.metric_name || comp.info?.name || `component-${idx+1}`;

        // 3a. Create a canvas element
        const canvas = document.createElement('canvas');
        canvas.id = `chart-${metricName}`;
        canvas.width = 800; // optional – you can use CSS instead
        canvas.height = 400;
        canvas.style.marginBottom = '2rem';

        // 3b. Append the canvas to the container
        container.appendChild(canvas);

        // 3c. Build the dataset for this component
        const history = comp.history?.history_data || [];
        const dataPoints = history.map(d => ({
            x: new Date(d.timestamp * 1000), // convert seconds → ms
            y: parseFloat(d.value) // values are strings in Redis
        }));

        const dataset = {
            label: metricName,
            data: dataPoints,
            borderColor: colorPalette[idx % colorPalette.length],
            fill: false,
            tension: 0.1
        };

        // 3d. Create the chart
        const ctx = canvas.getContext('2d');
        chartInstances[metricName] = new Chart(ctx, {
            type: 'line',
            data: { datasets: [dataset] },
            options: {
                responsive: true,
                maintainAspectRatio: false,
                plugins: {
                    legend: { position: 'bottom' },
                    tooltip: { mode: 'index', intersect: false }
                },
                scales: {
                    x: {
                        type: 'time',
                        // NOTE(review): this format string contains
                        // non-breaking hyphens (U+2011), likely a paste
                        // artifact — confirm intended before shipping.
                        time: { unit: 'minute', tooltipFormat: 'YYYY‑MM‑DD HH:mm:ss' },
                        title: { display: true, text: 'Time' }
                    },
                    y: {
                        title: { display: true, text: 'Value' },
                        beginAtZero: false
                    }
                }
            }
        });
    });
}
|
||||||
|
|
||||||
|
</script>
|
||||||
|
|
||||||
|
</body>
|
||||||
|
</html>
|
||||||
18
files/web/node_server/Dockerfile
Normal file
18
files/web/node_server/Dockerfile
Normal file
@ -0,0 +1,18 @@
|
|||||||
|
# Use an official Node runtime
|
||||||
|
FROM node:20-alpine
|
||||||
|
|
||||||
|
# Create app directory
|
||||||
|
WORKDIR /usr/src/app
|
||||||
|
|
||||||
|
# Install dependencies
|
||||||
|
COPY package.json .
|
||||||
|
RUN npm install --only=production
|
||||||
|
|
||||||
|
# Copy app source
|
||||||
|
COPY . .
|
||||||
|
|
||||||
|
# Expose the port that the app listens on
|
||||||
|
EXPOSE 3000
|
||||||
|
|
||||||
|
# Start the server
|
||||||
|
CMD ["node", "server.js"]
|
||||||
13
files/web/node_server/package.json
Normal file
13
files/web/node_server/package.json
Normal file
@ -0,0 +1,13 @@
|
|||||||
|
{
|
||||||
|
"name": "redis-table-demo",
|
||||||
|
"version": "1.0.0",
|
||||||
|
"main": "server.js",
|
||||||
|
"scripts": {
|
||||||
|
"start": "node server.js"
|
||||||
|
},
|
||||||
|
"dependencies": {
|
||||||
|
"express": "^4.18.2",
|
||||||
|
"socket.io": "^4.7.2",
|
||||||
|
"redis": "^4.6.7"
|
||||||
|
}
|
||||||
|
}
|
||||||
70
files/web/node_server/server.js
Normal file
70
files/web/node_server/server.js
Normal file
@ -0,0 +1,70 @@
|
|||||||
|
// server.js
//
// Bridges Redis pub/sub channels onto Socket.IO so browser dashboards
// receive live host/history stats. Also serves the static dashboard
// files from ./public.
const http = require('http');
const express = require('express');
const { createClient } = require('redis');
const { Server } = require('socket.io');

const app = express();
const server = http.createServer(app);
const io = new Server(server);

// Serve static files (index.html)
app.use(express.static('public'));

// ---------- Redis subscriber ----------
const redisClient = createClient({
    url: 'redis://172.17.0.1:6379'
});
redisClient.on('error', err => console.error('Redis error', err));

// Shared channel handler: parse the JSON payload and fan it out to every
// connected Socket.IO client under the same event name. (The original
// duplicated this logic verbatim for each channel.) Malformed JSON is
// logged and dropped rather than crashing the subscriber.
function forwardChannel(message, channel) {
    let payload;
    try {
        payload = JSON.parse(message); // message is a JSON string
    } catch (e) {
        console.error(`Failed to parse ${channel}`, e);
        return;
    }
    io.emit(channel, payload);
}

(async () => {
    await redisClient.connect();

    const sub = redisClient.duplicate(); // duplicate to keep separate pub/sub
    // Register the error listener before connecting so connect-time
    // failures are not silently unhandled.
    sub.on('error', err => console.error('Subscriber error', err));
    await sub.connect();

    // Both stats channels use the same JSON-forwarding handler.
    await sub.subscribe(['host_stats'], forwardChannel);
    await sub.subscribe(['history_stats'], forwardChannel);
})();

// ---------- Socket.io ----------
io.on('connection', socket => {
    console.log('client connected:', socket.id);
    // Optional: send the current state on connect if you keep it cached
});

// ---------- Start ----------
const PORT = process.env.PORT || 3000;
server.listen(PORT, () => {
    console.log(`Server listening on http://localhost:${PORT}`);
});
|
||||||
23
handlers/main.yaml
Normal file
23
handlers/main.yaml
Normal file
@ -0,0 +1,23 @@
|
|||||||
|
---

# Reload the per-user systemd manager so new/changed unit files are seen.
# Fix: the verb was misspelled "deamon-reload" — systemctl has no such
# command, so this handler always failed at runtime.
- name: "daemon reload for {{ service_user }}"
  shell: "systemctl --user -M {{ service_user }}@ daemon-reload"
  listen: user daemon reload

# cosmostat api handlers
- name: Restart cosmostat api
  shell: "systemctl --user -M {{ service_user }}@ restart {{ api_service_name }}"
  listen: restart api

- name: Stop cosmostat api
  shell: "systemctl --user -M {{ service_user }}@ stop {{ api_service_name }}"
  listen: stop api

- name: Start cosmostat api
  shell: "systemctl --user -M {{ service_user }}@ start {{ api_service_name }}"
  listen: start api

...
|
||||||
46
tasks/api.yaml
Normal file
46
tasks/api.yaml
Normal file
@ -0,0 +1,46 @@
|
|||||||
|
---
|
||||||
|
|
||||||
|
- name: Cosmostat - API - Stop Service
|
||||||
|
ignore_errors: yes
|
||||||
|
shell: "systemctl --user -M {{ service_user }}@ stop {{ api_service_name }}"
|
||||||
|
|
||||||
|
- name: Cosmostat - API - copy api files
|
||||||
|
copy:
|
||||||
|
src: api/
|
||||||
|
dest: "{{ api_service_folder }}"
|
||||||
|
owner: "{{ service_user }}"
|
||||||
|
group: "{{ service_user }}"
|
||||||
|
mode: 0755
|
||||||
|
|
||||||
|
- name: "Cosmostat - API - template cosmostat_settings.yaml"
|
||||||
|
template:
|
||||||
|
src: cosmostat_settings.yaml
|
||||||
|
dest: "{{ api_service_folder }}/cosmostat_settings.yaml"
|
||||||
|
owner: "{{ service_user }}"
|
||||||
|
group: "{{ service_user }}"
|
||||||
|
mode: 0644
|
||||||
|
|
||||||
|
- name: "Cosmostat - API - template {{ api_service_name }}.service"
|
||||||
|
vars:
|
||||||
|
service_name: "{{ api_service_name }}"
|
||||||
|
service_working_folder: "{{ api_service_folder }}"
|
||||||
|
service_exe: "{{ api_service_exe }}"
|
||||||
|
service_group: "{{ service_user }}"
|
||||||
|
extra_options: ""
|
||||||
|
extra_service_options: ""
|
||||||
|
template:
|
||||||
|
src: "service_template.service"
|
||||||
|
dest: "{{ user_service_folder }}/{{ api_service_name }}.service"
|
||||||
|
owner: "{{ service_user }}"
|
||||||
|
group: "{{ service_user }}"
|
||||||
|
mode: 0644
|
||||||
|
|
||||||
|
- name: Cosmostat - API - Daemon Reload
|
||||||
|
shell: "systemctl --user -M {{ service_user }}@ daemon-reload"
|
||||||
|
|
||||||
|
- name: Cosmostat - API - Start Service
|
||||||
|
shell: "systemctl --user -M {{ service_user }}@ start {{ api_service_name }}"
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
...
|
||||||
112
tasks/init.yaml
Normal file
112
tasks/init.yaml
Normal file
@ -0,0 +1,112 @@
|
|||||||
|
---
|
||||||
|
|
||||||
|
- name: Cosmostat - Init - Get installed package list
|
||||||
|
when: dpkg_output is undefined
|
||||||
|
shell: "dpkg --list | grep ii | awk '{print $2}'"
|
||||||
|
register: dpkg_output
|
||||||
|
|
||||||
|
- name: Cosmostat - Init - Install Prereq Packages
|
||||||
|
when: cosmostat_packages_item not in dpkg_output.stdout
|
||||||
|
apt:
|
||||||
|
name:
|
||||||
|
- "{{ cosmostat_packages_item }}"
|
||||||
|
state: present
|
||||||
|
loop: "{{ cosmostat_packages }}"
|
||||||
|
loop_control:
|
||||||
|
loop_var: cosmostat_packages_item
|
||||||
|
|
||||||
|
# docker network for cosmostat service
|
||||||
|
- name: Cosmostat - Init - Set Up docker network
|
||||||
|
community.docker.docker_network:
|
||||||
|
name: "cosmostat_net"
|
||||||
|
driver: bridge
|
||||||
|
ipam_config:
|
||||||
|
- subnet: "{{ docker_subnet }}"
|
||||||
|
# - gateway: "{{ docker_gateway }}"
|
||||||
|
|
||||||
|
# allow service_user to sudo lshw without a password
|
||||||
|
- name: Cosmostat - Init - cosmos user sudoers file creation
|
||||||
|
copy:
|
||||||
|
dest: "/etc/sudoers.d/cosmostat"
|
||||||
|
content: "{{ cosmostat_sudoers_content }}"
|
||||||
|
owner: root
|
||||||
|
group: root
|
||||||
|
mode: 0600
|
||||||
|
|
||||||
|
# create service working folder
|
||||||
|
- name: Cosmostat - Init - create cosmostat service folder
|
||||||
|
file:
|
||||||
|
path: "{{ service_folder }}"
|
||||||
|
state: directory
|
||||||
|
owner: "{{ service_user }}"
|
||||||
|
group: "{{ service_user }}"
|
||||||
|
mode: '0755'
|
||||||
|
|
||||||
|
# create user service folder
|
||||||
|
- name: Cosmostat - Init - create cosmostat user service folder
|
||||||
|
file:
|
||||||
|
path: "{{ user_service_folder }}"
|
||||||
|
state: directory
|
||||||
|
owner: "{{ service_user }}"
|
||||||
|
group: "{{ service_user }}"
|
||||||
|
mode: '0755'
|
||||||
|
|
||||||
|
# Create python service venv
|
||||||
|
- name: Cosmostat - Init - Build Python Environment
|
||||||
|
block:
|
||||||
|
|
||||||
|
- name: "Cosmostat - Init - create python venv folder at {{ venv_folder }}"
|
||||||
|
file:
|
||||||
|
path: "{{ venv_folder }}"
|
||||||
|
state: directory
|
||||||
|
owner: "{{ service_user }}"
|
||||||
|
group: "{{ service_user }}"
|
||||||
|
mode: '0755'
|
||||||
|
|
||||||
|
- name: Cosmostat - Init - create python venv requirement file
|
||||||
|
copy:
|
||||||
|
dest: "{{ venv_folder }}/requirements.txt"
|
||||||
|
content: "{{ cosmostat_venv_packages }}"
|
||||||
|
|
||||||
|
owner: "{{ service_user }}"
|
||||||
|
group: "{{ service_user }}"
|
||||||
|
mode: 0644
|
||||||
|
|
||||||
|
- name: Cosmostat - Init - build python venv
|
||||||
|
become_user: "{{ service_user }}"
|
||||||
|
pip:
|
||||||
|
virtualenv: "{{ venv_folder }}"
|
||||||
|
requirements: "{{ venv_folder }}/requirements.txt"
|
||||||
|
virtualenv_command: python3 -m venv
|
||||||
|
state: present
|
||||||
|
|
||||||
|
# create node.js docker container for web dashboard
|
||||||
|
- name: node.js server container handler
|
||||||
|
block:
|
||||||
|
|
||||||
|
- name: Cosmostat - Init - node.js - copy server files
|
||||||
|
copy:
|
||||||
|
src: "web/node_server"
|
||||||
|
dest: "{{ service_control_web_folder }}/"
|
||||||
|
mode: 0755
|
||||||
|
owner: "{{ service_user }}"
|
||||||
|
group: "{{ service_user }}"
|
||||||
|
|
||||||
|
- name: Cosmostat - Init - node.js - build docker container
|
||||||
|
community.docker.docker_image_build:
|
||||||
|
name: ws_node
|
||||||
|
tag: latest
|
||||||
|
rebuild: always
|
||||||
|
path: "{{ service_control_web_folder }}/node_server"
|
||||||
|
dockerfile: Dockerfile
|
||||||
|
labels:
|
||||||
|
ws_node: "true"
|
||||||
|
|
||||||
|
- name: Cosmostat - Init - node.js - Prune old containers
|
||||||
|
community.docker.docker_prune:
|
||||||
|
containers: true
|
||||||
|
containers_filters:
|
||||||
|
label:
|
||||||
|
ws_node: "true"
|
||||||
|
|
||||||
|
...
|
||||||
17
tasks/main.yaml
Normal file
17
tasks/main.yaml
Normal file
@ -0,0 +1,17 @@
|
|||||||
|
---
|
||||||
|
# initialize environment
|
||||||
|
|
||||||
|
# set up API
|
||||||
|
|
||||||
|
# set up web stack
|
||||||
|
|
||||||
|
- name: Initialize Environment
|
||||||
|
when: not quick_refresh | bool
|
||||||
|
include_tasks: init.yaml
|
||||||
|
|
||||||
|
- name: Build API
|
||||||
|
include_tasks: api.yaml
|
||||||
|
|
||||||
|
- name: Build Web Dashboard
|
||||||
|
include_tasks: web.yaml
|
||||||
|
...
|
||||||
56
tasks/web.yaml
Normal file
56
tasks/web.yaml
Normal file
@ -0,0 +1,56 @@
|
|||||||
|
---
|
||||||
|
###############################################
|
||||||
|
# This part sets up cosmostat web dashboard
|
||||||
|
###############################################
|
||||||
|
|
||||||
|
# Create web Folder
|
||||||
|
- name: "Cosmostat - Web - create {{ service_control_web_folder }}"
|
||||||
|
file:
|
||||||
|
path: "{{ service_control_web_folder }}"
|
||||||
|
state: directory
|
||||||
|
mode: '0755'
|
||||||
|
owner: "{{ service_user }}"
|
||||||
|
group: "{{ service_user }}"
|
||||||
|
|
||||||
|
- name: Cosmostat - Init - node.js - copy dashboard files
|
||||||
|
copy:
|
||||||
|
src: "web/html"
|
||||||
|
dest: "{{ service_control_web_folder }}/"
|
||||||
|
mode: 0755
|
||||||
|
owner: "{{ service_user }}"
|
||||||
|
group: "{{ service_user }}"
|
||||||
|
|
||||||
|
# These are not needed unless there is a stack
|
||||||
|
#- name: Cosmostat - Web - copy files for history dashboard
|
||||||
|
# copy:
|
||||||
|
# src: "dashboard/"
|
||||||
|
# dest: "{{ service_control_web_folder }}/html"
|
||||||
|
# mode: 0755
|
||||||
|
# owner: "{{ service_user }}"
|
||||||
|
# group: "{{ service_user }}"
|
||||||
|
#
|
||||||
|
#- name: Cosmostat - Web - copy files for proxy container
|
||||||
|
# copy:
|
||||||
|
# src: "proxy/"
|
||||||
|
# dest: "{{ service_control_web_folder }}/proxy"
|
||||||
|
# mode: 0755
|
||||||
|
# owner: "{{ service_user }}"
|
||||||
|
# group: "{{ service_user }}"
|
||||||
|
|
||||||
|
- name: docker container handler
|
||||||
|
block:
|
||||||
|
|
||||||
|
- name: service_control_website - template docker-compose.yaml
|
||||||
|
template:
|
||||||
|
src: docker-compose.yaml
|
||||||
|
dest: "{{ service_control_web_folder }}/docker-compose.yaml"
|
||||||
|
mode: 0644
|
||||||
|
|
||||||
|
- name: "service_control_website - Start containers"
|
||||||
|
shell: "docker-compose -f {{ service_control_web_folder }}/docker-compose.yaml up -d"
|
||||||
|
register: docker_output
|
||||||
|
- debug:
    msg:
      - "{{ docker_output.stdout_lines }}"
      - "{{ docker_output.stderr_lines }}"
|
||||||
|
|
||||||
|
...
|
||||||
0
templates/cosmostat_api.service
Normal file
0
templates/cosmostat_api.service
Normal file
31
templates/cosmostat_settings.yaml
Normal file
31
templates/cosmostat_settings.yaml
Normal file
@ -0,0 +1,31 @@
|
|||||||
|
---
|
||||||
|
|
||||||
|
############################################
|
||||||
|
# Matt-Cloud Cosmostat API Settings
|
||||||
|
############################################
|
||||||
|
|
||||||
|
# Built {{ inventory_generation_timestamp }} by {{ jenkins_user }}
|
||||||
|
inventory_generation_timestamp: "{{ inventory_generation_timestamp }}"
|
||||||
|
jenkins_user: "{{ jenkins_user }}"
|
||||||
|
ansible_hostname: "{{ ansible_hostname }}"
|
||||||
|
|
||||||
|
##################################################################
|
||||||
|
### The cosmostat service is the foundation of the cosmostat platform
|
||||||
|
### Each Matt-Cloud system will have the cosmostat service installed
|
||||||
|
### This stores a short history of some system data in a python service
|
||||||
|
### This system status service will be queriable locally only by default
|
||||||
|
### It will also report back to the main cosmostat platform when queried and available
|
||||||
|
##################################################################
|
||||||
|
|
||||||
|
# docker subnet, will use to bind the IP in default secure mode
|
||||||
|
docker_subnet: "{{ docker_subnet }}"
|
||||||
|
docker_gateway: "{{ docker_gateway }}"
|
||||||
|
|
||||||
|
# python system variables
|
||||||
|
secure_api: {{ secure_api }}
|
||||||
|
noisy_test: {{ noisy_test }}
|
||||||
|
debug_output: {{ debug_output }}
|
||||||
|
push_redis: {{ push_redis }}
|
||||||
|
run_background: {{ run_background }}
|
||||||
|
log_output: {{ log_output }}
|
||||||
|
...
|
||||||
69
templates/docker-compose.yaml
Normal file
69
templates/docker-compose.yaml
Normal file
@ -0,0 +1,69 @@
|
|||||||
|
# for now there is no php code
|
||||||
|
# to save resources, also disabling nginx
|
||||||
|
# will map 3000 to 80 here unless this changes
|
||||||
|
|
||||||
|
services:
|
||||||
|
|
||||||
|
redis:
|
||||||
|
container_name: redis
|
||||||
|
image: redis:7-alpine
|
||||||
|
ports:
|
||||||
|
- {{ (docker_gateway + ':') if not secure_api else '' }}6379:6379
|
||||||
|
networks:
|
||||||
|
- cosmostat_net
|
||||||
|
restart: always
|
||||||
|
|
||||||
|
ws_node:
|
||||||
|
container_name: ws_node
|
||||||
|
build:
|
||||||
|
context: {{ service_control_web_folder }}/node_server
|
||||||
|
dockerfile: Dockerfile
|
||||||
|
image: ws_node:latest
|
||||||
|
volumes:
|
||||||
|
- {{ service_control_web_folder }}/html:/usr/src/app/public
|
||||||
|
ports:
|
||||||
|
# put back to 3000 if the stack is needed
|
||||||
|
- {{ (docker_gateway + ':') if not secure_api else '' }}80:3000
|
||||||
|
networks:
|
||||||
|
- cosmostat_net
|
||||||
|
restart: always
|
||||||
|
depends_on:
|
||||||
|
- redis
|
||||||
|
|
||||||
|
# these will be disabled until a stack is needed
|
||||||
|
# web_dash:
|
||||||
|
# container_name: web_dash
|
||||||
|
# image: php:8.0-apache
|
||||||
|
# ports:
|
||||||
|
# - {{ (docker_gateway + ':') if not secure_api else '' }}8080:80
|
||||||
|
# volumes:
|
||||||
|
# - ./html:/var/www/html/
|
||||||
|
# networks:
|
||||||
|
# - cosmostat_net
|
||||||
|
# restart: always
|
||||||
|
#
|
||||||
|
# nginx_proxy:
|
||||||
|
# container_name: nginx_proxy
|
||||||
|
# image: nginx:latest
|
||||||
|
# ports:
|
||||||
|
# - "{{ (docker_gateway + ':') if not secure_api else '' }}80:80"
|
||||||
|
# volumes:
|
||||||
|
# - ./proxy/nginx.conf:/etc/nginx/conf.d/default.conf
|
||||||
|
# networks:
|
||||||
|
# - cosmostat_net
|
||||||
|
# restart: always
|
||||||
|
# depends_on:
|
||||||
|
# - web_dash
|
||||||
|
#      - ws_node
|
||||||
|
|
||||||
|
|
||||||
|
networks:
|
||||||
|
cosmostat_net:
|
||||||
|
external: true
|
||||||
|
# driver: bridge
|
||||||
|
# ipam:
|
||||||
|
# driver: default
|
||||||
|
# config:
|
||||||
|
# -
|
||||||
|
# subnet: {{ docker_subnet }}
|
||||||
|
|
||||||
14
templates/service_template.service
Normal file
14
templates/service_template.service
Normal file
@ -0,0 +1,14 @@
|
|||||||
|
|
||||||
|
[Unit]
|
||||||
|
Description={{ service_name }}
|
||||||
|
After=network.target
|
||||||
|
{{ extra_options }}
|
||||||
|
|
||||||
|
[Service]
|
||||||
|
WorkingDirectory={{ service_working_folder }}
|
||||||
|
ExecStart={{ service_exe }}
|
||||||
|
Restart=always
|
||||||
|
{{ extra_service_options }}
|
||||||
|
|
||||||
|
[Install]
|
||||||
|
WantedBy=multi-user.target
|
||||||
Reference in New Issue
Block a user