This commit is contained in:
devdatt 2026-02-18 04:11:08 +05:30
parent db29cc1600
commit 2cfb6107c7
1 changed file with 149 additions and 130 deletions

113
setup.sh
View File

@ -176,18 +176,26 @@ mount -a
cat > /usr/local/bin/nginx_system_monitor_sampler.py<< 'EOL' cat > /usr/local/bin/nginx_system_monitor_sampler.py<< 'EOL'
#!/usr/bin/env python3 #!/usr/bin/env python3
import time, json, os, subprocess, threading import time
import json
import os
import subprocess
import threading
import re # Built-in, no installation needed
from collections import deque from collections import deque
from datetime import datetime from datetime import datetime, timezone
import psutil import psutil
# ---------------- CONFIGURATION ----------------
OUT_FILE = "/var/www/encoder/metrics.json" OUT_FILE = "/var/www/encoder/metrics.json"
TMP_FILE = OUT_FILE + ".tmp" TMP_FILE = OUT_FILE + ".tmp"
SAMPLE_INTERVAL = 10.0 SAMPLE_INTERVAL = 10.0
HISTORY_SECONDS = 15 * 60 HISTORY_SECONDS = 15 * 60
MAX_SAMPLES = int(HISTORY_SECONDS / SAMPLE_INTERVAL) MAX_SAMPLES = int(HISTORY_SECONDS / SAMPLE_INTERVAL)
# history buffers # ---------------- HISTORY BUFFERS ----------------
timestamps = deque(maxlen=MAX_SAMPLES) timestamps = deque(maxlen=MAX_SAMPLES)
cpu_hist = deque(maxlen=MAX_SAMPLES) cpu_hist = deque(maxlen=MAX_SAMPLES)
ram_hist = deque(maxlen=MAX_SAMPLES) ram_hist = deque(maxlen=MAX_SAMPLES)
@ -208,7 +216,8 @@ _prev_net=psutil.net_io_counters()
_prev_disk = psutil.disk_io_counters() _prev_disk = psutil.disk_io_counters()
_prev_time = time.time() _prev_time = time.time()
# shared gpu values # ---------------- SHARED GPU DATA ----------------
gpu_data = { gpu_data = {
"total": 0.0, "total": 0.0,
"Render/3D": 0.0, "Render/3D": 0.0,
@ -218,49 +227,66 @@ gpu_data={
} }
gpu_lock = threading.Lock() gpu_lock = threading.Lock()
# ---------------- GPU MONITOR THREAD ----------------
def gpu_monitor():
    """Continuously read intel_gpu_top JSON samples and publish per-engine
    busy percentages into the shared `gpu_data` dict (guarded by `gpu_lock`).

    Runs forever; if intel_gpu_top exits or cannot be spawned, waits briefly
    and restarts it.
    """
    global gpu_data
    # -s 1000 -> one sample per second.
    cmd = ["/usr/sbin/intel_gpu_top", "-J", "-s", "1000"]

    def _extract_objects(buf):
        """Split `buf` into complete top-level JSON object strings plus the
        unconsumed remainder, by tracking brace depth.

        NOTE(review): this assumes no '{'/'}' characters appear inside JSON
        string values, which holds for intel_gpu_top engine names — confirm
        if the tool's output format changes.
        """
        objects = []
        depth = 0
        start = None
        for i, ch in enumerate(buf):
            if ch == "{":
                if depth == 0:
                    start = i
                depth += 1
            elif ch == "}" and depth > 0:
                depth -= 1
                if depth == 0 and start is not None:
                    objects.append(buf[start:i + 1])
                    start = None
        # Keep only the partially-received object (if any) for the next read.
        remainder = buf[start:] if (depth > 0 and start is not None) else ""
        return objects, remainder

    def _busy_for(engines, name):
        """Return the 'busy' value of the first engine whose class matches
        `name` exactly (e.g. 'Video/0' matches 'Video' but 'VideoEnhance/0'
        does not), or 0.0 if that engine class is absent."""
        for key, stats in engines.items():
            if key == name or key.startswith(name + "/"):
                return float(stats.get("busy", 0.0))
        return 0.0

    while True:
        try:
            p = subprocess.Popen(
                cmd,
                stdout=subprocess.PIPE,
                stderr=subprocess.DEVNULL,
                text=True,
                bufsize=1,
            )
            buffer = ""
            for line in p.stdout:
                buffer += line
                if "}" not in line:
                    continue  # no object can have completed on this line
                objects, buffer = _extract_objects(buffer)
                for raw_json in objects:
                    try:
                        data = json.loads(raw_json)
                    except json.JSONDecodeError:
                        continue  # skip malformed/partial sample
                    engines = data.get("engines")
                    if not isinstance(engines, dict):
                        continue
                    with gpu_lock:
                        for name in ("Render/3D", "Video",
                                     "Blitter", "VideoEnhance"):
                            gpu_data[name] = _busy_for(engines, name)
                        # Total is the peak engine usage.
                        vals = [v for k, v in gpu_data.items() if k != "total"]
                        gpu_data["total"] = max(vals) if vals else 0.0
        except Exception:
            pass  # spawn/read failure: fall through to the restart delay
        # Unconditional pause so a cleanly-exiting intel_gpu_top does not
        # trigger a tight restart loop.
        time.sleep(2)
threading.Thread(target=gpu_monitor, daemon=True).start() threading.Thread(target=gpu_monitor, daemon=True).start()
# ---------- sampling ---------- # ---------------- SAMPLING ----------------
def sample_once(): def sample_once():
global _prev_net, _prev_disk, _prev_time global _prev_net, _prev_disk, _prev_time
@ -285,7 +311,7 @@ def sample_once():
except: except:
disk_percent = 0.0 disk_percent = 0.0
elapsed=now-_prev_time if _prev_time else SAMPLE_INTERVAL elapsed = now - _prev_time if _prev_time > 0 else SAMPLE_INTERVAL
if elapsed <= 0: elapsed = SAMPLE_INTERVAL if elapsed <= 0: elapsed = SAMPLE_INTERVAL
in_rate = (net.bytes_recv - _prev_net.bytes_recv) / elapsed in_rate = (net.bytes_recv - _prev_net.bytes_recv) / elapsed
@ -303,45 +329,41 @@ def sample_once():
gpu_blitter_hist.append(round(gb, 2)) gpu_blitter_hist.append(round(gb, 2))
gpu_ve_hist.append(round(ge, 2)) gpu_ve_hist.append(round(ge, 2))
net_in_hist.append(int(in_rate)) net_in_hist.append(int(max(0, in_rate)))
net_out_hist.append(int(out_rate)) net_out_hist.append(int(max(0, out_rate)))
disk_read_hist.append(int(read_rate)) disk_read_hist.append(int(max(0, read_rate)))
disk_write_hist.append(int(write_rate)) disk_write_hist.append(int(max(0, write_rate)))
disk_percent_hist.append(round(disk_percent, 2)) disk_percent_hist.append(round(disk_percent, 2))
_prev_net=net _prev_net, _prev_disk, _prev_time = net, disk, now
_prev_disk=disk
_prev_time=now # ---------------- WRITE JSON ----------------
# ---------------- WRITE JSON ----------------
def write_json_atomic():
    """Snapshot every history buffer into a JSON payload and publish it.

    The payload is written to TMP_FILE first and then moved over OUT_FILE
    with os.replace(), so readers never observe a partially written file.
    """
    series = {
        "timestamps": timestamps,
        "cpu_percent": cpu_hist,
        "ram_percent": ram_hist,
        "gpu_total": gpu_total_hist,
        "gpu_render": gpu_render_hist,
        "gpu_video": gpu_video_hist,
        "gpu_blitter": gpu_blitter_hist,
        "gpu_videoenhance": gpu_ve_hist,
        "net_in_Bps": net_in_hist,
        "net_out_Bps": net_out_hist,
        "disk_read_Bps": disk_read_hist,
        "disk_write_Bps": disk_write_hist,
        "disk_percent": disk_percent_hist,
    }
    payload = {key: list(buf) for key, buf in series.items()}
    payload["sample_interval"] = SAMPLE_INTERVAL
    stamp = datetime.now(timezone.utc).isoformat(timespec='seconds')
    payload["generated_at"] = stamp.replace("+00:00", "Z")
    with open(TMP_FILE, "w") as sink:
        json.dump(payload, sink)
    os.replace(TMP_FILE, OUT_FILE)  # atomic rename on POSIX
# ---------- main ---------- # ---------------- MAIN LOOP ----------------
def main(): def main():
global _prev_net, _prev_disk, _prev_time global _prev_net, _prev_disk, _prev_time
_prev_net = psutil.net_io_counters() _prev_net = psutil.net_io_counters()
@ -353,16 +375,13 @@ def main():
sample_once() sample_once()
write_json_atomic() write_json_atomic()
except Exception as e: except Exception as e:
print("Sampler error:",e) pass # Keep service running silently
time.sleep(SAMPLE_INTERVAL) time.sleep(SAMPLE_INTERVAL)
if __name__ == "__main__": if __name__ == "__main__":
main() main()
EOL EOL
sudo systemctl enable --now system-monitor.service sudo systemctl enable --now system-monitor.service
sudo systemctl restart system-monitor.service --no-pager sudo systemctl restart system-monitor.service --no-pager
sudo reboot