Commit: stuff
.claude/settings.local.json (new file, 21 lines)
@@ -0,0 +1,21 @@
+{
+  "permissions": {
+    "allow": [
+      "Bash(find:*)",
+      "Bash(rm:*)",
+      "Bash(python:*)",
+      "Bash(uv install:*)",
+      "Bash(uv sync:*)",
+      "Bash(uv run:*)",
+      "Edit(**)",
+      "Bash(mkdir:*)",
+      "Bash(sqlite3:*)",
+      "Bash(pkill:*)",
+      "Bash(true)",
+      "Bash(curl:*)",
+      "Bash(APP_PORT=8081 uv run:*)"
+    ],
+    "deny": []
+  },
+  "defaultMode": "bypassPermissions"
+}
pyproject.toml
@@ -6,6 +6,11 @@ readme = "README.md"
 requires-python = ">=3.13"
 dependencies = [
     "dotenv>=0.9.9",
+    "httpx>=0.28.1",
     "nicegui>=2.24.1",
+    "niceguiasyncelement",
     "psutil>=6.1.0",
 ]
+
+[tool.uv.sources]
+niceguiasyncelement = { git = "https://git.project-insanity.de/gmarth/NiceGuiAsyncElement.git" }
@@ -1,9 +1,9 @@
 from nicegui import ui
-from utils import data_manager
+from utils import SystemMonitor, GPUMonitor
 
 
 class Header(ui.header):
-    def __init__(self):
+    def __init__(self, system_monitor: SystemMonitor, gpu_monitor: GPUMonitor):
         super().__init__(fixed=True, elevated=False)
 
         with self.classes('bg-transparent'):
@@ -16,23 +16,18 @@ class Header(ui.header):
             # Right side - system status only
             with ui.row().classes('items-center gap-4'):
                 # Get real-time data
-                dashboard_data = data_manager.get_dashboard_data()
 
                 # System load indicator
                 with ui.row().classes('items-center gap-2'):
                     ui.icon('memory', size='sm', color='cyan')
-                    ui.label(f'CPU: {dashboard_data["cpu"]["percent"]}%').classes('text-sm text-white')
+                    ui.label().classes('text-sm text-white').bind_text_from(system_monitor, 'cpu_percent',
+                                                                            lambda x: f'{x:.1f}%')
 
                 with ui.row().classes('items-center gap-2'):
-                    ui.icon('gpu_on', size='sm', color='orange')
-                    if dashboard_data['gpu']['available']:
-                        ui.label(f'GPU: {dashboard_data["gpu"]["percent"]}%').classes('text-sm text-white')
-                    else:
-                        ui.label('GPU: N/A').classes('text-sm text-white')
+                    ui.label().classes('text-sm text-white').bind_text_from(gpu_monitor, 'usage',
+                                                                            lambda x: f'{x:.1f}%')
 
                 with ui.row().classes('items-center gap-2'):
                     ui.icon('thermostat', size='sm', color='red')
-                    if dashboard_data['gpu']['available'] and dashboard_data['gpu']['temperature'] > 0:
-                        ui.label(f'{dashboard_data["gpu"]["temperature"]}°C').classes('text-sm text-white')
-                    else:
-                        ui.label('--°C').classes('text-sm text-white')
+                    ui.label().classes('text-sm text-white').bind_text_from(gpu_monitor, 'temperature',
+                                                                            lambda x: f'{x:.1f}°C')
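
[Review note, not part of the diff] The header labels above switch from one-shot f-strings to NiceGUI's binding system: bind_text_from(obj, name, backward) re-renders the label whenever obj.name is assigned, and binding.bindable_dataclass (applied to the monitors later in this diff) makes those assignments observable without polling. A minimal self-contained sketch of the mechanism, assuming nothing beyond NiceGUI itself:

    from nicegui import binding, ui

    @binding.bindable_dataclass
    class Cpu:
        percent: float = 0.0

    cpu = Cpu()
    # The label re-renders on every assignment to cpu.percent; the lambda only formats.
    ui.label().bind_text_from(cpu, 'percent', lambda x: f'CPU: {x:.1f}%')
    ui.timer(1.0, lambda: setattr(cpu, 'percent', (cpu.percent + 7) % 100))  # fake data source

    ui.run()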
src/main.py (60 lines changed)
@@ -5,7 +5,7 @@ from nicegui import ui, app
 
 from components import Header, Sidebar
 from pages import DashboardPage, OllamaManagerPage
-from utils import data_manager
+from utils import GPUMonitor, SystemMonitor
 import logging
 
 logging.basicConfig(
@@ -18,14 +18,13 @@ load_dotenv()
 
 app.add_static_files('/static', 'src/static')
 
-# Start global data collection
-@app.on_startup
-async def startup():
-    data_manager.start()
-
-@app.on_shutdown
-async def shutdown():
-    data_manager.stop()
+# Create monitor instances (bindable dataclasses)
+system_monitor = SystemMonitor()
+gpu_monitor = GPUMonitor()
+
+app.timer(2.0, system_monitor.update)
+app.timer(2.0, gpu_monitor.update)
 
 
 def create_layout(current_route='/'):
@@ -36,14 +35,14 @@ def create_layout(current_route='/'):
     # Add custom CSS
     ui.add_head_html('<link rel="stylesheet" type="text/css" href="/static/style.css">')
 
-    Header()
+    Header(system_monitor, gpu_monitor)
     Sidebar(current_route)
 
 
 @ui.page('/')
 async def index_page():
     create_layout('/')
-    DashboardPage()
+    DashboardPage(system_monitor, gpu_monitor)
 
 
 @ui.page('/system')
@@ -59,46 +58,9 @@ async def system_page():
 @ui.page('/ollama')
 async def ollama_page():
     create_layout('/ollama')
-    with ui.element('div').classes('main-content w-full'):
-        with ui.column().classes('w-full max-w-4xl mx-auto p-6 gap-6'):
-            ui.label('Ollama Manager').classes('text-2xl font-bold text-white mb-4')
-
-            # Status cards
-            with ui.row().classes('w-full gap-4 mb-6'):
-                with ui.card().classes('metric-card flex-grow p-4'):
-                    with ui.row().classes('items-center gap-2'):
-                        ui.icon('check_circle', color='green')
-                        ui.label('Status: Online').classes('font-medium text-white')
-
-                with ui.card().classes('metric-card flex-grow p-4'):
-                    ui.label('Version: 0.11.11').classes('font-medium text-white')
-
-            # Models list
-            with ui.card().classes('metric-card p-6'):
-                ui.label('Installed Models').classes('text-lg font-bold text-white mb-4')
-
-                models = [
-                    ('llama3.2:3b', '2.0 GB', 'Q4_0'),
-                    ('mistral:7b', '4.1 GB', 'Q4_0'),
-                    ('codellama:13b', '7.4 GB', 'Q4_K_M'),
-                    ('phi3:mini', '2.3 GB', 'Q4_0'),
-                ]
-
-                for name, size, quant in models:
-                    with ui.card().classes('metric-card p-4 mb-2'):
-                        with ui.row().classes('w-full items-center'):
-                            with ui.column().classes('gap-1'):
-                                ui.label(name).classes('font-bold text-white')
-                                with ui.row().classes('gap-2'):
-                                    ui.chip(size, icon='storage').props('outline dense color=cyan')
-                                    ui.chip(quant, icon='memory').props('outline dense color=orange')
-
-                            ui.space()
-
-                            with ui.row().classes('gap-2'):
-                                ui.button(icon='play_arrow').props('round flat color=green').tooltip('Run')
-                                ui.button(icon='info').props('round flat color=blue').tooltip('Info')
-                                ui.button(icon='delete').props('round flat color=red').tooltip('Delete')
+    await OllamaManagerPage.create()
+
+    # await page._load_models()
 
 
 @ui.page('/processes')
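
[Review note] The refactor above replaces the @app.on_startup/@app.on_shutdown pair with module-level monitor instances plus app.timer(...). Both timers run once on the server, shared by all connected clients, and are tied to the app's lifecycle, which is why no explicit stop hook remains. A hedged sketch of that lifecycle (names are illustrative, not from the commit):

    from nicegui import app, ui

    class Heartbeat:
        count: int = 0

        def tick(self) -> None:
            self.count += 1  # runs server-side, even with no browser connected

    hb = Heartbeat()
    app.timer(2.0, hb.tick)  # analogous to app.timer(2.0, system_monitor.update)

    @ui.page('/')
    def index() -> None:
        ui.label().bind_text_from(hb, 'count', lambda c: f'server ticks: {c}')

    ui.run()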
src/monitor_example.py (new file, 82 lines)
@@ -0,0 +1,82 @@
+#!/usr/bin/env python3
+"""
+Example of using the refactored monitoring classes with NiceGUI's reactive system.
+This demonstrates how the bindable dataclasses automatically update the UI.
+"""
+
+from nicegui import ui, app
+from utils import GPUMonitor, SystemMonitor
+
+# Create monitor instances (bindable dataclasses)
+system_monitor = SystemMonitor()
+gpu_monitor = GPUMonitor()
+
+app.timer(2.0, system_monitor.update)
+app.timer(2.0, gpu_monitor.update)
+
+
+@ui.page('/')
+async def index_page():
+    """Example usage of monitoring classes with NiceGUI"""
+
+    # Create UI that automatically updates when dataclass fields change
+    with ui.card().classes('w-full'):
+        ui.label('System Monitor').classes('text-h4')
+
+        # CPU section - binds directly to dataclass fields
+        with ui.row():
+            ui.label('CPU:')
+            ui.label().bind_text_from(system_monitor, 'cpu_percent',
+                                      lambda x: f'{x:.1f}%')
+            ui.label().bind_text_from(system_monitor, 'cpu_model')
+
+        # Memory section
+        with ui.row():
+            ui.label('Memory:')
+            ui.label().bind_text_from(system_monitor, 'memory_percent',
+                                      lambda x: f'{x:.1f}%')
+            ui.label().bind_text_from(system_monitor, 'memory_used',
+                                      lambda x: f'{x / (1024**3):.1f} GB used')
+
+        # Disk section
+        with ui.row():
+            ui.label('Disk:')
+            ui.label().bind_text_from(system_monitor, 'disk_percent',
+                                      lambda x: f'{x:.1f}%')
+
+        # Process count
+        with ui.row():
+            ui.label('Processes:')
+            ui.label().bind_text_from(system_monitor, 'process_count')
+
+    # GPU Monitor section (if available)
+    if gpu_monitor.available:
+        with ui.card().classes('w-full mt-4'):
+            ui.label('GPU Monitor').classes('text-h4')
+
+            with ui.row():
+                ui.label('GPU:')
+                ui.label().bind_text_from(gpu_monitor, 'gpu_name')
+                ui.label().bind_text_from(gpu_monitor, 'vendor',
+                                          lambda x: f'({x.value})')
+
+            with ui.row():
+                ui.label('Usage:')
+                ui.label().bind_text_from(gpu_monitor, 'usage',
+                                          lambda x: f'{x:.1f}%')
+                ui.label('Temp:')
+                ui.label().bind_text_from(gpu_monitor, 'temperature',
+                                          lambda x: f'{x:.1f}°C')
+
+            with ui.row():
+                ui.label('Memory:')
+                ui.label().bind_text_from(gpu_monitor, 'memory_percent',
+                                          lambda x: f'{x:.1f}%')
+                ui.label().bind_text_from(gpu_monitor, 'memory_used',
+                                          lambda x: f'({(x / 1024.0):.2f} GB / {(gpu_monitor.memory_total / 1024.0):.2f} GB)')
+    else:
+        with ui.card().classes('w-full mt-4'):
+            ui.label('No GPU detected').classes('text-h4')
+
+
+if __name__ in {"__main__", "__mp_main__"}:
+    ui.run(port=8081, title='System Monitor Example')
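
[Review note] The example above is self-contained: given the project layout in this commit, something like `uv run python src/monitor_example.py` should serve it on http://localhost:8081 (the hard-coded `port=8081` matches the `APP_PORT=8081 uv run:*` allow-rule added in .claude/settings.local.json; the exact launch command is not recorded in the commit). The `__mp_main__` check keeps `ui.run()` working when NiceGUI spawns its auto-reload worker process.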
@@ -1,32 +1,104 @@
+from typing import Literal
 from nicegui import ui
 from components.circular_progress import MetricCircle, LargeMetricCircle, ColorfulMetricCard
-from utils import data_manager
+from utils import SystemMonitor, GPUMonitor
+
+"""
+    with ui.element('div').classes('main-content w-full'):
+        with ui.column().classes('w-full max-w-4xl mx-auto p-6 gap-6'):
+            ui.label('Ollama Manager').classes('text-2xl font-bold text-white mb-4')
+
+            # Status cards
+            with ui.row().classes('w-full gap-4 mb-6'):
+                with ui.card().classes('metric-card flex-grow p-4'):
+                    with ui.row().classes('items-center gap-2'):
+                        ui.icon('check_circle', color='green')
+                        ui.label('Status: Online').classes('font-medium text-white')
+
+                with ui.card().classes('metric-card flex-grow p-4'):
+                    ui.label('Version: 0.11.11').classes('font-medium text-white')
+
+            # Models list
+            with ui.card().classes('metric-card p-6'):
+                ui.label('Installed Models').classes('text-lg font-bold text-white mb-4')
+
+                models = [
+                    ('llama3.2:3b', '2.0 GB', 'Q4_0'),
+                    ('mistral:7b', '4.1 GB', 'Q4_0'),
+                    ('codellama:13b', '7.4 GB', 'Q4_K_M'),
+                    ('phi3:mini', '2.3 GB', 'Q4_0'),
+                ]
+
+                for name, size, quant in models:
+                    with ui.card().classes('metric-card p-4 mb-2'):
+                        with ui.row().classes('w-full items-center'):
+                            with ui.column().classes('gap-1'):
+                                ui.label(name).classes('font-bold text-white')
+                                with ui.row().classes('gap-2'):
+                                    ui.chip(size, icon='storage').props('outline dense color=cyan')
+                                    ui.chip(quant, icon='memory').props('outline dense color=orange')
+
+                            ui.space()
+
+                            with ui.row().classes('gap-2'):
+                                ui.button(icon='play_arrow').props('round flat color=green').tooltip('Run')
+                                ui.button(icon='info').props('round flat color=blue').tooltip('Info')
+                                ui.button(icon='delete').props('round flat color=red').tooltip('Delete')
+"""
 
 
-class DashboardPage:
-    def __init__(self):
-        # Get real-time data
-        dashboard_data = data_manager.get_dashboard_data()
-        system_info = data_manager.get_system_info()
+class DashboardPage(ui.column):
 
-        # Main content area with proper viewport handling
-        with ui.element('div').classes('main-content w-full'):
+    def __init__(self, system_monitor: SystemMonitor, gpu_monitor: GPUMonitor, *, wrap: bool = False, align_items: None | Literal['start'] | Literal['end'] | Literal['center'] | Literal['baseline'] | Literal['stretch'] = None) -> None:
+        super().__init__(wrap=wrap, align_items=align_items)
+        self.system_monitor = system_monitor
+        self.gpu_monitor = gpu_monitor
+
+        self.classes('main-content w-full')
+        # Main content area with proper viewport handling
+        with self:
             with ui.column().classes('w-full max-w-6xl mx-auto p-6 gap-6'):
                 # Top stats grid
                 with ui.grid(columns=4).classes('w-full gap-4'):
-                    MetricCircle('CPU', f"{dashboard_data['cpu']['percent']}%",
-                                 dashboard_data['cpu']['percent'] / 100, '#e879f9', 'memory')
-                    MetricCircle('Memory', f"{dashboard_data['memory']['used_gb']}GB",
-                                 dashboard_data['memory']['percent'] / 100, '#10b981', 'storage')
-
-                    if dashboard_data['gpu']['available']:
-                        MetricCircle('GPU', f"{dashboard_data['gpu']['percent']}%",
-                                     dashboard_data['gpu']['percent'] / 100, '#f97316', 'gpu_on')
-                        MetricCircle('Temp', f"{dashboard_data['gpu']['temperature']}°C",
-                                     dashboard_data['gpu']['temperature'] / 100, '#06b6d4', 'thermostat')
-                    else:
-                        MetricCircle('GPU', 'N/A', 0, '#f97316', 'gpu_on')
-                        MetricCircle('Temp', 'N/A', 0, '#06b6d4', 'thermostat')
+                    # CPU metric with binding
+                    with ui.card().classes('metric-card p-4 text-center'):
+                        with ui.column().classes('items-center gap-2'):
+                            ui.icon('memory', size='md', color='#e879f9')
+                            ui.label('CPU').classes('text-sm text-grey-5 font-medium')
+                            ui.circular_progress(size='60px', color='#e879f9').bind_value_from(
+                                system_monitor, 'cpu_percent', lambda x: x / 100)
+                            ui.label().classes('text-lg font-bold text-white').bind_text_from(
+                                system_monitor, 'cpu_percent', lambda x: f'{x:.1f}%')
+
+                    # Memory metric with binding
+                    with ui.card().classes('metric-card p-4 text-center'):
+                        with ui.column().classes('items-center gap-2'):
+                            ui.icon('storage', size='md', color='#10b981')
+                            ui.label('Memory').classes('text-sm text-grey-5 font-medium')
+                            ui.circular_progress(size='60px', color='#10b981').bind_value_from(
+                                system_monitor, 'memory_percent', lambda x: x / 100)
+                            ui.label().classes('text-lg font-bold text-white').bind_text_from(
+                                system_monitor, 'memory_used', lambda x: f'{x / (1024**3):.1f}GB')
+
+                    # GPU metric with conditional rendering
+                    with ui.card().classes('metric-card p-4 text-center'):
+                        with ui.column().classes('items-center gap-2'):
+                            ui.icon('gpu_on', size='md', color='#f97316')
+                            ui.label('GPU').classes('text-sm text-grey-5 font-medium')
+                            ui.circular_progress(size='60px', color='#f97316').bind_value_from(
+                                gpu_monitor, 'usage', lambda x: x / 100 if gpu_monitor.available else 0)
+                            ui.label().classes('text-lg font-bold text-white').bind_text_from(
+                                gpu_monitor, 'usage', lambda x: f'{x:.1f}%' if gpu_monitor.available else 'N/A')
+
+                    # Temperature metric
+                    with ui.card().classes('metric-card p-4 text-center'):
+                        with ui.column().classes('items-center gap-2'):
+                            ui.icon('thermostat', size='md', color='#06b6d4')
+                            ui.label('Temp').classes('text-sm text-grey-5 font-medium')
+                            ui.circular_progress(size='60px', color='#06b6d4').bind_value_from(
+                                gpu_monitor, 'temperature', lambda x: x / 100 if gpu_monitor.available else 0)
+                            ui.label().classes('text-lg font-bold text-white').bind_text_from(
+                                gpu_monitor, 'temperature', lambda x: f'{x:.1f}°C' if gpu_monitor.available else 'N/A')
 
                 # Main dashboard content
                 with ui.row().classes('w-full gap-6'):
@@ -76,32 +148,47 @@ class DashboardPage(ui.column):
 
                     # Right column - system info and GPU details
                     with ui.column().classes('w-80 gap-4'):
-                        # Large GPU usage circle
-                        if dashboard_data['gpu']['available']:
-                            gpu_info = data_manager.get_gpu_info()
-                            gpu_name = 'Unknown GPU'
-                            if gpu_info.get('cards') and len(gpu_info['cards']) > 0:
-                                gpu_name = gpu_info['cards'][0].get('name', 'Unknown GPU')
-                            LargeMetricCircle('GPU Usage', gpu_name,
-                                              dashboard_data['gpu']['percent'] / 100, '#f97316')
-                        else:
-                            LargeMetricCircle('GPU Usage', 'No GPU Detected', 0, '#f97316')
+                        # Large GPU usage circle with binding
+                        with ui.card().classes('metric-card p-6 text-center'):
+                            with ui.column().classes('items-center gap-3'):
+                                ui.label('GPU Usage').classes('text-sm text-grey-5 font-medium uppercase tracking-wide')
+                                ui.circular_progress(size='120px', color='#f97316').bind_value_from(
+                                    gpu_monitor, 'usage', lambda x: x / 100 if gpu_monitor.available else 0)
+                                ui.label().classes('text-2xl font-bold text-white').bind_text_from(
+                                    gpu_monitor, 'usage', lambda x: f'{int(x)}%' if gpu_monitor.available else '0%')
+                                ui.label().classes('text-xs text-grey-5').bind_text_from(
+                                    gpu_monitor, 'gpu_name', lambda x: x if gpu_monitor.available else 'No GPU Detected')
 
-                        # System info card
+                        # System info card with bindings
                         with ui.card().classes('metric-card p-4'):
                             ui.label('System Info').classes('text-sm font-bold text-white mb-3')
 
                             with ui.column().classes('gap-2'):
-                                self._info_row('OS', system_info.get('os', 'Unknown'))
-                                self._info_row('Kernel', system_info.get('kernel', 'Unknown'))
-                                self._info_row('CPU', system_info.get('cpu', 'Unknown'))
-                                # Get first GPU name for display
-                                gpu_info = data_manager.get_gpu_info()
-                                gpu_display = 'No GPU'
-                                if gpu_info.get('cards') and len(gpu_info['cards']) > 0:
-                                    gpu_display = gpu_info['cards'][0].get('name', 'Unknown GPU')
-                                self._info_row('GPU', gpu_display)
-                                self._info_row('Uptime', system_info.get('uptime', 'Unknown'))
+                                # OS
+                                with ui.row().classes('w-full justify-between'):
+                                    ui.label('OS').classes('text-xs text-grey-5')
+                                    ui.label().classes('text-xs text-white font-medium').bind_text_from(
+                                        system_monitor, 'os_name')
+                                # Kernel
+                                with ui.row().classes('w-full justify-between'):
+                                    ui.label('Kernel').classes('text-xs text-grey-5')
+                                    ui.label().classes('text-xs text-white font-medium').bind_text_from(
+                                        system_monitor, 'kernel')
+                                # CPU
+                                with ui.row().classes('w-full justify-between'):
+                                    ui.label('CPU').classes('text-xs text-grey-5')
+                                    ui.label().classes('text-xs text-white font-medium').bind_text_from(
+                                        system_monitor, 'cpu_model')
+                                # GPU
+                                with ui.row().classes('w-full justify-between'):
+                                    ui.label('GPU').classes('text-xs text-grey-5')
+                                    ui.label().classes('text-xs text-white font-medium').bind_text_from(
+                                        gpu_monitor, 'gpu_name', lambda x: x if gpu_monitor.available else 'No GPU')
+                                # Uptime
+                                with ui.row().classes('w-full justify-between'):
+                                    ui.label('Uptime').classes('text-xs text-grey-5')
+                                    ui.label().classes('text-xs text-white font-medium').bind_text_from(
+                                        system_monitor, 'uptime')
 
                         # Ollama status card
                         with ui.card().classes('metric-card p-4'):
@@ -115,34 +202,53 @@ class DashboardPage(ui.column):
                             ui.label('4 models active').classes('text-xs text-grey-5')
                             ui.label('llama3.2:3b, mistral:7b...').classes('text-xs text-grey-6')
 
-                # Bottom metrics row
+                # Bottom metrics row with bindings
                 with ui.grid(columns=5).classes('w-full gap-4 mt-4'):
-                    self._bottom_metric(str(dashboard_data['processes']['count']), 'Processes', 'dashboard')
-
-                    # Format network data (bytes to human readable)
-                    network_mb = (dashboard_data['network']['bytes_recv'] + dashboard_data['network']['bytes_sent']) / (1024 * 1024)
-                    if network_mb > 1024:
-                        network_display = f"{network_mb/1024:.1f}GB"
-                    else:
-                        network_display = f"{network_mb:.0f}MB"
-                    self._bottom_metric(network_display, 'Network', 'wifi')
-
-                    self._bottom_metric(f"{dashboard_data['disk']['percent']:.0f}%", 'Disk', 'storage')
-
-                    # CPU core count as services
-                    self._bottom_metric(str(dashboard_data['cpu']['count']), 'CPU Cores', 'settings')
-
-                    # Memory total
-                    self._bottom_metric(f"{dashboard_data['memory']['total_gb']:.0f}GB", 'Total RAM', 'memory')
+                    # Processes
+                    with ui.card().classes('metric-card p-3 text-center'):
+                        with ui.column().classes('items-center gap-1'):
+                            ui.icon('dashboard', size='sm', color='grey-5')
+                            ui.label().classes('text-lg font-bold text-white').bind_text_from(
+                                system_monitor, 'process_count', lambda x: str(x))
+                            ui.label('Processes').classes('text-xs text-grey-5')
+
+                    # Network
+                    with ui.card().classes('metric-card p-3 text-center'):
+                        with ui.column().classes('items-center gap-1'):
+                            ui.icon('wifi', size='sm', color='grey-5')
+                            ui.label().classes('text-lg font-bold text-white').bind_text_from(
+                                system_monitor, 'network_bytes_recv',
+                                lambda x: self._format_network(system_monitor.network_bytes_recv + system_monitor.network_bytes_sent))
+                            ui.label('Network').classes('text-xs text-grey-5')
+
+                    # Disk
+                    with ui.card().classes('metric-card p-3 text-center'):
+                        with ui.column().classes('items-center gap-1'):
+                            ui.icon('storage', size='sm', color='grey-5')
+                            ui.label().classes('text-lg font-bold text-white').bind_text_from(
+                                system_monitor, 'disk_percent', lambda x: f'{x:.0f}%')
+                            ui.label('Disk').classes('text-xs text-grey-5')
+
+                    # CPU Cores
+                    with ui.card().classes('metric-card p-3 text-center'):
+                        with ui.column().classes('items-center gap-1'):
+                            ui.icon('settings', size='sm', color='grey-5')
+                            ui.label().classes('text-lg font-bold text-white').bind_text_from(
+                                system_monitor, 'cpu_count', lambda x: str(x))
+                            ui.label('CPU Cores').classes('text-xs text-grey-5')
+
+                    # Total RAM
+                    with ui.card().classes('metric-card p-3 text-center'):
+                        with ui.column().classes('items-center gap-1'):
+                            ui.icon('memory', size='sm', color='grey-5')
+                            ui.label().classes('text-lg font-bold text-white').bind_text_from(
+                                system_monitor, 'memory_total', lambda x: f'{x / (1024**3):.0f}GB')
+                            ui.label('Total RAM').classes('text-xs text-grey-5')
 
-    def _info_row(self, label: str, value: str):
-        with ui.row().classes('w-full justify-between'):
-            ui.label(label).classes('text-xs text-grey-5')
-            ui.label(value).classes('text-xs text-white font-medium')
-
-    def _bottom_metric(self, value: str, label: str, icon: str):
-        with ui.card().classes('metric-card p-3 text-center'):
-            with ui.column().classes('items-center gap-1'):
-                ui.icon(icon, size='sm', color='grey-5')
-                ui.label(value).classes('text-lg font-bold text-white')
-                ui.label(label).classes('text-xs text-grey-5')
+    def _format_network(self, total_bytes: int) -> str:
+        """Format network bytes to human readable format"""
+        mb = total_bytes / (1024 * 1024)
+        if mb > 1024:
+            return f"{mb/1024:.1f}GB"
+        else:
+            return f"{mb:.0f}MB"
@@ -1,11 +1,24 @@
 from nicegui import ui
+from utils import ollama
+from typing import Literal, List, Dict
+from pprint import pprint
+from niceguiasyncelement import AsyncColumn
 
 
-class OllamaManagerPage(ui.column):
-    def __init__(self):
-        super().__init__()
+class OllamaManagerPage(AsyncColumn):
 
-        with self.classes('w-full gap-6 p-6'):
+    models: List
+    quick_test_select: ui.select
+    quick_test_send: ui.button
+    quick_test_textarea: ui.textarea
+    quick_test_response: ui.label
+
+    async def build(self):
+
+        self.models = []
+
+        self.classes('main-content')
+        with self:
             ui.label('Ollama Manager').classes('text-h4 font-bold')
 
             # Status cards
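
[Review note] AsyncColumn comes from the NiceGuiAsyncElement git dependency added in pyproject.toml. The library itself is not shown here, so this is inferred from the call sites in this diff: `await OllamaManagerPage.create()` (in src/main.py) apparently constructs the element and awaits the async build() hook above, which lets the page await `ollama.available_models()` over HTTP while building its layout, something a plain `ui.column.__init__` cannot do:

    # inferred usage, as seen in src/main.py's ollama_page():
    page = await OllamaManagerPage.create()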
@@ -23,48 +36,173 @@ class OllamaManagerPage(ui.column):
             with ui.row().classes('w-full items-center mb-4'):
                 ui.label('Installed Models').classes('text-h6 font-bold')
                 ui.space()
+                ui.button('Create New Model', icon='create', on_click=self._create_model).props('color=primary')
                 ui.button('Pull New Model', icon='download').props('color=primary')
 
             with ui.column().classes('w-full gap-2'):
-                self._create_model_item('llama3.2:3b', '2.0 GB', 'Q4_0')
-                self._create_model_item('mistral:7b', '4.1 GB', 'Q4_0')
-                self._create_model_item('codellama:13b', '7.4 GB', 'Q4_K_M')
-                self._create_model_item('phi3:mini', '2.3 GB', 'Q4_0')
+                await self.models_container()  # type: ignore
 
             # Quick test
             with ui.card().classes('w-full'):
                 ui.label('Quick Chat Test').classes('text-h6 font-bold mb-4')
 
                 with ui.row().classes('w-full gap-2 mb-2'):
-                    ui.select(
-                        ['llama3.2:3b', 'mistral:7b', 'codellama:13b', 'phi3:mini'],
-                        value='llama3.2:3b',
+                    self.quick_test_select = ui.select(
+                        [],
                         label='Model'
                     ).classes('flex-grow').props('outlined')
 
-                ui.textarea(
+                self.quick_test_textarea = ui.textarea(
                     label='Prompt',
                     placeholder='Enter your prompt here...',
                     value='Hello! Tell me a fun fact about AMD GPUs.'
                 ).classes('w-full').props('outlined')
 
-                ui.button('Send', icon='send').props('color=primary')
+                self.quick_test_send = ui.button('Send', icon='send', on_click=self._quick_test).props('color=primary')
 
-                with ui.expansion('Response', icon='message').classes('w-full mt-4').props('default-opened'):
-                    ui.label('Response will appear here...').classes('text-grey-7')
+                with ui.row():
+                    ui.icon('message', size='sm')
+                    ui.label('Response')
+                self.quick_test_response = ui.label('Response will appear here...').classes('text-grey-7')
+        await self._quick_test_populate_options()
 
-    def _create_model_item(self, name: str, size: str, quantization: str):
+    async def _create_model(self):
+        modelfile = """FROM qwen2.5-coder:7b
+PARAMETER num_ctx 32768
+PARAMETER temperature 0.1
+SYSTEM "Du bist ein Python-Experte."
+"""
+        print('creating model')
+        result = await ollama.create_ollama_model(
+            "qwen2.5-coder-32k-python",
+            modelfile
+        )
+        print('finished.')
+        print(result)
+        await self.models_container.refresh()
+
+    async def _loaded_models(self):
+        loaded = await ollama.loaded_models()
+        print(loaded)
+
+    async def _delete_model(self, model):
+        with ui.dialog() as dialog, ui.card():
+            ui.label('Are you sure?')
+            with ui.row():
+                ui.button('Yes', on_click=lambda: dialog.submit(True))
+                ui.button('No', on_click=lambda: dialog.submit(False))
+
+        result = await dialog
+        if result:
+            if await ollama.delete_model(model):
+                ui.notify(f'Model {model} deleted.')
+                self.models_container.refresh()
+
+    @ui.refreshable
+    async def models_container(self):
+        self.models = await ollama.available_models()
+        select_options = [model['name'] for model in self.models]
+        # self.quick_test_select.set_options(select_options)
+
+        for model in self.models:
+            self._create_model_item(model)
+        if hasattr(self, 'quick_test_select'):
+            await self._quick_test_populate_options()
+
+    def _create_model_item(self, model: Dict):
         with ui.card().classes('w-full'):
             with ui.row().classes('w-full items-center'):
-                with ui.column().classes('gap-1'):
-                    ui.label(name).classes('font-bold text-h6')
-                    with ui.row().classes('gap-4'):
-                        ui.chip(size, icon='storage').props('outline dense')
-                        ui.chip(quantization, icon='memory').props('outline dense')
+                with ui.column().classes('flex-grow gap-1'):
+                    # Model name
+                    ui.label(model['name']).classes('font-bold text-h6')
+
+                    # Details row with chips
+                    with ui.row().classes('gap-2 flex-wrap'):
+                        # Size chip
+                        size_gb = model['size'] / (1024**3)
+                        ui.chip(f"{size_gb:.2f} GB", icon='storage').props('outline dense color=cyan')
+
+                        # Quantization chip
+                        ui.chip(model['details']['quantization_level'], icon='memory').props('outline dense color=orange')
+
+                        # Parameter size chip
+                        if model['details'].get('parameter_size'):
+                            ui.chip(model['details']['parameter_size'], icon='tune').props('outline dense color=purple')
+
+                        # Format chip
+                        if model['details'].get('format'):
+                            ui.chip(model['details']['format'].upper(), icon='description').props('outline dense color=green')
+
+                        # Family chip
+                        if model['details'].get('family'):
+                            ui.chip(model['details']['family'], icon='category').props('outline dense color=blue')
+
+                    # Modified timestamp
+                    if model.get('modified_at'):
+                        from datetime import datetime
+                        try:
+                            # Parse ISO format timestamp
+                            modified = datetime.fromisoformat(model['modified_at'].replace('Z', '+00:00'))
+                            now = datetime.now(modified.tzinfo)
+                            delta = now - modified
+
+                            # Format time ago
+                            if delta.days > 0:
+                                time_ago = f"{delta.days} days ago"
+                            elif delta.seconds > 3600:
+                                time_ago = f"{delta.seconds // 3600} hours ago"
+                            else:
+                                time_ago = f"{delta.seconds // 60} minutes ago"
+
+                            ui.label(f"Modified: {time_ago}").classes('text-xs text-grey-5')
+                        except:
+                            pass
 
                 ui.space()
 
                 with ui.row().classes('gap-2'):
                     ui.button(icon='play_arrow').props('round flat color=primary').tooltip('Run Model')
-                    ui.button(icon='info').props('round flat').tooltip('Model Info')
-                    ui.button(icon='delete').props('round flat color=negative').tooltip('Delete Model')
+                    ui.button(icon='info', on_click=lambda m=model['name']: self._print_model_info(m)).props('round flat').tooltip('Model Info')
+                    ui.button(icon='delete', on_click=lambda m=model['name']: self._delete_model(m)).props('round flat color=negative').tooltip('Delete Model')
+
+    async def _print_model_info(self, model_name):
+        result = await ollama.model_info(model_name)
+        print(result)
+
+    async def _quick_test_populate_options(self):
+        select_options = [model['name'] for model in self.models]
+        self.quick_test_select.set_options(select_options)
+
+    async def _quick_test(self):
+        model = self.quick_test_select.value
+        if not model:
+            ui.notify('Select a model first.', type='warning')
+            return
+
+        self.quick_test_response.set_text('')
+        prompt = self.quick_test_textarea.value
+
+        # calling stream_ollama_chat
+        data = {
+            "model": model,
+            "messages": [{"role": "user", "content": prompt}],
+            "stream": True
+        }
+        self.quick_test_send.set_enabled(False)
+        try:
+            async for chunk in ollama.stream_chat(data):
+                if chunk.strip():
+                    # Parse the JSON chunk and extract content
+                    import json
+                    try:
+                        chunk_data = json.loads(chunk)
+                        if 'message' in chunk_data and 'content' in chunk_data['message']:
+                            content = chunk_data['message']['content']
+                            current_text = self.quick_test_response.text
+                            self.quick_test_response.set_text(current_text + content)
+                    except json.JSONDecodeError:
+                        pass  # Skip malformed chunks
+        except Exception as e:
+            ui.notify(f'Error: {str(e)}', type='negative')
+        finally:
+            self.quick_test_send.set_enabled(True)
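
[Review note] utils/ollama.py is not part of this diff, so the shapes of `ollama.stream_chat()`, `available_models()`, etc. are unknown. Given that `_quick_test` sends an Ollama /api/chat payload with `"stream": True` and `json.loads()`es each chunk, the helper presumably yields the newline-delimited JSON lines of Ollama's streaming response. A hypothetical, minimal implementation with httpx (added to the dependencies in this commit); the function name, URL, and default port are assumptions:

    import httpx

    async def stream_chat(data: dict, base_url: str = 'http://localhost:11434'):
        """Yield raw JSON lines from Ollama's streaming /api/chat endpoint (sketch)."""
        async with httpx.AsyncClient(timeout=None) as client:
            async with client.stream('POST', f'{base_url}/api/chat', json=data) as response:
                response.raise_for_status()
                async for line in response.aiter_lines():
                    yield line

One caveat worth flagging in review: `_create_model` awaits `self.models_container.refresh()`, while `_delete_model` calls it without `await`; since `ui.refreshable.refresh()` schedules the async rebuild itself and returns nothing to await, the un-awaited form is likely the intended one.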
src/static/style.css
@@ -120,8 +120,15 @@ body {
 
 /* Animations */
 @keyframes fadeIn {
-    from { opacity: 0; transform: translateY(10px); }
-    to { opacity: 1; transform: translateY(0); }
+    from {
+        opacity: 0;
+        transform: translateY(10px);
+    }
+
+    to {
+        opacity: 1;
+        transform: translateY(0);
+    }
 }
 
 .animate-fade-in {
@@ -136,14 +143,19 @@ body {
 }
 
 /* Layout and viewport fixes */
-body, .nicegui-content, .q-page, .q-page-container {
+body,
+.nicegui-content,
+.q-page,
+.q-page-container {
     background: #1a1d2e !important;
     margin: 0 !important;
     padding: 0 !important;
     overflow: hidden !important;
 }
 
-.q-header, .q-drawer, .q-footer {
+.q-header,
+.q-drawer,
+.q-footer {
     z-index: 2000 !important;
 }
 
@@ -157,7 +169,10 @@ body, .nicegui-content, .q-page, .q-page-container {
 
 .main-content {
     height: calc(100vh - 64px) !important;
+    width: calc(100vw - 256px) !important;
     overflow-y: auto !important;
     margin-left: 256px !important;
-    padding-top: 64px !important;
+    padding-top: 72px !important;
+    padding-left: 32px !important;
+    padding-right: 32px !important;
 }
src/utils/__init__.py
@@ -1,5 +1,4 @@
 from .system_monitor import SystemMonitor
 from .gpu_monitor import GPUMonitor
-from .data_manager import data_manager
 
-__all__ = ['SystemMonitor', 'GPUMonitor', 'data_manager']
+__all__ = ['SystemMonitor', 'GPUMonitor']
src/utils/data_manager.py (deleted, 142 lines)
@@ -1,142 +0,0 @@
-import asyncio
-import logging
-import threading
-import time
-from typing import Dict, Any
-from .system_monitor import SystemMonitor
-from .gpu_monitor import GPUMonitor
-
-logger = logging.getLogger(__name__)
-
-
-class DataManager:
-    """Global data manager that collects system information in the background"""
-
-    def __init__(self, update_interval: float = 1.0):
-        self.update_interval = update_interval
-        self.system_monitor = SystemMonitor()
-        self.gpu_monitor = GPUMonitor()
-
-        self._data = {
-            'system_info': {},
-            'system_stats': {},
-            'gpu_info': {},
-            'gpu_stats': {},
-            'last_update': 0
-        }
-
-        self._running = False
-        self._thread = None
-        self._lock = threading.RLock()
-
-    def start(self):
-        """Start the background data collection"""
-        if self._running:
-            return
-
-        self._running = True
-        self._thread = threading.Thread(target=self._update_loop, daemon=True)
-        self._thread.start()
-        logger.info("DataManager started")
-
-    def stop(self):
-        """Stop the background data collection"""
-        self._running = False
-        if self._thread and self._thread.is_alive():
-            self._thread.join(timeout=5)
-        logger.info("DataManager stopped")
-
-    def _update_loop(self):
-        """Background loop that updates system data"""
-        while self._running:
-            try:
-                start_time = time.time()
-
-                # Collect system information
-                with self._lock:
-                    # Static info (cached internally by monitors)
-                    self._data['system_info'] = self.system_monitor.get_system_info()
-                    self._data['gpu_info'] = self.gpu_monitor.get_gpu_info()
-
-                    # Dynamic stats
-                    self._data['system_stats'] = self.system_monitor.get_system_stats()
-                    self._data['gpu_stats'] = self.gpu_monitor.get_primary_gpu_stats()
-                    self._data['last_update'] = time.time()
-
-                # Calculate sleep time to maintain consistent intervals
-                elapsed = time.time() - start_time
-                sleep_time = max(0, self.update_interval - elapsed)
-
-                if sleep_time > 0:
-                    time.sleep(sleep_time)
-
-            except Exception as e:
-                logger.error(f"Error in data update loop: {e}")
-                time.sleep(1)  # Brief pause before retrying
-
-    def get_dashboard_data(self) -> Dict[str, Any]:
-        """Get all data needed for the dashboard"""
-        with self._lock:
-            stats = self._data['system_stats']
-            gpu_stats = self._data['gpu_stats']
-
-            # Format data for dashboard consumption
-            return {
-                'cpu': {
-                    'percent': round(stats.get('cpu', {}).get('percent', 0), 1),
-                    'count': stats.get('cpu', {}).get('count', 0)
-                },
-                'memory': {
-                    'percent': round(stats.get('memory', {}).get('percent', 0), 1),
-                    'used_gb': round(stats.get('memory', {}).get('used', 0) / (1024**3), 1),
-                    'total_gb': round(stats.get('memory', {}).get('total', 0) / (1024**3), 1)
-                },
-                'gpu': {
-                    'percent': round(gpu_stats.get('usage', 0), 1),
-                    'temperature': round(gpu_stats.get('temperature', 0), 1),
-                    'available': gpu_stats.get('available', False)
-                },
-                'processes': {
-                    'count': stats.get('processes', {}).get('count', 0)
-                },
-                'disk': {
-                    'percent': round(stats.get('disk', {}).get('percent', 0), 1)
-                },
-                'network': {
-                    'bytes_sent': stats.get('network', {}).get('bytes_sent', 0),
-                    'bytes_recv': stats.get('network', {}).get('bytes_recv', 0)
-                },
-                'last_update': self._data['last_update']
-            }
-
-    def get_system_info(self) -> Dict[str, Any]:
-        """Get static system information"""
-        with self._lock:
-            return self._data['system_info'].copy()
-
-    def get_system_stats(self) -> Dict[str, Any]:
-        """Get current system statistics"""
-        with self._lock:
-            return self._data['system_stats'].copy()
-
-    def get_gpu_info(self) -> Dict[str, Any]:
-        """Get static GPU information"""
-        with self._lock:
-            return self._data['gpu_info'].copy()
-
-    def get_gpu_stats(self) -> Dict[str, Any]:
-        """Get current GPU statistics"""
-        with self._lock:
-            return self._data['gpu_stats'].copy()
-
-    def get_processes(self, limit: int = 10) -> list:
-        """Get top processes (fetched on demand to avoid overhead)"""
-        return self.system_monitor.get_processes(limit)
-
-    def format_bytes(self, bytes_value: int) -> str:
-        """Format bytes to human readable format"""
-        return self.system_monitor.format_bytes(bytes_value)
-
-
-# Global instance
-data_manager = DataManager()
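
[Review note] With this deletion the polling thread, the RLock and the dict-of-dicts snapshot all disappear. Their two responsibilities are now split between the bindable dataclass fields on SystemMonitor/GPUMonitor (state the UI binds to directly) and the two app.timer(2.0, ...) calls in src/main.py (collection), so pages no longer call get_dashboard_data() at build time.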
src/utils/gpu_monitor.py
@@ -2,11 +2,144 @@ import subprocess
 import re
 import time
 import logging
+from dataclasses import dataclass, field
 from typing import Dict, Any, Optional, List
+from enum import Enum
+from nicegui import binding
 
 logger = logging.getLogger(__name__)
 
 
+class GPUVendor(Enum):
+    AMD = "amd"
+    NVIDIA = "nvidia"
+    UNKNOWN = "unknown"
+
+
+@binding.bindable_dataclass
 class GPUMonitor:
+    """Unified GPU monitor that auto-detects and monitors AMD or NVIDIA GPUs"""
+    # GPU identification
+    vendor: GPUVendor = GPUVendor.UNKNOWN
+    available: bool = False
+    gpu_name: str = "Unknown GPU"
+    driver_version: str = "Unknown"
+
+    # GPU stats
+    temperature: float = 0.0
+    usage: float = 0.0
+    memory_used: int = 0
+    memory_total: int = 0
+    memory_percent: float = 0.0
+    power_draw: float = 0.0
+    power_limit: float = 0.0
+
+    # Multi-GPU support
+    gpu_count: int = 0
+    gpu_list: List[Dict[str, Any]] = field(default_factory=list)
+
+    # Internal state
+    _monitor: Optional[Any] = None
+    last_update: Optional[float] = None
+
+    def __post_init__(self):
+        """Initialize by detecting available GPU"""
+        self._detect_gpu()
+        if self.available:
+            self.update()
+
+    def _detect_gpu(self):
+        """Detect which GPU vendor is available"""
+        # Try NVIDIA first
+        if self._check_nvidia():
+            self.vendor = GPUVendor.NVIDIA
+            self._monitor = GPUNVIDIAMonitor()
+            self.available = True
+            logger.info("Detected NVIDIA GPU")
+        # Then try AMD
+        elif self._check_amd():
+            self.vendor = GPUVendor.AMD
+            self._monitor = GPUAMDMonitor()
+            self.available = True
+            logger.info("Detected AMD GPU")
+        else:
+            self.available = False
+            logger.info("No GPU detected")
+
+    def _check_nvidia(self) -> bool:
+        """Check if NVIDIA GPU is available"""
+        try:
+            result = subprocess.run(['nvidia-smi', '--help'],
+                                    capture_output=True, text=True, timeout=2)
+            return result.returncode == 0
+        except (subprocess.TimeoutExpired, FileNotFoundError):
+            return False
+
+    def _check_amd(self) -> bool:
+        """Check if AMD GPU is available"""
+        try:
+            # Check for rocm-smi
+            result = subprocess.run(['rocm-smi', '--help'],
+                                    capture_output=True, text=True, timeout=2)
+            if result.returncode == 0:
+                return True
+        except (subprocess.TimeoutExpired, FileNotFoundError):
+            pass
+
+        # Check for GPU in /sys/class/drm
+        try:
+            import os
+            gpu_dirs = [d for d in os.listdir('/sys/class/drm') if d.startswith('card')]
+            for gpu_dir in gpu_dirs:
+                vendor_path = f'/sys/class/drm/{gpu_dir}/device/vendor'
+                if os.path.exists(vendor_path):
+                    with open(vendor_path, 'r') as f:
+                        vendor_id = f.read().strip()
+                    if vendor_id == '0x1002':  # AMD vendor ID
+                        return True
+        except:
+            pass
+
+        return False
+
+    def update(self):
+        """Update GPU statistics"""
+        if not self.available or not self._monitor:
+            return
+
+        try:
+            # Get GPU info if not already retrieved
+            if not self.gpu_name or self.gpu_name == "Unknown GPU":
+                info = self._monitor.get_gpu_info()
+                if info.get('available'):
+                    if info.get('cards'):
+                        self.gpu_name = info['cards'][0].get('name', 'Unknown GPU')
+                        self.gpu_count = len(info['cards'])
+                        self.gpu_list = info['cards']
+                    if self.vendor == GPUVendor.NVIDIA:
+                        self.driver_version = info.get('driver_version', 'Unknown')
+
+            # Get GPU stats
+            stats = self._monitor.get_primary_gpu_stats()
+            if stats.get('available'):
+                self.temperature = stats.get('temperature', 0.0)
+                self.usage = stats.get('usage', 0.0)
+                self.memory_used = int(stats.get('memory_used', 0))
+                self.memory_total = int(stats.get('memory_total', 0))
+                self.memory_percent = stats.get('memory_percent', 0.0)
+
+                # Power stats (mainly for NVIDIA)
+                if self.vendor == GPUVendor.NVIDIA:
+                    self.power_draw = stats.get('power_draw', 0.0)
+                    self.power_limit = stats.get('power_limit', 0.0)
+
+            self.last_update = time.time()
+
+        except Exception as e:
+            logger.error(f"Error updating GPU stats: {e}")
+
+
+class GPUAMDMonitor:
     def __init__(self):
         self.last_update = None
         self.cache_duration = 2  # seconds
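
[Review note] GPUMonitor is now a facade: a bindable dataclass holding flat, UI-friendly fields, delegating actual collection to a vendor backend (GPUAMDMonitor or GPUNVIDIAMonitor) chosen once in __post_init__. Because the fields are plain floats/ints/strings, every bind_*_from(gpu_monitor, ...) in the pages above stays vendor-agnostic. Two details worth double-checking: `dataclass` is imported but only `field` and the NiceGUI decorator appear to be used, and `_monitor: Optional[Any]` becomes a dataclass field itself, so it is part of the generated __init__ signature unless excluded with `field(init=False)`.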
@@ -18,7 +151,7 @@ class GPUMonitor:
         try:
             # Check for rocm-smi (AMD)
             result = subprocess.run(['rocm-smi', '--help'],
                                     capture_output=True, text=True, timeout=5)
             if result.returncode == 0:
                 return True
         except (subprocess.TimeoutExpired, FileNotFoundError):
@@ -27,7 +160,7 @@ class GPUMonitor:
         try:
             # Check for radeontop
             result = subprocess.run(['radeontop', '--help'],
                                     capture_output=True, text=True, timeout=5)
             if result.returncode == 0:
                 return True
         except (subprocess.TimeoutExpired, FileNotFoundError):
@@ -68,7 +201,7 @@ class GPUMonitor:
 
         now = time.time()
         if (self.last_update is None or
                 now - self.last_update > self.cache_duration):
 
             try:
                 stats = self._get_rocm_stats()
@@ -89,7 +222,7 @@ class GPUMonitor:
         """Get GPU info using rocm-smi"""
         try:
             result = subprocess.run(['rocm-smi', '--showid', '--showproductname'],
                                     capture_output=True, text=True, timeout=10)
 
             if result.returncode == 0:
                 lines = result.stdout.strip().split('\n')
@@ -118,7 +251,7 @@ class GPUMonitor:
         try:
             # Get temperature, usage, and memory info
             result = subprocess.run(['rocm-smi', '--showtemp', '--showuse', '--showmeminfo'],
                                     capture_output=True, text=True, timeout=10)
 
             if result.returncode == 0:
                 stats = {'available': True, 'cards': []}
@@ -295,7 +428,7 @@ class GPUMonitor:
         memory_percent = 0
         if (primary_gpu.get('memory_used') is not None and
                 primary_gpu.get('memory_total') is not None and
                 primary_gpu['memory_total'] > 0):
             memory_percent = (primary_gpu['memory_used'] / primary_gpu['memory_total']) * 100
 
         return {
@@ -305,4 +438,222 @@ class GPUMonitor:
|
|||||||
'memory_percent': memory_percent,
|
'memory_percent': memory_percent,
|
||||||
'memory_used': primary_gpu.get('memory_used', 0) or 0,
|
'memory_used': primary_gpu.get('memory_used', 0) or 0,
|
||||||
'memory_total': primary_gpu.get('memory_total', 0) or 0
|
'memory_total': primary_gpu.get('memory_total', 0) or 0
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
|
class GPUNVIDIAMonitor:
|
||||||
|
def __init__(self):
|
||||||
|
self.last_update = None
|
||||||
|
self.cache_duration = 2 # seconds
|
||||||
|
self._cached_data = {}
|
||||||
|
self.gpu_available = self._check_gpu_availability()
|
||||||
|
|
||||||
|
def _check_gpu_availability(self) -> bool:
|
||||||
|
"""Check if NVIDIA GPU monitoring tools are available"""
|
||||||
|
try:
|
||||||
|
# Check for nvidia-smi
|
||||||
|
result = subprocess.run(['nvidia-smi', '--help'],
|
||||||
|
capture_output=True, text=True, timeout=5)
|
||||||
|
if result.returncode == 0:
|
||||||
|
return True
|
||||||
|
except (subprocess.TimeoutExpired, FileNotFoundError):
|
||||||
|
pass
|
||||||
|
|
||||||
|
return False
|
||||||
|
|
||||||
|
def get_gpu_info(self) -> Dict[str, Any]:
|
||||||
|
"""Get static GPU information"""
|
||||||
|
if not self.gpu_available:
|
||||||
|
return {'available': False, 'message': 'No NVIDIA GPU monitoring tools found'}
|
||||||
|
|
||||||
|
if not self._cached_data.get('gpu_info'):
|
||||||
|
try:
|
||||||
|
gpu_info = self._get_nvidia_info()
|
||||||
|
self._cached_data['gpu_info'] = gpu_info
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"Error getting GPU info: {e}")
|
||||||
|
self._cached_data['gpu_info'] = {'available': False, 'error': str(e)}
|
||||||
|
|
||||||
|
return self._cached_data['gpu_info']
|
||||||
|
|
||||||
|
def get_gpu_stats(self) -> Dict[str, Any]:
|
||||||
|
"""Get real-time GPU statistics"""
|
||||||
|
if not self.gpu_available:
|
||||||
|
return {'available': False}
|
||||||
|
|
||||||
|
now = time.time()
|
||||||
|
if (self.last_update is None or
|
||||||
|
now - self.last_update > self.cache_duration):
|
||||||
|
|
||||||
|
try:
|
||||||
|
stats = self._get_nvidia_stats()
|
||||||
|
stats['timestamp'] = now
|
||||||
|
self._cached_data['stats'] = stats
|
||||||
|
self.last_update = now
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"Error getting GPU stats: {e}")
|
||||||
|
self._cached_data['stats'] = {'available': False, 'error': str(e)}
|
||||||
|
|
||||||
|
return self._cached_data.get('stats', {'available': False})
|
||||||
|
|
||||||
|
    def _get_nvidia_info(self) -> Dict[str, Any]:
        """Get GPU info using nvidia-smi"""
        try:
            # Get GPU name, driver version, and CUDA version
            result = subprocess.run(['nvidia-smi', '--query-gpu=index,name,driver_version',
                                     '--format=csv,noheader'],
                                    capture_output=True, text=True, timeout=10)

            if result.returncode == 0:
                gpu_info = {'available': True, 'driver': 'nvidia-smi', 'cards': []}

                # Get driver and CUDA version from general output
                version_result = subprocess.run(['nvidia-smi'],
                                                capture_output=True, text=True, timeout=10)
                if version_result.returncode == 0:
                    # Parse driver version
                    driver_match = re.search(r'Driver Version:\s*(\S+)', version_result.stdout)
                    if driver_match:
                        gpu_info['driver_version'] = driver_match.group(1)

                    # Parse CUDA version
                    cuda_match = re.search(r'CUDA Version:\s*(\S+)', version_result.stdout)
                    if cuda_match:
                        gpu_info['cuda_version'] = cuda_match.group(1)

                # Parse GPU info
                lines = result.stdout.strip().split('\n')
                for line in lines:
                    parts = [p.strip() for p in line.split(',')]
                    if len(parts) >= 3:
                        gpu_info['cards'].append({
                            'id': int(parts[0]),
                            'name': parts[1],
                            'driver_version': parts[2]
                        })

                return gpu_info if gpu_info['cards'] else {'available': False}

            return {'available': False}

        except Exception as e:
            logger.debug(f"nvidia-smi not available: {e}")
            return {'available': False, 'error': str(e)}
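For reference, the query above produces one CSV row per card; the values here are made up but show the shape being parsed:

# Hypothetical nvidia-smi output for the query used in _get_nvidia_info:
#   0, NVIDIA GeForce RTX 3090, 550.54.14
sample = "0, NVIDIA GeForce RTX 3090, 550.54.14"
parts = [p.strip() for p in sample.split(',')]
print({'id': int(parts[0]), 'name': parts[1], 'driver_version': parts[2]})
# {'id': 0, 'name': 'NVIDIA GeForce RTX 3090', 'driver_version': '550.54.14'}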
    def _get_nvidia_stats(self) -> Dict[str, Any]:
        """Get GPU stats using nvidia-smi"""
        try:
            # Query multiple metrics at once
            result = subprocess.run([
                'nvidia-smi',
                '--query-gpu=index,temperature.gpu,utilization.gpu,memory.used,memory.total,power.draw,power.limit',
                '--format=csv,noheader,nounits'
            ], capture_output=True, text=True, timeout=10)

            if result.returncode == 0:
                stats = {'available': True, 'cards': []}

                lines = result.stdout.strip().split('\n')
                for line in lines:
                    parts = [p.strip() for p in line.split(',')]
                    if len(parts) >= 5:
                        gpu_stats = {
                            'id': int(parts[0]),
                            'temperature': None,
                            'usage': None,
                            'memory_used': None,
                            'memory_total': None,
                            'power_draw': None,
                            'power_limit': None
                        }

                        # Parse temperature
                        if parts[1] and parts[1] != '[N/A]':
                            try:
                                gpu_stats['temperature'] = float(parts[1])
                            except ValueError:
                                pass

                        # Parse GPU utilization
                        if parts[2] and parts[2] != '[N/A]':
                            try:
                                gpu_stats['usage'] = int(parts[2])
                            except ValueError:
                                pass

                        # Parse memory usage
                        if parts[3] and parts[3] != '[N/A]':
                            try:
                                gpu_stats['memory_used'] = int(parts[3])
                            except ValueError:
                                pass

                        if parts[4] and parts[4] != '[N/A]':
                            try:
                                gpu_stats['memory_total'] = int(parts[4])
                            except ValueError:
                                pass

                        # Parse power stats if available
                        if len(parts) >= 7:
                            if parts[5] and parts[5] != '[N/A]':
                                try:
                                    gpu_stats['power_draw'] = float(parts[5])
                                except ValueError:
                                    pass

                            if parts[6] and parts[6] != '[N/A]':
                                try:
                                    gpu_stats['power_limit'] = float(parts[6])
                                except ValueError:
                                    pass

                        stats['cards'].append(gpu_stats)

                return stats if stats['cards'] else {'available': False}

            return {'available': False}

        except Exception as e:
            logger.debug(f"nvidia-smi stats not available: {e}")
            return {'available': False, 'error': str(e)}
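The six guarded conversions above all follow one pattern: skip the field when it is empty or '[N/A]', otherwise cast it and swallow ValueError. If more metrics get added, a small helper would collapse each block to one line; a sketch, not part of this commit:

def _parse_metric(raw, cast=float):
    """Return cast(raw), or None when the field is empty, '[N/A]', or unparsable."""
    if raw and raw != '[N/A]':
        try:
            return cast(raw)
        except ValueError:
            pass
    return None

# e.g. gpu_stats['temperature'] = _parse_metric(parts[1])
#      gpu_stats['usage'] = _parse_metric(parts[2], int)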
    def get_primary_gpu_stats(self) -> Dict[str, Any]:
        """Get stats for the primary/first GPU"""
        all_stats = self.get_gpu_stats()

        if not all_stats.get('available') or not all_stats.get('cards'):
            return {
                'available': False,
                'usage': 0,
                'temperature': 0,
                'memory_percent': 0,
                'power_draw': 0,
                'power_limit': 0
            }

        primary_gpu = all_stats['cards'][0]

        # Calculate memory percentage
        memory_percent = 0
        if (primary_gpu.get('memory_used') is not None and
                primary_gpu.get('memory_total') is not None and
                primary_gpu['memory_total'] > 0):
            memory_percent = (primary_gpu['memory_used'] / primary_gpu['memory_total']) * 100

        return {
            'available': True,
            'usage': primary_gpu.get('usage', 0) or 0,
            'temperature': primary_gpu.get('temperature', 0) or 0,
            'memory_percent': memory_percent,
            'memory_used': primary_gpu.get('memory_used', 0) or 0,
            'memory_total': primary_gpu.get('memory_total', 0) or 0,
            'power_draw': primary_gpu.get('power_draw', 0) or 0,
            'power_limit': primary_gpu.get('power_limit', 0) or 0
        }
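Because every field in the returned dict is coalesced to a number (the `or 0` converts the `None` left by a failed parse), consumers can format values without guards. A sketch:

stats = GPUMonitor().get_primary_gpu_stats()
if stats['available']:
    print(f"GPU {stats['usage']}% | {stats['temperature']:.0f}°C | "
          f"VRAM {stats['memory_percent']:.0f}% | "
          f"{stats['power_draw']:.0f}/{stats['power_limit']:.0f} W")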
if __name__ == "__main__":
    from pprint import pprint
    monitor = GPUMonitor()
    pprint(monitor.get_gpu_stats())  # manual check: dump a snapshot of the parsed stats
66
src/utils/ollama.py
Normal file
@@ -0,0 +1,66 @@
import httpx
from nicegui import ui


async def available_models(url='http://127.0.0.1:11434'):
    async with httpx.AsyncClient() as client:
        response = await client.get(f"{url}/api/tags")
        response.raise_for_status()
        return response.json()["models"]


async def loaded_models(url='http://127.0.0.1:11434'):
    async with httpx.AsyncClient() as client:
        response = await client.get(f"{url}/api/ps")
        response.raise_for_status()
        return response.json()
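These helpers assume an Ollama server on the default port. A sketch of wiring `available_models` into a NiceGUI page (the import path reflects where this file lives in the repo):

from nicegui import ui
from utils.ollama import available_models


@ui.page('/')
async def index():
    models = await available_models()
    # /api/tags returns a list of dicts; each carries a 'name' key
    ui.select([m['name'] for m in models], label='Model')


ui.run()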
async def create_ollama_model(name, modelfile_content, url='http://127.0.0.1:11434'):
    data = {
        "name": name,
        "from": "qwen2.5-coder:7b",
        "modelfile": modelfile_content,
        "stream": False
    }

    async with httpx.AsyncClient() as client:
        response = await client.post(f"{url}/api/create", json=data)
        response.raise_for_status()
        print(response.text)
        return response.json()
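Worth noting: the payload mixes the legacy `modelfile` field with the newer `from` field and pins the base model to qwen2.5-coder:7b; which field the server honors depends on the Ollama version, so this is best verified against the running server. A hypothetical call (name and system prompt are illustrative):

modelfile = (
    "FROM qwen2.5-coder:7b\n"
    'SYSTEM "You review Python code and answer tersely."\n'
)
result = await create_ollama_model('code-reviewer', modelfile)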
async def delete_model(name, url='http://127.0.0.1:11434') -> bool:
    data = {"name": name}

    async with httpx.AsyncClient() as client:
        try:
            response = await client.request("DELETE", f"{url}/api/delete", json=data)
            if response.status_code == 200:
                return True
            else:
                ui.notify(f'Failed to delete model: {response.text}', type='negative')
                return False
        except Exception as e:
            ui.notify(f'Error deleting model: {str(e)}', type='negative')
            return False
async def model_info(name, url='http://127.0.0.1:11434'):
    data = {
        "name": name
    }

    async with httpx.AsyncClient() as client:
        response = await client.post(f"{url}/api/show", json=data)
        response.raise_for_status()
        return response.json()
async def stream_chat(data, url='http://127.0.0.1:11434'):
    # timeout=None: token streams can pause longer than httpx's 5 s default read timeout
    async with httpx.AsyncClient(timeout=None) as client:
        async with client.stream('POST', f"{url}/api/chat", json=data) as response:
            async for chunk in response.aiter_text():
                yield chunk
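`stream_chat` yields raw text while Ollama streams newline-delimited JSON, so the consumer is responsible for splitting and decoding lines. A minimal sketch (it assumes each chunk holds whole lines; a robust consumer would buffer partial ones):

import json


async def print_reply(prompt: str):
    data = {
        'model': 'qwen2.5-coder:7b',
        'messages': [{'role': 'user', 'content': prompt}],
        'stream': True,
    }
    async for chunk in stream_chat(data):
        for line in chunk.splitlines():
            if line.strip():
                part = json.loads(line)
                print(part.get('message', {}).get('content', ''), end='', flush=True)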
@@ -2,123 +2,146 @@ import psutil
 import platform
 import time
 import logging
+from dataclasses import dataclass, field
 from datetime import datetime, timedelta
-from typing import Dict, Any
+from typing import Dict, Any, List, Optional
+from nicegui import binding

 logger = logging.getLogger(__name__)


+@binding.bindable_dataclass
 class SystemMonitor:
-    def __init__(self):
-        self.last_update = None
-        self.cache_duration = 2  # seconds
-        self._cached_data = {}
-
-    def get_system_info(self) -> Dict[str, Any]:
-        """Get static system information"""
-        if not self._cached_data.get('system_info'):
-            try:
-                uname = platform.uname()
-                boot_time = datetime.fromtimestamp(psutil.boot_time())
-                uptime = datetime.now() - boot_time
-
-                self._cached_data['system_info'] = {
-                    'os': f"{uname.system}",
-                    'kernel': uname.release,
-                    'cpu': self._get_cpu_info(),
-                    'memory_total': psutil.virtual_memory().total,
-                    'uptime': self._format_uptime(uptime),
-                    'hostname': uname.node,
-                    'architecture': uname.machine
-                }
-            except Exception as e:
-                logger.error(f"Error getting system info: {e}")
-                self._cached_data['system_info'] = {}
-
-        return self._cached_data['system_info']
-
-    def get_system_stats(self) -> Dict[str, Any]:
-        """Get real-time system statistics"""
-        now = time.time()
-        if (self.last_update is None or
-                now - self.last_update > self.cache_duration):
-
-            try:
-                # CPU stats
-                cpu_percent = psutil.cpu_percent(interval=0.1)
-                cpu_count = psutil.cpu_count()
-                cpu_freq = psutil.cpu_freq()
-
-                # Memory stats
-                memory = psutil.virtual_memory()
-                swap = psutil.swap_memory()
-
-                # Disk stats
-                disk = psutil.disk_usage('/')
-
-                # Network stats
-                network = psutil.net_io_counters()
-
-                # Process count
-                process_count = len(psutil.pids())
-
-                # Load average (Unix only)
-                load_avg = None
-                try:
-                    load_avg = psutil.getloadavg()
-                except AttributeError:
-                    # Windows doesn't have load average
-                    pass
-
-                # Temperature (if available)
-                temperatures = self._get_temperatures()
-
-                self._cached_data['stats'] = {
-                    'cpu': {
-                        'percent': cpu_percent,
-                        'count': cpu_count,
-                        'frequency': cpu_freq.current if cpu_freq else None,
-                        'load_avg': load_avg
-                    },
-                    'memory': {
-                        'total': memory.total,
-                        'available': memory.available,
-                        'used': memory.used,
-                        'percent': memory.percent,
-                        'free': memory.free
-                    },
-                    'swap': {
-                        'total': swap.total,
-                        'used': swap.used,
-                        'percent': swap.percent
-                    },
-                    'disk': {
-                        'total': disk.total,
-                        'used': disk.used,
-                        'free': disk.free,
-                        'percent': (disk.used / disk.total) * 100
-                    },
-                    'network': {
-                        'bytes_sent': network.bytes_sent,
-                        'bytes_recv': network.bytes_recv,
-                        'packets_sent': network.packets_sent,
-                        'packets_recv': network.packets_recv
-                    },
-                    'processes': {
-                        'count': process_count
-                    },
-                    'temperatures': temperatures,
-                    'timestamp': now
-                }
-
-                self.last_update = now
-
-            except Exception as e:
-                logger.error(f"Error getting system stats: {e}")
-                self._cached_data['stats'] = {}
-
-        return self._cached_data.get('stats', {})
-
-    def get_processes(self, limit: int = 10) -> list:
+    # CPU stats
+    cpu_percent: float = 0.0
+    cpu_count: int = 0
+    cpu_frequency: Optional[float] = None
+    cpu_model: str = "Unknown"
+    load_avg: Optional[tuple] = None
+
+    # Memory stats
+    memory_total: int = 0
+    memory_used: int = 0
+    memory_available: int = 0
+    memory_percent: float = 0.0
+    memory_free: int = 0
+
+    # Swap stats
+    swap_total: int = 0
+    swap_used: int = 0
+    swap_percent: float = 0.0
+
+    # Disk stats
+    disk_total: int = 0
+    disk_used: int = 0
+    disk_free: int = 0
+    disk_percent: float = 0.0
+
+    # Network stats
+    network_bytes_sent: int = 0
+    network_bytes_recv: int = 0
+    network_packets_sent: int = 0
+    network_packets_recv: int = 0
+
+    # System info
+    os_name: str = "Unknown"
+    kernel: str = "Unknown"
+    hostname: str = "Unknown"
+    architecture: str = "Unknown"
+    uptime: str = "0m"
+
+    # Process stats
+    process_count: int = 0
+    top_processes: List[Dict] = field(default_factory=list)
+
+    # Temperature
+    temperatures: Dict[str, float] = field(default_factory=dict)
+
+    # Update tracking
+    last_update: Optional[float] = None
+
+    def __post_init__(self):
+        """Initialize static system information on creation"""
+        self._update_static_info()
+        self.update()
+
+    def _update_static_info(self):
+        """Update static system information (called once on init)"""
+        try:
+            uname = platform.uname()
+            self.os_name = f"{uname.system}"
+            self.kernel = uname.release
+            self.hostname = uname.node
+            self.architecture = uname.machine
+            self.cpu_model = self._get_cpu_info()
+            self.cpu_count = psutil.cpu_count()
+            self.memory_total = psutil.virtual_memory().total
+        except Exception as e:
+            logger.error(f"Error getting static system info: {e}")
+
+    def update(self):
+        """Update all dynamic system statistics"""
+        try:
+            # CPU stats
+            self.cpu_percent = psutil.cpu_percent(interval=0.1)
+            cpu_freq = psutil.cpu_freq()
+            self.cpu_frequency = cpu_freq.current if cpu_freq else None
+
+            # Load average (Unix only)
+            try:
+                self.load_avg = psutil.getloadavg()
+            except AttributeError:
+                self.load_avg = None
+
+            # Memory stats
+            memory = psutil.virtual_memory()
+            self.memory_total = memory.total
+            self.memory_available = memory.available
+            self.memory_used = memory.used
+            self.memory_percent = memory.percent
+            self.memory_free = memory.free
+
+            # Swap stats
+            swap = psutil.swap_memory()
+            self.swap_total = swap.total
+            self.swap_used = swap.used
+            self.swap_percent = swap.percent
+
+            # Disk stats
+            disk = psutil.disk_usage('/')
+            self.disk_total = disk.total
+            self.disk_used = disk.used
+            self.disk_free = disk.free
+            self.disk_percent = (disk.used / disk.total) * 100 if disk.total > 0 else 0
+
+            # Network stats
+            network = psutil.net_io_counters()
+            if network:
+                self.network_bytes_sent = network.bytes_sent
+                self.network_bytes_recv = network.bytes_recv
+                self.network_packets_sent = network.packets_sent
+                self.network_packets_recv = network.packets_recv
+
+            # Process count
+            self.process_count = len(psutil.pids())
+
+            # Update top processes
+            self.top_processes = self.get_top_processes(10)
+
+            # Temperature (if available)
+            self.temperatures = self._get_temperatures()
+
+            # Uptime
+            boot_time = datetime.fromtimestamp(psutil.boot_time())
+            uptime = datetime.now() - boot_time
+            self.uptime = self._format_uptime(uptime)
+
+            self.last_update = time.time()
+
+        except Exception as e:
+            logger.error(f"Error updating system stats: {e}")
+
+    def get_top_processes(self, limit: int = 10) -> List[Dict]:
         """Get top processes by CPU usage"""
         try:
             processes = []
@@ -184,4 +207,10 @@ class SystemMonitor:
             if bytes_value < 1024.0:
                 return f"{bytes_value:.1f}{unit}"
             bytes_value /= 1024.0
         return f"{bytes_value:.1f}PB"
+
+
+if __name__ == '__main__':
+    from pprint import pprint
+    monitor = SystemMonitor()
+    pprint(monitor)
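The point of `@binding.bindable_dataclass` is that plain attribute access becomes observable, so UI elements can bind to fields directly instead of polling a stats dict, which is exactly how the header in this commit consumes it. A minimal standalone sketch (the 2-second `ui.timer` is an assumption; the real refresh wiring may live elsewhere in the app):

from nicegui import ui

monitor = SystemMonitor()
ui.timer(2.0, monitor.update)  # periodically refresh the bindable fields
ui.label().bind_text_from(monitor, 'cpu_percent', lambda x: f'CPU: {x:.1f}%')
ui.label().bind_text_from(monitor, 'memory_percent', lambda x: f'RAM: {x:.1f}%')

ui.run()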
12
uv.lock
generated
@@ -94,14 +94,18 @@ version = "0.1.0"
 source = { virtual = "." }
 dependencies = [
     { name = "dotenv" },
+    { name = "httpx" },
     { name = "nicegui" },
+    { name = "niceguiasyncelement" },
     { name = "psutil" },
 ]

 [package.metadata]
 requires-dist = [
     { name = "dotenv", specifier = ">=0.9.9" },
+    { name = "httpx", specifier = ">=0.28.1" },
     { name = "nicegui", specifier = ">=2.24.1" },
+    { name = "niceguiasyncelement", git = "https://git.project-insanity.de/gmarth/NiceGuiAsyncElement.git" },
     { name = "psutil", specifier = ">=6.1.0" },
 ]

@@ -435,6 +439,14 @@ wheels = [
     { url = "https://files.pythonhosted.org/packages/43/8f/d123b74c6e760e941ccbdaf25b61c309d8bb16177802c801aa3f14543386/nicegui-2.24.1-py3-none-any.whl", hash = "sha256:a1bfc566cd2c9d290925f4df8b8023e63edaab6590e19160de97d9f84b8d6d8c", size = 13496427, upload-time = "2025-09-11T03:20:47.828Z" },
 ]

+[[package]]
+name = "niceguiasyncelement"
+version = "0.1.0"
+source = { git = "https://git.project-insanity.de/gmarth/NiceGuiAsyncElement.git#2475cc8bd27b5f9b5047908968ab6d46b69dd4ca" }
+dependencies = [
+    { name = "nicegui" },
+]
+
 [[package]]
 name = "orjson"
 version = "3.11.3"
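For context on how an entry like this comes about: uv resolves the git source declared under `[tool.uv.sources]` in pyproject.toml and pins the exact commit hash in the lock file. Something like the following (exact invocation assumed) produces both the pyproject entry and this lock entry:

uv add git+https://git.project-insanity.de/gmarth/NiceGuiAsyncElement.git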