From 4b7e31b0a2053a5b72757cd90902ad8f0f3941bd Mon Sep 17 00:00:00 2001
From: Alexander Thiess
Date: Thu, 18 Sep 2025 10:48:47 +0200
Subject: [PATCH] updated header metrics

---
 src/components/header.py | 107 ++++++++++++++++++++++++++++++++-------
 1 file changed, 89 insertions(+), 18 deletions(-)

diff --git a/src/components/header.py b/src/components/header.py
index f7c9208..f48acdf 100644
--- a/src/components/header.py
+++ b/src/components/header.py
@@ -14,29 +14,35 @@ class Header(ui.header):
             chip = ui.chip('Live', icon='circle').props('size=sm outline')
             chip.bind_text_from(ollama_monitor, 'status', backward=lambda x: self.update_ollama_running_chip(chip, x))
 
+        # Center - Active models with tooltip
         with ui.row().classes('items-center gap-4'):
-            print(ollama_monitor.active_models)
+            model_info_container = ui.row().classes('items-center gap-2 cursor-pointer')
+            with model_info_container:
+                ui.icon('smart_toy', size='sm', color='orange')
+                models_label = ui.label().classes('text-sm text-white')
+                models_label.bind_text_from(ollama_monitor, 'active_models',
+                                            backward=lambda x: self.update_active_models(x))
 
-            ui.label().bind_text_from(ollama_monitor, 'active_models', backward=lambda x: self.update_active_models(x))
+            # Create tooltip with model details
+            self._create_model_tooltip(model_info_container, ollama_monitor)
 
-        # Right side - system status only
-        with ui.row().classes('items-center gap-4'):
-            # Get real-time data
+        # Right side - Critical metrics
+        with ui.row().classes('items-center gap-3'):
+            # GPU Load
+            self._create_metric_badge('GPU', 'orange', 'show_chart', gpu_monitor, 'usage',
+                                      lambda x: f'{x:.0f}%')
 
-            # System load indicator
-            with ui.row().classes('items-center gap-2'):
-                ui.icon('memory', size='sm', color='cyan')
-                ui.label().classes('text-sm text-white').bind_text_from(system_monitor, 'cpu_percent',
-                                                                        lambda x: f'{x:.1f}%')
+            # VRAM Usage
+            self._create_metric_badge('VRAM', 'purple', 'memory', gpu_monitor, 'memory_used',
+                                      lambda x: self._format_memory(x, gpu_monitor.memory_total, 1))
 
-            with ui.row().classes('items-center gap-2'):
-                ui.label().classes('text-sm text-white').bind_text_from(gpu_monitor, 'GPU ',
-                                                                        lambda x: f'{x:.1f}%')
+            # System RAM
+            self._create_metric_badge('RAM', 'cyan', 'memory', system_monitor, 'memory_used',
+                                      lambda x: self._format_memory(x, system_monitor.memory_total))
 
-            with ui.row().classes('items-center gap-2'):
-                ui.icon('thermostat', size='sm', color='red')
-                ui.label().classes('text-sm text-white').bind_text_from(gpu_monitor, 'temperature',
-                                                                        lambda x: f'{x:.1f}°C')
+            # Disk Space
+            self._create_metric_badge('Disk', 'green', 'folder', system_monitor, 'disk_used',
+                                      lambda x: self._format_disk(x, system_monitor.disk_total))
 
     def update_ollama_running_chip(self, obj: ui.chip, state):
         obj.classes(remove='text-red' if state else 'text-green')
@@ -47,4 +53,69 @@ class Header(ui.header):
         used_vram = 0
         for active_model in active_models:
             used_vram += active_model['size_vram']
-        return f'{len(active_models)} Active Models using {(used_vram / 1024**3):.2f} GB'
+        return f'{len(active_models)} Models • {(used_vram / 1024**3):.2f} GB'
+
+    def _create_metric_badge(self, label: str, color: str, icon_name: str, monitor, attr, formatter):
+        """Create a compact metric badge with icon and value"""
+        with ui.element('div').classes('flex items-center gap-1 px-2 py-1 rounded-lg bg-gray-800 bg-opacity-50'):
+            ui.icon(icon_name, size='xs').props(f'color={color}')
+            ui.label(label).classes('text-xs text-gray-400')
+
+            # Single attribute binding
+            value_label = ui.label().classes('text-sm font-medium text-white')
+            value_label.bind_text_from(monitor, attr, backward=formatter)
+
+    def _format_memory(self, used: int, total: int, base=3) -> str:
+        print(f"{used} / {total}")
+        """Format RAM usage in GB"""
+        if total == 0:
+            return "N/A"
+        used_gb = used / (1024**base)
+        total_gb = total / (1024**base)
+        formatted = f"{used_gb:.1f}/{total_gb:.0f}GB"
+        print(formatted)
+        return f"{used_gb:.1f}/{total_gb:.0f}GB"
+
+    def _format_disk(self, used: int, total: int) -> str:
+        """Format disk usage"""
+        if total == 0:
+            return "N/A"
+        free = total - used
+        free_gb = free / (1024**3)
+
+        # Show in TB if over 1000GB
+        if free_gb > 1000:
+            return f"{free_gb/1024:.1f}TB free"
+        else:
+            return f"{free_gb:.0f}GB free"
+
+    def _create_model_tooltip(self, container, ollama_monitor):
+        """Create a tooltip showing detailed model information"""
+        with container:
+            # Create a tooltip with dynamic content
+            with ui.tooltip().classes('bg-gray-900 border border-gray-700 shadow-xl'):
+                # Create refreshable content that updates when active_models changes
+                @ui.refreshable
+                def tooltip_content():
+                    models = ollama_monitor.active_models
+                    if not models:
+                        ui.label('No models loaded').classes('text-xs text-gray-400')
+                    else:
+                        with ui.column().classes('gap-2 p-2'):
+                            ui.label('Active Models').classes('text-sm font-bold text-white mb-1')
+                            for model in models:
+                                with ui.row().classes('items-center gap-2'):
+                                    ui.icon('circle', size='xs').props('color=green')
+                                    with ui.column().classes('gap-0'):
+                                        ui.label(model.get('name', 'Unknown')).classes('text-xs text-white font-medium')
+                                        vram_gb = model.get('size_vram', 0) / (1024**3)
+                                        ui.label(f'VRAM: {vram_gb:.2f} GB').classes('text-xs text-gray-400')
+                                        if 'size' in model:
+                                            size_gb = model.get('size', 0) / (1024**3)
+                                            ui.label(f'Size: {size_gb:.2f} GB').classes('text-xs text-gray-400')
+
+                # Display initial content
+                tooltip_content()
+
+                # Set up a timer to refresh the tooltip content
+                ui.timer(2.0, lambda: tooltip_content.refresh())
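
The metric badges and the models label in this patch update through NiceGUI's bind_text_from(source, attribute, backward=...), so the monitor objects only need to expose bindable attributes under the names used in header.py (usage, memory_used, memory_total, disk_used, disk_total, active_models). Below is a minimal sketch of a system monitor compatible with these bindings; it is an illustration, not part of the patch. The class name SystemMonitor, the use of psutil, and the 2-second polling interval are assumptions; only the attribute names mirror what the header binds to.

# Hypothetical sketch, not from this patch: a monitor whose attributes
# header.py can bind to via bind_text_from(). Assumes psutil is installed.
import psutil
from nicegui import binding, ui


class SystemMonitor:
    # BindableProperty pushes changes to every element bound with bind_text_from
    memory_used = binding.BindableProperty()
    memory_total = binding.BindableProperty()
    disk_used = binding.BindableProperty()
    disk_total = binding.BindableProperty()

    def __init__(self) -> None:
        self.memory_used = 0
        self.memory_total = 0
        self.disk_used = 0
        self.disk_total = 0

    def update(self) -> None:
        # Values are kept in bytes, so _format_memory's default base of 3 yields GB
        mem = psutil.virtual_memory()
        disk = psutil.disk_usage('/')
        self.memory_used = mem.used
        self.memory_total = mem.total
        self.disk_used = disk.used
        self.disk_total = disk.total


system_monitor = SystemMonitor()


@ui.page('/')
def index() -> None:
    ui.timer(2.0, system_monitor.update)  # assumed polling cadence, matching the tooltip refresh
    # Stand-in for one header badge: bind a label to the monitor attribute
    ui.label().bind_text_from(system_monitor, 'memory_used',
                              backward=lambda used: f'{used / 1024**3:.1f} GB')


if __name__ in {'__main__', '__mp_main__'}:
    ui.run()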