2025-09-18 10:10:52 +02:00
parent 590af9407c
commit 994fc6873e
14 changed files with 550 additions and 136 deletions

View File

@@ -1,4 +1,29 @@
from nicegui import ui
from utils import SystemMonitor, GPUMonitor
from typing import Optional, Literal
class MetricCircleAdv:
def __init__(self, label: str, monitor: SystemMonitor | GPUMonitor,
target_value: str,
target_max_value: str,
color: str,
formatting: Literal['percent', 'units', 'degree'],
icon: Optional[str] = None):
with ui.card().classes('metric-card p-4 text-center'):
with ui.column().classes('items-center gap-2'):
# Icon at top
with ui.row().classes('items-center gap-1'):
if icon:
ui.icon(icon, size='sm', color=color)
# Title
ui.label(label).classes('text-sm text-grey-5 font-medium')
# Circular progress - simplified
with ui.circular_progress(size='60px', color=color, show_value=False).bind_value_from(monitor, target_value):
if formatting == 'percent':
ui.label().classes('text-lg font-bold text-white').bind_text_from(monitor, target_value, backward=lambda x: f"{int(x * 100)} %")
class MetricCircle:
@@ -46,4 +71,4 @@ class ColorfulMetricCard:
with ui.card().classes('p-4 text-center animate-fade-in').style(f'background: linear-gradient(135deg, {color}20 0%, {color}10 100%); border: 1px solid {color}40'):
with ui.column().classes('items-center gap-2'):
ui.icon(icon, size='xl').style(f'color: {color}')
ui.label(title).classes('text-sm font-medium text-white')
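For orientation, a hypothetical usage sketch of the new MetricCircleAdv (not part of the commit); the attribute names on SystemMonitor are assumptions:

# Hypothetical usage sketch; assumes SystemMonitor exposes a bindable `cpu`
# attribute in the 0..1 range (attribute names are illustrative, not from utils)
monitor = SystemMonitor()
MetricCircleAdv(
    label='CPU',
    monitor=monitor,
    target_value='cpu',          # attribute name passed to bind_value_from
    target_max_value='cpu_max',  # assumed attribute for non-percent formatting
    color='cyan',
    formatting='percent',
    icon='memory',
)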

View File

@@ -1,17 +1,23 @@
from nicegui import ui
from utils import SystemMonitor, GPUMonitor
from nicegui import ui, binding
from utils import SystemMonitor, GPUMonitor, OllamaMonitor
class Header(ui.header):
def __init__(self, system_monitor: SystemMonitor, gpu_monitor: GPUMonitor):
def __init__(self, system_monitor: SystemMonitor, gpu_monitor: GPUMonitor, ollama_monitor: OllamaMonitor):
super().__init__(fixed=True, elevated=False)
with self.classes('bg-transparent'):
with ui.row().classes('w-full items-center justify-between px-6 py-3'):
with ui.row().classes('w-full items-center justify-between px-6'):
# Left side - minimal branding
with ui.row().classes('items-center gap-3'):
ui.label('ArchGPU Frontend').classes('text-xl font-bold text-white')
ui.chip('Live', icon='circle', color='green').props('size=sm outline')
chip = ui.chip('Live', icon='circle').props('size=sm outline')
chip.bind_text_from(ollama_monitor, 'status', backward=lambda x: self.update_ollama_running_chip(chip, x))
with ui.row().classes('items-center gap-4'):
ui.label().bind_text_from(ollama_monitor, 'active_models', backward=lambda x: self.update_active_models(x))
# Right side - system status only
with ui.row().classes('items-center gap-4'):
@@ -31,3 +37,14 @@ class Header(ui.header):
ui.icon('thermostat', size='sm', color='red')
ui.label().classes('text-sm text-white').bind_text_from(gpu_monitor, 'temperature',
lambda x: f'{x:.1f}°C')
def update_ollama_running_chip(self, obj: ui.chip, state):
    # recolor the chip to match the server state, then return the chip label
    obj.classes(remove='text-red' if state else 'text-green')
    obj.classes(add='text-green' if state else 'text-red')
    return 'Ollama Running' if state else 'Ollama Stopped'
def update_active_models(self, active_models):
    # sum the VRAM Ollama reports for each loaded model (bytes -> GB)
    used_vram = sum(model['size_vram'] for model in active_models)
    return f'{len(active_models)} Active Models using {used_vram / 1024**3:.2f} GB'
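The bindings above only need two attributes from the monitor. A minimal sketch of the assumed interface (the real OllamaMonitor lives in utils and may differ):

# Minimal sketch of the interface the header bindings assume; illustrative only.
# `active_models` mirrors the `models` array of Ollama's /api/ps response,
# whose entries report `size_vram` in bytes.
class OllamaMonitorSketch:
    def __init__(self) -> None:
        self.status: bool = False            # truthy while the Ollama server responds
        self.active_models: list[dict] = []  # e.g. [{'name': 'qwen2.5:0.5b', 'size_vram': 397_000_000}]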

View File

@@ -0,0 +1,57 @@
from nicegui import ui
from niceguiasyncelement import AsyncCard
import json
from utils import ollama
class OllamaDownloaderComponent(AsyncCard):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.is_downloading = False
self.download_progress = 0
self.download_status = ''
async def build(self) -> None:
with self:
with ui.column().classes('w-full gap-4'):
ui.label('Model Downloader').classes('text-xl font-bold')
model_input = ui.input(
'Model ID',
placeholder='e.g., TheBloke/Llama-2-7B-GGUF',
value='qwen2.5:0.5b'
).props('outlined dense').classes('w-full')
ui.link('Ollama Library', target='https://ollama.com/library/', new_tab=True)
ui.link('Using HF Models', target='https://huggingface.co/docs/hub/en/ollama', new_tab=True)
with ui.row().classes('items-center gap-2'):
ui.icon('check_circle').props('color=positive').bind_visibility_from(self, 'download_status', backward=lambda x: x == 'success')
self.status_label = ui.label().bind_text_from(self, 'download_status')
ui.linear_progress(value=0, show_value=False).props('buffer=0.0 animation-speed=0').bind_value_from(self, 'download_progress')
self.download_btn = ui.button(
    'Download Model',
    on_click=lambda m=model_input: self.download_model(m.value)  # type: ignore
).props('color=primary').classes('w-full').bind_enabled_from(
    model_input, 'value', backward=lambda x: bool(x) and not self.is_downloading)
async def download_model(self, model):
    self.download_btn.set_enabled(False)
    self.is_downloading = True
    try:
        async for chunk in ollama.download_model(model):
            if chunk.strip():
                # each chunk is one NDJSON progress line from the pull stream
                try:
                    chunk_data = json.loads(chunk)
                    self.download_status = chunk_data['status']
                    if 'total' in chunk_data and 'completed' in chunk_data:
                        self.download_progress = chunk_data['completed'] / chunk_data['total']
                    else:
                        self.download_progress = 0
                except json.JSONDecodeError:
                    pass  # skip malformed chunks
    except Exception as e:
        ui.notify(f'Error: {e}', type='negative')
    finally:
        self.is_downloading = False
        self.download_btn.set_enabled(True)
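The component treats ollama.download_model as an async generator of NDJSON lines. A minimal sketch under the assumption that it wraps Ollama's /api/pull on the default port (the real helper in utils may differ):

import httpx  # assumption: any streaming-capable async HTTP client works here

async def download_model_sketch(model: str):
    # /api/pull streams one JSON object per line carrying `status` and, while
    # layers are downloading, `total`/`completed` byte counts
    async with httpx.AsyncClient(timeout=None) as client:
        async with client.stream('POST', 'http://localhost:11434/api/pull',
                                 json={'model': model, 'stream': True}) as response:
            async for line in response.aiter_lines():
                yield line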

View File

@@ -0,0 +1,85 @@
from nicegui import ui, binding
from niceguiasyncelement import AsyncCard
import json
from utils import ollama
from typing import Optional, Dict
modelfile_example = """FROM qwen2.5-coder:7b
PARAMETER num_ctx 8192
PARAMETER temperature 0.1
SYSTEM "Du bist ein Python-Experte."
"""
class OllamaModelCreationComponent(AsyncCard):
model_name = binding.BindableProperty()
model_from = binding.BindableProperty()
parameters = binding.BindableProperty()
quantize = binding.BindableProperty()
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.is_downloading = False
self.download_progress = 0
self.download_status = ''
async def build(self) -> None:
self.classes('w-full')
with self:
with ui.column().classes('w-full gap-4'):
ui.label('Create Model').classes('text-xl font-bold')
ui.input('Model Name', value='qwen2.5-coder-32k-python:latest').props('outlined dense').classes('w-full').bind_value(self, 'model_name')
ui.input('From', value='qwen2.5-coder:7b').props('outlined dense').classes('w-full').bind_value(self, 'model_from')
ui.textarea(placeholder='Parameters').classes('w-full').props('autogrow').bind_value(self, 'parameters')
ui.select(['q4_K_M', 'q4_K_S', 'q8_0'], label='quantize', clearable=True).props('outlined dense').classes('w-full').bind_value(self, 'quantize')
with ui.row().classes('items-center gap-2'):
ui.icon('check_circle').props('color=positive').bind_visibility_from(self, 'download_status', backward=lambda x: x == 'success')
self.status_label = ui.label().bind_text_from(self, 'download_status')
ui.linear_progress(value=0, show_value=False).props('buffer=0.0 animation-speed=0').bind_value_from(self, 'download_progress')
self.create_btn = ui.button('Create Model', on_click=self.create_model).props('color=primary').classes('w-full').bind_enabled_from(self, 'model_name', backward=lambda x: bool(x) and not self.is_downloading)
async def create_model(self):
    self.parameters = self.parameters.strip()
    model_parameters: Optional[Dict[str, str | int | float]] = None
    if self.parameters:
        model_parameters = {}
        for line in self.parameters.split('\n'):
            line = line.strip()
            if not line:
                continue
            try:
                key, value = line.split(maxsplit=1)
            except ValueError:
                ui.notify(f'Not a valid format: {line}')
                return
            if key in ['num_ctx', 'repeat_last_n', 'seed', 'num_predict', 'top_k']:
                model_parameters[key] = int(value)
            elif key in ['repeat_penalty', 'temperature', 'top_p', 'min_p']:
                model_parameters[key] = float(value)
            elif key == 'stop':
                model_parameters[key] = value.strip()
            else:
                ui.notify(f'Unknown parameter: {key}')
                return
    self.create_btn.set_enabled(False)
    self.is_downloading = True
    try:
        async for chunk in ollama.create_ollama_model(self.model_name, self.model_from, model_parameters, self.quantize):
            if chunk.strip():
                # each chunk is one NDJSON status line from the create stream
                try:
                    chunk_data = json.loads(chunk)
                    self.download_status = chunk_data['status']
                    if 'total' in chunk_data and 'completed' in chunk_data:
                        self.download_progress = chunk_data['completed'] / chunk_data['total']
                    else:
                        self.download_progress = 0
                except json.JSONDecodeError:
                    pass  # skip malformed chunks
    except Exception as e:
        ui.notify(f'Error: {e}', type='negative')
    finally:
        self.is_downloading = False
        self.create_btn.set_enabled(True)
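Note that the Parameters textarea expects bare `key value` lines (e.g. `num_ctx 8192`), not full `PARAMETER ...` Modelfile syntax, since the parser above splits each line into a single key/value pair. ollama.create_ollama_model itself is assumed to wrap Ollama's /api/create, which takes `model`, `from`, an optional `parameters` dict, and an optional `quantize` level, and streams status lines; a sketch under those assumptions:

from typing import AsyncIterator, Dict, Optional
import httpx  # assumption, as in the downloader sketch

async def create_ollama_model_sketch(name: str, base: str,
                                     parameters: Optional[Dict[str, str | int | float]],
                                     quantize: Optional[str]) -> AsyncIterator[str]:
    payload: dict = {'model': name, 'from': base, 'stream': True}
    if parameters:
        payload['parameters'] = parameters
    if quantize:
        payload['quantize'] = quantize
    async with httpx.AsyncClient(timeout=None) as client:
        async with client.stream('POST', 'http://localhost:11434/api/create',
                                 json=payload) as response:
            async for line in response.aiter_lines():
                yield line  # NDJSON status lines, same shape as /api/pull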

View File

@@ -0,0 +1,65 @@
from nicegui import ui
from niceguiasyncelement import AsyncCard
import json
from utils import ollama
from typing import Optional
class ModelQuickTestComponent(AsyncCard):
model: Optional[str]
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.model = None
async def build(self, model: str) -> None:
self.model = model
with self:
with ui.card().classes('w-full'):
ui.label(f'Quick Chat Test with {model}').classes('text-h6 font-bold mb-4')
self.quick_test_textarea = ui.textarea(
label='Prompt',
placeholder='Enter your prompt here...',
value='Hello! Tell me a fun fact about AMD GPUs.'
).classes('w-full').props('autogrow outlined')
self.quick_test_send = ui.button('Send', icon='send', on_click=self._quick_test).props('color=primary')
with ui.row():
ui.icon('message', size='sm')
ui.label('Response')
self.quick_test_response = ui.label('Response will appear here...').classes('text-grey-7')
async def _quick_test(self):
if not self.model:
ui.notify('Select a model first.', type='warning')
return
self.quick_test_response.set_text('')
prompt = self.quick_test_textarea.value
# request body passed straight to ollama.stream_chat
data = {
"model": self.model,
"messages": [{"role": "user", "content": prompt}],
"stream": True
}
self.quick_test_send.set_enabled(False)
try:
    async for chunk in ollama.stream_chat(data):
        if chunk.strip():
            # each chunk is one NDJSON line; append the content delta to the label
            try:
                chunk_data = json.loads(chunk)
                if 'message' in chunk_data and 'content' in chunk_data['message']:
                    self.quick_test_response.set_text(self.quick_test_response.text + chunk_data['message']['content'])
            except json.JSONDecodeError:
                pass  # skip malformed chunks
except Exception as e:
    ui.notify(f'Error: {e}', type='negative')
finally:
    self.quick_test_send.set_enabled(True)
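For reference, ollama.stream_chat is assumed to yield raw NDJSON lines in the shape of Ollama's /api/chat stream; this is the structure _quick_test unpacks:

import json

# one streamed /api/chat line (abridged) and how the handler above unpacks it
line = '{"message": {"role": "assistant", "content": "Hello"}, "done": false}'
chunk = json.loads(line)
print(chunk['message']['content'])  # -> Hello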

View File

@@ -8,29 +8,17 @@ class Sidebar:
with ui.column().classes('w-full h-full p-4'):
# Navigation sections
ui.label('MAIN').classes('text-xs text-grey-5 font-bold tracking-wide mb-2')
with ui.column().classes('gap-1 mb-6'):
self._nav_item('Dashboard', 'dashboard', '/', active=(current_route == '/'))
self._nav_item('System Overview', 'monitor', '/system', active=(current_route == '/system'))
ui.label('MANAGEMENT').classes('text-xs text-grey-5 font-bold tracking-wide mb-2')
with ui.column().classes('gap-1 mb-6'):
self._nav_item('Ollama Manager', 'smart_toy', '/ollama', active=(current_route == '/ollama'))
self._nav_item('Process Manager', 'terminal', '/processes', active=(current_route == '/processes'))
self._nav_item('Network Monitor', 'router', '/network', active=(current_route == '/network'))
self._nav_item('Package Manager', 'inventory_2', '/packages', active=(current_route == '/packages'))
ui.label('TOOLS').classes('text-xs text-grey-5 font-bold tracking-wide mb-2')
with ui.column().classes('gap-1 mb-6'):
self._nav_item('Log Viewer', 'description', '/logs', active=(current_route == '/logs'))
self._nav_item('System Info', 'info', '/info', active=(current_route == '/info'))
self._nav_item('Censor', 'description', '/censor', active=(current_route == '/censor'))
ui.space()
self._nav_item('Model Manager', 'view_in_ar', '/ollama', active=(current_route == '/ollama'))
# Bottom section
ui.separator().classes('my-4')
self._nav_item('Settings', 'settings', '/settings', active=(current_route == '/settings'))