2025-09-18 10:10:52 +02:00
parent 590af9407c
commit 994fc6873e
14 changed files with 550 additions and 136 deletions

View File

@@ -1,4 +1,29 @@
from nicegui import ui
from utils import SystemMonitor, GPUMonitor
from typing import Optional, Literal
class MetricCircleAdv:
def __init__(self, label: str, monitor: SystemMonitor | GPUMonitor,
target_value: str,
target_max_value: str,
color: str,
formatting: Literal['percent', 'units', 'degree'],
icon: Optional[str] = None):
with ui.card().classes('metric-card p-4 text-center'):
with ui.column().classes('items-center gap-2'):
# Icon at top
with ui.row().classes('items-center gap-1'):
if icon:
ui.icon(icon, size='sm', color=color)
# Title
ui.label(label).classes('text-sm text-grey-5 font-medium')
# Circular progress - simplified
with ui.circular_progress(size='60px', color=color, show_value=False).bind_value_from(monitor, target_value):
if formatting == 'percent':
ui.label().classes('text-lg font-bold text-white').bind_text_from(monitor, target_value, backward=lambda x: f"{int(x * 100)} %")
class MetricCircle:
@@ -46,4 +71,4 @@ class ColorfulMetricCard:
with ui.card().classes(f'p-4 text-center animate-fade-in').style(f'background: linear-gradient(135deg, {color}20 0%, {color}10 100%); border: 1px solid {color}40'):
with ui.column().classes('items-center gap-2'):
ui.icon(icon, size='xl').style(f'color: {color}')
ui.label(title).classes('text-sm font-medium text-white')
ui.label(title).classes('text-sm font-medium text-white')
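Note: only the 'percent' branch of `formatting` is implemented in this hunk; the 'degree' and 'units' modes and the `target_max_value` argument go unused. A sketch of how the remaining branches might continue inside `MetricCircleAdv.__init__` (the unit strings and GB scaling are assumptions, not part of the commit):
elif formatting == 'degree':
    ui.label().classes('text-lg font-bold text-white').bind_text_from(
        monitor, target_value, backward=lambda x: f'{x:.1f}°C')
elif formatting == 'units':
    # hypothetical: target_max_value names a capacity attribute on the monitor
    ui.label().classes('text-lg font-bold text-white').bind_text_from(
        monitor, target_value,
        backward=lambda x: f'{x / 1024**3:.1f} / {getattr(monitor, target_max_value) / 1024**3:.1f} GB')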

View File

@@ -1,17 +1,23 @@
from nicegui import ui
from utils import SystemMonitor, GPUMonitor
from nicegui import ui, binding
from utils import SystemMonitor, GPUMonitor, OllamaMonitor
class Header(ui.header):
def __init__(self, system_monitor: SystemMonitor, gpu_monitor: GPUMonitor):
def __init__(self, system_monitor: SystemMonitor, gpu_monitor: GPUMonitor, ollama_monitor: OllamaMonitor):
super().__init__(fixed=True, elevated=False)
with self.classes('bg-transparent'):
with ui.row().classes('w-full items-center justify-between px-6 py-3'):
with ui.row().classes('w-full items-center justify-between px-6'):
# Left side - minimal branding
with ui.row().classes('items-center gap-3'):
ui.label('ArchGPU Frontend').classes('text-xl font-bold text-white')
ui.chip('Live', icon='circle', color='green').props('size=sm outline')
chip = ui.chip('Live', icon='circle').props('size=sm outline')
chip.bind_text_from(ollama_monitor, 'status', backward=lambda x: self.update_ollama_running_chip(chip, x))
with ui.row().classes('items-center gap-4'):
ui.label().bind_text_from(ollama_monitor, 'active_models', backward=lambda x: self.update_active_models(x))
# Right side - system status only
with ui.row().classes('items-center gap-4'):
@@ -31,3 +37,14 @@ class Header(ui.header):
ui.icon('thermostat', size='sm', color='red')
ui.label().classes('text-sm text-white').bind_text_from(gpu_monitor, 'temperature',
lambda x: f'{x:.1f}°C')
def update_ollama_running_chip(self, obj: ui.chip, state: bool) -> str:
obj.classes(remove='text-red' if state else 'text-green')
obj.classes(add='text-green' if state else 'text-red')
return 'Ollama Running' if state else 'Ollama Stopped'
def update_active_models(self, active_models) -> str:
used_vram = sum(m['size_vram'] for m in active_models)
return f'{len(active_models)} Active Models using {used_vram / 1024**3:.2f} GB'

View File

@@ -0,0 +1,57 @@
import json
from nicegui import ui
from niceguiasyncelement import AsyncCard
from utils import ollama
class OllamaDownloaderComponent(AsyncCard):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.is_downloading = False
self.download_progress = 0
self.download_status = ''
async def build(self) -> None:
with self:
with ui.column().classes('w-full gap-4'):
ui.label('Model Downloader').classes('text-xl font-bold')
model_input = ui.input(
'Model ID',
placeholder='e.g., hf.co/TheBloke/Llama-2-7B-GGUF',
value='qwen2.5:0.5b'
).props('outlined dense').classes('w-full')
ui.link('Ollama Library', target='https://ollama.com/library/', new_tab=True)
ui.link('Using HF Models', target='https://huggingface.co/docs/hub/en/ollama', new_tab=True)
with ui.row().classes('items-center gap-2'):
ui.icon('check_circle').props('color=positive').bind_visibility_from(self, 'download_status', backward=lambda x: x == 'success')
self.status_label = ui.label().bind_text_from(self, 'download_status')
ui.linear_progress(value=0, show_value=False).props('buffer=0.0 animation-speed=0').bind_value_from(self, 'download_progress')
self.download_btn = ui.button(
'Download Model',
on_click=lambda m=model_input: self.download_model(m.value) # type: ignore
).props('color=primary').classes('w-full').bind_enabled_from(model_input, 'value', backward=lambda x: bool(x) and not self.is_downloading)
async def download_model(self, model):
self.download_btn.set_enabled(False)
try:
async for chunk in ollama.download_model(model):
if chunk.strip():
# Parse the NDJSON progress record from /api/pull
try:
chunk_data = json.loads(chunk)
self.download_status = chunk_data['status']
if 'total' in chunk_data and 'completed' in chunk_data:
self.download_progress = chunk_data['completed'] / chunk_data['total']
else:
self.download_progress = 0
except json.JSONDecodeError:
pass # Skip malformed chunks
except Exception as e:
ui.notify(f'Error: {str(e)}', type='negative')
finally:
self.download_btn.set_enabled(True)
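Note: `aiter_text()` chunks are not guaranteed to align with the newline-delimited JSON records that /api/pull emits, so a per-chunk `json.loads` can silently drop records split across chunks. A line-buffered sketch (reusing the `ollama.download_model` generator above; the helper name is hypothetical):
import json
from utils import ollama

async def parse_pull_progress(model: str):
    # Buffer partial lines so records split across chunks are not lost
    buffer = ''
    async for chunk in ollama.download_model(model):
        buffer += chunk
        while '\n' in buffer:
            line, buffer = buffer.split('\n', 1)
            if not line.strip():
                continue
            try:
                record = json.loads(line)
            except json.JSONDecodeError:
                continue  # genuinely malformed record
            progress = (record['completed'] / record['total']
                        if 'completed' in record and 'total' in record else 0.0)
            yield record.get('status', ''), progress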

View File

@@ -0,0 +1,85 @@
import json
from nicegui import ui, binding
from niceguiasyncelement import AsyncCard
from utils import ollama
from typing import Optional, Dict
modelfile_example = """FROM qwen2.5-coder:7b
PARAMETER num_ctx 8192
PARAMETER temperature 0.1
SYSTEM "Du bist ein Python-Experte."
"""
class OllamaModelCreationComponent(AsyncCard):
model_name = binding.BindableProperty()
model_from = binding.BindableProperty()
parameters = binding.BindableProperty()
quantize = binding.BindableProperty()
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.is_downloading = False
self.download_progress = 0
self.download_status = ''
async def build(self) -> None:
self.classes('w-full')
with self:
with ui.column().classes('w-full gap-4'):
ui.label('Create Model').classes('text-xl font-bold')
ui.input('Model Name', value='qwen2.5-coder-32k-python:latest').props('outlined dense').classes('w-full').bind_value(self, 'model_name')
ui.input('From', value='qwen2.5-coder:7b').props('outlined dense').classes('w-full').bind_value(self, 'model_from')
ui.textarea(placeholder='Parameters').classes('w-full').props('autogrow').bind_value(self, 'parameters')
ui.select(['q4_K_M', 'q4_K_S', 'q8_0'], label='quantize', clearable=True).props('outlined dense').classes('w-full').bind_value(self, 'quantize')
with ui.row().classes('items-center gap-2'):
ui.icon('check_circle').props('color=positive').bind_visibility_from(self, 'download_status', backward=lambda x: x == 'success')
self.status_label = ui.label().bind_text_from(self, 'download_status')
ui.linear_progress(value=0, show_value=False).props('buffer=0.0 animation-speed=0').bind_value_from(self, 'download_progress')
self.create_btn = ui.button('Create Model', on_click=self.create_model).props('color=primary').classes('w-full').bind_enabled_from(self, 'model_name', backward=lambda x: bool(x) and not self.is_downloading)
async def create_model(self):
self.parameters = self.parameters.strip()
model_parameters: Optional[Dict[str, str | int | float]] = None
if self.parameters:
model_parameters = {}
for line in self.parameters.split('\n'):
line = line.strip()
if not line:
continue
try:
key, value = line.split(maxsplit=1)
except ValueError:
ui.notify(f'Not a valid parameter line: {line}')
return
if key in ['num_ctx', 'repeat_last_n', 'seed', 'num_predict', 'top_k']:
model_parameters[key] = int(value)
elif key in ['repeat_penalty', 'temperature', 'top_p', 'min_p']:
model_parameters[key] = float(value)
elif key == 'stop':
model_parameters[key] = value.strip()
else:
ui.notify(f'Unknown parameter: {key}')
return
self.create_btn.set_enabled(False)
try:
async for chunk in ollama.create_ollama_model(self.model_name, self.model_from, model_parameters, self.quantize):
if chunk.strip():
# Parse the NDJSON progress record from /api/create
try:
chunk_data = json.loads(chunk)
self.download_status = chunk_data['status']
if 'total' in chunk_data and 'completed' in chunk_data:
self.download_progress = chunk_data['completed'] / chunk_data['total']
else:
self.download_progress = 0
except json.JSONDecodeError:
pass # Skip malformed chunks
except Exception as e:
ui.notify(f'Error: {str(e)}', type='negative')
finally:
self.create_btn.set_enabled(True)
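Note: the parsed parameters end up in the /api/create request body built by `create_ollama_model` in utils/ollama.py below. A representative payload (values are illustrative only):
payload = {
    'model': 'qwen2.5-coder-32k-python:latest',  # new model name
    'from': 'qwen2.5-coder:7b',                  # base model
    'stream': True,
    'parameters': {'num_ctx': 32768, 'temperature': 0.1},
    'quantize': 'q4_K_M',                        # optional
}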

View File

@@ -0,0 +1,65 @@
import json
from nicegui import ui
from niceguiasyncelement import AsyncCard
from utils import ollama
from typing import Optional
class ModelQuickTestComponent(AsyncCard):
model: Optional[str]
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.model = None
async def build(self, model: str) -> None:
self.model = model
with self:
with ui.card().classes('w-full'):
ui.label(f'Quick Chat Test with {model}').classes('text-h6 font-bold mb-4')
self.quick_test_textarea = ui.textarea(
label='Prompt',
placeholder='Enter your prompt here...',
value='Hello! Tell me a fun fact about AMD GPUs.'
).classes('w-full').props('autogrow outlined')
self.quick_test_send = ui.button('Send', icon='send', on_click=self._quick_test).props('color=primary')
with ui.row():
ui.icon('message', size='sm')
ui.label('Response')
self.quick_test_response = ui.label('Response will appear here...').classes('text-grey-7')
async def _quick_test(self):
if not self.model:
ui.notify('Select a model first.', type='warning')
return
self.quick_test_response.set_text('')
prompt = self.quick_test_textarea.value
# build the streaming request for ollama.stream_chat
data = {
"model": self.model,
"messages": [{"role": "user", "content": prompt}],
"stream": True
}
self.quick_test_send.set_enabled(False)
try:
async for chunk in ollama.stream_chat(data):
if chunk.strip():
# Parse the streamed NDJSON chat record
try:
chunk_data = json.loads(chunk)
if 'message' in chunk_data and 'content' in chunk_data['message']:
content = chunk_data['message']['content']
current_text = self.quick_test_response.text
self.quick_test_response.set_text(current_text + content)
except json.JSONDecodeError:
pass # Skip malformed chunks
except Exception as e:
ui.notify(f'Error: {str(e)}', type='negative')
finally:
self.quick_test_send.set_enabled(True)
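Note: each streamed /api/chat chunk is one JSON record and `_quick_test` appends the `message.content` fragments. A representative record (illustrative values; the final record carries `done=True` plus timing stats):
chunk_data = {
    'model': 'qwen2.5:0.5b',
    'message': {'role': 'assistant', 'content': 'AMD '},
    'done': False,
}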

View File

@@ -8,29 +8,17 @@ class Sidebar:
with ui.column().classes('w-full h-full p-4'):
# Navigation sections
ui.label('MAIN').classes('text-xs text-grey-5 font-bold tracking-wide mb-2')
with ui.column().classes('gap-1 mb-6'):
self._nav_item('Dashboard', 'dashboard', '/', active=(current_route == '/'))
self._nav_item('System Overview', 'monitor', '/system', active=(current_route == '/system'))
ui.label('MANAGEMENT').classes('text-xs text-grey-5 font-bold tracking-wide mb-2')
with ui.column().classes('gap-1 mb-6'):
self._nav_item('Ollama Manager', 'smart_toy', '/ollama', active=(current_route == '/ollama'))
self._nav_item('Process Manager', 'terminal', '/processes', active=(current_route == '/processes'))
self._nav_item('Network Monitor', 'router', '/network', active=(current_route == '/network'))
self._nav_item('Package Manager', 'inventory_2', '/packages', active=(current_route == '/packages'))
ui.label('TOOLS').classes('text-xs text-grey-5 font-bold tracking-wide mb-2')
with ui.column().classes('gap-1 mb-6'):
self._nav_item('Log Viewer', 'description', '/logs', active=(current_route == '/logs'))
self._nav_item('System Info', 'info', '/info', active=(current_route == '/info'))
self._nav_item('Censor', 'description', '/censor', active=(current_route == '/censor'))
ui.space()
self._nav_item('Model Manager', 'view_in_ar', '/ollama', active=(current_route == '/ollama'))
# Bottom section
ui.separator().classes('my-4')
self._nav_item('Settings', 'settings', '/settings', active=(current_route == '/settings'))

View File

@@ -5,7 +5,7 @@ from nicegui import ui, app
from components import Header, Sidebar
from pages import DashboardPage, OllamaManagerPage
from utils import GPUMonitor, SystemMonitor
from utils import GPUMonitor, SystemMonitor, OllamaMonitor
import logging
logging.basicConfig(
@@ -22,9 +22,11 @@ app.add_static_files('/static', 'src/static')
# Create monitor instances (bindable dataclasses)
system_monitor = SystemMonitor()
gpu_monitor = GPUMonitor()
ollama_monitor = OllamaMonitor()
app.timer(2.0, system_monitor.update)
app.timer(2.0, gpu_monitor.update)
app.timer(2.0, ollama_monitor.update)
def create_layout(current_route='/'):
@@ -35,7 +37,7 @@ def create_layout(current_route='/'):
# Add custom CSS
ui.add_head_html('<link rel="stylesheet" type="text/css" href="/static/style.css">')
Header(system_monitor, gpu_monitor)
Header(system_monitor, gpu_monitor, ollama_monitor)
Sidebar(current_route)

View File

@@ -1,6 +1,6 @@
from typing import Literal
from nicegui import ui
from components.circular_progress import MetricCircle, LargeMetricCircle, ColorfulMetricCard
from components.circular_progress import MetricCircle, LargeMetricCircle, ColorfulMetricCard, MetricCircleAdv
from utils import SystemMonitor, GPUMonitor
"""
@@ -58,6 +58,8 @@ class DashboardPage(ui.column):
# Main content area with proper viewport handling
with self:
with ui.column().classes('w-full max-w-6xl mx-auto p-6 gap-6'):
with ui.grid(columns=4).classes('w-full gap-4'):
MetricCircleAdv('CPU', system_monitor, 'cpu_percent', '', icon='memory', formatting='percent', color='#e879f9')
# Top stats grid
with ui.grid(columns=4).classes('w-full gap-4'):
# CPU metric with binding

View File

@@ -3,6 +3,9 @@ from utils import ollama
from typing import Literal, List, Dict
from pprint import pprint
from niceguiasyncelement import AsyncColumn
from components.ollama_downloader import OllamaDownloaderComponent
from components.ollama_model_creation import OllamaModelCreationComponent
from components.ollama_quick_test import ModelQuickTestComponent
class OllamaManagerPage(AsyncColumn):
@@ -44,53 +47,32 @@ class OllamaManagerPage(AsyncColumn):
with ui.row().classes('w-full items-center mb-4'):
ui.label('Installed Models').classes('text-h6 font-bold')
ui.space()
ui.button('Create New Model', icon='create', on_click=self._create_model).props('color=primary')
ui.button('Pull New Model', icon='download').props('color=primary')
ui.button('Create New Model', icon='create', on_click=self._create_model_dialog).props('color=primary')
ui.button('Pull New Model', icon='download', on_click=self._download_model_dialog).props('color=primary')
with ui.column().classes('w-full gap-2'):
await self.models_container() # type: ignore
# Quick test
with ui.card().classes('w-full'):
ui.label('Quick Chat Test').classes('text-h6 font-bold mb-4')
async def _create_model_dialog(self):
with ui.row().classes('w-full gap-2 mb-2'):
self.quick_test_select = ui.select(
[],
label='Model'
).classes('flex-grow').props('outlined')
with ui.dialog() as dialog:
await OllamaModelCreationComponent.create()
await dialog
self.models_container.refresh()
self.quick_test_textarea = ui.textarea(
label='Prompt',
placeholder='Enter your prompt here...',
value='Hello! Tell me a fun fact about AMD GPUs.'
).classes('w-full').props('outlined')
async def _download_model_dialog(self):
with ui.dialog() as dialog:
await OllamaDownloaderComponent.create()
await dialog
self.models_container.refresh()
self.quick_test_send = ui.button('Send', icon='send', on_click=self._quick_test).props('color=primary')
with ui.row():
ui.icon('message', size='sm')
ui.label('Response')
self.quick_test_response = ui.label('Response will appear here...').classes('text-grey-7')
await self._quick_test_populate_options()
async def _create_model(self):
modelfile = """FROM qwen2.5-coder:7b
PARAMETER num_ctx 32768
PARAMETER temperature 0.1
SYSTEM "Du bist ein Python-Experte."
"""
print('creating model')
result = await ollama.create_ollama_model(
"qwen2.5-coder-32k-python",
modelfile
)
print('finished.')
print(result)
await self.models_container.refresh()
async def _test_model_dialog(self, model):
with ui.dialog() as dialog:
await ModelQuickTestComponent.create(model)
await dialog
async def _loaded_models(self):
loaded = await ollama.loaded_models()
loaded = await ollama.active_models()
print(loaded)
async def _delete_model(self, model):
@@ -114,8 +96,6 @@ class OllamaManagerPage(AsyncColumn):
for model in self.models:
self._create_model_item(model)
if hasattr(self, 'quick_test_select'):
await self._quick_test_populate_options()
def _create_model_item(self, model: Dict):
with ui.card().classes('w-full'):
@@ -169,48 +149,14 @@ class OllamaManagerPage(AsyncColumn):
ui.space()
with ui.row().classes('gap-2'):
ui.button(icon='chat', on_click=lambda m=model['name']: self._test_model_dialog(m)).props('round flat').tooltip('Test Model')
ui.button(icon='play_arrow').props('round flat color=primary').tooltip('Run Model')
ui.button(icon='info', on_click=lambda m=model['name']: self._print_model_info(m)).props('round flat').tooltip('Model Info')
ui.button(icon='delete', on_click=lambda m=model['name']: self._delete_model(m)).props('round flat color=negative').tooltip('Delete Model')
async def _print_model_info(self, model_name):
result = await ollama.model_info(model_name)
print(result)
for key, value in result.items():
print(key)
async def _quick_test_populate_options(self):
select_options = [model['name'] for model in self.models]
self.quick_test_select.set_options(select_options)
async def _quick_test(self):
model = self.quick_test_select.value
if not model:
ui.notify('Select a model first.', type='warning')
return
self.quick_test_response.set_text('')
prompt = self.quick_test_textarea.value
# calling stream_ollama_chat
data = {
"model": model,
"messages": [{"role": "user", "content": prompt}],
"stream": True
}
self.quick_test_send.set_enabled(False)
try:
async for chunk in ollama.stream_chat(data):
if chunk.strip():
# Parse the JSON chunk and extract content
import json
try:
chunk_data = json.loads(chunk)
if 'message' in chunk_data and 'content' in chunk_data['message']:
content = chunk_data['message']['content']
current_text = self.quick_test_response.text
self.quick_test_response.set_text(current_text + content)
except json.JSONDecodeError:
pass # Skip malformed chunks
except Exception as e:
ui.notify(f'Error: {str(e)}', type='negative')
finally:
self.quick_test_send.set_enabled(True)
print(result['modelfile'])

View File

@@ -168,7 +168,7 @@ body,
}
.main-content {
height: calc(100vh - 64px) !important;
height: calc(100vh - 0px) !important;
width: calc(100vw - 256px) !important;
overflow-y: auto !important;
margin-left: 256px !important;

View File

@@ -1,4 +1,5 @@
from .system_monitor import SystemMonitor
from .gpu_monitor import GPUMonitor
from .ollama_monitor import OllamaMonitor
__all__ = ['SystemMonitor', 'GPUMonitor']
__all__ = ['SystemMonitor', 'GPUMonitor', 'OllamaMonitor']

View File

@@ -1,6 +1,6 @@
import httpx
from nicegui import ui
from typing import Tuple
from typing import Tuple, Dict
async def status(url='http://127.0.0.1:11434') -> Tuple[bool, str]:
@@ -24,27 +24,30 @@ async def available_models(url='http://127.0.0.1:11434'):
return response.json()["models"]
async def loaded_models(url='http://127.0.0.1:11434'):
async def active_models(url='http://127.0.0.1:11434'):
async with httpx.AsyncClient() as client:
response = await client.get(f"{url}/api/ps")
response.raise_for_status()
return response.json()
return response.json()["models"]
async def create_ollama_model(name, modelfile_content, url='http://127.0.0.1:11434'):
async def create_ollama_model(model_name, model_from, parameters=None, quantize=None, url='http://127.0.0.1:11434'):
data = {
"name": name,
"from": "qwen2.5-coder:7b",
"modelfile": modelfile_content,
"stream": False
"model": model_name,
"from": model_from,
"stream": True
}
if parameters:
data['parameters'] = parameters
if quantize:
data['quantize'] = quantize
async with httpx.AsyncClient() as client:
response = await client.post(f"{url}/api/create", json=data)
response.raise_for_status()
print(response.text)
return response.json()
async with client.stream('POST', f"{url}/api/create", json=data) as response:
async for chunk in response.aiter_text():
yield chunk
async def delete_model(name, url='http://127.0.0.1:11434') -> bool:
@@ -79,3 +82,11 @@ async def stream_chat(data, url='http://127.0.0.1:11434'):
async with client.stream('POST', f"{url}/api/chat", json=data) as response:
async for chunk in response.aiter_text():
yield chunk
async def download_model(model, url='http://127.0.0.1:11434'):
data = {'name': model, 'stream': True}
async with httpx.AsyncClient() as client:
async with client.stream('POST', f"{url}/api/pull", json=data) as response:
async for chunk in response.aiter_text():
yield chunk
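Note: `create_ollama_model` and `download_model` are now async generators, so the request only runs while a caller iterates them. A minimal consumption sketch (assumes an Ollama server on the default 127.0.0.1:11434):
import asyncio

async def main() -> None:
    async for chunk in download_model('qwen2.5:0.5b'):
        print(chunk, end='')  # raw NDJSON progress records

asyncio.run(main())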

View File

@@ -0,0 +1,21 @@
import logging
from dataclasses import field
from typing import Dict, List
from nicegui import binding
from utils import ollama
logger = logging.getLogger(__name__)
@binding.bindable_dataclass
class OllamaMonitor:
status: bool = False
version: str = 'Unknown'
active_models: List[Dict] = field(default_factory=list)
async def update(self):
self.status, self.version = await ollama.status()
self.active_models = await ollama.active_models() if self.status else []
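Note: as a bindable dataclass the monitor only needs to be polled; bound UI elements refresh themselves. A minimal wiring sketch mirroring the `app.timer` registration added in main.py:
from nicegui import app, ui

ollama_monitor = OllamaMonitor()
app.timer(2.0, ollama_monitor.update)  # poll every 2 s, as in main.py

ui.label().bind_text_from(ollama_monitor, 'version',
                          backward=lambda v: f'Ollama {v}')
ui.run()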