stuff
This commit is contained in:
@@ -1,4 +1,5 @@
|
||||
# Public monitor API for this package: re-export the monitor classes so
# callers can do `from <package> import OllamaMonitor`, etc.
from .system_monitor import SystemMonitor
from .gpu_monitor import GPUMonitor
from .ollama_monitor import OllamaMonitor

# Explicit public API; OllamaMonitor is exported alongside the existing monitors.
__all__ = ['SystemMonitor', 'GPUMonitor', 'OllamaMonitor']
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import httpx
|
||||
from nicegui import ui
|
||||
from typing import Tuple
|
||||
from typing import Tuple, Dict
|
||||
|
||||
|
||||
async def status(url='http://127.0.0.1:11434') -> Tuple[bool, str]:
|
||||
@@ -24,27 +24,30 @@ async def available_models(url='http://127.0.0.1:11434'):
|
||||
return response.json()["models"]
|
||||
|
||||
|
||||
async def active_models(url='http://127.0.0.1:11434'):
    """Return the models currently loaded into memory by the Ollama server.

    Queries the ``/api/ps`` endpoint and extracts the ``models`` array from
    the JSON payload, so callers get the list directly rather than the
    whole response envelope.

    Args:
        url: Base URL of the Ollama server.

    Returns:
        list: One dict per running model, as reported by Ollama.

    Raises:
        httpx.HTTPStatusError: If the server responds with an error status.
    """
    async with httpx.AsyncClient() as client:
        response = await client.get(f"{url}/api/ps")
        response.raise_for_status()
        return response.json()["models"]
|
||||
|
||||
|
||||
async def create_ollama_model(model_name, model_from, parameters=None, quantizie=None, url='http://127.0.0.1:11434'):
    """Create a new Ollama model and stream the server's progress output.

    Posts to ``/api/create`` with ``stream`` enabled and yields each raw
    text chunk of the newline-delimited JSON progress stream as it arrives.

    Args:
        model_name: Name to give the new model.
        model_from: Existing model to derive the new model from.
        parameters: Optional dict of model parameters to set.
        quantizie: Optional quantization level (e.g. ``"q4_K_M"``). Name kept
            as-is for backward compatibility with existing callers.
        url: Base URL of the Ollama server.

    Yields:
        str: Raw chunks of the server's streaming progress response.

    Raises:
        httpx.HTTPError: On connection failures or error responses.
    """
    data = {
        "model": model_name,
        "from": model_from,
        "stream": True,  # stream progress objects instead of one final reply
    }
    if parameters:
        data['parameters'] = parameters
    if quantizie:
        # BUG FIX: the Ollama API field is spelled "quantize"; the misspelled
        # "quantizie" key was silently ignored by the server, so quantization
        # never happened. The parameter name is unchanged for caller
        # compatibility; only the wire key is corrected.
        data['quantize'] = quantizie

    async with httpx.AsyncClient() as client:
        async with client.stream('POST', f"{url}/api/create", json=data) as response:
            async for chunk in response.aiter_text():
                yield chunk
|
||||
|
||||
|
||||
async def delete_model(name, url='http://127.0.0.1:11434') -> bool:
|
||||
@@ -79,3 +82,11 @@ async def stream_chat(data, url='http://127.0.0.1:11434'):
|
||||
async with client.stream('POST', f"{url}/api/chat", json=data) as response:
|
||||
async for chunk in response.aiter_text():
|
||||
yield chunk
|
||||
|
||||
|
||||
async def download_model(model, url='http://127.0.0.1:11434'):
    """Pull *model* from the Ollama registry, yielding raw progress chunks.

    Streams the ``/api/pull`` endpoint; each yielded chunk is a piece of the
    newline-delimited JSON progress output emitted by the server.

    Args:
        model: Name of the model to pull (e.g. ``"llama3:8b"``).
        url: Base URL of the Ollama server.

    Yields:
        str: Raw chunks of the streaming pull-progress response.
    """
    payload = {'name': model, 'stream': True}
    client = httpx.AsyncClient()
    async with client:
        async with client.stream('POST', f"{url}/api/pull", json=payload) as response:
            async for text_chunk in response.aiter_text():
                yield text_chunk
|
||||
|
||||
21
src/utils/ollama_monitor.py
Normal file
21
src/utils/ollama_monitor.py
Normal file
@@ -0,0 +1,21 @@
|
||||
import psutil
|
||||
import platform
|
||||
import time
|
||||
import logging
|
||||
from dataclasses import dataclass, field
|
||||
from datetime import datetime, timedelta
|
||||
from typing import Dict, Any, List, Optional, Literal
|
||||
from nicegui import binding
|
||||
from utils import ollama
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@binding.bindable_dataclass
class OllamaMonitor:
    """Bindable snapshot of an Ollama server's state for the NiceGUI UI.

    Fields are plain dataclass attributes; the ``bindable_dataclass``
    decorator lets NiceGUI widgets refresh automatically whenever
    :meth:`update` rewrites them.
    """

    # Whether the server answered the last status probe.
    status: bool = False
    # Server version string; 'Unknown' until the first successful probe.
    version: str = 'Unknown'
    # Models currently loaded on the server; empty when unreachable.
    active_models: Dict = field(default_factory=dict)

    async def update(self):
        """Refresh status, version and the active-model list from the server."""
        self.status, self.version = await ollama.status()
        if self.status:
            self.active_models = await ollama.active_models()
        else:
            # Server unreachable: clear the model list rather than show stale data.
            self.active_models = {}
|
||||
Reference in New Issue
Block a user