tools
config/external_tools.yaml (new file, 19 lines)
@@ -0,0 +1,19 @@
# External Tools Configuration
# Define external tools that appear in the sidebar

external_tools:
  - label: "Open WebUI"
    icon: "https://cdn.jsdelivr.net/gh/homarr-labs/dashboard-icons/svg/open-webui-light.svg"
    url: "https://webui.project-insanity.de/"

  - label: "Langfuse"
    icon: "https://langfuse.com/langfuse_icon.svg"  # custom icon URL; Material Design icon names also work
    url: "http://10.2.2.104:3000"

# Configuration options
config:
  # Open external tools in new tab/window
  open_in_new_tab: true

  # Show external tools section in sidebar
  enabled: true
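A minimal sketch of how this file can be consumed. The standalone path and defaults here are illustrative assumptions; the actual loader added by this commit is Sidebar._load_external_tools() further down.

import yaml
from pathlib import Path

# Hypothetical standalone loader; the real implementation lives in Sidebar._load_external_tools().
config_path = Path('config/external_tools.yaml')
data = yaml.safe_load(config_path.read_text()) if config_path.exists() else {}

settings = data.get('config', {})
if settings.get('enabled', True):
    for tool in data.get('external_tools', []):
        # Each entry carries a label, an icon (Material name or image URL), and a target URL.
        print(tool['label'], tool['icon'], tool['url'], settings.get('open_in_new_tab', True))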
pyproject.toml
@@ -7,9 +7,12 @@ requires-python = ">=3.13"
dependencies = [
    "dotenv>=0.9.9",
    "httpx>=0.28.1",
    "langfuse>=3.5.0",
    "nicegui>=2.24.1",
    "niceguiasyncelement",
    "psutil>=6.1.0",
    "pydantic-ai>=1.0.9",
    "pyyaml>=6.0.2",
]

[tool.uv.sources]
@@ -1,9 +1,13 @@
from nicegui import ui
from tools import TOOLS
import yaml
import os
from pathlib import Path


class Sidebar:
    def __init__(self, current_route='/'):
        self.external_tools = self._load_external_tools()
        with ui.left_drawer(value=True, bordered=True, fixed=True).classes('w-64') as drawer:
            drawer.style('background: #252837; border-right: 1px solid #374151;')

@@ -19,8 +23,16 @@ class Sidebar:

            ui.space()

            ui.label('EXTERNAL').classes('text-xs text-grey-5 font-bold tracking-wide mb-2')
            self._nav_item_external('Open WebUI', 'view_in_ar', 'https://webui.project-insanity.de/', active=(current_route == '/ollama'))
            # External tools section
            if self.external_tools.get('config', {}).get('enabled', True) and self.external_tools.get('external_tools'):

                for tool in self.external_tools['external_tools']:
                    self._nav_item_external(
                        tool['label'],
                        tool['icon'],
                        tool['url'],
                        open_in_new_tab=self.external_tools.get('config', {}).get('open_in_new_tab', True)
                    )

            ui.separator().classes('my-4')
            self._nav_item('Model Manager', 'view_in_ar', '/ollama', active=(current_route == '/ollama'))

@@ -40,10 +52,49 @@ class Sidebar:
            ui.icon(icon, size='sm', color=icon_color)
            ui.label(label).classes(f'text-sm {text_color}')

    def _nav_item_external(self, label: str, icon: str, url: str, active: bool = False):
    def _nav_item_external(self, label: str, icon: str, url: str, open_in_new_tab: bool = True):
        def navigate():
            ui.navigate.to(url, new_tab=True)
            ui.navigate.to(url, new_tab=open_in_new_tab)

        with ui.row().classes(f'w-full items-center gap-3 px-3 py-2 rounded-lg cursor-pointer hover:bg-cyan-600/30').on('click', navigate):
            ui.icon(icon, size='sm')
            ui.label(label).classes(f'text-sm')
        # Different styling for external tools - dashed border and external link indicator
        with ui.row().classes(f'w-full items-center gap-3 px-3 py-2 rounded-lg cursor-pointer border border-dashed border-gray-600 text-grey-5 hover:text-cyan-400 hover:border-cyan-600 hover:bg-cyan-600/10 transition-all').on('click', navigate):
            # Handle both Material Design icons and custom URLs
            if icon.startswith('http'):
                # Custom icon URL
                ui.html(f'<img src="{icon}" style="width: 20px; height: 20px; object-fit: contain;" />')
            else:
                # Material Design icon
                ui.icon(icon, size='sm').classes('text-gray-400')

            # Label with external indicator
            with ui.row().classes('flex-1 items-center gap-1'):
                ui.label(label).classes('text-sm')
                # External link indicator icon
                ui.icon('open_in_new', size='xs').classes('text-gray-500 opacity-60')

    def _load_external_tools(self) -> dict:
        """Load external tools configuration from YAML file"""
        config_path = Path(__file__).parent.parent.parent / 'config' / 'external_tools.yaml'

        try:
            if config_path.exists():
                with open(config_path, 'r') as f:
                    return yaml.safe_load(f) or {}
            else:
                # Return default empty config if file doesn't exist
                return {
                    'external_tools': [],
                    'config': {
                        'enabled': True,
                        'open_in_new_tab': True
                    }
                }
        except Exception as e:
            print(f"Error loading external tools config: {e}")
            return {
                'external_tools': [],
                'config': {
                    'enabled': True,
                    'open_in_new_tab': True
                }
            }
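For reference, the dict that _load_external_tools() is expected to return for the YAML above looks roughly like this. This is a hedged sketch of the shape the sidebar iterates over, not an exact runtime dump.

# Approximate shape of self.external_tools after parsing config/external_tools.yaml
expected = {
    'external_tools': [
        {'label': 'Open WebUI',
         'icon': 'https://cdn.jsdelivr.net/gh/homarr-labs/dashboard-icons/svg/open-webui-light.svg',
         'url': 'https://webui.project-insanity.de/'},
        {'label': 'Langfuse',
         'icon': 'https://langfuse.com/langfuse_icon.svg',
         'url': 'http://10.2.2.104:3000'},
    ],
    'config': {'open_in_new_tab': True, 'enabled': True},
}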
src/main.py (13)
@@ -2,6 +2,8 @@ import os
from dotenv import load_dotenv

from nicegui import ui, app
from langfuse import get_client
from pydantic_ai.agent import Agent

from components import Header, Sidebar
from pages import DashboardPage, OllamaManagerPage

@@ -11,13 +13,22 @@ import logging
from tools import TOOLS
from tools.base_tool import ToolContext, set_tool_context

load_dotenv()

logging.basicConfig(
    level=logging.INFO,
    format='%(asctime)s - %(name)s - %(levelname)s - %(message)s'
)
logging.getLogger('watchfiles').setLevel(logging.WARNING)

load_dotenv()
langfuse = get_client()

if langfuse.auth_check():
    print("Langfuse client is authenticated and ready!")
else:
    print("Authentication failed. Please check your credentials and host.")

Agent.instrument_all()

app.add_static_files('/static', 'src/static')
@@ -1,16 +1,18 @@
from abc import ABC, abstractmethod
from typing import Dict, Callable, Awaitable, Optional
from nicegui import ui
from typing import Dict, Callable, Awaitable, Optional, Any
from nicegui import app
from niceguiasyncelement import AsyncColumn
import inspect
from utils import SystemMonitor, GPUMonitor, OllamaMonitor


class ToolContext:
    """Global context providing access to system monitors and shared resources"""
    def __init__(self, system_monitor=None, gpu_monitor=None, ollama_monitor=None):
        self.system_monitor = system_monitor
        self.gpu_monitor = gpu_monitor
        self.ollama_monitor = ollama_monitor

    def __init__(self, system_monitor: SystemMonitor, gpu_monitor: GPUMonitor, ollama_monitor: OllamaMonitor):
        self.system = system_monitor
        self.gpu = gpu_monitor
        self.ollama = ollama_monitor


# Global context instance

@@ -62,17 +62,17 @@ class MainPage(BasePage):

        # Access system monitors through context
        ui.label().classes('text-sm text-white').bind_text_from(
            self.tool.context.system_monitor, 'cpu_percent',
            self.tool.context.system, 'cpu_percent',
            backward=lambda x: f'CPU Usage: {x:.1f}%'
        )

        ui.label().classes('text-sm text-white').bind_text_from(
            self.tool.context.gpu_monitor, 'temperature',
            self.tool.context.gpu, 'temperature',
            backward=lambda x: f'GPU Temperature: {x:.0f}°C' if x > 0 else 'GPU Temperature: N/A'
        )

        ui.label().classes('text-sm text-white').bind_text_from(
            self.tool.context.ollama_monitor, 'active_models',
            self.tool.context.ollama, 'active_models',
            backward=lambda x: f'Active Models: {len(x)}'
        )
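A brief sketch of how the renamed context is wired and consumed. The monitor constructors and the set_tool_context() call are assumptions based on the imports in src/main.py; only the attribute names system, gpu, and ollama come directly from this diff.

from utils import SystemMonitor, GPUMonitor, OllamaMonitor
from tools.base_tool import ToolContext, set_tool_context

# Hypothetical wiring; constructor arguments for the monitors may differ in the real project.
context = ToolContext(
    system_monitor=SystemMonitor(),
    gpu_monitor=GPUMonitor(),
    ollama_monitor=OllamaMonitor(),
)
set_tool_context(context)

# Pages then read the short attribute names introduced by this commit:
cpu = context.system.cpu_percent       # previously context.system_monitor.cpu_percent
temp = context.gpu.temperature         # previously context.gpu_monitor.temperature
models = context.ollama.active_models  # previously context.ollama_monitor.active_models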
src/tools/simple_chat/__init__.py (new file, empty)
src/tools/simple_chat/tool.py (new file, 230 lines)
@@ -0,0 +1,230 @@
from typing import Dict, Callable, Awaitable
from nicegui import ui, binding, app
from tools.base_tool import BaseTool, BasePage
from utils import ollama
from typing import Literal, List, Optional, Any
from datetime import datetime
from dataclasses import dataclass
from niceguiasyncelement import AsyncColumn
import json


class SimpleChatTool(BaseTool):
    @property
    def name(self) -> str:
        return "Simple Chat"

    @property
    def description(self) -> str:
        return "Simple Chat Tool example"

    @property
    def icon(self) -> str:
        return "chat"

    @property
    def enabled(self) -> bool:
        """Enable/disable this tool (set to False to hide from menu and disable routes)"""
        return True  # Set to False to disable this tool

    @property
    def routes(self) -> Dict[str, Callable[[], Awaitable]]:
        """Define the routes for this tool"""
        return {
            '': lambda: MainPage.create(self)
        }


@binding.bindable_dataclass
class LLMMessage():
    name: str
    role: Literal['system', 'user', 'assistant']
    content: str
    stamp: datetime


class ChatMessageComponent(LLMMessage, ui.column):
    """Custom chat message component that supports data binding"""

    def __init__(self, role: Literal['user', 'assistant'], name: str, content: str, timestamp: Optional[str] = None):
        # Initialize LLMMessage
        LLMMessage.__init__(self, name=name, role=role, content=content, stamp=datetime.now())
        # Initialize ui.column
        ui.column.__init__(self)

        self.timestamp = timestamp or datetime.now().strftime('%H:%M:%S')
        self.is_user = role == 'user'

        self.classes('w-full mb-2')

        with self:
            # Message container with proper alignment
            container_classes = 'w-full flex'
            if self.is_user:
                container_classes += ' justify-end'
            else:
                container_classes += ' justify-start'

            with ui.row().classes(container_classes):
                # Message bubble
                bubble_classes = 'max-w-xs lg:max-w-md px-4 py-2 rounded-lg'
                if self.is_user:
                    bubble_classes += ' bg-cyan-600 text-white ml-auto'
                else:
                    bubble_classes += ' bg-gray-700 text-white mr-auto'

                with ui.column().classes(bubble_classes):
                    # Message content - bind to the content property
                    ui.markdown().classes('text-sm whitespace-pre-wrap').bind_content_from(self, 'content')

                    # Timestamp and role
                    with ui.row().classes('items-center gap-2 mt-1'):
                        ui.label(self.role.title()).classes('text-xs opacity-75 font-medium')
                        ui.label(self.timestamp).classes('text-xs opacity-60')

    def to_dict(self) -> dict:
        return {
            'name': self.name,
            'role': self.role,
            'content': self.content,
            'stamp': self.stamp.isoformat(),
            'timestamp': self.timestamp
        }

    @classmethod
    def from_dict(cls, data: dict) -> 'ChatMessageComponent':
        message = cls(
            role=data['role'],
            name=data['name'],
            content=data['content'],
            timestamp=data.get('timestamp')
        )
        message.name = data['name']
        message.stamp = datetime.fromisoformat(data['stamp'])
        return message


class MainPage(BasePage):
    """Main page of the example tool"""

    chat_container: ui.scroll_area
    messages_container: ui.column
    model_selector: ui.select
    chat_input: ui.input
    history: List[ChatMessageComponent]

    is_responding = binding.BindableProperty()

    auto_scroll: bool
    auto_scroll_timer: ui.timer

    async def content(self):
        self.history = []

        self.auto_scroll = True
        self.is_responding = False

        model_options = [model['name'] for model in await ollama.available_models()]

        with ui.row().classes('w-full'):
            ui.label('Simple Chat').classes('text-2xl font-bold text-white mb-4')
            ui.space()
            ui.button('Clear Chat', on_click=self.clear_history)
            self.model_selector = ui.select(model_options, label='Model', value=model_options[0]).props('outlined dense')
            self.model_selector.bind_value(app.storage.user, f'{self.tool.name}_selected_model')

        # Main chat layout - full width and height
        with ui.column().classes('w-full gap-4 h-full'):
            # Chat messages area - takes all available space
            with ui.card().classes('w-full flex-1 p-0'):
                self.chat_container = ui.scroll_area(on_scroll=lambda e: self.on_scroll_event(e.vertical_percentage)).classes('w-full').style('height: 100%; min-height: 500px')
                self.chat_container.on('wheel', self.wheel_callback, ['deltaY'])
                with self.chat_container:
                    self.messages_container = ui.column().classes('w-full p-4 gap-2')  # Container for messages

            # Input area at the bottom - fixed height
            with ui.card().classes('w-full p-4').style('flex-shrink: 0'):
                with ui.row().classes('w-full gap-2 items-center'):
                    self.chat_input = ui.input(placeholder='Type your message...').classes('flex-1').props('outlined dense')
                    self.chat_input.bind_enabled_from(self, 'is_responding', backward=lambda x: not x)
                    self.chat_input.on('keydown.enter', self.send_message)
                    ui.button(icon='send', on_click=self.send_message).props('color=primary').bind_enabled_from(self, 'is_responding', backward=lambda x: not x)

        # Add example messages
        await self.load_chat_history()

        self.auto_scroll_timer = ui.timer(0.1, lambda: self.chat_container.scroll_to(percent=1))

    async def add_message(self, role: Literal['user', 'assistant'], name: str, content: str):
        with self.messages_container:
            message_component = ChatMessageComponent(role, name, content)

        self.history.append(message_component)

    async def send_message(self):
        """Send a user message from the input field"""
        if self.chat_input.value and self.chat_input.value.strip():
            user_message = self.chat_input.value.strip()
            self.chat_input.value = ''  # Clear the input
            await self.add_message('user', 'User', user_message)

            # create data dict
            self.is_responding = True
            data = {
                'model': self.model_selector.value,
                'messages': []
            }
            for mes in self.history:
                data['messages'].append({'role': mes.role, 'content': mes.content})

            # create new empty message object for the response
            await self.add_message('assistant', 'Assistant', '')

            # generate streaming response
            try:
                async for chunk in ollama.stream_chat(data):
                    if chunk.strip():
                        # Parse the JSON chunk and extract content
                        try:
                            chunk_data = json.loads(chunk)
                            if 'message' in chunk_data and 'content' in chunk_data['message']:
                                content = chunk_data['message']['content']
                                self.history[-1].content += content
                        except json.JSONDecodeError:
                            pass  # Skip malformed chunks
            except Exception as e:
                ui.notify(f'Error: {str(e)}', type='negative')
            finally:
                await self.save_chat_history()
                self.is_responding = False

    async def load_chat_history(self):
        if f'{self.tool.name}_history' in app.storage.user:
            for mes in app.storage.user[f'{self.tool.name}_history']:

                with self.messages_container:
                    message_component = ChatMessageComponent.from_dict(mes)

                self.history.append(message_component)

    async def save_chat_history(self):
        app.storage.user[f'{self.tool.name}_history'] = []
        for mes in self.history:
            app.storage.user[f'{self.tool.name}_history'].append(mes.to_dict())

    async def clear_history(self):
        app.storage.user[f'{self.tool.name}_history'] = []
        self.history = []
        self.messages_container.clear()
        ...

    def wheel_callback(self, event_data):
        delta_y = event_data.args['deltaY']
        if delta_y < 0:
            if self.auto_scroll_timer.active:
                self.auto_scroll_timer.deactivate()

    def on_scroll_event(self, vertical_percentage):
        if vertical_percentage == 1:
            if not self.auto_scroll_timer.active:
                self.auto_scroll_timer.activate()
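The streaming loop in send_message assumes Ollama's line-delimited JSON chat format, where each chunk carries a partial message.content. A standalone sketch of that parsing step; the sample chunk strings are illustrative, and the project-specific ollama.stream_chat() wrapper is assumed to yield such lines.

import json

# Illustrative chunks in the shape produced by Ollama's /api/chat streaming endpoint.
chunks = [
    '{"message": {"role": "assistant", "content": "Hel"}, "done": false}',
    '{"message": {"role": "assistant", "content": "lo!"}, "done": true}',
    'not-json',  # malformed chunks are skipped, as in send_message
]

reply = ''
for chunk in chunks:
    if not chunk.strip():
        continue
    try:
        data = json.loads(chunk)
        if 'message' in data and 'content' in data['message']:
            reply += data['message']['content']
    except json.JSONDecodeError:
        pass

print(reply)  # "Hello!"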
src/tools/simple_chat_pydantic/__init__.py (new file, empty)
src/tools/simple_chat_pydantic/auto_scroll_area.py (new file, 72 lines)
@@ -0,0 +1,72 @@
from typing import Optional
from nicegui import ui


class AutoScrollArea(ui.scroll_area):
    """A scroll area that automatically scrolls to bottom when new content is added

    Features:
    - Auto-scrolls to bottom when at bottom and new content arrives
    - Stops auto-scroll when user scrolls up manually
    - Resumes auto-scroll when user scrolls back to bottom
    """

    _auto_scroll_enabled: bool = True
    _auto_scroll_timer: Optional[ui.timer] = None

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)

        # Set up scroll monitoring
        self.on_scroll(self._handle_scroll_event)
        self.on('wheel', self._handle_wheel, ['deltaY'])

        # Create timer for auto-scrolling
        self._auto_scroll_timer = ui.timer(0.1, lambda: self.scroll_to(percent=1))
        self._auto_scroll_timer.activate()

    def _scroll_event_test(self, e):
        print(e.vertical_percentage)

    def _handle_scroll_event(self, event_data):
        """Handle scroll events to detect when user is at bottom"""
        if not self._auto_scroll_timer:
            print('no timer instantiated.')
            return

        # If scrolled to bottom (100%), enable auto-scroll
        if event_data.vertical_percentage > 0.99:  # Using 0.99 for some tolerance
            if not self._auto_scroll_timer.active:
                self._auto_scroll_timer.activate()

    def _handle_wheel(self, event_data):
        """Handle mouse wheel events to detect manual scrolling"""
        delta_y = event_data.args['deltaY']
        if not self._auto_scroll_timer:
            print('no timer instantiated.')
            return
        # If scrolling up (negative delta), disable auto-scroll
        if delta_y < 0:
            if self._auto_scroll_timer.active:
                self._auto_scroll_timer.deactivate()

    def enable_auto_scroll(self):
        """Manually enable auto-scrolling"""
        if self._auto_scroll_timer:
            if not self._auto_scroll_timer.active:
                self._auto_scroll_timer.activate()

    def disable_auto_scroll(self):
        """Manually disable auto-scrolling"""
        if self._auto_scroll_timer:
            if self._auto_scroll_timer.active:
                self._auto_scroll_timer.deactivate()

    def cleanup(self):
        """Clean up timer when component is destroyed"""
        if self._auto_scroll_timer:
            self._auto_scroll_timer.deactivate()
            self._auto_scroll_timer = None
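Usage mirrors the pydantic-based chat tool further down: AutoScrollArea is a drop-in replacement for ui.scroll_area with the message column placed inside it. A minimal sketch; the import path assumes src is on the import path, as it is for the other tool modules in this commit, and the layout classes are illustrative.

from nicegui import ui
from tools.simple_chat_pydantic.auto_scroll_area import AutoScrollArea  # assumed import path

# Inside a NiceGUI page:
with AutoScrollArea().classes('w-full').style('height: 100%; min-height: 500px'):
    messages = ui.column().classes('w-full gap-2')  # append chat messages here

# Appending children to `messages` keeps the view pinned to the bottom until the user
# wheels upward, which deactivates the 0.1 s scroll timer; scrolling back down reactivates it.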
src/tools/simple_chat_pydantic/chat_message.py (new file, 167 lines)
@@ -0,0 +1,167 @@
from nicegui import ui, binding
from typing import Literal, Optional
from datetime import datetime
from pydantic_ai.messages import ModelMessage, ModelRequest, ModelResponse, SystemPromptPart, UserPromptPart, TextPart


class ChatMessageComponent(ui.column):
    """Custom chat message component that supports data binding"""

    content = binding.BindableProperty()
    model_message: Optional[ModelMessage] = None

    message_bubble: ui.column

    # only used when model_message is None
    _timestamp: Optional[datetime] = None
    _role: Optional[Literal['user', 'assistant']] = None

    def __init__(self, role: Literal['user', 'assistant'], content: str = '', timestamp: Optional[datetime] = None):
        ui.column.__init__(self)

        self.content = content
        self.model_message = None

        # Store role and timestamp for UI creation
        self._role = role
        self._timestamp = timestamp or datetime.now()

        self.classes('w-full mb-2')

        # hide self if content is empty.
        # self.bind_visibility_from(self, 'content', backward=lambda x: len(x) > 0 if x else False)

        self._create_ui()

    def set_model(self, message: ModelMessage):
        """Update this component with data from a ModelMessage

        This is useful for updating a streaming message once the full model response is available.
        """
        # Extract and update content
        new_content = self._extract_content_from_model(message)
        if new_content:
            self.content = new_content

        # Store the model message
        self.model_message = message

    def update_content(self, new_content: str):
        """Update the content of this message

        This is useful for streaming responses where content is updated incrementally.
        """
        self.content = new_content

    def _create_ui(self):
        """Create the UI elements for the message"""
        with self:
            # Main message container with hover effect
            with ui.column().classes('w-full group gap-0'):
                # Header row with avatar, role, and timestamp
                with ui.row().classes('items-center gap-2 mb-2'):
                    # Avatar/Role indicator
                    if self._role == 'user':
                        ui.icon('person', size='sm').classes('text-cyan-500')
                    else:
                        ui.icon('smart_toy', size='sm').classes('text-purple-500')

                    # Role label
                    ui.label(self._role.capitalize() if isinstance(self._role, str) else '').classes('text-sm font-medium text-gray-300')

                    # Timestamp
                    ui.label().classes('text-xs text-gray-500')\
                        .bind_text_from(self, '_timestamp', backward=lambda t: t.strftime('%-d %b %Y - %H:%M:%S'))

                # Message bubble with improved styling
                if self._role == 'user':
                    bubble_classes = 'bg-gradient-to-r from-cyan-900/50 to-cyan-800/30 border border-cyan-700/50'
                    text_color = 'text-cyan-50'
                else:
                    bubble_classes = 'bg-gradient-to-r from-gray-800/50 to-gray-700/30 border border-gray-600/50'
                    text_color = 'text-gray-100'

                with ui.column().classes(f'rounded-xl px-4 py-3 {bubble_classes} backdrop-blur-sm shadow-lg w-fit') as self.message_bubble:
                    # Message content with better typography
                    ui.markdown().classes(f'text-sm {text_color} leading-relaxed').bind_content_from(self, 'content')

                # Action buttons - visible on hover
                with ui.row().classes('mt-2 opacity-0 group-hover:opacity-100 transition-opacity duration-200 gap-1'):
                    # Copy button
                    copy_btn = ui.button(icon='content_copy', on_click=lambda: None).props('flat size=sm color=gray')
                    copy_btn.classes('!min-w-0 !px-2 !py-1')
                    copy_btn.tooltip('Copy')

                    # Edit button
                    edit_btn = ui.button(icon='edit', on_click=lambda: None).props('flat size=sm color=gray')
                    edit_btn.classes('!min-w-0 !px-2 !py-1')
                    edit_btn.tooltip('Edit')

                    # Delete button
                    delete_btn = ui.button(icon='delete', on_click=lambda: None).props('flat size=sm color=gray')
                    delete_btn.classes('!min-w-0 !px-2 !py-1')
                    delete_btn.tooltip('Delete')

                    # Regenerate button (only for assistant messages)
                    if self._role == 'assistant':
                        regen_btn = ui.button(icon='refresh', on_click=lambda: None).props('flat size=sm color=gray')
                        regen_btn.classes('!min-w-0 !px-2 !py-1')
                        regen_btn.tooltip('Regenerate')

    @staticmethod
    def _extract_content_from_model(message: ModelMessage) -> str:
        """Extract text content from a ModelMessage"""
        content_parts = []
        for part in message.parts:
            if isinstance(part, (TextPart, SystemPromptPart, UserPromptPart)):
                content_parts.append(part.content)
        return '\n'.join(content_parts) if content_parts else ''

    @staticmethod
    def _extract_timestamp_from_model(message: ModelMessage) -> datetime:
        """Extract the timestamp from a ModelMessage

        Raises ValueError if no timestamp is found.
        """
        if isinstance(message, ModelResponse):
            # ModelResponse has a timestamp attribute directly
            if hasattr(message, 'timestamp') and message.timestamp:
                return message.timestamp  # .strftime('%-d %b %Y - %H:%M:%S')
        elif isinstance(message, ModelRequest):
            # ModelRequest: look for timestamp in UserPromptPart
            for part in message.parts:
                if isinstance(part, UserPromptPart) and hasattr(part, 'timestamp') and part.timestamp:
                    return part.timestamp  # .strftime('%-d %b %Y - %H:%M:%S')

        raise ValueError

    @classmethod
    def from_model(cls, message: ModelMessage) -> 'ChatMessageComponent':
        """Create a ChatMessageComponent from a ModelMessage instance"""
        # Determine role from message type
        if isinstance(message, ModelRequest):
            role: Literal['user', 'assistant'] = 'user'
        else:  # ModelResponse
            role = 'assistant'

        # Extract text content from message parts
        content = cls._extract_content_from_model(message)

        # Extract the timestamp from the ModelMessage
        timestamp = cls._extract_timestamp_from_model(message)

        # Create instance
        instance = cls(
            role=role,
            content=content,
            timestamp=timestamp
        )
        instance.model_message = message

        return instance

    @classmethod
    def from_values(cls, role: Literal['user', 'assistant'], content: str = '', timestamp: Optional[datetime] = None) -> 'ChatMessageComponent':
        """Create a ChatMessageComponent from individual values"""
        return cls(role=role, content=content, timestamp=timestamp)
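The intended lifecycle for streamed responses, as exercised by the tool module below: create an empty assistant component, push partial text through the bindable content property, then attach the final ModelMessage. A condensed sketch, assuming it runs inside a NiceGUI container and that a final pydantic-ai response object is available.

# Inside a NiceGUI container, e.g. `with messages_container:`
placeholder = ChatMessageComponent.from_values('assistant')

for partial_text in ('Hel', 'Hello', 'Hello, world'):
    placeholder.update_content(partial_text)  # the bound ui.markdown re-renders on each assignment

# Once streaming finishes, store the full pydantic-ai message on the component:
# placeholder.set_model(final_model_response)  # final_model_response: ModelResponse from the agent run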
src/tools/simple_chat_pydantic/tool.py (new file, 136 lines)
@@ -0,0 +1,136 @@
from typing import Dict, Callable, Awaitable, TypedDict
from nicegui import ui, binding, app
from tools.base_tool import BaseTool, BasePage
from utils import ollama
from pydantic_ai import Agent
from pydantic_ai.messages import ModelMessage, ModelMessagesTypeAdapter
from pydantic_ai.models.openai import OpenAIChatModel
from pydantic_ai.providers.ollama import OllamaProvider

from .chat_message import ChatMessageComponent
from .auto_scroll_area import AutoScrollArea


class SimpleChatTool(BaseTool):
    @property
    def name(self) -> str:
        return "Chat Pydantic AI"

    @property
    def description(self) -> str:
        return "Simple Chat Tool example using Pydantic AI"

    @property
    def icon(self) -> str:
        return "chat"

    @property
    def enabled(self) -> bool:
        """Enable/disable this tool (set to False to hide from menu and disable routes)"""
        return True  # Set to False to disable this tool

    @property
    def routes(self) -> Dict[str, Callable[[], Awaitable]]:
        """Define the routes for this tool"""
        return {
            '': lambda: MainPage.create(self)
        }


class MainPage(BasePage):
    """Main page of the example tool"""

    messages_container: ui.column
    user_input = binding.BindableProperty()

    is_responding = binding.BindableProperty()

    model: OpenAIChatModel
    agent: Agent
    agent_history: list[ModelMessage] = []

    async def content(self):

        self.is_responding = False

        model_options = [model['name'] for model in await ollama.available_models()]

        with ui.row().classes('w-full'):
            ui.label('Simple Chat').classes('text-2xl font-bold text-white mb-4')
            ui.space()
            ui.button('Clear Chat', on_click=self.clear_history)
            ui.select(model_options, label='Model', value=model_options[0]).props('outlined dense')\
                .bind_value(app.storage.user, f'{self.tool.name}_selected_model')

        # Main chat layout - full width and height
        with ui.column().classes('w-full gap-4 h-full'):
            # Chat messages area - takes all available space
            with ui.column().classes('w-full flex-1 p-0'):
                with AutoScrollArea().classes('w-full p-0').style('height: 100%; min-height: 500px'):
                    self.messages_container = ui.column().classes('w-full p-0 gap-2')  # Container for messages

            # Input area at the bottom - fixed height
            with ui.column().classes('w-full p-0 pb-2').style('flex-shrink: 0'):
                with ui.row().classes('w-full gap-2 items-center'):
                    ui.input(placeholder='Type your message...').classes('flex-1').props('outlined dense')\
                        .bind_value(self, 'user_input')\
                        .on('keydown.enter', self.send_message)
                    ui.button(icon='send', on_click=self.send_message).props('color=primary').bind_enabled_from(self, 'is_responding', backward=lambda x: not x)

        await self.load_history()

    async def send_message(self):
        if self.is_responding:
            return

        if not app.storage.user[f'{self.tool.name}_selected_model']:
            ui.notify('Select a model first.')
            return

        user_message = self.user_input.strip()
        if not user_message:
            ui.notify('Please enter a message first.', type='info')
            return

        # Clear input and set responding
        self.user_input = ''
        self.is_responding = True

        # generate streaming response
        ollama_model = OpenAIChatModel(model_name=app.storage.user[f'{self.tool.name}_selected_model'], provider=OllamaProvider())
        agent = Agent(model=ollama_model)

        async with agent.run_stream(user_message, message_history=self.agent_history) as result:
            with self.messages_container:
                # Create component for the user request
                ChatMessageComponent.from_model(result.new_messages()[0])
                # Create an empty component for the response
                response_component = ChatMessageComponent.from_values('assistant')

            async for chunk in result.stream_output():
                response_component.content = chunk
            response_component.set_model(result.new_messages()[1])
            self.agent_history += result.new_messages()

        # save history
        app.storage.user[f'{self.tool.baseroute}_history'] = result.all_messages_json().decode('utf-8')
        self.is_responding = False

    async def load_history(self):

        # load history from app user storage
        history_json_str = app.storage.user.get(f'{self.tool.baseroute}_history')
        if history_json_str:
            self.agent_history = ModelMessagesTypeAdapter.validate_json(history_json_str)
        else:
            self.agent_history = []

        # create ChatMessageComponents from history
        with self.messages_container:
            for msg in self.agent_history:
                ChatMessageComponent.from_model(msg)

    async def clear_history(self):
        self.agent_history = []
        self.messages_container.clear()
        app.storage.user[f'{self.tool.baseroute}_history'] = []
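The history round-trip relies on pydantic-ai's own serialization: result.all_messages_json() yields a JSON byte string, and ModelMessagesTypeAdapter.validate_json() turns it back into ModelMessage objects. A minimal sketch of that round-trip outside NiceGUI; the model identifier and prompts are placeholders, and running it requires a reachable model with credentials.

from pydantic_ai import Agent
from pydantic_ai.messages import ModelMessagesTypeAdapter

agent = Agent('openai:gpt-4o')  # placeholder model identifier

result = agent.run_sync('Hello!')                           # calls the model
history_json = result.all_messages_json().decode('utf-8')   # JSON string, safe to stash in app.storage.user

# ...later, e.g. on page reload:
restored = ModelMessagesTypeAdapter.validate_json(history_json)
followup = agent.run_sync('And again?', message_history=restored)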