feature: tool storage property added

2025-09-23 05:58:17 +02:00
parent e3a28f1589
commit 3785edc35a
2 changed files with 14 additions and 7 deletions

@@ -52,6 +52,10 @@ class BaseTool(ABC):
         # Convert to route: /example-tool
         return f"/{package_name.replace('_', '-')}"

+    @property
+    def storage(self) -> dict:
+        return app.storage.user.setdefault(f'{self.baseroute}_storage', {})
+
     @property
     @abstractmethod
     def name(self) -> str:
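
The new storage property gives every tool its own per-user dict inside NiceGUI's app.storage.user, keyed by the tool's baseroute, so callers no longer build prefixed keys by hand. A minimal standalone sketch of the same pattern (DemoTool, the '/demo-tool' route and the model names are placeholders, not part of this repo; app.storage.user only works when ui.run receives a storage_secret):

from nicegui import app, ui


class DemoTool:
    # Stand-in for BaseTool; only the storage property is relevant here.
    baseroute = '/demo-tool'  # placeholder route

    @property
    def storage(self) -> dict:
        # One dict per tool and per browser user, created on first access.
        return app.storage.user.setdefault(f'{self.baseroute}_storage', {})


@ui.page('/demo-tool')
def demo_page() -> None:
    tool = DemoTool()
    # Values bound into the dict survive page reloads because app.storage.user persists them.
    ui.select(['llama3', 'mistral'], label='Model', value='llama3') \
        .bind_value(tool.storage, 'selected_model')


ui.run(storage_secret='change-me')  # storage_secret is required for app.storage.user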

@@ -1,5 +1,5 @@
 from typing import Dict, Callable, Awaitable, TypedDict
-from nicegui import ui, binding, app
+from nicegui import ui, binding
 from tools.base_tool import BaseTool, BasePage
 from utils import ollama
 from pydantic_ai import Agent
@@ -60,7 +60,7 @@ class MainPage(BasePage):
             ui.space()
             ui.button('Clear Chat', on_click=self.clear_history)
             ui.select(model_options, label='Model', value=model_options[0]).props('outlined dense')\
-                .bind_value(app.storage.user, f'{self.tool.name}_selected_model')
+                .bind_value(self.tool.storage, f'selected_model')

         # Main chat layout - full width and height
         with ui.column().classes('w-full gap-4 h-full'):
@@ -83,9 +83,12 @@ class MainPage(BasePage):
         if self.is_responding:
             return
-        if not app.storage.user[f'{self.tool.name}_selected_model']:
+        if not self.tool.storage['selected_model']:
             ui.notify('Select a model first.')
             return
+        """if not app.storage.user[f'{self.tool.name}_selected_model']:
+            ui.notify('Select a model first.')
+            return"""

         user_message = self.user_input.strip()
         if not user_message:
@@ -97,7 +100,7 @@ class MainPage(BasePage):
         self.is_responding = True

         # generate streaming response
-        ollama_model = OpenAIChatModel(model_name=app.storage.user[f'{self.tool.name}_selected_model'], provider=OllamaProvider())
+        ollama_model = OpenAIChatModel(model_name=self.tool.storage['selected_model'], provider=OllamaProvider())
         agent = Agent(model=ollama_model)

         async with agent.run_stream(user_message, message_history=self.agent_history) as result:
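
The hunk above feeds the selected model into a streaming pydantic_ai agent. A standalone sketch of the same call pattern (the prompt and model name are placeholders; the import paths assume a recent pydantic_ai layout, and OllamaProvider() assumes a local Ollama server with default settings):

import asyncio

from pydantic_ai import Agent
from pydantic_ai.models.openai import OpenAIChatModel
from pydantic_ai.providers.ollama import OllamaProvider


async def main() -> None:
    # Ollama speaks the OpenAI chat API, so it is wired in through OpenAIChatModel.
    ollama_model = OpenAIChatModel(model_name='llama3', provider=OllamaProvider())
    agent = Agent(model=ollama_model)
    async with agent.run_stream('Say hello.') as result:
        # stream_text() yields progressively longer snapshots of the reply.
        async for text in result.stream_text():
            print(text)


asyncio.run(main())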
@@ -113,13 +116,13 @@ class MainPage(BasePage):
             self.agent_history += result.new_messages()

             # save history
-            app.storage.user[f'{self.tool.baseroute}_history'] = result.all_messages_json().decode('utf-8')
+            self.tool.storage['history'] = result.all_messages_json().decode('utf-8')

         self.is_responding = False

     async def load_history(self):
         # load history from app user storage
-        history_json_str = app.storage.user.get(f'{self.tool.baseroute}_history')
+        history_json_str = self.tool.storage['history']
         if history_json_str:
             self.agent_history = ModelMessagesTypeAdapter.validate_json(history_json_str)
         else:
@@ -133,4 +136,4 @@ class MainPage(BasePage):
     async def clear_history(self):
         self.agent_history = []
         self.messages_container.clear()
-        app.storage.user[f'{self.tool.baseroute}_history'] = []
+        self.tool.storage['history'] = []
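
The history values written above round-trip through pydantic_ai's JSON serialization. A hedged sketch of that flow with a plain dict standing in for self.tool.storage (ModelMessagesTypeAdapter comes from pydantic_ai.messages; the .get lookup is an addition here that avoids a KeyError when no history has been saved yet):

from pydantic_ai.messages import ModelMessagesTypeAdapter


def save_history(storage: dict, result) -> None:
    # all_messages_json() returns bytes; the diff stores it as a UTF-8 string.
    storage['history'] = result.all_messages_json().decode('utf-8')


def load_history(storage: dict) -> list:
    history_json_str = storage.get('history')  # .get sidesteps a missing 'history' key
    if history_json_str:
        return ModelMessagesTypeAdapter.validate_json(history_json_str)
    return []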