diff --git a/src/tools/base_tool.py b/src/tools/base_tool.py
index d7768a7..f3d60ce 100644
--- a/src/tools/base_tool.py
+++ b/src/tools/base_tool.py
@@ -52,6 +52,10 @@ class BaseTool(ABC):
         # Convert to route: /example-tool
         return f"/{package_name.replace('_', '-')}"
 
+    @property
+    def storage(self) -> dict:
+        return app.storage.user.setdefault(f'{self.baseroute}_storage', {})
+
     @property
     @abstractmethod
     def name(self) -> str:
diff --git a/src/tools/simple_chat_pydantic/tool.py b/src/tools/simple_chat_pydantic/tool.py
index ff68644..1f53faf 100644
--- a/src/tools/simple_chat_pydantic/tool.py
+++ b/src/tools/simple_chat_pydantic/tool.py
@@ -1,5 +1,5 @@
 from typing import Dict, Callable, Awaitable, TypedDict
-from nicegui import ui, binding, app
+from nicegui import ui, binding
 from tools.base_tool import BaseTool, BasePage
 from utils import ollama
 from pydantic_ai import Agent
@@ -60,7 +60,7 @@ class MainPage(BasePage):
             ui.space()
             ui.button('Clear Chat', on_click=self.clear_history)
             ui.select(model_options, label='Model', value=model_options[0]).props('outlined dense')\
-                .bind_value(app.storage.user, f'{self.tool.name}_selected_model')
+                .bind_value(self.tool.storage, 'selected_model')
 
         # Main chat layout - full width and height
         with ui.column().classes('w-full gap-4 h-full'):
@@ -83,9 +83,9 @@
         if self.is_responding:
             return
 
-        if not app.storage.user[f'{self.tool.name}_selected_model']:
+        if not self.tool.storage.get('selected_model'):
             ui.notify('Select a model first.')
             return
 
         user_message = self.user_input.strip()
         if not user_message:
@@ -97,7 +97,7 @@
         self.is_responding = True
 
         # generate streaming response
-        ollama_model = OpenAIChatModel(model_name=app.storage.user[f'{self.tool.name}_selected_model'], provider=OllamaProvider())
+        ollama_model = OpenAIChatModel(model_name=self.tool.storage['selected_model'], provider=OllamaProvider())
         agent = Agent(model=ollama_model)
 
         async with agent.run_stream(user_message, message_history=self.agent_history) as result:
@@ -113,13 +113,13 @@
         self.agent_history += result.new_messages()
 
         # save history
-        app.storage.user[f'{self.tool.baseroute}_history'] = result.all_messages_json().decode('utf-8')
+        self.tool.storage['history'] = result.all_messages_json().decode('utf-8')
 
         self.is_responding = False
 
     async def load_history(self):
         # load history from app user storage
-        history_json_str = app.storage.user.get(f'{self.tool.baseroute}_history')
+        history_json_str = self.tool.storage.get('history')
         if history_json_str:
             self.agent_history = ModelMessagesTypeAdapter.validate_json(history_json_str)
         else:
@@ -133,4 +133,4 @@
     async def clear_history(self):
         self.agent_history = []
         self.messages_container.clear()
-        app.storage.user[f'{self.tool.baseroute}_history'] = []
+        self.tool.storage['history'] = []