diff --git a/src/components/ollama_downloader.py b/src/components/ollama_downloader.py
index d293899..d0da025 100644
--- a/src/components/ollama_downloader.py
+++ b/src/components/ollama_downloader.py
@@ -1,10 +1,11 @@
-from nicegui import ui
+from nicegui import ui, binding
from niceguiasyncelement import AsyncCard
from pathlib import Path
from utils import ollama
class OllamaDownloaderComponent(AsyncCard):
+
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.is_downloading = False
@@ -19,8 +20,7 @@ class OllamaDownloaderComponent(AsyncCard):
model_input = ui.input(
'Model ID',
- placeholder='e.g., TheBloke/Llama-2-7B-GGUF',
- value='qwen2.5:0.5b'
+ placeholder='e.g., qwen2.5:0.5b'
).props('outlined dense').classes('w-full')
with ui.row().classes('items-center gap-2'):
ui.link('Ollama Library', target='https://ollama.com/library/', new_tab=True)
@@ -37,6 +37,9 @@ class OllamaDownloaderComponent(AsyncCard):
).props('color=primary').classes('w-full').bind_enabled_from(self, 'model_id', backward=lambda x: bool(x) and not self.is_downloading)
async def download_model(self, model):
+ if model.startswith('ollama run '):
+ model = model[11:]
+
self.download_btn.set_enabled(False)
try:
async for chunk in ollama.download_model(model):
diff --git a/src/components/ollama_model_creation.py b/src/components/ollama_model_edit.py
similarity index 68%
rename from src/components/ollama_model_creation.py
rename to src/components/ollama_model_edit.py
index b5c7c0c..084ca7d 100644
--- a/src/components/ollama_model_creation.py
+++ b/src/components/ollama_model_edit.py
@@ -3,9 +3,13 @@ from niceguiasyncelement import AsyncCard
from pathlib import Path
from utils import ollama
from typing import Optional
+from pprint import pprint
-class OllamaModelCreationComponent(AsyncCard):
+class OllamaModelEditComponent(AsyncCard):
+ model_info: dict
+
+ model_name_original: str
model_name = binding.BindableProperty()
model_from = binding.BindableProperty()
system_message = binding.BindableProperty()
@@ -68,127 +72,133 @@ class OllamaModelCreationComponent(AsyncCard):
self.use_seed = False
self.use_stop = False
- async def build(self, existing_model_name: Optional[str] = None) -> None:
+ async def build(self, model_name: str, model_info: dict) -> None:
self.classes('w-full')
+ self.model_name_original = model_name
- # Load existing model data if provided
- if existing_model_name:
- await self.load_existing_model(existing_model_name)
+ model_parameters = ollama.model_parameters(model_info)
+        # print(model_parameters)  # NOTE(review): leftover debug output — commented out
+
+ # pprint(model_info)
+ # Always load the existing model data
+ self.model_name = model_name
+ # await self.load_existing_model(model_name)
with self:
- with ui.column().classes('w-full gap-4'):
- title = 'Edit Model' if existing_model_name else 'Create Model'
- ui.label(title).classes('text-xl font-bold')
+ with ui.column().classes('w-full gap-2'):
+ ui.label('Edit Model').classes('text-xl font-bold')
- # Basic fields
- model_name_default = existing_model_name if existing_model_name else 'my-custom-model:latest'
- base_model_default = '' if existing_model_name else 'llama3.2:3b'
-
- ui.input('Model Name', value=model_name_default).props('outlined dense').classes('w-full').bind_value(self, 'model_name')
- ui.input('Base Model', value=base_model_default).props('outlined dense').classes('w-full').bind_value(self, 'model_from')
+ # Basic fields - pre-filled with existing model data
+ ui.input('Model Name', value=model_name, on_change=lambda e: self._on_model_name_change(e.value)).props('outlined dense').classes('w-full').bind_value(self, 'model_name').tooltip('Keep the same name to update the existing model, or change it to create a new model')
+ # ui.label().props('outlined dense').classes('w-full').bind_text_from(self, 'model_from')
+ ui.input('From', value=model_info['details']['parent_model']).props('outlined dense').classes('w-full').bind_value(self, 'model_from').set_enabled(False)
# System message field (commonly used)
- ui.textarea('System Message', placeholder='You are a helpful assistant...').classes('w-full').props('autogrow outlined').bind_value(self, 'system_message')
+ ui.textarea('System Message', placeholder='You are a helpful assistant...', value=model_info['system'] if 'system' in model_info else '').classes('w-full').props('autogrow outlined').bind_value(self, 'system_message')
# Common Parameters section
ui.label('Common Parameters').classes('text-md font-medium mt-2 mb-3')
- # Temperature (always visible)
- with ui.row().classes('items-center gap-3 w-full mb-3'):
- ui.switch().bind_value(self, 'use_temperature')
- ui.label('Temperature').classes('min-w-fit')
- ui.slider(min=0.0, max=2.0, step=0.1).classes('flex-1').bind_value(self, 'temperature').bind_enabled_from(self, 'use_temperature')
- ui.label().bind_text_from(self, 'temperature', backward=lambda x: f'{x:.1f}').classes('text-xs text-gray-500 min-w-fit')
- ui.icon('info', size='sm').classes('text-gray-500 cursor-help').tooltip('The temperature of the model. Higher values (e.g., 1.2) make output more creative, lower values (e.g., 0.5) more focused. Default: 0.8')
-
+ # Temperature
+ self.build_float_component(label='Temperature', switch_binding='use_temperature', value_binding='temperature',
+ value_min=0.0, value_max=2.0, value_step=0.1,
+ value=model_parameters['temperature'] if 'temperature' in model_parameters else None,
+ value_default=0.8,
+ info='The temperature of the model. Higher values (e.g., 1.2) make output more creative, lower values (e.g., 0.5) more focused. Default: 0.8',
+ backward=lambda x: f'{x:.1f}')
# Context Length (always visible)
- with ui.row().classes('items-center gap-3 w-full mb-3'):
- ui.switch().bind_value(self, 'use_num_ctx')
- ui.label('Context Length').classes('min-w-fit')
- ui.number(value=4096, min=1, max=32768).classes('flex-1').bind_value(self, 'num_ctx').bind_enabled_from(self, 'use_num_ctx')
- ui.icon('info', size='sm').classes('text-gray-500 cursor-help').tooltip('Size of the context window used to generate the next token. Default: 4096')
+ model_context_length: Optional[int] = None # the context length the model supports
+ for model_info_key, model_info_value in model_info['model_info'].items():
+ if model_info_key.endswith('context_length'):
+ model_context_length = model_info_value
+
+            self.build_float_component('Context Length', 'use_num_ctx', 'num_ctx', 512, model_context_length or 32768, 1,
+ model_parameters['num_ctx'] if 'num_ctx' in model_parameters else None,
+ 4096,
+ 'Size of the context window used to generate the next token. Default: 4096')
# Max Tokens (always visible)
- with ui.row().classes('items-center gap-3 w-full mb-4'):
- ui.switch().bind_value(self, 'use_num_predict')
+ # TODO MISSING ?????
+ with ui.row().classes('items-center gap-2 w-full mb-1'):
+ ui.switch().bind_value(self, 'use_num_predict').props('dense')
ui.label('Max Tokens').classes('min-w-fit')
- ui.number(value=-1, min=-1, max=4096).classes('flex-1').bind_value(self, 'num_predict').bind_enabled_from(self, 'use_num_predict')
+ ui.number(value=-1, min=-1, max=4096).classes('flex-1').props('dense').bind_value(self, 'num_predict').bind_enabled_from(self, 'use_num_predict')
ui.icon('info', size='sm').classes('text-gray-500 cursor-help').tooltip('Maximum number of tokens to predict. -1 for infinite generation. Default: -1')
# Advanced Parameters section
ui.label('Advanced Parameters').classes('text-md font-medium mt-2 mb-3')
# Generation Parameters
- with ui.expansion('Generation', icon='tune').classes('w-full mb-2'):
- with ui.column().classes('w-full gap-3 pt-2'):
+ with ui.expansion('Generation', icon='tune', group='creation_group').classes('w-full mb-2'):
+ with ui.column().classes('w-full gap-1 pt-2'):
# Top K
- with ui.row().classes('items-center gap-3 w-full'):
- ui.switch().bind_value(self, 'use_top_k')
+ with ui.row().classes('items-center gap-2 w-full'):
+ ui.switch().bind_value(self, 'use_top_k').props('dense')
ui.label('Top K').classes('min-w-fit')
- ui.number(value=40, min=1, max=200).classes('flex-1').bind_value(self, 'top_k').bind_enabled_from(self, 'use_top_k')
+ ui.number(value=40, min=1, max=200).classes('flex-1').props('dense').bind_value(self, 'top_k').bind_enabled_from(self, 'use_top_k')
ui.icon('info', size='sm').classes('text-gray-500 cursor-help').tooltip('Reduces probability of generating nonsense. Higher values (e.g., 100) give more diverse answers, lower values (e.g., 10) are more conservative. Default: 40')
# Top P
- with ui.row().classes('items-center gap-3 w-full'):
- ui.switch().bind_value(self, 'use_top_p')
+ with ui.row().classes('items-center gap-2 w-full'):
+ ui.switch().bind_value(self, 'use_top_p').props('dense')
ui.label('Top P').classes('min-w-fit')
- ui.slider(min=0.0, max=1.0, step=0.05).classes('flex-1').bind_value(self, 'top_p').bind_enabled_from(self, 'use_top_p')
+ ui.slider(min=0.0, max=1.0, step=0.05).classes('flex-1').props('dense').bind_value(self, 'top_p').bind_enabled_from(self, 'use_top_p')
ui.label().bind_text_from(self, 'top_p', backward=lambda x: f'{x:.2f}').classes('text-xs text-gray-500 min-w-fit')
ui.icon('info', size='sm').classes('text-gray-500 cursor-help').tooltip('Works with top-k. Higher values (e.g., 0.95) lead to more diverse text, lower values (e.g., 0.5) generate more focused text. Default: 0.9')
# Min P
- with ui.row().classes('items-center gap-3 w-full'):
- ui.switch().bind_value(self, 'use_min_p')
+ with ui.row().classes('items-center gap-2 w-full'):
+ ui.switch().bind_value(self, 'use_min_p').props('dense')
ui.label('Min P').classes('min-w-fit')
- ui.slider(min=0.0, max=1.0, step=0.01).classes('flex-1').bind_value(self, 'min_p').bind_enabled_from(self, 'use_min_p')
+ ui.slider(min=0.0, max=1.0, step=0.01).classes('flex-1').props('dense').bind_value(self, 'min_p').bind_enabled_from(self, 'use_min_p')
ui.label().bind_text_from(self, 'min_p', backward=lambda x: f'{x:.2f}').classes('text-xs text-gray-500 min-w-fit')
ui.icon('info', size='sm').classes('text-gray-500 cursor-help').tooltip('Alternative to top_p. Minimum probability for a token relative to the most likely token. Default: 0.0')
# Repetition Parameters
- with ui.expansion('Repetition Control', icon='repeat').classes('w-full mb-2'):
- with ui.column().classes('w-full gap-3 pt-2'):
+ with ui.expansion('Repetition Control', icon='repeat', group='creation_group').classes('w-full mb-2'):
+ with ui.column().classes('w-full gap-1 pt-2'):
# Repeat Last N
- with ui.row().classes('items-center gap-3 w-full'):
- ui.switch().bind_value(self, 'use_repeat_last_n')
+ with ui.row().classes('items-center gap-2 w-full'):
+ ui.switch().bind_value(self, 'use_repeat_last_n').props('dense')
ui.label('Repeat Last N').classes('min-w-fit')
- ui.number(value=64, min=-1, max=512).classes('flex-1').bind_value(self, 'repeat_last_n').bind_enabled_from(self, 'use_repeat_last_n')
+ ui.number(value=64, min=-1, max=512).classes('flex-1').props('dense').bind_value(self, 'repeat_last_n').bind_enabled_from(self, 'use_repeat_last_n')
ui.icon('info', size='sm').classes('text-gray-500 cursor-help').tooltip('How far back the model looks to prevent repetition. 0=disabled, -1=num_ctx. Default: 64')
# Repeat Penalty
- with ui.row().classes('items-center gap-3 w-full'):
- ui.switch().bind_value(self, 'use_repeat_penalty')
+ with ui.row().classes('items-center gap-2 w-full'):
+ ui.switch().bind_value(self, 'use_repeat_penalty').props('dense')
ui.label('Repeat Penalty').classes('min-w-fit')
ui.slider(min=0.5, max=2.0, step=0.1).classes('flex-1').bind_value(self, 'repeat_penalty').bind_enabled_from(self, 'use_repeat_penalty')
- ui.label().bind_text_from(self, 'repeat_penalty', backward=lambda x: f'{x:.1f}').classes('text-xs text-gray-500 min-w-fit')
+ ui.label().bind_text_from(self, 'repeat_penalty', backward=lambda x: f'{x:.1f}').classes('text-xs text-gray-500 min-w-fit').props('dense')
ui.icon('info', size='sm').classes('text-gray-500 cursor-help').tooltip('How strongly to penalize repetitions. Higher values (e.g., 1.5) penalize more, lower values (e.g., 0.9) are more lenient. Default: 1.1')
# Control Parameters
- with ui.expansion('Control', icon='settings').classes('w-full mb-2'):
- with ui.column().classes('w-full gap-3 pt-2'):
+ with ui.expansion('Control', icon='settings', group='creation_group').classes('w-full mb-2'):
+ with ui.column().classes('w-full gap-1 pt-2'):
# Seed
- with ui.row().classes('items-center gap-3 w-full'):
- ui.switch().bind_value(self, 'use_seed')
+ with ui.row().classes('items-center gap-2 w-full'):
+ ui.switch().bind_value(self, 'use_seed').props('dense')
ui.label('Seed').classes('min-w-fit')
- ui.number(value=0, min=0, max=999999).classes('flex-1').bind_value(self, 'seed').bind_enabled_from(self, 'use_seed')
+ ui.number(value=0, min=0, max=999999).classes('flex-1').props('dense').bind_value(self, 'seed').bind_enabled_from(self, 'use_seed')
ui.icon('info', size='sm').classes('text-gray-500 cursor-help').tooltip('Random number seed for generation. Same seed produces same output for same prompt. Default: 0')
# Stop Sequences
- with ui.row().classes('items-center gap-3 w-full'):
- ui.switch().bind_value(self, 'use_stop')
+ with ui.row().classes('items-center gap-2 w-full'):
+ ui.switch().bind_value(self, 'use_stop').props('dense')
ui.label('Stop Sequence').classes('min-w-fit')
- ui.input(placeholder='AI assistant:').classes('flex-1').bind_value(self, 'stop').bind_enabled_from(self, 'use_stop')
+ ui.input(placeholder='AI assistant:').classes('flex-1').props('dense').bind_value(self, 'stop').bind_enabled_from(self, 'use_stop')
ui.icon('info', size='sm').classes('text-gray-500 cursor-help').tooltip('Text pattern where the model stops generating. Default: none')
# Advanced section (collapsible)
- with ui.expansion('Advanced Settings', icon='settings').classes('w-full').bind_value(self, 'show_advanced'):
+ with ui.expansion('Advanced Settings', icon='settings', group='creation_group').classes('w-full').bind_value(self, 'show_advanced'):
with ui.column().classes('w-full gap-4 pt-2'):
# Quantization
ui.select(['q4_K_M', 'q4_K_S', 'q8_0'],
- label='Quantization', clearable=True).props('outlined dense').classes('w-full').bind_value(self, 'quantize')
+ label='Quantization', clearable=True).props('outlined dense').classes('w-full').props('dense').bind_value(self, 'quantize')
# Template field
- ui.textarea('Template',
- placeholder='{{ if .System }}<|im_start|>system\n{{ .System }}<|im_end|>\n{{ end }}...').classes('w-full').props('autogrow outlined').bind_value(self, 'template')
+ template_placeholder = '{{ if .System }}<|im_start|>system\n{{ .System }}<|im_end|>\n{{ end }}...'
+                            ui.textarea('Template', placeholder=template_placeholder, value=model_info.get('template', '')).classes('w-full').props('autogrow outlined').props('dense').bind_value(self, 'template')
# Status and progress
with ui.row().classes('items-center gap-2'):
@@ -196,15 +206,36 @@ class OllamaModelCreationComponent(AsyncCard):
self.status_label = ui.label().bind_text_from(self, 'download_status')
ui.linear_progress(value=0, show_value=False).props('buffer=0.0 animation-speed=0').bind_value_from(self, 'download_progress')
- # Create button
- button_text = 'Update Model' if existing_model_name else 'Create Model'
- self.create_btn = ui.button(button_text, icon='add', on_click=self.create_model).props('color=primary').classes('w-full').bind_enabled_from(self, 'model_name', backward=lambda x: bool(x) and not self.is_downloading)
+ # Save button
+ self.create_btn = ui.button('Save Model', icon='save', on_click=self.create_model).props('color=primary').classes('w-full').bind_enabled_from(self, 'model_name', backward=lambda x: bool(x) and not self.is_downloading)
+
+ def build_float_component(self, label, switch_binding,
+ value_binding, value_min, value_max, value_step, value, value_default,
+ info, backward=None
+ ):
+ with ui.row().classes('items-center gap-2 w-full mb-1'):
+ ui.switch().bind_value(self, switch_binding).props('dense').set_value(value is not None)
+ ui.label(label).classes('min-w-fit')
+        ui.slider(min=value_min, max=value_max, step=value_step, value=value if value is not None else value_default).classes('flex-1').props('dense').bind_value(self, value_binding).bind_enabled_from(self, switch_binding)
+ # TODO backward
+ ui.label().bind_text_from(self, value_binding, backward=backward).classes('text-xs text-gray-500 min-w-fit')
+ ui.icon('info', size='sm').classes('text-gray-500 cursor-help').tooltip(info)
+
+ async def _on_model_name_change(self, value):
+        return  # NOTE(review): early return disables everything below — dead code; confirm intent or delete the body
+ if self.model_name != self.model_name_original:
+ self.model_from = self.model_name_original
+ else:
+ self.model_from = ''
+ if len(self.model_info['details']['parent_model']) > 0:
+ self.model_from = self.model_info['details']['parent_model']
async def load_existing_model(self, model_name):
"""Load existing model data and populate form fields"""
try:
- model_info = await ollama.model_info(model_name)
- modelfile = model_info.get('modelfile', '')
+ self.model_info = await ollama.model_info(model_name)
+ modelfile = self.model_info.get('modelfile', '')
+ await self._on_model_name_change(model_name)
# Parse the modelfile content to extract settings
for line in modelfile.split('\n'):
@@ -212,8 +243,6 @@ class OllamaModelCreationComponent(AsyncCard):
if not line:
continue
- if line.startswith('FROM '):
- self.model_from = line[5:].strip()
elif line.startswith('SYSTEM '):
# Extract system message (remove quotes)
system_msg = line[7:].strip()
@@ -343,7 +372,7 @@ class OllamaModelCreationComponent(AsyncCard):
# Success
self.download_status = 'success'
self.download_progress = 1.0
- ui.notify(f'Model "{self.model_name}" created successfully!', type='positive')
+ ui.notify(f'Model "{self.model_name}" saved successfully!', type='positive')
except Exception as e:
self.download_status = f'Error: {str(e)}'
diff --git a/src/pages/ollama_manager.py b/src/pages/ollama_manager.py
index 21293ef..8ccea14 100644
--- a/src/pages/ollama_manager.py
+++ b/src/pages/ollama_manager.py
@@ -4,7 +4,7 @@ from typing import Literal, List, Dict, Optional
from pprint import pprint
from niceguiasyncelement import AsyncColumn
from components.ollama_downloader import OllamaDownloaderComponent
-from components.ollama_model_creation import OllamaModelCreationComponent
+from components.ollama_model_edit import OllamaModelEditComponent
from components.ollama_quick_test import ModelQuickTestComponent
from components.model_info import ModelInfoComponent
@@ -48,7 +48,6 @@ class OllamaManagerPage(AsyncColumn):
with ui.row().classes('w-full items-center mb-4'):
ui.label('Installed Models').classes('text-h6 font-bold')
ui.space()
- ui.button('Create New Model', icon='create', on_click=self._create_model_dialog).props('color=primary')
ui.button('Pull New Model', icon='download', on_click=self._download_model_dialog).props('color=primary')
with ui.column().classes('w-full gap-2'):
@@ -57,7 +56,7 @@ class OllamaManagerPage(AsyncColumn):
async def _create_model_dialog(self):
with ui.dialog() as dialog:
- await OllamaModelCreationComponent.create()
+ await OllamaModelEditComponent.create()
await dialog
self.models_container.refresh()
@@ -97,10 +96,10 @@ class OllamaManagerPage(AsyncColumn):
async def _create_model_item(self, model):
- base_model: Optional[Literal['ollama', 'huggingface']] = None
+ base_model: Optional[Literal['ollama', 'hugging-face']] = None
if len(model['details']['parent_model']) == 0:
if model['name'].startswith('hf.co/'):
- base_model = 'huggingface'
+ base_model = 'hugging-face'
else:
base_model = 'ollama'
@@ -115,36 +114,69 @@ class OllamaManagerPage(AsyncColumn):
parameters = ollama.model_parameters(model) # if no num_ctx is in parameters, we have a default of 4096
with ui.card().classes('w-full'):
- with ui.row().classes('w-full items-center'):
- with ui.column().classes('flex-grow gap-1'):
- # Model name
- with ui.row().classes('items-center'):
+ with ui.row().classes('w-full flex items-center'):
+ with ui.column().classes('grow gap-1'):
+ with ui.row().classes('items-center w-full'):
ui.label(model['name']).classes('font-bold text-h6')
- if base_model == 'ollama':
- ui.html(f'
')
- if base_model == 'huggingface':
- ui.html(f'
')
+ if base_model:
+ base_icon = 'ollama-dark' if base_model == 'ollama' else 'hugging-face'
+ ui.html(f'
')
+ ui.space()
+ # Capabilities icons
+ if 'vision' in capabilities:
+ ui.icon('visibility', size='xs').classes('text-green-500').tooltip('Vision capabilities')
+ if 'tools' in capabilities:
+ ui.icon('build', size='xs').classes('text-indigo-500').tooltip('Tool/Function calling')
+ if 'completion' in capabilities:
+ ui.icon('edit_note', size='xs').classes('text-yellow-500').tooltip('Code completion')
+ if 'embedding' in capabilities:
+ ui.icon('vector', size='xs').classes('text-pink-500').tooltip('Embedding generation')
+ if not base_model:
+ ui.label(model['details']['parent_model']).classes('text-xs text-grey-5 ml-2')
+ # Details row with icon rows
+ with ui.row().classes('gap-3 flex-wrap items-center'):
+ # Size
+ with ui.row().classes('gap-1 items-center'):
+ ui.icon('storage', size='xs').classes('text-cyan-500')
+ size_gb = model['size'] / (1024**3)
+ ui.label(f"{size_gb:.2f} GB").classes('text-xs')
- # Details row with chips
- with ui.row().classes('gap-2 flex-wrap'):
- # Size chip
- size_gb = model['size'] / (1024**3)
- ui.chip(f"{size_gb:.2f} GB", icon='storage').props('outline dense color=cyan')
+ # Quantization
+ with ui.row().classes('gap-1 items-center'):
+ ui.icon('memory', size='xs').classes('text-orange-500')
+ ui.label(model['details']['quantization_level']).classes('text-xs')
- # Quantization chip
- ui.chip(model['details']['quantization_level'], icon='memory').props('outline dense color=orange')
-
- # Parameter size chip
+ # Parameter size
if model['details'].get('parameter_size'):
- ui.chip(model['details']['parameter_size'], icon='tune').props('outline dense color=purple')
+ with ui.row().classes('gap-1 items-center'):
+ ui.icon('tune', size='xs').classes('text-purple-500')
+ ui.label(model['details']['parameter_size']).classes('text-xs')
- # Format chip
+ # Context Length (what Ollama uses)
+ with ui.row().classes('gap-1 items-center'):
+ ui.icon('width_normal', size='xs').classes('text-blue-500')
+ ollama_ctx = parameters.get('num_ctx', 4096)
+ ctx_label = f"{ollama_ctx:,}"
+ if model_context_length and model_context_length != ollama_ctx:
+ ctx_label += f" / {model_context_length:,}"
+ ui.label(ctx_label).classes('text-xs').tooltip(f'Context: {ollama_ctx:,} tokens (model supports {model_context_length:,} max)' if model_context_length else f'Context: {ollama_ctx:,} tokens')
+
+ # Key parameters (if customized)
+ if parameters.get('temperature') and parameters['temperature'] != 0.8:
+ with ui.row().classes('gap-1 items-center'):
+ ui.icon('thermostat', size='xs').classes('text-red-500')
+ ui.label(f"{parameters['temperature']:.1f}").classes('text-xs').tooltip(f'Temperature: {parameters["temperature"]}')
+
+ if parameters.get('top_p'):
+ with ui.row().classes('gap-1 items-center'):
+ ui.icon('percent', size='xs').classes('text-teal-500')
+ ui.label(f"{parameters['top_p']:.2f}").classes('text-xs').tooltip(f'Top-p: {parameters["top_p"]}')
+
+ # Format
if model['details'].get('format'):
- ui.chip(model['details']['format'].upper(), icon='description').props('outline dense color=green')
-
- # Family chip
- if model['details'].get('family'):
- ui.chip(model['details']['family'], icon='category').props('outline dense color=blue')
+ with ui.row().classes('gap-1 items-center'):
+ ui.icon('description', size='xs').classes('text-gray-500')
+ ui.label(model['details']['format'].upper()).classes('text-xs')
# Modified timestamp
if model.get('modified_at'):
@@ -167,8 +199,6 @@ class OllamaManagerPage(AsyncColumn):
except:
pass
- ui.space()
-
with ui.row().classes('gap-2'):
ui.button(icon='chat', on_click=lambda m=model['name']: self._test_model_dialog(m)).props('round flat').tooltip('Test Model')
ui.button(icon='edit', on_click=lambda m=model['name']: self._model_edit_model_dialog(m)).props('round flat').tooltip('Model Info')
@@ -176,17 +206,13 @@ class OllamaManagerPage(AsyncColumn):
ui.button(icon='delete', on_click=lambda m=model['name']: self._delete_model(m)).props('round flat color=negative').tooltip('Delete Model')
async def _model_edit_model_dialog(self, model_name):
+ model_info = await ollama.model_info(model_name)
with ui.dialog() as dialog:
- await OllamaModelCreationComponent.create(model_name)
- await dialog
- self.models_container.refresh() # type: ignore
+ await OllamaModelEditComponent.create(model_name, model_info)
+ result = await dialog
- async def _print_model_info(self, model_name):
- result = await ollama.model_info(model_name)
- for key, value in result.items():
- print(key)
-
- print(result['modelfile'])
+ if result:
+ self.models_container.refresh() # type: ignore
async def _model_information_dialog(self, model_name):
model_info = await ollama.model_info(model_name)