helper functions
@@ -87,25 +87,33 @@ class OllamaManagerPage(AsyncColumn):
         if result:
             if await ollama.delete_model(model):
                 ui.notify(f'Model {model} deleted.')
-                self.models_container.refresh()
+                self.models_container.refresh()  # type: ignore

     @ui.refreshable
     async def models_container(self):
-        self.models = await ollama.available_models()
-        select_options = [model['name'] for model in self.models]
-        # self.quick_test_select.set_options(select_options)
-        for model in self.models:
-            await self._create_model_item(model)
+
+        available_models = await ollama.available_models_detailed()
+        [await self._create_model_item(model) for model in available_models]

-    async def _create_model_item(self, model: Dict):
-        model_info = await ollama.model_info(model['name'])
+    async def _create_model_item(self, model):
+
         base_model: Optional[Literal['ollama', 'huggingface']] = None
-        if len(model_info['details']['parent_model']) == 0:
+        if len(model['details']['parent_model']) == 0:
             if model['name'].startswith('hf.co/'):
                 base_model = 'huggingface'
             else:
                 base_model = 'ollama'

+        # things to add
+        capabilities: List[str] = model['capabilities']
+        model_context_length: Optional[int] = None  # the context length the model supports
+        for model_info_key, model_info_value in model['model_info'].items():
+            if model_info_key.endswith('context_length'):
+                model_context_length = model_info_value
+
+        # values from parameters:
+        parameters = ollama.model_parameters(model)  # if no num_ctx is in parameters, we have a default of 4096
+
         with ui.card().classes('w-full'):
             with ui.row().classes('w-full items-center'):
                 with ui.column().classes('flex-grow gap-1'):
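Note: after this hunk, _create_model_item expects each entry from available_models_detailed() to already carry the fields that previously came from a separate ollama.model_info() call. A rough, hypothetical sketch of the dict shape the page code now reads (key names taken from the diff, values invented):

# Illustrative only: one merged model dict as _create_model_item now consumes it.
model = {
    'name': 'hf.co/example-org/example-model:Q4_K_M',  # 'hf.co/' prefix -> base_model = 'huggingface'
    'details': {'parent_model': ''},                   # empty parent_model -> treated as a base model
    'capabilities': ['completion'],                    # read into capabilities: List[str]
    'model_info': {'llama.context_length': 8192},      # any '*context_length' key -> model_context_length
    'modelfile': 'PARAMETER num_ctx 4096',             # consumed by ollama.model_parameters()
}

# Mirrors the loop added above: pick up the context length from model_info.
model_context_length = None
for key, value in model['model_info'].items():
    if key.endswith('context_length'):
        model_context_length = value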
@@ -171,7 +179,7 @@ class OllamaManagerPage(AsyncColumn):
         with ui.dialog() as dialog:
             await OllamaModelCreationComponent.create(model_name)
         await dialog
-        self.models_container.refresh()
+        self.models_container.refresh()  # type: ignore

     async def _print_model_info(self, model_name):
         result = await ollama.model_info(model_name)
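The # type: ignore added to both refresh() calls is presumably needed because ui.refreshable wraps the decorated method in NiceGUI's refreshable helper, and static type checkers do not always see the .refresh() attribute on the bound method. A minimal, self-contained sketch of the same pattern (class and labels are illustrative, not from this repository):

from nicegui import ui


class CounterCard:
    def __init__(self) -> None:
        self.count = 0
        self.render()  # initial render into the current container
        ui.button('Increment', on_click=self.increment)

    @ui.refreshable
    def render(self) -> None:
        ui.label(f'Count: {self.count}')

    def increment(self) -> None:
        self.count += 1
        self.render.refresh()  # type: ignore  # re-runs render() with the new state


CounterCard()
ui.run()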
@@ -1,6 +1,6 @@
 import httpx
 from nicegui import ui
-from typing import Tuple, Dict
+from typing import Tuple, Dict, List, Any


 async def status(url='http://127.0.0.1:11434') -> Tuple[bool, str]:
@@ -24,6 +24,51 @@ async def available_models(url='http://127.0.0.1:11434'):
         return response.json()["models"]


+async def available_models_detailed(url='http://127.0.0.1:11434') -> List[Dict[str, Any]]:
+    detailed_models = []
+    models = await available_models(url)
+    sorted_model_names = sorted([model['name'] for model in models], key=str.lower)
+    for model_name in sorted_model_names:
+        for model in models:
+            if model['name'] == model_name:
+                detailed_models.append(model | await model_info(model_name))
+                continue
+    return detailed_models
+
+
+def model_parameters(model: dict) -> Dict[str, float | int | List[str]]:
+    modelfile = model.get('modelfile', '')
+    parameters = {}
+    # Parse the modelfile content to extract settings
+    for line in modelfile.split('\n'):
+        line = line.strip()
+        if not line:
+            continue
+
+        if line.startswith('PARAMETER '):
+            # Parse parameter lines
+            param_line = line[10:].strip()
+            try:
+                key, value = param_line.split(' ', 1)
+
+                # Set parameter values and enable toggles
+                # floats
+                if key in ['temperature', 'top_p', 'min_p', 'repeat_penalty']:
+                    parameters[key] = float(value)
+                # integers
+                elif key in ['top_k', 'num_ctx', 'num_predict', 'repeat_last_n', 'seed']:
+                    parameters[key] = int(value)
+                # stops
+                elif key == 'stop':
+                    if 'stop' not in parameters:
+                        parameters['stop'] = []
+                    parameters['stop'].append(value)
+            except ValueError:
+                # Skip invalid parameter lines
+                continue
+    return parameters
+
+
 async def active_models(url='http://127.0.0.1:11434'):
     async with httpx.AsyncClient() as client:
         response = await client.get(f"{url}/api/ps")
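A short, hedged usage sketch of the two new helpers (assumes they are imported from this module and, for the async part, a running Ollama server on the default port; the modelfile text is invented to show the parsing rules):

import asyncio

# model_parameters() is synchronous and only inspects the 'modelfile' text.
fake_model = {
    'modelfile': 'PARAMETER temperature 0.8\n'
                 'PARAMETER num_ctx 8192\n'
                 'PARAMETER stop <|user|>\n'
                 'PARAMETER stop <|assistant|>'
}
print(model_parameters(fake_model))
# {'temperature': 0.8, 'num_ctx': 8192, 'stop': ['<|user|>', '<|assistant|>']}


async def main():
    # Each entry is an available_models() dict merged with its model_info()
    # result, sorted case-insensitively by name.
    for model in await available_models_detailed():
        print(model['name'], model_parameters(model).get('num_ctx', 4096))

# asyncio.run(main())  # requires a reachable Ollama instance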