
Commit d824609

Fix provider filter updates
1 parent 4b5dee5 commit d824609

8 files changed (+13, -13 lines)


backend/__init__.py

Lines changed: 1 addition & 1 deletion
@@ -1,3 +1,3 @@
 """Backend sync worker service."""
 
-__version__ = "0.6.24"
+__version__ = "0.6.25"

frontend/__init__.py

Lines changed: 1 addition & 1 deletion
@@ -1,3 +1,3 @@
 """Frontend API and UI service."""
 
-__version__ = "0.6.24"
+__version__ = "0.6.25"

frontend/api.py

Lines changed: 2 additions & 2 deletions
@@ -473,8 +473,8 @@ async def create_provider_legacy(
         api_key=api_key,
         prefix=prefix,
         default_ollama_mode=default_ollama_mode,
-        model_filter=model_filter,
-        model_filter_exclude=model_filter_exclude,
+        model_filter=model_filter.strip() if model_filter is not None else None,
+        model_filter_exclude=model_filter_exclude.strip() if model_filter_exclude is not None else None,
         sync_interval_seconds=sync_interval_seconds
     )
     from fastapi.responses import RedirectResponse

frontend/routes/providers.py

Lines changed: 4 additions & 4 deletions
@@ -341,8 +341,8 @@ async def add_provider(
         api_key=_normalize_optional_str(api_key),
         prefix=_normalize_optional_str(prefix),
         default_ollama_mode=_normalize_optional_str(default_ollama_mode),
-        model_filter=_normalize_optional_str(model_filter),
-        model_filter_exclude=_normalize_optional_str(model_filter_exclude),
+        model_filter=model_filter.strip() if model_filter is not None else None,
+        model_filter_exclude=model_filter_exclude.strip() if model_filter_exclude is not None else None,
         tags=_parse_csv_list(tags),
         access_groups=_parse_csv_list(access_groups),
         sync_enabled=sync_enabled_val,
@@ -436,8 +436,8 @@ async def update_provider_endpoint(
         api_key=_normalize_optional_str(api_key),
         prefix=_normalize_optional_str(prefix),
         default_ollama_mode=_normalize_optional_str(default_ollama_mode),
-        model_filter=_normalize_optional_str(model_filter),
-        model_filter_exclude=_normalize_optional_str(model_filter_exclude),
+        model_filter=model_filter.strip() if model_filter is not None else None,
+        model_filter_exclude=model_filter_exclude.strip() if model_filter_exclude is not None else None,
         tags=_parse_csv_list(tags),
         access_groups=_parse_csv_list(access_groups),
         sync_enabled=_parse_bool(sync_enabled),
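
A reading of this change (not stated in the commit message, so treat it as an assumption): _normalize_optional_str appears to collapse empty form fields to None, and the update path treats None as "field not provided", so clearing a filter from the edit form was silently ignored. Stripping the raw value while keeping the empty string lets an emptied field reach update_provider. A minimal sketch of the two normalizations, with _normalize_optional_str reimplemented here purely as an assumed stand-in:

from typing import Optional

def _normalize_optional_str(value: Optional[str]) -> Optional[str]:
    # Assumed behavior of the existing helper: strip whitespace and
    # collapse empty strings to None (an inference, not the real code).
    if value is None:
        return None
    return value.strip() or None

def normalize_after_commit(value: Optional[str]) -> Optional[str]:
    # The expression used in the diff: strip whitespace, keep "" as "".
    return value.strip() if value is not None else None

print(_normalize_optional_str("  "))  # None -> "not provided", filter cannot be cleared
print(normalize_after_commit("  "))   # ""   -> reaches update_provider and clears the filter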

proxy/__init__.py

Lines changed: 1 addition & 1 deletion
@@ -1 +1 @@
-__version__ = "0.6.24"
+__version__ = "0.6.25"

pyproject.toml

Lines changed: 1 addition & 1 deletion
@@ -1,6 +1,6 @@
 [project]
 name = "litellm-companion"
-version = "0.6.24"
+version = "0.6.25"
 description = "Synchronize models from Ollama or OpenAI-compatible endpoints into LiteLLM"
 authors = [
     {name = "LiteLLM Companion Authors", email = "[email protected]"}

shared/__init__.py

Lines changed: 1 addition & 1 deletion
@@ -1,3 +1,3 @@
 """Shared code between backend and frontend services."""
 
-__version__ = "0.6.24"
+__version__ = "0.6.25"

shared/crud.py

Lines changed: 2 additions & 2 deletions
@@ -194,9 +194,9 @@ async def update_provider(
     elif provider.type == "ollama" and provider.default_ollama_mode is None:
         provider.default_ollama_mode = "ollama_chat"
     if model_filter is not None:
-        provider.model_filter = model_filter
+        provider.model_filter = model_filter or None
     if model_filter_exclude is not None:
-        provider.model_filter_exclude = model_filter_exclude
+        provider.model_filter_exclude = model_filter_exclude or None
     if max_requests_per_hour is not None:
         provider.max_requests_per_hour = max_requests_per_hour
     if tags is not None:
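
On the storage side, the new "or None" means an explicit empty string now clears the stored filter instead of persisting "". A hypothetical, self-contained sketch of the resulting update semantics (Provider here is a plain stand-in, not the real model):

import asyncio

class Provider:
    # Stand-in for the ORM model; only the field relevant to this hunk.
    model_filter = "llama*"

async def update_provider(provider: Provider, model_filter: str | None = None) -> Provider:
    # Mirrors the updated crud.py logic for this one field:
    # None -> leave the stored value untouched
    # ""   -> clear it (stored as None)
    # "x*" -> replace it
    if model_filter is not None:
        provider.model_filter = model_filter or None
    return provider

p = Provider()
asyncio.run(update_provider(p, model_filter=""))
print(p.model_filter)  # None (cleared)
asyncio.run(update_provider(p, model_filter="gpt*"))
print(p.model_filter)  # gpt*
asyncio.run(update_provider(p))
print(p.model_filter)  # gpt* (unchanged)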
