feat: add support for Atomic Chat provider
- Introduced a new provider profile for Atomic Chat, allowing it to be used alongside existing providers.
- Updated `package.json` to include a new development script for launching Atomic Chat.
- Modified `smart_router.py` to recognize Atomic Chat as a local provider that does not require an API key.
- Enhanced provider discovery and launch scripts to handle Atomic Chat, including model listing and connection checks.
- Added tests to ensure proper environment setup and behavior for Atomic Chat profiles.

This update expands the functionality of the application to support local LLMs via Atomic Chat, improving versatility for users.
This commit is contained in:
@@ -57,8 +57,8 @@ class Provider:
|
||||
@property
def is_configured(self) -> bool:
    """True if the provider is ready to use.

    Local providers ("ollama", "atomic-chat") run on the user's machine
    and need no API key, so they are always considered configured; any
    other provider is configured only when an API key is present.
    """
    # NOTE(review): diff residue had kept the superseded single-name
    # "ollama" check alongside this tuple check; the tuple check fully
    # covers it, so only the post-change form is retained.
    if self.name in ("ollama", "atomic-chat"):
        return True  # Local providers need no API key
    return bool(self.api_key)
|
||||
|
||||
@property
|
||||
@@ -93,6 +93,7 @@ def build_default_providers() -> list[Provider]:
|
||||
big = os.getenv("BIG_MODEL", "gpt-4.1")
|
||||
small = os.getenv("SMALL_MODEL", "gpt-4.1-mini")
|
||||
ollama_url = os.getenv("OLLAMA_BASE_URL", "http://localhost:11434")
|
||||
atomic_chat_url = os.getenv("ATOMIC_CHAT_BASE_URL", "http://127.0.0.1:1337")
|
||||
|
||||
return [
|
||||
Provider(
|
||||
@@ -119,6 +120,14 @@ def build_default_providers() -> list[Provider]:
|
||||
big_model=big if "gemini" not in big and "gpt" not in big else "llama3:8b",
|
||||
small_model=small if "gemini" not in small and "gpt" not in small else "llama3:8b",
|
||||
),
|
||||
Provider(
|
||||
name="atomic-chat",
|
||||
ping_url=f"{atomic_chat_url}/v1/models",
|
||||
api_key_env="",
|
||||
cost_per_1k_tokens=0.0, # free — local (Apple Silicon)
|
||||
big_model=big if "gemini" not in big and "gpt" not in big else "llama3:8b",
|
||||
small_model=small if "gemini" not in small and "gpt" not in small else "llama3:8b",
|
||||
),
|
||||
]
|
||||
|
||||
|
||||
|
||||
Reference in New Issue
Block a user