diff --git a/gurt/commands.py b/gurt/commands.py
index 8ed362a..f7ca791 100644
--- a/gurt/commands.py
+++ b/gurt/commands.py
@@ -12,7 +12,16 @@ from typing import TYPE_CHECKING, Optional, Dict, Any, List, Tuple # Add more ty
 
 # We need access to the cog instance for state and methods like get_ai_response
 # These commands will likely be added to the GurtCog instance dynamically in cog.py's setup
-from .config import AVAILABLE_AI_MODELS # Import for choices
+try:
+    from .config import AVAILABLE_AI_MODELS
+except (ImportError, AttributeError):
+    AVAILABLE_AI_MODELS = {
+        "google/gemini-2.5-flash-preview-05-20": "Gemini 2.5 Flash Preview",
+        "google/gemini-2.5-pro-preview-05-06": "Gemini 2.5 Pro Preview",
+        "claude-sonnet-4@20250514": "Claude Sonnet 4",
+        "llama-4-maverick-17b-128e-instruct-maas": "Llama 4 Maverick Instruct",
+        "google/gemini-2.0-flash-001": "Gemini 2.0 Flash"
+    }
 
 if TYPE_CHECKING:
     from .cog import GurtCog # For type hinting
diff --git a/gurt/config.py b/gurt/config.py
index 50ab89d..2bcd8ce 100644
--- a/gurt/config.py
+++ b/gurt/config.py
@@ -33,7 +33,7 @@
 AVAILABLE_AI_MODELS = {
     "google/gemini-2.5-flash-preview-05-20": "Gemini 2.5 Flash Preview",
     "google/gemini-2.5-pro-preview-05-06": "Gemini 2.5 Pro Preview",
     "claude-sonnet-4@20250514": "Claude Sonnet 4",
-    "llama-4-maverick-17b-128e-instruct-maas": "Llama 4 Maverick",
+    "llama-4-maverick-17b-128e-instruct-maas": "Llama 4 Maverick Instruct",
     "google/gemini-2.0-flash-001": "Gemini 2.0 Flash"
 }
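
For context on the "# Import for choices" comment in the removed import line: below is a minimal sketch, assuming discord.py 2.x app commands, of how an id-to-label mapping like AVAILABLE_AI_MODELS is typically turned into slash-command choices. The /gurtmodel command, its name, and its callback body are hypothetical illustrations, not code from this repository.

```python
# Sketch only: shows the model-id -> display-name mapping feeding slash-command
# choices. The command name and body are hypothetical, not from gurt/commands.py.
import discord
from discord import app_commands

# Same shape as the dict in gurt/config.py: model id -> display name.
AVAILABLE_AI_MODELS = {
    "google/gemini-2.5-flash-preview-05-20": "Gemini 2.5 Flash Preview",
    "google/gemini-2.5-pro-preview-05-06": "Gemini 2.5 Pro Preview",
    "claude-sonnet-4@20250514": "Claude Sonnet 4",
    "llama-4-maverick-17b-128e-instruct-maas": "Llama 4 Maverick Instruct",
    "google/gemini-2.0-flash-001": "Gemini 2.0 Flash",
}

# Discord caps each option at 25 choices; five models is well within that.
MODEL_CHOICES = [
    app_commands.Choice(name=label, value=model_id)
    for model_id, label in AVAILABLE_AI_MODELS.items()
]

@app_commands.command(name="gurtmodel", description="Pick the AI model")  # hypothetical name
@app_commands.choices(model=MODEL_CHOICES)
async def gurtmodel(interaction: discord.Interaction, model: app_commands.Choice[str]):
    # model.value is the model id string; model.name is the human-readable label.
    await interaction.response.send_message(f"Using {model.name} ({model.value})")
```

The try/except fallback in the diff keeps this choice list buildable even if config.py is missing or temporarily lacks AVAILABLE_AI_MODELS, at the cost of duplicating the dict in two places.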