From c374a1a835fe5aabf3e37a486b85663251575a5f Mon Sep 17 00:00:00 2001
From: Slipstream
Date: Fri, 30 May 2025 13:18:57 -0600
Subject: [PATCH] fix: Handle missing AVAILABLE_AI_MODELS import and provide fallback options

---
 gurt/commands.py | 11 ++++++++++-
 gurt/config.py   |  2 +-
 2 files changed, 11 insertions(+), 2 deletions(-)

diff --git a/gurt/commands.py b/gurt/commands.py
index 8ed362a..f7ca791 100644
--- a/gurt/commands.py
+++ b/gurt/commands.py
@@ -12,7 +12,16 @@ from typing import TYPE_CHECKING, Optional, Dict, Any, List, Tuple # Add more ty
 
 # We need access to the cog instance for state and methods like get_ai_response
 # These commands will likely be added to the GurtCog instance dynamically in cog.py's setup
-from .config import AVAILABLE_AI_MODELS # Import for choices
+try:
+    from .config import AVAILABLE_AI_MODELS
+except (ImportError, AttributeError):
+    AVAILABLE_AI_MODELS = {
+        "google/gemini-2.5-flash-preview-05-20": "Gemini 2.5 Flash Preview",
+        "google/gemini-2.5-pro-preview-05-06": "Gemini 2.5 Pro Preview",
+        "claude-sonnet-4@20250514": "Claude Sonnet 4",
+        "llama-4-maverick-17b-128e-instruct-maas": "Llama 4 Maverick Instruct",
+        "google/gemini-2.0-flash-001": "Gemini 2.0 Flash"
+    }
 
 if TYPE_CHECKING:
     from .cog import GurtCog # For type hinting
diff --git a/gurt/config.py b/gurt/config.py
index 50ab89d..2bcd8ce 100644
--- a/gurt/config.py
+++ b/gurt/config.py
@@ -33,7 +33,7 @@ AVAILABLE_AI_MODELS = {
     "google/gemini-2.5-flash-preview-05-20": "Gemini 2.5 Flash Preview",
     "google/gemini-2.5-pro-preview-05-06": "Gemini 2.5 Pro Preview",
     "claude-sonnet-4@20250514": "Claude Sonnet 4",
-    "llama-4-maverick-17b-128e-instruct-maas": "Llama 4 Maverick",
+    "llama-4-maverick-17b-128e-instruct-maas": "Llama 4 Maverick Instruct",
     "google/gemini-2.0-flash-001": "Gemini 2.0 Flash"
 }
 
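
Reviewer note (not part of the patch): the replaced "# Import for choices" comment suggests AVAILABLE_AI_MODELS feeds slash-command choices in commands.py. The sketch below shows one plausible way that mapping could be turned into discord.py 2.x choices; the MODEL_CHOICES name and the inline dict are illustrative assumptions, not code from this repository.

    from discord import app_commands

    # Assumed shape, mirroring gurt/config.py: model id -> display label.
    AVAILABLE_AI_MODELS = {
        "google/gemini-2.0-flash-001": "Gemini 2.0 Flash",
    }

    # Build Choice objects (name shown to the user, value passed to the command).
    MODEL_CHOICES = [
        app_commands.Choice(name=label, value=model_id)
        for model_id, label in AVAILABLE_AI_MODELS.items()
    ]

If that is how the dict is consumed, the try/except fallback keeps the model picker working even when the config import fails, at the cost of a hard-coded list that must be kept in sync with gurt/config.py by hand.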