fix: Handle missing AVAILABLE_AI_MODELS import and provide fallback options

Slipstream 2025-05-30 13:18:57 -06:00
parent 583a19ed6e
commit c374a1a835
Signed by: slipstream
GPG Key ID: 13E498CE010AC6FD
2 changed files with 11 additions and 2 deletions


@@ -12,7 +12,16 @@ from typing import TYPE_CHECKING, Optional, Dict, Any, List, Tuple # Add more ty
 # We need access to the cog instance for state and methods like get_ai_response
 # These commands will likely be added to the GurtCog instance dynamically in cog.py's setup
-from .config import AVAILABLE_AI_MODELS # Import for choices
+try:
+    from .config import AVAILABLE_AI_MODELS
+except (ImportError, AttributeError):
+    AVAILABLE_AI_MODELS = {
+        "google/gemini-2.5-flash-preview-05-20": "Gemini 2.5 Flash Preview",
+        "google/gemini-2.5-pro-preview-05-06": "Gemini 2.5 Pro Preview",
+        "claude-sonnet-4@20250514": "Claude Sonnet 4",
+        "llama-4-maverick-17b-128e-instruct-maas": "Llama 4 Maverick Instruct",
+        "google/gemini-2.0-flash-001": "Gemini 2.0 Flash"
+    }
 if TYPE_CHECKING:
     from .cog import GurtCog # For type hinting


@@ -33,7 +33,7 @@ AVAILABLE_AI_MODELS = {
     "google/gemini-2.5-flash-preview-05-20": "Gemini 2.5 Flash Preview",
     "google/gemini-2.5-pro-preview-05-06": "Gemini 2.5 Pro Preview",
     "claude-sonnet-4@20250514": "Claude Sonnet 4",
-    "llama-4-maverick-17b-128e-instruct-maas": "Llama 4 Maverick",
+    "llama-4-maverick-17b-128e-instruct-maas": "Llama 4 Maverick Instruct",
     "google/gemini-2.0-flash-001": "Gemini 2.0 Flash"
 }
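
The fallback dictionary keeps the commands module importable even when config cannot provide AVAILABLE_AI_MODELS. Since the original import is commented "Import for choices", here is a minimal sketch of how the mapping might then feed slash-command choices, assuming discord.py's app_commands API; the MODEL_CHOICES name and the slicing are illustrative and not part of this commit:

    # Sketch only: convert the model-id -> label mapping into Choice objects.
    from discord import app_commands

    try:
        from .config import AVAILABLE_AI_MODELS
    except (ImportError, AttributeError):
        # Same fallback idea as the commit: a hard-coded mapping.
        AVAILABLE_AI_MODELS = {"google/gemini-2.0-flash-001": "Gemini 2.0 Flash"}

    # Discord limits a slash-command parameter to 25 choices, so slice defensively.
    MODEL_CHOICES = [
        app_commands.Choice(name=label, value=model_id)
        for model_id, label in list(AVAILABLE_AI_MODELS.items())[:25]
    ]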