feat: Allow optional model override in Teto AI response function
This commit is contained in:
parent
27284cf4a0
commit
4c377a47be
@@ -33,10 +33,11 @@ def save_conversation_history(history):
|
||||
with open(CONVERSATION_HISTORY_FILE, 'w', encoding='utf-8') as f:
|
||||
json.dump(history, f, indent=4)
|
||||
|
||||
async def _teto_reply_ai_with_messages(messages, system_mode="reply"):
|
||||
async def _teto_reply_ai_with_messages(messages, system_mode="reply", model_override=None):
|
||||
"""
|
||||
Use OpenRouter AI to generate a Kasane Teto-style response.
|
||||
system_mode: "reply" for replying as Kasane Teto.
|
||||
model_override: Optional model name to use instead of the default.
|
||||
"""
|
||||
api_key = os.getenv("AI_API_KEY")
|
||||
if not api_key:
|
||||
@@ -64,10 +65,8 @@ async def _teto_reply_ai_with_messages(messages, system_mode="reply"):
|
||||
)
|
||||
|
||||
# Determine the model to use
|
||||
# This function doesn't have access to user-specific conversation history directly.
|
||||
# The model should be passed as an argument or retrieved based on user_id if needed.
|
||||
# For now, we'll keep the default here and handle user-specific model in the command handler.
|
||||
model_to_use = DEFAULT_AI_MODEL
|
||||
# Use the model_override if provided, otherwise use the default
|
||||
model_to_use = model_override if model_override else DEFAULT_AI_MODEL
|
||||
|
||||
payload = {
|
||||
"model": model_to_use,
|
||||
@@ -107,7 +106,7 @@ class RoleplayTetoCog(commands.Cog):
|
||||
|
||||
# Get AI reply using the user's conversation history and selected model
|
||||
conversation_messages = self.conversations[user_id]['messages']
|
||||
ai_reply = await _teto_reply_ai_with_messages(conversation_messages)
|
||||
ai_reply = await _teto_reply_ai_with_messages(conversation_messages, model_override=user_model)
|
||||
ai_reply = strip_think_blocks(ai_reply)
|
||||
|
||||
# Append AI's reply to the history
|
||||
|
Loading…
x
Reference in New Issue
Block a user