- Add multi-provider LLM architecture supporting OpenRouter, OpenAI, Gemini, and custom providers - Implement global LLM on/off switch with default DISABLED state for cost protection - Add per-character LLM configuration with provider-specific models and settings - Create performance-optimized caching system for LLM enabled status checks - Add API key validation before enabling LLM providers to prevent broken configurations - Implement audit logging for all LLM enable/disable actions for cost accountability - Create comprehensive admin UI with prominent cost warnings and confirmation dialogs - Add visual indicators in character list for custom AI model configurations - Build character-specific LLM client system with global fallback mechanism - Add database schema support for per-character LLM settings - Implement graceful fallback responses when LLM is globally disabled - Create provider testing and validation system for reliable connections
140 lines
4.4 KiB
Python
Executable File
140 lines
4.4 KiB
Python
Executable File
#!/usr/bin/env python3
|
|
"""
|
|
Script to help migrate from single LLM provider to multi-provider configuration
|
|
"""
|
|
|
|
import json
|
|
import os
|
|
import sys
|
|
from pathlib import Path
|
|
|
|
|
|
def update_fishbowl_config():
    """Migrate config/fishbowl_config.json to a multi-provider LLM layout.

    Moves the current single-provider LLM settings into a ``custom``
    provider entry and appends disabled example entries for OpenRouter,
    OpenAI and Gemini.  A backup of the ORIGINAL file is written next to
    the config before anything is changed.

    Returns:
        bool: True on success (or when the migration was already applied),
        False when the config file does not exist.
    """
    config_path = Path("config/fishbowl_config.json")

    if not config_path.exists():
        print(f"Configuration file not found: {config_path}")
        return False

    # Read existing config
    with open(config_path, 'r') as f:
        config = json.load(f)

    # Idempotency guard: don't re-run the migration on an updated file.
    if 'providers' in config.get('llm', {}):
        print("Multi-provider configuration already exists")
        return True

    # Create backup of the PRE-migration content.
    # BUG FIX: the backup was previously written after 'providers' was
    # inserted into config, so it captured the new config instead of the
    # original and offered no rollback path.
    backup_path = config_path.with_suffix('.json.backup')
    with open(backup_path, 'w') as f:
        json.dump(config, f, indent=2)
    print(f"Created backup: {backup_path}")

    # Current single-provider settings become the "custom" provider.
    current_llm = config.get('llm', {})

    # Create new multi-provider config; "custom" keeps the existing
    # local setup enabled so behavior is unchanged after migration.
    providers_config = {
        "custom": {
            "type": "custom",
            "enabled": True,
            "priority": 70,
            "config": {
                "base_url": current_llm.get('base_url', 'http://localhost:11434'),
                "model": current_llm.get('model', 'llama2'),
                "api_key": os.getenv('LLM_API_KEY', 'x'),
                "timeout": current_llm.get('timeout', 300),
                "max_tokens": current_llm.get('max_tokens', 2000),
                "temperature": current_llm.get('temperature', 0.8),
                "api_format": "openai"
            }
        }
    }

    # Example provider configurations — disabled by default so no paid
    # API calls can happen until the operator explicitly opts in.
    providers_config.update({
        "openrouter": {
            "type": "openrouter",
            "enabled": False,
            "priority": 100,
            "config": {
                "api_key": "${OPENROUTER_API_KEY:}",
                "base_url": "https://openrouter.ai/api/v1",
                "model": "${OPENROUTER_MODEL:anthropic/claude-3-sonnet}",
                "timeout": 300,
                "max_tokens": 2000,
                "temperature": 0.8,
                "app_name": "discord-fishbowl"
            }
        },
        "openai": {
            "type": "openai",
            "enabled": False,
            "priority": 90,
            "config": {
                "api_key": "${OPENAI_API_KEY:}",
                "base_url": "https://api.openai.com/v1",
                "model": "${OPENAI_MODEL:gpt-4o-mini}",
                "timeout": 300,
                "max_tokens": 2000,
                "temperature": 0.8
            }
        },
        "gemini": {
            "type": "gemini",
            "enabled": False,
            "priority": 80,
            "config": {
                "api_key": "${GEMINI_API_KEY:}",
                "base_url": "https://generativelanguage.googleapis.com/v1beta",
                "model": "${GEMINI_MODEL:gemini-1.5-flash}",
                "timeout": 300,
                "max_tokens": 2000,
                "temperature": 0.8
            }
        }
    })

    # Update config.
    # BUG FIX: setdefault prevents a KeyError when the file has no 'llm'
    # section (the reads above already tolerated that case via .get()).
    config.setdefault('llm', {})['providers'] = providers_config

    # Write updated config
    with open(config_path, 'w') as f:
        json.dump(config, f, indent=2)

    print(f"Updated {config_path} with multi-provider configuration")
    print("\nTo enable additional providers:")
    print("1. Set environment variables for the provider you want to use")
    print("2. Change 'enabled': false to 'enabled': true in the config")
    print("3. Restart the application")

    return True
|
|
|
|
|
|
def main():
    """Entry point: run the config migration and report the outcome."""
    print("Discord Fishbowl LLM Configuration Updater")
    print("=" * 50)

    # Guard clause: bail out with a non-zero exit code on failure.
    if not update_fishbowl_config():
        print("\n❌ Configuration update failed!")
        sys.exit(1)

    print("\n✅ Configuration updated successfully!")

    print("\nAvailable providers:")
    for provider_line in (
        "- OpenRouter (supports Claude, GPT, Llama, etc.)",
        "- OpenAI (GPT models)",
        "- Google Gemini",
        "- Custom/Local (current setup)",
    ):
        print(provider_line)

    print("\nNext steps:")
    for step_line in (
        "1. Update your .env file with API keys for desired providers",
        "2. Enable providers in config/fishbowl_config.json",
        "3. Restart the application",
    ):
        print(step_line)
|
|
|
|
|
|
# Run the migration only when executed as a script, not when imported.
if __name__ == "__main__":
    main()