Spaces:
Running
Running
| """ | |
| ⚙️ Configuration Management for CourseCrafter AI | |
| Centralized configuration system with environment variable support and validation. | |
| """ | |
| import os | |
| import json | |
| from typing import Dict, Any, Optional, List | |
| from dataclasses import dataclass, field | |
| from pathlib import Path | |
| from dotenv import load_dotenv | |
| from ..types import LLMProvider | |
@dataclass
class LLMProviderConfig:
    """Configuration for a specific LLM provider.

    Constructed from the raw dict in ``Config._config["llm_providers"]`` via
    ``LLMProviderConfig(**config)``, so the ``@dataclass`` decorator is
    required to generate the keyword ``__init__`` — without it these are
    bare class annotations and instantiation fails.

    Attributes:
        api_key: Provider API key (may be a dummy for OpenAI-compatible endpoints).
        model: Model identifier to request from the provider.
        temperature: Sampling temperature (default 0.7).
        max_tokens: Optional completion-token cap; None means provider default.
        timeout: Request timeout in seconds.
        base_url: Optional custom endpoint (used by OpenAI-compatible providers).
    """
    api_key: str
    model: str
    temperature: float = 0.7
    max_tokens: Optional[int] = None
    timeout: int = 60
    base_url: Optional[str] = None
class Config:
    """
    Centralized configuration management for Course Creator AI

    Handles environment variables, API keys, and URL configurations.
    Values are loaded from the process environment (optionally seeded from a
    .env file via python-dotenv) with hard-coded fallbacks.
    """

    def __init__(self):
        # Load environment variables from a .env file, if one exists
        load_dotenv()
        # Build the configuration dict, then warn about anything missing
        self._config = self._load_default_config()
        self._validate_config()

    def _load_default_config(self) -> Dict[str, Any]:
        """Load default configuration with environment variable overrides."""
        # Get default model from env or fallback
        default_model = os.getenv("DEFAULT_MODEL", "gpt-4.1-nano")
        # Get default LLM provider from env or fallback
        default_llm_provider = os.getenv("DEFAULT_LLM_PROVIDER", "openai")
        return {
            # LLM Provider Configurations
            "llm_providers": {
                "openai": {
                    "api_key": os.getenv("OPENAI_API_KEY", ""),
                    "model": os.getenv("OPENAI_MODEL", default_model),
                    "temperature": float(os.getenv("OPENAI_TEMPERATURE", "0.7")),
                    "max_tokens": int(os.getenv("OPENAI_MAX_TOKENS", "20000")) if os.getenv("OPENAI_MAX_TOKENS") else None,
                    "timeout": int(os.getenv("OPENAI_TIMEOUT", "60"))
                },
                "anthropic": {
                    "api_key": os.getenv("ANTHROPIC_API_KEY", ""),
                    "model": os.getenv("ANTHROPIC_MODEL", "claude-3-5-sonnet-20241022"),
                    "temperature": float(os.getenv("ANTHROPIC_TEMPERATURE", "0.7")),
                    "max_tokens": int(os.getenv("ANTHROPIC_MAX_TOKENS", "20000")) if os.getenv("ANTHROPIC_MAX_TOKENS") else None,
                    "timeout": int(os.getenv("ANTHROPIC_TIMEOUT", "60"))
                },
                "google": {
                    "api_key": os.getenv("GOOGLE_API_KEY", ""),
                    "model": os.getenv("GOOGLE_MODEL", "gemini-2.0-flash"),
                    "temperature": float(os.getenv("GOOGLE_TEMPERATURE", "0.7")),
                    "max_tokens": int(os.getenv("GOOGLE_MAX_TOKENS", "20000")) if os.getenv("GOOGLE_MAX_TOKENS") else None,
                    "timeout": int(os.getenv("GOOGLE_TIMEOUT", "60"))
                },
                # Identified by base_url rather than api_key; "dummy" keeps
                # OpenAI-compatible clients happy when no key is required.
                "openai_compatible": {
                    "api_key": os.getenv("OPENAI_COMPATIBLE_API_KEY", "dummy"),
                    "base_url": os.getenv("OPENAI_COMPATIBLE_BASE_URL", ""),
                    "model": os.getenv("OPENAI_COMPATIBLE_MODEL", ""),
                    "temperature": float(os.getenv("OPENAI_COMPATIBLE_TEMPERATURE", "0.7")),
                    "max_tokens": int(os.getenv("OPENAI_COMPATIBLE_MAX_TOKENS", "20000")) if os.getenv("OPENAI_COMPATIBLE_MAX_TOKENS") else None,
                    "timeout": int(os.getenv("OPENAI_COMPATIBLE_TIMEOUT", "60"))
                }
            },
            # Course Generation Settings
            "course_generation": {
                "default_difficulty": "beginner",
                "default_lesson_count": 5,
                "max_lesson_duration": 30,
                "include_images": True,
                "include_flashcards": True,
                "include_quizzes": True,
                "research_depth": "comprehensive"
            },
            # Image Generation Settings
            "image_generation": {
                "pollinations_api_token": os.getenv("POLLINATIONS_API_TOKEN", ""),
                "pollinations_api_reference": os.getenv("POLLINATIONS_API_REFERENCE", ""),
                "default_width": 1280,
                "default_height": 720,
                "default_model": "gptimage",
                "enhance_prompts": True,
                "no_logo": True
            },
            # Export Settings
            "export": {
                "default_formats": ["pdf", "markdown"],
                "output_directory": os.getenv("COURSECRAFTER_OUTPUT_DIR", "./output"),
                "max_file_size": 50 * 1024 * 1024,  # 50MB
                "compression": True
            },
            # UI Settings
            "ui": {
                "theme": "soft",
                "show_progress": True,
                "auto_scroll": True,
                "max_concurrent_generations": 3
            },
            # System Settings
            "system": {
                "default_llm_provider": default_llm_provider,
                "max_turns": 25,
                "timeout": 300,  # 5 minutes
                "retry_attempts": 3,
                "log_level": os.getenv("LOG_LEVEL", "INFO"),
                "debug_mode": os.getenv("DEBUG", "false").lower() == "true"
            }
        }

    @staticmethod
    def _is_provider_configured(provider: str, provider_config: Dict[str, Any]) -> bool:
        """Return True if the provider has the credentials it needs.

        openai_compatible endpoints are identified by base_url (their api_key
        may legitimately be a dummy value); all other providers need api_key.
        """
        if provider == "openai_compatible":
            return bool(provider_config.get("base_url"))
        return bool(provider_config.get("api_key"))

    def _validate_config(self):
        """Validate configuration and warn about missing required settings."""
        warnings = []
        for provider, provider_config in self._config["llm_providers"].items():
            if provider == "openai_compatible":
                if not provider_config.get("base_url"):
                    warnings.append(f"Missing base_url for {provider}")
            elif not provider_config["api_key"]:
                warnings.append(f"Missing API key for {provider}")
        # FIX: original collected these warnings but never emitted them
        for warning in warnings:
            print(f"⚠️ {warning}")
        # Check if at least one LLM provider is configured
        has_provider = any(
            self._is_provider_configured(provider, provider_config)
            for provider, provider_config in self._config["llm_providers"].items()
        )
        if not has_provider:
            # Only warn instead of raising error - allows app to start for UI configuration
            print("⚠️ Warning: No LLM providers configured. Please configure at least one provider in the UI.")

    def get_llm_config(self, provider: LLMProvider) -> LLMProviderConfig:
        """Get configuration for a specific LLM provider.

        Raises:
            ValueError: If the provider key is unknown.
        """
        # Reload config to pick up any environment variable changes
        self._config = self._load_default_config()
        if provider not in self._config["llm_providers"]:
            raise ValueError(f"Unknown LLM provider: {provider}")
        config = self._config["llm_providers"][provider]
        return LLMProviderConfig(**config)

    def get_available_llm_providers(self) -> List[LLMProvider]:
        """Get list of available LLM providers with API keys (or base_url)."""
        # Reload config to pick up any environment variable changes
        self._config = self._load_default_config()
        return [
            provider
            for provider, provider_config in self._config["llm_providers"].items()
            if self._is_provider_configured(provider, provider_config)
        ]

    def get_default_llm_provider(self) -> LLMProvider:
        """Get the default LLM provider, falling back to first available if not configured."""
        # Reload config to pick up any environment variable changes
        self._config = self._load_default_config()
        default_provider = self._config["system"]["default_llm_provider"]
        available_providers = self.get_available_llm_providers()
        # If the default provider is available, use it
        if default_provider in available_providers:
            return default_provider
        # Otherwise, use the first available provider
        if available_providers:
            print(f"⚠️ Default provider '{default_provider}' not configured, using '{available_providers[0]}'")
            return available_providers[0]
        # If no providers are available, return a fallback instead of raising an error
        print("⚠️ Warning: No LLM providers are configured. Returning 'google' as fallback.")
        return "google"  # Return a fallback provider that can be configured later

    def get_image_generation_config(self) -> Dict[str, Any]:
        """Get image generation configuration."""
        return self._config["image_generation"]

    def get(self, key: str, default: Any = None) -> Any:
        """Get a configuration value using dot notation (e.g. "ui.theme")."""
        keys = key.split(".")
        value = self._config
        try:
            for k in keys:
                value = value[k]
            return value
        except (KeyError, TypeError):
            # TypeError covers indexing into a non-dict leaf
            return default

    def set(self, key: str, value: Any):
        """Set a configuration value using dot notation, creating intermediate dicts."""
        keys = key.split(".")
        config = self._config
        for k in keys[:-1]:
            if k not in config:
                config[k] = {}
            config = config[k]
        config[keys[-1]] = value

    def update_llm_provider(self, provider: LLMProvider, **kwargs):
        """Update LLM provider configuration in place.

        Raises:
            ValueError: If the provider key is unknown.
        """
        if provider not in self._config["llm_providers"]:
            raise ValueError(f"Unknown LLM provider: {provider}")
        self._config["llm_providers"][provider].update(kwargs)

    def to_dict(self) -> Dict[str, Any]:
        """Convert configuration to dictionary.

        NOTE: this is a shallow copy — nested dicts are shared with the
        live configuration.
        """
        return self._config.copy()

    def save_to_file(self, filepath: str):
        """Save configuration to JSON file."""
        with open(filepath, 'w') as f:
            json.dump(self._config, f, indent=2)

    @classmethod
    def load_from_file(cls, filepath: str) -> 'Config':
        """Load configuration from JSON file, layered over the env defaults.

        FIX: the original took ``cls`` but lacked ``@classmethod``, so
        ``Config.load_from_file(path)`` raised a TypeError.
        """
        instance = cls()
        if os.path.exists(filepath):
            with open(filepath, 'r') as f:
                file_config = json.load(f)
            instance._config.update(file_config)
            instance._validate_config()
        return instance
# Global configuration instance — created at import time, so importing this
# module reads the environment (and any .env file) and prints validation
# warnings as a side effect.
config = Config()