"""
OpenAI API service for accessing LLM capabilities.
"""
import os
import logging
from typing import Dict, List, Optional, Any, Union
from django.conf import settings
from openai import OpenAI
from .llm_service import LLMService

logger = logging.getLogger(__name__)

class OpenAIService(LLMService):
    """
    Service for interacting with the OpenAI API.

    Transparently handles parameter differences across model families:
    the gpt-5 family requires ``max_completion_tokens`` instead of
    ``max_tokens``, only supports the default temperature, and rejects
    ``top_p`` entirely.
    """

    # Model-name prefixes (not exact names) with restricted parameter
    # support.  Prefix matching also covers family variants such as
    # "gpt-5-mini" or "gpt-5-nano", which share the same restrictions —
    # an exact-match list would miss them and the API call would fail.
    _MAX_COMPLETION_TOKENS_PREFIXES = ('gpt-5',)
    _FIXED_TEMPERATURE_PREFIXES = ('gpt-5',)
    _NO_TOP_P_PREFIXES = ('gpt-5',)

    def __init__(self, api_key: Optional[str] = None, model: Optional[str] = None):
        """
        Initialize the OpenAI service with API key and model.

        Args:
            api_key: OpenAI API key, defaults to settings.OPENAI_API_KEY
            model: OpenAI model to use, defaults to settings.OPENAI_MODEL
        """
        api_key = api_key or settings.OPENAI_API_KEY
        model = model or settings.OPENAI_MODEL
        super().__init__(api_key, model)
        self.client = OpenAI(api_key=self.api_key)

    def _build_api_params(self,
                          messages: List[Dict[str, str]],
                          temperature: float,
                          max_tokens: int,
                          top_p: float) -> Dict[str, Any]:
        """
        Build the keyword arguments for chat.completions.create,
        honoring per-model parameter restrictions.

        Args:
            messages: Chat messages in OpenAI role/content format
            temperature: Sampling temperature (omitted for fixed-temperature models)
            max_tokens: Token budget (sent as max_completion_tokens where required)
            top_p: Nucleus sampling parameter (omitted where unsupported)

        Returns:
            Dictionary of keyword arguments for the completions API call
        """
        params: Dict[str, Any] = {
            "model": self.model,
            "messages": messages,
        }

        # Only include top_p if the model family supports it
        if not self.model.startswith(self._NO_TOP_P_PREFIXES):
            params["top_p"] = top_p

        # Only include temperature if the model family supports custom values
        if not self.model.startswith(self._FIXED_TEMPERATURE_PREFIXES):
            params["temperature"] = temperature

        # Newer model families renamed max_tokens -> max_completion_tokens
        if self.model.startswith(self._MAX_COMPLETION_TOKENS_PREFIXES):
            params["max_completion_tokens"] = max_tokens
        else:
            params["max_tokens"] = max_tokens

        return params

    def get_completion(self,
                       prompt: str,
                       system_prompt: Optional[str] = None,
                       temperature: float = 0.7,
                       max_tokens: int = 4096,
                       top_p: float = 0.9) -> str:
        """
        Get a text completion from the OpenAI API.

        Args:
            prompt: The user prompt to send to the model
            system_prompt: Optional system prompt to set context
            temperature: Controls randomness (0-1), lower is more deterministic
            max_tokens: Maximum number of tokens to generate
            top_p: Nucleus sampling parameter

        Returns:
            The text response from the model

        Raises:
            Exception: Re-raises any error from the OpenAI client after logging.
        """
        messages: List[Dict[str, str]] = []

        # Add system prompt if provided
        if system_prompt:
            messages.append({"role": "system", "content": system_prompt})

        # Add user prompt
        messages.append({"role": "user", "content": prompt})

        try:
            api_params = self._build_api_params(messages, temperature, max_tokens, top_p)
            response = self.client.chat.completions.create(**api_params)
            return response.choices[0].message.content
        except Exception as e:
            # Lazy %-formatting so the message is only built if the record is emitted
            logger.error("Error calling OpenAI API: %s", e)
            raise

    def change_model(self, model_key: str) -> bool:
        """
        Change the model being used by the service.

        Args:
            model_key: Key from the OPENAI_MODELS dictionary in settings

        Returns:
            True if successful, False otherwise
        """
        models = getattr(settings, 'OPENAI_MODELS', {})
        if model_key in models:
            self.model = models[model_key]
            return True
        else:
            logger.warning("Model key '%s' not found in settings.OPENAI_MODELS", model_key)
            return False

    def get_available_models(self) -> Dict[str, str]:
        """
        Get a dictionary of available models.

        Returns:
            Dictionary with model keys and their corresponding model IDs
        """
        return getattr(settings, 'OPENAI_MODELS', {})