from ai.services.groq import GroqService
from ai.services.openai import OpenAIService

def run_admin_llm_inference(instance):
    """
    Send the prompt to the selected LLM provider/model and save the response.

    On failure, the exception message is stored on the instance and its
    status is set to 'error'; the instance is saved either way.
    """
    try:
        # Instantiate the service for the selected provider; an unknown
        # provider raises and is recorded via the error path below.
        if instance.provider == 'groq':
            service = GroqService(model=instance.model)
        elif instance.provider == 'openai':
            service = OpenAIService(model=instance.model)
        else:
            raise ValueError(f"Unsupported provider: {instance.provider}")
        
        # Get completion from the LLM
        result = service.get_completion(instance.prompt)
        instance.response = result
        instance.status = 'completed'
    except Exception as e:
        # Surface the failure to the admin by storing the error message
        # in place of a model response.
        instance.response = str(e)
        instance.status = 'error'
    # Persist the outcome whether the call succeeded or failed.
    instance.save()
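
# Usage sketch (assumes a Django-style model instance exposing `provider`,
# `model`, `prompt`, `response`, and `status` fields; the model name
# `AdminLLMInference` and the field values below are hypothetical):
#
#     inference = AdminLLMInference.objects.create(
#         provider='groq',
#         model='llama-3.1-8b-instant',
#         prompt='Summarize this ticket.',
#     )
#     run_admin_llm_inference(inference)
#     print(inference.status, inference.response)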