
from ai.services.factory import LLMServiceFactory
from ai.services.groq import GroqService
from ai.services.openai import OpenAIService


def run_admin_llm_inference(instance):
    """
    Sends the prompt to the selected LLM provider/model and saves the response.
    """
    try:
        # Pick the service for the selected provider.
        if instance.provider == "groq":
            service = GroqService(model=instance.model)
        elif instance.provider == "openai":
            service = OpenAIService(model=instance.model)
        else:
            raise ValueError(f"Unsupported provider: {instance.provider}")

        # Run the completion and store the result on the instance.
        result = service.get_completion(instance.prompt)
        instance.response = result
        instance.status = "completed"
    except Exception as e:
        # Record the failure so it is visible alongside the prompt.
        instance.response = str(e)
        instance.status = "error"

    instance.save()