"""Two-step Gemini prompt generator: infer the user's intent, then expand a
simple idea into a detailed, ready-to-use prompt for that intent."""

import os
import sys

import google.generativeai as genai
from dotenv import load_dotenv
from tenacity import retry, retry_if_not_exception_type, stop_after_attempt, wait_fixed
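
# Expected .env contents (placeholder values shown, not real credentials):
#   GOOGLE_API_KEY=your-google-api-key
#   GEMINI_MODEL=gemini-1.5-pro   # optional; this is also the fallback default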

def setup_generative_ai(api_key, model_name):
    """Configures the generative AI client and returns the model."""
    try:
        genai.configure(api_key=api_key)
        return genai.GenerativeModel(model_name)
    except Exception as e:
        print(f"Error configuring the AI client: {e}")
        sys.exit(1)

# Retry transient API failures up to 3 times, 2 seconds apart; reraise=True
# surfaces the original exception to the caller instead of tenacity.RetryError.
@retry(stop=stop_after_attempt(3), wait=wait_fixed(2), reraise=True)
def get_intent(model, user_input):
    """Calls the API to infer the user's intent, retrying on failure."""
    meta_prompt = """You are an expert at understanding user intent. Analyze the following user request and classify it into one of these categories: [Image Generation], [Text Summarization], [Code Generation], [General Question].

Respond with only the category name in brackets.

User Request: "{{user_input}}"
"""
    prompt_for_api = meta_prompt.replace("{{user_input}}", user_input)
    response = model.generate_content(prompt_for_api)
    return response.text.strip()

# Same retry policy as get_intent, but an unknown intent (ValueError) is not a
# transient failure, so it is excluded from retries and fails immediately.
@retry(stop=stop_after_attempt(3), wait=wait_fixed(2), reraise=True,
       retry=retry_if_not_exception_type(ValueError))
def generate_detailed_prompt(model, user_input, intent):
    """Generates the final detailed prompt based on the intent, retrying on API failure."""

    specialized_prompts = {
        "[Image Generation]": """You are a creative expert at writing prompts for AI image generators. Based on the user's simple description, create a single, detailed, rich paragraph for a prompt. This paragraph should include several descriptive sentences covering the subject, setting, lighting, mood, and artistic style. Do not just list keywords; weave them into a coherent paragraph.

User's simple description: "{{user_input}}"
""",
        "[Code Generation]": """You are an expert Python developer assistant. Based on the user's high-level request, generate a detailed prompt for a large language model (LLM) to create a Python script. The prompt should be comprehensive, including requirements for Google Sheets API, Gemini API, .env for API keys and email, and specifying the dashboard output to be in Japanese.

User's request: "{{user_input}}"

Return the detailed prompt as a paragraph ready for an LLM.
""",
        "[Text Summarization]": """You are an expert summarizer. The user wants a summary of the following text. Create a concise, easy-to-read paragraph that captures the main points of the text. Focus on key information and essential details, presenting them clearly and neutrally.

User's text: "{{user_input}}"
""",
        "[General Question]": """You are a helpful assistant. The user has a general question or request. Formulate a comprehensive and clear prompt for a large language model to answer this question or fulfill the request. The prompt should encourage the LLM to provide a detailed, well-structured, and informative response.

User's request: "{{user_input}}"
"""
    }

    prompt_template = specialized_prompts.get(intent)
    if not prompt_template:
        raise ValueError(f"No specialized prompt found for intent: '{intent}'")  # not retried; fails fast

    prompt_for_api = prompt_template.replace("{{user_input}}", user_input)
    response = model.generate_content(prompt_for_api)
    return response.text.strip()


def main():
    """Main function to run the prompt generator."""
    load_dotenv()
    api_key = os.getenv("GOOGLE_API_KEY")
    if not api_key:
        print("Error: GOOGLE_API_KEY not found in .env file. Please set it in your .env file.")
        sys.exit(1)

    # Get model name from .env, with a fallback default
    model_name = os.getenv("GEMINI_MODEL", "gemini-1.5-pro")
    
    print("\n--- Gemini-powered Prompt Generator ---")
    print(f"Using Gemini model: {model_name}")

    model = setup_generative_ai(api_key, model_name)

    user_input = input("Enter your simple prompt idea: ").strip()
    if not user_input:
        print("No input provided. Exiting.")
        sys.exit(0)

    try:
        # Step 1: Infer Intent
        print("\n[Step 1/2] Inferring intent...")
        intent = get_intent(model, user_input)
        print(f"  Intent detected: {intent}")

        # Step 2: Generate Detailed Prompt
        print("\n[Step 2/2] Generating detailed prompt...")
        detailed_prompt = generate_detailed_prompt(model, user_input, intent)
        
        print("\n--- Your Detailed Prompt Paragraph ---")
        print(detailed_prompt)
        print("------------------------------------")

    except ValueError as ve:  # e.g. the detected intent has no specialized prompt
        print(f"\nError: {ve}")
    except Exception as e:
        print(f"\nAn unexpected error occurred during prompt generation: {e}")

if __name__ == "__main__":
    main()
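
# Example interactive run (abridged; the input and model output below are
# hypothetical, and the script filename is illustrative):
#   $ python prompt_generator.py
#   Enter your simple prompt idea: a cat surfing at sunset
#   [Step 1/2] Inferring intent...
#     Intent detected: [Image Generation]
#   [Step 2/2] Generating detailed prompt...
#   --- Your Detailed Prompt Paragraph ---
#   <detailed paragraph produced by the model>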
