import os
import json
import asyncio
from dotenv import load_dotenv
from fastapi import FastAPI, HTTPException
from fastapi.middleware.cors import CORSMiddleware
from fastapi.staticfiles import StaticFiles
from pydantic import BaseModel
from typing import List, Dict, Any, Optional

# --- DUAL SDK IMPORTS (Taken from gtm_architect) ---
# Probe for both Gemini SDK generations; endpoints pick whichever loaded.
HAS_NEW_GENAI = False
HAS_OLD_GENAI = False
try:
    from google import genai
    from google.genai import types
    HAS_NEW_GENAI = True
    print("✅ SUCCESS: Loaded 'google-genai' SDK.")
except ImportError:
    print("⚠️ WARNING: 'google-genai' not found.")
try:
    import google.generativeai as old_genai
    HAS_OLD_GENAI = True
    print("✅ SUCCESS: Loaded legacy 'google-generativeai' SDK.")
except ImportError:
    print("⚠️ WARNING: Legacy 'google-generativeai' not found.")

# Load environment variables; fall back to a mounted key file (container use).
load_dotenv()
API_KEY = os.getenv("GEMINI_API_KEY")
if not API_KEY:
    key_file_path = os.getenv("GEMINI_API_KEY_FILE", "/app/gemini_api_key.txt")
    if os.path.exists(key_file_path):
        with open(key_file_path, 'r') as f:
            API_KEY = f.read().strip()
if not API_KEY:
    raise ValueError("GEMINI_API_KEY environment variable or file not set")

# Configure SDKs
if HAS_OLD_GENAI:
    old_genai.configure(api_key=API_KEY)
if HAS_NEW_GENAI:
    # No global client needed for new SDK, init on demand
    pass

app = FastAPI()
app.add_middleware(CORSMiddleware, allow_origins=["*"], allow_credentials=True, allow_methods=["*"], allow_headers=["*"])


def parse_json_response(text: str) -> Any:
    """Parse a model reply that may be wrapped in markdown ``` fences.

    FIX: the previous implementation blanket-replaced '```json' / '```'
    anywhere in the text, which corrupted valid JSON whose string values
    legitimately contain backtick fences. Fences are now stripped only at
    the leading/trailing edges of the payload.

    Returns the first element when the model answers with a non-empty
    top-level list; otherwise the parsed value as-is. On parse failure,
    logs and returns an error dict instead of raising, so callers can
    surface the raw text.
    """
    try:
        cleaned_text = text.strip()
        if cleaned_text.startswith('```'):
            cleaned_text = cleaned_text[3:]
            # Drop an optional language tag right after the opening fence.
            if cleaned_text.startswith('json'):
                cleaned_text = cleaned_text[4:]
        if cleaned_text.endswith('```'):
            cleaned_text = cleaned_text[:-3]
        result = json.loads(cleaned_text.strip())
        return result[0] if isinstance(result, list) and result else result
    except Exception as e:
        print(f"CRITICAL: Failed to parse JSON: {e}\nRaw text: {text}")
        return {"error": "JSON parsing failed", "raw_text": text}


# --- Schemas & Models (omitted for brevity) ---
# JSON-Schema fragments reused by the endpoint response schemas below.
evidence_schema = {"type": "object", "properties": {"url": {"type": "string"}, "snippet": {"type": "string"}}, "required": ['url', 'snippet']}
product_schema = {"type": "object", "properties": {"name": {"type": "string"}, "purpose": {"type": "string"}, "evidence": {"type": "array", "items": evidence_schema}}, "required": ['name', 'purpose', 'evidence']}
industry_schema = {"type": "object", "properties": {"name": {"type": "string"}, "evidence": {"type": "array", "items": evidence_schema}}, "required": ['name', 'evidence']}


class ProductDetailsRequest(BaseModel):
    name: str
    url: str
    language: str


class FetchStep1DataRequest(BaseModel):
    start_url: str
    language: str

# ... all other Pydantic models remain the same


# --- ROBUST API CALLER (inspired by helpers.py) ---
async def call_gemini_robustly(prompt: str, schema: dict) -> Any:
    """Call Gemini with a JSON response schema, trying both SDK generations.

    Prefers the legacy 'google-generativeai' SDK (proven stable in this
    environment); on failure falls back to the modern 'google-genai' client.

    Raises:
        HTTPException(500): when the only available SDK fails, or when
        neither SDK could be imported at startup.
    """
    if HAS_OLD_GENAI:
        try:
            model = old_genai.GenerativeModel(
                'gemini-2.0-flash',  # This model is stable and available
                generation_config={
                    "response_mime_type": "application/json",
                    "response_schema": schema,
                },
            )
            response = await model.generate_content_async(prompt)
            return parse_json_response(response.text)
        except Exception as e:
            # FIX: the original log string contained a raw newline inside a
            # single-quoted f-string (a syntax error as written).
            print(f"DEBUG: Legacy SDK failed: {e}. Falling back to modern SDK.")
            if not HAS_NEW_GENAI:
                raise HTTPException(status_code=500, detail=f"Legacy Gemini API Error: {str(e)}")

    # Fallback to modern SDK
    if HAS_NEW_GENAI:
        try:
            client = genai.Client(api_key=API_KEY)
            response = client.models.generate_content(
                model='gemini-1.5-flash',  # Use a modern model here
                contents=prompt,
                config=types.GenerateContentConfig(
                    response_mime_type='application/json',
                    response_schema=schema,
                ),
            )
            return parse_json_response(response.text)
        except Exception as e:
            raise HTTPException(status_code=500, detail=f"Modern Gemini API Error: {str(e)}")

    raise HTTPException(status_code=500, detail="No Gemini SDK available.")


# --- Endpoints ---
@app.post("/api/fetchStep1Data")
async def fetch_step1_data(request: FetchStep1DataRequest):
    """Identify main products/solutions and target industries for a site."""
    prompt = r"""Analysiere die Webseite {url} und identifiziere die Hauptprodukte/Lösungen und deren Zielbranchen. Antworte ausschließlich im JSON-Format."""
    schema = {"type": "object", "properties": {"products": {"type": "array", "items": product_schema}, "target_industries": {"type": "array", "items": industry_schema}}, "required": ['products', 'target_industries']}
    data = await call_gemini_robustly(prompt.format(url=request.start_url), schema)
    # FIX: parse_json_response may return a non-dict (e.g. the model answered
    # with a bare value); the original indexed it unconditionally and crashed.
    if not isinstance(data, dict):
        data = {}
    data.setdefault('products', [])
    data.setdefault('target_industries', [])
    return data

# All other endpoints would be refactored to use `await call_gemini_robustly(prompt, schema)`
# I will omit them here for brevity but the principle is the same.


# --- Boilerplate for other endpoints ---
class FetchStep2DataRequest(BaseModel):
    products: List[Any]
    industries: List[Any]
    language: str


@app.post("/api/fetchStep2Data")
async def fetch_step2_data(request: FetchStep2DataRequest):
    """Derive 10-25 competitive-research keywords from the detected products."""
    # FIX: hardened p['name'] (KeyError -> opaque 500) to a tolerant lookup;
    # entries are dicts produced by fetchStep1Data — TODO confirm with caller.
    p_sum = ', '.join(p.get('name', '') for p in request.products)
    prompt = r"""Leite aus diesen Produkten 10-25 Keywords für die Wettbewerbsrecherche ab: {products}. 
Antworte im JSON-Format."""
    schema = {"type": "object", "properties": {"keywords": {"type": "array", "items": {"type": "object", "properties": {"term": {"type": "string"}, "rationale": {"type": "string"}}, "required": ['term', 'rationale']}}}, "required": ['keywords']}
    return await call_gemini_robustly(prompt.format(products=p_sum), schema)

# ... and so on for all other endpoints.


# Health check MUST be registered before the "/" static mount: Starlette
# resolves in registration order, so a catch-all mount added first would
# shadow any route defined after it (the original /api/health was
# unreachable whenever dist/ existed).
@app.get("/api/health")
async def health_check():
    """Liveness probe; also reports which Gemini SDKs loaded."""
    return {"status": "ok", "sdk_new": HAS_NEW_GENAI, "sdk_old": HAS_OLD_GENAI}


# Static Files — mounted last so it cannot shadow the API routes above.
dist_path = os.path.join(os.getcwd(), "dist")
if os.path.exists(dist_path):
    print(f"DEBUG: Mounting static files from {dist_path}")
    app.mount("/", StaticFiles(directory=dist_path, html=True), name="static")

if __name__ == "__main__":
    import uvicorn
    uvicorn.run(app, host="0.0.0.0", port=8000)