import requests
import os
import time
import argparse
import sys
import logging

# Add the backend directory to the Python path for relative imports to work
sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))

# Module-level logger. Previously this was only created inside the
# __main__ guard, so importing this module and calling trigger_analysis()
# directly raised NameError on the logger.debug line.
logger = logging.getLogger(__name__)


# --- Configuration ---
def load_env_manual(path):
    """Load KEY=VALUE pairs from a .env file into os.environ.

    Blank lines and '#' comments are skipped. Existing environment
    variables are never overwritten (setdefault), so real env vars win
    over .env values. A missing file is ignored silently for cleaner
    output in the container.
    """
    if not os.path.exists(path):
        return
    with open(path) as f:
        for line in f:
            line = line.strip()
            if line and not line.startswith('#') and '=' in line:
                key, val = line.split('=', 1)
                os.environ.setdefault(key.strip(), val.strip())


# Load .env (assuming it's in /app) - this needs to be run from /app or adjusted
# For docker-compose exec from project root, /app is the container's WORKDIR
load_env_manual('/app/.env')

API_USER = os.getenv("API_USER")
API_PASS = os.getenv("API_PASSWORD")

# When run INSIDE the container, the service is reachable via localhost
CE_URL = "http://localhost:8000"
ANALYZE_ENDPOINT = f"{CE_URL}/api/enrich/analyze"


def trigger_analysis(company_id: int) -> bool:
    """POST a real analysis request for *company_id* to the enrich API.

    Returns True when the server accepts the task (HTTP 200 with a JSON
    body whose "status" is "queued"); False on any HTTP error, a
    non-JSON 200 body, missing credentials, or a connection failure.
    """
    print("=" * 60)
    print(f"🚀 Triggering REAL analysis for Company ID: {company_id}")
    print("=" * 60)

    # Fail fast with a clear message instead of sending a broken
    # basic-auth tuple containing None when credentials never loaded.
    if not API_USER or not API_PASS:
        print(" ❌ FATAL: API_USER / API_PASSWORD not set (check /app/.env)")
        return False

    payload = {"company_id": company_id}
    try:
        # Debug only; lazy %-formatting and never logs the password.
        logger.debug("API Call to %s with user %s", ANALYZE_ENDPOINT, API_USER)
        response = requests.post(
            ANALYZE_ENDPOINT,
            json=payload,
            auth=(API_USER, API_PASS),
            timeout=30,  # generous: the server only queues the task
        )

        # Guard response.json(): a 200 with a non-JSON body (e.g. a
        # proxy error page) previously raised an unhandled exception.
        queued = False
        if response.status_code == 200:
            try:
                queued = response.json().get("status") == "queued"
            except ValueError:
                queued = False

        if queued:
            print(" ✅ SUCCESS: Analysis task has been queued on the server.")
            print(" The result will be available in the database and UI shortly.")
            return True
        else:
            print(f" ❌ FAILURE: Server responded with status {response.status_code}")
            print(f" Response: {response.text}")
            return False
    except requests.exceptions.RequestException as e:
        print(f" ❌ FATAL: Could not connect to the server: {e}")
        return False


if __name__ == "__main__":
    # Basic logging config for clearer output when run as a script.
    logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')

    parser = argparse.ArgumentParser(description="Trigger Company Explorer Analysis Task")
    parser.add_argument("--company-id", type=int, required=True, help="ID of the company to analyze")
    args = parser.parse_args()

    trigger_analysis(args.company_id)