"""GTM Lead Engine Orchestrator: syncs local data, then enriches and processes leads."""
import argparse
import os
import subprocess
import sys
from datetime import datetime, timezone
# --- Setup Paths ---
# All paths are derived from this file's directory so the script works
# regardless of the current working directory it is launched from.
SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))
SCRIPTS_SUBDIR = os.path.join(SCRIPT_DIR, "scripts")
LOG_DIR = os.path.join(SCRIPT_DIR, "logs")
# Append-only CSV-style log consumed by the dashboard (see log_throughput).
THROUGHPUT_LOG = os.path.join(LOG_DIR, "throughput.log")

# Add scripts subdir to path to allow imports
sys.path.append(SCRIPTS_SUBDIR)

# TODO: Import other modules once they are ready
# from company_explorer_connector import handle_company_workflow
# from generate_sniper_copy import generate_copy
def setup_environment():
    """Create the runtime directory layout (currently just the log dir).

    Safe to call repeatedly: existing directories are left untouched.
    """
    os.makedirs(LOG_DIR, exist_ok=True)
def log_throughput(identifier):
    """Append a successful-processing record for *identifier* to the throughput log.

    Each record is one line of the form ``<ISO-8601 UTC timestamp>,<identifier>``
    and is read by the dashboard.

    Args:
        identifier: Company name or SuperOffice ID that was processed.
            NOTE(review): a comma inside *identifier* would break the
            comma-separated format — confirm identifiers never contain commas.
    """
    # datetime.utcnow() is deprecated (Python 3.12) and returns a naive
    # datetime; use an explicitly timezone-aware UTC timestamp instead.
    # (Timestamps now carry a "+00:00" suffix, making the UTC-ness explicit.)
    timestamp = datetime.now(timezone.utc).isoformat()
    with open(THROUGHPUT_LOG, "a") as f:
        f.write(f"{timestamp},{identifier}\n")
    print(f"📈 Logged successful processing for '{identifier}' for dashboard.")
def run_sync():
    """Run the database sync script so local Company Explorer data is fresh.

    Returns:
        bool: True when the sync script ran and exited 0; False when the
        script is missing or exited non-zero (stderr is printed in that case).
    """
    print("\n--- [Step 1: Syncing local Company Explorer database] ---")
    sync_script_path = os.path.join(SCRIPTS_SUBDIR, "sync_ce_to_sqlite.py")

    if not os.path.exists(sync_script_path):
        print(f"❌ ERROR: Sync script not found at {sync_script_path}")
        return False

    # Use the current interpreter instead of a hard-coded "python3" so the
    # sync runs inside the same virtualenv/installation as the orchestrator
    # (and works on platforms where "python3" is not on PATH).
    result = subprocess.run(
        [sys.executable, sync_script_path],
        capture_output=True,
        text=True,
        check=False,
    )

    if result.returncode != 0:
        print("❌ ERROR: Database sync failed.")
        print(result.stderr)
        return False

    print("✅ Sync successful.")
    return True
def process_lead(identifier):
    """Run the full pipeline for a single lead: enrich, generate copy, log.

    Steps 2 and 3 are placeholders until the connector and copy-generation
    modules are wired in (see the TODO imports at the top of the file).
    """
    print(f"\n======= PROCESSING LEAD: {identifier} =======")

    # Step 2: company enrichment (placeholder until the connector lands).
    print("\n--- [Step 2: Check/Enrich Company Data] ---")
    # ce_data = handle_company_workflow(identifier) # Example of direct import
    # if not ce_data or 'error' in ce_data:
    #     print(f"❌ Failed to enrich '{identifier}'. Aborting.")
    #     return
    print("... (Placeholder for Enrichment Logic)")
    print("✅ Enrichment complete.")

    # Step 3: personalized outreach ("sniper") copy, also stubbed for now.
    print("\n--- [Step 3: Generate Sniper Copy] ---")
    # sniper_copy = generate_copy(ce_data['data']['id'])
    # print("\nGENERATED COPY:\n", sniper_copy)
    print("... (Placeholder for Sniper Copy Generation)")
    print("✅ Copy generation complete.")

    # Step 4: record the success so the dashboard picks it up.
    print("\n--- [Step 4: Finalizing] ---")
    log_throughput(identifier)
    print(f"✅ Successfully processed lead '{identifier}'.")
    print("============================================")
def main():
    """CLI entry point: parse arguments, sync local data, then process each lead."""
    parser = argparse.ArgumentParser(description="GTM Lead Engine Orchestrator.")
    parser.add_argument("leads", nargs='+', help="One or more company names or SuperOffice IDs to process.")
    parser.add_argument("--skip-sync", action="store_true", help="Skip the initial database sync for faster iteration.")
    args = parser.parse_args()

    print("🚀 GTM Lead Engine Orchestrator started.")

    setup_environment()

    if args.skip_sync:
        print("\n--- [Skipping Step 1: Database Sync] ---")
    elif not run_sync():
        # A stale/missing local database would poison every lead; bail out.
        sys.exit(1)

    for lead in args.leads:
        process_lead(lead)

    print("\n🎉 Orchestration complete for all leads. 🎉")
# Run the orchestrator only when executed as a script, not when imported.
if __name__ == "__main__":
    main()