import os
import sys
import re
import logging
import requests
import json
from datetime import datetime
from dotenv import load_dotenv

# Ensure we can import from root directory
sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))

# Import db functions
try:
    from db import insert_lead, init_db
except ImportError:
    # Fallback for direct execution
    sys.path.append(os.path.dirname(__file__))
    from db import insert_lead, init_db

# Configuration
env_path = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', '.env'))
load_dotenv(dotenv_path=env_path, override=True)

CLIENT_ID = os.getenv("INFO_Application_ID")
TENANT_ID = os.getenv("INFO_Tenant_ID")
CLIENT_SECRET = os.getenv("INFO_Secret")
USER_EMAIL = "info@robo-planet.de"

# Setup logging
logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')
logger = logging.getLogger(__name__)


def get_access_token():
    """Acquire an app-only Microsoft Graph access token (client-credentials flow).

    Returns:
        str | None: The bearer token, or None if the response lacks one.

    Raises:
        requests.HTTPError: If the token endpoint returns a non-2xx status.
    """
    url = f"https://login.microsoftonline.com/{TENANT_ID}/oauth2/v2.0/token"
    data = {
        "client_id": CLIENT_ID,
        "scope": "https://graph.microsoft.com/.default",
        "client_secret": CLIENT_SECRET,
        "grant_type": "client_credentials",
    }
    # Timeout added so a hung token endpoint cannot block the ingest run forever.
    response = requests.post(url, data=data, timeout=30)
    response.raise_for_status()
    return response.json().get("access_token")


def fetch_tradingtwins_emails(token, limit=200):
    """Fetch the newest messages for USER_EMAIL and keep only Tradingtwins requests.

    Graph API restriction: 'contains' on subject is often blocked.
    Strategy: Fetch metadata + body for the last `limit` messages and
    filter client-side on the known subject phrase.

    Args:
        token: Bearer token from get_access_token().
        limit: Maximum number of recent messages to page through ($top).

    Returns:
        list[dict]: Graph message objects whose subject contains the
        Tradingtwins marker phrase. Empty list on any API error.
    """
    url = f"https://graph.microsoft.com/v1.0/users/{USER_EMAIL}/messages"
    headers = {
        "Authorization": f"Bearer {token}",
        "Content-Type": "application/json",
    }
    params = {
        "$top": limit,
        "$select": "id,subject,receivedDateTime,body",
        "$orderby": "receivedDateTime desc",
    }
    response = requests.get(url, headers=headers, params=params, timeout=30)
    if response.status_code != 200:
        logger.error(f"Graph API Error: {response.status_code} - {response.text}")
        return []
    all_msgs = response.json().get("value", [])
    # Filter strictly for the subject pattern locally.
    # Handle case where subject might be None.
    filtered = [
        m for m in all_msgs
        if "Neue Anfrage zum Thema Roboter" in (m.get('subject') or '')
    ]
    return filtered


def parse_tradingtwins_html(html_body):
    """Extract lead data from the Tradingtwins HTML table structure.

    Expected pattern per field: a label cell followed by a value cell,
    roughly ``<td>Label:</td> ... <td>Value</td>``.

    Args:
        html_body: Raw HTML body content of the notification email.

    Returns:
        dict: Parsed lead fields. Always contains 'source_id' and 'id'
        (synthesized from the current timestamp when no Lead-ID was found).
    """
    data = {}
    # Map label names in HTML to our keys
    field_map = {
        'Firma': 'company',
        'Vorname': 'contact_first',     # Key fixed to match ingest.py logic
        'Nachname': 'contact_last',     # Key fixed to match ingest.py logic
        'E-Mail': 'email',
        'Rufnummer': 'phone',
        'Einsatzzweck': 'purpose',      # Specific field
        'Reinigungs-Fläche': 'area',    # Specific field
        'PLZ': 'zip',
        'Stadt': 'city',
        'Lead-ID': 'source_id',         # Mapped to DB column source_id
    }
    for label, key in field_map.items():
        # Label cell, then capture the contents of the next table cell.
        # NOTE(review): pattern reconstructed — the original source was
        # corrupted by tag stripping; verify against a sample email.
        pattern = fr'>\s*{re.escape(label)}:\s*</td>.*?<td[^>]*>(.*?)</td>'
        match = re.search(pattern, html_body, re.DOTALL | re.IGNORECASE)
        if match:
            raw_val = match.group(1).strip()
            # Strip any residual inline tags from the captured value.
            clean_val = re.sub(r'<[^>]+>', '', raw_val).strip()
            data[key] = clean_val

    # Composite fields
    if data.get('contact_first') and data.get('contact_last'):
        data['contact'] = f"{data['contact_first']} {data['contact_last']}"

    # Ensure source_id is present and map to 'id' for db.py compatibility
    if not data.get('source_id'):
        data['source_id'] = f"tt_unknown_{int(datetime.now().timestamp())}"
    data['id'] = data['source_id']  # db.py expects 'id' for source_id column
    return data


def process_leads():
    """Fetch, parse, and persist new Tradingtwins leads.

    Returns:
        int: Number of leads newly inserted into the DB (0 on error).
    """
    init_db()
    new_count = 0
    try:
        token = get_access_token()
        emails = fetch_tradingtwins_emails(token)
        logger.info(f"Found {len(emails)} Tradingtwins emails.")

        for email in emails:
            body = email.get('body', {}).get('content', '')
            lead_data = parse_tradingtwins_html(body)
            # Add raw body for reference
            lead_data['raw_body'] = body

            # Fall back to the contact name when the company cell is
            # empty or a placeholder dash.
            company_name = lead_data.get('company')
            if not company_name or company_name == '-':
                company_name = lead_data.get('contact')
                lead_data['company'] = company_name

            if not company_name:
                logger.warning(f"Skipping email {email['id']}: No company or contact name found.")
                continue

            logger.info(f"Ingesting Lead: {company_name} (ID: {lead_data.get('id')})")

            # Save to local DB (status=new); insert_lead is falsy on duplicates.
            if insert_lead(lead_data):
                logger.info(f" -> Successfully saved to DB.")
                new_count += 1
            else:
                logger.info(f" -> Lead already exists (skipped).")

        return new_count
    except Exception as e:
        # logger.exception preserves the traceback for diagnosis.
        logger.exception(f"Error in process_leads: {e}")
        return 0


if __name__ == "__main__":
    count = process_leads()
    print(f"Ingested {count} new leads.")