Files
Brancheneinstufung2/lead-engine/trading_twins_ingest.py

167 lines
5.6 KiB
Python

import html
import json
import logging
import os
import re
import sys
from datetime import datetime

import requests
from dotenv import load_dotenv
# Ensure we can import from root directory
sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))

# Import db functions
try:
    from db import insert_lead, init_db
except ImportError:
    # Fallback for direct execution (when the script is run from inside lead-engine/)
    sys.path.append(os.path.dirname(__file__))
    from db import insert_lead, init_db

# Configuration: Azure AD app credentials, loaded from the repo-root .env file
env_path = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', '.env'))
load_dotenv(dotenv_path=env_path, override=True)
CLIENT_ID = os.getenv("INFO_Application_ID")
TENANT_ID = os.getenv("INFO_Tenant_ID")
CLIENT_SECRET = os.getenv("INFO_Secret")
# Shared mailbox the Graph queries run against
USER_EMAIL = "info@robo-planet.de"

# Setup logging
logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')
logger = logging.getLogger(__name__)
def get_access_token():
    """Acquire an app-only Microsoft Graph access token.

    Uses the OAuth2 client-credentials flow with the module-level
    CLIENT_ID / TENANT_ID / CLIENT_SECRET loaded from .env.

    Returns:
        str | None: Bearer token, or None if the response carries no
        "access_token" field.

    Raises:
        requests.HTTPError: if the token endpoint returns an error status.
        requests.Timeout: if the endpoint does not respond within 30s.
    """
    url = f"https://login.microsoftonline.com/{TENANT_ID}/oauth2/v2.0/token"
    data = {
        "client_id": CLIENT_ID,
        "scope": "https://graph.microsoft.com/.default",
        "client_secret": CLIENT_SECRET,
        "grant_type": "client_credentials"
    }
    # Explicit timeout so a stalled token endpoint cannot hang the ingest job.
    response = requests.post(url, data=data, timeout=30)
    response.raise_for_status()
    return response.json().get("access_token")
def fetch_tradingtwins_emails(token, limit=200):
    """Fetch recent mailbox messages and keep only Tradingtwins leads.

    Graph API restriction: '$filter' with 'contains' on subject is often
    blocked, so we pull metadata + body for the newest `limit` messages
    and filter client-side on the subject line.

    Args:
        token: Bearer access token from get_access_token().
        limit: Maximum number of recent messages to request (default 200).

    Returns:
        list[dict]: Graph message objects whose subject contains the
        Tradingtwins pattern; empty list on API error.
    """
    url = f"https://graph.microsoft.com/v1.0/users/{USER_EMAIL}/messages"
    headers = {
        "Authorization": f"Bearer {token}",
        "Content-Type": "application/json"
    }
    params = {
        "$top": limit,
        "$select": "id,subject,receivedDateTime,body",
        "$orderby": "receivedDateTime desc"
    }
    # Explicit timeout so a stalled Graph call cannot block the whole run.
    response = requests.get(url, headers=headers, params=params, timeout=60)
    if response.status_code != 200:
        logger.error(f"Graph API Error: {response.status_code} - {response.text}")
        return []
    all_msgs = response.json().get("value", [])
    # Filter strictly for the subject pattern locally; subject might be None.
    filtered = [m for m in all_msgs if "Neue Anfrage zum Thema Roboter" in (m.get('subject') or '')]
    return filtered
def parse_tradingtwins_html(html_body):
    """
    Extract lead fields from the Tradingtwins HTML table structure.

    Pattern in the email body: <p ...>Label:</p>...<p ...>Value</p>

    Args:
        html_body: Raw HTML content of the notification email.

    Returns:
        dict: Parsed fields keyed per the db schema. Always contains
        'source_id' and 'id' (a synthetic timestamp-based id is generated
        when the email carries no Lead-ID).
    """
    data = {}
    # Map label names in HTML to our keys
    field_map = {
        'Firma': 'company',
        'Vorname': 'contact_first',  # Key fixed to match ingest.py logic
        'Nachname': 'contact_last',  # Key fixed to match ingest.py logic
        'E-Mail': 'email',
        'Rufnummer': 'phone',
        'Einsatzzweck': 'purpose',  # Specific field
        'Reinigungs-Fläche': 'area',  # Specific field
        'PLZ': 'zip',
        'Stadt': 'city',
        'Lead-ID': 'source_id'  # Mapped to DB column source_id
    }
    for label, key in field_map.items():
        pattern = fr'>\s*{re.escape(label)}:\s*</p>.*?<p[^>]*>(.*?)</p>'
        match = re.search(pattern, html_body, re.DOTALL | re.IGNORECASE)
        if match:
            raw_val = match.group(1).strip()
            # Strip nested tags, then decode HTML entities (&amp;, &uuml;, ...)
            # so the stored value is plain text, not raw HTML escapes.
            clean_val = re.sub(r'<[^>]+>', '', raw_val).strip()
            data[key] = html.unescape(clean_val)
    # Composite fields
    if data.get('contact_first') and data.get('contact_last'):
        data['contact'] = f"{data['contact_first']} {data['contact_last']}"
    # Ensure source_id is present and map to 'id' for db.py compatibility
    if not data.get('source_id'):
        data['source_id'] = f"tt_unknown_{int(datetime.now().timestamp())}"
    data['id'] = data['source_id']  # db.py expects 'id' for source_id column
    return data
def process_leads(auto_sync=True):
    """Ingest new Tradingtwins leads from the shared mailbox into the DB.

    Fetches recent notification emails via Microsoft Graph, parses each
    HTML body into a lead record, and inserts it via db.insert_lead.

    Args:
        auto_sync: When True (default) and at least one new lead was
            inserted, trigger enrich.run_sync() afterwards.

    Returns:
        int: Number of newly inserted leads, or 0 on any error.
    """
    init_db()
    new_count = 0
    try:
        token = get_access_token()
        emails = fetch_tradingtwins_emails(token)
        logger.info(f"Found {len(emails)} Tradingtwins emails.")
        for email in emails:
            body = email.get('body', {}).get('content', '')
            received_at_str = email.get('receivedDateTime')
            # Convert ISO string (Graph uses a trailing 'Z') to a datetime object
            received_at = None
            if received_at_str:
                try:
                    received_at = datetime.fromisoformat(received_at_str.replace('Z', '+00:00'))
                except ValueError:
                    # Unexpected timestamp format: keep None rather than abort the run.
                    pass
            lead_data = parse_tradingtwins_html(body)
            lead_data['raw_body'] = body
            lead_data['received_at'] = received_at
            # Fall back to the contact name when the company field is empty or '-'
            company_name = lead_data.get('company')
            if not company_name or company_name == '-':
                company_name = lead_data.get('contact')
                lead_data['company'] = company_name
            if not company_name:
                # No identifying name at all -> record is useless, skip it.
                continue
            lead_data['id'] = lead_data.get('source_id') or f"tt_{int(datetime.now().timestamp())}"
            if insert_lead(lead_data):
                logger.info(f" -> Ingested: {company_name}")
                new_count += 1
        if new_count > 0 and auto_sync:
            logger.info(f"Triggering auto-sync for {new_count} new leads...")
            # Imported lazily to avoid a circular import at module load time.
            from enrich import run_sync
            run_sync()
        return new_count
    except Exception as e:
        # Top-level boundary: log with traceback and report zero new leads.
        logger.exception(f"Error in process_leads: {e}")
        return 0
if __name__ == "__main__":
    # CLI entry point: run a one-off ingest and report how many leads landed.
    count = process_leads()
    print(f"Ingested {count} new leads.")