feat(connector): [31188f42] Finalize production optimizations, filtering, and dashboard enhancements
This commit is contained in:
@@ -1 +1 @@
|
|||||||
{"task_id": "31188f42-8544-8074-bad3-d3e1b9b4051f", "token": "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", "readme_path": "connector-superoffice/README.md", "session_start_time": "2026-03-04T18:41:33.912605"}
|
{"task_id": "31188f42-8544-8074-bad3-d3e1b9b4051f", "token": "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", "readme_path": "connector-superoffice/README.md", "session_start_time": "2026-03-05T06:02:30.481235"}
|
||||||
31
clear_zombies.py
Normal file
31
clear_zombies.py
Normal file
@@ -0,0 +1,31 @@
|
|||||||
|
import sqlite3
|
||||||
|
from datetime import datetime, timedelta
|
||||||
|
|
||||||
|
DB_PATH = "/app/connector_queue.db"


def clear_all_zombies(db_path: str = DB_PATH, stale_minutes: int = 10) -> None:
    """Fail queue jobs that have been stuck in PROCESSING for too long.

    A job left in status PROCESSING beyond *stale_minutes* is assumed to
    belong to a dead worker ("zombie") and is marked FAILED so it no longer
    blocks the queue.

    Args:
        db_path: Path to the SQLite queue database. Defaults to the
            production queue at DB_PATH.
        stale_minutes: Age in minutes after which a PROCESSING job is
            considered dead. Defaults to 10, matching the original behavior.
    """
    # Local import keeps the module-level import surface unchanged.
    from datetime import timezone

    print("🧹 Cleaning up Zombie Jobs (PROCESSING for too long)...")
    # datetime.utcnow() is deprecated since Python 3.12; use an aware UTC
    # "now" and format it to match SQLite's CURRENT_TIMESTAMP (UTC, no tz).
    now_utc = datetime.now(timezone.utc)
    threshold = (now_utc - timedelta(minutes=stale_minutes)).strftime('%Y-%m-%d %H:%M:%S')

    with sqlite3.connect(db_path) as conn:
        cursor = conn.cursor()

        # 1. Identify zombies (listed first so the operator sees what is cleared).
        cursor.execute(
            "SELECT id, updated_at FROM jobs WHERE status = 'PROCESSING' AND updated_at < ?",
            (threshold,),
        )
        zombies = cursor.fetchall()

        if not zombies:
            print("✅ No zombies found.")
            return

        print(f"🕵️ Found {len(zombies)} zombie jobs.")
        for zid, updated in zombies:
            print(f" - Zombie ID {zid} (Last active: {updated})")

        # 2. Kill them — same WHERE clause as the SELECT, so only the rows
        # reported above are touched (modulo concurrent writers).
        cursor.execute(
            "UPDATE jobs SET status = 'FAILED', error_msg = 'Zombie cleared: Process timed out' WHERE status = 'PROCESSING' AND updated_at < ?",
            (threshold,),
        )
        print(f"✅ Successfully cleared {cursor.rowcount} zombie(s).")


if __name__ == "__main__":
    clear_all_zombies()
|
||||||
@@ -7,18 +7,20 @@ COPY frontend/ ./
|
|||||||
RUN grep "ROBOTICS EDITION" src/App.tsx || echo "Version string not found in App.tsx"
|
RUN grep "ROBOTICS EDITION" src/App.tsx || echo "Version string not found in App.tsx"
|
||||||
RUN npm run build
|
RUN npm run build
|
||||||
|
|
||||||
# --- STAGE 2: Backend & Runtime ---
|
# --- STAGE 2: Backend Builder ---
|
||||||
|
FROM python:3.11-slim AS backend-builder
|
||||||
|
WORKDIR /app
|
||||||
|
RUN apt-get update && apt-get install -y build-essential && rm -rf /var/lib/apt/lists/*
|
||||||
|
COPY requirements.txt .
|
||||||
|
RUN pip install --user --no-cache-dir -r requirements.txt
|
||||||
|
|
||||||
|
# --- STAGE 3: Final Runtime ---
|
||||||
FROM python:3.11-slim
|
FROM python:3.11-slim
|
||||||
WORKDIR /app
|
WORKDIR /app
|
||||||
|
|
||||||
# System Dependencies
|
# Copy only installed packages from backend-builder
|
||||||
RUN apt-get update && apt-get install -y \
|
COPY --from=backend-builder /root/.local /root/.local
|
||||||
build-essential \
|
ENV PATH=/root/.local/bin:$PATH
|
||||||
&& rm -rf /var/lib/apt/lists/*
|
|
||||||
|
|
||||||
# Copy Requirements & Install
|
|
||||||
COPY requirements.txt .
|
|
||||||
RUN pip install --no-cache-dir -r requirements.txt
|
|
||||||
|
|
||||||
# Copy Built Frontend from Stage 1 (To a safe location outside /app)
|
# Copy Built Frontend from Stage 1 (To a safe location outside /app)
|
||||||
COPY --from=frontend-builder /build/dist /frontend_static
|
COPY --from=frontend-builder /build/dist /frontend_static
|
||||||
|
|||||||
@@ -1,15 +1,26 @@
|
|||||||
FROM python:3.11-slim
|
# --- STAGE 1: Builder ---
|
||||||
|
FROM python:3.11-slim AS builder
|
||||||
|
|
||||||
WORKDIR /app
|
WORKDIR /app
|
||||||
|
|
||||||
# Install system dependencies
|
# Install system dependencies needed for building C-extensions
|
||||||
RUN apt-get update && apt-get install -y --no-install-recommends \
|
RUN apt-get update && apt-get install -y --no-install-recommends \
|
||||||
build-essential \
|
build-essential \
|
||||||
&& rm -rf /var/lib/apt/lists/*
|
&& rm -rf /var/lib/apt/lists/*
|
||||||
|
|
||||||
# Install dependencies
|
# Install dependencies into a local directory
|
||||||
COPY requirements.txt .
|
COPY requirements.txt .
|
||||||
RUN pip install --no-cache-dir -r requirements.txt
|
RUN pip install --user --no-cache-dir -r requirements.txt
|
||||||
|
|
||||||
|
# --- STAGE 2: Final Runtime ---
|
||||||
|
FROM python:3.11-slim
|
||||||
|
|
||||||
|
WORKDIR /app
|
||||||
|
|
||||||
|
# Copy only the installed packages from builder
|
||||||
|
COPY --from=builder /root/.local /root/.local
|
||||||
|
# Update PATH to include the user-installed packages
|
||||||
|
ENV PATH=/root/.local/bin:$PATH
|
||||||
|
|
||||||
# Copy source code
|
# Copy source code
|
||||||
COPY . .
|
COPY . .
|
||||||
|
|||||||
@@ -48,6 +48,19 @@ class Settings:
|
|||||||
self.UDF_LAST_UPDATE = os.getenv("UDF_LAST_UPDATE", "SuperOffice:85")
|
self.UDF_LAST_UPDATE = os.getenv("UDF_LAST_UPDATE", "SuperOffice:85")
|
||||||
self.UDF_LAST_OUTREACH = os.getenv("UDF_LAST_OUTREACH", "SuperOffice:88")
|
self.UDF_LAST_OUTREACH = os.getenv("UDF_LAST_OUTREACH", "SuperOffice:88")
|
||||||
|
|
||||||
|
# --- User Whitelist (Roboplanet Associates) ---
|
||||||
|
# Includes both Numerical IDs and Shortnames for robustness
|
||||||
|
self.ROBOPLANET_WHITELIST = {
|
||||||
|
# IDs
|
||||||
|
485, 454, 487, 515, 469, 528, 512, 465, 486, 493, 468, 476, 455, 483,
|
||||||
|
492, 523, 470, 457, 498, 491, 464, 525, 527, 496, 490, 497, 456, 479,
|
||||||
|
# Shortnames
|
||||||
|
"RAAH", "RIAK", "RABA", "RJBU", "RPDU", "RCGO", "RBHA", "RAHE", "RPHO",
|
||||||
|
"RSHO", "RMJO", "DKE", "RAKI", "RSKO", "RMKR", "RSLU", "REME", "RNSL",
|
||||||
|
"RAPF", "ROBO", "RBRU", "RSSC", "RBSC", "RASC", "RKAB", "RDSE", "RSSH",
|
||||||
|
"RJST", "JUTH", "RSWA", "RCWE", "RJZH", "EVZ"
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
# Global instance
|
# Global instance
|
||||||
settings = Settings()
|
settings = Settings()
|
||||||
@@ -16,6 +16,7 @@ class JobQueue:
|
|||||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||||
event_type TEXT,
|
event_type TEXT,
|
||||||
payload TEXT,
|
payload TEXT,
|
||||||
|
entity_name TEXT,
|
||||||
status TEXT DEFAULT 'PENDING',
|
status TEXT DEFAULT 'PENDING',
|
||||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
||||||
updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
||||||
@@ -26,8 +27,15 @@ class JobQueue:
|
|||||||
# Migration for existing DBs
|
# Migration for existing DBs
|
||||||
try:
|
try:
|
||||||
conn.execute("ALTER TABLE jobs ADD COLUMN next_try_at TIMESTAMP")
|
conn.execute("ALTER TABLE jobs ADD COLUMN next_try_at TIMESTAMP")
|
||||||
except sqlite3.OperationalError:
|
except sqlite3.OperationalError: pass
|
||||||
pass
|
|
||||||
|
try:
|
||||||
|
conn.execute("ALTER TABLE jobs ADD COLUMN entity_name TEXT")
|
||||||
|
except sqlite3.OperationalError: pass
|
||||||
|
|
||||||
|
try:
|
||||||
|
conn.execute("ALTER TABLE jobs ADD COLUMN associate_name TEXT")
|
||||||
|
except sqlite3.OperationalError: pass
|
||||||
|
|
||||||
def add_job(self, event_type: str, payload: dict):
|
def add_job(self, event_type: str, payload: dict):
|
||||||
with sqlite3.connect(DB_PATH) as conn:
|
with sqlite3.connect(DB_PATH) as conn:
|
||||||
@@ -36,6 +44,19 @@ class JobQueue:
|
|||||||
(event_type, json.dumps(payload), 'PENDING')
|
(event_type, json.dumps(payload), 'PENDING')
|
||||||
)
|
)
|
||||||
|
|
||||||
|
def update_entity_name(self, job_id, name, associate_name=None):
    """Attach a resolved display name (and optionally the owning associate)
    to an existing job row, bumping updated_at."""
    if associate_name:
        sql = "UPDATE jobs SET entity_name = ?, associate_name = ?, updated_at = datetime('now') WHERE id = ?"
        params = (str(name), str(associate_name), job_id)
    else:
        sql = "UPDATE jobs SET entity_name = ?, updated_at = datetime('now') WHERE id = ?"
        params = (str(name), job_id)
    with sqlite3.connect(DB_PATH) as conn:
        conn.execute(sql, params)
|
||||||
|
|
||||||
def get_next_job(self):
|
def get_next_job(self):
|
||||||
"""
|
"""
|
||||||
Atomically fetches the next pending job where next_try_at is reached.
|
Atomically fetches the next pending job where next_try_at is reached.
|
||||||
@@ -127,7 +148,7 @@ class JobQueue:
|
|||||||
conn.row_factory = sqlite3.Row
|
conn.row_factory = sqlite3.Row
|
||||||
cursor = conn.cursor()
|
cursor = conn.cursor()
|
||||||
cursor.execute("""
|
cursor.execute("""
|
||||||
SELECT id, event_type, status, created_at, updated_at, error_msg, payload
|
SELECT id, event_type, status, created_at, updated_at, error_msg, payload, entity_name, associate_name
|
||||||
FROM jobs
|
FROM jobs
|
||||||
ORDER BY updated_at DESC, created_at DESC
|
ORDER BY updated_at DESC, created_at DESC
|
||||||
LIMIT ?
|
LIMIT ?
|
||||||
@@ -189,7 +210,8 @@ class JobQueue:
|
|||||||
"entity_id": entity_id,
|
"entity_id": entity_id,
|
||||||
"contact_id": c_id,
|
"contact_id": c_id,
|
||||||
"person_id": p_id,
|
"person_id": p_id,
|
||||||
"name": "Unknown",
|
"name": job.get('entity_name') or "Unknown",
|
||||||
|
"associate": job.get('associate_name') or "",
|
||||||
"last_event": job['event_type'],
|
"last_event": job['event_type'],
|
||||||
"status": job['status'],
|
"status": job['status'],
|
||||||
"created_at": job['created_at'],
|
"created_at": job['created_at'],
|
||||||
@@ -224,19 +246,26 @@ class JobQueue:
|
|||||||
target_run["duration"] = f"{seconds}s" if seconds < 60 else f"{seconds // 60}m {seconds % 60}s"
|
target_run["duration"] = f"{seconds}s" if seconds < 60 else f"{seconds // 60}m {seconds % 60}s"
|
||||||
except: pass
|
except: pass
|
||||||
|
|
||||||
# Resolve Name
|
# Resolve Name & Associate (if not already set from a newer job in this cluster)
|
||||||
if target_run["name"] == "Unknown":
|
if target_run["name"] == "Unknown":
|
||||||
name = payload.get('Name') or payload.get('crm_name') or payload.get('FullName') or payload.get('ContactName')
|
name = job.get('entity_name') or payload.get('Name') or payload.get('crm_name') or payload.get('FullName') or payload.get('ContactName')
|
||||||
if not name and payload.get('Firstname'):
|
if not name and payload.get('Firstname'):
|
||||||
name = f"{payload.get('Firstname')} {payload.get('Lastname', '')}".strip()
|
name = f"{payload.get('Firstname')} {payload.get('Lastname', '')}".strip()
|
||||||
if name: target_run["name"] = name
|
if name: target_run["name"] = name
|
||||||
|
|
||||||
|
if not target_run["associate"] and job.get('associate_name'):
|
||||||
|
target_run["associate"] = job['associate_name']
|
||||||
|
|
||||||
|
# Update Status based on the jobs in the run
|
||||||
|
|
||||||
# Update Status based on the jobs in the run
|
# Update Status based on the jobs in the run
|
||||||
# Priority: FAILED > PROCESSING > COMPLETED > SKIPPED > PENDING
|
# Priority: FAILED > PROCESSING > COMPLETED > SKIPPED > PENDING
|
||||||
status_priority = {"FAILED": 4, "PROCESSING": 3, "COMPLETED": 2, "SKIPPED": 1, "PENDING": 0}
|
status_priority = {"FAILED": 4, "PROCESSING": 3, "COMPLETED": 2, "SKIPPED": 1, "PENDING": 0}
|
||||||
current_prio = status_priority.get(target_run["status"], -1)
|
current_prio = status_priority.get(target_run["status"], -1)
|
||||||
new_prio = status_priority.get(job["status"], -1)
|
new_prio = status_priority.get(job["status"], -1)
|
||||||
|
|
||||||
|
# CRITICAL: We only update the status if the new job has a HIGHER priority
|
||||||
|
# Example: If current is COMPLETED (2) and new is SKIPPED (1), we keep COMPLETED.
|
||||||
if new_prio > current_prio:
|
if new_prio > current_prio:
|
||||||
target_run["status"] = job["status"]
|
target_run["status"] = job["status"]
|
||||||
target_run["error_msg"] = job["error_msg"]
|
target_run["error_msg"] = job["error_msg"]
|
||||||
@@ -244,12 +273,16 @@ class JobQueue:
|
|||||||
# Set visual phases based on status
|
# Set visual phases based on status
|
||||||
if job["status"] == "COMPLETED":
|
if job["status"] == "COMPLETED":
|
||||||
target_run["phases"] = {"received": "completed", "enriching": "completed", "syncing": "completed", "completed": "completed"}
|
target_run["phases"] = {"received": "completed", "enriching": "completed", "syncing": "completed", "completed": "completed"}
|
||||||
elif job["status"] == "SKIPPED" and current_prio < 2: # Don't downgrade from COMPLETED
|
|
||||||
target_run["phases"] = {"received": "completed", "enriching": "completed", "syncing": "completed", "completed": "completed"}
|
|
||||||
elif job["status"] == "FAILED":
|
elif job["status"] == "FAILED":
|
||||||
target_run["phases"] = {"received": "completed", "enriching": "failed", "syncing": "pending", "completed": "pending"}
|
target_run["phases"] = {"received": "completed", "enriching": "failed", "syncing": "pending", "completed": "pending"}
|
||||||
elif job["status"] == "PROCESSING":
|
elif job["status"] == "PROCESSING":
|
||||||
target_run["phases"] = {"received": "completed", "enriching": "processing", "syncing": "pending", "completed": "pending"}
|
target_run["phases"] = {"received": "completed", "enriching": "processing", "syncing": "pending", "completed": "pending"}
|
||||||
|
# Note: SKIPPED (1) and PENDING (0) will use the target_run's initial phases or keep previous ones.
|
||||||
|
|
||||||
|
# SPECIAL CASE: If we already have COMPLETED but a new job is SKIPPED, we might want to keep the error_msg empty
|
||||||
|
# to avoid showing "Skipped Echo" on a successful row.
|
||||||
|
if target_run["status"] == "COMPLETED" and job["status"] == "SKIPPED":
|
||||||
|
pass # Keep everything from the successful run
|
||||||
|
|
||||||
# Final cleanup
|
# Final cleanup
|
||||||
for r in runs:
|
for r in runs:
|
||||||
|
|||||||
@@ -168,7 +168,9 @@ class SuperOfficeClient:
|
|||||||
data = resp.json()
|
data = resp.json()
|
||||||
|
|
||||||
all_results.extend(data.get('value', []))
|
all_results.extend(data.get('value', []))
|
||||||
next_page_url = data.get('next_page_url', None)
|
|
||||||
|
# Robust Pagination: Check both OData standard and legacy property
|
||||||
|
next_page_url = data.get('odata.nextLink') or data.get('next_page_url')
|
||||||
|
|
||||||
except requests.exceptions.HTTPError as e:
|
except requests.exceptions.HTTPError as e:
|
||||||
logger.error(f"❌ API Search Error for {query_string}: {e.response.text}")
|
logger.error(f"❌ API Search Error for {query_string}: {e.response.text}")
|
||||||
|
|||||||
45
connector-superoffice/tools/blind_check_associates.py
Normal file
45
connector-superoffice/tools/blind_check_associates.py
Normal file
@@ -0,0 +1,45 @@
|
|||||||
|
import sys
|
||||||
|
import os
|
||||||
|
import json
|
||||||
|
|
||||||
|
# Absolute path setup
|
||||||
|
current_dir = os.path.dirname(os.path.abspath(__file__))
|
||||||
|
connector_dir = os.path.abspath(os.path.join(current_dir, '..'))
|
||||||
|
sys.path.insert(0, connector_dir)
|
||||||
|
|
||||||
|
from superoffice_client import SuperOfficeClient
|
||||||
|
|
||||||
|
def blind_check():
    """Count Contact records matching Manuel's associate-name filter.

    Sends an OData query with $top=0&$count=true so only the total count is
    returned, then compares it against the count seen in the UI (17014).
    Prints results; returns nothing.
    """
    print("🕵️ Testing Manuel's Filter: contactAssociate/contactFullName eq 'RoboPlanet GmbH'")
    client = SuperOfficeClient()
    if not client.access_token:
        print("❌ Auth failed.")
        return

    # Manuel's filter logic with Count
    endpoint = "Contact?$filter=contactAssociate/contactFullName eq 'RoboPlanet GmbH'&$top=0&$count=true"

    print(f"📡 Querying: {endpoint}")
    try:
        resp = client._get(endpoint)
        count = resp.get('@odata.count')
        print(f"\n🎯 RESULT: Manuel's Filter found {count} accounts.")

        if count == 17014:
            print("✅ PERFECT MATCH! Manuel's filter matches your UI count exactly.")
        else:
            # `count or 0` guards against a missing @odata.count (None).
            print(f"ℹ️ Delta to UI: {17014 - (count or 0)}")

    except Exception as e:
        print(f"❌ Manuel's filter failed: {e}")
        # Fallback: retry with '+' in place of the literal space.
        print("Trying with encoded spaces...")
        try:
            endpoint_enc = "Contact?$filter=contactAssociate/contactFullName eq 'RoboPlanet+GmbH'&$top=0&$count=true"
            resp = client._get(endpoint_enc)
            print(f"🎯 Encoded Result: {resp.get('@odata.count')}")
        except Exception:
            # FIX: was a bare `except:`, which also swallows SystemExit and
            # KeyboardInterrupt. Narrowed to Exception — the fallback is
            # deliberately best-effort, so failures stay non-fatal.
            pass


if __name__ == "__main__":
    blind_check()
|
||||||
47
connector-superoffice/tools/check_contact_associate.py
Normal file
47
connector-superoffice/tools/check_contact_associate.py
Normal file
@@ -0,0 +1,47 @@
|
|||||||
|
import sys
|
||||||
|
import os
|
||||||
|
import json
|
||||||
|
|
||||||
|
# Absolute path setup
|
||||||
|
current_dir = os.path.dirname(os.path.abspath(__file__))
|
||||||
|
connector_dir = os.path.abspath(os.path.join(current_dir, '..'))
|
||||||
|
sys.path.insert(0, connector_dir)
|
||||||
|
|
||||||
|
from superoffice_client import SuperOfficeClient
|
||||||
|
|
||||||
|
def check_associate_details():
    """Inspect an arbitrary Contact record to see whether its nested
    Associate object exposes GroupIdx (needed for group-based filtering).

    Diagnostic script: prints findings; returns nothing.
    """
    print("🔎 Checking Associate Details in Contact Record...")
    client = SuperOfficeClient()
    if not client.access_token:
        print("❌ Auth failed.")
        return

    # The previous known test company was deleted, so pick ANY contact
    # from the system instead of a fixed ID.
    print("Searching for a contact...")
    contacts = client.search("Contact?$top=1")

    if contacts:
        # Key casing varies between API responses — try both spellings.
        cid = contacts[0].get('contactId') or contacts[0].get('ContactId')
        print(f"✅ Found Contact ID: {cid}")

        # Fetch the full record; the search result alone does not include
        # the nested Associate object.
        print("Fetching details...")
        details = client.get_contact(cid)

        assoc = details.get('Associate')
        print("--- Associate Object ---")
        print(json.dumps(assoc, indent=2))

        # Success criterion: GroupIdx present on the nested Associate.
        if assoc and 'GroupIdx' in assoc:
            print(f"✅ SUCCESS: GroupIdx is available: {assoc['GroupIdx']}")
        else:
            print("❌ FAILURE: GroupIdx is MISSING in Contact details.")

    else:
        print("❌ No contacts found in system.")


if __name__ == "__main__":
    check_associate_details()
|
||||||
38
connector-superoffice/tools/check_filter_counts.py
Normal file
38
connector-superoffice/tools/check_filter_counts.py
Normal file
@@ -0,0 +1,38 @@
|
|||||||
|
import sys
|
||||||
|
import os
|
||||||
|
import json
|
||||||
|
|
||||||
|
# Absolute path setup
|
||||||
|
current_dir = os.path.dirname(os.path.abspath(__file__))
|
||||||
|
connector_dir = os.path.abspath(os.path.join(current_dir, '..'))
|
||||||
|
sys.path.insert(0, connector_dir)
|
||||||
|
|
||||||
|
from superoffice_client import SuperOfficeClient
|
||||||
|
|
||||||
|
def check_counts():
    """Dump the raw OData response for a simple Contact filter query.

    Diagnostic script used to inspect the response shape (e.g. whether a
    count property or Associate projection is returned). Prints the raw
    JSON; returns nothing.
    """
    print("📊 Verifying Filter Logic via OData Search...")
    client = SuperOfficeClient()
    if not client.access_token:
        print("❌ Auth failed.")
        return

    # Simplified OData Search
    # We ask for top=1 but want the total count
    # NOTE(review): the comment above mentions a total count, but this
    # endpoint never requests $count=true — the response will only contain
    # the single row. Confirm whether `&$count=true` was intended here.
    endpoint = "Contact?$filter=name contains 'GmbH'&$top=1&$select=Associate"

    print(f"📡 Querying: {endpoint}")
    try:
        resp = client._get(endpoint)
        print("--- RAW RESPONSE START ---")
        print(json.dumps(resp, indent=2))
        print("--- RAW RESPONSE END ---")

    except Exception as e:
        print(f"❌ Error: {e}")
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
check_counts()
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
check_counts()
|
||||||
52
connector-superoffice/tools/check_selection_members.py
Normal file
52
connector-superoffice/tools/check_selection_members.py
Normal file
@@ -0,0 +1,52 @@
|
|||||||
|
import sys
|
||||||
|
import os
|
||||||
|
import json
|
||||||
|
|
||||||
|
# Absolute path setup
|
||||||
|
current_dir = os.path.dirname(os.path.abspath(__file__))
|
||||||
|
connector_dir = os.path.abspath(os.path.join(current_dir, '..'))
|
||||||
|
sys.path.insert(0, connector_dir)
|
||||||
|
|
||||||
|
from superoffice_client import SuperOfficeClient
|
||||||
|
|
||||||
|
def check_selection():
    """Inspect SuperOffice Selection 10960 ('Alle_Contacts_Roboplanet').

    Prints the selection's metadata and the first 10 Contact members via
    the direct Selection endpoint. Diagnostic script; returns nothing.
    """
    # Hard-coded target: the 'Alle_Contacts_Roboplanet' selection.
    selection_id = 10960
    print(f"🔎 Inspecting Selection {selection_id} (Alle_Contacts_Roboplanet)...")
    client = SuperOfficeClient()
    if not client.access_token:
        print("❌ Auth failed.")
        return

    # 1. Get Selection Metadata
    print("\n📋 Fetching Selection Details...")
    details = client._get(f"Selection/{selection_id}")
    if details:
        print(f" Name: {details.get('Name')}")
        print(f" Description: {details.get('Description')}")
        print(f" Type: {details.get('SelectionType')}")  # e.g. Dynamic, Static

    # 2. Fetch Members via direct Selection endpoint
    print("\n👥 Fetching first 10 Members via direct Selection endpoint...")
    # Direct endpoint for Contact members of a selection
    endpoint = f"Selection/{selection_id}/ContactMembers?$top=10"

    try:
        members_resp = client._get(endpoint)
        # OData usually returns a 'value' list, but this endpoint may also
        # return a bare list — handle both shapes.
        members = members_resp.get('value', []) if isinstance(members_resp, dict) else members_resp

        if members and isinstance(members, list):
            print(f"✅ Found {len(members)} members in first page:")
            for m in members:
                # Structure might be flat or nested; key casing varies.
                name = m.get('Name') or m.get('name')
                cid = m.get('ContactId') or m.get('contactId')
                print(f" - {name} (ContactID: {cid})")
        else:
            print("⚠️ No members found or response format unexpected.")
            print(f"DEBUG: {json.dumps(members_resp, indent=2)}")
    except Exception as e:
        print(f"❌ Direct Selection members query failed: {e}")


if __name__ == "__main__":
    check_selection()
|
||||||
@@ -0,0 +1,62 @@
|
|||||||
|
import sys
|
||||||
|
import os
|
||||||
|
import json
|
||||||
|
|
||||||
|
# Absolute path setup
|
||||||
|
current_dir = os.path.dirname(os.path.abspath(__file__))
|
||||||
|
connector_dir = os.path.abspath(os.path.join(current_dir, '..'))
|
||||||
|
sys.path.insert(0, connector_dir)
|
||||||
|
|
||||||
|
from superoffice_client import SuperOfficeClient
|
||||||
|
|
||||||
|
def run_discovery():
    """Discover 'Roboplanet' selections and the associate-to-group mapping.

    Two independent probes:
      1. Search Selection entities whose name contains 'Roboplanet'.
      2. Pull associateId/name/groupIdx via the Archive provider and list
         the users belonging to group 52.
    Diagnostic script: prints findings; returns nothing.
    """
    print("🔎 Discovery: Searching for Selections and Associate Mapping...")
    client = SuperOfficeClient()
    if not client.access_token:
        print("❌ Auth failed.")
        return

    # 1. Search for Selections
    print("\n📁 Searching for 'Roboplanet' Selections...")
    # Selections can be found via Archive or direct endpoint
    selections = client.search("Selection?$filter=name contains 'Roboplanet'")
    if selections:
        print(f"✅ Found {len(selections)} matching selections:")
        for sel in selections:
            # Key casing varies between API responses — try both spellings.
            sid = sel.get('SelectionId') or sel.get('selectionId')
            name = sel.get('Name') or sel.get('name')
            print(f" - {name} (ID: {sid})")
    else:
        print("⚠️ No selections found with name 'Roboplanet'.")

    # 2. Get Associate Mapping via Archive Provider
    # This avoids the Associate/{id} 500 error
    print("\n👥 Fetching Associate-to-Group mapping via Archive...")
    # Provider 'associate' is standard
    endpoint = "Archive/dynamic?provider=associate&columns=associateId,name,groupIdx"
    try:
        mapping_data = client._get(endpoint)
        if mapping_data and isinstance(mapping_data, list):
            print(f"✅ Received {len(mapping_data)} associate records.")
            robo_user_ids = []
            for item in mapping_data:
                aid = item.get("associateId")
                name = item.get("name")
                gid = item.get("groupIdx")

                # groupIdx 52 is treated as the Roboplanet group here —
                # assumption baked into this script; TODO confirm in CRM.
                if gid == 52:
                    print(f" - [ROBO] {name} (ID: {aid}, Group: {gid})")
                    robo_user_ids.append(aid)
                # str(name) guards against a missing/None name value.
                elif "Fottner" in str(name) or aid == 321:
                    print(f" - [EXCLUDE] {name} (ID: {aid}, Group: {gid})")

            print(f"\n🚀 Identified {len(robo_user_ids)} Roboplanet Users.")
            if robo_user_ids:
                print(f"List of IDs: {robo_user_ids}")
        else:
            print("❌ Archive query returned no associate mapping.")
    except Exception as e:
        print(f"❌ Archive query failed: {e}")


if __name__ == "__main__":
    run_discovery()
|
||||||
69
connector-superoffice/tools/count_roboplanet_total.py
Normal file
69
connector-superoffice/tools/count_roboplanet_total.py
Normal file
@@ -0,0 +1,69 @@
|
|||||||
|
import sys
|
||||||
|
import os
|
||||||
|
import json
|
||||||
|
|
||||||
|
# Absolute path setup
|
||||||
|
current_dir = os.path.dirname(os.path.abspath(__file__))
|
||||||
|
connector_dir = os.path.abspath(os.path.join(current_dir, '..'))
|
||||||
|
sys.path.insert(0, connector_dir)
|
||||||
|
|
||||||
|
from superoffice_client import SuperOfficeClient
|
||||||
|
from config import settings
|
||||||
|
|
||||||
|
def verify_total_counts():
    """Cross-check the Roboplanet account count from two angles.

    1. Read MemberCount from Selection 10960 (the web UI's number).
    2. Build one big OData filter over the configured whitelist
       (numeric associate IDs + shortnames) and ask the API for a count.
    Diagnostic script: prints both numbers for manual comparison.
    """
    print("📊 Verifying Global Account Counts...")
    client = SuperOfficeClient()
    if not client.access_token:
        print("❌ Auth failed.")
        return

    # Mixed set of int IDs and str shortnames (see config.Settings).
    whitelist = settings.ROBOPLANET_WHITELIST

    # 1. Try to get MemberCount from the Selection 10960 directly
    print("\n📁 Checking Selection 10960 (Alle_Contacts_Roboplanet)...")
    try:
        sel_details = client._get("Selection/10960")
        if sel_details:
            # Note: MemberCount is often a property of the Selection entity
            count = sel_details.get("MemberCount")
            print(f" 🔹 Web-Interface-equivalent Count (MemberCount): {count}")
    except Exception as e:
        print(f" ⚠️ Could not fetch Selection count property: {e}")

    # 2. Manual Aggregate Count via OData
    # We construct a filter for all our IDs and Shortnames
    # This might be too long for a URL, so we do it in smaller batches if needed
    print("\n📡 Calculating Netto Count for Whitelist (IDs + Names)...")

    # Divide whitelist into IDs and Names
    ids = [x for x in whitelist if isinstance(x, int)]
    names = [x for x in whitelist if isinstance(x, str)]

    # Construct OData filter string
    # example: (associateId eq 528 or associateId eq 485 or associateId eq 'RKAB')
    # NOTE(review): shortnames are interpolated unescaped into the quoted
    # literal — fine for the current alphanumeric whitelist, but a name
    # containing a quote would break the filter.
    id_filters = [f"associateId eq {i}" for i in ids]
    name_filters = [f"associateId eq '{n}'" for n in names]
    full_filter = " or ".join(id_filters + name_filters)

    # We use $top=0 and $count=true to get JUST the number
    endpoint = f"Contact?$filter={full_filter}&$top=0&$count=true"

    try:
        # Note: If the URL is too long (> 2000 chars), this might fail.
        # But for ~60 entries it should be fine.
        resp = client._get(endpoint)
        total_api_count = resp.get("@odata.count")
        print(f" 🎯 API Calculated Count (Whitelist-Match): {total_api_count}")

        if total_api_count is not None:
            print(f"\n✅ PROOF: The API identifies {total_api_count} accounts for Roboplanet.")
            print("👉 Bitte vergleiche diese Zahl mit der Selektion 'Alle_Contacts_Roboplanet' im SuperOffice Web-Interface.")
        else:
            print("❌ API did not return a count property.")

    except Exception as e:
        print(f"❌ OData Aggregation failed: {e}")
        print(" The filter string might be too long for the API.")


if __name__ == "__main__":
    verify_total_counts()
|
||||||
35
connector-superoffice/tools/debug_names.py
Normal file
35
connector-superoffice/tools/debug_names.py
Normal file
@@ -0,0 +1,35 @@
|
|||||||
|
import sys
|
||||||
|
import os
|
||||||
|
import json
|
||||||
|
|
||||||
|
# Absolute path setup
|
||||||
|
current_dir = os.path.dirname(os.path.abspath(__file__))
|
||||||
|
connector_dir = os.path.abspath(os.path.join(current_dir, '..'))
|
||||||
|
sys.path.insert(0, connector_dir)
|
||||||
|
|
||||||
|
from superoffice_client import SuperOfficeClient
|
||||||
|
|
||||||
|
def debug_names():
    """Print the Associate name for the 5 most recently created Contacts.

    Used to verify that the Associate/Name projection comes back populated.
    Diagnostic script: prints findings; returns nothing.
    """
    print("🔎 Debugging Associate Names...")
    client = SuperOfficeClient()
    if not client.access_token:
        print("❌ Auth failed.")
        return

    # Newest contacts first; project only the fields we want to inspect.
    endpoint = "Contact?$orderby=contactId desc&$top=5&$select=name,Associate/Name"

    print(f"📡 Querying: {endpoint}")
    contacts = client.search(endpoint)

    if contacts:
        for c in contacts:
            cname = c.get('name')
            # `or {}` keeps the .get('Name') lookup safe if Associate is
            # missing or null in the response.
            assoc = c.get('Associate') or {}
            aname = assoc.get('Name')
            print(f" 🏢 Contact: {cname}")
            print(f" 👉 Associate Name: '{aname}'")
    else:
        print("❌ No contacts found.")


if __name__ == "__main__":
    debug_names()
|
||||||
66
connector-superoffice/tools/discover_associates.py
Normal file
66
connector-superoffice/tools/discover_associates.py
Normal file
@@ -0,0 +1,66 @@
|
|||||||
|
import sys
|
||||||
|
import os
|
||||||
|
import json
|
||||||
|
|
||||||
|
# Absolute path setup to avoid import errors
|
||||||
|
current_dir = os.path.dirname(os.path.abspath(__file__))
|
||||||
|
connector_dir = os.path.abspath(os.path.join(current_dir, '..'))
|
||||||
|
sys.path.insert(0, connector_dir)
|
||||||
|
|
||||||
|
from superoffice_client import SuperOfficeClient
|
||||||
|
|
||||||
|
def discover_associates_and_groups():
    """Enumerate user groups and probe candidate person IDs.

    1. Lists all user groups and tries to auto-identify the one whose name
       contains 'Roboplanet'.
    2. Checks two candidate Person IDs to locate Willi Fottner's associate
       record (ID + GroupIdx).
    Diagnostic script: prints findings; returns nothing.
    """
    print("🔎 Discovering Associates and Groups...")
    client = SuperOfficeClient()
    if not client.access_token:
        print("❌ Auth failed.")
        return

    # 1. Fetch User Groups
    print("\n👥 Fetching User Groups...")
    groups = client._get("MDOList/usergroup")

    robo_group_id = None

    if groups:
        for group in groups:
            name = group.get('Name')
            grp_id = group.get('Id')
            print(f" - Group: {name} (ID: {grp_id})")
            # FIX: guard against a group without a 'Name' — the original
            # `"Roboplanet" in name` raises TypeError when name is None,
            # aborting the whole discovery run mid-listing.
            if name and "Roboplanet" in name:
                robo_group_id = grp_id

    if robo_group_id:
        print(f"✅ Identified Roboplanet Group ID: {robo_group_id}")
    else:
        print("⚠️ Could not auto-identify Roboplanet group. Check the list above.")

    # 2. Check Candidate IDs directly
    print("\n👤 Checking specific Person IDs for Willi Fottner...")
    candidates = [6, 182552]

    for pid in candidates:
        try:
            p = client.get_person(pid)
            if p:
                fname = p.get('Firstname')
                lname = p.get('Lastname')
                is_assoc = p.get('IsAssociate')

                print(f" 👉 Person {pid}: {fname} {lname} (IsAssociate: {is_assoc})")

                if is_assoc:
                    assoc_obj = p.get("Associate")
                    if assoc_obj:
                        assoc_id = assoc_obj.get("AssociateId")
                        grp = assoc_obj.get("GroupIdx")
                        print(f" ✅ IS ASSOCIATE! ID: {assoc_id}, Group: {grp}")
                        # str() guards against None first/last names.
                        if "Fottner" in str(lname) or "Willi" in str(fname):
                            print(f" 🎯 TARGET IDENTIFIED: Willi Fottner is Associate ID {assoc_id}")
        except Exception as e:
            print(f" ❌ Error checking Person {pid}: {e}")

    print("\n--- Done ---")


if __name__ == "__main__":
    discover_associates_and_groups()
|
||||||
66
connector-superoffice/tools/final_vertical_discovery.py
Normal file
66
connector-superoffice/tools/final_vertical_discovery.py
Normal file
@@ -0,0 +1,66 @@
|
|||||||
|
import sys
|
||||||
|
import os
|
||||||
|
import json
|
||||||
|
|
||||||
|
# Absolute path to the connector-superoffice directory
|
||||||
|
current_dir = os.path.dirname(os.path.abspath(__file__))
|
||||||
|
connector_dir = os.path.abspath(os.path.join(current_dir, '..'))
|
||||||
|
|
||||||
|
# CRITICAL: Insert at 0 to shadow /app/config.py
|
||||||
|
sys.path.insert(0, connector_dir)
|
||||||
|
|
||||||
|
from superoffice_client import SuperOfficeClient
|
||||||
|
|
||||||
|
def discover_verticals():
    """Locate the user-defined list behind Contact UDF 'SuperOffice:83'
    and dump its rows as a name -> id mapping for VERTICAL_MAP_JSON."""
    print("🔎 Starting Final Vertical Discovery (Production)...")
    client = SuperOfficeClient()
    if not client.access_token:
        print("❌ Auth failed.")
        return

    # Step 1: read the published Contact UDF layout and resolve the
    # list ID that backs the SuperOffice:83 field.
    print("📡 Fetching Contact UDF Layout (Metadata)...")
    layout = client._get("Contact/UdefLayout/Published")

    list_id = None
    if layout and 'Fields' in layout:
        hit = next((f for f in layout['Fields']
                    if f.get('ProgId') == 'SuperOffice:83'), None)
        if hit is not None:
            print(f"✅ Found SuperOffice:83: {hit.get('Label')}")
            list_id = hit.get('ListId')
            print(f"✅ List ID: {list_id}")

    if not list_id:
        print("❌ Could not find Metadata for SuperOffice:83.")
        return

    # Step 2: fetch the list rows and print the final mapping.
    print(f"📡 Fetching List Items for List ID {list_id}...")
    items = client._get(f"List/{list_id}/Items")

    if not items:
        print(f"❌ Could not fetch items for List {list_id}. Trying MDO List...")
        # Fallback to MDO List
        mdo_items = client._get(f"MDOList/udlist{list_id}")
        if mdo_items:
            print("✅ Success via MDO List.")
            # ... process mdo items if needed ...
        else:
            print("❌ MDO List fallback failed too.")
        return

    print(f"✅ SUCCESS! Found {len(items)} items in the Vertical list.")
    mapping = {}
    for row in items:
        label = row.get('Value') or row.get('Name')
        row_id = row.get('Id')
        mapping[label] = row_id
        print(f" - {label}: {row_id}")

    print("\n🚀 FINAL MAPPING JSON (Copy to .env VERTICAL_MAP_JSON):")
    print(json.dumps(mapping))


if __name__ == "__main__":
    discover_verticals()
|
||||||
@@ -0,0 +1,41 @@
|
|||||||
|
import sys
|
||||||
|
import os
|
||||||
|
import json
|
||||||
|
|
||||||
|
# Absolute path setup
|
||||||
|
current_dir = os.path.dirname(os.path.abspath(__file__))
|
||||||
|
connector_dir = os.path.abspath(os.path.join(current_dir, '..'))
|
||||||
|
sys.path.insert(0, connector_dir)
|
||||||
|
|
||||||
|
from superoffice_client import SuperOfficeClient
|
||||||
|
|
||||||
|
def find_latest_roboplanet():
    """Diagnostic: fetch the newest contact owned by Associate 528 (RCGO)
    and dump it so the nested Associate payload shape can be inspected."""
    print("🔎 Searching for the latest Roboplanet (Group 52) Account...")
    client = SuperOfficeClient()
    if not client.access_token:
        print("❌ Auth failed.")
        return

    # Newest contact of associate 528, including the nested Associate object.
    endpoint = "Contact?$filter=associateId eq 528&$orderby=contactId desc&$top=1&$select=contactId,name,Associate"

    print(f"📡 Diagnostic Query: {endpoint}")

    try:
        results = client.search(endpoint)

        if not results:
            print("\n❌ NO ACCOUNTS FOUND for RCGO (528).")
            return

        print("\n✅ FOUND ACCOUNT FOR RCGO (528):")
        print(json.dumps(results[0], indent=2))
        # GroupIdx is usually nested like "Associate": {"GroupIdx": 52...}
    except Exception as e:
        print(f"❌ Error: {e}")


if __name__ == "__main__":
    find_latest_roboplanet()
|
||||||
60
connector-superoffice/tools/find_missing_whitelist_ids.py
Normal file
60
connector-superoffice/tools/find_missing_whitelist_ids.py
Normal file
@@ -0,0 +1,60 @@
|
|||||||
|
import sys
|
||||||
|
import os
|
||||||
|
import json
|
||||||
|
|
||||||
|
# Absolute path setup
|
||||||
|
current_dir = os.path.dirname(os.path.abspath(__file__))
|
||||||
|
connector_dir = os.path.abspath(os.path.join(current_dir, '..'))
|
||||||
|
sys.path.insert(0, connector_dir)
|
||||||
|
|
||||||
|
from superoffice_client import SuperOfficeClient
|
||||||
|
from config import settings
|
||||||
|
|
||||||
|
def _is_whitelisted(aid, whitelist):
    """Return True if *aid* matches the whitelist, either as an
    upper-cased name string or as an integer associate ID."""
    if str(aid).upper() in whitelist:
        return True
    try:
        return int(aid) in whitelist
    except (ValueError, TypeError):
        # aid is a non-numeric name that was not found in the whitelist.
        return False


def find_missing():
    """Scan the most recent contacts and report owner (associate) IDs that
    are not covered by settings.ROBOPLANET_WHITELIST.

    Diagnostic tool: prints results, returns nothing.
    """
    print("🔎 Scanning for Associate IDs not in Whitelist...")
    client = SuperOfficeClient()
    if not client.access_token:
        print("❌ Auth failed.")
        return

    whitelist = settings.ROBOPLANET_WHITELIST

    # Fetch the newest 500 contacts, owner column only.
    limit = 500
    endpoint = f"Contact?$orderby=contactId desc&$top={limit}&$select=associateId"

    print(f"📡 Scanning {limit} records...")
    contacts = client.search(endpoint)

    if not contacts:
        print("❌ No contacts found.")
        return

    missing_ids = set()
    match_count = 0
    for c in contacts:
        # Field casing differs between OData and REST responses.
        aid = c.get('associateId') or c.get('AssociateId')
        if not aid:
            continue
        # Fixed: the original used a bare `except: pass` around the int
        # conversion, which would also hide unrelated errors.
        if _is_whitelisted(aid, whitelist):
            match_count += 1
        else:
            missing_ids.add(aid)

    print(f"\n📊 Scan Results ({limit} records):")
    print(f" - Total Matches (Roboplanet): {match_count}")
    print(f" - Missing/Other IDs: {len(missing_ids)}")

    if missing_ids:
        print("\n✅ Found IDs NOT in whitelist:")
        for mid in sorted(missing_ids, key=str):
            print(f" - {mid}")

        print("\n👉 Bitte prüfe, ob eine dieser IDs ebenfalls zu Roboplanet gehört.")


if __name__ == "__main__":
    find_missing()
|
||||||
98
connector-superoffice/tools/inspect_group_users.py
Normal file
98
connector-superoffice/tools/inspect_group_users.py
Normal file
@@ -0,0 +1,98 @@
|
|||||||
|
import sys
|
||||||
|
import os
|
||||||
|
import json
|
||||||
|
|
||||||
|
# Absolute path setup
|
||||||
|
current_dir = os.path.dirname(os.path.abspath(__file__))
|
||||||
|
connector_dir = os.path.abspath(os.path.join(current_dir, '..'))
|
||||||
|
sys.path.insert(0, connector_dir)
|
||||||
|
|
||||||
|
from superoffice_client import SuperOfficeClient
|
||||||
|
|
||||||
|
def inspect_group():
    """Diagnostic: verify that Group 52 (Roboplanet) has members and that a
    contact owned by one of them exposes Associate.GroupIdx == 52, which
    the production filter logic relies on.

    Fixes vs. original: removed a dead no-op loop over the MDO associate
    list, and guarded against `get_contact` returning None (the original
    would raise AttributeError on a transient API failure).
    """
    print("🔎 Inspecting Group 52 (Roboplanet)...")
    client = SuperOfficeClient()
    if not client.access_token:
        print("❌ Auth failed.")
        return

    # 1. Find Users in Group 52
    print("\n👥 Finding Associates in Group 52...")
    # MDOList items are flat and do not carry GroupIdx; they are only kept
    # as the slow fallback (detail fetch per candidate) below.
    associates = client._get("MDOList/associate")

    robo_associates = []

    # Efficient OData Search for Associates in Group 52
    users_in_group = client.search("Associate?$filter=groupIdx eq 52")

    if users_in_group:
        print(f"✅ Found {len(users_in_group)} Associates in Group 52:")
        for u in users_in_group:
            uid = u.get('associateId') or u.get('AssociateId')
            name = u.get('name') or u.get('Name') or u.get('fullName')
            print(f" - {name} (ID: {uid})")
            robo_associates.append(uid)
    else:
        print("⚠️ No Associates found in Group 52 via OData.")
        print(" Trying manual scan of MDOList (slower)...")
        # Fallback: fetch details per associate, stop after a handful of hits
        # to avoid hammering the API.
        if associates:
            count = 0
            for assoc in associates:
                aid = assoc.get('Id')
                det = client._get(f"Associate/{aid}")
                if det and det.get('GroupIdx') == 52:
                    print(f" - {det.get('Name')} (ID: {aid}) [via Detail]")
                    robo_associates.append(aid)
                    count += 1
                    if count > 5:
                        print(" ... (stopping scan)")
                        break

    if not robo_associates:
        print("❌ CRITICAL: Group 52 seems empty! Filter logic will block everything.")
        return

    # 2. Check a Contact owned by one of these users
    test_user_id = robo_associates[0]
    print(f"\n🏢 Checking a Contact owned by User {test_user_id}...")

    contacts = client.search(f"Contact?$filter=associateId eq {test_user_id}&$top=1&$select=ContactId,Name,Associate/GroupIdx")

    if contacts:
        c = contacts[0]
        cid = c.get('contactId') or c.get('ContactId')
        cname = c.get('name') or c.get('Name')
        print(f" found: {cname} (ID: {cid})")

        # Double-check with a full GET; guard against None so a transient
        # API failure does not crash the diagnostic.
        full_c = client.get_contact(cid) or {}
        assoc_grp = full_c.get('Associate', {}).get('GroupIdx')
        print(f" 👉 Contact Associate GroupIdx: {assoc_grp}")

        if assoc_grp == 52:
            print("✅ VERIFIED: Filter logic 'GroupIdx == 52' will work.")
        else:
            print(f"❌ MISMATCH: Contact GroupIdx is {assoc_grp}, expected 52.")
    else:
        print("⚠️ User has no contacts. Cannot verify contact group mapping.")


if __name__ == "__main__":
    inspect_group()
|
||||||
73
connector-superoffice/tools/precise_count_verification.py
Normal file
73
connector-superoffice/tools/precise_count_verification.py
Normal file
@@ -0,0 +1,73 @@
|
|||||||
|
import sys
|
||||||
|
import os
|
||||||
|
import json
|
||||||
|
|
||||||
|
# Absolute path setup
|
||||||
|
current_dir = os.path.dirname(os.path.abspath(__file__))
|
||||||
|
connector_dir = os.path.abspath(os.path.join(current_dir, '..'))
|
||||||
|
sys.path.insert(0, connector_dir)
|
||||||
|
|
||||||
|
from superoffice_client import SuperOfficeClient
|
||||||
|
from config import settings
|
||||||
|
|
||||||
|
def run_precise_check():
    """Compare per-owner account counts for the whitelist IDs against the
    total shown in the SuperOffice UI (17014), and list owners inside
    selection 10960 that are not whitelisted.

    Fixes vs. original: the per-ID count loop used a bare `except: pass`
    that silently hid every failure, and `resp.get(...)` would crash if
    the API returned None.
    """
    print("📊 Precise Count Verification: API vs. Whitelist...")
    client = SuperOfficeClient()
    if not client.access_token:
        print("❌ Auth failed.")
        return

    whitelist = settings.ROBOPLANET_WHITELIST
    # The whitelist mixes integer IDs and name strings; only numeric IDs
    # can be used in an OData associateId filter.
    ids_in_whitelist = [x for x in whitelist if isinstance(x, int)]

    # 1. Individual counts for our whitelist IDs.
    print(f"\n🔢 Counting accounts for the {len(ids_in_whitelist)} IDs in whitelist...")
    total_whitelist_count = 0
    for aid in ids_in_whitelist:
        # $top=0 with $count=true returns only the count, no rows.
        endpoint = f"Contact?$filter=associateId eq {aid}&$top=0&$count=true"
        try:
            resp = client._get(endpoint)
            count = (resp or {}).get('@odata.count') or 0
            total_whitelist_count += count
        except Exception as e:
            # Best-effort: one failing ID must not abort the scan, but the
            # failure should at least be visible (was a bare except: pass).
            print(f" ⚠️ Count failed for ID {aid}: {e}")

    print(f"✅ Total accounts owned by Whitelist IDs: {total_whitelist_count}")

    # 2. Check for "Strangers" in the Selection 10960 — who else owns
    # members of that selection?
    print(f"\n🕵️ Looking for Owners in Selection 10960 who are NOT in our whitelist...")

    # Archive/dynamic groups selection members by owner in a single call;
    # this is the most efficient way to see all owners in the selection.
    endpoint = "Archive/dynamic?provider=selectionmember&columns=contact/associateId,contact/associate/name&criteria=selectionId=10960&$top=1000"

    try:
        members = client._get(endpoint)
        if members and isinstance(members, list):
            owners_in_selection = {}
            for m in members:
                aid = m.get("contact/associateId")
                aname = m.get("contact/associate/name")
                if aid:
                    owners_in_selection[aid] = aname

            print(f"Found {len(owners_in_selection)} distinct owners in the first 1000 members of selection.")
            for aid, name in owners_in_selection.items():
                # Whitelist holds IDs and names, so check both forms.
                if aid not in whitelist and name not in whitelist:
                    print(f" ⚠️ OWNER NOT IN WHITELIST: {name} (ID: {aid})")
        else:
            print("⚠️ Could not group selection members by owner via API.")

    except Exception as e:
        print(f"⚠️ Archive grouping failed: {e}")

    print(f"\n🏁 Target from UI: 17014")
    print(f"🏁 Whitelist sum: {total_whitelist_count}")
    delta = 17014 - total_whitelist_count
    print(f"🏁 Delta: {delta}")


if __name__ == "__main__":
    run_precise_check()
|
||||||
41
connector-superoffice/tools/test_selection_membership.py
Normal file
41
connector-superoffice/tools/test_selection_membership.py
Normal file
@@ -0,0 +1,41 @@
|
|||||||
|
import sys
|
||||||
|
import os
|
||||||
|
import json
|
||||||
|
|
||||||
|
# Absolute path setup
|
||||||
|
current_dir = os.path.dirname(os.path.abspath(__file__))
|
||||||
|
connector_dir = os.path.abspath(os.path.join(current_dir, '..'))
|
||||||
|
sys.path.insert(0, connector_dir)
|
||||||
|
|
||||||
|
from superoffice_client import SuperOfficeClient
|
||||||
|
|
||||||
|
def test_membership(contact_id: int):
    """Check whether *contact_id* is a member of selection 10960 via the
    efficient MemberStatus endpoint and print the verdict.

    Fix vs. original: added the `access_token` guard that every sibling
    diagnostic script has, so an unauthenticated client fails fast
    instead of issuing a doomed request.
    """
    selection_id = 10960
    print(f"🔎 Testing if Contact {contact_id} is member of Selection {selection_id}...")
    client = SuperOfficeClient()
    if not client.access_token:
        print("❌ Auth failed.")
        return

    # Efficient Membership Check:
    # GET Selection/{id}/MemberStatus/Contact/{contactId}
    endpoint = f"Selection/{selection_id}/MemberStatus/Contact/{contact_id}"

    print(f"📡 Querying: {endpoint}")
    try:
        resp = client._get(endpoint)
        print(f"✅ Response: {json.dumps(resp, indent=2)}")

        # Result format is usually a string: "Member", "NotMember", "Excluded"
        if resp == "Member":
            print("🎯 YES: Contact is a member.")
        else:
            print("⏭️ NO: Contact is NOT a member.")

    except Exception as e:
        print(f"❌ Membership check failed: {e}")


if __name__ == "__main__":
    # Test with Tanja Ullmann (171188) which we identified as Roboplanet
    test_membership(171188)

    # Test with Wackler parent (ID 3)
    print("\n--- Control Test ---")
    test_membership(3)
|
||||||
80
connector-superoffice/tools/verify_latest_roboplanet.py
Normal file
80
connector-superoffice/tools/verify_latest_roboplanet.py
Normal file
@@ -0,0 +1,80 @@
|
|||||||
|
import sys
|
||||||
|
import os
|
||||||
|
import json
|
||||||
|
|
||||||
|
# Absolute path setup
|
||||||
|
current_dir = os.path.dirname(os.path.abspath(__file__))
|
||||||
|
connector_dir = os.path.abspath(os.path.join(current_dir, '..'))
|
||||||
|
sys.path.insert(0, connector_dir)
|
||||||
|
|
||||||
|
from superoffice_client import SuperOfficeClient
|
||||||
|
from config import settings
|
||||||
|
|
||||||
|
def find_latest_match():
    """Walk the newest contacts and report the first one whose owner is on
    the Roboplanet whitelist (matched by name string or numeric ID)."""
    print("🔎 Searching for the youngest account assigned to a Roboplanet user...")
    client = SuperOfficeClient()
    if not client.access_token:
        print("❌ Auth failed.")
        return

    whitelist = settings.ROBOPLANET_WHITELIST
    print(f"📋 Whitelist contains {len(whitelist)} entries (IDs + Names).")

    # Pull a generous window of the most recent contacts.
    limit = 1000
    endpoint = f"Contact?$orderby=contactId desc&$top={limit}&$select=contactId,name,associateId"

    print(f"📡 Fetching latest {limit} contacts (this may take a few seconds)...")
    try:
        contacts = client.search(endpoint)
        if not contacts:
            print("❌ No contacts returned from API.")
            return

        print(f"✅ Received {len(contacts)} contacts. Checking against whitelist...")

        def owner_matches(raw):
            """True when the raw associate field (name or ID) is whitelisted."""
            if not raw:
                return False
            # 1. Try as upper-cased name string.
            if str(raw).upper().strip() in whitelist:
                return True
            # 2. Try as integer ID.
            try:
                return int(raw) in whitelist
            except (ValueError, TypeError):
                return False

        found = False
        for i, rec in enumerate(contacts):
            if i > 0 and i % 100 == 0:
                print(f" ... checked {i} records ...")

            # Field casing differs between OData and REST responses.
            cid = rec.get('contactId') or rec.get('ContactId')
            cname = rec.get('name') or rec.get('Name')
            raw_aid = rec.get('associateId') or rec.get('AssociateId')

            if owner_matches(raw_aid):
                print("\n🎯 FOUND YOUNGEST ROBOPLANET ACCOUNT:")
                print(f" - Company Name: {cname}")
                print(f" - Contact ID: {cid}")
                print(f" - Responsible Identifier: {raw_aid}")
                print(f" - Link: https://online3.superoffice.com/Cust26720/default.aspx?contact?contact_id={cid}")
                found = True
                break

        if not found:
            print(f"\n⚠️ No match found in the last {limit} contacts.")
            print(" This confirms that recent activity is from non-whitelist users.")

    except Exception as e:
        print(f"❌ Error: {e}")
        import traceback
        traceback.print_exc()


if __name__ == "__main__":
    find_latest_match()
|
||||||
@@ -0,0 +1,67 @@
|
|||||||
|
import sys
|
||||||
|
import os
|
||||||
|
import json
|
||||||
|
|
||||||
|
# Absolute path setup
|
||||||
|
current_dir = os.path.dirname(os.path.abspath(__file__))
|
||||||
|
connector_dir = os.path.abspath(os.path.join(current_dir, '..'))
|
||||||
|
sys.path.insert(0, connector_dir)
|
||||||
|
|
||||||
|
from superoffice_client import SuperOfficeClient
|
||||||
|
from config import settings
|
||||||
|
|
||||||
|
def verify():
    """Sample up to 50 members of selection 10960 and report whether every
    owning associate is covered by the Roboplanet whitelist.

    Fix vs. original: `resp.get('value', [])` crashed with AttributeError
    when `_get` returned a plain list (as it does for other endpoints in
    these tools) or None; both shapes are now handled.
    """
    selection_id = 10960
    print(f"🔎 Verifying members of Selection {selection_id}...")
    client = SuperOfficeClient()
    if not client.access_token:
        print("❌ Auth failed.")
        return

    # Use the Selection/ID/ContactMembers endpoint which is part of the
    # REST API; ask for a small sample with owner information.
    endpoint = f"Selection/{selection_id}/ContactMembers?$top=50&$select=ContactId,Name,AssociateId"

    print(f"📡 Querying: {endpoint}")
    try:
        resp = client._get(endpoint)
        # OData wraps rows in {'value': [...]}; other responses may be a
        # bare list (or None on failure).
        if isinstance(resp, dict):
            members = resp.get('value', [])
        else:
            members = resp or []

        if not members:
            print("⚠️ No members found via REST. Trying alternative Archive call...")
            # If REST fails, we might have to use a different approach
            return

        print(f"✅ Found {len(members)} members. Inspecting owners...")

        whitelist = settings.ROBOPLANET_WHITELIST
        owners_found = {}

        for m in members:
            aid = m.get('AssociateId')
            if aid:
                # Whitelist holds both integer IDs and upper-cased names.
                is_robo = aid in whitelist or str(aid).upper() in whitelist
                status = "✅ ROBO" if is_robo else "❌ STRANGER"
                owners_found[aid] = (status, aid)

        print("\n📊 Summary of Owners in Selection:")
        for aid, (status, val) in owners_found.items():
            print(f" {status}: Associate {aid}")

        if any("STRANGER" in s for s, v in owners_found.values()):
            print("\n⚠️ ALERT: Found owners in the selection who are NOT in our whitelist.")
            print("This explains the delta. Please check if these IDs should be added.")
        else:
            print("\n✅ All sampled members belong to whitelist users.")

    except Exception as e:
        print(f"❌ Error: {e}")


if __name__ == "__main__":
    verify()
|
||||||
@@ -147,6 +147,7 @@ def dashboard():
|
|||||||
<thead>
|
<thead>
|
||||||
<tr>
|
<tr>
|
||||||
<th>Account / Person</th>
|
<th>Account / Person</th>
|
||||||
|
<th width="100">Responsible</th>
|
||||||
<th width="120">ID</th>
|
<th width="120">ID</th>
|
||||||
<th width="150">Process Progress</th>
|
<th width="150">Process Progress</th>
|
||||||
<th width="100">Duration</th>
|
<th width="100">Duration</th>
|
||||||
@@ -156,7 +157,7 @@ def dashboard():
|
|||||||
</tr>
|
</tr>
|
||||||
</thead>
|
</thead>
|
||||||
<tbody id="account-table">
|
<tbody id="account-table">
|
||||||
<tr><td colspan="6" style="text-align:center;">Loading Accounts...</td></tr>
|
<tr><td colspan="8" style="text-align:center;">Loading Accounts...</td></tr>
|
||||||
</tbody>
|
</tbody>
|
||||||
</table>
|
</table>
|
||||||
</div>
|
</div>
|
||||||
@@ -204,7 +205,7 @@ def dashboard():
|
|||||||
tbody.innerHTML = '';
|
tbody.innerHTML = '';
|
||||||
|
|
||||||
if (accounts.length === 0) {
|
if (accounts.length === 0) {
|
||||||
tbody.innerHTML = '<tr><td colspan="6" style="text-align:center;">No accounts in process</td></tr>';
|
tbody.innerHTML = '<tr><td colspan="8" style="text-align:center;">No accounts in process</td></tr>';
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -226,6 +227,7 @@ def dashboard():
|
|||||||
<strong>${acc.name}</strong>
|
<strong>${acc.name}</strong>
|
||||||
<span class="meta">${acc.last_event}</span>
|
<span class="meta">${acc.last_event}</span>
|
||||||
</td>
|
</td>
|
||||||
|
<td><span class="status status-PENDING" style="font-size: 10px;">👤 ${acc.associate || '---'}</span></td>
|
||||||
<td>${acc.id}</td>
|
<td>${acc.id}</td>
|
||||||
<td>${phasesHtml}</td>
|
<td>${phasesHtml}</td>
|
||||||
<td><span class="meta">${acc.duration || '0s'}</span></td>
|
<td><span class="meta">${acc.duration || '0s'}</span></td>
|
||||||
|
|||||||
@@ -33,54 +33,26 @@ def safe_get_udfs(entity_data):
|
|||||||
logger.error(f"Error reading UDFs: {e}")
|
logger.error(f"Error reading UDFs: {e}")
|
||||||
return {}
|
return {}
|
||||||
|
|
||||||
def process_job(job, so_client: SuperOfficeClient):
|
def process_job(job, so_client: SuperOfficeClient, queue: JobQueue):
|
||||||
"""
|
"""
|
||||||
Core logic for processing a single job.
|
Core logic for processing a single job.
|
||||||
Returns: (STATUS, MESSAGE)
|
Returns: (STATUS, MESSAGE)
|
||||||
STATUS: 'SUCCESS', 'SKIPPED', 'RETRY', 'FAILED'
|
STATUS: 'SUCCESS', 'SKIPPED', 'RETRY', 'FAILED'
|
||||||
"""
|
"""
|
||||||
logger.info(f"--- [WORKER v1.8] Processing Job {job['id']} ({job['event_type']}) ---")
|
logger.info(f"--- [WORKER v1.9.1] Processing Job {job['id']} ({job['event_type']}) ---")
|
||||||
payload = job['payload']
|
payload = job['payload']
|
||||||
event_low = job['event_type'].lower()
|
event_low = job['event_type'].lower()
|
||||||
|
|
||||||
|
# --- CIRCUIT BREAKER: STOP INFINITE LOOPS ---
|
||||||
|
# Ignore webhooks triggered by our own API user (Associate 528)
|
||||||
|
changed_by = payload.get("ChangedByAssociateId")
|
||||||
|
if changed_by == 528:
|
||||||
|
msg = f"Skipping Echo: Event was triggered by our own API user (Associate 528)."
|
||||||
|
logger.info(f"⏭️ {msg}")
|
||||||
|
return ("SKIPPED", msg)
|
||||||
|
# --------------------------------------------
|
||||||
|
|
||||||
# 0. Noise Reduction: Filter irrelevant field changes
|
# 0. Noise Reduction: Filter irrelevant field changes
|
||||||
if job['event_type'] == 'contact.changed':
|
|
||||||
changes = payload.get('Changes', [])
|
|
||||||
changes_lower = [str(c).lower() for c in changes]
|
|
||||||
|
|
||||||
# Fields that trigger a re-analysis
|
|
||||||
relevant_fields = [
|
|
||||||
'name', 'urladdress', 'urls', 'orgnr', 'userdef_id', 'country_id'
|
|
||||||
]
|
|
||||||
|
|
||||||
# Identify which relevant field triggered the event
|
|
||||||
hit_fields = [f for f in relevant_fields if f in changes_lower]
|
|
||||||
|
|
||||||
if not hit_fields:
|
|
||||||
msg = f"Skipping 'contact.changed': No relevant fields affected. (Changes: {changes})"
|
|
||||||
logger.info(f"⏭️ {msg}")
|
|
||||||
return ("SKIPPED", msg)
|
|
||||||
else:
|
|
||||||
logger.info(f"🎯 Relevant change detected in fields: {hit_fields}")
|
|
||||||
|
|
||||||
if job['event_type'] == 'person.changed':
|
|
||||||
changes = payload.get('Changes', [])
|
|
||||||
changes_lower = [str(c).lower() for c in changes]
|
|
||||||
|
|
||||||
relevant_person_fields = [
|
|
||||||
'jobtitle', 'title', 'position_id', 'userdef_id'
|
|
||||||
]
|
|
||||||
|
|
||||||
hit_fields = [f for f in relevant_person_fields if f in changes_lower]
|
|
||||||
|
|
||||||
if not hit_fields:
|
|
||||||
msg = f"Skipping 'person.changed': No relevant fields affected. (Changes: {changes})"
|
|
||||||
logger.info(f"⏭️ {msg}")
|
|
||||||
return ("SKIPPED", msg)
|
|
||||||
else:
|
|
||||||
logger.info(f"🎯 Relevant change detected in fields: {hit_fields}")
|
|
||||||
|
|
||||||
# 1. Extract IDs Early
|
|
||||||
person_id = None
|
person_id = None
|
||||||
contact_id = None
|
contact_id = None
|
||||||
job_title = payload.get("JobTitle")
|
job_title = payload.get("JobTitle")
|
||||||
@@ -143,14 +115,47 @@ def process_job(job, so_client: SuperOfficeClient):
|
|||||||
campaign_tag = None
|
campaign_tag = None
|
||||||
|
|
||||||
try:
|
try:
|
||||||
|
# Request Associate details explicitly
|
||||||
contact_details = so_client.get_contact(
|
contact_details = so_client.get_contact(
|
||||||
contact_id,
|
contact_id,
|
||||||
select=["Name", "UrlAddress", "Urls", "UserDefinedFields", "Address", "OrgNr"]
|
select=["Name", "UrlAddress", "Urls", "UserDefinedFields", "Address", "OrgNr", "Associate"]
|
||||||
)
|
)
|
||||||
if not contact_details:
|
|
||||||
raise ValueError(f"Contact {contact_id} not found (API returned None)")
|
# ABSOLUTE SAFETY CHECK
|
||||||
|
if contact_details is None:
|
||||||
|
raise ValueError(f"SuperOffice API returned None for Contact {contact_id}. Possible timeout or record locked.")
|
||||||
|
|
||||||
crm_name = contact_details.get("Name")
|
crm_name = contact_details.get("Name", "Unknown")
|
||||||
|
|
||||||
|
# Safely get Associate object
|
||||||
|
assoc = contact_details.get("Associate") or {}
|
||||||
|
aid = assoc.get("AssociateId")
|
||||||
|
aname = assoc.get("Name", "").upper().strip() if assoc.get("Name") else ""
|
||||||
|
|
||||||
|
# PERSIST DETAILS TO DASHBOARD early
|
||||||
|
queue.update_entity_name(job['id'], crm_name, associate_name=aname)
|
||||||
|
|
||||||
|
# --- ROBOPLANET FILTER LOGIC ---
|
||||||
|
|
||||||
|
# Check both numerical ID and shortname
|
||||||
|
is_robo = False
|
||||||
|
if aname in settings.ROBOPLANET_WHITELIST:
|
||||||
|
is_robo = True
|
||||||
|
else:
|
||||||
|
try:
|
||||||
|
if aid and int(aid) in settings.ROBOPLANET_WHITELIST:
|
||||||
|
is_robo = True
|
||||||
|
except (ValueError, TypeError):
|
||||||
|
pass
|
||||||
|
|
||||||
|
if not is_robo:
|
||||||
|
msg = f"Skipped, Wackler. Contact {contact_id} ('{crm_name}'): Owner '{aname}' is not in Roboplanet whitelist."
|
||||||
|
logger.info(f"⏭️ {msg}")
|
||||||
|
return ("SKIPPED", msg)
|
||||||
|
|
||||||
|
logger.info(f"✅ Filter Passed: Contact '{crm_name}' belongs to Roboplanet Associate '{aname}'.")
|
||||||
|
# -------------------------------
|
||||||
|
|
||||||
crm_website = contact_details.get("UrlAddress")
|
crm_website = contact_details.get("UrlAddress")
|
||||||
|
|
||||||
# --- Fetch Person UDFs for Campaign Tag ---
|
# --- Fetch Person UDFs for Campaign Tag ---
|
||||||
@@ -361,8 +366,8 @@ def run_worker():
|
|||||||
job = queue.get_next_job()
|
job = queue.get_next_job()
|
||||||
if job:
|
if job:
|
||||||
try:
|
try:
|
||||||
# process_job now returns a tuple (STATUS, MESSAGE)
|
# process_job now takes (job, client, queue)
|
||||||
status, msg = process_job(job, so_client)
|
status, msg = process_job(job, so_client, queue)
|
||||||
|
|
||||||
if status == "RETRY":
|
if status == "RETRY":
|
||||||
queue.retry_job_later(job['id'], delay_seconds=120, error_msg=msg)
|
queue.retry_job_later(job['id'], delay_seconds=120, error_msg=msg)
|
||||||
|
|||||||
16
debug_zombie.py
Normal file
16
debug_zombie.py
Normal file
@@ -0,0 +1,16 @@
|
|||||||
|
import sqlite3
|
||||||
|
import os
|
||||||
|
|
||||||
|
DB_PATH = "/app/connector_queue.db"
|
||||||
|
|
||||||
|
if __name__ == "__main__":
    # Ad-hoc debug helper: dump the 20 most recent jobs in the queue DB.
    print(f"📊 Accessing database at {DB_PATH}")
    print("📊 Listing last 20 jobs in database...")
    with sqlite3.connect(DB_PATH) as conn:
        # Row factory gives name-based column access below.
        conn.row_factory = sqlite3.Row
        cur = conn.cursor()
        cur.execute("SELECT id, status, event_type, updated_at FROM jobs ORDER BY id DESC LIMIT 20")
        for row in cur.fetchall():
            print(f" - Job {row['id']}: {row['status']} ({row['event_type']}) - Updated: {row['updated_at']}")
|
||||||
|
|
||||||
@@ -1,19 +1,31 @@
|
|||||||
# Use an official Python runtime as a parent image
|
# --- STAGE 1: Builder ---
|
||||||
FROM python:3.9-slim
|
FROM python:3.9-slim AS builder
|
||||||
|
|
||||||
# Set the working directory in the container
|
|
||||||
WORKDIR /app
|
WORKDIR /app
|
||||||
|
|
||||||
# Copy the requirements file into the container at /app
|
# Install system dependencies needed for building
|
||||||
|
RUN apt-get update && apt-get install -y --no-install-recommends \
|
||||||
|
build-essential \
|
||||||
|
&& rm -rf /var/lib/apt/lists/*
|
||||||
|
|
||||||
|
# Install dependencies into a local directory
|
||||||
COPY requirements.txt .
|
COPY requirements.txt .
|
||||||
|
RUN pip install --user --no-cache-dir -r requirements.txt
|
||||||
|
|
||||||
# Install any needed packages specified in requirements.txt
|
# --- STAGE 2: Runtime ---
|
||||||
RUN pip install --no-cache-dir -r requirements.txt
|
FROM python:3.9-slim
|
||||||
|
|
||||||
# Copy the rest of the application's code from the host to the container at /app
|
WORKDIR /app
|
||||||
|
|
||||||
|
# Copy only installed packages from builder
|
||||||
|
COPY --from=builder /root/.local /root/.local
|
||||||
|
# Update PATH to include the user-installed packages
|
||||||
|
ENV PATH=/root/.local/bin:$PATH
|
||||||
|
|
||||||
|
# Copy application code
|
||||||
COPY . .
|
COPY . .
|
||||||
|
|
||||||
# Expose port 8000 to the outside world
|
# Expose port 8000
|
||||||
EXPOSE 8000
|
EXPOSE 8000
|
||||||
|
|
||||||
# Command to run the application
|
# Command to run the application
|
||||||
|
|||||||
@@ -1,20 +1,27 @@
|
|||||||
# Use an official Node.js runtime as a parent image
|
# --- STAGE 1: Build ---
|
||||||
FROM node:20-alpine
|
FROM node:20-alpine AS builder
|
||||||
|
|
||||||
# Set the working directory in the container
|
|
||||||
WORKDIR /app
|
WORKDIR /app
|
||||||
|
|
||||||
# Copy package.json and package-lock.json to the container
|
|
||||||
COPY package.json package-lock.json ./
|
|
||||||
|
|
||||||
# Install dependencies
|
# Install dependencies
|
||||||
|
COPY package.json package-lock.json ./
|
||||||
RUN npm install --legacy-peer-deps
|
RUN npm install --legacy-peer-deps
|
||||||
|
|
||||||
# Copy the rest of the application's code
|
# Copy source and build
|
||||||
COPY . .
|
COPY . .
|
||||||
|
RUN npm run build
|
||||||
|
|
||||||
# Expose the port the app runs on
|
# --- STAGE 2: Runtime ---
|
||||||
EXPOSE 5173
|
FROM nginx:alpine
|
||||||
|
|
||||||
# Command to run the development server
|
# Copy built assets from builder stage
|
||||||
CMD ["npm", "run", "dev"]
|
COPY --from=builder /app/dist /usr/share/nginx/html
|
||||||
|
|
||||||
|
# Copy custom nginx config for SPA routing
|
||||||
|
COPY nginx.conf /etc/nginx/conf.d/default.conf
|
||||||
|
|
||||||
|
# Expose port 80
|
||||||
|
EXPOSE 80
|
||||||
|
|
||||||
|
# Nginx starts automatically
|
||||||
|
CMD ["nginx", "-g", "daemon off;"]
|
||||||
15
heatmap-tool/frontend/nginx.conf
Normal file
15
heatmap-tool/frontend/nginx.conf
Normal file
@@ -0,0 +1,15 @@
|
|||||||
|
server {
    listen 80;
    server_name localhost;

    # Serve the built SPA; unknown paths fall back to index.html so
    # client-side routing keeps working on page reload.
    location / {
        root /usr/share/nginx/html;
        index index.html index.htm;
        try_files $uri $uri/ /index.html;
    }

    # Static error page for server-side failures.
    error_page 500 502 503 504 /50x.html;
    location = /50x.html {
        root /usr/share/nginx/html;
    }
}
|
||||||
Reference in New Issue
Block a user