[30388f42] Infrastructure Hardening: Repaired CE/Connector DB schema, fixed frontend styling build, implemented robust echo shield in worker v2.1.1, and integrated Lead Engine into gateway.
This commit is contained in:
24
devtools/add_strategy_column.py
Normal file
24
devtools/add_strategy_column.py
Normal file
@@ -0,0 +1,24 @@
|
||||
import sqlite3

# Default database shipped inside the app container.
DB_PATH = "/app/companies_v3_fixed_2.db"


def add_column(db_path=DB_PATH):
    """Add the 'strategy_briefing' TEXT column to the 'industries' table.

    Idempotent: a "duplicate column name" error from SQLite is detected
    and treated as "already done".

    Args:
        db_path: SQLite database file to migrate (defaults to DB_PATH).
    """
    conn = None  # BUGFIX: must exist before try, or 'finally' raises NameError when connect() fails
    try:
        conn = sqlite3.connect(db_path)
        cursor = conn.cursor()
        print(f"Adding column 'strategy_briefing' to 'industries' table in {db_path}...")
        cursor.execute("ALTER TABLE industries ADD COLUMN strategy_briefing TEXT;")
        conn.commit()
        print("Success.")
    except sqlite3.OperationalError as e:
        # ALTER TABLE on an existing column raises OperationalError; only
        # the duplicate-column case is expected and safe to skip.
        if "duplicate column name" in str(e):
            print("Column 'strategy_briefing' already exists. Skipping.")
        else:
            print(f"Error: {e}")
    except Exception as e:
        print(f"Error: {e}")
    finally:
        if conn:
            conn.close()


if __name__ == "__main__":
    add_column()
|
||||
15
devtools/db_utils/inspect_schema.py
Normal file
15
devtools/db_utils/inspect_schema.py
Normal file
@@ -0,0 +1,15 @@
|
||||
|
||||
import sqlite3


def list_industries_schema(db_path='/app/companies_v3_fixed_2.db'):
    """Print the name and type of every column in the 'industries' table.

    Args:
        db_path: SQLite database file to inspect (defaults to the app
            container's database, keeping the original behavior).
    """
    conn = sqlite3.connect(db_path)
    cursor = conn.cursor()
    cursor.execute("PRAGMA table_info(industries)")
    columns = cursor.fetchall()
    print("Columns in 'industries' table:")
    for col in columns:
        # PRAGMA table_info rows: (cid, name, type, notnull, dflt_value, pk)
        print(f"Name: {col[1]}, Type: {col[2]}")
    conn.close()


if __name__ == "__main__":
    list_industries_schema()
|
||||
15
devtools/db_utils/list_robotics_categories.py
Normal file
15
devtools/db_utils/list_robotics_categories.py
Normal file
@@ -0,0 +1,15 @@
|
||||
|
||||
import sqlite3


def list_categories(db_path='/app/companies_v3_fixed_2.db'):
    """Print id and name of every row in 'robotics_categories'.

    Args:
        db_path: SQLite database file to read (defaults to the app
            container's database, keeping the original behavior).
    """
    conn = sqlite3.connect(db_path)
    cursor = conn.cursor()
    cursor.execute("SELECT id, name FROM robotics_categories")
    rows = cursor.fetchall()
    print("Categories:")
    for r in rows:
        print(f"ID: {r[0]}, Name: {r[1]}")
    conn.close()


if __name__ == "__main__":
    list_categories()
|
||||
14
devtools/db_utils/list_tables.py
Normal file
14
devtools/db_utils/list_tables.py
Normal file
@@ -0,0 +1,14 @@
|
||||
import sqlite3


def list_tables(db_path='/app/companies_v3_fixed_2.db'):
    """Print the name of every table in the SQLite database.

    Args:
        db_path: SQLite database file to read (defaults to the app
            container's database, keeping the original behavior).
    """
    conn = sqlite3.connect(db_path)
    cursor = conn.cursor()
    cursor.execute("SELECT name FROM sqlite_master WHERE type='table';")
    tables = cursor.fetchall()
    print("Tables in database:")
    for table in tables:
        print(table[0])
    conn.close()


if __name__ == "__main__":
    list_tables()
|
||||
29
devtools/db_utils/read_vertical_data.py
Normal file
29
devtools/db_utils/read_vertical_data.py
Normal file
@@ -0,0 +1,29 @@
|
||||
|
||||
import sqlite3


def read_verticals(db_path='/app/companies_v3_fixed_2.db'):
    """Print pains/gains and category ids for a fixed set of verticals.

    Looks up four hard-coded vertical names in the 'industries' table and
    dumps their category ids and pains/gains texts; missing verticals are
    reported instead of raising.

    Args:
        db_path: SQLite database file to read (defaults to the app
            container's database, keeping the original behavior).
    """
    conn = sqlite3.connect(db_path)
    cursor = conn.cursor()

    verticals = [
        "Energy - Grid & Utilities",
        "Retail - Non-Food",
        "Tech - Data Center",
        "Healthcare - Hospital"
    ]

    for v_name in verticals:
        cursor.execute("SELECT name, pains, gains, primary_category_id, secondary_category_id, ops_focus_secondary FROM industries WHERE name = ?", (v_name,))
        row = cursor.fetchone()
        if row:
            print(f"--- {row[0]} ---")
            print(f"Primary ID: {row[3]}, Secondary ID: {row[4]}, Ops Focus Secondary: {row[5]}")
            print(f"PAINS:\n{row[1]}")
            print(f"GAINS:\n{row[2]}\n")
        else:
            print(f"Vertical '{v_name}' not found.")

    conn.close()


if __name__ == "__main__":
    read_verticals()
|
||||
20
devtools/history/fix_tech_id_2026-02-24.py
Normal file
20
devtools/history/fix_tech_id_2026-02-24.py
Normal file
@@ -0,0 +1,20 @@
|
||||
|
||||
import sqlite3


def fix_tech_id(db_path='/app/companies_v3_fixed_2.db'):
    """Set secondary_category_id = 9 for the 'Tech - Data Center' vertical.

    One-off data fix; commits the change before closing.

    Args:
        db_path: SQLite database file to update (defaults to the app
            container's database, keeping the original behavior).
    """
    conn = sqlite3.connect(db_path)
    cursor = conn.cursor()

    cursor.execute("""
        UPDATE industries
        SET secondary_category_id = 9
        WHERE name = 'Tech - Data Center'
    """)
    print("Updated Tech - Data Center Secondary ID to 9")

    conn.commit()
    conn.close()


if __name__ == "__main__":
    fix_tech_id()
|
||||
100
devtools/history/update_verticals_2026-02-24.py
Normal file
100
devtools/history/update_verticals_2026-02-24.py
Normal file
@@ -0,0 +1,100 @@
|
||||
|
||||
import sqlite3


def update_verticals(db_path='/app/companies_v3_fixed_2.db'):
    """Enrich pains/gains texts for three verticals in 'industries'.

    - Energy - Grid & Utilities: also switches the secondary product
      category to 9 (Cleaning Indoor Wet).
    - Retail - Non-Food and Tech - Data Center: text enrichment only.

    Commits once at the end, so the three updates land atomically.

    Args:
        db_path: SQLite database file to update (defaults to the app
            container's database, keeping the original behavior).
    """
    conn = sqlite3.connect(db_path)
    cursor = conn.cursor()

    # 1. Energy - Grid & Utilities
    # Update Secondary Product to 9 (Cleaning Indoor Wet) and update Texts
    energy_pains = """[Primary Product: Security]
- Sabotage & Diebstahl: Kupferdiebstahl in Umspannwerken verursacht Millionenschäden und Versorgungsausfälle.
- Reaktionszeit: Entlegene Standorte sind für Interventionskräfte oft zu spät erreichbar.
- Sicherheitsrisiko Mensch: Alleinarbeit bei Kontrollgängen in Hochspannungsbereichen ist gefährlich.

[Secondary Product: Cleaning Indoor]
- Verschmutzung in Umspannwerken: Staubablagerungen auf Böden und in technischen Bereichen können die Betriebssicherheit gefährden.
- Manuelle Reinigung in Sicherheitsbereichen: Externes Reinigungspersonal benötigt aufwändige Sicherheitsunterweisungen und Begleitung.
- Große Distanzen: Die Reinigung weitläufiger, oft unbemannter Anlagen ist logistisch aufwändig und wird häufig vernachlässigt."""

    energy_gains = """[Primary Product: Security]
- First Responder Maschine: Roboter ist bereits vor Ort, verifiziert Alarm und schreckt Täter ab.
- KRITIS-Compliance: Lückenlose, manipulationssichere Dokumentation aller Vorfälle für Behörden.
- Arbeitsschutz: Roboter übernimmt gefährliche Routinekontrollen (z.B. Thermografie an Trafos).

[Secondary Product: Cleaning Indoor]
- Permanente Sauberkeit: Autonome Reinigung gewährleistet staubfreie Böden und reduziert das Risiko von technischen Störungen.
- Zugang ohne Sicherheitsrisiko: Der Roboter ist "Teil der Anlage" und benötigt keine externe Sicherheitsfreigabe oder Begleitung.
- Ressourceneffizienz: Kosteneffiziente Reinigung großer Flächen ohne Anreisezeiten für Dienstleister."""

    cursor.execute("""
        UPDATE industries
        SET secondary_category_id = 9,
            pains = ?,
            gains = ?
        WHERE name = 'Energy - Grid & Utilities'
    """, (energy_pains, energy_gains))
    print("Updated Energy - Grid & Utilities")

    # 2. Retail - Non-Food
    # Update Texts (Enrichment)
    retail_pains = """[Primary Product: Cleaning Indoor]
- Optischer Eindruck: Verschmutzte Böden, insbesondere im Premium-Segment (Möbel, Elektronik), mindern die Wertwahrnehmung der ausgestellten Produkte massiv.
- Staubentwicklung auf großen Flächen: In Möbelhäusern und Baumärkten sammelt sich auf den riesigen Gangflächen schnell Staub, der das Einkaufserlebnis trübt.
- Personalbindung: Verkaufsberater sollen Kunden betreuen und Umsatz generieren, statt wertvolle Zeit mit unproduktiven Kehr- oder Wischtätigkeiten zu verbringen.

[Secondary Product: Service]
- Unübersichtlichkeit: Kunden finden in großen Märkten oft nicht sofort das gesuchte Produkt und binden Personal für einfache Wegbeschreibungen.
- Fehlende Interaktion: Passive Verkaufsflächen bieten wenig Anreiz für Kunden, sich länger aufzuhalten oder zu interagieren."""

    retail_gains = """[Primary Product: Cleaning Indoor]
- Perfektes Einkaufserlebnis: Stets makellos saubere Böden unterstreichen den Qualitätsanspruch des Sortiments und laden zum Verweilen ein.
- Fokus auf Beratung: Mitarbeiter werden von routinemäßigen Reinigungsaufgaben befreit und können sich voll auf den Kunden und den Verkauf konzentrieren.
- Kosteneffizienz auf der Fläche: Autonome Reinigung großer Quadratmeterzahlen ist deutlich günstiger als manuelle Arbeit, besonders außerhalb der Öffnungszeiten.

[Secondary Product: Service]
- Innovativer Kundenservice: Roboter führen Kunden autonom zum gesuchten Produktregal ("Guide-Funktion").
- Wow-Effekt: Der Einsatz von Robotik modernisiert das Markenimage und zieht Aufmerksamkeit auf sich."""

    cursor.execute("""
        UPDATE industries
        SET pains = ?,
            gains = ?
        WHERE name = 'Retail - Non-Food'
    """, (retail_pains, retail_gains))
    print("Updated Retail - Non-Food")

    # 3. Tech - Data Center
    # Update Texts (Enrichment)
    tech_pains = """[Primary Product: Security]
- Sicherheitsrisiko Zutritt: Unbefugter Zutritt in Hochsicherheitsbereiche (Serverräume, Cages) muss lückenlos detektiert und dokumentiert werden, um Zertifizierungen (ISO 27001) nicht zu gefährden.
- Fachkräftemangel Security: Qualifiziertes Wachpersonal mit Sicherheitsüberprüfung ist extrem schwer zu finden und teuer im 24/7-Schichtbetrieb.
- Dokumentationslücken: Manuelle Patrouillen sind fehleranfällig und Protokolle können unvollständig sein, was bei Audits zu Problemen führt.

[Secondary Product: Cleaning Indoor]
- Gefahr durch Staubpartikel: Feinstaub in Serverräumen kann Kühlsysteme verstopfen und Kurzschlüsse verursachen, was die Hardware-Lebensdauer verkürzt.
- Sicherheitsrisiko Reinigungspersonal: Externes Reinigungspersonal in Sicherheitsbereichen erfordert ständige Begleitung und Überwachung (Vier-Augen-Prinzip), was Personal bindet."""

    tech_gains = """[Primary Product: Security]
- Lückenloser Audit-Trail: Automatisierte, manipulationssichere Dokumentation aller Kontrollgänge und Ereignisse sichert Compliance-Anforderungen.
- 24/7 Präsenz: Der Roboter ist immer im Dienst, wird nicht müde und garantiert eine konstante Überwachungsqualität ohne Schichtwechsel-Risiken.
- Sofortige Alarmierung: Bei Anomalien (offene Rack-Tür, Wärmeentwicklung) erfolgt eine Echtzeit-Meldung an die Leitzentrale.

[Secondary Product: Cleaning Indoor]
- Maximale Hardware-Verfügbarkeit: Staubfreie Umgebung optimiert die Kühleffizienz und reduziert das Ausfallrisiko teurer Komponenten.
- Autonome "Trusted" Cleaning: Der Roboter reinigt sensibelste Bereiche ohne das Risiko menschlichen Fehlverhaltens oder unbefugten Zugriffs."""

    cursor.execute("""
        UPDATE industries
        SET pains = ?,
            gains = ?
        WHERE name = 'Tech - Data Center'
    """, (tech_pains, tech_gains))
    print("Updated Tech - Data Center")

    conn.commit()
    conn.close()


if __name__ == "__main__":
    update_verticals()
|
||||
188
devtools/push_mh_to_notion.py
Normal file
188
devtools/push_mh_to_notion.py
Normal file
@@ -0,0 +1,188 @@
|
||||
import sys
|
||||
import os
|
||||
import csv
|
||||
import requests
|
||||
import json
|
||||
import logging
|
||||
from collections import defaultdict
|
||||
|
||||
# --- Logging -----------------------------------------------------------
# Root config with a terse "LEVEL: message" line format; this module's
# logger is named so log lines are attributable to the pusher.
logging.basicConfig(level=logging.INFO, format='%(levelname)s: %(message)s')
logger = logging.getLogger("NotionPusher")

# --- Configuration -----------------------------------------------------
CSV_PATH = "./docs/miller_heiman_augmented.csv"  # augmented Miller-Heiman export
NOTION_TOKEN_FILE = "/app/notion_token.txt"  # integration token, single line
INDUSTRIES_DB_ID = "2ec88f4285448014ab38ea664b4c2b81"  # Notion database id
|
||||
|
||||
def load_notion_token():
    """Read and return the Notion API token from NOTION_TOKEN_FILE.

    Exits the process with status 1 if the file cannot be read, since
    nothing downstream can proceed without credentials.
    """
    try:
        with open(NOTION_TOKEN_FILE, "r") as f:
            return f.read().strip()
    # BUGFIX: narrowed from a bare 'except:' — that swallowed
    # KeyboardInterrupt/SystemExit and mislabelled every error as a
    # missing token file. OSError covers missing file and permission errors.
    except OSError:
        logger.error("Token file not found.")
        sys.exit(1)
|
||||
|
||||
def add_strategy_property(token):
    """Ensure the 'Strategy Briefing' rich-text property exists on the DB.

    Fetches the current database schema; if the property is missing,
    PATCHes the database to create it.

    Args:
        token: Notion integration token (Bearer auth).

    Returns:
        True if the property exists or was created, False on any API failure.
    """
    url = f"https://api.notion.com/v1/databases/{INDUSTRIES_DB_ID}"
    headers = {
        "Authorization": f"Bearer {token}",
        "Notion-Version": "2022-06-28",
        "Content-Type": "application/json"
    }

    # First, get current schema to check if it exists.
    # FIX: timeout added — requests has no default timeout, so a dead
    # connection would hang this script forever.
    resp = requests.get(url, headers=headers, timeout=30)
    if resp.status_code != 200:
        logger.error(f"Failed to fetch DB schema: {resp.text}")
        return False

    current_properties = resp.json().get("properties", {})
    if "Strategy Briefing" in current_properties:
        logger.info("Property 'Strategy Briefing' already exists.")
        return True

    logger.info("Property 'Strategy Briefing' missing. Attempting to create...")

    # Empty object selects the plain rich_text property type.
    payload = {
        "properties": {
            "Strategy Briefing": {
                "rich_text": {}
            }
        }
    }

    update_resp = requests.patch(url, headers=headers, json=payload, timeout=30)
    if update_resp.status_code == 200:
        logger.info("Successfully added 'Strategy Briefing' property.")
        return True
    else:
        logger.error(f"Failed to add property: {update_resp.text}")
        return False
|
||||
|
||||
def get_pages_map(token):
    """Return a map of Vertical Name -> Page ID for every page in the DB.

    Paginates through the Notion database query endpoint using
    start_cursor/next_cursor. On an API error the loop stops and the
    partial mapping collected so far is returned.

    Args:
        token: Notion integration token (Bearer auth).
    """
    url = f"https://api.notion.com/v1/databases/{INDUSTRIES_DB_ID}/query"
    headers = {
        "Authorization": f"Bearer {token}",
        "Notion-Version": "2022-06-28",
        "Content-Type": "application/json"
    }

    mapping = {}
    has_more = True
    next_cursor = None

    while has_more:
        payload = {}
        if next_cursor:
            payload["start_cursor"] = next_cursor

        # FIX: timeout added — requests has no default timeout and would
        # hang indefinitely on a stalled connection.
        resp = requests.post(url, headers=headers, json=payload, timeout=30)
        if resp.status_code != 200:
            # FIX: was a silent break; now the partial result is at least logged.
            logger.error(f"Query failed ({resp.status_code}): {resp.text}")
            break

        data = resp.json()
        for page in data.get("results", []):
            props = page.get("properties", {})
            # The 'Vertical' title may be split across rich-text fragments.
            name_parts = props.get("Vertical", {}).get("title", [])
            if name_parts:
                name = "".join([t.get("plain_text", "") for t in name_parts])
                mapping[name] = page["id"]

        has_more = data.get("has_more", False)
        next_cursor = data.get("next_cursor")

    return mapping
|
||||
|
||||
def update_page(token, page_id, pains, gains, strategy):
    """PATCH a Notion page's Pains / Gains / Strategy Briefing properties.

    Only non-empty values are written; an entirely empty update is skipped.
    Each value is truncated to 2000 characters (Notion rich-text limit).

    Args:
        token: Notion integration token (Bearer auth).
        page_id: target page id.
        pains, gains, strategy: text payloads (may be empty strings).
    """
    url = f"https://api.notion.com/v1/pages/{page_id}"
    headers = {
        "Authorization": f"Bearer {token}",
        "Notion-Version": "2022-06-28",
        "Content-Type": "application/json"
    }

    # Construct Payload
    props = {}

    # Only update if content exists (safety)
    if pains:
        props["Pains"] = {"rich_text": [{"text": {"content": pains[:2000]}}]}  # Limit to 2000 chars to be safe
    if gains:
        props["Gains"] = {"rich_text": [{"text": {"content": gains[:2000]}}]}
    if strategy:
        props["Strategy Briefing"] = {"rich_text": [{"text": {"content": strategy[:2000]}}]}

    if not props:
        return

    payload = {"properties": props}
    # FIX: timeout added — requests has no default timeout, so a stalled
    # connection would hang the batch update forever.
    resp = requests.patch(url, headers=headers, json=payload, timeout=30)

    if resp.status_code != 200:
        logger.error(f"Failed to update page {page_id}: {resp.text}")
    else:
        logger.info(f"Updated page {page_id}")
|
||||
|
||||
def main():
    """Push pains/gains/strategy texts from the augmented CSV into Notion.

    Flow: load token -> ensure the 'Strategy Briefing' property exists ->
    map existing Notion pages by vertical name -> read the semicolon-
    delimited CSV -> PATCH every vertical that exists in Notion.
    """
    token = load_notion_token()

    # 1. Ensure Schema
    if not add_strategy_property(token):
        logger.warning("Could not add 'Strategy Briefing' column. Please add it manually in Notion as a 'Text' property.")
        # We continue anyway: the failure may be a permissions issue while
        # the column already exists. If it truly is missing, the per-page
        # updates below will fail and be logged individually.

    # 2. Map Notion Verticals (vertical name -> page id)
    logger.info("Mapping existing Notion pages...")
    notion_map = get_pages_map(token)
    logger.info(f"Found {len(notion_map)} verticals in Notion.")

    # 3. Read CSV — utf-8-sig strips a BOM if Excel wrote one;
    # the export is semicolon-delimited.
    logger.info(f"Reading CSV: {CSV_PATH}")
    csv_data = {}

    with open(CSV_PATH, "r", encoding="utf-8-sig") as f:
        reader = csv.DictReader(f, delimiter=";")
        for row in reader:
            vertical = row.get("Vertical")
            if not vertical:
                # Rows without a vertical name cannot be matched to a page.
                continue

            # Aggregate the three MH columns into one strategy text,
            # skipping empty ones so no stray headers are emitted.
            parts = []
            if row.get("MH Coach Hypothesis"):
                parts.append(f"🧠 MH Coach: {row.get('MH Coach Hypothesis')}")
            if row.get("MH Early Red Flags"):
                parts.append(f"🚩 Red Flags: {row.get('MH Early Red Flags')}")
            if row.get("MH Adjustments / Ergänzungen"):
                parts.append(f"🔧 Adjustments: {row.get('MH Adjustments / Ergänzungen')}")

            strategy = "\n\n".join(parts)

            # Store (last row wins if duplicates, but they should be identical for vertical data)
            csv_data[vertical] = {
                "pains": row.get("Pains (clean)", ""),
                "gains": row.get("Gains (clean)", ""),
                "strategy": strategy
            }

    # 4. Push Updates — only verticals that already exist as Notion pages
    # are updated; unknown ones are logged and skipped.
    logger.info("Starting Batch Update...")
    count = 0
    for vertical, data in csv_data.items():
        if vertical in notion_map:
            page_id = notion_map[vertical]
            logger.info(f"Updating '{vertical}'...")
            update_page(token, page_id, data["pains"], data["gains"], data["strategy"])
            count += 1
        else:
            logger.warning(f"Skipping '{vertical}' (Not found in Notion)")

    logger.info(f"Finished. Updated {count} verticals.")


if __name__ == "__main__":
    main()
|
||||
Reference in New Issue
Block a user