refactor: [30388f42] Strukturiere Root-Skripte thematisch neu

- Organisiert eine Vielzahl von Skripten aus dem Root-Verzeichnis in thematische Unterordner, um die Übersichtlichkeit zu verbessern und die Migration vorzubereiten.
- Verschiebt SuperOffice-bezogene Test- und Hilfsskripte in einen eigenen thematischen Unterordner.
- Verschiebt Notion-bezogene Synchronisations- und Import-Skripte in einen eigenen thematischen Unterordner.
- Archiviert eindeutig veraltete und ungenutzte Skripte in einem Archiv-Unterordner.
- Die zentralen Helfer-Skripte bleiben im Root, da sie von mehreren Tools als Abhängigkeit genutzt werden.
This commit is contained in:
2026-03-06 10:16:08 +00:00
parent a89d1625d4
commit d021b6b71c
99 changed files with 0 additions and 0 deletions

View File

@@ -0,0 +1,91 @@
import requests
import json
import os
# File holding the Notion integration token (read in main()).
TOKEN_FILE = 'notion_api_key.txt'
# Target database to insert into — presumably the product master DB; only the ID is visible here.
DATABASE_ID = "2e088f42-8544-815e-a3f9-e226f817bded"

# Data from the VIGGO S100-N analysis
# Shape mirrors the Notion property mapping used in add_to_notion():
# metadata -> title/select/url columns, core_specs + layers.cleaning -> number columns.
PRODUCT_DATA = {
    "specs": {
        "metadata": {
            "brand": "VIGGO",
            "model_name": "S100-N",
            "category": "cleaning",
            "manufacturer_url": None
        },
        "core_specs": {
            "battery_runtime_min": 360,
            "charge_time_min": 270,
            "weight_kg": 395.0,
            "max_slope_deg": 10.0
        },
        "layers": {
            "cleaning": {
                "fresh_water_l": 60.0,
                "area_performance_sqm_h": 3000.0
            }
        }
    }
}
def add_to_notion(token):
    """Create a page for PRODUCT_DATA in the Notion product database.

    Args:
        token: Notion integration token used as Bearer auth.

    Prints the created page URL on success, or the error details on failure.
    """
    url = "https://api.notion.com/v1/pages"
    headers = {
        "Authorization": f"Bearer {token}",
        "Notion-Version": "2022-06-28",
        "Content-Type": "application/json"
    }
    specs = PRODUCT_DATA["specs"]
    meta = specs["metadata"]
    core = specs["core_specs"]
    cleaning = specs["layers"].get("cleaning", {})
    properties = {
        "Model Name": {"title": [{"text": {"content": meta["model_name"]}}]},
        "Brand": {"select": {"name": meta["brand"]}},
        "Category": {"select": {"name": meta["category"]}},
        "Battery Runtime (min)": {"number": core.get("battery_runtime_min")},
        "Charge Time (min)": {"number": core.get("charge_time_min")},
        "Weight (kg)": {"number": core.get("weight_kg")},
        "Max Slope (deg)": {"number": core.get("max_slope_deg")},
        "Fresh Water (l)": {"number": cleaning.get("fresh_water_l")},
        "Area Performance (m2/h)": {"number": cleaning.get("area_performance_sqm_h")}
    }
    # Add URL if present
    if meta.get("manufacturer_url"):
        properties["Manufacturer URL"] = {"url": meta["manufacturer_url"]}
    payload = {
        "parent": {"database_id": DATABASE_ID},
        "properties": properties
    }
    print(f"Adding {meta['brand']} {meta['model_name']} to Notion database...")
    try:
        response = requests.post(url, headers=headers, json=payload)
        response.raise_for_status()
        data = response.json()
        print("\n=== SUCCESS ===")
        print("Product added to database!")
        print(f"Page URL: {data.get('url')}")
    except requests.exceptions.HTTPError as e:
        print("\n=== ERROR ===")
        print(f"HTTP Error: {e}")
        # e.response is the same Response object raise_for_status() raised from.
        if e.response is not None:
            print(f"Response: {e.response.text}")
    except requests.exceptions.RequestException as e:
        # Connection/timeout/DNS failures previously escaped with a raw
        # traceback; report them in the same style as HTTP errors.
        print("\n=== ERROR ===")
        print(f"Request failed: {e}")
def main():
    """Load the API token from disk and push the product into Notion."""
    try:
        with open(TOKEN_FILE, 'r') as token_file:
            api_token = token_file.read().strip()
    except FileNotFoundError:
        print(f"Error: Could not find '{TOKEN_FILE}'")
        return
    add_to_notion(api_token)


if __name__ == "__main__":
    main()

View File

@@ -0,0 +1,58 @@
import requests
from getpass import getpass

# Interactive, secure prompt for the token (input is not echoed).
print("--- Notion API Token Gültigkeits-Check ---")
notion_token = getpass("Bitte gib deinen Notion API Key ein (Eingabe wird nicht angezeigt): ")
if not notion_token:
    print("\nFehler: Kein Token eingegeben.")
    exit()

# Simplest API endpoint to test authentication.
url = "https://api.notion.com/v1/users/me"
headers = {
    "Authorization": f"Bearer {notion_token}",
    "Notion-Version": "2022-06-28"
}
print("\n... Sende Test-Anfrage an Notion...")
try:
    # --- TEST 1: basic authentication ---
    print("\n[TEST 1/2] Prüfe grundlegende Authentifizierung (/users/me)...")
    user_response = requests.get("https://api.notion.com/v1/users/me", headers=headers)
    user_response.raise_for_status()
    print("✅ ERFOLG! Der API Token ist gültig.")
    # --- TEST 2: search for the 'Projects' database ---
    print("\n[TEST 2/2] Versuche, die 'Projects'-Datenbank über die Suche zu finden (/search)...")
    search_url = "https://api.notion.com/v1/search"
    search_payload = {
        "query": "Projects",
        "filter": {"value": "database", "property": "object"}
    }
    search_response = requests.post(search_url, headers=headers, json=search_payload)
    search_response.raise_for_status()
    results = search_response.json().get("results", [])
    if not results:
        print("🟡 WARNUNG: Die Suche war erfolgreich, hat aber keine Datenbank namens 'Projects' gefunden.")
    else:
        print("✅✅✅ ERFOLG! Die Suche funktioniert und hat die 'Projects'-Datenbank gefunden.")
        print("Gefundene Datenbanken:")
        for db in results:
            print(f"- ID: {db['id']}, Titel: {db.get('title', [{}])[0].get('plain_text', 'N/A')}")
except requests.exceptions.HTTPError as e:
    print(f"\n❌ FEHLER! Einer der Tests ist fehlgeschlagen.")
    print(f"URL: {e.request.url}")
    print(f"HTTP Status Code: {e.response.status_code}")
    print("Antwort von Notion:")
    try:
        print(e.response.json())
    except ValueError:
        # Fix: was a bare `except:` that also swallowed SystemExit/KeyboardInterrupt;
        # a non-JSON body makes .json() raise a ValueError subclass.
        print(e.response.text)
except requests.exceptions.RequestException as e:
    print(f"\n❌ FEHLER! Ein Netzwerk- oder Verbindungsfehler ist aufgetreten: {e}")

View File

@@ -0,0 +1,42 @@
import json
from notion_client import Client
# SETUP
# SECURITY FIX: the integration token used to be hard-coded in this file.
# It is now read from notion_token.txt, the same convention the other root
# scripts use. The previously committed key must be rotated — it is public.
try:
    with open("notion_token.txt", "r") as f:
        TOKEN = f.read().strip()
except FileNotFoundError:
    print("Error: notion_token.txt not found.")
    exit(1)
SECTOR_DB_ID = "59a4598a20084ddaa035f5eba750a1be"
notion = Client(auth=TOKEN)
def inspect_via_page():
    """Fetch one page from the sector DB and dump its property map.

    Useful for discovering the exact property names/IDs/types the
    pipeline must use when writing to this database.
    """
    print(f"🔍 Suche nach einer Seite in DB {SECTOR_DB_ID}...")
    try:
        # Grab a single sample page from the database.
        query_result = notion.databases.query(
            database_id=SECTOR_DB_ID,
            page_size=1
        )
        pages = query_result.get("results")
        if not pages:
            print("⚠️ Keine Seiten in der Datenbank gefunden. Bitte lege manuell eine an.")
            return
        sample_page = pages[0]
        print(f"✅ Seite gefunden: '{sample_page['id']}'")
        # Inspect the page's property objects.
        prop_map = sample_page.get("properties", {})
        print("\n--- INTERNE PROPERTY-MAP DER SEITE ---")
        print(json.dumps(prop_map, indent=2))
        print("\n--- ZUSAMMENFASSUNG FÜR DEINE PIPELINE ---")
        for column_name, column_meta in prop_map.items():
            print(f"Spaltenname: '{column_name}' | ID: {column_meta.get('id')} | Typ: {column_meta.get('type')}")
    except Exception as e:
        print(f"💥 Fehler beim Inspect: {e}")


if __name__ == "__main__":
    inspect_via_page()

View File

@@ -0,0 +1,68 @@
# create_feature_translator_db.py
import requests
import time
import json
# --- Configuration ---
# The integration token lives in a git-ignored file next to the script.
try:
    with open("notion_token.txt", "r") as f:
        NOTION_TOKEN = f.read().strip()
except FileNotFoundError:
    print("Error: notion_token.txt not found.")
    exit(1)
# Parent page the new database is created under; other scripts in this
# repo label the same page ID "Roboplanet".
PARENT_PAGE_ID = "2e088f42854480248289deb383da3818"
NOTION_VERSION = "2022-06-28"
NOTION_API_BASE_URL = "https://api.notion.com/v1"
# Shared headers for every request below.
HEADERS = {
    "Authorization": f"Bearer {NOTION_TOKEN}",
    "Notion-Version": NOTION_VERSION,
    "Content-Type": "application/json",
}

# --- Database Schema ---
DB_NAME = "Feature-to-Value Translator"
DB_SCHEMA = {
    "title": [{"type": "text", "text": {"content": DB_NAME}}],
    "properties": {
        "Feature": {"title": {}},
        "Story (Benefit)": {"rich_text": {}},
        "Headline": {"rich_text": {}},
        "Product Master": {
            "relation": {
                "database_id": "2e288f42-8544-81d8-96f5-c231f84f719a",  # Product Master DB ID
                # dual_property: Notion also creates the back-reference
                # column on the related database.
                "dual_property": {}
            }
        }
    }
}
# --- Main Logic ---
def main():
    """Create the Feature-to-Value Translator database under PARENT_PAGE_ID.

    On success, prints the new database ID and a reminder to record it in
    Notion_Dashboard.md; on failure, prints the HTTP error details.
    """
    print(f"Attempting to create database: {DB_NAME}")
    endpoint = f"{NOTION_API_BASE_URL}/databases"
    body = {
        "parent": {"type": "page_id", "page_id": PARENT_PAGE_ID},
        "title": DB_SCHEMA["title"],
        "properties": DB_SCHEMA["properties"],
    }
    try:
        response = requests.post(endpoint, headers=HEADERS, json=body)
        response.raise_for_status()
        new_db_id = response.json()["id"]
        print(f"Successfully created database '{DB_NAME}' with ID: {new_db_id}")
        print("\n--- IMPORTANT ---")
        print("Please update 'Notion_Dashboard.md' with this new ID.")
        print(f"'Feature-to-Value Translator': '{new_db_id}'")
        print("-------------------")
    except requests.exceptions.HTTPError as e:
        print(f"HTTP Error creating database {DB_NAME}: {e}")
        print(f"Response content: {response.text}")
    except Exception as e:
        print(f"An unexpected error occurred: {e}")


if __name__ == "__main__":
    main()

View File

@@ -0,0 +1,71 @@
import requests
import json
import os
# File holding the Notion integration token (read in main()).
TOKEN_FILE = 'notion_api_key.txt'
PARENT_PAGE_ID = "2e088f42-8544-8024-8289-deb383da3818"  # "Roboplanet" page
def create_product_database(token):
    """Create the '📦 RoboPlanet Product Master' database under PARENT_PAGE_ID.

    Args:
        token: Notion integration token used as Bearer auth.

    Returns:
        The new database's ID, or None if the API call failed.
    """
    print(f"Creating '📦 RoboPlanet Product Master' database under parent {PARENT_PAGE_ID}...")
    api_url = "https://api.notion.com/v1/databases"
    request_headers = {
        "Authorization": f"Bearer {token}",
        "Notion-Version": "2022-06-28",
        "Content-Type": "application/json"
    }
    # All spec columns share the same plain-number configuration.
    number_col = {"number": {"format": "number"}}
    database_definition = {
        "parent": {"type": "page_id", "page_id": PARENT_PAGE_ID},
        "title": [{"type": "text", "text": {"content": "📦 RoboPlanet Product Master"}}],
        "properties": {
            "Model Name": {"title": {}},
            "Brand": {"select": {"options": [
                {"name": "VIGGO", "color": "blue"},
                {"name": "PUDU", "color": "orange"}
            ]}},
            "Category": {"select": {"options": [
                {"name": "cleaning", "color": "green"},
                {"name": "service", "color": "blue"},
                {"name": "security", "color": "red"}
            ]}},
            # Core specs
            "Battery Runtime (min)": number_col,
            "Charge Time (min)": number_col,
            "Weight (kg)": number_col,
            "Max Slope (deg)": number_col,
            # Cleaning layer
            "Fresh Water (l)": number_col,
            "Area Performance (m2/h)": number_col,
            # Metadata
            "Manufacturer URL": {"url": {}},
            "GTM Status": {"status": {}}
        }
    }
    try:
        resp = requests.post(api_url, headers=request_headers, json=database_definition)
        resp.raise_for_status()
        created = resp.json()
        print(f"\n=== SUCCESS ===")
        print(f"Database created! ID: {created['id']}")
        print(f"URL: {created.get('url')}")
        return created['id']
    except requests.exceptions.HTTPError as e:
        print(f"\n=== ERROR ===")
        print(f"HTTP Error: {e}")
        print(f"Response: {resp.text}")
        return None
def main():
    """Load the API token from TOKEN_FILE and create the product database."""
    try:
        with open(TOKEN_FILE, 'r') as f:
            token = f.read().strip()
    except FileNotFoundError:
        print(f"Error: Could not find '{TOKEN_FILE}'")
        return
    # The return value (the new DB ID) was previously bound to an unused
    # local; create_product_database() already prints it, so just call it.
    create_product_database(token)


if __name__ == "__main__":
    main()

View File

@@ -0,0 +1,58 @@
import requests
import json
from getpass import getpass
def inspect_database_properties(db_id: str):
    """Read and print the properties (columns) of a Notion database.

    Prompts interactively for the API token, fetches the database schema,
    and prints each property's name and type, plus relation targets and
    select/status options, so the exact names can be copied into other
    scripts.

    Args:
        db_id: The Notion database ID to inspect.
    """
    print(f"--- Untersuche Eigenschaften von Notion DB: {db_id} ---")
    token = getpass("Bitte gib deinen Notion API Key ein (Eingabe wird nicht angezeigt): ")
    if not token:
        print("\nFehler: Kein Token eingegeben. Abbruch.")
        return
    print(f"\n... Lese Struktur von Datenbank {db_id}...")
    url = f"https://api.notion.com/v1/databases/{db_id}"
    headers = {
        "Authorization": f"Bearer {token}",
        "Notion-Version": "2022-06-28"
    }
    try:
        response = requests.get(url, headers=headers)
        response.raise_for_status()
        database_info = response.json()
        properties = database_info.get("properties", {})
        print("\n✅ Erfolgreich! Folgende Spalten (Properties) wurden gefunden:")
        print("--------------------------------------------------")
        for name, details in properties.items():
            prop_type = details.get("type")
            print(f"Spaltenname: '{name}' (Typ: {prop_type})")
            if prop_type == "relation":
                relation_details = details.get("relation", {})
                print(f" -> Verknüpft mit Datenbank-ID: {relation_details.get('database_id')}")
            # Print the available options for status and select columns.
            elif prop_type in ["status", "select", "multi_select"]:
                options = details.get(prop_type, {}).get("options", [])
                if options:
                    print(f" -> Verfügbare Optionen:")
                    for option in options:
                        print(f" - '{option.get('name')}'")
        print("--------------------------------------------------")
        print("Bitte finde den korrekten Namen der Spalte, die zu den Projekten verknüpft ist, und den exakten Namen für den 'In Bearbeitung'-Status.")
    except requests.exceptions.RequestException as e:
        print(f"\n❌ FEHLER! Konnte die Datenbankstruktur nicht lesen: {e}")
        if hasattr(e, 'response') and e.response is not None:
            print(f"HTTP Status Code: {e.response.status_code}")
            try:
                print(f"Antwort des Servers: {json.dumps(e.response.json(), indent=2)}")
            except ValueError:
                # Fix: was a bare `except:` that also swallowed SystemExit and
                # KeyboardInterrupt; non-JSON bodies raise a ValueError subclass.
                print(f"Antwort des Servers: {e.response.text}")


if __name__ == "__main__":
    tasks_db_id = "2e888f42-8544-8153-beac-e604719029cf"  # ID of the "Tasks [UT]" database
    inspect_database_properties(tasks_db_id)

View File

@@ -0,0 +1,36 @@
import requests
import json
# Notion Config
# The integration token lives in a git-ignored file next to the script.
try:
    with open("notion_token.txt", "r") as f:
        NOTION_TOKEN = f.read().strip()
except FileNotFoundError:
    print("Error: notion_token.txt not found.")
    exit(1)
NOTION_VERSION = "2022-06-28"
NOTION_API_BASE_URL = "https://api.notion.com/v1"
# Shared headers for the request below.
HEADERS = {
    "Authorization": f"Bearer {NOTION_TOKEN}",
    "Notion-Version": NOTION_VERSION,
    "Content-Type": "application/json",
}
# DB ID from import_product.py
DB_ID = "2e288f42-8544-8113-b878-ec99c8a02a6b"
def get_db_properties(database_id):
    """Fetch the property (column) schema of a Notion database.

    Returns the 'properties' object from the API response, or None on
    any error (the error is printed).
    """
    endpoint = f"{NOTION_API_BASE_URL}/databases/{database_id}"
    try:
        resp = requests.get(endpoint, headers=HEADERS)
        resp.raise_for_status()
        return resp.json().get("properties")
    except Exception as exc:
        print(f"Error: {exc}")
        return None
# Script entry: dump DB_ID's property schema as pretty-printed JSON.
props = get_db_properties(DB_ID)
if props:
    print(json.dumps(props, indent=2))

View File

@@ -0,0 +1,63 @@
import requests
import json
from getpass import getpass
def debug_search_databases():
    """Interactively list all Notion databases the integration can access.

    Prompts for the API token, calls the /search endpoint filtered to
    databases, and prints each database's title and ID so the correct IDs
    for 'Projects' and 'All Tasks' can be verified.
    """
    print("--- Notion Datenbank Such-Debugger ---")
    token = getpass("Bitte gib deinen Notion API Key ein (Eingabe wird nicht angezeigt): ")
    if not token:
        print("\nFehler: Kein Token eingegeben. Abbruch.")
        return
    print("\n... Sende Suchanfrage an Notion für alle Datenbanken...")
    url = "https://api.notion.com/v1/search"
    headers = {
        "Authorization": f"Bearer {token}",
        "Content-Type": "application/json",
        "Notion-Version": "2022-06-28"
    }
    payload = {
        "filter": {
            "value": "database",
            "property": "object"
        },
        "sort": {
            "direction": "ascending",
            "timestamp": "last_edited_time"
        }
    }
    try:
        response = requests.post(url, headers=headers, json=payload)
        response.raise_for_status()  # Raises HTTPError for 4xx/5xx status codes
        results = response.json().get("results", [])
        if not results:
            print("\nKeine Datenbanken gefunden, auf die die Integration Zugriff hat.")
            print("Bitte stelle sicher, dass die Integration auf Top-Level-Seiten geteilt ist.")
            return
        print(f"\nGefundene Datenbanken ({len(results)} insgesamt):")
        print("--------------------------------------------------")
        for db in results:
            db_id = db["id"]
            db_title_parts = db.get("title", [])
            db_title = db_title_parts[0].get("plain_text", "(Unbenannt)") if db_title_parts else "(Unbenannt)"
            print(f"Titel: '{db_title}'\n ID: {db_id}\n")
        print("--------------------------------------------------")
        print("Bitte überprüfe die genauen Titel und IDs für 'Projects' und 'All Tasks'.")
    except requests.exceptions.RequestException as e:
        print(f"\n❌ FEHLER! Fehler bei der Suche nach Datenbanken: {e}")
        if hasattr(e, 'response') and e.response is not None:
            print(f"HTTP Status Code: {e.response.status_code}")
            try:
                print(f"Antwort des Servers: {json.dumps(e.response.json(), indent=2)}")
            except ValueError:
                # Fix: was a bare `except:`; non-JSON bodies raise a
                # ValueError subclass from .json().
                print(f"Antwort des Servers: {e.response.text}")


if __name__ == "__main__":
    debug_search_databases()

View File

@@ -0,0 +1,85 @@
import os
import json
import requests
from dotenv import load_dotenv
# Load environment variables from a local .env file (if present).
load_dotenv()
# Session state written by the dev tooling; expected to contain task_id and token.
SESSION_FILE = ".dev_session/SESSION_INFO"
def debug_notion():
    """Diagnose why updating the 'Total Duration (h)' property fails.

    Reads task_id and token from SESSION_FILE, dumps every property of the
    task page, verifies the target property exists (suggesting similarly
    named ones if not), then attempts a tiny +0.01 PATCH to prove write
    access works end-to-end.
    """
    if not os.path.exists(SESSION_FILE):
        print("No session file found.")
        return
    with open(SESSION_FILE, "r") as f:
        data = json.load(f)
    task_id = data.get("task_id")
    token = data.get("token")
    print(f"Debug Info:")
    print(f"Task ID: {task_id}")
    # Only a token prefix is printed so the secret never lands in logs.
    print(f"Token (first 4 chars): {token[:4]}...")
    url = f"https://api.notion.com/v1/pages/{task_id}"
    headers = {
        "Authorization": f"Bearer {token}",
        "Notion-Version": "2022-06-28",
        "Content-Type": "application/json"
    }
    # 1. Fetch Page
    print("\n--- Fetching Page Properties ---")
    resp = requests.get(url, headers=headers)
    if resp.status_code != 200:
        print(f"Error fetching page: {resp.status_code}")
        print(resp.text)
        return
    page_data = resp.json()
    properties = page_data.get("properties", {})
    print(f"Found {len(properties)} properties:")
    target_prop_name = "Total Duration (h)"
    found_target = False
    for name, prop in properties.items():
        # Each property's value payload is keyed by its own type name.
        type_ = prop.get("type")
        val = prop.get(type_)
        print(f"- '{name}' ({type_}): {val}")
        if name == target_prop_name:
            found_target = True
    if not found_target:
        print(f"\nCRITICAL: Property '{target_prop_name}' NOT found on this task!")
        # Check for similar names
        for name in properties.keys():
            if "duration" in name.lower() or "zeit" in name.lower() or "hours" in name.lower():
                print(f" -> Did you mean: '{name}'?")
        return
    # 2. Try Update
    print(f"\n--- Attempting Update of '{target_prop_name}' ---")
    # An empty number property yields None; treat it as 0.0 for the bump.
    current_val = properties[target_prop_name].get("number") or 0.0
    print(f"Current Value: {current_val}")
    new_val = current_val + 0.01
    print(f"Updating to: {new_val}")
    update_payload = {
        "properties": {
            target_prop_name: {"number": new_val}
        }
    }
    patch_resp = requests.patch(url, headers=headers, json=update_payload)
    if patch_resp.status_code == 200:
        print("✅ Update Successful!")
        print(f"New Value on Server: {patch_resp.json()['properties'][target_prop_name].get('number')}")
    else:
        print(f"❌ Update Failed: {patch_resp.status_code}")
        print(patch_resp.text)


if __name__ == "__main__":
    debug_notion()

View File

@@ -0,0 +1,254 @@
# distribute_product_data.py
import requests
import json
import re
import os
import time
# --- Configuration ---
# The integration token lives in a git-ignored file next to the script.
try:
    with open("notion_token.txt", "r") as f:
        NOTION_TOKEN = f.read().strip()
except FileNotFoundError:
    print("Error: notion_token.txt not found.")
    exit(1)
NOTION_VERSION = "2022-06-28"
NOTION_API_BASE_URL = "https://api.notion.com/v1"
# Shared headers for every Notion request made below.
HEADERS = {
    "Authorization": f"Bearer {NOTION_TOKEN}",
    "Notion-Version": NOTION_VERSION,
    "Content-Type": "application/json",
}

# --- Database IDs (from Notion_Dashboard.md) ---
DB_IDS = {
    "Product Master": "2e288f42-8544-81d8-96f5-c231f84f719a",
    "Sector & Persona Master": "2e288f42-8544-8113-b878-ec99c8a02a6b",
    "Messaging Matrix": "2e288f42-8544-81b0-83d4-c16623cc32d1",
    "Feature-to-Value Translator": "2e288f42-8544-8184-ba08-d6d736879f19",
}
# --- Helper Functions ---
def create_notion_page(database_id, properties):
    """Create a page in the given database.

    Returns the created page's JSON, or None on an HTTP error (which is
    printed along with the response body).
    """
    endpoint = f"{NOTION_API_BASE_URL}/pages"
    body = {"parent": {"database_id": database_id}, "properties": properties}
    try:
        resp = requests.post(endpoint, headers=HEADERS, json=body)
        resp.raise_for_status()
    except requests.exceptions.HTTPError as e:
        print(f"HTTP Error creating page in DB {database_id}: {e}\nResponse: {resp.text}")
        return None
    print(f"Successfully created page in DB {database_id}.")
    return resp.json()
def update_notion_page(page_id, properties):
    """Patch the given properties onto an existing Notion page.

    Returns the updated page's JSON, or None on an HTTP error (which is
    printed along with the response body).
    """
    endpoint = f"{NOTION_API_BASE_URL}/pages/{page_id}"
    try:
        resp = requests.patch(endpoint, headers=HEADERS, json={"properties": properties})
        resp.raise_for_status()
    except requests.exceptions.HTTPError as e:
        print(f"HTTP Error updating page {page_id}: {e}\nResponse: {resp.text}")
        return None
    print(f"Successfully updated page {page_id}.")
    return resp.json()
def find_notion_page_by_title(database_id, title):
    """Return the first page whose 'Name' title equals *title*, else None.

    Also returns None (after printing details) when the query itself fails.
    """
    endpoint = f"{NOTION_API_BASE_URL}/databases/{database_id}/query"
    query = {"filter": {"property": "Name", "title": {"equals": title}}}
    try:
        resp = requests.post(endpoint, headers=HEADERS, json=query)
        resp.raise_for_status()
        matches = resp.json().get("results")
        return matches[0] if matches else None
    except requests.exceptions.HTTPError as e:
        print(f"HTTP Error searching page in DB {database_id}: {e}\nResponse: {resp.text}")
        return None
def get_page_property(page_id, property_id):
    """Fetch one property object from a page via the page-properties endpoint.

    Returns the raw JSON payload, or None on an HTTP error (printed).
    """
    endpoint = f"{NOTION_API_BASE_URL}/pages/{page_id}/properties/{property_id}"
    try:
        resp = requests.get(endpoint, headers=HEADERS)
        resp.raise_for_status()
        return resp.json()
    except requests.exceptions.HTTPError as e:
        print(f"HTTP Error retrieving property {property_id}: {e}\nResponse: {resp.text}")
        return None
def get_rich_text_content(property_object):
    """Extract plain text from a Notion rich_text property payload.

    Accepts the object returned by the page-property endpoint (a dict with
    a 'results' list) and returns the concatenated plain text. Returns ""
    for empty, missing, or malformed payloads.
    """
    if not property_object:
        return ""
    try:
        # The property endpoint returns a list in the 'results' key.
        if 'results' in property_object and property_object['results']:
            rich = property_object['results'][0].get('rich_text', {})
            # Single rich-text object: take its plain_text directly.
            if isinstance(rich, dict) and 'plain_text' in rich:
                return rich.get('plain_text', '')
            # List of rich-text objects: join all their plain_text parts.
            if isinstance(rich, list):
                pieces = [item.get("plain_text", "") for item in rich if isinstance(item, dict)]
                return "".join(pieces)
    except (KeyError, IndexError, TypeError) as e:
        print(f"Error parsing rich text object: {e}")
        return ""
    return ""
def format_rich_text(text):
    """Wrap *text* in Notion's rich_text property structure (max 2000 chars)."""
    limit = 2000  # Notion's per-rich-text-item content limit
    if len(text) > limit:
        print(f"Warning: Truncating text from {len(text)} to {limit} characters.")
        text = text[:limit]
    item = {"type": "text", "text": {"content": text}}
    return {"rich_text": [item]}
def format_title(text):
    """Wrap *text* in Notion's title property structure."""
    item = {"type": "text", "text": {"content": text}}
    return {"title": [item]}
def format_relation(page_ids):
    """Build Notion's relation structure from one page ID or a list of IDs."""
    ids = page_ids if isinstance(page_ids, list) else [page_ids]
    return {"relation": [{"id": pid} for pid in ids]}
def parse_markdown_table(markdown_text):
    """Parse a simple markdown table into a list of row dicts.

    Line 1 is the header row, line 2 the `---|---` separator, and every
    following line a data row. Rows whose cell count does not match the
    header count are skipped.

    Fix: the previous version filtered out empty strings when splitting,
    so any row with an empty cell was silently dropped (cell count no
    longer matched the header). Empty cells are now kept as "".
    """
    lines = markdown_text.strip().split('\n')
    if len(lines) < 2:
        return []

    def _split_row(line):
        # Drop optional leading/trailing pipes, then split into cells.
        return [cell.strip() for cell in line.strip().strip('|').split('|')]

    headers = _split_row(lines[0])
    data_rows = []
    for line in lines[2:]:  # Skip header and separator
        values = _split_row(line)
        if len(values) == len(headers):
            data_rows.append(dict(zip(headers, values)))
    return data_rows
# --- Main Logic ---
def main():
    """Distribute a product's consolidated text fields into dedicated DBs.

    Reads the 'Strategy Matrix' and 'Feature-to-Value Translator' markdown
    tables from the product's page in Product Master, upserts the derived
    rows into the Sector & Persona Master, Messaging Matrix, and
    Feature-to-Value Translator databases, wires up the relations, and then
    blanks the redundant source fields on the product page.
    """
    PRODUCT_NAME = "Puma M20"
    print(f"--- Starting data distribution for product: {PRODUCT_NAME} ---")
    # 1. Get the product page from Product Master
    product_page = find_notion_page_by_title(DB_IDS["Product Master"], PRODUCT_NAME)
    if not product_page:
        print(f"Product '{PRODUCT_NAME}' not found. Aborting.")
        return
    product_page_id = product_page["id"]
    print(f"Found Product Page ID: {product_page_id}")
    # 2. Distribute Strategy Matrix Data
    # The markdown table lives in a rich_text property; fetch it via the
    # property endpoint and flatten it to plain text before parsing.
    strategy_matrix_prop_id = product_page["properties"]["Strategy Matrix"]["id"]
    strategy_matrix_obj = get_page_property(product_page_id, strategy_matrix_prop_id)
    strategy_matrix_text = get_rich_text_content(strategy_matrix_obj)
    if strategy_matrix_text:
        parsed_matrix = parse_markdown_table(strategy_matrix_text)
        if parsed_matrix:
            print("\n--- Distributing Strategy Matrix Data ---")
            sector_page_ids_for_product = []
            for row in parsed_matrix:
                segment_name = row.get("Segment")
                pain_point = row.get("Pain Point")
                angle = row.get("Angle")
                differentiation = row.get("Differentiation")
                # Every column must be present and non-empty to process the row.
                if not all([segment_name, pain_point, angle, differentiation]):
                    print(f"Skipping row due to missing data: {row}")
                    continue
                print(f"\nProcessing Segment: {segment_name}")
                # Find or Create Sector in Sector & Persona Master
                sector_page = find_notion_page_by_title(DB_IDS["Sector & Persona Master"], segment_name)
                if sector_page:
                    sector_page_id = sector_page["id"]
                    print(f"Found existing Sector page with ID: {sector_page_id}")
                    update_notion_page(sector_page_id, {"Pains": format_rich_text(pain_point)})
                else:
                    print(f"Creating new Sector page for '{segment_name}'...")
                    new_sector_page = create_notion_page(DB_IDS["Sector & Persona Master"], {"Name": format_title(segment_name), "Pains": format_rich_text(pain_point)})
                    if not new_sector_page:
                        print(f"Failed to create sector page for '{segment_name}'. Skipping.")
                        continue
                    sector_page_id = new_sector_page["id"]
                sector_page_ids_for_product.append(sector_page_id)
                # Create entry in Messaging Matrix
                print(f"Creating Messaging Matrix entry for '{segment_name}'...")
                messaging_properties = {
                    "Name": format_title(f"{PRODUCT_NAME} - {segment_name}"),
                    "Satz 1": format_rich_text(angle),
                    "Satz 2": format_rich_text(differentiation),
                    "Product Master": format_relation(product_page_id),
                    "Sector Master": format_relation(sector_page_id)
                }
                create_notion_page(DB_IDS["Messaging Matrix"], messaging_properties)
            # Update Product Master with relations to all processed sectors
            if sector_page_ids_for_product:
                print(f"\nUpdating Product Master with relations to {len(sector_page_ids_for_product)} sectors...")
                update_notion_page(product_page_id, {"Sector Master": format_relation(sector_page_ids_for_product)})
            # Clean up redundant fields in Product Master
            print("Cleaning up redundant Strategy Matrix field in Product Master...")
            update_notion_page(product_page_id, {"Strategy Matrix": format_rich_text("")})
    else:
        print("Strategy Matrix is empty. Skipping distribution.")
    # 3. Distribute Feature-to-Value Translator Data
    feature_translator_prop_id = product_page["properties"]["Feature-to-Value Translator"]["id"]
    feature_translator_obj = get_page_property(product_page_id, feature_translator_prop_id)
    feature_translator_text = get_rich_text_content(feature_translator_obj)
    if feature_translator_text:
        parsed_features = parse_markdown_table(feature_translator_text)
        if parsed_features:
            print("\n--- Distributing Feature-to-Value Translator Data ---")
            for item in parsed_features:
                feature = item.get("Feature")
                # NOTE(review): the table's column header is "The Story (Benefit)"
                # while the target DB property is "Story (Benefit)".
                story = item.get("The Story (Benefit)")
                headline = item.get("Headline")
                if not all([feature, story, headline]):
                    print(f"Skipping feature item due to missing data: {item}")
                    continue
                print(f"Creating Feature-to-Value entry for: {feature}")
                create_notion_page(
                    DB_IDS["Feature-to-Value Translator"],
                    {
                        "Feature": format_title(feature),
                        "Story (Benefit)": format_rich_text(story),
                        "Headline": format_rich_text(headline),
                        "Product Master": format_relation(product_page_id)
                    }
                )
            # Clean up the source field
            print("Cleaning up redundant Feature-to-Value Translator field in Product Master...")
            update_notion_page(product_page_id, {"Feature-to-Value Translator": format_rich_text("")})
    else:
        print("Feature-to-Value Translator is empty. Skipping distribution.")
    print("\n--- Data distribution process complete. ---")


if __name__ == "__main__":
    main()

View File

@@ -0,0 +1,72 @@
import requests
import json
import os
# File holding the Notion integration token (read in main()).
TOKEN_FILE = 'notion_api_key.txt'
PARENT_PAGE_ID = "2e088f42-8544-8024-8289-deb383da3818"  # "Roboplanet" page
def main():
    """Create a 'Hello World' page with one paragraph under the Roboplanet page."""
    try:
        with open(TOKEN_FILE, 'r') as token_file:
            token = token_file.read().strip()
    except FileNotFoundError:
        print(f"Error: Could not find '{TOKEN_FILE}'")
        return

    print(f"Creating 'Hello World' page under parent {PARENT_PAGE_ID}...")
    endpoint = "https://api.notion.com/v1/pages"
    request_headers = {
        "Authorization": f"Bearer {token}",
        "Notion-Version": "2022-06-28",
        "Content-Type": "application/json"
    }
    paragraph_block = {
        "object": "block",
        "type": "paragraph",
        "paragraph": {
            "rich_text": [
                {
                    "type": "text",
                    "text": {
                        "content": "This page was created automatically by the GTM Engine Bot."
                    }
                }
            ]
        }
    }
    payload = {
        "parent": {"page_id": PARENT_PAGE_ID},
        "properties": {
            "title": [
                {
                    "text": {
                        "content": "Hello World"
                    }
                }
            ]
        },
        "children": [paragraph_block]
    }
    try:
        resp = requests.post(endpoint, headers=request_headers, json=payload)
        resp.raise_for_status()
        created = resp.json()
        print("\n=== SUCCESS ===")
        print("New page created!")
        print(f"URL: {created.get('url')}")
    except requests.exceptions.HTTPError as e:
        print("\n=== ERROR ===")
        print(f"HTTP Error: {e}")
        print(f"Response: {resp.text}")
    except Exception as e:
        print("\n=== ERROR ===")
        print(f"An error occurred: {e}")


if __name__ == "__main__":
    main()

View File

@@ -0,0 +1,179 @@
import json
import requests
import sys
# --- CONFIGURATION ---
# Local analysis export to import into Notion.
JSON_FILE = 'analysis_robo-planet.de-4.json'
NOTION_TOKEN = ""  # Will be loaded from file
# NOTE: built with the empty token at import time; main() overwrites the
# Authorization header after reading notion_token.txt.
HEADERS = {
    "Authorization": f"Bearer {NOTION_TOKEN}",
    "Content-Type": "application/json",
    "Notion-Version": "2022-06-28",
}
# --- DATABASE IDs ---
COMPANIES_DB_ID = "2e688f42-8544-8158-8673-d8b1e3eca5b5"
CANONICAL_PRODUCTS_DB_ID = "2f088f42-8544-81d5-bec7-d9189f3bacd4"
PORTFOLIO_DB_ID = "2e688f42-8544-81df-8fcc-f1d7f8745e00"
LANDMINES_DB_ID = ""  # Optional: Add if you want to re-import landmines
REFERENCES_DB_ID = ""  # Optional: Add if you want to re-import references
# --- API HELPERS ---
def query_db(db_id, filter_payload=None):
    """Retrieve all pages from a Notion database, following pagination.

    Args:
        db_id: The database to query.
        filter_payload: Optional Notion filter object to restrict results.

    Returns:
        A list of page objects, or None if any request fails.
    """
    endpoint = f"https://api.notion.com/v1/databases/{db_id}/query"
    pages = []
    cursor = None
    while True:
        body = {}
        if cursor:
            body["start_cursor"] = cursor
        if filter_payload:
            body["filter"] = filter_payload
        resp = requests.post(endpoint, headers=HEADERS, json=body)
        if resp.status_code != 200:
            print(f"Error querying DB {db_id}: {resp.status_code}")
            print(resp.json())
            return None
        data = resp.json()
        pages.extend(data["results"])
        if not data.get("has_more"):
            return pages
        cursor = data["next_cursor"]
def create_page(db_id, properties):
    """Create a new page in a Notion database; returns its JSON or None."""
    body = {"parent": {"database_id": db_id}, "properties": properties}
    resp = requests.post(
        "https://api.notion.com/v1/pages",
        headers=HEADERS,
        data=json.dumps(body),
    )
    if resp.status_code == 200:
        return resp.json()
    print(f"Error creating page in DB {db_id}: {resp.status_code}")
    print(resp.json())
    return None
# --- STATE AWARENESS HELPERS ---
def get_existing_items_map(db_id, name_property="Name"):
    """Fetch all items from a DB and return a {title: page_id} map.

    Pages whose title property is empty or differently shaped are skipped.
    Aborts the whole script if the database cannot be queried.
    """
    print(f"Fetching existing items from DB {db_id} to build cache...")
    pages = query_db(db_id)
    if pages is None:
        sys.exit(f"Could not fetch items from DB {db_id}. Aborting.")
    mapping = {}
    for page in pages:
        try:
            title = page["properties"][name_property]["title"][0]["text"]["content"]
        except (KeyError, IndexError):
            continue
        mapping[title] = page["id"]
    print(f" - Found {len(mapping)} existing items.")
    return mapping
def get_existing_portfolio_links(db_id):
    """Fetch all portfolio links as a set of (company_id, product_id) tuples.

    Pages missing either relation are skipped. Aborts the whole script if
    the database cannot be queried.
    """
    print(f"Fetching existing portfolio links from DB {db_id}...")
    pages = query_db(db_id)
    if pages is None:
        sys.exit(f"Could not fetch portfolio links from DB {db_id}. Aborting.")
    links = set()
    for page in pages:
        props = page["properties"]
        try:
            pair = (
                props["Related Competitor"]["relation"][0]["id"],
                props["Canonical Product"]["relation"][0]["id"],
            )
        except (KeyError, IndexError):
            continue
        links.add(pair)
    print(f" - Found {len(links)} existing portfolio links.")
    return links
# --- MAIN LOGIC ---
def main():
    """Idempotently sync competitors, products and portfolio links into Notion.

    Phases:
      1. Read the current Notion state into local caches (name -> id maps,
         set of existing portfolio links).
      2. Load the local JSON analysis file.
      3. Create any missing company pages ("upsert" by exact name match).
      4. Create any missing canonical products and company<->product links.
    """
    global NOTION_TOKEN, HEADERS
    # The token file is the only credential source; abort early if missing.
    try:
        with open("notion_token.txt", "r") as f:
            NOTION_TOKEN = f.read().strip()
        HEADERS["Authorization"] = f"Bearer {NOTION_TOKEN}"
    except FileNotFoundError:
        print("Error: `notion_token.txt` not found.")
        return
    # --- Phase 1: State Awareness ---
    print("\n--- Phase 1: Reading current state from Notion ---")
    companies_map = get_existing_items_map(COMPANIES_DB_ID)
    products_map = get_existing_items_map(CANONICAL_PRODUCTS_DB_ID)
    portfolio_links = get_existing_portfolio_links(PORTFOLIO_DB_ID)
    # --- Phase 2: Processing JSON ---
    print("\n--- Phase 2: Processing local JSON file ---")
    try:
        with open(JSON_FILE, 'r') as f:
            data = json.load(f)
    except FileNotFoundError:
        print(f"Error: `{JSON_FILE}` not found.")
        return
    for analysis in data.get('analyses', []):
        competitor = analysis['competitor']
        competitor_name = competitor['name']
        print(f"\nProcessing competitor: {competitor_name}")
        # --- Phase 3: "Upsert" Company ---
        if competitor_name not in companies_map:
            print(f" - Company '{competitor_name}' not found. Creating...")
            props = {"Name": {"title": [{"text": {"content": competitor_name}}]}}
            new_company = create_page(COMPANIES_DB_ID, props)
            if new_company:
                # Cache the new id so later iterations see it too.
                companies_map[competitor_name] = new_company["id"]
            else:
                print(f" - Failed to create company '{competitor_name}'. Skipping.")
                continue
        company_id = companies_map[competitor_name]
        # --- Phase 4: "Upsert" Products and Portfolio Links ---
        for product in analysis.get('portfolio', []):
            product_name = product['product']
            # Upsert Canonical Product
            if product_name not in products_map:
                print(f" - Product '{product_name}' not found. Creating canonical product...")
                props = {"Name": {"title": [{"text": {"content": product_name}}]}}
                new_product = create_page(CANONICAL_PRODUCTS_DB_ID, props)
                if new_product:
                    products_map[product_name] = new_product["id"]
                else:
                    print(f" - Failed to create canonical product '{product_name}'. Skipping.")
                    continue
            product_id = products_map[product_name]
            # Check and create Portfolio Link
            if (company_id, product_id) not in portfolio_links:
                print(f" - Portfolio link for '{competitor_name}' -> '{product_name}' not found. Creating...")
                portfolio_props = {
                    "Product": {"title": [{"text": {"content": f"{competitor_name} - {product_name}"}}]},
                    "Related Competitor": {"relation": [{"id": company_id}]},
                    "Canonical Product": {"relation": [{"id": product_id}]}
                }
                new_portfolio_entry = create_page(PORTFOLIO_DB_ID, portfolio_props)
                if new_portfolio_entry:
                    portfolio_links.add((company_id, product_id))  # Add to cache to prevent re-creation in same run
            else:
                print(f" - Portfolio link for '{competitor_name}' -> '{product_name}' already exists. Skipping.")
    print("\n--- ✅ Import script finished ---")
if __name__ == "__main__":
main()

View File

@@ -0,0 +1,263 @@
import requests
import json
import re
import os
import time
# --- Configuration ---
# NOTION_TOKEN is read from a file next to the script; the script cannot run
# without it, so a missing file aborts at import time.
try:
    with open("notion_token.txt", "r") as f:
        NOTION_TOKEN = f.read().strip()
except FileNotFoundError:
    print("Error: notion_token.txt not found.")
    print("Please create the notion_token.txt file with your Notion integration token.")
    exit(1)
NOTION_VERSION = "2022-06-28"
NOTION_API_BASE_URL = "https://api.notion.com/v1"
# Shared headers for every Notion API request made by this script.
HEADERS = {
    "Authorization": f"Bearer {NOTION_TOKEN}",
    "Notion-Version": NOTION_VERSION,
    "Content-Type": "application/json",
}
# --- Database IDs (from Notion_Dashboard.md) ---
DB_IDS = {
    "Product Master": "2e288f42-8544-81d8-96f5-c231f84f719a",
    "Sector & Persona Master": "2e288f42-8544-8113-b878-ec99c8a02a6b",
    "Messaging Matrix": "2e288f42-8544-81b0-83d4-c16623cc32d1",
}
# --- Helper Functions ---
def clean_json_response(text):
    """Strip a surrounding Markdown ```json fence from *text*, if present."""
    fenced = text.startswith("```json") and text.endswith("```")
    return text[7:-3].strip() if fenced else text
def create_notion_page(database_id, properties):
    """Create a page in *database_id* with *properties*.

    Returns the parsed API response on success, None on any error (the error
    is printed, never raised).
    """
    payload = {
        "parent": {"database_id": database_id},
        "properties": properties,
    }
    response = None
    try:
        response = requests.post(f"{NOTION_API_BASE_URL}/pages", headers=HEADERS, json=payload)
        response.raise_for_status()
        print(f"Successfully created page in DB {database_id}.")
        return response.json()
    except requests.exceptions.HTTPError as e:
        print(f"HTTP Error creating page in DB {database_id}: {e}")
        print(f"Response content: {response.text}")
        return None
    except Exception as e:
        print(f"An unexpected error occurred while creating a page: {e}")
        return None
def update_notion_page(page_id, properties):
    """PATCH *properties* onto an existing page.

    Returns the parsed API response on success, None on any error (the error
    is printed, never raised).
    """
    response = None
    try:
        response = requests.patch(
            f"{NOTION_API_BASE_URL}/pages/{page_id}",
            headers=HEADERS,
            json={"properties": properties},
        )
        response.raise_for_status()
        print(f"Successfully updated page {page_id}.")
        return response.json()
    except requests.exceptions.HTTPError as e:
        print(f"HTTP Error updating page {page_id}: {e}")
        print(f"Response content: {response.text}")
        return None
    except Exception as e:
        print(f"An unexpected error occurred while updating a page: {e}")
        return None
def find_notion_page_by_title(database_id, title):
    """Return the first page whose "Name" title equals *title*, else None.

    Also returns None on any API error (printed, never raised).
    """
    query = {"filter": {"property": "Name", "title": {"equals": title}}}
    response = None
    try:
        response = requests.post(
            f"{NOTION_API_BASE_URL}/databases/{database_id}/query",
            headers=HEADERS,
            json=query,
        )
        response.raise_for_status()
        matches = response.json().get("results")
        # Only the first match matters for upsert-by-title semantics.
        return matches[0] if matches else None
    except requests.exceptions.HTTPError as e:
        print(f"HTTP Error searching page in DB {database_id}: {e}")
        print(f"Response content: {response.text}")
        return None
    except Exception as e:
        print(f"An unexpected error occurred while searching for a page: {e}")
        return None
def get_database_properties(database_id):
    """Fetch a database's schema and return its "properties" mapping.

    Returns None on any API error (printed, never raised).
    """
    response = None
    try:
        response = requests.get(f"{NOTION_API_BASE_URL}/databases/{database_id}", headers=HEADERS)
        response.raise_for_status()
        return response.json().get("properties")
    except requests.exceptions.HTTPError as e:
        print(f"HTTP Error retrieving database properties for DB {database_id}: {e}")
        print(f"Response content: {response.text}")
        return None
    except Exception as e:
        print(f"An unexpected error occurred while retrieving database properties: {e}")
        return None
def format_rich_text(text):
    """Wrap *text* in Notion's rich_text property structure."""
    fragment = {"type": "text", "text": {"content": text}}
    return {"rich_text": [fragment]}
def format_title(text):
    """Wrap *text* in Notion's title property structure."""
    fragment = {"type": "text", "text": {"content": text}}
    return {"title": [fragment]}
def format_relation(page_ids):
    """Format one page id, or a list of them, into Notion's relation structure."""
    ids = page_ids if isinstance(page_ids, list) else [page_ids]
    return {"relation": [{"id": pid} for pid in ids]}
def extract_section(content, title):
    """Return the body of the markdown section headed by ``## <title>``.

    The section runs until the next ``## `` heading or end of text; returns
    "" when the heading is absent.
    """
    m = re.search(rf"## {re.escape(title)}\n(.*?)(?=\n## |\Z)", content, re.S)
    if m is None:
        return ""
    return m.group(1).strip()
# --- Main Import Logic ---
def main():
    """Import the Puma M20 markdown analysis into the Notion databases.

    Phase 1 upserts the product into "Product Master" (keyed by title),
    Phase 2 creates hard-coded sector pages, Phase 3 parses the battlecards
    and creates one Messaging Matrix row per objection.
    """
    # Guard against the template placeholder being left in place.
    if NOTION_TOKEN == "YOUR_NOTION_TOKEN":
        print("ERROR: Please replace 'YOUR_NOTION_TOKEN' in the script with your actual Notion token.")
        return
    # 1. Read the markdown file
    try:
        with open("Puma_m20_2026-01-08.md", "r", encoding="utf-8") as f:
            md_content = f.read()
    except FileNotFoundError:
        print("ERROR: 'Puma_m20_2026-01-08.md' not found. Please make sure the file is in the same directory.")
        return
    # Define the product name
    PRODUCT_NAME = "Puma M20"  # This will be replaced by the user's actual product name.
    # --- Phase 1: Prepare Product Data ---
    print(f"--- Phase 1: Preparing Product Data for {PRODUCT_NAME} ---")
    product_analysis = extract_section(md_content, "2. Product Analysis")
    # NOTE(review): both searches assume the markdown always contains the
    # "**Key Features:**" and "**Constraints:**" markers; if either is missing,
    # .group() is called on None and raises AttributeError — confirm input format.
    key_features = re.search(r"\*\*Key Features:\*\*(.*?)\*\*Constraints:\*\*", product_analysis, re.S).group(1).strip()
    constraints = re.search(r"\*\*Constraints:\*\*(.*)", product_analysis, re.S).group(1).strip()
    target_audience = extract_section(md_content, "3. Target Audience")
    strategy_matrix = extract_section(md_content, "5. Strategy Matrix")
    # Notion rich_text values are capped per block, hence the truncation.
    if len(strategy_matrix) > 2000:
        strategy_matrix = strategy_matrix[:2000]  # Truncate to 2000 characters
        print("Warning: 'Strategy Matrix' content truncated to 2000 characters due to Notion API limit.")
    feature_translator = extract_section(md_content, "FEATURE-TO-VALUE TRANSLATOR (PHASE 9)")
    product_properties = {
        "Name": format_title(PRODUCT_NAME),
        "Beschreibung": format_rich_text("Ein geländegängiger, wetterfester Roboter, der für anspruchsvolle Umgebungen konzipiert wurde."),
        "Key Features": format_rich_text(key_features),
        "Constraints": format_rich_text(constraints),
        "Target Audience": format_rich_text(target_audience),
        "Strategy Matrix": format_rich_text(strategy_matrix),
        "Feature-to-Value Translator": format_rich_text(feature_translator),
        "Layer": {"multi_select": [{"name": "Security"}, {"name": "Service"}]}
    }
    # Check if product already exists (upsert keyed on the page title).
    existing_product_page = find_notion_page_by_title(DB_IDS["Product Master"], PRODUCT_NAME)
    product_page_id = None
    if existing_product_page:
        product_page_id = existing_product_page["id"]
        print(f"Product '{PRODUCT_NAME}' already exists with ID: {product_page_id}. Updating...")
        updated_page = update_notion_page(product_page_id, product_properties)
        if not updated_page:
            print("Failed to update product page. Aborting.")
            return
    else:
        print(f"Product '{PRODUCT_NAME}' not found. Creating new page...")
        new_product_page = create_notion_page(DB_IDS["Product Master"], product_properties)
        if not new_product_page:
            print("Failed to create product page. Aborting.")
            return
        product_page_id = new_product_page["id"]
        print(f"Created Product '{PRODUCT_NAME}' with ID: {product_page_id}")
    # --- Phase 2: Create Sectors in Sector & Persona Master ---
    print("\n--- Phase 2: Creating Sectors ---")
    sector_pages = {}
    # Hard-coded sector seed data. NOTE(review): no existence check is done
    # here, so re-running the script creates duplicate sector pages — confirm
    # whether that is acceptable.
    sectors = {
        "Chemieparks/Petrochemische Anlagen": {
            "definition": "Anlagen dieser Art haben ausgedehnte Gelände, komplexe Infrastruktur und hohe Sicherheitsanforderungen...",
            "pains": "Umfangreiche Gelände erfordern ständige Sicherheits- und Inspektionsrundgänge, oft unter gefährlichen Bedingungen. Personalmangel und hohe Kosten für manuelle Inspektionen.",
            "personas": ["Head of Security", "Werkschutzleiter", "Geschäftsführer/Vorstand", "Leiter Instandhaltung / Betriebsleiter"]
        },
        "Energieversorgungsunternehmen (z.B. Windparks, Solarparks)": {
            "definition": "Diese Anlagen erstrecken sich oft über große, schwer zugängliche Gebiete...",
            "pains": "Weitläufige Anlagen in oft unwegsamem Gelände. Schwierige und teure Inspektion von Solarmodulen oder Windkraftanlagen. Anfälligkeit für Vandalismus und Diebstahl.",
            "personas": ["Head of Security", "Geschäftsführer/Vorstand", "Leiter Instandhaltung / Betriebsleiter"]
        },
        "Logistikzentren/Großflächenlager": {
            "definition": "Große Lagerflächen und komplexe Logistikprozesse erfordern eine ständige Überwachung und Inspektion.",
            "pains": "Hohe Anforderungen an Sicherheit und Ordnung in großen Lagerhallen... Ineffiziente manuelle Reinigung großer Flächen. Gefahr von Unfällen...",
            "personas": ["Leiter Instandhaltung / Betriebsleiter", "Geschäftsführer/Vorstand", "Head of Security"]
        }
    }
    for name, data in sectors.items():
        sector_properties = {
            "Name": format_title(name),
            "RoboPlanet-Definition": format_rich_text(data["definition"]),
            "Pains": format_rich_text(data["pains"]),
            "Personas": {"multi_select": [{"name": p} for p in data["personas"]]}
        }
        sector_page = create_notion_page(DB_IDS["Sector & Persona Master"], sector_properties)
        if sector_page:
            sector_pages[name] = sector_page["id"]
            print(f"Created Sector '{name}' with ID: {sector_page['id']}")
        else:
            print(f"Failed to create sector '{name}'.")
    # --- Phase 3: Create Messaging Elements ---
    print("\n--- Phase 3: Creating Messaging Elements (Battlecards) ---")
    battlecards_content = extract_section(md_content, "Kill-Critique Battlecards")
    battlecards = re.findall(r"### Persona: (.*?)\n> \*\*Objection:\*\* \"(.*?)\"\n\n\*\*Response:\*\* (.*?)(?=\n\n---|\Z)", battlecards_content, re.S)
    for persona, objection, response in battlecards:
        # Determine which sector this battlecard applies to
        # NOTE(review): the two checks run sequentially, so a response matching
        # the second condition overrides the first — confirm this precedence.
        current_sector_id = None
        if "Chemiepark" in response or "Wackler Security" in response:
            current_sector_id = sector_pages.get("Chemieparks/Petrochemische Anlagen")
        if "Logistik" in response or "Reinigung" in response:
            current_sector_id = sector_pages.get("Logistikzentren/Großflächenlager")
        message_properties = {
            "Name": format_title(f"Objection: {objection}"),
            "Satz 1": format_rich_text(f"Persona: {persona.strip()}\nObjection: {objection}"),
            "Satz 2": format_rich_text(response.strip()),
            "Product Master": format_relation(product_page_id),
        }
        if current_sector_id:
            message_properties["Sector Master"] = format_relation(current_sector_id)
        create_notion_page(DB_IDS["Messaging Matrix"], message_properties)
    print("\nImport process complete.")
if __name__ == "__main__":
main()

View File

@@ -0,0 +1,320 @@
import json
import requests
import sys
import argparse
import re
# --- CONFIGURATION ---
NOTION_TOKEN = ""  # Will be loaded from file
# NOTE: HEADERS is built with an empty bearer token at import time; main()
# overwrites the Authorization entry after reading notion_token.txt.
HEADERS = {
    "Authorization": f"Bearer {NOTION_TOKEN}",
    "Content-Type": "application/json",
    "Notion-Version": "2022-06-28",
}
# --- DATABASE IDs ---
# Target databases for the single-competitor import (companies, de-duplicated
# product names, company<->product links, battlecard landmines, customer
# references, and industry verticals).
COMPANIES_DB_ID = "2e688f42-8544-8158-8673-d8b1e3eca5b5"
CANONICAL_PRODUCTS_DB_ID = "2f088f42-8544-81d5-bec7-d9189f3bacd4"
PORTFOLIO_DB_ID = "2e688f42-8544-81df-8fcc-f1d7f8745e00"
LANDMINES_DB_ID = "2e688f42-8544-81aa-94f8-d6242be4d0cd"
REFERENCES_DB_ID = "2e688f42-8544-81df-8d83-f4d7f57d8168"
INDUSTRIES_DB_ID = "2ec88f42-8544-8014-ab38-ea664b4c2b81"
# --- API HELPERS ---
def query_db(db_id, filter_payload=None):
    """Retrieve all pages of a Notion database, following cursor pagination.

    Returns the accumulated list of page objects, or None on the first API
    error (status and body are printed).
    """
    url = f"https://api.notion.com/v1/databases/{db_id}/query"
    collected = []
    cursor = None
    while True:
        body = {}
        if cursor:
            body["start_cursor"] = cursor
        if filter_payload:
            body["filter"] = filter_payload
        resp = requests.post(url, headers=HEADERS, json=body)
        if resp.status_code != 200:
            print(f"Error querying DB {db_id}: {resp.status_code}")
            print(resp.json())
            return None
        data = resp.json()
        collected.extend(data["results"])
        if not data.get("has_more"):
            return collected
        cursor = data["next_cursor"]
def create_page(db_id, properties):
    """Create a new page in the given Notion database.

    Returns the created page object on success, or None after printing the
    API error on failure.
    """
    payload = {"parent": {"database_id": db_id}, "properties": properties}
    resp = requests.post("https://api.notion.com/v1/pages", headers=HEADERS, data=json.dumps(payload))
    if resp.status_code == 200:
        return resp.json()
    print(f"Error creating page in DB {db_id}: {resp.status_code}")
    print(resp.json())
    return None
def update_page(page_id, properties):
    """Patch *properties* onto an existing Notion page.

    Returns the updated page object on success, or None after printing the
    API error on failure.
    """
    resp = requests.patch(
        f"https://api.notion.com/v1/pages/{page_id}",
        headers=HEADERS,
        data=json.dumps({"properties": properties}),
    )
    if resp.status_code == 200:
        return resp.json()
    print(f"Error updating page {page_id}: {resp.status_code}")
    print(resp.json())
    return None
# --- STATE AWARENESS HELPERS ---
def get_existing_items_map(db_id, name_property="Name"):
    """Map the stripped title text of every page in a DB to its page id.

    Pages whose title property is absent, empty, or malformed are skipped.
    Exits the whole script if the database cannot be queried.
    """
    print(f"Fetching existing items from DB {db_id} to build cache...")
    pages = query_db(db_id)
    if pages is None:
        sys.exit(f"Could not fetch items from DB {db_id}. Aborting.")
    mapping = {}
    for entry in pages:
        # Handle cases where the title is missing or malformed.
        try:
            titles = entry["properties"][name_property].get("title", [])
        except (KeyError, IndexError):
            continue
        if not titles:
            continue
        name = titles[0].get("text", {}).get("content", "").strip()
        if name:
            mapping[name] = entry["id"]
    print(f" - Found {len(mapping)} existing items.")
    return mapping
def get_existing_relations(db_id, relation_property_name, target_relation_id_prop_name):
    """Return a set of (name, first_related_id, first_target_related_id) triples.

    Rows missing a title or either relation are skipped. Exits the script if
    the database cannot be queried.
    """
    print(f"Fetching existing relations from DB {db_id}...")
    pages = query_db(db_id)
    if pages is None:
        sys.exit(f"Could not fetch relations from DB {db_id}. Aborting.")
    found = set()
    for entry in pages:
        try:
            props = entry["properties"]
            name = props["Name"]["title"][0]["text"]["content"]
            rel_ids = [rel["id"] for rel in props[relation_property_name].get("relation", [])]
            target_ids = [rel["id"] for rel in props[target_relation_id_prop_name].get("relation", [])]
        except (KeyError, IndexError):
            continue
        if rel_ids and target_ids:
            found.add((name, rel_ids[0], target_ids[0]))
    print(f" - Found {len(found)} existing relations.")
    return found
def inspect_database(db_id):
    """Print the name and type of every property of a Notion database."""
    print(f"🔍 Inspecting properties for database ID: {db_id}")
    resp = requests.get(f"https://api.notion.com/v1/databases/{db_id}", headers=HEADERS)
    if resp.status_code != 200:
        print(f"Error retrieving database properties: {resp.status_code}")
        print(resp.json())
        return
    props = resp.json().get("properties", {})
    if not props:
        print("No properties found for this database.")
        return
    print("\n--- Database Properties ---")
    for prop_name, prop_meta in props.items():
        print(f"- Property Name: '{prop_name}'")
        print(f" Type: {prop_meta.get('type')}\n")
    print("---------------------------\n")
# --- MAIN LOGIC ---
def main():
    """CLI entry point: import one competitor from a JSON analysis into Notion.

    Modes:
      --inspect DB_ID       print a database schema and exit;
      --file F --name N     upsert competitor N from analysis file F, together
                            with its target industries, landmine questions and
                            customer references.
    """
    global NOTION_TOKEN, HEADERS
    # Credentials come exclusively from notion_token.txt.
    try:
        with open("notion_token.txt", "r") as f:
            NOTION_TOKEN = f.read().strip()
        HEADERS["Authorization"] = f"Bearer {NOTION_TOKEN}"
    except FileNotFoundError:
        print("Error: `notion_token.txt` not found.")
        return
    parser = argparse.ArgumentParser(description="Import a single competitor from a JSON analysis file into Notion.")
    parser.add_argument('--file', help="Path to the JSON analysis file.")
    parser.add_argument('--name', help="Exact name of the competitor to import.")
    parser.add_argument('--inspect', help="Database ID to inspect.")
    args = parser.parse_args()
    # Inspection mode short-circuits the import entirely.
    if args.inspect:
        inspect_database(args.inspect)
        return
    if not args.file or not args.name:
        parser.error("--file and --name are required.")
        return
    # --- Phase 1: State Awareness ---
    print("\n--- Phase 1: Reading current state from Notion ---")
    companies_map = get_existing_items_map(COMPANIES_DB_ID)
    products_map = get_existing_items_map(CANONICAL_PRODUCTS_DB_ID)
    industries_map = get_existing_items_map(INDUSTRIES_DB_ID, name_property="Vertical")
    # For relations, we create a unique key to check for existence
    # NOTE(review): query_db() returns None on API error; these comprehensions
    # would then raise TypeError instead of aborting cleanly — confirm intended.
    existing_landmines = {f'{page["properties"]["Question"]["title"][0]["text"]["content"]}_{page["properties"]["Related Competitor"]["relation"][0]["id"]}' for page in query_db(LANDMINES_DB_ID) if "Question" in page["properties"] and page["properties"]["Question"]["title"] and page["properties"]["Related Competitor"]["relation"]}
    print(f" - Found {len(existing_landmines)} existing landmines.")
    existing_references = {f'{page["properties"]["Customer"]["title"][0]["text"]["content"]}_{page["properties"]["Related Competitor"]["relation"][0]["id"]}' for page in query_db(REFERENCES_DB_ID) if "Customer" in page["properties"] and page["properties"]["Customer"]["title"] and page["properties"]["Related Competitor"]["relation"]}
    print(f" - Found {len(existing_references)} existing references.")
    json_file_path = args.file
    target_competitor_name = args.name
    # --- Phase 2: Processing JSON ---
    print(f"\n--- Phase 2: Processing local JSON file: {json_file_path} for {target_competitor_name} ---")
    try:
        with open(json_file_path, 'r', encoding='utf-8') as f:
            data = json.load(f)
    except FileNotFoundError:
        print(f"Error: `{json_file_path}` not found.")
        return
    except json.JSONDecodeError as e:
        print(f"Error decoding JSON from {json_file_path}: {e}")
        return
    # Find the correct analysis and reference data for the target competitor
    target_analysis = None
    for analysis in data.get('analyses', []):
        if analysis['competitor']['name'] == target_competitor_name:
            target_analysis = analysis
            break
    # Find references from the separate reference_analysis block
    target_references_data = None
    if 'reference_analysis' in data:
        for ref_block in data.get('reference_analysis', []):
            if ref_block.get('competitor_name') == target_competitor_name:
                target_references_data = ref_block.get('references', [])
                break
    target_battlecard = None
    if 'battlecards' in data:
        for bc in data.get('battlecards', []):
            if bc['competitor_name'] == target_competitor_name:
                target_battlecard = bc
                break
    if not target_analysis:
        print(f"Error: Competitor '{target_competitor_name}' not found in 'analyses' list in {json_file_path}.")
        return
    print(f"\nProcessing target competitor: {target_competitor_name}")
    # --- Phase 3: "Upsert" Company ---
    if target_competitor_name not in companies_map:
        print(f" - Company '{target_competitor_name}' not found. Creating...")
        props = {"Name": {"title": [{"text": {"content": target_competitor_name}}]}}
        new_company = create_page(COMPANIES_DB_ID, props)
        if new_company:
            companies_map[target_competitor_name] = new_company["id"]
        else:
            print(f" - Failed to create company '{target_competitor_name}'. Halting.")
            return
    company_id = companies_map[target_competitor_name]
    # --- Phase 4: Create and Link Target Industries ---
    print("\n--- Processing Target Industries ---")
    target_industry_relation_ids = []
    if INDUSTRIES_DB_ID:
        for industry_name in target_analysis.get('target_industries', []):
            if industry_name not in industries_map:
                print(f" - Industry '{industry_name}' not found in Notion DB. Creating...")
                props = {"Vertical": {"title": [{"text": {"content": industry_name}}]}}
                new_industry = create_page(INDUSTRIES_DB_ID, props)
                if new_industry:
                    industries_map[industry_name] = new_industry["id"]
                    target_industry_relation_ids.append({"id": new_industry["id"]})
            else:
                target_industry_relation_ids.append({"id": industries_map[industry_name]})
        if target_industry_relation_ids:
            print(f" - Linking company to {len(target_analysis.get('target_industries', []))} industries...")
            # Format for multi-select is a list of objects with names
            multi_select_payload = [{"name": name} for name in target_analysis.get('target_industries', [])]
            update_props = {
                "Target Industries": {"multi_select": multi_select_payload}
            }
            update_page(company_id, update_props)
    else:
        print(" - INDUSTRIES_DB_ID not set. Skipping.")
    # --- Phase 5: Import Landmines ---
    if target_battlecard and LANDMINES_DB_ID:
        print("\n--- Processing Landmines ---")
        for landmine in target_battlecard.get('landmine_questions', []):
            unique_key = f"{landmine}_{company_id}"
            if unique_key not in existing_landmines:
                print(f" - Landmine '{landmine}' not found. Creating...")
                props = {
                    "Question": {"title": [{"text": {"content": landmine}}]},
                    "Related Competitor": {"relation": [{"id": company_id}]}
                }
                new_landmine = create_page(LANDMINES_DB_ID, props)
                if new_landmine:
                    # Cache so the same landmine is not re-created in this run.
                    existing_landmines.add(unique_key)
            else:
                print(f" - Landmine '{landmine}' already exists for this competitor. Skipping.")
    # --- Phase 6: Import References ---
    if target_references_data and REFERENCES_DB_ID:
        print("\n--- Processing References ---")
        for ref in target_references_data:
            ref_name = ref.get("name", "Unknown Reference")
            unique_key = f"{ref_name}_{company_id}"
            if unique_key not in existing_references:
                print(f" - Reference '{ref_name}' not found. Creating...")
                props = {
                    "Customer": {"title": [{"text": {"content": ref_name}}]},
                    "Related Competitor": {"relation": [{"id": company_id}]},
                    "Quote": {"rich_text": [{"text": {"content": ref.get("testimonial_snippet", "")[:2000]}}]}
                }
                # Handle Industry as a select property
                ref_industry_name = ref.get("industry")
                if ref_industry_name:
                    props["Industry"] = {"select": {"name": ref_industry_name}}
                new_ref = create_page(REFERENCES_DB_ID, props)
                if new_ref:
                    existing_references.add(unique_key)
            else:
                print(f" - Reference '{ref_name}' already exists for this competitor. Skipping.")
    print("\n--- ✅ Import script finished ---")

View File

@@ -0,0 +1,24 @@
import sys
import os
import requests
import json
# Token is read from a fixed absolute path (the script is deployed under /app).
NOTION_TOKEN_FILE = "/app/notion_token.txt"
# NOTE(review): the constant says "personas", but this id matches the
# "Sector & Persona Master" database used by the sibling scripts — confirm.
PERSONAS_DB_ID = "2e288f42-8544-8113-b878-ec99c8a02a6b"
def load_notion_token():
    """Read and return the Notion integration token from NOTION_TOKEN_FILE."""
    with open(NOTION_TOKEN_FILE, "r") as f:
        return f.read().strip()
def query_notion_db(token, db_id):
    """Query the database (no filter, first result page only) and return the parsed JSON body."""
    auth_headers = {
        "Authorization": f"Bearer {token}",
        "Notion-Version": "2022-06-28"
    }
    url = f"https://api.notion.com/v1/databases/{db_id}/query"
    return requests.post(url, headers=auth_headers).json()
token = load_notion_token()
data = query_notion_db(token, PERSONAS_DB_ID)
# Guard against an empty (or error) result set so the script reports the
# problem instead of dying with an IndexError on results[0].
results = data.get("results", [])
if results:
    # Pretty-print the first page for schema inspection.
    print(json.dumps(results[0], indent=2))
else:
    print("No pages returned from the database (check token, DB id, and integration permissions).")

View File

@@ -0,0 +1,30 @@
import sys
import os
import requests
import json
# Token is read from a fixed absolute path (the script is deployed under /app).
NOTION_TOKEN_FILE = "/app/notion_token.txt"
# Database whose pages are dumped below; each row's title property is "Role".
PERSONAS_DB_ID = "30588f42-8544-80c3-8919-e22d74d945ea"
def load_notion_token():
    """Read and return the Notion integration token from NOTION_TOKEN_FILE."""
    with open(NOTION_TOKEN_FILE, "r") as f:
        return f.read().strip()
def query_notion_db(token, db_id):
    """Query the database (no filter, first result page only) and return the parsed JSON body."""
    return requests.post(
        f"https://api.notion.com/v1/databases/{db_id}/query",
        headers={
            "Authorization": f"Bearer {token}",
            "Notion-Version": "2022-06-28"
        },
    ).json()
token = load_notion_token()
data = query_notion_db(token, PERSONAS_DB_ID)
results = data.get("results", [])
# Dump the Role title plus the full raw property payload of every page.
for res in results:
    props = res.get("properties", {})
    # The title is a list of rich-text fragments; concatenate their plain text.
    role = "".join([t.get("plain_text", "") for t in props.get("Role", {}).get("title", [])])
    print(f"Role: {role}")
    print(json.dumps(props, indent=2))
    print("-" * 40)

View File

@@ -0,0 +1,220 @@
import json
import os
import time

import requests
# --- Configuration ---
# SECURITY: never commit a live integration token to source control. The token
# is read from the NOTION_TOKEN environment variable, falling back to
# notion_token.txt (the convention used by the sibling import scripts).
# main() still refuses to run while the placeholder is in place.
NOTION_TOKEN = os.environ.get("NOTION_TOKEN", "YOUR_NOTION_TOKEN")
if NOTION_TOKEN == "YOUR_NOTION_TOKEN":
    try:
        with open("notion_token.txt", "r") as f:
            NOTION_TOKEN = f.read().strip()
    except FileNotFoundError:
        pass  # keep the placeholder; main() aborts with a clear message
PARENT_PAGE_ID = "2e088f42854480248289deb383da3818"  # ID of the Notion page under which the databases are created
NOTION_VERSION = "2022-06-28"
NOTION_API_BASE_URL = "https://api.notion.com/v1"
# Shared headers for every API call in this script.
HEADERS = {
    "Authorization": f"Bearer {NOTION_TOKEN}",
    "Notion-Version": NOTION_VERSION,
    "Content-Type": "application/json",
}
# --- Database Schemas ---
# Define basic properties for each database. Relations will be added in a second phase.
# Keys are the database display names; each value carries the Notion "title"
# payload plus the non-relation property definitions for the create call.
DATABASE_SCHEMAS = {
    "Product Master": {
        "title": [{"type": "text", "text": {"content": "Product Master"}}],
        "properties": {
            "Name": {"title": {}},
            "Beschreibung": {"rich_text": {}},
            "Spezifikationen": {"rich_text": {}},
            "Layer": {"multi_select": {"options": [{"name": "Cleaning"}, {"name": "Service"}, {"name": "Security"}]}},
        }
    },
    "Sector & Persona Master": {
        "title": [{"type": "text", "text": {"content": "Sector & Persona Master"}}],
        "properties": {
            "Name": {"title": {}},
            "RoboPlanet-Definition": {"rich_text": {}},
            "Personas": {"multi_select": {"options": []}},  # Options can be added later if known
            "Pains": {"rich_text": {}},
            "Gains": {"rich_text": {}},
            "Probing Questions": {"rich_text": {}},
        }
    },
    "Messaging Matrix": {
        "title": [{"type": "text", "text": {"content": "Messaging Matrix"}}],
        "properties": {
            "Name": {"title": {}},
            "Satz 1": {"rich_text": {}},
            "Satz 2": {"rich_text": {}},
            "Voice Script": {"rich_text": {}},
        }
    },
    "Competitive Radar": {
        "title": [{"type": "text", "text": {"content": "Competitive Radar"}}],
        "properties": {
            "Wettbewerber": {"title": {}},
            "News": {"url": {}},
            "Blogposts": {"url": {}},
            "Kill-Argumente": {"rich_text": {}},
            "Technische Specs": {"rich_text": {}},
        }
    },
    "Enrichment Factory & RevOps": {
        "title": [{"type": "text", "text": {"content": "Enrichment Factory & RevOps"}}],
        "properties": {
            "Account Name": {"title": {}},
            "Umsatz": {"number": {"format": "euro"}},
            "Mitarbeiter": {"number": {"format": "number"}},
            "Ansprechpartner": {"rich_text": {}},
            "Job Titel": {"rich_text": {}},
            "Klassifizierung": {"multi_select": {"options": []}},  # Options can be added later if known
            "Outbound Metriken": {"rich_text": {}},
        }
    },
    "The Brain": {
        "title": [{"type": "text", "text": {"content": "The Brain"}}],
        "properties": {
            "Titel": {"title": {}},
            "Lösungsfragmente": {"rich_text": {}},
            "Quelle": {"url": {}},
        }
    },
    "GTM Workspace": {
        "title": [{"type": "text", "text": {"content": "GTM Workspace"}}],
        "properties": {
            "Kampagnen Name": {"title": {}},
        }
    }
}
# --- Database Relations (Phase B) ---
# Define which databases relate to each other.
# The keys are the database names, and the values are lists of (property_name, related_database_name) tuples.
# Consumed by update_notion_database_relations() after all databases exist.
DATABASE_RELATIONS = {
    "Product Master": [
        ("Sector Master", "Sector & Persona Master"),
        ("Messaging Matrix", "Messaging Matrix"),
        ("The Brain", "The Brain"),
        ("GTM Workspace", "GTM Workspace"),
    ],
    "Sector & Persona Master": [
        ("Product Master", "Product Master"),
        ("Messaging Matrix", "Messaging Matrix"),
    ],
    "Messaging Matrix": [
        ("Product Master", "Product Master"),
        ("Sector Master", "Sector & Persona Master"),
    ],
    "The Brain": [
        ("Product Master", "Product Master"),
    ],
    "GTM Workspace": [
        ("Product Master", "Product Master"),
    ],
    # Competitive Radar and Enrichment Factory & RevOps do not have explicit relations to other *created* databases based on the document's "Notion Datenbank-Relationen" section.
}
# --- Helper Functions ---
def create_notion_database(parent_page_id, db_name, properties):
    """Create a database named *db_name* under *parent_page_id*.

    The title payload is taken from DATABASE_SCHEMAS[db_name]. Returns the
    new database id, or None when the API call fails (error is printed).
    """
    print(f"Attempting to create database: {db_name}")
    payload = {
        "parent": {"type": "page_id", "page_id": parent_page_id},
        "title": DATABASE_SCHEMAS[db_name]["title"],
        "properties": properties,
    }
    response = None
    try:
        response = requests.post(f"{NOTION_API_BASE_URL}/databases", headers=HEADERS, json=payload)
        response.raise_for_status()  # Raise an exception for HTTP errors
        db_id = response.json()["id"]
        print(f"Successfully created database '{db_name}' with ID: {db_id}")
        return db_id
    except requests.exceptions.HTTPError as e:
        print(f"HTTP Error creating database {db_name}: {e}")
        if response is not None:
            print(f"Response content: {response.text}")
        return None
    except Exception as e:
        print(f"An unexpected error occurred while creating database {db_name}: {e}")
        return None
def update_notion_database_relations(database_id, relations_to_add, created_db_ids):
    """Add dual relation properties to the database *database_id*.

    relations_to_add is a list of (property_name, related_database_name)
    tuples; names are resolved through created_db_ids. Returns True when the
    PATCH succeeded, False otherwise (including "nothing to add").
    """
    print(f"Attempting to update relations for database ID: {database_id}")
    new_props = {}
    for prop_name, related_db_name in relations_to_add:
        if related_db_name not in created_db_ids:
            print(f"Warning: Related database '{related_db_name}' not found among created databases. Skipping relation for '{prop_name}'.")
            continue
        new_props[prop_name] = {
            "relation": {
                "database_id": created_db_ids[related_db_name],
                "dual_property": {}  # Notion automatically creates a dual property
            }
        }
    if not new_props:
        print(f"No relations to add for database ID: {database_id}")
        return False
    response = None
    try:
        response = requests.patch(
            f"{NOTION_API_BASE_URL}/databases/{database_id}",
            headers=HEADERS,
            json={"properties": new_props},
        )
        response.raise_for_status()
        print(f"Successfully updated relations for database ID: {database_id}")
        return True
    except requests.exceptions.HTTPError as e:
        print(f"HTTP Error updating relations for database ID {database_id}: {e}")
        if response is not None:
            print(f"Response content: {response.text}")
        return False
    except Exception as e:
        print(f"An unexpected error occurred while updating relations for database ID {database_id}: {e}")
        return False
def main():
    """Create all configured databases, then wire up their relations.

    Phase A creates every database in DATABASE_SCHEMAS under
    PARENT_PAGE_ID; Phase B patches relation properties between them
    according to DATABASE_RELATIONS. Sleeps 15s after each API mutation
    so Notion can index the new objects.
    """
    if NOTION_TOKEN == "YOUR_NOTION_TOKEN" or PARENT_PAGE_ID == "YOUR_PARENT_PAGE_ID":
        print("ERROR: Please update NOTION_TOKEN and PARENT_PAGE_ID in the script before running.")
        return

    created_db_ids = {}
    print("--- Phase A: Creating Databases ---")
    for db_name, schema in DATABASE_SCHEMAS.items():
        db_id = create_notion_database(PARENT_PAGE_ID, db_name, schema["properties"])
        if not db_id:
            # A single failure aborts the whole setup: later relations
            # would reference databases that do not exist.
            print(f"Failed to create database: {db_name}. Aborting Phase A.")
            return
        created_db_ids[db_name] = db_id
        print(f"Waiting 15 seconds for Notion to index database '{db_name}'...")
        time.sleep(15)

    print("\n--- Phase B: Establishing Relations ---")
    if not created_db_ids:
        print("No databases were created in Phase A. Cannot establish relations.")
        return
    for db_name, relations_config in DATABASE_RELATIONS.items():
        if db_name not in created_db_ids:
            print(f"Warning: Database '{db_name}' not found in created IDs. Skipping relation updates.")
            continue
        db_id = created_db_ids[db_name]
        print(f"Processing relations for '{db_name}' (ID: {db_id})...")
        if update_notion_database_relations(db_id, relations_config, created_db_ids):
            print(f"Waiting 15 seconds after updating relations for '{db_name}'...")
            time.sleep(15)
        else:
            print(f"Failed to update relations for: {db_name}. Continuing with other databases.")

    print("\n--- Setup Complete ---")
    print("Please ensure your Notion integration has access to the parent page and its sub-pages in Notion UI.")
    print("Created database IDs:")
    for name, id_val in created_db_ids.items():
        print(f"- {name}: {id_val}")


if __name__ == "__main__":
    main()

View File

@@ -0,0 +1,41 @@
import os
import requests
import json
# Script: fetch a single Notion page and dump its JSON to stdout.
# Get the Notion API key from the environment variable
api_key = os.environ.get("NOTION_API_KEY")
# If the API key is not set, try to read it from the file
if not api_key:
    try:
        with open("notion_token.txt", "r") as f:
            api_key = f.read().strip()
    except FileNotFoundError:
        print("Error: notion_token.txt not found.")
        print("Please set the NOTION_API_KEY environment variable or create the notion_token.txt file.")
        # Fix: exit with a non-zero status — the bare exit() reported
        # success (status 0) for a missing credential.
        exit(1)
# The ID of the page to retrieve
page_id = "2e288f42-8544-81d8-96f5-c231f84f719a"  # Product Master
# The Notion API endpoint for retrieving a page
url = f"https://api.notion.com/v1/pages/{page_id}"
# The headers for the API request
headers = {
    "Authorization": f"Bearer {api_key}",
    "Content-Type": "application/json",
    "Notion-Version": "2022-06-28",
}
# Make the API request
response = requests.get(url, headers=headers)
# Check the response status code
if response.status_code == 200:
    # Print the response content
    print(json.dumps(response.json(), indent=2))
else:
    print(f"Error: {response.status_code}")
    print(response.text)
    # Fix: propagate the API failure to the shell instead of exiting 0.
    exit(1)

View File

@@ -0,0 +1,161 @@
import requests
import json
import os
# --- Configuration ---
# The integration token lives in a local file kept out of version control.
try:
    with open("notion_token.txt", "r") as f:
        NOTION_TOKEN = f.read().strip()
except FileNotFoundError:
    print("Error: notion_token.txt not found.")
    exit(1)
NOTION_VERSION = "2022-06-28"  # pinned Notion API version
NOTION_API_BASE_URL = "https://api.notion.com/v1"
# Headers shared by every request in this script.
HEADERS = {
    "Authorization": f"Bearer {NOTION_TOKEN}",
    "Notion-Version": NOTION_VERSION,
    "Content-Type": "application/json",
}
# DB: Personas / Roles
DB_ID = "30588f42854480c38919e22d74d945ea"
# --- Data for Archetypes ---
# Buyer-persona records (German copy) synced into the "Personas / Roles"
# database. Each entry carries bullet lists of pains/gains plus KPI and
# typical-position strings; keys map 1:1 onto Notion properties in main().
archetypes = [
    # Economic decision-maker: cost/ROI-focused buyer.
    {
        "name": "Wirtschaftlicher Entscheider",
        "pains": [
            "Steigende Personalkosten im Reinigungs- und Servicebereich gefährden Profitabilität.",
            "Fachkräftemangel und Schwierigkeiten bei der Stellenbesetzung.",
            "Inkonsistente Qualitätsstandards schaden dem Ruf des Hauses.",
            "Hoher Managementaufwand für manuelle operative Prozesse."
        ],
        "gains": [
            "Reduktion operativer Personalkosten um 10-25%.",
            "Deutliche Abnahme der Überstunden (bis zu 50%).",
            "Sicherstellung konstant hoher Qualitätsstandards.",
            "Erhöhung der operativen Effizienz durch präzise Datenanalysen."
        ],
        "kpis": "Betriebskosten pro Einheit, Gästezufriedenheit (NPS), Mitarbeiterfluktuation.",
        "positions": "Direktor, Geschäftsführer, C-Level, Einkaufsleiter."
    },
    # Operational decision-maker: day-to-day workload and staffing focus.
    {
        "name": "Operativer Entscheider",
        "pains": [
            "Team ist überlastet und gestresst (Gefahr hoher Fluktuation).",
            "Zu viele manuelle Routineaufgaben wie Abräumen oder Materialtransport.",
            "Mangelnde Personalverfügbarkeit in Stoßzeiten führt zu Engpässen."
        ],
        "gains": [
            "Signifikante Entlastung des Personals von Routineaufgaben (20-40% Zeitgewinn).",
            "Garantierte Reinigungszyklen unabhängig von Personalausfällen.",
            "Mehr Zeit für wertschöpfende Aufgaben (Gästebetreuung, Upselling)."
        ],
        "kpis": "Zeitaufwand für Routineaufgaben, Abdeckungsrate der Zyklen, Servicegeschwindigkeit.",
        "positions": "Leiter Housekeeping, F&B Manager, Restaurantleiter, Stationsleitung."
    },
    # Infrastructure owner: integration, uptime and maintenance focus.
    {
        "name": "Infrastruktur-Verantwortlicher",
        "pains": [
            "Technische Komplexität der Integration in bestehende Infrastruktur (Aufzüge, WLAN).",
            "Sorge vor hohen Ausfallzeiten und unplanmäßigen Wartungskosten.",
            "Fehlendes internes Fachpersonal für die Wartung autonomer Systeme."
        ],
        "gains": [
            "Reibungslose Integration (20-30% schnellere Implementierung).",
            "Minimierung von Ausfallzeiten um 80-90% durch proaktives Monitoring.",
            "Planbare Wartung und transparente Kosten durch feste SLAs."
        ],
        "kpis": "System-Uptime, Implementierungszeit, Wartungskosten (TCO).",
        "positions": "Technischer Leiter, Facility Manager, IT-Leiter."
    },
    # Innovation driver: brand, differentiation and data focus.
    {
        "name": "Innovations-Treiber",
        "pains": [
            "Verlust der Wettbewerbsfähigkeit durch veraltete Prozesse.",
            "Schwierigkeit das Unternehmen als modernen Arbeitgeber zu positionieren.",
            "Statische Informations- und Marketingflächen werden oft ignoriert."
        ],
        "gains": [
            "Positionierung als Innovationsführer am Markt.",
            "Steigerung der Kundeninteraktion um 20-30%.",
            "Gewinnung wertvoller Daten zur kontinuierlichen Prozessoptimierung.",
            "Erhöhte Attraktivität für junge, technikaffine Talente."
        ],
        "kpis": "Besucherinteraktionsrate, Anzahl Prozessinnovationen, Modernitäts-Sentiment.",
        "positions": "Marketingleiter, Center Manager, CDO, Business Development."
    }
]
# --- Helper Functions ---
def format_rich_text(text):
    """Wrap *text* as a Notion rich_text property value."""
    text_item = {"type": "text", "text": {"content": text}}
    return {"rich_text": [text_item]}
def format_title(text):
    """Wrap *text* as a Notion title property value."""
    title_item = {"type": "text", "text": {"content": text}}
    return {"title": [title_item]}
def find_page(title):
    """Return the first DB page whose 'Role' title equals *title*, else None."""
    query_url = f"{NOTION_API_BASE_URL}/databases/{DB_ID}/query"
    query = {"filter": {"property": "Role", "title": {"equals": title}}}
    response = requests.post(query_url, headers=HEADERS, json=query)
    response.raise_for_status()
    matches = response.json().get("results")
    if matches:
        return matches[0]
    return None
def create_page(properties):
    """Insert a new page with *properties* into the personas database."""
    response = requests.post(
        f"{NOTION_API_BASE_URL}/pages",
        headers=HEADERS,
        json={"parent": {"database_id": DB_ID}, "properties": properties},
    )
    response.raise_for_status()
    print("Created.")
def update_page(page_id, properties):
    """Overwrite *properties* on the existing page *page_id*."""
    response = requests.patch(
        f"{NOTION_API_BASE_URL}/pages/{page_id}",
        headers=HEADERS,
        json={"properties": properties},
    )
    response.raise_for_status()
    print("Updated.")
# --- Main Logic ---
def main():
    """Upsert every archetype into the Notion personas database.

    For each archetype, pains/gains are rendered as '- ' bullet lines,
    then an existing page with the same Role title is updated in place,
    or a new page is created.
    """
    print(f"Syncing {len(archetypes)} Personas to Notion DB {DB_ID}...")
    for persona in archetypes:
        print(f"Processing '{persona['name']}'...")
        props = {
            "Role": format_title(persona["name"]),
            "Pains": format_rich_text("\n".join(f"- {entry}" for entry in persona["pains"])),
            "Gains": format_rich_text("\n".join(f"- {entry}" for entry in persona["gains"])),
            "KPIs": format_rich_text(persona.get("kpis", "")),
            "Typische Positionen": format_rich_text(persona.get("positions", "")),
        }
        page = find_page(persona["name"])
        if page is None:
            print(" -> Creating new page...")
            create_page(props)
        else:
            print(f" -> Found existing page {page['id']}. Updating...")
            update_page(page["id"], props)
    print("Sync complete.")


if __name__ == "__main__":
    main()

View File

@@ -0,0 +1,145 @@
import requests
import json
import os
import re
import sys
# Local file holding the Notion integration token (kept out of version control).
TOKEN_FILE = 'notion_api_key.txt'
# Notion page under which uploaded documents are created as child pages.
PARENT_PAGE_ID = "2e088f42-8544-8024-8289-deb383da3818"
def parse_markdown_to_blocks(md_content):
    """Convert a markdown string into a list of Notion block objects.

    Supports fenced code blocks (```), headings (#/##/###), bulleted and
    numbered list items, and plain paragraphs. Table rows (lines starting
    with '|') are preserved verbatim as code blocks since the Notion table
    block API is not used here. Blank lines are skipped.

    Args:
        md_content: the raw markdown text.

    Returns:
        A list of Notion API block dicts, in document order.
    """

    def _text_block(block_type, content):
        # All simple block types share the same rich_text envelope.
        return {
            "object": "block",
            "type": block_type,
            block_type: {"rich_text": [{"type": "text", "text": {"content": content}}]},
        }

    def _code_block(content):
        return {
            "object": "block",
            "type": "code",
            "code": {
                "rich_text": [{"type": "text", "text": {"content": content}}],
                "language": "plain text",
            },
        }

    blocks = []
    in_code_block = False
    code_content = []
    for line in md_content.split('\n'):
        stripped = line.strip()
        if stripped.startswith("```"):
            if in_code_block:
                # Closing fence: emit the accumulated code lines.
                blocks.append(_code_block('\n'.join(code_content)))
                code_content = []
            in_code_block = not in_code_block
            continue
        if in_code_block:
            code_content.append(line)
            continue
        if not stripped:
            continue
        if line.startswith("# "):
            blocks.append(_text_block("heading_1", line[2:]))
        elif line.startswith("## "):
            blocks.append(_text_block("heading_2", line[3:]))
        elif line.startswith("### "):
            blocks.append(_text_block("heading_3", line[4:]))
        elif stripped.startswith("* ") or stripped.startswith("- "):
            blocks.append(_text_block("bulleted_list_item", stripped[2:]))
        elif re.match(r"^\d+\.", stripped):
            blocks.append(_text_block("numbered_list_item", re.sub(r"^\d+\.\s*", "", stripped)))
        elif stripped.startswith("|"):
            # Keep table rows verbatim; Notion has no simple markdown-table block.
            blocks.append(_code_block(line))
        else:
            blocks.append(_text_block("paragraph", line))
    if in_code_block and code_content:
        # Bug fix: flush an unterminated fenced block at EOF instead of
        # silently dropping its content.
        blocks.append(_code_block('\n'.join(code_content)))
    return blocks
def upload_doc(token, file_path):
    """Parse a markdown file and create it as a child page in Notion.

    The page is created under PARENT_PAGE_ID; the first H1 of the file is
    used as its title (falling back to the filename), prefixed with a book
    emoji.

    Args:
        token: Notion integration token.
        file_path: path to the markdown file to upload.
    """
    try:
        # Read explicitly as UTF-8 so non-ASCII markdown survives on any platform.
        with open(file_path, 'r', encoding='utf-8') as f:
            content = f.read()
    except FileNotFoundError:
        print(f"Error: Could not find '{file_path}'")
        return
    title = os.path.basename(file_path)
    if content.startswith("# "):
        title = content.split('\n')[0][2:].strip()
    print(f"Parsing '{file_path}'...")
    children_blocks = parse_markdown_to_blocks(content)
    # The Notion create-page endpoint accepts at most 100 children; warn
    # instead of truncating silently.
    if len(children_blocks) > 100:
        print(f"Warning: document has {len(children_blocks)} blocks; only the first 100 will be uploaded.")
    url = "https://api.notion.com/v1/pages"
    headers = {
        "Authorization": f"Bearer {token}",
        "Notion-Version": "2022-06-28",
        "Content-Type": "application/json"
    }
    payload = {
        "parent": { "page_id": PARENT_PAGE_ID },
        "properties": {
            "title": [{"text": {"content": f"📘 {title}"}}]
        },
        "children": children_blocks[:100]
    }
    print(f"Uploading '{title}' to Notion...")
    try:
        response = requests.post(url, headers=headers, json=payload)
        response.raise_for_status()
        data = response.json()
        print(f"SUCCESS: {data.get('url')}")
    except requests.exceptions.HTTPError as e:
        # Surface the API's error body, consistent with the sibling scripts.
        print(f"ERROR: {e}")
        if e.response is not None:
            print(f"Response content: {e.response.text}")
    except Exception as e:
        print(f"ERROR: {e}")
if __name__ == "__main__":
    # CLI entry point: sync_docs_to_notion.py <markdown file>
    if len(sys.argv) < 2:
        print("Usage: python sync_docs_to_notion.py <filename>")
        sys.exit(1)
    try:
        # The integration token lives in a local file, not in the repo.
        with open(TOKEN_FILE, 'r') as f:
            token = f.read().strip()
    except FileNotFoundError:
        print(f"Error: Could not find '{TOKEN_FILE}'")
        sys.exit(1)
    upload_doc(token, sys.argv[1])

View File

@@ -0,0 +1,150 @@
import requests
import json
# --- Configuration ---
# The integration token lives in a local file kept out of version control.
try:
    with open("notion_token.txt", "r") as f:
        NOTION_TOKEN = f.read().strip()
except FileNotFoundError:
    print("Error: notion_token.txt not found.")
    exit(1)
NOTION_VERSION = "2022-06-28"  # pinned Notion API version
NOTION_API_BASE_URL = "https://api.notion.com/v1"
# Default headers for every Notion request made by this script.
HEADERS = {
    "Authorization": f"Bearer {NOTION_TOKEN}",
    "Notion-Version": NOTION_VERSION,
    "Content-Type": "application/json",
}
# DB: Sector & Persona Master
DB_ID = "2e288f42-8544-8113-b878-ec99c8a02a6b"
# --- Data ---
# Persona archetypes (German copy) synced into the "Sector & Persona
# Master" database; each entry has pains/gains bullet lists only.
archetypes = [
    # Economic decision-maker: cost/ROI-focused buyer.
    {
        "name": "Wirtschaftlicher Entscheider",
        "pains": [
            "Steigende operative Personalkosten und Fachkräftemangel gefährden die Profitabilität.",
            "Unklare Amortisation (ROI) und Risiko von Fehlinvestitionen bei neuen Technologien.",
            "Intransparente Folgekosten (TCO) und schwierige Budgetplanung über die Lebensdauer."
        ],
        "gains": [
            "Nachweisbare Senkung der operativen Kosten (10-25%) und schnelle Amortisation.",
            "Sicherung der Wettbewerbsfähigkeit durch effizientere Kostenstrukturen.",
            "Volle Transparenz und Planbarkeit durch klare Service-Modelle (SLAs)."
        ]
    },
    # Operational decision-maker: workload and quality-of-execution focus.
    {
        "name": "Operativer Entscheider",
        "pains": [
            "Personelle Unterbesetzung führt zu Überstunden, Stress und Qualitätsmängeln.",
            "Wiederkehrende Routineaufgaben binden wertvolle Fachkräfte-Ressourcen.",
            "Schwierigkeit, gleichbleibend hohe Standards (Hygiene/Service) 24/7 zu garantieren."
        ],
        "gains": [
            "Spürbare Entlastung des Teams von Routineaufgaben (20-40%).",
            "Garantierte, gleichbleibend hohe Ausführungsqualität unabhängig von der Tagesform.",
            "Stabilisierung der operativen Abläufe und Kompensation von Personalausfällen."
        ]
    },
    # Infrastructure owner: integration, uptime and maintenance focus.
    {
        "name": "Infrastruktur-Verantwortlicher",
        "pains": [
            "Sorge vor komplexer Integration in bestehende IT- und Gebäudeinfrastruktur (WLAN, Türen, Aufzüge).",
            "Risiko von hohen Ausfallzeiten und aufwändiger Fehlerbehebung ohne internes Spezialwissen.",
            "Unklare Wartungsaufwände und Angst vor 'Insel-Lösungen' ohne Schnittstellen."
        ],
        "gains": [
            "Reibungslose, fachgerechte Integration durch Experten-Support (Plug & Play).",
            "Maximale Betriebssicherheit durch proaktives Monitoring und schnelle Reaktionszeiten.",
            "Zentrales Management und volle Transparenz über Systemstatus und Wartungsbedarf."
        ]
    },
    # Innovation driver: brand, differentiation and data focus.
    {
        "name": "Innovations-Treiber",
        "pains": [
            "Verlust der Attraktivität als moderner Arbeitgeber oder Dienstleister (Veraltetes Image).",
            "Fehlende 'Wow-Effekte' in der Kundeninteraktion und mangelnde Differenzierung vom Wettbewerb.",
            "Verpasste Chancen durch fehlende Datengrundlage für digitale Optimierungen."
        ],
        "gains": [
            "Positionierung als Innovationsführer und Steigerung der Markenattraktivität.",
            "Schaffung einzigartiger Kundenerlebnisse durch sichtbare High-Tech-Lösungen.",
            "Gewinnung wertvoller Daten zur kontinuierlichen Prozessoptimierung und Digitalisierung."
        ]
    }
]
# --- Helper Functions ---
def format_rich_text(text):
    """Build the payload for a Notion rich_text property holding *text*."""
    return {
        "rich_text": [
            {"type": "text", "text": {"content": text}},
        ],
    }
def format_title(text):
    """Build the payload for a Notion title property holding *text*."""
    return {
        "title": [
            {"type": "text", "text": {"content": text}},
        ],
    }
def find_page(title):
    """Return the first DB page whose 'Name' title equals *title*, or None."""
    query = {"filter": {"property": "Name", "title": {"equals": title}}}
    response = requests.post(
        f"{NOTION_API_BASE_URL}/databases/{DB_ID}/query",
        headers=HEADERS,
        json=query,
    )
    response.raise_for_status()
    hits = response.json().get("results")
    return hits[0] if hits else None
def create_page(properties):
    """Create a new page carrying *properties* in the master database."""
    body = {"parent": {"database_id": DB_ID}, "properties": properties}
    response = requests.post(f"{NOTION_API_BASE_URL}/pages", headers=HEADERS, json=body)
    response.raise_for_status()
    print("Created.")
def update_page(page_id, properties):
    """Patch *properties* onto the existing page *page_id*."""
    body = {"properties": properties}
    response = requests.patch(f"{NOTION_API_BASE_URL}/pages/{page_id}", headers=HEADERS, json=body)
    response.raise_for_status()
    print("Updated.")
# --- Main Sync Loop ---
def main():
    """Upsert every archetype into the Sector & Persona Master database.

    Pains/gains are rendered as '- ' bullet lines; an existing page with
    the same Name title is updated in place, otherwise a new page is
    created.
    """
    print(f"Syncing {len(archetypes)} Personas to Notion DB {DB_ID}...")
    for p in archetypes:
        print(f"Processing '{p['name']}'...")
        # Format Pains/Gains as lists with bullets for Notion Text field
        pains_text = "\n".join([f"- {item}" for item in p["pains"]])
        gains_text = "\n".join([f"- {item}" for item in p["gains"]])
        properties = {
            "Name": format_title(p["name"]),
            "Pains": format_rich_text(pains_text),
            "Gains": format_rich_text(gains_text),
            # Optional: Add a tag to distinguish them from Sectors if needed?
            # Currently just relying on Name uniqueness.
        }
        # Upsert: update in place when a page with this title already exists.
        existing_page = find_page(p["name"])
        if existing_page:
            print(f" -> Found existing page {existing_page['id']}. Updating...")
            update_page(existing_page["id"], properties)
        else:
            print(" -> Creating new page...")
            create_page(properties)
    print("Sync complete.")
if __name__ == "__main__":
    main()

View File

@@ -0,0 +1,92 @@
import requests
import json
import os
import sys
# Shared credential file: the sibling scripts in this folder read the
# same 'notion_token.txt'.
TOKEN_FILE = 'notion_token.txt'


def get_notion_token():
    """Return the Notion API token read from TOKEN_FILE, exiting if absent."""
    try:
        with open(TOKEN_FILE, 'r') as f:
            token = f.read().strip()
    except FileNotFoundError:
        print(f"Error: Token file not found at '{TOKEN_FILE}'")
        print("Please create this file and place your Notion Integration Token inside.")
        sys.exit(1)
    return token
def parse_markdown_to_blocks(md_content):
    """
    Parses a simple markdown string into Notion API block objects.

    Supports H1/H2 headings, bulleted list items, and paragraphs; blank
    lines are dropped. The result is always prefixed with a
    "Gemini Task-Update:" heading followed by a divider.
    This is a simplified parser for this specific task.
    """
    blocks = []
    for line in md_content.split('\n'):
        stripped = line.strip()
        if line.startswith("# "):
            blocks.append({ "object": "block", "type": "heading_1", "heading_1": {"rich_text": [{"type": "text", "text": {"content": line[2:]}}]}})
        elif line.startswith("## "):
            blocks.append({ "object": "block", "type": "heading_2", "heading_2": {"rich_text": [{"type": "text", "text": {"content": line[3:]}}]}})
        elif stripped.startswith("* ") or stripped.startswith("- "):
            blocks.append({ "object": "block", "type": "bulleted_list_item", "bulleted_list_item": {"rich_text": [{"type": "text", "text": {"content": stripped[2:]}}]}})
        elif stripped:  # Any non-empty line becomes a paragraph
            blocks.append({ "object": "block", "type": "paragraph", "paragraph": {"rich_text": [{"type": "text", "text": {"content": line}}]}})
    # Add a divider for visual separation, then the header on top of it.
    # Fix: include "object": "block" on the divider for consistency with
    # every other block in the payload.
    blocks.insert(0, {"object": "block", "type": "divider", "divider": {}})
    blocks.insert(0, {
        "object": "block", "type": "heading_2", "heading_2": {
            "rich_text": [{"type": "text", "text": {"content": "Gemini Task-Update:"}}]
        }
    })
    return blocks
def append_blocks_to_page(token, page_id, blocks):
    """
    Appends a list of block objects to a Notion page.
    """
    # The page ID doubles as the block ID when appending children.
    endpoint = f"https://api.notion.com/v1/blocks/{page_id}/children"
    request_headers = {
        "Authorization": f"Bearer {token}",
        "Notion-Version": "2022-06-28",
        "Content-Type": "application/json"
    }
    print(f"Appending {len(blocks)} blocks to Notion Page ID: {page_id}...")
    try:
        response = requests.patch(endpoint, headers=request_headers, json={"children": blocks})
        response.raise_for_status()
    except requests.exceptions.HTTPError as e:
        print(f"ERROR: Failed to update Notion page. Response: {e.response.text}")
        sys.exit(1)
    except Exception as e:
        print(f"ERROR: An unexpected error occurred: {e}")
        sys.exit(1)
    print("SUCCESS: Content appended to Notion task.")
if __name__ == "__main__":
    # CLI entry point: update_notion_task.py <page_id> "<markdown content>"
    if len(sys.argv) != 3:
        print("Usage: python update_notion_task.py <page_id> \"<content_string>\"")
        print("Example: python update_notion_task.py 12345-abc... \"- Task 1\n- Task 2\"")
        sys.exit(1)
    page_id = sys.argv[1]
    content_to_append = sys.argv[2]
    # Basic validation for page_id
    # Notion page IDs are 32 hex chars (36 with dashes); this is only a
    # cheap sanity check, not full validation.
    if not isinstance(page_id, str) or len(page_id) < 32:
        print(f"Error: Invalid Page ID provided: '{page_id}'")
        sys.exit(1)
    notion_token = get_notion_token()
    content_blocks = parse_markdown_to_blocks(content_to_append)
    append_blocks_to_page(notion_token, page_id, content_blocks)