Files
Brancheneinstufung2/import_relational_radar.py
Floke e1d115e0ba feat(notion): Implement relational Competitive Radar import
- Added import_relational_radar.py for bidirectional database structure in Notion.
- Added refresh_references.py to populate analysis data with grounded facts via scraping.
- Updated documentation for Competitive Radar v2.0.
2026-01-11 11:57:43 +00:00

231 lines
8.3 KiB
Python

import json
import os
import requests
import sys
# Configuration
JSON_FILE = 'analysis_robo-planet.de.json'  # analysis input produced upstream — presumably by the scraping pipeline; confirm
TOKEN_FILE = 'notion_token.txt'  # plain-text Notion integration token, read verbatim
PARENT_PAGE_ID = "2e088f42-8544-8024-8289-deb383da3818"  # Notion page that will own the three new databases
# Database Titles
DB_TITLE_HUB = "📦 Competitive Radar (Companies)"
DB_TITLE_LANDMINES = "💣 Competitive Radar (Landmines & Intel)"
DB_TITLE_REFS = "🏆 Competitive Radar (References)"
def load_json_data(filepath):
    """Load and parse a JSON file, exiting the program on any failure.

    Args:
        filepath: Path of the JSON document to read.

    Returns:
        The parsed JSON value (typically a dict for this script's input).

    Exits with status 1 (after printing the error) if the file is missing
    or unparseable — a fail-fast policy, since nothing downstream can run
    without the analysis data.
    """
    try:
        # Explicit encoding: JSON is UTF-8 by spec; relying on the platform
        # default locale encoding breaks on Windows.
        with open(filepath, 'r', encoding='utf-8') as f:
            return json.load(f)
    except Exception as e:
        print(f"Error loading JSON: {e}")
        sys.exit(1)
def load_notion_token(filepath):
    """Read the Notion API token from a plain-text file.

    Args:
        filepath: Path of the token file.

    Returns:
        The file content with surrounding whitespace stripped.

    Exits with status 1 (after printing the error) if the file cannot be
    read — the script cannot talk to the API without a token.
    """
    try:
        # Explicit encoding avoids platform-default surprises (Windows cp1252).
        with open(filepath, 'r', encoding='utf-8') as f:
            return f.read().strip()
    except Exception as e:
        print(f"Error loading token: {e}")
        sys.exit(1)
def create_database(token, parent_page_id, title, properties):
    """Create a Notion database under a parent page and return its ID.

    Args:
        token: Notion integration token (sent as a Bearer header).
        parent_page_id: ID of the page that will contain the database.
        title: Display title for the new database.
        properties: Notion property-schema dict for the database columns.

    Returns:
        The new database's ID string.

    Exits with status 1 on any non-200 response, printing the status and
    body — fail-fast so a half-built relational structure is never
    silently continued.
    """
    url = "https://api.notion.com/v1/databases"
    headers = {
        "Authorization": f"Bearer {token}",
        "Notion-Version": "2022-06-28",
        "Content-Type": "application/json"
    }
    payload = {
        "parent": {"type": "page_id", "page_id": parent_page_id},
        "title": [{"type": "text", "text": {"content": title}}],
        "properties": properties
    }
    # A timeout prevents the script from hanging indefinitely on a
    # stalled connection (requests has no default timeout).
    response = requests.post(url, headers=headers, json=payload, timeout=30)
    if response.status_code != 200:
        print(f"Error creating DB '{title}': {response.status_code}")
        print(response.text)
        sys.exit(1)
    db_data = response.json()
    print(f"✅ Created DB '{title}' (ID: {db_data['id']})")
    return db_data['id']
def create_page(token, db_id, properties):
    """Create one page (row) inside a Notion database.

    Args:
        token: Notion integration token (sent as a Bearer header).
        db_id: ID of the target database.
        properties: Notion property-value dict for the row.

    Returns:
        The created page's ID string, or None on failure — unlike
        create_database(), a single failed row is tolerated so the bulk
        import can continue.
    """
    url = "https://api.notion.com/v1/pages"
    headers = {
        "Authorization": f"Bearer {token}",
        "Notion-Version": "2022-06-28",
        "Content-Type": "application/json"
    }
    payload = {
        "parent": {"database_id": db_id},
        "properties": properties
    }
    # A timeout prevents one stalled request from wedging the whole import
    # (requests has no default timeout).
    response = requests.post(url, headers=headers, json=payload, timeout=30)
    if response.status_code != 200:
        print(f"Error creating page: {response.status_code}")
        return None
    return response.json()['id']
def format_list_as_bullets(items):
    """Render *items* one per line; returns "" for an empty/None input.

    NOTE(review): despite the name, no bullet prefix ("- ", "•") is added —
    each entry is emitted verbatim. Confirm whether prefixes were intended.
    """
    if not items:
        return ""
    return "\n".join(f"{entry}" for entry in items)
def main():
    """Build a relational Competitive Radar in Notion from the analysis JSON.

    Creates three databases under PARENT_PAGE_ID — a Companies hub plus two
    satellites (Landmines & Intel, References) linked back to the hub via
    dual-property relations — then imports rows into each, wiring every
    satellite row to its competitor's hub page.
    """
    token = load_notion_token(TOKEN_FILE)
    data = load_json_data(JSON_FILE)
    print("🚀 Starting Relational Import...")
    # --- STEP 1: Define & Create Competitors Hub DB ---
    props_hub = {
        "Name": {"title": {}},
        "Website": {"url": {}},
        "Target Industries": {"multi_select": {}},
        "Portfolio Summary": {"rich_text": {}},
        "Silver Bullet": {"rich_text": {}},
        "USPs": {"rich_text": {}}
    }
    hub_db_id = create_database(token, PARENT_PAGE_ID, DB_TITLE_HUB, props_hub)
    # --- STEP 2: Define & Create Satellite DBs (Linked to Hub) ---
    # Landmines DB
    props_landmines = {
        "Statement / Question": {"title": {}},
        "Type": {"select": {
            "options": [
                {"name": "Landmine Question", "color": "red"},
                {"name": "Competitor Weakness", "color": "green"},
                {"name": "Competitor Strength", "color": "orange"}
            ]
        }},
        "Related Competitor": {
            "relation": {
                "database_id": hub_db_id,
                # dual_property makes Notion add the back-reference column
                # on the hub database automatically.
                "dual_property": {"synced_property_name": "Related Landmines & Intel"}
            }
        }
    }
    landmines_db_id = create_database(token, PARENT_PAGE_ID, DB_TITLE_LANDMINES, props_landmines)
    # References DB
    props_refs = {
        "Customer Name": {"title": {}},
        "Industry": {"select": {}},
        "Snippet": {"rich_text": {}},
        "Case Study URL": {"url": {}},
        "Related Competitor": {
            "relation": {
                "database_id": hub_db_id,
                "dual_property": {"synced_property_name": "Related References"}
            }
        }
    }
    refs_db_id = create_database(token, PARENT_PAGE_ID, DB_TITLE_REFS, props_refs)
    # --- STEP 3: Import Competitors (and store IDs) ---
    competitor_map = {}  # Maps Name -> Notion Page ID
    # Prefer the curated shortlist; fall back to raw candidates when absent.
    competitors = data.get('competitors_shortlist', []) or data.get('competitor_candidates', [])
    print(f"\nImporting {len(competitors)} Competitors...")
    for comp in competitors:
        c_name = comp.get('name')
        if not c_name: continue
        # Gather Data
        c_url = comp.get('url', '')
        # Find extended analysis data: join shortlist entries to their
        # analysis/battlecard records by competitor name ({} when missing).
        analysis_data = next((a for a in data.get('analyses', []) if a.get('competitor', {}).get('name') == c_name), {})
        battlecard_data = next((b for b in data.get('battlecards', []) if b.get('competitor_name') == c_name), {})
        industries = analysis_data.get('target_industries', [])
        portfolio = analysis_data.get('portfolio', [])
        portfolio_text = "\n".join([f"{p.get('product')}: {p.get('purpose')}" for p in portfolio])
        usps = format_list_as_bullets(analysis_data.get('differentiators', []))
        silver_bullet = battlecard_data.get('silver_bullet', '')
        # Create Page
        # [:2000] keeps rich_text content under Notion's per-block limit;
        # multi_select names strip commas, which Notion rejects in options.
        props = {
            "Name": {"title": [{"text": {"content": c_name}}]},
            "Portfolio Summary": {"rich_text": [{"text": {"content": portfolio_text[:2000]}}]},
            "USPs": {"rich_text": [{"text": {"content": usps[:2000]}}]},
            "Silver Bullet": {"rich_text": [{"text": {"content": silver_bullet[:2000]}}]},
            "Target Industries": {"multi_select": [{"name": i.replace(',', '')} for i in industries]},
        }
        if c_url: props["Website"] = {"url": c_url}
        page_id = create_page(token, hub_db_id, props)
        if page_id:
            competitor_map[c_name] = page_id
            print(f" - Created: {c_name}")
    # --- STEP 4: Import Landmines & Intel ---
    print("\nImporting Landmines & Intel...")
    for card in data.get('battlecards', []):
        c_name = card.get('competitor_name')
        comp_page_id = competitor_map.get(c_name)
        # Skip battlecards whose competitor hub page was not created above.
        if not comp_page_id: continue
        # 1. Landmines
        for q in card.get('landmine_questions', []):
            props = {
                "Statement / Question": {"title": [{"text": {"content": q}}]},
                "Type": {"select": {"name": "Landmine Question"}},
                "Related Competitor": {"relation": [{"id": comp_page_id}]}
            }
            create_page(token, landmines_db_id, props)
        # 2. Weaknesses
        # The JSON has "strengths_vs_weaknesses" combined. We'll import them as general Intel points.
        for point in card.get('strengths_vs_weaknesses', []):
            # Try to guess type based on text, or just default to Weakness context from Battlecard
            p_type = "Competitor Weakness"  # Assuming these are points for us to exploit
            props = {
                "Statement / Question": {"title": [{"text": {"content": point}}]},
                "Type": {"select": {"name": p_type}},
                "Related Competitor": {"relation": [{"id": comp_page_id}]}
            }
            create_page(token, landmines_db_id, props)
    print(" - Landmines imported.")
    # --- STEP 5: Import References ---
    print("\nImporting References...")
    count_refs = 0
    for ref_group in data.get('reference_analysis', []):
        c_name = ref_group.get('competitor_name')
        comp_page_id = competitor_map.get(c_name)
        if not comp_page_id: continue
        for ref in ref_group.get('references', []):
            r_name = ref.get('name', 'Unknown')
            r_industry = ref.get('industry', 'Unknown')
            r_snippet = ref.get('testimonial_snippet', '')
            r_url = ref.get('case_study_url', '')
            props = {
                "Customer Name": {"title": [{"text": {"content": r_name}}]},
                "Industry": {"select": {"name": r_industry}},
                "Snippet": {"rich_text": [{"text": {"content": r_snippet[:2000]}}]},
                "Related Competitor": {"relation": [{"id": comp_page_id}]}
            }
            # Only set the URL when it looks like an absolute link; Notion
            # rejects non-URL strings in url properties.
            if r_url and r_url.startswith('http'):
                props["Case Study URL"] = {"url": r_url}
            create_page(token, refs_db_id, props)
            count_refs += 1
    print(f" - {count_refs} References imported.")
    print("\n✅ Relational Import Complete!")
# Run the import only when executed as a script, not when imported.
if __name__ == "__main__":
    main()