feat(notion): Add import script for competitor references
This commit is contained in:
143
import_references_to_notion.py
Normal file
143
import_references_to_notion.py
Normal file
@@ -0,0 +1,143 @@
|
||||
import json
|
||||
import os
|
||||
import requests
|
||||
import sys
|
||||
|
||||
# Configuration
# Input file: competitor-analysis JSON produced by an earlier pipeline step.
JSON_FILE = 'analysis_robo-planet.de.json'
# Plain-text file holding the Notion integration token (stripped on load).
TOKEN_FILE = 'notion_token.txt'
# Root Page ID from notion_integration.md
PARENT_PAGE_ID = "2e088f42-8544-8024-8289-deb383da3818"
# Title of the database this script creates under the parent page.
DB_TITLE = "Competitive Radar - References"
|
||||
|
||||
def load_json_data(filepath):
    """Load and parse the competitor-analysis JSON file.

    Args:
        filepath: Path to the JSON file to read.

    Returns:
        The parsed JSON value (typically a dict).

    Exits:
        With status 1 if the file is missing or contains invalid JSON.
    """
    try:
        # JSON is defined to be UTF-8 (RFC 8259); don't depend on the
        # platform's locale default encoding.
        with open(filepath, 'r', encoding='utf-8') as f:
            return json.load(f)
    except FileNotFoundError:
        print(f"Error: File '{filepath}' not found.")
        sys.exit(1)
    except json.JSONDecodeError:
        print(f"Error: Failed to decode JSON from '{filepath}'.")
        sys.exit(1)
|
||||
|
||||
def load_notion_token(filepath):
    """Read the Notion API token from *filepath*.

    Surrounding whitespace (including the trailing newline most editors
    add) is stripped. Exits with status 1 when the file does not exist.
    """
    try:
        with open(filepath, 'r') as token_file:
            raw_token = token_file.read()
    except FileNotFoundError:
        print(f"Error: Token file '{filepath}' not found.")
        sys.exit(1)
    return raw_token.strip()
|
||||
|
||||
def create_database(token, parent_page_id):
    """Create the References database as a child of *parent_page_id*.

    Prints progress, exits with status 1 on any non-200 API response
    (dumping the response body for diagnosis), and returns the newly
    created database's ID on success.
    """
    request_headers = {
        "Authorization": f"Bearer {token}",
        "Notion-Version": "2022-06-28",
        "Content-Type": "application/json",
    }

    # Column schema for the references table.
    schema = {
        "Reference Name": {"title": {}},
        "Competitor": {"select": {}},
        "Industry": {"select": {}},
        "Snippet": {"rich_text": {}},
        "Case Study URL": {"url": {}},
    }
    request_body = {
        "parent": {"type": "page_id", "page_id": parent_page_id},
        "title": [{"type": "text", "text": {"content": DB_TITLE}}],
        "properties": schema,
    }

    print(f"Creating database '{DB_TITLE}'...")
    response = requests.post(
        "https://api.notion.com/v1/databases",
        headers=request_headers,
        json=request_body,
    )

    if response.status_code != 200:
        print(f"Error creating database: {response.status_code}")
        print(response.text)
        sys.exit(1)

    created = response.json()
    print(f"Database created successfully! ID: {created['id']}")
    return created['id']
|
||||
|
||||
def add_reference(token, db_id, competitor, reference):
    """Add one reference entry as a page in the Notion database.

    Args:
        token: Notion integration token.
        db_id: ID of the target database (from create_database).
        competitor: Competitor name for the 'Competitor' select column.
        reference: Dict with optional keys 'name', 'industry',
            'testimonial_snippet' and 'case_study_url'.

    A failure to add an individual entry is printed and swallowed so the
    overall import can continue with the remaining references.
    """
    url = "https://api.notion.com/v1/pages"
    headers = {
        "Authorization": f"Bearer {token}",
        "Notion-Version": "2022-06-28",
        "Content-Type": "application/json"
    }

    # JSON null values arrive as None, which .get() default values do not
    # guard against (the key exists). `or` covers missing keys AND nulls,
    # preventing a TypeError on len(snippet) below.
    ref_name = reference.get("name") or "Unknown Reference"
    industry = reference.get("industry") or "Unknown"
    snippet = reference.get("testimonial_snippet") or ""
    case_url = reference.get("case_study_url")

    # Truncate snippet if too long (Notion limit is 2000 chars for a
    # rich_text content string).
    if len(snippet) > 2000:
        snippet = snippet[:1997] + "..."

    properties = {
        "Reference Name": {"title": [{"text": {"content": ref_name}}]},
        "Competitor": {"select": {"name": competitor}},
        "Industry": {"select": {"name": industry}},
        "Snippet": {"rich_text": [{"text": {"content": snippet}}]}
    }

    # Only set the URL property for http(s) links; Notion rejects values
    # that are not valid URLs.
    if case_url and case_url.startswith("http"):
        properties["Case Study URL"] = {"url": case_url}

    payload = {
        "parent": {"database_id": db_id},
        "properties": properties
    }

    try:
        response = requests.post(url, headers=headers, json=payload)
        response.raise_for_status()
        print(f" - Added: {ref_name}")
    except requests.exceptions.HTTPError as e:
        print(f" - Failed to add {ref_name}: {e}")
        # print(response.text) # Uncomment for debugging
|
||||
|
||||
def main():
    """Entry point: load inputs, create the Notion database, import rows."""
    token = load_notion_token(TOKEN_FILE)
    data = load_json_data(JSON_FILE)

    # The analysis file may legitimately lack reference data; warn but
    # continue so the (empty) database still gets created. A production
    # version might prefer to abort here.
    analysis_entries = data.get("reference_analysis", [])
    if not analysis_entries:
        print("Warning: No reference analysis data found in JSON.")

    # A fresh database is created on every run. Searching for an existing
    # one to avoid duplicates is deliberately out of scope for this script.
    db_id = create_database(token, PARENT_PAGE_ID)

    print("Importing references...")
    imported = 0
    for competitor_entry in analysis_entries:
        competitor = competitor_entry.get("competitor_name", "Unknown Competitor")
        references = competitor_entry.get("references", [])

        if not references:
            print(f"Skipping {competitor} (No references found)")
            continue

        print(f"Processing {competitor} ({len(references)} references)...")
        for reference in references:
            add_reference(token, db_id, competitor, reference)
            imported += 1

    print(f"\nImport complete! {imported} references added.")


if __name__ == "__main__":
    main()
|
||||
Reference in New Issue
Block a user