Files
Brancheneinstufung2/scripts/update_projects_cache.py
2026-02-18 11:21:46 +00:00

56 lines
1.8 KiB
Python

import os
import requests
import json
from dotenv import load_dotenv
# This script is intended to be run by a cron job to keep Notion data fresh.
# It's a simplified version of the sync scripts used for CE.
load_dotenv(dotenv_path="/home/node/clawd/.env")

NOTION_TOKEN = os.getenv("NOTION_API_KEY")
# Fail fast with a clear message: without this check a missing key produces
# "Authorization: Bearer None" and every API call fails with an opaque 401.
if not NOTION_TOKEN:
    raise SystemExit("NOTION_API_KEY is not set; check /home/node/clawd/.env")

# Shared headers for all Notion API calls; pinned to the 2022-06-28 API version.
HEADERS = {
    "Authorization": f"Bearer {NOTION_TOKEN}",
    "Content-Type": "application/json",
    "Notion-Version": "2022-06-28",
}
def find_db_id(query_name):
    """Return the id of the first Notion database matching *query_name*.

    Uses the Notion search endpoint filtered to databases. Returns None when
    the search fails or yields no results.
    """
    url = "https://api.notion.com/v1/search"
    payload = {"query": query_name, "filter": {"value": "database", "property": "object"}}
    # Timeout keeps the cron job from hanging forever on a stalled connection.
    resp = requests.post(url, headers=HEADERS, json=payload, timeout=30)
    if resp.status_code == 200:
        results = resp.json().get("results", [])
        if results:
            # Search results are relevance-ranked; take the best match.
            return results[0]["id"]
    return None
def fetch_and_cache(db_name, output_file):
    """Query all rows of the Notion database named *db_name* and cache them.

    Writes the combined "results" list as pretty-printed JSON to *output_file*.
    Prints an error and returns early if the database cannot be found or a
    request fails (a partial cache is never written).
    """
    print(f"Fetching {db_name}...")
    db_id = find_db_id(db_name)
    if not db_id:
        print(f"Error: Could not find DB '{db_name}'.")
        return
    url = f"https://api.notion.com/v1/databases/{db_id}/query"
    # The query endpoint returns at most 100 results per call; follow the
    # has_more/next_cursor pagination so large databases are fully cached.
    results = []
    payload = {}
    while True:
        resp = requests.post(url, headers=HEADERS, json=payload, timeout=30)
        if resp.status_code != 200:
            print(f"Error fetching {db_name}: {resp.text}")
            return
        data = resp.json()
        results.extend(data.get("results", []))
        if not data.get("has_more"):
            break
        payload = {"start_cursor": data.get("next_cursor")}
    with open(output_file, "w") as f:
        json.dump(results, f, indent=2)
    print(f"✅ Cached {db_name} to {output_file}")
if __name__ == "__main__":
# Define what to cache and where
# This keeps a local snapshot of key Notion DBs for quick reference without API calls.
os.makedirs("data/cache", exist_ok=True)
fetch_and_cache("Industries", "data/cache/industries.json")
fetch_and_cache("Personas", "data/cache/personas.json")
fetch_and_cache("Tasks", "data/cache/tasks.json")
print("Cache update complete.")