[31e88f42] Add weekly summary generation script for Notion tasks
This commit is contained in:
174
scripts/generate_weekly_summary.py
Normal file
174
scripts/generate_weekly_summary.py
Normal file
@@ -0,0 +1,174 @@
|
|||||||
|
import os
|
||||||
|
import re
|
||||||
|
import datetime
|
||||||
|
from typing import List, Dict, Tuple
|
||||||
|
from dotenv import load_dotenv
|
||||||
|
|
||||||
|
import sys
|
||||||
|
# Make dev_session from /app available
|
||||||
|
sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))
|
||||||
|
from dev_session import find_database_by_title, query_notion_database, get_page_content, get_page_title
|
||||||
|
|
||||||
|
def parse_time(time_str: str) -> float:
    """Parse a 'HH:MM' string into decimal hours.

    Returns 0.0 for malformed input (missing colon, non-numeric parts,
    wrong number of fields, or a non-string value) instead of raising.
    """
    try:
        hours, minutes = map(int, time_str.split(':'))
    except (ValueError, AttributeError):
        # ValueError: non-numeric parts or wrong field count on unpack;
        # AttributeError: time_str is not a string (no .split).
        # Narrowed from a bare `except:` which also swallowed
        # KeyboardInterrupt/SystemExit.
        return 0.0
    return hours + (minutes / 60.0)
|
||||||
|
|
||||||
|
def format_time(decimal_hours: float) -> str:
    """Render decimal hours as a zero-padded 'HH:MM' string."""
    whole = int(decimal_hours)
    remainder = int(round((decimal_hours - whole) * 60))
    # Rounding the fractional part can produce a full hour
    # (e.g. 2.9999 h -> 2 h + 60 min); carry it over.
    if remainder == 60:
        whole, remainder = whole + 1, 0
    return f"{whole:02d}:{remainder:02d}"
|
||||||
|
|
||||||
|
def extract_status_updates(content: str, cutoff_date: datetime.datetime) -> List[Dict]:
    """Collect status updates from markdown *content* dated on/after *cutoff_date*.

    Matches sections of the form:

        ## 🤖 Status-Update (YYYY-MM-DD HH:MM Berlin Time)
        ```
        ...body...
        ```

    Returns a list of dicts with keys: "date", "time", "invested_hours",
    "summary".
    """
    heading_re = re.compile(
        r"## 🤖 Status-Update \((?P<date>\d{4}-\d{2}-\d{2}) (?P<time>\d{2}:\d{2}).*?\)\n```\n(?P<body>.*?)\n```",
        re.DOTALL,
    )

    collected: List[Dict] = []
    for hit in heading_re.finditer(content):
        day = hit.group('date')
        clock = hit.group('time')
        body_text = hit.group('body').strip()

        # Skip anything older than the reporting window.
        if datetime.datetime.strptime(day, "%Y-%m-%d") < cutoff_date:
            continue

        # Session time, if the body carries an "Investierte Zeit" line.
        time_hit = re.search(r"Investierte Zeit in dieser Session:\s*(?P<hhmm>\d{2}:\d{2})", body_text)
        hours_spent = parse_time(time_hit.group('hhmm')) if time_hit else 0.0

        # Text after the "Arbeitszusammenfassung:" label; fall back to the whole body.
        summary_hit = re.search(r"Arbeitszusammenfassung:\s*(.*)", body_text, re.DOTALL)

        collected.append({
            "date": day,
            "time": clock,
            "invested_hours": hours_spent,
            "summary": summary_hit.group(1).strip() if summary_hit else body_text,
        })

    return collected
|
||||||
|
|
||||||
|
def main():
    """Generate a weekly markdown summary of Notion task status updates.

    Reads NOTION_API_KEY from the project .env, pulls tasks edited within
    the last 7 days from the "Tasks [UT]" database, extracts their status
    updates, groups them per project, and writes a Weekly_Summary_*.md
    file into the repository root.
    """
    # Repository root (parent of this scripts/ directory) — computed once
    # instead of being rebuilt for .env loading and output path separately.
    repo_root = os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))
    load_dotenv(os.path.join(repo_root, '.env'))

    token = os.environ.get('NOTION_API_KEY')
    if not token:
        print("Error: NOTION_API_KEY environment variable not found.")
        return

    print("Fetching Notion configuration...")
    tasks_db_id = find_database_by_title(token, "Tasks [UT]")
    projects_db_id = find_database_by_title(token, "Projects [UT]")

    if not tasks_db_id or not projects_db_id:
        print("Could not find Tasks [UT] or Projects [UT] databases.")
        return

    # 1. Fetch projects once and build an id -> name lookup table.
    print("Fetching Projects...")
    projects_data = query_notion_database(token, projects_db_id)
    project_lookup = {proj['id']: get_page_title(proj) for proj in projects_data}

    # 2. Fetch tasks modified within the 7-day lookback window.
    # datetime.utcnow() is deprecated since Python 3.12; take an aware UTC
    # timestamp and strip tzinfo so the naive-datetime comparisons and the
    # `isoformat() + "Z"` string below behave exactly as before.
    now = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
    cutoff_date = now - datetime.timedelta(days=7)
    cutoff_iso = cutoff_date.isoformat() + "Z"

    filter_payload = {
        "property": "Edited",
        "last_edited_time": {
            "on_or_after": cutoff_iso
        }
    }

    print(f"Fetching Tasks edited since {cutoff_date.strftime('%Y-%m-%d')}...")
    tasks_data = query_notion_database(token, tasks_db_id, filter_payload=filter_payload)
    print(f"Found {len(tasks_data)} recently edited tasks.")

    # report_data: project name -> {"invested_hours": float,
    #                               "tasks": {task name: [update dicts]}}
    report_data = {}

    # Updates are filtered per calendar day, so compare against midnight
    # of the cutoff (hoisted out of the loop — it is loop-invariant).
    day_cutoff = cutoff_date.replace(hour=0, minute=0, second=0, microsecond=0)

    for task in tasks_data:
        task_id = task['id']
        task_name = get_page_title(task)

        # Resolve the related project (first relation entry), if any.
        project_id = None
        relation_prop = task.get('properties', {}).get('Project', {}).get('relation', [])
        if relation_prop:
            project_id = relation_prop[0]['id']
        project_name = project_lookup.get(project_id, "Kein Projekt zugeordnet")

        content = get_page_content(token, task_id)
        updates = extract_status_updates(content, day_cutoff)
        if not updates:
            continue

        project_entry = report_data.setdefault(project_name, {"invested_hours": 0.0, "tasks": {}})
        task_updates = project_entry["tasks"].setdefault(task_name, [])
        for update in updates:
            project_entry["invested_hours"] += update["invested_hours"]
            task_updates.append(update)

    # 3. Render the markdown report.
    report_lines = []
    report_lines.append(f"# 📅 Weekly Summary ({cutoff_date.strftime('%Y-%m-%d')} bis {now.strftime('%Y-%m-%d')})")
    report_lines.append("")

    total_hours = sum(p_data["invested_hours"] for p_data in report_data.values())
    report_lines.append(f"**Gesamte investierte Zeit:** {format_time(total_hours)}")
    report_lines.append("")

    if not report_data:
        report_lines.append("*Keine Status-Updates in den letzten 7 Tagen gefunden.*")
    else:
        for project_name, p_data in sorted(report_data.items()):
            report_lines.append(f"## 📁 Projekt: {project_name}")
            report_lines.append(f"**Zeit für Projekt:** {format_time(p_data['invested_hours'])}")
            report_lines.append("")

            for task_name, updates in p_data["tasks"].items():
                report_lines.append(f"### 📋 Task: {task_name}")
                for update in sorted(updates, key=lambda x: x['date']):
                    report_lines.append(f"**Update vom {update['date']} {update['time']}** (Zeit: {format_time(update['invested_hours'])})")
                    report_lines.append("")
                    # Blockquote the summary for readability in rendered markdown.
                    summary_indented = "\n".join(f"> {line}" for line in update['summary'].split("\n"))
                    report_lines.append(summary_indented)
                    report_lines.append("")
                report_lines.append("---")
                report_lines.append("")

    report_content = "\n".join(report_lines)

    output_path = os.path.join(repo_root, f"Weekly_Summary_{now.strftime('%Y-%m-%d')}.md")
    with open(output_path, "w", encoding="utf-8") as f:
        f.write(report_content)

    print(f"✅ Weekly Summary erfolgreich generiert: {output_path}")


if __name__ == "__main__":
    main()
|
||||||
Reference in New Issue
Block a user