[31e88f42] Update weekly summary script to use Gemini AI for executive summarization and add Mermaid charts

This commit is contained in:
2026-03-09 02:37:08 +00:00
parent f88a2e25a4
commit d1e881fd0d
4 changed files with 448 additions and 958 deletions

View File

@@ -1,6 +1,8 @@
import os
import re
import datetime
import json
import requests
from typing import List, Dict, Tuple
from dotenv import load_dotenv
@@ -61,9 +63,89 @@ def extract_status_updates(content: str, cutoff_date: datetime.datetime) -> List
return updates
def summarize_with_gemini(api_key: str, project_name: str, total_hours: float, raw_updates: str) -> str:
    """Condense one project's weekly status updates into an executive summary.

    Calls the Gemini REST API (gemini-2.5-flash). When no API key is available,
    or when the remote call fails for any reason, the raw update text is
    returned with an explanatory notice instead of raising.

    Args:
        api_key: Gemini API key; falsy values trigger the uncompressed fallback.
        project_name: Human-readable project name, embedded in the prompt.
        total_hours: Hours invested this week, formatted into the prompt.
        raw_updates: Concatenated raw status-update logs for the week.

    Returns:
        The stripped AI summary, or a fallback string containing the raw logs.
    """
    # No key -> no API call; hand the uncompressed logs straight back.
    if not api_key:
        return "Kein Gemini API-Key gefunden. Generiere unkomprimierte Zusammenfassung...\n\n" + raw_updates
    endpoint = f"https://generativelanguage.googleapis.com/v1beta/models/gemini-2.5-flash:generateContent?key={api_key}"
    prompt = f"""
Du bist ein technischer Projektmanager, der einen prägnanten Executive Summary für ein wöchentliches Montags-Meeting vorbereitet.
Deine Aufgabe ist es, die unstrukturierten Status-Updates des Entwicklers der letzten Woche zusammenzufassen.
Projekt: {project_name}
Investierte Zeit diese Woche: {format_time(total_hours)}
Hier sind die rohen Update-Logs der Woche:
<logs>
{raw_updates}
</logs>
Erstelle eine stark komprimierte Zusammenfassung mit folgendem Markdown-Format (verwende keine h1/h2, starte direkt mit Text oder h3):
### 🏆 Major Milestones
(Was wurde konkret erreicht/ausgeliefert/abgeschlossen? Max. 3-4 prägnante Bullet-Points)
### 💡 Wichtige Beschlüsse / Erkenntnisse
(Falls im Log vorhanden. Sonst weglassen. Max 2 Bullet-Points)
### 🚀 Nächste Schritte / Offene To-Dos
(Welche To-Dos wurden explizit für die Zukunft genannt? Max 3 Bullet-Points)
Fasse dich so kurz und präzise wie möglich. Ignoriere kleine Detail-Änderungen im Code und fokussiere dich auf den "Impact" und die übergeordneten Ziele.
"""
    request_body = {
        "contents": [{"parts": [{"text": prompt}]}],
        # Low temperature keeps the summary factual and reproducible.
        "generationConfig": {"temperature": 0.2},
    }
    try:
        resp = requests.post(
            endpoint,
            headers={'Content-Type': 'application/json'},
            json=request_body,
            timeout=30,
        )
        resp.raise_for_status()
        # First candidate, first text part — any unexpected payload shape
        # raises and is caught below together with network/HTTP errors.
        candidate_text = resp.json()['candidates'][0]['content']['parts'][0]['text']
        return candidate_text.strip()
    except Exception as e:
        print(f"Fehler bei der Gemini-Zusammenfassung für {project_name}: {e}")
        return f"Fehler bei der Zusammenfassung.\n\nRohdaten:\n{raw_updates}"
def generate_mermaid_pie(report_data: Dict) -> str:
    """Render per-project hour totals as a fenced Mermaid pie-chart block.

    Args:
        report_data: Mapping of project name -> dict holding an
            'invested_hours' float (other keys are ignored here).

    Returns:
        A ```mermaid code block. Projects whose hours round to 0 are omitted.
    """
    lines = ["```mermaid", "pie title Zeitverteilung nach Projekten (in Stunden)"]
    # Largest slices first so the chart legend mirrors the visual ordering.
    for project, p_data in sorted(report_data.items(), key=lambda x: x[1]['invested_hours'], reverse=True):
        hours = round(p_data['invested_hours'], 1)
        if hours > 0:
            # Mermaid pie labels are double-quoted; an embedded '"' in a
            # project name would break the chart syntax, so swap to "'".
            safe_label = project.replace('"', "'")
            lines.append(f' "{safe_label}": {hours}')
    lines.append("```")
    return "\n".join(lines)
def generate_ascii_bar_chart(report_data: Dict, max_width: int = 40) -> str:
    """Generate a plain-text bar chart of hours per project.

    Fallback for viewers that do not render Mermaid. Projects with zero
    hours are skipped; bar lengths are scaled relative to the busiest project.

    Args:
        report_data: Mapping of project name -> dict holding an
            'invested_hours' float.
        max_width: Character width of the longest bar.

    Returns:
        A fenced ```text code block.
    """
    lines = ["```text"]
    lines.append("Zeitverteilung nach Projekten (Stunden)")
    lines.append("-" * 50)
    # Scale all bars against the largest project; default=0 guards an empty report.
    max_hours = max((p_data['invested_hours'] for p_data in report_data.values()), default=0)
    for project, p_data in sorted(report_data.items(), key=lambda x: x[1]['invested_hours'], reverse=True):
        hours = p_data['invested_hours']
        if hours > 0:
            bar_len = int((hours / max_hours) * max_width) if max_hours > 0 else 0
            # FIX: the bar was built from an empty string ("" * n), which
            # rendered nothing; use a solid block as the fill character.
            bar = "█" * bar_len
            # Truncate long names so the three columns stay aligned.
            project_short = (project[:25] + '..') if len(project) > 27 else project
            lines.append(f"{project_short:<27} | {format_time(hours):>6} | {bar}")
    lines.append("```")
    return "\n".join(lines)
def main():
load_dotenv(os.path.join(os.path.abspath(os.path.join(os.path.dirname(__file__), '..')), '.env'))
token = os.environ.get('NOTION_API_KEY')
gemini_key = os.environ.get('GEMINI_API_KEY')
if not token:
print("Error: NOTION_API_KEY environment variable not found.")
return
@@ -86,7 +168,6 @@ def main():
project_lookup[p_id] = p_name
# 2. Fetch Tasks modified in the last 7 days
# Using a 7-day lookback window
now = datetime.datetime.utcnow()
cutoff_date = now - datetime.timedelta(days=7)
cutoff_iso = cutoff_date.isoformat() + "Z"
@@ -102,7 +183,6 @@ def main():
tasks_data = query_notion_database(token, tasks_db_id, filter_payload=filter_payload)
print(f"Found {len(tasks_data)} recently edited tasks.")
# Data structure to hold the report
report_data = {}
for task in tasks_data:
@@ -131,44 +211,57 @@ def main():
report_data[project_name]["invested_hours"] += update["invested_hours"]
report_data[project_name]["tasks"][task_name].append(update)
# 3. Generate Markdown Report
# 3. Generate Markdown Report (AI Summarized)
report_lines = []
report_lines.append(f"# 📅 Weekly Summary ({cutoff_date.strftime('%Y-%m-%d')} bis {now.strftime('%Y-%m-%d')})")
report_lines.append(f"# 📊 Executive Weekly Summary ({cutoff_date.strftime('%Y-%m-%d')} bis {now.strftime('%Y-%m-%d')})")
report_lines.append("")
total_hours = sum(p_data["invested_hours"] for p_data in report_data.values())
report_lines.append(f"**Gesamte investierte Zeit:** {format_time(total_hours)}")
report_lines.append(f"**Gesamte investierte Zeit der Woche:** {format_time(total_hours)}")
report_lines.append("")
if not report_data:
report_lines.append("*Keine Status-Updates in den letzten 7 Tagen gefunden.*")
else:
for project_name, p_data in sorted(report_data.items()):
report_lines.append(f"## 📁 Projekt: {project_name}")
report_lines.append(f"**Zeit für Projekt:** {format_time(p_data['invested_hours'])}")
report_lines.append("")
# Add Graphical time distribution
report_lines.append("## ⏱️ Zeitverteilung & Fokus")
report_lines.append(generate_mermaid_pie(report_data))
report_lines.append("\n<details><summary>Text-basierte Zeitverteilung (Fallback)</summary>\n")
report_lines.append(generate_ascii_bar_chart(report_data))
report_lines.append("\n</details>\n")
report_lines.append("---")
report_lines.append("")
for project_name, p_data in sorted(report_data.items(), key=lambda x: x[1]['invested_hours'], reverse=True):
print(f"Fasse zusammen (AI): {project_name} ...")
report_lines.append(f"## 📁 {project_name} ({format_time(p_data['invested_hours'])})")
# Combine all raw texts for the project to send to Gemini
raw_updates_text = ""
for task_name, updates in p_data["tasks"].items():
report_lines.append(f"### 📋 Task: {task_name}")
raw_updates_text += f"\nTASK: {task_name}\n"
for update in sorted(updates, key=lambda x: x['date']):
report_lines.append(f"**Update vom {update['date']} {update['time']}** (Zeit: {format_time(update['invested_hours'])})")
report_lines.append("")
# Indent the summary slightly for better readability
summary_indented = "\n".join(f"> {line}" for line in update['summary'].split("\n"))
report_lines.append(summary_indented)
report_lines.append("")
report_lines.append("---")
raw_updates_text += f"UPDATE ({update['date']}):\n{update['summary']}\n"
ai_summary = summarize_with_gemini(gemini_key, project_name, p_data['invested_hours'], raw_updates_text)
report_lines.append(ai_summary)
report_lines.append("\n---")
report_lines.append("")
report_content = "\n".join(report_lines)
output_filename = f"Weekly_Summary_{now.strftime('%Y-%m-%d')}.md"
output_filename = f"Executive_Weekly_Summary_{now.strftime('%Y-%m-%d')}.md"
output_path = os.path.join(os.path.abspath(os.path.join(os.path.dirname(__file__), '..')), output_filename)
with open(output_path, "w", encoding="utf-8") as f:
f.write(report_content)
print(f"✅ Weekly Summary erfolgreich generiert: {output_path}")
print(f" Executive Weekly Summary erfolgreich generiert: {output_path}")
# Update latest summary shortcut
shortcut_path = os.path.join(os.path.abspath(os.path.join(os.path.dirname(__file__), '..')), 'LATEST_WEEKLY_SUMMARY.md')
with open(shortcut_path, "w", encoding="utf-8") as f:
f.write(report_content)
if __name__ == "__main__":
main()