fix(competitor-analysis): final migration fixes and documentation updates

This commit is contained in:
2026-05-15 20:07:45 +00:00
parent efdd134556
commit 6e1a3be8cf
4 changed files with 160 additions and 53 deletions

130
patch_reminder.py Normal file
View File

@@ -0,0 +1,130 @@
"""One-shot patch script for fotograf-de-scraper/backend/main.py.

Replaces the strict quick-login-link check (which silently skipped any
child without a scraped quick-login URL) with a fallback that builds a
direct /login/<code> link from the access code instead, so no recipient
is dropped from the mailing list.

Exits with status 0 on success, 1 if the expected code was not found
(e.g. the file was already patched or has diverged).
"""
import sys

# Path of the file to patch, relative to the working directory.
TARGET_PATH = 'fotograf-de-scraper/backend/main.py'

# Exact code currently in main.py that we expect to replace.
# NOTE: this is runtime data — it must match main.py byte-for-byte.
old_code = """ # 3. Aggregate results by Email
aggregation = {}
missing_links_count = 0
for c in candidates:
email = c.email_eltern.lower()
# Skip if this email already has a purchase for ANOTHER child
if exclude_purchased_emails and email in purchased_emails:
continue
# STRICT LINK CHECK: If we don't have a scraped Quick Login URL, skip this child.
# We don't want to send broken /login/access/ links.
if not c.quick_login_url:
missing_links_count += 1
continue
if email not in aggregation:
aggregation[email] = {
"email": email,
"parent_name": c.vorname_eltern if c.vorname_eltern else "Liebe Eltern",
"children": [],
"links": []
}
# Add child name
child_name = c.vorname_kind or ""
child_label = "Familienbilder" if child_name.lower() == "familie" else child_name
if child_label and child_label not in aggregation[email]["children"]:
aggregation[email]["children"].append(child_label)
# Add Quick Login Link (Guaranteed to exist here)
html_link = f'<a href="{c.quick_login_url}">Fotos von {child_label}</a>'
if html_link not in aggregation[email]["links"]:
aggregation[email]["links"].append(html_link)
# 4. Format for Supermailer/Gmail
final_result = []
for email, data in aggregation.items():
children_str = " und ".join(data["children"]) if len(data["children"]) > 1 else (data["children"][0] if data["children"] else "Eurem Kind")
links_html = "".join([f"{l}<br>" for l in data["links"]])
final_result.append({
"E-Mail-Adresse Käufer": email,
"Name Käufer": data["parent_name"],
"Kindernamen": children_str,
"Anzahl Kinder": len(data["children"]),
"LinksHTML": links_html
})
progress_msg = f"Analyse fertig! {len(final_result)} Empfänger identifiziert."
if missing_links_count > 0:
progress_msg += f" (Hinweis: {missing_links_count} Kinder ignoriert, da Quick-Login-Link fehlt. Bitte vorher 'Daten abgleichen' drücken!)"
task_store[task_id] = {"""

# Replacement code: same aggregation, but with a generated fallback link
# when no "/gc/" quick-login URL was scraped.
new_code = """ # 3. Aggregate results by Email
aggregation = {}
missing_links_count = 0
base_url = "https://kinderfoto-erding.fotograf.de" if account_type == "kiga" else "https://kinderfotos-erding.fotograf.de"
for c in candidates:
email = c.email_eltern.lower()
# Skip if this email already has a purchase for ANOTHER child
if exclude_purchased_emails and email in purchased_emails:
continue
if email not in aggregation:
aggregation[email] = {
"email": email,
"parent_name": c.vorname_eltern if c.vorname_eltern else "Liebe Eltern",
"children": [],
"links": []
}
# Add child name
child_name = c.vorname_kind or ""
child_label = "Familienbilder" if child_name.lower() == "familie" else child_name
if child_label and child_label not in aggregation[email]["children"]:
aggregation[email]["children"].append(child_label)
# Determine best link
if c.quick_login_url and "/gc/" in c.quick_login_url:
# Use scraped direct link if available
final_link = c.quick_login_url
link_text = f"Fotos von {child_label}"
else:
# Fallback to direct code navigation link
final_link = f"{base_url}/login/{c.zugangscode}"
link_text = f"Fotos von {child_label}"
missing_links_count += 1
html_link = f'<a href="{final_link}">{link_text}</a>'
if html_link not in aggregation[email]["links"]:
aggregation[email]["links"].append(html_link)
# 4. Format for Supermailer/Gmail
final_result = []
for email, data in aggregation.items():
children_str = " und ".join(data["children"]) if len(data["children"]) > 1 else (data["children"][0] if data["children"] else "Eurem Kind")
links_html = "".join([f"{l}<br>" for l in data["links"]])
final_result.append({
"E-Mail-Adresse Käufer": email,
"Name Käufer": data["parent_name"],
"Kindernamen": children_str,
"Anzahl Kinder": len(data["children"]),
"LinksHTML": links_html
})
progress_msg = f"Analyse fertig! {len(final_result)} Empfänger identifiziert."
if missing_links_count > 0:
progress_msg += f" (Hinweis: {missing_links_count} Links wurden generiert, da sie noch nicht gescraped wurden.)"
task_store[task_id] = {"""


def patch_content(content):
    """Apply the old_code -> new_code substitution to *content*.

    Pure function (no I/O) so it can be tested in isolation.

    Returns:
        (patched_text, True)  if old_code was found and replaced;
        (content, False)      unchanged, if old_code was not found.
    """
    if old_code in content:
        return content.replace(old_code, new_code), True
    return content, False


def main():
    """Read the target file, apply the patch, and write it back."""
    # Explicit UTF-8: the patch text contains umlauts ("Käufer",
    # "Empfänger") — relying on the platform's locale default encoding
    # would corrupt or fail the read/write on e.g. Windows.
    with open(TARGET_PATH, 'r', encoding='utf-8') as f:
        content = f.read()
    patched, found = patch_content(content)
    if found:
        with open(TARGET_PATH, 'w', encoding='utf-8') as f:
            f.write(patched)
        print("Patched successfully")
    else:
        print("Old code not found")
        # Nonzero exit so a calling shell or CI step can detect failure
        # (the original printed the message but still exited 0).
        sys.exit(1)


if __name__ == "__main__":
    main()