diff --git a/fotograf-de-scraper/backend/main.py b/fotograf-de-scraper/backend/main.py
index 64f8a2fdd..340866197 100644
--- a/fotograf-de-scraper/backend/main.py
+++ b/fotograf-de-scraper/backend/main.py
@@ -585,6 +585,8 @@ def process_reminder_analysis(task_id: str, job_id: str, account_type: str, max_
aggregation = {}
missing_links_count = 0
+ base_url = "https://kinderfoto-erding.fotograf.de" if account_type == "kiga" else "https://kinderfotos-erding.fotograf.de"
+
for c in candidates:
email = c.email_eltern.lower()
@@ -592,12 +594,6 @@ def process_reminder_analysis(task_id: str, job_id: str, account_type: str, max_
if exclude_purchased_emails and email in purchased_emails:
continue
- # STRICT LINK CHECK: If we don't have a scraped Quick Login URL, skip this child.
- # We don't want to send broken /login/access/ links.
- if not c.quick_login_url:
- missing_links_count += 1
- continue
-
if email not in aggregation:
aggregation[email] = {
"email": email,
@@ -612,8 +608,18 @@ def process_reminder_analysis(task_id: str, job_id: str, account_type: str, max_
if child_label and child_label not in aggregation[email]["children"]:
aggregation[email]["children"].append(child_label)
- # Add Quick Login Link (Guaranteed to exist here)
- html_link = f'<a href="{c.quick_login_url}">Fotos von {child_label}</a>'
+ # Determine best link
+ if c.quick_login_url and "/gc/" in c.quick_login_url:
+ # Use scraped direct link if available
+ final_link = c.quick_login_url
+ link_text = f"Fotos von {child_label}"
+ else:
+ # Fallback to direct code navigation link
+ final_link = f"{base_url}/login/{c.zugangscode}"
+ link_text = f"Fotos von {child_label}"
+ missing_links_count += 1
+
+ html_link = f'<a href="{final_link}">{link_text}</a>'
if html_link not in aggregation[email]["links"]:
aggregation[email]["links"].append(html_link)
@@ -633,7 +639,7 @@ def process_reminder_analysis(task_id: str, job_id: str, account_type: str, max_
progress_msg = f"Analyse fertig! {len(final_result)} Empfänger identifiziert."
if missing_links_count > 0:
- progress_msg += f" (Hinweis: {missing_links_count} Kinder ignoriert, da Quick-Login-Link fehlt. Bitte vorher 'Daten abgleichen' drücken!)"
+ progress_msg += f" (Hinweis: {missing_links_count} Links wurden generiert, da sie noch nicht gescraped wurden.)"
task_store[task_id] = {
"status": "completed",
diff --git a/patch.py b/patch.py
deleted file mode 100644
index 0f02f8be0..000000000
--- a/patch.py
+++ /dev/null
@@ -1,44 +0,0 @@
-import re
-
-with open('fotograf-de-scraper/backend/main.py', 'r') as f:
- content = f.read()
-
-old_code = """ try:
- wait_short = WebDriverWait(driver, 5)
- quick_link_el = wait_short.until(EC.presence_of_element_located((By.XPATH, SELECTORS["quick_login_url"])))
- quick_link = quick_link_el.get_attribute("href")
-
- # Update DB
- codes_to_find[code].quick_login_url = quick_link
- del codes_to_find[code]
- links_found += 1
-
- if links_found % 5 == 0: db.commit()
- except:
- logger.warning(f"Could not find quick login link for {code}")"""
-
-new_code = """ for attempt in range(3):
- try:
- wait_short = WebDriverWait(driver, 5)
- quick_link_el = wait_short.until(EC.visibility_of_element_located((By.XPATH, SELECTORS["quick_login_url"])))
- quick_link = quick_link_el.get_attribute("href")
-
- # Update DB
- codes_to_find[code].quick_login_url = quick_link
- del codes_to_find[code]
- links_found += 1
-
- if links_found % 5 == 0: db.commit()
- break
- except Exception as e:
- time.sleep(1)
- else:
- logger.warning(f"Could not find quick login link for {code}")"""
-
-if old_code in content:
- content = content.replace(old_code, new_code)
- with open('fotograf-de-scraper/backend/main.py', 'w') as f:
- f.write(content)
- print("Patched successfully")
-else:
- print("Old code not found")
diff --git a/patch_frontend.py b/patch_frontend.py
new file mode 100644
index 000000000..ccbccffd7
--- /dev/null
+++ b/patch_frontend.py
@@ -0,0 +1,15 @@
+import sys
+
+with open('fotograf-de-scraper/frontend/src/App.tsx', 'r') as f:
+ content = f.read()
+
+old_body = """ const [emailBody, setEmailBody] = useState("Hallo {Name Käufer},\\n\\ndeine Fotos sind fertig und warten auf dich! Kopiere einfach deinen Zugangscode und klicke auf den Link zum Shop, um dich einzuloggen:\\n\\n{LinksHTML}\\n\\nViel Spaß beim Anschauen!");"""
+new_body = """ const [emailBody, setEmailBody] = useState("Hallo {Name Käufer},\\n\\ndeine Fotos sind fertig und warten auf dich! Klicke einfach auf die Links unten, um direkt zu den Galerien zu gelangen:\\n\\n{LinksHTML}\\n\\nViel Spaß beim Anschauen!");"""
+
+if old_body in content:
+ content = content.replace(old_body, new_body)
+ with open('fotograf-de-scraper/frontend/src/App.tsx', 'w') as f:
+ f.write(content)
+ print("Frontend patched")
+else:
+ print("Frontend code not found")
diff --git a/patch_reminder.py b/patch_reminder.py
new file mode 100644
index 000000000..6498a914e
--- /dev/null
+++ b/patch_reminder.py
@@ -0,0 +1,130 @@
+import sys
+
+with open('fotograf-de-scraper/backend/main.py', 'r') as f:
+ content = f.read()
+
+old_code = """ # 3. Aggregate results by Email
+ aggregation = {}
+ missing_links_count = 0
+
+ for c in candidates:
+ email = c.email_eltern.lower()
+
+ # Skip if this email already has a purchase for ANOTHER child
+ if exclude_purchased_emails and email in purchased_emails:
+ continue
+
+ # STRICT LINK CHECK: If we don't have a scraped Quick Login URL, skip this child.
+ # We don't want to send broken /login/access/ links.
+ if not c.quick_login_url:
+ missing_links_count += 1
+ continue
+
+ if email not in aggregation:
+ aggregation[email] = {
+ "email": email,
+ "parent_name": c.vorname_eltern if c.vorname_eltern else "Liebe Eltern",
+ "children": [],
+ "links": []
+ }
+
+ # Add child name
+ child_name = c.vorname_kind or ""
+ child_label = "Familienbilder" if child_name.lower() == "familie" else child_name
+ if child_label and child_label not in aggregation[email]["children"]:
+ aggregation[email]["children"].append(child_label)
+
+ # Add Quick Login Link (Guaranteed to exist here)
+ html_link = f'<a href="{c.quick_login_url}">Fotos von {child_label}</a>'
+ if html_link not in aggregation[email]["links"]:
+ aggregation[email]["links"].append(html_link)
+
+ # 4. Format for Supermailer/Gmail
+ final_result = []
+ for email, data in aggregation.items():
+ children_str = " und ".join(data["children"]) if len(data["children"]) > 1 else (data["children"][0] if data["children"] else "Eurem Kind")
+ links_html = "".join([f"{l}<br>" for l in data["links"]])
+
+ final_result.append({
+ "E-Mail-Adresse Käufer": email,
+ "Name Käufer": data["parent_name"],
+ "Kindernamen": children_str,
+ "Anzahl Kinder": len(data["children"]),
+ "LinksHTML": links_html
+ })
+
+ progress_msg = f"Analyse fertig! {len(final_result)} Empfänger identifiziert."
+ if missing_links_count > 0:
+ progress_msg += f" (Hinweis: {missing_links_count} Kinder ignoriert, da Quick-Login-Link fehlt. Bitte vorher 'Daten abgleichen' drücken!)"
+
+ task_store[task_id] = {"""
+
+new_code = """ # 3. Aggregate results by Email
+ aggregation = {}
+ missing_links_count = 0
+
+ base_url = "https://kinderfoto-erding.fotograf.de" if account_type == "kiga" else "https://kinderfotos-erding.fotograf.de"
+
+ for c in candidates:
+ email = c.email_eltern.lower()
+
+ # Skip if this email already has a purchase for ANOTHER child
+ if exclude_purchased_emails and email in purchased_emails:
+ continue
+
+ if email not in aggregation:
+ aggregation[email] = {
+ "email": email,
+ "parent_name": c.vorname_eltern if c.vorname_eltern else "Liebe Eltern",
+ "children": [],
+ "links": []
+ }
+
+ # Add child name
+ child_name = c.vorname_kind or ""
+ child_label = "Familienbilder" if child_name.lower() == "familie" else child_name
+ if child_label and child_label not in aggregation[email]["children"]:
+ aggregation[email]["children"].append(child_label)
+
+ # Determine best link
+ if c.quick_login_url and "/gc/" in c.quick_login_url:
+ # Use scraped direct link if available
+ final_link = c.quick_login_url
+ link_text = f"Fotos von {child_label}"
+ else:
+ # Fallback to direct code navigation link
+ final_link = f"{base_url}/login/{c.zugangscode}"
+ link_text = f"Fotos von {child_label}"
+ missing_links_count += 1
+
+ html_link = f'<a href="{final_link}">{link_text}</a>'
+ if html_link not in aggregation[email]["links"]:
+ aggregation[email]["links"].append(html_link)
+
+ # 4. Format for Supermailer/Gmail
+ final_result = []
+ for email, data in aggregation.items():
+ children_str = " und ".join(data["children"]) if len(data["children"]) > 1 else (data["children"][0] if data["children"] else "Eurem Kind")
+ links_html = "".join([f"{l}<br>" for l in data["links"]])
+
+ final_result.append({
+ "E-Mail-Adresse Käufer": email,
+ "Name Käufer": data["parent_name"],
+ "Kindernamen": children_str,
+ "Anzahl Kinder": len(data["children"]),
+ "LinksHTML": links_html
+ })
+
+ progress_msg = f"Analyse fertig! {len(final_result)} Empfänger identifiziert."
+ if missing_links_count > 0:
+ progress_msg += f" (Hinweis: {missing_links_count} Links wurden generiert, da sie noch nicht gescraped wurden.)"
+
+ task_store[task_id] = {"""
+
+if old_code in content:
+ content = content.replace(old_code, new_code)
+ with open('fotograf-de-scraper/backend/main.py', 'w') as f:
+ f.write(content)
+ print("Patched successfully")
+else:
+ print("Old code not found")