Compare commits
61 Commits
e5add77a50
...
main
| Author | SHA1 | Date | |
|---|---|---|---|
| d90d856620 | |||
| 7cb29cd8da | |||
| 991e338d67 | |||
| db94eca626 | |||
| 1ae8b3e353 | |||
| 02b17d53ea | |||
| d49f6d51f4 | |||
| 995b3ff829 | |||
| 472f392107 | |||
| e6061868e6 | |||
| 2a85cab4ab | |||
| c458a9c26c | |||
| aa3ff2998f | |||
| 9645859091 | |||
| 8d7f5cbbb6 | |||
| 806fa199ce | |||
| 19247280a0 | |||
| da4995bb3e | |||
| 080a202a9f | |||
| ba06e6d033 | |||
| 3f6b27a89f | |||
| 9b4f80a44f | |||
| 1f5805e64c | |||
| 929d92afeb | |||
| 1a3568f69e | |||
| 0cca30a956 | |||
| 2592607b04 | |||
| f148f40d9e | |||
| 1dd4c6b6da | |||
| daa3637ef6 | |||
| 5e0186c534 | |||
| c2f614d7ad | |||
| e8c2cdfff9 | |||
| 2cfda1da57 | |||
| 4baece46bb | |||
| 5d28a34f02 | |||
| 831ec7e71c | |||
| 229ad10e6b | |||
| 43658c2921 | |||
| fa68e42f5f | |||
| e411addfe2 | |||
| 53ccdd2b69 | |||
| 6bf9260923 | |||
| 7c5b584890 | |||
| a128ca9921 | |||
| 965696b1ca | |||
| 787002532d | |||
| ba8565e59a | |||
| 539f30bdb7 | |||
| 7546b4021d | |||
| 1c98566e93 | |||
| d3987ea20b | |||
| 02a1ecb53d | |||
| 70adecae58 | |||
| 066470e82c | |||
| 106cfe6e33 | |||
| d4b20eb113 | |||
| f72719b9a4 | |||
| c62db8a2ef | |||
| 567dd9a2ca | |||
| ec877ef65b |
@@ -1 +1 @@
|
|||||||
{"task_id": "32788f42-8544-80e1-a13a-c26114cf9b34", "token": "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", "readme_path": "readme.md", "session_start_time": "2026-03-21T09:05:51.504328"}
|
{"task_id": "34288f42-8544-800e-b866-dfcbc22bd4e5", "token": "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", "readme_path": "readme.md", "session_start_time": "2026-05-04T06:53:43.831976"}
|
||||||
@@ -0,0 +1,126 @@
|
|||||||
|
"Child ID";"Vorname Kind";"Nachname Kind";Geschlecht;Geburtsdatum;Einrichtung;Gruppe;Lehrer;"Gültig bis";Bezeichner;Referenz;Straße;PLZ;Ort;Staat;"Geschwisterkind Vorname (1)";"Geschwisterkind Nachname (1)";"Geschwisterkind Vorname (2)";"Geschwisterkind Nachname (2)";Einzelfotos;Gruppenfotos;"Familie / Geschwister";Foto;"Vom Kunden ausgewählt";"Vorname Eltern (1)";"Nachname Eltern (1)";"Email der Eltern (1)";"Telefonnummer der Eltern (1)";"Vorname Eltern (2)";"Nachname Eltern (2)";"Email der Eltern (2)";"Telefonnummer der Eltern (2)";"Zugangscode (1)";"Barcode (1)";"Logins (1)";"Zugangscode (2)";"Barcode (2)";"Logins (2)";Bestellungen
|
||||||
|
49663204;Fares;AL-KHADHER;;;;Bären;;;;;;;;;;;;;Ja;Ja;Nein;IMG_8069.jpg;Nein;Familie;"Al Khadher";Husseinalkhadher8@gmail.com;;;;;;9CKZ9FRB;859242970856177;2;;;0;0
|
||||||
|
49656019;Entoni;Altoni;;;;Bären;;;;;;;;;;;;;Ja;Ja;Nein;IMG_7999.jpg;Nein;Yuliia;Altoni;julichka.altony@gmail.com;;;;;;8PH6DT65;590974350307121;1;;;0;1
|
||||||
|
49659604;"Rashane Tyler";Asasana;;;;Bären;;;;;;;;;;;;;Ja;Ja;Nein;IMG_8048.jpg;Nein;Penphaka;Asasana;asa-sa-na@hotmail.com;;;;;;57VSYGKZ;742438249864838;2;;;0;0
|
||||||
|
49955890;Yunus;Batuge;;;;Bären;;;;;;;;;;;;;Ja;Ja;"Familien- / Geschwisterfotos";IMG_7831.jpg;Nein;Sümeyra;Senyurt;senyurtsumeyra7@gmail.com;;;;;;Y9LFLVQ6;807433233164209;15;;;0;1
|
||||||
|
49652597;Josip;Bungic;;;;Bären;;;;;;;;;;;;;Nein;Ja;Nein;;Nein;Mirela;"Marijan Bungic";m.bungic@web.de;;;;;;JYCSXJTX;967076735653408;0;;;0;0
|
||||||
|
50064753;Hazal;Cicek;;;;Bären;;;;;;;;;;;;;Ja;Ja;Nein;IMG_7714.jpg;Nein;Familie;Cicek;uelke.ardak@hotmail.de;;;;;;VNFYB935;306933685807165;0;;;0;0
|
||||||
|
49601392;Levi;Damia;;;;Bären;;;;;;;;;;;;;Ja;Ja;Nein;;Nein;Louisa;Damian;damian.louisa@web.de;;;;;;3VP45KKX;107953830470294;0;;;0;0
|
||||||
|
50314236;Levi;Damian;;;;Bären;;;;;;;;;;;;;Ja;Ja;Nein;IMG_7936.jpg;Nein;Louisa;Damian;damian.louisa@web.de;;;;;;DVXDP3PH;677393543795054;1;;;0;1
|
||||||
|
50211537;Gökhan;Dogan;;;;Bären;;;;;;;;;Eray;Dogan;;;Ja;Ja;"Familien- / Geschwisterfotos";IMG_8096.jpg;Nein;Familie;Dogan;goeksel_dogan@web.de;;;;;;V9FBBSMP;152677334111372;2;;;0;0
|
||||||
|
50220839;"Magdalena Personal";Forster;;;;Bären;;;;;;;;;;;;;Ja;Ja;Nein;;Nein;Magdalena;Forster;magdalenaforster@aol.de;;;;;;7NG54JNY;74394435366624;0;;;0;0
|
||||||
|
49629572;Philipp;Gabauer;;;;Bären;;;;;;;;;;;;;Ja;Ja;"Familien- / Geschwisterfotos";;Nein;Familie;Gabauer;luzia.gabauer@web.de;;;;;;4HP8FX8K;20692770537744;0;;;0;0
|
||||||
|
49652592;Emilia;"Herrmann Rodriguez";;;;Bären;;;;;;;;;;;;;Nein;Ja;Nein;;Nein;Lukas;Herrmann;Familie.Herrmann.Rodriguez@web.de;;;;;;7X7Y4BKV;73798042174951;0;;;0;0
|
||||||
|
50060415;Konstantin;Karl;;;;Bären;;;;;;;;;;;;;Ja;Ja;Nein;IMG_7806.jpg;Nein;Katharina;Karl;katharina_karl@mailbox.org;;;;;;XNCV6XM7;810263015266358;0;;;0;0
|
||||||
|
50060407;Paulina;Karl;;;;Bären;;;;;;;;;;;;;Ja;Ja;"Familien- / Geschwisterfotos";;Nein;Katharina;Karl;katharina_karl@mailbox.org;;;;;;HSKMY37G;607082088640959;0;;;0;0
|
||||||
|
49901894;Salomia;Karpenko;;;;Bären;;;;;;;;;Miroslav;Karpenko;;;Ja;Ja;Nein;IMG_7786.jpg;Nein;Familie;Karpenko;denis.k88@web.de;;;;;;4P7TJXJL;826081492713003;4;;;0;0
|
||||||
|
49654259;Jan;Klyszcz;;;;Bären;;;;;;;;;Christoph;Klyszcz;;;Ja;Ja;"Familien- / Geschwisterfotos";IMG_7859.jpg;Nein;Familie;Klyszcz;klyszcz.ewa92@gmail.com;;;;;;V9QQ3MHT;635050103722845;4;;;0;0
|
||||||
|
50220757;Personal;Lang;;;;Bären;;;;;;;;;;;;;Ja;Ja;Nein;;Nein;Susanne;Lang;susi67.sl@gmail.com;;;;;;J2B9F4FH;84529853902827;0;;;0;0
|
||||||
|
49663258;Tuldi;"Lennart & Hannes";;;;Bären;;;;;;;;;;;;;Ja;Ja;"Familien- / Geschwisterfotos";IMG_7915.jpg;Nein;Familie;Tuldi;olga_tuldi@yahoo.de;;;;;;Z7D4PJHV;628485247329265;10;;;0;0
|
||||||
|
49727295;Leonardo;Liquori;;;;Bären;;;;;;;;;;;;;Ja;Ja;"Familien- / Geschwisterfotos";IMG_7654.jpg;Nein;Elisa;Mandelli;e.mandelli1@icloud.com;;;;;;CZBSHZXD;112332574322427;3;;;0;0
|
||||||
|
49694659;Mara;Schmid;;;;Bären;;;;;;;;;;;;;Ja;Ja;"Familien- / Geschwisterfotos";IMG_7737.jpg;Nein;Familie;Schmid;izuther@googlemail.com;;;;;;M2QWP8PN;693636596918854;4;;;0;0
|
||||||
|
49553844;Niklas;Schulze;;;;Bären;;;;;;;;;;;;;Ja;Ja;Nein;IMG_8027.jpg;Nein;Kristina;Schulze;m-k-ammersdorf@gmx.de;;Kristina;Schulze;kristina-anna-schulze@web.de;;TDJ47324;213569357182904;4;;;0;1
|
||||||
|
49605342;Zoe;Seget;;;;Bären;;;;;;;;;;;;;Ja;Ja;"Familien- / Geschwisterfotos";IMG_7764.jpg;Nein;Sandra;Seget;sandra.seget@hotmail.de;;;;;;GTY9QMWP;335161472735404;3;;;0;1
|
||||||
|
50211319;Valentin;Slugocki;;;;Bären;;;;;;;;;;;;;Ja;Ja;Nein;;Nein;Bartek;Slugocki;bartek@slugocki.de;;;;;;24632X5S;557733375991183;0;;;0;0
|
||||||
|
50219244;"Hannes & Lennart";Tuldi;;;;Bären;;;;;;;;;;;;;Ja;Ja;"Familien- / Geschwisterfotos";IMG_7959.jpg;Nein;Familie;Tuldi;olga_tuldi@yahoo.de;;;;;;SZ7D82KL;195111473283743;9;;;0;1
|
||||||
|
49697372;Xaver;Wego;;;;Bären;;;;;;;;;;;;;Ja;Ja;Nein;IMG_7883.jpg;Nein;Luisa;Wego;luisa.wego@web.deq;;Luisa;Wego;luisa.wego@web.de;;JT22FL8Y;470837065491819;0;6XJVMHVQ;423156711490859;6;1
|
||||||
|
49655774;Maximilian;Wild;;;;Bären;;;;;;;;;;;;;Ja;Ja;"Familien- / Geschwisterfotos";IMG_7977.jpg;Nein;Familie;Wild;wildramona@gmx.de;;;;;;XYCPRYX3;841975500351515;6;;;0;0
|
||||||
|
49613372;Anton;Adelberger;;;;Bienen;;;;;;;;;;;;;Ja;Ja;Nein;IMG_8367.jpg;Nein;Catharina;Adelberger;catharina.adelberger@web.de;;;;;;GCSBWRRG;255252947890362;5;;;0;0
|
||||||
|
49655260;Ludwig;Baumgartner;;;;Bienen;;;;;;;;;;;;;Ja;Ja;"Familien- / Geschwisterfotos";;Nein;Franziska;Baumgartner;franziwild@gmx.de;;;;;;JJDBSW2Y;1346656807317;0;;;0;0
|
||||||
|
49652607;Josip;Bungic;;;;Bienen;;;;;;;;;;;;;Nein;Ja;Nein;;Nein;Mirela;"Marijan Bungic";m.bungic@web.de;;;;;;XR3LFNTW;896957772773017;1;;;0;0
|
||||||
|
50064781;Havin;Cicek;;;;Bienen;;;;;;;;;;;;;Ja;Ja;"Familien- / Geschwisterfotos";IMG_8198.jpg;Nein;Familie;Cicek;uelke.ardak@hotmail.de;;;;;;8X8GYWR3;847511991706072;1;;;0;0
|
||||||
|
50055747;Mattea;Fusarri;;;;Bienen;;;;;;;;;;;;;Ja;Ja;Nein;IMG_8151.jpg;Nein;Nadia;Fusarri;nadia.fusarri@gmx.de;;;;;;ZRGWQM3W;439731985455440;3;;;0;0
|
||||||
|
50247238;Maliya;Gildner;;;;Bienen;;;;;;;;;;;;;Ja;Ja;Nein;IMG_8341.jpg;Nein;Alisa;Gildner;gildner31@gmail.com;;;;;;KRTVJ4M5;910013114016383;2;;;0;0
|
||||||
|
49825283;Kilian;Hartl;;;;Bienen;;;;;;;;;;;;;Ja;Ja;"Familien- / Geschwisterfotos";IMG_8410.jpg;Nein;Familie;Schreibauer;a.schreibauer@gmail.com;;;;;;NM92G8PK;534850382393461;3;;;0;0
|
||||||
|
50153154;"Elara Carolina";Hintermaier;;;;Bienen;;;;;;;;;;;;;Ja;Ja;Nein;IMG_8289.jpg;Nein;Adriana;Hintermaier;adri.shunka@gmail.com;;;;;;N6G67PPZ;233647866343524;1;;;0;0
|
||||||
|
49700913;Luka;Loncar;;;;Bienen;;;;;;;;;;;;;Ja;Ja;Nein;IMG_8435.jpg;Nein;Szilvia;Palinkas;silvijapalinkas@yahoo.com;;;;;;7FGK48GQ;345687401851686;5;;;0;1
|
||||||
|
50056989;Elias;Minksz;;;;Bienen;;;;;;;;;;;;;Ja;Ja;"Familien- / Geschwisterfotos";;Nein;Carolin;Dirndorfer;c.dirndorfer@gmx.de;;;;;;GZ3DQSPL;632143387747513;0;;;0;0
|
||||||
|
49770856;Anna;Nguyen;;;;Bienen;;;;;;;;;;;;;Ja;Ja;Nein;IMG_8219.jpg;Nein;Thi;"Hien Minh Nguyen";nthm30121996@gmail.com;;Anna;Nguyen;ging318@gmail.com;;CM9CMLBJ;122574286373832;2;;;0;0
|
||||||
|
50180008;Ilia;Nickl;;;;Bienen;;;;;;;;;;;;;Ja;Ja;"Familien- / Geschwisterfotos";IMG_8390.jpg;Nein;Familie;Nickl;cela990@hotmail.com;;;;;;KHPY6LQV;652142151577775;9;;;0;0
|
||||||
|
49575500;Mika;Rubinstein;;;;Bienen;;;;;;;;;Mia;Rubinstein;;;Ja;Ja;"Familien- / Geschwisterfotos";IMG_8548.jpg;Nein;Familie;Rubinstein;n.d.rubinstein@googlemail.com;;;;;;K7PX4J8Y;415300008608215;2;;;0;0
|
||||||
|
49652538;Alina;Schillinger;;;;Bienen;;;;;;;;;;;;;Ja;Ja;"Familien- / Geschwisterfotos";IMG_8244.jpg;Nein;Familie;Schillinger;schneggeno1@web.de;;;;;;X27P5L9Q;180935518874486;3;;;0;0
|
||||||
|
49663277;Malia;Schlesinger;;;;Bienen;;;;;;;;;;;;;Ja;Ja;"Familien- / Geschwisterfotos";IMG_8126.jpg;Nein;Familie;Schlesinger;stefanie2011@gmx.net;;;;;;6672SN99;377539049099605;2;;;0;0
|
||||||
|
50257156;Marie;Schöberl;;;;Bienen;;;;;;;;;;;;;Ja;Ja;Nein;IMG_8265.jpg;Nein;Michaela;Schöberl;michaela.schoeberl@gmx.de;;;;;;BKZWFCS4;504469516218803;2;;;0;0
|
||||||
|
50057519;Letizia;Stachanczyk;;;;Bienen;;;;;;;;;Leonardo;Stachanczyk;;;Ja;Ja;"Familien- / Geschwisterfotos";;Nein;Familie;Stachanczyk;Suzanna.Stachanczyk@web.de;;;;;;C7GX6BM2;268376387434609;0;;;0;0
|
||||||
|
49919594;Ela;Torres;;;;Bienen;;;;;;;;;;;;;Ja;Ja;"Familien- / Geschwisterfotos";IMG_8313.jpg;Nein;Familie;Torres;ftorrestapia@me.com;;;;;;YGL954RX;63682236385188;4;;;0;0
|
||||||
|
49837810;Maximilian;Weber;;;;Bienen;;;;;;;;;;;;;Ja;Ja;"Familien- / Geschwisterfotos";IMG_8522.jpg;Nein;Familie;Weber;mail.weber.melanie@googlemail.com;;;;;;4QS8GPWR;7954462978689;3;;;0;0
|
||||||
|
50006492;Musa;Yilmaz;;;;Bienen;;;;;;;;;Ömer;Yilmaz;;;Ja;Ja;"Familien- / Geschwisterfotos";IMG_8466.jpg;Nein;Familie;Yilmaz;merve-ymz@hotmail.com;;;;;;82YTD8FK;912359706713774;4;;;0;0
|
||||||
|
49652523;Nina;Zhang;;;;Bienen;;;;;;;;;;;;;Ja;Ja;Nein;IMG_8174.jpg;Nein;Hua;Zhang;zhanghua0411@hotmail.com;;;;;;DTWR882P;201986576299456;3;;;0;1
|
||||||
|
49663112;Elias;Bonifati;;;;Fische;;;;;;;;;;;;;Ja;Ja;"Familien- / Geschwisterfotos";IMG_6891.jpg;Nein;Familie;Misiano;bonifati@hotmail.de;;;;;;BVMZFPHK;582378219071480;5;;;0;0
|
||||||
|
49652608;Mihael;Bungic;;;;Fische;;;;;;;;;;;;;Nein;Ja;Nein;;Nein;Mirela;"Marijan Bungic";m.bungic@web.de;;;;;;2664S6D3;848993713584313;1;;;0;0
|
||||||
|
50248018;Alina;Catak;;;;Fische;;;;;;;;;;;;;Ja;Ja;Nein;IMG_6424.jpg;Nein;Catak;Admir;catakadmir@gmail.com;;;;;;ML42KL42;532022002681433;7;;;0;1
|
||||||
|
50258123;Jakov;Ceko;;;;Fische;;;;;;;;;;;;;Ja;Ja;Nein;IMG_7294.jpg;Nein;Familie;Ceko;brankoceko91@gmail.com;;;;;;CVBYPKBV;550993218062367;0;;;0;0
|
||||||
|
50258100;Luka;Ceko;;;;Fische;;;;;;;;;;;;;Ja;Ja;Nein;IMG_7043.jpg;Nein;Familie;Ceko;brankoceko91@gmail.com;;;;;;DJKP3CBY;120492680122927;0;;;0;0
|
||||||
|
50222105;"Saide Mira";Cildir;;;;Fische;;;;;;;;;;;;;Ja;Ja;Nein;IMG_6617.jpg;Nein;Hasret;Cildir;hasretcildir@web.de;;;;;;M9B773FR;568587367870302;1;;;0;0
|
||||||
|
50218962;Valentin;Gabauer;;;;Fische;;;;;;;;;;;;;Ja;Ja;Nein;IMG_7004.jpg;Nein;Sabine;Gabauer;sabine.gabauer@gmx.de;;;;;;QXWD3ZNS;377128045906117;2;;;0;1
|
||||||
|
50079276;Anika;Gaßner;;;;Fische;;;;;;;;;;;;;Ja;Ja;"Familien- / Geschwisterfotos";IMG_6555.jpg;Nein;Familie;Karyakina;veronika20@hotmail.de;;;;;;3BSPHDHW;851569123151027;1;;;0;0
|
||||||
|
50211546;Kilian;Glück;;;;Fische;;;;;;;;;;;;;Ja;Ja;"Familien- / Geschwisterfotos";IMG_6960.jpg;Nein;Familie;Glück;katja_glueck@web.de;;;;;;2LGTH3VN;783740741149373;2;;;0;0
|
||||||
|
50282221;Lisa;Gumberger;;;;Fische;;;;;;;;;;;;;Ja;Ja;Nein;IMG_6470.jpg;Nein;Sarah;Gumberger;sarah.gumberger@gmx.de;;;;;;2PHHTXT9;383948464807600;3;;;0;1
|
||||||
|
49616818;Nadine;Hamed;;;;Fische;;;;;;;;;;;;;Ja;Ja;Nein;IMG_6648.jpg;Nein;;Nadine;sarasadaby@gmail.com;;;;;;V3KTTVNV;319851445592837;3;;;0;1
|
||||||
|
50208499;Noela;Islami;;;;Fische;;;;;;;;;;;;;Ja;Ja;"Familien- / Geschwisterfotos";IMG_6524.jpg;Nein;Familie;Islami;Zineta.islami@gmx.de;;;;;;CK5B7WWN;410922852620998;2;;;0;0
|
||||||
|
49901895;Miroslav;Karpenko;;;;Fische;;;;;;;;;Salomia;Karpenko;;;Ja;Ja;Nein;IMG_6848.jpg;Nein;Familie;Karpenko;denis.k88@web.de;;;;;;922GC8BH;915901506033102;1;;;0;0
|
||||||
|
50221939;Merjem;Kaukovic;;;;Fische;;;;;;;;;;;;;Ja;Ja;Nein;IMG_6502.jpg;Nein;Edita;Porcic-Kaukovic;editta1996@hotmail.com;;;;;;KGVKW8HZ;305550439054156;1;;;0;0
|
||||||
|
50288355;Frauke;Klinge;;;;Fische;;;;;;;;;;;;;Nein;Ja;Nein;;Nein;Frauke;Klinge;fs.klinge@t-online.de;;;;;;K4GCTQJG;948438313552204;0;;;0;0
|
||||||
|
49653461;Max;Krämer;;;;Fische;;;;;;;;;;;;;Ja;Ja;Nein;IMG_6778.jpg;Nein;Michael;Krämer;m.k326@web.de;;;;;;PF5DCT5N;929652051737843;2;;;0;1
|
||||||
|
49663714;Casper;Mettig;;;;Fische;;;;;;;;;;;;;Ja;Ja;"Familien- / Geschwisterfotos";;Nein;Familie;Roder;stephanie.roder@gmail.com;;;;;;4L2ZBL3M;296176221032781;6;;;0;0
|
||||||
|
50208680;Emre;Mujic;;;;Fische;;;;;;;;;;;;;Ja;Ja;Nein;IMG_6927.jpg;Nein;Amra;Mujic;amra.mujic95@gmail.com;;;;;;QVBTGS73;16951665504680;2;;;0;0
|
||||||
|
49635375;Mila;Nickl;;;;Fische;;;;;;;;;;;;;Ja;Ja;"Familien- / Geschwisterfotos";IMG_6685.jpg;Nein;Familie;Nickl;cela990@hotmail.com;;;;;;S4Z3HPZY;736840096508400;8;;;0;0
|
||||||
|
49575499;Mia;Rubinstein;;;;Fische;;;;;;;;;Mika;Rubinstein;;;Ja;Ja;"Familien- / Geschwisterfotos";IMG_6443.jpg;Nein;Familie;Rubinstein;n.d.rubinstein@googlemail.com;;;;;;BGGHXNLC;908242966462743;3;;;0;0
|
||||||
|
49786711;Zoe;Scholpp;;;;Fische;;;;;;;;;;;;;Ja;Ja;Nein;IMG_6721.jpg;Nein;Sabrina;Scholpp;sabrinasch1107@gmail.com;;;;;;CXMT4R9T;585861217320080;2;;;0;0
|
||||||
|
49755578;Valerie;Schultze;;;;Fische;;;;;;;;;;;;;Ja;Ja;Nein;IMG_6743.jpg;Nein;Anita;Schultze;anitajuliane.schultze@gmail.com;;;;;;4WVF24SR;499960849175102;2;;;0;0
|
||||||
|
50057518;Leonardo;Stachanczyk;;;;Fische;;;;;;;;;Letizia;Stachanczyk;;;Ja;Ja;"Familien- / Geschwisterfotos";IMG_7072.jpg;Nein;Familie;Stachanczyk;Suzanna.Stachanczyk@web.de;;;;;;BZC28W7T;900613948231467;7;;;0;0
|
||||||
|
50211898;Maya;Watanabe;;;;Fische;;;;;;;;;;;;;Ja;Ja;Nein;IMG_6591.jpg;Nein;Barbara;Watanabe;barbara.j@live.de;;;;;;ZPVJ8R5Q;341636130475078;1;;;0;0
|
||||||
|
50006491;Ömer;Yilmaz;;;;Fische;;;;;;;;;Musa;Yilmaz;;;Ja;Ja;"Familien- / Geschwisterfotos";IMG_6809.jpg;Nein;Familie;Yilmaz;merve-ymz@hotmail.com;;;;;;C4NB42PX;659038103936299;4;;;0;0
|
||||||
|
50078572;Aurelia;Adelsberger;;;;Spatzen;;;;;;;;;;;;;Ja;Ja;"Familien- / Geschwisterfotos";IMG_7493.jpg;Nein;Familie;Adelsberger;barbara.adelsberger@yahoo.de;;Christian;Godelmann;floke.com@gmail.com;;3NVRB2BM;230676178020824;1;;;0;0
|
||||||
|
50220084;Eymen;Baldir;;;;Spatzen;;;;;;;;;;;;;Ja;Ja;Nein;IMG_6406.jpg;Nein;Seda;Baldir;seda.ay@icloud.com;;;;;;JDG5CDT8;381447885366279;1;;;0;0
|
||||||
|
49602297;Magdalena;Bauer;;;;Spatzen;;;;;;;;;;;;;Ja;Ja;"Familien- / Geschwisterfotos";IMG_7463.jpg;Nein;Familie;Bauer;bonprix29@yahoo.de;;;;;;V7W5ZPVY;411274063112493;8;;;0;0
|
||||||
|
49992146;Zoe;Cajic;;;;Spatzen;;;;;;;;;;;;;Ja;Ja;Nein;IMG_7414.jpg;Nein;Amar;Cajic;amar.cajic@gmail.com;;;;;;JMZD8SNN;53184897942750;3;;;0;0
|
||||||
|
50057147;Mario;Cakic;;;;Spatzen;;;;;;;;;;;;;Ja;Ja;Nein;IMG_7182.jpg;Nein;Lucija;Zivkovic;lucija.zivkovic16@gmail.com;;;;;;D8SZH5XL;508565079338980;3;;;0;1
|
||||||
|
50211538;Eray;Dogan;;;;Spatzen;;;;;;;;;Gökhan;Dogan;;;Ja;Ja;"Familien- / Geschwisterfotos";IMG_7380.jpg;Nein;Familie;Dogan;goeksel_dogan@web.de;;;;;;LMLH64KS;61030672835452;2;;;0;0
|
||||||
|
49552513;Antonia;Freiwald;;;;Spatzen;;;;;;;;;;;;;Ja;Ja;Nein;IMG_7643.jpg;Nein;Stephanie;Freiwald;stephanie.freiwald@gmx.de;;;;;;4BV76XQS;224897626646913;1;;;0;1
|
||||||
|
49601982;Heidi;Götzberger;;;;Spatzen;;;;;;;;;;;;;Ja;Ja;"Familien- / Geschwisterfotos";IMG_7618.jpg;Nein;Familie;Götzberger;franziska.lanzinger@t-online.de;;;;;;KSH3Y552;141723213815881;2;;;0;0
|
||||||
|
50063666;Una;Hodzic;;;;Spatzen;;;;;;;;;;;;;Ja;Ja;Nein;IMG_7561.jpg;Nein;Hodžić;Aldin;menager21@hotmail.com;;;;;;9W4CYMRX;170368609363861;4;;;0;1
|
||||||
|
49603438;Liara;Honisch;;;;Spatzen;;;;;;;;;;;;;Ja;Ja;"Familien- / Geschwisterfotos";IMG_7519.jpg;Nein;Familie;Karayilan;yasemin.karayilan@yahoo.de;;;;;;MSNCXQ77;787504756287604;2;;;0;0
|
||||||
|
49623482;Matteo;Katterfeld;;;;Spatzen;;;;;;;;;;;;;Ja;Ja;"Familien- / Geschwisterfotos";IMG_7347.jpg;Nein;Familie;Ketterfeld;madlen.katterfeld@gmx.de;;;;;;YJ9MM349;888691047601122;6;;;0;0
|
||||||
|
49654260;Christoph;Klyszcz;;;;Spatzen;;;;;;;;;Jan;Klyszcz;;;Ja;Ja;"Familien- / Geschwisterfotos";IMG_7320.jpg;Nein;Familie;Klyszcz;klyszcz.ewa92@gmail.com;;;;;;V3KSRPDM;389595058391936;6;;;0;0
|
||||||
|
50056841;Ludwig;Lacen;;;;Spatzen;;;;;;;;;;;;;Ja;Ja;Nein;IMG_7246.jpg;Nein;Michael;Lacen;michael.lacen@gmx.de;;;;;;GJFNWHMY;205672235649590;2;;;0;1
|
||||||
|
50056690;Emilia;Rodriguez;;;;Spatzen;;;;;;;;;;;;;Ja;Ja;Nein;IMG_7591.jpg;Nein;Daniela;Rodriguez;daniela-hinz-82@gmx.de;;;;;;9LQLW7YV;289213156745302;1;;;0;1
|
||||||
|
49652595;Vaiana;Slaiman;;;;Spatzen;;;;;;;;;;;;;Ja;Ja;Nein;;Nein;;Slaiman;hadeer94hasan@web.de;;;;;;YB24BVQR;230552964517174;0;;;0;0
|
||||||
|
49838169;Raphael;Weber;;;;Spatzen;;;;;;;;;;;;;Ja;Ja;Nein;IMG_7214.jpg;Nein;Familie;Weber;mail.weber.melanie@googlemail.com;;;;;;W3VBKM3W;362639250675953;3;;;0;0
|
||||||
|
49906413;Ludwig;Welz;;;;Spatzen;;;;;;;;;;;;;Ja;Ja;"Familien- / Geschwisterfotos";IMG_7149.jpg;Nein;Familie;Welz;eva_welz@gmx.de;;;;;;VPJYZ48P;785597492180163;3;;;0;0
|
||||||
|
49726920;Amy;Wieters;;;;Spatzen;;;;;;;;;;;;;Ja;Ja;Nein;IMG_7443.jpg;Nein;Janine;Wieters;janine28@gmx.de;;;;;;69KFXLBD;921506261142206;4;;;0;1
|
||||||
|
50453287;Familie;Adelsberger;;;;;;;;;;;;;;;;;Ja;Ja;Nein;IMG_0728.jpg;Nein;Familie;Adelsberger;barbara.adelsberger@yahoo.de;;;;;;JVXV9T2M;916908422224646;1;;;0;1
|
||||||
|
50451311;Familie;"Al Khadher";;;;;;;;;;;;;;;;;Ja;Ja;Nein;IMG_0153.jpg;Nein;Familie;"Al Khadher";Husseinalkhadher8@gmail.com;;;;;;4VTSN5J6;437618998198555;2;;;0;1
|
||||||
|
50453454;Familie;Bauer;;;;;;;;;;;;;;;;;Ja;Ja;Nein;IMG_0353.jpg;Nein;Familie;Bauer;bonprix29@yahoo.de;;;;;;N8DZBDLW;169896993687826;2;;;0;0
|
||||||
|
50491788;Familie;Baumgartner;;;;;;;;;;;;;;;;;Ja;Ja;Nein;IMG_0451.jpg;Nein;Franziska;Baumgartner;franziwild@gmx.de;;;;;;ZRH36VRS;847462383118786;2;;;0;1
|
||||||
|
50463803;Amla;Bobo;;;;;;;;;;;;;;;;;Ja;Ja;Nein;IMG_7689.jpg;Nein;Xhulia;Xhelci;xhuliaxhelci@gmail.com;;;;;;C35CQ55V;950839783885570;1;;;0;0
|
||||||
|
50451304;Familie;Ceko;;;;;;;;;;;;;;;;;Ja;Ja;Nein;IMG_0099.jpg;Nein;Familie;Ceko;brankoceko91@gmail.com;;;;;;4NZXSHTW;798398153397116;2;;;0;0
|
||||||
|
50453898;Familie;Cicek;;;;;;;;;;;;;;;;;Ja;Ja;Nein;IMG_2693.jpg;Nein;Familie;Cicek;uelke.ardak@hotmail.de;;;;;;YC9KVV76;290706892284805;3;;;0;1
|
||||||
|
50453136;Familie;Dogan;;;;;;;;;;;;;;;;;Ja;Ja;Nein;IMG_9830.jpg;Nein;Familie;Dogan;goeksel_dogan@web.de;;;;;;GDBRDW6K;918773718877810;2;;;0;0
|
||||||
|
50453529;Familie;Gabauer;;;;;;;;;;;;;;;;;Ja;Ja;Nein;IMG_0286.jpg;Nein;Familie;Gabauer;luzia.gabauer@web.de;;;;;;SQHNHMH6;49585341392454;6;;;0;1
|
||||||
|
50452028;Familie;Glück;;;;;;;;;;;;;;;;;Ja;Ja;Nein;IMG_0406.jpg;Nein;Familie;Glück;katja_glueck@web.de;;;;;;7WTXJNDC;628871407778415;2;;;0;0
|
||||||
|
50453448;Familie;Götzberger;;;;;;;;;;;;;;;;;Ja;Ja;Nein;IMG_1262.jpg;Nein;Familie;Götzberger;franziska.lanzinger@t-online.de;;;;;;MPHRG7SP;954770009741299;2;;;0;1
|
||||||
|
50453434;Familie;Islami;;;;;;;;;;;;;;;;;Ja;Ja;Nein;IMG_1330.jpg;Nein;Familie;Islami;Zineta.islami@gmx.de;;;;;;KF7CNCYZ;620178179159158;2;;;0;0
|
||||||
|
50452019;Familie;Karayilan;;;;;;;;;;;;;;;;;Ja;Ja;Nein;IMG_0200.jpg;Nein;Familie;Karayilan;yasemin.karayilan@yahoo.de;;;;;;6H73JV6B;765446752804075;1;;;0;0
|
||||||
|
50453439;Familie;Karpenko;;;;;;;;;;;;;;;;;Ja;Ja;Nein;IMG_2825.jpg;Nein;Familie;Karpenko;denis.k88@web.de;;;;;;LR87SQ8C;963707649418838;1;;;0;0
|
||||||
|
50453495;Familie;Karyakina;;;;;;;;;;;;;;;;;Ja;Ja;Nein;IMG_0800.jpg;Nein;Familie;Karyakina;veronika20@hotmail.de;;;;;;RYK4BQLQ;638219110542782;1;;;0;0
|
||||||
|
50453488;Familie;Ketterfeld;;;;;;;;;;;;;;;;;Ja;Ja;Nein;IMG_1211.jpg;Nein;Familie;Ketterfeld;madlen.katterfeld@gmx.de;;;;;;PLX9G4V3;117752011222601;6;;;0;1
|
||||||
|
50453446;Familie;Klyszcz;;;;;;;;;;;;;;;;;Ja;Ja;Nein;IMG_2740.jpg;Nein;Familie;Klyszcz;klyszcz.ewa92@gmail.com;;;;;;LZR8WFP9;874820410323668;9;;;0;1
|
||||||
|
50452151;Familie;Misiano;;;;;;;;;;;;;;;;;Ja;Ja;Nein;IMG_1577.jpg;Nein;Familie;Misiano;bonifati@hotmail.de;;;;;;7ZW9V666;394112462489259;5;;;0;1
|
||||||
|
50451284;Familie;Nickl;;;;;;;;;;;;;;;;;Ja;Ja;Nein;IMG_1054.jpg;Nein;Familie;Nickl;cela990@hotmail.com;;;;;;35CH589Q;438824910404667;8;;;0;1
|
||||||
|
50452230;Familie;Roder;;;;;;;;;;;;;;;;;Ja;Ja;Nein;IMG_1138.jpg;Nein;Familie;Roder;stephanie.roder@gmail.com;;;;;;848D3SWY;745487201848290;6;;;0;0
|
||||||
|
50452913;Familie;Rubinstein;;;;;;;;;;;;;;;;;Ja;Ja;Nein;IMG_0041.jpg;Nein;Familie;Rubinstein;n.d.rubinstein@googlemail.com;;;;;;G9H8YFC4;180523151134386;1;;;0;1
|
||||||
|
50453768;Familie;Schillinger;;;;;;;;;;;;;;;;;Ja;Ja;Nein;IMG_1404.jpg;Nein;Familie;Schillinger;schneggeno1@web.de;;;;;;SQJSP49C;593384265020703;3;;;0;1
|
||||||
|
50452236;Familie;Schlesinger;;;;;;;;;;;;;;;;;Ja;Ja;Nein;IMG_1512.jpg;Nein;Familie;Schlesinger;stefanie2011@gmx.net;;;;;;946G6HJH;269413107409936;3;;;0;1
|
||||||
|
50453894;Familie;Schmid;;;;;;;;;;;;;;;;;Ja;Ja;Nein;IMG_0625.jpg;Nein;Familie;Schmid;izuther@googlemail.com;;;;;;W7W8P32C;486167508950250;4;;;0;1
|
||||||
|
50452248;Familie;Schreibauer;;;;;;;;;;;;;;;;;Ja;Ja;Nein;IMG_0671.jpg;Nein;Familie;Schreibauer;a.schreibauer@gmail.com;;;;;;97R4TRBC;130440825414681;3;;;0;0
|
||||||
|
50452273;Familie;Stachanczyk;;;;;;;;;;;;;;;;;Ja;Ja;Nein;IMG_9974.jpg;Nein;Familie;Stachanczyk;Suzanna.Stachanczyk@web.de;;;;;;C334SSSL;733864213043388;5;;;0;0
|
||||||
|
50453485;Familie;Torres;;;;;;;;;;;;;;;;;Ja;Ja;Nein;IMG_0967.jpg;Nein;Familie;Torres;ftorrestapia@me.com;;;;;;PCQ4CNV9;553742663210606;4;;;0;0
|
||||||
|
50452252;Familie;Tuldi;;;;;;;;;;;;;;;;;Ja;Ja;Nein;IMG_2788.jpg;Nein;Familie;Tuldi;olga_tuldi@yahoo.de;;;;;;B99BYYYF;657381798122682;11;;;0;0
|
||||||
|
50452022;Familie;Weber;;;;;;;;;;;;;;;;;Ja;Ja;Nein;IMG_0984.jpg;Nein;Familie;Weber;mail.weber.melanie@googlemail.com;;;;;;7954G4C5;820357028620317;3;;;0;1
|
||||||
|
50451320;Familie;Welz;;;;;;;;;;;;;;;;;Ja;Ja;Nein;IMG_1459.jpg;Nein;Familie;Welz;eva_welz@gmx.de;;;;;;69N5WYFK;952025141929986;3;;;0;1
|
||||||
|
50452419;Familie;Wild;;;;;;;;;;;;;;;;;Ja;Ja;Nein;IMG_0539.jpg;Nein;Familie;Wild;wildramona@gmx.de;;;;;;DVXKKJCZ;789239059675168;9;;;0;1
|
||||||
|
50452462;Familie;Wolf;;;;;;;;;;;;;;;;;Ja;Ja;Nein;IMG_0841.jpg;Nein;Familie;Wolf;anjamichi77@gmail.com;;;;;;FXPLQYH9;784676508389646;1;;;0;0
|
||||||
|
50410050;Jonas;Wolf;;;;;;;;;;;;;;;;;Ja;Ja;Nein;IMG_8489.jpg;Nein;Familie;Wolf;anjamichi77@gmail.com;;;;;;Q52NYB4N;18810570193338;0;;;0;0
|
||||||
|
50453882;Familie;Yilmaz;;;;;;;;;;;;;;;;;Ja;Ja;Nein;IMG_9907.jpg;Nein;Familie;Yilmaz;merve-ymz@hotmail.com;;;;;;TXK86QSB;467856804734432;4;;;0;0
|
||||||
|
49655787;Joseph;Wild;;;;Spatzen;;;;;;;;;;;;;Ja;Ja;Nein;IMG_7119.jpg;Nein;Familie;Wild;wildramona@gmx.de;;;;;;8C56662R;226166391326912;5;;;0;0
|
||||||
|
25
check_db_links.py
Normal file
25
check_db_links.py
Normal file
@@ -0,0 +1,25 @@
|
|||||||
|
import sqlite3
|
||||||
|
import os
|
||||||
|
|
||||||
|
db_path = "/app/fotograf-de-scraper/backend/data/fotograf_jobs.db"
|
||||||
|
if not os.path.exists(db_path):
|
||||||
|
print(f"Database not found at {db_path}")
|
||||||
|
else:
|
||||||
|
conn = sqlite3.connect(db_path)
|
||||||
|
cursor = conn.cursor()
|
||||||
|
|
||||||
|
# Check candidates missing links for the current job
|
||||||
|
job_id = "576228454"
|
||||||
|
cursor.execute("""
|
||||||
|
SELECT COUNT(*)
|
||||||
|
FROM job_participants
|
||||||
|
WHERE job_id = ?
|
||||||
|
AND has_orders = 0
|
||||||
|
AND digital_package_ordered = 0
|
||||||
|
AND logins <= 5
|
||||||
|
AND quick_login_url IS NULL
|
||||||
|
""", (job_id,))
|
||||||
|
missing = cursor.fetchone()[0]
|
||||||
|
print(f"Missing links for candidates in job {job_id}: {missing}")
|
||||||
|
|
||||||
|
conn.close()
|
||||||
9
check_tables.py
Normal file
9
check_tables.py
Normal file
@@ -0,0 +1,9 @@
|
|||||||
|
import sqlite3
|
||||||
|
|
||||||
|
db_path = "/app/fotograf-de-scraper/backend/data/fotograf_jobs.db"
|
||||||
|
conn = sqlite3.connect(db_path)
|
||||||
|
cursor = conn.cursor()
|
||||||
|
cursor.execute("SELECT name FROM sqlite_master WHERE type='table';")
|
||||||
|
tables = cursor.fetchall()
|
||||||
|
print(f"Tables: {[t[0] for t in tables]}")
|
||||||
|
conn.close()
|
||||||
@@ -133,18 +133,18 @@
|
|||||||
</header>
|
</header>
|
||||||
|
|
||||||
<div class="container">
|
<div class="container">
|
||||||
<!-- B2B Marketing Assistant -->
|
<!-- B2B Marketing Assistant (Inactive)
|
||||||
<div class="card">
|
<div class="card">
|
||||||
<span class="card-icon">🚀</span>
|
<span class="card-icon">🚀</span>
|
||||||
<h2>B2B Marketing Assistant</h2>
|
<h2>B2B Marketing Assistant</h2>
|
||||||
<p>
|
<p>
|
||||||
KI-gestützte Analyse von Unternehmens-Websites zur Erstellung von Personas, Pain-Points und Marketing-Botschaften.
|
KI-gestützte Analyse von Unternehmens-Websites zur Erstellung von Personas, Pain-Points und Marketing-Botschaften.
|
||||||
</p>
|
</p>
|
||||||
<!-- WICHTIG: Relativer Link für Reverse Proxy -->
|
|
||||||
<a href="/b2b/" class="btn">Starten →</a>
|
<a href="/b2b/" class="btn">Starten →</a>
|
||||||
</div>
|
</div>
|
||||||
|
-->
|
||||||
|
|
||||||
<!-- General Market Intelligence -->
|
<!-- General Market Intelligence (Inactive)
|
||||||
<div class="card">
|
<div class="card">
|
||||||
<span class="card-icon">📊</span>
|
<span class="card-icon">📊</span>
|
||||||
<h2>Market Intelligence</h2>
|
<h2>Market Intelligence</h2>
|
||||||
@@ -152,22 +152,22 @@
|
|||||||
Allgemeine Marktanalyse und Recherche-Tool.
|
Allgemeine Marktanalyse und Recherche-Tool.
|
||||||
Nutzt Web-Scraping und KI für tiefe Einblicke.
|
Nutzt Web-Scraping und KI für tiefe Einblicke.
|
||||||
</p>
|
</p>
|
||||||
<!-- WICHTIG: Relativer Link für Reverse Proxy -->
|
|
||||||
<a href="/market/" class="btn">Starten →</a>
|
<a href="/market/" class="btn">Starten →</a>
|
||||||
</div>
|
</div>
|
||||||
|
-->
|
||||||
|
|
||||||
<!-- GTM Architect -->
|
<!-- GTM Architect (Inactive)
|
||||||
<div class="card">
|
<div class="card">
|
||||||
<span class="card-icon">🏛️</span>
|
<span class="card-icon">🏛️</span>
|
||||||
<h2>GTM Architect</h2>
|
<h2>GTM Architect</h2>
|
||||||
<p>
|
<p>
|
||||||
Entwickelt eine komplette Go-to-Market-Strategie für neue technische Produkte, von der Analyse bis zum Sales-Kit.
|
Entwickelt eine komplette Go-to-Market-Strategie für neue technische Produkte, von der Analyse bis zum Sales-Kit.
|
||||||
</p>
|
</p>
|
||||||
<!-- WICHTIG: Relativer Link für Reverse Proxy -->
|
|
||||||
<a href="/gtm/" class="btn">Starten →</a>
|
<a href="/gtm/" class="btn">Starten →</a>
|
||||||
</div>
|
</div>
|
||||||
|
-->
|
||||||
|
|
||||||
<!-- Content Engine -->
|
<!-- Content Engine (Inactive)
|
||||||
<div class="card">
|
<div class="card">
|
||||||
<span class="card-icon">✍️</span>
|
<span class="card-icon">✍️</span>
|
||||||
<h2>Content Engine</h2>
|
<h2>Content Engine</h2>
|
||||||
@@ -176,19 +176,19 @@
|
|||||||
</p>
|
</p>
|
||||||
<a href="/content/" class="btn">Starten →</a>
|
<a href="/content/" class="btn">Starten →</a>
|
||||||
</div>
|
</div>
|
||||||
|
-->
|
||||||
|
|
||||||
<!-- Company Explorer (Robotics) -->
|
<!-- Company Explorer -->
|
||||||
<div class="card">
|
<div class="card">
|
||||||
<span class="card-icon">🤖</span>
|
<span class="card-icon">🤖</span>
|
||||||
<h2>Company Explorer</h2>
|
<h2>Company Explorer</h2>
|
||||||
<p>
|
<p>
|
||||||
Das zentrale CRM-Data-Mining Tool. Importieren, Deduplizieren und Anreichern von Firmenlisten mit Fokus auf Robotik-Potential.
|
Das zentrale CRM-Data-Mining Tool. Importieren, Deduplizieren und Anreichern von Firmenlisten mit Fokus auf Robotik-Potential.
|
||||||
</p>
|
</p>
|
||||||
<!-- Jetzt direkt zum Frontend -->
|
|
||||||
<a href="/ce/" class="btn">Starten →</a>
|
<a href="/ce/" class="btn">Starten →</a>
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
<!-- Competitor Analysis Agent -->
|
<!-- Competitor Analysis Agent (Inactive)
|
||||||
<div class="card">
|
<div class="card">
|
||||||
<span class="card-icon">⚔️</span>
|
<span class="card-icon">⚔️</span>
|
||||||
<h2>Competitor Analysis</h2>
|
<h2>Competitor Analysis</h2>
|
||||||
@@ -197,8 +197,9 @@
|
|||||||
</p>
|
</p>
|
||||||
<a href="/competitor/" class="btn">Starten →</a>
|
<a href="/competitor/" class="btn">Starten →</a>
|
||||||
</div>
|
</div>
|
||||||
|
-->
|
||||||
|
|
||||||
<!-- Lead Engine: TradingTwins -->
|
<!-- Lead Engine: TradingTwins (Inactive)
|
||||||
<div class="card">
|
<div class="card">
|
||||||
<span class="card-icon">📈</span>
|
<span class="card-icon">📈</span>
|
||||||
<h2>Lead Engine: TradingTwins</h2>
|
<h2>Lead Engine: TradingTwins</h2>
|
||||||
@@ -207,6 +208,7 @@
|
|||||||
</p>
|
</p>
|
||||||
<a href="/lead/" class="btn" target="_blank">Starten →</a>
|
<a href="/lead/" class="btn" target="_blank">Starten →</a>
|
||||||
</div>
|
</div>
|
||||||
|
-->
|
||||||
|
|
||||||
<!-- Meeting Assistant (Transcription) -->
|
<!-- Meeting Assistant (Transcription) -->
|
||||||
<div class="card">
|
<div class="card">
|
||||||
@@ -218,7 +220,7 @@
|
|||||||
<a href="/tr/" class="btn">Starten →</a>
|
<a href="/tr/" class="btn">Starten →</a>
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
<!-- Heatmap Tool -->
|
<!-- Heatmap Tool (Inactive)
|
||||||
<div class="card">
|
<div class="card">
|
||||||
<span class="card-icon">🗺️</span>
|
<span class="card-icon">🗺️</span>
|
||||||
<h2>Heatmap Tool</h2>
|
<h2>Heatmap Tool</h2>
|
||||||
@@ -227,10 +229,21 @@
|
|||||||
</p>
|
</p>
|
||||||
<a href="/heatmap/" class="btn">Starten →</a>
|
<a href="/heatmap/" class="btn">Starten →</a>
|
||||||
</div>
|
</div>
|
||||||
|
-->
|
||||||
|
|
||||||
|
<!-- Fotograf.de Scraper -->
|
||||||
|
<div class="card">
|
||||||
|
<span class="card-icon">📸</span>
|
||||||
|
<h2>Fotograf.de ERP</h2>
|
||||||
|
<p>
|
||||||
|
Automatisierter Workflow zum Download und Formatieren der Anmeldelisten von fotograf.de als sortiertes PDF.
|
||||||
|
</p>
|
||||||
|
<a href="/fotograf-de/" class="btn">Starten →</a>
|
||||||
|
</div>
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
<footer>
|
<footer>
|
||||||
© 2025 Local AI Suite | Secured Access
|
© 2026 Local AI Suite | Secured Access
|
||||||
</footer>
|
</footer>
|
||||||
</body>
|
</body>
|
||||||
</html>
|
</html>
|
||||||
|
|||||||
@@ -19,23 +19,25 @@ services:
|
|||||||
condition: service_started
|
condition: service_started
|
||||||
company-explorer:
|
company-explorer:
|
||||||
condition: service_healthy
|
condition: service_healthy
|
||||||
connector-superoffice:
|
# connector-superoffice:
|
||||||
condition: service_healthy
|
# condition: service_healthy
|
||||||
lead-engine:
|
# lead-engine:
|
||||||
condition: service_started
|
# condition: service_started
|
||||||
gtm-architect:
|
# gtm-architect:
|
||||||
condition: service_started
|
# condition: service_started
|
||||||
b2b-marketing-assistant:
|
# b2b-marketing-assistant:
|
||||||
condition: service_started
|
# condition: service_started
|
||||||
transcription-tool:
|
transcription-tool:
|
||||||
condition: service_started
|
condition: service_started
|
||||||
heatmap-frontend:
|
# heatmap-frontend:
|
||||||
condition: service_started
|
# condition: service_started
|
||||||
competitor-analysis:
|
# competitor-analysis:
|
||||||
condition: service_started
|
# condition: service_started
|
||||||
content-engine:
|
# content-engine:
|
||||||
condition: service_started
|
# condition: service_started
|
||||||
market-intelligence:
|
# market-intelligence:
|
||||||
|
# condition: service_started
|
||||||
|
fotograf-de-scraper-frontend:
|
||||||
condition: service_started
|
condition: service_started
|
||||||
|
|
||||||
# --- DASHBOARD ---
|
# --- DASHBOARD ---
|
||||||
@@ -47,60 +49,60 @@ services:
|
|||||||
- ./dashboard:/usr/share/nginx/html:ro
|
- ./dashboard:/usr/share/nginx/html:ro
|
||||||
|
|
||||||
# --- APPS ---
|
# --- APPS ---
|
||||||
market-intelligence:
|
# market-intelligence:
|
||||||
build:
|
# build:
|
||||||
context: .
|
# context: .
|
||||||
dockerfile: general-market-intelligence/Dockerfile.fullstack
|
# dockerfile: general-market-intelligence/Dockerfile.fullstack
|
||||||
container_name: market-intelligence
|
# container_name: market-intelligence
|
||||||
restart: unless-stopped
|
# restart: unless-stopped
|
||||||
ports:
|
# ports:
|
||||||
- "8098:3001"
|
# - "8098:3001"
|
||||||
environment:
|
# environment:
|
||||||
GEMINI_API_KEY: "${GEMINI_API_KEY}"
|
# GEMINI_API_KEY: "${GEMINI_API_KEY}"
|
||||||
SERP_API_KEY: "${SERP_API}"
|
# SERP_API_KEY: "${SERP_API}"
|
||||||
PYTHONUNBUFFERED: "1"
|
# PYTHONUNBUFFERED: "1"
|
||||||
volumes:
|
# volumes:
|
||||||
- market_intel_data:/data
|
# - market_intel_data:/data
|
||||||
- ./Log_from_docker:/app/Log
|
# - ./Log_from_docker:/app/Log
|
||||||
|
|
||||||
content-engine:
|
# content-engine:
|
||||||
build:
|
# build:
|
||||||
context: .
|
# context: .
|
||||||
dockerfile: content-engine/Dockerfile
|
# dockerfile: content-engine/Dockerfile
|
||||||
container_name: content-engine
|
# container_name: content-engine
|
||||||
restart: unless-stopped
|
# restart: unless-stopped
|
||||||
ports:
|
# ports:
|
||||||
- "8093:3000"
|
# - "8093:3000"
|
||||||
environment:
|
# environment:
|
||||||
GEMINI_API_KEY: "${GEMINI_API_KEY}"
|
# GEMINI_API_KEY: "${GEMINI_API_KEY}"
|
||||||
PYTHONUNBUFFERED: "1"
|
# PYTHONUNBUFFERED: "1"
|
||||||
GTM_DB_PATH: "/gtm_data/gtm_projects.db"
|
# GTM_DB_PATH: "/gtm_data/gtm_projects.db"
|
||||||
CONTENT_DB_PATH: "/data/content_engine.db"
|
# CONTENT_DB_PATH: "/data/content_engine.db"
|
||||||
volumes:
|
# volumes:
|
||||||
- content_engine_data:/data
|
# - content_engine_data:/data
|
||||||
- gtm_architect_data:/gtm_data:ro
|
# - gtm_architect_data:/gtm_data:ro
|
||||||
- ./Log_from_docker:/app/logs_debug
|
# - ./Log_from_docker:/app/logs_debug
|
||||||
healthcheck:
|
# healthcheck:
|
||||||
test: ["CMD", "curl", "-f", "http://localhost:3006"]
|
# test: ["CMD", "curl", "-f", "http://localhost:3006"]
|
||||||
interval: 10s
|
# interval: 10s
|
||||||
timeout: 5s
|
# timeout: 5s
|
||||||
retries: 5
|
# retries: 5
|
||||||
start_period: 30s
|
# start_period: 30s
|
||||||
|
|
||||||
competitor-analysis:
|
# competitor-analysis:
|
||||||
build:
|
# build:
|
||||||
context: ./competitor-analysis-app
|
# context: ./competitor-analysis-app
|
||||||
dockerfile: Dockerfile
|
# dockerfile: Dockerfile
|
||||||
container_name: competitor-analysis
|
# container_name: competitor-analysis
|
||||||
restart: unless-stopped
|
# restart: unless-stopped
|
||||||
ports:
|
# ports:
|
||||||
- "8097:3000"
|
# - "8097:3000"
|
||||||
environment:
|
# environment:
|
||||||
GEMINI_API_KEY: "${GEMINI_API_KEY}"
|
# GEMINI_API_KEY: "${GEMINI_API_KEY}"
|
||||||
PYTHONUNBUFFERED: "1"
|
# PYTHONUNBUFFERED: "1"
|
||||||
volumes:
|
# volumes:
|
||||||
- competitor_analysis_data:/data
|
# - competitor_analysis_data:/data
|
||||||
- ./Log_from_docker:/app/logs_debug
|
# - ./Log_from_docker:/app/logs_debug
|
||||||
|
|
||||||
transcription-tool:
|
transcription-tool:
|
||||||
build:
|
build:
|
||||||
@@ -117,58 +119,58 @@ services:
|
|||||||
- transcription_uploads:/app/uploads
|
- transcription_uploads:/app/uploads
|
||||||
- ./Log_from_docker:/app/logs_debug
|
- ./Log_from_docker:/app/logs_debug
|
||||||
|
|
||||||
heatmap-backend:
|
# heatmap-backend:
|
||||||
build:
|
# build:
|
||||||
context: ./heatmap-tool/backend
|
# context: ./heatmap-tool/backend
|
||||||
container_name: heatmap-backend
|
# container_name: heatmap-backend
|
||||||
restart: unless-stopped
|
# restart: unless-stopped
|
||||||
ports:
|
# ports:
|
||||||
- "8002:8000"
|
# - "8002:8000"
|
||||||
environment:
|
# environment:
|
||||||
ORS_API_KEY: "${ORS_API_KEY}"
|
# ORS_API_KEY: "${ORS_API_KEY}"
|
||||||
PYTHONUNBUFFERED: "1"
|
# PYTHONUNBUFFERED: "1"
|
||||||
|
|
||||||
heatmap-frontend:
|
# heatmap-frontend:
|
||||||
build:
|
# build:
|
||||||
context: ./heatmap-tool/frontend
|
# context: ./heatmap-tool/frontend
|
||||||
dockerfile: Dockerfile
|
# dockerfile: Dockerfile
|
||||||
container_name: heatmap-frontend
|
# container_name: heatmap-frontend
|
||||||
restart: unless-stopped
|
# restart: unless-stopped
|
||||||
ports:
|
# ports:
|
||||||
- "8096:80"
|
# - "8096:80"
|
||||||
depends_on:
|
# depends_on:
|
||||||
- heatmap-backend
|
# - heatmap-backend
|
||||||
|
|
||||||
b2b-marketing-assistant:
|
# b2b-marketing-assistant:
|
||||||
build:
|
# build:
|
||||||
context: .
|
# context: .
|
||||||
dockerfile: b2b-marketing-assistant/Dockerfile
|
# dockerfile: b2b-marketing-assistant/Dockerfile
|
||||||
container_name: b2b-marketing-assistant
|
# container_name: b2b-marketing-assistant
|
||||||
restart: unless-stopped
|
# restart: unless-stopped
|
||||||
ports:
|
# ports:
|
||||||
- "8092:3002"
|
# - "8092:3002"
|
||||||
environment:
|
# environment:
|
||||||
GEMINI_API_KEY: "${GEMINI_API_KEY}"
|
# GEMINI_API_KEY: "${GEMINI_API_KEY}"
|
||||||
PYTHONUNBUFFERED: "1"
|
# PYTHONUNBUFFERED: "1"
|
||||||
volumes:
|
# volumes:
|
||||||
- b2b_marketing_data:/data
|
# - b2b_marketing_data:/data
|
||||||
- ./Log_from_docker:/app/logs_debug
|
# - ./Log_from_docker:/app/logs_debug
|
||||||
|
|
||||||
gtm-architect:
|
# gtm-architect:
|
||||||
build:
|
# build:
|
||||||
context: .
|
# context: .
|
||||||
dockerfile: gtm-architect/Dockerfile
|
# dockerfile: gtm-architect/Dockerfile
|
||||||
container_name: gtm-architect
|
# container_name: gtm-architect
|
||||||
restart: unless-stopped
|
# restart: unless-stopped
|
||||||
ports:
|
# ports:
|
||||||
- "8094:80"
|
# - "8094:80"
|
||||||
environment:
|
# environment:
|
||||||
GEMINI_API_KEY: "${GEMINI_API_KEY}"
|
# GEMINI_API_KEY: "${GEMINI_API_KEY}"
|
||||||
VITE_API_BASE_URL: "/gtm/api"
|
# VITE_API_BASE_URL: "/gtm/api"
|
||||||
GTM_DB_PATH: "/data/gtm_projects.db"
|
# GTM_DB_PATH: "/data/gtm_projects.db"
|
||||||
volumes:
|
# volumes:
|
||||||
- ./Log_from_docker:/app/logs_debug
|
# - ./Log_from_docker:/app/logs_debug
|
||||||
- gtm_architect_data:/data
|
# - gtm_architect_data:/data
|
||||||
|
|
||||||
company-explorer:
|
company-explorer:
|
||||||
build:
|
build:
|
||||||
@@ -197,64 +199,90 @@ services:
|
|||||||
retries: 5
|
retries: 5
|
||||||
start_period: 30s
|
start_period: 30s
|
||||||
|
|
||||||
connector-superoffice:
|
# connector-superoffice:
|
||||||
|
# build:
|
||||||
|
# context: ./connector-superoffice
|
||||||
|
# dockerfile: Dockerfile
|
||||||
|
# container_name: connector-superoffice
|
||||||
|
# restart: unless-stopped
|
||||||
|
# ports:
|
||||||
|
# - "8003:8000"
|
||||||
|
# volumes:
|
||||||
|
# - ./connector-superoffice:/app
|
||||||
|
# - ./connector-superoffice/data:/data # Persistent local DB storage
|
||||||
|
# environment:
|
||||||
|
# PYTHONUNBUFFERED: "1"
|
||||||
|
# GEMINI_API_KEY: "${GEMINI_API_KEY}"
|
||||||
|
# SO_CLIENT_ID: "${SO_CLIENT_ID}"
|
||||||
|
# SO_CLIENT_SECRET: "${SO_CLIENT_SECRET}"
|
||||||
|
# SO_REFRESH_TOKEN: "${SO_REFRESH_TOKEN}"
|
||||||
|
# SO_ENVIRONMENT: "${SO_ENVIRONMENT}"
|
||||||
|
# SO_CONTEXT_IDENTIFIER: "${SO_CONTEXT_IDENTIFIER}"
|
||||||
|
# WEBHOOK_TOKEN: "${WEBHOOK_TOKEN}"
|
||||||
|
# WEBHOOK_SECRET: "${WEBHOOK_SECRET}"
|
||||||
|
# healthcheck:
|
||||||
|
# test: ["CMD", "curl", "-f", "http://localhost:8000/health"]
|
||||||
|
# interval: 10s
|
||||||
|
# timeout: 5s
|
||||||
|
# retries: 5
|
||||||
|
# start_period: 30s
|
||||||
|
|
||||||
|
# lead-engine:
|
||||||
|
# build:
|
||||||
|
# context: ./lead-engine
|
||||||
|
# dockerfile: Dockerfile
|
||||||
|
# container_name: lead-engine
|
||||||
|
# restart: unless-stopped
|
||||||
|
# ports:
|
||||||
|
# - "8501:8501" # UI (Streamlit)
|
||||||
|
# - "8004:8004" # API / Monitor
|
||||||
|
# - "8099:8004" # Direct Test Port
|
||||||
|
# environment:
|
||||||
|
# PYTHONUNBUFFERED: "1"
|
||||||
|
# GEMINI_API_KEY: "${GEMINI_API_KEY}"
|
||||||
|
# SERP_API: "${SERP_API}"
|
||||||
|
# INFO_Application_ID: "${INFO_Application_ID}"
|
||||||
|
# INFO_Tenant_ID: "${INFO_Tenant_ID}"
|
||||||
|
# INFO_Secret: "${INFO_Secret}"
|
||||||
|
# CAL_APPID: "${CAL_APPID}"
|
||||||
|
# CAL_SECRET: "${CAL_SECRET}"
|
||||||
|
# CAL_TENNANT_ID: "${CAL_TENNANT_ID}"
|
||||||
|
# TEAMS_WEBHOOK_URL: "${TEAMS_WEBHOOK_URL}"
|
||||||
|
# FEEDBACK_SERVER_BASE_URL: "${FEEDBACK_SERVER_BASE_URL}"
|
||||||
|
# WORDPRESS_BOOKING_URL: "${WORDPRESS_BOOKING_URL}"
|
||||||
|
# MS_BOOKINGS_URL: "${MS_BOOKINGS_URL}"
|
||||||
|
# volumes:
|
||||||
|
# - ./lead-engine:/app
|
||||||
|
# - ./lead-engine/data:/app/data # Local persistent database
|
||||||
|
|
||||||
|
fotograf-de-scraper-backend:
|
||||||
build:
|
build:
|
||||||
context: ./connector-superoffice
|
context: ./fotograf-de-scraper/backend
|
||||||
dockerfile: Dockerfile
|
dockerfile: Dockerfile
|
||||||
container_name: connector-superoffice
|
container_name: fotograf-de-scraper-backend
|
||||||
restart: unless-stopped
|
env_file:
|
||||||
ports:
|
- ./fotograf-de-scraper/.env
|
||||||
- "8003:8000"
|
|
||||||
volumes:
|
|
||||||
- ./connector-superoffice:/app
|
|
||||||
- ./connector-superoffice/data:/data # Persistent local DB storage
|
|
||||||
environment:
|
environment:
|
||||||
PYTHONUNBUFFERED: "1"
|
- TZ=Europe/Berlin
|
||||||
GEMINI_API_KEY: "${GEMINI_API_KEY}"
|
ports:
|
||||||
SO_CLIENT_ID: "${SO_CLIENT_ID}"
|
- "8002:8000"
|
||||||
SO_CLIENT_SECRET: "${SO_CLIENT_SECRET}"
|
volumes:
|
||||||
SO_REFRESH_TOKEN: "${SO_REFRESH_TOKEN}"
|
- ./fotograf-de-scraper/backend:/app
|
||||||
SO_ENVIRONMENT: "${SO_ENVIRONMENT}"
|
- ./fotograf-de-scraper/backend/data:/app/data
|
||||||
SO_CONTEXT_IDENTIFIER: "${SO_CONTEXT_IDENTIFIER}"
|
restart: unless-stopped
|
||||||
WEBHOOK_TOKEN: "${WEBHOOK_TOKEN}"
|
|
||||||
WEBHOOK_SECRET: "${WEBHOOK_SECRET}"
|
|
||||||
healthcheck:
|
|
||||||
test: ["CMD", "curl", "-f", "http://localhost:8000/health"]
|
|
||||||
interval: 10s
|
|
||||||
timeout: 5s
|
|
||||||
retries: 5
|
|
||||||
start_period: 30s
|
|
||||||
|
|
||||||
lead-engine:
|
fotograf-de-scraper-frontend:
|
||||||
build:
|
build:
|
||||||
context: ./lead-engine
|
context: ./fotograf-de-scraper/frontend
|
||||||
dockerfile: Dockerfile
|
dockerfile: Dockerfile
|
||||||
container_name: lead-engine
|
args:
|
||||||
restart: unless-stopped
|
VITE_API_BASE_URL: "http://192.168.178.6:8002"
|
||||||
|
container_name: fotograf-de-scraper-frontend
|
||||||
ports:
|
ports:
|
||||||
- "8501:8501" # UI (Streamlit)
|
- "3009:80"
|
||||||
- "8004:8004" # API / Monitor
|
depends_on:
|
||||||
- "8099:8004" # Direct Test Port
|
- fotograf-de-scraper-backend
|
||||||
environment:
|
restart: unless-stopped
|
||||||
PYTHONUNBUFFERED: "1"
|
|
||||||
GEMINI_API_KEY: "${GEMINI_API_KEY}"
|
|
||||||
SERP_API: "${SERP_API}"
|
|
||||||
INFO_Application_ID: "${INFO_Application_ID}"
|
|
||||||
INFO_Tenant_ID: "${INFO_Tenant_ID}"
|
|
||||||
INFO_Secret: "${INFO_Secret}"
|
|
||||||
CAL_APPID: "${CAL_APPID}"
|
|
||||||
CAL_SECRET: "${CAL_SECRET}"
|
|
||||||
CAL_TENNANT_ID: "${CAL_TENNANT_ID}"
|
|
||||||
TEAMS_WEBHOOK_URL: "${TEAMS_WEBHOOK_URL}"
|
|
||||||
FEEDBACK_SERVER_BASE_URL: "${FEEDBACK_SERVER_BASE_URL}"
|
|
||||||
WORDPRESS_BOOKING_URL: "${WORDPRESS_BOOKING_URL}"
|
|
||||||
MS_BOOKINGS_URL: "${MS_BOOKINGS_URL}"
|
|
||||||
volumes:
|
|
||||||
- ./lead-engine:/app
|
|
||||||
- ./lead-engine/data:/app/data # Local persistent database
|
|
||||||
|
|
||||||
# --- INFRASTRUCTURE SERVICES ---
|
|
||||||
|
|
||||||
|
|
||||||
volumes:
|
volumes:
|
||||||
gtm_architect_data: {}
|
gtm_architect_data: {}
|
||||||
|
|||||||
@@ -1,6 +1,6 @@
|
|||||||
# Fotograf.de Scraper & Management UI
|
# Fotograf.de Scraper & Management UI
|
||||||
|
|
||||||
**Status:** Production-Ready Microservice (Core Feature: PDF List Generation & QR Cards)
|
**Status:** Production-Ready Microservice (Core Feature: PDF List Generation, QR Cards, Shooting Schedule, **SQLite Data Sync**, **Gmail API Integration** & **Automated Release Requests**)
|
||||||
|
|
||||||
Dieser Service modernisiert die alten `Fotograf.de` Skripte, indem er eine robuste, web-basierte UI zur Verwaltung und Automatisierung von Foto-Aufträgen bereitstellt. Er ist als eigenständiger Microservice konzipiert, der unabhängig vom Haupt-Stack läuft.
|
Dieser Service modernisiert die alten `Fotograf.de` Skripte, indem er eine robuste, web-basierte UI zur Verwaltung und Automatisierung von Foto-Aufträgen bereitstellt. Er ist als eigenständiger Microservice konzipiert, der unabhängig vom Haupt-Stack läuft.
|
||||||
|
|
||||||
@@ -10,52 +10,62 @@ Der Service besteht aus zwei Hauptkomponenten:
|
|||||||
|
|
||||||
1. **Backend (Python / FastAPI / Selenium / SQLAlchemy):**
|
1. **Backend (Python / FastAPI / Selenium / SQLAlchemy):**
|
||||||
* **Automatisierung:** Nutzt Selenium für das Scraping von `fotograf.de`.
|
* **Automatisierung:** Nutzt Selenium für das Scraping von `fotograf.de`.
|
||||||
* **Persistenz:** Eine SQLite-Datenbank (`fotograf_jobs.db`) speichert die Auftragsliste, sodass langsame Scraping-Vorgänge nur bei Bedarf (Refresh) nötig sind.
|
* **Persistenz:** Eine SQLite-Datenbank (`fotograf_jobs.db`) speichert die Auftragsliste, OAuth-Tokens (`GmailToken`), Gutscheincodes (`DiscountCode`), Teilnehmerdaten (`ReleaseParticipant`), **Auftragsteilnehmer (`JobParticipant`)** und die **Versand-Historie (`ReleaseHistory`)**.
|
||||||
* **PDF-Engine:** Nutzt WeasyPrint für Teilnehmerlisten und ReportLab/PyPDF2 für präzise PDF-Overlays (QR-Karten).
|
* **PDF-Engine:** Nutzt WeasyPrint für Teilnehmerlisten und ReportLab/PyPDF2 für präzise PDF-Overlays (QR-Karten).
|
||||||
* **API-Integration:** Direkte Anbindung an die **Calendly API (v2)** zum Abruf von Live-Buchungsdaten mittels Personal Access Token (JWT).
|
* **API-Integration:** Direkte Anbindung an die **Calendly API (v2)** sowie an die **Gmail API** für direkten E-Mail-Versand und automatisierte Webhook-Antworten.
|
||||||
|
|
||||||
2. **Frontend (TypeScript / React / Vite / TailwindCSS):**
|
2. **Frontend (TypeScript / React / Vite / TailwindCSS):**
|
||||||
* **Modernes UI:** Ein vollständig responsives Dashboard mit Tailwind CSS (Kachel-Layout, Tabs für Kiga/Schule).
|
* **Modernes UI:** Ein vollständig responsives Dashboard mit Tailwind CSS (Kachel-Layout, Tabs für Kiga/Schule).
|
||||||
* **Echtzeit-Feedback:** Polling-Mechanismus für langlaufende Hintergrund-Tasks (z. B. Statistiken).
|
* **Arbeitsfluss:** Tools sind in der Detailansicht eines Auftrags in logische Phasen (Vorbereitung, Follow-Up, Statistik) unterteilt.
|
||||||
* **Tools:** Integrierte Formulare für PDF-Downloads und ein dedizierter QR-Karten-Generator im Header.
|
|
||||||
|
|
||||||
## ✨ Core Features
|
## ✨ Core Features
|
||||||
|
|
||||||
### Feature 1: PDF-Teilnehmerlisten-Generierung (Vollständig)
|
### 🚀 Performance-Optimierung (SQLite Sync)
|
||||||
Automatisierter Workflow:
|
Statt wie früher jedes Mal mühsam durch alle Foto-Alben zu "crawlen", nutzt das System nun eine intelligente Synchronisierung:
|
||||||
1. **Daten-Caching:** Aufträge werden aus der lokalen DB geladen (Millisekunden-Reaktionszeit).
|
* **One-Click Sync:** Über den Button "Daten von Fotograf.de abgleichen" lädt das System die detaillierte Namensliste (CSV) herunter.
|
||||||
2. **Selenium-Download:** Auf Knopfdruck wird die CSV-Anmeldeliste von `fotograf.de` im Hintergrund geladen.
|
* **Lokale Datenbank:** Alle relevanten Infos (E-Mail der Eltern, Login-Zahlen, Bestellstatus, Zugangscodes) werden in der Tabelle `job_participants` gespeichert.
|
||||||
3. **PDF-Erstellung:** Generierung eines sortierten PDFs (Klassen/Gruppen) mit "Kinderfotos Erding" Branding.
|
* **Blitzschnelle Analyse:** Nachfass-Mails und Statistiken werden nun in Sekunden (statt Minuten) direkt aus der Datenbank generiert.
|
||||||
|
|
||||||
### Feature 2: QR-Karten für Familien-Shootings (Vollständig)
|
### Feature 1: Teilnehmerlisten (Vollständig)
|
||||||
Spezielles Tool für Familien-Mini-Shootings:
|
Automatisierter Workflow zum Download und Formatieren der Anmeldelisten von `fotograf.de` als sortiertes PDF inkl. "Kinderfotos Erding" Branding.
|
||||||
* **Workflow:**
|
|
||||||
1. Manueller Upload eines Blanko-PDFs, das bereits individuelle QR-Codes pro Seite/Karte enthält.
|
|
||||||
2. Live-Abruf der Buchungsdaten (Name, Anzahl Kinder, Uhrzeit) via **Calendly API**.
|
|
||||||
3. **Präzises Overlay:** Die Texte werden exakt an zwei Positionen pro A4-Seite angedruckt:
|
|
||||||
* **Element 1 (Oben):** X: 72mm, Y: 22mm (vom oberen Rand).
|
|
||||||
* **Element 2 (Unten):** X: 72mm, Y: 171mm (vom oberen Rand).
|
|
||||||
* **Intelligente Seitenverwaltung:** Das Tool erkennt die Seitenanzahl der Vorlage und beschreibt jede Seite individuell, um die eindeutigen QR-Codes zu erhalten.
|
|
||||||
|
|
||||||
### Feature 3: Nachfass-E-Mails (Geplant)
|
### Feature 2: Shooting-Planung (QR-Karten & Terminliste) (Vollständig)
|
||||||
* Identifizierung von Käufern/Nicht-Käufern zur Generierung von Supermailer-Listen (Anbindung an Fotograf.de Scraping-Logik).
|
Spezielles Modul für Familien-Mini-Shootings:
|
||||||
|
* **QR-Karten-Andruck:** Präzises Overlay von Name, Kinderanzahl und Uhrzeit inkl. automatischer **Einwilligungs-Checkbox (☑)** aus Calendly-Daten.
|
||||||
|
* **Termin-Übersichtsliste:** Generiert eine A4-Tabelle für den Shooting-Tag im 6-Minuten-Takt inkl. Lückenfüller.
|
||||||
|
|
||||||
### Feature 4: Verkaufs-Statistiken (Vollständig)
|
### Feature 3: Nachfass-E-Mails & Gmail Direkt-Versand (Optimiert)
|
||||||
* Durchforstet alle Alben eines Auftrags via Selenium.
|
Identifizierung von Nicht-Käufern (0-1 Logins, keine Bestellung) basierend auf den synchronisierten Datenbank-Daten.
|
||||||
* Liefert eine Übersicht der Verkaufszahlen pro Album (Kinder gesamt, mit Käufen, alle Bilder gekauft).
|
* **Vorschau-Modus:** Ermöglicht das Durchklicken der personalisierten E-Mails an jeden Empfänger vor dem eigentlichen Versand.
|
||||||
* **UX:** Fortschrittsanzeige in Echtzeit ("Bearbeite Album X/Y, Seite Z...") direkt im Browser.
|
* **Quick-Login Automation:** Die Login-Links (`https://www.kinderfotos-erding.de/a/{code}`) werden automatisch generiert.
|
||||||
|
|
||||||
|
### Feature 4: Verkaufs-Statistiken (Optimiert)
|
||||||
|
Detaillierte Analyse des Kaufverhaltens pro Gruppe/Klasse basierend auf den lokalen Datenbank-Einträgen.
|
||||||
|
|
||||||
|
### Feature 5: Geschwisterliste (Einrichtungsintern) (Vollständig)
|
||||||
|
Tool zur Identifizierung von Geschwistergruppen innerhalb einer Einrichtung inkl. Cross-Check mit Calendly-Buchungen und speziellen Geschwister-QR-Karten.
|
||||||
|
* **Flexibilität:** Optionaler Modus "Ohne Nachmittags-Shooting", um die Liste auch ohne Calendly-Abgleich (rein einrichtungsintern) zu generieren.
|
||||||
|
|
||||||
|
### Feature 6: Freigabeanfragen & Gutschein-Automation (Vollständig)
|
||||||
|
Vollautomatisierter DSGVO-Workflow zur Einholung von Veröffentlichungsgenehmigungen:
|
||||||
|
* **Schlanker Versand:** Manuelle Eingabe von Empfängern (E-Mail, Vorname, Kindernamen) mit **E-Mail-Vorschau**.
|
||||||
|
* **Versand-Planung:** Einstellbare Versandzeit (Berlin Timezone) via Hintergrund-Tasks.
|
||||||
|
* **Webhook-Integration:** Direkte Anbindung an **Google Forms**. Bei Absenden des Freigabe-Formulars wird automatisch ein Gutscheincode reserviert und eine Dankes-E-Mail versendet.
|
||||||
|
* **Antwort-Übersicht:** Tabelle aller eingegangenen Freigaben inkl. zugewiesenem Code und Zeitstempel.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 🛠️ Technische Details & Sicherheit
|
||||||
|
* **BCC-Kontrolle:** Jede vom System versendete E-Mail sendet automatisch eine Blindkopie (BCC) an `kontakt@kinderfotos-erding.de`.
|
||||||
|
* **Versand-Historie:** Alle Aussendungen (Anzahl Empfänger, Zeitpunkt) werden in der Tabelle `release_history` protokolliert.
|
||||||
|
* **Sicherer Test-Modus:** Über `DEV_MODE_EMAIL_RECIPIENT` können alle E-Mails global an eine Test-Adresse umgeleitet werden.
|
||||||
|
* **Zeitzonen:** Durchgängige Verwendung von `Europe/Berlin`.
|
||||||
|
* **Gmail OAuth:** Persistente Speicherung der Refresh-Tokens in der Datenbank.
|
||||||
|
|
||||||
## 🚀 Deployment & Konfiguration
|
## 🚀 Deployment & Konfiguration
|
||||||
|
|
||||||
Der Service wird über eine eigene `docker-compose.yml` im Unterverzeichnis gestartet.
|
Der Service wird über die Haupt-`docker-compose.yml` des Projekts verwaltet.
|
||||||
|
|
||||||
### Umgebungsvariablen (`.env`)
|
### URLs
|
||||||
Folgende Variablen müssen in der `.env` im Verzeichnis `/fotograf-de-scraper/` definiert sein:
|
* **Frontend:** `https://floke-ai.duckdns.org/fotograf-de/`
|
||||||
* `KIGA_USER` / `KIGA_PW`: Login für den Kindergarten-Account.
|
* **Webhook für Google Forms:** `https://floke-ai.duckdns.org/fotograf-de-api/api/publish-request/webhook`
|
||||||
* `SCHULE_USER` / `SCHULE_PW`: Login für den Schul-Account.
|
|
||||||
* `CALENDLY_TOKEN`: Personal Access Token (JWT, startet mit `eyJ...`) von Calendly (erfordert Professional Abo).
|
|
||||||
|
|
||||||
### URLs & Ports
|
|
||||||
* **Frontend:** `http://<HOST_IP>:3009` (Dashboard & Tools)
|
|
||||||
* **Backend:** `http://<HOST_IP>:8002` (API-Schnittstelle)
|
|
||||||
* **Persistenz:** Die Datenbank liegt unter `./backend/data/fotograf_jobs.db`.
|
|
||||||
BIN
fotograf-de-scraper/backend/assets/OpenSans-Regular.ttf
Normal file
BIN
fotograf-de-scraper/backend/assets/OpenSans-Regular.ttf
Normal file
Binary file not shown.
@@ -22,6 +22,61 @@ class Job(Base):
|
|||||||
account_type = Column(String, index=True) # 'kiga' or 'schule'
|
account_type = Column(String, index=True) # 'kiga' or 'schule'
|
||||||
last_updated = Column(DateTime, default=datetime.datetime.utcnow)
|
last_updated = Column(DateTime, default=datetime.datetime.utcnow)
|
||||||
|
|
||||||
|
class GmailToken(Base):
|
||||||
|
__tablename__ = "gmail_tokens"
|
||||||
|
id = Column(Integer, primary_key=True)
|
||||||
|
token_json = Column(String) # Stores the full credentials JSON
|
||||||
|
updated_at = Column(DateTime, default=datetime.datetime.utcnow, onupdate=datetime.datetime.utcnow)
|
||||||
|
|
||||||
|
class DiscountCode(Base):
|
||||||
|
__tablename__ = "discount_codes"
|
||||||
|
id = Column(Integer, primary_key=True)
|
||||||
|
code = Column(String, unique=True, index=True)
|
||||||
|
is_used = Column(Integer, default=0) # 0 for false, 1 for true
|
||||||
|
assigned_to_email = Column(String, nullable=True)
|
||||||
|
used_at = Column(DateTime, nullable=True)
|
||||||
|
|
||||||
|
class ReleaseParticipant(Base):
|
||||||
|
__tablename__ = "release_participants"
|
||||||
|
email = Column(String, primary_key=True)
|
||||||
|
first_name = Column(String)
|
||||||
|
last_updated = Column(DateTime, default=datetime.datetime.utcnow, onupdate=datetime.datetime.utcnow)
|
||||||
|
|
||||||
|
class ReleaseHistory(Base):
|
||||||
|
__tablename__ = "release_history"
|
||||||
|
id = Column(Integer, primary_key=True)
|
||||||
|
timestamp = Column(DateTime, default=datetime.datetime.utcnow)
|
||||||
|
recipient_count = Column(Integer)
|
||||||
|
scheduled_time = Column(String, nullable=True)
|
||||||
|
|
||||||
|
class ReminderHistory(Base):
|
||||||
|
__tablename__ = "reminder_history"
|
||||||
|
id = Column(Integer, primary_key=True)
|
||||||
|
job_id = Column(String, index=True)
|
||||||
|
timestamp = Column(DateTime, default=datetime.datetime.utcnow)
|
||||||
|
recipient_count = Column(Integer)
|
||||||
|
max_logins = Column(Integer)
|
||||||
|
recipients_json = Column(String) # JSON list of emails/names/children
|
||||||
|
scheduled_time = Column(String, nullable=True)
|
||||||
|
|
||||||
|
class JobParticipant(Base):
|
||||||
|
__tablename__ = "job_participants"
|
||||||
|
id = Column(Integer, primary_key=True)
|
||||||
|
job_id = Column(String, index=True)
|
||||||
|
child_id = Column(String, nullable=True)
|
||||||
|
vorname_kind = Column(String, nullable=True)
|
||||||
|
nachname_kind = Column(String, nullable=True)
|
||||||
|
vorname_eltern = Column(String, nullable=True)
|
||||||
|
nachname_eltern = Column(String, nullable=True)
|
||||||
|
email_eltern = Column(String, nullable=True)
|
||||||
|
zugangscode = Column(String, index=True)
|
||||||
|
gruppe = Column(String, nullable=True)
|
||||||
|
logins = Column(Integer, default=0)
|
||||||
|
has_orders = Column(Integer, default=0) # 0 for false, 1 for true
|
||||||
|
digital_package_ordered = Column(Integer, default=0) # 0 for false, 1 for true
|
||||||
|
quick_login_url = Column(String, nullable=True)
|
||||||
|
last_synced = Column(DateTime, default=datetime.datetime.utcnow)
|
||||||
|
|
||||||
Base.metadata.create_all(bind=engine)
|
Base.metadata.create_all(bind=engine)
|
||||||
|
|
||||||
def get_db():
|
def get_db():
|
||||||
|
|||||||
Binary file not shown.
|
After Width: | Height: | Size: 56 KiB |
140
fotograf-de-scraper/backend/gmail_service.py
Normal file
140
fotograf-de-scraper/backend/gmail_service.py
Normal file
@@ -0,0 +1,140 @@
|
|||||||
|
import os
|
||||||
|
import json
|
||||||
|
import logging
|
||||||
|
import datetime
|
||||||
|
from typing import Optional, List, Dict, Any
|
||||||
|
from google.oauth2.credentials import Credentials
|
||||||
|
from google_auth_oauthlib.flow import Flow
|
||||||
|
from googleapiclient.discovery import build
|
||||||
|
from google.auth.transport.requests import Request
|
||||||
|
from sqlalchemy.orm import Session
|
||||||
|
from database import GmailToken
|
||||||
|
import base64
|
||||||
|
from email.mime.text import MIMEText
|
||||||
|
|
||||||
|
logger = logging.getLogger("gmail-service")
|
||||||
|
|
||||||
|
# Scopes required for sending emails
|
||||||
|
SCOPES = ['https://www.googleapis.com/auth/gmail.send']
|
||||||
|
|
||||||
|
class GmailService:
|
||||||
|
def __init__(self, db: Session):
|
||||||
|
self.db = db
|
||||||
|
self.client_id = os.getenv("google_fotograf_client_id")
|
||||||
|
self.client_secret = os.getenv("google_fotograf_secret")
|
||||||
|
|
||||||
|
# Redirect URI - must match what was configured in Google Console
|
||||||
|
# We try to detect the public URL, fallback to duckdns
|
||||||
|
self.redirect_uri = os.getenv("GOOGLE_REDIRECT_URI", "https://floke-ai.duckdns.org/fotograf-de-api/api/auth/callback")
|
||||||
|
|
||||||
|
def _get_client_config(self) -> Dict[str, Any]:
|
||||||
|
return {
|
||||||
|
"web": {
|
||||||
|
"client_id": self.client_id,
|
||||||
|
"project_id": "fotograf-tool",
|
||||||
|
"auth_uri": "https://accounts.google.com/o/oauth2/auth",
|
||||||
|
"token_uri": "https://oauth2.googleapis.com/token",
|
||||||
|
"auth_provider_x509_cert_url": "https://www.googleapis.com/oauth2/v1/certs",
|
||||||
|
"client_secret": self.client_secret,
|
||||||
|
"redirect_uris": [self.redirect_uri]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
def get_auth_url(self) -> str:
|
||||||
|
flow = Flow.from_client_config(
|
||||||
|
self._get_client_config(),
|
||||||
|
scopes=SCOPES,
|
||||||
|
redirect_uri=self.redirect_uri
|
||||||
|
)
|
||||||
|
auth_url, _ = flow.authorization_url(prompt='consent', access_type='offline')
|
||||||
|
return auth_url
|
||||||
|
|
||||||
|
def handle_callback(self, code: str):
|
||||||
|
flow = Flow.from_client_config(
|
||||||
|
self._get_client_config(),
|
||||||
|
scopes=SCOPES,
|
||||||
|
redirect_uri=self.redirect_uri
|
||||||
|
)
|
||||||
|
flow.fetch_token(code=code)
|
||||||
|
credentials = flow.credentials
|
||||||
|
self._save_token(credentials)
|
||||||
|
return credentials
|
||||||
|
|
||||||
|
def _save_token(self, credentials):
|
||||||
|
token_data = {
|
||||||
|
'token': credentials.token,
|
||||||
|
'refresh_token': credentials.refresh_token,
|
||||||
|
'token_uri': credentials.token_uri,
|
||||||
|
'client_id': credentials.client_id,
|
||||||
|
'client_secret': credentials.client_secret,
|
||||||
|
'scopes': credentials.scopes
|
||||||
|
}
|
||||||
|
|
||||||
|
db_token = self.db.query(GmailToken).first()
|
||||||
|
if not db_token:
|
||||||
|
db_token = GmailToken(token_json=json.dumps(token_data))
|
||||||
|
self.db.add(db_token)
|
||||||
|
else:
|
||||||
|
db_token.token_json = json.dumps(token_data)
|
||||||
|
|
||||||
|
self.db.commit()
|
||||||
|
logger.info("Gmail OAuth token saved to database.")
|
||||||
|
|
||||||
|
def get_credentials(self) -> Optional[Credentials]:
|
||||||
|
db_token = self.db.query(GmailToken).first()
|
||||||
|
if not db_token:
|
||||||
|
return None
|
||||||
|
|
||||||
|
token_data = json.loads(db_token.token_json)
|
||||||
|
creds = Credentials.from_authorized_user_info(token_data, SCOPES)
|
||||||
|
|
||||||
|
if creds and creds.expired and creds.refresh_token:
|
||||||
|
logger.info("Gmail token expired, refreshing...")
|
||||||
|
creds.refresh(Request())
|
||||||
|
self._save_token(creds)
|
||||||
|
|
||||||
|
return creds
|
||||||
|
|
||||||
|
def is_authenticated(self) -> bool:
|
||||||
|
try:
|
||||||
|
creds = self.get_credentials()
|
||||||
|
return creds is not None and creds.valid
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"Auth check failed: {e}")
|
||||||
|
return False
|
||||||
|
|
||||||
|
def send_email(self, to: str, subject: str, body_html: str) -> bool:
|
||||||
|
creds = self.get_credentials()
|
||||||
|
if not creds:
|
||||||
|
logger.error("Cannot send email: Not authenticated.")
|
||||||
|
return False
|
||||||
|
|
||||||
|
try:
|
||||||
|
# DEV MODE OVERRIDE
|
||||||
|
dev_email = os.getenv("DEV_MODE_EMAIL_RECIPIENT")
|
||||||
|
original_to = to
|
||||||
|
if dev_email:
|
||||||
|
logger.warning(f"⚠️ DEV MODE ACTIVE: Redirecting email originally intended for {original_to} to {dev_email}")
|
||||||
|
to = dev_email
|
||||||
|
|
||||||
|
service = build('gmail', 'v1', credentials=creds)
|
||||||
|
message = MIMEText(body_html, 'html')
|
||||||
|
message['to'] = to
|
||||||
|
message['subject'] = subject
|
||||||
|
message['bcc'] = 'kontakt@kinderfotos-erding.de'
|
||||||
|
|
||||||
|
raw_message = base64.urlsafe_b64encode(message.as_bytes()).decode()
|
||||||
|
|
||||||
|
send_result = service.users().messages().send(
|
||||||
|
userId='me',
|
||||||
|
body={'raw': raw_message}
|
||||||
|
).execute()
|
||||||
|
|
||||||
|
if dev_email:
|
||||||
|
logger.info(f"Test-Email sent to {to} (Original target: {original_to}). Message ID: {send_result['id']}")
|
||||||
|
else:
|
||||||
|
logger.info(f"Email sent to {to}. Message ID: {send_result['id']}")
|
||||||
|
return True
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"Failed to send email to {to}: {e}")
|
||||||
|
return False
|
||||||
49
fotograf-de-scraper/backend/inspect_orders.py
Normal file
49
fotograf-de-scraper/backend/inspect_orders.py
Normal file
@@ -0,0 +1,49 @@
|
|||||||
|
import os
|
||||||
|
import sys
|
||||||
|
from dotenv import load_dotenv
|
||||||
|
from sqlalchemy import create_engine
|
||||||
|
from sqlalchemy.orm import sessionmaker
|
||||||
|
from database import Job
|
||||||
|
from main import setup_driver, login
|
||||||
|
import time
|
||||||
|
|
||||||
|
load_dotenv()
|
||||||
|
|
||||||
|
engine = create_engine("sqlite:////app/data/fotograf_jobs.db")
|
||||||
|
Session = sessionmaker(bind=engine)
|
||||||
|
db = Session()
|
||||||
|
|
||||||
|
# Get latest job
|
||||||
|
job = db.query(Job).order_by(Job.last_updated.desc()).first()
|
||||||
|
if not job:
|
||||||
|
print("No jobs found in database.")
|
||||||
|
sys.exit(1)
|
||||||
|
|
||||||
|
print(f"Using Job ID: {job.id} ({job.name}), Account: {job.account_type}")
|
||||||
|
|
||||||
|
username = os.getenv(f"{job.account_type.upper()}_USER")
|
||||||
|
password = os.getenv(f"{job.account_type.upper()}_PW")
|
||||||
|
|
||||||
|
driver = setup_driver()
|
||||||
|
if not driver:
|
||||||
|
print("Failed to init driver")
|
||||||
|
sys.exit(1)
|
||||||
|
|
||||||
|
if not login(driver, username, password):
|
||||||
|
print("Login failed")
|
||||||
|
driver.quit()
|
||||||
|
sys.exit(1)
|
||||||
|
|
||||||
|
orders_url = f"https://app.fotograf.de/config_jobs_orders/index/{job.id}/customer_orders"
|
||||||
|
print(f"Navigating to {orders_url}")
|
||||||
|
driver.get(orders_url)
|
||||||
|
time.sleep(5) # wait for page to load
|
||||||
|
|
||||||
|
html = driver.page_source
|
||||||
|
with open("orders_page.html", "w", encoding="utf-8") as f:
|
||||||
|
f.write(html)
|
||||||
|
|
||||||
|
driver.save_screenshot("orders_page.png")
|
||||||
|
print("Saved orders_page.html and orders_page.png")
|
||||||
|
|
||||||
|
driver.quit()
|
||||||
File diff suppressed because it is too large
Load Diff
18
fotograf-de-scraper/backend/migrate_db.py
Normal file
18
fotograf-de-scraper/backend/migrate_db.py
Normal file
@@ -0,0 +1,18 @@
|
|||||||
|
import sqlite3
|
||||||
|
import os
|
||||||
|
|
||||||
|
db_path = "/app/data/fotograf_jobs.db"
|
||||||
|
if not os.path.exists(db_path):
|
||||||
|
db_path = "fotograf-de-scraper/backend/data/fotograf_jobs.db"
|
||||||
|
|
||||||
|
conn = sqlite3.connect(db_path)
|
||||||
|
cursor = conn.cursor()
|
||||||
|
|
||||||
|
try:
|
||||||
|
cursor.execute("ALTER TABLE job_participants ADD COLUMN digital_package_ordered INTEGER DEFAULT 0;")
|
||||||
|
print("Column 'digital_package_ordered' added successfully.")
|
||||||
|
except sqlite3.OperationalError:
|
||||||
|
print("Column 'digital_package_ordered' already exists.")
|
||||||
|
|
||||||
|
conn.commit()
|
||||||
|
conn.close()
|
||||||
12
fotograf-de-scraper/backend/orders_page.html
Normal file
12
fotograf-de-scraper/backend/orders_page.html
Normal file
File diff suppressed because one or more lines are too long
BIN
fotograf-de-scraper/backend/orders_page.png
Normal file
BIN
fotograf-de-scraper/backend/orders_page.png
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 78 KiB |
229
fotograf-de-scraper/backend/publish_request_api.py
Normal file
229
fotograf-de-scraper/backend/publish_request_api.py
Normal file
@@ -0,0 +1,229 @@
|
|||||||
|
from fastapi import APIRouter, Depends, HTTPException, Request, BackgroundTasks
|
||||||
|
from pydantic import BaseModel
|
||||||
|
from sqlalchemy.orm import Session
|
||||||
|
from database import get_db, DiscountCode, ReleaseParticipant, ReleaseHistory
|
||||||
|
import datetime
|
||||||
|
import logging
|
||||||
|
from gmail_service import GmailService
|
||||||
|
import re
|
||||||
|
import time
|
||||||
|
import asyncio
|
||||||
|
from typing import List, Dict, Optional
|
||||||
|
|
||||||
|
from zoneinfo import ZoneInfo
|
||||||
|
|
||||||
|
router = APIRouter(prefix="/api/publish-request", tags=["publish-request"])
|
||||||
|
logger = logging.getLogger("publish-request")
|
||||||
|
|
||||||
|
# Timezone for Berlin
|
||||||
|
TZ_BERLIN = ZoneInfo("Europe/Berlin")
|
||||||
|
|
||||||
|
# Official Project Signature
|
||||||
|
SIGNATURE_HTML = """
|
||||||
|
<br><br>
|
||||||
|
<span style="color: #888;">--</span><br>
|
||||||
|
<div dir="ltr">
|
||||||
|
<table border="0" cellspacing="0" cellpadding="0" style="border-collapse:collapse; margin-top: 5px;">
|
||||||
|
<tbody>
|
||||||
|
<tr>
|
||||||
|
<td width="220" valign="top" style="padding-right: 15px;">
|
||||||
|
<img width="200" src="https://lh3.googleusercontent.com/d/1K7RODOqKE2e1nRJ3D4dEWdjthoTMyXUq" alt="Kinderfotos Erding Logo" style="display: block;">
|
||||||
|
</td>
|
||||||
|
<td valign="bottom" style="padding-left: 15px; border-left: 1px solid #ddd; font-family: sans-serif; font-size: 13px; color: #333; line-height: 1.5;">
|
||||||
|
<p style="margin: 0;"><b>Kinderfotos Erding</b> | <a href="http://www.kinderfotos-erding.de/" target="_blank" style="color: #1155cc; text-decoration: none;">www.kinderfotos-erding.de</a></p>
|
||||||
|
<p style="margin: 0; color: #666;">Gartenstr. 10 | 85445 Oberding | 08122-8470867</p>
|
||||||
|
</td>
|
||||||
|
</tr>
|
||||||
|
</tbody>
|
||||||
|
</table>
|
||||||
|
</div>
|
||||||
|
"""
|
||||||
|
|
||||||
|
class CodesUpload(BaseModel):
|
||||||
|
codes: str # comma separated
|
||||||
|
|
||||||
|
class SendReleaseRequest(BaseModel):
|
||||||
|
emails: List[Dict[str, str]]
|
||||||
|
scheduled_time: Optional[str] = None # e.g. "10:00"
|
||||||
|
participants: Optional[List[Dict[str, str]]] = None # [{email, first_name}]
|
||||||
|
|
||||||
|
async def delayed_send(emails: List[Dict[str, str]], scheduled_time: str, db_session_factory):
|
||||||
|
try:
|
||||||
|
# Calculate delay using Berlin Timezone
|
||||||
|
now = datetime.datetime.now(TZ_BERLIN)
|
||||||
|
target_h, target_m = map(int, scheduled_time.split(":"))
|
||||||
|
target_time = now.replace(hour=target_h, minute=target_m, second=0, microsecond=0)
|
||||||
|
|
||||||
|
if target_time < now:
|
||||||
|
target_time += datetime.timedelta(days=1)
|
||||||
|
|
||||||
|
delay_seconds = (target_time - now).total_seconds()
|
||||||
|
logger.info(f"Scheduling {len(emails)} emails for {scheduled_time} Berlin Time (in {delay_seconds} seconds)")
|
||||||
|
|
||||||
|
await asyncio.sleep(delay_seconds)
|
||||||
|
|
||||||
|
# We need a fresh DB session for the background task
|
||||||
|
db = db_session_factory()
|
||||||
|
try:
|
||||||
|
service = GmailService(db)
|
||||||
|
success_count = 0
|
||||||
|
for email_data in emails:
|
||||||
|
if service.send_email(email_data["to"], email_data["subject"], email_data["body"]):
|
||||||
|
success_count += 1
|
||||||
|
await asyncio.sleep(1) # Rate limiting
|
||||||
|
|
||||||
|
logger.info(f"Scheduled send complete: {success_count}/{len(emails)} success.")
|
||||||
|
finally:
|
||||||
|
db.close()
|
||||||
|
except Exception as e:
|
||||||
|
logger.exception("Error in delayed_send background task")
|
||||||
|
|
||||||
|
@router.post("/send")
|
||||||
|
async def send_requests(data: SendReleaseRequest, background_tasks: BackgroundTasks, db: Session = Depends(get_db)):
|
||||||
|
# Store participant names for later (webhook)
|
||||||
|
if data.participants:
|
||||||
|
for p in data.participants:
|
||||||
|
email = p.get("email", "").strip().lower()
|
||||||
|
first_name = p.get("first_name", "").strip()
|
||||||
|
if email and first_name:
|
||||||
|
existing = db.query(ReleaseParticipant).filter(ReleaseParticipant.email == email).first()
|
||||||
|
if existing:
|
||||||
|
existing.first_name = first_name
|
||||||
|
else:
|
||||||
|
db.add(ReleaseParticipant(email=email, first_name=first_name))
|
||||||
|
db.commit()
|
||||||
|
|
||||||
|
if data.scheduled_time:
|
||||||
|
# Pass a way to get a new session to the background task
|
||||||
|
from database import SessionLocal
|
||||||
|
|
||||||
|
# Log to history
|
||||||
|
db.add(ReleaseHistory(recipient_count=len(data.emails), scheduled_time=data.scheduled_time))
|
||||||
|
db.commit()
|
||||||
|
|
||||||
|
background_tasks.add_task(delayed_send, data.emails, data.scheduled_time, SessionLocal)
|
||||||
|
return {"status": "scheduled", "message": f"Versand für {data.scheduled_time} geplant."}
|
||||||
|
|
||||||
|
# Log immediate send to history
|
||||||
|
db.add(ReleaseHistory(recipient_count=len(data.emails), scheduled_time="Sofort"))
|
||||||
|
db.commit()
|
||||||
|
|
||||||
|
# Immediate send
|
||||||
|
service = GmailService(db)
|
||||||
|
success = 0
|
||||||
|
failed = []
|
||||||
|
for email_data in data.emails:
|
||||||
|
if service.send_email(email_data["to"], email_data["subject"], email_data["body"]):
|
||||||
|
success += 1
|
||||||
|
else:
|
||||||
|
failed.append(email_data["to"])
|
||||||
|
|
||||||
|
return {"status": "success", "success": success, "failed": failed}
|
||||||
|
|
||||||
|
@router.get("/history")
|
||||||
|
def get_history(db: Session = Depends(get_db)):
|
||||||
|
history = db.query(ReleaseHistory).order_by(ReleaseHistory.timestamp.desc()).all()
|
||||||
|
return [{"id": h.id, "timestamp": h.timestamp.isoformat(), "recipient_count": h.recipient_count, "scheduled_time": h.scheduled_time} for h in history]
|
||||||
|
|
||||||
|
@router.get("/stats")
|
||||||
|
def get_stats(db: Session = Depends(get_db)):
|
||||||
|
total = db.query(DiscountCode).count()
|
||||||
|
used = db.query(DiscountCode).filter(DiscountCode.is_used == 1).count()
|
||||||
|
available = total - used
|
||||||
|
return {"total": total, "used": used, "available": available}
|
||||||
|
|
||||||
|
@router.get("/responses")
|
||||||
|
def get_responses(db: Session = Depends(get_db)):
|
||||||
|
responses = db.query(DiscountCode).filter(DiscountCode.is_used == 1).all()
|
||||||
|
return [{"email": r.assigned_to_email, "code": r.code, "used_at": r.used_at.isoformat()} for r in responses]
|
||||||
|
|
||||||
|
@router.post("/codes")
|
||||||
|
def upload_codes(data: CodesUpload, db: Session = Depends(get_db)):
|
||||||
|
codes_list = [c.strip() for c in data.codes.split(",") if c.strip()]
|
||||||
|
added = 0
|
||||||
|
for code in set(codes_list):
|
||||||
|
existing = db.query(DiscountCode).filter(DiscountCode.code == code).first()
|
||||||
|
if not existing:
|
||||||
|
new_code = DiscountCode(code=code, is_used=0)
|
||||||
|
db.add(new_code)
|
||||||
|
added += 1
|
||||||
|
db.commit()
|
||||||
|
return {"status": "success", "added": added}
|
||||||
|
|
||||||
|
class WebhookData(BaseModel):
|
||||||
|
email: str
|
||||||
|
|
||||||
|
@router.post("/webhook")
|
||||||
|
async def handle_webhook(request: Request, db: Session = Depends(get_db)):
|
||||||
|
# Try to parse JSON from Google Forms webhook
|
||||||
|
try:
|
||||||
|
data = await request.json()
|
||||||
|
except:
|
||||||
|
raise HTTPException(status_code=400, detail="Invalid JSON")
|
||||||
|
|
||||||
|
# We expect {"email": "..."} or similar from the Google Apps Script
|
||||||
|
email = data.get("email") or data.get("Email")
|
||||||
|
if not email:
|
||||||
|
logger.error(f"Webhook received without email: {data}")
|
||||||
|
return {"status": "error", "message": "Email not found in webhook payload"}
|
||||||
|
|
||||||
|
email = email.strip().lower()
|
||||||
|
|
||||||
|
# Check if this email already got a code
|
||||||
|
already_assigned = db.query(DiscountCode).filter(DiscountCode.assigned_to_email == email).first()
|
||||||
|
if already_assigned:
|
||||||
|
logger.info(f"Email {email} already received code {already_assigned.code}")
|
||||||
|
return {"status": "success", "message": "Already sent"}
|
||||||
|
|
||||||
|
# Get a free code
|
||||||
|
free_code = db.query(DiscountCode).filter(DiscountCode.is_used == 0).first()
|
||||||
|
if not free_code:
|
||||||
|
logger.error("NO FREE DISCOUNT CODES LEFT!")
|
||||||
|
return {"status": "error", "message": "No codes available"}
|
||||||
|
|
||||||
|
# Look up participant name
|
||||||
|
participant = db.query(ReleaseParticipant).filter(ReleaseParticipant.email == email).first()
|
||||||
|
first_name = participant.first_name if participant else "Ihr Lieben"
|
||||||
|
|
||||||
|
# Mark as used
|
||||||
|
free_code.is_used = 1
|
||||||
|
free_code.assigned_to_email = email
|
||||||
|
free_code.used_at = datetime.datetime.utcnow()
|
||||||
|
db.commit()
|
||||||
|
|
||||||
|
# Send Thank You Email with GmailService
|
||||||
|
service = GmailService(db)
|
||||||
|
subject = "Dankeschön für Eure Freigabe & Euer Rabattcode"
|
||||||
|
|
||||||
|
# Image provided by user
|
||||||
|
INSTRUCTIONS_IMAGE_URL = "https://mail.google.com/mail/u/2?ui=2&ik=719adaa3c5&attid=0.1&permmsgid=msg-a:r7482671925923393616&th=196e322c399dbc7f&view=fimg&fur=ip&permmsgid=msg-a:r7482671925923393616&sz=s0-l75-ft&attbid=ANGjdJ9_U6ayMFgwbupt4HalTKO867IHx6N70eNbPfQmTLNzRXilJxI-n8a1gjM8xVcP5HEOgaVxfp3FnJPzTYEEYhK4gSU-Il_0a6OtzFYscp55_W4iyxuxjyPvK4&disp=emb&realattid=ii_maspzxv50&zw"
|
||||||
|
|
||||||
|
body_html = f"""
|
||||||
|
<p>Hallo {first_name},</p>
|
||||||
|
<p>Vielen Dank nochmal für die Freigabe zur Veröffentlichung, das ist super nett von Euch!</p>
|
||||||
|
<p>Hier ist euer Gutscheincode über 25 Euro: <strong style="font-size: 18px; color: #4F46E5;">{free_code.code}</strong></p>
|
||||||
|
<p>Um den Gutschein einzugeben, musst du auf den Preis des Warenkorbs drücken (über dem Button zur Kasse gehen):</p>
|
||||||
|
<p><img src="{INSTRUCTIONS_IMAGE_URL}" alt="Anleitung Gutschein einlösen" style="max-width: 100%; border: 1px solid #ddd; border-radius: 8px;"></p>
|
||||||
|
<p>Liebe Grüße,<br>das Team von Kinderfotos Erding</p>
|
||||||
|
{SIGNATURE_HTML}
|
||||||
|
"""
|
||||||
|
|
||||||
|
try:
|
||||||
|
success = service.send_email(email, subject, body_html)
|
||||||
|
if success:
|
||||||
|
logger.info(f"Successfully sent code {free_code.code} to {email}")
|
||||||
|
return {"status": "success", "message": "Email sent"}
|
||||||
|
else:
|
||||||
|
logger.error(f"Failed to send email to {email}")
|
||||||
|
free_code.is_used = 0
|
||||||
|
free_code.assigned_to_email = None
|
||||||
|
free_code.used_at = None
|
||||||
|
db.commit()
|
||||||
|
return {"status": "error", "message": "Failed to send email"}
|
||||||
|
except Exception as e:
|
||||||
|
logger.exception("Error sending webhook email")
|
||||||
|
free_code.is_used = 0
|
||||||
|
free_code.assigned_to_email = None
|
||||||
|
free_code.used_at = None
|
||||||
|
db.commit()
|
||||||
|
return {"status": "error", "message": str(e)}
|
||||||
@@ -11,7 +11,38 @@ import logging
|
|||||||
|
|
||||||
logger = logging.getLogger("qr-card-generator")
|
logger = logging.getLogger("qr-card-generator")
|
||||||
|
|
||||||
def get_calendly_events_raw(api_token: str, start_time: str, end_time: str, event_type_name: str = None):
|
def get_calendly_event_types(api_token: str):
|
||||||
|
"""
|
||||||
|
Fetches available event types for the current user.
|
||||||
|
"""
|
||||||
|
headers = {
|
||||||
|
'Authorization': f'Bearer {api_token}',
|
||||||
|
'Content-Type': 'application/json'
|
||||||
|
}
|
||||||
|
|
||||||
|
# 1. Get current user info
|
||||||
|
user_url = "https://api.calendly.com/users/me"
|
||||||
|
user_response = requests.get(user_url, headers=headers)
|
||||||
|
if not user_response.ok:
|
||||||
|
raise Exception(f"Calendly API Error: {user_response.status_code}")
|
||||||
|
|
||||||
|
user_data = user_response.json()
|
||||||
|
user_uri = user_data['resource']['uri']
|
||||||
|
|
||||||
|
# 2. Get event types
|
||||||
|
event_types_url = "https://api.calendly.com/event_types"
|
||||||
|
params = {
|
||||||
|
'user': user_uri
|
||||||
|
}
|
||||||
|
|
||||||
|
types_response = requests.get(event_types_url, headers=headers, params=params)
|
||||||
|
if not types_response.ok:
|
||||||
|
raise Exception(f"Calendly API Error: {types_response.status_code}")
|
||||||
|
|
||||||
|
types_data = types_response.json()
|
||||||
|
return types_data['collection']
|
||||||
|
|
||||||
|
def get_calendly_events_raw(api_token: str, start_time: str = None, end_time: str = None, event_type_name: str = None):
|
||||||
"""
|
"""
|
||||||
Debug function to fetch raw Calendly data without formatting.
|
Debug function to fetch raw Calendly data without formatting.
|
||||||
"""
|
"""
|
||||||
@@ -20,6 +51,12 @@ def get_calendly_events_raw(api_token: str, start_time: str, end_time: str, even
|
|||||||
'Content-Type': 'application/json'
|
'Content-Type': 'application/json'
|
||||||
}
|
}
|
||||||
|
|
||||||
|
# Defaults: current time to +2 years
|
||||||
|
if not start_time:
|
||||||
|
start_time = datetime.datetime.utcnow().isoformat() + "Z"
|
||||||
|
if not end_time:
|
||||||
|
end_time = (datetime.datetime.utcnow() + datetime.timedelta(days=730)).isoformat() + "Z"
|
||||||
|
|
||||||
# 1. Get current user info to get the user URI
|
# 1. Get current user info to get the user URI
|
||||||
user_url = "https://api.calendly.com/users/me"
|
user_url = "https://api.calendly.com/users/me"
|
||||||
user_response = requests.get(user_url, headers=headers)
|
user_response = requests.get(user_url, headers=headers)
|
||||||
@@ -33,22 +70,34 @@ def get_calendly_events_raw(api_token: str, start_time: str, end_time: str, even
|
|||||||
events_url = "https://api.calendly.com/scheduled_events"
|
events_url = "https://api.calendly.com/scheduled_events"
|
||||||
params = {
|
params = {
|
||||||
'user': user_uri,
|
'user': user_uri,
|
||||||
|
'status': 'active',
|
||||||
'min_start_time': start_time,
|
'min_start_time': start_time,
|
||||||
'max_start_time': end_time,
|
'max_start_time': end_time,
|
||||||
'status': 'active'
|
'count': 100
|
||||||
}
|
}
|
||||||
|
|
||||||
events_response = requests.get(events_url, headers=headers, params=params)
|
all_events = []
|
||||||
if not events_response.ok:
|
url = events_url
|
||||||
raise Exception(f"Calendly API Error: {events_response.status_code}")
|
|
||||||
|
|
||||||
events_data = events_response.json()
|
while url:
|
||||||
events = events_data['collection']
|
if url == events_url:
|
||||||
|
response = requests.get(url, headers=headers, params=params)
|
||||||
|
else:
|
||||||
|
response = requests.get(url, headers=headers)
|
||||||
|
|
||||||
|
if not response.ok:
|
||||||
|
raise Exception(f"Calendly API Error: {response.status_code} - {response.text}")
|
||||||
|
|
||||||
|
data = response.json()
|
||||||
|
all_events.extend(data.get('collection', []))
|
||||||
|
|
||||||
|
pagination = data.get('pagination', {})
|
||||||
|
url = pagination.get('next_page') # Use the full URL provided by Calendly
|
||||||
|
|
||||||
raw_results = []
|
raw_results = []
|
||||||
|
|
||||||
# 3. Get invitees
|
# 3. Get invitees
|
||||||
for event in events:
|
for event in all_events:
|
||||||
event_name = event.get('name', '')
|
event_name = event.get('name', '')
|
||||||
# Filter by event type if provided
|
# Filter by event type if provided
|
||||||
if event_type_name and event_type_name.lower() not in event_name.lower():
|
if event_type_name and event_type_name.lower() not in event_name.lower():
|
||||||
@@ -75,39 +124,58 @@ def get_calendly_events_raw(api_token: str, start_time: str, end_time: str, even
|
|||||||
|
|
||||||
return raw_results
|
return raw_results
|
||||||
|
|
||||||
def get_calendly_events(api_token: str, start_time: str, end_time: str, event_type_name: str = None):
|
def get_calendly_events(api_token: str, start_time: str = None, end_time: str = None, event_type_name: str = None):
|
||||||
"""
|
"""
|
||||||
Fetches events from Calendly API for the current user within a time range.
|
Fetches events from Calendly API for the current user within a time range.
|
||||||
"""
|
"""
|
||||||
|
from zoneinfo import ZoneInfo
|
||||||
|
|
||||||
raw_data = get_calendly_events_raw(api_token, start_time, end_time, event_type_name)
|
raw_data = get_calendly_events_raw(api_token, start_time, end_time, event_type_name)
|
||||||
formatted_data = []
|
formatted_data = []
|
||||||
|
|
||||||
|
# Calculate midnight today in Berlin time for filtering
|
||||||
|
now_berlin = datetime.datetime.now(ZoneInfo("Europe/Berlin"))
|
||||||
|
midnight_today = now_berlin.replace(hour=0, minute=0, second=0, microsecond=0)
|
||||||
|
|
||||||
for item in raw_data:
|
for item in raw_data:
|
||||||
# Parse start time
|
# Parse start time from UTC
|
||||||
start_dt = datetime.datetime.fromisoformat(item['start_time'].replace('Z', '+00:00'))
|
start_dt = datetime.datetime.fromisoformat(item['start_time'].replace('Z', '+00:00'))
|
||||||
|
# Convert to Europe/Berlin (CET/CEST)
|
||||||
|
start_dt = start_dt.astimezone(ZoneInfo("Europe/Berlin"))
|
||||||
|
|
||||||
|
# Filter out past events
|
||||||
|
if start_dt < midnight_today:
|
||||||
|
logger.debug(f"Skipping past event: {item['invitee_name']} at {start_dt}")
|
||||||
|
continue
|
||||||
|
|
||||||
|
logger.info(f"Processing event: {item['invitee_name']} at {start_dt}")
|
||||||
# Format as HH:MM
|
# Format as HH:MM
|
||||||
time_str = start_dt.strftime('%H:%M')
|
time_str = start_dt.strftime('%H:%M')
|
||||||
|
|
||||||
name = item['invitee_name']
|
name = item['invitee_name']
|
||||||
|
|
||||||
# Extract specific answers from the Calendly form
|
# Extract specific answers from the Calendly form
|
||||||
# We look for the number of children and any additional notes
|
|
||||||
num_children = ""
|
num_children = ""
|
||||||
additional_notes = ""
|
additional_notes = ""
|
||||||
|
has_consent = False
|
||||||
questions_and_answers = item.get('questions_and_answers', [])
|
questions_and_answers = item.get('questions_and_answers', [])
|
||||||
|
|
||||||
|
|
||||||
for q_a in questions_and_answers:
|
for q_a in questions_and_answers:
|
||||||
q_text = q_a.get('question', '').lower()
|
q_text = q_a.get('question', '').lower()
|
||||||
a_text = q_a.get('answer', '')
|
a_text = q_a.get('answer', '')
|
||||||
|
|
||||||
if "wie viele kinder" in q_text:
|
# Flexible matching for number of children
|
||||||
|
if any(kw in q_text for kw in ["wie viele kinder", "anzahl kinder", "wieviele kinder"]):
|
||||||
num_children = a_text
|
num_children = a_text
|
||||||
elif "nachricht" in q_text or "anmerkung" in q_text:
|
elif "nachricht" in q_text or "anmerkung" in q_text:
|
||||||
# If there's a custom notes field in some events
|
|
||||||
additional_notes = a_text
|
additional_notes = a_text
|
||||||
|
elif "veröffentlichen" in q_text or "bilder" in q_text:
|
||||||
|
if "ja" in a_text.lower():
|
||||||
|
has_consent = True
|
||||||
|
|
||||||
# Construct the final string: "Name, X Kinder // HH:MM Uhr (Notes)"
|
|
||||||
# matching: Halime Türe, 1 Kind // 12:00 Uhr
|
# Construct the final string: "Name, X Kinder // HH:MM Uhr ☑"
|
||||||
final_text = f"{name}"
|
final_text = f"{name}"
|
||||||
if num_children:
|
if num_children:
|
||||||
final_text += f", {num_children}"
|
final_text += f", {num_children}"
|
||||||
@@ -117,20 +185,21 @@ def get_calendly_events(api_token: str, start_time: str, end_time: str, event_ty
|
|||||||
if additional_notes:
|
if additional_notes:
|
||||||
final_text += f" ({additional_notes})"
|
final_text += f" ({additional_notes})"
|
||||||
|
|
||||||
|
if has_consent:
|
||||||
|
final_text += " ☑"
|
||||||
|
|
||||||
formatted_data.append(final_text)
|
formatted_data.append(final_text)
|
||||||
|
|
||||||
|
|
||||||
logger.info(f"Processed {len(formatted_data)} invitees.")
|
logger.info(f"Processed {len(formatted_data)} invitees.")
|
||||||
return formatted_data
|
return formatted_data
|
||||||
|
|
||||||
|
|
||||||
def overlay_text_on_pdf(base_pdf_path: str, output_pdf_path: str, texts: list):
|
def overlay_text_on_pdf(base_pdf_path: str, output_pdf_path: str, texts: list):
|
||||||
"""
|
"""
|
||||||
Overlays text from the `texts` list onto a base PDF.
|
|
||||||
Expects two text entries per page (top and bottom element).
|
|
||||||
Coordinates are in mm from bottom-left (ReportLab default).
|
|
||||||
Target:
|
Target:
|
||||||
Element 1: X: 72mm, Y: 22mm (from top-left in user spec, need to convert)
|
Element 1: X: 72mm, Y: 22mm + 9mm = 31mm
|
||||||
Element 2: X: 72mm, Y: 171mm (from top-left in user spec, need to convert)
|
Element 2: X: 72mm, Y: 171mm + 9mm = 180mm
|
||||||
"""
|
"""
|
||||||
|
|
||||||
# Convert mm to points (1 mm = 2.83465 points)
|
# Convert mm to points (1 mm = 2.83465 points)
|
||||||
@@ -141,12 +210,12 @@ def overlay_text_on_pdf(base_pdf_path: str, output_pdf_path: str, texts: list):
|
|||||||
|
|
||||||
# User coordinates are from top-left.
|
# User coordinates are from top-left.
|
||||||
# ReportLab uses bottom-left as (0,0).
|
# ReportLab uses bottom-left as (0,0).
|
||||||
# Element 1 (Top): X = 72mm, Y = 22mm (from top) -> Y = page_height - 22mm
|
# Element 1 (Top): X = 72mm, Y = 31mm (from top) -> Y = page_height - 31mm
|
||||||
# Element 2 (Bottom): X = 72mm, Y = 171mm (from top) -> Y = page_height - 171mm
|
# Element 2 (Bottom): X = 72mm, Y = 180mm (from top) -> Y = page_height - 180mm
|
||||||
|
|
||||||
x_pos = 72 * mm_to_pt
|
x_pos = 72 * mm_to_pt
|
||||||
y_pos_1 = page_height - (22 * mm_to_pt)
|
y_pos_1 = page_height - (31 * mm_to_pt)
|
||||||
y_pos_2 = page_height - (171 * mm_to_pt)
|
y_pos_2 = page_height - (180 * mm_to_pt)
|
||||||
|
|
||||||
reader = PdfReader(base_pdf_path)
|
reader = PdfReader(base_pdf_path)
|
||||||
writer = PdfWriter()
|
writer = PdfWriter()
|
||||||
@@ -161,21 +230,46 @@ def overlay_text_on_pdf(base_pdf_path: str, output_pdf_path: str, texts: list):
|
|||||||
# We need to process pairs of texts for each page
|
# We need to process pairs of texts for each page
|
||||||
text_pairs = [texts[i:i+2] for i in range(0, len(texts), 2)]
|
text_pairs = [texts[i:i+2] for i in range(0, len(texts), 2)]
|
||||||
|
|
||||||
|
|
||||||
|
# Load OpenSans font to support UTF-8 extended characters
|
||||||
|
from reportlab.pdfbase.ttfonts import TTFont
|
||||||
|
from reportlab.pdfbase import pdfmetrics
|
||||||
|
font_path = os.path.join(os.path.dirname(__file__), "assets", "OpenSans-Regular.ttf")
|
||||||
|
pdfmetrics.registerFont(TTFont('OpenSans', font_path))
|
||||||
|
|
||||||
for page_idx, pair in enumerate(text_pairs):
|
for page_idx, pair in enumerate(text_pairs):
|
||||||
if page_idx >= total_pages:
|
if page_idx >= total_pages:
|
||||||
break # Should be caught by the truncation above, but safety first
|
break # Safety first
|
||||||
|
|
||||||
# Create a new blank page in memory to draw the text
|
# Create a new blank page in memory to draw the text
|
||||||
packet = io.BytesIO()
|
packet = io.BytesIO()
|
||||||
can = canvas.Canvas(packet, pagesize=A4)
|
can = canvas.Canvas(packet, pagesize=A4)
|
||||||
|
|
||||||
# Draw the text.
|
# Draw the text.
|
||||||
can.setFont("Helvetica", 12)
|
def draw_text_with_checkbox(can, x, y, text):
|
||||||
|
can.setFont("OpenSans", 12)
|
||||||
|
if text.endswith(" ☑"):
|
||||||
|
clean_text = text[:-2] # remove the checkmark part
|
||||||
|
can.drawString(x, y, clean_text)
|
||||||
|
|
||||||
|
# Calculate width to place the checkbox right after the text
|
||||||
|
text_width = can.stringWidth(clean_text, "OpenSans", 12)
|
||||||
|
box_x = x + text_width + 8
|
||||||
|
|
||||||
|
size = 10
|
||||||
|
can.rect(box_x, y - 1, size, size)
|
||||||
|
can.setLineWidth(1.5)
|
||||||
|
can.line(box_x + 2, y + 3, box_x + 4.5, y + 0.5)
|
||||||
|
can.line(box_x + 4.5, y + 0.5, box_x + 8.5, y + 7)
|
||||||
|
can.setLineWidth(1)
|
||||||
|
else:
|
||||||
|
can.drawString(x, y, text)
|
||||||
|
|
||||||
if len(pair) > 0:
|
if len(pair) > 0:
|
||||||
can.drawString(x_pos, y_pos_1, pair[0])
|
|
||||||
|
draw_text_with_checkbox(can, x_pos, y_pos_1, pair[0])
|
||||||
if len(pair) > 1:
|
if len(pair) > 1:
|
||||||
can.drawString(x_pos, y_pos_2, pair[1])
|
draw_text_with_checkbox(can, x_pos, y_pos_2, pair[1])
|
||||||
|
|
||||||
can.save()
|
can.save()
|
||||||
packet.seek(0)
|
packet.seek(0)
|
||||||
@@ -198,3 +292,62 @@ def overlay_text_on_pdf(base_pdf_path: str, output_pdf_path: str, texts: list):
|
|||||||
writer.write(output_file)
|
writer.write(output_file)
|
||||||
|
|
||||||
logger.info(f"Successfully generated overlaid PDF at {output_pdf_path}")
|
logger.info(f"Successfully generated overlaid PDF at {output_pdf_path}")
|
||||||
|
|
||||||
|
|
||||||
|
def generate_siblings_qr_overlay(base_pdf_path: str, output_pdf_path: str, families: list):
|
||||||
|
import io
|
||||||
|
from PyPDF2 import PdfReader, PdfWriter
|
||||||
|
from reportlab.pdfgen import canvas
|
||||||
|
from reportlab.lib.pagesizes import A4
|
||||||
|
from reportlab.pdfbase import pdfmetrics
|
||||||
|
from reportlab.pdfbase.ttfonts import TTFont
|
||||||
|
import os
|
||||||
|
|
||||||
|
font_path = os.path.join(os.path.dirname(__file__), "assets", "OpenSans-Regular.ttf")
|
||||||
|
if os.path.exists(font_path):
|
||||||
|
pdfmetrics.registerFont(TTFont('OpenSans', font_path))
|
||||||
|
font_name = 'OpenSans'
|
||||||
|
else:
|
||||||
|
font_name = 'Helvetica'
|
||||||
|
|
||||||
|
mm_to_pt = 2.83465
|
||||||
|
page_width, page_height = A4
|
||||||
|
x_pos = 72 * mm_to_pt
|
||||||
|
y_pos_1 = page_height - (31 * mm_to_pt)
|
||||||
|
y_pos_2 = page_height - (180 * mm_to_pt)
|
||||||
|
|
||||||
|
reader = PdfReader(base_pdf_path)
|
||||||
|
writer = PdfWriter()
|
||||||
|
|
||||||
|
family_idx = 0
|
||||||
|
total_families = len(families)
|
||||||
|
|
||||||
|
for i in range(len(reader.pages)):
|
||||||
|
page = reader.pages[i]
|
||||||
|
|
||||||
|
if family_idx < total_families:
|
||||||
|
packet = io.BytesIO()
|
||||||
|
c = canvas.Canvas(packet, pagesize=A4)
|
||||||
|
c.setFont(font_name, 11)
|
||||||
|
|
||||||
|
# First card on the page
|
||||||
|
if family_idx < total_families:
|
||||||
|
text_top = f"Geschwisterbilder Familie {families[family_idx]['nachname']}"
|
||||||
|
c.drawString(x_pos, y_pos_1, text_top)
|
||||||
|
family_idx += 1
|
||||||
|
|
||||||
|
# Second card on the page
|
||||||
|
if family_idx < total_families:
|
||||||
|
text_bottom = f"Geschwisterbilder Familie {families[family_idx]['nachname']}"
|
||||||
|
c.drawString(x_pos, y_pos_2, text_bottom)
|
||||||
|
family_idx += 1
|
||||||
|
|
||||||
|
c.save()
|
||||||
|
packet.seek(0)
|
||||||
|
overlay_pdf = PdfReader(packet)
|
||||||
|
page.merge_page(overlay_pdf.pages[0])
|
||||||
|
|
||||||
|
writer.add_page(page)
|
||||||
|
|
||||||
|
with open(output_pdf_path, "wb") as output_file:
|
||||||
|
writer.write(output_file)
|
||||||
|
|||||||
@@ -11,3 +11,7 @@ sqlalchemy==2.0.31
|
|||||||
requests==2.31.0
|
requests==2.31.0
|
||||||
reportlab==4.0.9
|
reportlab==4.0.9
|
||||||
PyPDF2==3.0.1
|
PyPDF2==3.0.1
|
||||||
|
tzdata
|
||||||
|
google-api-python-client==2.122.0
|
||||||
|
google-auth-httplib2==0.2.0
|
||||||
|
google-auth-oauthlib==1.2.0
|
||||||
|
|||||||
183
fotograf-de-scraper/backend/siblings_logic.py
Normal file
183
fotograf-de-scraper/backend/siblings_logic.py
Normal file
@@ -0,0 +1,183 @@
|
|||||||
|
import pandas as pd
|
||||||
|
import os
|
||||||
|
import logging
|
||||||
|
from jinja2 import Environment, FileSystemLoader
|
||||||
|
from collections import defaultdict
|
||||||
|
from main import get_berlin_now_str, get_logo_base64
|
||||||
|
from weasyprint import HTML
|
||||||
|
|
||||||
|
logger = logging.getLogger("fotograf-scraper")
|
||||||
|
|
||||||
|
def generate_siblings_pdf_from_csv(csv_path: str, institution: str, calendly_events: list, list_type: str, output_path: str):
    """Render the "siblings list" PDF for one institution.

    Reads the institution's child CSV export, groups children that share a
    parent e-mail address (= sibling families), enriches each family with the
    photographer-wish column and a matching Calendly appointment time, then
    renders the ``siblings_list.html`` Jinja template to a PDF file.

    Args:
        csv_path: Path to the exported child CSV (``;`` or ``,`` separated).
        institution: Institution name shown in the PDF header.
        calendly_events: Event dicts with ``start_time`` (ISO 8601, possibly
            ``Z``-suffixed) and ``invitee_email`` keys.
        list_type: Currently unused; kept for interface compatibility.
        output_path: Destination path for the generated PDF.

    Raises:
        Exception: If the CSV cannot be parsed with any supported
            separator/encoding combination.
    """
    logger.info(f"Generating Siblings PDF for {institution} from {csv_path}")

    # Sniff the separator: try ";" first (German exports), then ",".  A probe
    # read of 5 rows must yield more than one column for a separator to count.
    df = None
    for sep in [";", ","]:
        try:
            test_df = pd.read_csv(csv_path, sep=sep, encoding="utf-8-sig", nrows=5)
            if len(test_df.columns) > 1:
                df = pd.read_csv(csv_path, sep=sep, encoding="utf-8-sig")
                break
        except Exception:
            continue

    if df is None:
        # Last resort: legacy exports are sometimes Latin-1 encoded.
        try:
            df = pd.read_csv(csv_path, sep=";", encoding="latin1")
        except Exception:
            raise Exception("CSV konnte nicht gelesen werden.")

    # Normalise header names (the export wraps them in quotes).
    df.columns = df.columns.str.strip().str.replace('"', "")

    # Identify the e-mail column.
    email_col = next((c for c in df.columns if "email" in c.lower()), None)
    if not email_col:
        email_col = next((c for c in df.columns if "e-mail" in c.lower()), None)

    if not email_col:
        logger.warning("No email column found. Siblings logic cannot run.")
        families = []
    else:
        # Column mappings (all matched case-insensitively).
        group_col = next((c for c in df.columns if c.lower() in ["gruppe", "klasse", "group", "class"]), None)
        lastname_col = next((c for c in df.columns if "nachname" in c.lower()), None)
        firstname_col = next((c for c in df.columns if "vorname" in c.lower()), None)
        # Photographer-wish column.  NOTE: the parentheses fix an operator
        # precedence bug — the intent is (familie OR geschwister) AND fotos;
        # the literal "familie / geschwister" header is covered by the
        # explicit fallback below.
        wunsch_col = next((c for c in df.columns if ("familie" in c.lower() or "geschwister" in c.lower()) and "fotos" in c.lower()), None)
        if not wunsch_col:
            wunsch_col = next((c for c in df.columns if "familie / geschwister" in c.lower()), None)

        # Build a Calendly dictionary for fast lookup (email -> local time).
        from zoneinfo import ZoneInfo
        import datetime

        calendly_map = {}
        for event in calendly_events:
            try:
                start_dt = datetime.datetime.fromisoformat(event['start_time'].replace('Z', '+00:00'))
                start_dt = start_dt.astimezone(ZoneInfo("Europe/Berlin"))
                calendly_map[event['invitee_email'].lower().strip()] = start_dt.strftime("%d.%m. %H:%M")
            except Exception:
                # Malformed events are skipped; the family simply shows no time.
                pass

        families_dict = defaultdict(list)
        df = df.fillna("")

        # Group children by normalised parent e-mail address.
        for _, row in df.iterrows():
            email = str(row[email_col]).strip().lower()
            if email and "@" in email:
                families_dict[email].append(row)

        families = []
        for email, rows in families_dict.items():
            if len(rows) > 1:  # SIBLINGS DETECTED
                family_last_name = str(rows[0][lastname_col]).strip() if lastname_col else "Unbekannt"

                children = []
                for r in rows:
                    child_first = str(r[firstname_col]).strip() if firstname_col else ""
                    child_group = str(r[group_col]).strip() if group_col else ""
                    children.append({"vorname": child_first, "gruppe": child_group})

                # Any child marked "ja"/"familien"/"geschwister" counts as a
                # photographer wish for the whole family.
                fotograf_wunsch = False
                if wunsch_col:
                    for r in rows:
                        val = str(r[wunsch_col]).lower()
                        if "ja" in val or "familien" in val or "geschwister" in val:
                            fotograf_wunsch = True
                            break

                calendly_time = calendly_map.get(email, None)

                families.append({
                    "nachname": family_last_name,
                    "children": children,
                    "fotograf_wunsch": fotograf_wunsch,
                    "calendly_time": calendly_time
                })

        # Sort families alphabetically by last name for the printed list.
        families.sort(key=lambda x: x["nachname"])

    # Render the Jinja template and convert it to a PDF.
    template_dir = os.path.join(os.path.dirname(__file__), "templates")
    env = Environment(loader=FileSystemLoader(template_dir))
    template = env.get_template("siblings_list.html")

    current_time = get_berlin_now_str()
    logo_base64 = get_logo_base64()

    render_context = {
        "institution": institution,
        "current_time": current_time,
        "logo_base64": logo_base64,
        "families": families
    }

    html_out = template.render(render_context)
    pdf = HTML(string=html_out).write_pdf()

    with open(output_path, "wb") as f:
        f.write(pdf)
    logger.info(f"Siblings PDF saved to {output_path}")
|
||||||
|
|
||||||
|
def get_sibling_families_from_csv(csv_path: str, calendly_events: list = None) -> list:
    """Return sibling families from a child CSV that still need a QR card.

    Children sharing one parent e-mail address count as a sibling family.
    Families whose e-mail already appears in ``calendly_events`` (i.e. they
    already booked an appointment) are filtered out.

    Args:
        csv_path: Path to the exported child CSV (``;`` or ``,`` separated).
        calendly_events: Optional list of event dicts; only the
            ``invitee_email`` key is used for the filtering.

    Returns:
        List of ``{"nachname": <family last name>}`` dicts, sorted by name.
        Empty list if no e-mail column can be identified.

    Raises:
        Exception: If the CSV cannot be parsed at all.
    """
    # Sniff the separator (";" preferred, then ","): a 5-row probe must yield
    # more than one column for the separator to be accepted.
    df = None
    for sep in [";", ","]:
        try:
            test_df = pd.read_csv(csv_path, sep=sep, encoding="utf-8-sig", nrows=5)
            if len(test_df.columns) > 1:
                df = pd.read_csv(csv_path, sep=sep, encoding="utf-8-sig")
                break
        except Exception:
            continue

    if df is None:
        # Last resort: legacy exports are sometimes Latin-1 encoded.
        try:
            df = pd.read_csv(csv_path, sep=";", encoding="latin1")
        except Exception:
            raise Exception("CSV konnte nicht gelesen werden.")

    # Normalise header names (the export wraps them in quotes).
    df.columns = df.columns.str.strip().str.replace('"', "")

    email_col = next((c for c in df.columns if "email" in c.lower()), None)
    if not email_col:
        email_col = next((c for c in df.columns if "e-mail" in c.lower()), None)

    if not email_col:
        return []

    lastname_col = next((c for c in df.columns if "nachname" in c.lower()), None)

    # Build the Calendly e-mail set used to filter out booked families.
    booked_emails = set()
    if calendly_events:
        for event in calendly_events:
            email = event.get('invitee_email', '').lower().strip()
            if email:
                booked_emails.add(email)

    families_dict = defaultdict(list)
    df = df.fillna("")

    # Group children by normalised parent e-mail address.
    for _, row in df.iterrows():
        email = str(row[email_col]).strip().lower()
        if email and "@" in email:
            families_dict[email].append(row)

    families = []
    for email, rows in families_dict.items():
        if len(rows) > 1:  # SIBLINGS DETECTED
            # FILTER OUT if they already have an appointment
            if email in booked_emails:
                logger.info(f"Family {email} already has Calendly appointment, skipping QR card.")
                continue

            family_last_name = str(rows[0][lastname_col]).strip() if lastname_col else "Unbekannt"
            families.append({
                "nachname": family_last_name
            })

    families.sort(key=lambda x: x["nachname"])
    return families
|
||||||
202
fotograf-de-scraper/backend/templates/appointment_list.html
Normal file
202
fotograf-de-scraper/backend/templates/appointment_list.html
Normal file
@@ -0,0 +1,202 @@
|
|||||||
|
<!DOCTYPE html>
<!-- Printable appointment overview: one page (header + table) per date. -->
<html lang="de">
<head>
    <meta charset="UTF-8">
    <title>Terminübersicht</title>
    <style>
        /* A4 page with a "Seite X von Y" counter in the bottom-right corner. */
        @page {
            size: A4 portrait;
            margin: 20mm;
            @bottom-right {
                content: "Seite " counter(page) " von " counter(pages);
                font-family: Arial, sans-serif;
                font-size: 10pt;
                color: #666;
            }
        }
        body {
            font-family: Arial, sans-serif;
            font-size: 11pt;
            color: #333;
            line-height: 1.4;
        }
        .header {
            display: flex;
            justify-content: space-between;
            align-items: flex-start;
            border-bottom: 2px solid #ddd;
            padding-bottom: 10px;
            margin-bottom: 20px;
        }
        .header-text {
            flex: 1;
        }
        .header-logo {
            width: 150px;
            text-align: right;
        }
        .header-logo img {
            max-width: 100%;
            height: auto;
        }
        h1 {
            font-size: 16pt;
            margin: 0 0 5px 0;
            color: #2c3e50;
        }
        h2 {
            font-size: 14pt;
            margin: 0 0 10px 0;
            color: #34495e;
        }

        /* Date band above each table.  page-break-before is neutralised by
           .first-date-header (same specificity, later rule wins); the actual
           per-date page break is the explicit div in the body loop. */
        .date-header {
            background-color: #ecf0f1;
            padding: 8px 12px;
            margin-top: 20px;
            margin-bottom: 10px;
            font-weight: bold;
            font-size: 13pt;
            border-left: 4px solid #3498db;
            page-break-before: always;
        }
        .first-date-header {
            page-break-before: avoid;
        }
        table {
            width: 100%;
            border-collapse: collapse;
            margin-bottom: 20px;
            page-break-inside: auto;
        }
        th, td {
            border: 1px solid #bdc3c7;
            padding: 6px 8px; /* Narrower rows */
            text-align: left;
            vertical-align: middle;
        }
        /* Merged the two conflicting .empty-row td rules (25px vs 35px);
           35px was the effective value (last rule wins) and leaves enough
           space for handwriting. */
        .empty-row td {
            height: 35px; /* ensure enough space for writing */
            color: transparent; /* visually hide "Empty" text but keep structure if any */
        }
        .compressed-row td {
            background-color: #fcfcfc;
            color: #7f8c8d !important;
            font-style: italic;
            text-align: center;
        }
        .time-col { width: 14%; white-space: nowrap; font-weight: bold; }
        .family-col { width: 33%; }

        .children-col { width: 15%; text-align: center; }
        .consent-col { width: 20%; text-align: center; }
        .done-col { width: 18%; text-align: center; }

        /* CSS Checkmark (Ja) */
        .consent-yes {
            display: inline-block;
            width: 16px;
            height: 16px;
            border: 2px solid #333;
            border-radius: 3px;
            position: relative;
            background-color: #fcfcfc;
        }
        .consent-yes::after {
            content: '';
            position: absolute;
            left: 4px;
            top: 0px;
            width: 5px;
            height: 10px;
            border: solid #27ae60;
            border-width: 0 3px 3px 0;
            transform: rotate(45deg);
        }
        /* The checkbox square */
        .checkbox-square {
            display: inline-block;
            width: 18px;
            height: 18px;
            border: 1px solid #333;
            background-color: #fff;
            position: relative;
        }

    </style>
</head>

<body>
{% for date, slots in grouped_slots.items() %}

    {% if not loop.first %}
    <div style="page-break-before: always;"></div>
    {% endif %}

    <div class="header">
        <div class="header-text">
            <h1>{{ event_type_name }}</h1>
            <p>Auftrag: {{ job_name }} | Stand: {{ current_time }}</p>
        </div>
        <div class="header-logo">
            {% if logo_base64 %}
            <img src="data:image/png;base64,{{ logo_base64 }}" alt="Logo">
            {% endif %}
        </div>
    </div>

    <div class="date-header first-date-header">{{ date }}</div>

    <table>
        <thead>
            <tr>
                <th class="time-col">Uhrzeit</th>
                <th class="family-col">Familie</th>
                <th class="children-col">Kinder</th>
                <th class="consent-col">Veröffentlichung</th>
                <th class="done-col">Erledigt</th>
            </tr>
        </thead>
        <tbody>
            {% for slot in slots %}
            {% if slot.is_compressed %}
            <tr class="compressed-row">
                <td class="time-col" style="color: #7f8c8d;">{{ slot.time_str }}</td>
                <td colspan="4">{{ slot.name }}</td>
            </tr>
            {% else %}
            <tr class="{% if not slot.booked %}empty-row{% endif %}">
                <td class="time-col" style="color: #333;">{{ slot.time_str }}</td>
                <td class="family-col">{{ slot.name if slot.booked else '' }}</td>
                <td class="children-col">{{ slot.children if slot.booked else '' }}</td>
                <td class="consent-col">

                    {% if slot.booked and slot.consent %}
                    <span class="consent-yes"></span>

                    {% elif slot.booked %}
                    <!-- nein -->
                    {% else %}
                    <!-- leer -->
                    {% endif %}
                </td>
                <td class="done-col">
                    {% if slot.booked %}
                    <span class="checkbox-square"></span>
                    {% endif %}
                </td>
            </tr>
            {% endif %}
            {% endfor %}
        </tbody>
    </table>
{% endfor %}
</body>

</html>
|
||||||
90
fotograf-de-scraper/backend/templates/siblings_list.html
Normal file
90
fotograf-de-scraper/backend/templates/siblings_list.html
Normal file
@@ -0,0 +1,90 @@
|
|||||||
|
<!DOCTYPE html>
<!-- Internal siblings list: one row per family with children in the same
     institution, rendered server-side via Jinja2 and converted to PDF. -->
<html>
<head>
    <meta charset="utf-8">
    <style>
        @page { size: A4 portrait; margin: 20mm; }
        body { font-family: Arial, sans-serif; font-size: 11pt; }
        .header { margin-bottom: 20px; display: flex; justify-content: space-between; align-items: center; }
        .institution-name { font-weight: bold; font-size: 16pt; margin-bottom: 5px; }
        .doc-title { font-size: 14pt; font-weight: bold; color: #4f46e5; margin-bottom: 15px; }
        .date-info { font-size: 11pt; color: #555; }

        table { width: 100%; border-collapse: collapse; margin-top: 15px; }
        th { text-align: left; background-color: #f3f4f6; border-bottom: 2px solid #d1d5db; padding: 8px 5px; font-size: 10pt; }
        td { padding: 8px 5px; border-bottom: 1px solid #e5e7eb; font-size: 10pt; vertical-align: top; }

        /* Empty square for ticking off a family by hand. */
        .checkbox { width: 20px; height: 20px; border: 1.5px solid #000; border-radius: 3px; display: inline-block; }

        .footer { position: fixed; bottom: 0; left: 0; right: 0; display: flex; justify-content: space-between; font-size: 9pt; color: #888; }
        .badge { display: inline-block; padding: 2px 6px; border-radius: 4px; font-size: 8.5pt; font-weight: bold; background-color: #e0e7ff; color: #3730a3; margin-left: 5px; }
        .badge-time { background-color: #d1fae5; color: #065f46; font-size: 10pt; }
    </style>
</head>
<body>
    <!-- Header: institution name / document title on the left, logo on the right. -->
    <div class="header" style="display: flex; justify-content: space-between; align-items: center;">
        <div>
            <div class="institution-name">{{ institution }}</div>
            <div class="doc-title">Geschwisterliste (Einrichtungsintern)</div>
            <div class="date-info">Generiert am: {{ current_time }}</div>
        </div>
        {% if logo_base64 %}
        <div>
            <img src="data:image/png;base64,{{ logo_base64 }}" alt="Logo" style="max-height: 50px;">
        </div>
        {% endif %}
    </div>

    <table>
        <thead>
            <tr>
                <th style="width: 20%">Nachname</th>
                <th style="width: 35%">Kinder in der Einrichtung (Gruppe)</th>
                <th style="width: 15%">Wunsch Online</th>
                <th style="width: 20%">Termin (Calendly)</th>
                <th style="width: 10%; text-align: center;">Erledigt</th>
            </tr>
        </thead>
        <tbody>
            <!-- One row per family; the for/else shows a placeholder row when
                 the families list is empty. -->
            {% for family in families %}
            <tr>
                <td style="font-weight: bold;">{{ family.nachname }}</td>
                <td>
                    {% for child in family.children %}
                    <div style="margin-bottom: 4px;">
                        {{ child.vorname }} <span style="color: #666; font-size: 9pt;">({{ child.gruppe }})</span>
                    </div>
                    {% endfor %}
                </td>
                <td>
                    {% if family.fotograf_wunsch %}
                    <span style="color: #059669; font-weight: bold;">Ja</span>
                    {% else %}
                    <span style="color: #9ca3af;">-</span>
                    {% endif %}
                </td>
                <td>
                    {% if family.calendly_time %}
                    <span class="badge badge-time">{{ family.calendly_time }}</span>
                    {% else %}
                    <span style="color: #9ca3af;">-</span>
                    {% endif %}
                </td>
                <td style="text-align: center;">
                    <div class="checkbox"></div>
                </td>
            </tr>
            {% else %}
            <tr>
                <td colspan="5" style="text-align: center; padding: 20px; color: #666;">Keine internen Geschwisterkinder in dieser Einrichtung gefunden.</td>
            </tr>
            {% endfor %}
        </tbody>
    </table>

    <div class="footer">
        <div>Geschwisterliste</div>
        <div>Kinderfotos Erding | www.kinderfotos-erding.de</div>
    </div>
</body>
</html>
|
||||||
44
fotograf-de-scraper/backend/test_thank_you_mail.py
Normal file
44
fotograf-de-scraper/backend/test_thank_you_mail.py
Normal file
@@ -0,0 +1,44 @@
|
|||||||
|
import sys
import os
sys.path.append('/app/fotograf-de-scraper/backend')

from database import SessionLocal, ReleaseParticipant, DiscountCode
from gmail_service import GmailService
from publish_request_api import SIGNATURE_HTML


def test_webhook_mail():
    """Manual smoke test: send the 'thank you + discount code' mail once.

    Requires an authenticated Gmail setup and a reachable database.  The DB
    session is now closed in a finally block so it no longer leaks when
    GmailService construction or sending raises.
    """
    db = SessionLocal()
    try:
        # Simulate data
        test_email = "floke.com@gmail.com"
        first_name = "Christian"
        test_code = "M984AU-TEST"

        # Simulate logic
        service = GmailService(db)
        subject = "Dankeschön für Eure Freigabe & Euer Rabattcode"

        # Screenshot explaining where to enter the voucher code in the shop.
        INSTRUCTIONS_IMAGE_URL = "https://mail.google.com/mail/u/2?ui=2&ik=719adaa3c5&attid=0.1&permmsgid=msg-a:r7482671925923393616&th=196e322c399dbc7f&view=fimg&fur=ip&permmsgid=msg-a:r7482671925923393616&sz=s0-l75-ft&attbid=ANGjdJ9_U6ayMFgwbupt4HalTKO867IHx6N70eNbPfQmTLNzRXilJxI-n8a1gjM8xVcP5HEOgaVxfp3FnJPzTYmEEyhK4gSU-Il_0a6OtzFYscp55_W4iyxuxjyPvK4&disp=emb&realattid=ii_maspzxv50&zw"

        body_html = f"""
<p>Hallo {first_name},</p>
<p>Vielen Dank nochmal für die Freigabe zur Veröffentlichung, das ist super nett von Euch!</p>
<p>Hier ist euer Gutscheincode über 25 Euro: <strong style="font-size: 18px; color: #4F46E5;">{test_code}</strong></p>
<p>Um den Gutschein einzugeben, musst du auf den Preis des Warenkorbs drücken (über dem Button zur Kasse gehen):</p>
<p><img src="{INSTRUCTIONS_IMAGE_URL}" alt="Anleitung Gutschein einlösen" style="max-width: 100%; border: 1px solid #ddd; border-radius: 8px;"></p>
<p>Liebe Grüße,<br>das Team von Kinderfotos Erding</p>
{SIGNATURE_HTML}
"""

        print(f"Sende Test-E-Mail an {test_email}...")
        success = service.send_email(test_email, subject, body_html)

        if success:
            print("✅ E-Mail erfolgreich gesendet! Bitte prüfe dein Postfach.")
        else:
            print("❌ Fehler beim Senden. (Stelle sicher, dass Gmail Authentifiziert ist).")
    finally:
        # Always release the DB session, even when sending raised.
        db.close()


if __name__ == "__main__":
    test_webhook_mail()
|
||||||
@@ -3,6 +3,9 @@ FROM node:20-alpine AS builder
|
|||||||
|
|
||||||
WORKDIR /app
|
WORKDIR /app
|
||||||
|
|
||||||
|
# Accept build arguments
|
||||||
|
ARG VITE_API_BASE_URL
|
||||||
|
|
||||||
# Copy package.json and package-lock.json
|
# Copy package.json and package-lock.json
|
||||||
COPY package*.json ./
|
COPY package*.json ./
|
||||||
|
|
||||||
@@ -12,6 +15,9 @@ RUN npm install
|
|||||||
# Copy the rest of the application source code
|
# Copy the rest of the application source code
|
||||||
COPY . .
|
COPY . .
|
||||||
|
|
||||||
|
# Write the build arg to .env.production so Vite picks it up during build
|
||||||
|
RUN echo "VITE_API_BASE_URL=${VITE_API_BASE_URL}" > .env.production
|
||||||
|
|
||||||
# Build the application
|
# Build the application
|
||||||
RUN npm run build
|
RUN npm run build
|
||||||
|
|
||||||
|
|||||||
1
fotograf-de-scraper/frontend/dist/assets/index-BnIZj8RP.css
vendored
Normal file
1
fotograf-de-scraper/frontend/dist/assets/index-BnIZj8RP.css
vendored
Normal file
File diff suppressed because one or more lines are too long
47
fotograf-de-scraper/frontend/dist/assets/index-DnGj5v5p.js
vendored
Normal file
47
fotograf-de-scraper/frontend/dist/assets/index-DnGj5v5p.js
vendored
Normal file
File diff suppressed because one or more lines are too long
3
fotograf-de-scraper/frontend/dist/favicon.svg
vendored
Normal file
3
fotograf-de-scraper/frontend/dist/favicon.svg
vendored
Normal file
@@ -0,0 +1,3 @@
|
|||||||
|
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 100 100">
|
||||||
|
<text y=".9em" font-size="90">📸</text>
|
||||||
|
</svg>
|
||||||
|
After Width: | Height: | Size: 113 B |
24
fotograf-de-scraper/frontend/dist/icons.svg
vendored
Normal file
24
fotograf-de-scraper/frontend/dist/icons.svg
vendored
Normal file
@@ -0,0 +1,24 @@
|
|||||||
|
<svg xmlns="http://www.w3.org/2000/svg">
|
||||||
|
<symbol id="bluesky-icon" viewBox="0 0 16 17">
|
||||||
|
<g clip-path="url(#bluesky-clip)"><path fill="#08060d" d="M7.75 7.735c-.693-1.348-2.58-3.86-4.334-5.097-1.68-1.187-2.32-.981-2.74-.79C.188 2.065.1 2.812.1 3.251s.241 3.602.398 4.13c.52 1.744 2.367 2.333 4.07 2.145-2.495.37-4.71 1.278-1.805 4.512 3.196 3.309 4.38-.71 4.987-2.746.608 2.036 1.307 5.91 4.93 2.746 2.72-2.746.747-4.143-1.747-4.512 1.702.189 3.55-.4 4.07-2.145.156-.528.397-3.691.397-4.13s-.088-1.186-.575-1.406c-.42-.19-1.06-.395-2.741.79-1.755 1.24-3.64 3.752-4.334 5.099"/></g>
|
||||||
|
<defs><clipPath id="bluesky-clip"><path fill="#fff" d="M.1.85h15.3v15.3H.1z"/></clipPath></defs>
|
||||||
|
</symbol>
|
||||||
|
<symbol id="discord-icon" viewBox="0 0 20 19">
|
||||||
|
<path fill="#08060d" d="M16.224 3.768a14.5 14.5 0 0 0-3.67-1.153c-.158.286-.343.67-.47.976a13.5 13.5 0 0 0-4.067 0c-.128-.306-.317-.69-.476-.976A14.4 14.4 0 0 0 3.868 3.77C1.546 7.28.916 10.703 1.231 14.077a14.7 14.7 0 0 0 4.5 2.306q.545-.748.965-1.587a9.5 9.5 0 0 1-1.518-.74q.191-.14.372-.293c2.927 1.369 6.107 1.369 8.999 0q.183.152.372.294-.723.437-1.52.74.418.838.963 1.588a14.6 14.6 0 0 0 4.504-2.308c.37-3.911-.63-7.302-2.644-10.309m-9.13 8.234c-.878 0-1.599-.82-1.599-1.82 0-.998.705-1.82 1.6-1.82.894 0 1.614.82 1.599 1.82.001 1-.705 1.82-1.6 1.82m5.91 0c-.878 0-1.599-.82-1.599-1.82 0-.998.705-1.82 1.6-1.82.893 0 1.614.82 1.599 1.82 0 1-.706 1.82-1.6 1.82"/>
|
||||||
|
</symbol>
|
||||||
|
<symbol id="documentation-icon" viewBox="0 0 21 20">
|
||||||
|
<path fill="none" stroke="#aa3bff" stroke-linecap="round" stroke-linejoin="round" stroke-width="1.35" d="m15.5 13.333 1.533 1.322c.645.555.967.833.967 1.178s-.322.623-.967 1.179L15.5 18.333m-3.333-5-1.534 1.322c-.644.555-.966.833-.966 1.178s.322.623.966 1.179l1.534 1.321"/>
|
||||||
|
<path fill="none" stroke="#aa3bff" stroke-linecap="round" stroke-linejoin="round" stroke-width="1.35" d="M17.167 10.836v-4.32c0-1.41 0-2.117-.224-2.68-.359-.906-1.118-1.621-2.08-1.96-.599-.21-1.349-.21-2.848-.21-2.623 0-3.935 0-4.983.369-1.684.591-3.013 1.842-3.641 3.428C3 6.449 3 7.684 3 10.154v2.122c0 2.558 0 3.838.706 4.726q.306.383.713.671c.76.536 1.79.64 3.581.66"/>
|
||||||
|
<path fill="none" stroke="#aa3bff" stroke-linecap="round" stroke-linejoin="round" stroke-width="1.35" d="M3 10a2.78 2.78 0 0 1 2.778-2.778c.555 0 1.209.097 1.748-.047.48-.129.854-.503.982-.982.145-.54.048-1.194.048-1.749a2.78 2.78 0 0 1 2.777-2.777"/>
|
||||||
|
</symbol>
|
||||||
|
<symbol id="github-icon" viewBox="0 0 19 19">
|
||||||
|
<path fill="#08060d" fill-rule="evenodd" d="M9.356 1.85C5.05 1.85 1.57 5.356 1.57 9.694a7.84 7.84 0 0 0 5.324 7.44c.387.079.528-.168.528-.376 0-.182-.013-.805-.013-1.454-2.165.467-2.616-.935-2.616-.935-.349-.91-.864-1.143-.864-1.143-.71-.48.051-.48.051-.48.787.051 1.2.805 1.2.805.695 1.194 1.817.857 2.268.649.064-.507.27-.857.49-1.052-1.728-.182-3.545-.857-3.545-3.87 0-.857.31-1.558.8-2.104-.078-.195-.349-1 .077-2.078 0 0 .657-.208 2.14.805a7.5 7.5 0 0 1 1.946-.26c.657 0 1.328.092 1.946.26 1.483-1.013 2.14-.805 2.14-.805.426 1.078.155 1.883.078 2.078.502.546.799 1.247.799 2.104 0 3.013-1.818 3.675-3.558 3.87.284.247.528.714.528 1.454 0 1.052-.012 1.896-.012 2.156 0 .208.142.455.528.377a7.84 7.84 0 0 0 5.324-7.441c.013-4.338-3.48-7.844-7.773-7.844" clip-rule="evenodd"/>
|
||||||
|
</symbol>
|
||||||
|
<symbol id="social-icon" viewBox="0 0 20 20">
|
||||||
|
<path fill="none" stroke="#aa3bff" stroke-linecap="round" stroke-linejoin="round" stroke-width="1.35" d="M12.5 6.667a4.167 4.167 0 1 0-8.334 0 4.167 4.167 0 0 0 8.334 0"/>
|
||||||
|
<path fill="none" stroke="#aa3bff" stroke-linecap="round" stroke-linejoin="round" stroke-width="1.35" d="M2.5 16.667a5.833 5.833 0 0 1 8.75-5.053m3.837.474.513 1.035c.07.144.257.282.414.309l.93.155c.596.1.736.536.307.965l-.723.73a.64.64 0 0 0-.152.531l.207.903c.164.715-.213.991-.84.618l-.872-.52a.63.63 0 0 0-.577 0l-.872.52c-.624.373-1.003.094-.84-.618l.207-.903a.64.64 0 0 0-.152-.532l-.723-.729c-.426-.43-.289-.864.306-.964l.93-.156a.64.64 0 0 0 .412-.31l.513-1.034c.28-.562.735-.562 1.012 0"/>
|
||||||
|
</symbol>
|
||||||
|
<symbol id="x-icon" viewBox="0 0 19 19">
|
||||||
|
<path fill="#08060d" fill-rule="evenodd" d="M1.893 1.98c.052.072 1.245 1.769 2.653 3.77l2.892 4.114c.183.261.333.48.333.486s-.068.089-.152.183l-.522.593-.765.867-3.597 4.087c-.375.426-.734.834-.798.905a1 1 0 0 0-.118.148c0 .01.236.017.664.017h.663l.729-.83c.4-.457.796-.906.879-.999a692 692 0 0 0 1.794-2.038c.034-.037.301-.34.594-.675l.551-.624.345-.392a7 7 0 0 1 .34-.374c.006 0 .93 1.306 2.052 2.903l2.084 2.965.045.063h2.275c1.87 0 2.273-.003 2.266-.021-.008-.02-1.098-1.572-3.894-5.547-2.013-2.862-2.28-3.246-2.273-3.266.008-.019.282-.332 2.085-2.38l2-2.274 1.567-1.782c.022-.028-.016-.03-.65-.03h-.674l-.3.342a871 871 0 0 1-1.782 2.025c-.067.075-.405.458-.75.852a100 100 0 0 1-.803.91c-.148.172-.299.344-.99 1.127-.304.343-.32.358-.345.327-.015-.019-.904-1.282-1.976-2.808L6.365 1.85H1.8zm1.782.91 8.078 11.294c.772 1.08 1.413 1.973 1.425 1.984.016.017.241.02 1.05.017l1.03-.004-2.694-3.766L7.796 5.75 5.722 2.852l-1.039-.004-1.039-.004z" clip-rule="evenodd"/>
|
||||||
|
</symbol>
|
||||||
|
</svg>
|
||||||
|
After Width: | Height: | Size: 4.9 KiB |
14
fotograf-de-scraper/frontend/dist/index.html
vendored
Normal file
14
fotograf-de-scraper/frontend/dist/index.html
vendored
Normal file
@@ -0,0 +1,14 @@
|
|||||||
|
<!doctype html>
|
||||||
|
<html lang="en">
|
||||||
|
<head>
|
||||||
|
<meta charset="UTF-8" />
|
||||||
|
<link rel="icon" type="image/svg+xml" href="/fotograf-de/favicon.svg" />
|
||||||
|
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
|
||||||
|
<title>Fotograf.de ERP</title>
|
||||||
|
<script type="module" crossorigin src="/fotograf-de/assets/index-DnGj5v5p.js"></script>
|
||||||
|
<link rel="stylesheet" crossorigin href="/fotograf-de/assets/index-BnIZj8RP.css">
|
||||||
|
</head>
|
||||||
|
<body>
|
||||||
|
<div id="root"></div>
|
||||||
|
</body>
|
||||||
|
</html>
|
||||||
@@ -4,7 +4,7 @@
|
|||||||
<meta charset="UTF-8" />
|
<meta charset="UTF-8" />
|
||||||
<link rel="icon" type="image/svg+xml" href="/favicon.svg" />
|
<link rel="icon" type="image/svg+xml" href="/favicon.svg" />
|
||||||
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
|
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
|
||||||
<title>frontend</title>
|
<title>Fotograf.de ERP</title>
|
||||||
</head>
|
</head>
|
||||||
<body>
|
<body>
|
||||||
<div id="root"></div>
|
<div id="root"></div>
|
||||||
|
|||||||
1
fotograf-de-scraper/frontend/node_modules/.tmp/tsconfig.app.tsbuildinfo
generated
vendored
Normal file
1
fotograf-de-scraper/frontend/node_modules/.tmp/tsconfig.app.tsbuildinfo
generated
vendored
Normal file
@@ -0,0 +1 @@
|
|||||||
|
{"root":["../../src/App.tsx","../../src/main.tsx"],"version":"5.9.3"}
|
||||||
1
fotograf-de-scraper/frontend/node_modules/.tmp/tsconfig.node.tsbuildinfo
generated
vendored
Normal file
1
fotograf-de-scraper/frontend/node_modules/.tmp/tsconfig.node.tsbuildinfo
generated
vendored
Normal file
@@ -0,0 +1 @@
|
|||||||
|
{"root":["../../vite.config.ts"],"version":"5.9.3"}
|
||||||
File diff suppressed because one or more lines are too long
|
Before Width: | Height: | Size: 9.3 KiB After Width: | Height: | Size: 113 B |
File diff suppressed because it is too large
Load Diff
@@ -4,4 +4,5 @@ import react from '@vitejs/plugin-react'
|
|||||||
// https://vite.dev/config/
|
// https://vite.dev/config/
|
||||||
export default defineConfig({
|
export default defineConfig({
|
||||||
plugins: [react()],
|
plugins: [react()],
|
||||||
|
base: '/fotograf-de/', // Ensures assets are loaded with the correct prefix behind NGINX
|
||||||
})
|
})
|
||||||
|
|||||||
53
google_forms_webhook.js
Normal file
53
google_forms_webhook.js
Normal file
@@ -0,0 +1,53 @@
|
|||||||
|
// SETUP INSTRUCTIONS FOR THE GOOGLE FORM
// 1. Open your Google Form (publication release form).
// 2. Click the three-dot menu at the top right and choose "Script editor".
// 3. Paste this code there and save (Ctrl+S).
// 4. Replace WEBHOOK_URL with your correct domain.
// 5. In the script editor, click the "clock" icon on the left (Triggers).
// 6. Add a new trigger:
//    - Function: onSubmit
//    - Event source: From form
//    - Event type: On form submit
// 7. Accept the permissions requested by Google.

const WEBHOOK_URL = "https://floke-ai.duckdns.org/fotograf-de-api/api/publish-request/webhook";

function onSubmit(e) {
  try {
    // Collect all answers of the submitted response.
    const answers = e.response.getItemResponses();
    // Only populated when "Collect email addresses" is enabled on the form.
    let email = e.response.getRespondentEmail();

    // Fallback: if global e-mail collection is off, look for a field whose
    // title contains "e-mail" or "email".
    if (!email) {
      for (let idx = 0; idx < answers.length; idx++) {
        const fieldTitle = answers[idx].getItem().getTitle().toLowerCase();
        if (fieldTitle.indexOf("e-mail") !== -1 || fieldTitle.indexOf("email") !== -1) {
          email = answers[idx].getResponse();
          break;
        }
      }
    }

    if (!email) {
      Logger.log("Keine E-Mail-Adresse gefunden.");
      return;
    }

    // Forward the e-mail address to the backend webhook as JSON.
    const requestOptions = {
      "method": "post",
      "contentType": "application/json",
      "payload": JSON.stringify({ "email": email })
    };

    UrlFetchApp.fetch(WEBHOOK_URL, requestOptions);
    Logger.log("Webhook erfolgreich an " + WEBHOOK_URL + " gesendet. Email: " + email);

  } catch (err) {
    Logger.log("Fehler: " + err.toString());
  }
}
|
||||||
66
modify_compose_for_user.py
Normal file
66
modify_compose_for_user.py
Normal file
@@ -0,0 +1,66 @@
|
|||||||
|
import yaml

# Read the current compose definition from disk.
with open('docker-compose.yml', 'r') as compose_file:
    compose = yaml.safe_load(compose_file)

all_services = compose.get('services', {})

# Core services that survive the rewrite; everything else is dropped.
core_services = [
    'nginx',
    'dashboard',
    'company-explorer',
    'transcription-tool'
]

# Keep only the core services (original insertion order is preserved).
kept_services = {name: spec for name, spec in all_services.items() if name in core_services}

# The fotograf-de stack lives in subdirectories but belongs to the main stack,
# so its two services are (re-)added explicitly.
kept_services['fotograf-de-scraper-backend'] = {
    'build': {
        'context': './fotograf-de-scraper/backend',
        'dockerfile': 'Dockerfile'
    },
    'container_name': 'fotograf-de-scraper-backend',
    'env_file': ['./fotograf-de-scraper/.env'],
    'environment': ['TZ=Europe/Berlin'],
    'ports': ['8002:8000'],
    'volumes': [
        './fotograf-de-scraper/backend:/app',
        './fotograf-de-scraper/backend/data:/app/data'
    ],
    'restart': 'unless-stopped'
}

kept_services['fotograf-de-scraper-frontend'] = {
    'build': {
        'context': './fotograf-de-scraper/frontend',
        'dockerfile': 'Dockerfile',
        'args': {'VITE_API_BASE_URL': '/fotograf-de-api'}
    },
    'container_name': 'fotograf-de-scraper-frontend',
    'ports': ['3009:80'],
    'depends_on': ['fotograf-de-scraper-backend'],
    'restart': 'unless-stopped'
}

# Re-point nginx's depends_on at the services that still exist.
nginx_service = kept_services.get('nginx')
if nginx_service is not None and 'depends_on' in nginx_service:
    nginx_service['depends_on'] = {
        'dashboard': {'condition': 'service_started'},
        'company-explorer': {'condition': 'service_healthy'},
        'transcription-tool': {'condition': 'service_started'},
        'fotograf-de-scraper-frontend': {'condition': 'service_started'}
    }

compose['services'] = kept_services

# Drop volume definitions no remaining service mounts.
compose['volumes'] = {'transcription_uploads': {}}

# Write the rewritten compose file back, keeping key order as inserted.
with open('docker-compose.yml', 'w') as compose_file:
    yaml.dump(compose, compose_file, sort_keys=False)
|
||||||
@@ -38,47 +38,47 @@ http {
|
|||||||
proxy_set_header Connection "upgrade";
|
proxy_set_header Connection "upgrade";
|
||||||
}
|
}
|
||||||
|
|
||||||
location /lead/ {
|
# location /lead/ {
|
||||||
auth_basic "Restricted Access - Local AI Suite";
|
# auth_basic "Restricted Access - Local AI Suite";
|
||||||
auth_basic_user_file /etc/nginx/.htpasswd;
|
# auth_basic_user_file /etc/nginx/.htpasswd;
|
||||||
proxy_pass http://lead-engine:8501/;
|
# proxy_pass http://lead-engine:8501/;
|
||||||
proxy_set_header Host $host;
|
# proxy_set_header Host $host;
|
||||||
proxy_set_header X-Real-IP $remote_addr;
|
# proxy_set_header X-Real-IP $remote_addr;
|
||||||
proxy_set_header Upgrade $http_upgrade;
|
# proxy_set_header Upgrade $http_upgrade;
|
||||||
proxy_set_header Connection "upgrade";
|
# proxy_set_header Connection "upgrade";
|
||||||
proxy_http_version 1.1;
|
# proxy_http_version 1.1;
|
||||||
proxy_read_timeout 86400;
|
# proxy_read_timeout 86400;
|
||||||
}
|
# }
|
||||||
|
|
||||||
location /gtm/ {
|
# location /gtm/ {
|
||||||
auth_basic "Restricted Access - Local AI Suite";
|
# auth_basic "Restricted Access - Local AI Suite";
|
||||||
auth_basic_user_file /etc/nginx/.htpasswd;
|
# auth_basic_user_file /etc/nginx/.htpasswd;
|
||||||
proxy_pass http://gtm-architect:3005/;
|
# proxy_pass http://gtm-architect:3005/;
|
||||||
proxy_set_header Host $host;
|
# proxy_set_header Host $host;
|
||||||
proxy_set_header X-Real-IP $remote_addr;
|
# proxy_set_header X-Real-IP $remote_addr;
|
||||||
proxy_set_header Upgrade $http_upgrade;
|
# proxy_set_header Upgrade $http_upgrade;
|
||||||
proxy_set_header Connection "upgrade";
|
# proxy_set_header Connection "upgrade";
|
||||||
}
|
# }
|
||||||
|
|
||||||
location /market/ {
|
# location /market/ {
|
||||||
auth_basic "Restricted Access - Local AI Suite";
|
# auth_basic "Restricted Access - Local AI Suite";
|
||||||
auth_basic_user_file /etc/nginx/.htpasswd;
|
# auth_basic_user_file /etc/nginx/.htpasswd;
|
||||||
proxy_pass http://market-intelligence:3001/;
|
# proxy_pass http://market-intelligence:3001/;
|
||||||
proxy_set_header Host $host;
|
# proxy_set_header Host $host;
|
||||||
proxy_set_header X-Real-IP $remote_addr;
|
# proxy_set_header X-Real-IP $remote_addr;
|
||||||
proxy_set_header Upgrade $http_upgrade;
|
# proxy_set_header Upgrade $http_upgrade;
|
||||||
proxy_set_header Connection "upgrade";
|
# proxy_set_header Connection "upgrade";
|
||||||
}
|
# }
|
||||||
|
|
||||||
location /b2b/ {
|
# location /b2b/ {
|
||||||
auth_basic "Restricted Access - Local AI Suite";
|
# auth_basic "Restricted Access - Local AI Suite";
|
||||||
auth_basic_user_file /etc/nginx/.htpasswd;
|
# auth_basic_user_file /etc/nginx/.htpasswd;
|
||||||
proxy_pass http://b2b-marketing-assistant:3002/;
|
# proxy_pass http://b2b-marketing-assistant:3002/;
|
||||||
proxy_set_header Host $host;
|
# proxy_set_header Host $host;
|
||||||
proxy_set_header X-Real-IP $remote_addr;
|
# proxy_set_header X-Real-IP $remote_addr;
|
||||||
proxy_set_header Upgrade $http_upgrade;
|
# proxy_set_header Upgrade $http_upgrade;
|
||||||
proxy_set_header Connection "upgrade";
|
# proxy_set_header Connection "upgrade";
|
||||||
}
|
# }
|
||||||
|
|
||||||
location /tr/ {
|
location /tr/ {
|
||||||
auth_basic "Restricted Access - Local AI Suite";
|
auth_basic "Restricted Access - Local AI Suite";
|
||||||
@@ -91,72 +91,93 @@ http {
|
|||||||
proxy_set_header Connection "upgrade";
|
proxy_set_header Connection "upgrade";
|
||||||
}
|
}
|
||||||
|
|
||||||
location /content/ {
|
# location /content/ {
|
||||||
auth_basic "Restricted Access - Local AI Suite";
|
# auth_basic "Restricted Access - Local AI Suite";
|
||||||
auth_basic_user_file /etc/nginx/.htpasswd;
|
# auth_basic_user_file /etc/nginx/.htpasswd;
|
||||||
proxy_pass http://content-engine:3000/;
|
# proxy_pass http://content-engine:3000/;
|
||||||
proxy_set_header Host $host;
|
# proxy_set_header Host $host;
|
||||||
proxy_set_header X-Real-IP $remote_addr;
|
# proxy_set_header X-Real-IP $remote_addr;
|
||||||
proxy_set_header Upgrade $http_upgrade;
|
# proxy_set_header Upgrade $http_upgrade;
|
||||||
proxy_set_header Connection "upgrade";
|
# proxy_set_header Connection "upgrade";
|
||||||
}
|
# }
|
||||||
|
|
||||||
location /competitor/ {
|
# location /competitor/ {
|
||||||
auth_basic "Restricted Access - Local AI Suite";
|
# auth_basic "Restricted Access - Local AI Suite";
|
||||||
auth_basic_user_file /etc/nginx/.htpasswd;
|
# auth_basic_user_file /etc/nginx/.htpasswd;
|
||||||
proxy_pass http://competitor-analysis:3000/;
|
# proxy_pass http://competitor-analysis:3000/;
|
||||||
proxy_set_header Host $host;
|
# proxy_set_header Host $host;
|
||||||
proxy_set_header X-Real-IP $remote_addr;
|
# proxy_set_header X-Real-IP $remote_addr;
|
||||||
proxy_set_header Upgrade $http_upgrade;
|
# proxy_set_header Upgrade $http_upgrade;
|
||||||
proxy_set_header Connection "upgrade";
|
# proxy_set_header Connection "upgrade";
|
||||||
}
|
# }
|
||||||
|
|
||||||
location /heatmap/ {
|
# location /heatmap/ {
|
||||||
auth_basic "Restricted Access - Local AI Suite";
|
# auth_basic "Restricted Access - Local AI Suite";
|
||||||
auth_basic_user_file /etc/nginx/.htpasswd;
|
# auth_basic_user_file /etc/nginx/.htpasswd;
|
||||||
proxy_pass http://heatmap-frontend:80/;
|
# proxy_pass http://heatmap-frontend:80/;
|
||||||
proxy_set_header Host $host;
|
# proxy_set_header Host $host;
|
||||||
proxy_set_header X-Real-IP $remote_addr;
|
# proxy_set_header X-Real-IP $remote_addr;
|
||||||
proxy_set_header Upgrade $http_upgrade;
|
# proxy_set_header Upgrade $http_upgrade;
|
||||||
proxy_set_header Connection "upgrade";
|
# proxy_set_header Connection "upgrade";
|
||||||
}
|
# }
|
||||||
|
|
||||||
location /feedback/ {
|
# location /feedback/ {
|
||||||
auth_basic off;
|
# auth_basic off;
|
||||||
proxy_http_version 1.1;
|
# proxy_http_version 1.1;
|
||||||
proxy_set_header Upgrade $http_upgrade;
|
# proxy_set_header Upgrade $http_upgrade;
|
||||||
proxy_set_header Connection "upgrade";
|
# proxy_set_header Connection "upgrade";
|
||||||
rewrite ^/feedback/(.*)$ /$1 break;
|
# rewrite ^/feedback/(.*)$ /$1 break;
|
||||||
proxy_pass http://lead-engine:8004;
|
# proxy_pass http://lead-engine:8004;
|
||||||
proxy_set_header Host $host;
|
# proxy_set_header Host $host;
|
||||||
proxy_set_header X-Real-IP $remote_addr;
|
# proxy_set_header X-Real-IP $remote_addr;
|
||||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
# proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||||
proxy_set_header X-Forwarded-Proto $scheme;
|
# proxy_set_header X-Forwarded-Proto $scheme;
|
||||||
}
|
# }
|
||||||
|
|
||||||
# Smartlead Webhooks (public)
|
# Smartlead Webhooks (public)
|
||||||
location /public/smartlead/ {
|
# location /public/smartlead/ {
|
||||||
auth_basic off;
|
# auth_basic off;
|
||||||
# Rewrite the URL to remove the public prefix and pass the rest to the webhook handler
|
# # Rewrite the URL to remove the public prefix and pass the rest to the webhook handler
|
||||||
# e.g., /public/smartlead/hot-lead -> /webhook/hot-lead
|
# # e.g., /public/smartlead/hot-lead -> /webhook/hot-lead
|
||||||
rewrite ^/public/smartlead/(.*)$ /webhook/$1 break;
|
# rewrite ^/public/smartlead/(.*)$ /webhook/$1 break;
|
||||||
|
#
|
||||||
|
# proxy_pass http://lead-engine:8004;
|
||||||
|
# proxy_set_header Host $host;
|
||||||
|
# proxy_set_header X-Real-IP $remote_addr;
|
||||||
|
# proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||||
|
# proxy_set_header X-Forwarded-Proto $scheme;
|
||||||
|
# }
|
||||||
|
|
||||||
proxy_pass http://lead-engine:8004;
|
# location /connector/ {
|
||||||
|
# auth_basic off;
|
||||||
|
# proxy_pass http://connector-superoffice:8000/;
|
||||||
|
# proxy_set_header Host $host;
|
||||||
|
# proxy_set_header X-Real-IP $remote_addr;
|
||||||
|
# proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||||
|
# proxy_set_header X-Forwarded-Proto $scheme;
|
||||||
|
# proxy_set_header Upgrade $http_upgrade;
|
||||||
|
# proxy_set_header Connection "upgrade";
|
||||||
|
# }
|
||||||
|
|
||||||
|
location /fotograf-de/ {
|
||||||
|
auth_basic "Restricted Access - Local AI Suite";
|
||||||
|
auth_basic_user_file /etc/nginx/.htpasswd;
|
||||||
|
proxy_pass http://fotograf-de-scraper-frontend:80/;
|
||||||
proxy_set_header Host $host;
|
proxy_set_header Host $host;
|
||||||
proxy_set_header X-Real-IP $remote_addr;
|
proxy_set_header X-Real-IP $remote_addr;
|
||||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
|
||||||
proxy_set_header X-Forwarded-Proto $scheme;
|
|
||||||
}
|
|
||||||
|
|
||||||
location /connector/ {
|
|
||||||
auth_basic off;
|
|
||||||
proxy_pass http://connector-superoffice:8000/;
|
|
||||||
proxy_set_header Host $host;
|
|
||||||
proxy_set_header X-Real-IP $remote_addr;
|
|
||||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
|
||||||
proxy_set_header X-Forwarded-Proto $scheme;
|
|
||||||
proxy_set_header Upgrade $http_upgrade;
|
proxy_set_header Upgrade $http_upgrade;
|
||||||
proxy_set_header Connection "upgrade";
|
proxy_set_header Connection "upgrade";
|
||||||
}
|
}
|
||||||
|
|
||||||
|
# Fotograf.de Backend API (internal)
|
||||||
|
location /fotograf-de-api/ {
|
||||||
|
auth_basic off;
|
||||||
|
rewrite ^/fotograf-de-api/(.*)$ /$1 break;
|
||||||
|
proxy_pass http://fotograf-de-scraper-backend:8000;
|
||||||
|
proxy_set_header Host $host;
|
||||||
|
proxy_set_header X-Real-IP $remote_addr;
|
||||||
|
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||||
|
proxy_set_header X-Forwarded-Proto $scheme;
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
240
readme.md
240
readme.md
@@ -100,3 +100,243 @@ Investierte Zeit in dieser Session: 01:51
|
|||||||
Arbeitszusammenfassung:
|
Arbeitszusammenfassung:
|
||||||
Keine Zusammenfassung angegeben.
|
Keine Zusammenfassung angegeben.
|
||||||
```
|
```
|
||||||
|
|
||||||
|
|
||||||
|
## 🤖 Status-Update (2026-03-21 14:35 Berlin Time)
|
||||||
|
```yaml
|
||||||
|
Investierte Zeit in dieser Session: 00:22
|
||||||
|
|
||||||
|
Arbeitszusammenfassung:
|
||||||
|
Investierte Zeit in dieser Session: 00:30
|
||||||
|
|
||||||
|
Arbeitszusammenfassung:
|
||||||
|
Zusammenfassung der Ergebnisse:
|
||||||
|
|
||||||
|
1. QR-Karten Tool (Feinschliff):
|
||||||
|
* Die Y-Achse für den Andruck wurde um 9 mm nach unten korrigiert, sodass die Texte nun perfekt auf den Linien der Blankokarten sitzen. Die X-Achse bleibt bei 72 mm.
|
||||||
|
* Zeitzonen-Unterstützung integriert: Die aus der Calendly-API importierten Termine (UTC) werden jetzt automatisch in die Mitteleuropäische Zeit (Europe/Berlin) konvertiert (z. B. 12:00 Uhr statt 10:00 Uhr).
|
||||||
|
* Einwilligungs-Feature: Die Skripte prüfen nun, ob in Calendly der Veröffentlichung von Bildern ("Ja, gerne") zugestimmt wurde. Falls ja, wird ein manuell gezeichnetes Checkbox-Häkchen (☑) vor dem Namen im PDF angedruckt.
|
||||||
|
|
||||||
|
Neue Anforderungen für die nächste Session (im System erfasst):
|
||||||
|
* Workflow-Änderung: Das QR-Karten-Tool wird vom globalen Header in die auftragsspezifische Ansicht verschoben.
|
||||||
|
* Dynamische Event-Auswahl: Nutzer müssen pro Auftrag das spezifische Calendly-Event auswählen. Die Datumsauswahl entfällt dadurch.
|
||||||
|
* Neues PDF-Feature: Erstellung einer Übersichtsliste aller Termine (inklusive Lücken / Blank-Spacing für nicht gebuchte Termine im 6-Minuten-Takt).
|
||||||
|
```
|
||||||
|
|
||||||
|
|
||||||
|
## 🤖 Status-Update (2026-03-21 15:07 Berlin Time)
|
||||||
|
```yaml
|
||||||
|
Investierte Zeit in dieser Session: 00:32
|
||||||
|
|
||||||
|
Arbeitszusammenfassung:
|
||||||
|
Investierte Zeit in dieser Session: 01:15
|
||||||
|
|
||||||
|
Arbeitszusammenfassung:
|
||||||
|
Zusammenfassung der Ergebnisse:
|
||||||
|
|
||||||
|
1. QR-Karten Tool (Feinschliff):
|
||||||
|
* Die Y-Achse wurde um weitere 9 mm nach unten korrigiert (jetzt 31mm / 180mm), um perfekt auf den Linien zu sitzen.
|
||||||
|
* Volle Zeitzonen-Unterstützung (Europe/Berlin) für korrekte Uhrzeiten im PDF.
|
||||||
|
* Automatischer Andruck einer manuell gezeichneten Checkbox (☑) bei vorliegender Bildveröffentlichungseinwilligung aus Calendly.
|
||||||
|
|
||||||
|
2. Shooting-Planung (Integration):
|
||||||
|
* Das Tool wurde vom globalen Header direkt in die Detailansicht der Fotoaufträge verschoben.
|
||||||
|
* Dynamische Auswahl des Calendly-Event-Typs (z.B. "Neuching") über ein Dropdown-Menü. Die manuelle Datumseingabe entfällt.
|
||||||
|
|
||||||
|
3. Termin-Übersichtsliste (Neu):
|
||||||
|
* Generierung einer A4-PDF-Tabelle für den Shooting-Tag.
|
||||||
|
* Automatisches 6-Minuten-Raster zwischen erstem und letztem Termin, inklusive "Blank-Spacing" (leere Zeilen) für nicht gebuchte Slots.
|
||||||
|
* Layout mit Logo (oben rechts), Auftragsname (oben links) und Spalten für Familie, Kinder, Veröffentlichung und Erledigt-Häkchen.
|
||||||
|
|
||||||
|
4. Technische Fixes & Stabilität:
|
||||||
|
* Calendly-Pagination-Bug behoben: Das System blättert nun durch alle Ergebnisseiten, um auch bei über 100 Terminen alle Buchungen zu finden.
|
||||||
|
* Syntaxfehler in qr_generator.py korrigiert.
|
||||||
|
* README.md im Scraper-Verzeichnis auf den neuesten Stand gebracht.
|
||||||
|
```
|
||||||
|
|
||||||
|
|
||||||
|
## 🤖 Status-Update (2026-03-21 20:26 Berlin Time)
|
||||||
|
```yaml
|
||||||
|
Investierte Zeit in dieser Session: 05:18
|
||||||
|
|
||||||
|
Arbeitszusammenfassung:
|
||||||
|
Investierte Zeit in dieser Session: 00:30 (Zusatz-Fixes)
|
||||||
|
|
||||||
|
Arbeitszusammenfassung:
|
||||||
|
Zusammenfassung der Ergebnisse:
|
||||||
|
|
||||||
|
1. Unicode- & Font-Fix:
|
||||||
|
* Einbindung von OpenSans-Regular.ttf zur korrekten Darstellung von Sonderzeichen (ć, ł, etc.) auf QR-Karten und Listen.
|
||||||
|
|
||||||
|
2. Layout-Optimierungen:
|
||||||
|
* PDF-Liste: Zeilenabstände verringert für höhere Datendichte pro Seite.
|
||||||
|
* "Pausen-Management": Automatische Komprimierung von mehr als zwei aufeinanderfolgenden freien Slots zu einer kompakten "Pause"-Zeile.
|
||||||
|
* Header-Fix: Automatisches Entfernen von "JOBXXXXX" Präfixen aus dem Auftragsnamen.
|
||||||
|
* Page-Breaks: Erzwungener Seitenumbruch pro Shooting-Tag inkl. Header-Wiederholung.
|
||||||
|
|
||||||
|
3. Consent-Logik (Synchronisation):
|
||||||
|
* Angleichung der Einwilligungserkennung für QR-Karten und Listen (Suche nach "veröffentlichen"/"bilder" + "ja").
|
||||||
|
* Positionierung des ☑ Symbols am Ende der Textzeile bei QR-Karten.
|
||||||
|
```
|
||||||
|
|
||||||
|
|
||||||
|
## 🤖 Status-Update (2026-03-21 20:32 Berlin Time)
|
||||||
|
```yaml
|
||||||
|
Investierte Zeit in dieser Session: 00:05
|
||||||
|
|
||||||
|
Arbeitszusammenfassung:
|
||||||
|
Investierte Zeit in dieser Session: 01:00
|
||||||
|
|
||||||
|
Arbeitszusammenfassung:
|
||||||
|
Zusammenfassung der Ergebnisse:
|
||||||
|
|
||||||
|
1. Feature 3: Nachfass-E-Mails (Supermailer) implementiert:
|
||||||
|
* Portierung der Legacy-Scraping-Logik in den Microservice.
|
||||||
|
* Neuer Hintergrund-Task analysiert Käuferverhalten, identifiziert Nicht-Käufer mit 0-1 Logins und extrahiert E-Mail-Adressen sowie Schnell-Login-Links.
|
||||||
|
* Aggregations-Logik fasst mehrere Kinder pro E-Mail-Adresse zusammen (z.B. "Fotos von Max und Moritz").
|
||||||
|
* Neuer API-Endpunkt generiert eine fertige CSV-Datei für den Supermailer (UTF-8-SIG für Excel-Kompatibilität).
|
||||||
|
|
||||||
|
2. UI-Integration:
|
||||||
|
* Tool 3 im Auftrags-Modal ist nun aktiv.
|
||||||
|
* Echtzeit-Fortschrittsanzeige während der (langen) Analyse.
|
||||||
|
* Download-Button erscheint automatisch nach Abschluss der Analyse.
|
||||||
|
```
|
||||||
|
|
||||||
|
|
||||||
|
## 🤖 Status-Update (2026-03-21 20:56 Berlin Time)
|
||||||
|
```yaml
|
||||||
|
Investierte Zeit in dieser Session: 00:24
|
||||||
|
|
||||||
|
Arbeitszusammenfassung:
|
||||||
|
Investierte Zeit in dieser Session: 00:30 (Finaler Feinschliff)
|
||||||
|
|
||||||
|
Arbeitszusammenfassung:
|
||||||
|
Zusammenfassung der Ergebnisse:
|
||||||
|
|
||||||
|
1. Finaler Listen-Fix:
|
||||||
|
* Das Verschwinden der Einwilligungs-Häkchen auf der Terminliste wurde behoben. Statt eines unsicheren Unicode-Zeichens wird nun ein robustes, CSS-gezeichnetes Checkbox-Symbol mit grünem Häkchen verwendet, das garantiert in jedem PDF erscheint.
|
||||||
|
* Die Einwilligungserkennung wurde durch Live-Datenanalyse von Calendly-Antworten ("Ja, gerne" vs. "Nein, eher nicht") verifiziert und stabilisiert.
|
||||||
|
|
||||||
|
2. Header-Optimierung:
|
||||||
|
* Der Titel der Terminliste wurde auf den Calendly-Event-Namen fokussiert.
|
||||||
|
* Die automatische Entfernung von (JOBXXXXX) Markierungen aus den Auftragsnamen wurde perfektioniert.
|
||||||
|
|
||||||
|
Damit sind alle Anforderungen für den Fotograf.de Scraper und die Shooting-Planung vollständig umgesetzt.
|
||||||
|
```
|
||||||
|
|
||||||
|
|
||||||
|
## 🤖 Status-Update (2026-03-25 13:11 Berlin Time)
|
||||||
|
```yaml
|
||||||
|
Investierte Zeit in dieser Session: 00:08
|
||||||
|
|
||||||
|
Arbeitszusammenfassung:
|
||||||
|
Keine Zusammenfassung angegeben.
|
||||||
|
```
|
||||||
|
|
||||||
|
|
||||||
|
## 🤖 Status-Update (2026-04-07 20:10 Berlin Time)
|
||||||
|
```yaml
|
||||||
|
Investierte Zeit in dieser Session: 01:12
|
||||||
|
|
||||||
|
Arbeitszusammenfassung:
|
||||||
|
Keine Zusammenfassung angegeben.
|
||||||
|
```
|
||||||
|
|
||||||
|
|
||||||
|
## 🤖 Status-Update (2026-04-08 10:21 Berlin Time)
|
||||||
|
```yaml
|
||||||
|
Investierte Zeit in dieser Session: 14:11
|
||||||
|
|
||||||
|
Arbeitszusammenfassung:
|
||||||
|
Keine Zusammenfassung angegeben.
|
||||||
|
```
|
||||||
|
|
||||||
|
|
||||||
|
## 🤖 Status-Update (2026-04-08 18:39 Berlin Time)
|
||||||
|
```yaml
|
||||||
|
Investierte Zeit in dieser Session: 08:17
|
||||||
|
|
||||||
|
Arbeitszusammenfassung:
|
||||||
|
Keine Zusammenfassung angegeben.
|
||||||
|
```
|
||||||
|
|
||||||
|
|
||||||
|
## 🤖 Status-Update (2026-04-10 23:51 Berlin Time)
|
||||||
|
```yaml
|
||||||
|
Investierte Zeit in dieser Session: 01:12
|
||||||
|
|
||||||
|
Arbeitszusammenfassung:
|
||||||
|
Keine Zusammenfassung angegeben.
|
||||||
|
```
|
||||||
|
|
||||||
|
|
||||||
|
## 🤖 Status-Update (2026-04-12 21:57 Berlin Time)
|
||||||
|
```yaml
|
||||||
|
Investierte Zeit in dieser Session: 00:23
|
||||||
|
|
||||||
|
Arbeitszusammenfassung:
|
||||||
|
Bugfix in der QR-Karten-Generierung: Vergangene Calendly-Termine werden nun sowohl beim Abruf (Startzeit auf 'jetzt' gesetzt) als auch bei der Verarbeitung (Filterung auf Termine ab heute 00:00 Uhr Berlin Zeit) korrekt ausgeschlossen. Dies behebt die Anzeige von Altdaten aus dem Vorjahr.
|
||||||
|
```
|
||||||
|
|
||||||
|
|
||||||
|
## 🤖 Status-Update (2026-04-14 10:37 Berlin Time)
|
||||||
|
```yaml
|
||||||
|
Investierte Zeit in dieser Session: 01:43
|
||||||
|
|
||||||
|
Arbeitszusammenfassung:
|
||||||
|
Keine Zusammenfassung angegeben.
|
||||||
|
```
|
||||||
|
|
||||||
|
|
||||||
|
## 🤖 Status-Update (2026-04-14 16:09 Berlin Time)
|
||||||
|
```yaml
|
||||||
|
Investierte Zeit in dieser Session: 05:32
|
||||||
|
|
||||||
|
Arbeitszusammenfassung:
|
||||||
|
Keine Zusammenfassung angegeben.
|
||||||
|
```
|
||||||
|
|
||||||
|
|
||||||
|
## 🤖 Status-Update (2026-04-18 00:14 Berlin Time)
|
||||||
|
```yaml
|
||||||
|
Investierte Zeit in dieser Session: 02:12
|
||||||
|
|
||||||
|
Arbeitszusammenfassung:
|
||||||
|
Keine Zusammenfassung angegeben.
|
||||||
|
```
|
||||||
|
|
||||||
|
|
||||||
|
## 🤖 Status-Update (2026-04-18 15:09 Berlin Time)
|
||||||
|
```yaml
|
||||||
|
Investierte Zeit in dieser Session: 01:57
|
||||||
|
|
||||||
|
Arbeitszusammenfassung:
|
||||||
|
Keine Zusammenfassung angegeben.
|
||||||
|
```
|
||||||
|
|
||||||
|
|
||||||
|
## 🤖 Status-Update (2026-04-18 15:58 Berlin Time)
|
||||||
|
```yaml
|
||||||
|
Investierte Zeit in dieser Session: 00:49
|
||||||
|
|
||||||
|
Arbeitszusammenfassung:
|
||||||
|
Keine Zusammenfassung angegeben.
|
||||||
|
```
|
||||||
|
|
||||||
|
|
||||||
|
## 🤖 Status-Update (2026-04-18 22:58 Berlin Time)
|
||||||
|
```yaml
|
||||||
|
Investierte Zeit in dieser Session: 01:21
|
||||||
|
|
||||||
|
Arbeitszusammenfassung:
|
||||||
|
Keine Zusammenfassung angegeben.
|
||||||
|
```
|
||||||
|
|
||||||
|
|
||||||
|
## 🤖 Status-Update (2026-05-04 08:53 Berlin Time)
|
||||||
|
```yaml
|
||||||
|
Investierte Zeit in dieser Session: 00:39
|
||||||
|
|
||||||
|
Arbeitszusammenfassung:
|
||||||
|
Keine Zusammenfassung angegeben.
|
||||||
|
```
|
||||||
|
|||||||
Reference in New Issue
Block a user