Compare commits
10 Commits
25daaf0afe
...
main
| Author | SHA1 | Date | |
|---|---|---|---|
| d90d856620 | |||
| 7cb29cd8da | |||
| 991e338d67 | |||
| db94eca626 | |||
| 1ae8b3e353 | |||
| 02b17d53ea | |||
| d49f6d51f4 | |||
| 995b3ff829 | |||
| 472f392107 | |||
| e6061868e6 |
@@ -1 +1 @@
|
||||
{"task_id": "34588f42-8544-8046-85d4-d7895ed9b29c", "token": "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", "readme_path": null, "session_start_time": "2026-04-18T11:12:01.291297"}
|
||||
{"task_id": "34288f42-8544-800e-b866-dfcbc22bd4e5", "token": "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", "readme_path": "readme.md", "session_start_time": "2026-05-04T06:53:43.831976"}
|
||||
@@ -0,0 +1,126 @@
|
||||
"Child ID";"Vorname Kind";"Nachname Kind";Geschlecht;Geburtsdatum;Einrichtung;Gruppe;Lehrer;"Gültig bis";Bezeichner;Referenz;Straße;PLZ;Ort;Staat;"Geschwisterkind Vorname (1)";"Geschwisterkind Nachname (1)";"Geschwisterkind Vorname (2)";"Geschwisterkind Nachname (2)";Einzelfotos;Gruppenfotos;"Familie / Geschwister";Foto;"Vom Kunden ausgewählt";"Vorname Eltern (1)";"Nachname Eltern (1)";"Email der Eltern (1)";"Telefonnummer der Eltern (1)";"Vorname Eltern (2)";"Nachname Eltern (2)";"Email der Eltern (2)";"Telefonnummer der Eltern (2)";"Zugangscode (1)";"Barcode (1)";"Logins (1)";"Zugangscode (2)";"Barcode (2)";"Logins (2)";Bestellungen
|
||||
49663204;Fares;AL-KHADHER;;;;Bären;;;;;;;;;;;;;Ja;Ja;Nein;IMG_8069.jpg;Nein;Familie;"Al Khadher";Husseinalkhadher8@gmail.com;;;;;;9CKZ9FRB;859242970856177;2;;;0;0
|
||||
49656019;Entoni;Altoni;;;;Bären;;;;;;;;;;;;;Ja;Ja;Nein;IMG_7999.jpg;Nein;Yuliia;Altoni;julichka.altony@gmail.com;;;;;;8PH6DT65;590974350307121;1;;;0;1
|
||||
49659604;"Rashane Tyler";Asasana;;;;Bären;;;;;;;;;;;;;Ja;Ja;Nein;IMG_8048.jpg;Nein;Penphaka;Asasana;asa-sa-na@hotmail.com;;;;;;57VSYGKZ;742438249864838;2;;;0;0
|
||||
49955890;Yunus;Batuge;;;;Bären;;;;;;;;;;;;;Ja;Ja;"Familien- / Geschwisterfotos";IMG_7831.jpg;Nein;Sümeyra;Senyurt;senyurtsumeyra7@gmail.com;;;;;;Y9LFLVQ6;807433233164209;15;;;0;1
|
||||
49652597;Josip;Bungic;;;;Bären;;;;;;;;;;;;;Nein;Ja;Nein;;Nein;Mirela;"Marijan Bungic";m.bungic@web.de;;;;;;JYCSXJTX;967076735653408;0;;;0;0
|
||||
50064753;Hazal;Cicek;;;;Bären;;;;;;;;;;;;;Ja;Ja;Nein;IMG_7714.jpg;Nein;Familie;Cicek;uelke.ardak@hotmail.de;;;;;;VNFYB935;306933685807165;0;;;0;0
|
||||
49601392;Levi;Damia;;;;Bären;;;;;;;;;;;;;Ja;Ja;Nein;;Nein;Louisa;Damian;damian.louisa@web.de;;;;;;3VP45KKX;107953830470294;0;;;0;0
|
||||
50314236;Levi;Damian;;;;Bären;;;;;;;;;;;;;Ja;Ja;Nein;IMG_7936.jpg;Nein;Louisa;Damian;damian.louisa@web.de;;;;;;DVXDP3PH;677393543795054;1;;;0;1
|
||||
50211537;Gökhan;Dogan;;;;Bären;;;;;;;;;Eray;Dogan;;;Ja;Ja;"Familien- / Geschwisterfotos";IMG_8096.jpg;Nein;Familie;Dogan;goeksel_dogan@web.de;;;;;;V9FBBSMP;152677334111372;2;;;0;0
|
||||
50220839;"Magdalena Personal";Forster;;;;Bären;;;;;;;;;;;;;Ja;Ja;Nein;;Nein;Magdalena;Forster;magdalenaforster@aol.de;;;;;;7NG54JNY;74394435366624;0;;;0;0
|
||||
49629572;Philipp;Gabauer;;;;Bären;;;;;;;;;;;;;Ja;Ja;"Familien- / Geschwisterfotos";;Nein;Familie;Gabauer;luzia.gabauer@web.de;;;;;;4HP8FX8K;20692770537744;0;;;0;0
|
||||
49652592;Emilia;"Herrmann Rodriguez";;;;Bären;;;;;;;;;;;;;Nein;Ja;Nein;;Nein;Lukas;Herrmann;Familie.Herrmann.Rodriguez@web.de;;;;;;7X7Y4BKV;73798042174951;0;;;0;0
|
||||
50060415;Konstantin;Karl;;;;Bären;;;;;;;;;;;;;Ja;Ja;Nein;IMG_7806.jpg;Nein;Katharina;Karl;katharina_karl@mailbox.org;;;;;;XNCV6XM7;810263015266358;0;;;0;0
|
||||
50060407;Paulina;Karl;;;;Bären;;;;;;;;;;;;;Ja;Ja;"Familien- / Geschwisterfotos";;Nein;Katharina;Karl;katharina_karl@mailbox.org;;;;;;HSKMY37G;607082088640959;0;;;0;0
|
||||
49901894;Salomia;Karpenko;;;;Bären;;;;;;;;;Miroslav;Karpenko;;;Ja;Ja;Nein;IMG_7786.jpg;Nein;Familie;Karpenko;denis.k88@web.de;;;;;;4P7TJXJL;826081492713003;4;;;0;0
|
||||
49654259;Jan;Klyszcz;;;;Bären;;;;;;;;;Christoph;Klyszcz;;;Ja;Ja;"Familien- / Geschwisterfotos";IMG_7859.jpg;Nein;Familie;Klyszcz;klyszcz.ewa92@gmail.com;;;;;;V9QQ3MHT;635050103722845;4;;;0;0
|
||||
50220757;Personal;Lang;;;;Bären;;;;;;;;;;;;;Ja;Ja;Nein;;Nein;Susanne;Lang;susi67.sl@gmail.com;;;;;;J2B9F4FH;84529853902827;0;;;0;0
|
||||
49663258;Tuldi;"Lennart & Hannes";;;;Bären;;;;;;;;;;;;;Ja;Ja;"Familien- / Geschwisterfotos";IMG_7915.jpg;Nein;Familie;Tuldi;olga_tuldi@yahoo.de;;;;;;Z7D4PJHV;628485247329265;10;;;0;0
|
||||
49727295;Leonardo;Liquori;;;;Bären;;;;;;;;;;;;;Ja;Ja;"Familien- / Geschwisterfotos";IMG_7654.jpg;Nein;Elisa;Mandelli;e.mandelli1@icloud.com;;;;;;CZBSHZXD;112332574322427;3;;;0;0
|
||||
49694659;Mara;Schmid;;;;Bären;;;;;;;;;;;;;Ja;Ja;"Familien- / Geschwisterfotos";IMG_7737.jpg;Nein;Familie;Schmid;izuther@googlemail.com;;;;;;M2QWP8PN;693636596918854;4;;;0;0
|
||||
49553844;Niklas;Schulze;;;;Bären;;;;;;;;;;;;;Ja;Ja;Nein;IMG_8027.jpg;Nein;Kristina;Schulze;m-k-ammersdorf@gmx.de;;Kristina;Schulze;kristina-anna-schulze@web.de;;TDJ47324;213569357182904;4;;;0;1
|
||||
49605342;Zoe;Seget;;;;Bären;;;;;;;;;;;;;Ja;Ja;"Familien- / Geschwisterfotos";IMG_7764.jpg;Nein;Sandra;Seget;sandra.seget@hotmail.de;;;;;;GTY9QMWP;335161472735404;3;;;0;1
|
||||
50211319;Valentin;Slugocki;;;;Bären;;;;;;;;;;;;;Ja;Ja;Nein;;Nein;Bartek;Slugocki;bartek@slugocki.de;;;;;;24632X5S;557733375991183;0;;;0;0
|
||||
50219244;"Hannes & Lennart";Tuldi;;;;Bären;;;;;;;;;;;;;Ja;Ja;"Familien- / Geschwisterfotos";IMG_7959.jpg;Nein;Familie;Tuldi;olga_tuldi@yahoo.de;;;;;;SZ7D82KL;195111473283743;9;;;0;1
|
||||
49697372;Xaver;Wego;;;;Bären;;;;;;;;;;;;;Ja;Ja;Nein;IMG_7883.jpg;Nein;Luisa;Wego;luisa.wego@web.deq;;Luisa;Wego;luisa.wego@web.de;;JT22FL8Y;470837065491819;0;6XJVMHVQ;423156711490859;6;1
|
||||
49655774;Maximilian;Wild;;;;Bären;;;;;;;;;;;;;Ja;Ja;"Familien- / Geschwisterfotos";IMG_7977.jpg;Nein;Familie;Wild;wildramona@gmx.de;;;;;;XYCPRYX3;841975500351515;6;;;0;0
|
||||
49613372;Anton;Adelberger;;;;Bienen;;;;;;;;;;;;;Ja;Ja;Nein;IMG_8367.jpg;Nein;Catharina;Adelberger;catharina.adelberger@web.de;;;;;;GCSBWRRG;255252947890362;5;;;0;0
|
||||
49655260;Ludwig;Baumgartner;;;;Bienen;;;;;;;;;;;;;Ja;Ja;"Familien- / Geschwisterfotos";;Nein;Franziska;Baumgartner;franziwild@gmx.de;;;;;;JJDBSW2Y;1346656807317;0;;;0;0
|
||||
49652607;Josip;Bungic;;;;Bienen;;;;;;;;;;;;;Nein;Ja;Nein;;Nein;Mirela;"Marijan Bungic";m.bungic@web.de;;;;;;XR3LFNTW;896957772773017;1;;;0;0
|
||||
50064781;Havin;Cicek;;;;Bienen;;;;;;;;;;;;;Ja;Ja;"Familien- / Geschwisterfotos";IMG_8198.jpg;Nein;Familie;Cicek;uelke.ardak@hotmail.de;;;;;;8X8GYWR3;847511991706072;1;;;0;0
|
||||
50055747;Mattea;Fusarri;;;;Bienen;;;;;;;;;;;;;Ja;Ja;Nein;IMG_8151.jpg;Nein;Nadia;Fusarri;nadia.fusarri@gmx.de;;;;;;ZRGWQM3W;439731985455440;3;;;0;0
|
||||
50247238;Maliya;Gildner;;;;Bienen;;;;;;;;;;;;;Ja;Ja;Nein;IMG_8341.jpg;Nein;Alisa;Gildner;gildner31@gmail.com;;;;;;KRTVJ4M5;910013114016383;2;;;0;0
|
||||
49825283;Kilian;Hartl;;;;Bienen;;;;;;;;;;;;;Ja;Ja;"Familien- / Geschwisterfotos";IMG_8410.jpg;Nein;Familie;Schreibauer;a.schreibauer@gmail.com;;;;;;NM92G8PK;534850382393461;3;;;0;0
|
||||
50153154;"Elara Carolina";Hintermaier;;;;Bienen;;;;;;;;;;;;;Ja;Ja;Nein;IMG_8289.jpg;Nein;Adriana;Hintermaier;adri.shunka@gmail.com;;;;;;N6G67PPZ;233647866343524;1;;;0;0
|
||||
49700913;Luka;Loncar;;;;Bienen;;;;;;;;;;;;;Ja;Ja;Nein;IMG_8435.jpg;Nein;Szilvia;Palinkas;silvijapalinkas@yahoo.com;;;;;;7FGK48GQ;345687401851686;5;;;0;1
|
||||
50056989;Elias;Minksz;;;;Bienen;;;;;;;;;;;;;Ja;Ja;"Familien- / Geschwisterfotos";;Nein;Carolin;Dirndorfer;c.dirndorfer@gmx.de;;;;;;GZ3DQSPL;632143387747513;0;;;0;0
|
||||
49770856;Anna;Nguyen;;;;Bienen;;;;;;;;;;;;;Ja;Ja;Nein;IMG_8219.jpg;Nein;Thi;"Hien Minh Nguyen";nthm30121996@gmail.com;;Anna;Nguyen;ging318@gmail.com;;CM9CMLBJ;122574286373832;2;;;0;0
|
||||
50180008;Ilia;Nickl;;;;Bienen;;;;;;;;;;;;;Ja;Ja;"Familien- / Geschwisterfotos";IMG_8390.jpg;Nein;Familie;Nickl;cela990@hotmail.com;;;;;;KHPY6LQV;652142151577775;9;;;0;0
|
||||
49575500;Mika;Rubinstein;;;;Bienen;;;;;;;;;Mia;Rubinstein;;;Ja;Ja;"Familien- / Geschwisterfotos";IMG_8548.jpg;Nein;Familie;Rubinstein;n.d.rubinstein@googlemail.com;;;;;;K7PX4J8Y;415300008608215;2;;;0;0
|
||||
49652538;Alina;Schillinger;;;;Bienen;;;;;;;;;;;;;Ja;Ja;"Familien- / Geschwisterfotos";IMG_8244.jpg;Nein;Familie;Schillinger;schneggeno1@web.de;;;;;;X27P5L9Q;180935518874486;3;;;0;0
|
||||
49663277;Malia;Schlesinger;;;;Bienen;;;;;;;;;;;;;Ja;Ja;"Familien- / Geschwisterfotos";IMG_8126.jpg;Nein;Familie;Schlesinger;stefanie2011@gmx.net;;;;;;6672SN99;377539049099605;2;;;0;0
|
||||
50257156;Marie;Schöberl;;;;Bienen;;;;;;;;;;;;;Ja;Ja;Nein;IMG_8265.jpg;Nein;Michaela;Schöberl;michaela.schoeberl@gmx.de;;;;;;BKZWFCS4;504469516218803;2;;;0;0
|
||||
50057519;Letizia;Stachanczyk;;;;Bienen;;;;;;;;;Leonardo;Stachanczyk;;;Ja;Ja;"Familien- / Geschwisterfotos";;Nein;Familie;Stachanczyk;Suzanna.Stachanczyk@web.de;;;;;;C7GX6BM2;268376387434609;0;;;0;0
|
||||
49919594;Ela;Torres;;;;Bienen;;;;;;;;;;;;;Ja;Ja;"Familien- / Geschwisterfotos";IMG_8313.jpg;Nein;Familie;Torres;ftorrestapia@me.com;;;;;;YGL954RX;63682236385188;4;;;0;0
|
||||
49837810;Maximilian;Weber;;;;Bienen;;;;;;;;;;;;;Ja;Ja;"Familien- / Geschwisterfotos";IMG_8522.jpg;Nein;Familie;Weber;mail.weber.melanie@googlemail.com;;;;;;4QS8GPWR;7954462978689;3;;;0;0
|
||||
50006492;Musa;Yilmaz;;;;Bienen;;;;;;;;;Ömer;Yilmaz;;;Ja;Ja;"Familien- / Geschwisterfotos";IMG_8466.jpg;Nein;Familie;Yilmaz;merve-ymz@hotmail.com;;;;;;82YTD8FK;912359706713774;4;;;0;0
|
||||
49652523;Nina;Zhang;;;;Bienen;;;;;;;;;;;;;Ja;Ja;Nein;IMG_8174.jpg;Nein;Hua;Zhang;zhanghua0411@hotmail.com;;;;;;DTWR882P;201986576299456;3;;;0;1
|
||||
49663112;Elias;Bonifati;;;;Fische;;;;;;;;;;;;;Ja;Ja;"Familien- / Geschwisterfotos";IMG_6891.jpg;Nein;Familie;Misiano;bonifati@hotmail.de;;;;;;BVMZFPHK;582378219071480;5;;;0;0
|
||||
49652608;Mihael;Bungic;;;;Fische;;;;;;;;;;;;;Nein;Ja;Nein;;Nein;Mirela;"Marijan Bungic";m.bungic@web.de;;;;;;2664S6D3;848993713584313;1;;;0;0
|
||||
50248018;Alina;Catak;;;;Fische;;;;;;;;;;;;;Ja;Ja;Nein;IMG_6424.jpg;Nein;Catak;Admir;catakadmir@gmail.com;;;;;;ML42KL42;532022002681433;7;;;0;1
|
||||
50258123;Jakov;Ceko;;;;Fische;;;;;;;;;;;;;Ja;Ja;Nein;IMG_7294.jpg;Nein;Familie;Ceko;brankoceko91@gmail.com;;;;;;CVBYPKBV;550993218062367;0;;;0;0
|
||||
50258100;Luka;Ceko;;;;Fische;;;;;;;;;;;;;Ja;Ja;Nein;IMG_7043.jpg;Nein;Familie;Ceko;brankoceko91@gmail.com;;;;;;DJKP3CBY;120492680122927;0;;;0;0
|
||||
50222105;"Saide Mira";Cildir;;;;Fische;;;;;;;;;;;;;Ja;Ja;Nein;IMG_6617.jpg;Nein;Hasret;Cildir;hasretcildir@web.de;;;;;;M9B773FR;568587367870302;1;;;0;0
|
||||
50218962;Valentin;Gabauer;;;;Fische;;;;;;;;;;;;;Ja;Ja;Nein;IMG_7004.jpg;Nein;Sabine;Gabauer;sabine.gabauer@gmx.de;;;;;;QXWD3ZNS;377128045906117;2;;;0;1
|
||||
50079276;Anika;Gaßner;;;;Fische;;;;;;;;;;;;;Ja;Ja;"Familien- / Geschwisterfotos";IMG_6555.jpg;Nein;Familie;Karyakina;veronika20@hotmail.de;;;;;;3BSPHDHW;851569123151027;1;;;0;0
|
||||
50211546;Kilian;Glück;;;;Fische;;;;;;;;;;;;;Ja;Ja;"Familien- / Geschwisterfotos";IMG_6960.jpg;Nein;Familie;Glück;katja_glueck@web.de;;;;;;2LGTH3VN;783740741149373;2;;;0;0
|
||||
50282221;Lisa;Gumberger;;;;Fische;;;;;;;;;;;;;Ja;Ja;Nein;IMG_6470.jpg;Nein;Sarah;Gumberger;sarah.gumberger@gmx.de;;;;;;2PHHTXT9;383948464807600;3;;;0;1
|
||||
49616818;Nadine;Hamed;;;;Fische;;;;;;;;;;;;;Ja;Ja;Nein;IMG_6648.jpg;Nein;;Nadine;sarasadaby@gmail.com;;;;;;V3KTTVNV;319851445592837;3;;;0;1
|
||||
50208499;Noela;Islami;;;;Fische;;;;;;;;;;;;;Ja;Ja;"Familien- / Geschwisterfotos";IMG_6524.jpg;Nein;Familie;Islami;Zineta.islami@gmx.de;;;;;;CK5B7WWN;410922852620998;2;;;0;0
|
||||
49901895;Miroslav;Karpenko;;;;Fische;;;;;;;;;Salomia;Karpenko;;;Ja;Ja;Nein;IMG_6848.jpg;Nein;Familie;Karpenko;denis.k88@web.de;;;;;;922GC8BH;915901506033102;1;;;0;0
|
||||
50221939;Merjem;Kaukovic;;;;Fische;;;;;;;;;;;;;Ja;Ja;Nein;IMG_6502.jpg;Nein;Edita;Porcic-Kaukovic;editta1996@hotmail.com;;;;;;KGVKW8HZ;305550439054156;1;;;0;0
|
||||
50288355;Frauke;Klinge;;;;Fische;;;;;;;;;;;;;Nein;Ja;Nein;;Nein;Frauke;Klinge;fs.klinge@t-online.de;;;;;;K4GCTQJG;948438313552204;0;;;0;0
|
||||
49653461;Max;Krämer;;;;Fische;;;;;;;;;;;;;Ja;Ja;Nein;IMG_6778.jpg;Nein;Michael;Krämer;m.k326@web.de;;;;;;PF5DCT5N;929652051737843;2;;;0;1
|
||||
49663714;Casper;Mettig;;;;Fische;;;;;;;;;;;;;Ja;Ja;"Familien- / Geschwisterfotos";;Nein;Familie;Roder;stephanie.roder@gmail.com;;;;;;4L2ZBL3M;296176221032781;6;;;0;0
|
||||
50208680;Emre;Mujic;;;;Fische;;;;;;;;;;;;;Ja;Ja;Nein;IMG_6927.jpg;Nein;Amra;Mujic;amra.mujic95@gmail.com;;;;;;QVBTGS73;16951665504680;2;;;0;0
|
||||
49635375;Mila;Nickl;;;;Fische;;;;;;;;;;;;;Ja;Ja;"Familien- / Geschwisterfotos";IMG_6685.jpg;Nein;Familie;Nickl;cela990@hotmail.com;;;;;;S4Z3HPZY;736840096508400;8;;;0;0
|
||||
49575499;Mia;Rubinstein;;;;Fische;;;;;;;;;Mika;Rubinstein;;;Ja;Ja;"Familien- / Geschwisterfotos";IMG_6443.jpg;Nein;Familie;Rubinstein;n.d.rubinstein@googlemail.com;;;;;;BGGHXNLC;908242966462743;3;;;0;0
|
||||
49786711;Zoe;Scholpp;;;;Fische;;;;;;;;;;;;;Ja;Ja;Nein;IMG_6721.jpg;Nein;Sabrina;Scholpp;sabrinasch1107@gmail.com;;;;;;CXMT4R9T;585861217320080;2;;;0;0
|
||||
49755578;Valerie;Schultze;;;;Fische;;;;;;;;;;;;;Ja;Ja;Nein;IMG_6743.jpg;Nein;Anita;Schultze;anitajuliane.schultze@gmail.com;;;;;;4WVF24SR;499960849175102;2;;;0;0
|
||||
50057518;Leonardo;Stachanczyk;;;;Fische;;;;;;;;;Letizia;Stachanczyk;;;Ja;Ja;"Familien- / Geschwisterfotos";IMG_7072.jpg;Nein;Familie;Stachanczyk;Suzanna.Stachanczyk@web.de;;;;;;BZC28W7T;900613948231467;7;;;0;0
|
||||
50211898;Maya;Watanabe;;;;Fische;;;;;;;;;;;;;Ja;Ja;Nein;IMG_6591.jpg;Nein;Barbara;Watanabe;barbara.j@live.de;;;;;;ZPVJ8R5Q;341636130475078;1;;;0;0
|
||||
50006491;Ömer;Yilmaz;;;;Fische;;;;;;;;;Musa;Yilmaz;;;Ja;Ja;"Familien- / Geschwisterfotos";IMG_6809.jpg;Nein;Familie;Yilmaz;merve-ymz@hotmail.com;;;;;;C4NB42PX;659038103936299;4;;;0;0
|
||||
50078572;Aurelia;Adelsberger;;;;Spatzen;;;;;;;;;;;;;Ja;Ja;"Familien- / Geschwisterfotos";IMG_7493.jpg;Nein;Familie;Adelsberger;barbara.adelsberger@yahoo.de;;Christian;Godelmann;floke.com@gmail.com;;3NVRB2BM;230676178020824;1;;;0;0
|
||||
50220084;Eymen;Baldir;;;;Spatzen;;;;;;;;;;;;;Ja;Ja;Nein;IMG_6406.jpg;Nein;Seda;Baldir;seda.ay@icloud.com;;;;;;JDG5CDT8;381447885366279;1;;;0;0
|
||||
49602297;Magdalena;Bauer;;;;Spatzen;;;;;;;;;;;;;Ja;Ja;"Familien- / Geschwisterfotos";IMG_7463.jpg;Nein;Familie;Bauer;bonprix29@yahoo.de;;;;;;V7W5ZPVY;411274063112493;8;;;0;0
|
||||
49992146;Zoe;Cajic;;;;Spatzen;;;;;;;;;;;;;Ja;Ja;Nein;IMG_7414.jpg;Nein;Amar;Cajic;amar.cajic@gmail.com;;;;;;JMZD8SNN;53184897942750;3;;;0;0
|
||||
50057147;Mario;Cakic;;;;Spatzen;;;;;;;;;;;;;Ja;Ja;Nein;IMG_7182.jpg;Nein;Lucija;Zivkovic;lucija.zivkovic16@gmail.com;;;;;;D8SZH5XL;508565079338980;3;;;0;1
|
||||
50211538;Eray;Dogan;;;;Spatzen;;;;;;;;;Gökhan;Dogan;;;Ja;Ja;"Familien- / Geschwisterfotos";IMG_7380.jpg;Nein;Familie;Dogan;goeksel_dogan@web.de;;;;;;LMLH64KS;61030672835452;2;;;0;0
|
||||
49552513;Antonia;Freiwald;;;;Spatzen;;;;;;;;;;;;;Ja;Ja;Nein;IMG_7643.jpg;Nein;Stephanie;Freiwald;stephanie.freiwald@gmx.de;;;;;;4BV76XQS;224897626646913;1;;;0;1
|
||||
49601982;Heidi;Götzberger;;;;Spatzen;;;;;;;;;;;;;Ja;Ja;"Familien- / Geschwisterfotos";IMG_7618.jpg;Nein;Familie;Götzberger;franziska.lanzinger@t-online.de;;;;;;KSH3Y552;141723213815881;2;;;0;0
|
||||
50063666;Una;Hodzic;;;;Spatzen;;;;;;;;;;;;;Ja;Ja;Nein;IMG_7561.jpg;Nein;Hodžić;Aldin;menager21@hotmail.com;;;;;;9W4CYMRX;170368609363861;4;;;0;1
|
||||
49603438;Liara;Honisch;;;;Spatzen;;;;;;;;;;;;;Ja;Ja;"Familien- / Geschwisterfotos";IMG_7519.jpg;Nein;Familie;Karayilan;yasemin.karayilan@yahoo.de;;;;;;MSNCXQ77;787504756287604;2;;;0;0
|
||||
49623482;Matteo;Katterfeld;;;;Spatzen;;;;;;;;;;;;;Ja;Ja;"Familien- / Geschwisterfotos";IMG_7347.jpg;Nein;Familie;Ketterfeld;madlen.katterfeld@gmx.de;;;;;;YJ9MM349;888691047601122;6;;;0;0
|
||||
49654260;Christoph;Klyszcz;;;;Spatzen;;;;;;;;;Jan;Klyszcz;;;Ja;Ja;"Familien- / Geschwisterfotos";IMG_7320.jpg;Nein;Familie;Klyszcz;klyszcz.ewa92@gmail.com;;;;;;V3KSRPDM;389595058391936;6;;;0;0
|
||||
50056841;Ludwig;Lacen;;;;Spatzen;;;;;;;;;;;;;Ja;Ja;Nein;IMG_7246.jpg;Nein;Michael;Lacen;michael.lacen@gmx.de;;;;;;GJFNWHMY;205672235649590;2;;;0;1
|
||||
50056690;Emilia;Rodriguez;;;;Spatzen;;;;;;;;;;;;;Ja;Ja;Nein;IMG_7591.jpg;Nein;Daniela;Rodriguez;daniela-hinz-82@gmx.de;;;;;;9LQLW7YV;289213156745302;1;;;0;1
|
||||
49652595;Vaiana;Slaiman;;;;Spatzen;;;;;;;;;;;;;Ja;Ja;Nein;;Nein;;Slaiman;hadeer94hasan@web.de;;;;;;YB24BVQR;230552964517174;0;;;0;0
|
||||
49838169;Raphael;Weber;;;;Spatzen;;;;;;;;;;;;;Ja;Ja;Nein;IMG_7214.jpg;Nein;Familie;Weber;mail.weber.melanie@googlemail.com;;;;;;W3VBKM3W;362639250675953;3;;;0;0
|
||||
49906413;Ludwig;Welz;;;;Spatzen;;;;;;;;;;;;;Ja;Ja;"Familien- / Geschwisterfotos";IMG_7149.jpg;Nein;Familie;Welz;eva_welz@gmx.de;;;;;;VPJYZ48P;785597492180163;3;;;0;0
|
||||
49726920;Amy;Wieters;;;;Spatzen;;;;;;;;;;;;;Ja;Ja;Nein;IMG_7443.jpg;Nein;Janine;Wieters;janine28@gmx.de;;;;;;69KFXLBD;921506261142206;4;;;0;1
|
||||
50453287;Familie;Adelsberger;;;;;;;;;;;;;;;;;Ja;Ja;Nein;IMG_0728.jpg;Nein;Familie;Adelsberger;barbara.adelsberger@yahoo.de;;;;;;JVXV9T2M;916908422224646;1;;;0;1
|
||||
50451311;Familie;"Al Khadher";;;;;;;;;;;;;;;;;Ja;Ja;Nein;IMG_0153.jpg;Nein;Familie;"Al Khadher";Husseinalkhadher8@gmail.com;;;;;;4VTSN5J6;437618998198555;2;;;0;1
|
||||
50453454;Familie;Bauer;;;;;;;;;;;;;;;;;Ja;Ja;Nein;IMG_0353.jpg;Nein;Familie;Bauer;bonprix29@yahoo.de;;;;;;N8DZBDLW;169896993687826;2;;;0;0
|
||||
50491788;Familie;Baumgartner;;;;;;;;;;;;;;;;;Ja;Ja;Nein;IMG_0451.jpg;Nein;Franziska;Baumgartner;franziwild@gmx.de;;;;;;ZRH36VRS;847462383118786;2;;;0;1
|
||||
50463803;Amla;Bobo;;;;;;;;;;;;;;;;;Ja;Ja;Nein;IMG_7689.jpg;Nein;Xhulia;Xhelci;xhuliaxhelci@gmail.com;;;;;;C35CQ55V;950839783885570;1;;;0;0
|
||||
50451304;Familie;Ceko;;;;;;;;;;;;;;;;;Ja;Ja;Nein;IMG_0099.jpg;Nein;Familie;Ceko;brankoceko91@gmail.com;;;;;;4NZXSHTW;798398153397116;2;;;0;0
|
||||
50453898;Familie;Cicek;;;;;;;;;;;;;;;;;Ja;Ja;Nein;IMG_2693.jpg;Nein;Familie;Cicek;uelke.ardak@hotmail.de;;;;;;YC9KVV76;290706892284805;3;;;0;1
|
||||
50453136;Familie;Dogan;;;;;;;;;;;;;;;;;Ja;Ja;Nein;IMG_9830.jpg;Nein;Familie;Dogan;goeksel_dogan@web.de;;;;;;GDBRDW6K;918773718877810;2;;;0;0
|
||||
50453529;Familie;Gabauer;;;;;;;;;;;;;;;;;Ja;Ja;Nein;IMG_0286.jpg;Nein;Familie;Gabauer;luzia.gabauer@web.de;;;;;;SQHNHMH6;49585341392454;6;;;0;1
|
||||
50452028;Familie;Glück;;;;;;;;;;;;;;;;;Ja;Ja;Nein;IMG_0406.jpg;Nein;Familie;Glück;katja_glueck@web.de;;;;;;7WTXJNDC;628871407778415;2;;;0;0
|
||||
50453448;Familie;Götzberger;;;;;;;;;;;;;;;;;Ja;Ja;Nein;IMG_1262.jpg;Nein;Familie;Götzberger;franziska.lanzinger@t-online.de;;;;;;MPHRG7SP;954770009741299;2;;;0;1
|
||||
50453434;Familie;Islami;;;;;;;;;;;;;;;;;Ja;Ja;Nein;IMG_1330.jpg;Nein;Familie;Islami;Zineta.islami@gmx.de;;;;;;KF7CNCYZ;620178179159158;2;;;0;0
|
||||
50452019;Familie;Karayilan;;;;;;;;;;;;;;;;;Ja;Ja;Nein;IMG_0200.jpg;Nein;Familie;Karayilan;yasemin.karayilan@yahoo.de;;;;;;6H73JV6B;765446752804075;1;;;0;0
|
||||
50453439;Familie;Karpenko;;;;;;;;;;;;;;;;;Ja;Ja;Nein;IMG_2825.jpg;Nein;Familie;Karpenko;denis.k88@web.de;;;;;;LR87SQ8C;963707649418838;1;;;0;0
|
||||
50453495;Familie;Karyakina;;;;;;;;;;;;;;;;;Ja;Ja;Nein;IMG_0800.jpg;Nein;Familie;Karyakina;veronika20@hotmail.de;;;;;;RYK4BQLQ;638219110542782;1;;;0;0
|
||||
50453488;Familie;Ketterfeld;;;;;;;;;;;;;;;;;Ja;Ja;Nein;IMG_1211.jpg;Nein;Familie;Ketterfeld;madlen.katterfeld@gmx.de;;;;;;PLX9G4V3;117752011222601;6;;;0;1
|
||||
50453446;Familie;Klyszcz;;;;;;;;;;;;;;;;;Ja;Ja;Nein;IMG_2740.jpg;Nein;Familie;Klyszcz;klyszcz.ewa92@gmail.com;;;;;;LZR8WFP9;874820410323668;9;;;0;1
|
||||
50452151;Familie;Misiano;;;;;;;;;;;;;;;;;Ja;Ja;Nein;IMG_1577.jpg;Nein;Familie;Misiano;bonifati@hotmail.de;;;;;;7ZW9V666;394112462489259;5;;;0;1
|
||||
50451284;Familie;Nickl;;;;;;;;;;;;;;;;;Ja;Ja;Nein;IMG_1054.jpg;Nein;Familie;Nickl;cela990@hotmail.com;;;;;;35CH589Q;438824910404667;8;;;0;1
|
||||
50452230;Familie;Roder;;;;;;;;;;;;;;;;;Ja;Ja;Nein;IMG_1138.jpg;Nein;Familie;Roder;stephanie.roder@gmail.com;;;;;;848D3SWY;745487201848290;6;;;0;0
|
||||
50452913;Familie;Rubinstein;;;;;;;;;;;;;;;;;Ja;Ja;Nein;IMG_0041.jpg;Nein;Familie;Rubinstein;n.d.rubinstein@googlemail.com;;;;;;G9H8YFC4;180523151134386;1;;;0;1
|
||||
50453768;Familie;Schillinger;;;;;;;;;;;;;;;;;Ja;Ja;Nein;IMG_1404.jpg;Nein;Familie;Schillinger;schneggeno1@web.de;;;;;;SQJSP49C;593384265020703;3;;;0;1
|
||||
50452236;Familie;Schlesinger;;;;;;;;;;;;;;;;;Ja;Ja;Nein;IMG_1512.jpg;Nein;Familie;Schlesinger;stefanie2011@gmx.net;;;;;;946G6HJH;269413107409936;3;;;0;1
|
||||
50453894;Familie;Schmid;;;;;;;;;;;;;;;;;Ja;Ja;Nein;IMG_0625.jpg;Nein;Familie;Schmid;izuther@googlemail.com;;;;;;W7W8P32C;486167508950250;4;;;0;1
|
||||
50452248;Familie;Schreibauer;;;;;;;;;;;;;;;;;Ja;Ja;Nein;IMG_0671.jpg;Nein;Familie;Schreibauer;a.schreibauer@gmail.com;;;;;;97R4TRBC;130440825414681;3;;;0;0
|
||||
50452273;Familie;Stachanczyk;;;;;;;;;;;;;;;;;Ja;Ja;Nein;IMG_9974.jpg;Nein;Familie;Stachanczyk;Suzanna.Stachanczyk@web.de;;;;;;C334SSSL;733864213043388;5;;;0;0
|
||||
50453485;Familie;Torres;;;;;;;;;;;;;;;;;Ja;Ja;Nein;IMG_0967.jpg;Nein;Familie;Torres;ftorrestapia@me.com;;;;;;PCQ4CNV9;553742663210606;4;;;0;0
|
||||
50452252;Familie;Tuldi;;;;;;;;;;;;;;;;;Ja;Ja;Nein;IMG_2788.jpg;Nein;Familie;Tuldi;olga_tuldi@yahoo.de;;;;;;B99BYYYF;657381798122682;11;;;0;0
|
||||
50452022;Familie;Weber;;;;;;;;;;;;;;;;;Ja;Ja;Nein;IMG_0984.jpg;Nein;Familie;Weber;mail.weber.melanie@googlemail.com;;;;;;7954G4C5;820357028620317;3;;;0;1
|
||||
50451320;Familie;Welz;;;;;;;;;;;;;;;;;Ja;Ja;Nein;IMG_1459.jpg;Nein;Familie;Welz;eva_welz@gmx.de;;;;;;69N5WYFK;952025141929986;3;;;0;1
|
||||
50452419;Familie;Wild;;;;;;;;;;;;;;;;;Ja;Ja;Nein;IMG_0539.jpg;Nein;Familie;Wild;wildramona@gmx.de;;;;;;DVXKKJCZ;789239059675168;9;;;0;1
|
||||
50452462;Familie;Wolf;;;;;;;;;;;;;;;;;Ja;Ja;Nein;IMG_0841.jpg;Nein;Familie;Wolf;anjamichi77@gmail.com;;;;;;FXPLQYH9;784676508389646;1;;;0;0
|
||||
50410050;Jonas;Wolf;;;;;;;;;;;;;;;;;Ja;Ja;Nein;IMG_8489.jpg;Nein;Familie;Wolf;anjamichi77@gmail.com;;;;;;Q52NYB4N;18810570193338;0;;;0;0
|
||||
50453882;Familie;Yilmaz;;;;;;;;;;;;;;;;;Ja;Ja;Nein;IMG_9907.jpg;Nein;Familie;Yilmaz;merve-ymz@hotmail.com;;;;;;TXK86QSB;467856804734432;4;;;0;0
|
||||
49655787;Joseph;Wild;;;;Spatzen;;;;;;;;;;;;;Ja;Ja;Nein;IMG_7119.jpg;Nein;Familie;Wild;wildramona@gmx.de;;;;;;8C56662R;226166391326912;5;;;0;0
|
||||
|
25
check_db_links.py
Normal file
25
check_db_links.py
Normal file
@@ -0,0 +1,25 @@
|
||||
import sqlite3
|
||||
import os
|
||||
|
||||
db_path = "/app/fotograf-de-scraper/backend/data/fotograf_jobs.db"
|
||||
if not os.path.exists(db_path):
|
||||
print(f"Database not found at {db_path}")
|
||||
else:
|
||||
conn = sqlite3.connect(db_path)
|
||||
cursor = conn.cursor()
|
||||
|
||||
# Check candidates missing links for the current job
|
||||
job_id = "576228454"
|
||||
cursor.execute("""
|
||||
SELECT COUNT(*)
|
||||
FROM job_participants
|
||||
WHERE job_id = ?
|
||||
AND has_orders = 0
|
||||
AND digital_package_ordered = 0
|
||||
AND logins <= 5
|
||||
AND quick_login_url IS NULL
|
||||
""", (job_id,))
|
||||
missing = cursor.fetchone()[0]
|
||||
print(f"Missing links for candidates in job {job_id}: {missing}")
|
||||
|
||||
conn.close()
|
||||
9
check_tables.py
Normal file
9
check_tables.py
Normal file
@@ -0,0 +1,9 @@
|
||||
import sqlite3
|
||||
|
||||
db_path = "/app/fotograf-de-scraper/backend/data/fotograf_jobs.db"
|
||||
conn = sqlite3.connect(db_path)
|
||||
cursor = conn.cursor()
|
||||
cursor.execute("SELECT name FROM sqlite_master WHERE type='table';")
|
||||
tables = cursor.fetchall()
|
||||
print(f"Tables: {[t[0] for t in tables]}")
|
||||
conn.close()
|
||||
@@ -1,6 +1,6 @@
|
||||
# Fotograf.de Scraper & Management UI
|
||||
|
||||
**Status:** Production-Ready Microservice (Core Feature: PDF List Generation, QR Cards, Shooting Schedule, **Siblings List**, **Gmail API Integration** & **Automated Release Requests**)
|
||||
**Status:** Production-Ready Microservice (Core Feature: PDF List Generation, QR Cards, Shooting Schedule, **SQLite Data Sync**, **Gmail API Integration** & **Automated Release Requests**)
|
||||
|
||||
Dieser Service modernisiert die alten `Fotograf.de` Skripte, indem er eine robuste, web-basierte UI zur Verwaltung und Automatisierung von Foto-Aufträgen bereitstellt. Er ist als eigenständiger Microservice konzipiert, der unabhängig vom Haupt-Stack läuft.
|
||||
|
||||
@@ -10,16 +10,22 @@ Der Service besteht aus zwei Hauptkomponenten:
|
||||
|
||||
1. **Backend (Python / FastAPI / Selenium / SQLAlchemy):**
|
||||
* **Automatisierung:** Nutzt Selenium für das Scraping von `fotograf.de`.
|
||||
* **Persistenz:** Eine SQLite-Datenbank (`fotograf_jobs.db`) speichert die Auftragsliste, OAuth-Tokens (`GmailToken`), Gutscheincodes (`DiscountCode`) und Teilnehmerdaten (`ReleaseParticipant`).
|
||||
* **Persistenz:** Eine SQLite-Datenbank (`fotograf_jobs.db`) speichert die Auftragsliste, OAuth-Tokens (`GmailToken`), Gutscheincodes (`DiscountCode`), Teilnehmerdaten (`ReleaseParticipant`), **Auftragsteilnehmer (`JobParticipant`)** und die **Versand-Historie (`ReleaseHistory`)**.
|
||||
* **PDF-Engine:** Nutzt WeasyPrint für Teilnehmerlisten und ReportLab/PyPDF2 für präzise PDF-Overlays (QR-Karten).
|
||||
* **API-Integration:** Direkte Anbindung an die **Calendly API (v2)** sowie an die **Gmail API** für direkten E-Mail-Versand und automatisierte Webhook-Antworten.
|
||||
|
||||
2. **Frontend (TypeScript / React / Vite / TailwindCSS):**
|
||||
* **Modernes UI:** Ein vollständig responsives Dashboard mit Tailwind CSS (Kachel-Layout, Tabs für Kiga/Schule).
|
||||
* **Arbeitsfluss:** Tools sind direkt in der Detailansicht des jeweiligen Auftrags integriert.
|
||||
* **Arbeitsfluss:** Tools sind in der Detailansicht eines Auftrags in logische Phasen (Vorbereitung, Follow-Up, Statistik) unterteilt.
|
||||
|
||||
## ✨ Core Features
|
||||
|
||||
### 🚀 Performance-Optimierung (SQLite Sync)
|
||||
Statt wie früher jedes Mal mühsam durch alle Foto-Alben zu "crawlen", nutzt das System nun eine intelligente Synchronisierung:
|
||||
* **One-Click Sync:** Über den Button "Daten von Fotograf.de abgleichen" lädt das System die detaillierte Namensliste (CSV) herunter.
|
||||
* **Lokale Datenbank:** Alle relevanten Infos (E-Mail der Eltern, Login-Zahlen, Bestellstatus, Zugangscodes) werden in der Tabelle `job_participants` gespeichert.
|
||||
* **Blitzschnelle Analyse:** Nachfass-Mails und Statistiken werden nun in Sekunden (statt Minuten) direkt aus der Datenbank generiert.
|
||||
|
||||
### Feature 1: Teilnehmerlisten (Vollständig)
|
||||
Automatisierter Workflow zum Download und Formatieren der Anmeldelisten von `fotograf.de` als sortiertes PDF inkl. "Kinderfotos Erding" Branding.
|
||||
|
||||
@@ -28,43 +34,38 @@ Spezielles Modul für Familien-Mini-Shootings:
|
||||
* **QR-Karten-Andruck:** Präzises Overlay von Name, Kinderanzahl und Uhrzeit inkl. automatischer **Einwilligungs-Checkbox (☑)** aus Calendly-Daten.
|
||||
* **Termin-Übersichtsliste:** Generiert eine A4-Tabelle für den Shooting-Tag im 6-Minuten-Takt inkl. Lückenfüller.
|
||||
|
||||
### Feature 3: Nachfass-E-Mails & Gmail Direkt-Versand (Vollständig)
|
||||
Identifizierung von Nicht-Käufern und automatisierter Massenversand personalisierter E-Mails via Gmail API.
|
||||
### Feature 3: Nachfass-E-Mails & Gmail Direkt-Versand (Optimiert)
|
||||
Identifizierung von Nicht-Käufern (0-1 Logins, keine Bestellung) basierend auf den synchronisierten Datenbank-Daten.
|
||||
* **Vorschau-Modus:** Ermöglicht das Durchklicken der personalisierten E-Mails an jeden Empfänger vor dem eigentlichen Versand.
|
||||
* **Quick-Login Automation:** Die Login-Links (`https://www.kinderfotos-erding.de/a/{code}`) werden automatisch generiert.
|
||||
|
||||
### Feature 4: Verkaufs-Statistiken (Vollständig)
|
||||
Detaillierte Analyse des Kaufverhaltens pro Album mit Echtzeit-Fortschrittsanzeige.
|
||||
### Feature 4: Verkaufs-Statistiken (Optimiert)
|
||||
Detaillierte Analyse des Kaufverhaltens pro Gruppe/Klasse basierend auf den lokalen Datenbank-Einträgen.
|
||||
|
||||
### Feature 5: Geschwisterliste (Einrichtungsintern) (Vollständig)
|
||||
Tool zur Identifizierung von Geschwistergruppen innerhalb einer Einrichtung inkl. Cross-Check mit Calendly-Buchungen und speziellen Geschwister-QR-Karten.
|
||||
* **Flexibilität:** Optionaler Modus "Ohne Nachmittags-Shooting", um die Liste auch ohne Calendly-Abgleich (rein einrichtungsintern) zu generieren.
|
||||
|
||||
### Feature 6: Freigabeanfragen & Gutschein-Automation (Vollständig - Neu April 2026)
|
||||
### Feature 6: Freigabeanfragen & Gutschein-Automation (Vollständig)
|
||||
Vollautomatisierter DSGVO-Workflow zur Einholung von Veröffentlichungsgenehmigungen:
|
||||
* **Schlanker Versand:** Manuelle Eingabe von Empfängern (E-Mail, Vorname, Kindernamen) für gezielte Anfragen.
|
||||
* **Intelligente Personalisierung:** Automatische Bereinigung von Einrichtungsnamen (entfernt "Kindergarten" und Jahreszahlen).
|
||||
* **Schlanker Versand:** Manuelle Eingabe von Empfängern (E-Mail, Vorname, Kindernamen) mit **E-Mail-Vorschau**.
|
||||
* **Versand-Planung:** Einstellbare Versandzeit (Berlin Timezone) via Hintergrund-Tasks.
|
||||
* **Webhook-Integration:** Direkte Anbindung an **Google Forms**. Bei Absenden des Freigabe-Formulars wird automatisch:
|
||||
1. Ein freier Gutscheincode aus der DB reserviert.
|
||||
2. Eine personalisierte Dankes-E-Mail mit dem Code und einer bebilderten Einlöse-Anleitung versendet.
|
||||
* **Gutschein-Management:** UI zum Hochladen und Überwachen des Gutschein-Pools.
|
||||
* **Webhook-Integration:** Direkte Anbindung an **Google Forms**. Bei Absenden des Freigabe-Formulars wird automatisch ein Gutscheincode reserviert und eine Dankes-E-Mail versendet.
|
||||
* **Antwort-Übersicht:** Tabelle aller eingegangenen Freigaben inkl. zugewiesenem Code und Zeitstempel.
|
||||
|
||||
---
|
||||
|
||||
## 🛠️ Technische Details & Sicherheit
|
||||
* **Sicherer Test-Modus:** Über die Umgebungsvariable `DEV_MODE_EMAIL_RECIPIENT` können alle ausgehenden E-Mails (Anfragen & Gutscheine) global an eine Test-Adresse umgeleitet werden.
|
||||
* **Zeitzonen:** Durchgängige Verwendung von `Europe/Berlin` für alle zeitgesteuerten Operationen.
|
||||
* **E-Mail Signatur:** Die offizielle HTML-Signatur von "Kinderfotos Erding" wird automatisch an alle ausgehenden E-Mails (auch vom Backend) angehängt.
|
||||
* **Gmail OAuth:** Persistente Speicherung der Refresh-Tokens in der Datenbank ermöglicht dauerhaften Betrieb ohne erneutes Einloggen.
|
||||
* **BCC-Kontrolle:** Jede vom System versendete E-Mail sendet automatisch eine Blindkopie (BCC) an `kontakt@kinderfotos-erding.de`.
|
||||
* **Versand-Historie:** Alle Aussendungen (Anzahl Empfänger, Zeitpunkt) werden in der Tabelle `release_history` protokolliert.
|
||||
* **Sicherer Test-Modus:** Über `DEV_MODE_EMAIL_RECIPIENT` können alle E-Mails global an eine Test-Adresse umgeleitet werden.
|
||||
* **Zeitzonen:** Durchgängige Verwendung von `Europe/Berlin`.
|
||||
* **Gmail OAuth:** Persistente Speicherung der Refresh-Tokens in der Datenbank.
|
||||
|
||||
## 🚀 Deployment & Konfiguration
|
||||
|
||||
Der Service wird über die Haupt-`docker-compose.yml` des Projekts verwaltet.
|
||||
|
||||
### Umgebungsvariablen (`.env`)
|
||||
Wichtige neue Variablen in `/fotograf-de-scraper/.env`:
|
||||
* `DEV_MODE_EMAIL_RECIPIENT`: (Optional) E-Mail für Umleitung im Testbetrieb.
|
||||
* `google_fotograf_client_id` / `google_fotograf_secret`: OAuth Credentials.
|
||||
* `CALENDLY_TOKEN`: API Zugriff.
|
||||
|
||||
### URLs
|
||||
* **Frontend:** `https://floke-ai.duckdns.org/fotograf-de/`
|
||||
* **Webhook für Google Forms:** `https://floke-ai.duckdns.org/fotograf-de-api/api/publish-request/webhook`
|
||||
* **Webhook für Google Forms:** `https://floke-ai.duckdns.org/fotograf-de-api/api/publish-request/webhook`
|
||||
@@ -42,6 +42,41 @@ class ReleaseParticipant(Base):
|
||||
first_name = Column(String)
|
||||
last_updated = Column(DateTime, default=datetime.datetime.utcnow, onupdate=datetime.datetime.utcnow)
|
||||
|
||||
class ReleaseHistory(Base):
|
||||
__tablename__ = "release_history"
|
||||
id = Column(Integer, primary_key=True)
|
||||
timestamp = Column(DateTime, default=datetime.datetime.utcnow)
|
||||
recipient_count = Column(Integer)
|
||||
scheduled_time = Column(String, nullable=True)
|
||||
|
||||
class ReminderHistory(Base):
|
||||
__tablename__ = "reminder_history"
|
||||
id = Column(Integer, primary_key=True)
|
||||
job_id = Column(String, index=True)
|
||||
timestamp = Column(DateTime, default=datetime.datetime.utcnow)
|
||||
recipient_count = Column(Integer)
|
||||
max_logins = Column(Integer)
|
||||
recipients_json = Column(String) # JSON list of emails/names/children
|
||||
scheduled_time = Column(String, nullable=True)
|
||||
|
||||
class JobParticipant(Base):
|
||||
__tablename__ = "job_participants"
|
||||
id = Column(Integer, primary_key=True)
|
||||
job_id = Column(String, index=True)
|
||||
child_id = Column(String, nullable=True)
|
||||
vorname_kind = Column(String, nullable=True)
|
||||
nachname_kind = Column(String, nullable=True)
|
||||
vorname_eltern = Column(String, nullable=True)
|
||||
nachname_eltern = Column(String, nullable=True)
|
||||
email_eltern = Column(String, nullable=True)
|
||||
zugangscode = Column(String, index=True)
|
||||
gruppe = Column(String, nullable=True)
|
||||
logins = Column(Integer, default=0)
|
||||
has_orders = Column(Integer, default=0) # 0 for false, 1 for true
|
||||
digital_package_ordered = Column(Integer, default=0) # 0 for false, 1 for true
|
||||
quick_login_url = Column(String, nullable=True)
|
||||
last_synced = Column(DateTime, default=datetime.datetime.utcnow)
|
||||
|
||||
Base.metadata.create_all(bind=engine)
|
||||
|
||||
def get_db():
|
||||
|
||||
Binary file not shown.
|
After Width: | Height: | Size: 56 KiB |
49
fotograf-de-scraper/backend/inspect_orders.py
Normal file
49
fotograf-de-scraper/backend/inspect_orders.py
Normal file
@@ -0,0 +1,49 @@
|
||||
import os
|
||||
import sys
|
||||
from dotenv import load_dotenv
|
||||
from sqlalchemy import create_engine
|
||||
from sqlalchemy.orm import sessionmaker
|
||||
from database import Job
|
||||
from main import setup_driver, login
|
||||
import time
|
||||
|
||||
load_dotenv()
|
||||
|
||||
engine = create_engine("sqlite:////app/data/fotograf_jobs.db")
|
||||
Session = sessionmaker(bind=engine)
|
||||
db = Session()
|
||||
|
||||
# Get latest job
|
||||
job = db.query(Job).order_by(Job.last_updated.desc()).first()
|
||||
if not job:
|
||||
print("No jobs found in database.")
|
||||
sys.exit(1)
|
||||
|
||||
print(f"Using Job ID: {job.id} ({job.name}), Account: {job.account_type}")
|
||||
|
||||
username = os.getenv(f"{job.account_type.upper()}_USER")
|
||||
password = os.getenv(f"{job.account_type.upper()}_PW")
|
||||
|
||||
driver = setup_driver()
|
||||
if not driver:
|
||||
print("Failed to init driver")
|
||||
sys.exit(1)
|
||||
|
||||
if not login(driver, username, password):
|
||||
print("Login failed")
|
||||
driver.quit()
|
||||
sys.exit(1)
|
||||
|
||||
orders_url = f"https://app.fotograf.de/config_jobs_orders/index/{job.id}/customer_orders"
|
||||
print(f"Navigating to {orders_url}")
|
||||
driver.get(orders_url)
|
||||
time.sleep(5) # wait for page to load
|
||||
|
||||
html = driver.page_source
|
||||
with open("orders_page.html", "w", encoding="utf-8") as f:
|
||||
f.write(html)
|
||||
|
||||
driver.save_screenshot("orders_page.png")
|
||||
print("Saved orders_page.html and orders_page.png")
|
||||
|
||||
driver.quit()
|
||||
@@ -10,13 +10,14 @@ from weasyprint import HTML
|
||||
import tempfile
|
||||
import shutil
|
||||
import time
|
||||
import json
|
||||
from dotenv import load_dotenv
|
||||
from fastapi import FastAPI, HTTPException, Depends, BackgroundTasks
|
||||
from fastapi.middleware.cors import CORSMiddleware
|
||||
from fastapi.responses import FileResponse
|
||||
from typing import List, Dict, Any, Optional
|
||||
from sqlalchemy.orm import Session
|
||||
from database import get_db, Job as DBJob, engine, Base
|
||||
from database import get_db, Job as DBJob, engine, Base, JobParticipant, SessionLocal, ReminderHistory
|
||||
import math
|
||||
import uuid
|
||||
|
||||
@@ -116,6 +117,8 @@ SELECTORS = {
|
||||
"job_row_shooting_type": ".//td[count(//th[contains(., 'Typ')]/preceding-sibling::th) + 1]",
|
||||
"export_dropdown": "[data-qa-id='dropdown:export']",
|
||||
"export_csv_link": "button[data-qa-id='button:csv']",
|
||||
# --- Reminder & Quick Login Selectors ---
|
||||
"person_access_code_link": ".//a[contains(@data-qa-id, 'guest-access-banner-access-code')]",
|
||||
# --- Statistics Selectors ---
|
||||
"album_overview_rows": "//table/tbody/tr",
|
||||
"album_overview_link": ".//td[2]//a",
|
||||
@@ -488,264 +491,161 @@ def get_jobs_list(driver) -> List[Dict[str, Any]]:
|
||||
task_store: Dict[str, Dict[str, Any]] = {}
|
||||
|
||||
def process_statistics(task_id: str, job_id: str, account_type: str):
|
||||
logger.info(f"Task {task_id}: Starting statistics calculation for job {job_id}")
|
||||
task_store[task_id] = {"status": "running", "progress": "Initialisiere Browser...", "result": None}
|
||||
|
||||
username = os.getenv(f"{account_type.upper()}_USER")
|
||||
password = os.getenv(f"{account_type.upper()}_PW")
|
||||
driver = None
|
||||
logger.info(f"Task {task_id}: Starting fast statistics calculation for job {job_id}")
|
||||
task_store[task_id] = {"status": "running", "progress": "Berechne Statistiken...", "result": None}
|
||||
|
||||
db = SessionLocal()
|
||||
try:
|
||||
driver = setup_driver()
|
||||
if not driver or not login(driver, username, password):
|
||||
task_store[task_id] = {"status": "error", "progress": "Login fehlgeschlagen. Überprüfe die Zugangsdaten."}
|
||||
# Check if we have data at all
|
||||
count = db.query(JobParticipant).filter(JobParticipant.job_id == job_id).count()
|
||||
if count == 0:
|
||||
task_store[task_id] = {"status": "error", "progress": "Keine Daten vorhanden. Bitte erst oben auf 'Daten abgleichen' klicken."}
|
||||
return
|
||||
|
||||
task_store[task_id]["progress"] = f"Lade Alben-Übersicht für Auftrag..."
|
||||
# Query DB and group by 'gruppe'
|
||||
|
||||
albums_overview_url = f"https://app.fotograf.de/config_jobs_photos/index/{job_id}"
|
||||
logger.info(f"Navigating to albums: {albums_overview_url}")
|
||||
driver.get(albums_overview_url)
|
||||
wait = WebDriverWait(driver, 15)
|
||||
# Get all participants for this job
|
||||
participants = db.query(JobParticipant).filter(JobParticipant.job_id == job_id).all()
|
||||
|
||||
albums_to_visit = []
|
||||
try:
|
||||
album_rows = wait.until(EC.presence_of_all_elements_located((By.XPATH, SELECTORS["album_overview_rows"])))
|
||||
for row in album_rows:
|
||||
try:
|
||||
album_link = row.find_element(By.XPATH, SELECTORS["album_overview_link"])
|
||||
albums_to_visit.append({"name": album_link.text, "url": album_link.get_attribute('href')})
|
||||
except NoSuchElementException:
|
||||
continue
|
||||
except TimeoutException:
|
||||
task_store[task_id] = {"status": "error", "progress": "Konnte die Album-Liste nicht finden."}
|
||||
return
|
||||
|
||||
total_albums = len(albums_to_visit)
|
||||
task_store[task_id]["progress"] = f"{total_albums} Alben gefunden. Starte Auswertung..."
|
||||
|
||||
statistics = []
|
||||
|
||||
for index, album in enumerate(albums_to_visit):
|
||||
album_name = album['name']
|
||||
task_store[task_id]["progress"] = f"Bearbeite Album {index + 1}/{total_albums}: '{album_name}'..."
|
||||
driver.get(album['url'])
|
||||
|
||||
try:
|
||||
total_codes_text = wait.until(EC.visibility_of_element_located((By.XPATH, SELECTORS["access_code_count"]))).text
|
||||
num_pages = math.ceil(int(total_codes_text) / 20)
|
||||
|
||||
total_children_in_album = 0
|
||||
children_with_purchase = 0
|
||||
children_with_all_purchased = 0
|
||||
|
||||
for page_num in range(1, num_pages + 1):
|
||||
task_store[task_id]["progress"] = f"Bearbeite Album {index + 1}/{total_albums}: '{album_name}' (Seite {page_num}/{num_pages})..."
|
||||
|
||||
if page_num > 1:
|
||||
driver.get(album['url'] + f"?page_guest_accesses={page_num}")
|
||||
|
||||
person_rows = wait.until(EC.presence_of_all_elements_located((By.XPATH, SELECTORS["person_rows"])))
|
||||
|
||||
for person_row in person_rows:
|
||||
total_children_in_album += 1
|
||||
try:
|
||||
photo_container = person_row.find_element(By.XPATH, "./following-sibling::div[1]")
|
||||
|
||||
num_total_photos = len(photo_container.find_elements(By.XPATH, SELECTORS["person_all_photos"]))
|
||||
num_purchased_photos = len(photo_container.find_elements(By.XPATH, SELECTORS["person_purchased_photos"]))
|
||||
num_access_cards = len(photo_container.find_elements(By.XPATH, SELECTORS["person_access_card_photo"]))
|
||||
|
||||
buyable_photos = num_total_photos - num_access_cards
|
||||
|
||||
if num_purchased_photos > 0:
|
||||
children_with_purchase += 1
|
||||
|
||||
if buyable_photos > 0 and buyable_photos == num_purchased_photos:
|
||||
children_with_all_purchased += 1
|
||||
except NoSuchElementException:
|
||||
continue
|
||||
|
||||
statistics.append({
|
||||
"Album": album_name,
|
||||
"Kinder_insgesamt": total_children_in_album,
|
||||
"Kinder_mit_Käufen": children_with_purchase,
|
||||
"Kinder_Alle_Bilder_gekauft": children_with_all_purchased
|
||||
})
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Fehler bei Auswertung von Album '{album_name}': {e}")
|
||||
continue
|
||||
# Group by group
|
||||
groups = {}
|
||||
for p in participants:
|
||||
g_name = p.gruppe or "Unbekannt"
|
||||
if g_name not in groups:
|
||||
groups[g_name] = {
|
||||
"Album": g_name,
|
||||
"Kinder_insgesamt": 0,
|
||||
"Kinder_mit_Käufen": 0,
|
||||
"Kinder_Alle_Bilder_gekauft": 0
|
||||
}
|
||||
groups[g_name]["Kinder_insgesamt"] += 1
|
||||
if p.has_orders:
|
||||
groups[g_name]["Kinder_mit_Käufen"] += 1
|
||||
if p.digital_package_ordered:
|
||||
groups[g_name]["Kinder_Alle_Bilder_gekauft"] += 1
|
||||
statistics = list(groups.values())
|
||||
statistics.sort(key=lambda x: x["Album"])
|
||||
|
||||
task_store[task_id] = {
|
||||
"status": "completed",
|
||||
"progress": "Auswertung erfolgreich abgeschlossen!",
|
||||
"progress": "Statistik erfolgreich berechnet!",
|
||||
"result": statistics
|
||||
}
|
||||
|
||||
except Exception as e:
|
||||
logger.exception(f"Unexpected error in task {task_id}")
|
||||
logger.exception(f"Unexpected error in statistics task {task_id}")
|
||||
task_store[task_id] = {"status": "error", "progress": f"Unerwarteter Fehler: {str(e)}"}
|
||||
finally:
|
||||
if driver:
|
||||
logger.debug(f"Task {task_id}: Closing driver.")
|
||||
driver.quit()
|
||||
db.close()
|
||||
|
||||
def process_reminder_analysis(task_id: str, job_id: str, account_type: str):
|
||||
logger.info(f"Task {task_id}: Starting reminder analysis for job {job_id}")
|
||||
task_store[task_id] = {"status": "running", "progress": "Initialisiere Browser...", "result": None}
|
||||
|
||||
username = os.getenv(f"{account_type.upper()}_USER")
|
||||
password = os.getenv(f"{account_type.upper()}_PW")
|
||||
driver = None
|
||||
def process_reminder_analysis(task_id: str, job_id: str, account_type: str, max_logins: int = 1, exclude_purchased_emails: bool = True):
|
||||
logger.info(f"Task {task_id}: Starting fast reminder analysis for job {job_id}")
|
||||
task_store[task_id] = {"status": "running", "progress": "Analysiere Datenbank-Einträge...", "result": None}
|
||||
|
||||
db = SessionLocal()
|
||||
try:
|
||||
driver = setup_driver()
|
||||
if not driver or not login(driver, username, password):
|
||||
task_store[task_id] = {"status": "error", "progress": "Login fehlgeschlagen."}
|
||||
# Check if we have data at all
|
||||
count = db.query(JobParticipant).filter(JobParticipant.job_id == job_id).count()
|
||||
if count == 0:
|
||||
task_store[task_id] = {"status": "error", "progress": "Keine Daten vorhanden. Bitte erst oben auf 'Daten abgleichen' klicken."}
|
||||
return
|
||||
|
||||
wait = WebDriverWait(driver, 15)
|
||||
|
||||
# 1. Navigate to albums overview
|
||||
albums_overview_url = f"https://app.fotograf.de/config_jobs_photos/index/{job_id}"
|
||||
task_store[task_id]["progress"] = "Lade Alben-Übersicht..."
|
||||
driver.get(albums_overview_url)
|
||||
|
||||
albums_to_visit = []
|
||||
try:
|
||||
album_rows = wait.until(EC.presence_of_all_elements_located((By.XPATH, SELECTORS["album_overview_rows"])))
|
||||
for row in album_rows:
|
||||
try:
|
||||
album_link = row.find_element(By.XPATH, SELECTORS["album_overview_link"])
|
||||
albums_to_visit.append({"name": album_link.text, "url": album_link.get_attribute('href')})
|
||||
except NoSuchElementException:
|
||||
continue
|
||||
except TimeoutException:
|
||||
task_store[task_id] = {"status": "error", "progress": "Konnte die Album-Liste nicht finden."}
|
||||
# 1. Get emails that have ALREADY purchased anything (in ANY job we have in DB)
|
||||
purchased_emails = set()
|
||||
if exclude_purchased_emails:
|
||||
from sqlalchemy import or_
|
||||
# We look globally across the whole job_participants table
|
||||
purchased_results = db.query(JobParticipant.email_eltern).filter(
|
||||
or_(JobParticipant.has_orders == 1, JobParticipant.digital_package_ordered == 1),
|
||||
JobParticipant.email_eltern != "",
|
||||
JobParticipant.email_eltern != None
|
||||
).all()
|
||||
purchased_emails = {r[0].lower() for r in purchased_results}
|
||||
logger.info(f"Task {task_id}: Found {len(purchased_emails)} unique emails with existing purchases in DB to exclude.")
|
||||
|
||||
# 2. Query DB for potential candidates (Logins <= max_logins and No Orders)
|
||||
candidates = db.query(JobParticipant).filter(
|
||||
JobParticipant.job_id == job_id,
|
||||
JobParticipant.has_orders == 0,
|
||||
JobParticipant.digital_package_ordered == 0,
|
||||
JobParticipant.logins <= max_logins,
|
||||
JobParticipant.email_eltern != "",
|
||||
JobParticipant.email_eltern != None
|
||||
).all()
|
||||
|
||||
if not candidates:
|
||||
task_store[task_id] = {
|
||||
"status": "completed",
|
||||
"progress": f"Keine passenden Empfänger (0-{max_logins} Logins, keine Bestellung) gefunden.",
|
||||
"result": []
|
||||
}
|
||||
return
|
||||
|
||||
raw_results = []
|
||||
total_albums = len(albums_to_visit)
|
||||
# 3. Aggregate results by Email
|
||||
aggregation = {}
|
||||
missing_links_count = 0
|
||||
|
||||
for index, album in enumerate(albums_to_visit):
|
||||
album_name = album['name']
|
||||
task_store[task_id]["progress"] = f"Album {index+1}/{total_albums}: '{album_name}'..."
|
||||
driver.get(album['url'])
|
||||
for c in candidates:
|
||||
email = c.email_eltern.lower()
|
||||
|
||||
try:
|
||||
total_codes_text = wait.until(EC.visibility_of_element_located((By.XPATH, SELECTORS["access_code_count"]))).text
|
||||
num_pages = math.ceil(int(total_codes_text) / 20)
|
||||
|
||||
for page_num in range(1, num_pages + 1):
|
||||
task_store[task_id]["progress"] = f"Album {index+1}/{total_albums}: '{album_name}' (Seite {page_num}/{num_pages})..."
|
||||
if page_num > 1:
|
||||
driver.get(album['url'] + f"?page_guest_accesses={page_num}")
|
||||
|
||||
person_rows = wait.until(EC.presence_of_all_elements_located((By.XPATH, SELECTORS["person_rows"])))
|
||||
num_persons = len(person_rows)
|
||||
|
||||
for i in range(num_persons):
|
||||
# Re-locate rows to avoid stale element reference
|
||||
person_rows = wait.until(EC.presence_of_all_elements_located((By.XPATH, SELECTORS["person_rows"])))
|
||||
person_row = person_rows[i]
|
||||
|
||||
login_count_text = person_row.find_element(By.XPATH, ".//span[text()='Logins']/following-sibling::strong").text
|
||||
|
||||
# Only interested in people with 0 or 1 logins (potential reminders)
|
||||
# Actually, if they haven't bought yet, they might need a reminder regardless of logins,
|
||||
# but the legacy logic uses login_count <= 1.
|
||||
# Let's stick to the legacy logic for now.
|
||||
if int(login_count_text) <= 1:
|
||||
vorname = person_row.find_element(By.XPATH, ".//span[text()='Vorname']/following-sibling::strong").text
|
||||
|
||||
try:
|
||||
photo_container = person_row.find_element(By.XPATH, "./following-sibling::div[1]")
|
||||
purchase_icons = photo_container.find_elements(By.XPATH, ".//img[@alt='Bestellungen mit diesem Foto']")
|
||||
if len(purchase_icons) > 0:
|
||||
continue
|
||||
except NoSuchElementException:
|
||||
pass
|
||||
|
||||
# Potential candidate
|
||||
access_code_page_url = person_row.find_element(By.XPATH, ".//a[contains(@data-qa-id, 'guest-access-banner-access-code')]").get_attribute('href')
|
||||
|
||||
# Open in new tab or navigate back and forth?
|
||||
# Scraper.py navigates back and forth.
|
||||
driver.get(access_code_page_url)
|
||||
|
||||
try:
|
||||
wait.until(EC.visibility_of_element_located((By.XPATH, "//a[@id='quick-login-url']")))
|
||||
quick_login_url = driver.find_element(By.XPATH, "//a[@id='quick-login-url']").get_attribute('href')
|
||||
potential_buyer_element = driver.find_element(By.XPATH, "//a[contains(@href, '/config_customers/view_customer')]")
|
||||
buyer_name = potential_buyer_element.text
|
||||
|
||||
potential_buyer_element.click()
|
||||
email = wait.until(EC.visibility_of_element_located((By.XPATH, "//span[contains(., '@')]"))).text
|
||||
|
||||
raw_results.append({
|
||||
"child_name": vorname,
|
||||
"buyer_name": buyer_name,
|
||||
"email": email,
|
||||
"quick_login": quick_login_url
|
||||
})
|
||||
except Exception as e:
|
||||
logger.warning(f"Error getting details for {vorname}: {e}")
|
||||
|
||||
# Go back to the album page
|
||||
driver.get(album['url'] + (f"?page_guest_accesses={page_num}" if page_num > 1 else ""))
|
||||
wait.until(EC.presence_of_element_located((By.XPATH, SELECTORS["person_rows"])))
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Fehler bei Album '{album_name}': {e}")
|
||||
# Skip if this email already has a purchase for ANOTHER child
|
||||
if exclude_purchased_emails and email in purchased_emails:
|
||||
continue
|
||||
|
||||
# STRICT LINK CHECK: If we don't have a scraped Quick Login URL, skip this child.
|
||||
# We don't want to send broken /login/access/ links.
|
||||
if not c.quick_login_url:
|
||||
missing_links_count += 1
|
||||
continue
|
||||
|
||||
# Aggregate Results
|
||||
task_store[task_id]["progress"] = "Aggregiere Ergebnisse..."
|
||||
aggregated_data = {}
|
||||
for res in raw_results:
|
||||
email = res['email']
|
||||
child_name = "Familienbilder" if res['child_name'] == "Familie" else res['child_name']
|
||||
html_link = f'<a href="{res["quick_login"]}">Fotos von {child_name}</a>'
|
||||
|
||||
if email not in aggregated_data:
|
||||
aggregated_data[email] = {
|
||||
'buyer_first_name': res['buyer_name'].split(' ')[0],
|
||||
'email': email,
|
||||
'children': [child_name],
|
||||
'links': [html_link]
|
||||
if email not in aggregation:
|
||||
aggregation[email] = {
|
||||
"email": email,
|
||||
"parent_name": c.vorname_eltern if c.vorname_eltern else "Liebe Eltern",
|
||||
"children": [],
|
||||
"links": []
|
||||
}
|
||||
else:
|
||||
if child_name not in aggregated_data[email]['children']:
|
||||
aggregated_data[email]['children'].append(child_name)
|
||||
aggregated_data[email]['links'].append(html_link)
|
||||
|
||||
final_list = []
|
||||
for email, data in aggregated_data.items():
|
||||
names = data['children']
|
||||
if len(names) > 2:
|
||||
names_str = ', '.join(names[:-1]) + ' und ' + names[-1]
|
||||
else:
|
||||
names_str = ' und '.join(names)
|
||||
|
||||
final_list.append({
|
||||
'Name Käufer': data['buyer_first_name'],
|
||||
'E-Mail-Adresse Käufer': email,
|
||||
'Kindernamen': names_str,
|
||||
'LinksHTML': '<br><br>'.join(data['links'])
|
||||
})
|
||||
|
||||
# Add child name
|
||||
child_name = c.vorname_kind or ""
|
||||
child_label = "Familienbilder" if child_name.lower() == "familie" else child_name
|
||||
if child_label and child_label not in aggregation[email]["children"]:
|
||||
aggregation[email]["children"].append(child_label)
|
||||
|
||||
# Add Quick Login Link (Guaranteed to exist here)
|
||||
html_link = f'<a href="{c.quick_login_url}">Fotos von {child_label}</a>'
|
||||
if html_link not in aggregation[email]["links"]:
|
||||
aggregation[email]["links"].append(html_link)
|
||||
|
||||
# 4. Format for Supermailer/Gmail
|
||||
final_result = []
|
||||
for email, data in aggregation.items():
|
||||
children_str = " und ".join(data["children"]) if len(data["children"]) > 1 else (data["children"][0] if data["children"] else "Eurem Kind")
|
||||
links_html = "".join([f"{l}<br>" for l in data["links"]])
|
||||
|
||||
final_result.append({
|
||||
"E-Mail-Adresse Käufer": email,
|
||||
"Name Käufer": data["parent_name"],
|
||||
"Kindernamen": children_str,
|
||||
"Anzahl Kinder": len(data["children"]),
|
||||
"LinksHTML": links_html
|
||||
})
|
||||
|
||||
progress_msg = f"Analyse fertig! {len(final_result)} Empfänger identifiziert."
|
||||
if missing_links_count > 0:
|
||||
progress_msg += f" (Hinweis: {missing_links_count} Kinder ignoriert, da Quick-Login-Link fehlt. Bitte vorher 'Daten abgleichen' drücken!)"
|
||||
|
||||
task_store[task_id] = {
|
||||
"status": "completed",
|
||||
"progress": "Analyse abgeschlossen!",
|
||||
"result": final_list
|
||||
"status": "completed",
|
||||
"progress": progress_msg,
|
||||
"result": final_result
|
||||
}
|
||||
|
||||
except Exception as e:
|
||||
logger.exception(f"Error in task {task_id}")
|
||||
task_store[task_id] = {"status": "error", "progress": f"Fehler: {str(e)}"}
|
||||
finally:
|
||||
if driver: driver.quit()
|
||||
db.close()
|
||||
|
||||
from fastapi import FastAPI, HTTPException, Depends, BackgroundTasks, UploadFile, File, Form
|
||||
from fastapi.middleware.cors import CORSMiddleware
|
||||
@@ -1036,10 +936,16 @@ async def start_statistics(job_id: str, account_type: str, background_tasks: Bac
|
||||
return {"task_id": task_id}
|
||||
|
||||
@app.post("/api/jobs/{job_id}/reminder-analysis")
|
||||
async def start_reminder_analysis(job_id: str, account_type: str, background_tasks: BackgroundTasks):
|
||||
logger.info(f"API Request: Start reminder analysis for job {job_id} ({account_type})")
|
||||
async def start_reminder_analysis(
|
||||
job_id: str,
|
||||
account_type: str,
|
||||
background_tasks: BackgroundTasks,
|
||||
max_logins: int = 1,
|
||||
exclude_purchased_emails: bool = True
|
||||
):
|
||||
logger.info(f"API Request: Start reminder analysis for job {job_id} ({account_type}, max_logins={max_logins}, exclude_purchased={exclude_purchased_emails})")
|
||||
task_id = str(uuid.uuid4())
|
||||
background_tasks.add_task(process_reminder_analysis, task_id, job_id, account_type)
|
||||
background_tasks.add_task(process_reminder_analysis, task_id, job_id, account_type, max_logins, exclude_purchased_emails)
|
||||
return {"task_id": task_id}
|
||||
|
||||
@app.get("/api/tasks/{task_id}/download-csv")
|
||||
@@ -1092,6 +998,410 @@ async def send_bulk_emails(request: BulkEmailRequest, db: Session = Depends(get_
|
||||
"failed": failed_emails
|
||||
}
|
||||
|
||||
def sync_participants(job_id: str, account_type: str, db: Session, task_id: str = None):
|
||||
logger.info(f"Syncing participants for job {job_id} ({account_type})")
|
||||
username = os.getenv(f"{account_type.upper()}_USER")
|
||||
password = os.getenv(f"{account_type.upper()}_PW")
|
||||
|
||||
with tempfile.TemporaryDirectory() as temp_dir:
|
||||
driver = setup_driver(download_path=temp_dir)
|
||||
try:
|
||||
if not login(driver, username, password):
|
||||
raise Exception("Login failed.")
|
||||
|
||||
# Navigate to the Persons tab
|
||||
if task_id: task_store[task_id]["progress"] = "Hole Teilnehmerliste (CSV)..."
|
||||
job_url = f"https://app.fotograf.de/config_jobs_settings/index/{job_id}"
|
||||
driver.get(job_url)
|
||||
wait = WebDriverWait(driver, 30)
|
||||
|
||||
personen_tab = wait.until(EC.presence_of_element_located((By.CSS_SELECTOR, "[data-qa-id='link:photo-jobs-tabs-names_list']")))
|
||||
driver.execute_script("arguments[0].click();", personen_tab)
|
||||
|
||||
# Click Export -> CSV
|
||||
export_btn = wait.until(EC.presence_of_element_located((By.CSS_SELECTOR, SELECTORS["export_dropdown"])))
|
||||
driver.execute_script("arguments[0].click();", export_btn)
|
||||
time.sleep(1)
|
||||
csv_btn = wait.until(EC.presence_of_element_located((By.CSS_SELECTOR, SELECTORS["export_csv_link"])))
|
||||
driver.execute_script("arguments[0].click();", csv_btn)
|
||||
|
||||
# Wait for download
|
||||
csv_file = None
|
||||
for _ in range(45):
|
||||
files = os.listdir(temp_dir)
|
||||
csv_files = [f for f in files if f.endswith('.csv')]
|
||||
if csv_files:
|
||||
csv_file = os.path.join(temp_dir, csv_files[0])
|
||||
break
|
||||
time.sleep(1)
|
||||
|
||||
if not csv_file:
|
||||
raise Exception("CSV download timed out.")
|
||||
|
||||
# Read CSV with pandas
|
||||
df = None
|
||||
for sep in [";", ","]:
|
||||
try:
|
||||
df = pd.read_csv(csv_file, sep=sep, encoding="utf-8-sig")
|
||||
if len(df.columns) > 1: break
|
||||
except: continue
|
||||
|
||||
if df is None: raise Exception("Could not parse CSV.")
|
||||
|
||||
# Clean columns
|
||||
df.columns = df.columns.str.strip().str.replace("\"", "")
|
||||
logger.debug(f"Sync CSV Columns: {list(df.columns)}")
|
||||
|
||||
# Column Mapping
|
||||
mapping = {
|
||||
"Child ID": "child_id",
|
||||
"Email der Eltern (1)": "email_eltern",
|
||||
"Vorname Eltern (1)": "vorname_eltern",
|
||||
"Nachname Eltern (1)": "nachname_eltern",
|
||||
"Vorname Kind": "vorname_kind",
|
||||
"Nachname Kind": "nachname_kind",
|
||||
"Zugangscode (1)": "zugangscode",
|
||||
"Logins (1)": "logins",
|
||||
"Bestellungen": "has_orders",
|
||||
"Gruppe": "gruppe",
|
||||
"Klasse": "gruppe"
|
||||
}
|
||||
|
||||
if task_id: task_store[task_id]["progress"] = "Aktualisiere Datenbank..."
|
||||
|
||||
# Upsert into database
|
||||
for _, row in df.iterrows():
|
||||
code = str(row.get("Zugangscode (1)", "")).strip()
|
||||
if not code or code == "nan": continue
|
||||
|
||||
def clean_val(val):
|
||||
v = str(val).strip()
|
||||
return "" if v.lower() == "nan" else v
|
||||
|
||||
# Determine order status
|
||||
orders_val = str(row.get("Bestellungen", "0")).lower()
|
||||
has_orders = 1 if (orders_val != "0" and orders_val != "nan" and orders_val != "") else 0
|
||||
|
||||
# Determine logins
|
||||
logins_val = row.get("Logins (1)", 0)
|
||||
try: logins = int(float(logins_val))
|
||||
except: logins = 0
|
||||
|
||||
participant = db.query(JobParticipant).filter(JobParticipant.job_id == job_id, JobParticipant.zugangscode == code).first()
|
||||
if not participant:
|
||||
participant = JobParticipant(job_id=job_id, zugangscode=code)
|
||||
db.add(participant)
|
||||
|
||||
participant.child_id = clean_val(row.get("Child ID"))
|
||||
participant.vorname_kind = clean_val(row.get("Vorname Kind"))
|
||||
participant.nachname_kind = clean_val(row.get("Nachname Kind"))
|
||||
participant.vorname_eltern = clean_val(row.get("Vorname Eltern (1)"))
|
||||
participant.nachname_eltern = clean_val(row.get("Nachname Eltern (1)"))
|
||||
participant.email_eltern = clean_val(row.get("Email der Eltern (1)")).lower()
|
||||
participant.gruppe = clean_val(row.get("Gruppe", row.get("Klasse")))
|
||||
participant.logins = logins
|
||||
participant.has_orders = has_orders
|
||||
participant.last_synced = datetime.datetime.utcnow()
|
||||
|
||||
db.commit()
|
||||
logger.info(f"Successfully synced {len(df)} participants from CSV.")
|
||||
|
||||
# --- PHASE 2: Scrape Orders for Digital Packages (Price Magic) ---
|
||||
try:
|
||||
if task_id: task_store[task_id]["progress"] = "Suche nach digitalen Käufen (Price Magic)..."
|
||||
orders_url = f"https://app.fotograf.de/config_jobs_orders/{job_id}/customer_orders"
|
||||
logger.info(f"Navigating to orders page for price magic: {orders_url}")
|
||||
driver.get(orders_url)
|
||||
time.sleep(3) # Wait for page/table to load
|
||||
|
||||
# Find all order rows
|
||||
order_rows = driver.find_elements(By.XPATH, "//table/tbody/tr")
|
||||
logger.info(f"Found {len(order_rows)} order rows to analyze.")
|
||||
|
||||
digital_matches = 0
|
||||
for row in order_rows:
|
||||
try:
|
||||
cols = row.find_elements(By.TAG_NAME, "td")
|
||||
if len(cols) < 11: continue
|
||||
|
||||
fname = cols[4].text.strip()
|
||||
lname = cols[5].text.strip()
|
||||
sum_text = cols[8].text.strip()
|
||||
status_text = cols[10].text.strip()
|
||||
|
||||
# Parse Sum (e.g., "58,90 €")
|
||||
clean_sum_text = sum_text.replace("€", "").replace(",", ".").replace(" ", "").strip()
|
||||
try:
|
||||
order_sum = float(clean_sum_text)
|
||||
except:
|
||||
order_sum = 0.0
|
||||
|
||||
is_digital = False
|
||||
|
||||
# PRICE MAGIC: Defined package prices (regular & discounted)
|
||||
# Digital Single: 58.90 / 53.90
|
||||
# Digital Siblings: 109.90 / 94.90
|
||||
# Digital Family: 75.90 / 70.90
|
||||
target_prices = [58.90, 53.90, 109.90, 94.90, 75.90, 70.90]
|
||||
|
||||
if any(abs(order_sum - p) < 0.01 for p in target_prices):
|
||||
is_digital = True
|
||||
|
||||
# STATUS FALLBACK: If status already says download
|
||||
if "heruntergeladen" in status_text.lower() or "download" in status_text.lower():
|
||||
is_digital = True
|
||||
|
||||
if is_digital and fname and lname:
|
||||
# Update participants matching these parents
|
||||
db.query(JobParticipant).filter(
|
||||
JobParticipant.job_id == job_id,
|
||||
JobParticipant.vorname_eltern == fname,
|
||||
JobParticipant.nachname_eltern == lname
|
||||
).update({JobParticipant.digital_package_ordered: 1})
|
||||
digital_matches += 1
|
||||
except Exception as row_err:
|
||||
logger.warning(f"Error parsing order row: {row_err}")
|
||||
continue
|
||||
|
||||
db.commit()
|
||||
logger.info(f"Price Magic complete: Identified {digital_matches} digital packages.")
|
||||
|
||||
except Exception as order_err:
|
||||
logger.error(f"Failed to scrape orders for price magic: {order_err}")
|
||||
|
||||
# --- PHASE 3: Link Magic (Scrape Quick Login URLs) ---
|
||||
try:
|
||||
# Find candidates for reminders who don't have a link yet
|
||||
# We prioritize those with few logins and no orders
|
||||
link_candidates = db.query(JobParticipant).filter(
|
||||
JobParticipant.job_id == job_id,
|
||||
JobParticipant.has_orders == 0,
|
||||
JobParticipant.logins <= 5,
|
||||
JobParticipant.quick_login_url == None
|
||||
).all()
|
||||
|
||||
if link_candidates:
|
||||
if task_id: task_store[task_id]["progress"] = f"Sammle Login-Links für {len(link_candidates)} Personen (Link Magic)..."
|
||||
logger.info(f"Link Magic: Identified {len(link_candidates)} candidates for link scraping.")
|
||||
|
||||
# Navigate back to Persons tab
|
||||
albums_overview_url = f"https://app.fotograf.de/config_jobs_photos/index/{job_id}"
|
||||
logger.info(f"Navigating to Albums overview: {albums_overview_url}")
|
||||
driver.get(albums_overview_url)
|
||||
|
||||
# Find all album links
|
||||
album_elements = wait.until(EC.presence_of_all_elements_located((By.XPATH, SELECTORS["album_overview_link"])))
|
||||
albums = [{"name": e.text, "url": e.get_attribute("href")} for e in album_elements]
|
||||
|
||||
codes_to_find = {c.zugangscode: c for c in link_candidates}
|
||||
links_found = 0
|
||||
|
||||
for album in albums:
|
||||
if not codes_to_find: break
|
||||
logger.info(f"Searching for links in album: {album['name']}")
|
||||
driver.get(album['url'])
|
||||
|
||||
try:
|
||||
total_codes_text = wait.until(EC.visibility_of_element_located((By.XPATH, SELECTORS["access_code_count"]))).text
|
||||
num_pages = math.ceil(int(total_codes_text) / 20)
|
||||
|
||||
for page_num in range(1, num_pages + 1):
|
||||
if not codes_to_find: break
|
||||
if page_num > 1:
|
||||
driver.get(album['url'] + f"?page_guest_accesses={page_num}")
|
||||
|
||||
person_rows = wait.until(EC.presence_of_all_elements_located((By.XPATH, SELECTORS["person_rows"])))
|
||||
|
||||
# Map of codes on this page to their communication link
|
||||
page_links = {}
|
||||
for row in person_rows:
|
||||
row_text = row.text
|
||||
for code in list(codes_to_find.keys()):
|
||||
if code in row_text:
|
||||
try:
|
||||
comm_link = row.find_element(By.XPATH, SELECTORS["person_access_code_link"]).get_attribute("href")
|
||||
page_links[code] = comm_link
|
||||
except: pass
|
||||
|
||||
# Now visit each communication page
|
||||
for code, comm_link in page_links.items():
|
||||
if code not in codes_to_find: continue
|
||||
logger.debug(f"Scraping link for code {code}...")
|
||||
if task_id: task_store[task_id]["progress"] = f"Hole Link {links_found+1} / {len(link_candidates)}..."
|
||||
driver.get(comm_link)
|
||||
try:
|
||||
wait_short = WebDriverWait(driver, 5)
|
||||
quick_link_el = wait_short.until(EC.presence_of_element_located((By.XPATH, SELECTORS["quick_login_url"])))
|
||||
quick_link = quick_link_el.get_attribute("href")
|
||||
|
||||
# Update DB
|
||||
codes_to_find[code].quick_login_url = quick_link
|
||||
del codes_to_find[code]
|
||||
links_found += 1
|
||||
|
||||
if links_found % 5 == 0: db.commit()
|
||||
except:
|
||||
logger.warning(f"Could not find quick login link for {code}")
|
||||
|
||||
# Go back to album page if we visited communication pages
|
||||
if page_links:
|
||||
driver.get(album['url'] + (f"?page_guest_accesses={page_num}" if page_num > 1 else ""))
|
||||
wait.until(EC.presence_of_all_elements_located((By.XPATH, SELECTORS["person_rows"])))
|
||||
|
||||
except Exception as album_err:
|
||||
logger.error(f"Error in album {album['name']}: {album_err}")
|
||||
|
||||
db.commit()
|
||||
logger.info(f"Link Magic complete: Scraped {links_found} links.")
|
||||
except Exception as link_err:
|
||||
logger.error(f"Failed to scrape links: {link_err}")
|
||||
|
||||
return len(df)
|
||||
|
||||
finally:
|
||||
driver.quit()
|
||||
|
||||
@app.get("/api/jobs/{job_id}/reminder-history")
async def get_reminder_history(job_id: str, db: Session = Depends(get_db)):
    """Return all reminder-history entries for a job, newest first, as JSON dicts."""
    entries = (
        db.query(ReminderHistory)
        .filter(ReminderHistory.job_id == job_id)
        .order_by(ReminderHistory.timestamp.desc())
        .all()
    )
    payload = []
    for entry in entries:
        payload.append({
            "id": entry.id,
            "timestamp": entry.timestamp.isoformat(),
            "recipient_count": entry.recipient_count,
            "max_logins": entry.max_logins,
            "scheduled_time": entry.scheduled_time,
            # recipients_json may be empty/NULL for older rows -> return an empty list
            "recipients": json.loads(entry.recipients_json) if entry.recipients_json else [],
        })
    return payload
|
||||
|
||||
class SendReminderRequest(BaseModel):
    # Request payload for POST /api/jobs/{job_id}/reminder-send.
    emails: List[Dict[str, str]]  # each item holds "to", "subject", "body" keys (see send loop)
    max_logins: int  # recorded in ReminderHistory; presumably the login threshold used to pick recipients — TODO confirm
    scheduled_time: Optional[str] = None  # when set, sending is deferred via delayed_send instead of immediate
    recipients_data: List[Dict[str, Any]]  # To store in history
|
||||
|
||||
@app.post("/api/jobs/{job_id}/reminder-send")
async def send_reminders(
    job_id: str,
    data: SendReminderRequest,
    background_tasks: BackgroundTasks,
    db: Session = Depends(get_db)
):
    """Persist a reminder batch to history, then either schedule it or send it now.

    Returns a "scheduled" status for deferred sends, otherwise per-mail results.
    """
    logger.info(f"Sending {len(data.emails)} reminders for job {job_id}")

    # Record the batch in the history table before any sending happens.
    history_entry = ReminderHistory(
        job_id=job_id,
        recipient_count=len(data.emails),
        max_logins=data.max_logins,
        recipients_json=json.dumps(data.recipients_data),
        scheduled_time=data.scheduled_time or "Sofort"
    )
    db.add(history_entry)
    db.commit()

    # Deferred path: reuse the shared delayed_send background task.
    if data.scheduled_time:
        from publish_request_api import delayed_send
        from database import SessionLocal
        background_tasks.add_task(delayed_send, data.emails, data.scheduled_time, SessionLocal)
        return {"status": "scheduled", "message": f"Versand für {data.scheduled_time} geplant."}

    # Immediate path: send each mail via Gmail, collecting failed recipients.
    service = GmailService(db)
    sent_ok = 0
    failed = []
    for mail in data.emails:
        delivered = service.send_email(mail["to"], mail["subject"], mail["body"])
        if delivered:
            sent_ok += 1
        else:
            failed.append(mail["to"])

    return {"status": "success", "success": sent_ok, "failed": failed}
|
||||
|
||||
@app.get("/api/jobs/{job_id}/login-distribution")
async def get_login_distribution(job_id: str, db: Session = Depends(get_db)):
    """Histogram of login counts over a job's participants, ordered by login count."""
    from sqlalchemy import func
    rows = (
        db.query(JobParticipant.logins, func.count(JobParticipant.id))
        .filter(JobParticipant.job_id == job_id)
        .group_by(JobParticipant.logins)
        .order_by(JobParticipant.logins)
        .all()
    )
    return [{"logins": login_count, "count": num} for login_count, num in rows]
|
||||
|
||||
@app.get("/api/jobs/{job_id}/fast-stats")
async def get_fast_stats(job_id: str, db: Session = Depends(get_db)):
    """Per-group ("Album") purchase statistics for a job, sorted by group name.

    Counts, per group: total children, children with orders, and children
    for whom the digital package was ordered.
    """
    participants = db.query(JobParticipant).filter(JobParticipant.job_id == job_id).all()
    if not participants:
        return []

    stats_by_group = {}
    for participant in participants:
        # Participants without a group fall into the "Unbekannt" bucket.
        group_name = participant.gruppe or "Unbekannt"
        bucket = stats_by_group.get(group_name)
        if bucket is None:
            bucket = {
                "Album": group_name,
                "Kinder_insgesamt": 0,
                "Kinder_mit_Käufen": 0,
                "Kinder_Alle_Bilder_gekauft": 0
            }
            stats_by_group[group_name] = bucket

        bucket["Kinder_insgesamt"] += 1
        if participant.has_orders:
            bucket["Kinder_mit_Käufen"] += 1
        if participant.digital_package_ordered:
            bucket["Kinder_Alle_Bilder_gekauft"] += 1

    return sorted(stats_by_group.values(), key=lambda row: row["Album"])
|
||||
|
||||
def process_sync_task(task_id: str, job_id: str, account_type: str):
    """Background worker: run the participant sync and publish progress via task_store.

    Opens its own DB session (background tasks cannot use the request-scoped one)
    and always closes it, writing a final completed/error state into task_store.
    """
    logger.info(f"Task {task_id}: Starting background sync for job {job_id}")
    task_store[task_id] = {"status": "running", "progress": "Starte Synchronisierung...", "result": None}
    session = SessionLocal()
    try:
        synced_count = sync_participants(job_id, account_type, session, task_id)
    except Exception as e:
        logger.exception(f"Unexpected error in sync task {task_id}")
        task_store[task_id] = {"status": "error", "progress": f"Fehler: {str(e)}"}
    else:
        task_store[task_id] = {
            "status": "completed",
            "progress": f"Abgleich fertig! {synced_count} Personen synchronisiert.",
            "result": synced_count
        }
    finally:
        session.close()
|
||||
|
||||
@app.post("/api/jobs/{job_id}/sync-participants")
async def sync_participants_api(job_id: str, account_type: str, background_tasks: BackgroundTasks):
    """Kick off a background participant sync; returns a task id the client can poll."""
    new_task_id = str(uuid.uuid4())
    background_tasks.add_task(process_sync_task, new_task_id, job_id, account_type)
    return {"task_id": new_task_id}
|
||||
|
||||
@app.get("/api/jobs/{job_id}/generate-pdf")
|
||||
async def generate_pdf(job_id: str, account_type: str, db: Session = Depends(get_db)):
|
||||
logger.info(f"API Request: Generate PDF for job {job_id} ({account_type})")
|
||||
@@ -1200,23 +1510,24 @@ async def generate_pdf(job_id: str, account_type: str, db: Session = Depends(get
|
||||
|
||||
@app.get("/api/jobs/{job_id}/siblings-list")
|
||||
async def generate_siblings_list(job_id: str, account_type: str, event_type_name: str = "", db: Session = Depends(get_db)):
|
||||
logger.info(f"API Request: Generate siblings list for job {job_id}")
|
||||
logger.info(f"API Request: Generate siblings list for job {job_id}, event_type: {event_type_name}")
|
||||
username = os.getenv(f"{account_type.upper()}_USER")
|
||||
password = os.getenv(f"{account_type.upper()}_PW")
|
||||
api_token = os.getenv("CALENDLY_TOKEN")
|
||||
|
||||
if not api_token:
|
||||
raise HTTPException(status_code=400, detail="Calendly API token missing.")
|
||||
|
||||
# Get Calendly events
|
||||
from qr_generator import get_calendly_events_raw
|
||||
try:
|
||||
# Fetch ALL events to ensure we don't miss siblings due to event name mismatches
|
||||
calendly_events = get_calendly_events_raw(api_token, event_type_name=None)
|
||||
logger.info(f"Fetched {len(calendly_events)} total events from Calendly for siblings check.")
|
||||
except Exception as e:
|
||||
logger.error(f"Error fetching Calendly events: {e}")
|
||||
calendly_events = []
|
||||
calendly_events = []
|
||||
if event_type_name:
|
||||
if not api_token:
|
||||
logger.warning("Calendly API token missing, skipping Calendly check.")
|
||||
else:
|
||||
# Get Calendly events
|
||||
from qr_generator import get_calendly_events_raw
|
||||
try:
|
||||
# Fetch ALL events to ensure we don't miss siblings due to event name mismatches
|
||||
calendly_events = get_calendly_events_raw(api_token, event_type_name=None)
|
||||
logger.info(f"Fetched {len(calendly_events)} total events from Calendly for siblings check.")
|
||||
except Exception as e:
|
||||
logger.error(f"Error fetching Calendly events: {e}")
|
||||
|
||||
with tempfile.TemporaryDirectory() as temp_dir:
|
||||
logger.debug(f"Using temp directory: {temp_dir}")
|
||||
|
||||
18
fotograf-de-scraper/backend/migrate_db.py
Normal file
18
fotograf-de-scraper/backend/migrate_db.py
Normal file
@@ -0,0 +1,18 @@
|
||||
import sqlite3
import os

# One-off migration script: add the digital_package_ordered column to
# job_participants. Prefers the container path, falls back to the local
# development checkout path.
db_path = "/app/data/fotograf_jobs.db"
if not os.path.exists(db_path):
    db_path = "fotograf-de-scraper/backend/data/fotograf_jobs.db"

conn = sqlite3.connect(db_path)
try:
    cursor = conn.cursor()
    try:
        cursor.execute("ALTER TABLE job_participants ADD COLUMN digital_package_ordered INTEGER DEFAULT 0;")
        print("Column 'digital_package_ordered' added successfully.")
    except sqlite3.OperationalError:
        # sqlite raises OperationalError on duplicate-column ALTER TABLE;
        # NOTE(review): this also swallows other operational errors (e.g. missing
        # table) with the same message — acceptable for a one-off script.
        print("Column 'digital_package_ordered' already exists.")
    conn.commit()
finally:
    # Fix: the original never closed the connection on an unexpected exception,
    # leaking the handle and skipping the commit cleanup.
    conn.close()
|
||||
12
fotograf-de-scraper/backend/orders_page.html
Normal file
12
fotograf-de-scraper/backend/orders_page.html
Normal file
File diff suppressed because one or more lines are too long
BIN
fotograf-de-scraper/backend/orders_page.png
Normal file
BIN
fotograf-de-scraper/backend/orders_page.png
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 78 KiB |
@@ -1,7 +1,7 @@
|
||||
from fastapi import APIRouter, Depends, HTTPException, Request, BackgroundTasks
|
||||
from pydantic import BaseModel
|
||||
from sqlalchemy.orm import Session
|
||||
from database import get_db, DiscountCode, ReleaseParticipant
|
||||
from database import get_db, DiscountCode, ReleaseParticipant, ReleaseHistory
|
||||
import datetime
|
||||
import logging
|
||||
from gmail_service import GmailService
|
||||
@@ -96,9 +96,18 @@ async def send_requests(data: SendReleaseRequest, background_tasks: BackgroundTa
|
||||
if data.scheduled_time:
|
||||
# Pass a way to get a new session to the background task
|
||||
from database import SessionLocal
|
||||
|
||||
# Log to history
|
||||
db.add(ReleaseHistory(recipient_count=len(data.emails), scheduled_time=data.scheduled_time))
|
||||
db.commit()
|
||||
|
||||
background_tasks.add_task(delayed_send, data.emails, data.scheduled_time, SessionLocal)
|
||||
return {"status": "scheduled", "message": f"Versand für {data.scheduled_time} geplant."}
|
||||
|
||||
# Log immediate send to history
|
||||
db.add(ReleaseHistory(recipient_count=len(data.emails), scheduled_time="Sofort"))
|
||||
db.commit()
|
||||
|
||||
# Immediate send
|
||||
service = GmailService(db)
|
||||
success = 0
|
||||
@@ -111,6 +120,11 @@ async def send_requests(data: SendReleaseRequest, background_tasks: BackgroundTa
|
||||
|
||||
return {"status": "success", "success": success, "failed": failed}
|
||||
|
||||
@router.get("/history")
def get_history(db: Session = Depends(get_db)):
    # Return all release-history entries, newest first, flattened to plain dicts.
    history = db.query(ReleaseHistory).order_by(ReleaseHistory.timestamp.desc()).all()
    return [{"id": h.id, "timestamp": h.timestamp.isoformat(), "recipient_count": h.recipient_count, "scheduled_time": h.scheduled_time} for h in history]
|
||||
|
||||
@router.get("/stats")
|
||||
def get_stats(db: Session = Depends(get_db)):
|
||||
total = db.query(DiscountCode).count()
|
||||
|
||||
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
1
fotograf-de-scraper/frontend/dist/assets/index-BnIZj8RP.css
vendored
Normal file
1
fotograf-de-scraper/frontend/dist/assets/index-BnIZj8RP.css
vendored
Normal file
File diff suppressed because one or more lines are too long
47
fotograf-de-scraper/frontend/dist/assets/index-DnGj5v5p.js
vendored
Normal file
47
fotograf-de-scraper/frontend/dist/assets/index-DnGj5v5p.js
vendored
Normal file
File diff suppressed because one or more lines are too long
4
fotograf-de-scraper/frontend/dist/index.html
vendored
4
fotograf-de-scraper/frontend/dist/index.html
vendored
@@ -5,8 +5,8 @@
|
||||
<link rel="icon" type="image/svg+xml" href="/fotograf-de/favicon.svg" />
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
|
||||
<title>Fotograf.de ERP</title>
|
||||
<script type="module" crossorigin src="/fotograf-de/assets/index-9o0T5Jx2.js"></script>
|
||||
<link rel="stylesheet" crossorigin href="/fotograf-de/assets/index-BaSYoDWO.css">
|
||||
<script type="module" crossorigin src="/fotograf-de/assets/index-DnGj5v5p.js"></script>
|
||||
<link rel="stylesheet" crossorigin href="/fotograf-de/assets/index-BnIZj8RP.css">
|
||||
</head>
|
||||
<body>
|
||||
<div id="root"></div>
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
27
readme.md
27
readme.md
@@ -313,3 +313,30 @@ Investierte Zeit in dieser Session: 01:57
|
||||
Arbeitszusammenfassung:
|
||||
Keine Zusammenfassung angegeben.
|
||||
```
|
||||
|
||||
|
||||
## 🤖 Status-Update (2026-04-18 15:58 Berlin Time)
|
||||
```yaml
|
||||
Investierte Zeit in dieser Session: 00:49
|
||||
|
||||
Arbeitszusammenfassung:
|
||||
Keine Zusammenfassung angegeben.
|
||||
```
|
||||
|
||||
|
||||
## 🤖 Status-Update (2026-04-18 22:58 Berlin Time)
|
||||
```yaml
|
||||
Investierte Zeit in dieser Session: 01:21
|
||||
|
||||
Arbeitszusammenfassung:
|
||||
Keine Zusammenfassung angegeben.
|
||||
```
|
||||
|
||||
|
||||
## 🤖 Status-Update (2026-05-04 08:53 Berlin Time)
|
||||
```yaml
|
||||
Investierte Zeit in dieser Session: 00:39
|
||||
|
||||
Arbeitszusammenfassung:
|
||||
Keine Zusammenfassung angegeben.
|
||||
```
|
||||
|
||||
Reference in New Issue
Block a user