dealfront_enrichment.py aktualisiert
This commit is contained in:
@@ -73,44 +73,48 @@ class DealfrontScraper:
|
|||||||
time.sleep(1)
|
time.sleep(1)
|
||||||
|
|
||||||
def extract_current_page_results(self):
    """Extract company name and website from every row of the current result page.

    Returns:
        list[dict]: One ``{'name': str, 'website': str}`` entry per table row
        that has a name element; rows without one are skipped with a warning.
        Returns ``[]`` if extraction fails entirely (error is logged and debug
        artifacts are saved via ``self._save_debug_artifacts``).
    """
    # 1) Temporarily lower the implicit wait so per-row find_elements calls
    #    that find nothing return immediately instead of blocking.
    self.driver.implicitly_wait(1)

    # 2) Wait for the first company element to become visible, then give the
    #    page a short buffer to finish rendering.
    first = (By.CSS_SELECTOR, ".sticky-column a.t-highlight-text")
    self.wait.until(EC.visibility_of_element_located(first))
    time.sleep(1)

    try:
        logger.info("Extrahiere Ergebnisse von der aktuellen Seite...")
        results = []

        # 3) Wait for at least one table row to be present, then collect all rows.
        rows_sel = (By.CSS_SELECTOR, "table#t-result-table tbody tr[id]")
        self.wait.until(EC.presence_of_all_elements_located(rows_sel))
        rows = self.driver.find_elements(*rows_sel)
        logger.info(f"{len(rows)} Firmen-Zeilen gefunden.")

        # 4) Extract per row without any further sleeps or implicit waits.
        for i, row in enumerate(rows, 1):
            # Company name via the proven selector; the "title" attribute is
            # preferred because the visible text may be truncated.
            name_elems = row.find_elements(By.CSS_SELECTOR, ".sticky-column a.t-highlight-text")
            if not name_elems:
                logger.warning(f"Zeile {i}: Kein Name-Element gefunden. Überspringe.")
                continue
            ne = name_elems[0]
            company_name = (ne.get_attribute("title") or ne.text).strip()

            # Website via the proven selector; optional, empty string if absent.
            web_elems = row.find_elements(By.CSS_SELECTOR, "a.text-gray-400.t-highlight-text")
            website = web_elems[0].text.strip() if web_elems else ""

            results.append({'name': company_name, 'website': website})

        logger.info(f"Extraktion abgeschlossen: {len(results)} Firmen.")
        return results

    except Exception as e:
        # Boundary handler: log with full traceback, persist debug artifacts
        # (screenshot/page source) for post-mortem, and degrade to an empty list.
        logger.error(f"Schwerwiegender Fehler bei der Extraktion: {type(e).__name__}", exc_info=True)
        self._save_debug_artifacts()
        return []

    finally:
        # 5) Restore the implicit wait to the standard value (10 s) regardless
        #    of success or failure, so later page interactions behave normally.
        self.driver.implicitly_wait(10)
def click_next_page(self):
|
def click_next_page(self):
|
||||||
|
|||||||
Reference in New Issue
Block a user