Dritte Spalte

This commit is contained in:
2025-07-08 16:55:42 +00:00
parent 0ccb4341fe
commit cfea6cc293

View File

@@ -132,26 +132,29 @@ class DealfrontScraper:
rows = self.driver.find_elements(*rows_selector) rows = self.driver.find_elements(*rows_selector)
logger.info(f"{len(rows)} Firmen-Datenzeilen zur Verarbeitung gefunden.") logger.info(f"{len(rows)} Firmen-Datenzeilen zur Verarbeitung gefunden.")
self.driver.implicitly_wait(1)
for i, row in enumerate(rows, 1): for i, row in enumerate(rows, 1):
try: try:
# Firmennamen holen # Firmennamen holen...
name_elem = row.find_element(By.CSS_SELECTOR, ".sticky-column a.t-highlight-text") name_elem = row.find_element(By.CSS_SELECTOR, ".sticky-column a.t-highlight-text")
company_name = (name_elem.get_attribute("title") or name_elem.text).strip() company_name = (name_elem.get_attribute("title") or name_elem.text).strip()
# Website aus der zweiten Spalte: erst href, dann Text-Fallback # Website aus der dritten Spalte: erst href, dann Text-Fallback
try: elems = row.find_elements(By.CSS_SELECTOR, "td:nth-of-type(3) a")
website_elem = row.find_element(By.CSS_SELECTOR, "td:nth-of-type(2) a") if elems:
# HREF bereinigen (ohne https://) website = elems[0].get_attribute("href").split("://", 1)[1].rstrip("/")
website = website_elem.get_attribute("href").split("://")[-1].strip("/") else:
except NoSuchElementException: website = row.find_element(By.CSS_SELECTOR, "td:nth-of-type(3)").text.strip()
website = row.find_element(By.CSS_SELECTOR, "td:nth-of-type(2)").text.strip()
results.append({'name': company_name, 'website': website}) results.append({'name': company_name, 'website': website})
except NoSuchElementException: except NoSuchElementException:
logger.warning(f"Zeile {i}: Name oder Webseite nicht extrahierbar. Überspringe.") logger.warning(f"Zeile {i}: Name oder Webseite nicht extrahierbar. Überspringe.")
continue continue
# Implicit-Wait wiederherstellen (z. B. 10 Sekunden)
self.driver.implicitly_wait(10)
logger.info(f"Extraktion abgeschlossen. {len(results)} Firmen gefunden.") logger.info(f"Extraktion abgeschlossen. {len(results)} Firmen gefunden.")
return results return results