Can you tell me why my while loop isn't working, please? I get no error message; it just runs once.
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
import pandas as pd
import time
PATH = "/Users/csongordoma/Documents/chromedriver"
driver = webdriver.Chrome(PATH)
current_page = 1
driver.get('https://ingatlan.com/lista/elado+lakas+budapest?page=' + str(current_page))
data = {}
df = pd.DataFrame(columns=['Price', 'Address', 'Size', 'Rooms', 'URL', 'Labels'])
listings = driver.find_elements_by_css_selector('div.listing__card')
while current_page < 10:
    for listing in listings:
        data['Price'] = listing.find_elements_by_css_selector('div.price')[0].text
        data['Address'] = listing.find_elements_by_css_selector('div.listing__address')[0].text
        data['Size'] = listing.find_elements_by_css_selector('div.listing__parameters')[0].text
        data['Labels'] = listing.find_elements_by_css_selector('div.listing__labels')[0].text
        data['URL'] = listing.find_elements_by_css_selector('a.listing__link.js-listing-active-area')[0].get_attribute('href')
        df = df.append(data, ignore_index=True)
    current_page += 1
    print(len(listings))
    print(df)
    # driver.find_element_by_xpath("//a[. = 'Következő oldal']").click()
driver.quit()
The output is a good data frame of 20 items, which is one page's worth on the website I'm trying to scrape. I set the limit at 10 cycles so as not to overload anyone, but ideally I want to run through all pages.
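My own suspicion, after staring at it: nothing inside the loop ever navigates to a new page or re-queries the DOM, so every pass reuses the 20 elements found before the loop started, and the commented-out click on 'Következő oldal' ('Next page' in Hungarian) never runs. Below is a minimal sketch of what I think the loop should look like, assuming the ?page= query parameter works the same way for every page. I've kept the Selenium 3 style finders from my code above (Selenium 4 renames them to driver.find_elements(By.CSS_SELECTOR, ...)). Is this the right direction?

from selenium import webdriver
import pandas as pd
import time

PATH = "/Users/csongordoma/Documents/chromedriver"
driver = webdriver.Chrome(PATH)

rows = []
current_page = 1
while current_page < 10:  # still capped at 10 pages to stay polite
    # Load the page for THIS iteration -- the step my loop above is missing.
    driver.get('https://ingatlan.com/lista/elado+lakas+budapest?page=' + str(current_page))
    listings = driver.find_elements_by_css_selector('div.listing__card')
    if not listings:  # an empty page means we've run out of listings
        break
    for listing in listings:
        rows.append({
            'Price': listing.find_elements_by_css_selector('div.price')[0].text,
            'Address': listing.find_elements_by_css_selector('div.listing__address')[0].text,
            'Size': listing.find_elements_by_css_selector('div.listing__parameters')[0].text,
            'Labels': listing.find_elements_by_css_selector('div.listing__labels')[0].text,
            'URL': listing.find_elements_by_css_selector('a.listing__link.js-listing-active-area')[0].get_attribute('href'),
        })
    current_page += 1
    time.sleep(1)  # small pause between pages so I don't hammer the site

# Build the DataFrame once at the end; DataFrame.append was removed in pandas 2.x anyway.
df = pd.DataFrame(rows)
print(len(df))
driver.quit()

Collecting dicts in a list and building the DataFrame once at the end also avoids the quadratic cost of appending row by row, if I've understood the pandas docs correctly.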