Right now, this code is able to open every single product on this page "https://www.daraz.com.bd/audio/?page=1&spm=a2a0e.home.cate_2.2.49c74591NNpWDU%27", which takes me to the product details page of each item. Can anybody tell me how to loop over multiple pages, such as page 2, page 3, and page 4? Here is my code:
from selenium import webdriver
from selenium.webdriver.chrome.options import Options
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
from selenium.common.exceptions import TimeoutException
from bs4 import BeautifulSoup
# Argument for incognito Chrome
option = Options()
option.add_argument("--incognito")
browser = webdriver.Chrome(options=option)
browser.get("https://www.daraz.com.bd/audio/?page=1&spm=a2a0e.home.cate_2.2.49c74591NNpWDU%27")
# Wait up to 20 seconds for the product grid to load
timeout = 20
try:
    WebDriverWait(browser, timeout).until(
        EC.visibility_of_element_located((By.XPATH, "//div[@class='c16H9d']"))
    )
except TimeoutException:
    print("Timed out waiting for page to load")
    browser.quit()
    raise SystemExit  # stop here, since the browser session is gone
soup = BeautifulSoup(browser.page_source, "html.parser")
product_items = soup.find_all("div", attrs={"data-qa-locator": "product-item"})
for item in product_items:
    item_url = f"https:{item.find('a')['href']}"
    print(item_url)
    browser.get(item_url)
    item_soup = BeautifulSoup(browser.page_source, "html.parser")
    # Use the item_soup to find details about the item from its URL.
    container = item_soup.find_all("div", attrs={"id": "container"})
    for items in container:
        title = items.find("div", {"class": "pdp-product-title"})
        print(title)

browser.quit()
Right now it is only getting information from page 1. I want it to also collect information from the other pages, such as page 2, page 3, page 4, and page 5.
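The direction I have been thinking of is to wrap the scraping in a loop over the page numbers, collecting all product URLs from each listing page first and only visiting them afterwards, roughly like this (an untested sketch reusing the imports, browser setup, and timeout from above; I am assuming that changing the page= query parameter is all the pagination needs, and I am hard-coding last_page = 5):

last_page = 5  # assuming I only want pages 1 through 5
base_url = "https://www.daraz.com.bd/audio/?page={}&spm=a2a0e.home.cate_2.2.49c74591NNpWDU%27"

all_product_urls = []
for page in range(1, last_page + 1):
    browser.get(base_url.format(page))
    try:
        WebDriverWait(browser, timeout).until(
            EC.visibility_of_element_located((By.XPATH, "//div[@class='c16H9d']"))
        )
    except TimeoutException:
        print(f"Timed out waiting for page {page} to load")
        continue  # skip this listing page and move on to the next one
    # Grab every product link on this listing page before navigating anywhere
    soup = BeautifulSoup(browser.page_source, "html.parser")
    for item in soup.find_all("div", attrs={"data-qa-locator": "product-item"}):
        all_product_urls.append(f"https:{item.find('a')['href']}")

# Now visit each collected product page for the details
for item_url in all_product_urls:
    print(item_url)
    browser.get(item_url)
    item_soup = BeautifulSoup(browser.page_source, "html.parser")
    for items in item_soup.find_all("div", attrs={"id": "container"}):
        title = items.find("div", {"class": "pdp-product-title"})
        print(title)

browser.quit()

I collect the URLs before visiting any product because browser.get(item_url) replaces the current listing page in the browser. Is this the right approach, or is there a better way to handle the pagination?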