I have a client that wants to web scrape this sketchy website and the loops works the first time, then the error occurs. Any help? I suggest not visiting the website, but hopefully the pays worth my time lol.
options = webdriver.ChromeOptions()
options.add_argument("--incognito")
PATH = 'C:\Program Files (x86)\chromedriver.exe'
URL = 'https://avbebe.com/archives/category/高清中字/page/5'
driver = webdriver.Chrome(executable_path=PATH, options=options)
driver.get(URL)
time.sleep(5)
Vid = driver.find_elements_by_class_name('entry-title')
for title in Vid:
actions = ActionChains(driver)
time.sleep(5)
WebDriverWait(title, 10).until(EC.element_to_be_clickable((By.TAG_NAME, 'a')))#where error occurs
actions.double_click(title).perform()
time.sleep(5)
VidUrl = driver.current_url
VidTitle = driver.find_element_by_xpath('//*[@id="post-69331"]/h1/a').text
try:
VidTags = driver.find_elements_by_class_name('tags')
for tag in VidTags:
VidTag = tag.find_element_by_tag_name('a').text
except NoSuchElementException or StaleElementReferenceException:
pass
with open('data.csv', 'w', newline='', encoding = "utf-8") as f:
fieldnames = ['Title', 'Tags', 'URL']
thewriter = csv.DictWriter(f, fieldnames=fieldnames)
thewriter.writeheader()
thewriter.writerow({'Title': VidTitle, 'Tags': VidTag, 'URL': VidUrl})
driver.back()
driver.refresh()
print('done')
Error:
WebDriverWait(title, 10).until(EC.element_to_be_clickable((By.TAG_NAME, 'a'))) File "C:\Users\Heage\AppData\Local\Programs\Python\Python39\lib\site-packages\selenium\webdriver\support\wait.py",line 80, in until raise TimeoutException(message, screen, stacktrace) selenium.common.exceptions.TimeoutException: Message: