I am trying to get data from a website. Since it is dynamically loaded, it sometimes takes 1-2 minutes for one element to load, so `time.sleep(1/2/3/10)` doesn't work — the script moves on too early and fails with "Message: element click intercepted: Element is not clickable" in Selenium with Python. My code is below:
import os
import platform
import csv
import time

from selenium.webdriver.common.by import By

# Pick the interpreter alias used for on-the-fly dependency installation.
# NOTE(review): installing at import time via os.system is fragile — prefer
# a requirements.txt; kept because the original script relies on it.
_python = "python" if platform.system() == "Windows" else "python3"
try:
    import undetected_chromedriver as uc
    from parsel import Selector
    from selenium import webdriver
except ImportError:
    os.system(_python + ' -m pip install parsel')
    os.system(_python + ' -m pip install selenium')
    os.system(_python + ' -m pip install undetected_chromedriver')
    import undetected_chromedriver as uc
    from parsel import Selector
    from selenium import webdriver

# Base name of the output CSV file (rows are appended to "cadastre.csv").
filename = "cadastre"

if __name__ == '__main__':
    driver = uc.Chrome()
    time.sleep(5)
    driver.get("https://kais.cadastre.bg/bg/Map")
    # Let the operator set up the search in the browser before scraping starts.
    input("Ready (Y/N) : ")
    # Create the CSV file on first run so later appends always have a target.
    if filename + '.csv' not in os.listdir(os.getcwd()):
        with open(filename + ".csv", "a", newline="", encoding="utf-8") as f:
            csv.writer(f)
    while True:
        print("Waiting for 3 sec")
        time.sleep(3)
        response = Selector(text=driver.page_source)
        # Collect the data-uid attribute of every result row on this page.
        uids = response.xpath('.//*[@id="resultsList"]//@data-uid').extract()
        print(uids)
        for uid in uids:
            print("Clicking : " + str(uid))
            # Click through JavaScript: expanding one row shifts the others
            # out of the viewport, which makes a native .click() raise
            # "element click intercepted".
            item = driver.find_element(
                by=By.XPATH, value='.//*[@data-uid="' + str(uid) + '"]/a')
            driver.execute_script('arguments[0].click()', item)
            time.sleep(1)
        for uid in uids:
            sel = Selector(text=driver.page_source).xpath(
                './/*[@data-uid="' + str(uid) + '"]')
            # FIXME(review): the attribute predicate here was lost in the
            # paste ('[@]'); restore the original class/attribute filter for
            # the details container before running.
            textfile = ','.join(
                i.strip()
                for i in sel.xpath('.//*/p//text()').extract()
                if i.strip())
            if textfile:
                with open(filename + ".csv", "a", newline="",
                          encoding="utf-8") as f:
                    writer = csv.writer(f)
                    writer.writerow([textfile])
                print([textfile])
        # FIXME(review): this pager predicate also lost its attribute
        # ('[@]'); '[@class="k-state-selected"]' is the usual Kendo UI
        # marker for the current page — confirm against the live page.
        current_page = Selector(text=driver.page_source).xpath(
            './/*[@id="resultsList_pager"]'
            '//*[@class="k-state-selected"]/text()').extract_first()
        # Advance while a next page exists; otherwise stop.
        if Selector(text=driver.page_source).xpath(
                './/*[@data-page="' + str(int(current_page) + 1) + '"]'):
            # Index 8 is the "next page" arrow of the Kendo pager.
            driver.execute_script(
                "document.getElementsByClassName('k-link k-pager-nav')[8]"
                ".click();")
        else:
            break
    driver.close()
I have searched through all possible ways to use WebDriverWait, but none of the options worked.
CodePudding user response:
Try this:
from time import sleep

# Poll for up to ~100 seconds for the results list to appear, instead of one
# fixed sleep that may be too short.
counter = 1
element = None
while counter < 100:
    try:
        # NOTE(review): the original XPath ended in '//@data-uid', which
        # selects an attribute node — find_element cannot return that, so it
        # always raised. Select the element carrying the attribute instead.
        # (find_element_by_xpath is removed in Selenium 4; use
        # driver.find_element(By.XPATH, ...) there.)
        element = driver.find_element_by_xpath(
            ".//*[@id='resultsList']//*[@data-uid]")
    except Exception:  # narrow to NoSuchElementException if imported
        print("Element not visible yet...")
    if element is not None:
        print("Element located!")
        break
    sleep(1)
    counter = counter + 1
CodePudding user response:
You didn't say which element causes the problem.
In my run, the code only fails when it clicks a row to show its details.
(but I tested on Firefox
instead of undetected_chromedriver
)
When a click opens the details panel, it shifts the other rows so they are no longer visible in the window — and Selenium cannot click an element that is not in view.
I had to use JavaScript to perform the click:
# Locate the row's link by its data-uid attribute.
item = driver.find_element(by=By.XPATH, value=f'.//*[@data-uid="{uid}"]/a')
# Click via JavaScript so an off-screen/overlapped element still receives
# the click (avoids "element click intercepted").
driver.execute_script('arguments[0].click()', item)
This way I could click and collect every selected element — about 123 items across roughly 13 pages.