As the title says, when scraping https://www.fragrantica.com/search/ while looping through filters, Selenium sometimes throws WebDriverException: Message: target frame detached and sometimes it doesn't. When it is thrown, it happens at seemingly random lines rather than at one specific place, so I have no idea how to tackle it. Some people suggested that I use this line:
WebDriverWait(driver, 20).until(EC.frame_to_be_available_and_switch_to_it((By.CSS_SELECTOR,"iframeCssSelector")))
But where should I place it if the exception happens randomly throughout the code? Here is my code; it extracts the perfumes from the filtered page:
from selenium import webdriver
from selenium.webdriver.chrome.service import Service
from selenium.webdriver.chrome.options import Options
from selenium.webdriver.common.by import By
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.support.ui import WebDriverWait
from selenium.common.exceptions import NoSuchElementException
from selenium.common.exceptions import ElementClickInterceptedException
import time
options = Options()
options.add_argument("--profile-directory=Default")
options.add_argument('--disable-blink-features=AutomationControlled')
options.add_argument("start-maximized")
options.add_argument("user-agent=Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/102.0.0.0 Safari/537.36")
driver = webdriver.Chrome(service = Service(executable_path='C:/Users/armon/Downloads/chromedriver_win32/chromedriver.exe'), options=options)
url = 'https://www.fragrantica.com/'
driver.get(url)
time.sleep(3)
perfumes_btn = driver.find_element(by=By.XPATH, value = '//*[@id="offCanvasLeft"]/ul/li[5]/a')
perfumes_btn.click()
search_btn = WebDriverWait(driver,20).until(EC.element_to_be_clickable((By.XPATH,'/html/body/div[2]/div[2]/div[2]/ul/li[5]/ul/li[1]/a')))
search_btn.click()
time.sleep(3)
Industry_more = '//*[@id="offCanvasLeftOverlap1"]/div/div/div[13]/div[2]/div/p/div/button'
Industry_dict = {'Industry_Fragrances' : '//*[@ and @value="Fragrances"]',
'Industry_Cosmetics' : '//*[@ and @value="Cosmetics"]',
'Industry_Fashion' : '//*[@ and @value="Fashion"]',
'Industry_NaturalPerfumery': '//*[@ and @value="Natural Perfumery"]',
'Industry_Celebrity' : '//*[@ and @value="Celebrity"]',
'Industry_Accessories' : '//*[@ and @value="Accessories"]',
'Industry_Jewelry': '//*[@ and @value="Jewelry"]',
'Industry_Retailer': '//*[@ and @value="Retailer"]',
'Industry_Lingerie': '//*[@ and @value="Lingerie"]',
'Industry_NichePerfumes': '//*[@ and @value="Niche Perfumes"]'
}
Gender_dict = {'Gender_Female' : '//*[@ and @value="female"]',
'Gender_Unisex' : '//*[@ and @value="unisex"]',
'Gender_Male' : '//*[@ and @value="male"]'
}
Country_more = '//*[@id="offCanvasLeftOverlap1"]/div/div/div[10]/div[2]/div/p/div/button'
Country_dict = {'Country_USA' : '//*[@ and @value="United States"]',
'Country_France' : '//*[@ and @value="France"]',
'Country_Italy' : '//*[@ and @value="Italy"]',
'Country_UK' : '//*[@ and @value="United Kingdom"]',
'Country_Brazil' : '//*[@ and @value="Brazil"]',
'Country_UAE' : '//*[@ and @value="United Arab Emirates"]',
'Country_Russia': '//*[@ and @value="Russia"]',
'Country_Spain' : '//*[@ and @value="Spain"]',
'Country_Germany' : '//*[@ and @value="Germany"]',
'Country_SaudiArabia' : '//*[@ and @value="Saudi Arabia"]',
'Country_Sweden' : '//*[@ and @value="Sweden"]',
'Country_Latvia' : '//*[@ and @value="Latvia"]',
'Country_Poland' : '//*[@ and @value="Poland"]',
'Country_Japan' : '//*[@ and @value="Japan"]',
'Country_Netherlands' : '//*[@ and @value="Netherlands"]',
'Country_Canada' : '//*[@ and @value="Canada"]',
'Country_Australia' : '//*[@ and @value="Australia"]',
'Country_Switzerland' : '//*[@ and @value="Switzerland"]',
'Country_Argentina' : '//*[@ and @value="Argentina"]',
'Country_Belgium' : '//*[@ and @value="Belgium"]',
'Country_Pakistan' : '//*[@ and @value="Pakistan"]',
'Country_Slovakia' : '//*[@ and @value="Slovakia"]',
'Country_Austria' : '//*[@ and @value="Austria"]',
'Country_Thailand' : '//*[@ and @value="Thailand"]',
'Country_Philippines' : '//*[@ and @value="Philippines"]',
'Country_SouthKorea' : '//*[@ and @value="South Korea"]',
'Country_Belarus' : '//*[@ and @value="Belarus"]',
'Country_Israel' : '//*[@ and @value="Israel"]',
'Country_Korea' : '//*[@ and @value="Korea"]',
'Country_Greece' : '//*[@ and @value="Greece"]',
'Country_Denmark' : '//*[@ and @value="Denmark"]',
'Country_Norway' : '//*[@ and @value="Norway"]',
'Country_Portugal' : '//*[@ and @value="Portugal"]',
'Country_Iceland' : '//*[@ and @value="Iceland"]',
'Country_Newzealand' : '//*[@ and @value="New Zealand"]',
'Country_Ukraine' : '//*[@ and @value="Ukraine"]',
'Country_Romania' : '//*[@ and @value="Romania"]'
}
start_year_xpath = '//*[@id="offCanvasLeftOverlap1"]/div/div/div[5]/div[2]/div/p/div/form/input[1]'
end_year_xpath = '//*[@id="offCanvasLeftOverlap1"]/div/div/div[5]/div[2]/div/p/div/form/input[2]'
load_more_button = '//*[@id="main-content"]/div[1]/div[1]/div/div/div/div[2]/div[1]/div/div[3]/div/div/div/div/div/button'
clear_filters_button = '//*[@id="offCanvasLeftOverlap1"]/div/div/div[1]/div[2]/p/div/button'
industry_load = WebDriverWait(driver,20).until(EC.element_to_be_clickable((By.XPATH, Industry_more)))
driver.execute_script("arguments[0].click();", industry_load)
country_load = WebDriverWait(driver,20).until(EC.element_to_be_clickable((By.XPATH, Country_more)))
driver.execute_script("arguments[0].click();", country_load)
def start_scraping():
    elements = driver.find_elements(by=By.XPATH, value='//*[@]')
    if len(elements) == 0:
        return []
    elif len(elements) < 30:
        return elements
    else:
        prev_count = len(elements)
        loading = WebDriverWait(driver, 100).until(EC.element_to_be_clickable((By.XPATH, load_more_button)))
        driver.execute_script("arguments[0].click();", loading)
        time.sleep(2)
        elements = driver.find_elements(by=By.XPATH, value='//*[@]')
        now_count = len(elements)
        while now_count > prev_count:
            loading = WebDriverWait(driver, 100).until(EC.element_to_be_clickable((By.XPATH, load_more_button)))
            driver.execute_script("arguments[0].click();", loading)
            time.sleep(2)
            elements = driver.find_elements(by=By.XPATH, value='//*[@]')
            prev_count = now_count
            now_count = len(elements)
        return elements
all_links = []
# def scrape():
#     links = []
#     try:
#         loadingButton = WebDriverWait(driver, 100).until(EC.element_to_be_clickable((By.XPATH, load_more_button)))
#         maxPerfumes = 1000
#         cond = True
#         while loadingButton:
#             time.sleep(3)
#             driver.execute_script("arguments[0].click();", loadingButton)
#             loadingButton = WebDriverWait(driver, 100).until(EC.element_to_be_clickable((By.XPATH, load_more_button)))
#             loadElems = driver.find_elements(by=By.XPATH, value='//*[@]')
#             if len(loadElems) > 0:
#                 loadingButton = WebDriverWait(driver, 100).until(EC.element_to_be_clickable((By.XPATH, load_more_button)))
#             else:
#                 cond = False
#                 break
#             if len(loadElems) >= maxPerfumes:
#                 break
#         if cond:
#             card_sections = []  # list of card sections (image section and fragrance home page section)
#             sections = driver.find_elements(by=By.CLASS_NAME, value="card-section")
#             for section in sections:
#                 card_sections.append(section)
#             home_pages = card_sections[1::2]  # Extracting the fragrance home page link sections from all sections (image section and fragrance home page section)
#             links = []  # list of final links when extracted.
#             inc = 0  # Increment value to get each card_section
#             # Looping through to get all links
#             for link in home_pages:
#                 link = home_pages[inc]  # Getting each div section
#                 link = link.find_elements(by=By.TAG_NAME, value="p")
#                 link = link[0]  # Selecting the first paragraph (the one housing the link) element out of two
#                 link = link.find_elements(by=By.TAG_NAME, value="a")
#                 link = link[0].get_attribute("href")  # Extracting link from list
#                 links.append(link)
#                 inc += 1
#             WebDriverWait(driver, 20).until(EC.element_to_be_clickable((By.XPATH, clear_filters_button))).click()
#             all_links = all_links + links
#     except:
#         pass
all_cards = []
def filter():
    for industry in Industry_dict.values():
        for gender in Gender_dict.values():
            for country in Country_dict.values():
                for year in range(1920, 2023):
                    industry_selected = WebDriverWait(driver, 100).until(EC.element_to_be_clickable((By.XPATH, industry)))
                    driver.execute_script("arguments[0].click();", industry_selected)
                    time.sleep(2)
                    gender_selected = WebDriverWait(driver, 100).until(EC.element_to_be_clickable((By.XPATH, gender)))
                    driver.execute_script("arguments[0].click();", gender_selected)
                    time.sleep(2)
                    try:
                        country_selected = WebDriverWait(driver, 100).until(EC.element_to_be_clickable((By.XPATH, country)))
                        driver.execute_script("arguments[0].click();", country_selected)
                        time.sleep(2)
                    except NoSuchElementException:
                        break
                    start = WebDriverWait(driver, 100).until(EC.element_to_be_clickable((By.XPATH, start_year_xpath)))
                    start.clear()
                    driver.execute_script("arguments[0].click();", start)
                    start.send_keys(year)
                    time.sleep(1)
                    end = WebDriverWait(driver, 100).until(EC.element_to_be_clickable((By.XPATH, end_year_xpath)))
                    end.clear()
                    driver.execute_script("arguments[0].click();", end)
                    end.send_keys(year)
                    time.sleep(2)
                    # start of new code
                    all_cards.append(start_scraping())
                    print(len(all_cards))  # to keep track.
                    # end of new code
                    clear = WebDriverWait(driver, 50).until(EC.element_to_be_clickable((By.XPATH, clear_filters_button)))
                    driver.execute_script("arguments[0].click();", clear)
filter()
print(len(all_cards))
Any recommendations on how to approach this? TIA
CodePudding user response:
There was an issue introduced in Chrome 99: bugs.chromium Issue 4048: target frame detached. See comment #53:

Status: Fixed (was: Assigned)
Frame detached issue is fixed in ChromeDriver 103

You are using version 102; update your browser and driver versions.
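
As a quick sanity check you can print the versions your session is actually running. Here is a minimal sketch, assuming Selenium 4 and Chrome; it only reads the standard "browserVersion" and "chromedriverVersion" entries from the driver's capabilities:

from selenium import webdriver

# assumes a chromedriver binary matching your installed Chrome is available
driver = webdriver.Chrome()
print("Chrome version:      ", driver.capabilities["browserVersion"])
print("ChromeDriver version:", driver.capabilities["chrome"]["chromedriverVersion"])
driver.quit()

If you move to Selenium 4.6 or newer, you can also drop the hard-coded executable_path: the bundled Selenium Manager resolves and downloads a ChromeDriver that matches your installed Chrome, which avoids this kind of browser/driver mismatch in the first place.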