Home > Blockchain >  web scraping with python selenium loop and save problem
web scraping with python selenium loop and save problem

Time:07-04

Hi, I want to save the data I scraped as CSV and TXT files, but I couldn't.

Moreover:

How can I repeat this process multiple times?'

# Advance to the next results page.  .click() returns None, so assigning it
# to a variable (as the original did) is misleading — just call it.
driver.find_element("xpath", '//*[@id="pnnext"]/span[2]').click()
# Re-query the result links on the newly loaded page.
result = driver.find_elements(By.CSS_SELECTOR, ".GyAeWb cite.iUh30")

Code:

from selenium import webdriver
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.common.by import By
import pandas as pd
import time
import csv
import re


# Launch Chrome, search Google for a query, scrape the visible result URLs
# from the first two result pages, de-duplicate them, and save them to both
# a .txt file and a .csv file (the question asked for both formats).
driver = webdriver.Chrome()

url = "http://google.com"
driver.get(url)

# Locate the search box, type the query, and submit it.
searchInput = driver.find_element("xpath", '/html/body/div[1]/div[3]/form/div[1]/div[1]/div[1]/div/div[2]/input')
time.sleep(1)
searchInput.send_keys("dişçi")
time.sleep(2)
searchInput.send_keys(Keys.ENTER)
time.sleep(2)

# Collect the result URLs on the first page.  Extract .text immediately:
# keeping WebElement references across a page navigation raises
# StaleElementReferenceException, and writing the element itself to a file
# only stores its repr.
sites = [element.text for element in
         driver.find_elements(By.CSS_SELECTOR, ".GyAeWb cite.iUh30")]

for index, site in enumerate(sites):
    print(index + 1, site)  # original `print(index 1, ...)` was missing the `+`

time.sleep(2)

# Go to the next results page (.click() returns None, so don't assign it)
# and append that page's URLs as well.
driver.find_element("xpath", '//*[@id="pnnext"]/span[2]').click()
sites += [element.text for element in
          driver.find_elements(By.CSS_SELECTOR, ".GyAeWb cite.iUh30")]

# De-duplicate while preserving first-seen order.  The original reset the
# list with `result = []` *before* calling `list(set(result))`, which threw
# away everything that had been scraped.
sites = list(dict.fromkeys(sites))

for index, site in enumerate(sites):
    print(index + 1, site)

# Save as numbered text lines.  The original's `count =1` reassigned 1 on
# every iteration instead of incrementing (`count += 1`); enumerate() makes
# that mistake impossible.
with open("siteler.txt", "w", encoding="UTF-8") as file:
    for count, site in enumerate(sites, start=1):
        file.write(f"{count}-{site}\n")

# Also save as CSV, which the question asked for but the original never did.
with open("siteler.csv", "w", encoding="UTF-8", newline="") as csvfile:
    writer = csv.writer(csvfile)
    writer.writerow(["no", "site"])
    for count, site in enumerate(sites, start=1):
        writer.writerow([count, site])

driver.close()

CodePudding user response:

# Answer: keep clicking Google's "Next" button until it no longer exists,
# scraping each page's result URLs and appending them to siteler.txt.
try:
    while True:
        # .click() returns None — don't bother assigning it.
        driver.find_element("xpath", '//*[@id="pnnext"]/span[2]').click()
        result = driver.find_elements(By.CSS_SELECTOR, ".GyAeWb cite.iUh30")

        for index, element in enumerate(result):
            print(index + 1, element.text)  # original `index 1` was missing the `+`

        # Open in append mode ("a"): the original's "w" truncated the file on
        # every page, so only the last page's results survived.
        with open("siteler.txt", "a", encoding="UTF-8") as file:
            # enumerate() replaces the original `count =1`, which reassigned 1
            # each iteration instead of incrementing.
            for count, element in enumerate(result, start=1):
                # Write the visible URL text; writing the WebElement itself
                # only stores its object repr.
                file.write(f"{count}-{element.text}\n")
except Exception:
    # find_element raises NoSuchElementException once there is no "Next"
    # button — that is the normal loop exit, not an error.  The original put
    # this message in `finally`, which printed it unconditionally.
    print("there is no element with '//*[@id='pnnext']/span[2]' XPATH")

  • Related