Code doesn't find any products on website - python

I wrote some code (a piece of it is below) to scrape all products from a shop's website, but it doesn't find any products. I don't know what is wrong with this code. Can someone help me? I added a screenshot to show the HTML (product-tile represents a product box, so I think I should use this class to get the necessary information).
while True:
    # if True:
    try:
        prod = driver.find_elements_by_class_name("product-tile")
        for el in prod:
            name = el.find_element_by_class_name("product-name").text
            price = el.find_element_by_class_name("price-normal").text
            x = [name, price]          # build the row first, then append it
            product_list.append(x)
            print(x)

Try the code below and see if it helps.
prod = WebDriverWait(driver, 20).until(expected_conditions.visibility_of_all_elements_located(
    (By.CSS_SELECTOR, "div.product-tile.js-UA-product")))
for el in prod:
    name = el.find_element_by_class_name("product-name").get_attribute("innerHTML")
    price = el.find_element_by_class_name("price-normal").get_attribute("innerHTML")
Please use the following imports.
from selenium import webdriver
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions
from selenium.webdriver.common.by import By
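
Putting the suggestion together with the imports above, a minimal sketch of the full loop could look like this. It assumes the page really uses the div.product-tile.js-UA-product, product-name and price-normal classes shown above, that product_list is the list you want to fill, and that chromedriver is on PATH; the shop URL is a placeholder.

driver = webdriver.Chrome()                      # assumes chromedriver is on PATH
driver.get("https://example-shop.com/products")  # placeholder URL

product_list = []
# Wait until the product tiles are actually rendered before reading them
tiles = WebDriverWait(driver, 20).until(
    expected_conditions.visibility_of_all_elements_located(
        (By.CSS_SELECTOR, "div.product-tile.js-UA-product")))
for el in tiles:
    name = el.find_element_by_class_name("product-name").text
    price = el.find_element_by_class_name("price-normal").text
    product_list.append([name, price])
    print([name, price])

The point of the explicit wait is that products rendered by JavaScript are often not in the DOM yet when find_elements runs immediately after the page load, which is the usual reason an empty list comes back.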

Related

selenium no way to use a visible select element python

I am working on a project where I first sign up for Instagram with given information and then scrape some data. In the sign-up flow I have a problem with the second form, which is the birthdate: Selenium's Select class didn't work, maybe because of visibility. When I try to access the elements it says there are no such elements, even though they are visible both on screen and in Chrome's inspect page. I would prefer not to use pyautogui because it is quite inflexible and hard to use.
Here is my code:
#information
driverSignUp = webdriver.Chrome()
while True:
    while True:
        try:
            driverSignUp.get('https://www.instagram.com/accounts/emailsignup/')
            break
        except:
            pass
    try:
        driverSignUp.find_element(By.CSS_SELECTOR, 'body.p-error.dialog-404')
        driverSignUp.delete_all_cookies()
    except:
        break
driverSignUp.implicitly_wait(1)
driverSignUp.find_element(By.NAME, 'emailOrPhone').send_keys(email)
driverSignUp.find_element(By.NAME, 'fullName').send_keys(fullName)
driverSignUp.find_element(By.NAME, 'username').send_keys(username)
driverSignUp.find_element(By.NAME, 'password').send_keys(password)
element = WebDriverWait(driverSignUp, 3).until(
    EC.all_of(EC.presence_of_element_located((By.CSS_SELECTOR, 'button.sqdOP.L3NKy.y3zKF')))
)
driverSignUp.find_element(By.CSS_SELECTOR, 'button.sqdOP.L3NKy.y3zKF').click()

#birthdate
actions = ActionChains(driverSignUp)
Month = driverSignUp.find_element(By.CSS_SELECTOR, 'select[title="Month:"]')
actions.move_to_element(Month).perform()
selectMonth = Select(Month)
selectMonth.select_by_visible_text('August')
Day = driverSignUp.find_element(By.CSS_SELECTOR, 'select[title="Day:"]')
actions.move_to_element(Day).perform()
selectDay = Select(Day)
selectDay.select_by_visible_text('1')
Year = driverSignUp.find_element(By.CSS_SELECTOR, 'select[title="Year:"]')
actions.move_to_element(Year).perform()
selectYear = Select(Year)
selectYear.select_by_visible_text('2000')
and the libraries I used:
from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.common.action_chains import ActionChains
from selenium.webdriver.support.ui import Select
Thanks for your answers.
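
A direction that sometimes helps with dropdowns like these (a hedged sketch built on the snippet above, not a verified fix: Instagram's markup changes often, so the select[title=...] selectors are assumptions carried over from the question) is to wait explicitly until each select is clickable before wrapping it in Select, instead of relying on implicitly_wait:

from selenium.webdriver.support.ui import WebDriverWait, Select
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.common.by import By

wait = WebDriverWait(driverSignUp, 10)

# Wait until each dropdown is clickable before driving it with Select
month_el = wait.until(EC.element_to_be_clickable((By.CSS_SELECTOR, 'select[title="Month:"]')))
Select(month_el).select_by_visible_text('August')

day_el = wait.until(EC.element_to_be_clickable((By.CSS_SELECTOR, 'select[title="Day:"]')))
Select(day_el).select_by_visible_text('1')

year_el = wait.until(EC.element_to_be_clickable((By.CSS_SELECTOR, 'select[title="Year:"]')))
Select(year_el).select_by_visible_text('2000')

If the elements are still reported as missing after the wait, it is worth checking whether they sit inside an iframe, in which case driverSignUp.switch_to.frame(...) is needed first.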

How to print 2 values at once with selenium and python?

I hope everyone is having a good day. I am trying to extract values from a website and have them print out as a list, but I can't figure out how to do that. I have all the values printing as expected; I just can't figure out how to have them print one after another. I know this is a very basic question, but I can't figure it out. Any advice or information is appreciated! Thank you!
import time
import webbrowser
from os import O_SEQUENTIAL, link
import chromedriver_autoinstaller
from selenium import webdriver as wd
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait

webdriver = wd.Chrome(executable_path=r"C:\Users\Stephanie\anaconda3\pkgs\python-chromedriver-binary-98.0.4758.48.0-py39hcbf5309_0\Lib\site-packages\chromedriver_binary\chromedriver.exe")
webdriver.implicitly_wait(1)
webdriver.maximize_window()
webdriver.get("https://pcpartpicker.com/user/stephwaters/saved/#view=HgH2xr")
time.sleep(2)

partname = webdriver.find_elements(By.CLASS_NAME, 'td__component')
for part in partname:
    print(part.text + ': ')

prices = webdriver.find_elements(By.CLASS_NAME, 'td__price')
for price in prices:
    print(price.text)
This is the output:
I would like it to print:
Case: $168.99
Power Supply: $182.00
and so on.
Instead of getting the part names and prices separately, you can iterate over a list of products, extracting each one's name and price.
Also, it's recommended to use Expected Conditions explicit waits, not hardcoded pauses.
Your code could be something like this:
import time
import webbrowser
from os import O_SEQUENTIAL, link
import chromedriver_autoinstaller
from selenium import webdriver as wd
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC

webdriver = wd.Chrome(executable_path=r"C:\Users\Stephanie\anaconda3\pkgs\python-chromedriver-binary-98.0.4758.48.0-py39hcbf5309_0\Lib\site-packages\chromedriver_binary\chromedriver.exe")
wait = WebDriverWait(webdriver, 20)
webdriver.maximize_window()
webdriver.get("https://pcpartpicker.com/user/stephwaters/saved/#view=HgH2xr")

wait.until(EC.visibility_of_element_located((By.XPATH, "//tr[@class='tr__product']")))
time.sleep(0.3)  # short delay to make sure not only the first product got loaded

products = webdriver.find_elements(By.XPATH, '//tr[@class="tr__product"]')
for product in products:
    name = product.find_element(By.XPATH, './/td[@class="td__component"]')
    price = product.find_element(By.XPATH, './/td[@class="td__price"]//a')
    print(name.text + ': ' + price.text)
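
If you would rather keep the two separate lists from your original code, another option is to pair them up with zip(). This is a small sketch built on your existing webdriver variable and assumes both lists line up one-to-one:

partname = webdriver.find_elements(By.CLASS_NAME, 'td__component')
prices = webdriver.find_elements(By.CLASS_NAME, 'td__price')

# zip() pairs the i-th component with the i-th price
for part, price in zip(partname, prices):
    print(part.text + ': ' + price.text)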

Data scraping in Shopee product names are not showing their proper name respectively

So I am trying to scrape data from Shopee using PyCharm/Python and Selenium. Here is the code:
from selenium import webdriver
import time
import csv
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium import webdriver

PATH = "C:\Program Files (x86)\chromedriver.exe"
driver = webdriver.Chrome(PATH)
driver.get("https://shopee.ph/search?keyword=nacific&noCorrection=true&page=0&withDiscount=true")
time.sleep(2)
driver.execute_script("window.scrollTo(0, document.body.scrollHeight)")
time.sleep(3)

Categories = []
Categories.append(["NAME"])
driver.implicitly_wait(10)
products = driver.find_elements_by_xpath("//div[@class='row shopee-search-item-result__items']/div")
for product in products:
    name_p = product.find_element_by_xpath("//div[@class='yQmmFK _1POlWt _36CEnF']")
    rowData = [name_p]
    Categories.append(rowData)

with open('Shopee.csv', 'a', encoding='utf-8') as file:
    Import = csv.writer(file, lineterminator='\n')
    Import.writerows(Categories)
So after I ran it, it "successfully" ran, but the problem is this: rather than showing the name of each product, the output shows selenium.webdriver and so on (the WebElement object representation).
I tried changing to other code that doesn't use XPath and does it the regular way (find_element_by_class_name etc.), but it still causes an error. I wonder why it is not working? Can someone help me?
Website I'm trying to scrape: Shopee.ph
Software: PyCharm and Selenium
Software: Pycharm and Selenium
You can use .text to get the text from Selenium WebElements.
Example
product.find_element_by_xpath("//div[@class='yQmmFK _1POlWt _36CEnF']").text
There're many ways to locate web elements.
element = driver.find_element_by_id("passwd-id")
element = driver.find_element_by_name("passwd")
element = driver.find_element_by_xpath("//input[@id='passwd-id']")
element = driver.find_element_by_css_selector("input#passwd-id")
You can find the documentation here:
https://selenium-python.readthedocs.io/navigating.html
I've also fixed some of your code. Here it is:
products = driver.find_elements_by_class_name('yQmmFK')
for product in products:
    name_p = product.text
    rowData = [name_p]
    Categories.append(rowData)
driver.close()
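
For completeness, a minimal end-to-end sketch of the corrected scrape might look like the following. Treat the yQmmFK class and the chromedriver path as placeholders taken from the question, since Shopee regenerates its class names regularly:

import csv
import time
from selenium import webdriver

PATH = r"C:\Program Files (x86)\chromedriver.exe"   # path from the question
driver = webdriver.Chrome(PATH)
driver.get("https://shopee.ph/search?keyword=nacific&noCorrection=true&page=0&withDiscount=true")
time.sleep(2)
driver.execute_script("window.scrollTo(0, document.body.scrollHeight)")
time.sleep(3)

Categories = [["NAME"]]
for product in driver.find_elements_by_class_name('yQmmFK'):
    Categories.append([product.text])   # .text gives the visible name, not the WebElement repr

with open('Shopee.csv', 'a', encoding='utf-8') as file:
    csv.writer(file, lineterminator='\n').writerows(Categories)

driver.close()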

Webscraping using selenium

I am very new to Python, and I am trying to get all store locations from the Muthoot website. The following is the code I wrote, but I am not getting any output. Please let me know what is wrong and what I need to correct.
As I understand it, the code never gets the search button clicked, so nothing moves forward. But how do I do that?
from selenium import webdriver
from selenium.webdriver.support.ui import Select
from selenium.webdriver.common.by import By
import pandas as pd

driver = webdriver.Chrome(executable_path="D:\Chromedriverpath\chromedriver_win32\chromedriver.exe")
driver.get("https://www.muthootfinance.com/branch-locator")

# Saving this element in a variable
drp = Select(driver.find_element_by_id("statelist"))
slist = drp.options
for ele in slist:
    table = driver.select("table.table")
    columns = table.find("thead").find_all("th")
    column_names = [c.string for c in columns]
    table_rows = table.find("tbody").find_all("tr")
    l = []
    for tr in table_rows:
        td = tr.find_all('td')
        row = [str(tr.get_text()).strip() for tr in td]
        l.append(row)
    df = pd.DataFrame(l, columns=column_names)
    df.head()
I think this will work for you now; I copied your code and it seems to work!
from selenium import webdriver
from selenium.webdriver.support.ui import Select
from selenium.webdriver.common.by import By
import pandas as pd

driver = webdriver.Chrome("C:\Program Files (x86)\chromedriver.exe")
driver.get("https://www.muthootfinance.com/branch-locator")

# Saving this element in a variable
html_list = driver.find_element_by_id("state_branch_list")
items = html_list.find_elements_by_tag_name("li")

places = []
for item in items:
    places.append(item.text)   # collect every branch, not just the last one
    print(item.text)

df = pd.DataFrame(places)
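
If you also want to loop over every state the way the original code attempted, a rough sketch could look like this. The statelist and state_branch_list ids are taken from the two snippets above; the rest (explicit waits, re-finding the dropdown to avoid stale references) is an assumption about how the page behaves, so verify it against the live site:

from selenium import webdriver
from selenium.webdriver.support.ui import Select, WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.common.by import By
import pandas as pd

driver = webdriver.Chrome()   # assumes chromedriver is on PATH
driver.get("https://www.muthootfinance.com/branch-locator")
wait = WebDriverWait(driver, 10)

state_names = [o.text for o in Select(wait.until(
    EC.presence_of_element_located((By.ID, "statelist")))).options]

rows = []
for state in state_names:
    # Re-find the dropdown each time in case the page re-renders it after a selection
    Select(driver.find_element(By.ID, "statelist")).select_by_visible_text(state)
    wait.until(EC.presence_of_element_located((By.ID, "state_branch_list")))
    for li in driver.find_elements(By.CSS_SELECTOR, "#state_branch_list li"):
        rows.append({"state": state, "branch": li.text})

df = pd.DataFrame(rows)
print(df.head())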

Selenium Webscraper with Python won't let me click an element

I am trying to put together a web scraper to get locations for a zip code entered by the user. As of right now I am able to navigate to the website, but I am not able to click the drop-down button that lets you enter a zip code. Here is what I have so far:
from selenium import webdriver
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
from bs4 import BeautifulSoup
import time
import pandas as pd
from selenium.webdriver.common.by import By
zipcode = input("What zip code would you like to search? ")
out_table = 'Oreilly_autp_parts_addresses_{}.csv'.format(zipcode)
#Using Selenium to navigate to website, search zipcode and get html data
driver = webdriver.Chrome()  # requires chromedriver.exe
driver.get('https://www.oreillyauto.com/')
time.sleep(2)
driver.maximize_window()
el = driver.find_element_by_class_name("site-store")
time.sleep(2)
driver.execute_script("arguments[0].setAttribute('class','site-store site-nav_item dispatcher-trigger--active')", el)
It seems to be clicking on the correct element, but the drop-down that is supposed to show up isn't there (the relevant HTML is in the linked code block).
Any help is much appreciated!
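
One direction worth trying, as a sketch under assumptions: the site-store class from your snippet is still the drop-down trigger, a regular click is what the site expects rather than rewriting the class attribute with JavaScript, and the zip-code input locator below is hypothetical and needs to be checked against the real markup.

from selenium import webdriver
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.common.by import By

driver = webdriver.Chrome()
driver.get('https://www.oreillyauto.com/')
driver.maximize_window()
wait = WebDriverWait(driver, 10)

# Click the store selector instead of forcing its class attribute via execute_script
store_trigger = wait.until(EC.element_to_be_clickable((By.CLASS_NAME, "site-store")))
store_trigger.click()

# Hypothetical locator for the zip-code field; inspect the opened drop-down for the real one
zip_input = wait.until(EC.visibility_of_element_located(
    (By.CSS_SELECTOR, "input[placeholder*='ZIP'], input[name*='zip']")))
zip_input.send_keys("73301")   # or the zipcode variable collected from input() above

If a plain .click() is intercepted by an overlay, ActionChains(driver).move_to_element(store_trigger).click().perform() is the usual next thing to try.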
