I am trying to select a particular piece of information using an XPath or CSS selector, but I keep getting error messages. Can someone help me see what's wrong here?
This is part of my code:
output = driver.find_element_by_xpath("//td[#class_= 'sku']")
print(output)
With the modifications given below, I am getting this error message:
Traceback (most recent call last):
File "sa.py", line 25, in <module>
output = driver.find_element_by_xpath("//td[#cl
File "C:\Python27\lib\site-packages\selenium\webd
return self.find_element(by=By.XPATH, value=xpa
File "C:\Python27\lib\site-packages\selenium\webd
{'using': by, 'value': value})['value']
File "C:\Python27\lib\site-packages\selenium\webd
self.error_handler.check_response(response)
File "C:\Python27\lib\site-packages\selenium\webd
raise exception_class(message, screen, stacktra
selenium.common.exceptions.NoSuchElementException:
td[@class=\'sku\']/p"}' ; Stacktrace:
at FirefoxDriver.prototype.findElementInternal_
ver#googlecode.com/components/driver_component.js:9
at FirefoxDriver.prototype.findElement (file://
ecode.com/components/driver_component.js:9479)
at DelayedCommand.prototype.executeInternal_/h
er#googlecode.com/components/command_processor.js:1
at DelayedCommand.prototype.executeInternal_ (f
#googlecode.com/components/command_processor.js:114
at DelayedCommand.prototype.execute/< (file:///
code.com/components/command_processor.js:11402)
This is the code that I have written:
from selenium import webdriver
from selenium.common.exceptions import TimeoutException
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
url = "http://www.sigmaaldrich.com/united-states.html"
#cas = "1300746-79-5"
cas = "100-44-7"
driver = webdriver.Firefox()
driver.get(url)
inputElement = driver.find_element_by_name("Query")
inputElement.send_keys(cas)
inputElement.submit()
pricing_link = driver.find_element_by_css_selector("li.priceValue a")
pricing_link.click()
output = driver.find_element_by_xpath("//td[#class='sku']/p")
print(output)
driver.quit()
After the discussion with the OP and alecxe, this is a timing issue: you need to use WebDriverWait to wait for the table to load.
# XPath to select <p> inside <td> with class name 'sku'
outputs_by_xpath = WebDriverWait(driver, 10).until(
    lambda driver: driver.find_elements_by_xpath(".//td[@class='sku']/p")
)

# or CSS selector
outputs_by_css = WebDriverWait(driver, 10).until(
    lambda driver: driver.find_elements_by_css_selector("td.sku > p")
)

for output in outputs_by_xpath:
    print(output.text)

print("\n")

for output in outputs_by_css:
    print(output.text)
Output:
185558-50G
185558-250G
185558-1KG
185558-2KG
185558-50G
185558-250G
185558-1KG
185558-2KG
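If you prefer the built-in expected conditions to a lambda, EC.presence_of_all_elements_located accepts a locator tuple and waits for the same elements; a minimal sketch, assuming the same page structure as above:
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC

# wait up to 10 seconds for every <p> inside <td class="sku"> to be present in the DOM
outputs = WebDriverWait(driver, 10).until(
    EC.presence_of_all_elements_located((By.CSS_SELECTOR, "td.sku > p"))
)
for output in outputs:
    print(output.text)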
Related
I want to automate a simple task with Selenium: log in to this website: https://www.lernsax.de/. I'm trying to locate the element via XPath, but that doesn't work at all and I get a NoSuchElementException. I'm using Chromedriver, and I have tried different locating methods like
find_element_by_id
find_element_by_name
find_element_by_xpath
find_element_by_link_text
find_element_by_partial_link_text
find_element_by_tag_name
find_element_by_class_name
but I always get this error. I have already tried different websites, and XPath works fine there.
Any help would mean a lot!
Here's my full code:
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
import time
driver = webdriver.Chrome(executable_path="C:\chromedriver\chromedriver.exe")
driver.get("https://lernsax.de")
loginbtn = driver.find_element_by_xpath('//*[@id="skeleton_main"]/div[1]/div[2]/div/a')
loginbtn.click()
time.sleep(2)
driver.quit()
and the full error message:
Traceback (most recent call last):
File "C:/Users/.../lernsax.py", line 6, in <module>
loginbtn = driver.find_element_by_xpath('//*[@id="skeleton_main"]/div[1]/div[2]/div/a')
File "C:\Users\...\PycharmProjects\LernsaxAutomation\venv\lib\site-packages\selenium\webdriver\remote\webdriver.py", line 394, in find_element_by_xpath
return self.find_element(by=By.XPATH, value=xpath)
File "C:\Users\...\PycharmProjects\LernsaxAutomation\venv\lib\site-packages\selenium\webdriver\remote\webdriver.py", line 976, in find_element
return self.execute(Command.FIND_ELEMENT, {
File "C:\Users\...\PycharmProjects\LernsaxAutomation\venv\lib\site-packages\selenium\webdriver\remote\webdriver.py", line 321, in execute
self.error_handler.check_response(response)
File "C:\Users\...\PycharmProjects\LernsaxAutomation\venv\lib\site-packages\selenium\webdriver\remote\errorhandler.py", line 242, in check_response
raise exception_class(message, screen, stacktrace)
selenium.common.exceptions.NoSuchElementException: Message: no such element: Unable to locate element: {"method":"xpath","selector":"//*[@id="skeleton_main"]/div[1]/div[2]/div/a"}
(Session info: chrome=80.0.3987.149)
An iframe is present on the page, so you need to first switch the driver to the iframe and then operate on the element:
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.common.by import By
from selenium.webdriver.support import expected_conditions as EC
driver = webdriver.Chrome()
driver.get("https://lernsax.de")
# Switch to iframe
driver.switch_to.frame(driver.find_element_by_id('main_frame'))
# Find the element by applying explicit wait on it and then click on it
WebDriverWait(driver, 20).until(EC.presence_of_element_located((By.XPATH, "//*[@id='skeleton_main']/div[1]/div[2]/div/a"))).click()
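If you then need to interact with anything outside the login iframe, remember to switch the driver back to the top-level document first; a small sketch of the round trip:
# ... work with elements inside the iframe ...

# switch back to the top-level document before locating elements outside the frame
driver.switch_to.default_content()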
Looks like the login form is contained within an iframe, which you will need to switch into before performing the operations you need. Add the line below before you click the login button.
driver.switch_to.frame('main_frame')
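Placed in the context of your original script, the flow would look roughly like this (a sketch, not verified against the live page):
driver.get("https://lernsax.de")

# the login link lives inside the iframe with id 'main_frame'
driver.switch_to.frame('main_frame')

loginbtn = driver.find_element_by_xpath('//*[@id="skeleton_main"]/div[1]/div[2]/div/a')
loginbtn.click()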
As the desired element is within an <iframe>, to invoke click() on it you have to:
Induce WebDriverWait for the desired frame_to_be_available_and_switch_to_it().
Induce WebDriverWait for the desired element_to_be_clickable().
You can use either of the following Locator Strategies:
Using CSS_SELECTOR:
driver.get('https://www.lernsax.de/wws/9.php#/wws/101505.php?sid=97608267430324706358471707170230S5c89c6aa')
WebDriverWait(driver, 10).until(EC.frame_to_be_available_and_switch_to_it((By.CSS_SELECTOR, "iframe#main_frame")))
WebDriverWait(driver, 10).until(EC.element_to_be_clickable((By.CSS_SELECTOR, "a.mo"))).click()
Using XPATH:
driver.get('https://www.lernsax.de/wws/9.php#/wws/101505.php?sid=97608267430324706358471707170230S5c89c6aa')
WebDriverWait(driver, 10).until(EC.frame_to_be_available_and_switch_to_it((By.XPATH, "//iframe[@id='main_frame']")))
WebDriverWait(driver, 10).until(EC.element_to_be_clickable((By.XPATH, "//a[@class='mo' and text()='Login']"))).click()
Note: You have to add the following imports:
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.common.by import By
from selenium.webdriver.support import expected_conditions as EC
Browser Snapshot:
Reference
You can find a couple of relevant discussions in:
Switch to an iframe through Selenium and python
Ways to deal with #document under iframe
I'm trying to get Selenium to wait until the title tag of a web page is present when loading with Python.
I've tried testing this code with other types of HTML tags and only the <body> tag didn't result in an error.
wait = WebDriverWait(driver, 10)
driver.get(link)
wait.until(EC.visibility_of_element_located((By.TAG_NAME, 'div')))
I expected the code to evaluate to completion but I got the following error:
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "/usr/lib/python3.7/site-packages/selenium/webdriver/support/wait.py", line 80, in until
raise TimeoutException(message, screen, stacktrace)
selenium.common.exceptions.TimeoutException: Message:
The title tag is never visible. You can wait for its presence, though:
wait.until(EC.presence_of_element_located((By.TAG_NAME, 'title')))
The title also has its own expected conditions, title_is() and title_contains(). For example:
wait.until(EC.title_is("Google"))
You can review the complete list of supported expected conditions in the documentation.
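Putting the presence approach together with the original snippet, a minimal sketch (assuming driver and link are set up as in the question):
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC

wait = WebDriverWait(driver, 10)
driver.get(link)

# <title> exists in the DOM but is never rendered, so wait for presence, not visibility
wait.until(EC.presence_of_element_located((By.TAG_NAME, 'title')))
print(driver.title)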
If you want to access the page title, locating the <title> tag is not the ideal way.
To print the title, you need to induce WebDriverWait for either of the following expected_conditions:
title_contains(partial_title)
title_is(title)
An example:
from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
options = webdriver.ChromeOptions()
options.add_argument("start-maximized")
options.add_argument('disable-infobars')
driver = webdriver.Chrome(chrome_options=options, executable_path=r'C:\WebDrivers\chromedriver.exe')
driver.get("https://www.google.com/")
WebDriverWait(driver, 10).until(EC.title_contains("G"))
print("Page title is: %s" %(driver.title))
Console Output:
Page title is: Google
I am trying to scrape all the links from a web page. I am using Selenium WebDriver to scroll and click the load more button present on the web page. The code I am trying is shown below:
from selenium import webdriver
from selenium.webdriver.support.ui import WebDriverWait
from selenium.common.exceptions import ElementNotVisibleException
from selenium.webdriver.support import expected_conditions as EC
from selenium.common.exceptions import NoSuchElementException
from bs4 import BeautifulSoup
def fetch_links(url):
    chrome_path = r"D:\nishant_pc_d_drive\nishant_pc\d_drive\update_engine\myntra_update\chromedriver.exe"
    driver = webdriver.Chrome(chrome_path)
    driver.get(url)
    while True:
        try:
            scrollcount = 1
            while scrollcount < 5:
                driver.execute_script("window.scrollTo(0, document.body.scrollHeight);")
                WebDriverWait(driver, 5)
                scrollcount += 1
            WebDriverWait(driver, 10).until(EC.presence_of_element_located(driver.find_elements_by_css_selector('.load_more .sbt-button, .load_more_order .sbt-button')))
            driver.find_element_by_id("loadmore").click()
        except (ElementNotVisibleException, NoSuchElementException) as e:
            print "done"
            x = driver.page_source
            soup2 = BeautifulSoup(x, 'html.parser')
            linkcount = 0
            for each in soup2.find_all('a', attrs={"class": "thumb searchUrlClass"}):
                print "https://www.shoppersstop.com/" + each.get('href')
                linkcount += 1
            print linkcount
            # thumb searchUrlClass

fetch_links("https://www.shoppersstop.com/women-westernwear-tops-tees/c-A206020")
But unfortunately it is giving me an error, as shown below:
Traceback (most recent call last):
File "D:/INVENTORY/shopperstop/fetch_link.py", line 36, in <module>
fetch_links("https://www.shoppersstop.com/women-westernwear-tops-tees/c-A206020")
File "D:/INVENTORY/shopperstop/fetch_link.py", line 21, in fetch_links
WebDriverWait(driver, 10).until(EC.presence_of_element_located(driver.find_element_by_class_name('sbt-button')))
File "C:\Python27\lib\site-packages\selenium\webdriver\support\wait.py", line 71, in until
value = method(self._driver)
File "C:\Python27\lib\site-packages\selenium\webdriver\support\expected_conditions.py", line 63, in __call__
return _find_element(driver, self.locator)
File "C:\Python27\lib\site-packages\selenium\webdriver\support\expected_conditions.py", line 328, in _find_element
return driver.find_element(*by)
TypeError: find_element() argument after * must be an iterable, not WebElement
How can I fix this error? Thanks!
The error text is legitimately confusing.
Basically, some Expected Conditions (EC) methods use locators, while some use elements. The one you used only accepts a locator, but you provided an element instead.
The difference is sort of explained in the Selenium API docs here:
element is a WebElement object.
locator is a tuple of (by, path).
A practical example of a locator is (By.ID, 'someid') (You'll need to import Selenium's "By" class)
So, here's the initial code that incorrectly provides an element:
WebDriverWait(driver, 10).until(
EC.presence_of_element_located(driver.find_element_by_class_name('sbt-button'))
)
It should be updated to provide a locator instead:
WebDriverWait(driver, 10).until(
EC.presence_of_element_located((By.CLASS_NAME, 'sbt-button'))
)
Notice the double parens. That's a tuple being passed to the EC method.
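To make the distinction concrete, here is a small illustrative comparison (the 'sbt-button' class is just taken from the question): EC.presence_of_element_located() takes a locator tuple, while a condition such as EC.visibility_of() takes a WebElement you have already found.
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC

# locator-based condition: pass a (By, value) tuple and let the wait do the finding
WebDriverWait(driver, 10).until(
    EC.presence_of_element_located((By.CLASS_NAME, 'sbt-button'))
)

# element-based condition: pass a WebElement that has already been found
button = driver.find_element_by_class_name('sbt-button')
WebDriverWait(driver, 10).until(EC.visibility_of(button))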
Note: In your case, it also looks like you want multiple elements, so you also need to use EC.presence_of_all_elements_located() instead of EC.presence_of_element_located().
from selenium.webdriver.common.by import By
element = WebDriverWait(driver, 10).until(
EC.presence_of_element_located((By.ID, "myDynamicElement"))
)
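Applied to the selector from the question, waiting for all matching buttons before clicking "loadmore" would look roughly like this (element ids and classes are taken from the question, not verified against the live page):
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC

buttons = WebDriverWait(driver, 10).until(
    EC.presence_of_all_elements_located(
        (By.CSS_SELECTOR, '.load_more .sbt-button, .load_more_order .sbt-button')
    )
)
driver.find_element_by_id("loadmore").click()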
I have a simple task that I want to automate. I want to open a URL, click a button which takes me to the next page, fill in a search term, click the "search" button, and print out the URL and source code of the results page. I have written the following:
from selenium import webdriver
import time
driver = webdriver.PhantomJS()
driver.set_window_size(1120, 550)
#open URL
driver.get("https://www.searchiqs.com/nybro/")
time.sleep(5)
#click Log In as Guest button
driver.find_element_by_id('btnGuestLogin').click()
time.sleep(5)
#insert search term into Party 1 form field and then search
driver.find_element_by_id('ContentPlaceholder1_txtName').send_keys("Moses")
driver.find_element_by_id('ContentPlaceholder1_cmdSearch').click()
time.sleep(10)
#print url and source code
print driver.current_url
print driver.page_source
driver.quit()
I am not sure where I am going wrong; I have followed a number of tutorials on how to click buttons and fill in forms, but I get this error instead.
Traceback (most recent call last):
File "phant.py", line 12, in <module>
driver.find_element_by_id('btnGuestLogin').click()
File "/usr/local/lib/python2.7/dist-packages/selenium/webdriver/remote/webdriver.py", line 269, in find_element_by_id
return self.find_element(by=By.ID, value=id_)
File "/usr/local/lib/python2.7/dist-packages/selenium/webdriver/remote/webdriver.py", line 752, in find_element
'value': value})['value']
File "/usr/local/lib/python2.7/dist-packages/selenium/webdriver/remote/webdriver.py", line 236, in execute
self.error_handler.check_response(response)
File "/usr/local/lib/python2.7/dist-packages/selenium/webdriver/remote/errorhandler.py", line 192, in check_response
raise exception_class(message, screen, stacktrace)
selenium.common.exceptions.NoSuchElementException: Message: {"errorMessage":"Unable to find element with id 'btnGuestLogin'","request":{"headers":{"Accept":"application/json","
Accept-Encoding":"identity","Connection":"close","Content-Length":"94","Content-Type":"application/json;charset=UTF-8","Host":"127.0.0.1:35670","User-Agent":"Python-urllib/2.7"
},"httpVersion":"1.1","method":"POST","post":"{\"using\": \"id\", \"sessionId\": \"d38e5fa0-5349-11e6-b0c2-758ad3d2c65e\", \"value\": \"btnGuestLogin\"}","url":"/element","urlP
arsed":{"anchor":"","query":"","file":"element","directory":"/","path":"/element","relative":"/element","port":"","host":"","password":"","user":"","userInfo":"","authority":""
,"protocol":"","source":"/element","queryKey":{},"chunks":["element"]},"urlOriginal":"/session/d38e5fa0-5349-11e6-b0c2-758ad3d2c65e/element"}}
Screenshot: available via screen
The error seems to suggest that the element with that id does not exist, yet it does.
--- EDIT: Changed code to use WebDriverWait ---
I have changed some things around to implement WebDriverWait
from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
import time
driver = webdriver.PhantomJS()
driver.set_window_size(1120, 550)
#open URL
driver.get("https://www.searchiqs.com/nybro/")
#click Log In as Guest button
element = WebDriverWait(driver, 10).until(
EC.presence_of_element_located((By.ID, "btnGuestLogin"))
)
element.click()
#wait for new page to load, fill in form and hit search
element2 = WebDriverWait(driver, 10).until(
EC.presence_of_element_located((By.ID, "ContentPlaceholder1_cmdSearch"))
)
#insert search term into Party 1 form field and then search
driver.find_element_by_id('ContentPlaceholder1_txtName').send_keys("Moses")
element2.click()
driver.implicitly_wait(10)
#print url and source code
print driver.current_url
print driver.page_source
driver.quit()
It still raises this error
Traceback (most recent call last):
File "phant.py", line 14, in <module>
EC.presence_of_element_located((By.ID, "btnGuestLogin"))
File "/usr/local/lib/python2.7/dist-packages/selenium/webdriver/support/wait.py", line 80, in until
raise TimeoutException(message, screen, stacktrace)
selenium.common.exceptions.TimeoutException: Message:
Screenshot: available via screen
The WebDriverWait approach actually works for me as is:
from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
driver = webdriver.PhantomJS()
driver.set_window_size(1120, 550)
driver.get("https://www.searchiqs.com/nybro/")
element = WebDriverWait(driver, 10).until(
EC.presence_of_element_located((By.ID, "btnGuestLogin"))
)
element.click()
No errors. PhantomJS version 2.1.1, Selenium 2.53.6, Python 2.7.
The issue might be related to SSL and PhantomJS. Either load the page over http:
driver.get("http://www.searchiqs.com/nybro/")
Or, try ignoring SSL errors:
driver = webdriver.PhantomJS(service_args=['--ignore-ssl-errors=true', '--ssl-protocol=any'])
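If you want to keep the explicit wait and rule out SSL problems at the same time, the two workarounds combine naturally; a sketch with the same versions as above:
from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC

# tell PhantomJS to ignore certificate problems before loading the page
driver = webdriver.PhantomJS(service_args=['--ignore-ssl-errors=true', '--ssl-protocol=any'])
driver.set_window_size(1120, 550)
driver.get("https://www.searchiqs.com/nybro/")

WebDriverWait(driver, 10).until(
    EC.presence_of_element_located((By.ID, "btnGuestLogin"))
).click()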
I need your help. I'm trying to scrape some data from tripadvisor using Selenium in Python 2.7. However, I'm getting stuck at one point.
After browsing to the correct page, I'm trying to filter the hotels on certain prices. To do this, you do a mouse-over or click on 'price' and then select the appropriate value, like (€3 - €13).
After clicking on price and then the value, I get the error that the element is not visible or cannot be located, while it is clearly visible.
code
from urllib import urlopen
import time
from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
city = 'nha thrang'
url = 'http://www.tripadvisor.nl/Hotels'
driver = webdriver.Firefox()
# open browser
driver.get(url)
time.sleep(5)
# insert city & dates
driver.find_element_by_id('searchbox').send_keys(city)
driver.find_element_by_id('date_picker_in_188616').click()
driver.find_elements_by_class_name('day')[15].click()
driver.find_element_by_id('date_picker_out_188616').click()
driver.find_elements_by_class_name('day')[16].click()
time.sleep(5)
# click search
driver.find_element_by_id('SUBMIT_HOTELS').click()
# close popup
time.sleep(5)
try:
    driver.switch_to.window(driver.window_handles[1])
    driver.close()
    driver.switch_to.window(driver.window_handles[0])
except:
    ''
# click on 'price'. Works!
driver.find_element_by_xpath('//div[starts-with(@class, "JFY_hotel_filter_icon enabled price sprite-price")]').click()
# click on particular price. doesn't work.
driver.find_element_by_xpath('//div[starts-with(@class, "jfy_tag_style jfy_filter_p_4 jfy_cloud")]').click()
Error
Traceback (most recent call last):
File "<pyshell#30>", line 1, in <module>
driver.find_element_by_xpath('//div[starts-with(@class, "jfy_tag_style jfy_filter_p_4 jfy_cloud")]').click()
File "C:\Python27\lib\site-packages\selenium\webdriver\remote\webdriver.py", line 230, in find_element_by_xpath
return self.find_element(by=By.XPATH, value=xpath)
File "C:\Python27\lib\site-packages\selenium\webdriver\remote\webdriver.py", line 662, in find_element
{'using': by, 'value': value})['value']
File "C:\Python27\lib\site-packages\selenium\webdriver\remote\webdriver.py", line 173, in execute
self.error_handler.check_response(response)
File "C:\Python27\lib\site-packages\selenium\webdriver\remote\errorhandler.py", line 166, in check_response
raise exception_class(message, screen, stacktrace)
NoSuchElementException: Message: Unable to locate element: {"method":"xpath","selector":"//div[starts-with(@class, \"jfy_tag_style jfy_filter_p_4 jfy_cloud\")]"}
Stacktrace:
at FirefoxDriver.prototype.findElementInternal_ (file:///c:/users/j6057~1.kro/appdata/local/temp/tmpdgovsc/extensions/fxdriver@googlecode.com/components/driver-component.js:9641:26)
at FirefoxDriver.prototype.findElement (file:///c:/users/j6057~1.kro/appdata/local/temp/tmpdgovsc/extensions/fxdriver@googlecode.com/components/driver-component.js:9650:3)
at DelayedCommand.prototype.executeInternal_/h (file:///c:/users/j6057~1.kro/appdata/local/temp/tmpdgovsc/extensions/fxdriver@googlecode.com/components/command-processor.js:11635:16)
at DelayedCommand.prototype.executeInternal_ (file:///c:/users/j6057~1.kro/appdata/local/temp/tmpdgovsc/extensions/fxdriver@googlecode.com/components/command-processor.js:11640:7)
at DelayedCommand.prototype.execute/< (file:///c:/users/j6057~1.kro/appdata/local/temp/tmpdgovsc/extensions/fxdriver@googlecode.com/components/command-processor.js:11582:5)
You need to apply multiple changes to make it work:
use Chrome() driver to avoid opening multiple windows after clicking "Search"
don't use hardcoded time.sleep() intervals - use "Explicit Waits"
date picker element ids are changing, you need to use starts-with() to find them
use ActionChains() to hover the price element and wait for range to become visible
Working code (selecting "USD 25 - 50" range):
from selenium import webdriver
from selenium.webdriver import ActionChains
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
city = 'nha thrang'
url = 'http://www.tripadvisor.nl/Hotels'
driver = webdriver.Chrome()
driver.get(url)
# insert city & dates
searchbox = WebDriverWait(driver, 10).until(EC.presence_of_element_located((By.ID, 'searchbox')))
searchbox.send_keys(city)
driver.find_element_by_xpath('//span[starts-with(@id, "date_picker_in_")]').click()
driver.find_elements_by_class_name('day')[15].click()
driver.find_element_by_xpath('//span[starts-with(@id, "date_picker_out_")]').click()
driver.find_elements_by_class_name('day')[16].click()
# click search
driver.find_element_by_id('SUBMIT_HOTELS').click()
# select price range
price = WebDriverWait(driver, 10).until(EC.presence_of_element_located((By.XPATH, '//div[starts-with(@class, "JFY_hotel_filter_icon enabled price sprite-price")]')))
ActionChains(driver).move_to_element(price).perform()
price_range = WebDriverWait(driver, 10).until(EC.visibility_of_element_located((By.XPATH, '(//div[contains(@class, "jfy_filter_bar_price")]//div[@value="p 8"])[last()]')))
price_range.click()
Results into:
I got the same traceback. Try adding this before you find your elements:
driver.switch_to_window(driver.window_handles[1])  # switch to the first new page (window handle)
Anyway, it works for me.
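For context, the usual window-handle pattern (which the question's own pop-up handling already follows) is to remember the original handle, switch to the new one, and switch back when done; a minimal sketch:
# remember the original window, then switch to the newly opened one
main_window = driver.window_handles[0]
driver.switch_to.window(driver.window_handles[1])

# ... interact with or close the pop-up window ...
driver.close()

# return to the original window before locating further elements
driver.switch_to.window(main_window)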