Selenium PhantomJS unable to find elements on page because page is blank - python

Whenever I run my python selenium test case I get this error:
NoSuchElementException: Message: {"errorMessage":"Unable to find element with name... etc
^^ I can't locate the username field because the page is not loading.
I am able to return the url and it is the correct url.
Whenever I save a screenshot of the login page, it returns a solid white page. PhantomJS is going to the correct address but not loading the page. It looks like this is only happening with https sites and not http.
import unittest
from selenium import webdriver

# NOTE(review): the original passed '-- ssl-protocol=any' with a stray
# space, so PhantomJS never received the flag and fell back to its SSLv3
# default, which most HTTPS sites reject -- hence the blank white page.
browser = webdriver.PhantomJS(
    service_args=['--ignore-ssl-errors=true', '--ssl-protocol=any'])


class TestOne(unittest.TestCase):
    """Log in to an HTTPS page with PhantomJS and screenshot it."""

    def setUp(self):
        # Reuse the module-level driver; a large viewport keeps elements
        # from being hidden by responsive layouts.
        self.driver = browser
        self.driver.set_window_size(2000, 1500)

    def test_url(self):
        driver = self.driver
        self.driver.get("https://urlhere")
        print(driver.current_url)
        driver.save_screenshot("path/toscreenshot/screenshot1")
        # implicitly_wait sets a lookup timeout for every later
        # find_element_* call; it is not a one-off sleep.
        driver.implicitly_wait(30)
        driver.find_element_by_name("username").clear()
        driver.find_element_by_name("username").send_keys("username")
        driver.find_element_by_name("password").clear()
        driver.find_element_by_name("password").send_keys("password")
        driver.find_element_by_name("submit").click()
        # End of login

    def tearDown(self):
        self.driver.quit()


if __name__ == '__main__':
    unittest.main()

Related

Selenium not returning results after signing into a website

I am trying to make a bot that web scrapes a website. I have got it to the stage in which it puts in the user name and pass word. The website then takes me to a different URL, (from the portal to the home page). The bot cannot seem to find any elements on this new home page. The URL has changed, and I believe that the code is not scraping this new page. How do I update the code, if I simply use the URL of the home page, the website then asks for the username and password again.
I have provided code below that has the same problem, except this time with searching google and then finding the elements as I do not wish to provide personal details about the website I am scraping.
from selenium import webdriver

# Search Google for "github" and fetch the first result's title element.
driver = webdriver.Chrome(r'C:\webdrivers\chromedriver.exe')
driver.get('https://google.com')
driver.implicitly_wait(3)

search_bar = driver.find_element_by_name('q')
search_bar.send_keys("github")
search_bar.submit()

# "LC20lb DKV0Md" is a compound class name; find_element_by_class_name
# accepts only a single class, so the original call could never match
# (selenium reports: Unable to locate element). A CSS selector that
# requires both classes works.
element = driver.find_element_by_css_selector(".LC20lb.DKV0Md")
print(element)

# Close exactly once -- the original called driver.close() twice, and the
# second call fails because the session is already gone.
driver.close()
Edit:
I am on Windows 10 and using Google chrome
Edit 2:
I believe it may have something to do with the driver scraping the old URL, rather than the new URL that has been searched and contains the search results. Is there any way to update which URL it is scraping?
try to use implicitly_wait instead of sleep
from selenium import webdriver

# Same flow as the question, but relying on implicitly_wait instead of
# time.sleep: the timeout applies to element lookups rather than
# blocking unconditionally.
driver = webdriver.Chrome(r'C:\webdrivers\chromedriver.exe')
driver.get('https://google.com')
driver.implicitly_wait(3)

# Locate the query box once, type the term, and submit its form.
search_bar = driver.find_element_by_name('q')
search_bar.send_keys("github")
search_bar.submit()

driver.implicitly_wait(3)
driver.close()

Pop-up doesn't disappear by webscraping with Selenium and Python

I am trying to parse a table from https://www.morningstar.de/de/screener/fund.aspx#?filtersSelectedValue=%7B%22sustainabilityRating%22:%7B%22id%22:%225%22%7D%7D&page=1&perPage=10&sortField=legalName&sortOrder=asc.
However, by opening the website with selenium I always get at first a pop-up, to close which I need to select type of user (radiobutton) and then click on "accept" button .
After I perform these "clicks" with the help of Python and Selenium, the pop-up doesn't disappear, but I can see that the clicks were carried out. It doesn't show any error (all the needed fields are selected and the Python script also doesn't throw anything).
Here is my code:
from selenium import webdriver
import time

# Open the Morningstar fund screener and dismiss the consent pop-up by
# selecting a user type and clicking the accept button.
browser = webdriver.Firefox()
url = "https://www.morningstar.de/de/screener/fund.aspx#?filtersSelectedValue=%7B%22sustainabilityRating%22:%7B%22id%22:%225%22%7D%7D&page=1&perPage=10&sortField=legalName&sortOrder=asc"
browser.get(url)
time.sleep(10)  # crude wait for the page and pop-up to render

try:
    # find_elements (plural) returns [] instead of raising, so the [0]
    # is what actually fails (IndexError) when the button is missing.
    radio_button = browser.find_elements_by_xpath('/html/body/div[2]/div[3]/div/div[2]/div/div[3]/div[1]/div[1]/fieldset/div[2]/label/span/span[1]')[0]
    radio_button.click()
    time.sleep(3)
    accept_button = browser.find_element_by_id('_evidon-accept-button')
    accept_button.click()
    print("accepted")
except Exception as exc:
    # The original bare `except:` hid the reason (and even caught
    # KeyboardInterrupt); report what actually went wrong instead.
    print(" something went wrong:", exc)
I need to close this pop-up in order to get access to the table, what am I doing wrong?
Example for radio button #finaprofessional > span:nth-child(1) or #finaprofessional > span:nth-child(2)
import time
from selenium import webdriver


def example():
    """Dismiss the Morningstar consent pop-up: pick a user type, accept."""
    firefox_browser = webdriver.Firefox()
    firefox_browser.get("https://www.morningstar.de/de/screener/fund.aspx#?filtersSelectedValue=%7B%22sustainabilityRating%22:%7B%22id%22:%225%22%7D%7D&page=1&perPage=10&sortField=legalName&sortOrder=asc")
    time.sleep(10)  # wait for page to load
    # Radio button for the professional-user choice, then the consent
    # accept button once the dialog has reacted.
    firefox_browser.find_element_by_css_selector("#finaprofessional > span:nth-child(1)").click()
    time.sleep(10)
    firefox_browser.find_element_by_id("_evidon-accept-button").click()


if __name__ == "__main__":
    example()

Python selenium: unable to locate element on a webpage

class PythonOrgSearch(unittest.TestCase):
    """Attempted adidas.com add-to-cart bot (question code -- see notes)."""

    def setUp(self):
        self.driver = webdriver.Chrome()  # opens chrome to complete the task

    def test_search_in_python_org(self):
        driver = self.driver
        driver.get(URL)  # uses the URL that was generated at the start of the task
        self.assertIn("adidas", driver.title)
        # NOTE(review): find_element_by_name matches the HTML `name`
        # attribute, not visible button text -- no element has
        # name="Add to Bag", which is exactly the NoSuchElementException
        # the question reports.
        elem = driver.find_element_by_name("Add to Bag")  # finds the 'add to bag' button (for adidas.com) and clicks it
        elem.send_keys("pycon")
        elem.send_keys(Keys.RETURN)
        assert "No results found." not in driver.page_source
        # NOTE(review): `selenium` is the module here; it has no .click().
        # Clicking is done on a located WebElement instead.
        selenium.click("Add to Bag")

    def tearDown(self):
        self.driver.close()


if __name__ == "__main__":
    unittest.main()
Right now i am experimenting with python and trying to make a simple bot for adidas.com that adds a product to the cart. I am using selenium to do so. I try to have selenium click the 'add to bag' button, but when i run i get this error:
NoSuchElementException: Message: no such element: Unable to locate element: {"method":"name","selector":"Add to Bag"}
It cannot find the 'add to bag' button on the website, even though i know its there. what am I doing wrong?
thanks in advance
Sometimes I also face the same problem when locating an element by its text. The best way is to get the element by XPath. Hope this XPath will solve your problem:
elem =driver.find_element_by_xpath("//*[text()='Add to Bag']")
Hope getting element by XPath will solve your problem
You need to feed the method the id attribute of the element not the text. Looking at the page and the docs it seems that
selenium.click('add-to-bag')
Should get you there.

PhantomJS Selenium site does not load

I am trying to create an app which monitors a webpage using phantomjs and selenium but I have found an issue with a certain url as seen in the code.
from selenium import webdriver

SITE = "http://www.adidas.com/"


def main():
    """Load SITE in PhantomJS, save a screenshot, and trace each step."""
    print("Building Driver")
    driver = webdriver.PhantomJS()
    driver.set_window_size(1024, 768)
    print("Driver Created")

    # Per the question, execution stalls inside this get() call.
    print("Navigating to: " + SITE)
    driver.get(SITE)
    print("Site loaded")

    print("Saving Screenshot")
    driver.save_screenshot("screen.png")

    print("Fetching Current URL")
    print(driver.current_url)

    print("Exiting")
    driver.quit()


if __name__ == '__main__':
    main()
The program never gets past the line driver.get(SITE) How can I make it so that the website will load?
It appears that this is an error in PhantomJS. I would try using either the firefox or the chrome driver instead.
from selenium import webdriver

SITE = "http://www.adidas.de"


def main():
    """Load SITE in Chrome instead of PhantomJS, which hangs on this page."""
    print("Building Driver")
    # The original answer wrote the pseudo-code `*path to chrome driver*`,
    # which is a Python syntax error; substitute your real chromedriver
    # path in the string below.
    browser = webdriver.Chrome("path/to/chromedriver")
    print("Driver Created")
    print("Navigating to: " + SITE)
    browser.get(SITE)
    print("Site loaded")
    browser.quit()


if __name__ == '__main__':
    main()
Creating a headless application would also be possible if that's what you wanted.

Validating Log In functionality in website using Selenium webdriver with Python

I am trying to click the "Log In" element present in webpage.
"Log In" element is visible , when you click on ACCOUNT element on website.
code is:
import unittest
from selenium import webdriver


class registernewuser(unittest.TestCase):
    """Open the Magento demo store and click ACCOUNT -> Log In."""

    # NOTE(review): the original had `#classmethod` (a comment). Without
    # the real @classmethod decorator, unittest's `cls.setUpClass()` call
    # invokes a plain function with no argument and raises TypeError.
    @classmethod
    def setUpClass(cls):
        cls.driver = webdriver.Chrome()
        cls.driver.implicitly_wait(10)
        cls.driver.maximize_window()
        cls.driver.get("http://demo.magentocommerce.com/")

    def test_register_new_user(self):
        driver = self.driver
        # "Log In" only becomes clickable after the ACCOUNT dropdown is
        # opened, so click the parent menu first.
        account_click = driver.find_element_by_link_text("ACCOUNT").click()
        driver.implicitly_wait(3)
        self.driver.find_element_by_link_text('Log In').click()

    @classmethod
    def tearDownClass(cls):
        cls.driver.quit()
Added
[account_click = driver.find_element_by_link_text("ACCOUNT").click() followed by driver.implicitly_wait(3)]
to the code and it worked. If you want to click an element that is part of a dropdown of a main element: click the main element --> implicitly_wait command --> click the dropdown element.

Categories