Unable to select an element in a list - Python

from selenium import webdriver
import time
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support.ui import Select
from selenium.webdriver.common.action_chains import ActionChains
from selenium.webdriver.remote.webelement import WebElement

driver = webdriver.Chrome('chromedriver')
driver.get('https://devbusiness.tunai.io/login')
time.sleep(2)
driver.maximize_window()

# Create variables for login credentials.
username = driver.find_element(By.NAME, "loginUsername")
username.send_keys("kevin@tunai")
password = driver.find_element(By.NAME, "loginPassword")
password.send_keys("xxxxx")
login = driver.find_element(By.XPATH, "//*[@id='app']/div/div/div/div/div/div[2]/form/div[4]/button")
login.submit()
time.sleep(2)

# Wait for the login process to complete.
WebDriverWait(driver=driver, timeout=10).until(
    lambda x: x.execute_script("return document.readyState === 'complete'")
)

# Verify that the login was successful.
error_message = "Incorrect username or password."
# Retrieve any errors found.
errors = driver.find_elements(By.CLASS_NAME, "flash-error")
# When errors are found, the login will fail.
if any(error_message in e.text for e in errors):
    print("[!] Login failed")
else:
    print("[+] Login successful")

driver.get("https://devbusiness.tunai.io/dashboard/salon_menu_service")
service = driver.find_element(By.XPATH, "//*[@id='page-content']/div/div[2]/div[1]/div[1]/button")
service.click()
driver.find_element(By.TAG_NAME, "input").send_keys("Hair Dying")

price = driver.find_element(By.XPATH, "//*[@id='page-content']/div/div[2]/div[1]/div[1]/div/div[2]/div/div/form/div[1]/div[1]/div/div[1]/div[2]/div[1]/div/div/input")
price.clear()
price.send_keys("50")
baseprice = driver.find_element(By.XPATH, "//*[@id='page-content']/div/div[2]/div[1]/div[1]/div/div[2]/div/div/form/div[1]/div[1]/div/div[1]/div[2]/div[2]/div/div/input")
baseprice.clear()
baseprice.send_keys("10")

# Category button
category_button = driver.find_element(By.XPATH, "//*[@id='page-content']/div/div[2]/div[1]/div[1]/div/div[2]/div/div/form/div[1]/div[1]/div/div[1]/div[3]/div/div/div/div[2]")
# The "Category 2 - BeautyPOS" item in the list of categories
category_select = driver.find_element(By.XPATH, """//*[@id="page-content"]/div/div[2]/div[1]/div[1]/div/div[2]/div/div/form/div[1]/div[1]/div/div[1]/div[3]/div/div/div/div[3]/ul/li[2]/span""")
# Click the category button to show the list.
category_button.click()
# Click on the category you want to select.
category_select.click()
time.sleep(3)

# Sub-category button
subcategory_button = driver.find_element(By.XPATH, "//*[@id='page-content']/div/div[2]/div[1]/div[1]/div/div[2]/div/div/form/div[1]/div[1]/div/div[1]/div[4]/div/div/div/div[3]")
# The "Category 4 - HairCut" item in the list of sub-categories
subcategory_select = driver.find_element(By.XPATH, """//*[@id="page-content"]/div/div[2]/div[1]/div[1]/div/div[2]/div/div/form/div[1]/div[1]/div/div[1]/div[4]/div/div/div/div[3]/ul/li[2]""")
# Click the sub-category button to show the list.
subcategory_button.click()
# Click on the sub-category you want to select.
subcategory_select.click()
time.sleep(3)

When I move on to the next part, which is selecting a value (a random one from the list), it says the element is not interactable. I tried the click function and driver.implicitly_wait.
I'd appreciate it if someone could help. Thanks in advance.
The link is https://devbusiness.tunai.io/dashboard/salon_menu_service
The username and password can be found in the code.

In this case, the "element not interactable" error appears because you are trying to click an element that has the display: none; style.
You need to trigger it by clicking a related element first to make it visible, and then scroll to it as well.
....
....
# Trigger with the other element first; add this code.
element = driver.find_element(By.XPATH, "//span[text()='No Sub-Category']")
driver.execute_script("arguments[0].scrollIntoView();", element)
time.sleep(1)
element.click()

# Sub-category button
subcategory_button = driver.find_element(By.XPATH, "//*[@id='page-content']/div/div[2]/div[1]/div[1]/div/div[2]/div/div/form/div[1]/div[1]/div/div[1]/div[4]/div/div/div/div[3]")
# The "Category 4 - HairCut" item in the list of sub-categories
subcategory_select = driver.find_element(By.XPATH, """//*[@id="page-content"]/div/div[2]/div[1]/div[1]/div/div[2]/div/div/form/div[1]/div[1]/div/div[1]/div[4]/div/div/div/div[3]/ul/li[2]""")
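
If you'd rather not rely on fixed time.sleep calls, a minimal sketch of the same idea with an explicit wait could look like this. It assumes the same trigger locator as above; the shortened option XPath is only a hypothetical illustration, not the real one from the page.

from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.support.ui import WebDriverWait

wait = WebDriverWait(driver, 10)
# Wait until the 'No Sub-Category' placeholder can be clicked, then open the list.
trigger = wait.until(EC.element_to_be_clickable((By.XPATH, "//span[text()='No Sub-Category']")))
driver.execute_script("arguments[0].scrollIntoView();", trigger)
trigger.click()
# Wait until the desired option in the opened list is visible before clicking it.
option = wait.until(EC.visibility_of_element_located(
    (By.XPATH, "//*[@id='page-content']//ul/li[2]")))  # hypothetical shortened XPath
option.click()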

selenium: stale element reference: element is not attached to the page document

from selenium.webdriver import Chrome
from selenium.webdriver.common.by import By
from selenium.webdriver.support.wait import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
import chromedriver_autoinstaller

chromedriver_autoinstaller.install()

TYPES = ['user', 'verified_audience', 'top_critics']
TYPE = TYPES[2]
URL = 'https://www.rottentomatoes.com/m/dunkirk_2017/reviews'
PAGES = 2

driver = Chrome()
driver.get(URL)

data_reviews = []
while PAGES != 0:
    wait = WebDriverWait(driver, 30)
    reviews = wait.until(lambda _driver: _driver.find_elements(
        By.CSS_SELECTOR, '.review_table_row'))

    # Extracting review data
    for review in reviews:
        if TYPE == 'top_critics':
            critic_name_el = review.find_element(
                By.CSS_SELECTOR, '[data-qa=review-critic-link]')
            critic_review_text_el = review.find_element(
                By.CSS_SELECTOR, '[data-qa=review-text]')
            data_reviews.append(critic_name_el.text)

    try:
        next_button_el = driver.find_element(
            By.CSS_SELECTOR, '[data-qa=next-btn]:not([disabled=disabled])'
        )
        if not next_button_el:
            PAGES = 0

        next_button_el.click()  # refresh new reviews
        PAGES -= 1
    except Exception as e:
        driver.quit()

Here, a Rotten Tomatoes review page is opened and the reviews are scraped, but when the next button is clicked and the new reviews are about to be scraped, this error pops up. I am guessing that the new reviews have not been loaded yet and trying to access them is causing the problem. I tried driver.implicitly_wait, but that doesn't work either.
The error originates from line 33, data_reviews.append(critic_name_el.text).

Clicking the next-page button next_button_el loads the new page, but this takes some time, while your Selenium code continues immediately after the click. So the line reviews = wait.until(lambda _driver: _driver.find_elements(By.CSS_SELECTOR, '.review_table_row')) probably collects elements that still belong to the old page; the page is then refreshed, and some of the elements collected after that (still from the old page), such as critic_name_el, are no longer attached to the document because the old page has been replaced.
To make your code work, you need to introduce a short delay after clicking the next-page button, as follows:
import time  # needed for the added delay

data_reviews = []
while PAGES != 0:
    wait = WebDriverWait(driver, 30)
    reviews = wait.until(lambda _driver: _driver.find_elements(
        By.CSS_SELECTOR, '.review_table_row'))

    # Extracting review data
    for review in reviews:
        if TYPE == 'top_critics':
            critic_name_el = review.find_element(
                By.CSS_SELECTOR, '[data-qa=review-critic-link]')
            critic_review_text_el = review.find_element(
                By.CSS_SELECTOR, '[data-qa=review-text]')
            data_reviews.append(critic_name_el.text)

    try:
        next_button_el = driver.find_element(
            By.CSS_SELECTOR, '[data-qa=next-btn]:not([disabled=disabled])'
        )
        if not next_button_el:
            PAGES = 0

        next_button_el.click()  # refresh new reviews
        PAGES -= 1
        time.sleep(2)
    except Exception as e:
        driver.quit()

Also, I'd suggest waiting for element visibility, not just presence, here:
reviews = wait.until(lambda _driver: _driver.find_elements(By.CSS_SELECTOR, '.review_table_row'))
You also need to understand that driver.implicitly_wait does not introduce any actual pause; it just sets the timeout for the find_element and find_elements methods.
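
As a rough sketch of how that visibility-based wait could look with expected_conditions (same selector as in the question; this is just one way to write it):

from selenium.webdriver.support import expected_conditions as EC

wait = WebDriverWait(driver, 30)
# Waits until the review rows are actually visible, not merely present in the DOM.
reviews = wait.until(EC.visibility_of_all_elements_located(
    (By.CSS_SELECTOR, '.review_table_row')))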

Problem clicking a radio button: can't select a radio button. Message: stale element reference: element is not attached to the page document

Error: selenium.common.exceptions.StaleElementReferenceException: Message: stale element reference: element is not attached to the page document.
The website I'm scraping is https://www.telekom.de/unterwegs/apple/apple-iphone-13-pro/graphit-512gb. I want to loop through the tariff details; each section and each radio button shows different prices. I want to scrape the price details for each radio button one by one, together with the name of the checked radio button, until the end of the page. I have tried but couldn't make it work.
Could anyone help with this? It will help me learn. I got as far as entering the "change tariff" link, and I'm facing the issue while scraping the details. The "change tariff" links are shown below:
https://i.stack.imgur.com/RRyJa.png
https://i.stack.imgur.com/fNafB.png
https://i.stack.imgur.com/jFnLA.png
https://i.stack.imgur.com/WlyLU.png
"I'm trying to click a radio button and need to scrape the price details for the selected radio button."
import xlwt
from selenium import webdriver
import re
import time
from datetime import date
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.common.by import By
from selenium.webdriver.common.action_chains import ActionChains
from selenium.webdriver.common.keys import Keys


class telekommobiles:
    def __init__(self):
        self.url = "https://www.telekom.de/mobilfunk/geraete/smartphone?page=1&pageFilter=promotion"
        self.country = 'DE'
        self.currency = 'GBP'
        self.VAT = 'Included'
        self.shipping = 'free shipping within 3-4 weeks'
        self.Pre_PromotionPrice = 'N/A'
        self.color = 'N/A'

    def telekom(self):
        #try:
        driver = webdriver.Chrome()
        driver.maximize_window()
        driver.get(self.url)
        today = date.today()
        #time.sleep(5)
        WebDriverWait(driver, 30).until(EC.presence_of_all_elements_located((By.XPATH, "//*[@id='consentAcceptAll']")))
        cookies = driver.find_element_by_css_selector('button.cl-btn.cl-btn--accept-all').click()
        print("cookies accepted")
        links_prod_check = []
        prod_models = []
        prod_manufacturer = []
        prod_memorys = []
        product_colors = []
        product_price_monthly_payments = []
        product_price_one_time_payments = []
        product_links = []
        containers = driver.find_elements_by_css_selector('div[class="styles_item__12Aw4"]')
        i = 1
        for container in containers:
            p_links = container.find_element_by_tag_name('a').get_attribute('href')
            i = i + 1
            product_links.append(p_links)
            #print(p_links)
        for links in product_links:
            driver.get(links)
            #time.sleep(5)
            #print(driver.current_url)
            #links_prod_check.append(driver.current_url)
            coloroptions = WebDriverWait(driver, 30).until(EC.presence_of_all_elements_located((By.XPATH, "//li[@data-qa='list_ColorVariant']")))
            #print(coloroptions)
            for i in range(len(coloroptions)):
                coloroption = driver.find_elements_by_xpath("//li[@data-qa='list_ColorVariant']")
                coloroption[i].click()
                #print(coloroption[i])
                time.sleep(3)
                memoryoptions = WebDriverWait(driver, 30).until(EC.presence_of_all_elements_located((By.XPATH, "//span[@class='phx-radio__element']")))
                for i in range(len(memoryoptions)):
                    memoryoption = driver.find_elements_by_xpath("//span[@class='phx-radio__element']")
                    try:
                        memoryoption[i].click()
                    except:
                        pass
                    time.sleep(3)
                    change_traiff = driver.find_element_by_css_selector('button[class="phx-link phx-list-of-links__link js-mod tracking-added"]').click()
                    time.sleep(3)
                    section_loops = driver.find_elements_by_css_selector('section[class="tariff-catalog--layer"]')
                    for section_loop in section_loops:
                        # Headings
                        heading_1 = section_loop.find_element_by_css_selector('h2[class="page-title page-title--lowercase"]').text
                        print(heading_1)
                        looping_for_tariff = WebDriverWait(driver, 30).until(EC.presence_of_all_elements_located((By.XPATH, "//span[@class='phx-radio__element']")))
                        subcontainers = section_loop.find_elements_by_css_selector('div[class="phx-tariff-box__section"]')
                        for subcontainer in subcontainers:
                            radio_buttons_list = subcontainer.find_elements_by_css_selector('div[class="phx-form__row phx-form__row--small phx-form__row--full-width phx-form__row--radio"]')
                            for radio in radio_buttons_list:
                                input = radio.find_elements_by_css_selector('span[class="phx-radio__element"]')
                                if input[0].is_enabled():
                                    try:
                                        ActionChains(driver).move_to_element(subcontainer).perform()
                                        time.sleep(2)
                                        input[0].click()
                                        time.sleep(3)
                                    except:
                                        print('Not clickable')
                                        pass
                                lable_list = radio.find_elements_by_css_selector('span[class="phx-radio__label"]')
                                label = ""
                                if lable_list:
                                    label = lable_list[0].text
                                heading_2 = subcontainer.find_element_by_css_selector('p[class="phx-t6 phx-t--medium"]').text
                                data_price_list = subcontainer.find_element_by_css_selector('div[class="phx-tariff-box__data-price"]')
                                volumn_list = data_price_list.find_elements_by_css_selector('div[data-qa="label_Tariff_VolumeSize"]')
                                volumn = ""
                                if volumn_list:
                                    volumn = volumn_list[0].text
                                price_list = subcontainer.find_elements_by_css_selector('p[class="phx-price phx-price--size_large phx-price--strong phx-price--color_brand"]')
                                price = ""
                                nonBreakSpace = u'\xa0'
                                if price_list:
                                    price = price_list[0].text
                                print(str(heading_2) + " " + str(label) + " " + str(volumn.replace(' ', '').replace('\\r\\n', '')) + " " + str(price))
        #except:
        #pass


telekom_de = telekommobiles()
telekom_de.telekom()

After selecting a different option, the page gets refreshed, hence the issue. I was not able to find where you were trying to click on the buttons in your code, so I tried to click on all the radio buttons with the code below and was successful. Check the code once.
from selenium import webdriver
import time
from selenium.webdriver.support.wait import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.common.by import By

driver = webdriver.Chrome(executable_path="path to chromedriver.exe")
driver.maximize_window()
driver.implicitly_wait(10)
driver.get("https://www.telekom.de/unterwegs/apple/apple-iphone-13-pro/sierrablau-128gb")
wait = WebDriverWait(driver, 30)
wait.until(EC.element_to_be_clickable((By.XPATH, "//button[text()='Accept All']"))).click()
radiooptions = wait.until(EC.presence_of_all_elements_located((By.XPATH, "//span[@class='phx-radio__element']")))
for i in range(len(radiooptions)):
    radiooptions = driver.find_elements_by_xpath("//span[@class='phx-radio__element']")
    radiooptions[i].click()
    time.sleep(2)

Please use the li element instead of the span:
//li[@data-qa='list_ColorVariant']
and also add a wait (about 5 seconds) once you click on it, then click the next one.
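
A minimal sketch of that suggestion, reusing driver and wait from the snippet above and re-locating the li elements on every pass so the references never go stale (the 5-second pause is the value suggested here):

color_variants = wait.until(EC.presence_of_all_elements_located(
    (By.XPATH, "//li[@data-qa='list_ColorVariant']")))
for i in range(len(color_variants)):
    # Re-find the list on every iteration, because clicking refreshes the page
    # and previously located elements become stale.
    color_variants = driver.find_elements(By.XPATH, "//li[@data-qa='list_ColorVariant']")
    color_variants[i].click()
    time.sleep(5)  # give the page time to refresh before the next click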

How to hit the back button after the final iteration in a nested for loop? (Selenium)

So I have the back button defined; I just don't know where to put it or how to incorporate it. I'm trying to check all the unchecked boxes in the subcategories, then go 'back' to the categories to finish the main iterations and check the rest of the boxes. I just don't know where or how to include the back button in the code.
from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.common.action_chains import ActionChains
from selenium.webdriver.support import expected_conditions as EC
import pandas as pd
import time

# driver path
PATH = "C:/Program Files (x86)\chromedriver.exe"
driver = webdriver.Chrome(PATH)

# access crunchbase ui
driver.get("https://www.crunchbase.com/search/organizations/field/organization.companies/categories/electric-vehicle")
driver.maximize_window()
time.sleep(5)
print(driver.title)

# navigate crunchbase & add parameters
addcolumn = driver.find_element_by_xpath('//button[@class="mat-focus-indicator add-column-button mat-stroked-button mat-button-base mat-primary"]')
addcolumn.click()

# get items only from first list
all_categories = driver.find_elements_by_xpath('(//mat-nav-list)[1]//mat-list-item')
print('len(all_categories):', len(all_categories))

for category in all_categories:
    print('-----')

    # select category
    print('Category:', category.text.strip())

    # scroll it to make it visible and clickable
    #driver.execute_script("arguments[0].scrollIntoView(true);", category)
    # or
    ActionChains(driver).move_to_element(category).perform()

    # click category to display list of columns in this category
    category.click()
    time.sleep(0.5)

    # search columns ONLY in selected category
    # it selects an item only if `mat-checkbox` doesn't have the class `mat-checkbox-checked`
    # and it clicks `label` instead of `checkbox` because `label` is not hidden by the popup message
    columns = driver.find_elements_by_xpath('(//mat-nav-list)[2]//mat-checkbox[not(contains(@class, "mat-checkbox-checked"))]//label')
    print('len(columns):', len(columns))

    for col in columns:
        print('click:', col.text.strip())
        col.click()

    # TODO: click subcategory, select checkboxes, click back button
    subcategories = driver.find_elements_by_xpath('(//mat-nav-list)[2]//mat-list-item[.//icon[@key="icon_caret_right"]]')
    print('len(subcategories):', len(subcategories))

    for sub in subcategories:
        sub.click()
        subcolumns = driver.find_elements_by_xpath('(//mat-nav-list)[3]//mat-checkbox[not(contains(@class, "mat-checkbox-checked"))]//label')
        for subc in subcolumns:
            subc.click()

    backbutton = driver.find_element_by_xpath('//*[@id="mat-dialog-1"]/column-panel/div/dialog-layout/div/mat-dialog-content/div/div/div[1]/button')
    backbutton.click()

driver.find_element_by_xpath('//button[@aria-label="Apply Changes"]').click()

Any help is appreciated!

You use it at the wrong moment. You click it in every category, but you have to click it only when you have selected subcategories.
I also needed a different ID - dialog-0 instead of dialog-1 - and I used a shorter XPath.
for sub in subcategories:
    sub.click()
    subcolumns = driver.find_elements_by_xpath('(//mat-nav-list)[3]//mat-checkbox[not(contains(@class, "mat-checkbox-checked"))]//label')
    for subc in subcolumns:
        subc.click()

if subcategories:
    backbutton = driver.find_element_by_xpath('//*[@id="mat-dialog-0"]//mat-dialog-content//button')
    backbutton.click()
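
If the panel takes a moment to slide back to the category list after the back button is clicked, a short explicit wait before the outer loop continues can help. A sketch, assuming the same locators as above and the WebDriverWait / EC imports already present in the question:

if subcategories:
    backbutton = driver.find_element_by_xpath('//*[@id="mat-dialog-0"]//mat-dialog-content//button')
    backbutton.click()
    # Wait until the first-level category list is visible again before
    # the outer loop moves on to the next category.
    WebDriverWait(driver, 10).until(
        EC.visibility_of_all_elements_located(
            (By.XPATH, '(//mat-nav-list)[1]//mat-list-item')))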

Next Page Iteration in Selenium/BeautifulSoup for Scraping an E-Commerce Website

I'm scraping an e-commerce website, Lazada, using Selenium and bs4. I managed to scrape the first page, but I am unable to iterate to the next page. What I'm trying to achieve is to scrape all of the pages for the categories I've selected.
Here is what I've tried:
from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
from selenium.common.exceptions import TimeoutException
from bs4 import BeautifulSoup as bs

# Run the browser with the incognito argument
option = webdriver.ChromeOptions()
option.add_argument('--incognito')
driver = webdriver.Chrome(executable_path='chromedriver', chrome_options=option)
driver.get('https://www.lazada.com.my/')
driver.maximize_window()

# Select category item #
element = driver.find_elements_by_class_name('card-categories-li-content')[0]
webdriver.ActionChains(driver).move_to_element(element).click(element).perform()

t = 10
try:
    WebDriverWait(driver, t).until(EC.visibility_of_element_located((By.ID, "a2o4k.searchlistcategory.0.i0.460b6883jV3Y0q")))
except TimeoutException:
    print('Page Refresh!')
    driver.refresh()
    element = driver.find_elements_by_class_name('card-categories-li-content')[0]
    webdriver.ActionChains(driver).move_to_element(element).click(element).perform()
    print('Page Load!')

# Soup and select element
def getData(np):
    soup = bs(driver.page_source, "lxml")
    product_containers = soup.findAll("div", class_='c2prKC')
    for p in product_containers:
        title = (p.find(class_='c16H9d').text)  # title
        selling_price = (p.find(class_='c13VH6').text)  # selling price
        try:
            original_price = (p.find("del", class_='c13VH6').text)  # original price
        except:
            original_price = "-1"
        if p.find("i", class_='ic-dynamic-badge ic-dynamic-badge-freeShipping ic-dynamic-group-2'):
            freeShipping = 1
        else:
            freeShipping = 0
        try:
            discount = (p.find("span", class_='c1hkC1').text)
        except:
            discount = "-1"
        if p.find(("div", {'class': ['c16H9d']})):
            url = "https:" + (p.find("a").get("href"))
        else:
            url = "-1"
        nextpage_elements = driver.find_elements_by_class_name('ant-pagination-next')[0]
        np = webdriver.ActionChains(driver).move_to_element(nextpage_elements).click(nextpage_elements).perform()
        print("- -" * 30)
        toSave = [title, selling_price, original_price, freeShipping, discount, url]
        print(toSave)
        writerows(toSave, filename)  # helper and filename defined elsewhere in my script

getData(np)

The problem might be that the driver is trying to click the button before the element is even loaded correctly.
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC

driver = webdriver.Chrome(PATH, chrome_options=option)

# Use this code after driver initialization.
# This makes the driver wait 5 seconds for the page to load.
driver.implicitly_wait(5)

url = "https://www.lazada.com.ph/catalog/?q=phone&_keyori=ss&from=input&spm=a2o4l.home.search.go.239e359dTYxZXo"
driver.get(url)

next_page_path = "//ul[@class='ant-pagination ']//li[@class=' ant-pagination-next']"

# The following code waits 5 seconds for the element
# to become clickable and then tries clicking it.
try:
    next_page = WebDriverWait(driver, 5).until(
        EC.element_to_be_clickable((By.XPATH, next_page_path)))
    next_page.click()
except Exception as e:
    print(e)

EDIT 1
Changed the code to make the driver wait for the element to become clickable. You can put this code inside a while loop to iterate multiple times, and break the loop if the button is not found or not clickable, as sketched below.
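
A minimal sketch of that loop, pairing the clickable wait with a break once the next button can no longer be found (next_page_path and driver come from the snippet above; the scraping step is left as a placeholder):

from selenium.common.exceptions import TimeoutException

while True:
    # Scrape the current page here (e.g. parse driver.page_source with bs4).

    try:
        next_page = WebDriverWait(driver, 5).until(
            EC.element_to_be_clickable((By.XPATH, next_page_path)))
        next_page.click()
    except TimeoutException:
        # No clickable "next" button was found, so this is the last page.
        break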

I verify that autocomplete works well, but no results appear

I am verifying whether autocomplete works well or not. I send the keys, but it does not select the required element. Finally, I want to print the URL of the page that appears after finding the required element and clicking on it. I receive only this result:
Ran 1 test in 33.110s
OK
Process finished with exit code 0
Message:
def test_autocomplet(self):
    try:
        driver = webdriver.Chrome()
        self.driver = webdriver.Chrome()
        url = self.driver.get("http://automationpractice.com/index.php")
        self.driver.maximize_window()
        Serach_text_box = self.driver.find_element_by_id("search_query_top")
        Serach_text_box.send_keys("Printed")
        Serach_text_box.send_keys(Keys.ARROW_DOWN)
        five_option = WebDriverWait(driver, 10).until(EC.visibility_of_element_located((By.XPATH, "//*[contains(text(),'Dress')]")))
        five_option.send_keys(Keys.ENTER)
        print(self.driver.current_url)
        self.assertEqual("http://automationpractice.com/index.php?id_product=3&controller=product", self.driver.current_url, "This Test case is fallied")
    except NoSuchElementException as e:
        print(e)
    except AssertionError as e:
        print(e)
    except TimeoutException as e:
        print(e)

I want to know if anything in the code is wrong, and why it does not select and click on the required element and print the URL of the next page that appears after clicking on it.
I would be thankful for any help.
I put here the code which I used to test this page.
To select an item on the menu I can use ARROW_DOWN, but it doesn't give information about the selected item.
The second method is to search for
//div[@class='ac_results']//li[contains(text(),'Dress')]
or at least
//li[contains(text(),'Dress')]
or even
//div[@class='ac_results']//li
to access the item in the menu. Then I can get the full text with .text or the highlighted part with .//strong.
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
from selenium.common.exceptions import NoSuchElementException, TimeoutException
import time

try:
    #driver = webdriver.Chrome()
    driver = webdriver.Firefox()

    url = driver.get("http://automationpractice.com/index.php")
    #driver.maximize_window()

    search_text_box = driver.find_element_by_id("search_query_top")
    search_text_box.send_keys("Printed")

    time.sleep(1)  # the page displays (and updates) the autocompletion when you add a slightly longer delay

    # --- select using arrow key ---
    # move the selection on the list and accept it
    #search_text_box.send_keys(Keys.ARROW_DOWN)
    #search_text_box.send_keys(Keys.ARROW_DOWN)
    #search_text_box.send_keys(Keys.ARROW_DOWN)
    #search_text_box.send_keys(Keys.ENTER)

    # OR

    # --- select using tag `<li>` and `text()` in autocompletion ---
    # click on the first matching item on the list
    #one_option = WebDriverWait(driver, 10).until(EC.visibility_of_element_located((By.XPATH, "//li[contains(text(),'Dress')]")))
    one_option = WebDriverWait(driver, 10).until(EC.visibility_of_element_located((By.XPATH, "//div[@class='ac_results']//li[contains(text(),'Dress')]")))
    print(' tag:', one_option.tag_name)
    print('text:', one_option.text)
    print('bold:', one_option.find_element_by_xpath('.//strong').text)
    one_option.click()

    # OR

    # --- get all elements in autocompletion using the `<li>` tag ---
    # get many matching items and use [index] to click on some item on the list
    #one_option = WebDriverWait(driver, 10).until(EC.visibility_of_element_located((By.XPATH, "//li[contains(text(),'Dress')]")))
    #all_options = driver.find_elements_by_xpath("//li[contains(text(),'Dress')]")
    #for option in all_options:
    #    print(option.tag_name, ':', option.text)
    #all_options[1].click()

    print(' current:', driver.current_url)
    print('expected:', "http://automationpractice.com/index.php?id_product=3&controller=product")
    print('the same:', driver.current_url == "http://automationpractice.com/index.php?id_product=3&controller=product")

    assert "http://automationpractice.com/index.php?id_product=3&controller=product" == driver.current_url, "This Test case is fallied"
    #assertEqual("http://automationpractice.com/index.php?id_product=3&controller=product", self.driver.current_url, "This Test case is fallied")

except NoSuchElementException as e:
    print('NoSuchElementException:', e)
except TimeoutException as e:
    print('TimeoutException:', e)
except AssertionError as e:
    print('AssertionError:', e)
