.click() button doing nothing in Selenium Python

I'm trying to build a bot for Nike.com.
I'm rotating the user agent, hiding the Blink automation flag, and have done everything else needed (even using a VPN).
URL: https://www.nike.com/ca/t/air-force-1-pixel-shoe-txmVNP/CK6649-100
Size: 2
from selenium import webdriver
import time
from selenium.webdriver.common.keys import Keys
import requests
import sys
from selenium.webdriver.chrome.options import Options
from fake_useragent import UserAgent
from selenium.webdriver.common.by import By
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.support.ui import WebDriverWait
options = webdriver.ChromeOptions()
options.add_experimental_option("excludeSwitches", ["enable-automation"])
options.add_experimental_option('useAutomationExtension', False)
ua=UserAgent()
userAgent=ua.random
options.add_argument("--log-level=3")
options.add_argument("--disable-blink-features=AutomationControlled")
options.add_argument(f'user-agent={userAgent}')
driver = webdriver.Chrome(options=options)
driver.minimize_window()
driver.execute_script("Object.defineProperty(navigator, 'webdriver', {get: () => undefined})")
driver.execute_cdp_cmd('Network.setUserAgentOverride', {"userAgent": 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/83.0.4103.53 Safari/537.36'})
URL = input("Enter URL : ")
SIZE = input("Enter Size : ")
driver.maximize_window()
driver.get(URL)
time.sleep(5)
print("Starting again")
while(True):
##  try:
    s_size = driver.find_element_by_xpath('//*[@id="buyTools"]/div[1]/fieldset/div/div[' + SIZE + ']/label')
    s_size.click()
    time.sleep(1)
##  try:
    print('here')
    time.sleep(5)
##  WebDriverWait(driver, 10).until(EC.element_to_be_clickable((By.XPATH, '//*[@id="floating-atc-wrapper"]/div/button[1]'))).click()
    add_to_bag = driver.find_element_by_xpath('//*[@id="floating-atc-wrapper"]/div/button[1]')
    time.sleep(3)
    add_to_bag.click()
    print('1')
    break
time.sleep(1)
while(True):
    try:
        move_to_cart = driver.find_element_by_xpath('//*[@id="nav-cart"]/a/div/span')
        move_to_cart.click()
        break
    except:
        time.sleep(1)
This code selects the required size and also clicks the Add to Bag button (the click animation shows on the website), but after that nothing happens, even if I manually click the Add To Bag button or reload the page.
The only way out is to open the link in a new tab and do everything manually.
Can anyone give me a workaround for this?
I think Selenium is doing its job but is getting blocked by the website.
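One direction worth trying (a sketch, not a guaranteed fix, since Nike's bot protection changes frequently) is to drive the page with undetected-chromedriver, which patches ChromeDriver's fingerprint more thoroughly than the manual CDP overrides above, and to click via explicit waits plus a JavaScript click. The XPaths below are the same ones from the question; URL and SIZE are the values collected earlier:

import undetected_chromedriver as uc
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC

driver = uc.Chrome()  # patches the most common automation fingerprints automatically
driver.get(URL)  # URL entered above

wait = WebDriverWait(driver, 15)
# wait until the size tile is clickable instead of sleeping a fixed time
size_label = wait.until(EC.element_to_be_clickable(
    (By.XPATH, '//*[@id="buyTools"]/div[1]/fieldset/div/div[' + SIZE + ']/label')))
size_label.click()

add_to_bag = wait.until(EC.element_to_be_clickable(
    (By.XPATH, '//*[@id="floating-atc-wrapper"]/div/button[1]')))
# a JavaScript click sometimes triggers the page's own handler when a normal .click() is swallowed
driver.execute_script("arguments[0].click();", add_to_bag)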

Related

Unable to locate a button

I want to click a cookie button but I can't. I checked whether it is inside an iframe, but it is not. Thank you to anyone who can help! Here is the site: https://serveur-prive.net/minecraft/hiveria-13093/vote
My code:
import random
import string
import time
from selenium import webdriver
from selenium.webdriver.support.ui import Select
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.common.by import By
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.chrome.options import Options
from selenium.webdriver import ActionChains
import undetected_chromedriver as uc
from selenium.webdriver.common.by import By
from colorama import Fore
# driver_options
def main():
    chrome_options = uc.ChromeOptions()
    chrome_options.add_argument("start-maximized")
    chrome_options.add_extension("NopeCHA-CAPTCHA-Solver.crx")
    chrome_options.add_extension("C:/Users/Alban/PycharmProjects/Programming/NopeCHA-CAPTCHA-Solver.crx")
    chrome_options.add_argument(
        "user-agent=Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/74.0.3729.169 Safari/537.36")
    print(Fore.LIGHTBLACK_EX + "################### " + Fore.LIGHTBLUE_EX
          + "Auto Vote Hiveria" + Fore.LIGHTBLACK_EX + " ###################")
    print(Fore.LIGHTBLACK_EX + "                    Version 1.0")
    print()
    pseudo = input("Pseudonyme Minecraft : ")
    driver = uc.Chrome(options=chrome_options, version_main=108)
    driver.get('https://www.hiveria.fr/vote')
    time.sleep(5)
    driver.implicitly_wait(8)
    driver.find_element(By.XPATH, '//*[@id="stepNameInput"]').send_keys(pseudo)
    driver.find_element(By.XPATH, '//*[@id="voteNameForm"]/button').click()
    driver.implicitly_wait(2)
    # vote 1
    time.sleep(4)
    driver.implicitly_wait(6)
    driver.find_element(By.XPATH, '//*[@id="vote-card"]/div[3]/div[1]/a').click()
    time.sleep(13811)
    WebDriverWait(driver, 10).until(EC.element_to_be_clickable((By.XPATH, '/html/body/div[2]/div[2]/div[1]/div[2]/div[2]/button[1]'))).click()
    time.sleep(8.5)
    # vote 2
    #driver.find_element(By.XPATH, '//*[@id="vote-card"]/div[3]/div[2]/a').click()
    #driver.implicitly_wait(4)
    #time.sleep(8.5)
    #driver.find_element(By.XPATH, '//*[@id="main-content"]/div[1]/input[2]').send_keys(pseudo)
    #driver.find_element(By.XPATH, '//*[@id="main-content"]/button').click()
    #driver.switch_to.window(driver.window_handles[0])
    driver.implicitly_wait(3)
    # vote 3
    #driver.find_element(By.XPATH, '//*[@id="vote-card"]/div[3]/div[3]/a').click()
    #driver.implicitly_wait(5)
    #driver.find_element(By.XPATH, '//*[@id="pseudo"]').send_keys(pseudo)
    #driver.find_element(By.XPATH, '//*[@id="vote-button-action"]').click()
    time.sleep(1000)

if __name__ == '__main__':
    # freeze_support() here if program needs to be frozen
    main()
[Screenshots: the page, and the state before entering the site, where the button text is 'serveur-prive.net'.]
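Without access to the page it is hard to be certain, but consent buttons are often injected late or rendered inside an overlay, so an XPath click can silently miss. A generic fallback worth trying (a sketch; the button text 'serveur-prive.net' is taken from the screenshot description and may differ) is to wait for the element, click it through JavaScript, and, if that fails, walk every iframe on the page:

from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC

def click_consent(driver, text="serveur-prive.net", timeout=10):
    # match a button or link whose visible text contains the given string (assumed text)
    xpath = f"//button[contains(., '{text}')] | //a[contains(., '{text}')]"
    try:
        btn = WebDriverWait(driver, timeout).until(
            EC.presence_of_element_located((By.XPATH, xpath)))
        driver.execute_script("arguments[0].click();", btn)  # JS click bypasses overlay interception
        return True
    except Exception:
        pass
    # fall back to searching inside every iframe on the page
    for frame in driver.find_elements(By.TAG_NAME, "iframe"):
        driver.switch_to.frame(frame)
        try:
            driver.find_element(By.XPATH, xpath).click()
            driver.switch_to.default_content()
            return True
        except Exception:
            driver.switch_to.default_content()
    return False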

Python Selenium: How to avoid being detected/blocked?

[Screenshot: the error message.] I try to extract data from the website below, but when Selenium clicks the "Search" button (the last step of the code) an error is returned; it seems to be blocked by the server. Everything works when I access the website manually, but with the automated Chrome browser the attached error message appears as soon as I click "Search". How can I get around this?
from selenium import webdriver
from selenium.webdriver.chrome.service import Service
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.chrome.options import Options
import time
ser = Service(r"C:\Users\shekc\Documents\chromedriver.exe")
options = webdriver.ChromeOptions()
options.add_experimental_option("excludeSwitches", ["enable-automation"])
options.add_experimental_option('useAutomationExtension', False)
options.add_argument("--Referer=https://www.dahsing.com/jsp/fundPlatform/index_e.jsp")
options.add_argument("user-agent=Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/83.0.4103.106 Safari/537.36")
driver = webdriver.Chrome(options=options, service=ser)
url = "https://www.dahsing.com/jsp/fundPlatform/risk_warning_e.jsp"
driver.get(url)
time.sleep(3)
# click "Agree"
driver.find_element(By.LINK_TEXT,"Agree").click()
driver.switch_to.default_content()
driver.switch_to.frame(1)
# return the # Fund house
from selenium.webdriver.support.ui import Select
Select = Select(driver.find_element(By.XPATH, '//*[@id="mainContent_ddlFundHouse"]'))
FH_No = len(Select.options)
# select "all per page"
from selenium.webdriver.support.ui import Select
Select = Select(driver.find_element(By.XPATH, '//*[@id="mainContent_ddlPageNumber"]'))
Select.select_by_index(len(Select.options) - 1)
Select = Select(driver.find_element(By.XPATH, '//*[@id="mainContent_ddlFundHouse"]'))
Select.select_by_index(1)
FH_name = Select.first_selected_option.text
# click "Search"
driver.find_element(By.LINK_TEXT,"Search").click()
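A commonly suggested mitigation (a sketch, with no guarantee against server-side checks) is to mask the most obvious automation fingerprints before any page script runs, for example with the selenium-stealth package or by injecting a CDP script on every new document:

from selenium import webdriver

options = webdriver.ChromeOptions()
options.add_experimental_option("excludeSwitches", ["enable-automation"])
options.add_experimental_option("useAutomationExtension", False)
options.add_argument("--disable-blink-features=AutomationControlled")
driver = webdriver.Chrome(options=options)

# hide navigator.webdriver on every new document, before any site script can read it
driver.execute_cdp_cmd(
    "Page.addScriptToEvaluateOnNewDocument",
    {"source": "Object.defineProperty(navigator, 'webdriver', {get: () => undefined})"},
)
driver.get("https://www.dahsing.com/jsp/fundPlatform/risk_warning_e.jsp")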

Click on all elements of the same class at the same time

The code clicks on an element that may or may not exist on the page, and then needs to click on all elements of the same class:
from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.firefox.options import Options
import time
my_user_agent = 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/99.0.4844.84 Safari/537.36'
options = Options()
options.set_preference("general.useragent.override", my_user_agent)
options.page_load_strategy = 'eager'
options.add_argument('--headless')
driver = webdriver.Firefox(options=options)
driver.get("https://int.soccerway.com/matches/2022/07/23/")
try:
    WebDriverWait(driver, 3).until(EC.presence_of_element_located((By.XPATH, "//a[contains(@class,'tbl-read-more-btn')]")))
    driver.find_element(by=By.XPATH, value="//a[contains(@class,'tbl-read-more-btn')]").click()
    time.sleep(0.1)
except:
    pass
WebDriverWait(driver, 3).until(EC.presence_of_element_located((By.XPATH, "//tr[contains(@class,'group-head')]")))
for btn in driver.find_elements(by=By.XPATH, value="//tr[contains(@class,'group-head')]"):
    btn.click()
    time.sleep(0.1)
But this takes about 90 seconds, and when I remove the time.sleep it drops to about 65 seconds; without the sleep, however, some of the elements that should be clicked are randomly skipped.
Is there any way to click all the elements at the same time to speed up the process?
[Screenshots: the buttons to click, and the expected result after the clicks (the boxes opened).]
To speed up the process, you can click all of the competition-link elements one by one in sequence using either of the following locator strategies:
Using CSS_SELECTOR:
for ele in driver.find_elements(By.CSS_SELECTOR, "tr.group-head.clickable th.competition-link>a"):
    ele.click()
Using XPATH:
for ele in driver.find_elements(By.XPATH, "//tr[@class='group-head clickable ']//th[@class='competition-link']/a"):
    ele.click()
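If the per-element round-trips are still too slow, another option (a sketch, not part of the answer above) is to fire all the clicks inside a single JavaScript call, so only one command crosses the Selenium wire protocol; the selector mirrors the CSS one shown above:

# click every group header in one script execution instead of one WebDriver call per row
driver.execute_script("""
    document.querySelectorAll("tr.group-head.clickable th.competition-link > a")
            .forEach(a => a.click());
""")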

Selenium error: Website Access Denied (WebDriver, WebDriverWait)

I get this weird access-denied message when I try to log in to the offspring.co.uk website. The denial message pops up right after clicking the login button. I have heard something about Akamai bot protection on this website; maybe this protection detects my automation. Does anyone know how to prevent this access denial?
from selenium import webdriver
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver import ActionChains
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.common.by import By
import time
def call_Website():
    # configurations
    profile = webdriver.FirefoxProfile()
    profile.accept_untrusted_certs = True
    profile.set_preference("general.useragent.override", "Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:92.0) Gecko/20100101 Firefox/92.0")
    firefox_capabilities = webdriver.DesiredCapabilities.FIREFOX
    firefox_capabilities['marionette'] = True
    # start webdriver etc
    browser = webdriver.Firefox(firefox_profile=profile, desired_capabilities=firefox_capabilities)
    wait = WebDriverWait(browser, 20)
    action = ActionChains(browser)
    ########### checking if proxy works, old snippet
    try:
        browser.get("https://httpbin.org/ip")
    except:
        browser.close()
        print("proxy was not working")
    ##############################################
    time.sleep(2)
    browser.get('https://www.offspring.co.uk/view/secured/content/login')
    time.sleep(2)
    # accept cookie
    browser.find_element_by_css_selector("#onetrust-accept-btn-handler").click()
    time.sleep(1)
    # choose currency
    browser.find_element_by_css_selector("li.EUR:nth-child(2)").click()
    # fills out username
    username_form = browser.find_element_by_css_selector('#user')
    action.move_to_element(username_form).pause(1).click().pause(0.5).send_keys('username')
    # fills out password
    password_form = browser.find_element_by_css_selector('#loginpassword')
    action.pause(2).move_to_element(password_form).pause(1).click().pause(0.5).send_keys('password')
    # clicks on login
    Login_Btn = wait.until(EC.element_to_be_clickable((By.CSS_SELECTOR, '#loginButton')))
    action.move_to_element(Login_Btn).pause(1).click().perform()

if __name__ == "__main__":
    call_Website()
And here is the "Access Denied" page (screenshot).
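Akamai fingerprints far more than navigator.webdriver, so there is no reliable workaround, but a frequently suggested first step for Firefox (a sketch; the preference names below are the ones commonly cited and may stop having any effect in newer Firefox versions) is to disable the webdriver flag at the profile level before the page loads:

from selenium import webdriver

profile = webdriver.FirefoxProfile()
# commonly suggested preferences to hide the automation flag; effectiveness varies by version
profile.set_preference("dom.webdriver.enabled", False)
profile.set_preference("useAutomationExtension", False)
profile.set_preference("general.useragent.override",
                       "Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:92.0) Gecko/20100101 Firefox/92.0")
profile.update_preferences()

browser = webdriver.Firefox(firefox_profile=profile)
browser.get("https://www.offspring.co.uk/view/secured/content/login")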

Selenium Python 2.7: how to fill in the gaps?

Hi everyone! I'm new to coding and I'm trying to write a web crawler using Selenium with Python 2.7. I'm still at an early stage, but I've been having problems filling in the input fields of the website. It's https://comtrade.un.org/data/ from the UN. I've already tried inspecting the page elements and using different methods (find_element_by_id, find_element_by_class, find_element_by_name, send_keys, etc.), but none seems to work.
Here's my code thus far:
import selenium.webdriver.support.ui as ui
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
HEADER = 'Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2228.0 Safari/537.36'
BASE_URL = 'https://comtrade.un.org/data/'
def initialize_webdriver():
    profile = webdriver.Chrome()
    profile.set_preference("general.useragent.override", HEADER)
    driver = webdriver.Chrome(profile)
    # driver.implicitly_wait(30)
    driver.set_window_size(1180, 980)
    return driver

def main():
    # driver = initialize_webdriver()
    driver = webdriver.Chrome()
    driver.get(BASE_URL)
    for period in driver.find_element_by_class('s2id_periods'):
        period.clear()
        period.send_keys('2013')
Can anyone help me out? Thanks in advance!
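The period box on that page is a Select2-style widget, so the visible element is not a plain input and send_keys on the container does nothing; a common pattern is to open the widget, type into the search box it reveals, and press Enter. A rough sketch, with the caveat that the '.s2id_periods' container and '.select2-input' selectors are assumptions based on the class name in the question and standard Select2 markup, not verified against the live page:

from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC

driver = webdriver.Chrome()
driver.get('https://comtrade.un.org/data/')
wait = WebDriverWait(driver, 15)

# open the Select2 dropdown (container class taken from the question; may differ)
wait.until(EC.element_to_be_clickable((By.CSS_SELECTOR, '.s2id_periods'))).click()

# Select2 renders a search box at the top of the open dropdown
search = wait.until(EC.visibility_of_element_located(
    (By.CSS_SELECTOR, '.select2-drop-active .select2-input')))
search.send_keys('2013')
search.send_keys(Keys.ENTER)  # pick the highlighted match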
