Unable to locate a button - python

I want to click on a cookie button but I can't. I checked whether the button is inside an iframe, but it is not. Thank you to anyone who can help me! Here is the site: https://serveur-prive.net/minecraft/hiveria-13093/vote
my code :
import random
import string
import time
from selenium import webdriver
from selenium.webdriver.support.ui import Select
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.common.by import By
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.chrome.options import Options
from selenium.webdriver import ActionChains
import undetected_chromedriver as uc
from selenium.webdriver.common.by import By
from colorama import Fore
# driver_options
def main():
    """Ask for a Minecraft username and walk the Hiveria vote flow."""
    chrome_options = uc.ChromeOptions()
    chrome_options.add_argument("start-maximized")
    # Load the NopeCHA captcha-solver extension once; the original added the
    # same .crx twice (relative and absolute path), loading it in duplicate.
    chrome_options.add_extension("C:/Users/Alban/PycharmProjects/Programming/NopeCHA-CAPTCHA-Solver.crx")
    chrome_options.add_argument(
        "user-agent=Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/74.0.3729.169 Safari/537.36")

    print(Fore.LIGHTBLACK_EX + "################### " + Fore.LIGHTBLUE_EX
          + "Auto Vote Hiveria" + Fore.LIGHTBLACK_EX + " ###################")
    print(Fore.LIGHTBLACK_EX + " Version 1.0")
    print()
    pseudo = input("Pseudonyme Minecraft : ")

    driver = uc.Chrome(options=chrome_options, version_main=108)
    driver.get('https://www.hiveria.fr/vote')
    time.sleep(5)
    driver.implicitly_wait(8)

    # XPath attribute tests use '@id', not '#id'.  '#id' is invalid XPath
    # syntax, so Selenium raises InvalidSelectorException and the element is
    # never found — this is why the button could not be located.
    driver.find_element(By.XPATH, '//*[@id="stepNameInput"]').send_keys(pseudo)
    driver.find_element(By.XPATH, '//*[@id="voteNameForm"]/button').click()
    driver.implicitly_wait(2)

    # vote 1
    time.sleep(4)
    driver.implicitly_wait(6)
    driver.find_element(By.XPATH, '//*[@id="vote-card"]/div[3]/div[1]/a').click()
    # Explicitly wait for the cookie/consent button instead of the original
    # time.sleep(13811) (~3.8 hours), which simply stalled the script.
    WebDriverWait(driver, 30).until(EC.element_to_be_clickable(
        (By.XPATH, '/html/body/div[2]/div[2]/div[1]/div[2]/div[2]/button[1]'))).click()
    time.sleep(8.5)

    # vote 2 (left disabled, as in the original; '@id' fixed here too)
    #driver.find_element(By.XPATH, '//*[@id="vote-card"]/div[3]/div[2]/a').click()
    #driver.implicitly_wait(4)
    #time.sleep(8.5)
    #driver.find_element(By.XPATH, '//*[@id="main-content"]/div[1]/input[2]').send_keys(pseudo)
    #driver.find_element(By.XPATH, '//*[@id="main-content"]/button').click()
    #driver.switch_to.window(driver.window_handles[0])
    driver.implicitly_wait(3)

    # vote 3 (left disabled, as in the original)
    #driver.find_element(By.XPATH, '//*[@id="vote-card"]/div[3]/div[3]/a').click()
    #driver.implicitly_wait(5)
    #driver.find_element(By.XPATH, '//*[@id="pseudo"]').send_keys(pseudo)
    #driver.find_element(By.XPATH, '//*[@id="vote-button-action"]').click()

    # Keep the browser open long enough to inspect the result.
    time.sleep(1000)


if __name__ == '__main__':
    # freeze_support() here if program needs to be frozen
    main()
screenshot:
screenshot before entering the site, button text is 'serveur-prive.net':

Related

selenium wait is not working on linux server but working on windows

on linux server
def get_crawling_browser() :
    """Build a headless Chrome instance configured for server-side crawling."""
    from selenium import webdriver
    from selenium.webdriver.chrome.service import Service
    from webdriver_manager.chrome import ChromeDriverManager

    opts = webdriver.ChromeOptions()
    # All flags the crawler needs, applied in one pass.
    flags = (
        '--headless',
        '--window-size=1420,1080',
        '--start-maximized',
        '--no-sandbox',
        '--disable-blink-features=AutomationControlled',
        '--disable-dev-shm-usage',
        '--disable-gpu',
        "--incognito",
        "user-agent=Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/103.0.0.0 Safari/537.36",
    )
    for flag in flags:
        opts.add_argument(flag)

    return webdriver.Chrome(service=Service(ChromeDriverManager().install()), options=opts)
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.common.by import By
from selenium.common.exceptions import TimeoutException

url = "https://www.investing.com/commodities/copper-historical-data?cid=959211"
browser = get_crawling_browser()
browser.get(url)

# Dismiss the sign-up popup if it appears.  XPath attribute syntax is '@id',
# not '#id' — the original '#id' is invalid XPath, so the wait raised/timed
# out and the popup was never closed.
try:
    WebDriverWait(browser, 10).until(EC.element_to_be_clickable(
        (By.XPATH, '//*[@id="PromoteSignUpPopUp"]/div[2]/i'))).click()
    browser.find_element(By.XPATH, "//body").click()
    browser.find_element(By.XPATH, '//*[@id="PromoteSignUpPopUp"]/div[2]/i').click()
except TimeoutException:
    pass

browser.maximize_window()
browser.find_element(By.CLASS_NAME, "DatePickerWrapper_icon__Qw9f8").click()

# Fill in the from/to date inputs of the history picker.
dates = browser.find_elements(By.CSS_SELECTOR, ".NativeDateInput_root__wbgyP > input")
dates[0].clear()
dates[0].send_keys('2022/10/01')
dates[1].clear()
dates[1].send_keys('2022/12/18')
#browser.implicitly_wait(60)
browser.find_element(By.CLASS_NAME, "HistoryDatePicker_apply-button__fPr_G").click()

# Wait until at least 30 data rows are rendered before declaring success.
try:
    WebDriverWait(browser, 60).until(lambda X: len(X.find_elements(
        By.CSS_SELECTOR, ".datatable_row__qHMpQ > .datatable_cell__0y0eu > time")) >= 30)
    print("Page is Loaded")
except TimeoutException:
    print("Loading took too much time!")
on windows
def get_crawling_browser() :
    """Return a default (non-headless, no extra options) Chrome driver."""
    from selenium import webdriver
    from selenium.webdriver.chrome.service import Service
    from webdriver_manager.chrome import ChromeDriverManager

    service = Service(ChromeDriverManager().install())
    return webdriver.Chrome(service=service)
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.common.by import By
from selenium.common.exceptions import TimeoutException

url = "https://www.investing.com/commodities/copper-historical-data?cid=959211"
browser = get_crawling_browser()
browser.get(url)

# Dismiss the sign-up popup if it appears.  XPath attribute syntax is '@id',
# not '#id' — the original '#id' is invalid XPath and can never match.
try:
    WebDriverWait(browser, 10).until(EC.element_to_be_clickable(
        (By.XPATH, '//*[@id="PromoteSignUpPopUp"]/div[2]/i'))).click()
    browser.find_element(By.XPATH, "//body").click()
    browser.find_element(By.XPATH, '//*[@id="PromoteSignUpPopUp"]/div[2]/i').click()
except TimeoutException:
    pass

browser.maximize_window()
browser.find_element(By.CLASS_NAME, "DatePickerWrapper_icon__Qw9f8").click()

# Fill in the from/to date inputs of the history picker.
dates = browser.find_elements(By.CSS_SELECTOR, ".NativeDateInput_root__wbgyP > input")
dates[0].clear()
dates[0].send_keys('2022/10/01')
dates[1].clear()
dates[1].send_keys('2022/12/18')
#browser.implicitly_wait(60)
browser.find_element(By.CLASS_NAME, "HistoryDatePicker_apply-button__fPr_G").click()

# Wait until at least 30 data rows are rendered before declaring success.
try:
    WebDriverWait(browser, 60).until(lambda X: len(X.find_elements(
        By.CSS_SELECTOR, ".datatable_row__qHMpQ > .datatable_cell__0y0eu > time")) >= 30)
    print("Page is Loaded")
except TimeoutException:
    print("Loading took too much time!")
selenium version : 4.7.2
The two scripts differ only in their Chrome options, but the results are different.
I don't know how to solve it. Please help me!
run the scripts with options on linux server, WebDriverWait is not working
printed "Loading took too much time!"
run the scripts without options on windows, WebDriverWait is working
printed "Page is Loaded"

Python for loop "stale element reference: element is not attached to the page document"

python for loop "stale element reference: element is not attached to the page document"
here is my code
from selenium import webdriver
from selenium.webdriver.chrome.service import Service
from selenium.webdriver.chrome.options import Options
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.common.by import By
from selenium.webdriver.support import expected_conditions as EC
import time

options = Options()
options.add_argument("start-maximized")

webdriver_service = Service('F:\\work\\chromedriver_win32\\chromedriver.exe')
driver = webdriver.Chrome(options=options, service=webdriver_service)
wait = WebDriverWait(driver, 10)

Content = ["listtext1", "listtext2", "listtext3", "listtext4"]

# Reload the page for each snippet and type it into the editor.
for snippet in Content:
    time.sleep(7)
    url = "https://quillbot.com/"
    driver.get(url)
    Text_block = driver.find_element(By.ID, "inputText")
    Text_block.send_keys(snippet)  # change: fetch from Search_list
    time.sleep(2)
I made a few fixes to your code:
Inserted user_agent (it will come in handy with other experiments on selenium)
inserted a web driver manager to run selenium on all operating systems.
You have to accept cookies before you can interact with the page.
removed unnecessary sleep.
This is the result, code tested:
from selenium import webdriver
from selenium.webdriver.chrome.service import Service
from selenium.webdriver.chrome.options import Options
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.common.by import By
from webdriver_manager.chrome import ChromeDriverManager
from selenium.webdriver.support import expected_conditions as EC
from selenium.common.exceptions import TimeoutException

options = Options()
options.add_argument("start-maximized")
# add user_agent
user_agent = "user-agent=[Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/102.0.5005.63 Safari/537.36]"
options.add_argument(user_agent)

# webdriver-manager fetches a matching chromedriver on any operating system.
driver = webdriver.Chrome(service=Service(ChromeDriverManager().install()), options=options)

browser_delay = 2  # tune to your connection and device speed

Content = ["listtext1", "listtext2", "listtext3", "listtext4"]
for snippet in Content:
    url = "https://quillbot.com/"
    driver.get(url)
    # Accept the OneTrust cookie banner when present; otherwise move on.
    try:
        cookie_btn = WebDriverWait(driver, browser_delay).until(
            EC.element_to_be_clickable((By.ID, 'onetrust-accept-btn-handler')))
        cookie_btn.click()
    except TimeoutException:
        pass  # it's a timeout or element just clicked
    Text_block = driver.find_element(By.ID, "inputText")
    Text_block.send_keys(snippet)  # Change (fetch from Search_list)
Here is one way of sending those texts into that textbox, based on your existing code and how you defined waits:
[....]
content = ["listtext1", "listtext2", "listtext3", "listtext4"]

# NOTE(review): `wait` and `t` come from the elided setup ("[....]" above);
# presumably `wait` is a WebDriverWait and `t` is the time module — confirm
# against the omitted lines.
for snippet in content:
    driver.get('https://quillbot.com/')
    # Accept the cookie banner when it shows; on later iterations the cookie
    # is usually already stored, so this simply times out.
    try:
        wait.until(EC.element_to_be_clickable((By.ID, "onetrust-accept-btn-handler"))).click()
        print('accepted cookies')
    except Exception:
        print('no cookie button!')
    text_block = wait.until(EC.element_to_be_clickable((By.ID, "inputText")))
    text_block.send_keys(snippet)
    print('sent', snippet)
    t.sleep(5)
See Selenium documentation at https://www.selenium.dev/documentation/

Python Selenium: How to avoid being detected/ blocked?

The error message: I try to extract data from the website below, but when Selenium clicks the "search" button (the last step of the code), an error is returned — it seems to be blocked by the server. (Everything is fine when I access the website manually, but with the automated Chrome browser the attached error message appears when I click the "search" button.) How should I get around this?
from selenium import webdriver
from selenium.webdriver.chrome.service import Service
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait, Select
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.chrome.options import Options
import time

ser = Service(r"C:\Users\shekc\Documents\chromedriver.exe")

options = webdriver.ChromeOptions()
# Hide the usual "controlled by automated software" fingerprints.
options.add_experimental_option("excludeSwitches", ["enable-automation"])
options.add_experimental_option('useAutomationExtension', False)
# The original flag began with an en dash ('–Referer=...'), which Chrome
# ignores; command-line switches use a double hyphen.
options.add_argument("--Referer=https://www.dahsing.com/jsp/fundPlatform/index_e.jsp")
options.add_argument("user-agent=Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/83.0.4103.106 Safari/537.36")
driver = webdriver.Chrome(options=options, service=ser)

url = "https://www.dahsing.com/jsp/fundPlatform/risk_warning_e.jsp"
driver.get(url)
time.sleep(3)

# click "Agree"
driver.find_element(By.LINK_TEXT, "Agree").click()
driver.switch_to.default_content()
driver.switch_to.frame(1)

# XPath attribute selectors need '@id'; the original '#id' is invalid XPath.
# Also bind Select objects to lowercase names instead of shadowing the
# Select class itself (the original rebound `Select` to an instance and had
# to re-import the class before every use).
fund_house = Select(driver.find_element(By.XPATH, '//*[@id="mainContent_ddlFundHouse"]'))
FH_No = len(fund_house.options)  # number of fund houses in the dropdown

# select "all per page"
page_size = Select(driver.find_element(By.XPATH, '//*[@id="mainContent_ddlPageNumber"]'))
page_size.select_by_index(len(page_size.options) - 1)

fund_house = Select(driver.find_element(By.XPATH, '//*[@id="mainContent_ddlFundHouse"]'))
fund_house.select_by_index(1)
FH_name = fund_house.first_selected_option.text

# click "Search"
driver.find_element(By.LINK_TEXT, "Search").click()

Selenium Error Website Access Denied Webdriver WebdriverWait

I get this weird access denied message when I try to login to the "offspring.co.uk" website. This denial message pops up right after clicking the login button. I heard something about the Akamai Bot-Protection on this website. Maybe this protection detects my automation. Does anyone know how to prevent this website access denial?
from selenium import webdriver
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver import ActionChains
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.common.by import By
import time


def call_Website():
    """Log in to offspring.co.uk with a slow, human-like action chain."""
    # configurations
    profile = webdriver.FirefoxProfile()
    profile.accept_untrusted_certs = True
    profile.set_preference("general.useragent.override","Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:92.0) Gecko/20100101 Firefox/92.0")
    firefox_capabilities = webdriver.DesiredCapabilities.FIREFOX
    firefox_capabilities['marionette'] = True

    # start webdriver etc
    browser = webdriver.Firefox(firefox_profile=profile, desired_capabilities=firefox_capabilities)
    wait = WebDriverWait(browser, 20)
    action = ActionChains(browser)

    # Connectivity/proxy smoke test.  Bail out on failure instead of falling
    # through and using the closed browser (the original bare `except:` did
    # exactly that, masking the real error with a second crash).
    try:
        browser.get("https://httpbin.org/ip")
    except Exception:
        browser.close()
        print("proxy was not working")
        return

    time.sleep(2)
    browser.get('https://www.offspring.co.uk/view/secured/content/login')
    time.sleep(2)

    # accept cookie — find_element_by_css_selector was removed in Selenium 4;
    # use the By locator API instead.
    browser.find_element(By.CSS_SELECTOR, "#onetrust-accept-btn-handler").click()
    time.sleep(1)

    # choose currency
    browser.find_element(By.CSS_SELECTOR, "li.EUR:nth-child(2)").click()

    # fills out username (ActionChains calls only queue actions; everything
    # runs in order at the final .perform() below)
    username_form = browser.find_element(By.CSS_SELECTOR, '#user')
    action.move_to_element(username_form).pause(1).click().pause(0.5).send_keys('username')

    # fills out password
    password_form = browser.find_element(By.CSS_SELECTOR, '#loginpassword')
    action.pause(2).move_to_element(password_form).pause(1).click().pause(0.5).send_keys('password')

    # clicks on login
    Login_Btn = wait.until(EC.element_to_be_clickable((By.CSS_SELECTOR, '#loginButton')))
    action.move_to_element(Login_Btn).pause(1).click().perform()


if __name__ == "__main__":
    call_Website()
And here is the "Access Denied"-Page.

.click() button doing nothing in Selenium python

I'm trying to build a bot for Nike.com.
I'm rotating user agent, automation blink is hidden and have done everything needed (Even using VPN).
URL : https://www.nike.com/ca/t/air-force-1-pixel-shoe-txmVNP/CK6649-100
Size:2
from selenium import webdriver
import time
from selenium.webdriver.common.keys import Keys
import requests
import sys
from selenium.webdriver.chrome.options import Options
from fake_useragent import UserAgent
from selenium.webdriver.common.by import By
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.support.ui import WebDriverWait

options = webdriver.ChromeOptions()
# Strip the obvious automation fingerprints before starting Chrome.
options.add_experimental_option("excludeSwitches", ["enable-automation"])
options.add_experimental_option('useAutomationExtension', False)

ua = UserAgent()
userAgent = ua.random
options.add_argument("--log-level=3")
options.add_argument("--disable-blink-features=AutomationControlled")
options.add_argument(f'user-agent={userAgent}')

driver = webdriver.Chrome(options=options)
driver.minimize_window()
# Mask navigator.webdriver and pin a fixed UA at the CDP level.
driver.execute_script("Object.defineProperty(navigator, 'webdriver', {get: () => undefined})")
driver.execute_cdp_cmd('Network.setUserAgentOverride', {"userAgent": 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/83.0.4103.53 Safari/537.36'})

URL = input("Enter URL : ")
SIZE = input("ENter Size : ")
driver.maximize_window()
driver.get(URL)
time.sleep(5)
print("Starting again")

# Select the requested size and add the shoe to the bag.  Two fixes:
# 1. find_element_by_xpath was removed in Selenium 4 — use find_element(By.XPATH, ...).
# 2. XPath attribute selectors need '@id'; '#id' is invalid XPath syntax.
while True:
    s_size = driver.find_element(By.XPATH, '//*[@id="buyTools"]/div[1]/fieldset/div/div[' + SIZE + ']/label')
    s_size.click()
    time.sleep(1)
    print('here')
    time.sleep(5)
    add_to_bag = driver.find_element(By.XPATH, '//*[@id="floating-atc-wrapper"]/div/button[1]')
    time.sleep(3)
    add_to_bag.click()
    print('1')
    break

time.sleep(1)

# Poll until the cart icon is present, then open the cart.  Only element
# lookup/click errors are expected here, so catch Exception (the original
# bare `except:` also swallowed KeyboardInterrupt).
while True:
    try:
        move_to_cart = driver.find_element(By.XPATH, '//*[@id="nav-cart"]/a/div/span')
        move_to_cart.click()
        break
    except Exception:
        time.sleep(1)
This code selects the required size and also clicks the Add to Bag button (the click animation shows on the website), but after that nothing happens — even if I then click Add to Bag manually or reload the page, nothing happens.
The only way out is to open the link in a new tab and do everything manually.
Can anyone give me a workaround for this?
I think Selenium is doing its work but is getting blocked by the website.

Categories