Python script gives Syntax error while running it from Jenkins - python

I have a Python script that updates a Google sheet. The script works fine when I execute it locally and updates the Google sheet as expected; I want to execute it automatically every 3 hours. We are using Jenkins for job scheduling, and when I try to execute it from Jenkins it shows a syntax error.
The error and the script are shown below. Any suggestions on how to resolve it?
Started by user admin_123
Running as SYSTEM
[EnvInject] - Loading node environment variables.
Building in workspace /var/lib/jenkins/jobs/update_oos_gs/workspace
[workspace] $ /bin/sh -xe /tmp/jenkins6318169151390457385.sh
+ export PYTHONPATH=/home/etl/bi/
+ cd /home/etl/bi/crm
+ python3 -u oos_gs_update.py
File "oos_gs_update.py", line 22
r = f"{col_name}{header}:{col_name}{len(col)+header}"
^
SyntaxError: invalid syntax
Build step 'Execute shell' marked build as failure
Finished: FAILURE
Below is my Python script,
import os
import sys
import datetime
import psycopg2
import gspread
from oauth2client.service_account import ServiceAccountCredentials
from time import sleep
from utils.config import Configuration as Config
from utils.postgres_helper import get_connection
from utils.utils import get_global_config
# Make the shared ETL utilities importable when the script is run from cwd.
# NOTE(review): this runs after the `from utils...` imports above, so it only
# helps if PYTHONPATH already covers them -- the Jenkins job exports it.
sys.path.append('/home/etl/bi/')
# Section of the global config file holding the Sheets credentials path.
GSHEET_CONFIG_SECTION = 'gsheet'
# OAuth scopes required by gspread: spreadsheets feed plus Drive access.
SCOPE = ['https://spreadsheets.google.com/feeds', 'https://www.googleapis.com/auth/drive']
# Key of the target spreadsheet (redacted).
SHEET_KEY='1Mq7_********y5WtB1R-ZKfz6o'
def update_sheet(sheet, table, columns="ABC", header=4):
    """Write *table* (a sequence of row tuples) into *sheet* column-wise.

    Each table column is paired with a sheet column letter from *columns*
    and written starting at row *header*.  Note the requested range spans
    ``header .. len(column) + header``, i.e. one cell more than there are
    values; that trailing cell is sent with its value unset (original
    behaviour, preserved here).

    Args:
        sheet: gspread worksheet exposing ``range`` and ``update_cells``.
        table: iterable of equally sized row tuples (e.g. DB rows).
        columns: sheet column letters to fill, one per table column.
        header: first row number to write into.
    """
    pending = []
    for letter, values in zip(columns, list(zip(*table))):
        span = f"{letter}{header}:{letter}{len(values) + header}"
        cells = sheet.range(span)
        for cell, value in zip(cells, values):
            cell.value = value
        pending.extend(cells)
    # One batched write instead of a request per cell.
    sheet.update_cells(pending)
# ---- Pull out-of-stock rows from the warehouse ------------------------------
cnx_psql = get_connection(get_global_config(), 'pg_dwh')
print('DB connected')
psql_cursor = cnx_psql.cursor()

# Rows come back as (sku, product_name, oos-as-text) tuples, pre-sorted.
METADATA_QUERY = '''SELECT sku,product_name,CAST(oos as TEXT) as oos FROM staging.oos_details order by oos DESC;'''
psql_cursor.execute(METADATA_QUERY)
cell_values = psql_cursor.fetchall()

# ---- Authorise against Google Sheets and push the data ----------------------
config = get_global_config()
secret_file_path = os.path.join(
    os.path.expanduser('~'),
    config.get(GSHEET_CONFIG_SECTION, 'service_account_credentials'),
)
creds = ServiceAccountCredentials.from_json_keyfile_name(secret_file_path, scopes=SCOPE)
client = gspread.authorize(creds)
sheet = client.open_by_key(SHEET_KEY).sheet1

#Function Call
update_sheet(sheet, cell_values)

psql_cursor.close()
cnx_psql.close()

Python 3.6 introduced the f'string{interpolation}' format described in PEP 498. The error Jenkins reports about line 22 points at exactly that newer string-formatting syntax, which means the python3 interpreter on the Jenkins node is older than 3.6 (even though your local one is not). Either upgrade the interpreter Jenkins uses, or change the line as follows.
r = f"{col_name}{header}:{col_name}{len(col)+header}"
to
r = "{}{}:{}{}".format(col_name, header, col_name, len(col) + header)

Try this:
r = col_name + str(header) + ':' + col_name + str(len(col)+header)
Or you can use other formatting method as well
Or upgrade python to latest version

Related

Python pytds - Getting output from Microsoft SQL server commands

I'm attempting to write a quick Python program that executes the "sp_updatestats" command on a Microsoft SQL server. I think I have the actual command working, but I don't know for sure because I don't think I have the logging working properly. I want to get the messages returned from the server from running the command and write them to a log file, but I just can't suss it out.
If I use cur.fetchall(), I get: "Previous Statement didn't produce any results"
If I use cur.messages, I get: []
If I use cur.return_value, I get: 0
I'm expecting a return like I would get if I ran "sqlcmd" from a command prompt with a "-o c:\log.txt" switch.
I'm using python 3.9
Here's my (sanitized) code:
import pytds
import datetime
from pytds.login import NtlmAuth
import pytds.extensions
import time
# Build a dated log-file name, e.g. "log2024-01-31.txt".
current_date = datetime.date.today()
date_string = str(current_date)
extention = '.txt'
logname = 'log' + date_string + extention

# Connection and cursor are both context managers, so they are closed
# automatically when the blocks exit (the explicit cur.close()/quit() of
# the original were redundant and have been dropped).
with pytds.connect('server', 'database', 'username', 'password') as conn:
    with conn.cursor() as cur:
        cur.execute("sp_updatestats")
        # BUG FIX: every candidate assignment below was commented out, so
        # ``str(results)`` raised NameError at runtime.  ``cur.messages``
        # collects the informational messages the server emits while the
        # procedure runs -- the closest match to sqlcmd's "-o" output.
        # (Read it *after* execute; presumably it must be consumed before
        # the cursor is closed -- TODO confirm against pytds docs.)
        #results = cur.fetchall()
        #results = cur.get_proc_return_status()
        #results = cur.return_value
        results = cur.messages
        resultstr = str(results)
        with open(logname, 'w') as f:
            f.write(resultstr)

Schedule Python script with crontab doesn't work

Trying to schedule a python code in crontab, but it doesn't work.
How Can I understand the reason why?
I've already added cron and Terminal to Full Disk Access, so that shouldn't be the problem.
everything works fine when I run in terminal command
python /users/myuser/slots_update.py
Crontab entry which doesn't work:
45 12 * * * /usr/bin/python /users/myuser/slots_update.py
Python script (I put a different SQL query inside to make it simpler):
#!/usr/bin/env python
# coding: utf-8
# In[2]:
# importing the required libraries
import gspread
import pandas as pd
from oauth2client.service_account import ServiceAccountCredentials
# In[50]:
# define the scope
scope = ['https://spreadsheets.google.com/feeds','https://www.googleapis.com/auth/drive']
# add credentials to the account
# NOTE(review): 'key.json' is a *relative* path; cron does not start in this
# script's directory, so the lookup fails there -- use an absolute path.
creds = ServiceAccountCredentials.from_json_keyfile_name('key.json', scope)
# authorize the clientsheet
client = gspread.authorize(creds)
# In[51]:
# get the instance of the Spreadsheet
sheet = client.open('EGE_slots1')
# get the first sheet of the Spreadsheet
sheet_instance = sheet.get_worksheet(0)
# In[ ]:
# The two bare expressions below are notebook leftovers ("In[..]" markers):
# as a script they compute a value and discard it, with no effect.
sheet_instance.col_count
# In[52]:
sheet_instance.cell(col=1,row=1)
# In[12]:
# Duplicate pandas import kept from the notebook export.
import pandas as pd
import numpy as np
from sqlalchemy import create_engine
from datetime import datetime as dt
# In[13]:
# NOTE(review): empty postgresql:// URL -- presumably host/credentials come
# from the environment; confirm cron exports the same variables as the shell.
connection = create_engine('postgresql://')
# In[47]:
slots = pd.read_sql("""
select * from teachers
""",connection)
# In[53]:
# Push the query result into the sheet starting at cell A2.
sheet_instance.update('A2',slots.values.tolist())
# In[ ]:
Use full path to the json file.
creds = ServiceAccountCredentials.from_json_keyfile_name('key.json', scope) --> creds = ServiceAccountCredentials.from_json_keyfile_name('/a/b/c/key.json', scope)

Running Python Script consistently from VBA

I have VBA code calling a Python script that works on my PC.
The Python code uses SQL Alchemy to create an Engine, connects to a database, binds the session and retrieve data using a SELECT * FROM basic query. After it reads it, the data is sent to an xlsx file.
Python code with some minor changes for security reasons:
# import dependencies
from sqlalchemy import create_engine
from sqlalchemy.orm import Session
import pandas as pd
import os
import sys  # BUG FIX: sys.exit() was called below without importing sys
import cx_Oracle

# Creating engine connection.  NOTE(review): '#' in the URL is presumably a
# sanitised '@' separator -- confirm before reuse.  create_engine is lazy
# (no connection opens until the first query), so initialising the Oracle
# client below still happens in time.
engine = create_engine('oracle://user:pw#IP:Port/Schema')
# Binding session
session = Session(bind=engine)

# Point cx_Oracle at the 64-bit instant client: the machine-wide client is
# 32-bit (PowerBuilder constraint), so we re-route explicitly.
lib_dir = r"Full_Path\instantclient_19_9"
try:
    cx_Oracle.init_oracle_client(lib_dir=lib_dir)
except Exception as err:
    print("Error connecting: cx_Oracle.init_oracle_client()")
    print(err)
    sys.exit(1)

# Run the query and dump the result to an Excel workbook.
data = pd.read_sql("SELECT * FROM Schema.V_FLUJOS_SOLICITADOS", engine)
path = r'Another_Full_Path'
data.to_excel(os.path.join(path, r'Flow extraction.xlsx'), index=False)
For this code, I need to tell Python where to find the Oracle client of 64 bits because my company has its database connected to a PowerBuilder program that can only use 32 bit, so I just can't change the installation, that's why I re-route.
VBA code:
' Launch the V_FLUJOS_SOLICITADOS.py extraction with the Anaconda
' interpreter that matches the current Windows user.
Sub runScript()
Application.ScreenUpdating = False
' Workbook name captured here but not used below (kept as-is).
teoricos = ThisWorkbook.Name
Dim Ret_Val
user = Environ("UserName")
' Per-user interpreter path.  NOTE(review): a user not listed here leaves
' python_route empty, so the Shell call below fails silently-fast -- a
' likely cause of "the CMD window flashes and closes" on other machines.
Select Case user
Case "USER1"
python_route = "C:\Users\USER1\Anaconda3\python.exe"
Case "USER2"
python_route = "C:\Users\USER2\Anaconda3\python.exe"
End Select
' Script path wrapped in literal quotes to survive spaces.
args = """Full_path\V_FLUJOS_SOLICITADOS.py"""
' Shell returns the task ID; 0 means the process could not be started.
Ret_Val = Shell(python_route & " " & args, vbNormalFocus)
If Ret_Val = 0 Then
MsgBox "Couldn't run python script!", vbOKOnly
End If
End Sub
The code works on my PC (User 1), but it doesn't on my partner's PC.
What I've done so far:
Installed cx_Oracle on her machine
Tested the whole code on Jupyter Notebook of her machine and it worked
Tested full access to all paths
Activated the Microsoft Shell Reference in VBA
The VBA code on her PC runs, but it just opens the command window really fast and it closes.
On my PC it takes from 3 to 5 seconds to do the whole thing, so I can see the CMD for just a bit there (and also check that the file updated which is the most clear indicator that it worked).

Client SQL in Python and crontab

I have some script in Python which are executed by crontab on Raspbian.
I use the MySQLdb library for request in the local network.
All scripts work fine if I launch them directly, from Python IDLE, or in the console.
But when they are launched by cron, those that only execute "INSERT" requests work, while those that execute a "SELECT" request don't.
I haven't found a clear solution, but it seems that crontab doesn't use the same SQL client configuration as the user.
Maybe I have to change the working directory before making the requests? (Is it looking for a "./my.cnf"?)
tested with library MySQLdb and PyMySQL
#! /usr/bin/python
# -*- coding: utf-8 -*-
# importations
import os
import time
import sys
import pymysql as sql # or MySQLdb as...
from os import path as os_path
#-----------------------------------------------------------------#
# constants : use your own values / utilisez vos propres valeurs #
#-----------------------------------------------------------------#
# NOTE(review): credentials are hard-coded; a ~/.my.cnf or environment
# variables would let cron and interactive runs share one configuration.
PATH_THERM = "/home/pi/Documents/" #path to this script
DB_SERVER ='192.168.0.59' # MySQL : IP server
DB_USER='user' # MySQL : user
DB_PWD='password' # MySQL : password
DB_BASE='capteurs' # MySQL : database name
def log(texte):
    """Append *texte* to log_test.txt, one timestamped line per call.

    Entries are written as "\n<dd-mm-YYYY HH:MM:SS>\t<texte>", UTF-8
    encoded (this script targets Python 2, where encode() yields a str
    that a text-mode file accepts).
    """
    stamp = time.strftime('%d-%m-%Y %H:%M:%S')
    entry = ("\n" + stamp + "\t" + texte).encode('utf-8')
    handle = open('log_test.txt','a')
    handle.write(entry)
    handle.close()
def query_temp():
datebuff = time.strftime('%d-%m-%Y')
db = sql.connect(DB_SERVER, DB_USER, DB_PWD, DB_BASE)
cursor = db.cursor()
cursor.execute("""SELECT sonde2,date FROM `PiTemp` ORDER BY date DESC LIMIT 0, 1""")
rows = cursor.fetchall()
print datebuff, u" : Dernière température de l'eau :", rows[0][0], u"°C"
log(u"lecture température SQL - ok")
a = rows[0][0]
b = rows[0][1]
return (a, b)
#----------------------------------------------------------#
# principal code #
#----------------------------------------------------------#
# chdir into the script's own directory so the relative log file resolves
# the same under cron (which starts in $HOME) as in an interactive shell.
PATH=os_path.abspath(os_path.split(__file__)[0])
os.chdir(PATH)
log('start')
log(PATH)
# Latest water temperature: query_temp() returns a (temp, date) tuple.
txt = str(query_temp()[0])
log(txt)
crontab :
*/1 * * * * python /home/pi/Documents/180623_test.py
Found the reason for the failure: print statements executed by cron don't encode/decode the same way as in the console (!?).
... print(u"Alerte, tous à poil !") raises a UnicodeEncodeError!
Strange — it looks like cron's environment doesn't use the same encoding as the user's console.
Solved by avoiding non-ASCII characters in print, or by wrapping the print in try/except.

Using Python to ssh with no modules

I'm in a pickle with writing a script that can SSH into device, run a command and parse that data out to a file. I've written this using Pyparsing and Exscript then I found out that the device I'm going to be using this on is using Python 2.4.4 and Debian 4.1.1 so the modules will not work on this. Now I am back to the drawing board trying to find out how to do this with NO modules. Anyone have any reference or point me in the right direction for this? Thank you in advance.
Here is my code:
from Exscript.util.interact import read_login
from Exscript.protocols import SSH2
import uuid
from pyparsing import *
import re
import yaml
# --- Collect the device output over SSH -------------------------------------
account = read_login()
conn = SSH2()
conn.connect('172.0.0.1')
conn.login(account)
conn.execute('foobar')
data = conn.response
conn.send('exit\r')
conn.close()

###### PARSER ######
date_regex = re.compile(r'\d\d-\d\d-\d\d')
time_regex = re.compile(r'\d\d:\d\d:\d\d')

# Per-category parsing rules: each entry pairs a section heading with the
# pyparsing expression matching its key/value lines.
pairs = [
    {'category': 'General Information',
     'kv': Group(Word(alphanums) + Word(alphanums))},
    {'category': 'Last Reset:',
     'kv': Group(Word(alphas, max=1) + Word(alphas)) + Literal(':').suppress()
           + Group(Regex(date_regex) + Regex(time_regex)
                   + Optional(SkipTo(LineEnd())))},
]

# One grammar fragment per known category...
categories = [Word("# ").suppress() + entry['category']
              + OneOrMore(Group(entry['kv']))
              for entry in pairs]

# ...plus a catch-all for sections without specific rules.
categories.append(Word("#").suppress() + Optional(SkipTo(LineEnd())) +
                  Group(OneOrMore(Combine(Word(alphanums) + SkipTo(LineEnd())))))

# OR all the category grammars together.
combined = categories[0]
for alternative in categories[1:]:
    combined |= alternative
configDef = OneOrMore(combined)

# Prompt/banner tokens the parser should skip entirely.
for noise in ["show all", "SSH>", "Active System Configuration"]:
    configDef.ignore(Literal(noise).suppress())

result = configDef.parseString(data)
for e in result:
    print(e)
    # NOTE(review): reconstructed from a flattened paste -- mode 'w'
    # truncates on every iteration, so only the last element survives.
    with open('/Users/MyMac/development/data.yml', 'w') as outfile:
        outfile.write( yaml.dump(e))
UPDATE
I have followed the advice below and now have Pexpect installed and found a older version of Python-Pyparsing that I have also installed. So I'm on my way again to getting my scripts to work with modules. Thanks!
Looks like this is already solved, but...
As long as your SSH is configured for this host (or the host doesn't require you to log-in), you should be able to do the following.
import os

# Run "foobar" on the remote host over SSH and redirect its stdout into
# ~/data.txt on the local machine (relies on key-based auth / no password
# prompt, as the surrounding answer states).
os.system("ssh 172.0.0.1 foobar > ~/data.txt")

""" Commence processing """
# BUG FIX: the original opened "data.txt" relative to the current working
# directory, but the shell redirection above wrote to the home directory;
# resolve "~" explicitly so both refer to the same file.  A context manager
# also guarantees the handle is closed.
with open(os.path.expanduser("~/data.txt"), mode='r') as data_file:
    data = data_file.read()
# and so on and so on
You can also use the subprocess library, but os.system for these types of tasks is the simplest IMO.

Categories