I'm trying to create a directory with os.mkdir - python

import pathlib
import subprocess
import argparse
import os
from _datetime import datetime


def get_unique_run_id():
    if os.environ.get("BUILD_NUMBER"):
        unique_run_id = os.environ.get("BUILD_NUMBER")
    elif os.environ.get("CUSTOM_BUILD_NUMBER"):
        unique_run_id = os.environ.get("CUSTOM_BUILD_NUMBER")
    else:
        unique_run_id = datetime.now().strftime('%Y%M%D%H%M%S')
    os.environ['UNIQUE_RUN_ID'] = unique_run_id
    return unique_run_id


def create_output_directory(prefix='results_'):
    global run_id
    if not run_id:
        raise Exception("Variable 'run_id' is not set. Unable to create output directory")
    curr_file_path = pathlib.Path(__file__).parent.absolute()
    dir_to_create = os.path.join(curr_file_path, prefix + str(run_id))
    os.mkdir(dir_to_create)
    print(f"Created output directory: {dir_to_create}")
    return dir_to_create


if __name__ == "__main__":
    run_id = get_unique_run_id()
    output_dir = create_output_directory()
    json_out_dir = os.path.join(output_dir, 'json_report_out.json')
    junit_out_dir = os.path.join(output_dir, 'junit_report_out')
    # import pdb; pdb.set_trace()
    parser = argparse.ArgumentParser()
    parser.add_argument('--test_directory', required=False, help='Specify the location of the test file')
    parser.add_argument('--behave_options', type=str, required=False, help='String of behave options')
    args = parser.parse_args()
    test_directory = '' if not args.test_directory else args.test_directory
    behave_options = '' if not args.behave_options else args.behave_options
    command = f'behave -k--no-capture -f json.pretty -o {json_out_dir} ' \
              f'--junit --junit-directory {junit_out_dir}' \
              f'{behave_options} ' \
              f'{test_directory}'
    print(f"Running command : {command}")
    rs = subprocess.run(command, shell=True)
When I try to run this I get the following error:
FileNotFoundError: [WinError 3] The system cannot find the path specified: 'E:\Projects\results_20204710/11/20194751'. Please help me find a solution for this.
I thought it could be an installer error, so I tried both the 32-bit and 64-bit Python installers. I'm totally lost here.

For a single directory:
    os.mkdir(...)
For nested directories:
    os.makedirs(...)
You can also check whether a directory exists:
    os.path.exists(...)
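A minimal sketch of the difference (the directory names below are just examples):

import os

# os.mkdir only creates the final path component; the parent must already exist
os.mkdir('results_123')

# os.makedirs also creates any missing intermediate directories
os.makedirs('results_2020/10/11', exist_ok=True)

# check first if you want to avoid a FileExistsError
if not os.path.exists('results_456'):
    os.mkdir('results_456')

Note that the path in your traceback contains slashes because %D in strftime expands to MM/DD/YY, so os.mkdir is effectively being asked to create nested directories whose parents do not exist.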

Related

Python tar a directory and symmetric encrypt with gpg

At this point the script works great for a single file. When a directory is given, it uses tar to create a single file, which works well; the tar file is then gpg-encrypted with the password provided. The gpg step also works. The problem is that when you decrypt the gpg file, the tar is corrupted every time. I'm trying to find what I'm doing wrong here. Please help.
#!/usr/bin/env python3
# Takes file in does symmetric encryption with the password you provide
# then adds it to a running IPFS(ipfs.io) instance.
#
import os
import argparse
import gnupg
import ipfsapi
import tarfile

# Parse command arguments
parser = argparse.ArgumentParser(description='Encrypt file/directory and add it to IPFS')
parser.add_argument('-i','--input', help='File.txt or Directory', required=True)
parser.add_argument('-p','--password', help='Password to encrypt with', required=True)
args = parser.parse_args()

# Set GPG Home directory
gpg = gnupg.GPG(homedir='')
# Set GPG Encoding
gpg.encoding = 'utf-8'

# Get dataToEncrypt full path
dataToEncrypt = (os.path.abspath(args.input))
# Setup tar filename to end with .zip
tarFile = ("{}.tar".format(dataToEncrypt))
# Setup encrypted filename to end with .gpg
encryptedFile = ("{}.tar.gpg".format(dataToEncrypt))

# Tell module where IPFS instance is located
api = ipfsapi.connect('127.0.0.1', 5001)


def dataTar():
    if os.path.isfile(dataToEncrypt):
        return
    else:
        #return
        with tarfile.open(tarFile, 'w|') as tar:
            tar.add(dataToEncrypt)
            tar.close()


def encryptFile():
    passphrase = (args.password)
    if os.path.isfile(dataToEncrypt):
        with open(dataToEncrypt, 'rb') as f:
            status = gpg.encrypt(f,
                                 encrypt=False,
                                 symmetric='AES256',
                                 passphrase=passphrase,
                                 armor=False,
                                 output=dataToEncrypt + ".gpg")
    else:
        with open(tarFile, 'rb') as f:
            status = gpg.encrypt(f,
                                 encrypt=False,
                                 symmetric='AES256',
                                 passphrase=passphrase,
                                 armor=False,
                                 output=dataToEncrypt + ".tar.gpg")
    print('ok: ', status.ok)
    print('status: ', status.status)
    print('stderr: ', status.stderr)


def ipfsFile(encryptedFile):
    # Add encrypted file to IPFS
    ipfsLoadedFile = api.add(encryptedFile, wrap_with_directory=True)
    # Return Hash of new IPFS File
    fullHash = (ipfsLoadedFile[1])
    ipfsHash = fullHash['Hash']
    return(ipfsHash)


def delEncryptedFile(encryptedFile):
    try:
        os.remove(encryptedFile)
    except:
        print("Error: %s unable to find or delete file." % encryptedFile)


def main():
    dataTar()
    encryptFile()
    #ipfsFile(encryptedFile)
    #print ("File encrypted and added to IPFS with this hash " + ipfsFile(encryptedFile))
    #delEncryptedFile(encryptedFile)


if __name__ == "__main__":
    main()
The code looks fine. I just tried it with https://pypi.org/project/python-gnupg/ and it works. I had to fix the API calls to match that package, but I don't think that matters; just diff it to see the changes. I don't see any problem, except that you should be decrypting with gpg -d file.tar.gpg | tar xvf -.
#!/usr/bin/env python3
# Takes file in does symmetric encryption with the password you provide then
# adds it to a running IPFS (ipfs.io) instance.
import os
import argparse
import gnupg
import tarfile

parser = argparse.ArgumentParser(
    description='Encrypt file/directory and add it to IPFS')
parser.add_argument('-i','--input',
                    help='File.txt or Directory',
                    required=True)
parser.add_argument('-p','--password',
                    help='Password to encrypt with',
                    required=True)
args = parser.parse_args()

gpg = gnupg.GPG()
gpg.encoding = 'utf-8'

dataToEncrypt = (os.path.abspath(args.input))
tarFile = ("{}.tar".format(dataToEncrypt))
encryptedFile = ("{}.tar.gpg".format(dataToEncrypt))


def dataTar():
    if os.path.isfile(dataToEncrypt):
        return
    else:
        with tarfile.open(tarFile, 'w|') as tar:
            tar.add(dataToEncrypt)
            tar.close()


def encryptFile():
    passphrase = (args.password)
    if os.path.isfile(dataToEncrypt):
        with open(dataToEncrypt, 'rb') as f:
            status = gpg.encrypt(f.read(),
                                 recipients=None,
                                 symmetric='AES256',
                                 passphrase=passphrase,
                                 armor=False,
                                 output=dataToEncrypt + ".gpg")
    else:
        with open(tarFile, 'rb') as f:
            status = gpg.encrypt(f.read(),
                                 recipients=None,
                                 symmetric='AES256',
                                 passphrase=passphrase,
                                 armor=False,
                                 output=dataToEncrypt + ".tar.gpg")
    print('ok: ', status.ok)
    print('status: ', status.status)
    print('stderr: ', status.stderr)


def ipfsFile(encryptedFile):
    ipfsLoadedFile = api.add(encryptedFile, wrap_with_directory=True)
    fullHash = (ipfsLoadedFile[1])
    ipfsHash = fullHash['Hash']
    return(ipfsHash)


def delEncryptedFile(encryptedFile):
    try:
        os.remove(encryptedFile)
    except:
        print("Error: %s unable to find or delete file." % encryptedFile)


def main():
    dataTar()
    encryptFile()


if __name__ == "__main__":
    main()
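For completeness, a minimal decryption sketch using the same python-gnupg package (the file names and passphrase are only examples; gpg -d file.tar.gpg | tar xvf - on the command line does the same thing):

import tarfile
import gnupg

gpg = gnupg.GPG()

# Decrypt the .tar.gpg back into a plain .tar file
with open('mydir.tar.gpg', 'rb') as f:
    gpg.decrypt_file(f, passphrase='my-password', output='mydir.tar')

# Then unpack the recovered tar archive
with tarfile.open('mydir.tar') as tar:
    tar.extractall()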

Python SQLite3 Back Up Parameters

I am new to SQLite and am wondering how to create a backup for a database. On a similar site I found a question on how to create a backup for a database, but I am having problems getting it to work.
This is the question: https://codereview.stackexchange.com/questions/78643/create-sqlite-backups
This is the code:
from __future__ import print_function
from __future__ import unicode_literals
import argparse
import sqlite3
import shutil
import time
import os

NO_OF_DAYS = 7


def sqlite3_backup(dbfile, backupdir):
    dbfile = Accounts.db
    backupdir = r"E:\TESTING\BackUp.db"
    """Create timestamped database copy"""
    if not os.path.isdir(backupdir):
        raise Exception("Backup directory does not exist: {}".format(backupdir))
    backup_file = r"E:\TESTING\BackUp.db" + time.strftime("-%Y%m%d-%H%M%S")
    connection = sqlite3.connect(r"E:\TESTING\Accounts.db")
    cursor = connection.cursor()
    # Lock database before making a backup
    cursor.execute('begin immediate')
    # Make new backup file
    shutil.copyfile(dbfile, backup_file)
    print("\nCreating {}...".format(backup_file))
    # Unlock database
    connection.rollback()


def clean_data(backup_dir):
    backup_dir = r"E:\TESTING\BackUp.db"
    print("\n------------------------------")
    print("Cleaning up old backups")
    for filename in os.listdir(backup_dir):
        backup_file = os.path.join(backup_dir, filename)
        if os.stat(backup_file).st_ctime < (time.time() - NO_OF_DAYS * 86400):
            if os.path.isfile(backup_file):
                os.remove(backup_file)
                print("Deleting {}...".format(ibackup_file))


def get_arguments():
    ## connection = sqlite3.connect(r"E:\TESTING\Accounts.db")
    ## cursor = connection.cursor()
    backup_dir = r"E:\TESTING\BackUp.db"
    db_file = sqlite3.connect(r"E:\TESTING\Accounts.db")
    """Parse the commandline arguments from the user"""
    parser = argparse.ArgumentParser()
    parser.add_argument('db_file',
                        help='the database file that needs backed up')
    parser.add_argument('backup_dir',
                        help='the directory where the backup'
                             'file should be saved')
    return parser.parse_args()


if __name__ == "__main__":
    #args = get_arguments()
    dbfile = Accounts
    backup_dir = "E:\TESTING"
    #sqlite3_backup(args.db_file, args.backup_dir)
    sqlite3_backup(db_file, backup_dir)
    clean_data(args.backup_dir)
    print("\nBackup update has been successful.")
When I run the code I get this error:
usage: backup.py [-h] db_file backup_dir
backup.py: error: the following arguments are required: db_file, backup_dir
I have subbed the db_file and the backup_dir into the code, but it still comes up with the same error.
You may need to change this part:
if __name__ == "__main__":
    #args = get_arguments()
    dbfile = <<YOUR DB FILE NAME >>
    backup_dir = <<YOUR BACK UP DIRECTORY PATH>>
    #sqlite3_backup(args.db_file, args.backup_dir)
    sqlite3_backup(db_file, backup_dir)
    # CHANGE clean_data(args.backup_dir)
    # TO:
    clean_data(backup_dir)
    print("\nBackup update has been successful.")
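A minimal sketch of what the __main__ block could look like once the command-line arguments are wired back in; this assumes you keep the original get_arguments() and remove the hard-coded paths inside sqlite3_backup and clean_data so the passed-in values are actually used:

if __name__ == "__main__":
    # Parse db_file and backup_dir from the command line, e.g.
    #   python backup.py E:\TESTING\Accounts.db E:\TESTING\backups
    args = get_arguments()
    sqlite3_backup(args.db_file, args.backup_dir)
    clean_data(args.backup_dir)
    print("\nBackup update has been successful.")

The argparse error in the question simply means parse_args() was called without those two positional arguments on the command line.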

Tableau workbook not displaying tabs from source file when migrated through Python Scipt

As part of my assignment, I have been asked to migrate a Tableau workbook through a Python script. The migration is successful, but I do not get the tabs from the source file in the output workbook after the migration. Below are the images that describe my situation.
Screenshot 1:
Screenshot 2:
'''
Created on Sep 12, 2016
#author: johnsoja
'''
import argparse
import Utils
import os

dev_server="http://ftc-wberfapp202"
dev_user="sys_dev_erfadmin"
dev_pwd="xyz"
stg_server="https://ftc-wberfapp501"
stg_user="sys_dev_erfadmin"
stg_pwd="xyz"
prod_server="https://PTC-WBERFAPP101"
prod_user="sys_pr_erf_admin"
prod_pwd="xyz"

scriptdir = "E:\\Program Files\\Tableau\\Tableau Server\\10.1\\bin\\";
tabcmdlogincmd = "tabcmd login -s {0} -t {1} -u {2} -p {3} --cookie";
downloadfilecmd = "tabcmd get \"{0}\" -f \"{1}\" ";
publishfilecmd ="tabcmd publish \"{0}\" -r \"{1}\" --overwrite ";
tabcmdlogoutcmd = "tabcmd logout";
tmpFileLocation = "E:\\Tableau_Deployment_Application_SVN\\approved\\";
tmpDatasourceLocation = "E:\\Tableau_Deployment_Application_SVN\\datasources\\";

'''
Read from config file the data sources that are to be migrated.
'''
def readDataSourceConfig(configfilename):
    print('reading config file name ({0})'.format(configfilename))
    utils = Utils.ConfigUtil()
    datasources = utils.ConfigSections(configfilename)
    dataSourcesDict = dict()
    dataSourceNames = []
    for datasource in datasources:
        print("Datasources to migrate ({0})".format(datasource))
        dictionary = utils.ConfigSectionMap(configfilename, datasource)
        dataSourcesDict[datasource] = dictionary
        datasourcenm, connectionnm = datasource.split(":")
        dataSourceNames.append(datasourcenm)
    return dataSourcesDict, dataSourceNames

'''
Read from config file the data sources that are to be migrated.
'''
def readWorkbookConfig(configfilename):
    print('reading config file name ({0})'.format(configfilename))
    workbookprops = []
    with open(configfilename) as file:
        for line in file:
            line = line.strip()
            workbookprops.append(line)
    print(workbookprops)
    return workbookprops

def getWorkbooksFromSourceSite(source_server,source_username,source_password,source_site,source_project,dataSourceNames,workbookprops):
    token, site_id, user_id = Utils.RestApiUtils.sign_in(source_site,source_server,source_username,source_password)
    workbooks = Utils.RestApiUtils.query_workbooks(token, site_id, source_project);
    datasources = Utils.RestApiUtils.query_datasources(token, site_id)
    return workbooks, datasources

def uploadWorkbooksToDestinationSite(dest_server,dest_username,dest_password,dest_site,dest_project,workbooks,datasources,dataSourceNames,workbookprops,update_ds):
    os.chdir(scriptdir)
    print("********************")
    print("Logging into the Tableau Server")
    tabcmdlogincmdfrmt = tabcmdlogincmd.format(dest_server,dest_site,dest_username,dest_password)
    tabcmdlogincmdfrmt = tabcmdlogincmdfrmt+" --no-certcheck "
    print(tabcmdlogincmdfrmt)
    Utils.Commons.runCommand(tabcmdlogincmdfrmt)
    for workbook in workbooks:
        workbook_name=workbook.get("contentUrl")
        workbook_full_name=workbook.get("name")
        if workbook_name in workbookprops:
            workbookfile = "/workbooks/"+workbook_name+".twbx"
            outputworkbookfile = tmpFileLocation+workbook_name+".twbx"
            publishfilecmdfrmt=publishfilecmd.format(outputworkbookfile, dest_project,dest_username,dest_password)
            print(publishfilecmdfrmt)
            Utils.Commons.runCommand(publishfilecmdfrmt+" --no-certcheck ")
    print("********************")
    print("completed publishing workbooks")
    Utils.Commons.runCommand(tabcmdlogoutcmd)
    return workbooks, datasources

def stringComp(str_1, str_0):
    if(str_1 is None):
        str_1 = ""
    if(str_0 is None):
        str_0 = ""
    return str_1.lower() == str_0.lower()

def usage():
    print('\n This is the usage function: \n')
    print('NonProd-Staging-Prod-Loader -a -f <location of erf<stg/prod>migration.properties> -s <sitename> -p <projectname>')

if __name__ == '__main__':
    pass
    parser = argparse.ArgumentParser()
    parser.add_argument('-a', '--all')
    parser.add_argument('-f', '--filepath')
    parser.add_argument('-s', '--sitename', required=True)
    parser.add_argument('-d', '--destsitename', required=True)
    parser.add_argument('-p', '--projectname', required=True)
    parser.add_argument('-t', '--target', required=True)
    parser.add_argument('-u', '--updatedatasource')
    args = parser.parse_known_args()[0]
    if(args.target=="PROD"):
        source_server = dev_server
        source_username = dev_user
        source_password = dev_pwd
        source_site = args.sitename
        source_project = args.projectname
        dest_server = prod_server
        dest_username = prod_user
        dest_password = prod_pwd
        dest_site = args.destsitename
        dest_project = args.projectname
        update_ds=args.updatedatasource
        print("moving site {0} from server {1} to server {2}".format(source_site, stg_server, prod_server))
        dest_ds_properties = "E:\\Tableau_Deployment_Application_SVN\\migrationconfigs\\ds.prod.properties";
        dest_wkbk_properties = "E:\\Tableau_Deployment_Application_SVN\\migrationconfigs\\wkbk.properties";
    if(args.target=="STG"):
        source_server = dev_server
        source_username = dev_user
        source_password = dev_pwd
        source_site = args.sitename
        source_project = args.projectname
        dest_server = stg_server
        dest_username = stg_user
        dest_password = stg_pwd
        dest_site = args.destsitename
        dest_project = args.projectname
        update_ds=args.updatedatasource
        print("moving site {0} from server {1} to server {2}".format(dest_site, dev_server, stg_server))
        dest_ds_properties = "E:\\Tableau_Deployment_Application_SVN\\migrationconfigs\\ds.prod.properties";
        dest_wkbk_properties = "E:\\Tableau_Deployment_Application_SVN\\migrationconfigs\\wkbk.properties";
    datasourceprops, dataSourceNames = readDataSourceConfig(dest_ds_properties);
    ##print("Data source names from properties")
    ##print(dataSourceNames)
    workbookprops = readWorkbookConfig(dest_wkbk_properties);
    workbooks, datasources = getWorkbooksFromSourceSite(source_server,source_username,source_password,source_site,source_project,dataSourceNames,workbookprops)
    workbooks, datasources = uploadWorkbooksToDestinationSite(dest_server,dest_username,dest_password,dest_site,dest_project,workbooks,datasources,dataSourceNames,workbookprops,update_ds)
    print("Completed Migration!!!!")
This is probably happening because you have not specified the --tabbed option in your tabcmd publish command.
Change this line of code:
publishfilecmd ="tabcmd publish \"{0}\" -r \"{1}\" --overwrite ";
to
publishfilecmd ="tabcmd publish \"{0}\" -r \"{1}\" --overwrite --tabbed";
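With that change, the publish command the script builds should end up looking roughly like this (the workbook and project names here are only examples):

tabcmd publish "E:\Tableau_Deployment_Application_SVN\approved\MyWorkbook.twbx" -r "MyProject" --overwrite --tabbed --no-certcheck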

Python tarfile fails

I am trying to write a script that tars a directory and scp's it to a server that holds lots of tar files. I am having trouble creating the tar of the directories; here is the complete script. Why is this happening?
Code:
#!/usr/bin/python
import json
from pprint import pprint
import subprocess
import os
from os.path import expanduser
import time
import os.path
import shutil
import tarfile
import smtplib
import zipfile
import glob


def checkFileDownload():
    os.system("scp ***#***.***.***.***:/var/log/apache2/access.log ~/pingMeServeraccess.log")


def sendNotificationText(server="smtp.gmail.com",userName="***#***.com",password="********",cellNumber="***********",testLink="Test"):
    server = smtplib.SMTP_SSL(server, ***)
    server.login(userName,password)
    server.sendmail(userName,cellNumber,testLink)


def sendTarFileToPingMeServer(locationOfTarFile="/home/autotest/tarPackage",nameOfTarFile=""):
    fullPathOfFile = nameOfTarFile
    scpCommand = "scp -r "+ fullPathOfFile +" ***#***.***.***.***:/home/autotest/untethered/"
    try:
        os.popen(scpCommand)
        testLink= "\nhttp://***.***.***.***/" + nameOfTarFile.split('/')[-1]
        sendNotificationText(testLink = testLink)
    except:
        print "something went wrong"


def makeTarFile(sourceDir):
    if os.path.exists(expanduser("~/tarPackage")):
        shutil.rmtree(expanduser("~/tarPackage"))
    else:
        pass
    dstFolder = expanduser('~/tarPackage')
    crtDstFolder = 'mkdir -p ' + dstFolder
    os.system(crtDstFolder)
    archiveName = str(time.time())+'.tar'
    print 'creating archive, '+archiveName
    out = tarfile.open(expanduser('~/tarPackage/'+archiveName), mode='w')
    try:
        out.add(sourceDir)
        sendTarFileToPingMeServer(nameOfTarFile=archiveName)
    finally:
        out.close()
        checkFileDownload()


def getTest(userName):
    testLoc = check(userName)
    gitList= [];TestList = []; packageDir = "mkdir ~/testPackageDir"
    if os.path.exists(expanduser("~/testPackageDir")):
        shutil.rmtree(expanduser("~/testPackageDir"))
    else:
        pass
    originalDirectory = os.getcwd()
    gitrepo = ""
    for test,gitLink in testLoc.items():
        if gitLink not in gitList:
            gitRepo = expanduser("~/tempGit_"+str(time.time()))
            p = subprocess.Popen(["git", "clone", gitLink,gitRepo], stdout=subprocess.PIPE)
            out,err = p.communicate()
            gitList.append(gitLink)
            testLink = gitRepo + test
            if os.path.isfile(testLink):
                os.system(packageDir)
                relPath = test.rstrip(test.split('/')[-1])
                x = "mkdir -p ~/testPackageDir"+relPath
                os.system(x)
                y = "~/testPackageDir" + relPath
                cpTest = "cp "+testLink+" "+ expanduser(y)
                os.system(cpTest)
        else:
            print "git link already cloned, skipping, checking for test cases."
            testLink = gitRepo + test
            if os.path.isfile(testLink):
                relPath = test.rstrip(test.split('/')[-1])
                x = "mkdir -p ~/testPackageDir"+relPath
                os.system(x)
                y = "~/testPackageDir" + relPath
                cpTest = "cp "+testLink+" "+ expanduser(y)
                os.system(cpTest)
    makeTarFile(expanduser("~/testPackageDir"))
    os.system("cd ~; rm -rf tempGit_*;cd -; rm -rf ~/testPackageDir")


def check(userName):
    p = subprocess.Popen(["ls", "/var/www/tempdata/testexec"], stdout=subprocess.PIPE)
    out,err = p.communicate()
    out = out.split('\n')[:-1]
    for fileName in out:
        if userName in fileName:
            filePath = "/var/www/tempdata/testexec/"+fileName
            json_data=open(filePath)
            data = json.load(json_data)
            testLoc = searchForGitTest(data)
            curDict = os.popen("pwd")
            os.system("cd ~")
    return testLoc


def searchForGitTest(data):
    aux = {};auxList= []
    for idx in range(len(data["rows"])):
        scriptPath = data["rows"][idx]["scriptPath"]
        gitPath = data["rows"][idx]["gitPath"]
        aux[scriptPath] = gitPath
    return aux


if __name__ == "__main__":
    getTest("user")
Attaching the run:
autotest#batman007:/var/www$ python testPackageUploader.py
remote: Counting objects: 38357, done
remote: Finding sources: 100% (38357/38357)
remote: Total 38357 (delta 15889), reused 36060 (delta 15889)
Receiving objects: 100% (38357/38357), 652.78 MiB | 17.08 MiB/s, done.
Resolving deltas: 100% (15889/15889), done.
git link already cloned, skipping, checking for test cases.
creating archive
1407871278.15.tar: No such file or directory
access.log 100% 21KB 21.3KB/s 00:00
/var/www
The problem in this script was that I was sending the file to the server before closing it. One of my colleagues helped me figure this out.
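A minimal sketch of the corrected ordering in makeTarFile: finish writing and close the archive first, then ship it (same helper names as in the script above):

def makeTarFile(sourceDir):
    dstFolder = expanduser('~/tarPackage')
    if os.path.exists(dstFolder):
        shutil.rmtree(dstFolder)
    os.system('mkdir -p ' + dstFolder)
    archiveName = str(time.time()) + '.tar'
    print 'creating archive, ' + archiveName
    out = tarfile.open(expanduser('~/tarPackage/' + archiveName), mode='w')
    try:
        out.add(sourceDir)
    finally:
        out.close()  # the tar file is complete only after close()
    sendTarFileToPingMeServer(nameOfTarFile=archiveName)
    checkFileDownload()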

wxPython and CEF Python 3

I am looking at the example file in CEF Python 3 on Windows.
When running the Python example scripts, it opens a debug window in the Windows command prompt.
How do I stop this window from being shown?
Just to state the obvious:
DEBUG = True does not make a difference; it just stops the debugging output inside that window, but the window itself still shows.
This is the example.py file:
# CEF Python 3 example application.
# Checking whether python architecture and version are valid, otherwise an obfuscated
# error will be thrown when trying to load cefpython.pyd with a message "DLL load failed".
import platform
if platform.architecture()[0] != "32bit":
    raise Exception("Architecture not supported: %s" % platform.architecture()[0])

import os, sys
libcef_dll = os.path.join(os.path.dirname(os.path.abspath(__file__)),
                          'libcef.dll')
if os.path.exists(libcef_dll):
    # Import the local module.
    if 0x02070000 <= sys.hexversion < 0x03000000:
        import cefpython_py27 as cefpython
    elif 0x03000000 <= sys.hexversion < 0x04000000:
        import cefpython_py32 as cefpython
    else:
        raise Exception("Unsupported python version: %s" % sys.version)
else:
    # Import the package.
    from cefpython3 import cefpython

import cefwindow
import win32con
import win32gui
import time

DEBUG = True


def GetApplicationPath(file=None):
    import re, os
    # If file is None return current directory without trailing slash.
    if file is None:
        file = ""
    # Only when relative path.
    if not file.startswith("/") and not file.startswith("\\") and (
            not re.search(r"^[\w-]+:", file)):
        if hasattr(sys, "frozen"):
            path = os.path.dirname(sys.executable)
        elif "__file__" in globals():
            path = os.path.dirname(os.path.realpath(__file__))
        else:
            path = os.getcwd()
        path = path + os.sep + file
        path = re.sub(r"[/\\]+", re.escape(os.sep), path)
        path = re.sub(r"[/\\]+$", "", path)
        return path
    return str(file)


def ExceptHook(excType, excValue, traceObject):
    import traceback, os, time, codecs
    # This hook does the following: in case of exception write it to
    # the "error.log" file, display it to the console, shutdown CEF
    # and exit application immediately by ignoring "finally" (_exit()).
    errorMsg = "\n".join(traceback.format_exception(excType, excValue,
                                                    traceObject))
    errorFile = GetApplicationPath("error.log")
    try:
        appEncoding = cefpython.g_applicationSettings["string_encoding"]
    except:
        appEncoding = "utf-8"
    if type(errorMsg) == bytes:
        errorMsg = errorMsg.decode(encoding=appEncoding, errors="replace")
    try:
        with codecs.open(errorFile, mode="a", encoding=appEncoding) as fp:
            fp.write("\n[%s] %s\n" % (
                time.strftime("%Y-%m-%d %H:%M:%S"), errorMsg))
    except:
        print("cefpython: WARNING: failed writing to error file: %s" % (
            errorFile))
    # Convert error message to ascii before printing, otherwise
    # you may get error like this:
    # | UnicodeEncodeError: 'charmap' codec can't encode characters
    errorMsg = errorMsg.encode("ascii", errors="replace")
    errorMsg = errorMsg.decode("ascii", errors="replace")
    print("\n"+errorMsg+"\n")
    cefpython.QuitMessageLoop()
    cefpython.Shutdown()
    os._exit(1)


def InitDebugging():
    # Whether to print & log debug messages
    if DEBUG:
        cefpython.g_debug = True
        cefpython.g_debugFile = GetApplicationPath("debug.log")
        cefwindow.g_debug = True


def CefAdvanced():
    sys.excepthook = ExceptHook
    InitDebugging()
    appSettings = dict()
    appSettings["log_file"] = GetApplicationPath("debug.log")
    appSettings["log_severity"] = cefpython.LOGSEVERITY_INFO
    appSettings["release_dcheck_enabled"] = True  # Enable only when debugging
    appSettings["browser_subprocess_path"] = "%s/%s" % (
        cefpython.GetModuleDirectory(), "subprocess")
    cefpython.Initialize(appSettings)
    wndproc = {
        win32con.WM_CLOSE: CloseWindow,
        win32con.WM_DESTROY: QuitApplication,
        win32con.WM_SIZE: cefpython.WindowUtils.OnSize,
        win32con.WM_SETFOCUS: cefpython.WindowUtils.OnSetFocus,
        win32con.WM_ERASEBKGND: cefpython.WindowUtils.OnEraseBackground
    }
    browserSettings = dict()
    browserSettings["universal_access_from_file_urls_allowed"] = True
    browserSettings["file_access_from_file_urls_allowed"] = True
    windowHandle = cefwindow.CreateWindow(title="CEF Python 3 example",
                                          className="cefpython3_example", width=800, height=600,
                                          icon="icon.ico", windowProc=wndproc)
    windowInfo = cefpython.WindowInfo()
    windowInfo.SetAsChild(windowHandle)
    browser = cefpython.CreateBrowserSync(windowInfo, browserSettings,
                                          navigateUrl=GetApplicationPath("example.html"))
    cefpython.MessageLoop()
    cefpython.Shutdown()


def CloseWindow(windowHandle, message, wparam, lparam):
    browser = cefpython.GetBrowserByWindowHandle(windowHandle)
    browser.CloseBrowser()
    return win32gui.DefWindowProc(windowHandle, message, wparam, lparam)


def QuitApplication(windowHandle, message, wparam, lparam):
    win32gui.PostQuitMessage(0)
    return 0


if __name__ == "__main__":
    CefAdvanced()
I got it right, but it came down to the way I was using py2exe.
In the setup.py file I had to change:
setup(
    console=['wxwindow.py'],
    data_files=get_data_files(),
    options={"py2exe": {"dll_excludes": dll_excludes, 'optimize': 2}},
    zipfile="shared.lib"
)
to:
setup(
    windows=['wxwindow.py'],
    data_files=get_data_files(),
    options={"py2exe": {"dll_excludes": dll_excludes, 'optimize': 2}},
    zipfile="shared.lib"
)
So the key change is:
windows=['wxwindow.py']
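For reference, a minimal self-contained setup.py along those lines; dll_excludes and the data files are project-specific, so the values below are only placeholders:

from distutils.core import setup
import py2exe

# Example exclusion list; adjust for your own build.
dll_excludes = ["MSVCP90.dll"]

setup(
    # 'windows' builds a GUI executable, so no console window is opened;
    # 'console' would open the command-prompt window the question describes.
    windows=['wxwindow.py'],
    data_files=[],
    options={"py2exe": {"dll_excludes": dll_excludes, "optimize": 2}},
    zipfile="shared.lib",
)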
