I have the following CMakeLists.txt file, which is set up to use Python 3.4:
cmake_minimum_required(VERSION 3.2 FATAL_ERROR)
set(CMAKE_MODULE_PATH ${CMAKE_MODULE_PATH} "${CMAKE_SOURCE_DIR}/../cmake/")
project(aConfigd VERSION 1.0)
string(TOLOWER aConfigd project_id)
find_package(PythonInterp 3.4 REQUIRED)
include(FindPythonInterp)
set(PYTHON ${PYTHON_EXECUTABLE})
message(STATUS "\${PYTHON_EXECUTABLE} == ${PYTHON_EXECUTABLE}")
set(pkgdatadir /usr/share/configd)
set(configdir /etc/amy)
set(SONARCONFIGID_SOURCE_DIR etc/configd)
set(SRC_DIR configd/src/)
include(common)
# "${SRC_DIR}/systemd_client.py"
# "${SRC_DIR}/amyconfig_service.py"
"${SRC_DIR}/__init__.py"
"${SRC_DIR}/main.py"
"${SRC_DIR}/application.py"
DESTINATION ${pkgdatadir}/configd/
)
#general
set(CPACK_PACKAGE_NAME "a-config")
set(CPACK_PACKAGE_DESCRIPTION_SUMMARY "a-config-manager")
set(CPACK_PACKAGE_DESCRIPTION "a-config-manager")
# redhat
set(CPACK_RPM_EXCLUDE_FROM_AUTO_FILELIST_ADDITION
/etc/amy
)
include(cpack)
Indeed, the output confirms that ${PYTHON_EXECUTABLE} == /usr/bin/python3.4 (see the 4th line below):
$ make clean ; cmake -DCMAKE_BUILD_TYPE=Release -DSHORT_VERSION=NO -DCUSTOMER=NO .. ; make -j12 ; make package
-- Found PythonInterp: /usr/bin/python3.4 (found suitable version "3.4.5", minimum required is "3.4")
-- Found PythonInterp: /usr/bin/python3.4 (found version "3.4.5")
-- ${PYTHON_EXECUTABLE} == /usr/bin/python3.4
-- Build Type: Release
-- Detected distribution: rhel fedora
-- Detected aConfigd version: 2.3.0-3030-gf7733cf659
-- Detected distribution: rhel fedora
-- Configuring done
-- Generating done
-- Build files have been written to: /local/raid0/git/amy/aConfig/build
Run CPack packaging tool...
CPack: Create package using RPM
CPack: Install projects
CPack: - Run preinstall target for: aConfigd
CPack: - Install project: aConfigd
CPack: Create package
CPackRPM:Warning: CPACK_SET_DESTDIR is set (=ON) while requesting a relocatable package (CPACK_RPM_PACKAGE_RELOCATABLE is set): this is not supported, the package won't be relocatable.
CPackRPM: Will use GENERATED spec file: /local/raid0/git/my/aConfig/build/_CPack_Packages/Linux/RPM/SPECS/a-config.spec
CPack: - package: /local/raid0/git/my/aConfig/build/a-config-2.3.0-3030-gf7733cf659.el7.my.x86_64.rpm generated.
$
However, if I uncomment the "${SRC_DIR}/systemd_client.py" line, I get the error:
Compiling /local/raid0/git/my/aConfig/build/_CPack_Packages/Linux/RPM/a-config-2.3.0-3030-gf7733cf659.el7.my.x86_64/usr/share/configd/configd/systemd_client.py ...
File "/usr/share/configd/configd/systemd_client.py", line 21
def __init__(self, systemd_proxy:Gio.DBusProxy):
^
SyntaxError: invalid syntax
Isn't def __init__(self, systemd_proxy:Gio.DBusProxy): valid Python 3.4 syntax?
If so, why does CMake complain?
The root cause is in the rpmbuild step.
RPM tries to be extra helpful and byte-compiles every .py file it encounters.
Alas, it wrongly uses the python2 interpreter to create the byte-code (even though find_package(PythonInterp 3.4 REQUIRED) is declared in the CMakeLists.txt file).
The fix that worked for me was:
set(CPACK_RPM_BUILDREQUIRES python34-devel)
set(CPACK_RPM_SPEC_MORE_DEFINE "%define __python ${PYTHON_EXECUTABLE}")
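In the CMakeLists.txt from the question, these two lines just need to appear before the include(cpack) line. A minimal sketch of the placement (reusing the question's variables):

# The %define makes rpmbuild byte-compile with the interpreter that
# find_package located, instead of the default python2 interpreter.
# Both variables must be set before the CPack module is included.
set(CPACK_RPM_BUILDREQUIRES python34-devel)
set(CPACK_RPM_SPEC_MORE_DEFINE "%define __python ${PYTHON_EXECUTABLE}")
include(cpack)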
When you just run "${SRC_DIR}/systemd_client.py", you're telling it to run that script the same way the shell would: by looking at the #! line and running it with whatever interpreter is specified there, which is probably something like #! /usr/bin/python or #! /usr/bin/env python.
If you want to run your script with a particular interpreter, you have to run that interpreter and pass it the script, just as you would at the shell. I'm pretty rusty with CMake, but I'd assume you do that like this:
"${PYTHON_EXECUTABLE}" "${SRC_DIR}/amyconfig_service.py"
Alternatively, since this is your code, maybe you want to use setuptools to programmatically generate scripts for your entry-points, which means it would create a #! line for them that runs whichever Python version was used to run setup.py.
I am trying to create a python package (deb & rpm) from cmake, ideally using cpack. I did read
https://cmake.org/cmake/help/latest/cpack_gen/rpm.html and,
https://cmake.org/cmake/help/latest/cpack_gen/deb.html
The installation works just fine (using component install) for my shared library. However, I cannot make sense of the documentation when it comes to installing the python binding (glue) code. Using the standard cmake install mechanism, I tried:
install(
FILES __init__.py library.py
DESTINATION ${ACME_PYTHON_PACKAGE_DIR}/project_name
COMPONENT python)
And then, using a brute-force approach, I ended up with:
# debian based package (relative path)
set(ACME_PYTHON_PACKAGE_DIR lib/python3/dist-packages)
and
# rpm based package (full path required)
set(ACME_PYTHON_PACKAGE_DIR /var/lang/lib/python3.8/site-packages)
The above is derived from:
debian % python -c 'import site; print(site.getsitepackages())'
['/usr/local/lib/python3.9/dist-packages', '/usr/lib/python3/dist-packages', '/usr/lib/python3.9/dist-packages']
while:
rpm % python -c 'import site; print(site.getsitepackages())'
['/var/lang/lib/python3.8/site-packages']
It is pretty clear that the brute-force approach will not be portable, and is doomed to fail on the next release of python. The only possible solution that I can think of is generating a temporary setup.py python script (using setuptools), that will do the install. Typically cmake would call the following process:
% python setup.py install --root ${ACME_PYTHON_INSTALL_ROOT}
My questions are:
Did I understand the cmake/cpack documentation correctly for python packages? If so, this means I need to generate an intermediate setup.py script.
I have been searching through the cmake/cpack codebase (git grep setuptools) but did not find helper functions to handle generation of setup.py and passing the resulting files back to cpack. Is there an existing cmake module which I could re-use?
I did read some alternative solutions, such as:
How to build debian package with CPack to execute setup.py?
which seems overly complex and geared toward Debian-only systems. I need to handle RPM in my case.
As mentioned in my other solution, the ugly part is dealing with absolute paths in cmake install() commands. I was able to refactor the code to avoid the use of absolute paths in install(). I simply changed the installation to:
install(
# trailing slash is important:
DIRECTORY ${SETUP_OUTPUT}/
# "." syntax is a reliable mechanism, see:
# https://gitlab.kitware.com/cmake/cmake/-/issues/22616
DESTINATION "."
COMPONENT python)
And then one simply needs to:
set(CMAKE_INSTALL_PREFIX "/")
set(CPACK_PACKAGING_INSTALL_PREFIX "/")
include(CPack)
At this point, all install paths need to include /usr explicitly, since we've cleared the value of CMAKE_INSTALL_PREFIX.
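For example (mylib is a hypothetical target name, purely to illustrate the consequence):

# CMAKE_INSTALL_PREFIX is now "/", so the usr/ part has to be spelled out:
install(TARGETS mylib
        LIBRARY DESTINATION usr/lib
        COMPONENT runtime)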
The above has been tested for deb and rpm packages. CPACK_BINARY_TGZ does properly run with the above solution:
https://gitlab.kitware.com/cmake/cmake/-/issues/22925
I am going to post the temporary solution I am using at the moment, until someone provides something more robust.
So I eventually managed to stumble upon:
https://alioth-lists.debian.net/pipermail/libkdtree-devel/2012-October/000366.html and,
Using CMake with setup.py
Re-using the above to do an install step instead of a build step can be done as follows:
find_package(Python COMPONENTS Interpreter)
set(SETUP_PY_IN "${CMAKE_CURRENT_SOURCE_DIR}/setup.py.in")
set(SETUP_PY "${CMAKE_CURRENT_BINARY_DIR}/setup.py")
set(SETUP_DEPS "${CMAKE_CURRENT_SOURCE_DIR}/project_name/__init__.py")
set(SETUP_OUTPUT "${CMAKE_CURRENT_BINARY_DIR}/build-python")
configure_file(${SETUP_PY_IN} ${SETUP_PY})
add_custom_command(
OUTPUT ${CMAKE_CURRENT_BINARY_DIR}/setup_timestamp
COMMAND ${Python_EXECUTABLE} ARGS ${SETUP_PY} install --root ${SETUP_OUTPUT}
COMMAND ${CMAKE_COMMAND} -E touch ${CMAKE_CURRENT_BINARY_DIR}/setup_timestamp
DEPENDS ${SETUP_DEPS})
add_custom_target(target ALL DEPENDS ${CMAKE_CURRENT_BINARY_DIR}/setup_timestamp)
And then the ugly part is:
install(
# trailing slash is important:
DIRECTORY ${SETUP_OUTPUT}/
DESTINATION "/" # FIXME may cause issues with other cpack generators
COMPONENT python)
Turns out that the documentation for install() is pretty clear about absolute paths:
https://cmake.org/cmake/help/latest/command/install.html#introduction
DESTINATION
[...]
As absolute paths are not supported by cpack installer generators,
it is preferable to use relative paths throughout.
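Concretely, the difference looks like this (hypothetical file and paths, just to illustrate the quoted advice):

# Relative DESTINATION: relocatable and accepted by all cpack generators
install(FILES library.py
        DESTINATION lib/python3/dist-packages/project_name
        COMPONENT python)
# An absolute DESTINATION such as /usr/lib/python3/dist-packages/... is
# exactly what the documentation warns against for cpack installer generators.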
For reference, here is my setup.py.in:
from setuptools import setup
if __name__ == '__main__':
    setup(name='project_name_python',
          version='${PROJECT_VERSION}',
          package_dir={'': '${CMAKE_CURRENT_SOURCE_DIR}'},
          packages=['project_name'])
You can be fancy and remove the __pycache__ folder using the -B flag:
COMMAND ${Python_EXECUTABLE} ARGS -B ${SETUP_PY} install --root ${SETUP_OUTPUT}
You can be extra fancy and add a debian-specific option such as:
if(CPACK_BINARY_DEB)
set(EXTRA_ARG "--install-layout" "deb")
endif()
use as:
COMMAND ${Python_EXECUTABLE} ARGS -B ${SETUP_PY} install --root ${SETUP_OUTPUT} ${EXTRA_ARG}
I am trying to build an RPM from a python application on a RHEL 8.2 machine.
The shebangs on the scripts are set correctly to #!/usr/bin/python3.
However, for some reason the shebangs get changed to #!/usr/libexec/platform-python -s when the RPM is built.
I have tried almost everything.
I have undefined the mangling according to the docs: https://access.redhat.com/documentation/en-us/red_hat_enterprise_linux/8/html/packaging_and_distributing_software/advanced-topics
%undefine __brp_mangle_shebangs
but the shebangs still get changed.
These are the relevant parts of the spec file:
%undefine __brp_mangle_shebangs
Name: myapp
Version: 2.0.0
Release: 1%{?dist}
summary: rpm for my APP
BuildArch: noarch
### Build Dependencies ###
BuildRequires: python3-setuptools
BuildRequires: python3-devel
%?python_enable_dependency_generator
%build
%py3_build
%install
%py3_install
%files
....
I can include python*-rpm-macros in the spec, and that would set the shebang to something like /usr/bin/python3.6, but that is too restrictive. Our code works in anything > python3.6, so if we deploy the rpm on a system with python3.8 it should work.
How can I set /usr/bin/python3, or leave the shebang unchanged on the python scripts, when the rpm is packaged?
%undefine __brp_mangle_shebangs works for me.
$ rpmbuild --version
RPM version 4.14.3
Perhaps you need to put it later in your preamble? eg:
Name: myapp
Version: 2.0.0
Release: 1%{?dist}
summary: rpm for my APP
BuildArch: noarch
### Build Dependencies ###
BuildRequires: python3-setuptools
BuildRequires: python3-devel
## Fixes
# disable shebang mangling of python scripts
%undefine __brp_mangle_shebangs
...
Note also that there seem to be some additional macros that give finer-grained control; however, I have not tried these:
Excluding based on shebang:
%global __mangle_shebangs_exclude ruby
Excluding based on path:
%global __mangle_shebangs_exclude_from /test/
Reference: https://src.fedoraproject.org/rpms/redhat-rpm-config/pull-request/19
I would also note that the document cited above is incorrect: https://access.redhat.com/documentation/en-us/red_hat_enterprise_linux/8/html/packaging_and_distributing_software/advanced-topics section 4.6.2
To prevent the BRP script from checking and modifying interpreter
directives, use the following RPM directive:
%undefine %brp_mangle_shebangs
since the correct directive is %undefine __brp_mangle_shebangs
I'm trying to wrap my head around f2py because my organization has a lot of legacy fortran code that I would like to incorporate into some newer python-based tools I'm writing. Ideally, I would package these tools either in source packages or wheels to make it easier to distribute to the rest of the organization.
I've written a small test package based on some other examples I've seen that just sums an array of floats. The package contents are included below. If I build a source distribution tarball using py setup.py sdist, everything looks like it works. It even looks like pip successfully installs it. However, if I open a python shell and try to import the newly installed module, I get an error on the from fastadd import fadd line in the initialization script saying
AttributeError: module 'fastadd' has no attribute 'fastadd'
So it seems like it didn't actually successfully build the f2py module. Doing some troubleshooting, if I open a powershell window in the package folder and just run
py -m numpy.f2py -c fadd.pyf fadd.f90
and then open a python shell in the same folder and try to import fastadd, I get an error, ImportError: DLL load failed: The specified module could not be found. (This is after I installed the Visual Studio build tools, a fix suggested on several threads). Following the advice on this thread, changing the command to
py -m numpy.f2py -c --fcompiler=gnu95 --compiler=mingw32 fadd.pyf fadd.f90
will build a module file that I can successfully import and use. Okay, great.
However, when I change config.add_extension in the setup file to include the keyword argument f2py_options=["--fcompiler=gnu95","--compiler=mingw32"] and try to build a package distribution file with the setup.py sdist command and then install using py -m pip install fastadd-1.0a1.tar.gz, I get yet a different error that says
ERROR: No .egg-info directory found in C:\Users\username\AppData\Local\Temp\pip-pip-egg-info-c7406k03
And now I'm completely flummoxed. Other configurations of the f2py_options either result in setup.py throwing an error or fail to create the extension altogether, similar to above. Using a simple string for the options gives an error, so apparently f2py_options does in fact expect a list input. I can't seem to find any good documentation on whether I'm using f2py_options correctly, and I have no idea why just adding that option would cause pip to not know where its info directory is. That makes no sense to me. I'd really appreciate some help on this one.
I'm running Python 3.7.0 32-bit, numpy 1.20.1, and pip 21.0.1 on a Windows 10 machine.
--EDIT--
Looking in the installation directory of the test module, I found a new wrinkle to this problem: the installation directory does not actually include any files listed in MANIFEST, not even the __init__.py file. If I copy __init__.py into the directory, trying to import the module gives the same ImportError: DLL load failed error I've been getting.
Also, inspecting the output of py -m pip install, it looks like numpy.distutils doesn't recognize --fcompiler or --compiler as valid options and just ignores them, even though numpy.f2py does recognize them.
--END EDIT--
PACKAGE CONTENTS:
+-fastadd
---__init__.py
---fadd.f90
---fadd.pyf
-MANIFEST.in
-README
-setup.py
fadd.f90 has the following contents:
subroutine fadd(vals,n,mysum)
integer, intent(in) :: n
real*8, intent(out):: mysum
real*8, dimension(n), intent(in) :: vals
mysum = sum(vals)
end subroutine fadd
fadd.pyf has the following contents:
python module fastadd ! in
interface ! in :fastadd
subroutine fadd(vals,n,mysum) ! in :fastadd:fadd.f90
real*8 dimension(n),intent(in) :: vals
integer, optional,intent(in),check(len(vals)>=n),depend(vals) :: n=len(vals)
real*8 intent(out) :: mysum
end subroutine fadd
end interface
end python module fastadd
__init__.py:
"""This is the documentation!"""
from .fastadd import fadd
MANIFEST.in:
include README
recursive-include fastadd *.f90
recursive-include fastadd *.pyf
recursive-include fastadd *.py
and, finally, setup.py:
def configuration(pth=None):
    from numpy.distutils.misc_util import Configuration
    config = Configuration(
        'fastadd',
        top_path=pth,
        version='1.0a1',
        author='John Doe',
        author_email='john.doe#fake-org.biz',
        url='fake-org.biz/fastadd',
        description="Testing f2py build process. Sums an arbitrary-length list of numbers.")
    config.add_extension(
        'fastadd',
        sources=['fastadd\\fadd.pyf', 'fastadd\\fadd.f90']
    )
    return config

if __name__ == '__main__':
    from numpy.distutils.core import setup
    setup(**configuration('fastadd').todict())
If it helps at all, the final MANIFEST file looks like this after the setup script is run:
# file GENERATED by distutils, do NOT edit
README
setup.py
C:\Users\username\Documents\Development\python_modules\fastadd\fastadd\fadd.f90
C:\Users\username\Documents\Development\python_modules\fastadd\fastadd\fadd.pyf
fastadd\__init__.py
fastadd\fadd.f90
fastadd\fadd.pyf
I would like to know if it is possible to create a debian package for a python file.
I thought of using cx_Freeze to get an executable file and then creating the package from that, which is really easy. But without doing this, I mean just for a plain python file, can I generate a debian package?
And how can I do this?
Thank you!
What follows is a basic example of how a source package for a python script might look. While most of the packaging tutorials are a bit complex, they can really help if you hit a problem. That said, I first learned the basics of Debian packaging by simply looking at Debian packages. apt-get source something similar and learn by example.
Here's your basic source package layout:
my-script/
-- myScript
-- debian/
---- changelog
---- copyright
---- compat
---- rules
---- control
---- install
Run dch --create in the directory to create a properly formatted debian/changelog entry.
debian/copyright should look like:
Format: http://www.debian.org/doc/packaging-manuals/copyright-format/1.0/
Upstream-Name: myScript
Upstream-Contact: Name, <email#address>
Files: *
Copyright: 2017, Name, <email#address>
License: (GPL-2+ | LGPL-2 | GPL-3 | whatever)
Full text of licence.
.
Unless it is a common licence, in which case the full text can be found in /usr/share/common-licenses
debian/compat can just be: 7
debian/rules:
#!/usr/bin/make -f
%:
	dh $@ --with python2
Note that there must be a "tab" before dh $@ --with python2, not spaces.
debian/control:
Source: my-script
Section: python
Priority: optional
Maintainer: Name, <email#address>
Build-Depends: debhelper (>= 7),
python (>= 2.6.6-3~)
Standards-Version: 3.9.2
X-Python-Version: >= 2.6
Package: my-script
Architecture: all
Section: python
Depends: python-appindicator, ${misc:Depends}, ${python:Depends}
Description: short description
A long description goes here.
.
It can contain multiple paragraphs
debian/install:
myScript usr/bin/
This file indicates which file will be installed into which folder.
Now build it with debuild --no-tgz-check
This will create a functional deb package. Lintian is going to throw a few warnings regarding the lack of an orig.tar.gz, but unless you plan on creating a proper upstream project that makes tarball releases you'll probably just want to ignore that for now.
I am trying to install boost.numpy on my Ubuntu 16.04 machine. I tried these commands to install boost.numpy:
git clone https://github.com/ndarray/Boost.NumPy.git
cd Boost.NumPy && mkdir build && cd build
cmake -DPYTHON_LIBRARY=$HOME/anaconda3/lib/libpython3.5m.so ../
After running cmake I am facing this error:
Detected architecture 'x86_64'
-- Using Python3
CMake Error at /usr/share/cmake-3.5/Modules/FindBoost.cmake:1677 (message):
Unable to find the requested Boost libraries.
Boost version: 1.59.0
Boost include path: /home/sumit/Documents/Software/boost_1_59_0
Could not find the following static Boost libraries:
boost_python3
No Boost libraries were found. You may need to set BOOST_LIBRARYDIR to the
directory containing Boost libraries or BOOST_ROOT to the location of
Boost.
Call Stack (most recent call first):
CMakeLists.txt:48 (find_package)
Boost Paths:
Include : /home/sumit/Documents/Software/boost_1_59_0
Libraries: /home/sumit/Documents/Software/boost_1_59_0/libs
Configuring incomplete, errors occurred!
See also "/home/sumit/Documents/Software/Boost.NumPy/build/CMakeFiles/CMakeOutput.log".
Previously it was not able to find the boost libraries, so I manually changed the CMakeLists.txt library path to the boost_1_59_0 lib path. This path comes up in the Libraries option when I run cmake. But boost_python3 is still missing. I am new to this; what I tried is just the result of googling.
Please help.
On Ubuntu the library names for boost are:
libboost_python, libboost_python-py35, or libboost_python-py27
This means that in cmake you'll need to refer to them as python-py35 instead of python3. Alternatively, if you don't control the CMakeLists.txt you can create a symlink:
/usr/lib/x86_64-linux-gnu/libboost_python3.so -> /usr/lib/x86_64-linux-gnu/libboost_python-py35.so
In my CMakeLists.txt file I have the following:
if(UNIX)
set( BOOST_PYTHONLIB python-py35)
else()
set( BOOST_PYTHONLIB python3)
endif()
find_package (Boost 1.58 REQUIRED COMPONENTS
coroutine
context
filesystem
program_options
system
thread
${BOOST_PYTHONLIB}
chrono
)
The simple answer for this is that wherever boost_python3 is specified,
you replace it with boost_python-py35.
I tried this when I was setting up caffe for python 3.5. In the Makefile.config file, I only made the above change and it worked fine for me.