How to wrap c++ code calling python as .dll or .so? - python

I want to wrap my C++ code as .so and .dll files. I know how to wrap C++ code as a dynamic library, but my C++ code calls Python — what is normally called embedding Python.
I write a basic simple code.
python code:
def init_test(env, mode):
    """Echo both arguments to stdout and signal success.

    Args:
        env: environment label to print.
        mode: run-mode label to print.

    Returns:
        The integer 1 (success flag consumed by the C++ caller).
    """
    print(env)
    print(mode)
    return 1
c++ code calling python:
#define PY_SSIZE_T_CLEAN
#include <Python.h>
#include <iostream>
#include <exception>
/**
* #description: run risk performance use Python
* #param {string} env
* #param {string } mode
* #return {*}
*/
extern "C" int init_python_test(char* env, char* mode) {
std::cout << "start" <<std::endl;
if(Py_IsInitialized == 0){
std::cout << "not init" << std::endl;
}
else{
std::cout << "init already" <<std::endl;
//std::cout << Py_FinalizeEx() <<std::endl;
Py_Finalize();
}
std::cout << "init:"<<Py_IsInitialized() << std::endl;
Py_Initialize();
PyErr_Print();
std::cout <<"second" <<std::endl;
PyRun_SimpleString("import sys");
PyRun_SimpleString("sys.path.append('./')");
std::cout <<"ok" <<std::endl;
//int res;
PyObject *pModule,*pFunc = NULL;
PyObject *pArgs, *pValue = NULL;
pModule = PyImport_ImportModule("mini");//0x7ffff64b9cc0
if(!pModule)
std::cout << "can't open python file" << std::endl;
PyErr_Print();
pFunc = PyObject_GetAttrString(pModule, "init_test");
PyErr_Print();
if(pFunc && PyCallable_Check(pFunc)){
PyErr_Print();
pValue = PyObject_CallObject(pFunc, Py_BuildValue("(ss)", env, mode));
PyErr_Print();
}
Py_FinalizeEx();
return 1;
}
// Smoke-test driver: invoke the exported entry point once with fixed
// "prod"/"prod" arguments and print a completion marker.
int main() {
    char* environment = (char*)"prod";
    char* run_mode = (char*)"prod";
    init_python_test(environment, run_mode);
    std::cout << "ok" << std::endl;
}
I am able to run my C++ code properly with g++ when linked against the Python dynamic library, and I can use g++ to wrap my C++ code as a .so file. But when I use another C++ program and Python script to test the init_python_test function, a segmentation fault occurs when execution reaches Py_Initialize().
So, how to resolve this question? and did I wrap c++ code properly with g++? here is my shell.
g++ -fPIC -shared -Wall -o libtest.so ./mini_test.cpp -DLINUX -D_GLIBCXX_USE_CXX11_ABI=0 -I /usr/include/python3.8 -L/usr/lib/python3 -L/usr/lib/python3.8 -lpython3.8
Can somebody help me? Thank you!

Related

How to use future / async in cppyy

I'm trying to use future from C++ STL via cppyy (a C++-python binding packet).
For example, I could run this following code in C++ (which is adapted from this answer)
#include <future>
#include <thread>
#include <chrono>
#include <iostream>
using namespace std;
using namespace chrono_literals;
int main () {
// Promise/future pair: the worker thread publishes a value, main polls it.
promise<int> p;
future<int> f = p.get_future();
// Worker sleeps 10 s before fulfilling the promise, so the 10 ms wait
// below is expected to time out and report "running".
thread t([&p]() {
this_thread::sleep_for(10s);
p.set_value(2);
});
auto status = f.wait_for(10ms);
if (status == future_status::ready) {
cout << "task is read" << endl;  // NOTE(review): likely meant "ready"
} else {
cout << "task is running" << endl;
}
// Blocks until the worker finishes its 10 s sleep.
t.join();
return 0;
}
A similar implementation of the above in Python is
# Reproduction: JIT-compile a C++ function that uses std::future/std::thread
# through cppyy (Cling), then call it from Python. The unresolved
# __emutls_v symbols come from the JIT's lack of TLS support.
import cppyy
# NOTE: the raw string below is RUNTIME input to cppyy's JIT compiler and
# must stay exactly as reported.
cppyy.cppdef(r'''
#include <future>
#include <thread>
#include <chrono>
#include <iostream>
using namespace std;
int test () {
promise<int> p;
future<int> f = p.get_future();
thread t([&p]() {
this_thread::sleep_for(10s);
p.set_value(2);
});
auto status = f.wait_for(10ms);
if (status == future_status::ready) {
cout << "task is read" << endl;
} else {
cout << "task is running" << endl;
}
t.join();
return 0;
}
''')
# Invoking the JIT-compiled function is what triggers the linker errors.
cppyy.gbl.test()
And the above code yields
IncrementalExecutor::executeFunction: symbol '__emutls_v._ZSt15__once_callable' unresolved while linking symbol '__cf_4'!
IncrementalExecutor::executeFunction: symbol '__emutls_v._ZSt11__once_call' unresolved while linking symbol '__cf_4'!
It looks like it's caused by using future in cppyy.
Any solutions to this?
Clang9's JIT does not support thread local storage the way the modern g++ implements it, will check again when the (on-going) upgrade to Clang13 is finished, which may resolve this issue.
Otherwise, cppyy mixes fine with threaded code (e.g. the above example runs fine on MacOS, with Clang the system compiler). Just that any TLS use needs to sit in compiled library code while the JIT has this limitation.

PyBind11 Using Qt results in ImportError when importing library in Python

I've been struggling to get an example of pybind11 with Qt working. I can import other libraries like VTK fine, but when I include a Qt library, say QString, and create a simple QString object inside one of my functions, the built library has an import error when it's being imported in Python. I am not sure how to debug these issues, as there is no useful error anywhere that I can see. I tried to look at docs, but they don't show a way to debug these errors. There are no warnings or other issues when building the library.
>>> import pyLib
ImportError: DLL load failed while importing pyLib: The specified module could not be found.
I tried to create a minimal example below. The executable target pyLib2 builds and runs just fine, but the pyLib python library target doesn't work when imported due to this line QString x;. Without it, it works fine:
CMake
cmake_minimum_required (VERSION 3.24)
project (pybindTest)
include_directories(${PROJECT_BINARY_DIR} src)
# set C++ settings
set (CXX_VERSION 20) # sets which standard of C we are using, e.g. C++20
# MSVC-style flags (/EHsc etc.) — this build is Windows/MSVC only.
set (CMAKE_CXX_FLAGS "/EHsc /O2 /favor:INTEL64 /W4 /MP -std:c++${CXX_VERSION}")
set(CMAKE_INTERPROCEDURAL_OPTIMIZATION TRUE)
# Builds pybind11 from a vendored checkout under external/.
add_subdirectory (external/pybind11)
# pyLib2: plain executable sharing the same Qt dependencies (works).
add_executable(pyLib2 main.cpp)
# pyLib: the Python extension module (fails to import when Qt is used).
pybind11_add_module(pyLib pyLib.cpp)
target_include_directories(pyLib PUBLIC
"C:/.../external/pybind11/include"
)
find_package (Qt5 5.15.2 EXACT COMPONENTS CONFIG REQUIRED Core Widgets SerialPort Network)
# NOTE(review): linking succeeds; the Python-side ImportError is typically
# the Qt DLLs not being on the loader's search path at import time.
target_link_libraries(pyLib PUBLIC
Qt5::Core
Qt5::Widgets
Qt5::SerialPort
)
target_link_libraries(pyLib2 PUBLIC
Qt5::Core
Qt5::Widgets
Qt5::SerialPort
)
pyLib.cpp
#include <pybind11/pybind11.h>
#include <QString>
#include <array>
#include <iostream>
namespace py = pybind11;
// Doubles the input. The local QString forces a Qt dependency into the
// module — with Qt's DLLs absent from the loader's search path this is
// what makes the import fail.
float test(float a)
{
    QString x; // Commenting this line out works fine
    return a * 2.0f; // FIX: float literal avoids double->float narrowing
}
/// Emits the fixed "test2!" marker line on standard output.
void test2() {
    std::cout << "test2!" << std::endl;
}
/// Additional module-initialization hook; currently only logs that it ran.
void init_pyLib(py::module& handle) {
    std::cout << "here!" << std::endl;
}
// pybind11 entry point: defines the importable Python module "pyLib".
PYBIND11_MODULE(pyLib, handle)
{
handle.doc() = "test doc";
// Expose the two free functions under Python-visible names.
handle.def("testpy", &test, py::arg("i"));
handle.def("testpy2", &test2);
// Extra init hook (just logs).
init_pyLib(handle);
}
main.cpp
#include <QString>
#include <array>
#include <iostream>
// Doubles the input; the QString local mirrors the extension module's
// Qt usage so the executable exercises the same dependency.
float test(float a)
{
    QString x;
    return a * 2.0f; // FIX: float literal avoids double->float narrowing
}
/// Emits the fixed "test2!" marker line on standard output.
void test2() {
    std::cout << "test2!" << std::endl;
}
/// Logs that the (executable-side) init hook ran.
void init_pyLib() {
    std::cout << "here!" << std::endl;
}
/// Standalone smoke test: construct a QString and call test() once,
/// bracketed by marker output.
int main() {
    std::cout << "hello!\n";
    QString x;
    test(5.0f);
    std::cout << "goodbye!\n";
}

C++ function in python from .dll using ctypes - function not found and access violation

I have this simple C++ code named hello.cpp which has a function to print "Hello world"
#include <iostream>
void hello_world();
// Entry point used when the file is run directly; just prints a marker.
int main() {
    std::cout << "Start" << std::endl;
}
// Writes the greeting line to standard output.
void hello_world() {
    std::cout << "Hello world" << std::endl;
}
I build the .dll (~1.9mb) using:
g++ -c hello.cpp
g++ -static -fPIC -o hello.dll hello.o
(using -shared gives a WinError 126 ... module not found when trying to access it in python)
The python code is:
from ctypes import cdll
lib = cdll.LoadLibrary('hello.dll')
lib.hello_world()
This throws the following error:
AttributeError: function 'hello_world' not found
I've read people mention that a __declspec(dllexport) wrapper is necessary and so is a extern "C" so that the code doesn't get "mangled". So now using that as the code:
#include <iostream>
extern "C" {
// C linkage keeps the symbol name unmangled so ctypes can resolve it;
// __declspec(dllexport) puts it in the DLL's export table (MSVC/MinGW).
__declspec(dllexport) void hello_world();
}
// Marker entry point for running the binary directly.
int main() {
    std::cout << "Opened" << std::endl;
}
// Definition of the exported greeting function (declared extern "C" above).
void hello_world() {
    std::cout << "hello world" << std::endl;
}
The python line lib.hello_world() now raises:
OSError: exception: access violation writing 0x000E28A0
What are the issues here? How can I get python to recognise and run the C++ function in the .dll? Can I skip the middleman and somehow run a C++ function from a .cpp file or a .o file?
edit:
Using eryksun's answer, it turns out that the dllexport isn't needed. The extern "C" is a must though
Thanks to #eryksun, this was solved in this case by compiling like this:
g++ -c hello.cpp
g++ -static -shared -o hello.dll hello.o
Having the C++ code set up like so:
#include <iostream>
// Marker entry point for running the binary directly.
int main() {
    std::cout << "Opened" << std::endl;
}
void hello_world() {
std::cout << "hello world" << std::endl;
}
// NOTE(review): this extern "C" declaration appears AFTER the definition,
// so the definition above has already been seen with C++ linkage; the
// conventional (and portable) arrangement is to place the extern "C"
// declaration BEFORE the definition so the exported symbol is unmangled.
// Confirm this ordering actually compiles/exports as intended.
extern "C" {
void hello_world();
}
And running it from Python as usual:
# Load hello.dll from the current working directory and invoke the
# exported function. Requires hello.dll to export an unmangled
# "hello_world" symbol (i.e. declared extern "C").
from ctypes import cdll
lib = cdll.LoadLibrary('hello.dll')
lib.hello_world()

Segfault when import_array not in same translation unit

I'm having problems getting the NumPy C API to properly initialize. I think I've isolated the problem to calling import_array from a different translation unit, but I don't know why this should matter.
Minimal working example:
header1.hpp
#ifndef HEADER1_HPP
#define HEADER1_HPP
// Python.h must be included before any NumPy header.
#include <Python.h>
#include <numpy/npy_3kcompat.h>
#include <numpy/arrayobject.h>
// Runs import_array() (implemented in file1.cpp) to set up the NumPy C API.
void initialize();
#endif
file1.cpp
#include "header1.hpp"
// import_array() is a macro that on failure expands to `return NULL;`,
// so it may only appear in a function returning a pointer — hence this
// wrapper. Returns (void*)1 on success.
void* wrap_import_array()
{
import_array();
return (void*) 1;
}
// Public initializer: performs the NumPy C-API import, discarding the
// success/failure indicator.
void initialize()
{
    (void)wrap_import_array();
}
file2.cpp
#include "header1.hpp"
#include <iostream>
// Local (same-translation-unit) variant of the import_array() wrapper;
// the macro's `return NULL;` on failure requires a pointer return type.
void* loc_wrap_import_array()
{
import_array();
return (void*) 1;
}
// Same-TU initializer: imports the NumPy C API using this file's own
// (static) PyArray_API buffer.
void loc_initialize()
{
    (void)loc_wrap_import_array();
}
int main()
{
Py_Initialize();
// With USE_LOC_INIT, import_array runs in THIS translation unit and fills
// this TU's static PyArray_API; without it, file1.cpp fills its own copy,
// leaving this TU's buffer NULL — PyArray_DescrFromType then segfaults.
#ifdef USE_LOC_INIT
loc_initialize();
#else
initialize();
#endif
npy_intp dims[] = {5};
std::cout << "creating descr" << std::endl;
PyArray_Descr* dtype = PyArray_DescrFromType(NPY_FLOAT64);
std::cout << "zeros" << std::endl;
// NOTE(review): the returned array object (and dtype ref) are not
// released — acceptable for this minimal repro.
PyArray_Zeros(1, dims, dtype, 0);
std::cout << "cleanup" << std::endl;
return 0;
}
Compiler commands:
g++ file1.cpp file2.cpp -o segissue -lpython3.4m -I/usr/include/python3.4m -DUSE_LOC_INIT
./segissue
# runs fine
g++ file1.cpp file2.cpp -o segissue -lpython3.4m -I/usr/include/python3.4m
./segissue
# segfaults
I've tested this with Clang 3.6.0, GCC 4.9.2, Python 2.7, and Python 3.4 (with a suitably modified wrap_import_array because this is different between Python 2.x and 3.x). The various combinations all give the same result: if I don't call loc_initialize, the program will segfault in the PyArray_DescrFromType call. I have NumPy version 1.8.2. For reference, I'm running this in Ubuntu 15.04.
What baffles me most of all is this C++ NumPy wrapper appears to get away with calling import_array in a different translation unit.
What am I missing? Why must I call import_array from the same translation unit in order for it to actually take effect? More importantly, how do I get it to work when I call import_array from a different translation unit like the Boost.NumPy wrapper does?
After digging through the NumPy headers, I think I've found a solution:
in numpy/__multiarray_api.h, there's a section dealing with where an internal API buffer should be. For conciseness, here's the relevant snippet:
#if defined(PY_ARRAY_UNIQUE_SYMBOL)
#define PyArray_API PY_ARRAY_UNIQUE_SYMBOL
#endif
#if defined(NO_IMPORT) || defined(NO_IMPORT_ARRAY)
/* Consumer TU: reference the API buffer defined in another translation unit. */
extern void **PyArray_API;
#else
#if defined(PY_ARRAY_UNIQUE_SYMBOL)
/* Owning TU: single definition with external linkage, shared by name. */
void **PyArray_API;
#else
/* Default: one PRIVATE buffer per translation unit — the segfault's cause. */
static void **PyArray_API=NULL;
#endif
#endif
It looks like this is intended to allow multiple modules to define their own internal API buffer, in which case each module must call import_array under its own define.
A consistent way to get several translation units to use the same internal API buffer is in every module, define PY_ARRAY_UNIQUE_SYMBOL to some library unique name, then every translation unit other than the one where the import_array wrapper is defined defines NO_IMPORT or NO_IMPORT_ARRAY. Incidentally, there are similar macros for the ufunc features: PY_UFUNC_UNIQUE_SYMBOL, and NO_IMPORT/NO_IMPORT_UFUNC.
The modified working example:
header1.hpp
#ifndef HEADER1_HPP
#define HEADER1_HPP
// Every TU except the one defining the import wrapper declares NO_IMPORT,
// so they all reference (rather than redefine) the shared API buffers.
#ifndef MYLIBRARY_USE_IMPORT
#define NO_IMPORT
#endif
// Library-unique names give the API buffers external linkage, letting all
// TUs share one buffer instead of each getting a private static copy.
#define PY_ARRAY_UNIQUE_SYMBOL MYLIBRARY_ARRAY_API
#define PY_UFUNC_UNIQUE_SYMBOL MYLIBRARY_UFUNC_API
#include <Python.h>
#include <numpy/npy_3kcompat.h>
#include <numpy/arrayobject.h>
void initialize();
#endif
file1.cpp
#define MYLIBRARY_USE_IMPORT
#include "header1.hpp"
// Sole TU that actually runs import_array() (MYLIBRARY_USE_IMPORT is
// defined above, so NO_IMPORT is NOT). The macro's failure path is
// `return NULL;`, hence the pointer return type.
void* wrap_import_array()
{
import_array();
return (void*) 1;
}
// Public one-time initializer for the shared NumPy C-API buffer; the
// import's success indicator is intentionally discarded.
void initialize()
{
    (void)wrap_import_array();
}
file2.cpp
#include "header1.hpp"
#include <iostream>
int main()
{
// Interpreter must be up before any NumPy C-API call.
Py_Initialize();
// Fills the MYLIBRARY_ARRAY_API buffer (defined in file1.cpp, shared
// across TUs via PY_ARRAY_UNIQUE_SYMBOL), so calls below work here too.
initialize();
npy_intp dims[] = {5};
std::cout << "creating descr" << std::endl;
PyArray_Descr* dtype = PyArray_DescrFromType(NPY_FLOAT64);
std::cout << "zeros" << std::endl;
// NOTE(review): result object not released — fine for this demo.
PyArray_Zeros(1, dims, dtype, 0);
std::cout << "cleanup" << std::endl;
return 0;
}
I don't know what pitfalls there are with this hack or if there are any better alternatives, but this appears to at least compile and run without any segfaults.

Calling a Python function from C++

I am trying to make a call to a python module function from my cpp file.
The call i have made is as follows:
#include <iostream>
#include "Python.h"
int
main(int argc, char** argv)
{
Py_Initialize();
PyObject *pName = PyString_FromString("tmpPyth");
PyObject *pModule = PyImport_Import(pName);
std::cout<< "Works fine till here";
PyObject *pDict = PyModule_GetDict(pModule);
if (pModule != NULL) {
PyObject *pFunc = PyObject_GetAttrString(pDict, "pyFunc");
if(pFunc != NULL){
PyObject_CallObject(pFunc, NULL);
}
}
else
std::cout << "Python Module not found";
return 0;
}
My python module is defined as follows:
import numpy
import scipy
import matplotlib
from scipy import stats
def blah():
# Paired-sample (dependent) t-test demo. Python 2 code — note the print
# statement — matching the Python 2 C API used by the embedding example.
baseline = [9.74219, 10.2226, 8.7469, 8.69791, 9.96442, 9.96472, 9.37913, 9.75004]
follow_up = [9.94227,9.46763,8.53081,9.43679,9.97695,10.4285,10.159,8.86134]
# stats.ttest_rel returns a (t-statistic, p-value) pair.
paired_sample = stats.ttest_rel(baseline , follow_up )
print "The t-statistic is %.3f and the p-value is %.3f." % paired_sample
The code in the cpp file runs fine till the 1st "std::cout" but then ends up giving me a "seg fault". Running the python code separately works fine and gives the desired output.
I can't figure out what is going wrong. Any help will be appreciated.
(Note the program is compiling correctly and running correctly till the 1st "cout")
So there are a couple of things that you were not doing right. See the comments inline. Assuming that both your CPP file and Python file lives at the following path: /home/shanil/project.
test.cpp:
#include <iostream>
#include "Python.h"
// Working answer: extend sys.path so the custom module is found, then
// look the function up as a KEY of the module dict (not as an attribute).
int
main(int argc, char** argv)
{
Py_Initialize();
// First set in path where to find your custom python module.
// You have to tell the path otherwise the next line will try to load
// your module from the path where Python's system modules/packages are
// found.
PyObject* sysPath = PySys_GetObject("path");
PyList_Append(sysPath, PyString_FromString("/home/shanil/project"));
// Load the module
PyObject *pName = PyString_FromString("my_mod");
PyObject *pModule = PyImport_Import(pName);
// Random use-less check
std::cout<< "Works fine till here\n";
// Null check happens BEFORE any use of pModule — the fix for the segfault.
if (pModule != NULL) {
std::cout << "Python module found\n";
// Load all module level attributes as a dictionary
PyObject *pDict = PyModule_GetDict(pModule);
// Remember that you are loading the module as a dictionary, the lookup you were
// doing on pDict would fail as you were trying to find something as an attribute
// which existed as a key in the dictionary
PyObject *pFunc = PyDict_GetItem(pDict, PyString_FromString("my_func"));
if(pFunc != NULL){
PyObject_CallObject(pFunc, NULL);
} else {
std::cout << "Couldn't find func\n";
}
}
else
std::cout << "Python Module not found\n";
// NOTE(review): references are leaked and Py_Finalize() is never called;
// acceptable for a short demo, not for production.
return 0;
}
my_mod.py:
def my_func():
# Python 2 function (print statement) looked up from C via PyDict_GetItem.
print 'got called'

Categories