python installing package with submodules - python

I have a custom project package with structure like:
package-dir/
mypackage/
__init__.py
submodule1/
__init__.py
testmodule.py
main.py
requirements.txt
setup.py
Using `cd package-dir` followed by `pip install -e .` or `pip install .`, as suggested by python-packaging, works as long as I access the package from package-dir.
For example :
$ cd package-dir
$pip install .
at this point this works:
$python -c 'import mypackage; import submodule1'
but this does not work:
$ cd some-other-dir
$ python -c 'import mypackage; import submodule1'
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
ImportError: No module named submodule1
How to install all the submodules?
Also, if I check the package-dir/build/lib.linux-x86_64-2.7/mypackage dir, I only see the immediate files in mypackage/*.py and NO mypackage/submodule1.
setup.py looks like:
from setuptools import setup
from pip.req import parse_requirements
reqs = parse_requirements('./requirements.txt', session=False)
install_requires = [str(ir.req) for ir in reqs]
def readme():
with open('README.rst') as f:
return f.read()
setup(name='mypackage',
version='1.6.1',
description='mypackage',
long_description=readme(),
classifiers=[
],
keywords='',
url='',
author='',
author_email='',
license='Proprietary',
packages=['mypackage'],
package_dir={'mypackage': 'mypackage'},
install_requires=install_requires,
include_package_data=True,
zip_safe=False,
test_suite='nose.collector',
tests_require=['nose'],
entry_points={
'console_scripts': ['mypackage=mypackage.run:run'],
}
)

setup.py is missing information about your package structure. You can enable auto-discovery by adding a line
setup(
# ...
packages=setuptools.find_packages(),
)
to it.

Related

Copy a non-Python file to specific directory during Pip Install

Problem statement: when I install my pip package, a specific file inside the package gets copied to the Temp directory.
Approach:
My package directory structure is the following:
my-app/
├─ app/
│ ├─ __init__.py
│ ├─ __main__.py
├─ folder-with-extra-stuff/
│ ├─ __init__.py
│ ├─ file_I_want_to_cppy.tar.gz
├─ setup.py
├─ MANIFEST.in
I'm tweaking my setup.py file to do the job. Following is my setup.py
#!/usr/bin/env python
from setuptools import setup, find_packages
from setuptools.command.install import install
import os
import sys
import shutil
rootDir = os.path.abspath(os.path.dirname(__file__))
def run_custom_install():
print("--------Start running custom command -------")
temp_dir = r'c:\temp' if sys.platform == "win32" else r'/tmp'
temp_col_dir = temp_dir + os.sep + 'dump'
os.makedirs(temp_dir, exist_ok=True)
os.makedirs(temp_col_dir, exist_ok=True)
print("----------locate the zip file ---------------")
ColDirTests = os.path.abspath(os.path.join(rootDir, 'my-app','folder-with-extra-stuff'))
_src_file = os.path.join(ColDirTests , 'file_I_want_to_cppy.tar.gz ')
print(f"******{_src_file}**********")
if os.path.exists(_src_file):
print(f"-----zip file has been located at {_src_file}")
shutil.copy(_src_file, temp_col_dir)
else:
print("!!!!Couldn't locate the zip file for transfer!!!!")
class CustomInstall(install):
def run(self):
print("***********Custom run from install********")
install.run(self)
run_custom_install()
ver = "0.0.0"
setup(
name='my_pkg',
version=ver,
packages=find_packages(),
python_requires='>=3.6.0',
install_requires = getRequirements(),
include_package_data= True,
cmdclass={
'install' : CustomInstall,
}
)
MANIFEST.in
include README.md
include file_I_want_to_cppy.tar.gz
recursive-include my-app *
global-exclude *.pyc
include requirements.txt
prune test
Testing build:
> python setup.py bdist_wheel
It is working during build. I can see there is a directory formed, C:\temp\dump, with file_I_want_to_cppy.tar.gz inside it. But when I release the package and try to install it from pip, the folder remains empty!
Any idea what I might be doing wrong here?
After a lot of research I have figured out how to resolve this issue. Let me summarize my findings; it might be helpful for others who want to do post-pip-install processing.
setup.py
Different options to install package: 1) pip install pkg_name, 2) python -m setup.py sdist
If you want to make them work either way, you need to have all 3 options — install, egg_info and develop — repeated as shown in setup.py.
If you create a *.whl file by python -m setup.py bdist_wheel, post-pip-install processing won't be executed! Please upload the .tar.gz format generated using sdist to PyPI/Artifactory to make post-pip-install processing work. Again, please note: it will not work when installing from a binary wheel.
upload the pip package: twine upload dist/*.tar.gz
from setuptools import setup, find_packages
from setuptools.command.install import install
from setuptools.command.egg_info import egg_info
from setuptools.command.develop import develop
rootDir = os.path.abspath(os.path.dirname(__file__))
def run_post_processing():
print("--------Start running custom command -------")
# One can Run any Post Processing here that will be executed post pip install
class PostInstallCommand(install):
def run(self):
print("***********Custom run from install********")
install.run(self)
run_post_processing()
class PostEggCommand(egg_info):
def run(self):
print("***********Custom run from Egg********")
egg_info.run(self)
run_post_processing()
class PostDevelopCommand(develop):
def run(self):
print("***********Custom run from Develop********")
develop.run(self)
run_post_processing()
ver = "0.0.0"
setup(
name='my_pkg',
version=ver,
packages=find_packages(),
python_requires='>=3.6.0',
install_requires = getRequirements(),
include_package_data= True,
cmdclass={
'install' : PostInstallCommand,
'egg_info': PostEggCommand,
'develop': PostDevelopCommand
}
)
Few More Things from my research:
If you want to do pre-processing instead of post-processing, you need to move install.run(self) to the end.
While pip installing, if you want to see the custom messages of pre/post installation, use -vvv. Example: pip install -vvv my_pkg

Empty __init__.py when deployed with Dockerfile

I have a pretty weird case - even don't know where to look.
I have a Python package e.g. my_utils which is uploaded to artifactory (same as PyPi).
Project structure:
my_utils
__init__.py
first_package
__init__.py
some_file.py
first_package/some_file.py
def do_job():
print("job_done")
my_utils/__init__.py
from first_package.some_file import do_job
Package deployed using pretty standard way:
setup.py
from os import path
import setuptools
from setuptools import setup, find_packages
here = path.abspath(path.dirname(__file__))
with open(path.join(here, 'README.md'), encoding='utf-8') as f:
long_description = f.read()
setuptools.setup(
name='my_utils',
version='1.0.0',
description='my_utils',
setup_requires=['wheel'],
long_description=long_description,
include_package_data=True,
url='',
author='',
author_email='',
license='MIT',
packages=setuptools.find_packages(),
package_data={
'': ['*.*']
},
install_requires=[],
zip_safe=False
)
To deploy - I use command:
python setup.py bdist_wheel upload -r local
So when I do pip install my_utils - I can do the following:
# env with my_utils installed
from my_utils import do_job
do_job() # job done
Now when I use this package in the application that is deployed with Docker and contains it in requirements:
requirements.txt
my_utils==1.0.0
Dockerfile
FROM ...
COPY requirements.txt .
RUN pip install --no-cache-dir -r requirements.txt
When I enter the container, the file my_utils/__init__.py is empty, and the import fails:
File "/app/app.py", line 13, in <module>
from my_utils import do_job
ImportError: cannot import name do_job
And the direct import works fine:
from my_utils.first_package.some_file import do_job
For now, I switched to the "direct" import without a shortcut, but it is really interesting why this can happen.
When I go inside the container, I see that the file __init__.py is empty. But when the package is re-installed, it gets its content:
$: docker exec -it my_app bash
# du /usr/lib/python2.7/site-packages/my_utils/__init__.py
0 /usr/lib/python2.7/site-packages/my_utils/__init__.py
# pip uninstall my_utils
# pip install my_utils
# du /usr/lib/python2.7/site-packages/my_utils/__init__.py
4 /usr/lib/python2.7/site-packages/my_utils/__init__.py
# cat /usr/lib/python2.7/site-packages/my_utils/__init__.py
from first_package.some_file import do_job

Packages not getting installed in python setuptools

I am packaging a python project which has the following directory structure:
toingpkg/
src/
subtoingpkg1/
subsubtoingpkg1/
...
__init__.py
__init__.py
subtoingpkg2/
...
subtoingpkg2.py
__init__.py
toingpkg.py
__init__.py
setup.cfg
pyproject.toml
My setup.cfg is as follows:
[metadata]
name = toingpkg
...
classifiers =
Programming Language :: Python :: 3
...
[options]
package_dir =
= src
packages = find_namespace:
python_requires = >=3.6
install_requires =
requests
pytz
[options.packages.find]
where=src
And my pyproject.toml is as follows:
[build-system]
requires = [
"setuptools>=42",
"wheel"
]
build-backend = "setuptools.build_meta"
When I build my package using python3 -m build as mentioned in the docs, I get a whl file in my dist folder but it does not include the files in the root of the src directory.
So when I do a pip3 install dist/toingpkg-xxx.whl, the package gets installed (shows up in pip3 list) but I cannot do a:
>>> import toingpkg
I get:
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
ModuleNotFoundError: No module named 'toingpkg'
I also tried specifying all the subpackages manually but got the same result. My Python environment is 3.8.5, setuptools 45.2.0.
What am I doing wrong?

Tox fails because setup.py can't find the requirements.txt

I have added tox to my project and my tox.ini is very simple:
[tox]
envlist = py37
[testenv]
deps =
-r{toxinidir}/requirements_test.txt
commands =
pytest -v
But when I run tox, I get the following error:
ERROR: invocation failed (exit code 1), logfile: /path/to/my_project/.tox/py37/log/py37-2.log
========================================================================================= log start ==========================================================================================
Processing ./.tox/.tmp/package/1/my_project-0+untagged.30.g6909bfa.dirty.zip
Complete output from command python setup.py egg_info:
Traceback (most recent call last):
File "<string>", line 1, in <module>
File "/tmp/pip-req-build-ywna_4ks/setup.py", line 15, in <module>
with open(requirements_path) as requirements_file:
FileNotFoundError: [Errno 2] No such file or directory: '/tmp/pip-req-build-ywna_4ks/requirements.txt'
----------------------------------------
Command "python setup.py egg_info" failed with error code 1 in /tmp/pip-req-build-ywna_4ks/
You are using pip version 10.0.1, however version 19.2.2 is available.
You should consider upgrading via the 'pip install --upgrade pip' command.
========================================================================================== log end ===========================================================================================
__________________________________________________________________________________________ summary ___________________________________________________________________________________________
ERROR: py37: InvocationError for command /path/to/my_project/.tox/py37/bin/python -m pip install --exists-action w .tox/.tmp/package/1/my_project-0+untagged.30.g6909bfa.dirty.zip (exited with code 1)
Here is my setup.py:
# -*- coding: utf-8 -*-
import os
import sys
from setuptools import setup, find_packages
import versioneer
here = os.path.abspath(os.path.dirname(__file__))
sys.path.insert(0, here)
requirements_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'requirements.txt')
with open(requirements_path) as requirements_file:
requires = requirements_file.readlines()
setup(
name='my_project',
version=versioneer.get_version(),
cmdclass=versioneer.get_cmdclass(),
maintainer='Hamed',
license='BSD',
py_modules=['my_project'],
packages=find_packages(),
package_data={'': ['*.csv', '*.yml', '*.html']},
include_package_data=True,
install_requires=requires,
long_description=open('README.md').read(),
zip_safe=False
)
python setup.py install works fine.
It seems that tox is looking for requirements in the tmp dir, but can't find it there. Is there something wrong with my configurations?
I am using tox==3.12.1, python==3.7.3, setuptools==41.0.1, and conda==4.6.9
I've tested this on Arch and SLES 12 and got the same result with both.
Based on the point from #phd, I found out that requirements.txt was not present in the source distribution. Adding requirements.txt to the MANIFEST.in solved the issue!
Complementing Hamed2005's answer:
I have my requirements split into different files (base_requirements.txt, dev_requirements.txt, etc), all of them in a requirements directory. In this case, you need to add this directory in the MANIFEST.in as
recursive-include requirements *

Import only works when module installed using --editable pip flag

I have this project on github that allows me to do some jekyll actions more easily.
When I clone it and install it into my virtualenv (pip install --editable .) it works fine, but if I just install without the --editable flag I get these errors when I try to use the commands exposed by click:
$ jk-config-set-editor Traceback (most recent call last):
File "/home/felipe/jekyll-utils/jekyll-venv/bin/jk-config-set-editor", line 7, in <module>
from jekyllutils.configs import set_editor
ImportError: No module named 'jekyllutils'
This is my setup.py file:
from setuptools import setup
setup(
name="jekyllutils",
version='0.1',
py_modules=['generators'],
install_requires=[
'click',
'python-slugify',
'appdirs',
'toml'
],
entry_points='''
[console_scripts]
jk-new = jekyllutils.generators:new_post
jk-edit = jekyllutils.managers:edit_post
jk-config-set-editor = jekyllutils.configs:set_editor
jk-config-set-posts-path = jekyllutils.configs:set_path_to_posts_dir
jk-config-dump-configs = jekyllutils.configs:dump_configs
jk-config-clear-configs = jekyllutils.configs:clear_configs
'''
)
Does anybody have any idea why this works when --editable is on but not otherwise?
In case anyone runs into this same issue, what worked for me was to use the find_packages function to define my packages in setup.py.
I also had to define static data files using the package_data field.
from setuptools import setup, find_packages
setup(
name="jekyllutils",
version='0.1',
py_modules=['generators'],
install_requires=[
'click',
'python-slugify',
'appdirs',
'toml'
],
entry_points='''
[console_scripts]
jk-new = jekyllutils.generators:new_post
jk-edit = jekyllutils.managers:edit_post
jk-config-set-editor = jekyllutils.configs:set_editor
jk-config-set-posts-path = jekyllutils.configs:set_path_to_posts_dir
jk-config-dump-configs = jekyllutils.configs:dump_configs
jk-config-clear-configs = jekyllutils.configs:clear_configs
''',
packages=find_packages(),
package_data={
"": ["*.txt", "*.json", "*.csv", "*.html"],
},
)

Categories