Dockerfile:
# syntax=docker/dockerfile:1
FROM python:alpine3.14 AS cython-compile
WORKDIR /tmp/cython
COPY /data/python .
RUN pip3 install --upgrade pip && \
pip3 install --no-cache-dir cython && \
apk add --no-cache --virtual .build-dependencies gcc musl-dev && \
python3 setup.py build
FROM alpine:latest
WORKDIR /data
COPY --from=cython-compile /tmp/cython .
docker-compose.yml:
version: "3.9"
services:
testtest:
container_name: ztz-test
build:
context: .
dockerfile: Dockerfile
ports:
- "7776:7776"
volumes:
- .:/data
When I run the command docker-compose build there is no error at all but the file compiled by cython is not copied. I have confirmed that the file is in /tmp/cython by commenting this line:
FROM alpine:latest
WORKDIR /data
COPY --from=cython-compile /tmp/cython .
Related
During working with docker where I dockerised Django PostgreSQL, I've entered in such problems as when I change some model and migrate it, after entering to the page, it says there is no such relationship in the database. After some research, I found that problem can be due to creating every time new migration and deleting the old.
How can I fix this problem?
Below you can see my configurations
docker-compose-prod.yml
services:
app:
volumes:
- static_data:/app/staticfiles
- media_data:/app/mediafiles
env_file:
- django.env
- words_az.env
- words_en.env
build:
context: .
ports:
- "8000:8000"
entrypoint: /app/script/entrypoint.sh
command: sh -c "python manage.py collectstatic --no-input &&
gunicorn --workers=3 --bind 0.0.0.0:8000 django.wsgi:application"
depends_on:
- db
nginx:
build: ./nginx
volumes:
- static_data:/app/staticfiles
- media_data:/app/mediafiles
ports:
- "80:80"
- "443:443"
depends_on:
- app
- flower
db:
image: postgres:14.0-alpine
volumes:
- postgres_data:/var/lib/postgresql/data/
env_file:
- db.env
ports:
- "5432:5432"
redis:
image: redis:alpine
ports:
- "6379:6379"
worker:
build:
context: .
command: celery -A django worker -l info
env_file:
- django.env
depends_on:
- db
- redis
- app
flower:
build: ./
command: celery -A django flower --basic_auth=$user:$password --address=0.0.0.0 --port=5555 --url-prefix=flower
env_file:
- django.env
ports:
- "5555:5555"
depends_on:
- redis
- worker
volumes:
postgres_data:
static_data:
media_data:
Dockerfile
FROM python:3.9-alpine
ENV PATH = "/script:${PATH}"
COPY ./requirements.txt /requirements.txt
RUN apk add --update --no-cache --virtual .tmp gcc g++ libc-dev linux-headers \
&& apk add --virtual build-deps gcc python3-dev musl-dev \
&& apk add postgresql \
&& apk add postgresql-dev \
&& pip install psycopg2 \
&& apk add jpeg-dev zlib-dev libjpeg \
&& pip install Pillow \
&& apk del build-deps
RUN pip install --upgrade pip
RUN pip install -r /requirements.txt
RUN apk del .tmp
RUN mkdir /app
COPY /src /app
RUN mkdir /app/staticfiles
COPY /script /app/script
RUN chmod +x /app/script/*
WORKDIR /app
COPY django.env /app
RUN adduser -D user
RUN chown -R user:user /app
RUN chown -R user:user /var
RUN chmod -R 755 /var/
RUN chmod +x script/entrypoint.sh
USER user
CMD ["/script/entrypoint.sh"]
How can I connect my postgreSQL database container with my django application.
How can I create a database in postgreSQL while building the image but the case is I have separate container for postgreSQL and in this case How can I connect my postgreSQL.
Dockerfile
FROM ubuntu
ENV PATH="/scripts:${PATH}"
RUN apt update -y
RUN apt-get install debconf-utils
RUN apt install python3.8 -y
RUN apt install python3-pip -y
RUN echo 'tzdata tzdata/Areas select Asia' | debconf-set-selections
RUN echo 'tzdata tzdata/Zones/Asia select Kolkata' | debconf-set-selections
RUN DEBIAN_FRONTEND="noninteractive" apt install -y tzdata
RUN apt-get install -y gdal-bin
RUN apt-get install -y libgdal-dev
COPY ./requirements.txt /requirements.txt
RUN pip install -r requirements.txt
RUN mkdir /app
COPY ./app /app
WORKDIR /app
COPY ./scripts /scripts
RUN chmod +x /scripts/*
# RUN mkdir -p /vol/web/media
# RUN mkdir -p /vol/web/static
# RUN mkdir -p /vol/web/media
# RUN adduser --disabled-password user
# RUN chown -R user:user /vol
# RUN chmod -R 755 /vol/web
# USER user
CMD ["entrypoint.sh"]
docker-compose.yml
version: '3.8'
services:
app:
build:
context: .
environment:
- SECRET_KEY=changeme
- ALLOWED_HOSTS=127.0.0.1,localhost
depends_on:
- db
db:
image: postgres
restart: always
volumes:
- static_data:/static/db
ports:
- 5432:5432
container_name: ae73234b58e8
proxy:
build:
context: ./proxy
volumes:
- static_data:/vol/static
ports:
- 80:8080
depends_on:
- app
volumes:
static_data:
So, here I need to create a database while I build the Dockerfile image and How can I do that?
You can add environment variables in db.
Set the POSTGRES_DB environment variable with name of database you use in django. It will be created once postgres is built.
I tried to run in one container NodeJS and in other container Python3.
I have two node applications and one python aplication.
Dockerfile
WORKDIR /app
COPY package.json /app
RUN npm install
RUN apt-get update || : && apt-get install python -y
COPY . /app
CMD ["npm", "start"]
Docker-Compose
services:
app1:
build: .
ports:
- 3100:3100
command: node ./app.js
app2:
build: .
ports:
- 3000:3000
command: node ./app1.js
app3:
build: .
command: python ./algo.py
Is it possible to run all of these apps in one image with docker-compose
I'm very new to docker, am trying to use it with Django, here is my DockerFile :
FROM python:3.6
RUN mkdir /app
WORKDIR /app
ADD . /app/
ENV PYTHONUNBUFFERED 1
ENV LANG C.UTF-8
ENV DEBIAN_FRONTEND=noninteractive
ENV PORT=8000
RUN apt-get update && apt-get install -y --no-install-recommends \
tzdata \
python3-setuptools \
python3-pip \
python3-dev \
python3-venv \
git \
&& \
apt-get clean && \
rm -rf /var/lib/apt/lists/*
RUN pip3 install --upgrade pip
RUN pip3 install pipenv
RUN pip install -r requirements.txt && python manage.py migrate
EXPOSE 8888
CMD gunicorn g_attend.wsgi:application --bind 0.0.0.0:$PORT
it works normally but it never does the migrations, any help?
Note Pardon me if the question is a beginner question, it is my 1st time with docker and can't find clear documentation for Docker/Django
First of all you should not run migrations in your custom Dockerfile. A good practice is creating entrypoint.sh.
This is example entrypoint file:
#!/bin/bash
set -e
echo "${0}: running migrations."
python manage.py makemigrations --merge
python manage.py migrate --noinput
echo "${0}: collecting statics."
python manage.py collectstatic --noinput
cp -rv static/* static_shared/
gunicorn yourapp.wsgi:application \
--env DJANGO_SETTINGS_MODULE=yourapp.production_settings \
--name yourapp \
--bind 0.0.0.0:8000 \
--timeout 600 \
--workers 4 \
--log-level=info \
--reload
Additionally I recommend using docker-compose, which helps to organize your deployment in one place.
Example:
version: '3'
web:
build:
context: .
dockerfile: Dockerfile
command:
- /bin/sh
- '-c'
- '/code/entrypoint.sh'
ports:
- '8000:8000'
volumes:
- '.:/code'
- 'media_volume:/media'
And example Dockerfile
FROM python:3.6.8
RUN apt-get update;
ENV PYTHONUNBUFFERED 1
RUN mkdir /code
ADD requirements.txt /code
ADD entrypoint.sh /code
WORKDIR /code
RUN chmod +x *.sh
RUN pip install --upgrade pip
RUN pip install -r requirements.txt
ADD . /code
Based on #sebb answer, I've created a docker-compose.yml file but the entrypoint.sh didn't work as expected, after some searches, I've added the migration line to the docker-compose file, so here is how files looked at the end :
Dockerfile
FROM python:3
ENV PYTHONUNBUFFERED 1
RUN mkdir /code
WORKDIR /code
COPY requirements.txt /code/
COPY entrypoint.sh /code/
RUN pip install -r requirements.txt
COPY . /code/
docker-compose.yml
version: '3'
services:
db:
image: postgres
web:
build: .
command: bash -c "python manage.py makemigrations && python manage.py migrate && python manage.py runserver 0.0.0.0:8000"
volumes:
- .:/code
ports:
- "8000:8000"
depends_on:
- db
And Finally, it worked.
I what to dockerize my django app,
i create my Dockerfile :
FROM python:3.6-alpine
RUN apk add --no-cache linux-headers libffi-dev jpeg-dev zlib-dev
RUN apk update && apk add postgresql-dev gcc python3-dev musl-dev
RUN mkdir /DEV
WORKDIR /DEV
COPY ./requirements.txt .
RUN pip install --upgrade pip
RUN pip install -r requirements.txt
ENV PYTHONUNBUFFERED 1
COPY . .
at this point i create my docker-compose.yml:
version: '3'
networks:
mynetwork:
driver: bridge
services:
db:
image: postgres
restart: always
ports:
- "5432:5432"
networks:
- mynetwork
environment:
POSTGRES_USER: myuser
POSTGRES_PASSWORD: mypass
POSTGRES_DB: mydb
volumes:
- ./data:/var/lib/postgresql/data
web:
build: .
command: python manage.py runserver 0.0.0.0:8000
networks:
- mynetwork
volumes:
- .:/DEV
ports:
- "8000:8000"
depends_on:
- db
then i create a .dockerignore file:
# Ignore
.DS_Store
.idea
.venv2
__pycache__
!manage.py
*.py[cod]
*$py.class
*.so
.Python
*.log
docker-compose.yml
Dockerfile
geckodriver.log
golog.py
golog.pyc
log.html
media
out
output.xml
report.html
startup.sh
templates
testlibs
.dockerignore
well, at this point i run:
docker-compose build --no-cache
at the end image was build correctly, but when i run:
docker-compose up
system return this error:
web_1 | python: can't open file 'manage.py': [Errno 2] No such file or directory
core_web_1 exited with code 2
Someone can help me about the issue?
so many thanks in advance
Try making your Dockerfile more explicit with the locations and then change your docker-compose as well:
FROM python:3.6-alpine
RUN apk add --no-cache linux-headers libffi-dev jpeg-dev zlib-dev
RUN apk update && apk add postgresql-dev gcc python3-dev musl-dev
RUN mkdir /DEV
WORKDIR /DEV
COPY ./requirements.txt /DEV/
RUN pip install --upgrade pip
RUN pip install -r requirements.txt
ENV PYTHONUNBUFFERED 1
COPY . /DEV/
web:
build: .
command: python /DEV/manage.py runserver 0.0.0.0:8000
networks:
- mynetwork