Make docker compose work locally

Daniel Egger 2022-02-02 22:59:20 +01:00
parent 2411b8cf4b
commit 82c987f541
31 changed files with 128 additions and 639 deletions
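To try the new setup locally (a minimal sketch, assuming Docker Compose is available and the env/docker_local.env file added in this commit is present):

  docker compose -f docker-compose-local.yml up --build
  # Django is then served on http://localhost:8008 (mapped to container port 80).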

View File

@ -1,3 +1,4 @@
.direnv
.editorconfig
.gitattributes
.github
@ -7,6 +8,5 @@
.pre-commit-config.yaml
.readthedocs.yml
.travis.yml
venv
.direnv
node_modules
venv

1
.gitignore vendored
View File

@ -275,6 +275,7 @@ vbv_lernwelt/media/
.ipython/
project.css
project.min.css
tailwind-output.css
vendors.js
*.min.js
.env

View File

@ -1,4 +1,12 @@
ARG PYTHON_VERSION=3.9-slim-bullseye
# create a new version of this docker image
# from https://stackoverflow.com/a/42125241/669561
# build from project top directory!
# > docker build -f Dockerfile -t iterativ/vbv-lernwelt-django .
# run locally
# > docker run -v $(dirname $(pwd)) -p 8080:80 -it iterativ/vbv-lernwelt-django /bin/bash
# push to registry
# > docker push iterativ/vbv-lernwelt-django
ARG PYTHON_VERSION=3.10-slim-bullseye
FROM node:16-bullseye-slim as client-builder
@ -29,8 +37,7 @@ RUN apt-get update && apt-get install --no-install-recommends -y \
COPY ./requirements .
# Create Python Dependency and Sub-Dependency Wheels.
RUN pip wheel --wheel-dir /usr/src/app/wheels \
-r ${BUILD_ENVIRONMENT}.txt
RUN pip wheel --wheel-dir /usr/src/app/wheels -r requirements-dev.txt
# Python 'run' stage
@ -68,15 +75,28 @@ RUN pip install --no-cache-dir --no-index --find-links=/wheels/ /wheels/* \
&& rm -rf /wheels/
COPY --chown=django:django ./compose/production/django/entrypoint /entrypoint
COPY --chown=django:django ./compose/django/docker_entrypoint.sh /entrypoint
RUN sed -i 's/\r$//g' /entrypoint
RUN chmod +x /entrypoint
COPY --chown=django:django ./compose/production/django/start /start
COPY --chown=django:django ./compose/django/docker_start.sh /start
RUN sed -i 's/\r$//g' /start
RUN chmod +x /start
#COPY --chown=django:django ./compose/production/django/celery/worker/start /start-celeryworker
#RUN sed -i 's/\r$//g' /start-celeryworker
#RUN chmod +x /start-celeryworker
#
#COPY --chown=django:django ./compose/production/django/celery/beat/start /start-celerybeat
#RUN sed -i 's/\r$//g' /start-celerybeat
#RUN chmod +x /start-celerybeat
#
#
#COPY ./compose/production/django/celery/flower/start /start-flower
#RUN sed -i 's/\r$//g' /start-flower
#RUN chmod +x /start-flower
# copy application code to WORKDIR
COPY --from=client-builder --chown=django:django ${APP_HOME} ${APP_HOME}

View File

@ -0,0 +1,44 @@
#!/bin/bash
set -o errexit
set -o pipefail
set -o nounset
# N.B. If only .env files supported variable expansion...
# export CELERY_BROKER_URL="${REDIS_URL}"
if [ -z "${POSTGRES_USER}" ]; then
base_postgres_image_default_user='postgres'
export POSTGRES_USER="${base_postgres_image_default_user}"
fi
export VBV_DATABASE_URL="postgres://${POSTGRES_USER}:${POSTGRES_PASSWORD}@${POSTGRES_HOST}:${POSTGRES_PORT}/${POSTGRES_DB}"
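# Wait for PostgreSQL to accept connections before handing off to /start
# (checked via the psycopg2 probe below).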
postgres_ready() {
python << END
import sys

import psycopg2

try:
    psycopg2.connect(
        dbname="${POSTGRES_DB}",
        user="${POSTGRES_USER}",
        password="${POSTGRES_PASSWORD}",
        host="${POSTGRES_HOST}",
        port="${POSTGRES_PORT}",
    )
except psycopg2.OperationalError:
    sys.exit(-1)
sys.exit(0)
END
}

until postgres_ready; do
  >&2 echo 'Waiting for PostgreSQL to become available...'
  sleep 1
done
>&2 echo 'PostgreSQL is available'
exec /start

View File

@ -0,0 +1,11 @@
#!/bin/bash
set -o errexit
set -o pipefail
set -o nounset
python /app/manage.py collectstatic --noinput
python /app/manage.py migrate
/usr/local/bin/gunicorn config.asgi --bind 0.0.0.0:80 --chdir=/app -k uvicorn.workers.UvicornWorker

View File

@ -1,69 +0,0 @@
ARG PYTHON_VERSION=3.9-slim-bullseye
# define an alias for the specific python version used in this file.
FROM python:${PYTHON_VERSION} as python
# Python build stage
FROM python as python-build-stage
ARG BUILD_ENVIRONMENT=local
# Install apt packages
RUN apt-get update && apt-get install --no-install-recommends -y \
# dependencies for building Python packages
build-essential \
# psycopg2 dependencies
libpq-dev
# Requirements are installed here to ensure they will be cached.
COPY ./requirements .
# Create Python Dependency and Sub-Dependency Wheels.
RUN pip wheel --wheel-dir /usr/src/app/wheels \
-r ${BUILD_ENVIRONMENT}.txt
# Python 'run' stage
FROM python as python-run-stage
ARG BUILD_ENVIRONMENT=local
ARG APP_HOME=/app
ENV PYTHONUNBUFFERED 1
ENV PYTHONDONTWRITEBYTECODE 1
ENV BUILD_ENV ${BUILD_ENVIRONMENT}
WORKDIR ${APP_HOME}
# Install required system dependencies
RUN apt-get update && apt-get install --no-install-recommends -y \
# psycopg2 dependencies
libpq-dev \
# Translations dependencies
gettext \
# cleaning up unused files
&& apt-get purge -y --auto-remove -o APT::AutoRemove::RecommendsImportant=false \
&& rm -rf /var/lib/apt/lists/*
# All absolute dir copies ignore the WORKDIR instruction; all relative dir copies are relative to the WORKDIR.
# copy python dependency wheels from python-build-stage
COPY --from=python-build-stage /usr/src/app/wheels /wheels/
# use wheels to install python dependencies
RUN pip install --no-cache-dir --no-index --find-links=/wheels/ /wheels/* \
&& rm -rf /wheels/
COPY ./compose/production/django/entrypoint /entrypoint
RUN sed -i 's/\r$//g' /entrypoint
RUN chmod +x /entrypoint
COPY ./compose/local/django/start /start
RUN sed -i 's/\r$//g' /start
RUN chmod +x /start
# copy application code to WORKDIR
COPY . ${APP_HOME}
ENTRYPOINT ["/entrypoint"]

View File

@ -1,9 +0,0 @@
#!/bin/bash
set -o errexit
set -o pipefail
set -o nounset
python manage.py migrate
uvicorn config.asgi:application --host 0.0.0.0 --reload

View File

@ -1,64 +0,0 @@
ARG PYTHON_VERSION=3.9-slim-bullseye
# define an alias for the specific python version used in this file.
FROM python:${PYTHON_VERSION} as python
# Python build stage
FROM python as python-build-stage
ENV PYTHONDONTWRITEBYTECODE 1
RUN apt-get update && apt-get install --no-install-recommends -y \
# dependencies for building Python packages
build-essential \
# psycopg2 dependencies
libpq-dev \
# cleaning up unused files
&& apt-get purge -y --auto-remove -o APT::AutoRemove::RecommendsImportant=false \
&& rm -rf /var/lib/apt/lists/*
# Requirements are installed here to ensure they will be cached.
COPY ./requirements /requirements
# create python dependency wheels
RUN pip wheel --no-cache-dir --no-deps --wheel-dir /usr/src/app/wheels \
-r /requirements/local.txt -r /requirements/production.txt \
&& rm -rf /requirements
# Python 'run' stage
FROM python as python-run-stage
ARG BUILD_ENVIRONMENT
ENV PYTHONUNBUFFERED 1
ENV PYTHONDONTWRITEBYTECODE 1
RUN apt-get update && apt-get install --no-install-recommends -y \
# To run the Makefile
make \
# psycopg2 dependencies
libpq-dev \
# Translations dependencies
gettext \
# Uncomment below lines to enable Sphinx output to latex and pdf
# texlive-latex-recommended \
# texlive-fonts-recommended \
# texlive-latex-extra \
# latexmk \
# cleaning up unused files
&& apt-get purge -y --auto-remove -o APT::AutoRemove::RecommendsImportant=false \
&& rm -rf /var/lib/apt/lists/*
# copy python dependency wheels from python-build-stage
COPY --from=python-build-stage /usr/src/app/wheels /wheels
# use wheels to install python dependencies
RUN pip install --no-cache /wheels/* \
&& rm -rf /wheels
COPY ./compose/local/docs/start /start-docs
RUN sed -i 's/\r$//g' /start-docs
RUN chmod +x /start-docs
WORKDIR /docs

View File

@ -1,7 +0,0 @@
#!/bin/bash
set -o errexit
set -o pipefail
set -o nounset
make livehtml

View File

@ -1,9 +0,0 @@
FROM node:16-bullseye-slim
WORKDIR /app
COPY ./package.json /app
RUN npm install && npm cache clean --force
ENV PATH ./node_modules/.bin/:$PATH

View File

@ -1,42 +0,0 @@
#!/bin/bash
set -o errexit
set -o pipefail
set -o nounset
if [ -z "${VBV_POSTGRES_USER}" ]; then
base_postgres_image_default_user='postgres'
export VBV_POSTGRES_USER="${base_postgres_image_default_user}"
fi
export VBV_DATABASE_URL="postgres://${VBV_POSTGRES_USER}:${VBV_POSTGRES_PASSWORD}@${VBV_POSTGRES_HOST}:${VBV_POSTGRES_PORT}/${VBV_POSTGRES_DB}"
postgres_ready() {
python << END
import sys
import psycopg2
try:
psycopg2.connect(
dbname="${VBV_POSTGRES_DB}",
user="${VBV_POSTGRES_USER}",
password="${VBV_POSTGRES_PASSWORD}",
host="${VBV_POSTGRES_HOST}",
port="${VBV_POSTGRES_PORT}",
)
except psycopg2.OperationalError:
sys.exit(-1)
sys.exit(0)
END
}
until postgres_ready; do
>&2 echo 'Waiting for PostgreSQL to become available...'
sleep 1
done
>&2 echo 'PostgreSQL is available'
exec "$@"

View File

@ -1,10 +0,0 @@
#!/bin/bash
set -o errexit
set -o pipefail
set -o nounset
python /app/manage.py collectstatic --noinput
/usr/local/bin/gunicorn config.asgi --bind 0.0.0.0:5000 --chdir=/app -k uvicorn.workers.UvicornWorker

View File

@ -1,6 +0,0 @@
FROM postgres:14.1
COPY ./compose/production/postgres/maintenance /usr/local/bin/maintenance
RUN chmod +x /usr/local/bin/maintenance/*
RUN mv /usr/local/bin/maintenance/* /usr/local/bin \
&& rmdir /usr/local/bin/maintenance

View File

@ -1,5 +0,0 @@
#!/usr/bin/env bash
BACKUP_DIR_PATH='/backups'
BACKUP_FILE_PREFIX='backup'

View File

@ -1,12 +0,0 @@
#!/usr/bin/env bash
countdown() {
    declare desc="A simple countdown. Source: https://superuser.com/a/611582"
    local seconds="${1}"
    local d=$(($(date +%s) + "${seconds}"))
    while [ "$d" -ge `date +%s` ]; do
        echo -ne "$(date -u --date @$(($d - `date +%s`)) +%H:%M:%S)\r";
        sleep 0.1
    done
}

View File

@ -1,41 +0,0 @@
#!/usr/bin/env bash
message_newline() {
    echo
}

message_debug() {
    echo -e "DEBUG: ${@}"
}

message_welcome() {
    echo -e "\e[1m${@}\e[0m"
}

message_warning() {
    echo -e "\e[33mWARNING\e[0m: ${@}"
}

message_error() {
    echo -e "\e[31mERROR\e[0m: ${@}"
}

message_info() {
    echo -e "\e[37mINFO\e[0m: ${@}"
}

message_suggestion() {
    echo -e "\e[33mSUGGESTION\e[0m: ${@}"
}

message_success() {
    echo -e "\e[32mSUCCESS\e[0m: ${@}"
}

View File

@ -1,16 +0,0 @@
#!/usr/bin/env bash
yes_no() {
    declare desc="Prompt for confirmation. \$\"\{1\}\": confirmation message."
    local arg1="${1}"
    local response=

    read -r -p "${arg1} (y/[n])? " response
    if [[ "${response}" =~ ^[Yy]$ ]]; then
        exit 0
    else
        exit 1
    fi
}

View File

@ -1,38 +0,0 @@
#!/usr/bin/env bash
### Create a database backup.
###
### Usage:
### $ docker-compose -f <environment>.yml (exec |run --rm) postgres backup
set -o errexit
set -o pipefail
set -o nounset
working_dir="$(dirname ${0})"
source "${working_dir}/_sourced/constants.sh"
source "${working_dir}/_sourced/messages.sh"
message_welcome "Backing up the '${VBV_POSTGRES_DB}' database..."
if [[ "${VBV_POSTGRES_USER}" == "postgres" ]]; then
message_error "Backing up as 'postgres' user is not supported. Assign 'VBV_POSTGRES_USER' env with another one and try again."
exit 1
fi
export PGHOST="${VBV_POSTGRES_HOST}"
export PGPORT="${VBV_POSTGRES_PORT}"
export PGUSER="${VBV_POSTGRES_USER}"
export PGPASSWORD="${VBV_POSTGRES_PASSWORD}"
export PGDATABASE="${VBV_POSTGRES_DB}"
backup_filename="${BACKUP_FILE_PREFIX}_$(date +'%Y_%m_%dT%H_%M_%S').sql.gz"
pg_dump | gzip > "${BACKUP_DIR_PATH}/${backup_filename}"
message_success "'${VBV_POSTGRES_DB}' database backup '${backup_filename}' has been created and placed in '${BACKUP_DIR_PATH}'."

View File

@ -1,22 +0,0 @@
#!/usr/bin/env bash
### View backups.
###
### Usage:
### $ docker-compose -f <environment>.yml (exec |run --rm) postgres backups
set -o errexit
set -o pipefail
set -o nounset
working_dir="$(dirname ${0})"
source "${working_dir}/_sourced/constants.sh"
source "${working_dir}/_sourced/messages.sh"
message_welcome "These are the backups you have got:"
ls -lht "${BACKUP_DIR_PATH}"

View File

@ -1,55 +0,0 @@
#!/usr/bin/env bash
### Restore database from a backup.
###
### Parameters:
### <1> filename of an existing backup.
###
### Usage:
### $ docker-compose -f <environment>.yml (exec |run --rm) postgres restore <1>
set -o errexit
set -o pipefail
set -o nounset
working_dir="$(dirname ${0})"
source "${working_dir}/_sourced/constants.sh"
source "${working_dir}/_sourced/messages.sh"
if [[ -z ${1+x} ]]; then
    message_error "Backup filename is not specified yet it is a required parameter. Make sure you provide one and try again."
    exit 1
fi
backup_filename="${BACKUP_DIR_PATH}/${1}"
if [[ ! -f "${backup_filename}" ]]; then
    message_error "No backup with the specified filename found. Check out the 'backups' maintenance script output to see if there is one and try again."
    exit 1
fi
message_welcome "Restoring the '${VBV_POSTGRES_DB}' database from the '${backup_filename}' backup..."
if [[ "${VBV_POSTGRES_USER}" == "postgres" ]]; then
    message_error "Restoring as 'postgres' user is not supported. Assign 'VBV_POSTGRES_USER' env with another one and try again."
    exit 1
fi
export PGHOST="${VBV_POSTGRES_HOST}"
export PGPORT="${VBV_POSTGRES_PORT}"
export PGUSER="${VBV_POSTGRES_USER}"
export PGPASSWORD="${VBV_POSTGRES_PASSWORD}"
export PGDATABASE="${VBV_POSTGRES_DB}"
message_info "Dropping the database..."
dropdb "${PGDATABASE}"
message_info "Creating a new database..."
createdb --owner="${VBV_POSTGRES_USER}"
message_info "Applying the backup to the new database..."
gunzip -c "${backup_filename}" | psql "${VBV_POSTGRES_DB}"
message_success "The '${VBV_POSTGRES_DB}' database has been restored from the '${backup_filename}' backup."

View File

@ -1,5 +0,0 @@
FROM traefik:v2.2.11
RUN mkdir -p /etc/traefik/acme \
&& touch /etc/traefik/acme/acme.json \
&& chmod 600 /etc/traefik/acme/acme.json
COPY ./compose/production/traefik/traefik.yml /etc/traefik

View File

@ -1,58 +0,0 @@
log:
  level: INFO

entryPoints:
  web:
    # http
    address: ":80"
    http:
      # https://docs.traefik.io/routing/entrypoints/#entrypoint
      redirections:
        entryPoint:
          to: web-secure

  web-secure:
    # https
    address: ":443"

certificatesResolvers:
  letsencrypt:
    # https://docs.traefik.io/master/https/acme/#lets-encrypt
    acme:
      email: "daniel.egger@iterativ.ch"
      storage: /etc/traefik/acme/acme.json
      # https://docs.traefik.io/master/https/acme/#httpchallenge
      httpChallenge:
        entryPoint: web

http:
  routers:
    web-secure-router:
      rule: "Host(`vbv-lernwelt.iterativ.ch`)"
      entryPoints:
        - web-secure
      middlewares:
        - csrf
      service: django
      tls:
        # https://docs.traefik.io/master/routing/routers/#certresolver
        certResolver: letsencrypt

  middlewares:
    csrf:
      # https://docs.traefik.io/master/middlewares/headers/#hostsproxyheaders
      # https://docs.djangoproject.com/en/dev/ref/csrf/#ajax
      headers:
        hostsProxyHeaders: ["X-CSRFToken"]

  services:
    django:
      loadBalancer:
        servers:
          - url: http://django:5000

providers:
  # https://docs.traefik.io/master/providers/file/
  file:
    filename: /etc/traefik/traefik.yml
    watch: true

View File

@ -232,25 +232,6 @@ MANAGERS = ADMINS
# https://docs.djangoproject.com/en/dev/ref/settings/#logging
# See https://docs.djangoproject.com/en/dev/topics/logging for
# more details on how to customize your logging configuration.
LOGGING = {
    "version": 1,
    "disable_existing_loggers": False,
    "formatters": {
        "verbose": {
            "format": "%(levelname)s %(asctime)s %(module)s "
            "%(process)d %(thread)d %(message)s"
        }
    },
    "handlers": {
        "console": {
            "level": "DEBUG",
            "class": "logging.StreamHandler",
            "formatter": "verbose",
        }
    },
    "root": {"level": "INFO", "handlers": ["console"]},
}
VBV_DJANGO_LOGGING_CONF = env("VBV_DJANGO_LOGGING_CONF", default="VBV_DJANGO_LOGGING_CONF_JSON_FILE")
if VBV_DJANGO_LOGGING_CONF == "VBV_DJANGO_LOGGING_CONF_CONSOLE_COLOR":
@ -326,7 +307,7 @@ else:
"console": {
"level": "DEBUG",
"class": "logging.StreamHandler",
"formatter": "verbose",
"formatter": "json",
},
},
'loggers': {

29
docker-compose-local.yml Normal file
View File

@ -0,0 +1,29 @@
version: '3'

volumes:
  vbv_lernwelt_local_postgres_data: {}
  vbv_lernwelt_local_postgres_data_backups: {}

services:
  django:
    build:
      context: .
      dockerfile: ./compose/django/Dockerfile
    image: iterativ/vbv-lernwelt-django
    depends_on:
      - postgres
    env_file:
      - ./env/docker_local.env
    ports:
      - "8008:80"
    command: /start

  postgres:
    build:
      context: .
    image: postgres:14.1
    volumes:
      - vbv_lernwelt_local_postgres_data:/var/lib/postgresql/data:Z
      - vbv_lernwelt_local_postgres_data_backups:/backups:z
    env_file:
      - ./env/docker_local.env

10
env/docker_local.env vendored
View File

@ -1,10 +1,10 @@
# PostgreSQL
# ------------------------------------------------------------------------------
VBV_POSTGRES_HOST=postgres
VBV_POSTGRES_PORT=5432
VBV_POSTGRES_DB=vbv_lernwelt
VBV_POSTGRES_USER=MRsLOrFLFqmAnAxxWMsHXfUSqWHThtGQ
VBV_POSTGRES_PASSWORD=hNqfCdG6bwCLcnfboDtNM1L2Hiwp8GuKp1DJ6t2rcKl15Vls2QbByoIZ6IQlciKM
POSTGRES_HOST=postgres
POSTGRES_PORT=5432
POSTGRES_DB=vbv_lernwelt
POSTGRES_USER=MRsLOrFLFqmAnAxxWMsHXfUSqWHThtGQ
POSTGRES_PASSWORD=hNqfCdG6bwCLcnfboDtNM1L2Hiwp8GuKp1DJ6t2rcKl15Vls2QbByoIZ6IQlciKM
# General
# ------------------------------------------------------------------------------

Binary file not shown.

View File

@ -1,69 +0,0 @@
version: '3'

volumes:
  vbv_lernwelt_local_postgres_data: {}
  vbv_lernwelt_local_postgres_data_backups: {}

services:
  django:
    build:
      context: .
      dockerfile: ./compose/local/django/Dockerfile
    image: vbv_lernwelt_local_django
    container_name: vbv_lernwelt_local_django
    depends_on:
      - postgres
    volumes:
      - .:/app:z
    env_file:
      - ./env/docker_local.env
    ports:
      - "8000:8000"
    command: /start

  postgres:
    build:
      context: .
      dockerfile: ./compose/production/postgres/Dockerfile
    image: vbv_lernwelt_production_postgres
    container_name: vbv_lernwelt_local_postgres
    volumes:
      - vbv_lernwelt_local_postgres_data:/var/lib/postgresql/data:Z
      - vbv_lernwelt_local_postgres_data_backups:/backups:z
    env_file:
      - ./env/docker_local.env

  docs:
    image: vbv_lernwelt_local_docs
    container_name: vbv_lernwelt_local_docs
    build:
      context: .
      dockerfile: ./compose/local/docs/Dockerfile
    env_file:
      - ./env/docker_local.env
    volumes:
      - ./docs:/docs:z
      - ./config:/app/config:z
      - ./vbv_lernwelt:/app/vbv_lernwelt:z
    ports:
      - "7000:7000"
    command: /start-docs

  node:
    build:
      context: .
      dockerfile: ./compose/local/node/Dockerfile
    image: vbv_lernwelt_local_node
    container_name: vbv_lernwelt_local_node
    depends_on:
      - django
    volumes:
      - .:/app:z
      # http://jdlm.info/articles/2016/03/06/lessons-building-node-app-docker.html
      - /app/node_modules
    command: npm run dev
    ports:
      - "3000:3000"
      # Expose browsersync UI: https://www.browsersync.io/docs/options/#option-ui
      - "3001:3001"

View File

@ -1,67 +0,0 @@
import os
from pathlib import Path
from typing import Sequence

import pytest

ROOT_DIR_PATH = Path(__file__).parent.resolve()
PRODUCTION_DOTENVS_DIR_PATH = ROOT_DIR_PATH / ".envs" / ".production"
PRODUCTION_DOTENV_FILE_PATHS = [
    PRODUCTION_DOTENVS_DIR_PATH / ".django",
    PRODUCTION_DOTENVS_DIR_PATH / ".postgres",
]
DOTENV_FILE_PATH = ROOT_DIR_PATH / ".env"


def merge(
    output_file_path: str, merged_file_paths: Sequence[str], append_linesep: bool = True
) -> None:
    with open(output_file_path, "w") as output_file:
        for merged_file_path in merged_file_paths:
            with open(merged_file_path, "r") as merged_file:
                merged_file_content = merged_file.read()
                output_file.write(merged_file_content)
                if append_linesep:
                    output_file.write(os.linesep)


def main():
    merge(DOTENV_FILE_PATH, PRODUCTION_DOTENV_FILE_PATHS)


@pytest.mark.parametrize("merged_file_count", range(3))
@pytest.mark.parametrize("append_linesep", [True, False])
def test_merge(tmpdir_factory, merged_file_count: int, append_linesep: bool):
    tmp_dir_path = Path(str(tmpdir_factory.getbasetemp()))
    output_file_path = tmp_dir_path / ".env"
    expected_output_file_content = ""
    merged_file_paths = []
    for i in range(merged_file_count):
        merged_file_ord = i + 1
        merged_filename = ".service{}".format(merged_file_ord)
        merged_file_path = tmp_dir_path / merged_filename
        merged_file_content = merged_filename * merged_file_ord
        with open(merged_file_path, "w+") as file:
            file.write(merged_file_content)
        expected_output_file_content += merged_file_content
        if append_linesep:
            expected_output_file_content += os.linesep
        merged_file_paths.append(merged_file_path)

    merge(output_file_path, merged_file_paths, append_linesep)

    with open(output_file_path, "r") as output_file:
        actual_output_file_content = output_file.read()

    assert actual_output_file_content == expected_output_file_content


if __name__ == "__main__":
    main()

View File

@ -24,7 +24,7 @@
],
"scripts": {
"dev": "gulp",
"tailwind": "tailwindcss -i ./vbv_lernwelt/static/tailwind/input.css -o ./vbv_lernwelt/static/css/output.css --watch",
"build": "gulp generate-assets"
"tailwind": "tailwindcss -i ./vbv_lernwelt/static/tailwind/input.css -o ./vbv_lernwelt/static/css/tailwind-output.css --watch",
"build": "tailwindcss -i ./vbv_lernwelt/static/tailwind/input.css -o ./vbv_lernwelt/static/css/tailwind-output.css && gulp generate-assets"
}
}

7
tailwind.config.js Normal file
View File

@ -0,0 +1,7 @@
module.exports = {
  content: ["./vbv_lernwelt/**/*.{html,js}"],
  theme: {
    extend: {},
  },
  plugins: [],
}
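
Together with the package.json script changes above, Tailwind now writes its output to vbv_lernwelt/static/css/tailwind-output.css, which base.html links (see the template change below). A rough local workflow, assuming dependencies were installed with npm install:

  npm run tailwind   # rebuild tailwind-output.css in watch mode during development
  npm run build      # one-off Tailwind build followed by gulp generate-assets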

View File

@ -15,7 +15,7 @@
<!-- Your stuff: Third-party CSS libraries go here -->
<!-- This file stores project-specific CSS -->
<link href="{% static 'css/project.min.css' %}" rel="stylesheet">
<link href="{% static 'css/output.css' %}" rel="stylesheet">
<link href="{% static 'css/tailwind-output.css' %}" rel="stylesheet">
{% endblock %}
<!-- Le javascript
================================================== -->