Bài 33: Deploying Django with Docker
Production Dockerfile
Dockerfile tối ưu cho production deployment.
Multi-Stage Production Dockerfile
# Dockerfile.prod
# Stage 1: Builder — build wheels here so compilers never reach the runtime image.
FROM python:3.11-slim AS builder

WORKDIR /app

ENV PYTHONDONTWRITEBYTECODE=1 \
    PYTHONUNBUFFERED=1

# Build-time system dependencies (discarded along with this stage).
RUN apt-get update && apt-get install -y --no-install-recommends \
        gcc \
        postgresql-client \
    && rm -rf /var/lib/apt/lists/*

# Build wheels for all Python dependencies.
COPY requirements.txt .
RUN pip wheel --no-cache-dir --no-deps --wheel-dir /app/wheels -r requirements.txt

# Stage 2: Runtime — minimal image with only what the app needs.
FROM python:3.11-slim

# Create an unprivileged user to run the app.
RUN groupadd -r app && useradd -r -g app app

# Runtime-only system dependencies.
RUN apt-get update && apt-get install -y --no-install-recommends \
        postgresql-client \
    && rm -rf /var/lib/apt/lists/*

WORKDIR /app

# Install the pre-built wheels.
# BUG FIX: `pip install --no-cache` is not a valid pip flag — the correct flag
# is `--no-cache-dir`. Also remove /wheels in the same layer so the wheel files
# do not bloat the final image.
COPY --from=builder /app/wheels /wheels
RUN pip install --no-cache-dir /wheels/* && rm -rf /wheels

# Copy the project with correct ownership. Using COPY --chown replaces the
# original `RUN chown -R app:app /app`, which duplicated every file into an
# extra layer and roughly doubled the image contribution of the app code.
COPY --chown=app:app . /app/

# Collect static files at build time.
# NOTE(review): collectstatic imports Django settings — SECRET_KEY etc. must be
# resolvable at build time (e.g. via defaults); confirm, or move this to the
# container entrypoint.
RUN python manage.py collectstatic --noinput

USER app

EXPOSE 8000

CMD ["gunicorn", "--bind", "0.0.0.0:8000", "--workers", "3", "myproject.wsgi:application"]
Optimized Production Dockerfile
# Dockerfile.prod
FROM python:3.11-slim

# Set environment variables (pip flags as env vars keep every pip call cache-free)
ENV PYTHONDONTWRITEBYTECODE=1 \
    PYTHONUNBUFFERED=1 \
    PIP_NO_CACHE_DIR=1 \
    PIP_DISABLE_PIP_VERSION_CHECK=1

# Install system dependencies
RUN apt-get update && apt-get install -y --no-install-recommends \
        postgresql-client \
    && apt-get clean \
    && rm -rf /var/lib/apt/lists/*

# Create unprivileged app user and the writable directories it needs
RUN groupadd -r app && \
    useradd -r -g app app && \
    mkdir -p /app /app/staticfiles /app/media && \
    chown -R app:app /app

WORKDIR /app

# Install Python dependencies first so this layer stays cached until
# requirements.txt changes (source edits don't trigger a reinstall)
COPY requirements.txt .
RUN pip install --no-cache-dir -r requirements.txt

# Copy project files with correct ownership
COPY --chown=app:app . .

# Collect static files at build time
RUN python manage.py collectstatic --noinput

USER app

EXPOSE 8000

# Health check: cheap, side-effect-free Django system check
HEALTHCHECK --interval=30s --timeout=3s --start-period=40s --retries=3 \
    CMD python manage.py check || exit 1

# max-requests(+jitter) recycles workers to guard against slow memory leaks;
# "-" log targets send access/error logs to stdout/stderr for `docker logs`
CMD ["gunicorn", \
     "--bind", "0.0.0.0:8000", \
     "--workers", "3", \
     "--worker-class", "sync", \
     "--worker-connections", "1000", \
     "--max-requests", "1000", \
     "--max-requests-jitter", "100", \
     "--timeout", "30", \
     "--access-logfile", "-", \
     "--error-logfile", "-", \
     "myproject.wsgi:application"]
Gunicorn Setup
Install Gunicorn
# requirements.txt
Django==5.0.1
gunicorn==21.2.0
psycopg2-binary==2.9.9
python-decouple==3.8
whitenoise==6.6.0
Gunicorn Configuration
# gunicorn_config.py
"""Gunicorn configuration for the production container."""
import multiprocessing

# Server socket
bind = "0.0.0.0:8000"
backlog = 2048

# Worker processes: the classic (2 * CPUs) + 1 sizing heuristic
workers = multiprocessing.cpu_count() * 2 + 1
worker_class = "sync"
worker_connections = 1000
timeout = 30
keepalive = 2

# Restart workers after this many requests (prevents slow memory leaks);
# jitter staggers the restarts so all workers don't recycle at once
max_requests = 1000
max_requests_jitter = 100

# Logging — "-" sends access/error logs to stdout/stderr for `docker logs`
accesslog = "-"
errorlog = "-"
loglevel = "info"
access_log_format = '%(h)s %(l)s %(u)s %(t)s "%(r)s" %(s)s %(b)s "%(f)s" "%(a)s"'

# Process naming
proc_name = "myproject"

# Server mechanics — defaults kept explicit for documentation purposes
daemon = False
pidfile = None
user = None
group = None
tmp_upload_dir = None

# SSL (terminated at nginx in this setup; enable only if gunicorn serves TLS)
# keyfile = "/path/to/keyfile"
# certfile = "/path/to/certfile"
# Run with Gunicorn
# Basic command
gunicorn myproject.wsgi:application

# With options
gunicorn --bind 0.0.0.0:8000 --workers 3 myproject.wsgi:application

# With config file
gunicorn -c gunicorn_config.py myproject.wsgi:application

# In Dockerfile
CMD ["gunicorn", "-c", "gunicorn_config.py", "myproject.wsgi:application"]
Nginx as Reverse Proxy
Nginx Configuration
# nginx/nginx.conf
upstream django {
    server web:8000;
}

server {
    listen 80;
    server_name example.com www.example.com;

    # Redirect to HTTPS.
    # FIX: use $host (the name the client requested) instead of $server_name,
    # which always expands to the FIRST name in server_name — www requests
    # would have been redirected to the bare domain.
    return 301 https://$host$request_uri;
}

server {
    listen 443 ssl http2;
    server_name example.com www.example.com;

    # SSL certificates
    ssl_certificate /etc/nginx/ssl/cert.pem;
    ssl_certificate_key /etc/nginx/ssl/key.pem;

    # SSL configuration
    ssl_protocols TLSv1.2 TLSv1.3;
    ssl_ciphers HIGH:!aNULL:!MD5;
    ssl_prefer_server_ciphers on;

    # Security headers
    add_header X-Frame-Options "SAMEORIGIN" always;
    add_header X-Content-Type-Options "nosniff" always;
    add_header X-XSS-Protection "1; mode=block" always;
    add_header Strict-Transport-Security "max-age=31536000; includeSubDomains" always;

    client_max_body_size 10M;

    # Static files — served directly by nginx from the shared volume
    location /static/ {
        alias /app/staticfiles/;
        expires 30d;
        add_header Cache-Control "public, immutable";
    }

    # Media files
    location /media/ {
        alias /app/media/;
        expires 30d;
    }

    # Proxy everything else to Django/gunicorn
    location / {
        proxy_pass http://django;
        proxy_set_header Host $host;
        proxy_set_header X-Real-IP $remote_addr;
        proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
        proxy_set_header X-Forwarded-Proto $scheme;
        proxy_redirect off;

        # Timeouts
        proxy_connect_timeout 60s;
        proxy_send_timeout 60s;
        proxy_read_timeout 60s;
    }

    # Health check endpoint — not access-logged to keep logs clean
    location /health/ {
        access_log off;
        proxy_pass http://django;
    }
}
Nginx Dockerfile
# nginx/Dockerfile
FROM nginx:alpine

# Remove default server config
RUN rm /etc/nginx/conf.d/default.conf

# Copy custom server config.
# BUG FIX: the nginx.conf above contains only upstream/server blocks (no
# events{}/http{} context), so overwriting /etc/nginx/nginx.conf with it would
# make nginx fail to start. It belongs in conf.d/, which the stock nginx.conf
# includes inside its http{} block.
COPY nginx.conf /etc/nginx/conf.d/default.conf

# Create directories for static/media (mounted read-only at runtime)
RUN mkdir -p /app/staticfiles /app/media

EXPOSE 80 443
Production docker-compose.yml
Complete Production Setup
# docker-compose.prod.yml
version: '3.8'

services:
  db:
    image: postgres:15-alpine
    container_name: myproject_db
    volumes:
      - postgres_data:/var/lib/postgresql/data
    environment:
      POSTGRES_DB: ${DB_NAME}
      POSTGRES_USER: ${DB_USER}
      POSTGRES_PASSWORD: ${DB_PASSWORD}
    restart: unless-stopped
    healthcheck:
      test: ["CMD-SHELL", "pg_isready -U ${DB_USER}"]
      interval: 10s
      timeout: 5s
      retries: 5
    networks:
      - backend

  web:
    build:
      context: .
      dockerfile: Dockerfile.prod
    container_name: myproject_web
    command: gunicorn -c gunicorn_config.py myproject.wsgi:application
    volumes:
      - static_volume:/app/staticfiles
      - media_volume:/app/media
    env_file:
      - .env.prod
    depends_on:
      db:
        condition: service_healthy
    restart: unless-stopped
    healthcheck:
      # BUG FIX: python:3.11-slim does not ship curl, so the original curl-based
      # probe would always fail; use Python's stdlib to hit the health endpoint.
      test: ["CMD-SHELL", "python -c \"import urllib.request; urllib.request.urlopen('http://localhost:8000/health/')\" || exit 1"]
      interval: 30s
      timeout: 10s
      retries: 3
      start_period: 40s
    networks:
      - backend
      - frontend

  nginx:
    build:
      context: ./nginx
      dockerfile: Dockerfile
    container_name: myproject_nginx
    ports:
      - "80:80"
      - "443:443"
    volumes:
      - static_volume:/app/staticfiles:ro
      - media_volume:/app/media:ro
      - ./nginx/ssl:/etc/nginx/ssl:ro
    depends_on:
      - web
    restart: unless-stopped
    networks:
      - frontend

volumes:
  postgres_data:
  static_volume:
  media_volume:

networks:
  frontend:
    driver: bridge
  backend:
    driver: bridge
With Redis Cache
# docker-compose.prod.yml (with Redis)
version: '3.8'

services:
  db:
    image: postgres:15-alpine
    # ... postgres config

  redis:
    image: redis:7-alpine
    container_name: myproject_redis
    # appendonly gives durable persistence into the redis_data volume
    command: redis-server --appendonly yes
    volumes:
      - redis_data:/data
    restart: unless-stopped
    healthcheck:
      test: ["CMD", "redis-cli", "ping"]
      interval: 5s
      timeout: 3s
      retries: 5
    networks:
      - backend

  web:
    build:
      context: .
      dockerfile: Dockerfile.prod
    command: gunicorn -c gunicorn_config.py myproject.wsgi:application
    # CONSISTENCY FIX: the base compose file waits on service_healthy; the plain
    # list form here would only wait for the containers to start, not be ready.
    depends_on:
      db:
        condition: service_healthy
      redis:
        condition: service_healthy
    # ... web config

  nginx:
    # ... nginx config

volumes:
  postgres_data:
  redis_data:
  static_volume:
  media_volume:
Environment Management
.env.prod
# .env.prod (not committed to git!)
DEBUG=False
SECRET_KEY=your-super-secret-production-key-here
ALLOWED_HOSTS=example.com,www.example.com

# Database
DB_NAME=myproject_prod
DB_USER=myproject_user
DB_PASSWORD=super-secure-password
DB_HOST=db
DB_PORT=5432

# Redis
REDIS_URL=redis://redis:6379/0

# Email
EMAIL_HOST=smtp.gmail.com
EMAIL_PORT=587
[email protected]
EMAIL_HOST_PASSWORD=your-app-password
EMAIL_USE_TLS=True

# Security
SECURE_SSL_REDIRECT=True
SESSION_COOKIE_SECURE=True
CSRF_COOKIE_SECURE=True
Django Production Settings
# settings.py
"""Production-oriented Django settings driven by environment variables."""
from decouple import config, Csv
import dj_database_url  # NOTE(review): not in requirements.txt above — add dj-database-url

DEBUG = config('DEBUG', default=False, cast=bool)
SECRET_KEY = config('SECRET_KEY')
ALLOWED_HOSTS = config('ALLOWED_HOSTS', cast=Csv())

# Database — URL assembled from the discrete DB_* variables in .env.prod;
# conn_max_age=600 enables persistent connections (10 minutes)
DATABASES = {
    'default': dj_database_url.config(
        default=f"postgresql://{config('DB_USER')}:{config('DB_PASSWORD')}@{config('DB_HOST')}:{config('DB_PORT')}/{config('DB_NAME')}",
        conn_max_age=600
    )
}

# Static files — WhiteNoise serves hashed, pre-compressed assets
STATIC_URL = '/static/'
STATIC_ROOT = BASE_DIR / 'staticfiles'
STATICFILES_STORAGE = 'whitenoise.storage.CompressedManifestStaticFilesStorage'

# Media files
MEDIA_URL = '/media/'
MEDIA_ROOT = BASE_DIR / 'media'

# Security — overridable from the environment for non-TLS local testing
SECURE_SSL_REDIRECT = config('SECURE_SSL_REDIRECT', default=True, cast=bool)
SESSION_COOKIE_SECURE = config('SESSION_COOKIE_SECURE', default=True, cast=bool)
CSRF_COOKIE_SECURE = config('CSRF_COOKIE_SECURE', default=True, cast=bool)
SECURE_HSTS_SECONDS = 31536000
SECURE_HSTS_INCLUDE_SUBDOMAINS = True
SECURE_HSTS_PRELOAD = True

# Logging — everything to stdout so Docker captures it via `docker logs`
LOGGING = {
    'version': 1,
    'disable_existing_loggers': False,
    'formatters': {
        'verbose': {
            'format': '{levelname} {asctime} {module} {message}',
            'style': '{',
        },
    },
    'handlers': {
        'console': {
            'class': 'logging.StreamHandler',
            'formatter': 'verbose',
        },
    },
    'root': {
        'handlers': ['console'],
        'level': 'INFO',
    },
}
# Deployment Workflow
Initial Deployment
# 1. Prepare server
# Install Docker and Docker Compose on server

# 2. Clone repository
git clone https://github.com/yourusername/myproject.git
cd myproject

# 3. Create .env.prod
cp .env.example .env.prod
# Edit .env.prod with production values

# 4. Build and start services
docker-compose -f docker-compose.prod.yml up -d --build

# 5. Run migrations
docker-compose -f docker-compose.prod.yml exec web python manage.py migrate

# 6. Create superuser
docker-compose -f docker-compose.prod.yml exec web python manage.py createsuperuser

# 7. Collect static files (if not done in Dockerfile)
docker-compose -f docker-compose.prod.yml exec web python manage.py collectstatic --noinput

# 8. Check logs
docker-compose -f docker-compose.prod.yml logs -f
Update Deployment
# 1. Pull latest code
git pull origin main

# 2. Rebuild and restart
docker-compose -f docker-compose.prod.yml up -d --build

# 3. Run migrations
docker-compose -f docker-compose.prod.yml exec web python manage.py migrate

# 4. Collect static files
docker-compose -f docker-compose.prod.yml exec web python manage.py collectstatic --noinput

# 5. Check logs
docker-compose -f docker-compose.prod.yml logs -f web
Zero-Downtime Deployment
# Use Docker swarm or rolling updates
docker-compose -f docker-compose.prod.yml up -d --no-deps --build web

# Or scale up then down
docker-compose -f docker-compose.prod.yml up -d --scale web=2
# Wait for new container to be healthy
docker-compose -f docker-compose.prod.yml up -d --scale web=1
Monitoring and Logging
Health Check Endpoint
# views.py
from django.http import JsonResponse
from django.db import connection


def health_check(request):
    """Health check endpoint for monitoring.

    Returns HTTP 200 with {'status': 'healthy'} when the database answers a
    trivial query, and HTTP 503 with the error message otherwise. Consumed by
    the Docker HEALTHCHECK and the nginx /health/ location.
    """
    try:
        # Cheap round-trip: proves the DB connection is actually usable.
        with connection.cursor() as cursor:
            cursor.execute("SELECT 1")
        return JsonResponse({
            'status': 'healthy',
            'database': 'connected'
        })
    except Exception as e:
        # 503 tells load balancers / orchestrators to route away or restart.
        return JsonResponse({
            'status': 'unhealthy',
            'error': str(e)
        }, status=503)


# urls.py
urlpatterns = [
    path('health/', health_check, name='health_check'),
]
# View Logs
# View all logs
docker-compose -f docker-compose.prod.yml logs

# Follow logs
docker-compose -f docker-compose.prod.yml logs -f

# Specific service
docker-compose -f docker-compose.prod.yml logs -f web

# Last 100 lines
docker-compose -f docker-compose.prod.yml logs --tail=100 web

# Export logs
docker-compose -f docker-compose.prod.yml logs > logs.txt
Backup and Restore
Database Backup
# Backup database
docker-compose -f docker-compose.prod.yml exec db pg_dump -U ${DB_USER} ${DB_NAME} > backup.sql

# Or with timestamp
docker-compose -f docker-compose.prod.yml exec db pg_dump -U myproject_user myproject_prod > backup_$(date +%Y%m%d_%H%M%S).sql

# Backup script
#!/bin/bash
# backup.sh — nightly pg_dump with 7-day retention
BACKUP_DIR="/backups"
DATE=$(date +%Y%m%d_%H%M%S)
BACKUP_FILE="$BACKUP_DIR/backup_$DATE.sql"

# -T disables TTY allocation so the dump streams cleanly through the redirect;
# expansions quoted to survive paths with spaces
docker-compose -f docker-compose.prod.yml exec -T db pg_dump -U myproject_user myproject_prod > "$BACKUP_FILE"

# Keep only last 7 days
find "$BACKUP_DIR" -name "backup_*.sql" -mtime +7 -delete

echo "Backup completed: $BACKUP_FILE"
Database Restore
# Restore database (-T so stdin redirection works without a TTY)
docker-compose -f docker-compose.prod.yml exec -T db psql -U myproject_user myproject_prod < backup.sql

# Or copy file to container first
docker cp backup.sql myproject_db:/tmp/backup.sql
docker-compose -f docker-compose.prod.yml exec db psql -U myproject_user myproject_prod -f /tmp/backup.sql
Media Files Backup
# Backup media files — throwaway alpine container tars the named volume
docker run --rm -v myproject_media_volume:/data -v $(pwd):/backup alpine tar czf /backup/media_backup.tar.gz /data

# Restore media files
docker run --rm -v myproject_media_volume:/data -v $(pwd):/backup alpine tar xzf /backup/media_backup.tar.gz -C /
Cloud Deployment Platforms
DigitalOcean App Platform
# .do/app.yaml
name: myproject
services:
- name: web
  dockerfile_path: Dockerfile.prod
  github:
    repo: yourusername/myproject
    branch: main
  health_check:
    http_path: /health/
  envs:
  - key: DEBUG
    value: "False"
  - key: SECRET_KEY
    value: ${SECRET_KEY}
  - key: DATABASE_URL
    value: ${db.DATABASE_URL}

databases:
- name: db
  engine: PG
  production: true
  version: "15"
Railway
# Install Railway CLI
npm install -g @railway/cli

# Login
railway login

# Create project
railway init

# Deploy
railway up

# Add PostgreSQL
railway add postgresql

# Set environment variables
railway variables set DEBUG=False
railway variables set SECRET_KEY=your-secret-key
Heroku with Docker
# heroku.yml
build:
  docker:
    web: Dockerfile.prod
run:
  web: gunicorn myproject.wsgi:application

# Login to Heroku
heroku login

# Create app
heroku create myproject

# Set stack to container
heroku stack:set container -a myproject

# Add PostgreSQL
# NOTE(review): the hobby-dev plan has been retired by Heroku — on current
# accounts use e.g. heroku-postgresql:essential-0 instead
heroku addons:create heroku-postgresql:hobby-dev -a myproject

# Deploy
git push heroku main
Docker Best Practices
Security
# Use specific versions
FROM python:3.11.4-slim  # Not 'latest'

# Run as non-root user
RUN adduser --disabled-password app
USER app

# Don't include secrets in image
# Use environment variables or secrets management

# Scan for vulnerabilities
docker scan myproject:prod
Optimization
# Multi-stage builds
FROM python:3.11-slim AS builder
# ... build dependencies
FROM python:3.11-slim
COPY --from=builder ...

# Order layers by frequency of change
COPY requirements.txt .
RUN pip install -r requirements.txt
COPY . .  # Code changes frequently

# Minimize layers
RUN apt-get update && apt-get install -y \
    package1 package2 \
    && rm -rf /var/lib/apt/lists/*
Bài Tập
Bài 1: Production Dockerfile
Task: Create production-ready Dockerfile:
# Requirements:
# 1. Multi-stage build for smaller image
# 2. Run as non-root user
# 3. Install Gunicorn
# 4. Collect static files
# 5. Health check
# 6. Optimize layer caching
# 7. Security best practices
# 8. Image size < 200MB
# 9. Test locally
# 10. Document all decisions
Bài 2: Complete Production Setup
Task: Full production deployment:
# Requirements:
# 1. Dockerfile.prod with multi-stage build
# 2. docker-compose.prod.yml with Django + PostgreSQL + Nginx
# 3. Gunicorn configuration
# 4. Nginx configuration with SSL
# 5. .env.prod with all settings
# 6. Health checks for all services
# 7. Restart policies
# 8. Volume management
# 9. Test deployment locally
# 10. Document deployment process
Bài 3: Deployment with Monitoring
Task: Production deployment with monitoring:
# Requirements:
# 1. Complete production setup
# 2. Health check endpoint
# 3. Logging configuration
# 4. Database backup script
# 5. Media backup script
# 6. Automated backup (cron)
# 7. Log rotation
# 8. Monitoring dashboard (optional)
# 9. Alert system (optional)
# 10. Documentation
Bài 4: Deploy to Cloud Platform
Task: Deploy to real cloud platform:
# Requirements:
# 1. Choose platform (DigitalOcean/Railway/Heroku)
# 2. Setup production environment
# 3. Configure database
# 4. Setup domain and SSL
# 5. Deploy application
# 6. Run migrations
# 7. Setup media storage
# 8. Configure backups
# 9. Setup media monitoring
# 10. Test production deployment
# 11. Create deployment documentation
# 12. Setup CI/CD (optional)
# 13. Load testing
# 14. Security audit
# 15. Cost optimization
Tài Liệu Tham Khảo
- Docker Production Best Practices
- Gunicorn Documentation
- Nginx Docker Documentation
- Django Deployment Checklist
- Docker Security Best Practices
Previous: Bài 32: Docker Compose | Next: Bài 34: Project - News/Blog Website