Deploying FastAPI Applications with Nginx: A Production Guide

Deploying a FastAPI application to production requires careful attention to performance, security, and reliability. In this guide, we'll walk through deploying FastAPI applications with Nginx as a reverse proxy.
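
The commands and configs that follow assume a minimal FastAPI app exposed as main:app; a sketch along these lines is enough to follow along (the module and route names are just placeholders):

# main.py -- minimal placeholder app used by the examples in this guide
from fastapi import FastAPI

app = FastAPI()

@app.get("/health")
async def health():
    # Simple liveness endpoint, handy behind a reverse proxy or load balancer
    return {"status": "ok"}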

Why Use Nginx with FastAPI?

Nginx offers several benefits when used as a reverse proxy:

  • Load Balancing: Distribute traffic across multiple FastAPI instances
  • SSL/TLS Termination: Handle HTTPS efficiently
  • Static File Serving: Serve static files without touching the Python process
  • Caching: Implement response caching at the proxy level
  • Security: Additional layer of protection against common attacks

Basic Nginx Configuration

server {
    listen 80;
    server_name api.yourdomain.com;

    location / {
        proxy_pass http://localhost:8000;
        proxy_set_header Host $host;
        proxy_set_header X-Real-IP $remote_addr;
        proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
        proxy_set_header X-Forwarded-Proto $scheme;
    }

    location /static/ {
        alias /path/to/your/static/files/;
        expires 1h;
        add_header Cache-Control "public, no-transform";
    }
}
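
On Debian/Ubuntu-style installs this config would typically live under /etc/nginx/sites-available/ (paths and layout vary by distribution). Something like the following enables the site and validates the syntax before reloading; the file name fastapi is just an example:

# Enable the site, check the config, then reload Nginx
sudo ln -s /etc/nginx/sites-available/fastapi /etc/nginx/sites-enabled/
sudo nginx -t
sudo systemctl reload nginx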

SSL Configuration

server {
    listen 443 ssl http2;
    server_name api.yourdomain.com;

    ssl_certificate /etc/letsencrypt/live/api.yourdomain.com/fullchain.pem;
    ssl_certificate_key /etc/letsencrypt/live/api.yourdomain.com/privkey.pem;
    
    ssl_protocols TLSv1.2 TLSv1.3;
    ssl_ciphers ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES128-GCM-SHA256:ECDHE-ECDSA-AES256-GCM-SHA384:ECDHE-RSA-AES256-GCM-SHA384:ECDHE-ECDSA-CHACHA20-POLY1305:ECDHE-RSA-CHACHA20-POLY1305:DHE-RSA-AES128-GCM-SHA256:DHE-RSA-AES256-GCM-SHA384;
    ssl_prefer_server_ciphers off;

    # HSTS (uncomment if you're sure)
    # add_header Strict-Transport-Security "max-age=63072000" always;
}
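
The certificate paths above follow the Let's Encrypt layout; one common way to obtain them is certbot (for example, sudo certbot certonly --nginx -d api.yourdomain.com). You'll usually also want a plain-HTTP server block that redirects everything to HTTPS, along these lines:

server {
    listen 80;
    server_name api.yourdomain.com;

    # Redirect all HTTP traffic to the HTTPS server block above
    return 301 https://$host$request_uri;
}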

Load Balancing Configuration

upstream fastapi_cluster {
    server 127.0.0.1:8000;
    server 127.0.0.1:8001;
    server 127.0.0.1:8002;
    keepalive 32;
}

server {
    location / {
        proxy_pass http://fastapi_cluster;
        proxy_http_version 1.1;
        proxy_set_header Connection "";
    }
}
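
The proxy headers from the basic configuration still apply here; they're omitted only for brevity. The upstream assumes one Uvicorn instance per port. For a quick local test you could start them by hand as a sketch like this; in production, prefer a process manager such as systemd (covered below):

# Quick sketch: one single-process Uvicorn per upstream port
uvicorn main:app --host 127.0.0.1 --port 8000 &
uvicorn main:app --host 127.0.0.1 --port 8001 &
uvicorn main:app --host 127.0.0.1 --port 8002 &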

Rate Limiting

# In http block
limit_req_zone $binary_remote_addr zone=api_limit:10m rate=10r/s;

# In server block
location / {
    limit_req zone=api_limit burst=20 nodelay;
    proxy_pass http://fastapi_cluster;
}
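
By default Nginx answers rate-limited requests with a 503; for an API, returning 429 Too Many Requests is usually clearer for clients, and a single extra directive in the same location handles it:

# Return 429 instead of the default 503 when the limit is exceeded
limit_req_status 429;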

Systemd Service Configuration

[Unit]
Description=FastAPI application
After=network.target

[Service]
User=www-data
Group=www-data
WorkingDirectory=/path/to/your/app
Environment="PATH=/path/to/your/venv/bin"
ExecStart=/path/to/your/venv/bin/uvicorn main:app --host 0.0.0.0 --port 8000 --workers 4

[Install]
WantedBy=multi-user.target
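
Assuming the unit above is saved as /etc/systemd/system/fastapi.service (the name is arbitrary), it's enabled and started the usual way; adding Restart=always to the [Service] section is also worth considering so the app recovers from crashes:

# Pick up the new unit file, start it now and on every boot, then verify
sudo systemctl daemon-reload
sudo systemctl enable --now fastapi
sudo systemctl status fastapi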

Docker Compose Setup

version: '3.8'

services:
  nginx:
    image: nginx:alpine
    ports:
      - "80:80"
      - "443:443"
    volumes:
      - ./nginx.conf:/etc/nginx/conf.d/default.conf
      - ./certbot/conf:/etc/letsencrypt
    depends_on:
      - fastapi

  fastapi:
    build: .
    command: uvicorn main:app --host 0.0.0.0 --port 8000 --workers 4
    volumes:
      - .:/app
    environment:
      - DATABASE_URL=postgresql://user:password@db:5432/dbname
    depends_on:
      - db

  db:
    image: postgres:13-alpine
    volumes:
      - postgres_data:/var/lib/postgresql/data
    environment:
      - POSTGRES_USER=user
      - POSTGRES_PASSWORD=password
      - POSTGRES_DB=dbname

volumes:
  postgres_data:
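
The fastapi service uses build: ., so Compose expects a Dockerfile in the project root. A minimal sketch might look like the following; it assumes dependencies are pinned in requirements.txt, and the command: in the Compose file overrides CMD at runtime:

FROM python:3.11-slim

WORKDIR /app

# Install dependencies first so this layer is cached between code changes
COPY requirements.txt .
RUN pip install --no-cache-dir -r requirements.txt

COPY . .

# Default command; docker-compose overrides this with its own "command:"
CMD ["uvicorn", "main:app", "--host", "0.0.0.0", "--port", "8000"]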

Security Headers

add_header X-Frame-Options "SAMEORIGIN" always;
add_header X-XSS-Protection "1; mode=block" always;
add_header X-Content-Type-Options "nosniff" always;
add_header Referrer-Policy "no-referrer-when-downgrade" always;
add_header Content-Security-Policy "default-src 'self' http: https: data: blob: 'unsafe-inline'" always;
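
The Content-Security-Policy above is deliberately permissive ('unsafe-inline' plus any http/https source), and X-XSS-Protection is ignored by modern browsers, so treat it as legacy. For a JSON-only API that serves no HTML, a much stricter policy is usually workable, for example:

# Stricter policy for a pure JSON API (no HTML, no embedding)
add_header Content-Security-Policy "default-src 'none'; frame-ancestors 'none'" always;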

Monitoring

access_log /var/log/nginx/api.access.log combined buffer=512k flush=1m;
error_log /var/log/nginx/api.error.log warn;

# Enable stub status for monitoring
location /nginx_status {
    stub_status on;
    allow 127.0.0.1;
    deny all;
}
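
Since the status endpoint only allows requests from 127.0.0.1, you can check it from the server itself with something like:

# Query Nginx's stub_status page locally
curl -s http://127.0.0.1/nginx_status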

Conclusion

A proper Nginx configuration is crucial for running FastAPI applications in production. Remember to:

  • Configure SSL properly
  • Implement rate limiting
  • Set up proper logging
  • Use appropriate security headers
  • Monitor your application
  • Use Docker for consistent deployments

In the next article, we'll explore implementing observability in FastAPI applications using OpenTelemetry.