Security Hardening

Protecting your web server from common attacks and vulnerabilities

Defense in Depth

Security is about layers. No single measure protects everything, but combined defenses make attacks significantly harder. The sections below stack several of those layers: rate and connection limits, request size limits, timeouts, security headers, hiding server information, least-privilege process configuration, and filtering of common attack patterns backed by a WAF.

Rate Limiting

Rate limiting restricts how many requests a client can make in a time period. This protects against brute-force attacks, credential stuffing, and denial-of-service attempts:

# Nginx: define rate limit zones in the http block
http {
    # 10MB zone, 10 requests per second per IP
    limit_req_zone $binary_remote_addr zone=general:10m rate=10r/s;

    # Stricter limit for login endpoints
    limit_req_zone $binary_remote_addr zone=login:10m rate=1r/s;

    # Per-API-key rate limit (keyed on the X-API-Key header)
    limit_req_zone $http_x_api_key zone=api:10m rate=100r/s;

    server {
        # General rate limit with burst
        location / {
            limit_req zone=general burst=20 nodelay;
            # burst=20: allow 20 extra requests
            # nodelay: process burst immediately, don't queue
        }

        # Strict rate limit on authentication
        location /login {
            limit_req zone=login burst=5;
            # No nodelay: excess requests queued
        }

        # Return 429 (Nginx's default for limit_req is 503) and serve a custom page
        limit_req_status 429;
        error_page 429 /rate-limited.html;
    }
}

# Connection limiting (separate from request rate)
http {
    limit_conn_zone $binary_remote_addr zone=addr:10m;

    server {
        # Max 10 concurrent connections per IP
        limit_conn addr 10;
    }
}
# Apache: mod_ratelimit (bandwidth limiting)
<Location "/">
    SetOutputFilter RATE_LIMIT
    # Limit output bandwidth to 500 KB/s
    SetEnv rate-limit 500
</Location>

# Using mod_evasive (request rate limiting)
<IfModule mod_evasive24.c>
    # Max requests per page per second
    DOSPageCount 10
    DOSPageInterval 1

    # Max requests per site per second
    DOSSiteCount 100
    DOSSiteInterval 1

    # Block period (seconds)
    DOSBlockingPeriod 10

    # Email notification
    DOSEmailNotify admin@example.com

    # Log directory
    DOSLogDir "/var/log/mod_evasive"
</IfModule>

# Using mod_qos (Quality of Service)
<IfModule mod_qos.c>
    # Max connections per IP
    QS_SrvMaxConnPerIP 50

    # Request rate limit
    QS_ClientEventRequestLimit 100
</IfModule>
// Express: rate limiting with express-rate-limit
const express = require('express');
const rateLimit = require('express-rate-limit');
const RedisStore = require('rate-limit-redis');
const Redis = require('ioredis');

const app = express();

// Basic rate limiter (in-memory)
const generalLimiter = rateLimit({
    windowMs: 60 * 1000, // 1 minute
    max: 100,            // 100 requests per minute
    message: { error: 'Too many requests, slow down' },
    standardHeaders: true,  // Return rate limit info in headers
    legacyHeaders: false,
});

// Strict limiter for authentication
const authLimiter = rateLimit({
    windowMs: 15 * 60 * 1000, // 15 minutes
    max: 5,                   // 5 attempts
    message: { error: 'Too many login attempts' },
    skipSuccessfulRequests: true, // Only count failures
});

// Production: Use Redis for distributed rate limiting
const redis = new Redis(process.env.REDIS_URL);
const distributedLimiter = rateLimit({
    windowMs: 60 * 1000,
    max: 100,
    store: new RedisStore({
        sendCommand: (...args) => redis.call(...args),
    }),
});

// Apply limiters
app.use(generalLimiter);
app.use('/login', authLimiter);
app.use('/api', distributedLimiter);

// Custom rate limit by API key
const apiKeyLimiter = rateLimit({
    windowMs: 60 * 1000,
    max: 1000,
    keyGenerator: (req) => req.headers['x-api-key'] || req.ip,
    skip: (req) => req.headers['x-api-key'] === 'premium-key',
});

Burst vs Sustained Rate

The burst parameter allows temporary spikes above the rate limit. Without burst, legitimate users may get blocked during normal page loads (which issue many parallel requests for assets).
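
To make the distinction concrete, here is a toy token-bucket sketch in Node.js. It is illustrative only and not part of any server configured above: capacity plays the role of burst (how big a spike is absorbed) and refillRatePerSec plays the role of the sustained rate.

// Minimal token-bucket sketch (illustrative, not tied to Nginx/Apache/Express)
class TokenBucket {
    constructor(capacity, refillRatePerSec) {
        this.capacity = capacity;          // ~ burst: spike the bucket can absorb
        this.tokens = capacity;
        this.refillRatePerSec = refillRatePerSec; // ~ sustained rate
        this.lastRefill = Date.now();
    }

    tryRemoveToken() {
        // Refill based on elapsed time, capped at capacity
        const now = Date.now();
        const elapsedSec = (now - this.lastRefill) / 1000;
        this.tokens = Math.min(this.capacity, this.tokens + elapsedSec * this.refillRatePerSec);
        this.lastRefill = now;

        if (this.tokens >= 1) {
            this.tokens -= 1;
            return true;   // request allowed
        }
        return false;      // request rejected (would become a 429/503)
    }
}

// A page load firing 15 parallel asset requests passes with capacity 20,
// but a client sustaining more than 10 req/s is eventually rejected.
const bucket = new TokenBucket(20, 10);
for (let i = 0; i < 25; i++) {
    console.log(`request ${i}: ${bucket.tryRemoveToken() ? 'allowed' : 'rejected'}`);
}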

Request Size Limits

Unbounded request sizes can exhaust server memory or enable denial-of-service. Set appropriate limits:

# Nginx: request size and buffer limits
http {
    # Maximum request body size (default: 1m)
    client_max_body_size 10m;

    # Buffer sizes
    client_body_buffer_size 128k;
    client_header_buffer_size 1k;
    large_client_header_buffers 4 8k;

    server {
        # Override for file upload endpoint
        location /upload {
            client_max_body_size 100m;
        }

        # Strict limit for API
        location /api {
            client_max_body_size 1m;
        }
    }
}
# Apache: global request body limit (10 MB, in bytes)
LimitRequestBody 10485760

# Header limits
LimitRequestFields 100
LimitRequestFieldSize 8190
LimitRequestLine 8190

# Per-location overrides
<Location "/upload">
    # 100 MB
    LimitRequestBody 104857600
</Location>

<Location "/api">
    # 1 MB
    LimitRequestBody 1048576
</Location>
// Express: request body and upload size limits
const express = require('express');
const app = express();

// JSON body limit
app.use(express.json({ limit: '1mb' }));

// URL-encoded form limit
app.use(express.urlencoded({ limit: '1mb', extended: true }));

// Raw body limit
app.use(express.raw({ limit: '10mb', type: 'application/octet-stream' }));

// File uploads with multer
const multer = require('multer');
const upload = multer({
    limits: {
        fileSize: 100 * 1024 * 1024, // 100MB
        files: 10,                    // Max 10 files
        fields: 50,                   // Max 50 form fields
    },
    fileFilter: (req, file, cb) => {
        // Only allow certain types
        const allowed = ['image/jpeg', 'image/png', 'application/pdf'];
        if (allowed.includes(file.mimetype)) {
            cb(null, true);
        } else {
            cb(new Error('Invalid file type'));
        }
    }
});

app.post('/upload', upload.array('files'), (req, res) => {
    res.json({ uploaded: req.files.length });
});

Timeout Configuration

Timeouts prevent slow or malicious clients from holding connections indefinitely:

# Nginx: client, keep-alive, and upstream timeouts
http {
    # Client timeouts
    client_body_timeout 12s;    # Time to receive request body
    client_header_timeout 12s;  # Time to receive headers
    send_timeout 10s;           # Time between write operations

    # Keep-alive
    keepalive_timeout 65s;      # Keep connection open
    keepalive_requests 100;     # Max requests per connection

    # Upstream (proxy) timeouts
    proxy_connect_timeout 60s;  # Time to establish connection
    proxy_send_timeout 60s;     # Time to send request to upstream
    proxy_read_timeout 60s;     # Time to receive response

    # FastCGI timeouts
    fastcgi_read_timeout 60s;

    server {
        # Long timeout for specific endpoints
        location /api/export {
            proxy_read_timeout 300s;  # 5 minutes for exports
        }
    }
}
# Apache: general timeout (seconds)
Timeout 60

# Keep-alive settings
KeepAlive On
MaxKeepAliveRequests 100
KeepAliveTimeout 5

# Request read timeouts (mod_reqtimeout)
<IfModule mod_reqtimeout.c>
    # Headers: 20s initially, extended by 1s per 500 bytes received, capped at 40s
    # Body: 20s initially, extended by 1s per 500 bytes received
    RequestReadTimeout header=20-40,MinRate=500 body=20,MinRate=500
</IfModule>

# Proxy timeouts
ProxyTimeout 60

# Per-path timeout: ProxyTimeout applies only at server/vhost scope,
# so use the timeout= option on ProxyPass instead (backend URL is a placeholder)
<Location "/api/export">
    ProxyPass "http://127.0.0.1:8080/api/export" timeout=300
</Location>
// Node.js/Express: server and per-request timeouts
const http = require('http');
const express = require('express');

const app = express();

// Create server with timeout
const server = http.createServer(app);
server.timeout = 120000;        // 2 minutes
server.keepAliveTimeout = 65000; // Must be > load balancer timeout
server.headersTimeout = 66000;   // Must be > keepAliveTimeout

// Per-request timeout middleware
const timeout = require('connect-timeout');

app.use(timeout('30s')); // 30 second default

// Longer timeout for specific routes
app.use('/api/export', timeout('5m'));

// Handle timeout
app.use((req, res, next) => {
    if (req.timedout) {
        return; // Response already sent
    }
    next();
});

// Abort long operations if the client disconnects before a response is sent
app.get('/api/slow', async (req, res) => {
    const controller = new AbortController();
    res.on('close', () => {
        if (!res.writableEnded) controller.abort();
    });
    const result = await doLongOperation({ signal: controller.signal });
    res.json(result);
});

server.listen(3000);

Slowloris Attack

Slowloris attacks send partial HTTP requests slowly, tying up connections. Low client_header_timeout and client_body_timeout values help mitigate this attack.
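
Node's built-in HTTP server has analogous knobs; a minimal sketch with illustrative timeout values:

const http = require('http');

const server = http.createServer((req, res) => {
    res.end('ok');
});

// Cap how long a client may take to send the complete request headers
server.headersTimeout = 15000;   // 15s
// Cap how long the entire request (headers + body) may take to arrive
server.requestTimeout = 30000;   // 30s

server.listen(3000);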

Security Headers

HTTP security headers instruct browsers to enable protective features. These headers defend against XSS, clickjacking, and other client-side attacks:

Header                    | Purpose                               | Recommended Value
Content-Security-Policy   | Control resource loading, prevent XSS | Depends on application
X-Frame-Options           | Prevent clickjacking via iframes      | DENY or SAMEORIGIN
X-Content-Type-Options    | Prevent MIME type sniffing            | nosniff
Strict-Transport-Security | Force HTTPS connections               | max-age=31536000; includeSubDomains
Referrer-Policy           | Control referrer information          | strict-origin-when-cross-origin
Permissions-Policy        | Control browser features              | Depends on application

# Nginx: security headers
server {
    # Prevent clickjacking
    add_header X-Frame-Options "SAMEORIGIN" always;

    # Prevent MIME sniffing
    add_header X-Content-Type-Options "nosniff" always;

    # HSTS (only over HTTPS!)
    add_header Strict-Transport-Security "max-age=31536000; includeSubDomains" always;

    # Control referrer
    add_header Referrer-Policy "strict-origin-when-cross-origin" always;

    # Content Security Policy (adjust for your app)
    add_header Content-Security-Policy "default-src 'self'; script-src 'self' 'unsafe-inline'; style-src 'self' 'unsafe-inline'; img-src 'self' data: https:; font-src 'self'; connect-src 'self'; frame-ancestors 'self';" always;

    # Permissions Policy
    add_header Permissions-Policy "geolocation=(), microphone=(), camera=()" always;

    # Cross-Origin policies
    add_header Cross-Origin-Opener-Policy "same-origin" always;
    add_header Cross-Origin-Resource-Policy "same-origin" always;
}
# Apache: security headers (mod_headers)
<IfModule mod_headers.c>
    # Prevent clickjacking
    Header always set X-Frame-Options "SAMEORIGIN"

    # Prevent MIME sniffing
    Header always set X-Content-Type-Options "nosniff"

    # HSTS
    Header always set Strict-Transport-Security "max-age=31536000; includeSubDomains"

    # Referrer policy
    Header always set Referrer-Policy "strict-origin-when-cross-origin"

    # Content Security Policy
    Header always set Content-Security-Policy "default-src 'self'; script-src 'self' 'unsafe-inline'; style-src 'self' 'unsafe-inline'"

    # Permissions Policy
    Header always set Permissions-Policy "geolocation=(), microphone=(), camera=()"
</IfModule>
// Express: security headers with Helmet
const express = require('express');
const helmet = require('helmet');

const app = express();

// Helmet sets many security headers with sensible defaults
app.use(helmet());

// Customize specific headers
app.use(helmet({
    contentSecurityPolicy: {
        directives: {
            defaultSrc: ["'self'"],
            scriptSrc: ["'self'", "'unsafe-inline'"],
            styleSrc: ["'self'", "'unsafe-inline'"],
            imgSrc: ["'self'", "data:", "https:"],
            connectSrc: ["'self'"],
            fontSrc: ["'self'"],
            frameAncestors: ["'self'"],
        },
    },
    hsts: {
        maxAge: 31536000,
        includeSubDomains: true,
        preload: true,
    },
    referrerPolicy: {
        policy: "strict-origin-when-cross-origin",
    },
}));

// Manual headers without helmet
app.use((req, res, next) => {
    res.setHeader('X-Frame-Options', 'SAMEORIGIN');
    res.setHeader('X-Content-Type-Options', 'nosniff');
    res.setHeader('Referrer-Policy', 'strict-origin-when-cross-origin');
    next();
});

Content Security Policy Deep Dive

CSP is the most powerful (and complex) security header. It controls what resources can load:

Content-Security-Policy: default-src 'self'; script-src 'self' https://cdn.example.com; style-src 'self' 'unsafe-inline'; img-src *; connect-src 'self' https://api.example.com

Directive       | Controls                      | Example Values
default-src     | Fallback for other directives | 'self', 'none'
script-src      | JavaScript sources            | 'self', 'unsafe-inline', URLs
style-src       | CSS sources                   | 'self', 'unsafe-inline'
img-src         | Image sources                 | *, data:, URLs
connect-src     | AJAX, WebSocket, fetch        | 'self', API URLs
frame-ancestors | Who can embed this page       | 'none', 'self'

CSP Report-Only Mode

Use Content-Security-Policy-Report-Only to test a policy without breaking your site: violations are reported but not blocked. Add report-uri /csp-report (or the newer report-to directive) to receive violation reports.
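
A minimal Express sketch of that workflow, assuming a /csp-report endpoint of our own naming (browsers POST violation reports as JSON with the application/csp-report content type):

const express = require('express');
const app = express();

// Accept CSP violation reports, which arrive as JSON
app.use(express.json({ type: ['application/json', 'application/csp-report'] }));

// Report-only: nothing is blocked, violations are reported to /csp-report
app.use((req, res, next) => {
    res.setHeader(
        'Content-Security-Policy-Report-Only',
        "default-src 'self'; report-uri /csp-report"
    );
    next();
});

// Collect reports for review before switching to an enforcing policy
app.post('/csp-report', (req, res) => {
    console.warn('CSP violation:', JSON.stringify(req.body));
    res.sendStatus(204);
});

app.listen(3000);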

Hiding Server Information

By default, servers advertise their software and version. This helps attackers identify vulnerable servers:

Server: nginx/1.18.0 (Ubuntu)

Hide this information:

# Nginx
http {
    # Hide Nginx version
    server_tokens off;

    # Remove Server header entirely (requires headers-more module)
    # more_clear_headers Server;

    # Hide PHP version (if using PHP-FPM)
    fastcgi_hide_header X-Powered-By;

    # Hide proxy server headers
    proxy_hide_header X-Powered-By;
    proxy_hide_header Server;
}
# Hide Apache version
ServerTokens Prod
ServerSignature Off

# Hide PHP version (in php.ini)
# expose_php = Off

# Remove X-Powered-By
<IfModule mod_headers.c>
    Header unset X-Powered-By
    # Note: Apache generates its Server header late in the response cycle,
    # so "Header unset Server" usually has no effect; rely on ServerTokens Prod
    Header unset Server
</IfModule>
// Express: hide framework headers
const express = require('express');
const app = express();

// Disable X-Powered-By header
app.disable('x-powered-by');

// Or remove it manually
app.use((req, res, next) => {
    res.removeHeader('X-Powered-By');
    next();
});

// Using helmet (does this automatically)
const helmet = require('helmet');
app.use(helmet.hidePoweredBy());

Security Through Obscurity

Hiding version information is a minor security measure—determined attackers can fingerprint your server other ways. Don't rely on obscurity; keep your server software updated.

Principle of Least Privilege

The web server process should have minimal permissions. If compromised, limited access reduces damage:

User and Group

# Nginx runs as unprivileged user
user nginx nginx;  # or www-data on Debian

# Verify with
ps aux | grep nginx
# Workers should run as nginx/www-data; only the master process runs as root
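
A Node.js process that must bind a privileged port can follow the same principle: bind while still root, then drop to an unprivileged account. A POSIX-only sketch, assuming a www-data user exists:

const http = require('http');

const server = http.createServer((req, res) => res.end('ok'));

// Binding port 80 requires root; drop privileges immediately afterwards
server.listen(80, () => {
    if (process.getuid && process.getuid() === 0) {
        process.setgid('www-data');  // drop group first
        process.setuid('www-data');  // then drop user
    }
    console.log('Serving as uid', process.getuid ? process.getuid() : 'n/a');
});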

File Permissions

# Web content: readable by server, not writable
chown -R root:nginx /var/www/html
chmod -R 750 /var/www/html
find /var/www/html -type f -exec chmod 640 {} \;

# Upload directory: writable only where needed
chown nginx:nginx /var/www/uploads
chmod 750 /var/www/uploads

# Configuration files: root only
chown root:root /etc/nginx/nginx.conf
chmod 600 /etc/nginx/nginx.conf

# SSL certificates: minimal access
chown root:root /etc/ssl/private/
chmod 700 /etc/ssl/private/
chmod 600 /etc/ssl/private/*.key

Restricting Access in Configuration

# Nginx: block access to sensitive files
location ~ /\. {
    deny all;  # Hidden files and directories (.htaccess, .git, .env, etc.)
}

# Already covered by the /\. rule above; listed separately for emphasis
location ~ /\.git {
    deny all;
}

location ~ /\.env {
    deny all;
}

location ~* \.(sql|bak|config|ini)$ {
    deny all;  # Backup and config files
}

# Restrict admin area by IP
location /admin {
    allow 10.0.0.0/8;
    allow 192.168.1.0/24;
    deny all;
}

chroot and Containers

For maximum isolation, run the web server in a restricted environment:

# Using systemd sandboxing
# /etc/systemd/system/nginx.service.d/security.conf
[Service]
ProtectSystem=strict
ProtectHome=true
PrivateTmp=true
NoNewPrivileges=true
# ReadOnlyPaths=/ is redundant with ProtectSystem=strict but harmless
ReadOnlyPaths=/
# Re-open only the paths Nginx must write (/run holds the PID file)
ReadWritePaths=/var/log/nginx /var/cache/nginx /run

# Docker isolation
docker run -d \
    --read-only \
    --tmpfs /var/cache/nginx \
    --tmpfs /var/run \
    -v /var/www/html:/usr/share/nginx/html:ro \
    nginx

Blocking Common Attacks

SQL Injection Attempts

# Nginx: block common SQL injection patterns (naive filtering; see the WAF note below)
location / {
    if ($query_string ~* "union.*select") { return 403; }
    if ($query_string ~* "concat.*\(") { return 403; }
    if ($query_string ~* "base64_") { return 403; }
}

Path Traversal

# Block directory traversal attempts
# (Nginx normalizes $uri, so check the raw $request_uri for ".." sequences)
location / {
    if ($request_uri ~* "\.\.") { return 403; }
    if ($uri ~* "etc/passwd") { return 403; }
}
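
At the application layer, the more robust defense is path canonicalization rather than pattern matching. A minimal Express sketch, with an illustrative uploads directory:

const express = require('express');
const path = require('path');
const fs = require('fs');

const app = express();
const ROOT = path.resolve(__dirname, 'uploads');  // illustrative directory

app.get('/files/:name', (req, res) => {
    // Resolve the requested name against the root, then verify it stayed inside
    const target = path.resolve(ROOT, req.params.name);
    if (!target.startsWith(ROOT + path.sep)) {
        return res.status(403).send('Forbidden');
    }
    fs.createReadStream(target)
        .on('error', () => res.status(404).send('Not found'))
        .pipe(res);
});

app.listen(3000);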

User Agent Filtering

# Block known bad bots and scanners (the map block belongs in the http context)
map $http_user_agent $bad_bot {
    default 0;
    ~*sqlmap 1;
    ~*nikto 1;
    ~*nmap 1;
    ~*masscan 1;
    "" 1;  # Empty user agent
}

server {
    if ($bad_bot) {
        return 403;
    }
}
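
A rough Express equivalent of the same idea, with an illustrative pattern list (real scanners rotate user agents, so treat this as a speed bump, not a WAF):

const express = require('express');
const app = express();

// Illustrative deny list mirroring the Nginx map above
const badBots = [/sqlmap/i, /nikto/i, /nmap/i, /masscan/i];

app.use((req, res, next) => {
    const ua = req.get('User-Agent') || '';
    // Reject empty user agents and known scanner signatures
    if (ua === '' || badBots.some((re) => re.test(ua))) {
        return res.status(403).send('Forbidden');
    }
    next();
});

app.listen(3000);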

WAF for Serious Protection

Simple pattern matching misses sophisticated attacks. For production systems, use a Web Application Firewall (WAF) like ModSecurity, Cloudflare WAF, or AWS WAF. They maintain updated rule sets against current threats.

Security Checklist

Review this checklist when deploying or auditing a web server:

- Rate limiting and connection limits configured (stricter on login/auth endpoints)
- Request body, header, and upload size limits set
- Client, keep-alive, and upstream timeouts tuned (Slowloris mitigations in place)
- Security headers sent: CSP, X-Frame-Options, X-Content-Type-Options, HSTS, Referrer-Policy, Permissions-Policy
- Server version and framework headers hidden; software kept up to date
- Server process runs as an unprivileged user with minimal file permissions
- Access to hidden/sensitive files and admin areas restricted
- Common attack patterns filtered, with a WAF in front of production systems

Summary