Configure OpenResty with Lua scripting for advanced caching strategies including dynamic cache keys, intelligent TTL management, and conditional cache invalidation based on business logic.
Prerequisites
- Root or sudo access
- Basic understanding of NGINX configuration
- Familiarity with Lua scripting concepts
What this solves
OpenResty extends NGINX with powerful Lua scripting capabilities that enable advanced caching logic beyond standard HTTP caching. This tutorial shows how to implement dynamic cache key generation, intelligent TTL management based on content type and user context, and conditional cache invalidation using custom business rules.
Step-by-step installation
Update system packages
Start by updating your package manager to ensure you get the latest versions.
sudo apt update && sudo apt upgrade -y
Install OpenResty
OpenResty provides NGINX with integrated LuaJIT support and additional modules for advanced scripting. Note that `apt-key` is deprecated on current Debian and Ubuntu releases; on those systems, store the repository key in `/usr/share/keyrings` and reference it with a `signed-by` option in the sources list instead, as shown in the automated install script at the end of this guide.
wget -qO - https://openresty.org/package/pubkey.gpg | sudo apt-key add -
echo "deb http://openresty.org/package/ubuntu $(lsb_release -sc) main" | sudo tee /etc/apt/sources.list.d/openresty.list
sudo apt update
sudo apt install -y openresty openresty-resty
Create Lua script directory
Create a dedicated directory for Lua scripts with proper ownership and permissions.
sudo mkdir -p /etc/openresty/lua
sudo chown -R nobody:nobody /etc/openresty/lua
sudo chmod 755 /etc/openresty/lua
Configure OpenResty main configuration
Set up the main OpenResty configuration with Lua support and shared memory zones for caching.
# Master process starts as root (required to bind :80); workers drop to this user.
user nobody;
worker_processes auto;
# Per-process file-descriptor cap; matches LimitNOFILE in the systemd unit.
worker_rlimit_nofile 65535;
events {
worker_connections 4096;
use epoll;
multi_accept on;
}
http {
include /etc/openresty/mime.types;
default_type application/octet-stream;
# Lua package path
lua_package_path '/etc/openresty/lua/?.lua;;';
# Shared memory zones for caching
# cache_dict holds response bodies; cache_stats holds counters;
# cache_locks is reserved for dogpile/stampede locking.
lua_shared_dict cache_dict 100m;
lua_shared_dict cache_stats 10m;
lua_shared_dict cache_locks 1m;
# Initialize Lua modules
# init.lua runs once in the master; init_worker.lua runs in every worker.
init_by_lua_file /etc/openresty/lua/init.lua;
init_worker_by_lua_file /etc/openresty/lua/init_worker.lua;
# Logging
# NOTE(review): $cache_status/$cache_key/$cache_ttl are custom variables —
# every server {} included below must declare them with `set` (as the
# example vhost does) or nginx will refuse to start.
log_format main '$remote_addr - $remote_user [$time_local] "$request" '
'$status $body_bytes_sent "$http_referer" '
'"$http_user_agent" cache_status=$cache_status '
'cache_key="$cache_key" ttl=$cache_ttl';
access_log /var/log/openresty/access.log main;
error_log /var/log/openresty/error.log warn;
# Performance optimizations
sendfile on;
tcp_nopush on;
tcp_nodelay on;
keepalive_timeout 65;
client_max_body_size 50m;
client_body_timeout 60;
client_header_timeout 60;
send_timeout 60;
# Compression
gzip on;
gzip_vary on;
gzip_proxied any;
gzip_comp_level 6;
gzip_types text/plain text/css text/xml text/javascript application/javascript application/xml+rss application/json;
# Virtual hosts live in conf.d/.
include /etc/openresty/conf.d/*.conf;
}
Create Lua initialization script
Initialize global variables and load required Lua modules during NGINX startup.
--- Global cache configuration, loaded once at startup via init_by_lua_file.
-- The returned table is treated as read-only by every other module.
local config = {}

-- TTL policy in seconds: a default plus per-content-type overrides.
config.cache_config = {
    default_ttl = 3600,  -- 1 hour fallback when no other rule applies
    max_ttl = 86400,     -- upper bound: 24 hours
    min_ttl = 300,       -- lower bound: 5 minutes
    -- Keys are matched against the response Content-Type header;
    -- the trailing-slash "image/" entry covers every image subtype.
    content_ttl = {
        ["text/html"] = 1800,              -- 30 minutes
        ["application/json"] = 900,        -- 15 minutes
        ["text/css"] = 7200,               -- 2 hours
        ["application/javascript"] = 7200, -- 2 hours
        ["image/"] = 86400,                -- 24 hours for images
    },
    -- Logical cache zone names referenced by the caching layer.
    zones = {
        static = "static_cache",
        dynamic = "dynamic_cache",
        api = "api_cache",
    },
}

-- Tuning knobs for the cache store itself.
config.performance = {
    max_cache_size = "500m",
    inactive = "7d",
    use_temp_path = false,
}

return config
Create worker initialization script
Set up per-worker initialization for cache statistics and background tasks.
-- Per-worker initialization (init_worker_by_lua): seed shared cache
-- statistics and start the background cleanup timer.
local cache_stats = ngx.shared.cache_stats

-- Seed counters only when absent: `add` is a no-op if the key already
-- exists, so a respawning worker (or a config reload) does not wipe the
-- live counters. The original used `set`, which reset all stats to zero
-- every time any worker started.
cache_stats:add("hits", 0)
cache_stats:add("misses", 0)
cache_stats:add("invalidations", 0)
cache_stats:add("start_time", ngx.time())

-- Scan a bounded batch of keys and drop expired ones. `get` on an
-- expired key already returns nil and frees the slot, so the explicit
-- delete is belt-and-braces.
local function cleanup_expired_keys()
    local cache_dict = ngx.shared.cache_dict
    local keys = cache_dict:get_keys(1000)
    local cleaned = 0
    for _, key in ipairs(keys) do
        local value = cache_dict:get(key)
        if not value then
            cache_dict:delete(key)
            cleaned = cleaned + 1
        end
    end
    if cleaned > 0 then
        ngx.log(ngx.INFO, "Cleaned ", cleaned, " expired cache keys")
    end
end

-- The shared dict is global to the server, so one recurring timer is
-- enough: register it in worker 0 only. The original created one timer
-- per worker, multiplying the cleanup work by the worker count.
if ngx.worker.id() == 0 then
    local ok, err = ngx.timer.every(300, cleanup_expired_keys) -- every 5 minutes
    if not ok then
        ngx.log(ngx.ERR, "Failed to create cleanup timer: ", err)
    end
end
Create advanced cache module
Implement the main caching logic with dynamic key generation and intelligent TTL management.
--- Advanced caching module: dynamic cache keys, content/context-aware
-- TTLs, shared-dict storage and pattern-based invalidation.
local _M = {}

local cache_dict = ngx.shared.cache_dict
local cache_stats = ngx.shared.cache_stats
local cache_locks = ngx.shared.cache_locks -- reserved for future dogpile protection

-- Extensions treated as long-lived static assets.
-- NOTE: Lua patterns have no `|` alternation and `\.` is not a valid Lua
-- string escape, so the original "\.(css|js|...)$" pattern could never
-- match a real URI; we extract the extension and look it up here instead.
local STATIC_EXT = {
    css = true, js = true, png = true, jpg = true,
    jpeg = true, gif = true, ico = true, svg = true,
}

-- True when the URI ends in a known static-asset extension.
local function is_static_asset(uri)
    local ext = string.match(uri, "%.(%w+)$")
    return ext ~= nil and STATIC_EXT[string.lower(ext)] == true
end

--- Build a cache key from URI, query string, user type, device and region.
-- Also mirrors the key into $cache_key for access logging.
-- @treturn string colon-separated cache key, e.g. "v2:<md5>:anonymous:mobile:US"
function _M.generate_cache_key()
    local uri = ngx.var.uri
    local args = ngx.var.args or ""
    local user_type = ngx.var.http_x_user_type or "anonymous"
    local device_type = _M.detect_device_type()
    local geo_region = ngx.var.http_cf_ipcountry or "unknown"
    local key_parts = {
        "v2", -- version prefix: bump to invalidate every key at once
        ngx.md5(uri .. "?" .. args),
        user_type,
        device_type,
        geo_region
    }
    local cache_key = table.concat(key_parts, ":")
    ngx.var.cache_key = cache_key
    return cache_key
end

--- Classify the client device from the User-Agent header.
-- Plain-text find (4th arg true) — the needles contain no pattern magic.
-- @treturn string "mobile", "tablet" or "desktop"
function _M.detect_device_type()
    local user_agent = ngx.var.http_user_agent or ""
    if string.find(user_agent, "Mobile", 1, true) or string.find(user_agent, "Android", 1, true) then
        return "mobile"
    elseif string.find(user_agent, "Tablet", 1, true) or string.find(user_agent, "iPad", 1, true) then
        return "tablet"
    end
    return "desktop"
end

--- Derive a TTL in seconds for a response from its content type, URI
-- class and time of day. Mirrors the value into $cache_ttl for logging.
-- @treturn number TTL; 0 means "do not cache"
function _M.calculate_ttl(content_type, uri)
    local config = require("init")
    local base_ttl = config.cache_config.default_ttl

    -- Content-Type driven TTL. Table keys are used as Lua patterns, so
    -- the "image/" entry matches any image/* type.
    for pattern, ttl in pairs(config.cache_config.content_ttl) do
        if string.match(content_type or "", pattern) then
            base_ttl = ttl
            break
        end
    end

    -- URI-class overrides (plain substring checks).
    if string.find(uri, "/api/", 1, true) then
        base_ttl = math.min(base_ttl, 900) -- API responses: at most 15 minutes
    elseif string.find(uri, "/admin/", 1, true) then
        base_ttl = 0 -- never cache admin pages
    elseif is_static_asset(uri) then
        base_ttl = 86400 -- 24 hours for static assets
    end

    -- During business hours, dynamic (non-static) content gets half the TTL.
    -- NOTE(review): os.date uses the server's local timezone — confirm that
    -- is the intended business-hours clock.
    local hour = tonumber(os.date("%H"))
    if hour >= 9 and hour <= 17 and not is_static_asset(uri) then
        base_ttl = math.floor(base_ttl * 0.5)
    end

    ngx.var.cache_ttl = base_ttl
    return base_ttl
end

--- Fetch a cached body; updates hit/miss counters and $cache_status.
-- @treturn string|nil cached content, or nil on miss
function _M.get_cached_content(cache_key)
    local cached_data = cache_dict:get(cache_key)
    if cached_data then
        cache_stats:incr("hits", 1, 0) -- init=0: safe even if the counter was flushed
        ngx.var.cache_status = "HIT"
        return cached_data
    end
    cache_stats:incr("misses", 1, 0)
    ngx.var.cache_status = "MISS"
    return nil
end

--- Store a body under cache_key for ttl seconds; ttl <= 0 bypasses caching.
-- @treturn boolean true if stored
function _M.store_cached_content(cache_key, content, ttl)
    if ttl <= 0 then
        ngx.var.cache_status = "BYPASS"
        return false
    end
    -- set() evicts LRU entries when the dict is full; `forcible` reports
    -- that such an eviction happened, which is worth surfacing.
    local success, err, forcible = cache_dict:set(cache_key, content, ttl)
    if success then
        if forcible then
            ngx.log(ngx.WARN, "cache_dict full: evicted entries to store ", cache_key)
        end
        ngx.var.cache_status = "STORED"
        return true
    end
    ngx.log(ngx.ERR, "Failed to store cache: ", err)
    ngx.var.cache_status = "ERROR"
    return false
end

--- Delete every cached key matching a Lua pattern.
-- Scans at most 1000 keys per call (get_keys batch limit), so very large
-- caches may need repeated calls.
-- @treturn number count of deleted keys
function _M.invalidate_cache_pattern(pattern)
    local keys = cache_dict:get_keys(1000)
    local invalidated = 0
    for _, key in ipairs(keys) do
        if string.match(key, pattern) then
            cache_dict:delete(key)
            invalidated = invalidated + 1
        end
    end
    cache_stats:incr("invalidations", invalidated, 0)
    return invalidated
end

--- Pre-populate the cache by issuing internal subrequests.
-- Only callable from contexts where ngx.location.capture is available
-- (e.g. content phase), not from timers.
function _M.warm_cache(urls)
    for _, url in ipairs(urls) do
        local res = ngx.location.capture(url, { method = ngx.HTTP_GET })
        if res.status == 200 then
            ngx.log(ngx.INFO, "Warmed cache for: ", url)
        end
    end
end

return _M
Create cache invalidation module
Implement intelligent cache invalidation based on business logic and webhook triggers.
--- Cache invalidation module: webhook-driven, request-driven ("smart")
-- and time-based invalidation built on top of the cache module.
local _M = {}

local cache = require("cache")
local cjson = require "cjson"

-- Escape Lua pattern magic characters so user-supplied fragments
-- (entity names, ids, tags) are matched literally against cache keys.
local function escape_pattern(s)
    return (string.gsub(tostring(s), "[%^%$%(%)%%%.%[%]%*%+%-%?]", "%%%1"))
end

-- True when `method` appears in the pipe-separated spec, e.g.
-- "POST|PUT|PATCH". Lua patterns have no `|` alternation, so the
-- original string.match(method, rule.method) could never match.
local function method_allowed(method, spec)
    for candidate in string.gmatch(spec, "[^|]+") do
        if candidate == method then
            return true
        end
    end
    return false
end

-- Declarative invalidation rules. `trigger` is matched as a plain
-- substring of the URI; `patterns` are Lua patterns applied to cache
-- keys. The ".*" wildcards replace the original bare "." which only
-- matched a single character and thus never hit real keys.
_M.invalidation_rules = {
    -- User profile updates
    {
        trigger = "/api/users/profile",
        method = "POST|PUT|PATCH",
        patterns = { "v2:.*:user:.*", "v2:.*profile.*" }
    },
    -- Product catalog updates
    {
        trigger = "/api/products",
        method = "POST|PUT|DELETE",
        patterns = { "v2:.*products.*", "v2:.*catalog.*" }
    },
    -- CMS content updates
    {
        trigger = "/admin/content",
        method = "POST|PUT|DELETE",
        patterns = { "v2:.*:anonymous:.*", "v2:.*content.*" }
    }
}

--- Handle the /cache/invalidate webhook.
-- Body: JSON with optional "patterns" (raw Lua patterns), "entity" +
-- "entity_id", and "tags" arrays. Responds with a JSON summary.
function _M.process_invalidation_webhook()
    ngx.req.read_body()
    local body = ngx.req.get_body_data()
    if not body then
        -- NOTE(review): bodies larger than client_body_buffer_size are
        -- spooled to disk and get_body_data() returns nil — confirm the
        -- buffer size covers expected webhook payloads.
        ngx.status = 400
        ngx.say('{"error": "No body provided"}')
        return
    end
    local ok, data = pcall(cjson.decode, body)
    if not ok or type(data) ~= "table" then
        ngx.status = 400
        ngx.say('{"error": "Invalid JSON"}')
        return
    end
    local invalidated = 0
    -- Caller-supplied raw Lua patterns.
    if data.patterns then
        for _, pattern in ipairs(data.patterns) do
            invalidated = invalidated + cache.invalidate_cache_pattern(pattern)
        end
    end
    -- Entity-based: match "<entity>:<id>" literally anywhere in the key.
    if data.entity and data.entity_id then
        local pattern = escape_pattern(data.entity) .. ":" .. escape_pattern(data.entity_id)
        invalidated = invalidated + cache.invalidate_cache_pattern(pattern)
    end
    -- Tag-based: match ":tag:<tag>" literally.
    if data.tags then
        for _, tag in ipairs(data.tags) do
            invalidated = invalidated + cache.invalidate_cache_pattern(":tag:" .. escape_pattern(tag))
        end
    end
    ngx.status = 200
    ngx.say(string.format('{"invalidated": %d, "status": "success"}', invalidated))
end

--- Invalidate cache entries affected by a write request (access phase).
function _M.smart_invalidation()
    local method = ngx.var.request_method
    local uri = ngx.var.uri
    -- Reads never invalidate anything.
    if method ~= "POST" and method ~= "PUT" and method ~= "PATCH" and method ~= "DELETE" then
        return
    end
    local invalidated = 0
    for _, rule in ipairs(_M.invalidation_rules) do
        -- Plain-text substring match: triggers are paths, not patterns.
        if string.find(uri, rule.trigger, 1, true) and method_allowed(method, rule.method) then
            for _, pattern in ipairs(rule.patterns) do
                invalidated = invalidated + cache.invalidate_cache_pattern(pattern)
            end
        end
    end
    if invalidated > 0 then
        ngx.log(ngx.INFO, "Smart invalidation: ", invalidated, " keys for ", method, " ", uri)
    end
end

--- Minute-granular scheduled invalidation.
-- NOTE(review): this only fires when invoked during the exact minute
-- (e.g. from a once-a-minute ngx.timer); if called per-request it would
-- fire many times within that minute — confirm the intended call site.
function _M.schedule_time_based_invalidation()
    local hour = tonumber(os.date("%H"))
    local minute = tonumber(os.date("%M"))
    -- Daily refresh of dynamic content at 03:00.
    if hour == 3 and minute == 0 then
        local invalidated = cache.invalidate_cache_pattern("v2:.*:dynamic:.*")
        ngx.log(ngx.INFO, "Daily cache refresh: ", invalidated, " keys invalidated")
    end
    -- Hourly API refresh during business hours.
    if hour >= 9 and hour <= 17 and minute == 0 then
        local invalidated = cache.invalidate_cache_pattern("v2:.*api.*")
        ngx.log(ngx.INFO, "Hourly API cache refresh: ", invalidated, " keys invalidated")
    end
end

return _M
Create cache statistics and monitoring module
Implement comprehensive cache monitoring and performance metrics collection.
--- Cache statistics and monitoring: aggregates shared-dict counters and
-- exposes them as a Lua table or as Prometheus exposition text.
local _M = {}

local cache_stats = ngx.shared.cache_stats
local cache_dict = ngx.shared.cache_dict

--- Collect a snapshot of all cache statistics.
-- @treturn table hits, misses, totals, hit rate, uptime, memory, top keys
function _M.get_cache_stats()
    local hits = cache_stats:get("hits") or 0
    local misses = cache_stats:get("misses") or 0
    local invalidations = cache_stats:get("invalidations") or 0
    local started = cache_stats:get("start_time") or ngx.time()

    local total = hits + misses
    local rate = 0
    if total > 0 then
        rate = hits / total * 100
    end
    local uptime = ngx.time() - started

    return {
        hits = hits,
        misses = misses,
        total_requests = total,
        hit_rate = string.format("%.2f%%", rate),
        invalidations = invalidations,
        uptime = uptime,
        uptime_human = _M.format_uptime(uptime),
        memory_usage = _M.get_memory_usage(),
        top_keys = _M.get_top_cache_keys()
    }
end

--- Render a duration in seconds as "Nd HHh MMm SSs".
function _M.format_uptime(seconds)
    local day_part = seconds % 86400
    local days = math.floor(seconds / 86400)
    local hours = math.floor(day_part / 3600)
    local minutes = math.floor((day_part % 3600) / 60)
    return string.format("%dd %02dh %02dm %02ds",
        days, hours, minutes, seconds % 60)
end

--- Report shared-dict capacity and usage in bytes.
function _M.get_memory_usage()
    local capacity = cache_dict:capacity()
    local free = cache_dict:free_space()
    local used = capacity - free
    return {
        capacity = capacity,
        used = used,
        free = free,
        usage_percent = string.format("%.2f%%", used / capacity * 100)
    }
end

--- List up to `limit` cache keys with the byte size of their values.
-- Simplified: a real deployment would track per-key access counts.
function _M.get_top_cache_keys(limit)
    local max_entries = limit or 10
    local keys = cache_dict:get_keys(1000)
    local result = {}
    -- string.len coerces numeric values; missing entries count as size 0.
    for idx = 1, math.min(#keys, max_entries) do
        result[idx] = {
            key = keys[idx],
            size = string.len(cache_dict:get(keys[idx]) or "")
        }
    end
    return result
end

--- Render the counters in Prometheus exposition format.
function _M.export_prometheus_metrics()
    local stats = _M.get_cache_stats()
    -- Prometheus values are bare numbers: strip the "%" suffix.
    local rate_value = string.gsub(stats.hit_rate, "%%", "")
    local lines = {
        "# HELP openresty_cache_hits_total Total number of cache hits",
        "# TYPE openresty_cache_hits_total counter",
        string.format("openresty_cache_hits_total %d", stats.hits),
        "# HELP openresty_cache_misses_total Total number of cache misses",
        "# TYPE openresty_cache_misses_total counter",
        string.format("openresty_cache_misses_total %d", stats.misses),
        "# HELP openresty_cache_hit_rate Cache hit rate percentage",
        "# TYPE openresty_cache_hit_rate gauge",
        string.format("openresty_cache_hit_rate %s", rate_value),
        "# HELP openresty_cache_memory_usage_bytes Memory usage in bytes",
        "# TYPE openresty_cache_memory_usage_bytes gauge",
        string.format("openresty_cache_memory_usage_bytes %d", stats.memory_usage.used)
    }
    return table.concat(lines, "\n") .. "\n"
end

return _M
Configure virtual server with advanced caching
Create a virtual server configuration that demonstrates the advanced caching functionality.
server {
listen 80;
server_name example.com;
# Variables for cache operations
# These must exist because the http-level log_format references them.
set $cache_key "";
set $cache_status "";
set $cache_ttl "";
# Cache directory for file-based caching
location / {
# Generate cache key
# Runs in the access phase so the key is ready before content runs.
access_by_lua_block {
local cache = require("cache")
cache.generate_cache_key()
}
# Try to serve from Lua cache first
content_by_lua_block {
local cache = require("cache")
local cache_key = ngx.var.cache_key
-- Try to get from cache
local cached_content = cache.get_cached_content(cache_key)
if cached_content then
ngx.print(cached_content)
ngx.exit(200)
else
-- Forward to backend and cache response
-- ngx.location.capture issues an internal subrequest to /backend,
-- preserving the original query string when present.
local res = ngx.location.capture("/backend" .. ngx.var.uri .. (ngx.var.args and "?" .. ngx.var.args or ""))
if res.status == 200 then
local content_type = res.header["Content-Type"] or ""
local ttl = cache.calculate_ttl(content_type, ngx.var.uri)
if ttl > 0 then
cache.store_cached_content(cache_key, res.body, ttl)
end
-- Set response headers
-- Copies backend headers (including Content-Type) onto the response.
for k, v in pairs(res.header) do
ngx.header[k] = v
end
ngx.status = res.status
ngx.print(res.body)
else
-- Non-200 responses are passed through uncached.
ngx.status = res.status
ngx.print(res.body)
end
end
}
}
# Backend proxy location
# internal: only reachable via subrequests, never directly by clients.
location /backend {
internal;
proxy_pass http://127.0.0.1:8080;
proxy_set_header Host $host;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-Proto $scheme;
# Timeout settings
proxy_connect_timeout 5s;
proxy_send_timeout 10s;
proxy_read_timeout 30s;
}
# Cache invalidation endpoint
location /cache/invalidate {
access_by_lua_block {
-- Simple authentication
-- SECURITY: replace this hard-coded token with a value loaded from
-- an environment variable or secret store before production use.
local auth_header = ngx.var.http_authorization
if not auth_header or auth_header ~= "Bearer your-secret-token" then
ngx.status = 401
ngx.say('{"error": "Unauthorized"}')
ngx.exit(401)
end
}
content_by_lua_block {
local invalidation = require("invalidation")
invalidation.process_invalidation_webhook()
}
}
# Cache statistics endpoint
# NOTE(review): unauthenticated — consider restricting by IP or token.
location /cache/stats {
content_by_lua_block {
local stats = require("stats")
local cache_stats = stats.get_cache_stats()
ngx.header.content_type = "application/json"
local cjson = require "cjson"
ngx.print(cjson.encode(cache_stats))
}
}
# Prometheus metrics endpoint
location /metrics {
content_by_lua_block {
local stats = require("stats")
local metrics = stats.export_prometheus_metrics()
ngx.header.content_type = "text/plain"
ngx.print(metrics)
}
}
# Smart invalidation on write operations
# The Lua hook runs before proxying; reads are a no-op inside it.
location ~ ^/api/ {
access_by_lua_block {
local invalidation = require("invalidation")
invalidation.smart_invalidation()
}
proxy_pass http://127.0.0.1:8080;
proxy_set_header Host $host;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
}
}
Create systemd service file
Set up OpenResty as a system service with proper user permissions and security settings.
[Unit]
Description=OpenResty Web Server with Lua Scripting
After=network.target remote-fs.target nss-lookup.target

[Service]
Type=forking
# NOTE(review): nginx.conf must contain "pid /var/run/openresty.pid;" so
# this path matches — the config shown earlier in this guide does not set
# a pid directive; add one or systemd cannot track the master process.
PIDFile=/var/run/openresty.pid
ExecStartPre=/usr/bin/openresty -t -c /etc/openresty/nginx.conf
ExecStart=/usr/bin/openresty -c /etc/openresty/nginx.conf
ExecReload=/bin/sh -c "/bin/kill -s HUP $(/bin/cat /var/run/openresty.pid)"
ExecStop=/bin/sh -c "/bin/kill -s QUIT $(/bin/cat /var/run/openresty.pid)"
KillMode=mixed
KillSignal=SIGQUIT
TimeoutStopSec=5
PrivateTmp=true
LimitNOFILE=65535
# The master process must start as root to bind port 80 and write the
# pid file; workers drop privileges via the "user nobody;" directive in
# nginx.conf. Do NOT set User=/Group=nobody here — that makes startup
# fail with "permission denied" on the listen socket.

[Install]
WantedBy=multi-user.target
Create log directory and set permissions
Set up proper logging directories with correct ownership for the nobody user.
sudo mkdir -p /var/log/openresty
sudo chown -R nobody:nobody /var/log/openresty
sudo chmod 755 /var/log/openresty
Enable and start OpenResty service
Enable the service to start automatically on boot and start it immediately.
sudo systemctl daemon-reload
sudo systemctl enable openresty
sudo systemctl start openresty
sudo systemctl status openresty
Create cache warming script
Create a script to warm up the cache with frequently accessed content during deployment or startup.
#!/bin/bash
# Cache warming script for OpenResty.
# Fetches a fixed list of URLs so their responses are cached before real
# traffic arrives. Run after deploys or restarts.
SERVER_URL="http://localhost"
# URLs worth pre-loading; extend this list as the site grows.
WARM_URLS=(
"/"
"/products"
"/about"
"/contact"
"/api/health"
"/static/main.css"
"/static/app.js"
)
echo "Starting cache warming..."
for url in "${WARM_URLS[@]}"; do
echo "Warming: $url"
# -s/-o: quiet fetch with the body discarded — only the cache side effect matters.
curl -s -o /dev/null "$SERVER_URL$url"
# Brief pause so warming does not hammer the server.
sleep 0.1
done
echo "Cache warming completed"
# Display cache statistics (requires jq).
echo "Cache Statistics:"
curl -s "$SERVER_URL/cache/stats" | jq .
Make cache warming script executable
Set proper permissions on the cache warming script.
sudo chmod 755 /usr/local/bin/warm-cache.sh
Verify your setup
Test the OpenResty installation and advanced caching functionality with these verification commands.
# Check OpenResty is running
sudo systemctl status openresty
Verify configuration syntax
sudo openresty -t -c /etc/openresty/nginx.conf
Test basic connectivity
curl -I http://localhost
Check cache statistics
curl http://localhost/cache/stats
Test cache warming
sudo /usr/local/bin/warm-cache.sh
Monitor cache hit rates
curl http://localhost/cache/stats | jq '.hit_rate'
Check Prometheus metrics
curl http://localhost/metrics
Test cache invalidation (with proper auth token)
curl -X POST -H "Authorization: Bearer your-secret-token" \
-H "Content-Type: application/json" \
-d '{"patterns": ["v2:.*:api:.*"]}' \
http://localhost/cache/invalidate
Common issues
| Symptom | Cause | Fix |
|---|---|---|
| OpenResty fails to start | Configuration syntax error or permission issue | sudo openresty -t to check config, verify file permissions with ls -la /etc/openresty/ |
| Lua script errors in error log | Missing Lua modules or syntax errors | Check /var/log/openresty/error.log, verify Lua syntax with luac -p script.lua |
| Cache not working (always MISS) | Shared memory not allocated or TTL set to 0 | Verify lua_shared_dict in nginx.conf, check cache key generation logic |
| High memory usage | Cache size too large or memory leaks | Adjust shared memory size, implement cache size limits, monitor with /cache/stats |
| Permission denied on log files | Incorrect ownership on log directory | sudo chown -R nobody:nobody /var/log/openresty |
| Cache invalidation not working | Authentication failure or pattern matching issues | Verify auth token, test patterns with simpler regex, check error logs |
Next steps
- Setup OpenResty monitoring with Prometheus and Grafana dashboards for performance analytics
- Implement OpenResty JWT authentication with OAuth2 integration for secure web applications
- Configure Nginx Redis caching with SSL authentication and security hardening
- Setup OpenResty load balancing with health checks and automatic failover
- Implement OpenResty rate limiting and DDoS protection with advanced Lua rules
Automated install script
Run this to automate the entire setup
#!/usr/bin/env bash
set -euo pipefail
# OpenResty with Lua Caching Install Script
# Supports Ubuntu, Debian, AlmaLinux, Rocky Linux, CentOS, Fedora, Amazon Linux
# Color codes for output
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
NC='\033[0m' # No Color
# Configuration variables
DOMAIN=${1:-"example.com"}
OPENRESTY_USER="nobody"
# Cleanup function for rollback: stop the service and remove what this
# script created so a re-run starts from a clean slate.
cleanup() {
echo -e "${RED}[ERROR] Installation failed. Cleaning up...${NC}"
systemctl stop openresty 2>/dev/null || true
rm -rf /etc/openresty/lua /var/log/openresty
}
# Set trap for cleanup on error
trap cleanup ERR
# Usage message
if [[ $# -gt 1 ]]; then
echo "Usage: $0 [domain]"
echo "Example: $0 mysite.com"
exit 1
fi
# Check prerequisites
if [[ $EUID -ne 0 ]]; then
echo -e "${RED}This script must be run as root${NC}"
exit 1
fi
# Detect OS and set package manager
echo -e "${YELLOW}[1/8] Detecting operating system...${NC}"
if [ -f /etc/os-release ]; then
. /etc/os-release
case "$ID" in
ubuntu|debian)
PKG_MGR="apt"
PKG_UPDATE="apt update"
PKG_INSTALL="apt install -y"
OPENRESTY_CONFIG_DIR="/etc/openresty"
;;
# Fedora shares the dnf toolchain with the EL family, so one arm covers all.
almalinux|rocky|centos|rhel|ol|fedora)
PKG_MGR="dnf"
PKG_UPDATE="dnf update -y"
PKG_INSTALL="dnf install -y"
OPENRESTY_CONFIG_DIR="/etc/openresty"
;;
amzn)
PKG_MGR="yum"
PKG_UPDATE="yum update -y"
PKG_INSTALL="yum install -y"
OPENRESTY_CONFIG_DIR="/etc/openresty"
;;
*)
echo -e "${RED}Unsupported distribution: $ID${NC}"
exit 1
;;
esac
echo -e "${GREEN}Detected: $PRETTY_NAME${NC}"
else
echo -e "${RED}Cannot detect operating system${NC}"
exit 1
fi
# Update system packages
echo -e "${YELLOW}[2/8] Updating system packages...${NC}"
$PKG_UPDATE
# Install prerequisites (gnupg2 provides gpg for keyring creation below)
echo -e "${YELLOW}[3/8] Installing prerequisites...${NC}"
if [[ "$PKG_MGR" == "apt" ]]; then
$PKG_INSTALL wget gnupg2 lsb-release curl
else
$PKG_INSTALL wget curl
fi
# Add OpenResty repository and install
echo -e "${YELLOW}[4/8] Installing OpenResty...${NC}"
if [[ "$PKG_MGR" == "apt" ]]; then
# apt-key is deprecated (removed on current Debian/Ubuntu): store the key
# in a dedicated keyring and reference it with signed-by instead.
wget -qO - https://openresty.org/package/pubkey.gpg | gpg --dearmor > /usr/share/keyrings/openresty.gpg
# Use the distro-specific repo path ($ID is "ubuntu" or "debian"); the
# original hard-coded "ubuntu", which broke Debian installs.
echo "deb [signed-by=/usr/share/keyrings/openresty.gpg] http://openresty.org/package/$ID $(lsb_release -sc) main" > /etc/apt/sources.list.d/openresty.list
apt update
$PKG_INSTALL openresty openresty-resty
else
# NOTE(review): the CentOS repo path also serves AlmaLinux/Rocky; Fedora
# and Amazon Linux have their own trees on openresty.org — verify before
# deploying there.
cat > /etc/yum.repos.d/openresty.repo << 'EOF'
[openresty]
name=Official OpenResty Open Source Repository for CentOS
baseurl=https://openresty.org/package/centos/$releasever/$basearch
skip_if_unavailable=False
gpgcheck=1
gpgkey=https://openresty.org/package/pubkey.gpg
enabled=1
enabled_metadata=1
EOF
$PKG_INSTALL openresty openresty-resty
fi
# Create directories with proper permissions
echo -e "${YELLOW}[5/8] Setting up directories and permissions...${NC}"
mkdir -p $OPENRESTY_CONFIG_DIR/lua
mkdir -p $OPENRESTY_CONFIG_DIR/conf.d
mkdir -p /var/log/openresty
chown -R $OPENRESTY_USER:$OPENRESTY_USER $OPENRESTY_CONFIG_DIR/lua
chown -R $OPENRESTY_USER:$OPENRESTY_USER /var/log/openresty
chmod 755 $OPENRESTY_CONFIG_DIR/lua
chmod 755 /var/log/openresty
# Create main OpenResty configuration (quoted heredoc: no expansion)
echo -e "${YELLOW}[6/8] Creating OpenResty configuration...${NC}"
cat > $OPENRESTY_CONFIG_DIR/nginx.conf << 'EOF'
user nobody;
worker_processes auto;
worker_rlimit_nofile 65535;
events {
worker_connections 4096;
use epoll;
multi_accept on;
}
http {
include /etc/openresty/mime.types;
default_type application/octet-stream;
# Lua package path
lua_package_path '/etc/openresty/lua/?.lua;;';
# Shared memory zones for caching
lua_shared_dict cache_dict 100m;
lua_shared_dict cache_stats 10m;
lua_shared_dict cache_locks 1m;
# Initialize Lua modules
init_by_lua_file /etc/openresty/lua/init.lua;
init_worker_by_lua_file /etc/openresty/lua/init_worker.lua;
# Logging
log_format main '$remote_addr - $remote_user [$time_local] "$request" '
'$status $body_bytes_sent "$http_referer" '
'"$http_user_agent" cache_status=$cache_status '
'cache_key="$cache_key" ttl=$cache_ttl';
access_log /var/log/openresty/access.log main;
error_log /var/log/openresty/error.log warn;
# Performance optimizations
sendfile on;
tcp_nopush on;
tcp_nodelay on;
keepalive_timeout 65;
client_max_body_size 50m;
# Compression
gzip on;
gzip_vary on;
gzip_proxied any;
gzip_comp_level 6;
gzip_types text/plain text/css text/xml text/javascript application/javascript application/xml+rss application/json;
include /etc/openresty/conf.d/*.conf;
}
EOF
# Create Lua initialization script
cat > $OPENRESTY_CONFIG_DIR/lua/init.lua << 'EOF'
local _M = {}
_M.cache_config = {
default_ttl = 3600,
max_ttl = 86400,
min_ttl = 300,
content_ttl = {
["text/html"] = 1800,
["application/json"] = 900,
["text/css"] = 7200,
["application/javascript"] = 7200,
["image/"] = 86400,
}
}
return _M
EOF
# Create worker initialization script
cat > $OPENRESTY_CONFIG_DIR/lua/init_worker.lua << 'EOF'
local cache_stats = ngx.shared.cache_stats
if cache_stats then
-- add() is a no-op when the key exists, so respawning workers
-- do not reset live counters.
cache_stats:add("hits", 0)
cache_stats:add("misses", 0)
cache_stats:add("total_requests", 0)
end
ngx.log(ngx.INFO, "Cache worker initialized")
EOF
# Create sample site configuration. This heredoc is unquoted so $DOMAIN
# expands; nginx variables are escaped as \$.
cat > $OPENRESTY_CONFIG_DIR/conf.d/default.conf << EOF
server {
listen 80;
server_name $DOMAIN;
root /var/www/html;
index index.html;
set \$cache_status "MISS";
set \$cache_key "";
set \$cache_ttl "0";
location / {
access_by_lua_block {
local cache_dict = ngx.shared.cache_dict
local cache_stats = ngx.shared.cache_stats
local cache_key = ngx.var.request_uri
ngx.var.cache_key = cache_key
-- Count every request up front: ngx.exit() on the HIT path below
-- would otherwise skip the total counter (as the original did).
cache_stats:incr("total_requests", 1, 0)
local cached_response = cache_dict:get(cache_key)
if cached_response then
ngx.var.cache_status = "HIT"
cache_stats:incr("hits", 1, 0)
-- print, not say: the cached value is the exact original body.
ngx.print(cached_response)
ngx.exit(ngx.HTTP_OK)
else
ngx.var.cache_status = "MISS"
cache_stats:incr("misses", 1, 0)
end
}
try_files \$uri \$uri/ =404;
header_filter_by_lua_block {
ngx.var.cache_ttl = "3600"
}
body_filter_by_lua_block {
-- The body filter runs once per chunk and ngx.arg[1] is often
-- empty on the final (eof) call, so caching only the last chunk
-- (as the original did) stored truncated or empty bodies.
-- Accumulate chunks in ngx.ctx and store the full body at eof.
local chunk = ngx.arg[1]
local eof = ngx.arg[2]
ngx.ctx.buffer = (ngx.ctx.buffer or "") .. (chunk or "")
if eof then
local cache_dict = ngx.shared.cache_dict
local cache_key = ngx.var.cache_key
if cache_key and cache_key ~= "" and ngx.status == 200 then
cache_dict:set(cache_key, ngx.ctx.buffer, 3600)
end
end
}
}
location /cache-stats {
content_by_lua_block {
local cache_stats = ngx.shared.cache_stats
local hits = cache_stats:get("hits") or 0
local misses = cache_stats:get("misses") or 0
local total = cache_stats:get("total_requests") or 0
ngx.header.content_type = "application/json"
ngx.say('{"hits":' .. hits .. ',"misses":' .. misses .. ',"total":' .. total .. '}')
}
}
}
EOF
# Set proper ownership for configuration files (root-owned, world-readable)
chown -R root:root $OPENRESTY_CONFIG_DIR
chmod 644 $OPENRESTY_CONFIG_DIR/nginx.conf
chmod 644 $OPENRESTY_CONFIG_DIR/conf.d/default.conf
chmod 644 $OPENRESTY_CONFIG_DIR/lua/*.lua
# Create sample web content ($(date) expands once, at install time)
echo -e "${YELLOW}[7/8] Creating sample web content...${NC}"
mkdir -p /var/www/html
cat > /var/www/html/index.html << EOF
<!DOCTYPE html>
<html>
<head>
<title>OpenResty Lua Caching</title>
</head>
<body>
<h1>OpenResty with Lua Caching Active</h1>
<p>Server: $DOMAIN</p>
<p>Time: $(date)</p>
<p><a href="/cache-stats">View Cache Statistics</a></p>
</body>
</html>
EOF
chown -R $OPENRESTY_USER:$OPENRESTY_USER /var/www/html
chmod 644 /var/www/html/index.html
# Validate OUR configuration before starting the service so a broken
# config fails fast. The original tested the wrong (default) config with
# an inconsistent binary path, and only after starting the service.
echo -e "${GREEN}Testing configuration...${NC}"
openresty -t -c $OPENRESTY_CONFIG_DIR/nginx.conf
# Enable and start OpenResty
# NOTE(review): assumes the systemd unit from earlier in this guide
# (pointing at /etc/openresty/nginx.conf) is installed; the stock package
# unit may use a different config path — confirm before relying on it.
echo -e "${YELLOW}[8/8] Starting OpenResty service...${NC}"
systemctl enable openresty
systemctl start openresty
# Verification
echo -e "${GREEN}Installation completed successfully!${NC}"
echo -e "${GREEN}OpenResty Status:${NC}"
systemctl status openresty --no-pager -l
echo -e "${GREEN}Installation Summary:${NC}"
echo "- OpenResty installed with Lua support"
echo "- Configuration: $OPENRESTY_CONFIG_DIR/nginx.conf"
echo "- Lua scripts: $OPENRESTY_CONFIG_DIR/lua/"
echo "- Site config: $OPENRESTY_CONFIG_DIR/conf.d/default.conf"
echo "- Logs: /var/log/openresty/"
echo "- Test URL: http://$DOMAIN"
echo "- Cache stats: http://$DOMAIN/cache-stats"
Review the script before running. Execute with: bash install.sh