Periodic Ghost Backup

Dockerfile

# syntax=docker/dockerfile:1

# Pin the base image instead of :latest so builds are reproducible; bump the
# tag deliberately when upgrading.
FROM alpine:3.19

# bash      - shell used by the backup script and entrypoint
# coreutils - GNU date: busybox `date -d` cannot parse the YYYY-MM-DD strings
#             the backup script extracts from `s3cmd ls` output
# s3cmd     - uploads archives to DigitalOcean Spaces
# tar       - archive creation
RUN apk add --no-cache \
    bash \
    coreutils \
    s3cmd \
    tar

# /ghost-content: read-only mount point for Ghost data; /backup: staging area.
RUN mkdir -p /ghost-content /backup

# Backup script executed by cron.
COPY ghost-backup.sh /usr/local/bin/backup.sh
RUN chmod +x /usr/local/bin/backup.sh

# Entrypoint renders ~/.s3cfg from environment variables at container start,
# so credentials are never baked into an image layer. The outer heredoc is
# quoted ('SCRIPT') so the ${...} references survive the build and expand only
# at runtime, when entrypoint.sh's own unquoted CFG heredoc is evaluated.
RUN <<'EOF'
cat > /entrypoint.sh <<'SCRIPT'
#!/bin/bash
set -eu  # fail fast at startup if any SPACES_* variable is missing
cat > /root/.s3cfg <<CFG
[default]
access_key = ${SPACES_ACCESS_KEY}
secret_key = ${SPACES_SECRET_KEY}
host_base = ${SPACES_REGION}.digitaloceanspaces.com
host_bucket = %(bucket)s.${SPACES_REGION}.digitaloceanspaces.com
use_https = True
CFG
chmod 600 /root/.s3cfg
exec "$@"
SCRIPT
chmod +x /entrypoint.sh
EOF

# Set entrypoint (exec form so the wrapper becomes PID 1 and `exec "$@"`
# hands PID 1 to crond).
ENTRYPOINT ["/entrypoint.sh"]

# Hourly backup at minute 0.
# NOTE(review): busybox crond appears to pass the daemon's environment through
# to jobs — confirm SPACES_BUCKET is visible to backup.sh at runtime.
RUN echo "0 * * * * /usr/local/bin/backup.sh >> /var/log/backup.log 2>&1" > /etc/crontabs/root

# crond stays root to read /etc/crontabs/root; -f keeps it in the foreground
# as PID 1, -d 8 sends log output to stderr for `docker logs`.
CMD ["crond", "-f", "-d", "8"]

Backup Script (ghost-backup.sh)

#!/bin/bash
# Archive the Ghost content directory, upload it to DigitalOcean Spaces, and
# prune remote backups older than RETENTION_DAYS.
#
# Requires: SPACES_BUCKET in the environment, an s3cmd config at ~/.s3cfg
# (written by the container entrypoint), and GNU date (`date -d`; the busybox
# applet cannot parse YYYY-MM-DD strings).

set -o pipefail

BACKUP_DIR="/backup"
GHOST_DATA_DIR="/ghost-content"
SPACES_URI="s3://${SPACES_BUCKET}/ghost-backups"
RETENTION_DAYS=30

echo "[$(date)] Starting backup..."

TIMESTAMP=$(date +"%Y%m%d_%H%M")
BACKUP_FILE="ghost_backup_${TIMESTAMP}.tar.gz"

mkdir -p "${BACKUP_DIR}"

# Abort (leaving remote backups untouched) if the archive cannot be created —
# otherwise a partial tarball would be uploaded as if it were a good backup.
if ! tar -czf "${BACKUP_DIR}/${BACKUP_FILE}" -C "${GHOST_DATA_DIR}" .; then
    echo "[$(date)] ERROR: tar failed, aborting backup" >&2
    rm -f "${BACKUP_DIR}/${BACKUP_FILE}"
    exit 1
fi

# Only delete the local archive after a confirmed upload; on failure keep it
# on disk so the next run (or an operator) can retry.
if s3cmd put "${BACKUP_DIR}/${BACKUP_FILE}" "${SPACES_URI}/${BACKUP_FILE}"; then
    rm "${BACKUP_DIR}/${BACKUP_FILE}"
    echo "[$(date)] Backup uploaded: ${BACKUP_FILE}"
else
    echo "[$(date)] ERROR: upload failed, keeping ${BACKUP_DIR}/${BACKUP_FILE}" >&2
    exit 1
fi

# Cleanup old backups. `s3cmd ls` file lines look like:
#   2024-01-01 12:00  12345  s3://bucket/ghost-backups/ghost_backup_....tar.gz
NOW=$(date +%s)  # hoisted: one clock read for the whole loop
s3cmd ls "${SPACES_URI}/" | while read -r FILE_DATE FILE_TIME FILE_SIZE FILE_NAME; do
    # Skip blank lines, DIR entries, and anything that is not one of our
    # backup archives — never delete objects this script did not create.
    case "$FILE_NAME" in
        "${SPACES_URI}"/ghost_backup_*.tar.gz) ;;
        *) continue ;;
    esac

    FILE_EPOCH=$(date -d "$FILE_DATE" +%s 2>/dev/null) || continue
    FILE_AGE_DAYS=$(( (NOW - FILE_EPOCH) / 86400 ))

    if [ "$FILE_AGE_DAYS" -gt "$RETENTION_DAYS" ]; then
        echo "[$(date)] Deleting old backup: $FILE_NAME"
        s3cmd del "$FILE_NAME"
    fi
done

echo "[$(date)] Backup process complete"

Docker Compose

  # Sidecar that archives Ghost content to DigitalOcean Spaces on a cron
  # schedule (built from ./backup). Fragment of a larger compose file:
  # assumes a top-level `services:` key, a `ghost` service, and a
  # `ghost_content` named volume declared elsewhere — TODO confirm.
  ghost-backup:
    build: ./backup
    container_name: ghost-backup
    restart: unless-stopped
    environment:
      # Spaces credentials/config are injected at runtime (never baked into
      # the image); values come from the host environment or a .env file.
      - SPACES_ACCESS_KEY=${SPACES_ACCESS_KEY}
      - SPACES_SECRET_KEY=${SPACES_SECRET_KEY}
      - SPACES_REGION=${SPACES_REGION:-nyc3}  # falls back to nyc3 when unset
      - SPACES_BUCKET=${SPACES_BUCKET}
    volumes:
      - ghost_content:/ghost-content:ro  # Read-only access
    depends_on:
      - ghost