create docker compose setup

Author: Micha R. Albert
Date: 2025-07-03 15:03:12 -04:00
parent f2fd4c8d7c
commit 16e840fb78
Signed by: mra
SSH key fingerprint: SHA256:2JB0fGfy7m2HQXAzvSXXKm7wPTj9Z60MOjFOQGM2Y/E
9 changed files with 228 additions and 46 deletions

.dockerignore (new file)
@@ -0,0 +1,63 @@
# Git
.git
.gitignore
# Python
__pycache__
*.pyc
*.pyo
*.pyd
.Python
env
pip-log.txt
pip-delete-this-directory.txt
.tox
.coverage
.coverage.*
.pytest_cache
htmlcov
# Virtual environments
venv/
ENV/
env/
.venv/
# IDEs
.vscode/
.idea/
*.swp
*.swo
*~
# OS
.DS_Store
.DS_Store?
._*
.Spotlight-V100
.Trashes
ehthumbs.db
Thumbs.db
# Project specific
*.db
*.sqlite
*.sqlite3
.env.local
.env.development
.env.test
node_modules/
dist/
build/
# Documentation (keep Dockerfile.dev for development builds)
DOCKER.md
docs/
# Logs
*.log
logs/
# Temporary files
tmp/
temp/

.env.example (new file)
@@ -0,0 +1,32 @@
# Copy this file to .env and fill in your actual values
# Slack Integration
SLACK_CLIENT_ID=your_slack_client_id_here
SLACK_CLIENT_SECRET=your_slack_client_secret_here
SLACK_SIGNING_SECRET=your_slack_signing_secret_here
# Airtable Configuration
AIRTABLE_PAT=your_airtable_personal_access_token_here
AIRTABLE_BASE=your_airtable_base_id_here
AIRTABLE_SUBMISSIONS_TABLE=your_submissions_table_id_here
AIRTABLE_USERS_TABLE=your_users_table_id_here
AIRTABLE_SESSIONS_TABLE=your_sessions_table_id_here
AIRTABLE_ITEMS_TABLE=your_items_table_id_here
AIRTABLE_ITEM_ADDONS_TABLE=your_item_addons_table_id_here
# Application Settings
APP_BASE_URL=http://localhost:8000
GAME_ID_SALT=generate_a_secure_random_string_here
JWT_SECRET_KEY=generate_a_secure_jwt_secret_key_here
# Environment Configuration
ENVIRONMENT=development
MAX_REQUEST_SIZE=1048576
RATE_LIMIT_REQUESTS=20
SESSION_TTL_HOURS=24
# Redis/Valkey Configuration
# For Docker: REDIS_HOST=valkey (automatically set in docker-compose)
# For local development: REDIS_HOST=localhost (default)
REDIS_HOST=valkey
REDIS_PORT=6379
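
The GAME_ID_SALT and JWT_SECRET_KEY placeholders above call for secure random strings. One way to produce them is a short stdlib script like the following (a sketch only; any cryptographically random value of similar length works, and the file name is illustrative):

# generate_secrets.py - print ready-to-paste lines for the two secret placeholders
import secrets  # stdlib CSPRNG helpers

print(f"GAME_ID_SALT={secrets.token_urlsafe(32)}")
print(f"JWT_SECRET_KEY={secrets.token_urlsafe(64)}")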

@@ -1,33 +0,0 @@
# Security Configuration Template
# Copy this to .env and fill in your actual values
# Airtable Configuration
AIRTABLE_PAT=your_airtable_personal_access_token
AIRTABLE_BASE=your_airtable_base_id
AIRTABLE_SESSIONS_TABLE=Sessions
AIRTABLE_USERS_TABLE=Users
AIRTABLE_ITEMS_TABLE=Items
AIRTABLE_ITEM_ADDONS_TABLE=Item Addons
AIRTABLE_SUBMISSIONS_TABLE=Submissions
# Slack Configuration
SLACK_SIGNING_SECRET=your_slack_signing_secret
SLACK_CLIENT_ID=your_slack_client_id
SLACK_CLIENT_SECRET=your_slack_client_secret
# Application Configuration
APP_BASE_URL=https://your-domain.com
GAME_ID_SALT=your_secure_random_salt_string
# Security Configuration
ENVIRONMENT=development # development, staging, production
MAX_REQUEST_SIZE=1048576 # 1MB
RATE_LIMIT_REQUESTS=100 # requests per minute per IP
SESSION_TTL_HOURS=24
# CORS Configuration
# For development: use "*" to allow all origins
# For production: use comma-separated list of allowed domains
ALLOWED_ORIGINS=*
# Production example:
# ALLOWED_ORIGINS=https://yourgame.com,https://anothergame.com

Dockerfile (new file)
@@ -0,0 +1,46 @@
# Use Python 3.13 slim image
FROM python:3.13-slim
# Set working directory
WORKDIR /app
# Install system dependencies and build tools
RUN apt-get update && apt-get install -y \
curl \
gcc \
g++ \
build-essential \
python3-dev \
&& rm -rf /var/lib/apt/lists/*
# Install Hatch
RUN pip install --no-cache-dir hatch
# Copy project files
COPY pyproject.toml ./
COPY LICENSE ./
COPY README.md ./
COPY src/ ./src/
# Install project and dependencies using Hatch
RUN hatch build -t wheel && \
pip install --no-cache-dir dist/*.whl && \
rm -rf dist/ build/
# Create non-root user for security
RUN useradd --create-home --shell /bin/bash app \
&& chown -R app:app /app
USER app
# Set environment variable to indicate container environment
ENV DOCKER_CONTAINER=1
# Expose port
EXPOSE 8000
# Health check
HEALTHCHECK --interval=30s --timeout=10s --start-period=5s --retries=3 \
CMD curl -f http://localhost:8000/ || exit 1
# Run the application
CMD ["uvicorn", "random_access.main:app", "--host", "0.0.0.0", "--port", "8000"]
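
The HEALTHCHECK above relies on the curl package installed earlier in the image. If that dependency were ever dropped, a stdlib-only probe along these lines could stand in (a sketch, not taken from this commit; the file name is hypothetical):

# healthcheck.py - hypothetical stdlib replacement for the curl-based HEALTHCHECK
import sys
import urllib.request

try:
    # Same endpoint the curl check hits; a timeout or non-2xx/3xx status counts as unhealthy
    with urllib.request.urlopen("http://localhost:8000/", timeout=5) as resp:
        sys.exit(0 if resp.status < 400 else 1)
except Exception:
    sys.exit(1)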

docker-compose.yml (new file)
@@ -0,0 +1,37 @@
services:
api:
build: .
ports:
- "8000:8000"
environment:
# Redis configuration
- REDIS_HOST=valkey
- REDIS_PORT=6379
# Docker environment flag
- DOCKER_CONTAINER=1
# Override environment for production
- ENVIRONMENT=production
env_file: .env
depends_on:
- valkey
restart: unless-stopped
healthcheck:
test: ["CMD", "curl", "-f", "http://localhost:8000/"]
interval: 30s
timeout: 10s
retries: 3
start_period: 40s
valkey:
image: valkey/valkey:7-alpine
restart: unless-stopped
healthcheck:
test: ["CMD", "valkey-cli", "ping"]
interval: 30s
timeout: 10s
retries: 3
volumes:
- valkey_data:/data
volumes:
valkey_data:
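
Inside the compose network the api service reaches Valkey by its service name, which is what REDIS_HOST=valkey points at. A quick connectivity check using the valkey client pinned in pyproject.toml might look like this (a sketch; it assumes valkey-py mirrors the redis-py interface, and the script name is illustrative):

# check_valkey.py - rough connectivity probe using the same env vars as the app
import os

import valkey  # valkey[libvalkey], as pinned in pyproject.toml

client = valkey.Valkey(
    host=os.environ.get("REDIS_HOST", "localhost"),
    port=int(os.environ.get("REDIS_PORT", "6379")),
)
print("valkey reachable:", client.ping())

Run inside the api container (for example via docker compose exec), this exercises the same service-name resolution the cache decorators depend on.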

@@ -32,7 +32,8 @@ dependencies = [
"python-jose[cryptography]~=3.5.0",
"valkey[libvalkey]~=6.1.0",
"slowapi~=0.1.9",
"aiocache[redis]~=0.12.3"
"aiocache[redis]~=0.12.3",
"pydantic-settings~=2.10.1"
]
requires-python = ">=3.12"

@@ -12,6 +12,8 @@ from aiocache.serializers import PickleSerializer
from pyairtable import Api as AirtableApi
from pyairtable.formulas import match
from random_access.settings import settings
logger = logging.getLogger("uvicorn.error")
# Global queue for write operations
@@ -52,7 +54,8 @@ def _generate_cache_key(*args, **kwargs) -> str:
ttl=300, # 5 minutes
cache=Cache.REDIS, # type: ignore
serializer=PickleSerializer(),
port=6379,
endpoint=settings.redis_host,
port=settings.redis_port,
namespace="airtable_reads",
key_builder=lambda f, *args, **kwargs: _generate_cache_key(f.__name__, *args, **kwargs)
)
@@ -69,7 +72,8 @@ async def get_user_record(slack_user_id: str, users_table) -> dict:
ttl=300, # 5 minutes
cache=Cache.REDIS, # type: ignore
serializer=PickleSerializer(),
port=6379,
endpoint=settings.redis_host,
port=settings.redis_port,
namespace="airtable_reads",
key_builder=lambda f, *args, **kwargs: _generate_cache_key(f.__name__, *args, **kwargs)
)
@@ -86,7 +90,8 @@ async def get_game_record(game_id: str, submissions_table) -> dict:
ttl=180, # 3 minutes
cache=Cache.REDIS, # type: ignore
serializer=PickleSerializer(),
port=6379,
endpoint=settings.redis_host,
port=settings.redis_port,
namespace="airtable_reads",
key_builder=lambda f, *args, **kwargs: _generate_cache_key(f.__name__, *args, **kwargs)
)
@@ -100,7 +105,8 @@ async def get_all_items(items_table):
ttl=60, # 1 minute
cache=Cache.REDIS, # type: ignore
serializer=PickleSerializer(),
port=6379,
endpoint=settings.redis_host,
port=settings.redis_port,
namespace="airtable_reads",
key_builder=lambda f, *args, **kwargs: _generate_cache_key(f.__name__, *args, **kwargs)
)
@@ -117,7 +123,8 @@ async def get_session_by_token_cached(token: str, sessions_table) -> Optional[di
ttl=300, # 5 minutes
cache=Cache.REDIS, # type: ignore
serializer=PickleSerializer(),
port=6379,
endpoint=settings.redis_host,
port=settings.redis_port,
namespace="airtable_reads",
key_builder=lambda f, *args, **kwargs: _generate_cache_key(f.__name__, *args, **kwargs)
)
@@ -138,7 +145,8 @@ async def get_user_items(user_id: str, users_table) -> List[dict]:
ttl=300, # 5 minutes
cache=Cache.REDIS, # type: ignore
serializer=PickleSerializer(),
port=6379,
endpoint=settings.redis_host,
port=settings.redis_port,
namespace="airtable_reads",
key_builder=lambda f, *args, **kwargs: _generate_cache_key(f.__name__, *args, **kwargs)
)
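
Every cached read above now repeats the same endpoint/port/serializer keyword arguments. If that repetition ever becomes a nuisance, the settings-driven pieces could be bundled into one decorator factory (a sketch only; airtable_cached is not a name from this repo, and it reuses the module's existing _generate_cache_key helper):

from aiocache import Cache, cached
from aiocache.serializers import PickleSerializer

from random_access.settings import settings


def airtable_cached(ttl: int):
    """Read-cache decorator preconfigured for the shared Valkey instance."""
    return cached(
        ttl=ttl,
        cache=Cache.REDIS,  # type: ignore
        serializer=PickleSerializer(),
        endpoint=settings.redis_host,
        port=settings.redis_port,
        namespace="airtable_reads",
        # _generate_cache_key is the module-level helper already defined above
        key_builder=lambda f, *args, **kwargs: _generate_cache_key(f.__name__, *args, **kwargs),
    )

Each call site would then shrink to @airtable_cached(ttl=300) (or 180/60 where the shorter TTLs are used).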

@@ -5,11 +5,10 @@ import logging
from collections import namedtuple
from contextlib import asynccontextmanager
from dotenv import load_dotenv
from fastapi import FastAPI, Request, Response, HTTPException
from fastapi.responses import JSONResponse
from fastapi.middleware.cors import CORSMiddleware
from slowapi import Limiter, _rate_limit_exceeded_handler
from slowapi import Limiter
from slowapi.errors import RateLimitExceeded
from slowapi.middleware import SlowAPIMiddleware
from slack_bolt.adapter.fastapi.async_handler import AsyncSlackRequestHandler
@@ -27,9 +26,6 @@ Result = namedtuple("Result", "content, status")
logger = logging.getLogger("uvicorn.error")
if not load_dotenv():
raise FileNotFoundError("Environment secrets not found!")
# Initialize rate limiter
limiter = Limiter(key_func=get_client_ip)
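
With the stock _rate_limit_exceeded_handler import dropped, the limiter is presumably wired through SlowAPIMiddleware plus a custom handler elsewhere in the module. For reference, the usual slowapi wiring looks roughly like this (a sketch; handle_rate_limit is a hypothetical name, and the repo uses its own get_client_ip key function rather than get_remote_address):

from fastapi import FastAPI, Request
from fastapi.responses import JSONResponse
from slowapi import Limiter
from slowapi.errors import RateLimitExceeded
from slowapi.middleware import SlowAPIMiddleware
from slowapi.util import get_remote_address

limiter = Limiter(key_func=get_remote_address)
app = FastAPI()
app.state.limiter = limiter          # SlowAPIMiddleware looks the limiter up here
app.add_middleware(SlowAPIMiddleware)


@app.exception_handler(RateLimitExceeded)
async def handle_rate_limit(request: Request, exc: RateLimitExceeded) -> JSONResponse:
    # Return a plain 429 once the per-IP budget (RATE_LIMIT_REQUESTS) is exhausted
    return JSONResponse(status_code=429, content={"detail": "Rate limit exceeded"})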

@@ -1,8 +1,29 @@
"""
Settings configuration that works in both local development and Docker environments.
In local development:
- Loads from .env file if it exists
- Environment variables override .env file values
In Docker:
- Reads directly from environment variables (no .env file needed)
- Use docker-compose.yml or docker run -e to set environment variables
"""
import os
from os import environ
from pydantic_settings import BaseSettings, SettingsConfigDict
class Settings(BaseSettings):
model_config = SettingsConfigDict(env_file='.env', env_file_encoding='utf-8', extra='ignore')
model_config = SettingsConfigDict(
# Try to load .env file for local development, but don't fail if it doesn't exist
env_file='.env' if os.path.exists('.env') else None,
env_file_encoding='utf-8',
extra='ignore',
# Environment variables take precedence over .env file
env_ignore_empty=True
)
airtable_pat: str
airtable_base: str
slack_signing_secret: str
@@ -20,10 +41,21 @@ class Settings(BaseSettings):
# Session security
session_ttl_hours: int = 24 # Session expires after 24 hours
# Redis/Valkey settings - prioritize explicit env vars, fall back to container detection
redis_host: str = environ.get("REDIS_HOST") or (
"valkey" if environ.get("DOCKER_CONTAINER") else "localhost"
)
redis_port: int = int(environ.get("REDIS_PORT", "6379"))
@property
def is_production(self) -> bool:
return self.environment == "production"
@property
def is_container(self) -> bool:
"""Detect if running in a container environment."""
return bool(environ.get("DOCKER_CONTAINER") or environ.get("REDIS_HOST"))
@property
def origins_list(self) -> list[str]:
if self.allowed_origins == "*":