Scalability Guide
This guide walks through the specific code changes required to make Sunday production-ready.
Overview
Scaling Sunday requires changes across multiple layers: distributed rate limiting, database connection pooling and indexing, server-side caching, pagination, background job processing, session management, and full-text search. The sections below cover each in turn.
1. Distributed Rate Limiting
Install Dependencies
npm install @upstash/redis
# or for self-hosted
npm install ioredis
New File: lib/rate-limit-redis.ts
import { Redis } from '@upstash/redis'
const redis = Redis.fromEnv()
interface RateLimitResult {
allowed: boolean
remaining: number
resetAt: number
}
export async function checkRateLimit(
identifier: string,
limit: number,
windowSeconds: number
): Promise<RateLimitResult> {
const key = `ratelimit:${identifier}`
const now = Date.now()
const windowStart = now - (windowSeconds * 1000)
// Use sorted set for sliding window
const pipeline = redis.pipeline()
// Remove old entries
pipeline.zremrangebyscore(key, 0, windowStart)
// Add current request
pipeline.zadd(key, { score: now, member: `${now}-${Math.random()}` })
// Count requests in window
pipeline.zcard(key)
// Set expiry
pipeline.expire(key, windowSeconds)
const results = await pipeline.exec()
const count = results[2] as number
return {
allowed: count <= limit,
remaining: Math.max(0, limit - count),
resetAt: now + (windowSeconds * 1000)
}
}
Update Middleware: middleware.ts
import { NextResponse, type NextRequest } from 'next/server'
import { checkRateLimit } from '@/lib/rate-limit-redis'
export async function middleware(request: NextRequest) {
const { pathname } = request.nextUrl
// Get client identifier
const ip = request.headers.get('x-forwarded-for')?.split(',')[0]?.trim() || 'unknown'
let limit = { max: 60, window: 60 }
if (pathname.startsWith('/api/auth/')) {
limit = { max: 5, window: 60 }
}
const result = await checkRateLimit(`${ip}:${pathname}`, limit.max, limit.window)
if (!result.allowed) {
return new NextResponse(
JSON.stringify({ error: 'Rate limit exceeded' }),
{
status: 429,
headers: {
'X-RateLimit-Limit': limit.max.toString(),
'X-RateLimit-Remaining': '0',
'Retry-After': Math.ceil((result.resetAt - Date.now()) / 1000).toString()
}
}
)
}
return NextResponse.next()
}
2. Database Connection Pooling
Update: lib/mongodb.ts
import { MongoClient, type Db, type MongoClientOptions } from "mongodb"
const options: MongoClientOptions = {
// Connection pool settings
maxPoolSize: 50,
minPoolSize: 10,
maxIdleTimeMS: 30000,
// Timeouts
serverSelectionTimeoutMS: 5000,
socketTimeoutMS: 45000,
connectTimeoutMS: 10000,
// Write concern
w: 'majority',
retryWrites: true,
// Read preference for read replicas
readPreference: 'secondaryPreferred',
// Compression (zstd and snappy require the optional @mongodb-js/zstd and snappy packages)
compressors: ['zstd', 'snappy', 'zlib']
}
let cachedClient: MongoClient | null = null
async function getMongoClient(): Promise<MongoClient> {
if (cachedClient) {
return cachedClient
}
const uri = process.env.MONGODB_URI
if (!uri) throw new Error('MONGODB_URI not set')
cachedClient = new MongoClient(uri, options)
// Handle connection events
cachedClient.on('error', (error) => {
console.error('MongoDB connection error:', error)
cachedClient = null
})
cachedClient.on('close', () => {
console.warn('MongoDB connection closed')
cachedClient = null
})
await cachedClient.connect()
return cachedClient
}
export async function getDatabase(): Promise<Db> {
const client = await getMongoClient()
return client.db(process.env.DB_NAME)
}
// Health check
export async function checkDatabaseHealth(): Promise<boolean> {
try {
const client = await getMongoClient()
await client.db().command({ ping: 1 })
return true
} catch {
return false
}
}
Add Database Indexes
Create a migration script: scripts/create-indexes.ts
import { getDatabase } from '../lib/mongodb'
async function createIndexes() {
const db = await getDatabase()
// Users collection
await db.collection('users').createIndexes([
{ key: { email: 1 }, unique: true },
{ key: { emailVerificationToken: 1 }, sparse: true },
{ key: { resetPasswordToken: 1 }, sparse: true }
])
// Tasks collection
await db.collection('tasks').createIndexes([
{ key: { boardId: 1 } },
{ key: { groupId: 1 } },
{ key: { assignees: 1 } },
{ key: { dueDate: 1 } },
{ key: { status: 1 } },
{ key: { createdAt: -1 } },
// Compound index for common board queries (see the query sketch after this script)
{ key: { boardId: 1, status: 1, dueDate: 1 } },
// Text search index
{
key: { name: 'text', description: 'text' },
weights: { name: 10, description: 5 }
}
])
// Boards collection
await db.collection('boards').createIndexes([
{ key: { workspaceId: 1 } },
{ key: { 'members.userId': 1 } }
])
// Notifications collection
await db.collection('notifications').createIndexes([
{ key: { userId: 1, read: 1, createdAt: -1 } },
// TTL index - auto-delete after 30 days
{ key: { createdAt: 1 }, expireAfterSeconds: 30 * 24 * 60 * 60 }
])
console.log('Indexes created successfully')
}
createIndexes().then(() => process.exit(0)).catch((err) => { console.error(err); process.exit(1) }) // exit explicitly; the pooled client otherwise keeps the process alive
3. Server-Side Caching with Redis
New File: lib/cache.ts
import { Redis } from 'ioredis'
const redis = new Redis(process.env.REDIS_URL!)
interface CacheOptions {
ttl?: number // seconds
tags?: string[]
}
export const cache = {
async get<T>(key: string): Promise<T | null> {
const data = await redis.get(key)
return data ? JSON.parse(data) : null
},
async set(key: string, value: any, options: CacheOptions = {}): Promise<void> {
const { ttl = 300 } = options
await redis.setex(key, ttl, JSON.stringify(value))
// Store tags for invalidation
if (options.tags) {
for (const tag of options.tags) {
await redis.sadd(`tag:${tag}`, key)
}
}
},
async del(key: string): Promise<void> {
await redis.del(key)
},
async invalidateTag(tag: string): Promise<void> {
const keys = await redis.smembers(`tag:${tag}`)
if (keys.length > 0) {
await redis.del(...keys)
await redis.del(`tag:${tag}`)
}
}
}
// Cached data fetching
export async function cached<T>(
key: string,
fetcher: () => Promise<T>,
options: CacheOptions = {}
): Promise<T> {
const cachedData = await cache.get<T>(key)
// Check against null so falsy cached values (0, false, '') are still served
if (cachedData !== null) return cachedData
const data = await fetcher()
await cache.set(key, data, options)
return data
}
Update API Routes with Caching
// Example: GET /api/boards/[id]/route.ts
import { ObjectId } from 'mongodb'
import { getDatabase } from '@/lib/mongodb'
import { cached, cache } from '@/lib/cache'
export async function GET(
request: Request,
{ params }: { params: { id: string } }
) {
const board = await cached(
`board:${params.id}`,
async () => {
const db = await getDatabase()
return db.collection('boards').findOne({ _id: new ObjectId(params.id) })
},
{ ttl: 300, tags: [`board:${params.id}`] }
)
return Response.json(board)
}
// Invalidate cache on update
export async function PATCH(
request: Request,
{ params }: { params: { id: string } }
) {
// ... update logic
// Invalidate cache
await cache.invalidateTag(`board:${params.id}`)
return Response.json({ success: true })
}
4. Pagination and Data Loading
Update API Routes
// GET /api/tasks/route.ts
import { ObjectId } from 'mongodb'
import { getDatabase } from '@/lib/mongodb'

export async function GET(request: Request) {
const { searchParams } = new URL(request.url)
const boardId = searchParams.get('boardId')
if (!boardId) {
return Response.json({ error: 'boardId is required' }, { status: 400 })
}
const page = Math.max(1, parseInt(searchParams.get('page') || '1'))
const limit = Math.min(100, parseInt(searchParams.get('limit') || '50'))
const skip = (page - 1) * limit
const db = await getDatabase()
const [tasks, total] = await Promise.all([
db.collection('tasks')
.find({ boardId: new ObjectId(boardId) })
.sort({ createdAt: -1 })
.skip(skip)
.limit(limit)
.toArray(),
db.collection('tasks').countDocuments({ boardId: new ObjectId(boardId) })
])
return Response.json({
tasks,
pagination: {
page,
limit,
total,
totalPages: Math.ceil(total / limit),
hasNext: page * limit < total
}
})
}
Update Client with React Query
npm install @tanstack/react-query
// lib/queries.ts
import { useInfiniteQuery, useMutation, useQueryClient } from '@tanstack/react-query'
export function useTasks(boardId: string) {
return useInfiniteQuery({
queryKey: ['tasks', boardId],
initialPageParam: 1, // required by React Query v5
queryFn: async ({ pageParam }) => {
const res = await fetch(`/api/tasks?boardId=${boardId}&page=${pageParam}&limit=50`)
return res.json()
},
getNextPageParam: (lastPage) =>
lastPage.pagination.hasNext ? lastPage.pagination.page + 1 : undefined,
staleTime: 30000, // 30 seconds
})
}
export function useUpdateTask() {
const queryClient = useQueryClient()
return useMutation({
mutationFn: async ({ taskId, updates }: { taskId: string; updates: Record<string, unknown> }) => {
const res = await fetch('/api/tasks', {
method: 'PATCH',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({ taskId, updates })
})
return res.json()
},
onSuccess: () => {
// Invalidate and refetch
queryClient.invalidateQueries({ queryKey: ['tasks'] })
}
})
}
5. Background Job Queue
Install Dependencies
npm install bullmq
New File: lib/queue.ts
import { Queue, Worker, Job } from 'bullmq'
import { Redis } from 'ioredis'
const connection = new Redis(process.env.REDIS_URL!, { maxRetriesPerRequest: null })
// Define queues
export const emailQueue = new Queue('email', { connection })
export const automationQueue = new Queue('automation', { connection })
// Email worker
new Worker('email', async (job: Job) => {
const { to, subject, html } = job.data
// Import email sending logic
const { sendEmail } = await import('./email')
await sendEmail(to, subject, html)
}, { connection })
// Automation worker
new Worker('automation', async (job: Job) => {
const { automationId, taskId, trigger } = job.data
// Execute automation actions (import executeAutomation from your
// automation module; the path below is project-specific)
const { executeAutomation } = await import('./automations')
await executeAutomation(automationId, taskId, trigger)
}, { connection })
// Helper to queue jobs
export async function queueEmail(to: string, subject: string, html: string) {
await emailQueue.add('send-email', { to, subject, html }, {
attempts: 3,
backoff: { type: 'exponential', delay: 1000 }
})
}
export async function queueAutomation(
automationId: string,
taskId: string,
trigger: string
) {
await automationQueue.add('execute', { automationId, taskId, trigger })
}
Update Automation Trigger
// In store.ts - replace synchronous automation with queue
import { queueAutomation, queueEmail } from './queue'
// Instead of executing actions inline, enqueue them.
// Use for...of rather than forEach so each await is actually awaited:
for (const action of automation.actions) {
if (action.type === 'notify') {
await queueEmail(
userEmail,
'Notification',
action.config.message
)
}
}
6. Session Management with Redis
New File: lib/session.ts
import { Redis } from 'ioredis'
import { nanoid } from 'nanoid'
const redis = new Redis(process.env.REDIS_URL!)
const SESSION_TTL = 86400 * 7 // 7 days
interface Session {
userId: string
email: string
createdAt: number
lastAccess: number
userAgent?: string
ip?: string
}
export async function createSession(
userId: string,
email: string,
metadata?: { userAgent?: string; ip?: string }
): Promise<string> {
const sessionId = nanoid(32)
const session: Session = {
userId,
email,
createdAt: Date.now(),
lastAccess: Date.now(),
...metadata
}
await redis.setex(
`session:${sessionId}`,
SESSION_TTL,
JSON.stringify(session)
)
// Track user's sessions
await redis.sadd(`user-sessions:${userId}`, sessionId)
return sessionId
}
export async function getSession(sessionId: string): Promise<Session | null> {
const data = await redis.get(`session:${sessionId}`)
if (!data) return null
const session = JSON.parse(data)
// Update last access
session.lastAccess = Date.now()
await redis.setex(`session:${sessionId}`, SESSION_TTL, JSON.stringify(session))
return session
}
export async function deleteSession(sessionId: string): Promise<void> {
const session = await getSession(sessionId)
if (session) {
await redis.srem(`user-sessions:${session.userId}`, sessionId)
}
await redis.del(`session:${sessionId}`)
}
export async function deleteAllUserSessions(userId: string): Promise<void> {
const sessions = await redis.smembers(`user-sessions:${userId}`)
if (sessions.length > 0) {
await redis.del(...sessions.map(s => `session:${s}`))
await redis.del(`user-sessions:${userId}`)
}
}
export async function listUserSessions(userId: string): Promise<Session[]> {
const sessionIds = await redis.smembers(`user-sessions:${userId}`)
const sessions: Session[] = []
for (const id of sessionIds) {
const session = await getSession(id)
if (session) sessions.push(session)
}
return sessions
}
7. Full-Text Search
Option A: MongoDB Atlas Search
// Create search index via Atlas UI or API
// Index name: 'tasks_search'
import { ObjectId } from 'mongodb'
import { getDatabase } from '@/lib/mongodb'

export async function searchTasks(query: string, boardId?: string) {
const db = await getDatabase()
const pipeline: any[] = [
{
$search: {
index: 'tasks_search',
compound: {
must: [
{
text: {
query,
path: ['name', 'description'],
fuzzy: { maxEdits: 1 }
}
}
],
filter: boardId ? [
{ equals: { path: 'boardId', value: new ObjectId(boardId) } }
] : []
}
}
},
{ $limit: 20 },
{ $project: { score: { $meta: 'searchScore' }, name: 1, description: 1 } }
]
return db.collection('tasks').aggregate(pipeline).toArray()
}
Option B: OpenSearch/Elasticsearch
import { Client } from '@opensearch-project/opensearch'
import type { Task } from './types' // adjust to wherever your Task type lives
const client = new Client({
node: process.env.OPENSEARCH_URL,
auth: {
username: process.env.OPENSEARCH_USER!,
password: process.env.OPENSEARCH_PASS!
}
})
export async function indexTask(task: Task) {
await client.index({
index: 'tasks',
id: task.id,
body: {
name: task.name,
description: task.description,
boardId: task.boardId,
status: task.status,
createdAt: task.createdAt
}
})
}
export async function searchTasks(query: string) {
const result = await client.search({
index: 'tasks',
body: {
query: {
multi_match: {
query,
fields: ['name^2', 'description'],
fuzziness: 'AUTO'
}
}
}
})
return result.body.hits.hits.map(hit => ({
id: hit._id,
...hit._source,
score: hit._score
}))
}
Environment Variables for Production
# Database
MONGODB_URI=mongodb+srv://user:pass@cluster.mongodb.net/?retryWrites=true&w=majority
DB_NAME=sunday_prod
# Redis
REDIS_URL=redis://user:pass@redis-host:6379
# OpenSearch (optional)
OPENSEARCH_URL=https://search-domain.region.es.amazonaws.com
OPENSEARCH_USER=admin
OPENSEARCH_PASS=your-password
# S3
AWS_REGION=us-east-1
S3_BUCKET=sunday-uploads
CDN_DOMAIN=cdn.yourdomain.com
# Queue (if using SQS)
EMAIL_QUEUE_URL=https://sqs.us-east-1.amazonaws.com/123456789/email-queue
Migration Checklist
- Set up Redis (ElastiCache or Upstash)
- Update rate limiting to use Redis
- Configure MongoDB connection pooling
- Create database indexes
- Implement caching layer
- Add pagination to API routes
- Set up React Query for client
- Configure background job queue
- Implement session management
- Set up full-text search
- Update environment variables
Next: AWS Deployment Guide