perf: Major performance overhaul with virtual scrolling and context splitting

Phase 1 - Virtual Scrolling:
- Add @tanstack/react-virtual for efficient message list rendering
- Only render visible messages instead of the entire history
- Fix auto-scroll by setting scrollTop on the native scroll element instead of going through the virtualizer, which proved unreliable (see the sketch below)
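
A minimal sketch of the windowing pattern with the @tanstack/react-virtual v3 API; `MessageList`, `Message`, and the sizing numbers are illustrative stand-ins, not the actual components:

```jsx
import { useEffect, useRef } from 'react';
import { useVirtualizer } from '@tanstack/react-virtual';

// Stand-in for the real message renderer.
const Message = ({ message }) => <div>{message.text}</div>;

function MessageList({ messages }) {
  const parentRef = useRef(null);

  const virtualizer = useVirtualizer({
    count: messages.length,
    getScrollElement: () => parentRef.current,
    estimateSize: () => 96, // rough per-row height; refined by measureElement
    overscan: 8,            // keep a few extra rows mounted around the viewport
  });

  // Auto-scroll with native scrollTop: scrolling through the virtualizer can
  // land short while dynamic row heights are still being measured.
  useEffect(() => {
    const el = parentRef.current;
    if (el) el.scrollTop = el.scrollHeight;
  }, [messages.length]);

  return (
    <div ref={parentRef} style={{ height: '100%', overflowY: 'auto' }}>
      <div style={{ height: virtualizer.getTotalSize(), position: 'relative' }}>
        {virtualizer.getVirtualItems().map((item) => (
          <div
            key={item.key}
            data-index={item.index}
            ref={virtualizer.measureElement}
            style={{
              position: 'absolute',
              top: 0,
              left: 0,
              width: '100%',
              transform: `translateY(${item.start}px)`,
            }}
          >
            <Message message={messages[item.index]} />
          </div>
        ))}
      </div>
    </div>
  );
}

export default MessageList;
```

Only the rows returned by `getVirtualItems()` are mounted, so render cost stays flat as the history grows.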

Phase 2 - Context Optimization:
- Split monolithic SessionContext into 4 specialized contexts
- MessagesContext, SessionsContext, SettingsContext, UIContext
- Prevents unnecessary re-renders across unrelated components (see the sketch below)
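
A minimal sketch of the split, assuming hook-style consumers; the state shapes and hook names are placeholders, not the real ones:

```jsx
import { createContext, useContext, useMemo, useState } from 'react';

// One context per concern: a settings consumer no longer re-renders when a
// new message arrives, because messages live behind a different provider.
const MessagesContext = createContext(null);
const SettingsContext = createContext(null);

export function MessagesProvider({ children }) {
  const [messages, setMessages] = useState([]);
  const value = useMemo(() => ({ messages, setMessages }), [messages]);
  return <MessagesContext.Provider value={value}>{children}</MessagesContext.Provider>;
}

export function SettingsProvider({ children }) {
  const [settings, setSettings] = useState({});
  const value = useMemo(() => ({ settings, setSettings }), [settings]);
  return <SettingsContext.Provider value={value}>{children}</SettingsContext.Provider>;
}

export const useMessages = () => useContext(MessagesContext);
export const useSettings = () => useContext(SettingsContext);

// SessionsContext and UIContext follow the same shape; the four providers
// nest once at the app root where the single SessionContext used to sit.
```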

Phase 3 - Compression & Cleanup:
- Enable Brotli compression (~23% smaller than gzip)
- Switch to the fholzer/nginx-brotli:v1.28.0 image (example config below)
- Add automatic upload cleanup for idle sessions
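
An illustrative nginx fragment for the Brotli side; these are standard ngx_brotli directives, but the values and type list are assumptions rather than the shipped config:

```nginx
http {
    brotli on;
    brotli_comp_level 6;   # ngx_brotli default; higher trades CPU for ratio
    brotli_types text/plain text/css application/javascript application/json image/svg+xml;
    brotli_static on;      # serve pre-compressed .br files when they exist

    # Keep gzip as a fallback for clients without "Accept-Encoding: br"
    gzip on;
    gzip_types text/plain text/css application/javascript application/json image/svg+xml;
}
```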

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
2025-12-20 17:28:03 +01:00
parent fbc8103034
commit e5d17bfad3
17 changed files with 827 additions and 564 deletions

backend/.npmrc Normal file

@@ -0,0 +1,5 @@
+fetch-timeout=300000
+fetch-retries=3
+fetch-retry-factor=2
+fetch-retry-mintimeout=10000
+fetch-retry-maxtimeout=60000


@@ -4,7 +4,7 @@ import { createServer } from 'http';
 import { spawn } from 'child_process';
 import { v4 as uuidv4 } from 'uuid';
 import cors from 'cors';
-import { existsSync, readFileSync, readdirSync, statSync, mkdirSync, writeFileSync, appendFileSync } from 'fs';
+import { existsSync, readFileSync, readdirSync, statSync, mkdirSync, writeFileSync, appendFileSync, rmSync } from 'fs';
 import { join, basename, extname } from 'path';
 import multer from 'multer';
 import session from 'express-session';
@@ -41,6 +41,32 @@ const DEBUG = process.env.DEBUG === 'true';
 const UPLOAD_DIR = process.env.UPLOAD_DIR || '/projects/.claude-uploads';
 const MAX_FILE_SIZE = 10 * 1024 * 1024; // 10MB

+// History cache with 30s TTL
+const historyCache = new Map();
+const HISTORY_CACHE_TTL = 30 * 1000; // 30 seconds
+
+function getCachedHistory(cacheKey) {
+  const cached = historyCache.get(cacheKey);
+  if (cached && Date.now() - cached.timestamp < HISTORY_CACHE_TTL) {
+    return cached.data;
+  }
+  historyCache.delete(cacheKey);
+  return null;
+}
+
+function setCachedHistory(cacheKey, data) {
+  historyCache.set(cacheKey, { data, timestamp: Date.now() });
+  // Clean up old entries periodically
+  if (historyCache.size > 100) {
+    const now = Date.now();
+    for (const [key, value] of historyCache) {
+      if (now - value.timestamp > HISTORY_CACHE_TTL) {
+        historyCache.delete(key);
+      }
+    }
+  }
+}
+
 // Allowed file types
 const ALLOWED_TYPES = {
   // Images
@@ -99,6 +125,19 @@ const upload = multer({
   }
 });

+// Cleanup uploads for a session
+function cleanupSessionUploads(sessionId) {
+  try {
+    const sessionDir = join(UPLOAD_DIR, sessionId);
+    if (existsSync(sessionDir)) {
+      rmSync(sessionDir, { recursive: true, force: true });
+      console.log(`[Cleanup] Removed upload directory for session: ${sessionId}`);
+    }
+  } catch (err) {
+    console.error(`[Cleanup] Failed to remove uploads for session ${sessionId}:`, err.message);
+  }
+}
+
 // Load hosts configuration
 const CONFIG_PATH = process.env.CONFIG_PATH || '/app/config/hosts.json';
 let hostsConfig = { hosts: {}, defaults: { scanSubdirs: true, maxDepth: 1 } };
@@ -181,6 +220,8 @@ setInterval(() => {
         }
       }
       sessions.delete(id);
+      // Also cleanup uploads
+      cleanupSessionUploads(id);
     }
   }
 }, 60 * 60 * 1000); // Check hourly
@@ -548,6 +589,14 @@ app.get('/api/history/:project', requireAuth, async (req, res) => {
   const isSSH = host?.connection?.type === 'ssh';
   console.log(`[History] Resolved - projectPath: ${projectPath}, hostId: ${hostId}, isSSH: ${isSSH}`);

+  // Check cache first
+  const cacheKey = `${hostId || 'local'}:${projectPath}`;
+  const cached = getCachedHistory(cacheKey);
+  if (cached) {
+    console.log(`[History] Cache hit for ${cacheKey}`);
+    return res.json(cached);
+  }
+
   // Convert project path to Claude's folder naming convention
   const projectFolder = projectPath.replace(/\//g, '-');
@@ -581,9 +630,11 @@ app.get('/api/history/:project', requireAuth, async (req, res) => {
       const sessionId = basename(latestFile).replace('.jsonl', '');
       const messages = parseHistoryContent(content);
+      const result = { messages, sessionId, source: 'ssh' };
       console.log(`[History] SSH - Returning ${messages.length} messages from session ${sessionId}`);
-      return res.json({ messages, sessionId, source: 'ssh' });
+      setCachedHistory(cacheKey, result);
+      return res.json(result);
     } catch (sshErr) {
       console.error('SSH history fetch error:', sshErr.message);
       return res.json({ messages: [], sessionId: null, error: sshErr.message });
@@ -617,8 +668,10 @@ app.get('/api/history/:project', requireAuth, async (req, res) => {
     const sessionId = latestFile.name.replace('.jsonl', '');
     const content = readFileSync(latestFile.path, 'utf-8');
     const messages = parseHistoryContent(content);
-    res.json({ messages, sessionId });
+    const result = { messages, sessionId };
+    setCachedHistory(cacheKey, result);
+    res.json(result);
   } catch (err) {
     console.error('Error reading history:', err);
     res.status(500).json({ error: err.message });
@@ -1342,6 +1395,8 @@ wss.on('connection', async (ws, req) => {
       claudeProcess.kill();
       sessions.delete(sessionId);
     }
+    // Cleanup uploaded files for this session
+    cleanupSessionUploads(sessionId);
   });
   ws.on('error', (err) => {