Game Performance Testing: FPS, Memory Leaks, and Load Testing
Performance is the most common reason games get negative reviews on mobile and PC. "Unplayable on my device," "terrible framerate," "game crashes after 20 minutes." Most of these issues are detectable before release with systematic performance testing.
Frame Rate Testing
Defining Performance Targets
Before testing, define your targets. Typical targets by platform:
| Platform | Target FPS | Minimum Acceptable |
|---|---|---|
| PC (high-end) | 120+ | 60 |
| PC (minimum spec) | 60 | 30 |
| Console | 60 | 30 (some titles: 60) |
| Mobile (flagship) | 60 | 30 |
| Mobile (minimum spec) | 30 | 24 |
Unity Performance Test Framework
// Unity Performance Testing extension (com.unity.test-framework.performance)
using Unity.PerformanceTesting;
using NUnit.Framework;
using UnityEngine.TestTools;
using System.Collections;
// Frame-rate benchmarks built on the Unity Performance Testing package
// (com.unity.test-framework.performance). Frame timings are recorded as
// SampleGroups that CI compares against stored baselines.
public class FPSPerformanceTests
{
// Steady-state FPS on the main menu: let the scene settle, then record
// every frame for five seconds.
[UnityTest, Performance]
public IEnumerator MainMenuFPSBenchmark()
{
yield return new WaitForSeconds(2); // Allow scene to stabilize
// Measure.Frames() samples each rendered frame inside the scope.
using (Measure.Frames().Scope())
{
yield return new WaitForSeconds(5); // Measure for 5 seconds
}
// Access results via PerformanceTest.Active.SampleGroups
}
// Worst-case combat load: 60 warmup frames, then 120 measured frames.
[UnityTest, Performance]
public IEnumerator CombatSceneFPSBenchmark()
{
// Load combat scene with many enemies
// NOTE(review): LoadScene is not defined in this file — presumably a
// shared test helper that wraps SceneManager; confirm.
yield return LoadScene("CombatBenchmarkScene");
yield return new WaitForSeconds(2); // Stabilize
using (Measure.Frames().WarmupCount(60).MeasurementCount(120).Scope())
{
// The framework captures frame timings
yield return null;
}
}
// Records the draw-call count in the heaviest crowd scene.
[UnityTest, Performance]
public IEnumerator DrawCallCount()
{
yield return LoadScene("MaxCrowdScene");
// NOTE(review): this records a single sample at this point — the
// 3-second wait afterwards is not sampled. UnityStats is an
// editor-only API; confirm the delegate overload of Measure.Custom
// exists in the package version in use.
Measure.Custom(new SampleGroup("Draw Calls", SampleUnit.None),
() => UnityStats.drawCalls);
yield return new WaitForSeconds(3);
// Assert via custom benchmark comparisons in CI
}
}Automated FPS Regression in CI
# performance_regression.py
import json
import sys
def check_fps_regression(current_results_path: str,
                         baseline_results_path: str,
                         regression_threshold_pct: float = 10.0) -> bool:
    """Compare the current FPS results against a baseline and flag regressions.

    Args:
        current_results_path: JSON file mapping test name -> {"median_fps": float}.
        baseline_results_path: Baseline file with the same shape.
        regression_threshold_pct: Maximum tolerated FPS drop, in percent.

    Returns:
        True when no benchmark regressed more than the threshold,
        False otherwise (CI should fail on False).
    """
    with open(current_results_path) as f:
        current = json.load(f)
    with open(baseline_results_path) as f:
        baseline = json.load(f)

    regressions = []
    for test_name, current_data in current.items():
        if test_name not in baseline:
            continue  # New benchmark: nothing to compare against yet.
        baseline_fps = baseline[test_name].get("median_fps")
        current_fps = current_data.get("median_fps")
        # Skip malformed entries and zero/None baselines — a zero baseline
        # would otherwise raise ZeroDivisionError below.
        if not baseline_fps or current_fps is None:
            continue
        regression_pct = ((baseline_fps - current_fps) / baseline_fps) * 100
        if regression_pct > regression_threshold_pct:
            regressions.append({
                "test": test_name,
                "baseline_fps": baseline_fps,
                "current_fps": current_fps,
                "regression_pct": regression_pct
            })

    if regressions:
        print("PERFORMANCE REGRESSIONS DETECTED:")
        for r in regressions:
            print(f"  {r['test']}: {r['baseline_fps']:.1f}→{r['current_fps']:.1f} fps "
                  f"(-{r['regression_pct']:.1f}%)")
        return False
    print(f"No regressions detected (threshold: {regression_threshold_pct}%)")
    return True
# CLI entry point: compares the current run against the stored baseline and
# signals the result to CI through the process exit code (0 = OK, 1 = regression).
if __name__ == "__main__":
success = check_fps_regression(
current_results_path="performance-results-current.json",
baseline_results_path="performance-results-baseline.json"
)
sys.exit(0 if success else 1)Memory Leak Detection
Memory leaks in games manifest as gradual performance degradation and eventual OOM crashes. The telltale pattern: memory climbs steadily over the course of a session and never returns to its baseline after scene transitions or match resets.
Unity Memory Profiler
// Soak test: repeatedly load/unload a gameplay scene and assert that
// allocated memory returns close to its starting level. Leaked references
// (static caches, undetached event handlers) show up as monotonic growth.
[UnityTest]
public IEnumerator NoMemoryLeakOnSceneReload()
{
// Baseline memory
System.GC.Collect();
yield return new WaitForSeconds(0.5f);
// NOTE(review): GetTotalAllocatedMemoryLong reports Unity's native
// allocator, which GC.Collect does not directly shrink — confirm this is
// the intended metric (vs. Profiler.GetMonoUsedSizeLong for managed heap).
long initialMemory = UnityEngine.Profiling.Profiler.GetTotalAllocatedMemoryLong();
// Load and unload a scene multiple times
for (int i = 0; i < 10; i++)
{
yield return UnityEngine.SceneManagement.SceneManager.LoadSceneAsync("GameLevel01");
yield return new WaitForSeconds(1f);
yield return UnityEngine.SceneManagement.SceneManager.LoadSceneAsync("MainMenu");
yield return new WaitForSeconds(0.5f);
System.GC.Collect(); // Collect each cycle so growth reflects leaks, not pending garbage
}
yield return new WaitForSeconds(1f);
System.GC.Collect();
long finalMemory = UnityEngine.Profiling.Profiler.GetTotalAllocatedMemoryLong();
long growthMB = (finalMemory - initialMemory) / (1024 * 1024);
// Allow up to 20 MB of slack for caches legitimately warmed during the cycles.
Assert.Less(growthMB, 20,
$"Memory grew by {growthMB}MB after 10 scene reload cycles — possible leak");
}Android Memory Tracking
import subprocess
import time
def _parse_total_pss_mb(meminfo_output: str) -> float:
    """Extract the total PSS figure from `dumpsys meminfo` output.

    Matches both the "TOTAL PSS:" label (newer Android) and plain "TOTAL:"
    (older releases), and accepts values printed with decimals or thousands
    separators — `str.isdigit()` rejects both, which made the original
    parser silently report 0 on such devices.  Returns 0.0 when no total
    line is found.
    """
    for line in meminfo_output.split('\n'):
        if 'TOTAL PSS:' in line or 'TOTAL:' in line:
            for token in line.split():
                try:
                    # First numeric token on the total line is the PSS value.
                    return float(token.replace(',', ''))
                except ValueError:
                    continue
    return 0.0


def track_memory_over_session(package_name: str, duration_minutes: int = 30) -> list:
    """Sample app memory once per minute over a play session via adb.

    Args:
        package_name: Android application id to query (e.g. "com.foo.game").
        duration_minutes: Number of one-minute samples to take.

    Returns:
        List of {"minute": int, "total_pss_mb": float} samples.
    """
    samples = []
    for minute in range(duration_minutes):
        result = subprocess.run(
            ["adb", "shell", "dumpsys", "meminfo", package_name, "--scale", "MB"],
            capture_output=True, text=True
        )
        samples.append({
            "minute": minute,
            "total_pss_mb": _parse_total_pss_mb(result.stdout)
        })
        # Wait between samples, but not after the final one — the original
        # slept an extra minute past the end of the session.
        if minute < duration_minutes - 1:
            time.sleep(60)
    return samples
# 30-minute soak test: compares the average PSS of the first five minutes
# against the last five; sustained net growth suggests a leak.
def test_memory_stable_over_30_min_session():
samples = track_memory_over_session("com.yourcompany.yourgame", duration_minutes=30)
# Average five one-minute samples at each end to smooth out GC noise.
first_5min_avg = sum(s['total_pss_mb'] for s in samples[:5]) / 5
last_5min_avg = sum(s['total_pss_mb'] for s in samples[-5:]) / 5
growth_mb = last_5min_avg - first_5min_avg
# 50 MB of net growth over half an hour is the leak threshold.
assert growth_mb < 50, \
f"Memory grew by {growth_mb:.1f}MB over 30 minutes — possible leak\n" \
f"Initial avg: {first_5min_avg:.1f}MB, Final avg: {last_5min_avg:.1f}MB"Game Server Load Testing
Simulating Concurrent Players
// k6 script: simulate players joining and playing
import ws from 'k6/ws';
import { check, sleep } from 'k6';
import { Counter, Trend } from 'k6/metrics';
// Custom metrics: matchmaking latency distribution and abnormal socket drops.
const matchJoinTime = new Trend('match_join_time_ms');
const disconnections = new Counter('unexpected_disconnections');
export const options = {
stages: [
{ duration: '5m', target: 500 }, // Ramp to 500 concurrent players
{ duration: '20m', target: 5000 }, // Load test at 5000 players
{ duration: '5m', target: 0 },
],
thresholds: {
'match_join_time_ms': ['p(95)<3000'], // 95% join in under 3 seconds
'unexpected_disconnections': ['count<10'], // Fewer than 10 drops per run
// NOTE(review): ws_sessions is a Counter, so 'rate' here means events per
// second, not a success ratio — verify this threshold expresses the intent.
ws_sessions: ['rate>0.99'], // 99%+ successful connections
},
};
// Each virtual user: connect, request a match, play 5-15 minutes, leave.
export default function () {
const playerId = `player-${__VU}-${__ITER}`;
const start = Date.now();
const res = ws.connect(
`wss://gameserver.example.com/ws?player_id=${playerId}`,
{},
function(socket) {
socket.on('open', () => {
socket.send(JSON.stringify({ type: 'find_match', mode: 'casual' }));
});
socket.on('message', (msg) => {
const data = JSON.parse(msg);
if (data.type === 'match_found') {
const joinTime = Date.now() - start;
matchJoinTime.add(joinTime);
// Simulate playing for 5-15 minutes
socket.setTimeout(() => {
socket.send(JSON.stringify({ type: 'leave_match' }));
socket.close();
}, Math.random() * 600000 + 300000);
}
});
// Close codes 1000 (normal) and 1001 (going away) are expected;
// anything else counts as an unexpected disconnection.
socket.on('close', (code) => {
if (code !== 1000 && code !== 1001) {
disconnections.add(1);
}
});
}
);
// HTTP 101 Switching Protocols means the WebSocket handshake succeeded.
check(res, { 'connected': (r) => r && r.status === 101 });
}Server Stress Testing with Metrics
def test_server_handles_1000_concurrent_players():
"""Server should maintain <100ms tick latency with 1000 players."""
# Run k6 load test
result = subprocess.run([
"k6", "run",
"--vus", "1000",
"--duration", "10m",
"--out", "json=results.json",
"game-server-load-test.js"
], capture_output=True, text=True)
# Parse results
with open("results.json") as f:
metrics = json.load(f)
# Check thresholds
join_p95 = metrics['metrics']['match_join_time_ms']['p(95)']
assert join_p95 < 3000, f"Match join P95 {join_p95}ms exceeds 3s"
disconnections = metrics['metrics']['unexpected_disconnections']['count']
assert disconnections < 10, f"{disconnections} unexpected disconnections"Automated Performance Reports
def generate_performance_report(test_run_id: str) -> str:
"""Render a Markdown performance report comparing a run against baseline.

NOTE(review): load_results, load_baseline_results, and datetime are not
defined or imported in this file — presumably shared test utilities;
confirm before running standalone.
"""
results = load_results(test_run_id)
baseline = load_baseline_results()
report = f"""
# Performance Test Report — {test_run_id}
**Date:** {datetime.now().strftime('%Y-%m-%d %H:%M')}
## Frame Rate
| Scene | Baseline P50 | Current P50 | Change | Status |
|-------|-------------|-------------|--------|--------|
"""
# One table row per benchmarked scene, comparing median (P50) FPS to baseline.
for scene in results['fps_scenes']:
baseline_fps = baseline['fps'][scene]['p50']
current_fps = results['fps'][scene]['p50']
change = current_fps - baseline_fps
change_pct = (change / baseline_fps) * 100
# Status bands: within 5% of baseline OK, 5-10% warning, beyond 10% failure.
status = "✅" if change_pct >= -5 else "⚠️" if change_pct >= -10 else "❌"
report += f"| {scene} | {baseline_fps:.1f} | {current_fps:.1f} | {change_pct:+.1f}% | {status} |\n"
report += f"""
## Memory
- Peak memory during session: {results['memory']['peak_mb']:.1f} MB
- Memory at session end: {results['memory']['end_mb']:.1f} MB
- Growth over session: {results['memory']['end_mb'] - results['memory']['start_mb']:.1f} MB
## Server Load (1000 CCU)
- Match join P95: {results['server']['join_p95_ms']:.0f}ms
- Unexpected disconnections: {results['server']['disconnections']}
"""
return reportPerformance testing is most valuable when it runs automatically on every release candidate, comparing against a known baseline. The specific numbers matter less than the trend — a 5% FPS drop per release adds up fast.