Building a Backend Architecture Without a Database
The Heresy: What if you don't need a database? What if version control, issue tracking, and a global CDN could replace your entire backend stack? This article explores how I built a production application using GitHub as the primary data store.
Traditional databases are powerful, but they come with baggage: servers to provision, hosting bills to pay, schema migrations to run, and backups to manage.
GitHub, as it turns out, provides most of what a database offers—and more:
| Concern | Traditional Stack | GitHub Stack |
|---|---|---|
| Storage | PostgreSQL / MongoDB | Git Repository Files |
| API | REST / GraphQL (Custom) | GitHub REST / GraphQL API |
| Authentication | Auth0 / Custom OAuth | GitHub OAuth |
| Versioning | Custom Migration System | Git Commits |
| Real-time Updates | WebSockets / Polling | GitHub Webhooks |
| CDN | CloudFlare / AWS CloudFront | GitHub Pages (Built-in) |
| Backups | Custom Backup Scripts | Git Remotes |
| Cost | $50-500/month | $0/month |
The key insight is that everything is a file. Instead of tables and rows, we use directories and files:
To make GitHub feel like a database, we need an abstraction layer:
// GitHub Database Client
//
// Treats a GitHub repository as a document store: each document lives at
// `data/<collection>/<id>.json` and all reads/writes go through the GitHub
// Contents API. Successful reads are memoized in an in-memory Map.
class GitHubDB {
  /**
   * @param {Object} config
   * @param {string} config.owner  Repository owner (user or organization).
   * @param {string} config.repo   Repository name.
   * @param {string} config.token  GitHub token with contents read/write access.
   * @param {string} [config.branch='main']  Branch to operate on.
   */
  constructor(config) {
    this.config = {
      owner: config.owner,
      repo: config.repo,
      token: config.token,
      branch: config.branch || 'main'
    };
    this.baseUrl = 'https://api.github.com';
    // Read cache keyed by `${collection}:${id}`. It never expires on its
    // own — see clearCache()/invalidateCache() and the webhook handler.
    this.cache = new Map();
  }

  // ---- Generic CRUD operations ----

  /**
   * Create a new document with a generated id.
   * @returns {Promise<Object>} the stored data plus its new `id`.
   */
  async create(collection, data) {
    const id = this.generateId();
    const path = `data/${collection}/${id}.json`;
    const content = JSON.stringify(data, null, 2);
    await this.writeFile(path, content, `Create ${collection} ${id}`);
    return { id, ...data };
  }

  /**
   * Read one document, serving from cache when possible.
   * @returns {Promise<Object|null>} parsed document, or null if absent.
   */
  async read(collection, id) {
    const cacheKey = `${collection}:${id}`;
    if (this.cache.has(cacheKey)) {
      return this.cache.get(cacheKey);
    }
    const path = `data/${collection}/${id}.json`;
    const content = await this.readFile(path);
    if (content === null) return null;
    const data = JSON.parse(content);
    this.cache.set(cacheKey, data);
    return data;
  }

  /**
   * Shallow-merge `updates` into an existing document and stamp `updatedAt`.
   * @throws {Error} if the document does not exist.
   */
  async update(collection, id, updates) {
    const existing = await this.read(collection, id);
    if (!existing) throw new Error('Document not found');
    const updated = { ...existing, ...updates, updatedAt: Date.now() };
    const path = `data/${collection}/${id}.json`;
    const content = JSON.stringify(updated, null, 2);
    await this.updateFile(path, content, `Update ${collection} ${id}`);
    // Keep the cache coherent with what we just wrote.
    this.cache.set(`${collection}:${id}`, updated);
    return updated;
  }

  /** Delete a document and drop it from the cache. */
  async delete(collection, id) {
    const path = `data/${collection}/${id}.json`;
    await this.deleteFile(path, `Delete ${collection} ${id}`);
    this.cache.delete(`${collection}:${id}`);
    return { success: true };
  }

  // ---- Query operations ----

  /**
   * Full scan of a collection, returning documents matching `predicate`.
   * Costs one API read per document — prefer findByIndex() for indexed fields.
   */
  async query(collection, predicate) {
    const files = await this.listFiles(`data/${collection}`);
    const results = [];
    for (const file of files) {
      const content = await this.readFile(file.path);
      // Fix: a file deleted between list and read yields null — skip it
      // instead of parsing it and handing null to the predicate.
      if (content === null) continue;
      const data = JSON.parse(content);
      if (predicate(data)) {
        results.push(data);
      }
    }
    return results;
  }

  /** All documents in a collection. */
  async findAll(collection) {
    return this.query(collection, () => true);
  }

  /** First document matching `predicate`, or null. */
  async findOne(collection, predicate) {
    const results = await this.query(collection, predicate);
    return results[0] || null;
  }

  // ---- Index management ----

  /**
   * Build (or rebuild) an index file mapping field values to document ids.
   * Unique indexes map value -> id; non-unique map value -> [ids].
   */
  async createIndex(collection, field, unique = false) {
    const indexPath = `data/indexes/${collection}_by_${field}.json`;
    const records = await this.findAll(collection);
    const index = {};
    for (const record of records) {
      const value = record[field];
      if (unique) {
        index[value] = record.id;
      } else {
        if (!index[value]) index[value] = [];
        index[value].push(record.id);
      }
    }
    // Fix: writeFile requires a commit message; the original omitted it,
    // which the GitHub API rejects (message is a required field).
    await this.writeFile(
      indexPath,
      JSON.stringify(index, null, 2),
      `Rebuild index ${collection}_by_${field}`
    );
    return index;
  }

  /**
   * Look up documents through a pre-built index.
   * @returns {Promise<Object|Object[]|null>} an array for non-unique indexes,
   *   a single document for unique ones, or null when nothing matches.
   */
  async findByIndex(collection, field, value) {
    const indexPath = `data/indexes/${collection}_by_${field}.json`;
    const indexContent = await this.readFile(indexPath);
    if (!indexContent) return null;
    const index = JSON.parse(indexContent);
    // Fix: a value absent from the index previously triggered a read of
    // `.../undefined.json`; return null directly instead.
    if (index[value] === undefined) return null;
    if (Array.isArray(index[value])) {
      return Promise.all(
        index[value].map(id => this.read(collection, id))
      );
    }
    return this.read(collection, index[value]);
  }

  // ---- GitHub API helpers ----

  /** UTF-8 string -> base64, the inverse of decodeContent(). */
  encodeContent(content) {
    return btoa(unescape(encodeURIComponent(content)));
  }

  /** Base64 (possibly newline-wrapped, as the API returns it) -> UTF-8. */
  decodeContent(base64) {
    return decodeURIComponent(escape(atob(base64.replace(/\n/g, ''))));
  }

  /** Auth header shared by every API call. */
  authHeaders() {
    return { 'Authorization': `token ${this.config.token}` };
  }

  /** Create a file via the Contents API. Sends no sha, so it only creates. */
  async writeFile(path, content, message) {
    const url = `${this.baseUrl}/repos/${this.config.owner}/${this.config.repo}/contents/${path}`;
    const response = await fetch(url, {
      method: 'PUT',
      headers: {
        ...this.authHeaders(),
        'Content-Type': 'application/json',
      },
      body: JSON.stringify({
        message,
        content: this.encodeContent(content),
        branch: this.config.branch
      })
    });
    if (!response.ok) {
      throw new Error(`Failed to write file: ${response.statusText}`);
    }
    return response.json();
  }

  /** Fetch and decode a file; null when the file does not exist. */
  async readFile(path) {
    const url = `${this.baseUrl}/repos/${this.config.owner}/${this.config.repo}/contents/${path}?ref=${this.config.branch}`;
    const response = await fetch(url, { headers: this.authHeaders() });
    if (response.status === 404) return null;
    if (!response.ok) throw new Error(`Failed to read file: ${response.statusText}`);
    const data = await response.json();
    // Fix: the API wraps base64 content with newlines, which atob() rejects,
    // and the original never reversed writeFile's UTF-8 encoding step.
    return this.decodeContent(data.content);
  }

  /** Overwrite an existing file (the API requires its current blob sha). */
  async updateFile(path, content, message) {
    const url = `${this.baseUrl}/repos/${this.config.owner}/${this.config.repo}/contents/${path}`;
    // Get current file to get SHA.
    const current = await fetch(url, { headers: this.authHeaders() });
    // Fix: surface auth/server errors here instead of silently sending an
    // undefined sha. A 404 falls through so the PUT creates the file.
    if (!current.ok && current.status !== 404) {
      throw new Error(`Failed to fetch current file: ${current.statusText}`);
    }
    const currentData = await current.json();
    const response = await fetch(url, {
      method: 'PUT',
      headers: {
        ...this.authHeaders(),
        'Content-Type': 'application/json',
      },
      body: JSON.stringify({
        message,
        content: this.encodeContent(content),
        sha: currentData.sha,
        branch: this.config.branch
      })
    });
    if (!response.ok) {
      throw new Error(`Failed to update file: ${response.statusText}`);
    }
    return response.json();
  }

  /** Delete a file (the API requires its current blob sha). */
  async deleteFile(path, message) {
    const url = `${this.baseUrl}/repos/${this.config.owner}/${this.config.repo}/contents/${path}`;
    // Get current file to get SHA.
    const current = await fetch(url, { headers: this.authHeaders() });
    // Fix: a DELETE without a valid sha is always rejected, so fail fast
    // with a clearer error when the sha lookup itself fails.
    if (!current.ok) {
      throw new Error(`Failed to fetch file for delete: ${current.statusText}`);
    }
    const currentData = await current.json();
    const response = await fetch(url, {
      method: 'DELETE',
      headers: this.authHeaders(),
      body: JSON.stringify({
        message,
        sha: currentData.sha,
        branch: this.config.branch
      })
    });
    if (!response.ok) {
      throw new Error(`Failed to delete file: ${response.statusText}`);
    }
    return response.json();
  }

  /** List directory entries; empty array when the directory is absent. */
  async listFiles(path) {
    const url = `${this.baseUrl}/repos/${this.config.owner}/${this.config.repo}/contents/${path}?ref=${this.config.branch}`;
    const response = await fetch(url, { headers: this.authHeaders() });
    if (!response.ok) {
      if (response.status === 404) return [];
      throw new Error(`Failed to list files: ${response.statusText}`);
    }
    return response.json();
  }

  /** Timestamp + random suffix. Not crypto-secure; adequate for doc ids. */
  generateId() {
    // slice() replaces the deprecated substr().
    return `${Date.now()}_${Math.random().toString(36).slice(2, 11)}`;
  }

  // ---- Cache management ----

  /** Drop every cached document. */
  clearCache() {
    this.cache.clear();
  }

  /** Drop a single cached document. */
  invalidateCache(collection, id) {
    this.cache.delete(`${collection}:${id}`);
  }
}
// Webhook Handler
//
// Consumes GitHub push-webhook payloads to keep the GitHubDB read cache
// coherent and to dispatch application-level event handlers.
class WebhookHandler {
  /** @param {GitHubDB} db  client whose cache is invalidated on pushes. */
  constructor(db) {
    this.db = db;
    // event/action name -> array of handler callbacks
    this.handlers = new Map();
  }

  /** Register a handler for a given webhook `action` value. */
  on(event, handler) {
    if (!this.handlers.has(event)) {
      this.handlers.set(event, []);
    }
    this.handlers.get(event).push(handler);
  }

  /**
   * Process a webhook payload: invalidate cache entries for every data file
   * touched by the pushed commits, then run all handlers registered for the
   * payload's `action` in parallel.
   * @returns {Promise<{received: boolean}>} acknowledgement for the sender.
   */
  async handleWebhook(payload) {
    // Fix: the original also destructured `sender` and `repository` but
    // never used them.
    const { action } = payload;
    for (const commit of payload.commits || []) {
      const touched = [
        ...(commit.added || []),
        ...(commit.modified || []),
        ...(commit.removed || [])
      ];
      for (const file of touched) {
        if (!file.startsWith('data/')) continue;
        const [, collection, filename] = file.split('/');
        // Fix: guard against paths that are not exactly
        // data/<collection>/<id>.json — the original crashed on a two-part
        // path (filename undefined) and mis-invalidated nested paths.
        if (!filename || !filename.endsWith('.json')) continue;
        const id = filename.slice(0, -'.json'.length);
        this.db.invalidateCache(collection, id);
      }
    }
    const eventHandlers = this.handlers.get(action) || [];
    await Promise.all(eventHandlers.map(handler => handler(payload)));
    return { received: true };
  }
}
// Enhanced cache with TTL
//
// Map-backed cache whose entries lazily expire: expiry is checked on read,
// not by a background timer.
class Cache {
  /** @param {number} [ttl=60000]  Lifetime of each entry in milliseconds. */
  constructor(ttl = 60000) {
    this.ttl = ttl;
    this.cache = new Map();
  }

  /** Store a value, stamping it with its absolute expiry time. */
  set(key, value) {
    const expires = Date.now() + this.ttl;
    this.cache.set(key, { value, expires });
  }

  /** Return the cached value, or null if it is absent or has expired. */
  get(key) {
    const entry = this.cache.get(key);
    if (entry === undefined) return null;
    if (Date.now() > entry.expires) {
      // Expired — evict lazily on access.
      this.cache.delete(key);
      return null;
    }
    return entry.value;
  }

  /** Drop every key matching the given regex pattern. */
  invalidate(pattern) {
    const matcher = new RegExp(pattern);
    for (const key of [...this.cache.keys()]) {
      if (matcher.test(key)) this.cache.delete(key);
    }
  }
}
// Batch read operations
/**
 * Read many documents from a collection in parallel, dropping the ids that
 * do not resolve to a document.
 * @param {GitHubDB} db
 * @param {string} collection
 * @param {string[]} ids
 * @returns {Promise<Object[]>} the documents that exist, in input order.
 */
async function batchRead(db, collection, ids) {
  const lookups = ids.map((id) => db.read(collection, id));
  const documents = await Promise.all(lookups);
  return documents.filter((doc) => doc !== null);
}
// Batch write operations
/**
 * Prepare one file change per item and commit them all at once.
 * Items without an `id` get a generated one.
 *
 * NOTE(review): relies on `db.createCommit`, which is not part of the
 * GitHubDB class shown above — presumably implemented elsewhere via the
 * Git tree API; verify before use.
 *
 * @param {GitHubDB} db
 * @param {string} collection
 * @param {Object[]} items
 */
async function batchWrite(db, collection, items) {
  const commits = items.map((item) => {
    const id = item.id || db.generateId();
    return {
      path: `data/${collection}/${id}.json`,
      content: JSON.stringify(item, null, 2),
      message: `Batch update ${collection} ${id}`
    };
  });
  // Use GitHub's tree API for batch commits
  return db.createCommit(commits);
}
// Pre-computed indexes for fast lookups
/**
 * Build the standard indexes, then demonstrate index-backed lookups.
 * @param {GitHubDB} db
 * @returns {Promise<{user: *, posts: *}>} the example lookups — the
 *   original computed these and silently discarded them.
 */
async function optimizeQueries(db) {
  // Create indexes for common queries
  await db.createIndex('users', 'email', true);
  await db.createIndex('posts', 'slug', true);
  await db.createIndex('posts', 'authorId');
  await db.createIndex('posts', 'createdAt');
  // Query by index (fast!) — the two lookups are independent, so run
  // them in parallel instead of awaiting sequentially.
  const [user, posts] = await Promise.all([
    db.findByIndex('users', 'email', 'user@example.com'),
    db.findByIndex('posts', 'authorId', 'user_123')
  ]);
  return { user, posts };
}
After 12 months in production with GitHub as the database:
This approach isn't for everyone. Avoid it if:
GitHub as a database is not just a hack—it's a legitimate architecture for certain use cases. For content-driven applications, personal projects, and prototypes, it offers unmatched simplicity and zero cost.
The key is understanding the tradeoffs. You're trading raw performance and complex queries for simplicity, versioning, and collaboration. For many applications, that's a trade worth making.
The future isn't always faster databases—sometimes it's smarter architectures.