Examples
Data Transformation: XLSX to CSV
Section titled “Data Transformation: XLSX to CSV”
Convert an uploaded spreadsheet to CSV using the xlsx package. This example requires bundling with esbuild.
const XLSX = require("xlsx");
module.exports = { handler: async (input, ctx) => { const base64 = await ctx.fs9.readBase64(input.path); const workbook = XLSX.read(Buffer.from(base64, "base64"), { type: "buffer" });
const results = []; for (const sheetName of workbook.SheetNames) { const csv = XLSX.utils.sheet_to_csv(workbook.Sheets[sheetName]); const outPath = `/output/${sheetName}.csv`; await ctx.fs9.write(outPath, csv); results.push({ sheet: sheetName, path: outPath, rows: csv.split("\n").length }); }
return { sheets: results }; }};npm run buildcat dist/index.js | db9 functions create xlsx-to-csv --db myapp -f - \ --fs9-scope /data:ro --fs9-scope /output:rw
db9 functions invoke xlsx-to-csv --db myapp \
  --payload '{"path":"/data/report.xlsx"}'
See c4pt0r/db9-function-xls-csv-tbl for a complete working example.
Webhook Handler
Section titled “Webhook Handler”
Accept external webhook payloads and store them in a table.
module.exports = { handler: async (input, ctx) => { if (!input?.event || !input?.data) { throw new Error("Missing event or data field"); }
await ctx.db.query( `INSERT INTO webhook_events (event_type, payload, source_ip, received_at) VALUES ($1, $2, $3, NOW())`, [input.event, JSON.stringify(input.data), input.source || "unknown"] );
const count = await ctx.db.query( "SELECT count(*) as total FROM webhook_events WHERE event_type = $1", [input.event] );
return { accepted: true, eventType: input.event, totalEvents: count.rows[0][0] }; }};db9 functions create webhook-handler --db myapp -f webhook.jsInvoke via the REST API from an external service:
curl -X POST \
  -H "Authorization: Bearer $DB9_API_KEY" \
  -H "Content-Type: application/json" \
  -d '{"input": {"event": "user.created", "data": {"userId": 42, "email": "alice@example.com"}}}' \
  https://api.db9.ai/customer/databases/{db_id}/functions/{fn_id}/invoke
Scheduled Data Cleanup
Section titled “Scheduled Data Cleanup”
Combine a function with pg_cron to clean up stale data on a schedule.
module.exports = { handler: async (input, ctx) => { const sessions = await ctx.db.query( "DELETE FROM sessions WHERE expires_at < NOW() RETURNING id" );
const logs = await ctx.db.query( "DELETE FROM audit_logs WHERE created_at < NOW() - INTERVAL '90 days' RETURNING id" );
// Archive old events to fs9 const oldEvents = await ctx.db.query( `SELECT * FROM events WHERE created_at < NOW() - INTERVAL '30 days'` );
if (oldEvents.row_count > 0) { const csv = oldEvents.rows.map(row => row.join(",")).join("\n"); const archivePath = `/archives/events-${new Date().toISOString().slice(0, 10)}.csv`; await ctx.fs9.write(archivePath, csv);
await ctx.db.query( "DELETE FROM events WHERE created_at < NOW() - INTERVAL '30 days'" ); }
return { sessionsDeleted: sessions.row_count, logsDeleted: logs.row_count, eventsArchived: oldEvents.row_count }; }};db9 functions create cleanup --db myapp -f cleanup.js \ --fs9-scope /archives:rw --timeout 120000Analytics API Endpoint
Section titled “Analytics API Endpoint”
Build an analytics endpoint that aggregates data and returns formatted results.
module.exports = { handler: async (input, ctx) => { const days = input?.days || 7;
const [activity, topUsers, summary] = await Promise.all([ ctx.db.query(` SELECT date_trunc('day', created_at) as day, count(*) as events FROM events WHERE created_at >= NOW() - INTERVAL '1 day' * $1 GROUP BY 1 ORDER BY 1 `, [days]),
ctx.db.query(` SELECT user_id, count(*) as actions FROM events WHERE created_at >= NOW() - INTERVAL '1 day' * $1 GROUP BY 1 ORDER BY 2 DESC LIMIT 10 `, [days]),
ctx.db.query(` SELECT count(*) as total_events, count(DISTINCT user_id) as unique_users, min(created_at) as earliest, max(created_at) as latest FROM events WHERE created_at >= NOW() - INTERVAL '1 day' * $1 `, [days]) ]);
return { period: `${days} days`, summary: { totalEvents: summary.rows[0]?.[0], uniqueUsers: summary.rows[0]?.[1], }, dailyActivity: activity.rows.map(([day, count]) => ({ day, count })), topUsers: topUsers.rows.map(([userId, actions]) => ({ userId, actions })), }; }};External API Integration with Secrets
Section titled “External API Integration with Secrets”
Call an external API using secrets for authentication.
module.exports = { handler: async (input, ctx) => { const apiKey = ctx.secrets.get("OPENAI_KEY");
const doc = await ctx.db.query( "SELECT content FROM documents WHERE id = $1", [input.documentId] );
if (doc.row_count === 0) throw new Error("Document not found");
const response = await fetch("https://api.openai.com/v1/chat/completions", { method: "POST", headers: { "Authorization": `Bearer ${apiKey}`, "Content-Type": "application/json", }, body: JSON.stringify({ model: "gpt-4o-mini", messages: [ { role: "system", content: "Summarize the following document in 3 sentences." }, { role: "user", content: doc.rows[0][0] } ] }), });
const result = await response.json(); const summary = result.choices[0].message.content;
await ctx.db.query( "UPDATE documents SET summary = $1, summarized_at = NOW() WHERE id = $2", [summary, input.documentId] );
return { documentId: input.documentId, summary }; }};db9 functions secrets set OPENAI_KEY --db myappdb9 functions create summarize --db myapp -f summarize.js \ --secret OPENAI_KEY=OPENAI_KEYNext Steps
Section titled “Next Steps”
- Overview — Quick start and deployment basics
- Runtime & ctx — ctx.db, ctx.fs9, ctx.self API
- Configuration — Secrets, fs9 scope, network, cron, limits
- Troubleshooting — Errors, logs, debugging