-
Notifications
You must be signed in to change notification settings - Fork 10
Expand file tree
/
Copy pathdb.ts
More file actions
139 lines (122 loc) · 4.67 KB
/
db.ts
File metadata and controls
139 lines (122 loc) · 4.67 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
// Server-only pooled Postgres client for SvelteKit.
// One Pool per process. Requires DATABASE_URL in the environment.
// Stays on `pg` (node-postgres) to match the SQL already written in
// pages/api/* — the port is a 1:1 copy rather than a rewrite.
import pg from 'pg';
import { env } from '$env/dynamic/private';
// Local aliases so the rest of this module (and re-exports) can name
// pool/client/result types without importing `pg` directly.
type Pool = pg.Pool;
type PoolClient = pg.PoolClient;
type QueryResult<T extends pg.QueryResultRow = pg.QueryResultRow> = pg.QueryResult<T>;
// Process-wide singleton: created lazily by getPool(), torn down by closePool().
let _pool: Pool | null = null;
// Parse a numeric env override, falling back when the variable is unset
// OR malformed. Previously `Number(env.X ?? d)` produced NaN for values
// like "abc", which silently misconfigured the pool; Infinity is also
// rejected. A legitimate 0 (e.g. "disable timeout") still passes through.
function envNumber(raw: string | undefined, fallback: number): number {
	const n = Number(raw);
	return Number.isFinite(n) ? n : fallback;
}
// Pool sizing. Each SSR page can fan out 10+ concurrent queries via
// Promise.all (homepage trigram search + venue laterals + vote
// scores). With max=10 a single stuck query starves every other
// request. 25 keeps the worst-case query path concurrency-safe while
// staying well under typical Postgres max_connections defaults.
const POOL_MAX = envNumber(env.PG_POOL_MAX, 25);
// Hard ceiling on any single statement. Anything legitimately slower
// than 10 s belongs in a worker, not in a request path. Without this,
// a plan regression on the trigram search or a wedged backend can hold
// pool slots indefinitely and tip the entire app over. Override via
// PG_STATEMENT_TIMEOUT_MS for emergencies.
const STATEMENT_TIMEOUT_MS = envNumber(env.PG_STATEMENT_TIMEOUT_MS, 10_000);
export function getPool(): Pool {
if (_pool) return _pool;
const url = env.DATABASE_URL;
if (!url) {
throw new Error('DATABASE_URL is not set');
}
_pool = new pg.Pool({
connectionString: url,
max: POOL_MAX,
idleTimeoutMillis: 30_000,
connectionTimeoutMillis: 5_000,
// query_timeout is enforced client-side by node-postgres; pair it
// with statement_timeout below so the server also kills the
// query if the client crashed mid-await and never aborted it.
query_timeout: STATEMENT_TIMEOUT_MS,
// application_name surfaces in pg_stat_activity / slow-query logs
// so an operator can attribute a stuck query to this process
// rather than guess. Leave the default for any worker that
// imports a different db helper.
application_name: 'tokenstork-web',
statement_timeout: STATEMENT_TIMEOUT_MS
});
_pool.on('error', (err) => {
// Avoid logging the full pg error object — it can contain bound
// parameter values for the query that failed. Code + severity
// + message are enough for an operator to grep journalctl.
const e = err as Error & { code?: string; severity?: string };
console.error(
`[pg] idle client error: code=${e.code ?? '?'} severity=${e.severity ?? '?'} ${e.message}`
);
});
return _pool;
}
/**
 * Run a single parameterized statement on the shared pool.
 *
 * Thin convenience wrapper so call sites never touch the pool object
 * directly; the readonly params are widened only at the pg boundary.
 */
export async function query<T extends pg.QueryResultRow = pg.QueryResultRow>(
	text: string,
	params?: readonly unknown[]
): Promise<QueryResult<T>> {
	const pool = getPool();
	const boundParams = params as unknown[] | undefined;
	return pool.query<T>(text, boundParams);
}
/**
 * Run `fn` inside a single BEGIN/COMMIT transaction on a dedicated
 * client checked out from the shared pool. The client is always
 * returned (or destroyed) in the finally block.
 *
 * Fix over the previous version: a failing ROLLBACK (e.g. the
 * connection died mid-transaction) used to throw out of the catch
 * block and mask the original error from `fn`, and the broken client
 * went back into the pool. Now the rollback failure is logged, the
 * original error is rethrown, and the client is destroyed via
 * release(err) instead of being pooled.
 *
 * @param fn callback receiving the transaction-scoped client
 * @returns whatever `fn` resolves to
 * @throws the error thrown by `fn` (or by BEGIN/COMMIT)
 */
export async function withTransaction<T>(
	fn: (client: PoolClient) => Promise<T>
): Promise<T> {
	const client = await getPool().connect();
	// When set, release(fatal) destroys the client instead of pooling it.
	let fatal: Error | undefined;
	try {
		await client.query('BEGIN');
		const result = await fn(client);
		await client.query('COMMIT');
		return result;
	} catch (err) {
		try {
			await client.query('ROLLBACK');
		} catch (rollbackErr: unknown) {
			fatal =
				rollbackErr instanceof Error ? rollbackErr : new Error(String(rollbackErr));
			// Keep the log terse — the caller still receives `err` itself.
			console.error(`[pg] ROLLBACK failed: ${fatal.message}`);
		}
		throw err;
	} finally {
		client.release(fatal);
	}
}
export async function closePool(): Promise<void> {
if (_pool) {
await _pool.end();
_pool = null;
}
}
// Hex <-> BYTEA helpers. Postgres returns BYTEA as Buffer; we hex-encode at
// the API boundary. Category ids are 32 bytes, commitments up to 40.
//
// Upper bound chosen to comfortably cover commitments (≤ 40 bytes) and
// category ids (32 bytes) without echoing arbitrarily large user input
// into a Buffer allocation. Anything bigger is a caller bug.
const MAX_HEX_LEN = 256;
/**
 * Decode a hex string into a Buffer. Accepts an optional `0x` (JS-style)
 * or `\x` (Postgres BYTEA text-format) prefix.
 *
 * @throws on input longer than MAX_HEX_LEN digits, odd length, or
 *   non-hex characters. Empty input decodes to an empty Buffer.
 */
export function bytesFromHex(hex: string): Buffer {
	let digits = hex;
	if (digits.startsWith('0x')) digits = digits.slice(2);
	if (digits.startsWith('\\x')) digits = digits.slice(2);
	// Length gate first, so we never regex-scan or allocate for
	// oversized caller input.
	if (digits.length > MAX_HEX_LEN) {
		throw new Error('invalid hex string: exceeds max length');
	}
	const isWellFormed = digits.length % 2 === 0 && /^[0-9a-fA-F]*$/.test(digits);
	if (!isWellFormed) {
		throw new Error('invalid hex string');
	}
	return Buffer.from(digits, 'hex');
}
// Strict 32-byte-category helper. Use this whenever the input is meant to
// be a CashTokens category id — every BYTEA-keyed query downstream
// trusts the byte length, and a stricter gate at the boundary means
// downstream call sites can rely on a known shape.
/**
 * Decode exactly 64 hex chars (32 bytes), with an optional `0x` or
 * `\x` prefix. Input is lowercased before decoding.
 *
 * @throws when, after prefix stripping, the input is not exactly
 *   64 hex characters.
 */
export function categoryFromHex(hex: string): Buffer {
	let cleaned = hex;
	// At most one prefix is stripped, mirroring bytesFromHex's accepted forms.
	if (cleaned.startsWith('0x') || cleaned.startsWith('\\x')) {
		cleaned = cleaned.slice(2);
	}
	const looksValid = cleaned.length === 64 && /^[0-9a-fA-F]+$/.test(cleaned);
	if (!looksValid) {
		throw new Error('invalid category (expected 64 hex chars)');
	}
	return Buffer.from(cleaned.toLowerCase(), 'hex');
}
/**
 * Hex-encode a BYTEA value coming back from Postgres.
 *
 * Null/undefined pass through as null so nullable columns can be
 * forwarded to the API layer without a guard at every call site.
 */
export function hexFromBytes(
	buf: Buffer | Uint8Array | null | undefined
): string | null {
	if (buf == null) {
		return null;
	}
	const bytes = Buffer.isBuffer(buf) ? buf : Buffer.from(buf);
	return bytes.toString('hex');
}