Skip to content

Instantly share code, notes, and snippets.

@troykelly
Last active March 10, 2026 07:20
Show Gist options
  • Select an option

  • Save troykelly/b5965d35eed14cc3716400fe49dc047a to your computer and use it in GitHub Desktop.

Select an option

Save troykelly/b5965d35eed14cc3716400fe49dc047a to your computer and use it in GitHub Desktop.
Patch for better-ccflare 3.3.4
diff --git a/apps/server/src/server.ts b/apps/server/src/server.ts
index d502e19..f11b791 100644
--- a/apps/server/src/server.ts
+++ b/apps/server/src/server.ts
@@ -510,12 +510,14 @@ export default async function startServer(options?: {
runtime.port = port;
}
DatabaseFactory.initialize(undefined, runtime);
- const dbOps = DatabaseFactory.getInstance();
+ const dbOps = await DatabaseFactory.getInstanceAsync();
- // Run integrity check if database was initialized in fast mode
- dbOps.runIntegrityCheck();
+ // Run integrity check if database was initialized in fast mode (SQLite only)
+ if (dbOps.isSQLite) {
+ dbOps.runIntegrityCheck();
+ }
- const db = dbOps.getDatabase();
+ const db = dbOps.getAdapter();
const log = container.resolve<Logger>(SERVICE_KEYS.Logger);
container.registerInstance(SERVICE_KEYS.Database, dbOps);
diff --git a/bun.lock b/bun.lock
index bb4f230..10a0f34 100644
--- a/bun.lock
+++ b/bun.lock
@@ -1,5 +1,6 @@
{
"lockfileVersion": 1,
+ "configVersion": 0,
"workspaces": {
"": {
"name": "ccflare",
diff --git a/packages/cli-commands/src/commands/account.ts b/packages/cli-commands/src/commands/account.ts
index ea03900..75de57b 100644
--- a/packages/cli-commands/src/commands/account.ts
+++ b/packages/cli-commands/src/commands/account.ts
@@ -82,7 +82,7 @@ async function createConsoleAccountWithApiKey(
const validatedApiKey = validateApiKey(apiKey, "Claude API key");
const validatedPriority = validatePriority(priority, "priority");
- dbOps.getDatabase().run(
+ await dbOps.getAdapter().run(
`INSERT INTO accounts (
id, name, provider, api_key, refresh_token, access_token,
expires_at, created_at, request_count, total_requests, priority, custom_endpoint
@@ -118,7 +118,7 @@ async function createMinimaxAccount(
const validatedApiKey = validateApiKey(apiKey, "Minimax API key");
const validatedPriority = validatePriority(priority, "priority");
- dbOps.getDatabase().run(
+ await dbOps.getAdapter().run(
`INSERT INTO accounts (
id, name, provider, api_key, refresh_token, access_token,
expires_at, created_at, request_count, total_requests, priority, custom_endpoint
@@ -162,7 +162,7 @@ export async function createNanoGPTAccount(
const validatedMappings = validateAndSanitizeModelMappings(modelMappings);
validatedModelMappings = JSON.stringify(validatedMappings);
}
- dbOps.getDatabase().run(
+ await dbOps.getAdapter().run(
`INSERT INTO accounts (
id, name, provider, api_key, refresh_token, access_token,
expires_at, created_at, request_count, total_requests, priority, custom_endpoint, model_mappings
@@ -204,7 +204,7 @@ async function createKiloAccount(
const validated = validateAndSanitizeModelMappings(modelMappings);
validatedModelMappings = JSON.stringify(validated);
}
- dbOps.getDatabase().run(
+ await dbOps.getAdapter().run(
`INSERT INTO accounts (
id, name, provider, api_key, refresh_token, access_token,
expires_at, created_at, request_count, total_requests, priority, custom_endpoint, model_mappings
@@ -246,7 +246,7 @@ async function createOpenRouterAccount(
const validatedMappings = validateAndSanitizeModelMappings(modelMappings);
validatedModelMappings = JSON.stringify(validatedMappings);
}
- dbOps.getDatabase().run(
+ await dbOps.getAdapter().run(
`INSERT INTO accounts (
id, name, provider, api_key, refresh_token, access_token,
expires_at, created_at, request_count, total_requests, priority, custom_endpoint, model_mappings
@@ -303,7 +303,7 @@ async function createAnthropicCompatibleAccount(
validatedModelMappings = JSON.stringify(validatedMappings);
}
- dbOps.getDatabase().run(
+ await dbOps.getAdapter().run(
`INSERT INTO accounts (
id, name, provider, api_key, refresh_token, access_token,
expires_at, created_at, request_count, total_requests, priority, custom_endpoint, model_mappings
@@ -337,7 +337,7 @@ async function createZaiAccount(
const validatedApiKey = validateApiKey(apiKey, "z.ai API key");
const validatedPriority = validatePriority(priority, "priority");
- dbOps.getDatabase().run(
+ await dbOps.getAdapter().run(
`INSERT INTO accounts (
id, name, provider, api_key, refresh_token, access_token,
expires_at, created_at, request_count, total_requests, priority, custom_endpoint
@@ -413,7 +413,7 @@ async function createBedrockAccount(
// Store as "bedrock:profile:region" format
const customEndpoint = `bedrock:${profile}:${region}`;
- dbOps.getDatabase().run(
+ await dbOps.getAdapter().run(
`INSERT INTO accounts (
id, name, provider, api_key, refresh_token, access_token,
expires_at, created_at, request_count, total_requests, priority, custom_endpoint, cross_region_mode
@@ -464,7 +464,7 @@ async function createVertexAIAccount(
region: region.trim(),
});
- dbOps.getDatabase().run(
+ await dbOps.getAdapter().run(
`INSERT INTO accounts (
id, name, provider, api_key, refresh_token, access_token,
expires_at, created_at, request_count, total_requests, priority, custom_endpoint
@@ -578,7 +578,7 @@ async function createOpenAIAccount(
? JSON.stringify(validatedModelMappings)
: null;
- dbOps.getDatabase().run(
+ await dbOps.getAdapter().run(
`INSERT INTO accounts (
id, name, provider, api_key, refresh_token, access_token,
expires_at, created_at, request_count, total_requests, priority, custom_endpoint, model_mappings
@@ -1259,12 +1259,11 @@ export async function forceResetRateLimit(
name: string,
config: Config,
): Promise<{ success: boolean; message: string }> {
- const db = dbOps.getDatabase();
- const account = db
- .query<{ id: string; name: string }, [string]>(
- "SELECT id, name FROM accounts WHERE name = ?",
- )
- .get(name);
+ const adapter = dbOps.getAdapter();
+ const account = await adapter.get<{ id: string; name: string }>(
+ "SELECT id, name FROM accounts WHERE name = ?",
+ [name],
+ );
if (!account) {
return {
@@ -1353,23 +1352,19 @@ export async function reauthenticateAccount(
config: Config,
name: string,
): Promise<{ success: boolean; message: string }> {
- const db = dbOps.getDatabase();
+ const adapter = dbOps.getAdapter();
// Get account by name
- const account = db
- .query<
- {
- id: string;
- provider: string;
- priority: number;
- custom_endpoint: string | null;
- api_key: string | null;
- },
- [string]
- >(
- "SELECT id, provider, priority, custom_endpoint, api_key FROM accounts WHERE name = ?",
- )
- .get(name);
+ const account = await adapter.get<{
+ id: string;
+ provider: string;
+ priority: number;
+ custom_endpoint: string | null;
+ api_key: string | null;
+ }>(
+ "SELECT id, provider, priority, custom_endpoint, api_key FROM accounts WHERE name = ?",
+ [name],
+ );
if (!account) {
return {
@@ -1490,7 +1485,7 @@ export async function reauthenticateAccount(
// Update existing account with new API key (preserving all other metadata)
// Use transaction for atomic update
try {
- db.run(
+ await adapter.run(
`UPDATE accounts SET
api_key = ?,
refresh_token = ?,
@@ -1520,7 +1515,7 @@ export async function reauthenticateAccount(
console.log("Updating OAuth tokens...");
try {
- db.run(
+ await adapter.run(
`UPDATE accounts SET
refresh_token = ?,
access_token = ?,
diff --git a/packages/cli-commands/src/commands/database-repair.ts b/packages/cli-commands/src/commands/database-repair.ts
index 3547457..94718f8 100644
--- a/packages/cli-commands/src/commands/database-repair.ts
+++ b/packages/cli-commands/src/commands/database-repair.ts
@@ -11,7 +11,9 @@ interface RepairResult {
/**
* Perform database integrity check and repair
*/
-export function repairDatabase(dbOps: DatabaseOperations): RepairResult {
+export async function repairDatabase(
+ dbOps: DatabaseOperations,
+): Promise<RepairResult> {
const result: RepairResult = {
integrityOk: false,
nullsFixed: 0,
@@ -20,32 +22,38 @@ export function repairDatabase(dbOps: DatabaseOperations): RepairResult {
warnings: [],
};
- const db = dbOps.getDatabase();
+ const adapter = dbOps.getAdapter();
console.log("🔍 Checking database integrity...\n");
- // 1. Check database integrity
- try {
- const integrityResult = db.query("PRAGMA integrity_check").get() as {
- integrity_check: string;
- };
-
- if (integrityResult.integrity_check === "ok") {
- result.integrityOk = true;
- console.log("✅ Database integrity check: PASSED\n");
- } else {
- result.integrityOk = false;
- result.errors.push(
- `Integrity check failed: ${integrityResult.integrity_check}`,
- );
- console.log(
- `❌ Database integrity check: FAILED\n ${integrityResult.integrity_check}\n`,
- );
+ // 1. Check database integrity (SQLite-specific PRAGMA)
+ if (dbOps.isSQLite) {
+ try {
+ const integrityResult = await adapter.get<{
+ integrity_check: string;
+ }>("PRAGMA integrity_check");
+
+ if (integrityResult?.integrity_check === "ok") {
+ result.integrityOk = true;
+ console.log("✅ Database integrity check: PASSED\n");
+ } else {
+ result.integrityOk = false;
+ result.errors.push(
+ `Integrity check failed: ${integrityResult?.integrity_check}`,
+ );
+ console.log(
+ `❌ Database integrity check: FAILED\n ${integrityResult?.integrity_check}\n`,
+ );
+ }
+ } catch (error) {
+ result.errors.push(`Failed to run integrity check: ${error}`);
+ console.log(`❌ Failed to run integrity check: ${error}\n`);
+ return result;
}
- } catch (error) {
- result.errors.push(`Failed to run integrity check: ${error}`);
- console.log(`❌ Failed to run integrity check: ${error}\n`);
- return result;
+ } else {
+ // PostgreSQL doesn't have PRAGMA integrity_check; assume OK
+ result.integrityOk = true;
+ console.log("✅ Database integrity check: SKIPPED (PostgreSQL)\n");
}
// 2. Check for NULL values in numeric fields
@@ -60,12 +68,16 @@ export function repairDatabase(dbOps: DatabaseOperations): RepairResult {
FROM accounts
`;
- const nullStats = db.query(nullCheckQuery).get() as {
+ const nullStats = await adapter.get<{
total: number;
null_request_count: number;
null_total_requests: number;
null_session_count: number;
- };
+ }>(nullCheckQuery);
+
+ if (!nullStats) {
+ throw new Error("Failed to query NULL stats");
+ }
const totalNulls =
nullStats.null_request_count +
@@ -86,7 +98,7 @@ export function repairDatabase(dbOps: DatabaseOperations): RepairResult {
// Fix NULL values
console.log("🔧 Fixing NULL values...\n");
- const updateResult = db.run(`
+ const changes = await adapter.runWithChanges(`
UPDATE accounts
SET
request_count = COALESCE(request_count, 0),
@@ -98,7 +110,7 @@ export function repairDatabase(dbOps: DatabaseOperations): RepairResult {
OR session_request_count IS NULL
`);
- result.nullsFixed = updateResult.changes;
+ result.nullsFixed = changes;
console.log(
`✅ Fixed ${result.nullsFixed} account records with NULL values\n`,
);
@@ -110,32 +122,42 @@ export function repairDatabase(dbOps: DatabaseOperations): RepairResult {
console.log(`❌ Failed to check/fix NULL values: ${error}\n`);
}
- // 3. Check foreign key constraints
+ // 3. Check foreign key constraints (SQLite-specific PRAGMA)
console.log("🔍 Checking foreign key constraints...\n");
- try {
- const fkCheck = db.query("PRAGMA foreign_key_check").all();
- if (fkCheck.length === 0) {
- console.log("✅ Foreign key constraints: PASSED\n");
- } else {
- result.warnings.push(`Found ${fkCheck.length} foreign key violations`);
- console.log(`⚠️ Found ${fkCheck.length} foreign key violations:`);
- for (const violation of fkCheck) {
- console.log(` ${JSON.stringify(violation)}`);
+ if (dbOps.isSQLite) {
+ try {
+ const fkCheck = await adapter.query<Record<string, unknown>>(
+ "PRAGMA foreign_key_check",
+ );
+ if (fkCheck.length === 0) {
+ console.log("✅ Foreign key constraints: PASSED\n");
+ } else {
+ result.warnings.push(
+ `Found ${fkCheck.length} foreign key violations`,
+ );
+ console.log(`⚠️ Found ${fkCheck.length} foreign key violations:`);
+ for (const violation of fkCheck) {
+ console.log(` ${JSON.stringify(violation)}`);
+ }
+ console.log("");
}
- console.log("");
+ } catch (error) {
+ result.warnings.push(`Failed to check foreign keys: ${error}`);
+ console.log(`⚠️ Failed to check foreign keys: ${error}\n`);
}
- } catch (error) {
- result.warnings.push(`Failed to check foreign keys: ${error}`);
- console.log(`⚠️ Failed to check foreign keys: ${error}\n`);
+ } else {
+ console.log("✅ Foreign key constraints: SKIPPED (PostgreSQL)\n");
}
// 4. Vacuum database to rebuild and optimize
console.log("🔧 Vacuuming database (this may take a moment)...\n");
try {
- // First checkpoint WAL
- db.exec("PRAGMA wal_checkpoint(TRUNCATE)");
- // Then vacuum
- db.exec("VACUUM");
+ if (dbOps.isSQLite) {
+ // First checkpoint WAL
+ await adapter.unsafe("PRAGMA wal_checkpoint(TRUNCATE)");
+ }
+ // VACUUM works on both SQLite and PostgreSQL — NOTE(review): PostgreSQL's VACUUM cannot run inside a transaction block; confirm adapter.unsafe() issues it autocommit
+ await adapter.unsafe("VACUUM");
result.vacuumed = true;
console.log("✅ Database vacuumed successfully\n");
} catch (error) {
@@ -146,8 +168,11 @@ export function repairDatabase(dbOps: DatabaseOperations): RepairResult {
// 5. Optimize database
console.log("🔧 Optimizing database...\n");
try {
- db.exec("ANALYZE");
- db.exec("PRAGMA optimize");
+ // ANALYZE works on both SQLite and PostgreSQL
+ await adapter.unsafe("ANALYZE");
+ if (dbOps.isSQLite) {
+ await adapter.unsafe("PRAGMA optimize");
+ }
console.log("✅ Database optimized successfully\n");
} catch (error) {
result.warnings.push(`Failed to optimize database: ${error}`);
@@ -204,13 +229,15 @@ export function printRepairSummary(result: RepairResult): void {
/**
* Main repair command handler
*/
-export function handleRepairCommand(dbOps: DatabaseOperations): void {
+export async function handleRepairCommand(
+ dbOps: DatabaseOperations,
+): Promise<void> {
console.log("");
console.log("🔧 BETTER-CCFLARE DATABASE REPAIR");
console.log("=".repeat(50));
console.log("");
- const result = repairDatabase(dbOps);
+ const result = await repairDatabase(dbOps);
printRepairSummary(result);
// Exit with appropriate code
diff --git a/packages/database/src/adapters/bun-sql-adapter.ts b/packages/database/src/adapters/bun-sql-adapter.ts
index ecbe96d..e2dfa7e 100644
--- a/packages/database/src/adapters/bun-sql-adapter.ts
+++ b/packages/database/src/adapters/bun-sql-adapter.ts
@@ -1,6 +1,56 @@
import type { Database } from "bun:sqlite";
import type { SQL } from "bun";
+/**
+ * Convert SQLite-style `?` and `?N` placeholders to PostgreSQL-style `$N` placeholders.
+ * Handles both `?` (sequential) and `?1`, `?2` (positional) styles.
+ * Skips placeholders inside single-quoted string literals.
+ */
+function convertPlaceholders(sql: string): string {
+ let result = "";
+ let paramIndex = 0;
+ let inString = false;
+
+ for (let i = 0; i < sql.length; i++) {
+ const ch = sql[i];
+
+ // Toggle string-literal tracking on every quote; a doubled '' escape toggles out and immediately back in, so its contents are still treated as quoted
+ if (ch === "'") {
+ inString = !inString;
+ result += ch;
+ continue;
+ }
+
+ if (inString) {
+ result += ch;
+ continue;
+ }
+
+ if (ch === "?") {
+ // Check for ?N style (e.g., ?1, ?2)
+ let numStr = "";
+ let j = i + 1;
+ while (j < sql.length && sql[j] >= "0" && sql[j] <= "9") {
+ numStr += sql[j];
+ j++;
+ }
+ if (numStr.length > 0) {
+ // ?N -> $N (keep the original number)
+ result += `$${numStr}`;
+ i = j - 1; // skip the digits
+ } else {
+ // ? -> $N (sequential)
+ paramIndex++;
+ result += `$${paramIndex}`;
+ }
+ } else {
+ result += ch;
+ }
+ }
+
+ return result;
+}
+
/**
* Unified SQL adapter that abstracts over bun:sqlite (sync) and Bun.SQL/PostgreSQL (async).
*
@@ -51,6 +101,13 @@ export class BunSqlAdapter {
return this.sql;
}
+ /**
+ * Convert SQL for PostgreSQL if needed (? -> $N placeholders).
+ */
+ private pgSql(sqlStr: string): string {
+ return this.isSQLite ? sqlStr : convertPlaceholders(sqlStr);
+ }
+
/**
* Execute a SELECT query returning multiple rows.
*/
@@ -60,8 +117,9 @@ export class BunSqlAdapter {
return this.sqliteDb.query<R, any[]>(sqlStr).all(...(params as any[]));
}
// PostgreSQL via Bun.SQL unsafe
+ const pgQuery = this.pgSql(sqlStr);
// biome-ignore lint/suspicious/noExplicitAny: Bun.SQL accepts various binding types
- const result = await this.sql?.unsafe(sqlStr, params as any[]);
+ const result = await this.sql?.unsafe(pgQuery, params as any[]);
return result as unknown as R[];
}
@@ -76,8 +134,9 @@ export class BunSqlAdapter {
.get(...(params as any[]));
return (result as R) ?? null;
}
+ const pgQuery = this.pgSql(sqlStr);
// biome-ignore lint/suspicious/noExplicitAny: Bun.SQL accepts various binding types
- const rows = await this.sql?.unsafe(sqlStr, params as any[]);
+ const rows = await this.sql?.unsafe(pgQuery, params as any[]);
return ((rows as unknown as R[])[0] ?? null) as R | null;
}
@@ -90,8 +149,9 @@ export class BunSqlAdapter {
this.sqliteDb.run(sqlStr, params as any[]);
return;
}
+ const pgQuery = this.pgSql(sqlStr);
// biome-ignore lint/suspicious/noExplicitAny: Bun.SQL accepts various binding types
- await this.sql?.unsafe(sqlStr, params as any[]);
+ await this.sql?.unsafe(pgQuery, params as any[]);
}
/**
@@ -106,8 +166,9 @@ export class BunSqlAdapter {
const result = this.sqliteDb.run(sqlStr, params as any[]);
return result.changes;
}
+ const pgQuery = this.pgSql(sqlStr);
// biome-ignore lint/suspicious/noExplicitAny: Bun.SQL accepts various binding types
- const result = await this.sql?.unsafe(sqlStr, params as any[]);
+ const result = await this.sql?.unsafe(pgQuery, params as any[]);
// Bun.SQL returns an array-like with a `count` property for DML statements — NOTE(review): if this.sql is undefined, `result` is undefined and the property access below throws; confirm sql is always set on the PG path
return (result as unknown as { count: number }).count ?? 0;
}
@@ -155,8 +216,9 @@ export class BunSqlAdapter {
this.sqliteDb.exec(sqlStr);
return;
}
+ const pgQuery = params && params.length > 0 ? this.pgSql(sqlStr) : sqlStr;
// biome-ignore lint/suspicious/noExplicitAny: Bun.SQL accepts various binding types
- return this.sql?.unsafe(sqlStr, params as any[]);
+ return this.sql?.unsafe(pgQuery, params as any[]);
}
/**
diff --git a/packages/database/src/analyze-performance.ts b/packages/database/src/analyze-performance.ts
index 79bcb58..31a314a 100644
--- a/packages/database/src/analyze-performance.ts
+++ b/packages/database/src/analyze-performance.ts
@@ -39,7 +39,7 @@ function analyzeQueryPerformance(db: Database) {
name: "Success rate calculation",
query: `
SELECT
- SUM(CASE WHEN success = 1 THEN 1 ELSE 0 END) * 100.0 / COUNT(*) as success_rate
+ SUM(CASE WHEN success = TRUE THEN 1 ELSE 0 END) * 100.0 / COUNT(*) as success_rate
FROM requests
WHERE timestamp > ?
`,
diff --git a/packages/database/src/database-operations.ts b/packages/database/src/database-operations.ts
index d933799..b29d04c 100644
--- a/packages/database/src/database-operations.ts
+++ b/packages/database/src/database-operations.ts
@@ -203,7 +203,11 @@ export class DatabaseOperations implements StrategyStore, Disposable {
this.isSQLite = false;
// Import SQL lazily to avoid issues when not needed
const { SQL } = require("bun");
- const sqlClient = new SQL(databaseUrl);
+ const sqlClient = new SQL({
+ url: databaseUrl,
+ max: 10,
+ idleTimeout: 30,
+ });
this.adapter = new BunSqlAdapter(sqlClient, false);
} else {
this.isSQLite = true;
diff --git a/packages/database/src/repositories/request.repository.ts b/packages/database/src/repositories/request.repository.ts
index 333e501..0d38604 100644
--- a/packages/database/src/repositories/request.repository.ts
+++ b/packages/database/src/repositories/request.repository.ts
@@ -45,7 +45,7 @@ export class RequestRepository extends BaseRepository<RequestData> {
status_code, success, error_message, response_time_ms, failover_attempts,
api_key_id, api_key_name
)
- VALUES (?, ?, ?, ?, ?, ?, 0, NULL, 0, 0, ?, ?)
+ VALUES (?, ?, ?, ?, ?, ?, FALSE, NULL, 0, 0, ?, ?)
`,
[
id,
@@ -103,7 +103,7 @@ export class RequestRepository extends BaseRepository<RequestData> {
data.path,
data.accountUsed,
data.statusCode,
- data.success ? 1 : 0,
+ data.success,
data.errorMessage,
data.responseTime,
data.failoverAttempts,
@@ -283,8 +283,8 @@ export class RequestRepository extends BaseRepository<RequestData> {
`
SELECT
COUNT(*) as total_requests,
- SUM(CASE WHEN success = 1 THEN 1 ELSE 0 END) as successful_requests,
- SUM(CASE WHEN success = 0 THEN 1 ELSE 0 END) as failed_requests,
+ SUM(CASE WHEN success = TRUE THEN 1 ELSE 0 END) as successful_requests,
+ SUM(CASE WHEN success = FALSE THEN 1 ELSE 0 END) as failed_requests,
AVG(response_time_ms) as avg_response_time
FROM requests
${whereClause}
@@ -334,7 +334,7 @@ export class RequestRepository extends BaseRepository<RequestData> {
`
SELECT
COUNT(*) as total_requests,
- SUM(CASE WHEN success = 1 THEN 1 ELSE 0 END) as successful_requests,
+ SUM(CASE WHEN success = TRUE THEN 1 ELSE 0 END) as successful_requests,
AVG(response_time_ms) as avg_response_time,
SUM(total_tokens) as total_tokens,
SUM(cost_usd) as total_cost_usd,
@@ -390,7 +390,7 @@ export class RequestRepository extends BaseRepository<RequestData> {
`
SELECT error_message
FROM requests
- WHERE success = 0 AND error_message IS NOT NULL
+ WHERE success = FALSE AND error_message IS NOT NULL
ORDER BY timestamp DESC
LIMIT ?
`,
@@ -421,7 +421,7 @@ export class RequestRepository extends BaseRepository<RequestData> {
r.account_used as account_id,
a.name as account_name,
COUNT(*) as request_count,
- SUM(CASE WHEN r.success = 1 THEN 1 ELSE 0 END) * 100.0 / COUNT(*) as success_rate
+ SUM(CASE WHEN r.success = TRUE THEN 1 ELSE 0 END) * 100.0 / COUNT(*) as success_rate
FROM requests r
LEFT JOIN accounts a ON r.account_used = a.id
${whereClause}
diff --git a/packages/database/src/repositories/stats.repository.ts b/packages/database/src/repositories/stats.repository.ts
index 413af4a..1031d58 100644
--- a/packages/database/src/repositories/stats.repository.ts
+++ b/packages/database/src/repositories/stats.repository.ts
@@ -37,15 +37,15 @@ export class StatsRepository {
const since = sinceMs ?? Date.now() - 30 * 24 * 60 * 60 * 1000;
const stats = await this.adapter.get<AggregatedStats>(
`SELECT
- COUNT(*) as totalRequests,
- SUM(CASE WHEN success = 1 THEN 1 ELSE 0 END) as successfulRequests,
- AVG(response_time_ms) as avgResponseTime,
- SUM(input_tokens) as inputTokens,
- SUM(output_tokens) as outputTokens,
- SUM(cache_creation_input_tokens) as cacheCreationInputTokens,
- SUM(cache_read_input_tokens) as cacheReadInputTokens,
- SUM(cost_usd) as totalCostUsd,
- AVG(output_tokens_per_second) as avgTokensPerSecond
+ COUNT(*) as "totalRequests",
+ SUM(CASE WHEN success = TRUE THEN 1 ELSE 0 END) as "successfulRequests",
+ AVG(response_time_ms) as "avgResponseTime",
+ SUM(input_tokens) as "inputTokens",
+ SUM(output_tokens) as "outputTokens",
+ SUM(cache_creation_input_tokens) as "cacheCreationInputTokens",
+ SUM(cache_read_input_tokens) as "cacheReadInputTokens",
+ SUM(cost_usd) as "totalCostUsd",
+ AVG(output_tokens_per_second) as "avgTokensPerSecond"
FROM requests
WHERE timestamp > ?`,
[since],
@@ -53,18 +53,25 @@ export class StatsRepository {
const s = stats as AggregatedStats;
- // Calculate total tokens
- const totalTokens =
- (s.inputTokens || 0) +
- (s.outputTokens || 0) +
- (s.cacheCreationInputTokens || 0) +
- (s.cacheReadInputTokens || 0);
+ // Coerce to numbers — PostgreSQL returns BIGINT/SUM as strings
+ const totalRequests = Number(s.totalRequests) || 0;
+ const successfulRequests = Number(s.successfulRequests) || 0;
+ const inputTokens = Number(s.inputTokens) || 0;
+ const outputTokens = Number(s.outputTokens) || 0;
+ const cacheCreationInputTokens = Number(s.cacheCreationInputTokens) || 0;
+ const cacheReadInputTokens = Number(s.cacheReadInputTokens) || 0;
return {
- ...s,
- totalTokens,
- avgResponseTime: s.avgResponseTime || 0,
- totalCostUsd: s.totalCostUsd || 0,
+ totalRequests,
+ successfulRequests,
+ avgResponseTime: Number(s.avgResponseTime) || 0,
+ totalTokens: inputTokens + outputTokens + cacheCreationInputTokens + cacheReadInputTokens,
+ totalCostUsd: Number(s.totalCostUsd) || 0,
+ inputTokens,
+ outputTokens,
+ cacheCreationInputTokens,
+ cacheReadInputTokens,
+ avgTokensPerSecond: s.avgTokensPerSecond != null ? Number(s.avgTokensPerSecond) : null,
};
}
@@ -95,16 +102,16 @@ export class StatsRepository {
SELECT
COALESCE(a.id, ?) as id,
COALESCE(a.name, ?) as name,
- COUNT(r.id) as requestCount,
- COALESCE(a.total_requests, 0) as totalRequests
+ COUNT(r.id) as "requestCount",
+ COALESCE(MAX(a.total_requests), 0) as "totalRequests"
FROM requests r
LEFT JOIN accounts a ON a.id = r.account_used
- GROUP BY COALESCE(a.id, ?), COALESCE(a.name, ?)
+ GROUP BY 1, 2
HAVING COUNT(r.id) > 0
- ORDER BY requestCount DESC
+ ORDER BY "requestCount" DESC
LIMIT ?
`,
- [NO_ACCOUNT_ID, NO_ACCOUNT_ID, NO_ACCOUNT_ID, NO_ACCOUNT_ID, limit],
+ [NO_ACCOUNT_ID, NO_ACCOUNT_ID, limit],
);
} else {
accountStats = await this.adapter.query<{
@@ -117,8 +124,8 @@ export class StatsRepository {
SELECT
a.id,
a.name,
- a.request_count as requestCount,
- a.total_requests as totalRequests
+ a.request_count as "requestCount",
+ a.total_requests as "totalRequests"
FROM accounts a
WHERE a.request_count > 0
ORDER BY a.request_count DESC
@@ -140,29 +147,29 @@ export class StatsRepository {
successful: number;
}>(
`SELECT
- account_used as accountId,
+ account_used as "accountId",
COUNT(*) as total,
- SUM(CASE WHEN success = 1 THEN 1 ELSE 0 END) as successful
+ SUM(CASE WHEN success = TRUE THEN 1 ELSE 0 END) as successful
FROM requests
WHERE account_used IN (${placeholders})
GROUP BY account_used`,
accountIds,
);
- // Create a map for O(1) lookup
+ // Create a map for O(1) lookup — Number() coercion for PG BIGINT strings
const successRateMap = new Map(
successRates.map((sr) => [
sr.accountId,
- sr.total > 0 ? Math.round((sr.successful / sr.total) * 100) : 0,
+ Number(sr.total) > 0 ? Math.round((Number(sr.successful) / Number(sr.total)) * 100) : 0,
]),
);
- // Combine the data
+ // Combine the data — Number() coercion for PG BIGINT strings
return accountStats.map((acc) => ({
name: acc.name,
- requestCount: acc.requestCount,
+ requestCount: Number(acc.requestCount),
successRate: successRateMap.get(acc.id) || 0,
- totalRequests: acc.totalRequests,
+ totalRequests: Number(acc.totalRequests),
}));
}
@@ -173,7 +180,7 @@ export class StatsRepository {
const result = await this.adapter.get<{ count: number }>(
"SELECT COUNT(*) as count FROM accounts WHERE request_count > 0",
);
- return result?.count || 0;
+ return Number(result?.count) || 0;
}
/**
@@ -181,11 +188,12 @@ export class StatsRepository {
*/
async getRecentErrors(limit = 10): Promise<string[]> {
const errors = await this.adapter.query<{ error_message: string }>(
- `SELECT DISTINCT error_message
+ `SELECT error_message, MAX(timestamp) as latest
FROM requests
WHERE error_message IS NOT NULL
AND error_message != ''
- ORDER BY timestamp DESC
+ GROUP BY error_message
+ ORDER BY latest DESC
LIMIT ?`,
[limit],
);
@@ -199,7 +207,7 @@ export class StatsRepository {
async getTopModels(
limit = 5,
): Promise<Array<{ model: string; count: number; percentage: number }>> {
- return this.adapter.query<{
+ const rows = await this.adapter.query<{
model: string;
count: number;
percentage: number;
@@ -218,12 +226,18 @@ export class StatsRepository {
SELECT
mc.model,
mc.count,
- ROUND(CAST(mc.count AS REAL) / t.total * 100, 2) as percentage
+ ROUND(CAST(CAST(mc.count AS REAL) / t.total * 100 AS NUMERIC), 2) as percentage
FROM model_counts mc, total t
ORDER BY mc.count DESC
LIMIT ?`,
[limit],
);
+ // Coerce to numbers — PG returns COUNT/ROUND as strings
+ return rows.map((r) => ({
+ model: r.model,
+ count: Number(r.count),
+ percentage: Number(r.percentage),
+ }));
}
/**
@@ -278,28 +292,28 @@ export class StatsRepository {
successful: number;
}>(
`SELECT
- api_key_id as apiKeyId,
+ api_key_id as "apiKeyId",
COUNT(*) as total,
- SUM(CASE WHEN success = 1 THEN 1 ELSE 0 END) as successful
+ SUM(CASE WHEN success = TRUE THEN 1 ELSE 0 END) as successful
FROM requests
WHERE api_key_id IN (${placeholders})
GROUP BY api_key_id`,
apiKeyIds,
);
- // Create a map for O(1) lookup
+ // Create a map for O(1) lookup — Number() coercion for PG BIGINT strings
const successRateMap = new Map(
successRates.map((sr) => [
sr.apiKeyId,
- sr.total > 0 ? Math.round((sr.successful / sr.total) * 100) : 0,
+ Number(sr.total) > 0 ? Math.round((Number(sr.successful) / Number(sr.total)) * 100) : 0,
]),
);
- // Combine the data
+ // Combine the data — Number() coercion for PG BIGINT strings
return apiKeyStats.map((key) => ({
id: key.id,
name: key.name,
- requests: key.requests,
+ requests: Number(key.requests),
successRate: successRateMap.get(key.id) || 0,
}));
}
diff --git a/packages/http-api/src/handlers/accounts.ts b/packages/http-api/src/handlers/accounts.ts
index 0551bab..3e39a71 100644
--- a/packages/http-api/src/handlers/accounts.ts
+++ b/packages/http-api/src/handlers/accounts.ts
@@ -1,4 +1,3 @@
-import type { Database } from "bun:sqlite";
import crypto from "node:crypto";
import { existsSync, readFileSync } from "node:fs";
import { homedir } from "node:os";
@@ -39,14 +38,14 @@ const log = new Logger("AccountsHandler");
/**
* Create an accounts list handler
*/
-export function createAccountsListHandler(db: Database) {
+export function createAccountsListHandler(dbOps: DatabaseOperations) {
return async (): Promise<Response> => {
const now = Date.now();
const sessionDuration = 5 * 60 * 60 * 1000; // 5 hours
- const accounts = db
- .query(
- `
+ const adapter = dbOps.getAdapter();
+ const accounts = (await adapter.query(
+ `
SELECT
id,
name,
@@ -87,8 +86,8 @@ export function createAccountsListHandler(db: Database) {
FROM accounts
ORDER BY priority DESC, request_count DESC
`,
- )
- .all(now, now, now, sessionDuration) as Array<{
+ [now, now, now, sessionDuration],
+ )) as Array<{
id: string;
name: string;
provider: string | null;
@@ -311,24 +310,24 @@ export function createAccountsListHandler(db: Database) {
requestCount: account.request_count,
totalRequests: account.total_requests,
lastUsed: account.last_used
- ? new Date(account.last_used).toISOString()
+ ? new Date(Number(account.last_used)).toISOString()
: null,
- created: new Date(account.created_at).toISOString(),
- paused: account.paused === 1,
- priority: account.priority,
+ created: new Date(Number(account.created_at)).toISOString(),
+ paused: !!account.paused,
+ priority: Number(account.priority),
tokenStatus: account.token_valid ? "valid" : "expired",
tokenExpiresAt: account.expires_at
- ? new Date(account.expires_at).toISOString()
+ ? new Date(Number(account.expires_at)).toISOString()
: null,
rateLimitStatus,
rateLimitReset: account.rate_limit_reset
- ? new Date(account.rate_limit_reset).toISOString()
+ ? new Date(Number(account.rate_limit_reset)).toISOString()
: null,
rateLimitRemaining: account.rate_limit_remaining,
rateLimitedUntil: account.rate_limited_until || null,
sessionInfo: account.session_info || "",
- autoFallbackEnabled: account.auto_fallback_enabled === 1,
- autoRefreshEnabled: account.auto_refresh_enabled === 1,
+ autoFallbackEnabled: !!account.auto_fallback_enabled,
+ autoRefreshEnabled: !!account.auto_refresh_enabled,
customEndpoint: account.custom_endpoint,
modelMappings,
usageUtilization,
@@ -359,10 +358,8 @@ export function createAccountPriorityUpdateHandler(dbOps: DatabaseOperations) {
const priority = validatePriority(body.priority, "priority");
// Check if account exists
- const db = dbOps.getDatabase();
- const account = db
- .query<{ id: string }, [string]>("SELECT id FROM accounts WHERE id = ?")
- .get(accountId);
+ const adapter = dbOps.getAdapter();
+ const account = await adapter.get<{ id: string }>("SELECT id FROM accounts WHERE id = ?", [accountId]);
if (!account) {
return errorResponse(NotFound("Account not found"));
@@ -466,7 +463,8 @@ export function createAccountAddHandler(
const accountId = crypto.randomUUID();
const now = Date.now();
- dbOps.getDatabase().run(
+ const adapter = dbOps.getAdapter();
+ await adapter.run(
`INSERT INTO accounts (
id, name, provider, refresh_token, access_token,
created_at, request_count, total_requests, priority, custom_endpoint
@@ -536,12 +534,11 @@ export function createAccountRemoveHandler(dbOps: DatabaseOperations) {
}
// Find the account ID to clean up usage cache (check before deletion)
- const db = dbOps.getDatabase();
- const account = db
- .query<{ id: string }, [string]>(
- "SELECT id FROM accounts WHERE name = ?",
- )
- .get(accountName);
+ const adapter = dbOps.getAdapter();
+ const account = await adapter.get<{ id: string }>(
+ "SELECT id FROM accounts WHERE name = ?",
+ [accountName],
+ );
if (account) {
// Clear usage cache for removed account to prevent memory leaks
@@ -567,12 +564,11 @@ export function createAccountPauseHandler(dbOps: DatabaseOperations) {
return async (_req: Request, accountId: string): Promise<Response> => {
try {
// Get account name by ID
- const db = dbOps.getDatabase();
- const account = db
- .query<{ name: string }, [string]>(
- "SELECT name FROM accounts WHERE id = ?",
- )
- .get(accountId);
+ const adapter = dbOps.getAdapter();
+ const account = await adapter.get<{ name: string }>(
+ "SELECT name FROM accounts WHERE id = ?",
+ [accountId],
+ );
if (!account) {
return errorResponse(NotFound("Account not found"));
@@ -603,12 +599,11 @@ export function createAccountResumeHandler(dbOps: DatabaseOperations) {
return async (_req: Request, accountId: string): Promise<Response> => {
try {
// Get account name by ID
- const db = dbOps.getDatabase();
- const account = db
- .query<{ name: string }, [string]>(
- "SELECT name FROM accounts WHERE id = ?",
- )
- .get(accountId);
+ const adapter = dbOps.getAdapter();
+ const account = await adapter.get<{ name: string }>(
+ "SELECT name FROM accounts WHERE id = ?",
+ [accountId],
+ );
if (!account) {
return errorResponse(NotFound("Account not found"));
@@ -656,23 +651,21 @@ export function createAccountRenameHandler(dbOps: DatabaseOperations) {
}
// Check if account exists
- const db = dbOps.getDatabase();
- const account = db
- .query<{ name: string }, [string]>(
- "SELECT name FROM accounts WHERE id = ?",
- )
- .get(accountId);
+ const adapter = dbOps.getAdapter();
+ const account = await adapter.get<{ name: string }>(
+ "SELECT name FROM accounts WHERE id = ?",
+ [accountId],
+ );
if (!account) {
return errorResponse(NotFound("Account not found"));
}
// Check if new name is already taken
- const existingAccount = db
- .query<{ id: string }, [string, string]>(
- "SELECT id FROM accounts WHERE name = ? AND id != ?",
- )
- .get(newName, accountId);
+ const existingAccount = await adapter.get<{ id: string }>(
+ "SELECT id FROM accounts WHERE name = ? AND id != ?",
+ [newName, accountId],
+ );
if (existingAccount) {
return errorResponse(
@@ -763,8 +756,8 @@ export function createZaiAccountAddHandler(dbOps: DatabaseOperations) {
const accountId = crypto.randomUUID();
const now = Date.now();
- const db = dbOps.getDatabase();
- db.run(
+ const adapter = dbOps.getAdapter();
+ await adapter.run(
`INSERT INTO accounts (
id, name, provider, api_key, refresh_token, access_token,
expires_at, created_at, request_count, total_requests, priority, custom_endpoint
@@ -790,29 +783,25 @@ export function createZaiAccountAddHandler(dbOps: DatabaseOperations) {
);
// Get the created account for response
- const account = db
- .query<
- {
- id: string;
- name: string;
- provider: string;
- request_count: number;
- total_requests: number;
- last_used: number | null;
- created_at: number;
- expires_at: number;
- refresh_token: string;
- paused: number;
- },
- [string]
- >(
- `SELECT
- id, name, provider, request_count, total_requests,
- last_used, created_at, expires_at, refresh_token,
- COALESCE(paused, 0) as paused
- FROM accounts WHERE id = ?`,
- )
- .get(accountId);
+ const account = await adapter.get<{
+ id: string;
+ name: string;
+ provider: string;
+ request_count: number;
+ total_requests: number;
+ last_used: number | null;
+ created_at: number;
+ expires_at: number;
+ refresh_token: string;
+ paused: number;
+ }>(
+ `SELECT
+ id, name, provider, request_count, total_requests,
+ last_used, created_at, expires_at, refresh_token,
+ COALESCE(paused, 0) as paused
+ FROM accounts WHERE id = ?`,
+ [accountId],
+ );
if (!account) {
return errorResponse(
@@ -829,13 +818,13 @@ export function createZaiAccountAddHandler(dbOps: DatabaseOperations) {
requestCount: account.request_count,
totalRequests: account.total_requests,
lastUsed: account.last_used
- ? new Date(account.last_used).toISOString()
+ ? new Date(Number(account.last_used)).toISOString()
: null,
- created: new Date(account.created_at).toISOString(),
- paused: account.paused === 1,
+ created: new Date(Number(account.created_at)).toISOString(),
+ paused: !!account.paused,
priority: priority,
tokenStatus: "valid" as const,
- tokenExpiresAt: new Date(account.expires_at).toISOString(),
+ tokenExpiresAt: new Date(Number(account.expires_at)).toISOString(),
rateLimitStatus: "OK",
rateLimitReset: null,
rateLimitRemaining: null,
@@ -930,8 +919,8 @@ export function createOpenAIAccountAddHandler(dbOps: DatabaseOperations) {
const accountId = crypto.randomUUID();
const now = Date.now();
- const db = dbOps.getDatabase();
- db.run(
+ const adapter = dbOps.getAdapter();
+ await adapter.run(
`INSERT INTO accounts (
id, name, provider, api_key, refresh_token, access_token,
expires_at, created_at, request_count, total_requests, priority, custom_endpoint, model_mappings
@@ -958,29 +947,25 @@ export function createOpenAIAccountAddHandler(dbOps: DatabaseOperations) {
);
// Get the created account for response
- const account = db
- .query<
- {
- id: string;
- name: string;
- provider: string;
- request_count: number;
- total_requests: number;
- last_used: number | null;
- created_at: number;
- expires_at: number;
- refresh_token: string;
- paused: number;
- },
- [string]
- >(
- `SELECT
- id, name, provider, request_count, total_requests,
- last_used, created_at, expires_at, refresh_token,
- COALESCE(paused, 0) as paused
- FROM accounts WHERE id = ?`,
- )
- .get(accountId);
+ const account = await adapter.get<{
+ id: string;
+ name: string;
+ provider: string;
+ request_count: number;
+ total_requests: number;
+ last_used: number | null;
+ created_at: number;
+ expires_at: number;
+ refresh_token: string;
+ paused: number;
+ }>(
+ `SELECT
+ id, name, provider, request_count, total_requests,
+ last_used, created_at, expires_at, refresh_token,
+ COALESCE(paused, 0) as paused
+ FROM accounts WHERE id = ?`,
+ [accountId],
+ );
if (!account) {
throw new Error("Failed to retrieve created account");
@@ -995,13 +980,13 @@ export function createOpenAIAccountAddHandler(dbOps: DatabaseOperations) {
requestCount: account.request_count,
totalRequests: account.total_requests,
lastUsed: account.last_used
- ? new Date(account.last_used).toISOString()
+ ? new Date(Number(account.last_used)).toISOString()
: null,
- created: new Date(account.created_at).toISOString(),
- paused: account.paused === 1,
+ created: new Date(Number(account.created_at)).toISOString(),
+ paused: !!account.paused,
priority: priority,
tokenStatus: "valid" as const,
- tokenExpiresAt: new Date(account.expires_at).toISOString(),
+ tokenExpiresAt: new Date(Number(account.expires_at)).toISOString(),
rateLimitStatus: "OK",
rateLimitReset: null,
rateLimitRemaining: null,
@@ -1079,8 +1064,8 @@ export function createVertexAIAccountAddHandler(dbOps: DatabaseOperations) {
// Create Vertex AI account directly in database
const accountId = crypto.randomUUID();
const now = Date.now();
- const db = dbOps.getDatabase();
- db.run(
+ const adapter = dbOps.getAdapter();
+ await adapter.run(
`INSERT INTO accounts (
id, name, provider, api_key, refresh_token, access_token,
expires_at, created_at, request_count, total_requests, priority, custom_endpoint
@@ -1106,29 +1091,25 @@ export function createVertexAIAccountAddHandler(dbOps: DatabaseOperations) {
);
// Get the created account for response
- const account = db
- .query<
- {
- id: string;
- name: string;
- provider: string;
- request_count: number;
- total_requests: number;
- last_used: number | null;
- created_at: number;
- expires_at: number | null;
- refresh_token: string;
- paused: number;
- },
- [string]
- >(
- `SELECT
- id, name, provider, request_count, total_requests,
- last_used, created_at, expires_at, refresh_token,
- COALESCE(paused, 0) as paused
- FROM accounts WHERE id = ?`,
- )
- .get(accountId);
+ const account = await adapter.get<{
+ id: string;
+ name: string;
+ provider: string;
+ request_count: number;
+ total_requests: number;
+ last_used: number | null;
+ created_at: number;
+ expires_at: number | null;
+ refresh_token: string;
+ paused: number;
+ }>(
+ `SELECT
+ id, name, provider, request_count, total_requests,
+ last_used, created_at, expires_at, refresh_token,
+ COALESCE(paused, 0) as paused
+ FROM accounts WHERE id = ?`,
+ [accountId],
+ );
if (!account) {
return errorResponse(
@@ -1145,11 +1126,11 @@ export function createVertexAIAccountAddHandler(dbOps: DatabaseOperations) {
request_count: account.request_count,
total_requests: account.total_requests,
last_used: account.last_used,
- created_at: new Date(account.created_at),
- expires_at: account.expires_at ? new Date(account.expires_at) : null,
+ created_at: new Date(Number(account.created_at)),
+ expires_at: account.expires_at ? new Date(Number(account.expires_at)) : null,
tokenStatus: "valid",
mode: "vertex-ai",
- paused: account.paused === 1,
+ paused: !!account.paused,
},
});
} catch (error) {
@@ -1207,8 +1188,8 @@ export function createMinimaxAccountAddHandler(dbOps: DatabaseOperations) {
// Create Minimax account directly in database
const accountId = crypto.randomUUID();
const now = Date.now();
- const db = dbOps.getDatabase();
- db.run(
+ const adapter = dbOps.getAdapter();
+ await adapter.run(
`INSERT INTO accounts (
id, name, provider, api_key, refresh_token, access_token,
expires_at, created_at, request_count, total_requests, priority, custom_endpoint
@@ -1234,29 +1215,25 @@ export function createMinimaxAccountAddHandler(dbOps: DatabaseOperations) {
);
// Get the created account for response
- const account = db
- .query<
- {
- id: string;
- name: string;
- provider: string;
- request_count: number;
- total_requests: number;
- last_used: number | null;
- created_at: number;
- expires_at: number;
- refresh_token: string;
- paused: number;
- },
- [string]
- >(
- `SELECT
- id, name, provider, request_count, total_requests,
- last_used, created_at, expires_at, refresh_token,
- COALESCE(paused, 0) as paused
- FROM accounts WHERE id = ?`,
- )
- .get(accountId);
+ const account = await adapter.get<{
+ id: string;
+ name: string;
+ provider: string;
+ request_count: number;
+ total_requests: number;
+ last_used: number | null;
+ created_at: number;
+ expires_at: number;
+ refresh_token: string;
+ paused: number;
+ }>(
+ `SELECT
+ id, name, provider, request_count, total_requests,
+ last_used, created_at, expires_at, refresh_token,
+ COALESCE(paused, 0) as paused
+ FROM accounts WHERE id = ?`,
+ [accountId],
+ );
if (!account) {
return errorResponse(
@@ -1273,13 +1250,13 @@ export function createMinimaxAccountAddHandler(dbOps: DatabaseOperations) {
requestCount: account.request_count,
totalRequests: account.total_requests,
lastUsed: account.last_used
- ? new Date(account.last_used).toISOString()
+ ? new Date(Number(account.last_used)).toISOString()
: null,
- created: new Date(account.created_at).toISOString(),
- paused: account.paused === 1,
+ created: new Date(Number(account.created_at)).toISOString(),
+ paused: !!account.paused,
priority: priority,
tokenStatus: "valid" as const,
- tokenExpiresAt: new Date(account.expires_at).toISOString(),
+ tokenExpiresAt: new Date(Number(account.expires_at)).toISOString(),
rateLimitStatus: "OK",
rateLimitReset: null,
rateLimitRemaining: null,
@@ -1378,8 +1355,8 @@ export function createNanoGPTAccountAddHandler(dbOps: DatabaseOperations) {
// Create NanoGPT account directly in database
const accountId = crypto.randomUUID();
const now = Date.now();
- const db = dbOps.getDatabase();
- db.run(
+ const adapter = dbOps.getAdapter();
+ await adapter.run(
`INSERT INTO accounts (
id, name, provider, api_key, refresh_token, access_token,
expires_at, created_at, request_count, total_requests, priority, custom_endpoint, model_mappings
@@ -1404,29 +1381,25 @@ export function createNanoGPTAccountAddHandler(dbOps: DatabaseOperations) {
`Successfully added NanoGPT account: ${name} (Priority ${priority})`,
);
// Get the created account for response
- const account = db
- .query<
- {
- id: string;
- name: string;
- provider: string;
- request_count: number;
- total_requests: number;
- last_used: number | null;
- created_at: number;
- expires_at: number;
- refresh_token: string;
- paused: number;
- },
- [string]
- >(
- `SELECT
- id, name, provider, request_count, total_requests,
- last_used, created_at, expires_at, refresh_token,
- COALESCE(paused, 0) as paused
- FROM accounts WHERE id = ?`,
- )
- .get(accountId);
+ const account = await adapter.get<{
+ id: string;
+ name: string;
+ provider: string;
+ request_count: number;
+ total_requests: number;
+ last_used: number | null;
+ created_at: number;
+ expires_at: number;
+ refresh_token: string;
+ paused: number;
+ }>(
+ `SELECT
+ id, name, provider, request_count, total_requests,
+ last_used, created_at, expires_at, refresh_token,
+ COALESCE(paused, 0) as paused
+ FROM accounts WHERE id = ?`,
+ [accountId],
+ );
if (!account) {
return errorResponse(
InternalServerError("Failed to retrieve created account"),
@@ -1441,13 +1414,13 @@ export function createNanoGPTAccountAddHandler(dbOps: DatabaseOperations) {
requestCount: account.request_count,
totalRequests: account.total_requests,
lastUsed: account.last_used
- ? new Date(account.last_used).toISOString()
+ ? new Date(Number(account.last_used)).toISOString()
: null,
- created: new Date(account.created_at).toISOString(),
- paused: account.paused === 1,
+ created: new Date(Number(account.created_at)).toISOString(),
+ paused: !!account.paused,
priority: priority,
tokenStatus: "valid" as const,
- tokenExpiresAt: new Date(account.expires_at).toISOString(),
+ tokenExpiresAt: new Date(Number(account.expires_at)).toISOString(),
rateLimitStatus: "OK",
rateLimitReset: null,
rateLimitRemaining: null,
@@ -1546,8 +1519,8 @@ export function createAnthropicCompatibleAccountAddHandler(
// Create Anthropic-compatible account directly in database
const accountId = crypto.randomUUID();
const now = Date.now();
- const db = dbOps.getDatabase();
- db.run(
+ const adapter = dbOps.getAdapter();
+ await adapter.run(
`INSERT INTO accounts (
id, name, provider, api_key, refresh_token, access_token,
expires_at, created_at, request_count, total_requests, priority, custom_endpoint, model_mappings
@@ -1574,29 +1547,25 @@ export function createAnthropicCompatibleAccountAddHandler(
);
// Get the created account for response
- const account = db
- .query<
- {
- id: string;
- name: string;
- provider: string;
- request_count: number;
- total_requests: number;
- last_used: number | null;
- created_at: number;
- expires_at: number;
- refresh_token: string;
- paused: number;
- },
- [string]
- >(
- `SELECT
- id, name, provider, request_count, total_requests,
- last_used, created_at, expires_at, refresh_token,
- COALESCE(paused, 0) as paused
- FROM accounts WHERE id = ?`,
- )
- .get(accountId);
+ const account = await adapter.get<{
+ id: string;
+ name: string;
+ provider: string;
+ request_count: number;
+ total_requests: number;
+ last_used: number | null;
+ created_at: number;
+ expires_at: number;
+ refresh_token: string;
+ paused: number;
+ }>(
+ `SELECT
+ id, name, provider, request_count, total_requests,
+ last_used, created_at, expires_at, refresh_token,
+ COALESCE(paused, 0) as paused
+ FROM accounts WHERE id = ?`,
+ [accountId],
+ );
if (!account) {
return errorResponse(
@@ -1613,13 +1582,13 @@ export function createAnthropicCompatibleAccountAddHandler(
requestCount: account.request_count,
totalRequests: account.total_requests,
lastUsed: account.last_used
- ? new Date(account.last_used).toISOString()
+ ? new Date(Number(account.last_used)).toISOString()
: null,
- created: new Date(account.created_at).toISOString(),
- paused: account.paused === 1,
+ created: new Date(Number(account.created_at)).toISOString(),
+ paused: !!account.paused,
priority: priority,
tokenStatus: "valid" as const,
- tokenExpiresAt: new Date(account.expires_at).toISOString(),
+ tokenExpiresAt: new Date(Number(account.expires_at)).toISOString(),
rateLimitStatus: "OK",
rateLimitReset: null,
rateLimitRemaining: null,
@@ -1658,12 +1627,11 @@ export function createAccountAutoFallbackHandler(dbOps: DatabaseOperations) {
}
// Check if account exists
- const db = dbOps.getDatabase();
- const account = db
- .query<{ name: string; provider: string }, [string]>(
- "SELECT name, provider FROM accounts WHERE id = ?",
- )
- .get(accountId);
+ const adapter = dbOps.getAdapter();
+ const account = await adapter.get<{ name: string; provider: string }>(
+ "SELECT name, provider FROM accounts WHERE id = ?",
+ [accountId],
+ );
if (!account) {
return errorResponse(NotFound("Account not found"));
@@ -1716,12 +1684,11 @@ export function createAccountAutoRefreshHandler(dbOps: DatabaseOperations) {
}
// Check if account exists
- const db = dbOps.getDatabase();
- const account = db
- .query<{ name: string; provider: string }, [string]>(
- "SELECT name, provider FROM accounts WHERE id = ?",
- )
- .get(accountId);
+ const adapter = dbOps.getAdapter();
+ const account = await adapter.get<{ name: string; provider: string }>(
+ "SELECT name, provider FROM accounts WHERE id = ?",
+ [accountId],
+ );
if (!account) {
return errorResponse(NotFound("Account not found"));
@@ -1735,7 +1702,7 @@ export function createAccountAutoRefreshHandler(dbOps: DatabaseOperations) {
}
// Update auto-refresh setting
- db.run("UPDATE accounts SET auto_refresh_enabled = ? WHERE id = ?", [
+ await adapter.run("UPDATE accounts SET auto_refresh_enabled = ? WHERE id = ?", [
enabled,
accountId,
]);
@@ -1790,8 +1757,8 @@ export function createAccountCustomEndpointUpdateHandler(
);
// Update account custom endpoint
- const db = dbOps.getDatabase();
- db.run("UPDATE accounts SET custom_endpoint = ? WHERE id = ?", [
+ const adapter = dbOps.getAdapter();
+ await adapter.run("UPDATE accounts SET custom_endpoint = ? WHERE id = ?", [
customEndpoint || null,
accountId,
]);
@@ -1824,12 +1791,11 @@ export function createAccountModelMappingsUpdateHandler(
const body = await req.json();
// Get account to verify it supports model mappings
- const db = dbOps.getDatabase();
- const account = db
- .query<{ provider: string; custom_endpoint: string | null }, [string]>(
- "SELECT provider, custom_endpoint FROM accounts WHERE id = ?",
- )
- .get(accountId);
+ const adapter = dbOps.getAdapter();
+ const account = await adapter.get<{ provider: string; custom_endpoint: string | null }>(
+ "SELECT provider, custom_endpoint FROM accounts WHERE id = ?",
+ [accountId],
+ );
if (!account) {
return errorResponse(NotFound("Account not found"));
@@ -1869,11 +1835,10 @@ export function createAccountModelMappingsUpdateHandler(
// Get existing model mappings from the dedicated field
let existingModelMappings: { [key: string]: string } = {};
- const result = db
- .query<{ model_mappings: string | null }, [string]>(
- "SELECT model_mappings FROM accounts WHERE id = ?",
- )
- .get(accountId);
+ const result = await adapter.get<{ model_mappings: string | null }>(
+ "SELECT model_mappings FROM accounts WHERE id = ?",
+ [accountId],
+ );
const existingModelMappingsStr = result?.model_mappings || null;
if (existingModelMappingsStr) {
@@ -1907,7 +1872,7 @@ export function createAccountModelMappingsUpdateHandler(
? JSON.stringify(mergedModelMappings)
: null;
- db.run("UPDATE accounts SET model_mappings = ? WHERE id = ?", [
+ await adapter.run("UPDATE accounts SET model_mappings = ? WHERE id = ?", [
finalModelMappings,
accountId,
]);
@@ -1939,18 +1904,16 @@ export function createAccountForceResetRateLimitHandler(
) {
return async (_req: Request, accountId: string): Promise<Response> => {
try {
- const db = dbOps.getDatabase();
- const account = db
- .query<
- {
- id: string;
- name: string;
- provider: string;
- access_token: string | null;
- },
- [string]
- >("SELECT id, name, provider, access_token FROM accounts WHERE id = ?")
- .get(accountId);
+ const adapter = dbOps.getAdapter();
+ const account = await adapter.get<{
+ id: string;
+ name: string;
+ provider: string;
+ access_token: string | null;
+ }>(
+ "SELECT id, name, provider, access_token FROM accounts WHERE id = ?",
+ [accountId],
+ );
if (!account) {
return errorResponse(NotFound("Account not found"));
@@ -2014,12 +1977,11 @@ export function createAccountReloadHandler(dbOps: DatabaseOperations) {
return async (_req: Request, accountId: string): Promise<Response> => {
try {
// Check if account exists
- const db = dbOps.getDatabase();
- const account = db
- .query<{ name: string; provider: string }, [string]>(
- "SELECT name, provider FROM accounts WHERE id = ?",
- )
- .get(accountId);
+ const adapter = dbOps.getAdapter();
+ const account = await adapter.get<{ name: string; provider: string }>(
+ "SELECT name, provider FROM accounts WHERE id = ?",
+ [accountId],
+ );
if (!account) {
return errorResponse(NotFound("Account not found"));
@@ -2230,8 +2192,8 @@ export function createBedrockAccountAddHandler(dbOps: DatabaseOperations) {
const accountId = crypto.randomUUID();
const now = Date.now();
const oneYearFromNow = now + 365 * 24 * 60 * 60 * 1000; // 1 year expiry
- const db = dbOps.getDatabase();
- db.run(
+ const adapter = dbOps.getAdapter();
+ await adapter.run(
`INSERT INTO accounts (
id, name, provider, api_key, refresh_token, access_token,
expires_at, created_at, request_count, total_requests, priority, custom_endpoint, cross_region_mode, model_mappings
@@ -2259,29 +2221,25 @@ export function createBedrockAccountAddHandler(dbOps: DatabaseOperations) {
);
// Get the created account for response
- const account = db
- .query<
- {
- id: string;
- name: string;
- provider: string;
- request_count: number;
- total_requests: number;
- last_used: number | null;
- created_at: number;
- expires_at: number | null;
- refresh_token: string;
- paused: number;
- },
- [string]
- >(
- `SELECT
- id, name, provider, request_count, total_requests,
- last_used, created_at, expires_at, refresh_token,
- COALESCE(paused, 0) as paused
- FROM accounts WHERE id = ?`,
- )
- .get(accountId);
+ const account = await adapter.get<{
+ id: string;
+ name: string;
+ provider: string;
+ request_count: number;
+ total_requests: number;
+ last_used: number | null;
+ created_at: number;
+ expires_at: number | null;
+ refresh_token: string;
+ paused: number;
+ }>(
+ `SELECT
+ id, name, provider, request_count, total_requests,
+ last_used, created_at, expires_at, refresh_token,
+ COALESCE(paused, 0) as paused
+ FROM accounts WHERE id = ?`,
+ [accountId],
+ );
if (!account) {
return errorResponse(
@@ -2298,11 +2256,11 @@ export function createBedrockAccountAddHandler(dbOps: DatabaseOperations) {
request_count: account.request_count,
total_requests: account.total_requests,
last_used: account.last_used,
- created_at: new Date(account.created_at),
- expires_at: account.expires_at ? new Date(account.expires_at) : null,
+ created_at: new Date(Number(account.created_at)),
+ expires_at: account.expires_at ? new Date(Number(account.expires_at)) : null,
tokenStatus: "valid",
mode: "bedrock",
- paused: account.paused === 1,
+ paused: !!account.paused,
cross_region_mode: crossRegionMode,
},
});
@@ -2383,8 +2341,8 @@ export function createKiloAccountAddHandler(dbOps: DatabaseOperations) {
// Create Kilo account in database
const accountId = crypto.randomUUID();
const now = Date.now();
- const db = dbOps.getDatabase();
- db.run(
+ const adapter = dbOps.getAdapter();
+ await adapter.run(
`INSERT INTO accounts (
id, name, provider, api_key, refresh_token, access_token,
expires_at, created_at, request_count, total_requests, priority, custom_endpoint, model_mappings
@@ -2410,29 +2368,25 @@ export function createKiloAccountAddHandler(dbOps: DatabaseOperations) {
`Successfully added Kilo Gateway account: ${name} (Priority ${priority})`,
);
- const account = db
- .query<
- {
- id: string;
- name: string;
- provider: string;
- request_count: number;
- total_requests: number;
- last_used: number | null;
- created_at: number;
- expires_at: number;
- refresh_token: string;
- paused: number;
- },
- [string]
- >(
- `SELECT
- id, name, provider, request_count, total_requests,
- last_used, created_at, expires_at, refresh_token,
- COALESCE(paused, 0) as paused
- FROM accounts WHERE id = ?`,
- )
- .get(accountId);
+ const account = await adapter.get<{
+ id: string;
+ name: string;
+ provider: string;
+ request_count: number;
+ total_requests: number;
+ last_used: number | null;
+ created_at: number;
+ expires_at: number;
+ refresh_token: string;
+ paused: number;
+ }>(
+ `SELECT
+ id, name, provider, request_count, total_requests,
+ last_used, created_at, expires_at, refresh_token,
+ COALESCE(paused, 0) as paused
+ FROM accounts WHERE id = ?`,
+ [accountId],
+ );
if (!account) {
return errorResponse(
@@ -2449,13 +2403,13 @@ export function createKiloAccountAddHandler(dbOps: DatabaseOperations) {
requestCount: account.request_count,
totalRequests: account.total_requests,
lastUsed: account.last_used
- ? new Date(account.last_used).toISOString()
+ ? new Date(Number(account.last_used)).toISOString()
: null,
- created: new Date(account.created_at).toISOString(),
- paused: account.paused === 1,
+ created: new Date(Number(account.created_at)).toISOString(),
+ paused: !!account.paused,
priority: priority,
tokenStatus: "valid" as const,
- tokenExpiresAt: new Date(account.expires_at).toISOString(),
+ tokenExpiresAt: new Date(Number(account.expires_at)).toISOString(),
rateLimitStatus: "OK",
rateLimitReset: null,
rateLimitRemaining: null,
@@ -2540,8 +2494,8 @@ export function createOpenRouterAccountAddHandler(dbOps: DatabaseOperations) {
// Create OpenRouter account in database
const accountId = crypto.randomUUID();
const now = Date.now();
- const db = dbOps.getDatabase();
- db.run(
+ const adapter = dbOps.getAdapter();
+ await adapter.run(
`INSERT INTO accounts (
id, name, provider, api_key, refresh_token, access_token,
expires_at, created_at, request_count, total_requests, priority, custom_endpoint, model_mappings
@@ -2567,29 +2521,25 @@ export function createOpenRouterAccountAddHandler(dbOps: DatabaseOperations) {
`Successfully added OpenRouter account: ${name} (Priority ${priority})`,
);
- const account = db
- .query<
- {
- id: string;
- name: string;
- provider: string;
- request_count: number;
- total_requests: number;
- last_used: number | null;
- created_at: number;
- expires_at: number;
- refresh_token: string;
- paused: number;
- },
- [string]
- >(
- `SELECT
- id, name, provider, request_count, total_requests,
- last_used, created_at, expires_at, refresh_token,
- COALESCE(paused, 0) as paused
- FROM accounts WHERE id = ?`,
- )
- .get(accountId);
+ const account = await adapter.get<{
+ id: string;
+ name: string;
+ provider: string;
+ request_count: number;
+ total_requests: number;
+ last_used: number | null;
+ created_at: number;
+ expires_at: number;
+ refresh_token: string;
+ paused: number;
+ }>(
+ `SELECT
+ id, name, provider, request_count, total_requests,
+ last_used, created_at, expires_at, refresh_token,
+ COALESCE(paused, 0) as paused
+ FROM accounts WHERE id = ?`,
+ [accountId],
+ );
if (!account) {
return errorResponse(
@@ -2606,13 +2556,13 @@ export function createOpenRouterAccountAddHandler(dbOps: DatabaseOperations) {
requestCount: account.request_count,
totalRequests: account.total_requests,
lastUsed: account.last_used
- ? new Date(account.last_used).toISOString()
+ ? new Date(Number(account.last_used)).toISOString()
: null,
- created: new Date(account.created_at).toISOString(),
- paused: account.paused === 1,
+ created: new Date(Number(account.created_at)).toISOString(),
+ paused: !!account.paused,
priority: priority,
tokenStatus: "valid" as const,
- tokenExpiresAt: new Date(account.expires_at).toISOString(),
+ tokenExpiresAt: new Date(Number(account.expires_at)).toISOString(),
rateLimitStatus: "OK",
rateLimitReset: null,
rateLimitRemaining: null,
diff --git a/packages/http-api/src/handlers/analytics.ts b/packages/http-api/src/handlers/analytics.ts
index 1a0ae67..6481da0 100644
--- a/packages/http-api/src/handlers/analytics.ts
+++ b/packages/http-api/src/handlers/analytics.ts
@@ -58,7 +58,7 @@ function getRangeConfig(range: string): {
export function createAnalyticsHandler(context: APIContext) {
return async (params: URLSearchParams): Promise<Response> => {
- const { db } = context;
+ const db = context.dbOps.getAdapter();
const range = params.get("range") ?? "24h";
const { startMs, bucket } = getRangeConfig(range);
const mode = params.get("mode") ?? "normal";
@@ -104,9 +104,9 @@ export function createAnalyticsHandler(context: APIContext) {
}
if (statusFilter === "success") {
- conditions.push("success = 1");
+ conditions.push("success = TRUE");
} else if (statusFilter === "error") {
- conditions.push("success = 0");
+ conditions.push("success = FALSE");
}
const whereClause = conditions.join(" AND ");
@@ -117,7 +117,19 @@ export function createAnalyticsHandler(context: APIContext) {
// Consolidated query to get all analytics data in a single roundtrip
// Using CTEs to compute multiple metrics efficiently
- const consolidatedQuery = db.prepare(`
+ const consolidatedResult = await db.get<{
+ total_requests: number;
+ success_rate: number;
+ avg_response_time: number;
+ total_tokens: number;
+ total_cost_usd: number;
+ avg_tokens_per_second: number;
+ active_accounts: number;
+ input_tokens: number;
+ cache_read_input_tokens: number;
+ cache_creation_input_tokens: number;
+ output_tokens: number;
+ }>(`
WITH
-- Base filtered data
filtered_requests AS (
@@ -132,7 +144,7 @@ export function createAnalyticsHandler(context: APIContext) {
COUNT(*) as requests,
SUM(COALESCE(total_tokens, 0)) as tokens,
SUM(COALESCE(cost_usd, 0)) as cost_usd,
- SUM(CASE WHEN success = 1 THEN 1 ELSE 0 END) as successful_requests,
+ SUM(CASE WHEN success = TRUE THEN 1 ELSE 0 END) as successful_requests,
SUM(COALESCE(cache_read_input_tokens, 0)) as cache_read_input,
SUM(COALESCE(input_tokens, 0)) as input_tokens_sum,
SUM(COALESCE(cache_creation_input_tokens, 0)) as cache_creation_input,
@@ -160,11 +172,11 @@ export function createAnalyticsHandler(context: APIContext) {
SELECT
COALESCE(a.name, ?) as name,
COUNT(r.id) as requests,
- SUM(CASE WHEN r.success = 1 THEN 1 ELSE 0 END) as successful_requests
+ SUM(CASE WHEN r.success = TRUE THEN 1 ELSE 0 END) as successful_requests
FROM filtered_requests r
LEFT JOIN accounts a ON a.id = r.account_used
GROUP BY name
- HAVING requests > 0
+ HAVING COUNT(r.id) > 0
ORDER BY requests DESC
),
-- API key performance
@@ -173,11 +185,11 @@ export function createAnalyticsHandler(context: APIContext) {
api_key_id as id,
api_key_name as name,
COUNT(*) as requests,
- SUM(CASE WHEN success = 1 THEN 1 ELSE 0 END) as successful_requests
+ SUM(CASE WHEN success = TRUE THEN 1 ELSE 0 END) as successful_requests
FROM filtered_requests
WHERE api_key_id IS NOT NULL
GROUP BY api_key_id, api_key_name
- HAVING requests > 0
+ HAVING COUNT(*) > 0
ORDER BY requests DESC
),
-- Model performance metrics
@@ -187,7 +199,7 @@ export function createAnalyticsHandler(context: APIContext) {
AVG(response_time_ms) as avg_response_time,
MAX(response_time_ms) as max_response_time,
COUNT(*) as total_requests,
- SUM(CASE WHEN success = 0 THEN 1 ELSE 0 END) as error_count,
+ SUM(CASE WHEN success = FALSE THEN 1 ELSE 0 END) as error_count,
AVG(output_tokens_per_second) as avg_tokens_per_second,
MIN(CASE WHEN output_tokens_per_second > 0 THEN output_tokens_per_second ELSE NULL END) as min_tokens_per_second,
MAX(output_tokens_per_second) as max_tokens_per_second
@@ -213,7 +225,7 @@ export function createAnalyticsHandler(context: APIContext) {
SELECT
-- Totals
(SELECT COUNT(*) FROM filtered_requests) as total_requests,
- (SELECT SUM(CASE WHEN success = 1 THEN 1 ELSE 0 END) * 100.0 / NULLIF(COUNT(*), 0) FROM filtered_requests) as success_rate,
+ (SELECT SUM(CASE WHEN success = TRUE THEN 1 ELSE 0 END) * 100.0 / NULLIF(COUNT(*), 0) FROM filtered_requests) as success_rate,
(SELECT AVG(response_time_ms) FROM filtered_requests) as avg_response_time,
(SELECT SUM(COALESCE(total_tokens, 0)) FROM filtered_requests) as total_tokens,
(SELECT SUM(COALESCE(cost_usd, 0)) FROM filtered_requests) as total_cost_usd,
@@ -224,41 +236,35 @@ export function createAnalyticsHandler(context: APIContext) {
(SELECT SUM(COALESCE(cache_read_input_tokens, 0)) FROM filtered_requests) as cache_read_input_tokens,
(SELECT SUM(COALESCE(cache_creation_input_tokens, 0)) FROM filtered_requests) as cache_creation_input_tokens,
(SELECT SUM(COALESCE(output_tokens, 0)) FROM filtered_requests) as output_tokens
- `);
-
- const consolidatedResult = consolidatedQuery.get(
+ `, [
+ ...queryParams,
bucket.bucketMs,
bucket.bucketMs,
NO_ACCOUNT_ID,
NO_ACCOUNT_ID,
- ...queryParams,
- ) as {
- total_requests: number;
- success_rate: number;
- avg_response_time: number;
- total_tokens: number;
- total_cost_usd: number;
- avg_tokens_per_second: number;
- active_accounts: number;
- input_tokens: number;
- cache_read_input_tokens: number;
- cache_creation_input_tokens: number;
- output_tokens: number;
- };
-
- // Finalize prepared statement to prevent memory leak
- consolidatedQuery.finalize();
+ ]);
// Get remaining data that couldn't be consolidated
- const timeSeriesQuery = db.prepare(`
+ const timeSeries = await db.query<{
+ ts: number;
+ model?: string;
+ requests: number;
+ tokens: number;
+ cost_usd: number;
+ success_rate: number;
+ error_rate: number;
+ cache_hit_rate: number;
+ avg_response_time: number;
+ avg_tokens_per_second: number | null;
+ }>(`
SELECT
(timestamp / ?) * ? as ts,
${includeModelBreakdown ? "model," : ""}
COUNT(*) as requests,
SUM(COALESCE(total_tokens, 0)) as tokens,
SUM(COALESCE(cost_usd, 0)) as cost_usd,
- SUM(CASE WHEN success = 1 THEN 1 ELSE 0 END) * 100.0 / NULLIF(COUNT(*), 0) as success_rate,
- SUM(CASE WHEN success = 0 THEN 1 ELSE 0 END) * 100.0 / NULLIF(COUNT(*), 0) as error_rate,
+ SUM(CASE WHEN success = TRUE THEN 1 ELSE 0 END) * 100.0 / NULLIF(COUNT(*), 0) as success_rate,
+ SUM(CASE WHEN success = FALSE THEN 1 ELSE 0 END) * 100.0 / NULLIF(COUNT(*), 0) as error_rate,
SUM(COALESCE(cache_read_input_tokens, 0)) * 100.0 /
NULLIF(SUM(COALESCE(input_tokens, 0) + COALESCE(cache_read_input_tokens, 0) + COALESCE(cache_creation_input_tokens, 0)), 0) as cache_hit_rate,
AVG(response_time_ms) as avg_response_time,
@@ -267,38 +273,31 @@ export function createAnalyticsHandler(context: APIContext) {
WHERE ${whereClause} ${includeModelBreakdown ? "AND model IS NOT NULL" : ""}
GROUP BY ts${includeModelBreakdown ? ", model" : ""}
ORDER BY ts${includeModelBreakdown ? ", model" : ""}
- `);
- const timeSeries = timeSeriesQuery.all(
+ `, [
bucket.bucketMs,
bucket.bucketMs,
...queryParams,
- ) as Array<{
- ts: number;
- model?: string;
- requests: number;
- tokens: number;
- cost_usd: number;
- success_rate: number;
- error_rate: number;
- cache_hit_rate: number;
- avg_response_time: number;
- avg_tokens_per_second: number | null;
- }>;
-
- // Finalize prepared statement
- timeSeriesQuery.finalize();
+ ]);
// Get model distribution, account performance, and cost by model in parallel queries
- const additionalDataQuery = db.prepare(`
+ const additionalData = await db.query<{
+ data_type: string;
+ name: string;
+ count: number | null;
+ requests: number | null;
+ success_rate: number | null;
+ cost_usd: number | null;
+ total_tokens: number | null;
+ }>(`
SELECT * FROM (
SELECT
'model_distribution' as data_type,
model as name,
COUNT(*) as count,
- NULL as requests,
- NULL as success_rate,
- NULL as cost_usd,
- NULL as total_tokens
+ CAST(NULL AS BIGINT) as requests,
+ CAST(NULL AS DOUBLE PRECISION) as success_rate,
+ CAST(NULL AS DOUBLE PRECISION) as cost_usd,
+ CAST(NULL AS BIGINT) as total_tokens
FROM requests r
WHERE ${whereClause} AND model IS NOT NULL
GROUP BY model
@@ -312,16 +311,16 @@ export function createAnalyticsHandler(context: APIContext) {
SELECT
'account_performance' as data_type,
COALESCE(a.name, ?) as name,
- NULL as count,
+ CAST(NULL AS BIGINT) as count,
COUNT(r.id) as requests,
- SUM(CASE WHEN r.success = 1 THEN 1 ELSE 0 END) * 100.0 / NULLIF(COUNT(r.id), 0) as success_rate,
- NULL as cost_usd,
- NULL as total_tokens
+ SUM(CASE WHEN r.success = TRUE THEN 1 ELSE 0 END) * 100.0 / NULLIF(COUNT(r.id), 0) as success_rate,
+ CAST(NULL AS DOUBLE PRECISION) as cost_usd,
+ CAST(NULL AS BIGINT) as total_tokens
FROM requests r
LEFT JOIN accounts a ON a.id = r.account_used
WHERE ${whereClause}
GROUP BY name
- HAVING requests > 0
+ HAVING COUNT(r.id) > 0
ORDER BY requests DESC
LIMIT 10
)
@@ -332,9 +331,9 @@ export function createAnalyticsHandler(context: APIContext) {
SELECT
'cost_by_model' as data_type,
model as name,
- NULL as count,
+ CAST(NULL AS BIGINT) as count,
COUNT(*) as requests,
- NULL as success_rate,
+ CAST(NULL AS DOUBLE PRECISION) as success_rate,
SUM(COALESCE(cost_usd, 0)) as cost_usd,
SUM(COALESCE(total_tokens, 0)) as total_tokens
FROM requests r
@@ -350,75 +349,74 @@ export function createAnalyticsHandler(context: APIContext) {
SELECT
'api_key_performance' as data_type,
api_key_name as name,
- NULL as count,
+ CAST(NULL AS BIGINT) as count,
COUNT(*) as requests,
- SUM(CASE WHEN success = 1 THEN 1 ELSE 0 END) * 100.0 / NULLIF(COUNT(*), 0) as success_rate,
- NULL as cost_usd,
- NULL as total_tokens
+ SUM(CASE WHEN success = TRUE THEN 1 ELSE 0 END) * 100.0 / NULLIF(COUNT(*), 0) as success_rate,
+ CAST(NULL AS DOUBLE PRECISION) as cost_usd,
+ CAST(NULL AS BIGINT) as total_tokens
FROM requests r
WHERE ${whereClause} AND api_key_id IS NOT NULL
GROUP BY api_key_id, api_key_name
- HAVING requests > 0
+ HAVING COUNT(*) > 0
ORDER BY requests DESC
LIMIT 10
)
- `);
-
- const additionalData = additionalDataQuery.all(
+ `, [
...queryParams, // First subquery params
NO_ACCOUNT_ID,
...queryParams, // Second subquery params (account performance)
...queryParams, // Third subquery params (cost by model)
...queryParams, // Fourth subquery params (API key performance)
- ) as Array<{
- data_type: string;
- name: string;
- count: number | null;
- requests: number | null;
- success_rate: number | null;
- cost_usd: number | null;
- total_tokens: number | null;
- }>;
+ ]);
// Parse the combined results
+ // Number() coercion — PG returns BIGINT/COUNT/SUM as strings
const modelDistribution = additionalData
.filter((row) => row.data_type === "model_distribution")
.map((row) => ({
model: row.name,
- count: row.count || 0,
+ count: Number(row.count) || 0,
}));
const accountPerformance = additionalData
.filter((row) => row.data_type === "account_performance")
.map((row) => ({
name: row.name,
- requests: row.requests || 0,
- successRate: row.success_rate || 0,
+ requests: Number(row.requests) || 0,
+ successRate: Number(row.success_rate) || 0,
}));
const costByModel = additionalData
.filter((row) => row.data_type === "cost_by_model")
.map((row) => ({
model: row.name,
- costUsd: row.cost_usd || 0,
- requests: row.requests || 0,
- totalTokens: row.total_tokens || 0,
+ costUsd: Number(row.cost_usd) || 0,
+ requests: Number(row.requests) || 0,
+ totalTokens: Number(row.total_tokens) || 0,
}));
const apiKeyPerformance = additionalData
.filter((row) => row.data_type === "api_key_performance")
.map((row) => ({
- id: row.name, // API key name used as id for now
+ id: row.name,
name: row.name,
- requests: row.requests || 0,
- successRate: row.success_rate || 0,
+ requests: Number(row.requests) || 0,
+ successRate: Number(row.success_rate) || 0,
}));
- // Finalize prepared statement
- additionalDataQuery.finalize();
-
// Get model performance metrics
- const modelPerfQuery = db.prepare(`
+ const modelPerfData = await db.query<{
+ model: string;
+ avg_response_time: number;
+ max_response_time: number;
+ total_requests: number;
+ error_count: number;
+ error_rate: number;
+ avg_tokens_per_second: number | null;
+ p95_response_time: number | null;
+ min_tokens_per_second: number | null;
+ max_tokens_per_second: number | null;
+ }>(`
WITH filtered AS (
SELECT
model,
@@ -447,8 +445,8 @@ export function createAnalyticsHandler(context: APIContext) {
AVG(response_time_ms) as avg_response_time,
MAX(response_time_ms) as max_response_time,
COUNT(*) as total_requests,
- SUM(CASE WHEN success = 0 THEN 1 ELSE 0 END) as error_count,
- SUM(CASE WHEN success = 0 THEN 1 ELSE 0 END) * 100.0 / NULLIF(COUNT(*), 0) as error_rate,
+ SUM(CASE WHEN success = FALSE THEN 1 ELSE 0 END) as error_count,
+ SUM(CASE WHEN success = FALSE THEN 1 ELSE 0 END) * 100.0 / NULLIF(COUNT(*), 0) as error_rate,
AVG(output_tokens_per_second) as avg_tokens_per_second,
MIN(CASE WHEN pr >= 0.95 THEN response_time_ms END) as p95_response_time,
MIN(CASE WHEN output_tokens_per_second > 0 THEN output_tokens_per_second ELSE NULL END) as min_tokens_per_second,
@@ -457,48 +455,34 @@ export function createAnalyticsHandler(context: APIContext) {
GROUP BY model
ORDER BY total_requests DESC
LIMIT 10
- `);
- const modelPerfData = modelPerfQuery.all(...queryParams) as Array<{
- model: string;
- avg_response_time: number;
- max_response_time: number;
- total_requests: number;
- error_count: number;
- error_rate: number;
- avg_tokens_per_second: number | null;
- p95_response_time: number | null;
- min_tokens_per_second: number | null;
- max_tokens_per_second: number | null;
- }>;
-
- modelPerfQuery.finalize();
+ `, queryParams);
const modelPerformance = modelPerfData.map((modelData) => ({
model: modelData.model,
- avgResponseTime: modelData.avg_response_time || 0,
+ avgResponseTime: Number(modelData.avg_response_time) || 0,
p95ResponseTime:
- modelData.p95_response_time ||
- modelData.max_response_time ||
- modelData.avg_response_time ||
+ Number(modelData.p95_response_time) ||
+ Number(modelData.max_response_time) ||
+ Number(modelData.avg_response_time) ||
0,
- errorRate: modelData.error_rate || 0,
- avgTokensPerSecond: modelData.avg_tokens_per_second || null,
- minTokensPerSecond: modelData.min_tokens_per_second || null,
- maxTokensPerSecond: modelData.max_tokens_per_second || null,
+ errorRate: Number(modelData.error_rate) || 0,
+ avgTokensPerSecond: modelData.avg_tokens_per_second != null ? Number(modelData.avg_tokens_per_second) : null,
+ minTokensPerSecond: modelData.min_tokens_per_second != null ? Number(modelData.min_tokens_per_second) : null,
+ maxTokensPerSecond: modelData.max_tokens_per_second != null ? Number(modelData.max_tokens_per_second) : null,
}));
- // Transform timeSeries data
+ // Transform timeSeries data — Number() coercion for PG BIGINT strings
let transformedTimeSeries = timeSeries.map((point) => ({
- ts: point.ts,
+ ts: Number(point.ts),
...(point.model && { model: point.model }),
- requests: point.requests || 0,
- tokens: point.tokens || 0,
- costUsd: point.cost_usd || 0,
- successRate: point.success_rate || 0,
- errorRate: point.error_rate || 0,
- cacheHitRate: point.cache_hit_rate || 0,
- avgResponseTime: point.avg_response_time || 0,
- avgTokensPerSecond: point.avg_tokens_per_second || null,
+ requests: Number(point.requests) || 0,
+ tokens: Number(point.tokens) || 0,
+ costUsd: Number(point.cost_usd) || 0,
+ successRate: Number(point.success_rate) || 0,
+ errorRate: Number(point.error_rate) || 0,
+ cacheHitRate: Number(point.cache_hit_rate) || 0,
+ avgResponseTime: Number(point.avg_response_time) || 0,
+ avgTokensPerSecond: point.avg_tokens_per_second != null ? Number(point.avg_tokens_per_second) : null,
}));
// Apply cumulative transformation if requested
@@ -551,6 +535,7 @@ export function createAnalyticsHandler(context: APIContext) {
});
}
+ // Number() coercion — PG returns BIGINT/COUNT/SUM as strings
const response: AnalyticsResponse = {
meta: {
range,
@@ -558,22 +543,22 @@ export function createAnalyticsHandler(context: APIContext) {
cumulative: isCumulative,
},
totals: {
- requests: consolidatedResult?.total_requests || 0,
- successRate: consolidatedResult?.success_rate || 0,
- activeAccounts: consolidatedResult?.active_accounts || 0,
- avgResponseTime: consolidatedResult?.avg_response_time || 0,
- totalTokens: consolidatedResult?.total_tokens || 0,
- totalCostUsd: consolidatedResult?.total_cost_usd || 0,
- avgTokensPerSecond: consolidatedResult?.avg_tokens_per_second || null,
+ requests: Number(consolidatedResult?.total_requests) || 0,
+ successRate: Number(consolidatedResult?.success_rate) || 0,
+ activeAccounts: Number(consolidatedResult?.active_accounts) || 0,
+ avgResponseTime: Number(consolidatedResult?.avg_response_time) || 0,
+ totalTokens: Number(consolidatedResult?.total_tokens) || 0,
+ totalCostUsd: Number(consolidatedResult?.total_cost_usd) || 0,
+ avgTokensPerSecond: consolidatedResult?.avg_tokens_per_second != null ? Number(consolidatedResult.avg_tokens_per_second) : null,
},
timeSeries: transformedTimeSeries,
tokenBreakdown: {
- inputTokens: consolidatedResult?.input_tokens || 0,
+ inputTokens: Number(consolidatedResult?.input_tokens) || 0,
cacheReadInputTokens:
- consolidatedResult?.cache_read_input_tokens || 0,
+ Number(consolidatedResult?.cache_read_input_tokens) || 0,
cacheCreationInputTokens:
- consolidatedResult?.cache_creation_input_tokens || 0,
- outputTokens: consolidatedResult?.output_tokens || 0,
+ Number(consolidatedResult?.cache_creation_input_tokens) || 0,
+ outputTokens: Number(consolidatedResult?.output_tokens) || 0,
},
modelDistribution,
accountPerformance,
diff --git a/packages/http-api/src/handlers/health.ts b/packages/http-api/src/handlers/health.ts
index e10a264..3782f3b 100644
--- a/packages/http-api/src/handlers/health.ts
+++ b/packages/http-api/src/handlers/health.ts
@@ -1,4 +1,4 @@
-import type { Database } from "bun:sqlite";
+import type { BunSqlAdapter } from "@better-ccflare/database";
import type { Config } from "@better-ccflare/config";
import { jsonResponse } from "@better-ccflare/http-common";
import type { HealthResponse } from "../types";
@@ -6,11 +6,11 @@ import type { HealthResponse } from "../types";
/**
* Create a health check handler
*/
-export function createHealthHandler(db: Database, config: Config) {
- return (): Response => {
- const accountCount = db
- .query("SELECT COUNT(*) as count FROM accounts")
- .get() as { count: number } | undefined;
+export function createHealthHandler(db: BunSqlAdapter, config: Config) {
+ return async (): Promise<Response> => {
+ const accountCount = await db.get<{ count: number }>(
+ "SELECT COUNT(*) as count FROM accounts",
+ );
const response: HealthResponse = {
status: "ok",
diff --git a/packages/http-api/src/handlers/requests.ts b/packages/http-api/src/handlers/requests.ts
index 2645ffc..c4ae37f 100644
--- a/packages/http-api/src/handlers/requests.ts
+++ b/packages/http-api/src/handlers/requests.ts
@@ -1,5 +1,4 @@
-import type { Database } from "bun:sqlite";
-import type { DatabaseOperations } from "@better-ccflare/database";
+import type { BunSqlAdapter, DatabaseOperations } from "@better-ccflare/database";
import { jsonResponse } from "@better-ccflare/http-common";
import type { RequestResponse } from "../types";
@@ -31,19 +30,9 @@ function truncateBase64(body: unknown): {
/**
* Create a requests summary handler (existing functionality)
*/
-export function createRequestsSummaryHandler(db: Database) {
- return (limit: number = 50): Response => {
- const requests = db
- .query(
- `
- SELECT r.*, a.name as account_name
- FROM requests r
- LEFT JOIN accounts a ON r.account_used = a.id
- ORDER BY r.timestamp DESC
- LIMIT ?1
- `,
- )
- .all(limit) as Array<{
+export function createRequestsSummaryHandler(db: BunSqlAdapter) {
+ return async (limit: number = 50): Promise<Response> => {
+ const requests = await db.query<{
id: string;
timestamp: number;
method: string;
@@ -68,16 +57,22 @@ export function createRequestsSummaryHandler(db: Database) {
output_tokens_per_second: number | null;
api_key_id: string | null;
api_key_name: string | null;
- }>;
+ }>(`
+ SELECT r.*, a.name as account_name
+ FROM requests r
+ LEFT JOIN accounts a ON r.account_used = a.id
+ ORDER BY r.timestamp DESC
+		LIMIT ?
+ `, [limit]);
const response: RequestResponse[] = requests.map((request) => ({
id: request.id,
- timestamp: new Date(request.timestamp).toISOString(),
+ timestamp: new Date(Number(request.timestamp)).toISOString(),
method: request.method,
path: request.path,
accountUsed: request.account_name || request.account_used,
statusCode: request.status_code,
- success: request.success === 1,
+ success: !!request.success,
errorMessage: request.error_message,
responseTimeMs: request.response_time_ms,
failoverAttempts: request.failover_attempts,
diff --git a/packages/http-api/src/router.ts b/packages/http-api/src/router.ts
index 6f3a3e0..8cff1c5 100644
--- a/packages/http-api/src/router.ts
+++ b/packages/http-api/src/router.ts
@@ -97,7 +97,7 @@ export class APIRouter {
const healthHandler = createHealthHandler(db, config);
const statsHandler = createStatsHandler(dbOps);
const statsResetHandler = createStatsResetHandler(dbOps);
- const accountsHandler = createAccountsListHandler(db);
+ const accountsHandler = createAccountsListHandler(dbOps);
const accountAddHandler = createAccountAddHandler(dbOps, config);
const zaiAccountAddHandler = createZaiAccountAddHandler(dbOps);
const minimaxAccountAddHandler = createMinimaxAccountAddHandler(dbOps);
@@ -112,7 +112,7 @@ export class APIRouter {
createAnthropicCompatibleAccountAddHandler(dbOps);
const openaiAccountAddHandler = createOpenAIAccountAddHandler(dbOps);
const _accountRemoveHandler = createAccountRemoveHandler(dbOps);
- const requestsSummaryHandler = createRequestsSummaryHandler(db);
+ const requestsSummaryHandler = createRequestsSummaryHandler(db); // db is now BunSqlAdapter
const requestsDetailHandler = createRequestsDetailHandler(dbOps);
const configHandlers = createConfigHandlers(config, this.context.runtime);
const logsStreamHandler = createLogsStreamHandler();
diff --git a/packages/proxy/src/auto-refresh-scheduler.ts b/packages/proxy/src/auto-refresh-scheduler.ts
index 7f64826..edbe1ef 100644
--- a/packages/proxy/src/auto-refresh-scheduler.ts
+++ b/packages/proxy/src/auto-refresh-scheduler.ts
@@ -1,4 +1,4 @@
-import type { Database } from "bun:sqlite";
+import type { BunSqlAdapter } from "@better-ccflare/database";
import {
CLAUDE_MODEL_IDS,
getClientVersion,
@@ -18,7 +18,7 @@ const log = new Logger("AutoRefreshScheduler");
* and sends dummy messages when their usage window resets
*/
export class AutoRefreshScheduler {
- private db: Database;
+ private db: BunSqlAdapter;
private proxyContext: ProxyContext;
private unregisterInterval: (() => void) | null = null;
private checkInterval = 60000; // Check every minute
@@ -33,7 +33,7 @@ export class AutoRefreshScheduler {
// Threshold for marking an account as needing re-authentication
private readonly FAILURE_THRESHOLD = 5;
- constructor(db: Database, proxyContext: ProxyContext) {
+ constructor(db: BunSqlAdapter, proxyContext: ProxyContext) {
this.db = db;
this.proxyContext = proxyContext;
}
@@ -102,22 +102,19 @@ export class AutoRefreshScheduler {
// Periodically clean up the tracking map - remove entries for accounts that no longer exist
// or have auto-refresh disabled
- this.cleanupTracking();
+ await this.cleanupTracking();
// Get all accounts with auto-refresh enabled that have reset windows OR need immediate refresh
- const query = this.db.query<
- {
- id: string;
- name: string;
- provider: string;
- refresh_token: string;
- access_token: string | null;
- expires_at: number | null;
- rate_limit_reset: number | null;
- custom_endpoint: string | null;
- },
- [number, number]
- >(
+ const accounts = await this.db.query<{
+ id: string;
+ name: string;
+ provider: string;
+ refresh_token: string;
+ access_token: string | null;
+ expires_at: number | null;
+ rate_limit_reset: number | null;
+ custom_endpoint: string | null;
+ }>(
`
SELECT
id, name, provider, refresh_token, access_token,
@@ -132,10 +129,9 @@ export class AutoRefreshScheduler {
OR rate_limit_reset < (? - 24 * 60 * 60 * 1000) -- Reset time is more than 24h old (stale)
)
`,
+ [now, now],
);
- const accounts = query.all(now, now);
-
log.debug(
`Auto-refresh check found ${accounts.length} account(s) to consider`,
);
@@ -413,7 +409,7 @@ export class AutoRefreshScheduler {
);
// Mark account as needing attention in database (disable auto-refresh to prevent repeated failures)
- this.db.run(
+ await this.db.run(
-			`UPDATE accounts SET auto_refresh_enabled = 0 WHERE id = ?`,
+			`UPDATE accounts SET auto_refresh_enabled = FALSE WHERE id = ?`,
[accountRow.id],
);
@@ -449,7 +445,7 @@ export class AutoRefreshScheduler {
// Update rate limit fields from unified headers
if (rateLimitInfo.resetTime) {
- this.db.run(
+ await this.db.run(
"UPDATE accounts SET rate_limit_reset = ?, rate_limited_until = NULL WHERE id = ?",
[rateLimitInfo.resetTime, accountRow.id],
);
@@ -466,7 +462,7 @@ export class AutoRefreshScheduler {
} else {
// Even if no reset time is provided, clear rate_limited_until as the refresh was successful
// Also make sure to clear any existing rate_limited_until value to ensure the account is not stuck
- this.db.run(
+ await this.db.run(
"UPDATE accounts SET rate_limited_until = NULL WHERE id = ?",
[accountRow.id],
);
@@ -476,7 +472,7 @@ export class AutoRefreshScheduler {
}
if (rateLimitInfo.statusHeader) {
- this.db.run(
+ await this.db.run(
"UPDATE accounts SET rate_limit_status = ? WHERE id = ?",
[rateLimitInfo.statusHeader, accountRow.id],
);
@@ -486,7 +482,7 @@ export class AutoRefreshScheduler {
}
if (rateLimitInfo.remaining !== undefined) {
- this.db.run(
+ await this.db.run(
"UPDATE accounts SET rate_limit_remaining = ? WHERE id = ?",
[rateLimitInfo.remaining, accountRow.id],
);
@@ -598,7 +594,7 @@ export class AutoRefreshScheduler {
* Clean up the tracking map by removing entries for accounts that no longer exist
* or have auto-refresh disabled
*/
- private cleanupTracking(): void {
+ private async cleanupTracking(): Promise<void> {
try {
// Check if database is available
if (!this.db) {
@@ -607,11 +603,11 @@ export class AutoRefreshScheduler {
}
// Get all account IDs that have auto-refresh enabled
- const query = this.db.query<{ id: string }, []>(
+ const activeAccountRows = await this.db.query<{ id: string }>(
-			`SELECT id FROM accounts WHERE auto_refresh_enabled = 1 AND provider = 'anthropic'`,
+			`SELECT id FROM accounts WHERE auto_refresh_enabled = TRUE AND provider = 'anthropic'`,
);
- const activeAccountIds = query.all().map((row) => row.id);
+ const activeAccountIds = activeAccountRows.map((row) => row.id);
const activeAccountIdSet = new Set(activeAccountIds);
// Remove entries from the maps that are not in the active set
diff --git a/packages/proxy/src/handlers/response-processor.ts b/packages/proxy/src/handlers/response-processor.ts
index 22315bd..7b0f12e 100644
--- a/packages/proxy/src/handlers/response-processor.ts
+++ b/packages/proxy/src/handlers/response-processor.ts
@@ -56,12 +56,12 @@ export function updateAccountMetadata(
// Update basic usage (with optional bypass)
if (bypassSession) {
// Increment request count without updating session tracking
- ctx.asyncWriter.enqueue(() => {
+ ctx.asyncWriter.enqueue(async () => {
// Manually increment request count and total requests without touching session
- const db = ctx.dbOps.getDatabase();
+ const adapter = ctx.dbOps.getAdapter();
const now = Date.now();
- db.run(
- `UPDATE accounts
+ await adapter.run(
+ `UPDATE accounts
SET last_used = ?, request_count = request_count + 1, total_requests = total_requests + 1
WHERE id = ?`,
[now, account.id],
@@ -87,19 +87,18 @@ export function updateAccountMetadata(
} else {
// If there's no rate limit status header (meaning request was successful),
// clear the rate_limited_until field if it has expired
- ctx.asyncWriter.enqueue(() => {
- const db = ctx.dbOps.getDatabase();
- const result = db
- .query<{ rate_limited_until: number | null }, [string]>(
- "SELECT rate_limited_until FROM accounts WHERE id = ?",
- )
- .get(account.id);
+ ctx.asyncWriter.enqueue(async () => {
+ const adapter = ctx.dbOps.getAdapter();
+ const result = await adapter.get<{ rate_limited_until: number | null }>(
+ "SELECT rate_limited_until FROM accounts WHERE id = ?",
+ [account.id],
+ );
if (
result?.rate_limited_until &&
result.rate_limited_until < Date.now()
) {
- db.run("UPDATE accounts SET rate_limited_until = NULL WHERE id = ?", [
+ await adapter.run("UPDATE accounts SET rate_limited_until = NULL WHERE id = ?", [
account.id,
]);
log.debug(
@@ -222,21 +221,20 @@ export async function processProxyResponse(
// Clear rate_limited_until if the account was previously rate-limited but is now successful
if (!rateLimitInfo.isRateLimited) {
// Check if the account had a rate_limited_until value and clear it
- ctx.asyncWriter.enqueue(() => {
- const db = ctx.dbOps.getDatabase();
+ ctx.asyncWriter.enqueue(async () => {
+ const adapter = ctx.dbOps.getAdapter();
// Only clear rate_limited_until if it's in the past or null (meaning it was rate-limited before)
- const result = db
- .query<{ rate_limited_until: number | null }, [string]>(
- "SELECT rate_limited_until FROM accounts WHERE id = ?",
- )
- .get(account.id);
+ const result = await adapter.get<{ rate_limited_until: number | null }>(
+ "SELECT rate_limited_until FROM accounts WHERE id = ?",
+ [account.id],
+ );
if (result?.rate_limited_until) {
const now = Date.now();
// If the rate limit was in the past (already expired) or if we're just clearing it after success
// We clear it regardless if it's expired to ensure the account is no longer marked as rate-limited
if (result.rate_limited_until <= now) {
- db.run("UPDATE accounts SET rate_limited_until = NULL WHERE id = ?", [
+ await adapter.run("UPDATE accounts SET rate_limited_until = NULL WHERE id = ?", [
account.id,
]);
log.debug(
diff --git a/packages/proxy/src/post-processor.worker.ts b/packages/proxy/src/post-processor.worker.ts
index 7f0c4bc..57e916a 100644
--- a/packages/proxy/src/post-processor.worker.ts
+++ b/packages/proxy/src/post-processor.worker.ts
@@ -87,6 +87,10 @@ let tokenEncoder: Tiktoken | null = null;
// Initialize database connection for worker
const dbOps = new DatabaseOperations();
+// Ensure PG schema exists before any message is handled (no-op for SQLite)
+await dbOps.initializeAsync().catch((err) => {
+	console.error("[WORKER] Failed to initialize database:", err);
+});
const asyncWriter = new AsyncDbWriter();
// Environment variables
@@ -386,9 +390,9 @@ async function handleStart(msg: StartMessage): Promise<void> {
`Saving request meta for ${msg.requestId} (${msg.method} ${msg.path})`,
);
}
- asyncWriter.enqueue(() => {
+ asyncWriter.enqueue(async () => {
try {
- dbOps.saveRequestMeta(
+ await dbOps.saveRequestMeta(
msg.requestId,
msg.method,
msg.path,
@@ -590,8 +594,8 @@ async function handleEnd(msg: EndMessage): Promise<void> {
) {
log.debug(`Saving final request data for ${startMessage.requestId}`);
}
- asyncWriter.enqueue(() =>
- dbOps.saveRequest(
+ asyncWriter.enqueue(async () =>
+ await dbOps.saveRequest(
startMessage.requestId,
startMessage.method,
startMessage.path,
@@ -665,8 +669,8 @@ async function handleEnd(msg: EndMessage): Promise<void> {
state.buffer = "";
const requestId = startMessage.requestId;
- asyncWriter.enqueue(() =>
- dbOps.saveRequestPayloadRaw(requestId, payloadJson),
+ asyncWriter.enqueue(async () =>
+ await dbOps.saveRequestPayloadRaw(requestId, payloadJson),
);
// Log if we have usage
diff --git a/packages/types/src/account.ts b/packages/types/src/account.ts
index 109078d..175201d 100644
--- a/packages/types/src/account.ts
+++ b/packages/types/src/account.ts
@@ -220,6 +220,16 @@ export interface AccountDeleteRequest {
confirm: string;
}
+// Coerce PostgreSQL BIGINT (returned as string) to number
+function toNum(v: number | string | null | undefined): number {
+ if (v == null) return 0;
+ return typeof v === "string" ? Number(v) : v;
+}
+function toNumOrNull(v: number | string | null | undefined): number | null {
+ if (v == null) return null;
+ return typeof v === "string" ? Number(v) : v;
+}
+
// Type mappers
export function toAccount(row: AccountRow): Account {
return {
@@ -229,21 +239,21 @@ export function toAccount(row: AccountRow): Account {
api_key: row.api_key,
refresh_token: row.refresh_token,
access_token: row.access_token,
- expires_at: row.expires_at,
- created_at: row.created_at,
- last_used: row.last_used,
- request_count: row.request_count,
- total_requests: row.total_requests,
- rate_limited_until: row.rate_limited_until || null,
- session_start: row.session_start || null,
- session_request_count: row.session_request_count || 0,
- paused: row.paused === 1,
- rate_limit_reset: row.rate_limit_reset || null,
+ expires_at: toNumOrNull(row.expires_at),
+ created_at: toNum(row.created_at),
+ last_used: toNumOrNull(row.last_used),
+ request_count: toNum(row.request_count),
+ total_requests: toNum(row.total_requests),
+ rate_limited_until: toNumOrNull(row.rate_limited_until),
+ session_start: toNumOrNull(row.session_start),
+ session_request_count: toNum(row.session_request_count),
+ paused: !!row.paused,
+ rate_limit_reset: toNumOrNull(row.rate_limit_reset),
rate_limit_status: row.rate_limit_status || null,
- rate_limit_remaining: row.rate_limit_remaining || null,
- priority: row.priority || 0,
- auto_fallback_enabled: row.auto_fallback_enabled === 1,
- auto_refresh_enabled: row.auto_refresh_enabled === 1,
+ rate_limit_remaining: toNumOrNull(row.rate_limit_remaining),
+ priority: toNum(row.priority),
+ auto_fallback_enabled: !!row.auto_fallback_enabled,
+ auto_refresh_enabled: !!row.auto_refresh_enabled,
custom_endpoint: row.custom_endpoint || null,
model_mappings: row.model_mappings || null,
cross_region_mode: row.cross_region_mode || null,
diff --git a/packages/types/src/api-key.ts b/packages/types/src/api-key.ts
index ec88559..2d21fa6 100644
--- a/packages/types/src/api-key.ts
+++ b/packages/types/src/api-key.ts
@@ -135,9 +135,9 @@ export function toApiKey(row: ApiKeyRow): ApiKey {
name: row.name,
hashedKey: row.hashed_key,
prefixLast8: row.prefix_last_8,
- createdAt: row.created_at,
- lastUsed: row.last_used,
- usageCount: row.usage_count,
+ createdAt: Number(row.created_at),
+		lastUsed: row.last_used != null ? Number(row.last_used) : null,
+ usageCount: Number(row.usage_count),
-		isActive: row.is_active === 1,
+		isActive: !!row.is_active,
role: row.role,
};
diff --git a/packages/types/src/context.ts b/packages/types/src/context.ts
index b2eb4d4..bdfd0af 100644
--- a/packages/types/src/context.ts
+++ b/packages/types/src/context.ts
@@ -1,6 +1,5 @@
-import type { Database } from "bun:sqlite";
import type { Config } from "@better-ccflare/config";
-import type { DatabaseOperations } from "@better-ccflare/database";
+import type { BunSqlAdapter, DatabaseOperations } from "@better-ccflare/database";
import type { Account } from "./account";
import type { RequestMeta } from "./api";
import type { ApiKey } from "./api-key";
@@ -8,7 +7,7 @@ import type { StrategyStore } from "./strategy";
// API context for HTTP handlers
export interface APIContext {
- db: Database;
+ db: BunSqlAdapter;
config: Config;
dbOps: DatabaseOperations;
auth?: {
diff --git a/packages/types/src/request.ts b/packages/types/src/request.ts
index 06a6000..ab79c5b 100644
--- a/packages/types/src/request.ts
+++ b/packages/types/src/request.ts
@@ -116,26 +116,26 @@ export interface RequestPayload {
export function toRequest(row: RequestRow): Request {
return {
id: row.id,
- timestamp: row.timestamp,
+ timestamp: Number(row.timestamp),
method: row.method,
path: row.path,
accountUsed: row.account_used,
- statusCode: row.status_code,
- success: row.success === 1,
+ statusCode: row.status_code != null ? Number(row.status_code) : null,
+ success: !!row.success,
errorMessage: row.error_message,
- responseTimeMs: row.response_time_ms,
- failoverAttempts: row.failover_attempts,
+ responseTimeMs: row.response_time_ms != null ? Number(row.response_time_ms) : 0,
+ failoverAttempts: row.failover_attempts != null ? Number(row.failover_attempts) : 0,
model: row.model || undefined,
- promptTokens: row.prompt_tokens || undefined,
- completionTokens: row.completion_tokens || undefined,
- totalTokens: row.total_tokens || undefined,
- costUsd: row.cost_usd || undefined,
- inputTokens: row.input_tokens || undefined,
- cacheReadInputTokens: row.cache_read_input_tokens || undefined,
- cacheCreationInputTokens: row.cache_creation_input_tokens || undefined,
- outputTokens: row.output_tokens || undefined,
+ promptTokens: row.prompt_tokens ? Number(row.prompt_tokens) : undefined,
+ completionTokens: row.completion_tokens ? Number(row.completion_tokens) : undefined,
+ totalTokens: row.total_tokens ? Number(row.total_tokens) : undefined,
+ costUsd: row.cost_usd ? Number(row.cost_usd) : undefined,
+ inputTokens: row.input_tokens ? Number(row.input_tokens) : undefined,
+ cacheReadInputTokens: row.cache_read_input_tokens ? Number(row.cache_read_input_tokens) : undefined,
+ cacheCreationInputTokens: row.cache_creation_input_tokens ? Number(row.cache_creation_input_tokens) : undefined,
+ outputTokens: row.output_tokens ? Number(row.output_tokens) : undefined,
agentUsed: row.agent_used || undefined,
- tokensPerSecond: row.output_tokens_per_second || undefined,
+ tokensPerSecond: row.output_tokens_per_second ? Number(row.output_tokens_per_second) : undefined,
apiKeyId: row.api_key_id || undefined,
apiKeyName: row.api_key_name || undefined,
};
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment