fix: make compiled docker auth and sqlite runtime work

This commit is contained in:
2026-05-01 09:41:35 +09:00
parent 8656f237d4
commit 6df915b287
8 changed files with 297 additions and 8 deletions

View File

@@ -12,19 +12,24 @@ COPY packages/shared-types/package.json packages/shared-types/package.json
RUN bun install --frozen-lockfile
COPY . .
RUN bun run --filter @codexdash/web build
RUN bun run --filter @codexdash/api bundle
RUN mkdir -p /tmp/codexdash-runtime-data /tmp/codexdash-data-volume /tmp/codexdash-prisma \
&& cp /app/node_modules/.bun/@prisma+client@*/node_modules/.prisma/client/libquery_engine-*.so.node /tmp/codexdash-prisma/libquery_engine.so.node
FROM gcr.io/distroless/cc-debian12:nonroot
WORKDIR /app
ENV PORT=3001 \
WEB_DIST_DIR=/app/web \
CODEX_OAUTH_CALLBACK_BIND_HOST=0.0.0.0
CODEX_OAUTH_CALLBACK_BIND_HOST=0.0.0.0 \
PRISMA_QUERY_ENGINE_LIBRARY=/app/prisma/libquery_engine.so.node
COPY --from=builder /app/apps/api/dist/codexdash /app/codexdash
COPY --from=builder /app/apps/web/dist /app/web
COPY --from=builder --chown=65532:65532 /tmp/codexdash-runtime-data /home/processor/codexdash
COPY --from=builder --chown=65532:65532 /tmp/codexdash-data-volume /data
COPY --from=builder --chown=65532:65532 /app/apps/api/dist/codexdash /app/codexdash
COPY --from=builder --chown=65532:65532 /tmp/codexdash-prisma/libquery_engine.so.node /app/prisma/libquery_engine.so.node
COPY --from=builder --chown=65532:65532 /app/apps/web/dist /app/web
EXPOSE 3001 1455

View File

@@ -48,7 +48,9 @@ bun run dev:web -- --host 0.0.0.0
The production image uses a multi-stage build:
- `bun install` + frontend build in the builder stage
- `bun build --compile` to emit a single API executable at `apps/api/dist/codexdash`
- a distroless runtime image that only contains the compiled binary and the built web assets
- the Prisma query engine shared library copied alongside the binary so the compiled app can still talk to SQLite
- the container auto-bootstraps the SQLite schema for fresh `file:` databases before Prisma connects
- a distroless non-root runtime image that only contains the compiled binary, Prisma engine library, and the built web assets
Build the image:
@@ -76,6 +78,8 @@ Notes:
- The bundled frontend now defaults to the browser's current origin for API calls, so the production image can be deployed behind any host name without rebuilding the web bundle.
- `VITE_API_BASE_URL` is now optional and mainly useful for local development when Vite runs on a different origin than the API.
- `CODEX_OAUTH_CALLBACK_BIND_HOST=0.0.0.0` keeps the callback bridge reachable through Docker port publishing while the public redirect URL can still stay on `localhost:1455`.
- Fresh SQLite `file:` databases are initialized automatically on first boot, so a brand-new named volume can be used without running `prisma db push` inside the container.
- The image pre-creates writable `/data` and `/home/processor/codexdash` directories for non-root volume mounts, matching both the README example and the `processor` host-user bind/volume pattern.
- If the callback bridge is still unreachable in your setup, the manual callback URL paste fallback remains available.
## Environment variables

View File

@@ -4,8 +4,8 @@ import {
UnauthorizedException,
} from '@nestjs/common';
import { JwtService } from '@nestjs/jwt';
import * as argon2 from 'argon2';
import { AuthResponse, UserProfile } from '@codexdash/shared-types';
import { hashPassword, verifyPassword } from './password-hasher';
import { PrismaService } from '../prisma/prisma.service';
import { LoginDto } from './dto/login.dto';
import { RegisterDto } from './dto/register.dto';
@@ -30,7 +30,7 @@ export class AuthService {
data: {
email: dto.email.toLowerCase(),
name: dto.name.trim(),
passwordHash: await argon2.hash(dto.password),
passwordHash: await hashPassword(dto.password),
},
});
@@ -42,7 +42,7 @@ export class AuthService {
where: { email: dto.email.toLowerCase() },
});
if (!user || !(await argon2.verify(user.passwordHash, dto.password))) {
if (!user || !(await verifyPassword(user.passwordHash, dto.password))) {
throw new UnauthorizedException('Invalid email or password');
}

View File

@@ -0,0 +1,25 @@
import * as argon2 from 'argon2';
import { hashPassword, verifyPassword } from './password-hasher';
describe('password-hasher', () => {
  it('hashes passwords into an argon2id digest that can be verified', async () => {
    const plaintext = 'correct horse battery staple';
    const digest = await hashPassword(plaintext);
    // Both Bun.password and node-argon2 emit PHC-formatted argon2id digests.
    expect(digest.startsWith('$argon2id$')).toBe(true);
    expect(await verifyPassword(digest, plaintext)).toBe(true);
    expect(await verifyPassword(digest, 'wrong password')).toBe(false);
  });

  it('verifies legacy node-argon2 digests', async () => {
    // Digests minted by node-argon2 before the Bun-native path existed
    // must keep verifying through the new hasher.
    const legacyDigest = await argon2.hash('legacy secret');
    expect(await verifyPassword(legacyDigest, 'legacy secret')).toBe(true);
    expect(await verifyPassword(legacyDigest, 'wrong password')).toBe(false);
  });
});

View File

@@ -0,0 +1,42 @@
// Structural typing for Bun's built-in password API so this module
// compiles under Node/tsc without Bun's ambient declarations.
type BunPasswordApi = {
  hash(password: string, options?: { algorithm?: 'argon2id' }): Promise<string>;
  verify(password: string, digest: string): Promise<boolean>;
};

/**
 * Returns Bun's native password API when running under Bun,
 * or undefined under Node (tests, local dev).
 */
function getBunPasswordApi(): BunPasswordApi | undefined {
  const maybeBun = (
    globalThis as typeof globalThis & { Bun?: { password?: BunPasswordApi } }
  ).Bun;
  return maybeBun?.password;
}
// Lazily loads the node-argon2 native addon via require(); only reached
// when Bun.password is unavailable, so the Bun single-file compile never
// needs to bundle the addon. Throws if argon2 is not installed.
function loadArgon2(): typeof import('argon2') {
  // eslint-disable-next-line @typescript-eslint/no-require-imports
  return require('argon2') as typeof import('argon2');
}
/**
 * Hashes a plaintext password into an argon2id digest.
 * Prefers Bun's native password API (available in the compiled binary);
 * falls back to the node-argon2 package under Node.
 */
export async function hashPassword(password: string): Promise<string> {
  const native = getBunPasswordApi();
  if (native !== undefined) {
    return native.hash(password, { algorithm: 'argon2id' });
  }
  return loadArgon2().hash(password);
}
/**
 * Verifies a plaintext password against a stored argon2 digest.
 * Uses Bun's native API when available, otherwise node-argon2 —
 * note the two APIs take (password, digest) vs (digest, password).
 */
export async function verifyPassword(
  digest: string,
  password: string,
): Promise<boolean> {
  const native = getBunPasswordApi();
  return native !== undefined
    ? native.verify(password, digest)
    : loadArgon2().verify(digest, password);
}

View File

@@ -1,5 +1,6 @@
import { Injectable, OnModuleDestroy, OnModuleInit } from '@nestjs/common';
import { PrismaClient } from '@prisma/client';
import { ensureSqliteSchema } from './sqlite-bootstrap';
@Injectable()
export class PrismaService
@@ -7,6 +8,7 @@ export class PrismaService
implements OnModuleInit, OnModuleDestroy
{
async onModuleInit() {
await ensureSqliteSchema();
await this.$connect();
}

View File

@@ -0,0 +1,59 @@
import { mkdtempSync, mkdirSync, rmSync } from 'node:fs';
import { tmpdir } from 'node:os';
import { join } from 'node:path';
import {
ensureSqliteSchema,
parseSqliteFilePath,
resolveSqliteFilePath,
} from './sqlite-bootstrap';
describe('sqlite-bootstrap', () => {
  it('parses sqlite file URLs', () => {
    // Relative and absolute `file:` URLs yield the raw path portion.
    expect(parseSqliteFilePath('file:./dev.db')).toBe('./dev.db');
    expect(parseSqliteFilePath('file:/tmp/codexdash.db')).toBe(
      '/tmp/codexdash.db',
    );
    // Prisma-style query-string options must be stripped.
    expect(
      parseSqliteFilePath('file:/tmp/codexdash.db?connection_limit=1'),
    ).toBe('/tmp/codexdash.db');
    // Missing URLs and non-sqlite protocols are rejected with null.
    expect(parseSqliteFilePath(undefined)).toBeNull();
    expect(parseSqliteFilePath('postgresql://example.com/app')).toBeNull();
  });
  it('resolves relative sqlite URLs like the Prisma schema layout', () => {
    const tempDir = mkdtempSync(join(tmpdir(), 'codexdash-sqlite-path-'));
    try {
      // Repo layout: relative paths anchor at apps/api/prisma when present.
      const repoStylePrismaDir = join(tempDir, 'apps', 'api', 'prisma');
      mkdirSync(repoStylePrismaDir, { recursive: true });
      expect(resolveSqliteFilePath('file:./dev.db', tempDir)).toBe(
        join(repoStylePrismaDir, 'dev.db'),
      );
      // Packaged layout: falls back to a plain ./prisma directory.
      const packageStyleRoot = mkdtempSync(
        join(tmpdir(), 'codexdash-package-prisma-'),
      );
      try {
        const packageStylePrismaDir = join(packageStyleRoot, 'prisma');
        mkdirSync(packageStylePrismaDir, { recursive: true });
        expect(resolveSqliteFilePath('file:./dev.db', packageStyleRoot)).toBe(
          join(packageStylePrismaDir, 'dev.db'),
        );
      } finally {
        rmSync(packageStyleRoot, { force: true, recursive: true });
      }
      // Absolute paths are returned untouched regardless of cwd.
      expect(resolveSqliteFilePath('file:/tmp/absolute.db', tempDir)).toBe(
        '/tmp/absolute.db',
      );
    } finally {
      rmSync(tempDir, { force: true, recursive: true });
    }
  });
  it('skips bootstrap for non-sqlite database URLs', async () => {
    await expect(
      ensureSqliteSchema('postgresql://example.com/app'),
    ).resolves.toBe(false);
  });
});

View File

@@ -0,0 +1,152 @@
import { existsSync } from 'node:fs';
import { mkdir } from 'node:fs/promises';
import { dirname, isAbsolute, resolve } from 'node:path';
// Minimal structural typings for `bun:sqlite` so this module type-checks
// under Node/tsc, where Bun's ambient declarations are unavailable.
type BunSqliteDatabase = {
  exec(sql: string): void;
  close(throwOnError?: boolean): void;
};
type BunSqliteModule = {
  Database: new (
    filename: string,
    options?: {
      create?: boolean;
      readonly?: boolean;
      strict?: boolean;
    },
  ) => BunSqliteDatabase;
};
// Kept in a constant so bundlers cannot statically resolve `bun:sqlite`
// when building a Node module graph.
const BUN_SQLITE_SPECIFIER = 'bun:sqlite';
// Idempotent DDL (CREATE ... IF NOT EXISTS) bootstrapping a fresh SQLite
// database. NOTE(review): presumably mirrors the Prisma schema — must be
// kept in sync with schema.prisma whenever a migration is added.
const SQLITE_BOOTSTRAP_SQL = `
PRAGMA foreign_keys = ON;
CREATE TABLE IF NOT EXISTS "User" (
"id" TEXT NOT NULL PRIMARY KEY,
"email" TEXT NOT NULL,
"passwordHash" TEXT NOT NULL,
"name" TEXT NOT NULL,
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
"updatedAt" DATETIME NOT NULL
);
CREATE UNIQUE INDEX IF NOT EXISTS "User_email_key" ON "User"("email");
CREATE TABLE IF NOT EXISTS "OpenAiAccount" (
"id" TEXT NOT NULL PRIMARY KEY,
"userId" TEXT NOT NULL,
"label" TEXT NOT NULL,
"emailHint" TEXT,
"providerEmail" TEXT,
"providerAccountId" TEXT,
"planType" TEXT,
"authType" TEXT NOT NULL DEFAULT 'codex-oauth',
"encryptedSessionJson" TEXT NOT NULL,
"sessionExpiresAt" DATETIME,
"lastValidatedAt" DATETIME,
"lastUsageJson" JSONB,
"lastSyncedAt" DATETIME,
"lastError" TEXT,
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
"updatedAt" DATETIME NOT NULL,
CONSTRAINT "OpenAiAccount_userId_fkey" FOREIGN KEY ("userId") REFERENCES "User" ("id") ON DELETE CASCADE ON UPDATE CASCADE
);
CREATE INDEX IF NOT EXISTS "OpenAiAccount_userId_idx" ON "OpenAiAccount"("userId");
CREATE TABLE IF NOT EXISTS "OpenAiLoginAttempt" (
"id" TEXT NOT NULL PRIMARY KEY,
"userId" TEXT NOT NULL,
"accountId" TEXT,
"label" TEXT NOT NULL,
"emailHint" TEXT,
"status" TEXT NOT NULL DEFAULT 'pending',
"state" TEXT NOT NULL,
"encryptedCodeVerifier" TEXT NOT NULL,
"expiresAt" DATETIME NOT NULL,
"completedAt" DATETIME,
"lastError" TEXT,
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
"updatedAt" DATETIME NOT NULL,
CONSTRAINT "OpenAiLoginAttempt_userId_fkey" FOREIGN KEY ("userId") REFERENCES "User" ("id") ON DELETE CASCADE ON UPDATE CASCADE,
CONSTRAINT "OpenAiLoginAttempt_accountId_fkey" FOREIGN KEY ("accountId") REFERENCES "OpenAiAccount" ("id") ON DELETE SET NULL ON UPDATE CASCADE
);
CREATE UNIQUE INDEX IF NOT EXISTS "OpenAiLoginAttempt_state_key" ON "OpenAiLoginAttempt"("state");
CREATE INDEX IF NOT EXISTS "OpenAiLoginAttempt_userId_status_idx" ON "OpenAiLoginAttempt"("userId", "status");
`;
/**
 * Extracts the filesystem path from a Prisma sqlite `file:` URL.
 * Strips any query string (e.g. `?connection_limit=1`) and URL-decodes
 * the path. Returns null for missing URLs or non-sqlite protocols.
 */
export function parseSqliteFilePath(
  databaseUrl: string | undefined,
): string | null {
  const scheme = 'file:';
  if (!databaseUrl?.startsWith(scheme)) {
    return null;
  }
  const [encodedPath] = databaseUrl.slice(scheme.length).split('?');
  return encodedPath ? decodeURIComponent(encodedPath) : null;
}

/**
 * Resolves a sqlite `file:` URL to an absolute filesystem path, mirroring
 * Prisma's convention that relative paths are anchored at the schema
 * directory: `apps/api/prisma` in the repo layout, `prisma` in the
 * packaged layout, falling back to `cwd` when neither exists.
 */
export function resolveSqliteFilePath(
  databaseUrl: string | undefined,
  cwd = process.cwd(),
): string | null {
  const filePath = parseSqliteFilePath(databaseUrl);
  if (filePath === null) {
    return null;
  }
  if (isAbsolute(filePath)) {
    return filePath;
  }
  // Anchor at the first schema directory that actually exists under cwd.
  for (const relativeBase of ['apps/api/prisma', 'prisma']) {
    const schemaDir = resolve(cwd, relativeBase);
    if (existsSync(schemaDir)) {
      return resolve(schemaDir, filePath);
    }
  }
  return resolve(cwd, filePath);
}
/**
 * Bootstraps the SQLite schema for fresh `file:` databases before Prisma
 * connects. Only runs under the Bun runtime (where `bun:sqlite` exists);
 * returns false without side effects under Node or for non-sqlite URLs,
 * true after the idempotent DDL has been executed.
 */
export async function ensureSqliteSchema(
  databaseUrl = process.env.DATABASE_URL,
): Promise<boolean> {
  const maybeBun = (
    globalThis as typeof globalThis & { Bun?: { version: string } }
  ).Bun;
  if (maybeBun === undefined) {
    return false;
  }
  const sqlitePath = resolveSqliteFilePath(databaseUrl);
  if (sqlitePath === null) {
    return false;
  }
  // A brand-new named volume may not contain the parent directory yet.
  await mkdir(dirname(sqlitePath), { recursive: true });
  // Dynamic import via a constant keeps `bun:sqlite` out of Node bundles.
  const sqlite = (await import(BUN_SQLITE_SPECIFIER)) as BunSqliteModule;
  const database = new sqlite.Database(sqlitePath, {
    create: true,
    strict: true,
  });
  try {
    database.exec(SQLITE_BOOTSTRAP_SQL);
  } finally {
    database.close(false);
  }
  return true;
}