Compare commits

...

23 Commits

Author SHA1 Message Date
283f91ed91 ci: configure npm auth for release publish
All checks were successful
npm release / verify (push) Successful in 13s
npm release / publish to npm (push) Successful in 12s
2026-05-11 13:59:29 +09:00
5991e4f1f0 ci: run Gitea release steps with bash
Some checks failed
npm release / verify (push) Successful in 13s
npm release / publish to npm (push) Failing after 11s
2026-05-11 13:57:45 +09:00
0dc657c97b ci: make Gitea release workflow self-contained
Some checks failed
npm release / verify (push) Failing after 10s
npm release / publish to npm (push) Has been skipped
2026-05-11 13:56:39 +09:00
96d0568197 ci: set writable HOME for Gitea release workflow
Some checks failed
npm release / verify (push) Failing after 2s
npm release / publish to npm (push) Has been skipped
2026-05-11 13:53:04 +09:00
e8adccfbbf ci: add tag-gated npm release workflow
Some checks failed
npm release / verify (push) Failing after 13s
npm release / publish to npm (push) Has been skipped
2026-05-11 13:36:07 +09:00
1c82b63e7a docs: add IdentityDB wiki documentation plan 2026-05-11 12:27:12 +09:00
3e39d3bbd5 docs: document LLM extractor adapter usage 2026-05-11 12:19:58 +09:00
4f877a40fb feat: add provider-agnostic LLM extractor adapter 2026-05-11 12:19:50 +09:00
7a02621e40 docs: add LLM extractor adapter plan 2026-05-11 12:14:55 +09:00
4c418dc39a docs: document topic alias and semantic search APIs 2026-05-11 12:06:48 +09:00
810f4a6bf2 feat: add semantic fact search and embeddings 2026-05-11 12:05:47 +09:00
428f5021e8 feat: add topic alias resolution APIs 2026-05-11 11:53:56 +09:00
ba03ecb85b feat: add topic hierarchy APIs 2026-05-11 11:46:10 +09:00
d95ac8c1a0 docs: add IdentityDB memory expansion plan 2026-05-11 11:41:13 +09:00
21e0b1e897 docs: add IdentityDB usage examples 2026-05-11 10:55:09 +09:00
2c6624beea feat: add pluggable fact extraction pipeline 2026-05-11 10:54:40 +09:00
9f3133a403 test: specify pluggable fact ingestion behavior 2026-05-11 10:50:43 +09:00
9dc529af04 feat: add IdentityDB core memory graph APIs 2026-05-11 10:50:11 +09:00
f4b6548054 test: specify memory graph query APIs 2026-05-11 10:46:38 +09:00
2f8712e1df feat: add multi-dialect schema initialization 2026-05-11 10:45:39 +09:00
fb140d7a50 test: define schema contract for topic fact graph 2026-05-11 10:42:50 +09:00
cadc1b0733 chore: scaffold IdentityDB package tooling 2026-05-11 10:41:48 +09:00
bf1495a4d0 docs: add IdentityDB foundation plan 2026-05-11 10:41:45 +09:00
34 changed files with 4179 additions and 1 deletion

View File

@@ -0,0 +1,117 @@
name: npm release
on:
push:
tags:
- 'v*'
- '[0-9]*'
permissions:
contents: read
defaults:
run:
shell: bash
jobs:
verify:
name: verify
runs-on: ubuntu-latest
container:
image: node:20-bookworm
timeout-minutes: 20
steps:
- name: Install release tools
run: |
set -euo pipefail
apt-get update
apt-get install -y git curl ca-certificates
curl -fsSL https://bun.sh/install | bash -s -- bun-v1.3.13
install -m 0755 /root/.bun/bin/bun /usr/local/bin/bun
node --version
npm --version
bun --version
- name: Clone tagged source
run: |
set -euo pipefail
REPO_URL="${{ gitea.server_url }}/${{ gitea.repository }}.git"
AUTH_HEADER="$(printf '%s' '${{ gitea.actor }}:${{ secrets.GITEA_TOKEN }}' | base64 -w0)"
git -c http.extraHeader="Authorization: Basic $AUTH_HEADER" clone --depth 1 --branch "${{ gitea.ref_name }}" "$REPO_URL" repo
git -C repo rev-parse HEAD
- name: Verify release tag matches package version
working-directory: repo
shell: bash
run: |
set -euo pipefail
TAG_NAME="${{ gitea.ref_name }}"
PACKAGE_VERSION="$(node -p "require('./package.json').version")"
if [ "$TAG_NAME" = "v$PACKAGE_VERSION" ] || [ "$TAG_NAME" = "$PACKAGE_VERSION" ]; then
echo "Release tag $TAG_NAME matches package version $PACKAGE_VERSION"
exit 0
fi
echo "Tag $TAG_NAME does not match package.json version $PACKAGE_VERSION" >&2
exit 1
- name: Run verify pipeline
working-directory: repo
run: |
set -euo pipefail
bun install --frozen-lockfile
bun run test
bun run check
bun run build
release:
name: publish to npm
runs-on: ubuntu-latest
container:
image: node:20-bookworm
timeout-minutes: 20
needs:
- verify
steps:
- name: Install release tools
run: |
set -euo pipefail
apt-get update
apt-get install -y git curl ca-certificates
curl -fsSL https://bun.sh/install | bash -s -- bun-v1.3.13
install -m 0755 /root/.bun/bin/bun /usr/local/bin/bun
node --version
npm --version
bun --version
- name: Clone tagged source
run: |
set -euo pipefail
REPO_URL="${{ gitea.server_url }}/${{ gitea.repository }}.git"
AUTH_HEADER="$(printf '%s' '${{ gitea.actor }}:${{ secrets.GITEA_TOKEN }}' | base64 -w0)"
git -c http.extraHeader="Authorization: Basic $AUTH_HEADER" clone --depth 1 --branch "${{ gitea.ref_name }}" "$REPO_URL" repo
git -C repo rev-parse HEAD
- name: Install dependencies
working-directory: repo
run: |
set -euo pipefail
bun install --frozen-lockfile
- name: Build package
working-directory: repo
run: |
set -euo pipefail
bun run build
- name: Publish package to npm
working-directory: repo
env:
NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}
run: |
set -euo pipefail
printf '//registry.npmjs.org/:_authToken=%s\n' "$NODE_AUTH_TOKEN" > ~/.npmrc
npm publish

6
.gitignore vendored Normal file
View File

@@ -0,0 +1,6 @@
node_modules/
dist/
coverage/
.env
.DS_Store
*.log

153
README.md
View File

@@ -1,3 +1,154 @@
# IdentityDB
Memory database for Artificial Personality
IdentityDB is a TypeScript package for building structured AI memory on top of relational databases.
## What it is
IdentityDB stores memory as a graph made of:
- **Topics** — named nodes such as `TypeScript`, `programming language`, `2025`, or `I`
- **Facts** — statements that connect multiple topics
- **Fact-topic links** — the relationships that turn one fact into a bridge between many topics
A single fact like `I have worked with TypeScript since 2025.` can connect the topics `I`, `TypeScript`, and `2025` at the same time.
## Current capabilities
- SQLite, PostgreSQL, MySQL, and MariaDB connection adapters
- Automatic schema initialization for `topics`, `facts`, `fact_topics`, `topic_relations`, `topic_aliases`, and `fact_embeddings`
- High-level APIs for adding topics and facts
- Topic hierarchy APIs for parent/child traversal and lineage lookup
- Topic alias and canonical resolution APIs so facts and queries can resolve alternate names
- Semantic fact indexing and search APIs built around provider-agnostic embeddings
- Dedup-aware ingestion hooks that can reuse an existing fact when a semantic near-duplicate is detected
- Pluggable fact extraction so callers can use a small LLM or a deterministic extractor
## Install
```bash
bun install
```
## Quick start
```ts
import { IdentityDB, NaiveExtractor, type EmbeddingProvider } from 'identitydb';
const db = await IdentityDB.connect({
client: 'sqlite',
filename: ':memory:',
});
await db.initialize();
await db.ingestStatement('I have worked with TypeScript since 2025.', {
extractor: new NaiveExtractor(),
});
await db.addFact({
statement: 'TypeScript is a programming language.',
topics: [
{
name: 'TypeScript',
category: 'entity',
granularity: 'concrete',
},
{
name: 'programming language',
category: 'concept',
granularity: 'abstract',
},
],
});
await db.linkTopics({
parentName: 'programming language',
childName: 'TypeScript',
});
await db.addTopicAlias('TypeScript', 'TS');
const provider: EmbeddingProvider = {
model: 'example-embedding-v1',
dimensions: 3,
async embed(input) {
if (input.toLowerCase().includes('typescript')) {
return [1, 0, 0];
}
return [0, 1, 0];
},
};
await db.indexFactEmbeddings({ provider });
const topic = await db.getTopicByName('TS', { includeFacts: true });
const children = await db.getTopicChildren('programming language');
const lineage = await db.getTopicLineage('TS');
const connected = await db.findConnectedTopics('TypeScript');
const matches = await db.searchFacts({
query: 'TypeScript experience',
provider,
limit: 5,
});
console.log(topic?.name);
console.log(children.map((entry) => entry.name));
console.log(lineage.map((entry) => entry.name));
console.log(connected.map((entry) => [entry.name, entry.sharedFactCount]));
console.log(matches.map((entry) => [entry.statement, entry.score]));
await db.close();
```
## Semantic ingestion and duplicate detection
If you provide an embedding provider during ingestion, IdentityDB can index the new fact automatically and reuse an existing fact when a semantic near-duplicate is already present.
```ts
await db.ingestStatement('Bun makes TypeScript tooling fast.', {
extractor: new NaiveExtractor(),
embeddingProvider: provider,
duplicateThreshold: 0.95,
});
```
## LLM-backed extraction
You can bridge any text-generating model into IdentityDB by wrapping it with `LlmFactExtractor`.
```ts
import { LlmFactExtractor } from 'identitydb';
const extractor = new LlmFactExtractor({
model: {
async generateText(prompt) {
return callYourFavoriteLlm(prompt);
},
},
instructions: 'Prefer technology, product, and time topics over generic nouns.',
});
await db.ingestStatement('I have worked with Bun and TypeScript since 2025.', {
extractor,
});
```
The adapter expects the model to return JSON and will validate the structured response before IdentityDB writes a fact.
## Development
```bash
bun run test
bun run check
bun run build
```
## Current status
This repository is under active development, expanding beyond the initial MVP.
See these implementation plans for the current roadmap:
- `docs/plans/2026-05-11-identitydb-foundation.md`
- `docs/plans/2026-05-11-identitydb-memory-expansion.md`

426
bun.lock Normal file
View File

@@ -0,0 +1,426 @@
{
"lockfileVersion": 1,
"configVersion": 1,
"workspaces": {
"": {
"name": "identitydb",
"dependencies": {
"better-sqlite3": "^12.1.1",
"kysely": "^0.28.8",
"mysql2": "^3.15.3",
"pg": "^8.16.0",
},
"devDependencies": {
"@types/better-sqlite3": "^7.6.13",
"@types/node": "^24.0.0",
"@types/pg": "^8.20.0",
"tsup": "^8.5.0",
"typescript": "^5.8.3",
"vitest": "^3.2.4",
},
},
},
"packages": {
"@esbuild/aix-ppc64": ["@esbuild/aix-ppc64@0.27.7", "", { "os": "aix", "cpu": "ppc64" }, "sha512-EKX3Qwmhz1eMdEJokhALr0YiD0lhQNwDqkPYyPhiSwKrh7/4KRjQc04sZ8db+5DVVnZ1LmbNDI1uAMPEUBnQPg=="],
"@esbuild/android-arm": ["@esbuild/android-arm@0.27.7", "", { "os": "android", "cpu": "arm" }, "sha512-jbPXvB4Yj2yBV7HUfE2KHe4GJX51QplCN1pGbYjvsyCZbQmies29EoJbkEc+vYuU5o45AfQn37vZlyXy4YJ8RQ=="],
"@esbuild/android-arm64": ["@esbuild/android-arm64@0.27.7", "", { "os": "android", "cpu": "arm64" }, "sha512-62dPZHpIXzvChfvfLJow3q5dDtiNMkwiRzPylSCfriLvZeq0a1bWChrGx/BbUbPwOrsWKMn8idSllklzBy+dgQ=="],
"@esbuild/android-x64": ["@esbuild/android-x64@0.27.7", "", { "os": "android", "cpu": "x64" }, "sha512-x5VpMODneVDb70PYV2VQOmIUUiBtY3D3mPBG8NxVk5CogneYhkR7MmM3yR/uMdITLrC1ml/NV1rj4bMJuy9MCg=="],
"@esbuild/darwin-arm64": ["@esbuild/darwin-arm64@0.27.7", "", { "os": "darwin", "cpu": "arm64" }, "sha512-5lckdqeuBPlKUwvoCXIgI2D9/ABmPq3Rdp7IfL70393YgaASt7tbju3Ac+ePVi3KDH6N2RqePfHnXkaDtY9fkw=="],
"@esbuild/darwin-x64": ["@esbuild/darwin-x64@0.27.7", "", { "os": "darwin", "cpu": "x64" }, "sha512-rYnXrKcXuT7Z+WL5K980jVFdvVKhCHhUwid+dDYQpH+qu+TefcomiMAJpIiC2EM3Rjtq0sO3StMV/+3w3MyyqQ=="],
"@esbuild/freebsd-arm64": ["@esbuild/freebsd-arm64@0.27.7", "", { "os": "freebsd", "cpu": "arm64" }, "sha512-B48PqeCsEgOtzME2GbNM2roU29AMTuOIN91dsMO30t+Ydis3z/3Ngoj5hhnsOSSwNzS+6JppqWsuhTp6E82l2w=="],
"@esbuild/freebsd-x64": ["@esbuild/freebsd-x64@0.27.7", "", { "os": "freebsd", "cpu": "x64" }, "sha512-jOBDK5XEjA4m5IJK3bpAQF9/Lelu/Z9ZcdhTRLf4cajlB+8VEhFFRjWgfy3M1O4rO2GQ/b2dLwCUGpiF/eATNQ=="],
"@esbuild/linux-arm": ["@esbuild/linux-arm@0.27.7", "", { "os": "linux", "cpu": "arm" }, "sha512-RkT/YXYBTSULo3+af8Ib0ykH8u2MBh57o7q/DAs3lTJlyVQkgQvlrPTnjIzzRPQyavxtPtfg0EopvDyIt0j1rA=="],
"@esbuild/linux-arm64": ["@esbuild/linux-arm64@0.27.7", "", { "os": "linux", "cpu": "arm64" }, "sha512-RZPHBoxXuNnPQO9rvjh5jdkRmVizktkT7TCDkDmQ0W2SwHInKCAV95GRuvdSvA7w4VMwfCjUiPwDi0ZO6Nfe9A=="],
"@esbuild/linux-ia32": ["@esbuild/linux-ia32@0.27.7", "", { "os": "linux", "cpu": "ia32" }, "sha512-GA48aKNkyQDbd3KtkplYWT102C5sn/EZTY4XROkxONgruHPU72l+gW+FfF8tf2cFjeHaRbWpOYa/uRBz/Xq1Pg=="],
"@esbuild/linux-loong64": ["@esbuild/linux-loong64@0.27.7", "", { "os": "linux", "cpu": "none" }, "sha512-a4POruNM2oWsD4WKvBSEKGIiWQF8fZOAsycHOt6JBpZ+JN2n2JH9WAv56SOyu9X5IqAjqSIPTaJkqN8F7XOQ5Q=="],
"@esbuild/linux-mips64el": ["@esbuild/linux-mips64el@0.27.7", "", { "os": "linux", "cpu": "none" }, "sha512-KabT5I6StirGfIz0FMgl1I+R1H73Gp0ofL9A3nG3i/cYFJzKHhouBV5VWK1CSgKvVaG4q1RNpCTR2LuTVB3fIw=="],
"@esbuild/linux-ppc64": ["@esbuild/linux-ppc64@0.27.7", "", { "os": "linux", "cpu": "ppc64" }, "sha512-gRsL4x6wsGHGRqhtI+ifpN/vpOFTQtnbsupUF5R5YTAg+y/lKelYR1hXbnBdzDjGbMYjVJLJTd2OFmMewAgwlQ=="],
"@esbuild/linux-riscv64": ["@esbuild/linux-riscv64@0.27.7", "", { "os": "linux", "cpu": "none" }, "sha512-hL25LbxO1QOngGzu2U5xeXtxXcW+/GvMN3ejANqXkxZ/opySAZMrc+9LY/WyjAan41unrR3YrmtTsUpwT66InQ=="],
"@esbuild/linux-s390x": ["@esbuild/linux-s390x@0.27.7", "", { "os": "linux", "cpu": "s390x" }, "sha512-2k8go8Ycu1Kb46vEelhu1vqEP+UeRVj2zY1pSuPdgvbd5ykAw82Lrro28vXUrRmzEsUV0NzCf54yARIK8r0fdw=="],
"@esbuild/linux-x64": ["@esbuild/linux-x64@0.27.7", "", { "os": "linux", "cpu": "x64" }, "sha512-hzznmADPt+OmsYzw1EE33ccA+HPdIqiCRq7cQeL1Jlq2gb1+OyWBkMCrYGBJ+sxVzve2ZJEVeePbLM2iEIZSxA=="],
"@esbuild/netbsd-arm64": ["@esbuild/netbsd-arm64@0.27.7", "", { "os": "none", "cpu": "arm64" }, "sha512-b6pqtrQdigZBwZxAn1UpazEisvwaIDvdbMbmrly7cDTMFnw/+3lVxxCTGOrkPVnsYIosJJXAsILG9XcQS+Yu6w=="],
"@esbuild/netbsd-x64": ["@esbuild/netbsd-x64@0.27.7", "", { "os": "none", "cpu": "x64" }, "sha512-OfatkLojr6U+WN5EDYuoQhtM+1xco+/6FSzJJnuWiUw5eVcicbyK3dq5EeV/QHT1uy6GoDhGbFpprUiHUYggrw=="],
"@esbuild/openbsd-arm64": ["@esbuild/openbsd-arm64@0.27.7", "", { "os": "openbsd", "cpu": "arm64" }, "sha512-AFuojMQTxAz75Fo8idVcqoQWEHIXFRbOc1TrVcFSgCZtQfSdc1RXgB3tjOn/krRHENUB4j00bfGjyl2mJrU37A=="],
"@esbuild/openbsd-x64": ["@esbuild/openbsd-x64@0.27.7", "", { "os": "openbsd", "cpu": "x64" }, "sha512-+A1NJmfM8WNDv5CLVQYJ5PshuRm/4cI6WMZRg1by1GwPIQPCTs1GLEUHwiiQGT5zDdyLiRM/l1G0Pv54gvtKIg=="],
"@esbuild/openharmony-arm64": ["@esbuild/openharmony-arm64@0.27.7", "", { "os": "none", "cpu": "arm64" }, "sha512-+KrvYb/C8zA9CU/g0sR6w2RBw7IGc5J2BPnc3dYc5VJxHCSF1yNMxTV5LQ7GuKteQXZtspjFbiuW5/dOj7H4Yw=="],
"@esbuild/sunos-x64": ["@esbuild/sunos-x64@0.27.7", "", { "os": "sunos", "cpu": "x64" }, "sha512-ikktIhFBzQNt/QDyOL580ti9+5mL/YZeUPKU2ivGtGjdTYoqz6jObj6nOMfhASpS4GU4Q/Clh1QtxWAvcYKamA=="],
"@esbuild/win32-arm64": ["@esbuild/win32-arm64@0.27.7", "", { "os": "win32", "cpu": "arm64" }, "sha512-7yRhbHvPqSpRUV7Q20VuDwbjW5kIMwTHpptuUzV+AA46kiPze5Z7qgt6CLCK3pWFrHeNfDd1VKgyP4O+ng17CA=="],
"@esbuild/win32-ia32": ["@esbuild/win32-ia32@0.27.7", "", { "os": "win32", "cpu": "ia32" }, "sha512-SmwKXe6VHIyZYbBLJrhOoCJRB/Z1tckzmgTLfFYOfpMAx63BJEaL9ExI8x7v0oAO3Zh6D/Oi1gVxEYr5oUCFhw=="],
"@esbuild/win32-x64": ["@esbuild/win32-x64@0.27.7", "", { "os": "win32", "cpu": "x64" }, "sha512-56hiAJPhwQ1R4i+21FVF7V8kSD5zZTdHcVuRFMW0hn753vVfQN8xlx4uOPT4xoGH0Z/oVATuR82AiqSTDIpaHg=="],
"@jridgewell/gen-mapping": ["@jridgewell/gen-mapping@0.3.13", "", { "dependencies": { "@jridgewell/sourcemap-codec": "^1.5.0", "@jridgewell/trace-mapping": "^0.3.24" } }, "sha512-2kkt/7niJ6MgEPxF0bYdQ6etZaA+fQvDcLKckhy1yIQOzaoKjBBjSj63/aLVjYE3qhRt5dvM+uUyfCg6UKCBbA=="],
"@jridgewell/resolve-uri": ["@jridgewell/resolve-uri@3.1.2", "", {}, "sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw=="],
"@jridgewell/sourcemap-codec": ["@jridgewell/sourcemap-codec@1.5.5", "", {}, "sha512-cYQ9310grqxueWbl+WuIUIaiUaDcj7WOq5fVhEljNVgRfOUhY9fy2zTvfoqWsnebh8Sl70VScFbICvJnLKB0Og=="],
"@jridgewell/trace-mapping": ["@jridgewell/trace-mapping@0.3.31", "", { "dependencies": { "@jridgewell/resolve-uri": "^3.1.0", "@jridgewell/sourcemap-codec": "^1.4.14" } }, "sha512-zzNR+SdQSDJzc8joaeP8QQoCQr8NuYx2dIIytl1QeBEZHJ9uW6hebsrYgbz8hJwUQao3TWCMtmfV8Nu1twOLAw=="],
"@rollup/rollup-android-arm-eabi": ["@rollup/rollup-android-arm-eabi@4.60.3", "", { "os": "android", "cpu": "arm" }, "sha512-x35CNW/ANXG3hE/EZpRU8MXX1JDN86hBb2wMGAtltkz7pc6cxgjpy1OMMfDosOQ+2hWqIkag/fGok1Yady9nGw=="],
"@rollup/rollup-android-arm64": ["@rollup/rollup-android-arm64@4.60.3", "", { "os": "android", "cpu": "arm64" }, "sha512-xw3xtkDApIOGayehp2+Rz4zimfkaX65r4t47iy+ymQB2G4iJCBBfj0ogVg5jpvjpn8UWn/+q9tprxleYeNp3Hw=="],
"@rollup/rollup-darwin-arm64": ["@rollup/rollup-darwin-arm64@4.60.3", "", { "os": "darwin", "cpu": "arm64" }, "sha512-vo6Y5Qfpx7/5EaamIwi0WqW2+zfiusVihKatLvtN1VFVy3D13uERk/6gZLU1UiHRL6fDXqj/ELIeVRGnvcTE1g=="],
"@rollup/rollup-darwin-x64": ["@rollup/rollup-darwin-x64@4.60.3", "", { "os": "darwin", "cpu": "x64" }, "sha512-D+0QGcZhBzTN82weOnsSlY7V7+RMmPuF1CkbxyMAGE8+ZHeUjyb76ZiWmBlCu//AQQONvxcqRbwZTajZKqjuOw=="],
"@rollup/rollup-freebsd-arm64": ["@rollup/rollup-freebsd-arm64@4.60.3", "", { "os": "freebsd", "cpu": "arm64" }, "sha512-6HnvHCT7fDyj6R0Ph7A6x8dQS/S38MClRWeDLqc0MdfWkxjiu1HSDYrdPhqSILzjTIC/pnXbbJbo+ft+gy/9hQ=="],
"@rollup/rollup-freebsd-x64": ["@rollup/rollup-freebsd-x64@4.60.3", "", { "os": "freebsd", "cpu": "x64" }, "sha512-KHLgC3WKlUYW3ShFKnnosZDOJ0xjg9zp7au3sIm2bs/tGBeC2ipmvRh/N7JKi0t9Ue20C0dpEshi8WUubg+cnA=="],
"@rollup/rollup-linux-arm-gnueabihf": ["@rollup/rollup-linux-arm-gnueabihf@4.60.3", "", { "os": "linux", "cpu": "arm" }, "sha512-DV6fJoxEYWJOvaZIsok7KrYl0tPvga5OZ2yvKHNNYyk/2roMLqQAbGhr78EQ5YhHpnhLKJD3S1WFusAkmUuV5g=="],
"@rollup/rollup-linux-arm-musleabihf": ["@rollup/rollup-linux-arm-musleabihf@4.60.3", "", { "os": "linux", "cpu": "arm" }, "sha512-mQKoJAzvuOs6F+TZybQO4GOTSMUu7v0WdxEk24krQ/uUxXoPTtHjuaUuPmFhtBcM4K0ons8nrE3JyhTuCFtT/w=="],
"@rollup/rollup-linux-arm64-gnu": ["@rollup/rollup-linux-arm64-gnu@4.60.3", "", { "os": "linux", "cpu": "arm64" }, "sha512-Whjj2qoiJ6+OOJMGptTYazaJvjOJm+iKHpXQM1P3LzGjt7Ff++Tp7nH4N8J/BUA7R9IHfDyx4DJIflifwnbmIA=="],
"@rollup/rollup-linux-arm64-musl": ["@rollup/rollup-linux-arm64-musl@4.60.3", "", { "os": "linux", "cpu": "arm64" }, "sha512-4YTNHKqGng5+yiZt3mg77nmyuCfmNfX4fPmyUapBcIk+BdwSwmCWGXOUxhXbBEkFHtoN5boLj/5NON+u5QC9tg=="],
"@rollup/rollup-linux-loong64-gnu": ["@rollup/rollup-linux-loong64-gnu@4.60.3", "", { "os": "linux", "cpu": "none" }, "sha512-SU3kNlhkpI4UqlUc2VXPGK9o886ZsSeGfMAX2ba2b8DKmMXq4AL7KUrkSWVbb7koVqx41Yczx6dx5PNargIrEA=="],
"@rollup/rollup-linux-loong64-musl": ["@rollup/rollup-linux-loong64-musl@4.60.3", "", { "os": "linux", "cpu": "none" }, "sha512-6lDLl5h4TXpB1mTf2rQWnAk/LcXrx9vBfu/DT5TIPhvMhRWaZ5MxkIc8u4lJAmBo6klTe1ywXIUHFjylW505sg=="],
"@rollup/rollup-linux-ppc64-gnu": ["@rollup/rollup-linux-ppc64-gnu@4.60.3", "", { "os": "linux", "cpu": "ppc64" }, "sha512-BMo8bOw8evlup/8G+cj5xWtPyp93xPdyoSN16Zy90Q2QZ0ZYRhCt6ZJSwbrRzG9HApFabjwj2p25TUPDWrhzqQ=="],
"@rollup/rollup-linux-ppc64-musl": ["@rollup/rollup-linux-ppc64-musl@4.60.3", "", { "os": "linux", "cpu": "ppc64" }, "sha512-E0L8X1dZN1/Rph+5VPF6Xj2G7JJvMACVXtamTJIDrVI44Y3K+G8gQaMEAavbqCGTa16InptiVrX6eM6pmJ+7qA=="],
"@rollup/rollup-linux-riscv64-gnu": ["@rollup/rollup-linux-riscv64-gnu@4.60.3", "", { "os": "linux", "cpu": "none" }, "sha512-oZJ/WHaVfHUiRAtmTAeo3DcevNsVvH8mbvodjZy7D5QKvCefO371SiKRpxoDcCxB3PTRTLayWBkvmDQKTcX/sw=="],
"@rollup/rollup-linux-riscv64-musl": ["@rollup/rollup-linux-riscv64-musl@4.60.3", "", { "os": "linux", "cpu": "none" }, "sha512-Dhbyh7j9FybM3YaTgaHmVALwA8AkUwTPccyCQ79TG9AJUsMQqgN1DDEZNr4+QUfwiWvLDumW5vdwzoeUF+TNxQ=="],
"@rollup/rollup-linux-s390x-gnu": ["@rollup/rollup-linux-s390x-gnu@4.60.3", "", { "os": "linux", "cpu": "s390x" }, "sha512-cJd1X5XhHHlltkaypz1UcWLA8AcoIi1aWhsvaWDskD1oz2eKCypnqvTQ8ykMNI0RSmm7NkTdSqSSD7zM0xa6Ig=="],
"@rollup/rollup-linux-x64-gnu": ["@rollup/rollup-linux-x64-gnu@4.60.3", "", { "os": "linux", "cpu": "x64" }, "sha512-DAZDBHQfG2oQuhY7mc6I3/qB4LU2fQCjRvxbDwd/Jdvb9fypP4IJ4qmtu6lNjes6B531AI8cg1aKC2di97bUxA=="],
"@rollup/rollup-linux-x64-musl": ["@rollup/rollup-linux-x64-musl@4.60.3", "", { "os": "linux", "cpu": "x64" }, "sha512-cRxsE8c13mZOh3vP+wLDxpQBRrOHDIGOWyDL93Sy0Ga8y515fBcC2pjUfFwUe5T7tqvTvWbCpg1URM/AXdWIXA=="],
"@rollup/rollup-openbsd-x64": ["@rollup/rollup-openbsd-x64@4.60.3", "", { "os": "openbsd", "cpu": "x64" }, "sha512-QaWcIgRxqEdQdhJqW4DJctsH6HCmo5vHxY0krHSX4jMtOqfzC+dqDGuHM87bu4H8JBeibWx7jFz+h6/4C8wA5Q=="],
"@rollup/rollup-openharmony-arm64": ["@rollup/rollup-openharmony-arm64@4.60.3", "", { "os": "none", "cpu": "arm64" }, "sha512-AaXwSvUi3QIPtroAUw1t5yHGIyqKEXwH54WUocFolZhpGDruJcs8c+xPNDRn4XiQsS7MEwnYsHW2l0MBLDMkWg=="],
"@rollup/rollup-win32-arm64-msvc": ["@rollup/rollup-win32-arm64-msvc@4.60.3", "", { "os": "win32", "cpu": "arm64" }, "sha512-65LAKM/bAWDqKNEelHlcHvm2V+Vfb8C6INFxQXRHCvaVN1rJfwr4NvdP4FyzUaLqWfaCGaadf6UbTm8xJeYfEg=="],
"@rollup/rollup-win32-ia32-msvc": ["@rollup/rollup-win32-ia32-msvc@4.60.3", "", { "os": "win32", "cpu": "ia32" }, "sha512-EEM2gyhBF5MFnI6vMKdX1LAosE627RGBzIoGMdLloPZkXrUN0Ckqgr2Qi8+J3zip/8NVVro3/FjB+tjhZUgUHA=="],
"@rollup/rollup-win32-x64-gnu": ["@rollup/rollup-win32-x64-gnu@4.60.3", "", { "os": "win32", "cpu": "x64" }, "sha512-E5Eb5H/DpxaoXH++Qkv28RcUJboMopmdDUALBczvHMf7hNIxaDZqwY5lK12UK1BHacSmvupoEWGu+n993Z0y1A=="],
"@rollup/rollup-win32-x64-msvc": ["@rollup/rollup-win32-x64-msvc@4.60.3", "", { "os": "win32", "cpu": "x64" }, "sha512-hPt/bgL5cE+Qp+/TPHBqptcAgPzgj46mPcg/16zNUmbQk0j+mOEQV/+Lqu8QRtDV3Ek95Q6FeFITpuhl6OTsAA=="],
"@types/better-sqlite3": ["@types/better-sqlite3@7.6.13", "", { "dependencies": { "@types/node": "*" } }, "sha512-NMv9ASNARoKksWtsq/SHakpYAYnhBrQgGD8zkLYk/jaK8jUGn08CfEdTRgYhMypUQAfzSP8W6gNLe0q19/t4VA=="],
"@types/chai": ["@types/chai@5.2.3", "", { "dependencies": { "@types/deep-eql": "*", "assertion-error": "^2.0.1" } }, "sha512-Mw558oeA9fFbv65/y4mHtXDs9bPnFMZAL/jxdPFUpOHHIXX91mcgEHbS5Lahr+pwZFR8A7GQleRWeI6cGFC2UA=="],
"@types/deep-eql": ["@types/deep-eql@4.0.2", "", {}, "sha512-c9h9dVVMigMPc4bwTvC5dxqtqJZwQPePsWjPlpSOnojbor6pGqdk541lfA7AqFQr5pB1BRdq0juY9db81BwyFw=="],
"@types/estree": ["@types/estree@1.0.8", "", {}, "sha512-dWHzHa2WqEXI/O1E9OjrocMTKJl2mSrEolh1Iomrv6U+JuNwaHXsXx9bLu5gG7BUWFIN0skIQJQ/L1rIex4X6w=="],
"@types/node": ["@types/node@24.12.3", "", { "dependencies": { "undici-types": "~7.16.0" } }, "sha512-8oljBDGun9cIsZRJR6fkihn0TSXJI0UDOOhncYaERq6M0JMDoPLxyscwruJcb4GKS6dvK/d8xebYBg27h/duaQ=="],
"@types/pg": ["@types/pg@8.20.0", "", { "dependencies": { "@types/node": "*", "pg-protocol": "*", "pg-types": "^2.2.0" } }, "sha512-bEPFOaMAHTEP1EzpvHTbmwR8UsFyHSKsRisLIHVMXnpNefSbGA1bD6CVy+qKjGSqmZqNqBDV2azOBo8TgkcVow=="],
"@vitest/expect": ["@vitest/expect@3.2.4", "", { "dependencies": { "@types/chai": "^5.2.2", "@vitest/spy": "3.2.4", "@vitest/utils": "3.2.4", "chai": "^5.2.0", "tinyrainbow": "^2.0.0" } }, "sha512-Io0yyORnB6sikFlt8QW5K7slY4OjqNX9jmJQ02QDda8lyM6B5oNgVWoSoKPac8/kgnCUzuHQKrSLtu/uOqqrig=="],
"@vitest/mocker": ["@vitest/mocker@3.2.4", "", { "dependencies": { "@vitest/spy": "3.2.4", "estree-walker": "^3.0.3", "magic-string": "^0.30.17" }, "peerDependencies": { "msw": "^2.4.9", "vite": "^5.0.0 || ^6.0.0 || ^7.0.0-0" }, "optionalPeers": ["msw", "vite"] }, "sha512-46ryTE9RZO/rfDd7pEqFl7etuyzekzEhUbTW3BvmeO/BcCMEgq59BKhek3dXDWgAj4oMK6OZi+vRr1wPW6qjEQ=="],
"@vitest/pretty-format": ["@vitest/pretty-format@3.2.4", "", { "dependencies": { "tinyrainbow": "^2.0.0" } }, "sha512-IVNZik8IVRJRTr9fxlitMKeJeXFFFN0JaB9PHPGQ8NKQbGpfjlTx9zO4RefN8gp7eqjNy8nyK3NZmBzOPeIxtA=="],
"@vitest/runner": ["@vitest/runner@3.2.4", "", { "dependencies": { "@vitest/utils": "3.2.4", "pathe": "^2.0.3", "strip-literal": "^3.0.0" } }, "sha512-oukfKT9Mk41LreEW09vt45f8wx7DordoWUZMYdY/cyAk7w5TWkTRCNZYF7sX7n2wB7jyGAl74OxgwhPgKaqDMQ=="],
"@vitest/snapshot": ["@vitest/snapshot@3.2.4", "", { "dependencies": { "@vitest/pretty-format": "3.2.4", "magic-string": "^0.30.17", "pathe": "^2.0.3" } }, "sha512-dEYtS7qQP2CjU27QBC5oUOxLE/v5eLkGqPE0ZKEIDGMs4vKWe7IjgLOeauHsR0D5YuuycGRO5oSRXnwnmA78fQ=="],
"@vitest/spy": ["@vitest/spy@3.2.4", "", { "dependencies": { "tinyspy": "^4.0.3" } }, "sha512-vAfasCOe6AIK70iP5UD11Ac4siNUNJ9i/9PZ3NKx07sG6sUxeag1LWdNrMWeKKYBLlzuK+Gn65Yd5nyL6ds+nw=="],
"@vitest/utils": ["@vitest/utils@3.2.4", "", { "dependencies": { "@vitest/pretty-format": "3.2.4", "loupe": "^3.1.4", "tinyrainbow": "^2.0.0" } }, "sha512-fB2V0JFrQSMsCo9HiSq3Ezpdv4iYaXRG1Sx8edX3MwxfyNn83mKiGzOcH+Fkxt4MHxr3y42fQi1oeAInqgX2QA=="],
"acorn": ["acorn@8.16.0", "", { "bin": { "acorn": "bin/acorn" } }, "sha512-UVJyE9MttOsBQIDKw1skb9nAwQuR5wuGD3+82K6JgJlm/Y+KI92oNsMNGZCYdDsVtRHSak0pcV5Dno5+4jh9sw=="],
"any-promise": ["any-promise@1.3.0", "", {}, "sha512-7UvmKalWRt1wgjL1RrGxoSJW/0QZFIegpeGvZG9kjp8vrRu55XTHbwnqq2GpXm9uLbcuhxm3IqX9OB4MZR1b2A=="],
"assertion-error": ["assertion-error@2.0.1", "", {}, "sha512-Izi8RQcffqCeNVgFigKli1ssklIbpHnCYc6AknXGYoB6grJqyeby7jv12JUQgmTAnIDnbck1uxksT4dzN3PWBA=="],
"aws-ssl-profiles": ["aws-ssl-profiles@1.1.2", "", {}, "sha512-NZKeq9AfyQvEeNlN0zSYAaWrmBffJh3IELMZfRpJVWgrpEbtEpnjvzqBPf+mxoI287JohRDoa+/nsfqqiZmF6g=="],
"base64-js": ["base64-js@1.5.1", "", {}, "sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA=="],
"better-sqlite3": ["better-sqlite3@12.9.0", "", { "dependencies": { "bindings": "^1.5.0", "prebuild-install": "^7.1.1" } }, "sha512-wqUv4Gm3toFpHDQmaKD4QhZm3g1DjUBI0yzS4UBl6lElUmXFYdTQmmEDpAFa5o8FiFiymURypEnfVHzILKaxqQ=="],
"bindings": ["bindings@1.5.0", "", { "dependencies": { "file-uri-to-path": "1.0.0" } }, "sha512-p2q/t/mhvuOj/UeLlV6566GD/guowlr0hHxClI0W9m7MWYkL1F0hLo+0Aexs9HSPCtR1SXQ0TD3MMKrXZajbiQ=="],
"bl": ["bl@4.1.0", "", { "dependencies": { "buffer": "^5.5.0", "inherits": "^2.0.4", "readable-stream": "^3.4.0" } }, "sha512-1W07cM9gS6DcLperZfFSj+bWLtaPGSOHWhPiGzXmvVJbRLdG82sH/Kn8EtW1VqWVA54AKf2h5k5BbnIbwF3h6w=="],
"buffer": ["buffer@5.7.1", "", { "dependencies": { "base64-js": "^1.3.1", "ieee754": "^1.1.13" } }, "sha512-EHcyIPBQ4BSGlvjB16k5KgAJ27CIsHY/2JBmCRReo48y9rQ3MaUzWX3KVlBa4U7MyX02HdVj0K7C3WaB3ju7FQ=="],
"bundle-require": ["bundle-require@5.1.0", "", { "dependencies": { "load-tsconfig": "^0.2.3" }, "peerDependencies": { "esbuild": ">=0.18" } }, "sha512-3WrrOuZiyaaZPWiEt4G3+IffISVC9HYlWueJEBWED4ZH4aIAC2PnkdnuRrR94M+w6yGWn4AglWtJtBI8YqvgoA=="],
"cac": ["cac@6.7.14", "", {}, "sha512-b6Ilus+c3RrdDk+JhLKUAQfzzgLEPy6wcXqS7f/xe1EETvsDP6GORG7SFuOs6cID5YkqchW/LXZbX5bc8j7ZcQ=="],
"chai": ["chai@5.3.3", "", { "dependencies": { "assertion-error": "^2.0.1", "check-error": "^2.1.1", "deep-eql": "^5.0.1", "loupe": "^3.1.0", "pathval": "^2.0.0" } }, "sha512-4zNhdJD/iOjSH0A05ea+Ke6MU5mmpQcbQsSOkgdaUMJ9zTlDTD/GYlwohmIE2u0gaxHYiVHEn1Fw9mZ/ktJWgw=="],
"check-error": ["check-error@2.1.3", "", {}, "sha512-PAJdDJusoxnwm1VwW07VWwUN1sl7smmC3OKggvndJFadxxDRyFJBX/ggnu/KE4kQAB7a3Dp8f/YXC1FlUprWmA=="],
"chokidar": ["chokidar@4.0.3", "", { "dependencies": { "readdirp": "^4.0.1" } }, "sha512-Qgzu8kfBvo+cA4962jnP1KkS6Dop5NS6g7R5LFYJr4b8Ub94PPQXUksCw9PvXoeXPRRddRNC5C1JQUR2SMGtnA=="],
"chownr": ["chownr@1.1.4", "", {}, "sha512-jJ0bqzaylmJtVnNgzTeSOs8DPavpbYgEr/b0YL8/2GO3xJEhInFmhKMUnEJQjZumK7KXGFhUy89PrsJWlakBVg=="],
"commander": ["commander@4.1.1", "", {}, "sha512-NOKm8xhkzAjzFx8B2v5OAHT+u5pRQc2UCa2Vq9jYL/31o2wi9mxBA7LIFs3sV5VSC49z6pEhfbMULvShKj26WA=="],
"confbox": ["confbox@0.1.8", "", {}, "sha512-RMtmw0iFkeR4YV+fUOSucriAQNb9g8zFR52MWCtl+cCZOFRNL6zeB395vPzFhEjjn4fMxXudmELnl/KF/WrK6w=="],
"consola": ["consola@3.4.2", "", {}, "sha512-5IKcdX0nnYavi6G7TtOhwkYzyjfJlatbjMjuLSfE2kYT5pMDOilZ4OvMhi637CcDICTmz3wARPoyhqyX1Y+XvA=="],
"debug": ["debug@4.4.3", "", { "dependencies": { "ms": "^2.1.3" } }, "sha512-RGwwWnwQvkVfavKVt22FGLw+xYSdzARwm0ru6DhTVA3umU5hZc28V3kO4stgYryrTlLpuvgI9GiijltAjNbcqA=="],
"decompress-response": ["decompress-response@6.0.0", "", { "dependencies": { "mimic-response": "^3.1.0" } }, "sha512-aW35yZM6Bb/4oJlZncMH2LCoZtJXTRxES17vE3hoRiowU2kWHaJKFkSBDnDR+cm9J+9QhXmREyIfv0pji9ejCQ=="],
"deep-eql": ["deep-eql@5.0.2", "", {}, "sha512-h5k/5U50IJJFpzfL6nO9jaaumfjO/f2NjK/oYB2Djzm4p9L+3T9qWpZqZ2hAbLPuuYq9wrU08WQyBTL5GbPk5Q=="],
"deep-extend": ["deep-extend@0.6.0", "", {}, "sha512-LOHxIOaPYdHlJRtCQfDIVZtfw/ufM8+rVj649RIHzcm/vGwQRXFt6OPqIFWsm2XEMrNIEtWR64sY1LEKD2vAOA=="],
"denque": ["denque@2.1.0", "", {}, "sha512-HVQE3AAb/pxF8fQAoiqpvg9i3evqug3hoiwakOyZAwJm+6vZehbkYXZ0l4JxS+I3QxM97v5aaRNhj8v5oBhekw=="],
"detect-libc": ["detect-libc@2.1.2", "", {}, "sha512-Btj2BOOO83o3WyH59e8MgXsxEQVcarkUOpEYrubB0urwnN10yQ364rsiByU11nZlqWYZm05i/of7io4mzihBtQ=="],
"end-of-stream": ["end-of-stream@1.4.5", "", { "dependencies": { "once": "^1.4.0" } }, "sha512-ooEGc6HP26xXq/N+GCGOT0JKCLDGrq2bQUZrQ7gyrJiZANJ/8YDTxTpQBXGMn+WbIQXNVpyWymm7KYVICQnyOg=="],
"es-module-lexer": ["es-module-lexer@1.7.0", "", {}, "sha512-jEQoCwk8hyb2AZziIOLhDqpm5+2ww5uIE6lkO/6jcOCusfk6LhMHpXXfBLXTZ7Ydyt0j4VoUQv6uGNYbdW+kBA=="],
"esbuild": ["esbuild@0.27.7", "", { "optionalDependencies": { "@esbuild/aix-ppc64": "0.27.7", "@esbuild/android-arm": "0.27.7", "@esbuild/android-arm64": "0.27.7", "@esbuild/android-x64": "0.27.7", "@esbuild/darwin-arm64": "0.27.7", "@esbuild/darwin-x64": "0.27.7", "@esbuild/freebsd-arm64": "0.27.7", "@esbuild/freebsd-x64": "0.27.7", "@esbuild/linux-arm": "0.27.7", "@esbuild/linux-arm64": "0.27.7", "@esbuild/linux-ia32": "0.27.7", "@esbuild/linux-loong64": "0.27.7", "@esbuild/linux-mips64el": "0.27.7", "@esbuild/linux-ppc64": "0.27.7", "@esbuild/linux-riscv64": "0.27.7", "@esbuild/linux-s390x": "0.27.7", "@esbuild/linux-x64": "0.27.7", "@esbuild/netbsd-arm64": "0.27.7", "@esbuild/netbsd-x64": "0.27.7", "@esbuild/openbsd-arm64": "0.27.7", "@esbuild/openbsd-x64": "0.27.7", "@esbuild/openharmony-arm64": "0.27.7", "@esbuild/sunos-x64": "0.27.7", "@esbuild/win32-arm64": "0.27.7", "@esbuild/win32-ia32": "0.27.7", "@esbuild/win32-x64": "0.27.7" }, "bin": { "esbuild": "bin/esbuild" } }, "sha512-IxpibTjyVnmrIQo5aqNpCgoACA/dTKLTlhMHihVHhdkxKyPO1uBBthumT0rdHmcsk9uMonIWS0m4FljWzILh3w=="],
"estree-walker": ["estree-walker@3.0.3", "", { "dependencies": { "@types/estree": "^1.0.0" } }, "sha512-7RUKfXgSMMkzt6ZuXmqapOurLGPPfgj6l9uRZ7lRGolvk0y2yocc35LdcxKC5PQZdn2DMqioAQ2NoWcrTKmm6g=="],
"expand-template": ["expand-template@2.0.3", "", {}, "sha512-XYfuKMvj4O35f/pOXLObndIRvyQ+/+6AhODh+OKWj9S9498pHHn/IMszH+gt0fBCRWMNfk1ZSp5x3AifmnI2vg=="],
"expect-type": ["expect-type@1.3.0", "", {}, "sha512-knvyeauYhqjOYvQ66MznSMs83wmHrCycNEN6Ao+2AeYEfxUIkuiVxdEa1qlGEPK+We3n0THiDciYSsCcgW/DoA=="],
"fdir": ["fdir@6.5.0", "", { "peerDependencies": { "picomatch": "^3 || ^4" }, "optionalPeers": ["picomatch"] }, "sha512-tIbYtZbucOs0BRGqPJkshJUYdL+SDH7dVM8gjy+ERp3WAUjLEFJE+02kanyHtwjWOnwrKYBiwAmM0p4kLJAnXg=="],
"file-uri-to-path": ["file-uri-to-path@1.0.0", "", {}, "sha512-0Zt+s3L7Vf1biwWZ29aARiVYLx7iMGnEUl9x33fbB/j3jR81u/O2LbqK+Bm1CDSNDKVtJ/YjwY7TUd5SkeLQLw=="],
"fix-dts-default-cjs-exports": ["fix-dts-default-cjs-exports@1.0.1", "", { "dependencies": { "magic-string": "^0.30.17", "mlly": "^1.7.4", "rollup": "^4.34.8" } }, "sha512-pVIECanWFC61Hzl2+oOCtoJ3F17kglZC/6N94eRWycFgBH35hHx0Li604ZIzhseh97mf2p0cv7vVrOZGoqhlEg=="],
"fs-constants": ["fs-constants@1.0.0", "", {}, "sha512-y6OAwoSIf7FyjMIv94u+b5rdheZEjzR63GTyZJm5qh4Bi+2YgwLCcI/fPFZkL5PSixOt6ZNKm+w+Hfp/Bciwow=="],
"fsevents": ["fsevents@2.3.3", "", { "os": "darwin" }, "sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw=="],
"generate-function": ["generate-function@2.3.1", "", { "dependencies": { "is-property": "^1.0.2" } }, "sha512-eeB5GfMNeevm/GRYq20ShmsaGcmI81kIX2K9XQx5miC8KdHaC6Jm0qQ8ZNeGOi7wYB8OsdxKs+Y2oVuTFuVwKQ=="],
"github-from-package": ["github-from-package@0.0.0", "", {}, "sha512-SyHy3T1v2NUXn29OsWdxmK6RwHD+vkj3v8en8AOBZ1wBQ/hCAQ5bAQTD02kW4W9tUp/3Qh6J8r9EvntiyCmOOw=="],
"iconv-lite": ["iconv-lite@0.7.2", "", { "dependencies": { "safer-buffer": ">= 2.1.2 < 3.0.0" } }, "sha512-im9DjEDQ55s9fL4EYzOAv0yMqmMBSZp6G0VvFyTMPKWxiSBHUj9NW/qqLmXUwXrrM7AvqSlTCfvqRb0cM8yYqw=="],
"ieee754": ["ieee754@1.2.1", "", {}, "sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA=="],
"inherits": ["inherits@2.0.4", "", {}, "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ=="],
"ini": ["ini@1.3.8", "", {}, "sha512-JV/yugV2uzW5iMRSiZAyDtQd+nxtUnjeLt0acNdw98kKLrvuRVyB80tsREOE7yvGVgalhZ6RNXCmEHkUKBKxew=="],
"is-property": ["is-property@1.0.2", "", {}, "sha512-Ks/IoX00TtClbGQr4TWXemAnktAQvYB7HzcCxDGqEZU6oCmb2INHuOoKxbtR+HFkmYWBKv/dOZtGRiAjDhj92g=="],
"joycon": ["joycon@3.1.1", "", {}, "sha512-34wB/Y7MW7bzjKRjUKTa46I2Z7eV62Rkhva+KkopW7Qvv/OSWBqvkSY7vusOPrNuZcUG3tApvdVgNB8POj3SPw=="],
"js-tokens": ["js-tokens@9.0.1", "", {}, "sha512-mxa9E9ITFOt0ban3j6L5MpjwegGz6lBQmM1IJkWeBZGcMxto50+eWdjC/52xDbS2vy0k7vIMK0Fe2wfL9OQSpQ=="],
"kysely": ["kysely@0.28.17", "", {}, "sha512-nbD8lB9EB3wNdMhOCdx5Li8DxnLbvKByylRLcJ1h+4SkrowVeECAyZlyiKMThF7xFdRz0jSQ2MoJr+wXux2y0Q=="],
"lilconfig": ["lilconfig@3.1.3", "", {}, "sha512-/vlFKAoH5Cgt3Ie+JLhRbwOsCQePABiU3tJ1egGvyQ+33R/vcwM2Zl2QR/LzjsBeItPt3oSVXapn+m4nQDvpzw=="],
"lines-and-columns": ["lines-and-columns@1.2.4", "", {}, "sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg=="],
"load-tsconfig": ["load-tsconfig@0.2.5", "", {}, "sha512-IXO6OCs9yg8tMKzfPZ1YmheJbZCiEsnBdcB03l0OcfK9prKnJb96siuHCr5Fl37/yo9DnKU+TLpxzTUspw9shg=="],
"long": ["long@5.3.2", "", {}, "sha512-mNAgZ1GmyNhD7AuqnTG3/VQ26o760+ZYBPKjPvugO8+nLbYfX6TVpJPseBvopbdY+qpZ/lKUnmEc1LeZYS3QAA=="],
"loupe": ["loupe@3.2.1", "", {}, "sha512-CdzqowRJCeLU72bHvWqwRBBlLcMEtIvGrlvef74kMnV2AolS9Y8xUv1I0U/MNAWMhBlKIoyuEgoJ0t/bbwHbLQ=="],
"lru.min": ["lru.min@1.1.4", "", {}, "sha512-DqC6n3QQ77zdFpCMASA1a3Jlb64Hv2N2DciFGkO/4L9+q/IpIAuRlKOvCXabtRW6cQf8usbmM6BE/TOPysCdIA=="],
"magic-string": ["magic-string@0.30.21", "", { "dependencies": { "@jridgewell/sourcemap-codec": "^1.5.5" } }, "sha512-vd2F4YUyEXKGcLHoq+TEyCjxueSeHnFxyyjNp80yg0XV4vUhnDer/lvvlqM/arB5bXQN5K2/3oinyCRyx8T2CQ=="],
"mimic-response": ["mimic-response@3.1.0", "", {}, "sha512-z0yWI+4FDrrweS8Zmt4Ej5HdJmky15+L2e6Wgn3+iK5fWzb6T3fhNFq2+MeTRb064c6Wr4N/wv0DzQTjNzHNGQ=="],
"minimist": ["minimist@1.2.8", "", {}, "sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA=="],
"mkdirp-classic": ["mkdirp-classic@0.5.3", "", {}, "sha512-gKLcREMhtuZRwRAfqP3RFW+TK4JqApVBtOIftVgjuABpAtpxhPGaDcfvbhNvD0B8iD1oUr/txX35NjcaY6Ns/A=="],
"mlly": ["mlly@1.8.2", "", { "dependencies": { "acorn": "^8.16.0", "pathe": "^2.0.3", "pkg-types": "^1.3.1", "ufo": "^1.6.3" } }, "sha512-d+ObxMQFmbt10sretNDytwt85VrbkhhUA/JBGm1MPaWJ65Cl4wOgLaB1NYvJSZ0Ef03MMEU/0xpPMXUIQ29UfA=="],
"ms": ["ms@2.1.3", "", {}, "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA=="],
"mysql2": ["mysql2@3.22.3", "", { "dependencies": { "aws-ssl-profiles": "^1.1.2", "denque": "^2.1.0", "generate-function": "^2.3.1", "iconv-lite": "^0.7.2", "long": "^5.3.2", "lru.min": "^1.1.4", "named-placeholders": "^1.1.6", "sql-escaper": "^1.3.3" }, "peerDependencies": { "@types/node": ">= 8" } }, "sha512-uWWxvZSRvRhtBdh2CdcuK83YcOfPdmEeEYB069bAmPnV93QApDGVPuvCQOLjlh7tYHEWdgQPrn6kosDxHBVLkA=="],
"mz": ["mz@2.7.0", "", { "dependencies": { "any-promise": "^1.0.0", "object-assign": "^4.0.1", "thenify-all": "^1.0.0" } }, "sha512-z81GNO7nnYMEhrGh9LeymoE4+Yr0Wn5McHIZMK5cfQCl+NDX08sCZgUc9/6MHni9IWuFLm1Z3HTCXu2z9fN62Q=="],
"named-placeholders": ["named-placeholders@1.1.6", "", { "dependencies": { "lru.min": "^1.1.0" } }, "sha512-Tz09sEL2EEuv5fFowm419c1+a/jSMiBjI9gHxVLrVdbUkkNUUfjsVYs9pVZu5oCon/kmRh9TfLEObFtkVxmY0w=="],
"nanoid": ["nanoid@3.3.12", "", { "bin": { "nanoid": "bin/nanoid.cjs" } }, "sha512-ZB9RH/39qpq5Vu6Y+NmUaFhQR6pp+M2Xt76XBnEwDaGcVAqhlvxrl3B2bKS5D3NH3QR76v3aSrKaF/Kiy7lEtQ=="],
"napi-build-utils": ["napi-build-utils@2.0.0", "", {}, "sha512-GEbrYkbfF7MoNaoh2iGG84Mnf/WZfB0GdGEsM8wz7Expx/LlWf5U8t9nvJKXSp3qr5IsEbK04cBGhol/KwOsWA=="],
"node-abi": ["node-abi@3.92.0", "", { "dependencies": { "semver": "^7.3.5" } }, "sha512-KdHvFWZjEKDf0cakgFjebl371GPsISX2oZHcuyKqM7DtogIsHrqKeLTo8wBHxaXRAQlY2PsPlZmfo+9ZCxEREQ=="],
"object-assign": ["object-assign@4.1.1", "", {}, "sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg=="],
"once": ["once@1.4.0", "", { "dependencies": { "wrappy": "1" } }, "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w=="],
"pathe": ["pathe@2.0.3", "", {}, "sha512-WUjGcAqP1gQacoQe+OBJsFA7Ld4DyXuUIjZ5cc75cLHvJ7dtNsTugphxIADwspS+AraAUePCKrSVtPLFj/F88w=="],
"pathval": ["pathval@2.0.1", "", {}, "sha512-//nshmD55c46FuFw26xV/xFAaB5HF9Xdap7HJBBnrKdAd6/GxDBaNA1870O79+9ueg61cZLSVc+OaFlfmObYVQ=="],
"pg": ["pg@8.20.0", "", { "dependencies": { "pg-connection-string": "^2.12.0", "pg-pool": "^3.13.0", "pg-protocol": "^1.13.0", "pg-types": "2.2.0", "pgpass": "1.0.5" }, "optionalDependencies": { "pg-cloudflare": "^1.3.0" }, "peerDependencies": { "pg-native": ">=3.0.1" }, "optionalPeers": ["pg-native"] }, "sha512-ldhMxz2r8fl/6QkXnBD3CR9/xg694oT6DZQ2s6c/RI28OjtSOpxnPrUCGOBJ46RCUxcWdx3p6kw/xnDHjKvaRA=="],
"pg-cloudflare": ["pg-cloudflare@1.3.0", "", {}, "sha512-6lswVVSztmHiRtD6I8hw4qP/nDm1EJbKMRhf3HCYaqud7frGysPv7FYJ5noZQdhQtN2xJnimfMtvQq21pdbzyQ=="],
"pg-connection-string": ["pg-connection-string@2.12.0", "", {}, "sha512-U7qg+bpswf3Cs5xLzRqbXbQl85ng0mfSV/J0nnA31MCLgvEaAo7CIhmeyrmJpOr7o+zm0rXK+hNnT5l9RHkCkQ=="],
"pg-int8": ["pg-int8@1.0.1", "", {}, "sha512-WCtabS6t3c8SkpDBUlb1kjOs7l66xsGdKpIPZsg4wR+B3+u9UAum2odSsF9tnvxg80h4ZxLWMy4pRjOsFIqQpw=="],
"pg-pool": ["pg-pool@3.13.0", "", { "peerDependencies": { "pg": ">=8.0" } }, "sha512-gB+R+Xud1gLFuRD/QgOIgGOBE2KCQPaPwkzBBGC9oG69pHTkhQeIuejVIk3/cnDyX39av2AxomQiyPT13WKHQA=="],
"pg-protocol": ["pg-protocol@1.13.0", "", {}, "sha512-zzdvXfS6v89r6v7OcFCHfHlyG/wvry1ALxZo4LqgUoy7W9xhBDMaqOuMiF3qEV45VqsN6rdlcehHrfDtlCPc8w=="],
"pg-types": ["pg-types@2.2.0", "", { "dependencies": { "pg-int8": "1.0.1", "postgres-array": "~2.0.0", "postgres-bytea": "~1.0.0", "postgres-date": "~1.0.4", "postgres-interval": "^1.1.0" } }, "sha512-qTAAlrEsl8s4OiEQY69wDvcMIdQN6wdz5ojQiOy6YRMuynxenON0O5oCpJI6lshc6scgAY8qvJ2On/p+CXY0GA=="],
"pgpass": ["pgpass@1.0.5", "", { "dependencies": { "split2": "^4.1.0" } }, "sha512-FdW9r/jQZhSeohs1Z3sI1yxFQNFvMcnmfuj4WBMUTxOrAyLMaTcE1aAMBiTlbMNaXvBCQuVi0R7hd8udDSP7ug=="],
"picocolors": ["picocolors@1.1.1", "", {}, "sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA=="],
"picomatch": ["picomatch@4.0.4", "", {}, "sha512-QP88BAKvMam/3NxH6vj2o21R6MjxZUAd6nlwAS/pnGvN9IVLocLHxGYIzFhg6fUQ+5th6P4dv4eW9jX3DSIj7A=="],
"pirates": ["pirates@4.0.7", "", {}, "sha512-TfySrs/5nm8fQJDcBDuUng3VOUKsd7S+zqvbOTiGXHfxX4wK31ard+hoNuvkicM/2YFzlpDgABOevKSsB4G/FA=="],
"pkg-types": ["pkg-types@1.3.1", "", { "dependencies": { "confbox": "^0.1.8", "mlly": "^1.7.4", "pathe": "^2.0.1" } }, "sha512-/Jm5M4RvtBFVkKWRu2BLUTNP8/M2a+UwuAX+ae4770q1qVGtfjG+WTCupoZixokjmHiry8uI+dlY8KXYV5HVVQ=="],
"postcss": ["postcss@8.5.14", "", { "dependencies": { "nanoid": "^3.3.11", "picocolors": "^1.1.1", "source-map-js": "^1.2.1" } }, "sha512-SoSL4+OSEtR99LHFZQiJLkT59C5B1amGO1NzTwj7TT1qCUgUO6hxOvzkOYxD+vMrXBM3XJIKzokoERdqQq/Zmg=="],
"postcss-load-config": ["postcss-load-config@6.0.1", "", { "dependencies": { "lilconfig": "^3.1.1" }, "peerDependencies": { "jiti": ">=1.21.0", "postcss": ">=8.0.9", "tsx": "^4.8.1", "yaml": "^2.4.2" }, "optionalPeers": ["jiti", "postcss", "tsx", "yaml"] }, "sha512-oPtTM4oerL+UXmx+93ytZVN82RrlY/wPUV8IeDxFrzIjXOLF1pN+EmKPLbubvKHT2HC20xXsCAH2Z+CKV6Oz/g=="],
"postgres-array": ["postgres-array@2.0.0", "", {}, "sha512-VpZrUqU5A69eQyW2c5CA1jtLecCsN2U/bD6VilrFDWq5+5UIEVO7nazS3TEcHf1zuPYO/sqGvUvW62g86RXZuA=="],
"postgres-bytea": ["postgres-bytea@1.0.1", "", {}, "sha512-5+5HqXnsZPE65IJZSMkZtURARZelel2oXUEO8rH83VS/hxH5vv1uHquPg5wZs8yMAfdv971IU+kcPUczi7NVBQ=="],
"postgres-date": ["postgres-date@1.0.7", "", {}, "sha512-suDmjLVQg78nMK2UZ454hAG+OAW+HQPZ6n++TNDUX+L0+uUlLywnoxJKDou51Zm+zTCjrCl0Nq6J9C5hP9vK/Q=="],
"postgres-interval": ["postgres-interval@1.2.0", "", { "dependencies": { "xtend": "^4.0.0" } }, "sha512-9ZhXKM/rw350N1ovuWHbGxnGh/SNJ4cnxHiM0rxE4VN41wsg8P8zWn9hv/buK00RP4WvlOyr/RBDiptyxVbkZQ=="],
"prebuild-install": ["prebuild-install@7.1.3", "", { "dependencies": { "detect-libc": "^2.0.0", "expand-template": "^2.0.3", "github-from-package": "0.0.0", "minimist": "^1.2.3", "mkdirp-classic": "^0.5.3", "napi-build-utils": "^2.0.0", "node-abi": "^3.3.0", "pump": "^3.0.0", "rc": "^1.2.7", "simple-get": "^4.0.0", "tar-fs": "^2.0.0", "tunnel-agent": "^0.6.0" }, "bin": { "prebuild-install": "bin.js" } }, "sha512-8Mf2cbV7x1cXPUILADGI3wuhfqWvtiLA1iclTDbFRZkgRQS0NqsPZphna9V+HyTEadheuPmjaJMsbzKQFOzLug=="],
"pump": ["pump@3.0.4", "", { "dependencies": { "end-of-stream": "^1.1.0", "once": "^1.3.1" } }, "sha512-VS7sjc6KR7e1ukRFhQSY5LM2uBWAUPiOPa/A3mkKmiMwSmRFUITt0xuj+/lesgnCv+dPIEYlkzrcyXgquIHMcA=="],
"rc": ["rc@1.2.8", "", { "dependencies": { "deep-extend": "^0.6.0", "ini": "~1.3.0", "minimist": "^1.2.0", "strip-json-comments": "~2.0.1" }, "bin": { "rc": "./cli.js" } }, "sha512-y3bGgqKj3QBdxLbLkomlohkvsA8gdAiUQlSBJnBhfn+BPxg4bc62d8TcBW15wavDfgexCgccckhcZvywyQYPOw=="],
"readable-stream": ["readable-stream@3.6.2", "", { "dependencies": { "inherits": "^2.0.3", "string_decoder": "^1.1.1", "util-deprecate": "^1.0.1" } }, "sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA=="],
"readdirp": ["readdirp@4.1.2", "", {}, "sha512-GDhwkLfywWL2s6vEjyhri+eXmfH6j1L7JE27WhqLeYzoh/A3DBaYGEj2H/HFZCn/kMfim73FXxEJTw06WtxQwg=="],
"resolve-from": ["resolve-from@5.0.0", "", {}, "sha512-qYg9KP24dD5qka9J47d0aVky0N+b4fTU89LN9iDnjB5waksiC49rvMB0PrUJQGoTmH50XPiqOvAjDfaijGxYZw=="],
"rollup": ["rollup@4.60.3", "", { "dependencies": { "@types/estree": "1.0.8" }, "optionalDependencies": { "@rollup/rollup-android-arm-eabi": "4.60.3", "@rollup/rollup-android-arm64": "4.60.3", "@rollup/rollup-darwin-arm64": "4.60.3", "@rollup/rollup-darwin-x64": "4.60.3", "@rollup/rollup-freebsd-arm64": "4.60.3", "@rollup/rollup-freebsd-x64": "4.60.3", "@rollup/rollup-linux-arm-gnueabihf": "4.60.3", "@rollup/rollup-linux-arm-musleabihf": "4.60.3", "@rollup/rollup-linux-arm64-gnu": "4.60.3", "@rollup/rollup-linux-arm64-musl": "4.60.3", "@rollup/rollup-linux-loong64-gnu": "4.60.3", "@rollup/rollup-linux-loong64-musl": "4.60.3", "@rollup/rollup-linux-ppc64-gnu": "4.60.3", "@rollup/rollup-linux-ppc64-musl": "4.60.3", "@rollup/rollup-linux-riscv64-gnu": "4.60.3", "@rollup/rollup-linux-riscv64-musl": "4.60.3", "@rollup/rollup-linux-s390x-gnu": "4.60.3", "@rollup/rollup-linux-x64-gnu": "4.60.3", "@rollup/rollup-linux-x64-musl": "4.60.3", "@rollup/rollup-openbsd-x64": "4.60.3", "@rollup/rollup-openharmony-arm64": "4.60.3", "@rollup/rollup-win32-arm64-msvc": "4.60.3", "@rollup/rollup-win32-ia32-msvc": "4.60.3", "@rollup/rollup-win32-x64-gnu": "4.60.3", "@rollup/rollup-win32-x64-msvc": "4.60.3", "fsevents": "~2.3.2" }, "bin": { "rollup": "dist/bin/rollup" } }, "sha512-pAQK9HalE84QSm4Po3EmWIZPd3FnjkShVkiMlz1iligWYkWQ7wHYd1PF/T7QZ5TVSD6uSTon5gBVMSM4JfBV+A=="],
"safe-buffer": ["safe-buffer@5.2.1", "", {}, "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ=="],
"safer-buffer": ["safer-buffer@2.1.2", "", {}, "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg=="],
"semver": ["semver@7.8.0", "", { "bin": { "semver": "bin/semver.js" } }, "sha512-AcM7dV/5ul4EekoQ29Agm5vri8JNqRyj39o0qpX6vDF2GZrtutZl5RwgD1XnZjiTAfncsJhMI48QQH3sN87YNA=="],
"siginfo": ["siginfo@2.0.0", "", {}, "sha512-ybx0WO1/8bSBLEWXZvEd7gMW3Sn3JFlW3TvX1nREbDLRNQNaeNN8WK0meBwPdAaOI7TtRRRJn/Es1zhrrCHu7g=="],
"simple-concat": ["simple-concat@1.0.1", "", {}, "sha512-cSFtAPtRhljv69IK0hTVZQ+OfE9nePi/rtJmw5UjHeVyVroEqJXP1sFztKUy1qU+xvz3u/sfYJLa947b7nAN2Q=="],
"simple-get": ["simple-get@4.0.1", "", { "dependencies": { "decompress-response": "^6.0.0", "once": "^1.3.1", "simple-concat": "^1.0.0" } }, "sha512-brv7p5WgH0jmQJr1ZDDfKDOSeWWg+OVypG99A/5vYGPqJ6pxiaHLy8nxtFjBA7oMa01ebA9gfh1uMCFqOuXxvA=="],
"source-map": ["source-map@0.7.6", "", {}, "sha512-i5uvt8C3ikiWeNZSVZNWcfZPItFQOsYTUAOkcUPGd8DqDy1uOUikjt5dG+uRlwyvR108Fb9DOd4GvXfT0N2/uQ=="],
"source-map-js": ["source-map-js@1.2.1", "", {}, "sha512-UXWMKhLOwVKb728IUtQPXxfYU+usdybtUrK/8uGE8CQMvrhOpwvzDBwj0QhSL7MQc7vIsISBG8VQ8+IDQxpfQA=="],
"split2": ["split2@4.2.0", "", {}, "sha512-UcjcJOWknrNkF6PLX83qcHM6KHgVKNkV62Y8a5uYDVv9ydGQVwAHMKqHdJje1VTWpljG0WYpCDhrCdAOYH4TWg=="],
"sql-escaper": ["sql-escaper@1.3.3", "", {}, "sha512-BsTCV265VpTp8tm1wyIm1xqQCS+Q9NHx2Sr+WcnUrgLrQ6yiDIvHYJV5gHxsj1lMBy2zm5twLaZao8Jd+S8JJw=="],
"stackback": ["stackback@0.0.2", "", {}, "sha512-1XMJE5fQo1jGH6Y/7ebnwPOBEkIEnT4QF32d5R1+VXdXveM0IBMJt8zfaxX1P3QhVwrYe+576+jkANtSS2mBbw=="],
"std-env": ["std-env@3.10.0", "", {}, "sha512-5GS12FdOZNliM5mAOxFRg7Ir0pWz8MdpYm6AY6VPkGpbA7ZzmbzNcBJQ0GPvvyWgcY7QAhCgf9Uy89I03faLkg=="],
"string_decoder": ["string_decoder@1.3.0", "", { "dependencies": { "safe-buffer": "~5.2.0" } }, "sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA=="],
"strip-json-comments": ["strip-json-comments@2.0.1", "", {}, "sha512-4gB8na07fecVVkOI6Rs4e7T6NOTki5EmL7TUduTs6bu3EdnSycntVJ4re8kgZA+wx9IueI2Y11bfbgwtzuE0KQ=="],
"strip-literal": ["strip-literal@3.1.0", "", { "dependencies": { "js-tokens": "^9.0.1" } }, "sha512-8r3mkIM/2+PpjHoOtiAW8Rg3jJLHaV7xPwG+YRGrv6FP0wwk/toTpATxWYOW0BKdWwl82VT2tFYi5DlROa0Mxg=="],
"sucrase": ["sucrase@3.35.1", "", { "dependencies": { "@jridgewell/gen-mapping": "^0.3.2", "commander": "^4.0.0", "lines-and-columns": "^1.1.6", "mz": "^2.7.0", "pirates": "^4.0.1", "tinyglobby": "^0.2.11", "ts-interface-checker": "^0.1.9" }, "bin": { "sucrase": "bin/sucrase", "sucrase-node": "bin/sucrase-node" } }, "sha512-DhuTmvZWux4H1UOnWMB3sk0sbaCVOoQZjv8u1rDoTV0HTdGem9hkAZtl4JZy8P2z4Bg0nT+YMeOFyVr4zcG5Tw=="],
"tar-fs": ["tar-fs@2.1.4", "", { "dependencies": { "chownr": "^1.1.1", "mkdirp-classic": "^0.5.2", "pump": "^3.0.0", "tar-stream": "^2.1.4" } }, "sha512-mDAjwmZdh7LTT6pNleZ05Yt65HC3E+NiQzl672vQG38jIrehtJk/J3mNwIg+vShQPcLF/LV7CMnDW6vjj6sfYQ=="],
"tar-stream": ["tar-stream@2.2.0", "", { "dependencies": { "bl": "^4.0.3", "end-of-stream": "^1.4.1", "fs-constants": "^1.0.0", "inherits": "^2.0.3", "readable-stream": "^3.1.1" } }, "sha512-ujeqbceABgwMZxEJnk2HDY2DlnUZ+9oEcb1KzTVfYHio0UE6dG71n60d8D2I4qNvleWrrXpmjpt7vZeF1LnMZQ=="],
"thenify": ["thenify@3.3.1", "", { "dependencies": { "any-promise": "^1.0.0" } }, "sha512-RVZSIV5IG10Hk3enotrhvz0T9em6cyHBLkH/YAZuKqd8hRkKhSfCGIcP2KUY0EPxndzANBmNllzWPwak+bheSw=="],
"thenify-all": ["thenify-all@1.6.0", "", { "dependencies": { "thenify": ">= 3.1.0 < 4" } }, "sha512-RNxQH/qI8/t3thXJDwcstUO4zeqo64+Uy/+sNVRBx4Xn2OX+OZ9oP+iJnNFqplFra2ZUVeKCSa2oVWi3T4uVmA=="],
"tinybench": ["tinybench@2.9.0", "", {}, "sha512-0+DUvqWMValLmha6lr4kD8iAMK1HzV0/aKnCtWb9v9641TnP/MFb7Pc2bxoxQjTXAErryXVgUOfv2YqNllqGeg=="],
"tinyexec": ["tinyexec@0.3.2", "", {}, "sha512-KQQR9yN7R5+OSwaK0XQoj22pwHoTlgYqmUscPYoknOoWCWfj/5/ABTMRi69FrKU5ffPVh5QcFikpWJI/P1ocHA=="],
"tinyglobby": ["tinyglobby@0.2.16", "", { "dependencies": { "fdir": "^6.5.0", "picomatch": "^4.0.4" } }, "sha512-pn99VhoACYR8nFHhxqix+uvsbXineAasWm5ojXoN8xEwK5Kd3/TrhNn1wByuD52UxWRLy8pu+kRMniEi6Eq9Zg=="],
"tinypool": ["tinypool@1.1.1", "", {}, "sha512-Zba82s87IFq9A9XmjiX5uZA/ARWDrB03OHlq+Vw1fSdt0I+4/Kutwy8BP4Y/y/aORMo61FQ0vIb5j44vSo5Pkg=="],
"tinyrainbow": ["tinyrainbow@2.0.0", "", {}, "sha512-op4nsTR47R6p0vMUUoYl/a+ljLFVtlfaXkLQmqfLR1qHma1h/ysYk4hEXZ880bf2CYgTskvTa/e196Vd5dDQXw=="],
"tinyspy": ["tinyspy@4.0.4", "", {}, "sha512-azl+t0z7pw/z958Gy9svOTuzqIk6xq+NSheJzn5MMWtWTFywIacg2wUlzKFGtt3cthx0r2SxMK0yzJOR0IES7Q=="],
"tree-kill": ["tree-kill@1.2.2", "", { "bin": { "tree-kill": "cli.js" } }, "sha512-L0Orpi8qGpRG//Nd+H90vFB+3iHnue1zSSGmNOOCh1GLJ7rUKVwV2HvijphGQS2UmhUZewS9VgvxYIdgr+fG1A=="],
"ts-interface-checker": ["ts-interface-checker@0.1.13", "", {}, "sha512-Y/arvbn+rrz3JCKl9C4kVNfTfSm2/mEp5FSz5EsZSANGPSlQrpRI5M4PKF+mJnE52jOO90PnPSc3Ur3bTQw0gA=="],
"tsup": ["tsup@8.5.1", "", { "dependencies": { "bundle-require": "^5.1.0", "cac": "^6.7.14", "chokidar": "^4.0.3", "consola": "^3.4.0", "debug": "^4.4.0", "esbuild": "^0.27.0", "fix-dts-default-cjs-exports": "^1.0.0", "joycon": "^3.1.1", "picocolors": "^1.1.1", "postcss-load-config": "^6.0.1", "resolve-from": "^5.0.0", "rollup": "^4.34.8", "source-map": "^0.7.6", "sucrase": "^3.35.0", "tinyexec": "^0.3.2", "tinyglobby": "^0.2.11", "tree-kill": "^1.2.2" }, "peerDependencies": { "@microsoft/api-extractor": "^7.36.0", "@swc/core": "^1", "postcss": "^8.4.12", "typescript": ">=4.5.0" }, "optionalPeers": ["@microsoft/api-extractor", "@swc/core", "postcss", "typescript"], "bin": { "tsup": "dist/cli-default.js", "tsup-node": "dist/cli-node.js" } }, "sha512-xtgkqwdhpKWr3tKPmCkvYmS9xnQK3m3XgxZHwSUjvfTjp7YfXe5tT3GgWi0F2N+ZSMsOeWeZFh7ZZFg5iPhing=="],
"tunnel-agent": ["tunnel-agent@0.6.0", "", { "dependencies": { "safe-buffer": "^5.0.1" } }, "sha512-McnNiV1l8RYeY8tBgEpuodCC1mLUdbSN+CYBL7kJsJNInOP8UjDDEwdk6Mw60vdLLrr5NHKZhMAOSrR2NZuQ+w=="],
"typescript": ["typescript@5.9.3", "", { "bin": { "tsc": "bin/tsc", "tsserver": "bin/tsserver" } }, "sha512-jl1vZzPDinLr9eUt3J/t7V6FgNEw9QjvBPdysz9KfQDD41fQrC2Y4vKQdiaUpFT4bXlb1RHhLpp8wtm6M5TgSw=="],
"ufo": ["ufo@1.6.4", "", {}, "sha512-JFNbkD1Svwe0KvGi8GOeLcP4kAWQ609twvCdcHxq1oSL8svv39ZuSvajcD8B+5D0eL4+s1Is2D/O6KN3qcTeRA=="],
"undici-types": ["undici-types@7.16.0", "", {}, "sha512-Zz+aZWSj8LE6zoxD+xrjh4VfkIG8Ya6LvYkZqtUQGJPZjYl53ypCaUwWqo7eI0x66KBGeRo+mlBEkMSeSZ38Nw=="],
"util-deprecate": ["util-deprecate@1.0.2", "", {}, "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw=="],
"vite": ["vite@7.3.3", "", { "dependencies": { "esbuild": "^0.27.0", "fdir": "^6.5.0", "picomatch": "^4.0.3", "postcss": "^8.5.6", "rollup": "^4.43.0", "tinyglobby": "^0.2.15" }, "optionalDependencies": { "fsevents": "~2.3.3" }, "peerDependencies": { "@types/node": "^20.19.0 || >=22.12.0", "jiti": ">=1.21.0", "less": "^4.0.0", "lightningcss": "^1.21.0", "sass": "^1.70.0", "sass-embedded": "^1.70.0", "stylus": ">=0.54.8", "sugarss": "^5.0.0", "terser": "^5.16.0", "tsx": "^4.8.1", "yaml": "^2.4.2" }, "optionalPeers": ["@types/node", "jiti", "less", "lightningcss", "sass", "sass-embedded", "stylus", "sugarss", "terser", "tsx", "yaml"], "bin": { "vite": "bin/vite.js" } }, "sha512-/4XH147Ui7OGTjg3HbdWe5arnZQSbfuRzdr9Ec7TQi5I7R+ir0Rlc9GIvD4v0XZurELqA035KVXJXpR61xhiTA=="],
"vite-node": ["vite-node@3.2.4", "", { "dependencies": { "cac": "^6.7.14", "debug": "^4.4.1", "es-module-lexer": "^1.7.0", "pathe": "^2.0.3", "vite": "^5.0.0 || ^6.0.0 || ^7.0.0-0" }, "bin": { "vite-node": "vite-node.mjs" } }, "sha512-EbKSKh+bh1E1IFxeO0pg1n4dvoOTt0UDiXMd/qn++r98+jPO1xtJilvXldeuQ8giIB5IkpjCgMleHMNEsGH6pg=="],
"vitest": ["vitest@3.2.4", "", { "dependencies": { "@types/chai": "^5.2.2", "@vitest/expect": "3.2.4", "@vitest/mocker": "3.2.4", "@vitest/pretty-format": "^3.2.4", "@vitest/runner": "3.2.4", "@vitest/snapshot": "3.2.4", "@vitest/spy": "3.2.4", "@vitest/utils": "3.2.4", "chai": "^5.2.0", "debug": "^4.4.1", "expect-type": "^1.2.1", "magic-string": "^0.30.17", "pathe": "^2.0.3", "picomatch": "^4.0.2", "std-env": "^3.9.0", "tinybench": "^2.9.0", "tinyexec": "^0.3.2", "tinyglobby": "^0.2.14", "tinypool": "^1.1.1", "tinyrainbow": "^2.0.0", "vite": "^5.0.0 || ^6.0.0 || ^7.0.0-0", "vite-node": "3.2.4", "why-is-node-running": "^2.3.0" }, "peerDependencies": { "@edge-runtime/vm": "*", "@types/debug": "^4.1.12", "@types/node": "^18.0.0 || ^20.0.0 || >=22.0.0", "@vitest/browser": "3.2.4", "@vitest/ui": "3.2.4", "happy-dom": "*", "jsdom": "*" }, "optionalPeers": ["@edge-runtime/vm", "@types/debug", "@types/node", "@vitest/browser", "@vitest/ui", "happy-dom", "jsdom"], "bin": { "vitest": "vitest.mjs" } }, "sha512-LUCP5ev3GURDysTWiP47wRRUpLKMOfPh+yKTx3kVIEiu5KOMeqzpnYNsKyOoVrULivR8tLcks4+lga33Whn90A=="],
"why-is-node-running": ["why-is-node-running@2.3.0", "", { "dependencies": { "siginfo": "^2.0.0", "stackback": "0.0.2" }, "bin": { "why-is-node-running": "cli.js" } }, "sha512-hUrmaWBdVDcxvYqnyh09zunKzROWjbZTiNy8dBEjkS7ehEDQibXJ7XvlmtbwuTclUiIyN+CyXQD4Vmko8fNm8w=="],
"wrappy": ["wrappy@1.0.2", "", {}, "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ=="],
"xtend": ["xtend@4.0.2", "", {}, "sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ=="],
"estree-walker/@types/estree": ["@types/estree@1.0.9", "", {}, "sha512-GhdPgy1el4/ImP05X05Uw4cw2/M93BCUmnEvWZNStlCzEKME4Fkk+YpoA5OiHNQmoS7Cafb8Xa3Pya8m1Qrzeg=="],
}
}

View File

@@ -0,0 +1,419 @@
# IdentityDB Foundation Implementation Plan
> **For Hermes:** Use the `subagent-driven-development` skill to execute this plan task-by-task. Enforce strict TDD for every production behavior.
**Goal:** Build the first usable version of `IdentityDB`, a TypeScript package that wraps relational databases and exposes a structured API for storing topics, facts, and their many-to-many graph relationships.
**Architecture:** IdentityDB will use a layered architecture: a storage layer based on Kysely + dialect adapters, a domain layer for topics/facts/links, and a service layer that exposes ergonomic high-level APIs for querying and ingesting memory. Schema initialization will be automatic and idempotent. AI-assisted ingestion will be abstracted behind a pluggable extractor interface so callers can use a small LLM or a deterministic extractor without coupling the core package to a specific model provider.
**Tech Stack:** TypeScript, Bun, Node.js, Kysely, better-sqlite3, pg, mysql2, Vitest, tsup.
---
## Product constraints and interpretation
- The package must support SQLite, PostgreSQL, MySQL, and MariaDB.
- The database model must treat a single fact as a connector between multiple topics.
- Topics can represent concrete entities (`TypeScript`), abstract concepts (`programming language`), or temporal anchors (`2025`).
- Topic abstraction should be explicit in the schema so broad topics can store broad facts while specific topics store specific facts.
- The initial release should prioritize correctness, portability, and ergonomic API design over advanced search or embedding features.
- AI-assisted topic extraction should be implemented as an integration point in v1 foundation, not as a hardcoded provider-specific dependency.
---
## Target repository structure
```text
IdentityDB/
├── src/
│ ├── adapters/
│ │ ├── dialect.ts
│ │ └── index.ts
│ ├── core/
│ │ ├── errors.ts
│ │ ├── identity-db.ts
│ │ ├── migrations.ts
│ │ └── schema.ts
│ ├── ingestion/
│ │ ├── extractor.ts
│ │ ├── naive-extractor.ts
│ │ └── types.ts
│ ├── queries/
│ │ ├── topics.ts
│ │ └── facts.ts
│ ├── types/
│ │ ├── api.ts
│ │ ├── domain.ts
│ │ └── database.ts
│ └── index.ts
├── tests/
│ ├── identity-db.test.ts
│ ├── migrations.test.ts
│ ├── queries.test.ts
│ └── ingestion.test.ts
├── docs/
│ └── plans/
│ └── 2026-05-11-identitydb-foundation.md
├── package.json
├── tsconfig.json
├── tsup.config.ts
├── vitest.config.ts
├── .gitignore
└── README.md
```
---
## Data model proposal
### Tables
#### `topics`
- `id` — string UUID
- `name` — canonical display name, unique
- `normalized_name` — lowercase normalized unique key
- `category` — `entity | concept | temporal | custom`
- `granularity` — `abstract | concrete | mixed`
- `description` — nullable text
- `metadata` — JSON / JSON-text depending on dialect
- `created_at`
- `updated_at`
#### `facts`
- `id` — string UUID
- `statement` — original fact text
- `summary` — optional normalized/clean summary
- `source` — optional source identifier
- `confidence` — nullable numeric confidence
- `metadata` — JSON / JSON-text depending on dialect
- `created_at`
- `updated_at`
#### `fact_topics`
- `fact_id`
- `topic_id`
- `role` — optional semantic label (`subject`, `object`, `time`, etc.)
- `position` — stable order for fact-topic relationships
- composite unique key on (`fact_id`, `topic_id`, `role`)
### Notes
- The graph is modeled through `fact_topics`; facts are the connective tissue between topics.
- No separate topic-to-topic edge table is needed in the initial version because relationships are derived from shared facts.
- JSON portability should be implemented through small helpers so SQLite stores stringified JSON while Postgres/MySQL can still use text-compatible serialization safely.
---
## Public API proposal
### Construction and lifecycle
```ts
const db = await IdentityDB.connect({
client: 'sqlite',
filename: ':memory:',
});
await db.initialize();
await db.close();
```
### Core write APIs
```ts
await db.upsertTopic({
name: 'TypeScript',
category: 'entity',
granularity: 'concrete',
});
await db.addFact({
statement: 'I have worked with TypeScript since 2025.',
topics: [
{ name: 'I', category: 'entity', granularity: 'concrete', role: 'subject' },
{ name: 'TypeScript', category: 'entity', granularity: 'concrete', role: 'object' },
{ name: '2025', category: 'temporal', granularity: 'concrete', role: 'time' },
],
});
```
### Query APIs
```ts
await db.getTopicByName('TypeScript', { includeFacts: true });
await db.getTopicFacts('TypeScript');
await db.getTopicFactsLinkedTo('TypeScript', '2025');
await db.listTopics();
await db.listTopics({ includeFacts: false, limit: 100 });
await db.findConnectedTopics('TypeScript');
await db.findFactsConnectingTopics(['I', 'TypeScript', '2025']);
```
### AI-assisted ingestion API
```ts
await db.ingestStatement('I have worked with TypeScript since 2025.', {
extractor,
});
```
Where `extractor` implements:
```ts
interface FactExtractor {
extract(input: string): Promise<ExtractedFact>;
}
```
The package will ship a simple `NaiveExtractor` for tests/examples, while real deployments can inject an LLM-backed extractor.
---
## Execution plan
### Task 1: Scaffold package tooling and baseline configuration
**Objective:** Create a clean TypeScript package foundation with build and test tooling.
**Files:**
- Create: `package.json`
- Create: `tsconfig.json`
- Create: `tsup.config.ts`
- Create: `vitest.config.ts`
- Create: `.gitignore`
- Modify: `README.md`
**Steps:**
1. Add package metadata, scripts, dependency placeholders, and ESM export configuration.
2. Add TypeScript config for library output.
3. Add tsup config for bundling ESM + type declarations.
4. Add Vitest config targeting Node.
5. Expand README with project direction and current scope.
6. Install dependencies and confirm `bun test` starts correctly.
**Verification:**
- Run: `bun install`
- Run: `bun test`
- Expected: test runner executes successfully even if there are zero or placeholder tests.
**Commit:**
```bash
git add package.json tsconfig.json tsup.config.ts vitest.config.ts .gitignore README.md bun.lock
git commit -m "chore: scaffold IdentityDB package tooling"
```
---
### Task 2: Define domain types and write migration tests first
**Objective:** Lock down the domain model and schema contract before implementing migrations.
**Files:**
- Create: `src/types/domain.ts`
- Create: `src/types/database.ts`
- Create: `src/types/api.ts`
- Create: `src/core/schema.ts`
- Create: `tests/migrations.test.ts`
**Steps:**
1. Write tests that describe the required tables and columns after initialization.
2. Write tests for idempotent initialization (calling twice should not fail).
3. Add domain and API type definitions that match the product model.
4. Add schema description constants used by migrations.
**Verification:**
- Run: `bun test tests/migrations.test.ts`
- Expected before implementation: FAIL because initialization does not exist yet.
**Commit:**
```bash
git add src/types src/core/schema.ts tests/migrations.test.ts
git commit -m "test: define schema contract for topic fact graph"
```
---
### Task 3: Implement dialect adapters and automatic schema initialization
**Objective:** Make the package connect to supported databases and create its schema automatically.
**Files:**
- Create: `src/adapters/dialect.ts`
- Create: `src/adapters/index.ts`
- Create: `src/core/migrations.ts`
- Create: `src/core/errors.ts`
- Modify: `src/core/schema.ts`
- Modify: `tests/migrations.test.ts`
**Steps:**
1. Implement a connection config union for SQLite/Postgres/MySQL-family.
2. Build a dialect factory returning a Kysely instance.
3. Implement `initializeSchema()` with idempotent table creation.
4. Add lightweight helpers for JSON serialization/deserialization portability.
5. Re-run migration tests until green.
**Verification:**
- Run: `bun test tests/migrations.test.ts`
- Expected: PASS
**Commit:**
```bash
git add src/adapters src/core tests/migrations.test.ts
git commit -m "feat: add multi-dialect schema initialization"
```
---
### Task 4: Write failing query tests for topic/fact operations
**Objective:** Specify the behavior of the high-level memory APIs before implementation.
**Files:**
- Create: `tests/identity-db.test.ts`
- Create: `tests/queries.test.ts`
**Steps:**
1. Write tests for `upsertTopic` deduplication by normalized name.
2. Write tests for `addFact` linking multiple topics to one fact.
3. Write tests for `getTopicByName(..., { includeFacts: true })`.
4. Write tests for `getTopicFactsLinkedTo(topicA, topicB)`.
5. Write tests for `listTopics({ includeFacts: false })` returning topic-only records.
6. Write tests for `findConnectedTopics(name)`.
**Verification:**
- Run: `bun test tests/identity-db.test.ts tests/queries.test.ts`
- Expected before implementation: FAIL because `IdentityDB` methods are not implemented.
**Commit:**
```bash
git add tests/identity-db.test.ts tests/queries.test.ts
git commit -m "test: specify memory graph query APIs"
```
---
### Task 5: Implement `IdentityDB` core service and query helpers
**Objective:** Deliver the first usable high-level API for writing and reading memory graph data.
**Files:**
- Create: `src/core/identity-db.ts`
- Create: `src/queries/topics.ts`
- Create: `src/queries/facts.ts`
- Create: `src/index.ts`
- Modify: `src/types/api.ts`
- Modify: `tests/identity-db.test.ts`
- Modify: `tests/queries.test.ts`
**Steps:**
1. Implement `IdentityDB.connect()` and `initialize()`.
2. Implement topic upsert with normalized key handling.
3. Implement fact insertion plus topic linking transactionally.
4. Implement topic lookup with optional fact expansion.
5. Implement topic-to-topic and multi-topic fact queries.
6. Implement topic listing and connected-topic discovery.
7. Re-run the full test suite.
**Verification:**
- Run: `bun test`
- Expected: PASS
**Commit:**
```bash
git add src tests
git commit -m "feat: add IdentityDB core memory graph APIs"
```
---
### Task 6: Add ingestion abstractions and a naive extractor
**Objective:** Support automatic topic/fact ingestion through a pluggable extraction pipeline.
**Files:**
- Create: `src/ingestion/types.ts`
- Create: `src/ingestion/extractor.ts`
- Create: `src/ingestion/naive-extractor.ts`
- Create: `tests/ingestion.test.ts`
- Modify: `src/core/identity-db.ts`
- Modify: `src/index.ts`
**Steps:**
1. Write failing tests for `ingestStatement()` using a fake extractor.
2. Define the extraction contracts and validation rules.
3. Implement `ingestStatement()` by piping extractor output into `addFact()`.
4. Add a deterministic `NaiveExtractor` for examples/tests.
5. Add tests proving extractor-driven topic creation works.
**Verification:**
- Run: `bun test tests/ingestion.test.ts`
- Run: `bun test`
- Expected: PASS
**Commit:**
```bash
git add src/ingestion src/core/identity-db.ts src/index.ts tests/ingestion.test.ts
git commit -m "feat: add pluggable fact ingestion pipeline"
```
---
### Task 7: Polish package docs and publish-ready ergonomics
**Objective:** Make the repository understandable and usable after the foundation lands.
**Files:**
- Modify: `README.md`
- Optionally create: `docs/examples/basic-usage.md`
**Steps:**
1. Document supported databases and the current API surface.
2. Document the topic/fact graph model with a concrete example.
3. Add example code for initialization, querying, and AI-assisted ingestion.
4. Call out current limitations and near-term roadmap.
**Verification:**
- Manually review the README examples against actual exports.
- Run: `bun run build`
- Expected: PASS
**Commit:**
```bash
git add README.md docs/examples/basic-usage.md
git commit -m "docs: document IdentityDB foundation usage"
```
---
## Test strategy
- Use SQLite in-memory for the main automated tests.
- Treat PostgreSQL/MySQL/MariaDB support as adapter-compatibility in the code path, with optional future integration tests behind environment variables.
- Keep all public behavior covered through unit/integration-style tests against the public `IdentityDB` API.
- Add regression tests for normalization, many-to-many fact linking, and topic filtering by connected topic.
---
## Risks and tradeoffs
1. **Cross-dialect JSON handling** — JSON support differs between engines. The initial version should serialize metadata defensively for portability.
2. **Case normalization semantics** — topic uniqueness depends on normalization. The first version should use a simple lowercase-trim normalization and document it.
3. **Temporal topic modeling** — time can be a topic, but richer interval modeling should wait until a later phase.
4. **Abstract vs concrete topic boundaries** — this is partly editorial, so the API should store explicit `granularity` rather than trying to infer it automatically.
5. **LLM extraction variability** — extractor output can be messy. The core package should validate extractor results before writing them.
---
## Out of scope for this foundation pass
- Embeddings or semantic vector search
- Ranking/relevance algorithms
- Full-text search indices
- Topic merging/synonym resolution workflows
- Multi-user authorization / remote HTTP service layer
- Hosted API server package
---
## Immediate execution target
For the first automated execution pass, implement Tasks 1 through 7 in order, but treat SQLite-backed functionality as the required tested path and the other SQL engines as supported adapter targets in the library surface.

View File

@@ -0,0 +1,87 @@
# IdentityDB LLM Extractor Adapter Implementation Plan
> **For Hermes:** Use the `subagent-driven-development` skill to execute this plan task-by-task. Enforce strict TDD for every production behavior.
**Goal:** Add a provider-agnostic LLM-backed fact extractor adapter so callers can plug a small language model into IdentityDB ingestion without coupling the package to a specific SDK.
**Architecture:** Keep `FactExtractor` as the stable ingestion contract, then add an `LlmFactExtractor` adapter that delegates prompting and text generation to a narrow model interface. The adapter should build a deterministic JSON-only extraction prompt, parse structured JSON from the model response, validate the shape, and return `ExtractedFact` objects that flow through the existing ingestion validation path.
**Tech Stack:** TypeScript, Bun, Node.js, Kysely, Vitest, tsup.
---
## Scope and interpretation
- The new adapter must remain provider-agnostic and must not depend on OpenAI, Anthropic, or any other SDK.
- The adapter should accept a minimal language-model interface that returns text so package consumers can bridge any LLM client they want.
- Structured output must be validated in the adapter before returning it to `extractFact()`.
- The adapter should tolerate common model formatting noise such as fenced ```json blocks around the payload.
- Initial release should focus on correctness and predictable integration, not prompt-optimization or retries.
---
## Public API additions
```ts
const extractor = new LlmFactExtractor({
model: {
async generateText(prompt) {
return jsonStringFromSomeLlm(prompt);
},
},
});
const fact = await db.ingestStatement('I have worked with Bun and TypeScript since 2025.', {
extractor,
});
```
Optional customization:
```ts
const extractor = new LlmFactExtractor({
model,
instructions: 'Prefer product and technology topics over generic nouns.',
});
```
---
## Execution plan
### Task 1: Lock the adapter behavior with failing tests
**Objective:** Define the LLM adapter contract before implementation.
**Files:**
- Modify: `tests/ingestion.test.ts`
- Modify: `src/ingestion/types.ts`
- Modify: `src/index.ts`
**Verification:**
- Run focused ingestion tests and confirm they fail for the missing adapter behavior.
### Task 2: Implement the LLM adapter and response parsing
**Objective:** Add a reusable `LlmFactExtractor` implementation plus robust JSON extraction helpers.
**Files:**
- Create: `src/ingestion/llm-extractor.ts`
- Modify: `src/ingestion/types.ts`
- Modify: `src/ingestion/extractor.ts`
- Modify: `src/index.ts`
**Verification:**
- Run the focused ingestion tests until green.
### Task 3: Document the adapter and run the full suite
**Objective:** Expose the new adapter in docs and ensure the whole package still passes verification.
**Files:**
- Modify: `README.md`
- Modify: `src/index.ts`
**Verification:**
- Run `bun run test && bun run check && bun run build`
- Confirm the README shows how to bridge an arbitrary LLM client into the adapter.

View File

@@ -0,0 +1,181 @@
# IdentityDB Memory Expansion Implementation Plan
> **For Hermes:** Use the `subagent-driven-development` skill to execute this plan task-by-task. Enforce strict TDD for every production behavior.
**Goal:** Extend IdentityDB with explicit topic hierarchy, topic alias/canonicalization controls, and portable semantic fact search with embedding-backed similarity APIs.
**Architecture:** Keep the relational core portable across SQLite, PostgreSQL, MySQL, and MariaDB by introducing dedicated extension tables: `topic_relations` for abstract/concrete hierarchy, `topic_aliases` for canonical topic resolution, and `fact_embeddings` for semantic indexing. Expose high-level APIs from `IdentityDB` while preserving DB-agnostic behavior by doing semantic scoring in the application layer first.
**Tech Stack:** TypeScript, Bun, Node.js, Kysely, better-sqlite3, pg, mysql2, Vitest, tsup.
---
## Scope and interpretation
- Topic hierarchy must be explicit rather than inferred only from shared facts.
- Canonical topics must remain first-class records in `topics`; aliases should resolve into those topics without duplicating canonical rows.
- Semantic search must stay provider-agnostic through a pluggable `EmbeddingProvider` interface.
- The first semantic-search release should favor portability and deterministic testing over ANN/vector-extension optimization.
- Ingestion should be able to detect likely duplicate facts by semantic similarity without forcing automatic merges.
---
## Data model additions
### `topic_relations`
- `parent_topic_id`
- `child_topic_id`
- `relation` — initially `parent_of`
- `created_at`
- composite primary key on (`parent_topic_id`, `child_topic_id`, `relation`)
### `topic_aliases`
- `id`
- `topic_id`
- `alias`
- `normalized_alias`
- `is_primary`
- `created_at`
- `updated_at`
- unique key on `normalized_alias`
### `fact_embeddings`
- `fact_id`
- `model`
- `dimensions`
- `embedding`
- `content_hash`
- `created_at`
- `updated_at`
- composite primary key on (`fact_id`, `model`)
---
## Public API additions
### Topic hierarchy
```ts
await db.linkTopics({
parentName: 'programming language',
childName: 'TypeScript',
});
await db.getTopicChildren('programming language');
await db.getTopicParents('TypeScript');
await db.getTopicLineage('TypeScript');
```
### Topic aliases
```ts
await db.addTopicAlias('TypeScript', 'TS');
await db.resolveTopic('ts');
await db.getTopicAliases('TypeScript');
```
### Semantic indexing and search
```ts
await db.indexFactEmbeddings({ provider });
await db.searchFacts({ query: 'When did I start using TS?', provider, limit: 5 });
await db.findSimilarFacts({ statement: 'I started using TypeScript in 2025.', provider, threshold: 0.9 });
```
### Dedup-aware ingestion
```ts
await db.ingestStatement(statement, {
extractor,
dedup: {
provider,
threshold: 0.9,
},
});
```
---
## Execution plan
### Task 1: Lock the extension schema and APIs with failing tests
**Objective:** Define tests for hierarchy, aliases, and semantic search before production code changes.
**Files:**
- Modify: `tests/migrations.test.ts`
- Modify: `tests/identity-db.test.ts`
- Modify: `tests/queries.test.ts`
- Create: `tests/semantic-search.test.ts`
- Modify: `src/types/api.ts`
- Modify: `src/types/domain.ts`
- Modify: `src/types/database.ts`
- Modify: `src/core/schema.ts`
**Verification:**
- Run focused test commands and confirm they fail for missing behavior.
### Task 2: Implement topic hierarchy storage and query APIs
**Objective:** Add `topic_relations` schema support plus parent/child/lineage APIs.
**Files:**
- Modify: `src/core/migrations.ts`
- Modify: `src/core/identity-db.ts`
- Modify: `src/core/utils.ts`
- Modify: `src/queries/topics.ts`
- Modify: `src/types/api.ts`
- Modify: `src/types/domain.ts`
- Modify: `src/types/database.ts`
**Verification:**
- Run hierarchy-focused tests until green.
### Task 3: Implement canonical topic aliases
**Objective:** Add alias storage, alias-aware resolution, and canonical topic lookup semantics.
**Files:**
- Modify: `src/core/migrations.ts`
- Modify: `src/core/identity-db.ts`
- Modify: `src/queries/topics.ts`
- Modify: `src/core/utils.ts`
- Modify: `src/types/api.ts`
- Modify: `src/types/domain.ts`
- Modify: `src/types/database.ts`
**Verification:**
- Run alias-focused tests until green.
### Task 4: Implement embedding-backed indexing and semantic search
**Objective:** Add `EmbeddingProvider`, embedding storage, search APIs, and similarity ranking.
**Files:**
- Create: `src/embeddings/provider.ts`
- Create: `src/queries/embeddings.ts`
- Modify: `src/core/migrations.ts`
- Modify: `src/core/identity-db.ts`
- Modify: `src/core/utils.ts`
- Modify: `src/types/api.ts`
- Modify: `src/types/domain.ts`
- Modify: `src/types/database.ts`
- Modify: `src/index.ts`
- Create: `tests/semantic-search.test.ts`
**Verification:**
- Run semantic-search tests until green.
### Task 5: Add dedup-aware ingestion, docs, and full verification
**Objective:** Surface semantic dedup hints during ingestion, document the new APIs, and run the full suite.
**Files:**
- Modify: `src/ingestion/types.ts`
- Modify: `src/core/identity-db.ts`
- Modify: `README.md`
- Modify: `src/index.ts`
**Verification:**
- Run `bun run test && bun run check && bun run build`
- Update docs to reflect the new public surface.

View File

@@ -0,0 +1,65 @@
# IdentityDB Wiki Documentation Implementation Plan
> **For Hermes:** Execute this plan step-by-step. Prefer concrete repository inspection over assumptions, and verify the wiki remote after each major write.
**Goal:** Verify the IdentityDB wiki repository state, create or clone it as needed, and publish concrete wiki documentation covering the project's purpose, usage, and extractor choices including `NaiveExtractor`.
**Architecture:** Treat the Gitea wiki as a separate Git repository. First verify whether the wiki feature is enabled and whether the `.wiki.git` remote already exists. If the remote does not exist yet, bootstrap it with a minimal `Home.md`, then clone the wiki repo into a local working directory and author Markdown pages there. Keep the documentation practical, using the package README and current source files as the canonical content source.
**Tech Stack:** Gitea, tea CLI, Git, Markdown, Bun/TypeScript project docs.
---
## Execution plan
### Task 1: Inspect wiki availability and remote state
**Objective:** Confirm that the repository has wiki support enabled and determine whether the Git-backed wiki repo already exists.
**Files:**
- Inspect: `https://git.psw.kr/p-sw/IdentityDB`
- Read: `/home/hermes-agent/IdentityDB/README.md`
- Read: `/home/hermes-agent/IdentityDB/src/ingestion/naive-extractor.ts`
- Read: `/home/hermes-agent/IdentityDB/src/ingestion/llm-extractor.ts`
**Verification:**
- Check Gitea repo metadata for `has_wiki=true`.
- Check whether `https://git.psw.kr/p-sw/IdentityDB.wiki.git` is readable.
### Task 2: Bootstrap the wiki repo if missing
**Objective:** Create the Git-backed wiki repository if it has not been materialized yet.
**Files:**
- Create temporarily: `/home/hermes-agent/IdentityDB-wiki-bootstrap/Home.md`
**Verification:**
- Push a first commit to `https://git.psw.kr/p-sw/IdentityDB.wiki.git`.
- Confirm the remote becomes cloneable afterward.
### Task 3: Clone the wiki repo and author concrete pages
**Objective:** Write practical docs explaining why IdentityDB exists, how to use it, and where `NaiveExtractor` fits.
**Files:**
- Clone to: `/home/hermes-agent/IdentityDB.wiki`
- Create/modify: `/home/hermes-agent/IdentityDB.wiki/Home.md`
- Create/modify: `/home/hermes-agent/IdentityDB.wiki/Getting-Started.md`
- Create/modify: `/home/hermes-agent/IdentityDB.wiki/Extractors.md`
- Create/modify: `/home/hermes-agent/IdentityDB.wiki/_Sidebar.md`
**Verification:**
- Review the generated Markdown files locally.
- Ensure internal wiki links resolve by page name.
### Task 4: Commit, push, and verify the published wiki state
**Objective:** Publish the wiki docs and verify the remote history reflects the changes.
**Files:**
- Commit within: `/home/hermes-agent/IdentityDB.wiki`
**Verification:**
- Run `git status --short` and `git log --oneline -n 3` in the wiki repo.
- Push to the remote wiki repo.
- Confirm the wiki is cloneable and the latest commit is visible remotely.

52
package.json Normal file
View File

@@ -0,0 +1,52 @@
{
"name": "identitydb",
"version": "0.1.0",
"description": "TypeScript memory graph database wrapper for topics, facts, and AI-assisted ingestion.",
"license": "MIT",
"type": "module",
"main": "./dist/index.js",
"module": "./dist/index.js",
"types": "./dist/index.d.ts",
"exports": {
".": {
"types": "./dist/index.d.ts",
"import": "./dist/index.js"
}
},
"files": [
"dist",
"README.md",
"LICENSE"
],
"engines": {
"node": ">=20.0.0"
},
"scripts": {
"build": "tsup",
"check": "tsc --noEmit",
"test": "vitest run",
"test:watch": "vitest",
"clean": "rm -rf dist coverage"
},
"keywords": [
"memory",
"graph",
"database",
"typescript",
"ai"
],
"dependencies": {
"better-sqlite3": "^12.1.1",
"kysely": "^0.28.8",
"mysql2": "^3.15.3",
"pg": "^8.16.0"
},
"devDependencies": {
"@types/better-sqlite3": "^7.6.13",
"@types/node": "^24.0.0",
"@types/pg": "^8.20.0",
"tsup": "^8.5.0",
"typescript": "^5.8.3",
"vitest": "^3.2.4"
}
}

163
src/adapters/dialect.ts Normal file
View File

@@ -0,0 +1,163 @@
import Database from 'better-sqlite3';
import { Kysely, MysqlDialect, PostgresDialect, SqliteDialect } from 'kysely';
import { createPool as createMysqlPool } from 'mysql2';
import { Pool as PostgresPool } from 'pg';
import type { IdentityDatabaseSchema } from '../types/database';
import { IdentityDBConfigurationError } from '../core/errors';
/** Connection settings for the embedded SQLite adapter (better-sqlite3). */
export interface SqliteConnectionConfig {
client: 'sqlite';
/** Path to the database file (better-sqlite3 also accepts ':memory:'-style names — confirm against caller usage). */
filename: string;
/** Open the database read-only; defaults to false in `createDatabase`. */
readonly?: boolean;
}
/** Connection settings for the PostgreSQL adapter (pg Pool). */
export interface PostgresConnectionConfig {
client: 'postgres';
/** Full connection string; when set, the discrete fields below are still passed through to the pool. */
connectionString?: string;
host?: string;
port?: number;
database?: string;
user?: string;
password?: string;
/** When true, TLS is enabled with certificate verification disabled — see note in `createDatabase`. */
ssl?: boolean;
}
/** Connection settings for the MySQL/MariaDB adapter (mysql2 pool). */
export interface MysqlConnectionConfig {
client: 'mysql' | 'mariadb';
/** Connection URI; when set it takes precedence over the discrete fields below. */
uri?: string;
host?: string;
port?: number;
database?: string;
user?: string;
password?: string;
}
/** Discriminated union over all supported database clients (discriminant: `client`). */
export type IdentityDBConnectionConfig =
| SqliteConnectionConfig
| PostgresConnectionConfig
| MysqlConnectionConfig;
/** A live Kysely connection plus a `destroy` hook that releases the underlying driver resources. */
export interface DatabaseConnection {
client: IdentityDBConnectionConfig['client'];
db: Kysely<IdentityDatabaseSchema>;
destroy: () => Promise<void>;
}
/**
 * Builds a Kysely database handle for the requested client.
 *
 * Each branch wires the matching driver (better-sqlite3, pg, mysql2) into the
 * corresponding Kysely dialect and returns a `destroy` callback that tears down
 * both the Kysely instance and the underlying driver pool/handle.
 *
 * @param config - Discriminated connection config; `config.client` selects the adapter.
 * @returns The live connection wrapper.
 * @throws IdentityDBConfigurationError when the client is not one of the supported values.
 */
export async function createDatabase(
config: IdentityDBConnectionConfig,
): Promise<DatabaseConnection> {
switch (config.client) {
case 'sqlite': {
const sqlite = new Database(config.filename, {
readonly: config.readonly ?? false,
});
// SQLite does not enforce foreign keys unless this pragma is enabled per connection.
sqlite.pragma('foreign_keys = ON');
const db = new Kysely<IdentityDatabaseSchema>({
dialect: new SqliteDialect({
database: sqlite,
}),
});
return {
client: config.client,
db,
destroy: async () => {
// Destroy Kysely first, then close the synchronous sqlite handle.
await db.destroy();
sqlite.close();
},
};
}
case 'postgres': {
const pool = new PostgresPool({
connectionString: config.connectionString,
host: config.host,
port: config.port,
database: config.database,
user: config.user,
password: config.password,
// NOTE(review): ssl=true enables TLS but disables certificate verification
// (rejectUnauthorized: false) — confirm this is acceptable for production use.
ssl: config.ssl ? { rejectUnauthorized: false } : undefined,
});
const db = new Kysely<IdentityDatabaseSchema>({
dialect: new PostgresDialect({ pool }),
});
return {
client: config.client,
db,
destroy: async () => {
await db.destroy();
await pool.end();
},
};
}
case 'mysql':
case 'mariadb': {
// Build the options object field-by-field so undefined values are omitted
// entirely rather than passed through to mysql2.
const mysqlOptions: {
host?: string;
port?: number;
database?: string;
user?: string;
password?: string;
} = {};
if (config.host !== undefined) {
mysqlOptions.host = config.host;
}
if (config.port !== undefined) {
mysqlOptions.port = config.port;
}
if (config.database !== undefined) {
mysqlOptions.database = config.database;
}
if (config.user !== undefined) {
mysqlOptions.user = config.user;
}
if (config.password !== undefined) {
mysqlOptions.password = config.password;
}
// A URI, when provided, wins over the discrete host/port/... fields.
const pool = config.uri
? createMysqlPool(config.uri)
: createMysqlPool(mysqlOptions);
const db = new Kysely<IdentityDatabaseSchema>({
dialect: new MysqlDialect({ pool }),
});
return {
client: config.client,
db,
destroy: async () => {
await db.destroy();
// mysql2's pool.end is callback-based; adapt it to a Promise.
await new Promise<void>((resolve, reject) => {
pool.end((error) => {
if (error) {
reject(error);
return;
}
resolve();
});
});
},
};
}
default: {
// Exhaustiveness guard: `never` forces a compile error if a new client
// is added to the union without a matching case.
const neverClient: never = config;
throw new IdentityDBConfigurationError(
`Unsupported database client: ${JSON.stringify(neverClient)}`,
);
}
}
}

1
src/adapters/index.ts Normal file
View File

@@ -0,0 +1 @@
// Barrel module: re-exports the connection config types, DatabaseConnection,
// and the createDatabase factory from the dialect adapter.
export * from './dialect';

13
src/core/errors.ts Normal file
View File

@@ -0,0 +1,13 @@
/**
 * Base class for all IdentityDB errors so callers can catch the whole
 * family with a single `instanceof IdentityDBError` check.
 */
export class IdentityDBError extends Error {
  override name = 'IdentityDBError';

  constructor(message: string) {
    super(message);
  }
}

/**
 * Raised for invalid connection or adapter configuration, such as an
 * unsupported database client.
 */
export class IdentityDBConfigurationError extends IdentityDBError {
  override name = 'IdentityDBConfigurationError';
}

736
src/core/identity-db.ts Normal file
View File

@@ -0,0 +1,736 @@
import {
type ConnectedTopic,
type Fact,
type FactTopic,
type FindSimilarFactsInput,
type IndexFactEmbeddingsInput,
type ListTopicsOptions,
type ScoredFact,
type SearchFactsInput,
type Topic,
type TopicLookupOptions,
type TopicWithFacts,
type UpsertTopicInput,
type AddFactInput,
type LinkTopicsInput,
} from '../types/api';
import type { IngestStatementOptions } from '../ingestion/types';
import type { DatabaseConnection, IdentityDBConnectionConfig } from '../adapters/dialect';
import type { IdentityDatabaseSchema } from '../types/database';
import type { FactRecord, TopicRecord } from '../types/domain';
import { createDatabase } from '../adapters/dialect';
import { IdentityDBError } from './errors';
import { initializeSchema } from './migrations';
import {
canonicalizeTopicName,
cosineSimilarity,
createContentHash,
createId,
deserializeEmbedding,
mapFactRow,
mapTopicRow,
normalizeTopicName,
nowIsoString,
serializeEmbedding,
serializeMetadata,
} from './utils';
import { extractFact } from '../ingestion/extractor';
import {
findFactRowsConnectingTopicIds,
findFactRowsForTopicId,
findTopicLinksForFactIds,
} from '../queries/facts';
import {
findConnectedTopicRows,
findTopicRowByNameOrAlias,
findTopicRowByNormalizedAlias,
findTopicRowByNormalizedName,
listTopicAliasRowsForTopicId,
listTopicRows,
findChildTopicRows,
findParentTopicRows,
type DatabaseExecutor,
} from '../queries/topics';
export class IdentityDB {
/** Private: instances are created through the `connect()` factory only. */
private constructor(private readonly connection: DatabaseConnection) {}
/**
 * Opens a connection for the given adapter config and returns a ready
 * IdentityDB. Call `initialize()` before first use to create the schema.
 */
static async connect(config: IdentityDBConnectionConfig): Promise<IdentityDB> {
const connection = await createDatabase(config);
return new IdentityDB(connection);
}
/** Applies the schema setup/migrations for this connection. */
async initialize(): Promise<void> {
await initializeSchema(this.connection.db);
}
/** Releases the underlying driver resources; the instance is unusable afterwards. */
async close(): Promise<void> {
await this.connection.destroy();
}
/** Creates or updates a topic by normalized name; delegates to the shared executor path. */
async upsertTopic(input: UpsertTopicInput): Promise<Topic> {
return this.upsertTopicInExecutor(this.connection.db, input);
}
/**
 * Inserts a fact and links it to its topics in a single transaction.
 *
 * Topics are upserted in input order; each link row records the topic's
 * optional role and its position within the statement.
 *
 * @throws IdentityDBError when the statement is blank or no topics are given.
 */
async addFact(input: AddFactInput): Promise<Fact> {
if (input.statement.trim().length === 0) {
throw new IdentityDBError('Fact statement cannot be empty.');
}
if (input.topics.length === 0) {
throw new IdentityDBError('A fact must be linked to at least one topic.');
}
return this.connection.db.transaction().execute(async (trx) => {
const createdAt = nowIsoString();
const factId = createId();
await trx
.insertInto('facts')
.values({
id: factId,
statement: input.statement.trim(),
summary: input.summary ?? null,
source: input.source ?? null,
confidence: input.confidence ?? null,
metadata: serializeMetadata(input.metadata),
created_at: createdAt,
updated_at: createdAt,
})
.execute();
const topics: FactTopic[] = [];
// NOTE(review): two input topics that normalize to the same canonical topic
// would insert two fact_topics rows with the same (fact_id, topic_id) —
// confirm the schema's key allows this or whether input should be deduped.
for (let index = 0; index < input.topics.length; index += 1) {
const topicInput = input.topics[index]!;
const topic = await this.upsertTopicInExecutor(trx, topicInput);
await trx
.insertInto('fact_topics')
.values({
fact_id: factId,
topic_id: topic.id,
role: topicInput.role ?? null,
position: index,
created_at: createdAt,
})
.execute();
topics.push({
...topic,
role: topicInput.role ?? null,
position: index,
});
}
// Return the hydrated fact directly from the inputs rather than re-reading rows.
return {
id: factId,
statement: input.statement.trim(),
summary: input.summary ?? null,
source: input.source ?? null,
confidence: input.confidence ?? null,
metadata: input.metadata ?? null,
createdAt,
updatedAt: createdAt,
topics,
};
});
}
/**
 * Runs the pluggable extractor over a raw statement and stores the result.
 *
 * When an embedding provider is supplied, a semantic dedup check runs first:
 * if an existing fact scores at or above `duplicateThreshold` (default 0.97),
 * that fact is returned and nothing new is written. Otherwise the fact is
 * inserted and its embedding indexed.
 *
 * NOTE(review): on a dedup hit this returns the ScoredFact from
 * findSimilarFacts, so the result carries an extra `score` property not on
 * the declared `Fact` return type — confirm callers tolerate this.
 */
async ingestStatement(
statement: string,
options: IngestStatementOptions,
): Promise<Fact> {
const extracted = await extractFact(statement, options.extractor);
const factInput: AddFactInput = {
statement: extracted.statement ?? statement,
topics: extracted.topics,
};
// Copy optional extractor fields only when present so addFact's own
// defaulting (?? null) stays in control of absent values.
if (extracted.summary !== undefined) {
factInput.summary = extracted.summary;
}
if (extracted.source !== undefined) {
factInput.source = extracted.source;
}
if (extracted.confidence !== undefined) {
factInput.confidence = extracted.confidence;
}
if (extracted.metadata !== undefined) {
factInput.metadata = extracted.metadata;
}
if (options.embeddingProvider) {
// NOTE(review): the dedup search filters by all extracted topic names; if
// any of those topics does not exist yet, the search short-circuits to []
// and no dedup occurs — confirm this narrowing is intended.
const similarFacts = await this.findSimilarFacts({
statement: factInput.statement,
provider: options.embeddingProvider,
topicNames: factInput.topics.map((topic) => topic.name),
limit: 1,
minimumScore: options.duplicateThreshold ?? 0.97,
});
if (similarFacts[0]) {
return similarFacts[0];
}
}
const fact = await this.addFact(factInput);
if (options.embeddingProvider) {
await this.indexFactEmbedding(fact.id, { provider: options.embeddingProvider });
}
return fact;
}
/**
 * (Re)indexes embeddings for every stored fact under the provider's model.
 *
 * Uses the provider's batch `embedMany` when available, otherwise embeds each
 * statement individually. All writes happen in one transaction so a partial
 * failure leaves no half-indexed state.
 *
 * @throws IdentityDBError when the provider returns a mismatched count or a
 *   vector with the wrong dimensionality.
 */
async indexFactEmbeddings(input: IndexFactEmbeddingsInput): Promise<void> {
const factRows = await this.connection.db.selectFrom('facts').selectAll().orderBy('created_at', 'asc').execute();
if (factRows.length === 0) {
return;
}
const embeddings = input.provider.embedMany
? await input.provider.embedMany(factRows.map((factRow) => factRow.statement))
: await Promise.all(factRows.map((factRow) => input.provider.embed(factRow.statement)));
if (embeddings.length !== factRows.length) {
throw new IdentityDBError('Embedding provider returned a mismatched number of embeddings.');
}
await this.connection.db.transaction().execute(async (trx) => {
for (let index = 0; index < factRows.length; index += 1) {
const factRow = factRows[index]!;
const embedding = embeddings[index]!;
this.assertEmbeddingShape(embedding, input.provider.dimensions);
await this.upsertFactEmbeddingRecord(trx, factRow.id, factRow.statement, embedding, input.provider.model);
}
});
}
async indexFactEmbedding(factId: string, input: IndexFactEmbeddingsInput): Promise<void> {
const factRow = await this.connection.db
.selectFrom('facts')
.selectAll()
.where('id', '=', factId)
.executeTakeFirst();
if (!factRow) {
throw new IdentityDBError(`Fact not found: ${factId}`);
}
const embedding = await input.provider.embed(factRow.statement);
this.assertEmbeddingShape(embedding, input.provider.dimensions);
await this.connection.db.transaction().execute(async (trx) => {
await this.upsertFactEmbeddingRecord(trx, factRow.id, factRow.statement, embedding, input.provider.model);
});
}
/**
 * Embeds the query text and returns matching facts ranked by cosine similarity.
 * A blank query yields an empty result without calling the provider.
 */
async searchFacts(input: SearchFactsInput): Promise<ScoredFact[]> {
  const trimmedQuery = input.query.trim();
  if (trimmedQuery === '') {
    return [];
  }
  const embeddedQuery = await input.provider.embed(trimmedQuery);
  this.assertEmbeddingShape(embeddedQuery, input.provider.dimensions);
  return this.searchFactsByEmbedding({
    providerModel: input.provider.model,
    queryEmbedding: embeddedQuery,
    topicNames: input.topicNames,
    limit: input.limit,
    minimumScore: input.minimumScore,
  });
}
/**
 * Finds stored facts semantically similar to an arbitrary statement.
 * A blank statement yields an empty result without calling the provider.
 */
async findSimilarFacts(input: FindSimilarFactsInput): Promise<ScoredFact[]> {
  const candidate = input.statement.trim();
  if (candidate === '') {
    return [];
  }
  const embeddedStatement = await input.provider.embed(candidate);
  this.assertEmbeddingShape(embeddedStatement, input.provider.dimensions);
  return this.searchFactsByEmbedding({
    providerModel: input.provider.model,
    queryEmbedding: embeddedStatement,
    topicNames: input.topicNames,
    limit: input.limit,
    minimumScore: input.minimumScore,
  });
}
/**
 * Records an explicit parent-of relation between two topics, creating either
 * topic if missing. Linking is idempotent: an existing relation row is left
 * untouched.
 *
 * NOTE(review): the parent is upserted with granularity 'abstract', which the
 * upsert path applies even to a pre-existing topic — confirm overwriting a
 * previously set granularity is intended.
 *
 * NOTE(review): only a direct self-link is rejected here; longer cycles
 * (A->B->A) are not prevented at write time. getTopicLineage guards traversal
 * with a visited set, so reads will not loop.
 *
 * @throws IdentityDBError when either name normalizes to empty or both names
 *   normalize to the same topic.
 */
async linkTopics(input: LinkTopicsInput): Promise<void> {
const parentNormalizedName = normalizeTopicName(input.parentName);
const childNormalizedName = normalizeTopicName(input.childName);
if (parentNormalizedName.length === 0 || childNormalizedName.length === 0) {
throw new IdentityDBError('Topic hierarchy links require both a parent and child topic name.');
}
if (parentNormalizedName === childNormalizedName) {
throw new IdentityDBError('A topic cannot be linked as its own parent.');
}
await this.connection.db.transaction().execute(async (trx) => {
const parentTopic = await this.upsertTopicInExecutor(trx, {
name: input.parentName,
granularity: 'abstract',
});
const childTopic = await this.upsertTopicInExecutor(trx, {
name: input.childName,
});
// Check-then-insert instead of an upsert to stay portable across dialects.
const existing = await trx
.selectFrom('topic_relations')
.select(['parent_topic_id'])
.where('parent_topic_id', '=', parentTopic.id)
.where('child_topic_id', '=', childTopic.id)
.where('relation', '=', 'parent_of')
.executeTakeFirst();
if (!existing) {
await trx
.insertInto('topic_relations')
.values({
parent_topic_id: parentTopic.id,
child_topic_id: childTopic.id,
relation: 'parent_of',
created_at: nowIsoString(),
})
.execute();
}
});
}
/**
 * Registers an alias that resolves to a canonical topic.
 *
 * Rules enforced inside one transaction:
 * - the canonical topic is upserted first;
 * - an alias equal to the canonical topic's own normalized name is a no-op;
 * - an alias that matches another canonical topic's name is rejected;
 * - an alias already registered for another topic is rejected;
 * - an alias already registered for this topic is a no-op.
 *
 * @throws IdentityDBError when the alias is empty or conflicts as above.
 */
async addTopicAlias(canonicalName: string, alias: string): Promise<void> {
const normalizedAlias = normalizeTopicName(alias);
if (normalizedAlias.length === 0) {
throw new IdentityDBError('Topic alias cannot be empty.');
}
await this.connection.db.transaction().execute(async (trx) => {
const canonicalTopic = await this.upsertTopicInExecutor(trx, { name: canonicalName });
if (normalizedAlias === canonicalTopic.normalizedName) {
return;
}
const exactTopicMatch = await findTopicRowByNormalizedName(trx, normalizedAlias);
if (exactTopicMatch && exactTopicMatch.id !== canonicalTopic.id) {
throw new IdentityDBError('Cannot assign an alias that already belongs to another canonical topic.');
}
const aliasMatch = await findTopicRowByNormalizedAlias(trx, normalizedAlias);
if (aliasMatch) {
if (aliasMatch.id !== canonicalTopic.id) {
throw new IdentityDBError('Cannot assign an alias that already resolves to another topic.');
}
// Alias already points at this topic — nothing to write.
return;
}
const createdAt = nowIsoString();
await trx
.insertInto('topic_aliases')
.values({
id: createId(),
topic_id: canonicalTopic.id,
alias: canonicalizeTopicName(alias),
normalized_alias: normalizedAlias,
is_primary: 0,
created_at: createdAt,
updated_at: createdAt,
})
.execute();
});
}
/** Resolves a name or alias to its canonical topic, or null when unknown. */
async resolveTopic(name: string): Promise<Topic | null> {
  const canonicalRow = await this.getRequiredTopicRow(name);
  if (!canonicalRow) {
    return null;
  }
  return mapTopicRow(canonicalRow);
}
/** Lists the alias strings registered for a topic; empty when the topic is unknown. */
async getTopicAliases(name: string): Promise<string[]> {
  const canonicalRow = await this.getRequiredTopicRow(name);
  if (!canonicalRow) {
    return [];
  }
  const aliasRows = await listTopicAliasRowsForTopicId(this.connection.db, canonicalRow.id);
  return aliasRows.map(({ alias }) => alias);
}
/** Returns the direct child topics of the named topic in the explicit hierarchy. */
async getTopicChildren(name: string): Promise<Topic[]> {
  const parentRow = await this.getRequiredTopicRow(name);
  if (!parentRow) {
    return [];
  }
  const children = await findChildTopicRows(this.connection.db, parentRow.id);
  return children.map((child) => mapTopicRow(child));
}
/** Returns the direct parent topics of the named topic in the explicit hierarchy. */
async getTopicParents(name: string): Promise<Topic[]> {
  const childRow = await this.getRequiredTopicRow(name);
  if (!childRow) {
    return [];
  }
  const parents = await findParentTopicRows(this.connection.db, childRow.id);
  return parents.map((parent) => mapTopicRow(parent));
}
/**
 * Walks the parent hierarchy breadth-first from the named topic and returns
 * every distinct ancestor in discovery order. The starting topic itself is
 * excluded, and a visited set guards against relation cycles.
 */
async getTopicLineage(name: string): Promise<Topic[]> {
  const originRow = await this.getRequiredTopicRow(name);
  if (!originRow) {
    return [];
  }
  const ancestors: Topic[] = [];
  const seen = new Set<string>([originRow.id]);
  const queue: string[] = [originRow.id];
  while (queue.length > 0) {
    const topicId = queue.shift()!;
    const parentRows = await findParentTopicRows(this.connection.db, topicId);
    for (const parentRow of parentRows) {
      if (seen.has(parentRow.id)) {
        continue;
      }
      seen.add(parentRow.id);
      queue.push(parentRow.id);
      ancestors.push(mapTopicRow(parentRow));
    }
  }
  return ancestors;
}
/** Returns all facts linked to the named topic; empty when the topic is unknown. */
async getTopicFacts(name: string): Promise<Fact[]> {
  const topic = await this.getRequiredTopicRow(name);
  if (!topic) {
    return [];
  }
  const rows = await findFactRowsForTopicId(this.connection.db, topic.id);
  return this.hydrateFacts(rows);
}
/** Returns facts that link the named topic together with a second topic. */
async getTopicFactsLinkedTo(name: string, linkedTopicName: string): Promise<Fact[]> {
  return this.findFactsConnectingTopics([name, linkedTopicName]);
}
/**
 * Returns facts that are linked to every one of the named topics.
 *
 * If any name fails to resolve, the result is empty (a connection through a
 * nonexistent topic cannot exist).
 *
 * Fix: the previous `topicRow === undefined` check was inconsistent with the
 * falsy check used by resolveTopicIds and every other lookup in this class;
 * a null row would have slipped past it and crashed on the `!` assertion.
 * The falsy guard below handles both null and undefined and removes the
 * non-null assertions entirely.
 */
async findFactsConnectingTopics(names: string[]): Promise<Fact[]> {
  if (names.length === 0) {
    return [];
  }
  const topicRows = await Promise.all(names.map((name) => this.getRequiredTopicRow(name)));
  const topicIds: string[] = [];
  for (const topicRow of topicRows) {
    if (!topicRow) {
      return [];
    }
    topicIds.push(topicRow.id);
  }
  const factRows = await findFactRowsConnectingTopicIds(this.connection.db, topicIds);
  return this.hydrateFacts(factRows);
}
/**
 * Looks up a topic by name or alias.
 *
 * Overloads: with `{ includeFacts: true }` the result is hydrated with the
 * topic's facts (TopicWithFacts); otherwise a bare Topic. Returns null when
 * no topic resolves.
 */
async getTopicByName(
name: string,
options: { includeFacts: true },
): Promise<TopicWithFacts | null>;
async getTopicByName(name: string, options?: TopicLookupOptions): Promise<Topic | null>;
async getTopicByName(
name: string,
options?: TopicLookupOptions,
): Promise<Topic | TopicWithFacts | null> {
const topicRow = await this.getRequiredTopicRow(name);
if (!topicRow) {
return null;
}
const topic = mapTopicRow(topicRow);
if (options?.includeFacts) {
// Re-resolve by the caller-supplied name; getTopicFacts performs its own lookup.
return {
...topic,
facts: await this.getTopicFacts(name),
};
}
return topic;
}
async listTopics(options: { includeFacts: true; limit?: number }): Promise<TopicWithFacts[]>;
async listTopics(options?: ListTopicsOptions): Promise<Topic[]>;
/**
 * Lists topics up to an optional limit, optionally hydrated with their facts.
 * Fact hydration runs one topic at a time, in listing order.
 */
async listTopics(
options?: ListTopicsOptions,
): Promise<Topic[] | TopicWithFacts[]> {
  const topicRows = await listTopicRows(this.connection.db, options?.limit);
  if (!options?.includeFacts) {
    return topicRows.map((topicRow) => mapTopicRow(topicRow));
  }
  const hydrated: TopicWithFacts[] = [];
  for (const topicRow of topicRows) {
    const facts = await this.getTopicFacts(topicRow.name);
    hydrated.push({ ...mapTopicRow(topicRow), facts });
  }
  return hydrated;
}
/**
 * Returns topics that co-occur with the named topic on at least one fact,
 * each annotated with the number of shared facts.
 */
async findConnectedTopics(name: string): Promise<ConnectedTopic[]> {
  const anchorRow = await this.getRequiredTopicRow(name);
  if (!anchorRow) {
    return [];
  }
  const connectedRows = await findConnectedTopicRows(this.connection.db, anchorRow.id);
  return connectedRows.map((connectedRow) => {
    const topic = mapTopicRow(connectedRow);
    return { ...topic, sharedFactCount: connectedRow.shared_fact_count };
  });
}
/**
 * Shared scoring path behind searchFacts/findSimilarFacts.
 *
 * Candidate selection: with topic filters, facts connected to every named
 * topic; without, every fact that has an embedding for the provider model.
 * Cosine similarity is computed in the application layer, filtered by
 * `minimumScore` (default 0), sorted by score descending with created_at as
 * a stable tiebreaker, and truncated to `limit` (default 5).
 *
 * Fix: removed the `factRows.length > 0 ? … : []` guard around the embedding
 * query — it was unreachable dead code, since the early return above already
 * guarantees factRows is non-empty at that point.
 */
private async searchFactsByEmbedding(input: {
providerModel: string;
queryEmbedding: number[];
topicNames?: string[] | undefined;
limit?: number | undefined;
minimumScore?: number | undefined;
}): Promise<ScoredFact[]> {
  const topicIds = await this.resolveTopicIds(input.topicNames);
  // null signals that at least one requested topic name does not exist.
  if (topicIds === null) {
    return [];
  }
  const factRows = topicIds.length > 0
    ? await findFactRowsConnectingTopicIds(this.connection.db, topicIds)
    : await this.connection.db
        .selectFrom('facts')
        .innerJoin('fact_embeddings', 'fact_embeddings.fact_id', 'facts.id')
        .selectAll('facts')
        .where('fact_embeddings.model', '=', input.providerModel)
        .orderBy('facts.created_at', 'asc')
        .execute();
  if (factRows.length === 0) {
    return [];
  }
  // factRows is guaranteed non-empty here, so the 'in' filter is always valid.
  const embeddingRows = await this.connection.db
    .selectFrom('fact_embeddings')
    .selectAll()
    .where('model', '=', input.providerModel)
    .where('fact_id', 'in', factRows.map((factRow) => factRow.id))
    .execute();
  const embeddingsByFactId = new Map(
    embeddingRows.map((embeddingRow) => [embeddingRow.fact_id, deserializeEmbedding(embeddingRow.embedding)]),
  );
  // NOTE(review): topic-filtered candidates may lack an embedding row and are
  // scored against an empty vector — confirm cosineSimilarity yields 0 (not
  // NaN) for mismatched lengths so they are excluded by positive thresholds.
  const scoredRows = factRows
    .map((factRow) => ({
      factRow,
      score: cosineSimilarity(input.queryEmbedding, embeddingsByFactId.get(factRow.id) ?? []),
    }))
    .filter((entry) => entry.score >= (input.minimumScore ?? 0))
    .sort((left, right) => {
      if (right.score !== left.score) {
        return right.score - left.score;
      }
      // Equal scores: older facts first for deterministic ordering.
      return left.factRow.created_at.localeCompare(right.factRow.created_at);
    })
    .slice(0, input.limit ?? 5);
  if (scoredRows.length === 0) {
    return [];
  }
  const hydratedFacts = await this.hydrateFacts(scoredRows.map((entry) => entry.factRow));
  const factsById = new Map(hydratedFacts.map((fact) => [fact.id, fact]));
  return scoredRows.map((entry) => ({
    ...factsById.get(entry.factRow.id)!,
    score: entry.score,
  }));
}
private async resolveTopicIds(topicNames?: string[]): Promise<string[] | null> {
if (!topicNames || topicNames.length === 0) {
return [];
}
const topicRows = await Promise.all(topicNames.map((topicName) => this.getRequiredTopicRow(topicName)));
if (topicRows.some((topicRow) => !topicRow)) {
return null;
}
return topicRows.map((topicRow) => topicRow!.id);
}
/**
 * Replaces the embedding row for (factId, model) with a fresh record.
 *
 * Implemented as delete-then-insert rather than a native upsert — presumably
 * to stay portable across SQLite/PostgreSQL/MySQL upsert syntaxes; confirm
 * the caller always wraps this in a transaction so the pair is atomic.
 */
private async upsertFactEmbeddingRecord(
executor: DatabaseExecutor,
factId: string,
statement: string,
embedding: number[],
model: string,
): Promise<void> {
const timestamp = nowIsoString();
await executor
.deleteFrom('fact_embeddings')
.where('fact_id', '=', factId)
.where('model', '=', model)
.execute();
await executor
.insertInto('fact_embeddings')
.values({
fact_id: factId,
model,
dimensions: embedding.length,
embedding: serializeEmbedding(embedding),
// Hash of the statement text, stored alongside for change detection.
content_hash: createContentHash(statement),
created_at: timestamp,
updated_at: timestamp,
})
.execute();
}
/** Throws when an embedding's length differs from the provider's declared dimensions. */
private assertEmbeddingShape(embedding: number[], expectedDimensions: number): void {
  if (embedding.length === expectedDimensions) {
    return;
  }
  throw new IdentityDBError(
    `Embedding dimension mismatch. Expected ${expectedDimensions}, received ${embedding.length}.`,
  );
}
/**
 * Creates or updates a topic within the given executor/transaction.
 * Resolution order: exact normalized-name match (rename allowed), then
 * alias match (canonical name preserved), then insert of a new topic row.
 */
private async upsertTopicInExecutor(
  executor: DatabaseExecutor,
  input: UpsertTopicInput,
): Promise<Topic> {
  const normalizedName = normalizeTopicName(input.name);
  if (normalizedName.length === 0) {
    throw new IdentityDBError('Topic name cannot be empty.');
  }
  const byName = await findTopicRowByNormalizedName(executor, normalizedName);
  const now = nowIsoString();
  if (byName) {
    // Direct name match: the caller may rename the canonical topic.
    return this.updateTopicRowInExecutor(executor, byName, input, now, true);
  }
  const byAlias = await findTopicRowByNormalizedAlias(executor, normalizedName);
  if (byAlias) {
    // Alias match: keep the canonical name, only update remaining fields.
    return this.updateTopicRowInExecutor(executor, byAlias, input, now, false);
  }
  const newRow: TopicRecord = {
    id: createId(),
    name: canonicalizeTopicName(input.name),
    normalized_name: normalizedName,
    category: input.category ?? 'custom',
    granularity: input.granularity ?? 'mixed',
    description: input.description ?? null,
    metadata: serializeMetadata(input.metadata),
    created_at: now,
    updated_at: now,
  };
  await executor.insertInto('topics').values(newRow).execute();
  return mapTopicRow(newRow);
}
/**
 * Applies input fields onto an existing topic row and returns the refreshed
 * topic. When shouldRename is false (alias match) the stored name is kept.
 * `undefined` inputs mean "leave unchanged"; explicit null clears the field.
 */
private async updateTopicRowInExecutor(
  executor: DatabaseExecutor,
  existing: TopicRecord,
  input: UpsertTopicInput,
  now: string,
  shouldRename: boolean,
): Promise<Topic> {
  const nextName = shouldRename ? canonicalizeTopicName(input.name) : existing.name;
  const nextDescription =
    input.description !== undefined ? input.description : existing.description;
  const nextMetadata =
    input.metadata !== undefined ? serializeMetadata(input.metadata) : existing.metadata;
  await executor
    .updateTable('topics')
    .set({
      name: nextName,
      category: input.category ?? existing.category,
      granularity: input.granularity ?? existing.granularity,
      description: nextDescription,
      metadata: nextMetadata,
      updated_at: now,
    })
    .where('id', '=', existing.id)
    .execute();
  // Re-read so the returned Topic reflects exactly what was persisted.
  const refreshed = await executor
    .selectFrom('topics')
    .selectAll()
    .where('id', '=', existing.id)
    .executeTakeFirstOrThrow();
  return mapTopicRow(refreshed);
}
/** Looks up a topic row by canonical name or alias; undefined for blank names or misses. */
private async getRequiredTopicRow(name: string): Promise<TopicRecord | undefined> {
  const normalized = normalizeTopicName(name);
  return normalized.length === 0
    ? undefined
    : findTopicRowByNameOrAlias(this.connection.db, normalized);
}
/** Attaches ordered topic links to raw fact rows, producing fully-hydrated facts. */
private async hydrateFacts(factRows: FactRecord[]): Promise<Fact[]> {
  const links = await findTopicLinksForFactIds(
    this.connection.db,
    factRows.map((row) => row.id),
  );
  // Group the joined topic rows by fact id, preserving link order.
  const topicsByFactId = new Map<string, FactTopic[]>();
  for (const link of links) {
    const factTopic: FactTopic = {
      ...mapTopicRow(link),
      role: link.role,
      position: link.position,
    };
    const bucket = topicsByFactId.get(link.fact_id);
    if (bucket) {
      bucket.push(factTopic);
    } else {
      topicsByFactId.set(link.fact_id, [factTopic]);
    }
  }
  return factRows.map((row) => mapFactRow(row, topicsByFactId.get(row.id) ?? []));
}
}

142
src/core/migrations.ts Normal file
View File

@@ -0,0 +1,142 @@
import type { Kysely } from 'kysely';
import {
FACTS_TABLE,
FACT_EMBEDDINGS_TABLE,
FACT_TOPICS_TABLE,
TOPIC_ALIASES_TABLE,
TOPIC_RELATIONS_TABLE,
TOPICS_TABLE,
} from './schema';
import type { IdentityDatabaseSchema } from '../types/database';
/**
 * Creates every IdentityDB table and index if it does not already exist.
 * Idempotent: safe to call on every connection thanks to ifNotExists().
 * All timestamps are ISO-8601 text; metadata and embeddings are JSON text.
 */
export async function initializeSchema(
  db: Kysely<IdentityDatabaseSchema>,
): Promise<void> {
  // Canonical topics; normalized_name is the unique dedupe key.
  await db.schema
    .createTable(TOPICS_TABLE)
    .ifNotExists()
    .addColumn('id', 'text', (column) => column.primaryKey())
    .addColumn('name', 'text', (column) => column.notNull())
    .addColumn('normalized_name', 'text', (column) => column.notNull().unique())
    .addColumn('category', 'text', (column) => column.notNull())
    .addColumn('granularity', 'text', (column) => column.notNull())
    .addColumn('description', 'text')
    .addColumn('metadata', 'text')
    .addColumn('created_at', 'text', (column) => column.notNull())
    .addColumn('updated_at', 'text', (column) => column.notNull())
    .execute();
  // Fact statements; topics are linked via the fact_topics join table.
  await db.schema
    .createTable(FACTS_TABLE)
    .ifNotExists()
    .addColumn('id', 'text', (column) => column.primaryKey())
    .addColumn('statement', 'text', (column) => column.notNull())
    .addColumn('summary', 'text')
    .addColumn('source', 'text')
    .addColumn('confidence', 'real')
    .addColumn('metadata', 'text')
    .addColumn('created_at', 'text', (column) => column.notNull())
    .addColumn('updated_at', 'text', (column) => column.notNull())
    .execute();
  // One embedding per (fact, model); cascades away with its fact.
  await db.schema
    .createTable(FACT_EMBEDDINGS_TABLE)
    .ifNotExists()
    .addColumn('fact_id', 'text', (column) =>
      column.notNull().references(`${FACTS_TABLE}.id`).onDelete('cascade'),
    )
    .addColumn('model', 'text', (column) => column.notNull())
    .addColumn('dimensions', 'integer', (column) => column.notNull())
    .addColumn('embedding', 'text', (column) => column.notNull())
    .addColumn('content_hash', 'text', (column) => column.notNull())
    .addColumn('created_at', 'text', (column) => column.notNull())
    .addColumn('updated_at', 'text', (column) => column.notNull())
    .addPrimaryKeyConstraint('fact_embeddings_pk', ['fact_id', 'model'])
    .execute();
  // Fact<->topic links; position keeps topic ordering within a fact.
  // Note: PK includes position, so a fact MAY link one topic more than once.
  await db.schema
    .createTable(FACT_TOPICS_TABLE)
    .ifNotExists()
    .addColumn('fact_id', 'text', (column) =>
      column.notNull().references(`${FACTS_TABLE}.id`).onDelete('cascade'),
    )
    .addColumn('topic_id', 'text', (column) =>
      column.notNull().references(`${TOPICS_TABLE}.id`).onDelete('cascade'),
    )
    .addColumn('role', 'text')
    .addColumn('position', 'integer', (column) => column.notNull())
    .addColumn('created_at', 'text', (column) => column.notNull())
    .addPrimaryKeyConstraint('fact_topics_pk', ['fact_id', 'topic_id', 'position'])
    .execute();
  // Directed topic hierarchy edges (e.g. relation = 'parent_of').
  await db.schema
    .createTable(TOPIC_RELATIONS_TABLE)
    .ifNotExists()
    .addColumn('parent_topic_id', 'text', (column) =>
      column.notNull().references(`${TOPICS_TABLE}.id`).onDelete('cascade'),
    )
    .addColumn('child_topic_id', 'text', (column) =>
      column.notNull().references(`${TOPICS_TABLE}.id`).onDelete('cascade'),
    )
    .addColumn('relation', 'text', (column) => column.notNull())
    .addColumn('created_at', 'text', (column) => column.notNull())
    .addPrimaryKeyConstraint('topic_relations_pk', ['parent_topic_id', 'child_topic_id', 'relation'])
    .execute();
  // Alternate names; normalized_alias is globally unique across all topics.
  await db.schema
    .createTable(TOPIC_ALIASES_TABLE)
    .ifNotExists()
    .addColumn('id', 'text', (column) => column.primaryKey())
    .addColumn('topic_id', 'text', (column) =>
      column.notNull().references(`${TOPICS_TABLE}.id`).onDelete('cascade'),
    )
    .addColumn('alias', 'text', (column) => column.notNull())
    .addColumn('normalized_alias', 'text', (column) => column.notNull().unique())
    .addColumn('is_primary', 'integer', (column) => column.notNull())
    .addColumn('created_at', 'text', (column) => column.notNull())
    .addColumn('updated_at', 'text', (column) => column.notNull())
    .execute();
  // Indexes supporting the join-heavy query paths in src/queries/.
  await db.schema
    .createIndex('fact_topics_topic_id_idx')
    .ifNotExists()
    .on(FACT_TOPICS_TABLE)
    .column('topic_id')
    .execute();
  await db.schema
    .createIndex('fact_topics_fact_id_idx')
    .ifNotExists()
    .on(FACT_TOPICS_TABLE)
    .column('fact_id')
    .execute();
  await db.schema
    .createIndex('fact_embeddings_model_idx')
    .ifNotExists()
    .on(FACT_EMBEDDINGS_TABLE)
    .column('model')
    .execute();
  await db.schema
    .createIndex('topic_relations_parent_topic_id_idx')
    .ifNotExists()
    .on(TOPIC_RELATIONS_TABLE)
    .column('parent_topic_id')
    .execute();
  await db.schema
    .createIndex('topic_relations_child_topic_id_idx')
    .ifNotExists()
    .on(TOPIC_RELATIONS_TABLE)
    .column('child_topic_id')
    .execute();
  await db.schema
    .createIndex('topic_aliases_topic_id_idx')
    .ifNotExists()
    .on(TOPIC_ALIASES_TABLE)
    .column('topic_id')
    .execute();
}

64
src/core/schema.ts Normal file
View File

@@ -0,0 +1,64 @@
// Physical table names used by migrations and typed query builders.
export const TOPICS_TABLE = 'topics';
export const FACTS_TABLE = 'facts';
export const FACT_TOPICS_TABLE = 'fact_topics';
export const TOPIC_RELATIONS_TABLE = 'topic_relations';
export const TOPIC_ALIASES_TABLE = 'topic_aliases';
export const FACT_EMBEDDINGS_TABLE = 'fact_embeddings';
// Column lists per table, mirroring the record shapes in types/domain.ts.
export const TOPIC_COLUMNS = [
  'id',
  'name',
  'normalized_name',
  'category',
  'granularity',
  'description',
  'metadata',
  'created_at',
  'updated_at',
] as const;
export const FACT_COLUMNS = [
  'id',
  'statement',
  'summary',
  'source',
  'confidence',
  'metadata',
  'created_at',
  'updated_at',
] as const;
export const FACT_TOPIC_COLUMNS = [
  'fact_id',
  'topic_id',
  'role',
  'position',
  'created_at',
] as const;
export const TOPIC_RELATION_COLUMNS = [
  'parent_topic_id',
  'child_topic_id',
  'relation',
  'created_at',
] as const;
export const TOPIC_ALIAS_COLUMNS = [
  'id',
  'topic_id',
  'alias',
  'normalized_alias',
  'is_primary',
  'created_at',
  'updated_at',
] as const;
export const FACT_EMBEDDING_COLUMNS = [
  'fact_id',
  'model',
  'dimensions',
  'embedding',
  'content_hash',
  'created_at',
  'updated_at',
] as const;

100
src/core/utils.ts Normal file
View File

@@ -0,0 +1,100 @@
import { createHash, randomUUID } from 'node:crypto';
import type { Fact, FactTopic, Topic } from '../types/api';
import type { FactRecord, TopicRecord } from '../types/domain';
/** Lowercases a topic name and collapses all whitespace runs for matching. */
export function normalizeTopicName(name: string): string {
  const collapsed = name.replace(/\s+/g, ' ');
  return collapsed.trim().toLowerCase();
}
/** Normalizes whitespace in a topic name while preserving its original casing. */
export function canonicalizeTopicName(name: string): string {
  return name.replace(/\s+/g, ' ').trim();
}
/** Current timestamp as an ISO-8601 UTC string (the storage format for all rows). */
export function nowIsoString(): string {
  const current = new Date();
  return current.toISOString();
}
/** Generates a random UUID used as the primary key for new rows. */
export function createId(): string {
  const id = randomUUID();
  return id;
}
/** JSON-encodes metadata for storage; null/undefined both map to SQL NULL. */
export function serializeMetadata(metadata: unknown): string | null {
  return metadata === undefined || metadata === null ? null : JSON.stringify(metadata);
}
/** Parses stored JSON metadata; SQL NULL comes back as null. */
export function deserializeMetadata(metadata: string | null): unknown | null {
  return metadata === null ? null : JSON.parse(metadata);
}
/** Encodes an embedding vector as a JSON array string for storage. */
export function serializeEmbedding(embedding: number[]): string {
  const encoded = JSON.stringify(embedding);
  return encoded;
}
/** Decodes a stored JSON embedding string back into a number array. */
export function deserializeEmbedding(embedding: string): number[] {
  const decoded = JSON.parse(embedding) as number[];
  return decoded;
}
/** SHA-256 hex digest, used to detect when a fact's statement has changed. */
export function createContentHash(input: string): string {
  const hasher = createHash('sha256');
  hasher.update(input);
  return hasher.digest('hex');
}
/**
 * Cosine similarity of two equal-length vectors.
 * Returns 0 for empty vectors, mismatched lengths, or zero-magnitude inputs.
 */
export function cosineSimilarity(left: number[], right: number[]): number {
  if (left.length === 0 || left.length !== right.length) {
    return 0;
  }
  let dotProduct = 0;
  let leftSquares = 0;
  let rightSquares = 0;
  // entries() visits every index (holes yield undefined), matching an index loop.
  for (const [index, rawLeft] of left.entries()) {
    const a = rawLeft ?? 0;
    const b = right[index] ?? 0;
    dotProduct += a * b;
    leftSquares += a * a;
    rightSquares += b * b;
  }
  if (leftSquares === 0 || rightSquares === 0) {
    return 0;
  }
  return dotProduct / (Math.sqrt(leftSquares) * Math.sqrt(rightSquares));
}
/** Converts a raw topics row into the public Topic shape (camelCase keys, parsed metadata). */
export function mapTopicRow(record: TopicRecord): Topic {
  const { id, name, category, granularity, description } = record;
  return {
    id,
    name,
    normalizedName: record.normalized_name,
    category,
    granularity,
    description,
    metadata: deserializeMetadata(record.metadata) as Topic['metadata'],
    createdAt: record.created_at,
    updatedAt: record.updated_at,
  };
}
/** Converts a raw facts row plus its hydrated topic links into the public Fact shape. */
export function mapFactRow(record: FactRecord, topics: FactTopic[]): Fact {
  const { id, statement, summary, source, confidence } = record;
  return {
    id,
    statement,
    summary,
    source,
    confidence,
    metadata: deserializeMetadata(record.metadata) as Fact['metadata'],
    createdAt: record.created_at,
    updatedAt: record.updated_at,
    topics,
  };
}

10
src/index.ts Normal file
View File

@@ -0,0 +1,10 @@
// Public entry point: re-exports the adapters, core database, ingestion
// pipeline, and shared type definitions as one flat API surface.
export * from './adapters';
export * from './core/identity-db';
export * from './core/migrations';
export * from './ingestion/extractor';
export * from './ingestion/llm-extractor';
export * from './ingestion/naive-extractor';
export * from './ingestion/types';
export * from './types/api';
export * from './types/database';
export * from './types/domain';

View File

@@ -0,0 +1,42 @@
import { IdentityDBError } from '../core/errors';
import { normalizeTopicName } from '../core/utils';
import type { FactExtractor, ExtractedFact } from './types';
export async function extractFact(
input: string,
extractor: FactExtractor,
): Promise<ExtractedFact> {
const extracted = await extractor.extract(input);
const statement = extracted.statement?.trim() || input.trim();
if (statement.length === 0) {
throw new IdentityDBError('Extractor returned an empty statement.');
}
const dedupedTopics = new Map<string, ExtractedFact['topics'][number]>();
for (const topic of extracted.topics) {
const normalizedName = normalizeTopicName(topic.name);
if (normalizedName.length === 0) {
continue;
}
if (!dedupedTopics.has(normalizedName)) {
dedupedTopics.set(normalizedName, topic);
}
}
if (dedupedTopics.size === 0) {
throw new IdentityDBError('Extractor returned no usable topics.');
}
return {
statement,
summary: extracted.summary ?? null,
source: extracted.source ?? null,
confidence: extracted.confidence ?? null,
metadata: extracted.metadata ?? null,
topics: Array.from(dedupedTopics.values()),
};
}

View File

@@ -0,0 +1,273 @@
import { IdentityDBError } from '../core/errors';
import type { TopicCategory, TopicGranularity } from '../types/domain';
import type {
ExtractedFact,
FactExtractor,
LlmFactExtractorOptions,
} from './types';
// System prompt shared by every extraction call. It pins the model's reply to
// a single JSON object matching the ExtractedFact shape; callers can append
// extra guidance via LlmFactExtractorOptions.instructions.
const DEFAULT_INSTRUCTIONS = [
  'Extract one structured fact from the user input.',
  'Return JSON only. Do not include markdown, explanations, or prose outside the JSON object.',
  'Use this shape: {"statement": string?, "summary": string|null, "source": string|null, "confidence": number|null, "metadata": object|null, "topics": Array<{"name": string, "category": "entity"|"concept"|"temporal"|"custom"?, "granularity": "abstract"|"concrete"|"mixed"?, "role": string|null, "description": string|null, "metadata": object|null}>}.',
  'Only include topics that are explicitly supported by the input.',
].join('\n');
/**
 * FactExtractor backed by any text-generation model.
 * Builds a JSON-only prompt, invokes the model, and parses its reply.
 */
export class LlmFactExtractor implements FactExtractor {
  constructor(private readonly options: LlmFactExtractorOptions) {}

  /** Extracts one structured fact from free-form input via the configured model. */
  async extract(input: string): Promise<ExtractedFact> {
    const response = await this.options.model.generateText(this.buildPrompt(input));
    return parseLlmExtractedFactResponse(response);
  }

  /** Assembles the final prompt, honoring a custom promptBuilder when provided. */
  private buildPrompt(input: string): string {
    const { promptBuilder, instructions } = this.options;
    if (promptBuilder) {
      return promptBuilder(input, instructions);
    }
    const sections = [DEFAULT_INSTRUCTIONS];
    const extra = instructions?.trim();
    if (extra) {
      sections.push(`Additional instructions:\n${extra}`);
    }
    sections.push(`Input:\n${input.trim()}`);
    return sections.join('\n\n');
  }
}
/**
 * Parses and validates a raw model reply into an ExtractedFact.
 * Optional fields are copied only when present so `undefined` never leaks
 * into the result object.
 * @throws IdentityDBError when the reply is not a JSON object or a field is malformed.
 */
export function parseLlmExtractedFactResponse(response: string): ExtractedFact {
  const payload = parseJsonCandidate(response);
  if (!isRecord(payload)) {
    throw new IdentityDBError('LLM extractor response must be a JSON object.');
  }
  // Topics are validated first and are the only mandatory field.
  const extracted: ExtractedFact = { topics: parseTopics(payload.topics) };
  const statement = optionalString(payload.statement);
  if (statement !== undefined) {
    extracted.statement = statement;
  }
  const summary = optionalNullableString(payload.summary);
  if (summary !== undefined) {
    extracted.summary = summary;
  }
  const source = optionalNullableString(payload.source);
  if (source !== undefined) {
    extracted.source = source;
  }
  const confidence = optionalNullableNumber(payload.confidence);
  if (confidence !== undefined) {
    extracted.confidence = confidence;
  }
  const metadata = optionalMetadata(payload.metadata);
  if (metadata !== undefined) {
    extracted.metadata = metadata;
  }
  return extracted;
}
/** Tries JSON.parse on each candidate substring in order; throws if none parse. */
function parseJsonCandidate(response: string): unknown {
  const candidates = collectJsonCandidates(response.trim());
  for (const candidate of candidates) {
    try {
      return JSON.parse(candidate);
    } catch {
      // Not valid JSON — move on to the next candidate.
    }
  }
  throw new IdentityDBError('LLM extractor returned invalid JSON.');
}
/**
 * Gathers plausible JSON substrings from a model reply, in priority order:
 * the whole reply, each fenced ``` block's body, then the outermost {...} span.
 * The Set preserves insertion order while removing duplicates.
 */
function collectJsonCandidates(response: string): string[] {
  const candidates = new Set<string>([response]);
  for (const fenced of response.matchAll(/```(?:json)?\s*([\s\S]*?)```/gi)) {
    const inner = fenced[1]?.trim();
    if (inner) {
      candidates.add(inner);
    }
  }
  const open = response.indexOf('{');
  const close = response.lastIndexOf('}');
  if (open >= 0 && close > open) {
    candidates.add(response.slice(open, close + 1));
  }
  return [...candidates];
}
/** Validates that the topics payload is an array and parses each entry. */
function parseTopics(value: unknown): ExtractedFact['topics'] {
  if (Array.isArray(value)) {
    return value.map((entry) => parseTopic(entry));
  }
  throw new IdentityDBError('LLM extractor response must include a topics array.');
}
/** Parses one topic object, validating the required name and optional attributes. */
function parseTopic(value: unknown): ExtractedFact['topics'][number] {
  if (!isRecord(value)) {
    throw new IdentityDBError('LLM extractor topics must be JSON objects.');
  }
  const name = optionalString(value.name)?.trim();
  if (!name) {
    throw new IdentityDBError('LLM extractor topics must include a non-empty name.');
  }
  const topic: ExtractedFact['topics'][number] = { name };
  // Validate each optional field, then copy only the ones that were present.
  const category = optionalTopicCategory(value.category);
  const granularity = optionalTopicGranularity(value.granularity);
  const role = optionalNullableString(value.role);
  const description = optionalNullableString(value.description);
  const metadata = optionalMetadata(value.metadata);
  if (category !== undefined) {
    topic.category = category;
  }
  if (granularity !== undefined) {
    topic.granularity = granularity;
  }
  if (role !== undefined) {
    topic.role = role;
  }
  if (description !== undefined) {
    topic.description = description;
  }
  if (metadata !== undefined) {
    topic.metadata = metadata;
  }
  return topic;
}
function optionalString(value: unknown): string | undefined {
if (value === undefined) {
return undefined;
}
if (typeof value !== 'string') {
throw new IdentityDBError('LLM extractor expected a string field.');
}
return value;
}
function optionalNullableString(value: unknown): string | null | undefined {
if (value === undefined) {
return undefined;
}
if (value === null) {
return null;
}
if (typeof value !== 'string') {
throw new IdentityDBError('LLM extractor expected a nullable string field.');
}
return value;
}
function optionalNullableNumber(value: unknown): number | null | undefined {
if (value === undefined) {
return undefined;
}
if (value === null) {
return null;
}
if (typeof value !== 'number' || Number.isNaN(value)) {
throw new IdentityDBError('LLM extractor expected confidence to be a number or null.');
}
return value;
}
/** Accepts undefined (absent), null, or any JSON-serializable value. */
function optionalMetadata(value: unknown): ExtractedFact['metadata'] | undefined {
  if (value === undefined || value === null) {
    return value;
  }
  if (isJsonLike(value)) {
    return value as ExtractedFact['metadata'];
  }
  throw new IdentityDBError('LLM extractor metadata must be valid JSON-compatible data.');
}
/** Validates an optional topic category against the supported enum values. */
function optionalTopicCategory(value: unknown): TopicCategory | undefined {
  if (value === undefined) {
    return undefined;
  }
  const allowed: readonly TopicCategory[] = ['entity', 'concept', 'temporal', 'custom'];
  if (allowed.includes(value as TopicCategory)) {
    return value as TopicCategory;
  }
  throw new IdentityDBError('LLM extractor returned an unsupported topic category.');
}
/** Validates an optional topic granularity against the supported enum values. */
function optionalTopicGranularity(value: unknown): TopicGranularity | undefined {
  if (value === undefined) {
    return undefined;
  }
  const allowed: readonly TopicGranularity[] = ['abstract', 'concrete', 'mixed'];
  if (allowed.includes(value as TopicGranularity)) {
    return value as TopicGranularity;
  }
  throw new IdentityDBError('LLM extractor returned an unsupported topic granularity.');
}
/** True for plain non-null, non-array objects (typeof null is 'object', hence the check). */
function isRecord(value: unknown): value is Record<string, unknown> {
  if (typeof value !== 'object') {
    return false;
  }
  return value !== null && !Array.isArray(value);
}
/** Recursively checks that a value is composed only of JSON-compatible parts. */
function isJsonLike(value: unknown): boolean {
  switch (typeof value) {
    case 'string':
    case 'number':
    case 'boolean':
      return true;
    case 'object': {
      if (value === null) {
        return true;
      }
      const parts = Array.isArray(value) ? value : Object.values(value);
      return parts.every((part) => isJsonLike(part));
    }
    default:
      // undefined, function, symbol, bigint are not JSON-compatible.
      return false;
  }
}

View File

@@ -0,0 +1,39 @@
import type { ExtractedFact, FactExtractor } from './types';
/**
 * Heuristic extractor with no model dependency: pulls "I", 4-digit numbers,
 * and Capitalized tokens out of the input and tags each as a topic.
 */
export class NaiveExtractor implements FactExtractor {
  /** Builds an ExtractedFact from regex-matched tokens; 4-digit tokens become temporal topics. */
  async extract(input: string): Promise<ExtractedFact> {
    const tokenPattern = /\bI\b|\b\d{4}\b|\b[A-Z][A-Za-z0-9+#.-]*\b/g;
    const seen = new Set<string>();
    const topics: ExtractedFact['topics'] = [];
    for (const token of input.match(tokenPattern) ?? []) {
      // Dedupe case-insensitively; the first spelling encountered wins.
      const key = token.trim().toLowerCase();
      if (seen.has(key)) {
        continue;
      }
      seen.add(key);
      const isYear = /^\d{4}$/.test(token);
      topics.push(
        isYear
          ? { name: token, category: 'temporal', granularity: 'concrete', role: 'time' }
          : {
              name: token,
              category: 'entity',
              granularity: 'concrete',
              role: token === 'I' ? 'subject' : 'object',
            },
      );
    }
    return { statement: input.trim(), topics };
  }
}

34
src/ingestion/types.ts Normal file
View File

@@ -0,0 +1,34 @@
import type {
AddFactInput,
EmbeddingProvider,
TopicLinkInput,
} from '../types/api';
/** Extractor output before normalization; statement is optional (input is the fallback). */
export interface ExtractedFact {
  statement?: string;
  summary?: string | null;
  source?: string | null;
  confidence?: number | null;
  metadata?: AddFactInput['metadata'];
  topics: TopicLinkInput[];
}
/** Pluggable strategy for turning free-form input into a structured fact. */
export interface FactExtractor {
  extract(input: string): Promise<ExtractedFact>;
}
/** Minimal provider-agnostic text-generation interface for LlmFactExtractor. */
export interface LlmTextGenerationModel {
  generateText(prompt: string): Promise<string>;
}
/** Configuration for LlmFactExtractor; promptBuilder fully overrides the default prompt. */
export interface LlmFactExtractorOptions {
  model: LlmTextGenerationModel;
  instructions?: string;
  promptBuilder?: (input: string, instructions?: string) => string;
}
/** Options for statement ingestion: extractor is required, embeddings optional. */
export interface IngestStatementOptions {
  extractor: FactExtractor;
  embeddingProvider?: EmbeddingProvider;
  duplicateThreshold?: number;
}

66
src/queries/facts.ts Normal file
View File

@@ -0,0 +1,66 @@
import type { Kysely, Transaction } from 'kysely';
import type { IdentityDatabaseSchema } from '../types/database';
import type { FactRecord, TopicRecord } from '../types/domain';
/** Either a root Kysely instance or an open transaction; queries accept both. */
export type DatabaseExecutor = Kysely<IdentityDatabaseSchema> | Transaction<IdentityDatabaseSchema>;
/** A topics row joined with the fact_topics link columns for one fact. */
export interface FactTopicJoinRow extends TopicRecord {
  fact_id: string;
  role: string | null;
  position: number;
}
/** All fact rows linked to the given topic, ordered oldest first. */
export async function findFactRowsForTopicId(
  executor: DatabaseExecutor,
  topicId: string,
): Promise<FactRecord[]> {
  const query = executor
    .selectFrom('facts')
    .innerJoin('fact_topics', 'fact_topics.fact_id', 'facts.id')
    .selectAll('facts')
    .where('fact_topics.topic_id', '=', topicId)
    .orderBy('facts.created_at', 'asc');
  return query.execute();
}
/**
 * Fact rows linked to EVERY topic in topicIds, ordered oldest first.
 * Counts DISTINCT topic ids per fact: the fact_topics primary key includes
 * `position`, so a fact may link the same topic more than once, and the
 * plain count could spuriously equal the required total. Input ids are
 * deduplicated for the same reason.
 */
export async function findFactRowsConnectingTopicIds(
  executor: DatabaseExecutor,
  topicIds: string[],
): Promise<FactRecord[]> {
  // Dedupe so the required-match count reflects unique topics only.
  const uniqueTopicIds = [...new Set(topicIds)];
  if (uniqueTopicIds.length === 0) {
    return [];
  }
  return executor
    .selectFrom('facts')
    .innerJoin('fact_topics', 'fact_topics.fact_id', 'facts.id')
    .selectAll('facts')
    .where('fact_topics.topic_id', 'in', uniqueTopicIds)
    .groupBy('facts.id')
    .having(
      (eb) => eb.fn.count<number>('fact_topics.topic_id').distinct(),
      '=',
      uniqueTopicIds.length,
    )
    .orderBy('facts.created_at', 'asc')
    .execute();
}
/** Topic rows joined with their fact-link metadata, ordered by link position. */
export async function findTopicLinksForFactIds(
  executor: DatabaseExecutor,
  factIds: string[],
): Promise<FactTopicJoinRow[]> {
  if (factIds.length === 0) {
    return [];
  }
  const query = executor
    .selectFrom('fact_topics')
    .innerJoin('topics', 'topics.id', 'fact_topics.topic_id')
    .selectAll('topics')
    .select([
      'fact_topics.fact_id as fact_id',
      'fact_topics.role as role',
      'fact_topics.position as position',
    ])
    .where('fact_topics.fact_id', 'in', factIds)
    .orderBy('fact_topics.position', 'asc');
  // The extra selected link columns are not in the inferred row type.
  return query.execute() as Promise<FactTopicJoinRow[]>;
}

117
src/queries/topics.ts Normal file
View File

@@ -0,0 +1,117 @@
import type { Kysely, Transaction } from 'kysely';
import type { IdentityDatabaseSchema } from '../types/database';
import type { TopicAliasRecord, TopicRecord } from '../types/domain';
/** Either a root Kysely instance or an open transaction; queries accept both. */
export type DatabaseExecutor = Kysely<IdentityDatabaseSchema> | Transaction<IdentityDatabaseSchema>;
/** A topics row plus how many facts it shares with the queried topic. */
export interface ConnectedTopicRow extends TopicRecord {
  shared_fact_count: number;
}
/** Looks up a topic row by its unique normalized name. */
export async function findTopicRowByNormalizedName(
  executor: DatabaseExecutor,
  normalizedName: string,
): Promise<TopicRecord | undefined> {
  const query = executor
    .selectFrom('topics')
    .selectAll()
    .where('normalized_name', '=', normalizedName);
  return query.executeTakeFirst();
}
/** Resolves a normalized alias to its canonical topic row, if any. */
export async function findTopicRowByNormalizedAlias(
  executor: DatabaseExecutor,
  normalizedAlias: string,
): Promise<TopicRecord | undefined> {
  const query = executor
    .selectFrom('topic_aliases')
    .innerJoin('topics', 'topics.id', 'topic_aliases.topic_id')
    .selectAll('topics')
    .where('topic_aliases.normalized_alias', '=', normalizedAlias);
  return query.executeTakeFirst();
}
/** Resolves a normalized name to a topic row, preferring a direct name match over an alias. */
export async function findTopicRowByNameOrAlias(
  executor: DatabaseExecutor,
  normalizedName: string,
): Promise<TopicRecord | undefined> {
  return (
    (await findTopicRowByNormalizedName(executor, normalizedName)) ??
    (await findTopicRowByNormalizedAlias(executor, normalizedName))
  );
}
/** All alias rows for a topic: primary aliases first, then alphabetical. */
export async function listTopicAliasRowsForTopicId(
  executor: DatabaseExecutor,
  topicId: string,
): Promise<TopicAliasRecord[]> {
  const query = executor
    .selectFrom('topic_aliases')
    .selectAll()
    .where('topic_id', '=', topicId)
    .orderBy('is_primary', 'desc')
    .orderBy('normalized_alias', 'asc');
  return query.execute();
}
/** All topic rows in alphabetical order, optionally capped at `limit`. */
export async function listTopicRows(
  executor: DatabaseExecutor,
  limit?: number,
): Promise<TopicRecord[]> {
  const base = executor.selectFrom('topics').selectAll().orderBy('normalized_name', 'asc');
  return (limit === undefined ? base : base.limit(limit)).execute();
}
/**
 * Topics that co-occur with the given topic on at least one fact, ranked by
 * how many facts they share (then by name for stable ordering).
 * Implemented as a self-join of fact_topics: source_link anchors the input
 * topic, related_link fans out to every other topic on the same facts.
 */
export async function findConnectedTopicRows(
  executor: DatabaseExecutor,
  topicId: string,
): Promise<ConnectedTopicRow[]> {
  return executor
    .selectFrom('fact_topics as source_link')
    .innerJoin('fact_topics as related_link', 'related_link.fact_id', 'source_link.fact_id')
    .innerJoin('topics', 'topics.id', 'related_link.topic_id')
    .selectAll('topics')
    .select((eb) => eb.fn.count<number>('related_link.fact_id').as('shared_fact_count'))
    .where('source_link.topic_id', '=', topicId)
    // Exclude the anchor topic itself from its own connections.
    .whereRef('related_link.topic_id', '!=', 'source_link.topic_id')
    .groupBy('topics.id')
    .orderBy('shared_fact_count', 'desc')
    .orderBy('topics.normalized_name', 'asc')
    .execute() as Promise<ConnectedTopicRow[]>;
}
/** Direct children of a topic via 'parent_of' relations, alphabetical. */
export async function findChildTopicRows(
  executor: DatabaseExecutor,
  parentTopicId: string,
): Promise<TopicRecord[]> {
  const query = executor
    .selectFrom('topic_relations')
    .innerJoin('topics', 'topics.id', 'topic_relations.child_topic_id')
    .selectAll('topics')
    .where('topic_relations.parent_topic_id', '=', parentTopicId)
    .where('topic_relations.relation', '=', 'parent_of')
    .orderBy('topics.normalized_name', 'asc');
  return query.execute();
}
/** Direct parents of a topic via 'parent_of' relations, alphabetical. */
export async function findParentTopicRows(
  executor: DatabaseExecutor,
  childTopicId: string,
): Promise<TopicRecord[]> {
  const query = executor
    .selectFrom('topic_relations')
    .innerJoin('topics', 'topics.id', 'topic_relations.parent_topic_id')
    .selectAll('topics')
    .where('topic_relations.child_topic_id', '=', childTopicId)
    .where('topic_relations.relation', '=', 'parent_of')
    .orderBy('topics.normalized_name', 'asc');
  return query.execute();
}

104
src/types/api.ts Normal file
View File

@@ -0,0 +1,104 @@
import type { JsonValue, TopicCategory, TopicGranularity } from './domain';
/** Input for creating or updating a topic; only the name is required. */
export interface UpsertTopicInput {
  name: string;
  category?: TopicCategory;
  granularity?: TopicGranularity;
  description?: string | null;
  metadata?: JsonValue | null;
}
/** Topic input as attached to a fact; adds the link's semantic role. */
export interface TopicLinkInput extends UpsertTopicInput {
  role?: string | null;
}
/** Input for recording a fact together with its (ordered) topic links. */
export interface AddFactInput {
  statement: string;
  summary?: string | null;
  source?: string | null;
  confidence?: number | null;
  metadata?: JsonValue | null;
  topics: TopicLinkInput[];
}
/** Input for creating a parent_of relation between two topics, by name. */
export interface LinkTopicsInput {
  parentName: string;
  childName: string;
}
/** Public topic shape (camelCase, metadata parsed from JSON text). */
export interface Topic {
  id: string;
  name: string;
  normalizedName: string;
  category: TopicCategory;
  granularity: TopicGranularity;
  description: string | null;
  metadata: JsonValue | null;
  createdAt: string;
  updatedAt: string;
}
/** A topic as linked to a specific fact, with role and link position. */
export interface FactTopic extends Topic {
  role: string | null;
  position: number;
}
/** Public fact shape with its hydrated, position-ordered topics. */
export interface Fact {
  id: string;
  statement: string;
  summary: string | null;
  source: string | null;
  confidence: number | null;
  metadata: JsonValue | null;
  createdAt: string;
  updatedAt: string;
  topics: FactTopic[];
}
/** A topic together with every fact that links it. */
export interface TopicWithFacts extends Topic {
  facts: Fact[];
}
/** A topic related by co-occurrence, with the number of shared facts. */
export interface ConnectedTopic extends Topic {
  sharedFactCount: number;
}
/** Options for single-topic lookups. */
export interface TopicLookupOptions {
  includeFacts?: boolean;
}
/** Options for listing topics. */
export interface ListTopicsOptions {
  includeFacts?: boolean;
  limit?: number;
}
/** Provider-agnostic embedding interface; embedMany is an optional batch path. */
export interface EmbeddingProvider {
  model: string;
  dimensions: number;
  embed(input: string): Promise<number[]>;
  embedMany?(inputs: string[]): Promise<number[][]>;
}
/** Input for (re)indexing fact embeddings with a given provider. */
export interface IndexFactEmbeddingsInput {
  provider: EmbeddingProvider;
}
/** Input for semantic search over facts; topicNames optionally narrows scope. */
export interface SearchFactsInput {
  query: string;
  provider: EmbeddingProvider;
  topicNames?: string[];
  limit?: number;
  minimumScore?: number;
}
/** Input for finding facts similar to a candidate statement (dedupe use case). */
export interface FindSimilarFactsInput {
  statement: string;
  provider: EmbeddingProvider;
  topicNames?: string[];
  limit?: number;
  minimumScore?: number;
}
/** A fact annotated with its cosine-similarity score for the query. */
export interface ScoredFact extends Fact {
  score: number;
}

17
src/types/database.ts Normal file
View File

@@ -0,0 +1,17 @@
import type {
FactEmbeddingRecord,
FactRecord,
FactTopicRecord,
TopicAliasRecord,
TopicRecord,
TopicRelationRecord,
} from './domain';
/** Kysely schema map: table name -> row record type, mirroring migrations.ts. */
export interface IdentityDatabaseSchema {
  topics: TopicRecord;
  facts: FactRecord;
  fact_topics: FactTopicRecord;
  topic_relations: TopicRelationRecord;
  topic_aliases: TopicAliasRecord;
  fact_embeddings: FactEmbeddingRecord;
}

64
src/types/domain.ts Normal file
View File

@@ -0,0 +1,64 @@
// Enumerations used by topic rows and validated by the LLM extractor.
export type TopicCategory = 'entity' | 'concept' | 'temporal' | 'custom';
export type TopicGranularity = 'abstract' | 'concrete' | 'mixed';
// JSON-compatible value types, used for metadata payloads.
export type JsonPrimitive = string | number | boolean | null;
export type JsonValue = JsonPrimitive | JsonValue[] | { [key: string]: JsonValue };
/** Raw topics row; metadata is JSON text, timestamps are ISO-8601 text. */
export interface TopicRecord {
  id: string;
  name: string;
  normalized_name: string;
  category: TopicCategory;
  granularity: TopicGranularity;
  description: string | null;
  metadata: string | null;
  created_at: string;
  updated_at: string;
}
/** Raw facts row; topics live in the fact_topics join table. */
export interface FactRecord {
  id: string;
  statement: string;
  summary: string | null;
  source: string | null;
  confidence: number | null;
  metadata: string | null;
  created_at: string;
  updated_at: string;
}
/** Raw fact<->topic link row; position preserves topic order within a fact. */
export interface FactTopicRecord {
  fact_id: string;
  topic_id: string;
  role: string | null;
  position: number;
  created_at: string;
}
/** Raw directed topic relation edge (e.g. relation = 'parent_of'). */
export interface TopicRelationRecord {
  parent_topic_id: string;
  child_topic_id: string;
  relation: string;
  created_at: string;
}
/** Raw topic alias row; is_primary is an SQLite integer boolean (0/1). */
export interface TopicAliasRecord {
  id: string;
  topic_id: string;
  alias: string;
  normalized_alias: string;
  is_primary: number;
  created_at: string;
  updated_at: string;
}
/** Raw embedding row; embedding is a JSON-encoded number array. */
export interface FactEmbeddingRecord {
  fact_id: string;
  model: string;
  dimensions: number;
  embedding: string;
  content_hash: string;
  created_at: string;
  updated_at: string;
}

91
tests/identity-db.test.ts Normal file
View File

@@ -0,0 +1,91 @@
import { afterEach, beforeEach, describe, expect, it } from 'vitest';
import { IdentityDB } from '../src/core/identity-db';
// Integration-style specs for IdentityDB topic/fact writes against an
// in-memory SQLite database; each test gets a fresh, initialized instance.
describe('IdentityDB topic and fact writes', () => {
  let db: IdentityDB;
  beforeEach(async () => {
    db = await IdentityDB.connect({ client: 'sqlite', filename: ':memory:' });
    await db.initialize();
  });
  afterEach(async () => {
    await db.close();
  });
  // Names differing only in case/whitespace must resolve to one topic row.
  it('deduplicates topics by normalized name during upsert', async () => {
    const first = await db.upsertTopic({
      name: 'TypeScript',
      category: 'entity',
      granularity: 'concrete',
    });
    const second = await db.upsertTopic({
      name: ' typescript ',
      category: 'entity',
      granularity: 'concrete',
    });
    expect(second.id).toBe(first.id);
    expect(second.normalizedName).toBe('typescript');
    const topics = await db.listTopics({ includeFacts: false });
    expect(topics).toHaveLength(1);
  });
  // A fact's topics keep their declared order and are reachable per-topic.
  it('adds one fact that links multiple topics', async () => {
    const fact = await db.addFact({
      statement: 'I have worked with TypeScript since 2025.',
      topics: [
        { name: 'I', category: 'entity', granularity: 'concrete', role: 'subject' },
        { name: 'TypeScript', category: 'entity', granularity: 'concrete', role: 'object' },
        { name: '2025', category: 'temporal', granularity: 'concrete', role: 'time' },
      ],
    });
    expect(fact.statement).toBe('I have worked with TypeScript since 2025.');
    expect(fact.topics.map((topic) => topic.name)).toEqual(['I', 'TypeScript', '2025']);
    const typeScriptFacts = await db.getTopicFacts('TypeScript');
    expect(typeScriptFacts).toHaveLength(1);
    expect(typeScriptFacts[0]?.statement).toBe('I have worked with TypeScript since 2025.');
  });
  // Alias lookups are case-insensitive and point at the canonical topic.
  it('resolves alias names to a canonical topic', async () => {
    await db.upsertTopic({
      name: 'TypeScript',
      category: 'entity',
      granularity: 'concrete',
    });
    await db.addTopicAlias('TypeScript', 'TS');
    const resolved = await db.resolveTopic('ts');
    const aliases = await db.getTopicAliases('TypeScript');
    expect(resolved?.name).toBe('TypeScript');
    expect(aliases).toEqual(['TS']);
  });
  // Adding a fact through an alias must not create a duplicate topic.
  it('reuses the canonical topic when a fact is added through an alias', async () => {
    await db.upsertTopic({
      name: 'TypeScript',
      category: 'entity',
      granularity: 'concrete',
    });
    await db.addTopicAlias('TypeScript', 'TS');
    await db.addFact({
      statement: 'TS compiles to JavaScript.',
      topics: [{ name: 'TS', category: 'entity', granularity: 'concrete' }],
    });
    const topics = await db.listTopics({ includeFacts: false });
    const facts = await db.getTopicFacts('TypeScript');
    expect(topics.map((topic) => topic.name)).toEqual(['TypeScript']);
    expect(facts.map((fact) => fact.statement)).toEqual(['TS compiles to JavaScript.']);
  });
});

137
tests/ingestion.test.ts Normal file
View File

@@ -0,0 +1,137 @@
import { afterEach, beforeEach, describe, expect, it } from 'vitest';
import { IdentityDB } from '../src/core/identity-db';
import { LlmFactExtractor } from '../src/ingestion/llm-extractor';
import { NaiveExtractor } from '../src/ingestion/naive-extractor';
import type { FactExtractor } from '../src/ingestion/types';
// Ingestion specs: pluggable extractors (inline custom, shipped naive, and
// the LLM adapter) that turn raw statements into structured facts.
describe('IdentityDB ingestion', () => {
  let db: IdentityDB;

  beforeEach(async () => {
    db = await IdentityDB.connect({ client: 'sqlite', filename: ':memory:' });
    await db.initialize();
  });

  afterEach(async () => {
    await db.close();
  });

  it('ingests a statement using a provided extractor', async () => {
    // Minimal inline extractor: echoes the statement with fixed topics.
    const extractor: FactExtractor = {
      async extract(input) {
        return {
          statement: input,
          topics: [
            { name: 'I', category: 'entity', granularity: 'concrete', role: 'subject' },
            { name: 'TypeScript', category: 'entity', granularity: 'concrete', role: 'object' },
            { name: '2025', category: 'temporal', granularity: 'concrete', role: 'time' },
          ],
        };
      },
    };
    const fact = await db.ingestStatement('I have worked with TypeScript since 2025.', {
      extractor,
    });
    expect(fact.topics.map((topic) => topic.name)).toEqual(['I', 'TypeScript', '2025']);
    const linkedFacts = await db.getTopicFactsLinkedTo('TypeScript', '2025');
    expect(linkedFacts).toHaveLength(1);
    expect(linkedFacts[0]?.statement).toBe('I have worked with TypeScript since 2025.');
  });

  it('ships a deterministic naive extractor for local usage', async () => {
    const fact = await db.ingestStatement('I have worked with TypeScript since 2025.', {
      extractor: new NaiveExtractor(),
    });
    expect(fact.topics.map((topic) => topic.name)).toEqual(['I', 'TypeScript', '2025']);
    const topic = await db.getTopicByName('TypeScript', { includeFacts: true });
    expect(topic?.facts).toHaveLength(1);
  });

  it('ships an LLM extractor adapter that turns structured JSON responses into facts', async () => {
    let prompt = '';
    // Fake model: records the prompt it received and returns canned JSON.
    const extractor = new LlmFactExtractor({
      model: {
        async generateText(input) {
          prompt = input;
          return JSON.stringify({
            statement: 'I have worked with Bun and TypeScript since 2025.',
            summary: 'The speaker has Bun and TypeScript experience.',
            source: 'chat',
            confidence: 0.91,
            metadata: { channel: 'telegram' },
            topics: [
              { name: 'I', category: 'entity', granularity: 'concrete', role: 'subject' },
              { name: 'Bun', category: 'entity', granularity: 'concrete', role: 'object' },
              { name: 'TypeScript', category: 'entity', granularity: 'concrete', role: 'object' },
              { name: '2025', category: 'temporal', granularity: 'concrete', role: 'time' },
            ],
          });
        },
      },
      instructions: 'Prefer technology and time topics.',
    });
    const fact = await db.ingestStatement('I have worked with Bun and TypeScript since 2025.', {
      extractor,
    });
    // The adapter must embed both the caller instructions and the raw input
    // in the prompt it sends to the model.
    expect(prompt).toContain('Prefer technology and time topics.');
    expect(prompt).toContain('I have worked with Bun and TypeScript since 2025.');
    expect(fact.summary).toBe('The speaker has Bun and TypeScript experience.');
    expect(fact.source).toBe('chat');
    expect(fact.confidence).toBe(0.91);
    expect(fact.metadata).toEqual({ channel: 'telegram' });
    expect(fact.topics.map((topic) => topic.name)).toEqual(['I', 'Bun', 'TypeScript', '2025']);
  });

  it('parses JSON responses wrapped in markdown code fences', async () => {
    const extractor = new LlmFactExtractor({
      model: {
        async generateText() {
          // Simulates a chatty model that wraps its JSON in a fenced block.
          return [
            'Here is the extracted fact:',
            '```json',
            JSON.stringify({
              statement: 'Bun powers TypeScript tooling.',
              topics: [
                { name: 'Bun', category: 'entity', granularity: 'concrete' },
                { name: 'TypeScript', category: 'entity', granularity: 'concrete' },
              ],
            }),
            '```',
          ].join('\n');
        },
      },
    });
    const fact = await db.ingestStatement('Bun powers TypeScript tooling.', {
      extractor,
    });
    expect(fact.topics.map((topic) => topic.name)).toEqual(['Bun', 'TypeScript']);
  });

  it('rejects invalid LLM responses before writing facts', async () => {
    const extractor = new LlmFactExtractor({
      model: {
        async generateText() {
          return 'not json at all';
        },
      },
    });
    // The rejection must happen before any fact rows are written.
    await expect(
      db.ingestStatement('Bun powers TypeScript tooling.', {
        extractor,
      }),
    ).rejects.toThrow('LLM extractor returned invalid JSON.');
  });
});

121
tests/migrations.test.ts Normal file
View File

@@ -0,0 +1,121 @@
import { sql } from 'kysely';
import { afterEach, describe, expect, it } from 'vitest';
import { createDatabase } from '../src/adapters/dialect';
import { initializeSchema } from '../src/core/migrations';
// Close handlers registered by individual tests; drained after each test so
// no SQLite connection outlives the test that opened it.
const openConnections: Array<() => Promise<void>> = [];

afterEach(async () => {
  // Drain in LIFO order — same semantics as repeatedly popping the array.
  for (let close = openConnections.pop(); close !== undefined; close = openConnections.pop()) {
    await close();
  }
});
// Schema bootstrap specs for initializeSchema against in-memory SQLite.
describe('initializeSchema', () => {
  it('creates the topics, facts, fact_embeddings, fact_topics, topic_relations, and topic_aliases tables', async () => {
    const connection = await createDatabase({ client: 'sqlite', filename: ':memory:' });
    // Wrap in an arrow so `destroy` keeps its `this` binding when the shared
    // afterEach hook later invokes it as a detached function.
    openConnections.push(() => connection.destroy());
    await initializeSchema(connection.db);
    const tables = await sql<{ name: string }>`
      SELECT name
      FROM sqlite_master
      WHERE type = 'table'
      ORDER BY name
    `.execute(connection.db);
    const tableNames = tables.rows.map((row) => row.name);
    expect(tableNames).toContain('topics');
    expect(tableNames).toContain('facts');
    expect(tableNames).toContain('fact_embeddings');
    expect(tableNames).toContain('fact_topics');
    expect(tableNames).toContain('topic_relations');
    expect(tableNames).toContain('topic_aliases');
  });

  it('creates the expected columns for each table', async () => {
    const connection = await createDatabase({ client: 'sqlite', filename: ':memory:' });
    openConnections.push(() => connection.destroy());
    await initializeSchema(connection.db);
    // PRAGMA table_info reports columns in declaration order, so these
    // assertions also pin the column ordering of each table.
    const topicsColumns = await sql<{ name: string }>`PRAGMA table_info(topics)`.execute(connection.db);
    const factsColumns = await sql<{ name: string }>`PRAGMA table_info(facts)`.execute(connection.db);
    const factEmbeddingsColumns = await sql<{ name: string }>`PRAGMA table_info(fact_embeddings)`.execute(connection.db);
    const factTopicsColumns = await sql<{ name: string }>`PRAGMA table_info(fact_topics)`.execute(connection.db);
    const topicRelationsColumns = await sql<{ name: string }>`PRAGMA table_info(topic_relations)`.execute(connection.db);
    const topicAliasesColumns = await sql<{ name: string }>`PRAGMA table_info(topic_aliases)`.execute(connection.db);
    expect(topicsColumns.rows.map((row) => row.name)).toEqual([
      'id',
      'name',
      'normalized_name',
      'category',
      'granularity',
      'description',
      'metadata',
      'created_at',
      'updated_at',
    ]);
    expect(factsColumns.rows.map((row) => row.name)).toEqual([
      'id',
      'statement',
      'summary',
      'source',
      'confidence',
      'metadata',
      'created_at',
      'updated_at',
    ]);
    expect(factEmbeddingsColumns.rows.map((row) => row.name)).toEqual([
      'fact_id',
      'model',
      'dimensions',
      'embedding',
      'content_hash',
      'created_at',
      'updated_at',
    ]);
    expect(factTopicsColumns.rows.map((row) => row.name)).toEqual([
      'fact_id',
      'topic_id',
      'role',
      'position',
      'created_at',
    ]);
    expect(topicRelationsColumns.rows.map((row) => row.name)).toEqual([
      'parent_topic_id',
      'child_topic_id',
      'relation',
      'created_at',
    ]);
    expect(topicAliasesColumns.rows.map((row) => row.name)).toEqual([
      'id',
      'topic_id',
      'alias',
      'normalized_alias',
      'is_primary',
      'created_at',
      'updated_at',
    ]);
  });

  it('is idempotent when called more than once', async () => {
    const connection = await createDatabase({ client: 'sqlite', filename: ':memory:' });
    openConnections.push(() => connection.destroy());
    await initializeSchema(connection.db);
    // A second run must be a no-op rather than failing on existing tables.
    await expect(initializeSchema(connection.db)).resolves.toBeUndefined();
  });
});

124
tests/queries.test.ts Normal file
View File

@@ -0,0 +1,124 @@
import { afterEach, beforeEach, describe, expect, it } from 'vitest';
import { IdentityDB } from '../src/core/identity-db';
/**
 * Seeds the shared memory graph used by the query specs: two facts plus a
 * two-level hierarchy (software technology → programming language → TypeScript).
 */
async function seedMemoryGraph(db: IdentityDB): Promise<void> {
  // Typed off the IdentityDB API so the literals keep their contextual types.
  const factSpecs: Array<Parameters<IdentityDB['addFact']>[0]> = [
    {
      statement: 'I have worked with TypeScript since 2025.',
      topics: [
        { name: 'I', category: 'entity', granularity: 'concrete', role: 'subject' },
        { name: 'TypeScript', category: 'entity', granularity: 'concrete', role: 'object' },
        { name: '2025', category: 'temporal', granularity: 'concrete', role: 'time' },
      ],
    },
    {
      statement: 'TypeScript is a programming language.',
      topics: [
        { name: 'TypeScript', category: 'entity', granularity: 'concrete', role: 'subject' },
        { name: 'programming language', category: 'concept', granularity: 'abstract', role: 'classification' },
      ],
    },
  ];
  for (const spec of factSpecs) {
    await spec && db.addFact(spec);
  }
  const hierarchyLinks: Array<Parameters<IdentityDB['linkTopics']>[0]> = [
    { parentName: 'software technology', childName: 'programming language' },
    { parentName: 'programming language', childName: 'TypeScript' },
  ];
  for (const link of hierarchyLinks) {
    await db.linkTopics(link);
  }
}
// Read-path specs over the graph built by seedMemoryGraph: topic/fact
// lookups, co-occurrence queries, hierarchy traversal, and alias lookups.
describe('IdentityDB queries', () => {
  let db: IdentityDB;

  beforeEach(async () => {
    db = await IdentityDB.connect({ client: 'sqlite', filename: ':memory:' });
    await db.initialize();
    await seedMemoryGraph(db);
  });

  afterEach(async () => {
    await db.close();
  });

  it('gets a topic with its facts', async () => {
    const topic = await db.getTopicByName('TypeScript', { includeFacts: true });
    expect(topic).not.toBeNull();
    expect(topic?.name).toBe('TypeScript');
    expect(topic?.facts).toHaveLength(2);
    expect(topic?.facts.map((fact) => fact.statement)).toEqual([
      'I have worked with TypeScript since 2025.',
      'TypeScript is a programming language.',
    ]);
  });

  it('gets only the facts linked to another topic', async () => {
    // Only the first seeded fact mentions both TypeScript and 2025.
    const facts = await db.getTopicFactsLinkedTo('TypeScript', '2025');
    expect(facts).toHaveLength(1);
    expect(facts[0]?.statement).toBe('I have worked with TypeScript since 2025.');
  });

  it('lists topics without expanding facts', async () => {
    const topics = await db.listTopics({ includeFacts: false });
    // Expected order looks case-insensitive by name — TODO confirm collation.
    expect(topics.map((topic) => topic.name)).toEqual([
      '2025',
      'I',
      'programming language',
      'software technology',
      'TypeScript',
    ]);
    // The facts key must be absent entirely, not merely an empty array.
    expect('facts' in topics[0]!).toBe(false);
  });

  it('finds connected topics with shared fact counts', async () => {
    const connectedTopics = await db.findConnectedTopics('TypeScript');
    expect(connectedTopics).toEqual([
      expect.objectContaining({ name: '2025', sharedFactCount: 1 }),
      expect.objectContaining({ name: 'I', sharedFactCount: 1 }),
      expect.objectContaining({ name: 'programming language', sharedFactCount: 1 }),
    ]);
  });

  it('finds facts that connect all requested topics', async () => {
    // Intersection semantics: a fact must link every requested topic.
    const facts = await db.findFactsConnectingTopics(['I', 'TypeScript', '2025']);
    expect(facts).toHaveLength(1);
    expect(facts[0]?.statement).toBe('I have worked with TypeScript since 2025.');
  });

  it('lists direct child topics for a parent topic', async () => {
    const children = await db.getTopicChildren('programming language');
    expect(children.map((topic) => topic.name)).toEqual(['TypeScript']);
  });

  it('lists direct parent topics for a child topic', async () => {
    const parents = await db.getTopicParents('TypeScript');
    expect(parents.map((topic) => topic.name)).toEqual(['programming language']);
  });

  it('returns lineage from nearest parent outward', async () => {
    // Lineage walks up the hierarchy: direct parent first, then ancestors.
    const lineage = await db.getTopicLineage('TypeScript');
    expect(lineage.map((topic) => topic.name)).toEqual([
      'programming language',
      'software technology',
    ]);
  });

  it('resolves alias names in topic lookups', async () => {
    await db.addTopicAlias('TypeScript', 'TS');
    const topic = await db.getTopicByName('ts');
    expect(topic?.name).toBe('TypeScript');
  });
});

View File

@@ -0,0 +1,170 @@
import { afterEach, beforeEach, describe, expect, it } from 'vitest';
import { IdentityDB } from '../src/core/identity-db';
import type { FactExtractor } from '../src/ingestion/types';
import type { EmbeddingProvider } from '../src/types/api';
/**
 * Deterministic stand-in for a real embedding service; delegates every
 * input to the keyword-based embeddingFor table below.
 */
class FakeEmbeddingProvider implements EmbeddingProvider {
  model = 'fake-semantic-v1';
  dimensions = 3;

  async embed(input: string): Promise<number[]> {
    return embeddingFor(input);
  }

  async embedMany(inputs: string[]): Promise<number[][]> {
    // embed() is pure, so sequential embedding yields the same vectors.
    const vectors: number[][] = [];
    for (const text of inputs) {
      vectors.push(await this.embed(text));
    }
    return vectors;
  }
}
/**
 * Keyword-driven fake embedding: maps an input string to a fixed 3-d vector.
 * Branch order matters — the Bun+TypeScript combination must win over the
 * individual keyword rules that follow it.
 */
function embeddingFor(input: string): number[] {
  const text = input.toLowerCase();
  const has = (term: string): boolean => text.includes(term);

  if (has('bun') && has('typescript')) {
    return [1, 0, 0];
  }
  if (has('tooling') || has('runtime')) {
    return [0.98, 0.02, 0];
  }
  if (has('typescript')) {
    return [0.9, 0.1, 0];
  }
  if (has('python')) {
    return [0, 1, 0];
  }
  if (has('database')) {
    return [0, 0.2, 0.8];
  }
  // Fallback vector for anything the table does not recognize.
  return [0.1, 0.1, 0.1];
}
// Semantic search specs backed by the deterministic FakeEmbeddingProvider:
// the fake vectors rank Bun/TypeScript facts near "tooling" queries and
// keep the Python fact far away.
describe('IdentityDB semantic search', () => {
  let db: IdentityDB;
  let provider: FakeEmbeddingProvider;

  beforeEach(async () => {
    provider = new FakeEmbeddingProvider();
    db = await IdentityDB.connect({ client: 'sqlite', filename: ':memory:' });
    await db.initialize();
    await db.addFact({
      statement: 'Bun runs TypeScript tooling quickly.',
      topics: [
        { name: 'Bun', category: 'entity', granularity: 'concrete' },
        { name: 'TypeScript', category: 'entity', granularity: 'concrete' },
      ],
    });
    await db.addFact({
      statement: 'TypeScript compiles to JavaScript.',
      topics: [
        { name: 'TypeScript', category: 'entity', granularity: 'concrete' },
        { name: 'JavaScript', category: 'entity', granularity: 'concrete' },
      ],
    });
    await db.addFact({
      statement: 'Python uses indentation syntax.',
      topics: [
        { name: 'Python', category: 'entity', granularity: 'concrete' },
      ],
    });
  });

  afterEach(async () => {
    await db.close();
  });

  it('indexes facts and returns semantic search matches ordered by score', async () => {
    // Facts must be indexed explicitly before search can rank them.
    await db.indexFactEmbeddings({ provider });
    const matches = await db.searchFacts({
      query: 'TypeScript runtime tooling',
      provider,
      limit: 2,
    });
    expect(matches).toHaveLength(2);
    expect(matches[0]?.statement).toBe('Bun runs TypeScript tooling quickly.');
    expect(matches[1]?.statement).toBe('TypeScript compiles to JavaScript.');
    // Results are ordered by descending similarity score.
    expect(matches[0]!.score).toBeGreaterThan(matches[1]!.score);
  });

  it('filters semantic search candidates by topic names', async () => {
    await db.indexFactEmbeddings({ provider });
    // Topic filter restricts candidates even when other facts score higher.
    const matches = await db.searchFacts({
      query: 'TypeScript runtime tooling',
      provider,
      topicNames: ['Python'],
      limit: 5,
    });
    expect(matches.map((match) => match.statement)).toEqual(['Python uses indentation syntax.']);
  });

  it('finds similar facts from an input statement', async () => {
    await db.indexFactEmbeddings({ provider });
    const matches = await db.findSimilarFacts({
      statement: 'Bun makes TypeScript tooling fast.',
      provider,
      limit: 2,
    });
    expect(matches[0]?.statement).toBe('Bun runs TypeScript tooling quickly.');
    expect(matches[0]!.score).toBeGreaterThan(matches[1]!.score);
  });
});
// Dedup-aware ingestion: when embedding similarity exceeds the threshold,
// ingestStatement returns the existing fact instead of inserting a near-duplicate.
describe('IdentityDB dedup-aware ingestion', () => {
  let db: IdentityDB;
  let provider: FakeEmbeddingProvider;
  let extractor: FactExtractor;

  beforeEach(async () => {
    provider = new FakeEmbeddingProvider();
    // Fixed-topic extractor so both statements land on the same topics.
    extractor = {
      async extract(input) {
        return {
          statement: input,
          topics: [
            { name: 'Bun', category: 'entity', granularity: 'concrete' },
            { name: 'TypeScript', category: 'entity', granularity: 'concrete' },
          ],
        };
      },
    };
    db = await IdentityDB.connect({ client: 'sqlite', filename: ':memory:' });
    await db.initialize();
  });

  afterEach(async () => {
    await db.close();
  });

  it('returns the existing fact when ingestion detects a semantic duplicate', async () => {
    const first = await db.ingestStatement('Bun runs TypeScript tooling quickly.', {
      extractor,
      embeddingProvider: provider,
    });
    // Both statements embed to near-identical fake vectors, so the second
    // ingest crosses the 0.95 similarity threshold and is deduplicated.
    const second = await db.ingestStatement('Bun makes TypeScript tooling fast.', {
      extractor,
      embeddingProvider: provider,
      duplicateThreshold: 0.95,
    });
    const facts = await db.getTopicFacts('TypeScript');
    expect(second.id).toBe(first.id);
    expect(facts).toHaveLength(1);
    // The originally stored statement wins; the duplicate is discarded.
    expect(facts[0]?.statement).toBe('Bun runs TypeScript tooling quickly.');
  });
});

23
tsconfig.json Normal file
View File

@@ -0,0 +1,23 @@
{
"compilerOptions": {
"target": "ES2022",
"module": "ESNext",
"moduleResolution": "Bundler",
"lib": ["ES2022"],
"declaration": true,
"declarationMap": true,
"outDir": "dist",
"rootDir": ".",
"strict": true,
"noUncheckedIndexedAccess": true,
"exactOptionalPropertyTypes": true,
"esModuleInterop": true,
"forceConsistentCasingInFileNames": true,
"skipLibCheck": true,
"resolveJsonModule": true,
"isolatedModules": true,
"types": ["node", "vitest/globals"]
},
"include": ["src/**/*.ts", "tests/**/*.ts", "vitest.config.ts", "tsup.config.ts"],
"exclude": ["dist", "node_modules"]
}

11
tsup.config.ts Normal file
View File

@@ -0,0 +1,11 @@
import { defineConfig } from 'tsup';
// tsup build configuration: bundles the library entry point for Node.
export default defineConfig({
  entry: ['src/index.ts'],
  format: ['esm'], // ESM-only output; no CJS build is produced.
  dts: true, // Emit .d.ts declaration files alongside the bundle.
  sourcemap: true,
  clean: true, // Wipe the output directory before each build.
  target: 'node20',
  treeshake: true,
});

12
vitest.config.ts Normal file
View File

@@ -0,0 +1,12 @@
import { defineConfig } from 'vitest/config';
// Vitest configuration shared by all test suites in tests/.
export default defineConfig({
  test: {
    environment: 'node',
    globals: true, // Expose describe/it/expect without per-file imports.
    passWithNoTests: true, // Keep CI green while suites are being scaffolded.
    coverage: {
      enabled: false, // Coverage is opt-in (e.g. via CLI flag) when needed.
    },
  },
});