src/
├── index.ts                  # Hono server entry
├── config.ts                 # YAML config + Zod validation
-├── schemas/
-│   └── chat.ts               # Request/response schemas
-├── routes/                   # All route handlers
-│   ├── chat.ts               # POST /openai/v1/chat/completions
+├── routes/
+│   ├── proxy.ts              # POST /openai/v1/chat/completions
│   ├── dashboard.tsx         # Dashboard routes + API
│   ├── health.ts             # GET /health
│   └── info.ts               # GET /info
-├── views/                    # JSX components
+├── views/
│   └── dashboard/
│       └── page.tsx          # Dashboard UI
└── services/
-    ├── decision.ts           # Route/mask logic
-    ├── pii-detector.ts       # Presidio client
-    ├── llm-client.ts         # OpenAI/Ollama client
-    ├── masking.ts            # PII mask/unmask
-    ├── stream-transformer.ts # SSE unmask for streaming
-    ├── language-detector.ts  # Auto language detection
-    └── logger.ts             # SQLite logging
+    ├── decision.ts           # Route/mask logic
+    ├── pii-detector.ts       # Presidio client
+    ├── llm-client.ts         # OpenAI/Ollama client
+    ├── masking.ts            # PII mask/unmask
+    ├── stream-transformer.ts # SSE unmask for streaming
+    ├── language-detector.ts  # Auto language detection
+    └── logger.ts             # SQLite logging
```
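The `masking.ts` (PII mask/unmask) and `stream-transformer.ts` (SSE unmask for streaming) entries above describe the mask/unmask flow the proxy applies around upstream calls. A rough, self-contained sketch of that idea follows; the entity shape, placeholder format, and function names are assumptions for illustration, not the modules' real exports:

```ts
// Sketch only: names and shapes are assumed, not taken from masking.ts.
type DetectedEntity = { start: number; end: number; type: string };

function maskText(text: string, entities: DetectedEntity[]) {
  const mapping = new Map<string, string>();
  let masked = "";
  let cursor = 0;
  // Swap each detected span for a numbered placeholder, remembering the
  // original value so the completion can be restored on the way back.
  [...entities]
    .sort((a, b) => a.start - b.start)
    .forEach((entity, i) => {
      const placeholder = `<${entity.type}_${i + 1}>`;
      mapping.set(placeholder, text.slice(entity.start, entity.end));
      masked += text.slice(cursor, entity.start) + placeholder;
      cursor = entity.end;
    });
  masked += text.slice(cursor);
  return { masked, mapping };
}

function unmaskText(text: string, mapping: Map<string, string>): string {
  // Reverse the substitution on the response from the upstream provider.
  let result = text;
  for (const [placeholder, original] of mapping) {
    result = result.replaceAll(placeholder, original);
  }
  return result;
}

const { masked, mapping } = maskText("Email alice@example.com please", [
  { start: 6, end: 23, type: "EMAIL_ADDRESS" },
]);
console.log(masked);                      // Email <EMAIL_ADDRESS_1> please
console.log(unmaskText(masked, mapping)); // Email alice@example.com please
```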
Tests are colocated with the modules they cover (`*.test.ts`).
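For streaming responses, the same mapping has to be applied to SSE chunks as they pass through, which is the job the tree assigns to `stream-transformer.ts`. A minimal sketch of that idea, reusing the assumed `mapping` shape from the sketch above (again an illustration, not the module's actual API):

```ts
// Sketch only: applies the mask mapping to each SSE chunk as it streams by.
function createUnmaskStream(mapping: Map<string, string>) {
  const decoder = new TextDecoder();
  const encoder = new TextEncoder();
  return new TransformStream<Uint8Array, Uint8Array>({
    transform(chunk, controller) {
      // Decode the SSE chunk ("data: {...}" lines), restore the original
      // values, then re-encode and forward it to the client.
      let text = decoder.decode(chunk, { stream: true });
      for (const [placeholder, original] of mapping) {
        text = text.replaceAll(placeholder, original);
      }
      controller.enqueue(encoder.encode(text));
    },
  });
}

// Assumed usage: pipe the upstream SSE body through the transformer before
// returning it from the handler.
// upstreamResponse.body!.pipeThrough(createUnmaskStream(mapping));
```

A per-chunk replace like this can miss a placeholder split across two chunks, so a real implementation would buffer partial data before substituting.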
import { HTTPException } from "hono/http-exception";
import { logger } from "hono/logger";
import { getConfig } from "./config";
-import { chatRoutes } from "./routes/chat";
import { dashboardRoutes } from "./routes/dashboard";
import { healthRoutes } from "./routes/health";
import { infoRoutes } from "./routes/info";
+import { proxyRoutes } from "./routes/proxy";
import { getLogger } from "./services/logger";
import { getPIIDetector } from "./services/pii-detector";
app.route("/", healthRoutes);
app.route("/", infoRoutes);
-app.route("/openai/v1", chatRoutes);
+app.route("/openai/v1", proxyRoutes);
if (config.dashboard.enabled) {
app.route("/dashboard", dashboardRoutes);
import { describe, expect, test } from "bun:test";
import { Hono } from "hono";
-import { chatRoutes } from "./chat";
+import { proxyRoutes } from "./proxy";
const app = new Hono();
-app.route("/openai/v1", chatRoutes);
+app.route("/openai/v1", proxyRoutes);
describe("POST /openai/v1/chat/completions", () => {
test("returns 400 for missing messages", async () => {
})
.passthrough();
-export const chatRoutes = new Hono();
+export const proxyRoutes = new Hono();
/**
* Type guard for MaskDecision
return decision.mode === "mask";
}
-chatRoutes.get("/models", (c) => {
+proxyRoutes.get("/models", (c) => {
const { upstream } = getRouter().getProvidersInfo();
return proxy(`${upstream.baseUrl}/models`, {
/**
* POST /v1/chat/completions - OpenAI-compatible chat completion endpoint
*/
-chatRoutes.post(
+proxyRoutes.post(
"/chat/completions",
zValidator("json", ChatCompletionSchema, (result, c) => {
if (!result.success) {