Rename chat routes to proxy for clarity (#4)
author: Stefan Gasser <redacted>
Fri, 9 Jan 2026 08:07:56 +0000 (09:07 +0100)
committer: GitHub <redacted>
Fri, 9 Jan 2026 08:07:56 +0000 (09:07 +0100)
- Rename chat.ts → proxy.ts to better reflect purpose (LLM proxy routing)
- Update CLAUDE.md architecture to match actual file structure

CLAUDE.md
src/index.ts
src/routes/proxy.test.ts [moved from src/routes/chat.test.ts with 95% similarity]
src/routes/proxy.ts [moved from src/routes/chat.ts with 98% similarity]

index b881b4ecfd882cef71d3ed2c264380c08ac218ef..601e9948c38fb38282b5ef35edceb0860e93d857 100644 (file)
--- a/CLAUDE.md
+++ b/CLAUDE.md
@@ -18,24 +18,22 @@ OpenAI-compatible proxy with two privacy modes: route to local LLM or mask PII f
 src/
 ├── index.ts                 # Hono server entry
 ├── config.ts                # YAML config + Zod validation
-├── schemas/
-│   └── chat.ts              # Request/response schemas
-├── routes/                  # All route handlers
-│   ├── chat.ts              # POST /openai/v1/chat/completions
+├── routes/
+│   ├── proxy.ts             # POST /openai/v1/chat/completions
 │   ├── dashboard.tsx        # Dashboard routes + API
 │   ├── health.ts            # GET /health
 │   └── info.ts              # GET /info
-├── views/                   # JSX components
+├── views/
 │   └── dashboard/
 │       └── page.tsx         # Dashboard UI
 └── services/
-    ├── decision.ts             # Route/mask logic
-    ├── pii-detector.ts         # Presidio client
-    ├── llm-client.ts           # OpenAI/Ollama client
-    ├── masking.ts              # PII mask/unmask
-    ├── stream-transformer.ts   # SSE unmask for streaming
-    ├── language-detector.ts    # Auto language detection
-    └── logger.ts               # SQLite logging
+    ├── decision.ts          # Route/mask logic
+    ├── pii-detector.ts      # Presidio client
+    ├── llm-client.ts        # OpenAI/Ollama client
+    ├── masking.ts           # PII mask/unmask
+    ├── stream-transformer.ts # SSE unmask for streaming
+    ├── language-detector.ts # Auto language detection
+    └── logger.ts            # SQLite logging
 ```
 
 Tests are colocated (`*.test.ts`).
index 4768477eb4c608840f12473dcf052851a812a3a7..4724522e0b08aa7c00c63d48253e3825e77872d1 100644 (file)
@@ -4,10 +4,10 @@ import { createMiddleware } from "hono/factory";
 import { HTTPException } from "hono/http-exception";
 import { logger } from "hono/logger";
 import { getConfig } from "./config";
-import { chatRoutes } from "./routes/chat";
 import { dashboardRoutes } from "./routes/dashboard";
 import { healthRoutes } from "./routes/health";
 import { infoRoutes } from "./routes/info";
+import { proxyRoutes } from "./routes/proxy";
 import { getLogger } from "./services/logger";
 import { getPIIDetector } from "./services/pii-detector";
 
@@ -33,7 +33,7 @@ app.use("*", logger());
 
 app.route("/", healthRoutes);
 app.route("/", infoRoutes);
-app.route("/openai/v1", chatRoutes);
+app.route("/openai/v1", proxyRoutes);
 
 if (config.dashboard.enabled) {
   app.route("/dashboard", dashboardRoutes);
similarity index 95%
rename from src/routes/chat.test.ts
rename to src/routes/proxy.test.ts
index 99b0e457be0834ba7b590e2ef778b3af6c574704..39106a849d9c5872a9b63a8c418268d286d47116 100644 (file)
@@ -1,9 +1,9 @@
 import { describe, expect, test } from "bun:test";
 import { Hono } from "hono";
-import { chatRoutes } from "./chat";
+import { proxyRoutes } from "./proxy";
 
 const app = new Hono();
-app.route("/openai/v1", chatRoutes);
+app.route("/openai/v1", proxyRoutes);
 
 describe("POST /openai/v1/chat/completions", () => {
   test("returns 400 for missing messages", async () => {
similarity index 98%
rename from src/routes/chat.ts
rename to src/routes/proxy.ts
index 4cfcad6d08664dbe9a5edf0b66961a548221a89e..f3a10c76b45a07342573fc4a77d44eca8ec2a45b 100644 (file)
@@ -30,7 +30,7 @@ const ChatCompletionSchema = z
   })
   .passthrough();
 
-export const chatRoutes = new Hono();
+export const proxyRoutes = new Hono();
 
 /**
  * Type guard for MaskDecision
@@ -39,7 +39,7 @@ function isMaskDecision(decision: RoutingDecision): decision is MaskDecision {
   return decision.mode === "mask";
 }
 
-chatRoutes.get("/models", (c) => {
+proxyRoutes.get("/models", (c) => {
   const { upstream } = getRouter().getProvidersInfo();
 
   return proxy(`${upstream.baseUrl}/models`, {
@@ -52,7 +52,7 @@ chatRoutes.get("/models", (c) => {
 /**
  * POST /v1/chat/completions - OpenAI-compatible chat completion endpoint
  */
-chatRoutes.post(
+proxyRoutes.post(
   "/chat/completions",
   zValidator("json", ChatCompletionSchema, (result, c) => {
     if (!result.success) {
git clone https://git.99rst.org/PROJECT