Files
codeboard/apps/worker/src/jobs/generate.ts
Vectry 79dad6124f feat: initial CodeBoard monorepo scaffold
Turborepo monorepo with npm workspaces:
- apps/web: Next.js 14 frontend with Tailwind v4, SSE progress, doc viewer
- apps/worker: BullMQ job processor (clone → parse → LLM generate)
- packages/shared: TypeScript types
- packages/parser: Babel-based AST parser (JS/TS) + regex (Python)
- packages/llm: OpenAI/Anthropic provider abstraction + prompt pipeline
- packages/diagrams: Mermaid architecture & dependency graph generators
- packages/database: Prisma schema (PostgreSQL)
- Docker multi-stage build (web + worker targets)

All packages compile successfully with tsc and next build.
2026-02-09 15:22:50 +00:00

27 lines
815 B
TypeScript

import type { CodeStructure, GeneratedDocs } from "@codeboard/shared";
import { createProvider, generateDocumentation } from "@codeboard/llm";
/**
 * Generate documentation for a parsed code structure via an LLM provider.
 *
 * Provider selection: OpenAI is preferred when OPENAI_API_KEY holds a
 * non-empty value; otherwise Anthropic is used. LLM_MODEL and LLM_BASE_URL
 * env vars, if set, are forwarded to the provider unchanged.
 *
 * @param codeStructure - parsed repository structure to document
 * @param onProgress - optional callback invoked with (stage, progress)
 * @returns the generated documentation
 * @throws Error when neither OPENAI_API_KEY nor ANTHROPIC_API_KEY is set
 *   to a non-empty value
 */
export async function generateDocs(
  codeStructure: CodeStructure,
  onProgress?: (stage: string, progress: number) => void
): Promise<GeneratedDocs> {
  // Use truthy fallback (||), not ??: an empty-string OPENAI_API_KEY must
  // fall through to ANTHROPIC_API_KEY rather than count as a configured key.
  // (?? only skips null/undefined, so "" would previously block the
  // Anthropic fallback and force the error below.)
  const openaiKey = process.env.OPENAI_API_KEY;
  const apiKey = openaiKey || process.env.ANTHROPIC_API_KEY || "";
  if (!apiKey) {
    throw new Error(
      "No LLM API key configured. Set OPENAI_API_KEY or ANTHROPIC_API_KEY."
    );
  }
  // Derive the provider from the same truthiness check used to pick the
  // key, so provider and credential can never disagree.
  const providerType = openaiKey ? "openai" : "anthropic";
  const provider = createProvider({
    provider: providerType,
    apiKey,
    model: process.env.LLM_MODEL,
    baseUrl: process.env.LLM_BASE_URL,
  });
  return generateDocumentation(codeStructure, provider, onProgress);
}