feat: initial CodeBoard monorepo scaffold
Turborepo monorepo with npm workspaces:
- apps/web: Next.js 14 frontend with Tailwind v4, SSE progress, doc viewer
- apps/worker: BullMQ job processor (clone → parse → LLM generate)
- packages/shared: TypeScript types
- packages/parser: Babel-based AST parser (JS/TS) + regex (Python)
- packages/llm: OpenAI/Anthropic provider abstraction + prompt pipeline
- packages/diagrams: Mermaid architecture & dependency graph generators
- packages/database: Prisma schema (PostgreSQL)
- Docker multi-stage build (web + worker targets)

All packages compile successfully with tsc and next build.
This commit is contained in:
42
apps/worker/src/index.ts
Normal file
42
apps/worker/src/index.ts
Normal file
@@ -0,0 +1,42 @@
|
||||
import { Worker } from "bullmq";
|
||||
import IORedis from "ioredis";
|
||||
import { processGenerationJob } from "./processor.js";
|
||||
|
||||
const redisUrl = process.env.REDIS_URL ?? "redis://localhost:6379";
|
||||
const connection = new IORedis(redisUrl, { maxRetriesPerRequest: null });
|
||||
|
||||
const worker = new Worker(
|
||||
"codeboard:generate",
|
||||
async (job) => {
|
||||
console.log(`[worker] Processing job ${job.id}: ${job.data.repoUrl}`);
|
||||
return processGenerationJob(job);
|
||||
},
|
||||
{
|
||||
connection,
|
||||
concurrency: 2,
|
||||
removeOnComplete: { count: 100 },
|
||||
removeOnFail: { count: 50 },
|
||||
}
|
||||
);
|
||||
|
||||
worker.on("completed", (job) => {
|
||||
console.log(`[worker] Job ${job.id} completed`);
|
||||
});
|
||||
|
||||
worker.on("failed", (job, err) => {
|
||||
console.error(`[worker] Job ${job?.id} failed:`, err.message);
|
||||
});
|
||||
|
||||
worker.on("ready", () => {
|
||||
console.log("[worker] Ready and waiting for jobs on codeboard:generate");
|
||||
});
|
||||
|
||||
async function shutdown() {
|
||||
console.log("[worker] Shutting down...");
|
||||
await worker.close();
|
||||
await connection.quit();
|
||||
process.exit(0);
|
||||
}
|
||||
|
||||
process.on("SIGTERM", shutdown);
|
||||
process.on("SIGINT", shutdown);
|
||||
87
apps/worker/src/jobs/clone.ts
Normal file
87
apps/worker/src/jobs/clone.ts
Normal file
@@ -0,0 +1,87 @@
|
||||
import { simpleGit } from "simple-git";
|
||||
import { mkdtemp, readdir, stat } from "node:fs/promises";
|
||||
import { join } from "node:path";
|
||||
import { tmpdir } from "node:os";
|
||||
import type { CloneResult } from "@codeboard/shared";
|
||||
|
||||
async function countFiles(dir: string): Promise<{ files: number; lines: number }> {
|
||||
let files = 0;
|
||||
let lines = 0;
|
||||
|
||||
const entries = await readdir(dir, { withFileTypes: true });
|
||||
for (const entry of entries) {
|
||||
if (entry.name === ".git" || entry.name === "node_modules") continue;
|
||||
const fullPath = join(dir, entry.name);
|
||||
if (entry.isDirectory()) {
|
||||
const sub = await countFiles(fullPath);
|
||||
files += sub.files;
|
||||
lines += sub.lines;
|
||||
} else {
|
||||
files++;
|
||||
const fileStat = await stat(fullPath);
|
||||
lines += Math.ceil(fileStat.size / 40);
|
||||
}
|
||||
}
|
||||
|
||||
return { files, lines };
|
||||
}
|
||||
|
||||
export async function cloneRepository(repoUrl: string): Promise<CloneResult> {
|
||||
const tmpDir = await mkdtemp(join(tmpdir(), "codeboard-"));
|
||||
const git = simpleGit();
|
||||
|
||||
await git.clone(repoUrl, tmpDir, ["--depth", "1", "--single-branch"]);
|
||||
|
||||
const localGit = simpleGit(tmpDir);
|
||||
const log = await localGit.log({ maxCount: 1 });
|
||||
const lastCommit = log.latest?.hash ?? "unknown";
|
||||
|
||||
const repoName = repoUrl
|
||||
.replace(/\.git$/, "")
|
||||
.split("/")
|
||||
.slice(-1)[0] ?? "unknown";
|
||||
|
||||
const { files: totalFiles, lines: totalLines } = await countFiles(tmpDir);
|
||||
|
||||
const languageCounts: Record<string, number> = {};
|
||||
const extMap: Record<string, string> = {
|
||||
".ts": "TypeScript", ".tsx": "TypeScript",
|
||||
".js": "JavaScript", ".jsx": "JavaScript",
|
||||
".py": "Python", ".go": "Go",
|
||||
".rs": "Rust", ".java": "Java",
|
||||
".rb": "Ruby", ".php": "PHP",
|
||||
};
|
||||
|
||||
async function scanLanguages(dir: string) {
|
||||
const entries = await readdir(dir, { withFileTypes: true });
|
||||
for (const entry of entries) {
|
||||
if (entry.name.startsWith(".") || entry.name === "node_modules") continue;
|
||||
const fullPath = join(dir, entry.name);
|
||||
if (entry.isDirectory()) {
|
||||
await scanLanguages(fullPath);
|
||||
} else {
|
||||
const ext = entry.name.slice(entry.name.lastIndexOf("."));
|
||||
const lang = extMap[ext];
|
||||
if (lang) {
|
||||
const fileStat = await stat(fullPath);
|
||||
languageCounts[lang] = (languageCounts[lang] ?? 0) + fileStat.size;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
await scanLanguages(tmpDir);
|
||||
|
||||
return {
|
||||
localPath: tmpDir,
|
||||
metadata: {
|
||||
name: repoName,
|
||||
description: "",
|
||||
defaultBranch: "main",
|
||||
languages: languageCounts,
|
||||
stars: 0,
|
||||
lastCommit,
|
||||
totalFiles,
|
||||
totalLines,
|
||||
},
|
||||
};
|
||||
}
|
||||
26
apps/worker/src/jobs/generate.ts
Normal file
26
apps/worker/src/jobs/generate.ts
Normal file
@@ -0,0 +1,26 @@
|
||||
import type { CodeStructure, GeneratedDocs } from "@codeboard/shared";
|
||||
import { createProvider, generateDocumentation } from "@codeboard/llm";
|
||||
|
||||
export async function generateDocs(
|
||||
codeStructure: CodeStructure,
|
||||
onProgress?: (stage: string, progress: number) => void
|
||||
): Promise<GeneratedDocs> {
|
||||
const apiKey =
|
||||
process.env.OPENAI_API_KEY ?? process.env.ANTHROPIC_API_KEY ?? "";
|
||||
|
||||
if (!apiKey) {
|
||||
throw new Error(
|
||||
"No LLM API key configured. Set OPENAI_API_KEY or ANTHROPIC_API_KEY."
|
||||
);
|
||||
}
|
||||
|
||||
const providerType = process.env.OPENAI_API_KEY ? "openai" : "anthropic";
|
||||
const provider = createProvider({
|
||||
provider: providerType,
|
||||
apiKey,
|
||||
model: process.env.LLM_MODEL,
|
||||
baseUrl: process.env.LLM_BASE_URL,
|
||||
});
|
||||
|
||||
return generateDocumentation(codeStructure, provider, onProgress);
|
||||
}
|
||||
8
apps/worker/src/jobs/parse.ts
Normal file
8
apps/worker/src/jobs/parse.ts
Normal file
@@ -0,0 +1,8 @@
|
||||
import type { CodeStructure } from "@codeboard/shared";
|
||||
import { analyzeRepository } from "@codeboard/parser";
|
||||
|
||||
export async function parseRepository(
|
||||
localPath: string
|
||||
): Promise<CodeStructure> {
|
||||
return analyzeRepository(localPath);
|
||||
}
|
||||
83
apps/worker/src/processor.ts
Normal file
83
apps/worker/src/processor.ts
Normal file
@@ -0,0 +1,83 @@
|
||||
import type { Job } from "bullmq";
|
||||
import IORedis from "ioredis";
|
||||
import { cloneRepository } from "./jobs/clone.js";
|
||||
import { parseRepository } from "./jobs/parse.js";
|
||||
import { generateDocs } from "./jobs/generate.js";
|
||||
|
||||
interface GenerateJobData {
|
||||
repoUrl: string;
|
||||
generationId: string;
|
||||
}
|
||||
|
||||
const redis = new IORedis(process.env.REDIS_URL ?? "redis://localhost:6379");
|
||||
|
||||
async function updateProgress(
|
||||
generationId: string,
|
||||
status: string,
|
||||
progress: number,
|
||||
message?: string
|
||||
) {
|
||||
await redis.publish(
|
||||
`codeboard:progress:${generationId}`,
|
||||
JSON.stringify({ status, progress, message })
|
||||
);
|
||||
await redis.set(
|
||||
`codeboard:status:${generationId}`,
|
||||
JSON.stringify({ status, progress, message }),
|
||||
"EX",
|
||||
3600
|
||||
);
|
||||
}
|
||||
|
||||
export async function processGenerationJob(
|
||||
job: Job<GenerateJobData>
|
||||
): Promise<unknown> {
|
||||
const { repoUrl, generationId } = job.data;
|
||||
const startTime = Date.now();
|
||||
|
||||
try {
|
||||
await updateProgress(generationId, "CLONING", 10, "Cloning repository...");
|
||||
const cloneResult = await cloneRepository(repoUrl);
|
||||
|
||||
await updateProgress(
|
||||
generationId,
|
||||
"PARSING",
|
||||
30,
|
||||
`Analyzing ${cloneResult.metadata.totalFiles} files...`
|
||||
);
|
||||
const codeStructure = await parseRepository(cloneResult.localPath);
|
||||
|
||||
await updateProgress(
|
||||
generationId,
|
||||
"GENERATING",
|
||||
50,
|
||||
`Generating docs for ${codeStructure.modules.length} modules...`
|
||||
);
|
||||
|
||||
const docs = await generateDocs(codeStructure, (stage, progress) => {
|
||||
const mappedProgress = 50 + Math.floor(progress * 0.4);
|
||||
updateProgress(generationId, "GENERATING", mappedProgress, `${stage}...`);
|
||||
});
|
||||
|
||||
docs.id = generationId;
|
||||
docs.repoUrl = repoUrl;
|
||||
docs.repoName = cloneResult.metadata.name;
|
||||
|
||||
const duration = Math.floor((Date.now() - startTime) / 1000);
|
||||
|
||||
await redis.set(
|
||||
`codeboard:result:${generationId}`,
|
||||
JSON.stringify(docs),
|
||||
"EX",
|
||||
86400
|
||||
);
|
||||
|
||||
await updateProgress(generationId, "COMPLETED", 100, "Done!");
|
||||
|
||||
return { generationId, duration, repoName: cloneResult.metadata.name };
|
||||
} catch (err) {
|
||||
const message = err instanceof Error ? err.message : "Unknown error";
|
||||
await updateProgress(generationId, "FAILED", 0, message);
|
||||
throw err;
|
||||
}
|
||||
}
|
||||
Reference in New Issue
Block a user