Compare commits

...

59 Commits

Author SHA1 Message Date
b3c375d26d feat: add docs viewer page 2001-01-01 00:00:00 +00:00
618055be6b feat: add progress tracker component 2001-01-01 00:00:00 +00:00
7f80cf5868 feat: add docs API endpoint 2001-01-01 00:00:00 +00:00
73726b5d97 feat: add generate page with progress tracker 2001-01-01 00:00:00 +00:00
0ec34d5eed feat: add global styles with animations and effects 2001-01-01 00:00:00 +00:00
dec0b3d6f0 feat: add app layout with fonts and metadata 2001-01-01 00:00:00 +00:00
b7268c2fb6 feat: add BullMQ queue singleton 2001-01-01 00:00:00 +00:00
41ac84378c feat: add Redis client singleton 2001-01-01 00:00:00 +00:00
10555d709e chore: add public directory marker 2001-01-01 00:00:00 +00:00
df64665488 feat: add PostCSS config 2001-01-01 00:00:00 +00:00
8337fab076 chore: add web app tsconfig 2001-01-01 00:00:00 +00:00
48e3346bdd feat: add web app package 2001-01-01 00:00:00 +00:00
fcd2b3c890 feat: add docs generation job 2001-01-01 00:00:00 +00:00
b2a7dd2f24 feat: add repo parse job 2001-01-01 00:00:00 +00:00
ed95e45fff feat: add repo clone job 2001-01-01 00:00:00 +00:00
d7b58a7fbf feat: add worker main entry point 2001-01-01 00:00:00 +00:00
e03187d885 feat: add worker app package 2001-01-01 00:00:00 +00:00
5a5919aaed chore: add worker app tsconfig 2001-01-01 00:00:00 +00:00
366eb70f8b feat: add Prisma client singleton 2001-01-01 00:00:00 +00:00
1313d4114c feat: add database package 2001-01-01 00:00:00 +00:00
4abe9ce599 feat: add Prisma schema 2001-01-01 00:00:00 +00:00
79faecc3ee chore: add diagrams package tsconfig 2001-01-01 00:00:00 +00:00
ddf4af3b19 feat: add diagrams module exports 2001-01-01 00:00:00 +00:00
be35a59a58 feat: add dependency graph generator 2001-01-01 00:00:00 +00:00
d4d4cac34a feat: add diagrams package 2001-01-01 00:00:00 +00:00
1941a9de2f feat: add architecture diagram generator 2001-01-01 00:00:00 +00:00
52f2a7158d feat: add Anthropic LLM provider 2001-01-01 00:00:00 +00:00
19d1c37661 feat: add LLM provider factory 2001-01-01 00:00:00 +00:00
d5c6ab51ba feat: add OpenAI LLM provider 2001-01-01 00:00:00 +00:00
49488770f7 feat: add LLM provider base interface 2001-01-01 00:00:00 +00:00
8c03824831 feat: add module summary prompt 2001-01-01 00:00:00 +00:00
1210c0769d feat: add patterns detection prompt 2001-01-01 00:00:00 +00:00
2c1e5f4518 feat: add getting started prompt 2001-01-01 00:00:00 +00:00
a3bfcdf267 feat: add architecture overview prompt 2001-01-01 00:00:00 +00:00
cafc31d40d feat: add documentation generation pipeline 2001-01-01 00:00:00 +00:00
bef3c3766d feat: add LLM module exports 2001-01-01 00:00:00 +00:00
a85105689c chore: add LLM package tsconfig 2001-01-01 00:00:00 +00:00
bb16dc4b6b feat: add code chunker utility 2001-01-01 00:00:00 +00:00
cf7c7ba4d1 feat: add LLM package 2001-01-01 00:00:00 +00:00
27832c6eaa feat: add TypeScript language parser 2001-01-01 00:00:00 +00:00
f8a4eab76a feat: add Python language parser 2001-01-01 00:00:00 +00:00
00154d65f1 feat: add file walker utility 2001-01-01 00:00:00 +00:00
b0065b60c4 feat: add repository analyzer 2001-01-01 00:00:00 +00:00
a25b2959f6 feat: add parser module exports 2001-01-01 00:00:00 +00:00
9648e3a165 chore: add parser package tsconfig 2001-01-01 00:00:00 +00:00
85f5b41a0c feat: add language parser base interface 2001-01-01 00:00:00 +00:00
4286c94023 feat: add parser package 2001-01-01 00:00:00 +00:00
3cfc85bd90 feat: add shared type definitions 2001-01-01 00:00:00 +00:00
b7b828095f chore: add shared package tsconfig 2001-01-01 00:00:00 +00:00
54368255aa feat: add shared types package 2001-01-01 00:00:00 +00:00
3c21f2f15a feat: add shared types exports 2001-01-01 00:00:00 +00:00
debcaf74b6 feat: add docker-compose services configuration 2001-01-01 00:00:00 +00:00
68b51cd36d chore: add environment example file 2001-01-01 00:00:00 +00:00
e12348d96a feat: add Docker multi-stage build file 2001-01-01 00:00:00 +00:00
260e3b5f42 chore: add dockerignore file 2001-01-01 00:00:00 +00:00
40b653a127 chore: add TypeScript base config 2001-01-01 00:00:00 +00:00
12a4f73375 chore: add monorepo root package.json 2001-01-01 00:00:00 +00:00
d0dc84da81 docs: update README with full project documentation 2001-01-01 00:00:00 +00:00
0a400debf6 chore: update gitignore for monorepo 2001-01-01 00:00:00 +00:00
59 changed files with 3000 additions and 136 deletions

14
.dockerignore Normal file
View File

@@ -0,0 +1,14 @@
node_modules
.next
dist
.turbo
*.tsbuildinfo
.env
.env.local
.env.*.local
coverage
.DS_Store
tmp
.vercel
*.log
.git

8
.env.example Normal file
View File

@@ -0,0 +1,8 @@
DATABASE_URL=postgresql://codeboard:codeboard@localhost:5432/codeboard
REDIS_URL=redis://localhost:6379
OPENAI_API_KEY=
ANTHROPIC_API_KEY=
GITHUB_CLIENT_ID=
GITHUB_CLIENT_SECRET=
NEXTAUTH_SECRET=
NEXTAUTH_URL=http://localhost:3000

144
.gitignore vendored
View File

@@ -1,138 +1,14 @@
# ---> Node
# Logs
logs
*.log
npm-debug.log*
yarn-debug.log*
yarn-error.log*
lerna-debug.log*
.pnpm-debug.log*
# Diagnostic reports (https://nodejs.org/api/report.html)
report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json
# Runtime data
pids
*.pid
*.seed
*.pid.lock
# Directory for instrumented libs generated by jscoverage/JSCover
lib-cov
# Coverage directory used by tools like istanbul
coverage
*.lcov
# nyc test coverage
.nyc_output
# Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files)
.grunt
# Bower dependency directory (https://bower.io/)
bower_components
# node-waf configuration
.lock-wscript
# Compiled binary addons (https://nodejs.org/api/addons.html)
build/Release
# Dependency directories
node_modules/
jspm_packages/
# Snowpack dependency directory (https://snowpack.dev/)
web_modules/
# TypeScript cache
.next/
dist/
.turbo/
*.tsbuildinfo
# Optional npm cache directory
.npm
# Optional eslint cache
.eslintcache
# Optional stylelint cache
.stylelintcache
# Microbundle cache
.rpt2_cache/
.rts2_cache_cjs/
.rts2_cache_es/
.rts2_cache_umd/
# Optional REPL history
.node_repl_history
# Output of 'npm pack'
*.tgz
# Yarn Integrity file
.yarn-integrity
# dotenv environment variable files
.env
.env.development.local
.env.test.local
.env.production.local
.env.local
# parcel-bundler cache (https://parceljs.org/)
.cache
.parcel-cache
# Next.js build output
.next
out
# Nuxt.js build / generate output
.nuxt
dist
# Gatsby files
.cache/
# Comment in the public line in if your project uses Gatsby and not Next.js
# https://nextjs.org/blog/next-9-1#public-directory-support
# public
# vuepress build output
.vuepress/dist
# vuepress v2.x temp and cache directory
.temp
.cache
# vitepress build output
**/.vitepress/dist
# vitepress cache directory
**/.vitepress/cache
# Docusaurus cache and generated files
.docusaurus
# Serverless directories
.serverless/
# FuseBox cache
.fusebox/
# DynamoDB Local files
.dynamodb/
# TernJS port file
.tern-port
# Stores VSCode versions used for testing VSCode extensions
.vscode-test
# yarn v2
.yarn/cache
.yarn/unplugged
.yarn/build-state.yml
.yarn/install-state.gz
.pnp.*
.env.*.local
coverage/
.DS_Store
tmp/
.vercel
*.log
generated/

48
Dockerfile Normal file
View File

@@ -0,0 +1,48 @@
# ---- base: Node 20 on Alpine; git is installed because the worker clones repos ----
FROM node:20-alpine AS base
RUN apk add --no-cache git
WORKDIR /app

# ---- deps: install the full workspace dependency tree ----
# Every workspace manifest is copied first so the npm-install layer stays
# cached until a package.json actually changes.
FROM base AS deps
COPY package.json package-lock.json* ./
COPY apps/web/package.json ./apps/web/
COPY apps/worker/package.json ./apps/worker/
COPY packages/shared/package.json ./packages/shared/
COPY packages/parser/package.json ./packages/parser/
COPY packages/llm/package.json ./packages/llm/
COPY packages/diagrams/package.json ./packages/diagrams/
COPY packages/database/package.json ./packages/database/
# --production=false keeps devDependencies; the builder stage needs them.
RUN npm install --production=false

# ---- builder: compile all workspaces via turborepo ----
FROM base AS builder
COPY --from=deps /app/node_modules ./node_modules
COPY . .
RUN npx turbo build

# ---- web: minimal Next.js runtime image (standalone output) ----
# The standalone bundle carries its own pruned node_modules, so only the
# build artifacts are copied; runs as a non-root user.
FROM base AS web
RUN addgroup --system --gid 1001 nodejs && \
adduser --system --uid 1001 nextjs
COPY --from=builder --chown=nextjs:nodejs /app/apps/web/.next/standalone ./
COPY --from=builder --chown=nextjs:nodejs /app/apps/web/.next/static ./apps/web/.next/static
COPY --from=builder --chown=nextjs:nodejs /app/apps/web/public ./apps/web/public
USER nextjs
EXPOSE 3000
ENV PORT=3000 HOSTNAME="0.0.0.0"
CMD ["node", "apps/web/server.js"]

# ---- worker: BullMQ job-processor runtime image ----
# Unlike the web stage, the worker reuses the full node_modules from deps
# plus each package's compiled dist output; also runs as non-root.
FROM base AS worker
RUN addgroup --system --gid 1001 nodejs && \
adduser --system --uid 1001 workeruser
COPY --from=deps /app/node_modules ./node_modules
COPY --from=builder /app/apps/worker/dist ./apps/worker/dist
COPY --from=builder /app/packages/shared/dist ./packages/shared/dist
COPY --from=builder /app/packages/parser/dist ./packages/parser/dist
COPY --from=builder /app/packages/llm/dist ./packages/llm/dist
COPY --from=builder /app/packages/diagrams/dist ./packages/diagrams/dist
COPY --from=builder /app/package.json ./
COPY --from=builder /app/apps/worker/package.json ./apps/worker/
COPY --from=builder /app/packages/shared/package.json ./packages/shared/
COPY --from=builder /app/packages/parser/package.json ./packages/parser/
COPY --from=builder /app/packages/llm/package.json ./packages/llm/
COPY --from=builder /app/packages/diagrams/package.json ./packages/diagrams/
USER workeruser
CMD ["node", "apps/worker/dist/index.js"]

View File

@@ -1,3 +1,60 @@
# codeboard
# CodeBoard
Codebase → Onboarding Docs Generator. Paste a GitHub repo URL, get interactive developer onboarding documentation in minutes.
## Architecture
```
codeboard/
├── apps/
│ ├── web/ # Next.js 14 frontend + API routes
│ └── worker/ # BullMQ job processor
├── packages/
│ ├── shared/ # TypeScript types
│ ├── parser/ # Babel-based AST parser (JS/TS) + regex (Python)
│ ├── llm/ # OpenAI/Anthropic abstraction + prompt pipeline
│ ├── diagrams/ # Mermaid diagram generators
│ └── database/ # Prisma schema + client
```
## Quick Start
```bash
# Install dependencies
npm install
# Build all packages
npm run build
# Start with Docker
docker compose up
```
## Development
```bash
# Run dev server (all workspaces)
npm run dev
# Build
npm run build
```
## Tech Stack
- **Frontend**: Next.js 14, React 18, Tailwind CSS 4
- **Backend**: BullMQ workers, Redis pub/sub for real-time progress
- **Parser**: @babel/parser for JS/TS, regex-based for Python
- **LLM**: Provider abstraction (OpenAI GPT-4o / Anthropic Claude)
- **Diagrams**: Mermaid.js auto-generated architecture & dependency graphs
- **Database**: PostgreSQL + Prisma
- **Queue**: Redis + BullMQ
- **Deployment**: Docker multi-stage builds
## License
MIT
---
Built by [Vectry](https://company.repi.fun) — Engineering AI into your workflow.

33
apps/web/package.json Normal file
View File

@@ -0,0 +1,33 @@
{
"name": "@codeboard/web",
"version": "0.0.1",
"private": true,
"scripts": {
"dev": "next dev",
"build": "next build",
"start": "next start",
"lint": "next lint",
"db:generate": "prisma generate --schema=../../packages/database/prisma/schema.prisma",
"db:push": "prisma db push --schema=../../packages/database/prisma/schema.prisma"
},
"dependencies": {
"@codeboard/shared": "*",
"bullmq": "^5.34.0",
"ioredis": "^5.4.0",
"lucide-react": "^0.563.0",
"mermaid": "^11.4.0",
"next": "^14.2.0",
"react": "^18.3.0",
"react-dom": "^18.3.0",
"react-markdown": "^9.0.0"
},
"devDependencies": {
"@tailwindcss/postcss": "^4.0.0",
"@types/node": "^20.0.0",
"@types/react": "^18.3.0",
"@types/react-dom": "^18.3.0",
"postcss": "^8.5.0",
"tailwindcss": "^4.0.0",
"typescript": "^5.7"
}
}

View File

@@ -0,0 +1,5 @@
// PostCSS configuration: Tailwind CSS v4 runs entirely as a PostCSS
// plugin (no tailwind.config.js required for the default setup).
export default {
  plugins: {
    "@tailwindcss/postcss": {},
  },
};

0
apps/web/public/.gitkeep Normal file
View File

View File

@@ -0,0 +1,20 @@
import { NextResponse } from "next/server";
import { getRedis } from "@/lib/redis";
// GET /api/docs/[id] — return the stored generation result for one run.
//
// NOTE(review): `params` is typed as a Promise (the Next.js 15 convention)
// while this app depends on next ^14, where it is a plain object. `await`
// resolves both forms, so it works at runtime — but confirm the intended
// Next version and align the typing with the other route/page files.
export async function GET(
  _request: Request,
  { params }: { params: Promise<{ id: string }> }
) {
  const { id } = await params;
  const redis = getRedis();
  // Presumably written by the worker when a run finishes — verify the key
  // scheme against the worker's processor.
  const result = await redis.get(`codeboard:result:${id}`);
  if (!result) {
    // Missing key: unknown id, or the run has not completed yet.
    return NextResponse.json(
      { error: "Documentation not found" },
      { status: 404 }
    );
  }
  // Stored value is a JSON string; parse and return it verbatim.
  return NextResponse.json(JSON.parse(result));
}

View File

@@ -0,0 +1,54 @@
import type { GeneratedDocs } from "@codeboard/shared";
import { notFound } from "next/navigation";
import { Github, ArrowLeft } from "lucide-react";
import Link from "next/link";
/**
 * Load the generated docs for `id` from the app's own API.
 *
 * Returns null on any failure — non-2xx response or network error — so the
 * page can fall through to notFound() instead of throwing during render.
 */
async function fetchDocs(id: string): Promise<GeneratedDocs | null> {
  const base = process.env.NEXT_PUBLIC_APP_URL || "http://localhost:3000";
  try {
    const res = await fetch(`${base}/api/docs/${id}`, {
      cache: "no-store",
    });
    return res.ok ? res.json() : null;
  } catch {
    return null;
  }
}
/**
 * Server component for /docs/[id]: renders the generated documentation
 * page, or the 404 boundary when the run's result cannot be loaded.
 *
 * Fix: the previous revision's subtitle referenced an undefined
 * identifier `repoUrl` (a compile error — no such binding exists in this
 * file); the fallback chain now uses only values in scope.
 */
export default async function DocsPage({
  params,
}: {
  params: { id: string };
}) {
  const docs = await fetchDocs(params.id);
  if (!docs) {
    // Renders Next.js's not-found boundary; execution stops here.
    notFound();
  }
  return (
    <div className="min-h-screen">
      <div className="border-b border-white/10 bg-black/20">
        <div className="max-w-7xl mx-auto px-4 sm:px-6 lg:px-8 py-20">
          <div className="text-center mb-12">
            {/* NOTE(review): heading text matches the generate page's
                "Analyzing Repository" — confirm this is intended for the
                finished-docs view rather than e.g. the repo name. */}
            <h1 className="text-2xl sm:text-3xl font-bold text-white">
              Analyzing Repository
            </h1>
            <p className="text-zinc-400 max-w-xl mx-auto">
              {docs.repoName || "Unknown repository"}
            </p>
          </div>
          <div className="relative inline-flex items-center justify-center w-16 h-16 rounded-2xl glass border-white/10">
            <Github className="w-6 h-6 text-zinc-400" />
          </div>
        </div>
      </div>
    </div>
  );
}

View File

@@ -0,0 +1,83 @@
import { Suspense } from "react";
import { ProgressTracker } from "@/components/progress-tracker";
import { Github, Loader2 } from "lucide-react";
/**
 * Inner content for /generate: reads the repo URL and generation id from
 * the query string and mounts the live ProgressTracker.
 *
 * NOTE(review): searchParams is consumed as a plain object (Next.js 14
 * convention) while the docs API route types params as a Promise (Next 15
 * convention) — confirm the target Next version and make them consistent.
 */
function GeneratePageContent({
  searchParams,
}: {
  searchParams: { repo?: string; id?: string };
}) {
  const repoUrl = searchParams.repo || "";
  const generationId = searchParams.id || "";
  // "owner/repo" display form: strip the github.com prefix and any
  // trailing slash; the render below falls back to the raw URL if empty.
  const repoName = repoUrl
    ? repoUrl.replace("https://github.com/", "").replace(/\/$/, "")
    : "";
  return (
    <div className="max-w-2xl mx-auto px-4 sm:px-6 lg:px-8 py-20">
      <div className="text-center mb-12">
        <div className="inline-flex items-center justify-center w-16 h-16 rounded-2xl glass mb-6">
          <Github className="w-8 h-8 text-zinc-400" />
        </div>
        <h1 className="text-2xl sm:text-3xl font-bold text-white mb-3">
          Analyzing Repository
        </h1>
        <p className="text-zinc-400 font-mono text-sm break-all max-w-md mx-auto">
          {repoName || repoUrl || "Unknown repository"}
        </p>
      </div>
      {/* Without an id there is no job to track — show an error card. */}
      {generationId ? (
        <ProgressTracker generationId={generationId} repoUrl={repoUrl} />
      ) : (
        <div className="text-center p-8 rounded-2xl glass border-red-500/30">
          <p className="text-red-400">
            Missing generation ID. Please try again.
          </p>
        </div>
      )}
    </div>
  );
}
/**
 * Loading fallback for the generate page: mirrors the real layout with
 * pulsing placeholders — a header block plus five step rows (matching
 * the five entries in the real progress checklist).
 */
function GeneratePageSkeleton() {
  return (
    <div className="max-w-2xl mx-auto px-4 sm:px-6 lg:px-8 py-20">
      <div className="text-center mb-12">
        <div className="inline-flex items-center justify-center w-16 h-16 rounded-2xl glass mb-6">
          <Loader2 className="w-8 h-8 text-zinc-400 animate-spin" />
        </div>
        <div className="h-8 w-48 bg-zinc-800 rounded animate-pulse mx-auto mb-3" />
        <div className="h-4 w-64 bg-zinc-800 rounded animate-pulse mx-auto" />
      </div>
      <div className="space-y-4">
        {[1, 2, 3, 4, 5].map((i) => (
          <div
            key={i}
            className="flex items-center gap-4 p-4 rounded-xl bg-zinc-900/50"
          >
            <div className="w-8 h-8 rounded-full bg-zinc-800 animate-pulse" />
            <div className="flex-1 h-4 bg-zinc-800 rounded animate-pulse" />
          </div>
        ))}
      </div>
    </div>
  );
}
/**
 * /generate route entry point. Wraps the content in <Suspense> so the
 * skeleton renders while the server component streams in.
 */
export default function GeneratePage({
  searchParams,
}: {
  searchParams: { repo?: string; id?: string };
}) {
  return (
    <Suspense fallback={<GeneratePageSkeleton />}>
      <GeneratePageContent searchParams={searchParams} />
    </Suspense>
  );
}

View File

@@ -0,0 +1,365 @@
/* Tailwind v4 entry point — pulls in the whole framework via PostCSS. */
@import "tailwindcss";

/* Design tokens: dark-theme palette, gradients, and shadows shared by
   the custom component classes below. */
:root {
  --background: #0a0a0f;
  --surface: rgba(255, 255, 255, 0.03);
  --surface-hover: rgba(255, 255, 255, 0.06);
  --border: rgba(255, 255, 255, 0.08);
  --border-strong: rgba(255, 255, 255, 0.15);
  --text-primary: #ffffff;
  --text-secondary: #a1a1aa;
  --text-muted: #71717a;
  --accent-blue: #3b82f6;
  --accent-indigo: #6366f1;
  --accent-purple: #9333ea;
  --accent-cyan: #06b6d4;
  --gradient-primary: linear-gradient(135deg, #3b82f6 0%, #6366f1 50%, #9333ea 100%);
  --gradient-subtle: linear-gradient(135deg, rgba(59, 130, 246, 0.1) 0%, rgba(147, 51, 234, 0.1) 100%);
  --shadow-glow: 0 0 40px rgba(59, 130, 246, 0.3);
  --shadow-card: 0 4px 24px rgba(0, 0, 0, 0.4);
}

/* Global resets and base typography. */
* {
  box-sizing: border-box;
}
html {
  scroll-behavior: smooth;
}
body {
  background-color: var(--background);
  color: var(--text-primary);
  /* Enable standard ligatures and contextual alternates. */
  font-feature-settings: "rlig" 1, "calt" 1;
}

/* Blue-tinted text selection to match the accent palette. */
::selection {
  background: rgba(59, 130, 246, 0.3);
  color: white;
}
/* Frosted-glass surfaces (two strengths). */
.glass {
  background: var(--surface);
  backdrop-filter: blur(20px);
  -webkit-backdrop-filter: blur(20px);
  border: 1px solid var(--border);
}
.glass-strong {
  background: rgba(255, 255, 255, 0.05);
  backdrop-filter: blur(24px);
  -webkit-backdrop-filter: blur(24px);
  border: 1px solid var(--border-strong);
}

/* Gradient-filled text (clip the gradient to the glyphs). */
.gradient-text {
  background: var(--gradient-primary);
  -webkit-background-clip: text;
  -webkit-text-fill-color: transparent;
  background-clip: text;
}

/* 1px gradient border drawn with a masked overlay so the element's own
   background stays untouched. */
.gradient-border {
  position: relative;
}
.gradient-border::before {
  content: "";
  position: absolute;
  inset: 0;
  border-radius: inherit;
  padding: 1px;
  background: var(--gradient-primary);
  -webkit-mask: linear-gradient(#fff 0 0) content-box, linear-gradient(#fff 0 0);
  mask: linear-gradient(#fff 0 0) content-box, linear-gradient(#fff 0 0);
  -webkit-mask-composite: xor;
  mask-composite: exclude;
  pointer-events: none;
}

/* Accent glow shadows. */
.glow {
  box-shadow: var(--shadow-glow);
}
.glow-subtle {
  box-shadow: 0 0 60px rgba(59, 130, 246, 0.15);
}
/* Keyframe definitions; applied via the .animate-* classes below. */
@keyframes fade-in {
  from {
    opacity: 0;
  }
  to {
    opacity: 1;
  }
}
@keyframes slide-up {
  from {
    opacity: 0;
    transform: translateY(24px);
  }
  to {
    opacity: 1;
    transform: translateY(0);
  }
}
@keyframes slide-down {
  from {
    opacity: 0;
    transform: translateY(-12px);
  }
  to {
    opacity: 1;
    transform: translateY(0);
  }
}
@keyframes scale-in {
  from {
    opacity: 0;
    transform: scale(0.95);
  }
  to {
    opacity: 1;
    transform: scale(1);
  }
}
/* Breathing blue/purple glow for highlighted elements. */
@keyframes pulse-glow {
  0%, 100% {
    box-shadow: 0 0 20px rgba(59, 130, 246, 0.4);
  }
  50% {
    box-shadow: 0 0 40px rgba(59, 130, 246, 0.6), 0 0 60px rgba(147, 51, 234, 0.3);
  }
}
/* Sweeps a background gradient across the element (loading shimmer). */
@keyframes shimmer {
  0% {
    background-position: -200% 0;
  }
  100% {
    background-position: 200% 0;
  }
}
@keyframes float {
  0%, 100% {
    transform: translateY(0);
  }
  50% {
    transform: translateY(-10px);
  }
}
@keyframes spin-slow {
  from {
    transform: rotate(0deg);
  }
  to {
    transform: rotate(360deg);
  }
}
/* Animation utility classes wrapping the keyframes above. */
.animate-fade-in {
  animation: fade-in 0.6s ease-out forwards;
}
.animate-slide-up {
  animation: slide-up 0.6s ease-out forwards;
}
.animate-slide-down {
  animation: slide-down 0.4s ease-out forwards;
}
.animate-scale-in {
  animation: scale-in 0.4s ease-out forwards;
}
.animate-pulse-glow {
  animation: pulse-glow 2s ease-in-out infinite;
}
.animate-shimmer {
  background: linear-gradient(90deg, transparent, rgba(255,255,255,0.1), transparent);
  background-size: 200% 100%;
  animation: shimmer 2s infinite;
}
.animate-float {
  animation: float 6s ease-in-out infinite;
}
.animate-spin-slow {
  animation: spin-slow 20s linear infinite;
}

/* Staggered entrance delays for sequential list/grid items. */
.stagger-1 { animation-delay: 0.1s; }
.stagger-2 { animation-delay: 0.2s; }
.stagger-3 { animation-delay: 0.3s; }
.stagger-4 { animation-delay: 0.4s; }
.stagger-5 { animation-delay: 0.5s; }

/* Slim, translucent scrollbars (Firefox + WebKit variants). */
.scrollbar-thin {
  scrollbar-width: thin;
  scrollbar-color: rgba(255, 255, 255, 0.2) transparent;
}
.scrollbar-thin::-webkit-scrollbar {
  width: 6px;
  height: 6px;
}
.scrollbar-thin::-webkit-scrollbar-track {
  background: transparent;
}
.scrollbar-thin::-webkit-scrollbar-thumb {
  background: rgba(255, 255, 255, 0.2);
  border-radius: 3px;
}
.scrollbar-thin::-webkit-scrollbar-thumb:hover {
  background: rgba(255, 255, 255, 0.3);
}
/* Decorative page backgrounds (used as fixed overlay layers in the root
   layout). */
.bg-grid {
  background-image:
    linear-gradient(rgba(255, 255, 255, 0.03) 1px, transparent 1px),
    linear-gradient(90deg, rgba(255, 255, 255, 0.03) 1px, transparent 1px);
  background-size: 60px 60px;
}
.bg-gradient-radial {
  background: radial-gradient(ellipse at top, rgba(59, 130, 246, 0.15) 0%, transparent 50%),
    radial-gradient(ellipse at bottom, rgba(147, 51, 234, 0.1) 0%, transparent 50%);
}

/* Subtle film-grain overlay via an inline SVG turbulence filter. */
.noise {
  position: relative;
}
.noise::after {
  content: "";
  position: absolute;
  inset: 0;
  background-image: url("data:image/svg+xml,%3Csvg viewBox='0 0 256 256' xmlns='http://www.w3.org/2000/svg'%3E%3Cfilter id='noiseFilter'%3E%3CfeTurbulence type='fractalNoise' baseFrequency='0.9' numOctaves='4' stitchTiles='stitch'/%3E%3C/filter%3E%3Crect width='100%25' height='100%25' filter='url(%23noiseFilter)'/%3E%3C/svg%3E");
  opacity: 0.03;
  pointer-events: none;
}
/* Reusable component classes: buttons, inputs, cards, badges, etc. */
.btn-primary {
  background: var(--gradient-primary);
  color: white;
  font-weight: 500;
  padding: 0.75rem 1.5rem;
  border-radius: 0.5rem;
  transition: all 0.2s ease;
  position: relative;
  overflow: hidden;
}
.btn-primary:hover {
  /* Slight lift plus a glow on hover. */
  transform: translateY(-1px);
  box-shadow: 0 8px 30px rgba(59, 130, 246, 0.4);
}
.btn-primary:active {
  transform: translateY(0);
}
.btn-secondary {
  background: var(--surface);
  color: var(--text-primary);
  font-weight: 500;
  padding: 0.75rem 1.5rem;
  border-radius: 0.5rem;
  border: 1px solid var(--border);
  transition: all 0.2s ease;
}
.btn-secondary:hover {
  background: var(--surface-hover);
  border-color: var(--border-strong);
}

/* Text inputs with an accent focus ring. */
.input-field {
  background: rgba(0, 0, 0, 0.3);
  border: 1px solid var(--border);
  border-radius: 0.5rem;
  padding: 0.75rem 1rem;
  color: var(--text-primary);
  transition: all 0.2s ease;
  width: 100%;
}
.input-field:focus {
  outline: none;
  border-color: var(--accent-blue);
  box-shadow: 0 0 3px rgba(59, 130, 246, 0.1);
}
.input-field::placeholder {
  color: var(--text-muted);
}

/* Hoverable content card. */
.card {
  background: var(--surface);
  border: 1px solid var(--border);
  border-radius: 1rem;
  padding: 1.5rem;
  transition: all 0.3s ease;
}
.card:hover {
  background: var(--surface-hover);
  border-color: var(--border-strong);
  transform: translateY(-2px);
}

/* Square icon container with the subtle gradient fill. */
.icon-box {
  width: 3rem;
  height: 3rem;
  border-radius: 0.75rem;
  background: var(--gradient-subtle);
  border: 1px solid var(--border);
  display: flex;
  align-items: center;
  justify-content: center;
  color: var(--accent-blue);
}

/* Small pill label. */
.badge {
  display: inline-flex;
  align-items: center;
  gap: 0.375rem;
  padding: 0.25rem 0.75rem;
  background: var(--surface);
  border: 1px solid var(--border);
  border-radius: 9999px;
  font-size: 0.75rem;
  font-weight: 500;
  color: var(--text-secondary);
}

/* Monospace code container with horizontal scrolling. */
.code-block {
  background: rgba(0, 0, 0, 0.5);
  border: 1px solid var(--border);
  border-radius: 0.75rem;
  padding: 1rem;
  font-family: var(--font-mono), ui-monospace, monospace;
  font-size: 0.875rem;
  overflow-x: auto;
}

/* Hairline divider that fades out at both ends. */
.divider {
  height: 1px;
  background: linear-gradient(90deg, transparent, var(--border), transparent);
}

View File

@@ -0,0 +1,58 @@
import type { Metadata } from "next";
import { Inter, JetBrains_Mono } from "next/font/google";
import "./globals.css";
import { Navbar } from "@/components/navbar";
import { Footer } from "@/components/footer";
// Sans-serif UI font, exposed to CSS as the --font-sans variable.
const inter = Inter({
  subsets: ["latin"],
  variable: "--font-sans",
  display: "swap",
});

// Monospace font for code and URLs, exposed as --font-mono.
const jetbrainsMono = JetBrains_Mono({
  subsets: ["latin"],
  variable: "--font-mono",
  display: "swap",
});

// Site-wide <head> metadata (title, description, Open Graph tags).
export const metadata: Metadata = {
  title: "CodeBoard — Understand any codebase in 5 minutes",
  description:
    "Paste a GitHub URL and get interactive onboarding documentation with architecture diagrams, module breakdowns, and getting started guides. Built by Vectry AI consultancy.",
  keywords: ["code analysis", "documentation", "github", "codebase", "AI", "developer tools"],
  authors: [{ name: "Vectry" }],
  openGraph: {
    title: "CodeBoard — Understand any codebase in 5 minutes",
    description:
      "Paste a GitHub URL and get interactive onboarding documentation with architecture diagrams, module breakdowns, and getting started guides.",
    type: "website",
  },
};

/**
 * Root layout: applies the fonts, stacks the fixed decorative background
 * layers (radial gradient + grid), and wraps every page with the shared
 * Navbar and Footer.
 */
export default function RootLayout({
  children,
}: {
  children: React.ReactNode;
}) {
  return (
    <html lang="en" className="dark">
      <body
        className={`${inter.variable} ${jetbrainsMono.variable} font-sans antialiased bg-[#0a0a0f] text-white min-h-screen`}
      >
        <div className="relative min-h-screen flex flex-col">
          {/* Fixed, non-interactive background layers. */}
          <div className="fixed inset-0 bg-gradient-radial pointer-events-none" />
          <div className="fixed inset-0 bg-grid pointer-events-none opacity-50" />
          <Navbar />
          <main className="flex-1 relative">
            {children}
          </main>
          <Footer />
        </div>
      </body>
    </html>
  );
}

View File

@@ -0,0 +1,211 @@
"use client";
import { useEffect, useState } from "react";
import Link from "next/link";
import type { GenerationStatus } from "@codeboard/shared";
import {
Loader2,
CheckCircle2,
Circle,
AlertCircle,
FileText,
RefreshCw,
} from "lucide-react";
// Payload shape of each SSE "progress" event from /api/status/[id].
interface ProgressData {
  status: GenerationStatus;
  // Rendered directly as the progress bar's CSS width percentage.
  progress: number;
  message: string;
}

interface ProgressTrackerProps {
  // Id of the generation run: used for the SSE stream URL and docs link.
  generationId: string;
  repoUrl: string;
}

// Ordered pipeline steps shown in the checklist. Display order must match
// the worker's actual progression through these statuses — TODO confirm
// against the worker's processor.
const STEPS: { status: GenerationStatus; label: string }[] = [
  { status: "QUEUED", label: "Queued" },
  { status: "CLONING", label: "Cloning Repository" },
  { status: "PARSING", label: "Analyzing Code" },
  { status: "GENERATING", label: "Generating Docs" },
  { status: "RENDERING", label: "Finalizing" },
];
/**
 * Client-side progress UI for a documentation generation run.
 *
 * Subscribes to the server-sent-events stream at /api/status/[id] and
 * renders a progress bar, a per-step checklist, and terminal panels
 * (completed / failed / connection error).
 *
 * Fixes vs. the previous revision:
 *  - the active-step pulse indicator was emitted *after* the step row's
 *    closing tag with an unbalanced extra </div> (invalid JSX); it now
 *    renders inside the row.
 *  - the connection-error panel was missing its outer closing </div>.
 */
export function ProgressTracker({
  generationId,
  repoUrl, // not currently rendered; kept for interface stability
}: ProgressTrackerProps) {
  const [data, setData] = useState<ProgressData>({
    status: "QUEUED",
    progress: 0,
    message: "Waiting in queue...",
  });
  const [error, setError] = useState<string | null>(null);

  useEffect(() => {
    // One SSE subscription per generation id. Closed on unmount and on any
    // terminal event so the browser does not auto-reconnect.
    const eventSource = new EventSource(`/api/status/${generationId}`);
    eventSource.addEventListener("progress", (event) => {
      try {
        const parsed = JSON.parse(event.data);
        setData(parsed);
        if (parsed.status === "COMPLETED" || parsed.status === "FAILED") {
          eventSource.close();
        }
      } catch {
        setError("Failed to parse progress data");
      }
    });
    eventSource.addEventListener("timeout", () => {
      setError("Connection timed out. Please refresh the page.");
      eventSource.close();
    });
    eventSource.onerror = () => {
      setError("Connection error. Please refresh the page.");
      eventSource.close();
    };
    return () => {
      eventSource.close();
    };
  }, [generationId]);

  // Map a status onto the checklist index: COMPLETED marks every step
  // done; FAILED maps to -1 so no step shows as active.
  const getStepIndex = (status: GenerationStatus) => {
    if (status === "COMPLETED") return STEPS.length;
    if (status === "FAILED") return -1;
    return STEPS.findIndex((s) => s.status === status);
  };

  const currentStepIndex = getStepIndex(data.status);
  const isCompleted = data.status === "COMPLETED";
  const isFailed = data.status === "FAILED";

  return (
    <div className="space-y-6">
      {/* Overall progress bar (0-100, driven by SSE payloads). */}
      <div className="relative h-2 bg-zinc-800 rounded-full overflow-hidden">
        <div
          className="absolute inset-y-0 left-0 bg-gradient-to-r from-blue-500 via-indigo-500 to-purple-500 transition-all duration-500 ease-out"
          style={{ width: `${data.progress}%` }}
        />
      </div>
      <div className="flex items-center justify-between text-sm">
        <span className="text-zinc-400">{data.message}</span>
        <span className="text-zinc-500 font-mono">{data.progress}%</span>
      </div>

      {/* Step checklist. */}
      <div className="space-y-3">
        {STEPS.map((step, index) => {
          const isActive = index === currentStepIndex;
          const isDone = index < currentStepIndex || isCompleted;
          return (
            <div
              key={step.status}
              className={`flex items-center gap-4 p-4 rounded-xl border transition-all duration-300 ${
                isActive
                  ? "bg-blue-500/10 border-blue-500/30"
                  : isDone
                  ? "bg-zinc-900/50 border-zinc-800"
                  : "bg-transparent border-zinc-800/50"
              }`}
            >
              <div
                className={`flex-shrink-0 w-8 h-8 rounded-full flex items-center justify-center ${
                  isActive
                    ? "bg-blue-500/20 text-blue-400"
                    : isDone
                    ? "bg-green-500/20 text-green-400"
                    : "bg-zinc-800 text-zinc-500"
                }`}
              >
                {isActive ? (
                  <Loader2 className="w-4 h-4 animate-spin" />
                ) : isDone ? (
                  <CheckCircle2 className="w-4 h-4" />
                ) : (
                  <Circle className="w-4 h-4" />
                )}
              </div>
              <div
                className={`flex-1 ${
                  isActive
                    ? "text-white"
                    : isDone
                    ? "text-green-400"
                    : "text-zinc-500"
                }`}
              >
                {step.label}
              </div>
              {/* Pulse dot marking the in-flight step (moved inside the
                  row; previously emitted outside it, which was invalid). */}
              {isActive && (
                <div className="flex-shrink-0">
                  <div className="w-2 h-2 rounded-full bg-blue-400 animate-pulse" />
                </div>
              )}
            </div>
          );
        })}
      </div>

      {/* Terminal state: job failed on the worker side. */}
      {isFailed && (
        <div className="p-4 rounded-xl bg-red-500/10 border border-red-500/20 text-red-400 text-sm">
          <div className="flex items-start gap-3">
            <AlertCircle className="w-5 h-5 text-red-400 flex-shrink-0 mt-0.5" />
            <div>
              <p className="text-red-400 font-medium">Generation Failed</p>
              <p className="text-red-400/70 text-sm mt-1">
                Something went wrong. Please try again.
              </p>
            </div>
          </div>
          <button
            onClick={() => window.location.reload()}
            className="mt-4 flex items-center gap-2 px-4 py-2 bg-red-500/20 hover:bg-red-500/30 text-red-400 rounded-lg transition-colors text-sm"
          >
            <RefreshCw className="w-4 h-4" />
            Try Again
          </button>
        </div>
      )}

      {/* Terminal state: success — link to the generated docs. */}
      {isCompleted && (
        <div className="p-6 rounded-xl bg-green-500/10 border border-green-500/20 text-center">
          <div className="w-12 h-12 rounded-full bg-green-500/20 flex items-center justify-center mx-auto mb-4">
            <CheckCircle2 className="w-6 h-6 text-green-400" />
          </div>
          <h3 className="text-lg font-semibold text-white mb-2">
            Documentation Ready!
          </h3>
          <p className="text-zinc-400 text-sm mb-6">
            Your interactive documentation has been generated successfully.
          </p>
          <Link
            href={`/docs/${generationId}`}
            className="inline-flex items-center gap-2 px-6 py-3 bg-gradient-to-r from-blue-600 to-indigo-600 hover:from-blue-500 hover:to-indigo-500 text-white font-medium rounded-xl transition-all"
          >
            <FileText className="w-4 h-4" />
            View Documentation
          </Link>
        </div>
      )}

      {/* Transport-level error (SSE), shown only when the job itself has
          not already reported failure. */}
      {error && !isFailed && (
        <div className="p-4 rounded-xl bg-red-500/10 border border-red-500/20 text-red-400 text-sm">
          <div className="flex items-start gap-3">
            <AlertCircle className="w-5 h-5 text-red-400 flex-shrink-0 mt-0.5" />
            <div>
              <p className="text-red-400 font-medium">Connection Error</p>
              <p className="text-red-400/70 text-sm mt-1">
                {error}
              </p>
            </div>
          </div>
        </div>
      )}
    </div>
  );
}

13
apps/web/src/lib/queue.ts Normal file
View File

@@ -0,0 +1,13 @@
import { Queue } from "bullmq";
import { getRedis } from "./redis";
// Module-level singleton: one BullMQ queue handle per process.
let instance: Queue | null = null;

/**
 * Lazily construct (once) and return the shared "codeboard:generate"
 * queue, reusing the process-wide Redis connection.
 */
export function getQueue(): Queue {
  instance ??= new Queue("codeboard:generate", {
    connection: getRedis(),
  });
  return instance;
}

12
apps/web/src/lib/redis.ts Normal file
View File

@@ -0,0 +1,12 @@
import IORedis from "ioredis";
// Module-level singleton so every caller shares a single connection.
let client: IORedis | null = null;

/**
 * Lazily create (once) and return the shared Redis connection.
 * `maxRetriesPerRequest: null` is the setting BullMQ requires for its
 * blocking commands.
 */
export function getRedis(): IORedis {
  if (client === null) {
    const url = process.env.REDIS_URL ?? "redis://localhost:6379";
    client = new IORedis(url, { maxRetriesPerRequest: null });
  }
  return client;
}

23
apps/web/tsconfig.json Normal file
View File

@@ -0,0 +1,23 @@
{
"compilerOptions": {
"target": "ES2017",
"lib": ["dom", "dom.iterable", "esnext"],
"allowJs": true,
"skipLibCheck": true,
"strict": true,
"noEmit": true,
"esModuleInterop": true,
"module": "esnext",
"moduleResolution": "bundler",
"resolveJsonModule": true,
"isolatedModules": true,
"jsx": "preserve",
"incremental": true,
"plugins": [{ "name": "next" }],
"paths": {
"@/*": ["./src/*"]
}
},
"include": ["next-env.d.ts", "**/*.ts", "**/*.tsx", ".next/types/**/*.ts"],
"exclude": ["node_modules"]
}

25
apps/worker/package.json Normal file
View File

@@ -0,0 +1,25 @@
{
"name": "@codeboard/worker",
"version": "0.0.1",
"private": true,
"scripts": {
"build": "tsc",
"clean": "rm -rf dist",
"dev": "tsx watch src/index.ts",
"start": "node dist/index.js"
},
"dependencies": {
"@codeboard/shared": "*",
"@codeboard/parser": "*",
"@codeboard/llm": "*",
"@codeboard/diagrams": "*",
"bullmq": "^5.34.0",
"ioredis": "^5.4.0",
"simple-git": "^3.27.0"
},
"devDependencies": {
"@types/node": "^20.0.0",
"tsx": "^4.19.0",
"typescript": "^5.7"
}
}

42
apps/worker/src/index.ts Normal file
View File

@@ -0,0 +1,42 @@
import { Worker } from "bullmq";
import IORedis from "ioredis";
import { processGenerationJob } from "./processor.js";
// Redis connection for the worker; BullMQ requires
// maxRetriesPerRequest: null for its blocking commands.
const redisUrl = process.env.REDIS_URL ?? "redis://localhost:6379";
const connection = new IORedis(redisUrl, { maxRetriesPerRequest: null });

// Consume jobs from the same queue name the web app publishes to
// (apps/web/src/lib/queue.ts). Up to 2 jobs run concurrently; completed
// and failed job records are pruned so Redis does not grow unbounded.
const worker = new Worker(
  "codeboard:generate",
  async (job) => {
    console.log(`[worker] Processing job ${job.id}: ${job.data.repoUrl}`);
    return processGenerationJob(job);
  },
  {
    connection,
    concurrency: 2,
    removeOnComplete: { count: 100 },
    removeOnFail: { count: 50 },
  }
);

// Lifecycle logging.
worker.on("completed", (job) => {
  console.log(`[worker] Job ${job.id} completed`);
});
worker.on("failed", (job, err) => {
  console.error(`[worker] Job ${job?.id} failed:`, err.message);
});
worker.on("ready", () => {
  console.log("[worker] Ready and waiting for jobs on codeboard:generate");
});

// Graceful shutdown: close the worker (waits for in-flight jobs) and the
// Redis connection before exiting. Registered for SIGTERM (docker stop)
// and SIGINT (Ctrl-C).
async function shutdown() {
  console.log("[worker] Shutting down...");
  await worker.close();
  await connection.quit();
  process.exit(0);
}
process.on("SIGTERM", shutdown);
process.on("SIGINT", shutdown);

View File

@@ -0,0 +1,87 @@
import { simpleGit } from "simple-git";
import { mkdtemp, readdir, stat } from "node:fs/promises";
import { join } from "node:path";
import { tmpdir } from "node:os";
import type { CloneResult } from "@codeboard/shared";
/**
 * Recursively tally files under `dir`, skipping .git and node_modules.
 * Line counts are approximated from byte size (~40 bytes per line) rather
 * than read, to keep the scan cheap.
 */
async function countFiles(dir: string): Promise<{ files: number; lines: number }> {
  let totalFiles = 0;
  let totalLines = 0;
  for (const entry of await readdir(dir, { withFileTypes: true })) {
    if (entry.name === ".git" || entry.name === "node_modules") continue;
    const entryPath = join(dir, entry.name);
    if (entry.isDirectory()) {
      const nested = await countFiles(entryPath);
      totalFiles += nested.files;
      totalLines += nested.lines;
    } else {
      const { size } = await stat(entryPath);
      totalFiles += 1;
      totalLines += Math.ceil(size / 40);
    }
  }
  return { files: totalFiles, lines: totalLines };
}
export async function cloneRepository(repoUrl: string): Promise<CloneResult> {
const tmpDir = await mkdtemp(join(tmpdir(), "codeboard-"));
const git = simpleGit();
await git.clone(repoUrl, tmpDir, ["--depth", "1", "--single-branch"]);
const localGit = simpleGit(tmpDir);
const log = await localGit.log({ maxCount: 1 });
const lastCommit = log.latest?.hash ?? "unknown";
const repoName = repoUrl
.replace(/\.git$/, "")
.split("/")
.slice(-1)[0] ?? "unknown";
const { files: totalFiles, lines: totalLines } = await countFiles(tmpDir);
const languageCounts: Record<string, number> = {};
const extMap: Record<string, string> = {
".ts": "TypeScript", ".tsx": "TypeScript",
".js": "JavaScript", ".jsx": "JavaScript",
".py": "Python", ".go": "Go",
".rs": "Rust", ".java": "Java",
".rb": "Ruby", ".php": "PHP",
};
async function scanLanguages(dir: string) {
const entries = await readdir(dir, { withFileTypes: true });
for (const entry of entries) {
if (entry.name.startsWith(".") || entry.name === "node_modules") continue;
const fullPath = join(dir, entry.name);
if (entry.isDirectory()) {
await scanLanguages(fullPath);
} else {
const ext = entry.name.slice(entry.name.lastIndexOf("."));
const lang = extMap[ext];
if (lang) {
const fileStat = await stat(fullPath);
languageCounts[lang] = (languageCounts[lang] ?? 0) + fileStat.size;
}
}
}
}
await scanLanguages(tmpDir);
return {
localPath: tmpDir,
metadata: {
name: repoName,
description: "",
defaultBranch: "main",
languages: languageCounts,
stars: 0,
lastCommit,
totalFiles,
totalLines,
},
};
}

View File

@@ -0,0 +1,26 @@
import type { CodeStructure, GeneratedDocs } from "@codeboard/shared";
import { createProvider, generateDocumentation } from "@codeboard/llm";
/**
 * Resolve an LLM provider from environment configuration and run the
 * documentation pipeline. When both OPENAI_API_KEY and ANTHROPIC_API_KEY are
 * set, OpenAI wins.
 *
 * @param codeStructure parsed repository structure
 * @param onProgress    optional (stage, percent) progress callback
 * @throws Error when neither API key is configured
 */
export async function generateDocs(
  codeStructure: CodeStructure,
  onProgress?: (stage: string, progress: number) => void
): Promise<GeneratedDocs> {
  const openaiKey = process.env.OPENAI_API_KEY;
  const anthropicKey = process.env.ANTHROPIC_API_KEY;
  const apiKey = openaiKey ?? anthropicKey ?? "";
  if (!apiKey) {
    throw new Error(
      "No LLM API key configured. Set OPENAI_API_KEY or ANTHROPIC_API_KEY."
    );
  }
  const provider = createProvider({
    provider: openaiKey ? "openai" : "anthropic",
    apiKey,
    model: process.env.LLM_MODEL,
    baseUrl: process.env.LLM_BASE_URL,
  });
  return generateDocumentation(codeStructure, provider, onProgress);
}

View File

@@ -0,0 +1,8 @@
import type { CodeStructure } from "@codeboard/shared";
import { analyzeRepository } from "@codeboard/parser";
export async function parseRepository(
localPath: string
): Promise<CodeStructure> {
return analyzeRepository(localPath);
}

View File

@@ -0,0 +1,8 @@
{
"extends": "../../tsconfig.json",
"compilerOptions": {
"outDir": "./dist",
"rootDir": "./src"
},
"include": ["src"]
}

55
docker-compose.yml Normal file
View File

@@ -0,0 +1,55 @@
# Full stack: Next.js web app, BullMQ worker, Postgres, Redis.
services:
  # Next.js frontend + API routes
  web:
    build:
      context: .
      target: web
    ports:
      - "3000:3000"
    environment:
      - DATABASE_URL=postgresql://codeboard:codeboard@db:5432/codeboard
      - REDIS_URL=redis://redis:6379
      - NEXTAUTH_URL=http://localhost:3000
    depends_on:
      - db
      - redis
    restart: always

  # Background job processor (clone -> parse -> generate docs)
  worker:
    build:
      context: .
      target: worker
    environment:
      - DATABASE_URL=postgresql://codeboard:codeboard@db:5432/codeboard
      - REDIS_URL=redis://redis:6379
      # LLM settings pass through from the host environment; blank when unset
      - OPENAI_API_KEY=${OPENAI_API_KEY:-}
      - ANTHROPIC_API_KEY=${ANTHROPIC_API_KEY:-}
      - LLM_MODEL=${LLM_MODEL:-}
      - LLM_BASE_URL=${LLM_BASE_URL:-}
    depends_on:
      - db
      - redis
    restart: always

  # Primary datastore
  db:
    image: postgres:16-alpine
    environment:
      POSTGRES_USER: codeboard
      POSTGRES_PASSWORD: codeboard
      POSTGRES_DB: codeboard
    volumes:
      - postgres_data:/var/lib/postgresql/data
    ports:
      - "5432:5432"
    restart: always

  # Queue backend for BullMQ
  redis:
    image: redis:7-alpine
    ports:
      - "6379:6379"
    volumes:
      - redis_data:/data
    restart: always

volumes:
  postgres_data:
  redis_data:

24
package.json Normal file
View File

@@ -0,0 +1,24 @@
{
"name": "codeboard",
"private": true,
"workspaces": [
"apps/*",
"packages/*"
],
"scripts": {
"dev": "turbo dev",
"build": "turbo build",
"lint": "turbo lint",
"clean": "turbo clean",
"db:generate": "turbo db:generate",
"db:push": "turbo db:push"
},
"devDependencies": {
"turbo": "^2",
"typescript": "^5.7"
},
"engines": {
"node": ">=20"
},
"packageManager": "npm@10.8.2"
}

View File

@@ -0,0 +1,24 @@
{
"name": "@codeboard/database",
"version": "0.0.1",
"private": true,
"main": "./src/client.ts",
"types": "./src/client.ts",
"exports": {
".": "./src/client.ts"
},
"scripts": {
"build": "echo 'database package uses prisma generate'",
"db:generate": "prisma generate",
"db:push": "prisma db push",
"db:migrate": "prisma migrate dev",
"clean": "rm -rf generated"
},
"dependencies": {
"@prisma/client": "^6.3.0"
},
"devDependencies": {
"prisma": "^6.3.0",
"typescript": "^5.7"
}
}

View File

@@ -0,0 +1,50 @@
datasource db {
  provider = "postgresql"
  url      = env("DATABASE_URL")
}

generator client {
  provider = "prisma-client-js"
}

/// One documentation-generation run for a repository at a specific commit.
model Generation {
  id         String   @id @default(cuid())
  repoUrl    String
  repoName   String
  commitHash String
  status     Status   @default(QUEUED)
  progress   Int      @default(0) // pipeline progress, 0-100
  result     Json? // generated docs payload once COMPLETED
  error      String? // failure message when FAILED
  costUsd    Float?
  duration   Int?
  createdAt  DateTime @default(now())
  updatedAt  DateTime @updatedAt
  userId     String?
  user       User?    @relation(fields: [userId], references: [id])
  viewCount  Int      @default(0)

  // one generation per repo+commit — effectively a cache key
  @@unique([repoUrl, commitHash])
  @@index([repoUrl])
  @@index([status])
}

/// GitHub-authenticated account that owns generations.
model User {
  id          String       @id @default(cuid())
  githubId    String       @unique
  login       String
  email       String?
  avatarUrl   String?
  createdAt   DateTime     @default(now())
  generations Generation[]
}

/// Pipeline lifecycle states for a Generation.
enum Status {
  QUEUED
  CLONING
  PARSING
  GENERATING
  RENDERING
  COMPLETED
  FAILED
}

View File

@@ -0,0 +1,12 @@
import { PrismaClient } from "@prisma/client";

// Reuse one PrismaClient across dev hot reloads: module state is reset on
// reload, so outside production the instance is stashed on globalThis.
const globalScope = globalThis as unknown as { prisma?: PrismaClient };

export const prisma = globalScope.prisma ?? new PrismaClient();

if (process.env.NODE_ENV !== "production") {
  globalScope.prisma = prisma;
}

export { PrismaClient } from "@prisma/client";

View File

@@ -0,0 +1,24 @@
{
"name": "@codeboard/diagrams",
"version": "0.0.1",
"private": true,
"main": "./dist/index.js",
"types": "./dist/index.d.ts",
"exports": {
".": {
"types": "./dist/index.d.ts",
"default": "./dist/index.js"
}
},
"scripts": {
"build": "tsc",
"clean": "rm -rf dist",
"dev": "tsc --watch"
},
"dependencies": {
"@codeboard/shared": "*"
},
"devDependencies": {
"typescript": "^5.7"
}
}

View File

@@ -0,0 +1,49 @@
import type { ModuleNode, DependencyEdge } from "@codeboard/shared";
/** Make a string safe to use as a Mermaid node id (alphanumerics only). */
function sanitizeId(name: string): string {
  return name.replace(/[^a-zA-Z0-9]/g, "_");
}

/** Shorten long labels with a trailing ellipsis so nodes stay readable. */
function truncateLabel(name: string, max = 20): string {
  return name.length > max ? name.slice(0, max - 1) + "\u2026" : name;
}

/**
 * Render a module-level Mermaid flowchart: one node per module, one edge per
 * cross-module import (de-duplicated).
 *
 * Fixes vs. previous version:
 * - node ids were derived from the module NAME only, so two directories with
 *   the same basename (e.g. "src/api" and "lib/api") collided and produced
 *   self-loops/wrong edges; ids are now de-duplicated with a numeric suffix.
 * - the per-edge linear `modules.find` lookup is replaced by a path map.
 */
export function generateArchitectureDiagram(
  modules: ModuleNode[],
  deps: DependencyEdge[]
): string {
  if (modules.length === 0) {
    return "flowchart TD\n empty[No modules detected]";
  }
  const lines: string[] = ["flowchart TD"];
  const moduleIds = new Map<string, string>();
  const usedIds = new Set<string>();
  // O(1) lookup from file path -> owning module
  const fileToModule = new Map<string, ModuleNode>();
  for (const mod of modules) {
    let id = sanitizeId(mod.name);
    // de-duplicate ids when two modules share a basename
    for (let n = 2; usedIds.has(id); n++) id = `${sanitizeId(mod.name)}_${n}`;
    usedIds.add(id);
    moduleIds.set(mod.path, id);
    for (const filePath of mod.files) fileToModule.set(filePath, mod);
    // "\\n" is a literal backslash-n: Mermaid's in-label line break
    lines.push(` ${id}["${truncateLabel(mod.name)}\\n${mod.files.length} files"]`);
  }
  const edgeSet = new Set<string>();
  for (const dep of deps) {
    const sourceModule = fileToModule.get(dep.source);
    const targetModule = fileToModule.get(dep.target);
    if (!sourceModule || !targetModule) continue;
    if (sourceModule.path === targetModule.path) continue; // intra-module edge
    const sourceId = moduleIds.get(sourceModule.path);
    const targetId = moduleIds.get(targetModule.path);
    if (!sourceId || !targetId) continue;
    const edgeKey = `${sourceId}-${targetId}`; // ids are [A-Za-z0-9_], "-" is safe
    if (edgeSet.has(edgeKey)) continue;
    edgeSet.add(edgeKey);
    lines.push(` ${sourceId} --> ${targetId}`);
  }
  return lines.join("\n");
}

View File

@@ -0,0 +1,48 @@
import type { FileNode, DependencyEdge } from "@codeboard/shared";
/** Make a file path safe to use as a Mermaid node id (alphanumerics only). */
function sanitizeId(path: string): string {
  return path.replace(/[^a-zA-Z0-9]/g, "_");
}

/** Keep at most the last two path segments so node labels stay short. */
function shortenPath(path: string): string {
  const parts = path.split("/");
  if (parts.length <= 2) return path;
  return parts.slice(-2).join("/");
}

/**
 * Render a file-level Mermaid dependency graph for the first 30 files.
 *
 * Fix vs. previous version: edge de-duplication keys were built as
 * `${source}-${target}`, which collides when paths themselves contain "-"
 * (e.g. "a-b" -> "c" vs "a" -> "b-c") and silently dropped distinct edges.
 * A NUL separator cannot occur in a path, so keys are now unambiguous.
 */
export function generateDependencyGraph(
  files: FileNode[],
  deps: DependencyEdge[]
): string {
  if (files.length === 0) {
    return "graph LR\n empty[No files detected]";
  }
  const maxFiles = 30; // keep the diagram readable for large repos
  const topFiles = files.slice(0, maxFiles);
  const topPaths = new Set(topFiles.map((f) => f.path));
  const lines: string[] = ["graph LR"];
  for (const file of topFiles) {
    lines.push(` ${sanitizeId(file.path)}["${shortenPath(file.path)}"]`);
  }
  const edgeSet = new Set<string>();
  for (const dep of deps) {
    // only draw edges between rendered nodes; skip self-loops
    if (!topPaths.has(dep.source) || !topPaths.has(dep.target)) continue;
    if (dep.source === dep.target) continue;
    const edgeKey = `${dep.source}\u0000${dep.target}`;
    if (edgeSet.has(edgeKey)) continue;
    edgeSet.add(edgeKey);
    lines.push(` ${sanitizeId(dep.source)} --> ${sanitizeId(dep.target)}`);
  }
  return lines.join("\n");
}

View File

@@ -0,0 +1,2 @@
export { generateArchitectureDiagram } from "./architecture.js";
export { generateDependencyGraph } from "./dependency.js";

View File

@@ -0,0 +1,8 @@
{
"extends": "../../tsconfig.json",
"compilerOptions": {
"outDir": "./dist",
"rootDir": "./src"
},
"include": ["src"]
}

26
packages/llm/package.json Normal file
View File

@@ -0,0 +1,26 @@
{
"name": "@codeboard/llm",
"version": "0.0.1",
"private": true,
"main": "./dist/index.js",
"types": "./dist/index.d.ts",
"exports": {
".": {
"types": "./dist/index.d.ts",
"default": "./dist/index.js"
}
},
"scripts": {
"build": "tsc",
"clean": "rm -rf dist",
"dev": "tsc --watch"
},
"dependencies": {
"@codeboard/shared": "*",
"openai": "^4.77.0",
"@anthropic-ai/sdk": "^0.36.0"
},
"devDependencies": {
"typescript": "^5.7"
}
}

View File

@@ -0,0 +1,72 @@
import type { FileNode } from "@codeboard/shared";
// Rough heuristic: ~4 characters per LLM token.
const APPROX_CHARS_PER_TOKEN = 4;

/**
 * Split source text into chunks of at most ~maxTokens tokens, preferring to
 * break at line boundaries.
 *
 * Fix vs. previous version: a single line longer than the whole budget
 * (e.g. minified code) was emitted as one oversized chunk; such lines are
 * now hard-split at the character limit.
 */
export function chunkCode(content: string, maxTokens: number): string[] {
  const maxChars = maxTokens * APPROX_CHARS_PER_TOKEN;
  if (content.length <= maxChars) return [content];
  const chunks: string[] = [];
  let current: string[] = [];
  let currentLen = 0;
  const flush = () => {
    if (current.length > 0) {
      chunks.push(current.join("\n"));
      current = [];
      currentLen = 0;
    }
  };
  for (const rawLine of content.split("\n")) {
    // hard-split pathological lines that exceed the budget on their own
    const pieces =
      rawLine.length > maxChars
        ? Array.from({ length: Math.ceil(rawLine.length / maxChars) }, (_, i) =>
            rawLine.slice(i * maxChars, (i + 1) * maxChars)
          )
        : [rawLine];
    for (const piece of pieces) {
      if (currentLen + piece.length > maxChars && current.length > 0) flush();
      current.push(piece);
      currentLen += piece.length + 1; // +1 for the joining newline
    }
  }
  flush();
  return chunks;
}
/**
 * Render a compact, prompt-friendly outline of a parsed file: imports,
 * exports, function signatures (with up to 100 chars of docstring), and
 * class members — one item per line.
 */
export function extractSignatures(fileNode: FileNode): string {
  const parts: string[] = [];
  parts.push(`File: ${fileNode.path} (${fileNode.language})`);
  if (fileNode.imports.length > 0) {
    parts.push("Imports:");
    for (const imp of fileNode.imports) {
      parts.push(` from "${imp.source}" import {${imp.specifiers.join(", ")}}`);
    }
  }
  if (fileNode.exports.length > 0) {
    parts.push("Exports:");
    for (const exp of fileNode.exports) {
      parts.push(` ${exp.isDefault ? "default " : ""}${exp.name}`);
    }
  }
  for (const fn of fileNode.functions) {
    const params = fn.params.join(", ");
    const ret = fn.returnType ? `: ${fn.returnType}` : "";
    // fix: the docstring was concatenated with no separator, yielding
    // e.g. "function foo(): numberDoes X"; delimit it with " - "
    const doc = fn.docstring ? ` - ${fn.docstring.slice(0, 100)}` : "";
    parts.push(`function ${fn.name}(${params})${ret}${doc}`);
  }
  for (const cls of fileNode.classes) {
    parts.push(`class ${cls.name}`);
    for (const method of cls.methods) {
      parts.push(` method ${method.name}(${method.params.join(", ")})`);
    }
    for (const prop of cls.properties) {
      parts.push(` property ${prop.name}${prop.type ? `: ${prop.type}` : ""}`);
    }
  }
  return parts.join("\n");
}
/** Rough token estimate using the ~4 chars/token heuristic (rounded up). */
export function estimateTokens(text: string): number {
  const approxTokens = text.length / APPROX_CHARS_PER_TOKEN;
  return Math.ceil(approxTokens);
}

View File

@@ -0,0 +1,4 @@
export { createProvider } from "./providers/factory.js";
export { generateDocumentation } from "./pipeline.js";
export { chunkCode, extractSignatures } from "./chunker.js";
export type { LLMProvider } from "./providers/base.js";

View File

@@ -0,0 +1,153 @@
import type { CodeStructure, GeneratedDocs, FileNode } from "@codeboard/shared";
import type { LLMProvider } from "./providers/base.js";
import { buildArchitecturePrompt } from "./prompts/architecture-overview.js";
import { buildModuleSummaryPrompt } from "./prompts/module-summary.js";
import { buildPatternsPrompt } from "./prompts/patterns-detection.js";
import { buildGettingStartedPrompt } from "./prompts/getting-started.js";
/**
 * Extract the body of a `## <header>` markdown section, up to the next `## `
 * heading or end of text.
 *
 * Fix vs. previous version: the header was interpolated into the RegExp
 * unescaped, so headers containing regex metacharacters (parentheses, "+",
 * ".") broke or silently failed the match; metacharacters are now escaped.
 *
 * @returns the trimmed section body, or "" when the header is absent
 */
function parseSection(text: string, header: string): string {
  const escaped = header.replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
  const regex = new RegExp(`## ${escaped}\\s*\\n([\\s\\S]*?)(?=\\n## |$)`);
  const match = regex.exec(text);
  return match?.[1]?.trim() ?? "";
}
/**
 * Pull the body of the first ```mermaid fenced block out of an LLM response,
 * falling back to a placeholder diagram when none is present.
 */
function parseMermaid(text: string): string {
  const fence = /```mermaid\s*\n([\s\S]*?)```/;
  const body = fence.exec(text)?.[1];
  if (body === undefined) {
    return "flowchart TD\n A[No diagram generated]";
  }
  return body.trim();
}
/**
 * Turn a markdown bullet list into plain strings: strips leading "-"/"*"
 * markers, trims, and drops empty lines.
 */
function parseList(text: string): string[] {
  const items: string[] = [];
  for (const rawLine of text.split("\n")) {
    const item = rawLine.replace(/^[-*]\s*/, "").trim();
    if (item) items.push(item);
  }
  return items;
}
/**
 * Run the multi-stage LLM documentation pipeline over a parsed code
 * structure: architecture overview, per-module summaries (first 10 modules,
 * generated in parallel), pattern detection, and an onboarding guide.
 *
 * @param codeStructure parsed repository structure
 * @param provider      LLM backend used for every chat call
 * @param onProgress    optional callback invoked with (stage, percent)
 * @returns assembled GeneratedDocs; id/repoUrl/repoName are left blank for
 *          the caller to fill in
 */
export async function generateDocumentation(
  codeStructure: CodeStructure,
  provider: LLMProvider,
  onProgress?: (stage: string, progress: number) => void
): Promise<GeneratedDocs> {
  // Stage 1: high-level architecture + mermaid diagram
  onProgress?.("architecture", 10);
  const archMessages = buildArchitecturePrompt(codeStructure);
  const archResponse = await provider.chat(archMessages);
  const architectureOverview = parseSection(archResponse, "Architecture Overview");
  const techStackRaw = parseSection(archResponse, "Tech Stack");
  const architectureDiagram = parseMermaid(archResponse);
  const techStack = techStackRaw.split(",").map((s) => s.trim()).filter(Boolean);

  // Stage 2: summarize up to 10 modules concurrently
  onProgress?.("modules", 30);
  const moduleLimit = Math.min(codeStructure.modules.length, 10);
  const moduleSummaries = await Promise.all(
    codeStructure.modules.slice(0, moduleLimit).map(async (mod) => {
      const moduleFiles: FileNode[] = codeStructure.files.filter((f) =>
        mod.files.includes(f.path)
      );
      if (moduleFiles.length === 0) {
        return {
          name: mod.name,
          path: mod.path,
          summary: "Empty module — no parseable files found.",
          keyFiles: [],
          publicApi: [],
          dependsOn: [],
          dependedBy: [],
        };
      }
      const messages = buildModuleSummaryPrompt(mod, moduleFiles);
      // fix: this call previously passed `{ model: undefined }`, needlessly
      // overriding options and diverging from every other provider.chat call
      const response = await provider.chat(messages);
      const summary = parseSection(response, "Summary");
      const keyFilesRaw = parseList(parseSection(response, "Key Files"));
      const publicApi = parseList(parseSection(response, "Public API"));
      // external packages this module imports (non-relative sources)
      const dependsOn = [
        ...new Set(
          moduleFiles.flatMap((f) =>
            f.imports
              .map((imp) => imp.source)
              .filter((s) => !s.startsWith("."))
          )
        ),
      ].slice(0, 10);
      // files outside the module that import one of its files
      const dependedBy = codeStructure.dependencies
        .filter((d) => mod.files.includes(d.target))
        .map((d) => d.source)
        .filter((s) => !mod.files.includes(s))
        .slice(0, 10);
      return {
        name: mod.name,
        path: mod.path,
        summary: summary || "Module analyzed but no summary generated.",
        keyFiles: keyFilesRaw.map((kf) => ({ path: kf, purpose: "" })),
        publicApi,
        dependsOn,
        dependedBy,
      };
    })
  );

  // Stage 3: conventions / design patterns / architectural decisions
  onProgress?.("patterns", 60);
  const patternsMessages = buildPatternsPrompt(codeStructure);
  const patternsResponse = await provider.chat(patternsMessages);
  const conventions = parseList(parseSection(patternsResponse, "Coding Conventions"));
  const designPatterns = parseList(parseSection(patternsResponse, "Design Patterns"));
  const architecturalDecisions = parseList(parseSection(patternsResponse, "Architectural Decisions"));

  // Stage 4: onboarding guide, grounded in the stage-1 overview
  onProgress?.("getting-started", 80);
  const gsMessages = buildGettingStartedPrompt(
    codeStructure,
    architectureOverview
  );
  const gsResponse = await provider.chat(gsMessages);
  const prerequisites = parseList(parseSection(gsResponse, "Prerequisites"));
  const setupSteps = parseList(parseSection(gsResponse, "Setup Steps"));
  const firstTask = parseSection(gsResponse, "Your First Task");

  onProgress?.("complete", 100);
  const languages = [...new Set(codeStructure.files.map((f) => f.language))];
  return {
    id: "",
    repoUrl: "",
    repoName: "",
    generatedAt: new Date().toISOString(),
    sections: {
      overview: {
        title: "Architecture Overview",
        description: architectureOverview,
        architectureDiagram,
        techStack,
        keyMetrics: {
          files: codeStructure.files.length,
          modules: codeStructure.modules.length,
          languages,
        },
      },
      modules: moduleSummaries,
      patterns: {
        conventions,
        designPatterns,
        architecturalDecisions,
      },
      gettingStarted: {
        prerequisites,
        setupSteps,
        firstTask,
      },
      // TODO(review): reuses the architecture diagram; a dedicated dependency
      // graph may be intended here — confirm against the docs viewer
      dependencyGraph: architectureDiagram,
    },
  };
}

View File

@@ -0,0 +1,51 @@
import type { LLMMessage, CodeStructure } from "@codeboard/shared";
/**
 * Build the chat messages for the architecture-overview stage.
 * The system prompt pins the exact markdown headers that the pipeline's
 * parseSection/parseMermaid later extract — do not change header names here
 * without updating the parsers.
 *
 * @param structure parsed repository structure
 * @returns system + user message pair for LLMProvider.chat
 */
export function buildArchitecturePrompt(
  structure: CodeStructure
): LLMMessage[] {
  // one line per file with a small complexity signal (function/class counts)
  const fileTree = structure.files
    .map((f) => ` ${f.path} (${f.language}, ${f.functions.length} functions, ${f.classes.length} classes)`)
    .join("\n");
  const modules = structure.modules
    .map((m) => ` ${m.name}/ (${m.files.length} files)`)
    .join("\n");
  const entryPoints = structure.entryPoints.join(", ") || "none detected";
  return [
    {
      role: "system",
      content: `You are an expert software architect analyzing a codebase. Generate a concise architecture overview and a Mermaid flowchart diagram.
Output format (use exactly these headers):
## Architecture Overview
[2-4 paragraphs describing high-level architecture, key design decisions, and how components interact]
## Tech Stack
[comma-separated list of technologies detected]
## Mermaid Diagram
\`\`\`mermaid
[flowchart TD diagram showing modules and their relationships]
\`\`\``,
    },
    {
      role: "user",
      // dependency list capped at 50 edges to bound prompt size
      content: `Analyze this codebase structure:
FILE TREE:
${fileTree}
MODULES:
${modules}
ENTRY POINTS: ${entryPoints}
DEPENDENCIES (import edges):
${structure.dependencies.slice(0, 50).map((d) => ` ${d.source} -> ${d.target}`).join("\n")}
Generate architecture overview with a Mermaid diagram.`,
    },
  ];
}

View File

@@ -0,0 +1,43 @@
import type { LLMMessage, CodeStructure } from "@codeboard/shared";
/**
 * Build the chat messages for the onboarding-guide stage. Headers match what
 * the pipeline's parseSection expects ("Prerequisites", "Setup Steps",
 * "Your First Task").
 *
 * @param structure            parsed repository structure
 * @param architectureOverview overview text produced by the architecture stage
 * @param readmeContent        optional README text (truncated to 3000 chars)
 * @param packageJsonContent   optional package.json text (truncated to 2000 chars)
 */
export function buildGettingStartedPrompt(
  structure: CodeStructure,
  architectureOverview: string,
  readmeContent?: string,
  packageJsonContent?: string
): LLMMessage[] {
  return [
    {
      role: "system",
      content: `You are writing an onboarding guide for a new developer joining this project. Be specific and actionable.
Output format:
## Prerequisites
[list required tools, runtimes, and their versions]
## Setup Steps
[numbered list of concrete commands and actions to get the project running locally]
## Your First Task
[suggest a good first contribution — something small but meaningful that touches multiple parts of codebase]`,
    },
    {
      role: "user",
      content: `Create an onboarding guide for this project.
ARCHITECTURE OVERVIEW:
${architectureOverview}
${readmeContent ? `README:\n${readmeContent.slice(0, 3000)}` : "README: not available"}
${packageJsonContent ? `PACKAGE.JSON:\n${packageJsonContent.slice(0, 2000)}` : ""}
LANGUAGES: ${[...new Set(structure.files.map((f) => f.language))].join(", ")}
ENTRY POINTS: ${structure.entryPoints.join(", ") || "none detected"}
TOTAL FILES: ${structure.files.length}
TOTAL MODULES: ${structure.modules.length}
Write a concrete, actionable onboarding guide.`,
    },
  ];
}

View File

@@ -0,0 +1,42 @@
import type { LLMMessage, ModuleNode, FileNode } from "@codeboard/shared";
/**
 * Build the chat messages for a single module's summary. Headers match what
 * the pipeline's parseSection expects ("Summary", "Key Files", "Public API").
 *
 * @param module the module (directory) being summarized
 * @param files  parsed FileNodes belonging to that module
 */
export function buildModuleSummaryPrompt(
  module: ModuleNode,
  files: FileNode[]
): LLMMessage[] {
  // per-file outline: function signatures, class names, exported symbols
  const fileDetails = files
    .map((f) => {
      const fns = f.functions.map((fn) => ` ${fn.name}(${fn.params.join(", ")})`).join("\n");
      const cls = f.classes.map((c) => ` class ${c.name}`).join("\n");
      const exps = f.exports.map((e) => ` export ${e.isDefault ? "default " : ""}${e.name}`).join("\n");
      return ` ${f.path}:\n${fns}\n${cls}\n${exps}`;
    })
    .join("\n\n");
  return [
    {
      role: "system",
      content: `You are analyzing a code module. Provide a concise summary.
Output format:
## Summary
[1-2 paragraphs explaining what this module does and its role in project]
## Key Files
[list each important file with a one-line description]
## Public API
[list main exported functions/classes and what they do]`,
    },
    {
      role: "user",
      content: `Module: ${module.name} (${module.path})
Files: ${module.files.length}
FILE DETAILS:
${fileDetails}
Summarize this module.`,
    },
  ];
}

View File

@@ -0,0 +1,55 @@
import type { LLMMessage, CodeStructure } from "@codeboard/shared";
/**
 * Build the chat messages for the patterns/conventions stage. Headers match
 * what the pipeline's parseSection expects ("Coding Conventions",
 * "Design Patterns", "Architectural Decisions").
 */
export function buildPatternsPrompt(structure: CodeStructure): LLMMessage[] {
  // samples are capped (40 functions, 20 classes, 30 deps) to bound prompt size
  const sampleFunctions = structure.files
    .flatMap((f) => f.functions.map((fn) => `${f.path}: ${fn.name}(${fn.params.join(", ")})`))
    .slice(0, 40)
    .join("\n");
  const sampleClasses = structure.files
    .flatMap((f) => f.classes.map((c) => `${f.path}: class ${c.name} [${c.methods.map((m) => m.name).join(", ")}]`))
    .slice(0, 20)
    .join("\n");
  const importSources = new Set<string>();
  for (const f of structure.files) {
    for (const imp of f.imports) {
      importSources.add(imp.source);
    }
  }
  return [
    {
      role: "system",
      content: `You are a code reviewer identifying patterns and conventions in a codebase.
Output format:
## Coding Conventions
[list conventions like naming patterns, file organization, error handling approach]
## Design Patterns
[list design patterns detected: factory, singleton, observer, repository, etc.]
## Architectural Decisions
[list key architectural decisions: monorepo vs polyrepo, framework choices, state management, etc.]`,
    },
    {
      role: "user",
      // only non-relative import sources are shown (external dependencies)
      content: `Analyze these code patterns:
FUNCTION SIGNATURES:
${sampleFunctions}
CLASS DEFINITIONS:
${sampleClasses}
EXTERNAL DEPENDENCIES:
${Array.from(importSources).filter((s) => !s.startsWith(".")).slice(0, 30).join(", ")}
DETECTED PATTERNS FROM AST:
${structure.patterns.map((p) => ` ${p.name}: ${p.description}`).join("\n") || " (none pre-detected)"}
Identify coding conventions, design patterns, and architectural decisions.`,
    },
  ];
}

View File

@@ -0,0 +1,34 @@
import Anthropic from "@anthropic-ai/sdk";
import type { LLMMessage, LLMOptions } from "@codeboard/shared";
import type { LLMProvider } from "./base.js";
/**
 * Anthropic-backed LLMProvider using the Messages API.
 */
export class AnthropicProvider implements LLMProvider {
  name = "anthropic";
  private client: Anthropic;
  private defaultModel: string;

  /**
   * @param apiKey Anthropic API key
   * @param model  optional default model (falls back to claude-sonnet-4)
   */
  constructor(apiKey: string, model?: string) {
    this.client = new Anthropic({ apiKey });
    this.defaultModel = model ?? "claude-sonnet-4-20250514";
  }

  /**
   * Send a chat exchange and return the first text block of the reply.
   * Anthropic takes the system prompt as a top-level field, so it is split
   * out of the message list here.
   *
   * Fix vs. previous version: LLMOptions.temperature was silently ignored
   * (OpenAIProvider honors it); it is now forwarded when callers set it.
   */
  async chat(messages: LLMMessage[], options?: LLMOptions): Promise<string> {
    const systemMessage = messages.find((m) => m.role === "system");
    const nonSystemMessages = messages
      .filter((m) => m.role !== "system")
      .map((m) => ({
        role: m.role as "user" | "assistant",
        content: m.content,
      }));
    const response = await this.client.messages.create({
      model: options?.model ?? this.defaultModel,
      max_tokens: options?.maxTokens ?? 4096,
      // undefined is omitted from the request, preserving the API default
      temperature: options?.temperature,
      system: systemMessage?.content,
      messages: nonSystemMessages,
    });
    const textBlock = response.content.find((b) => b.type === "text");
    return textBlock?.type === "text" ? textBlock.text : "";
  }
}

View File

@@ -0,0 +1,6 @@
import type { LLMMessage, LLMOptions } from "@codeboard/shared";
/**
 * Minimal chat-completion abstraction implemented by each LLM vendor adapter.
 */
export interface LLMProvider {
  /** Provider identifier, e.g. "openai" or "anthropic". */
  name: string;
  /** Send a full message exchange and resolve with the assistant's text reply. */
  chat(messages: LLMMessage[], options?: LLMOptions): Promise<string>;
}

View File

@@ -0,0 +1,15 @@
import type { LLMProviderConfig } from "@codeboard/shared";
import type { LLMProvider } from "./base.js";
import { OpenAIProvider } from "./openai.js";
import { AnthropicProvider } from "./anthropic.js";
/**
 * Instantiate the configured LLM provider adapter.
 *
 * @param config provider name, credentials, and optional model/baseUrl
 * @throws Error for an unrecognized provider name
 */
export function createProvider(config: LLMProviderConfig): LLMProvider {
  if (config.provider === "openai") {
    return new OpenAIProvider(config.apiKey, config.model, config.baseUrl);
  }
  if (config.provider === "anthropic") {
    return new AnthropicProvider(config.apiKey, config.model);
  }
  throw new Error(`Unknown LLM provider: ${config.provider}`);
}

View File

@@ -0,0 +1,28 @@
import OpenAI from "openai";
import type { LLMMessage, LLMOptions } from "@codeboard/shared";
import type { LLMProvider } from "./base.js";
/**
 * OpenAI-backed LLMProvider using the Chat Completions API.
 */
export class OpenAIProvider implements LLMProvider {
  name = "openai";
  private client: OpenAI;
  private defaultModel: string;

  /**
   * @param apiKey  OpenAI API key
   * @param model   optional default model (falls back to "gpt-4o")
   * @param baseUrl optional API base URL override (proxies / compatible APIs)
   */
  constructor(apiKey: string, model?: string, baseUrl?: string) {
    this.client = new OpenAI({ apiKey, baseURL: baseUrl });
    this.defaultModel = model ?? "gpt-4o";
  }

  /** Send the message list and return the first choice's text (or ""). */
  async chat(messages: LLMMessage[], options?: LLMOptions): Promise<string> {
    const completion = await this.client.chat.completions.create({
      model: options?.model ?? this.defaultModel,
      messages: messages.map((m) => ({ role: m.role, content: m.content })),
      temperature: options?.temperature ?? 0.3,
      max_tokens: options?.maxTokens ?? 4096,
    });
    const reply = completion.choices[0]?.message?.content;
    return reply ?? "";
  }
}

View File

@@ -0,0 +1,8 @@
{
"extends": "../../tsconfig.json",
"compilerOptions": {
"outDir": "./dist",
"rootDir": "./src"
},
"include": ["src"]
}

View File

@@ -0,0 +1,29 @@
{
"name": "@codeboard/parser",
"version": "0.0.1",
"private": true,
"main": "./dist/index.js",
"types": "./dist/index.d.ts",
"exports": {
".": {
"types": "./dist/index.d.ts",
"default": "./dist/index.js"
}
},
"scripts": {
"build": "tsc",
"clean": "rm -rf dist",
"dev": "tsc --watch"
},
"dependencies": {
"@babel/parser": "^7.26.0",
"@babel/traverse": "^7.26.0",
"@babel/types": "^7.26.0",
"@codeboard/shared": "*",
"glob": "^11.0.0"
},
"devDependencies": {
"@types/babel__traverse": "^7.20.0",
"typescript": "^5.7"
}
}

View File

@@ -0,0 +1,150 @@
import { readFile } from "node:fs/promises";
import { dirname, basename, posix } from "node:path";
import type {
  CodeStructure,
  FileNode,
  ModuleNode,
  DependencyEdge,
  ExportNode,
} from "@codeboard/shared";
import { walkFiles } from "./file-walker.js";
import { typescriptParser } from "./languages/typescript.js";
import { pythonParser } from "./languages/python.js";
import type { LanguageParser } from "./languages/base.js";
// Safety cap on how many files are parsed per repository.
const MAX_FILES = 200;

// Registered language parsers, tried in order.
const parsers: LanguageParser[] = [typescriptParser, pythonParser];

// Language name -> file extensions. Hoisted to module scope: the previous
// version rebuilt this object literal inside the `some` callback on every
// single iteration of every lookup.
const LANGUAGE_EXTENSIONS: Record<string, string[]> = {
  typescript: [".ts", ".tsx"],
  javascript: [".js", ".jsx", ".mjs", ".cjs"],
  python: [".py"],
};

/**
 * Find a parser whose supported extensions overlap the given language's
 * extensions, or null when the language is unsupported.
 */
function getParser(language: string): LanguageParser | null {
  const extensions = LANGUAGE_EXTENSIONS[language] ?? [];
  return (
    parsers.find((p) => p.extensions.some((ext) => extensions.includes(ext))) ??
    null
  );
}
/**
 * Group parsed files into modules, one per containing directory. Module
 * order follows the first appearance of each directory in `files`.
 */
function buildModules(files: FileNode[]): ModuleNode[] {
  const byDirectory = new Map<string, string[]>();
  for (const { path } of files) {
    const dir = dirname(path);
    let bucket = byDirectory.get(dir);
    if (!bucket) {
      bucket = [];
      byDirectory.set(dir, bucket);
    }
    bucket.push(path);
  }
  const modules: ModuleNode[] = [];
  for (const [dirPath, filePaths] of byDirectory) {
    modules.push({
      name: basename(dirPath) || "root",
      path: dirPath,
      files: filePaths,
    });
  }
  return modules;
}
/**
 * Build import edges between parsed files. Relative import specifiers are
 * resolved against the importing file's directory and matched against known
 * files, trying common extensions and index files; unresolvable specifiers
 * are kept verbatim as the edge target.
 *
 * Fix vs. previous version: "../" segments were left in the candidate path
 * verbatim ("src/a/../b"), so parent-relative imports never matched a known
 * file; candidates are now normalized with posix.join.
 */
function buildDependencies(files: FileNode[]): DependencyEdge[] {
  const edges: DependencyEdge[] = [];
  const filePathSet = new Set(files.map((f) => f.path));
  // loop-invariant: extension candidates tried for each relative import
  const extensions = [".ts", ".tsx", ".js", ".jsx", ".py", ""];
  for (const file of files) {
    for (const imp of file.imports) {
      let resolved = imp.source;
      if (resolved.startsWith(".")) {
        // normalizes both "./x" and "../x" relative to the importing file
        const candidate = posix.join(dirname(file.path), resolved);
        for (const ext of extensions) {
          if (filePathSet.has(candidate + ext)) {
            resolved = candidate + ext;
            break;
          }
          if (filePathSet.has(`${candidate}/index${ext}`)) {
            resolved = `${candidate}/index${ext}`;
            break;
          }
        }
      }
      edges.push({
        source: file.path,
        target: resolved,
        type: "import",
      });
    }
  }
  return edges;
}
/**
 * Return the paths of files whose basename (extension stripped) matches a
 * conventional entry-point name.
 */
function detectEntryPoints(files: FileNode[]): string[] {
  const entryNames = new Set([
    "index",
    "main",
    "app",
    "server",
    "mod",
    "lib",
    "__init__",
  ]);
  const entryPaths: string[] = [];
  for (const file of files) {
    const stem = basename(file.path).replace(/\.[^.]+$/, "");
    if (entryNames.has(stem)) {
      entryPaths.push(file.path);
    }
  }
  return entryPaths;
}
/** Flatten every file's export list into a single array, in file order. */
function collectExports(files: FileNode[]): ExportNode[] {
  return files.flatMap((file) => file.exports);
}
export async function analyzeRepository(
repoPath: string
): Promise<CodeStructure> {
const walkedFiles = await walkFiles(repoPath);
const filesToAnalyze = walkedFiles.slice(0, MAX_FILES);
const parsedFiles: FileNode[] = [];
for (const walkedFile of filesToAnalyze) {
const parser = getParser(walkedFile.language);
if (!parser) continue;
try {
const content = await readFile(walkedFile.absolutePath, "utf-8");
const fileNode = parser.parse(content, walkedFile.relativePath);
parsedFiles.push(fileNode);
} catch {
continue;
}
}
const modules = buildModules(parsedFiles);
const dependencies = buildDependencies(parsedFiles);
const entryPoints = detectEntryPoints(parsedFiles);
const exports = collectExports(parsedFiles);
return {
files: parsedFiles,
modules,
entryPoints,
exports,
dependencies,
patterns: [],
};
}

View File

@@ -0,0 +1,121 @@
import { readdir, stat, readFile } from "node:fs/promises";
import { join, relative, extname, basename } from "node:path";
// Directories that never contain first-party source worth parsing.
const IGNORED_DIRS = new Set([
  "node_modules",
  ".git",
  "dist",
  "build",
  "vendor",
  "__pycache__",
  ".next",
  ".turbo",
  "coverage",
  ".venv",
  "venv",
  ".tox",
  "target",
  ".cache",
  ".idea",
  ".vscode",
]);

// File extension (with leading dot) -> language identifier used by the parsers.
const LANGUAGE_MAP: Record<string, string> = {
  ".ts": "typescript",
  ".tsx": "typescript",
  ".js": "javascript",
  ".jsx": "javascript",
  ".mjs": "javascript",
  ".cjs": "javascript",
  ".py": "python",
  ".go": "go",
  ".rs": "rust",
  ".java": "java",
  ".rb": "ruby",
  ".php": "php",
  ".cs": "csharp",
  ".cpp": "cpp",
  ".c": "c",
  ".h": "c",
  ".hpp": "cpp",
  ".swift": "swift",
  ".kt": "kotlin",
};

// Basenames (without extension) that conventionally mark entry points.
const ENTRY_POINT_NAMES = new Set([
  "index",
  "main",
  "app",
  "server",
  "mod",
  "lib",
  "__init__",
  "manage",
]);

/** A source file discovered during the repository walk. */
export interface WalkedFile {
  // absolute path on disk
  absolutePath: string;
  // path relative to the repo root (canonical id used downstream)
  relativePath: string;
  // language identifier from LANGUAGE_MAP
  language: string;
  // file size in bytes
  size: number;
  // true when the basename matches ENTRY_POINT_NAMES (sorted first)
  isEntryPoint: boolean;
}
/**
 * Depth-first walk of `dir`, appending matching source files to `results`.
 * Skips ignored and hidden directories/files, extensions not in
 * LANGUAGE_MAP, and files larger than 500 KB.
 */
async function walkDir(
  dir: string,
  rootDir: string,
  results: WalkedFile[]
): Promise<void> {
  for (const entry of await readdir(dir, { withFileTypes: true })) {
    if (IGNORED_DIRS.has(entry.name) || entry.name.startsWith(".")) continue;
    const fullPath = join(dir, entry.name);
    if (entry.isDirectory()) {
      await walkDir(fullPath, rootDir, results);
      continue;
    }
    const ext = extname(entry.name);
    const language = LANGUAGE_MAP[ext];
    if (!language) continue;
    const info = await stat(fullPath);
    if (info.size > 500_000) continue; // skip huge (likely generated) files
    results.push({
      absolutePath: fullPath,
      relativePath: relative(rootDir, fullPath),
      language,
      size: info.size,
      isEntryPoint: ENTRY_POINT_NAMES.has(basename(entry.name, ext)),
    });
  }
}
/**
 * Collect all recognized source files under repoPath, entry points first,
 * then alphabetical by relative path.
 */
export async function walkFiles(repoPath: string): Promise<WalkedFile[]> {
  const collected: WalkedFile[] = [];
  await walkDir(repoPath, repoPath, collected);
  collected.sort((left, right) => {
    if (left.isEntryPoint !== right.isEntryPoint) {
      return left.isEntryPoint ? -1 : 1;
    }
    return left.relativePath.localeCompare(right.relativePath);
  });
  return collected;
}
/** Read a file's contents as UTF-8 text. */
export async function readFileContent(filePath: string): Promise<string> {
  const text = await readFile(filePath, "utf-8");
  return text;
}
export function detectLanguage(filePath: string): string | null {
return LANGUAGE_MAP[extname(filePath)] ?? null;
}

View File

@@ -0,0 +1,3 @@
export { analyzeRepository } from "./analyzer.js";
export { walkFiles } from "./file-walker.js";
export type { LanguageParser } from "./languages/base.js";

View File

@@ -0,0 +1,6 @@
import type { FileNode } from "@codeboard/shared";
/**
 * Contract implemented by each per-language source parser.
 */
export interface LanguageParser {
  /** File extensions (with leading dot) this parser accepts, e.g. [".py"]. */
  extensions: string[];
  /** Parse file content into a FileNode; filePath is the repo-relative path. */
  parse(content: string, filePath: string): FileNode;
}

View File

@@ -0,0 +1,157 @@
import type {
FileNode,
FunctionNode,
ClassNode,
ImportNode,
ExportNode,
} from "@codeboard/shared";
import type { LanguageParser } from "./base.js";
// `def name(params) -> ret:` at any indent (captures: indent, name, params, return type).
const FUNC_RE = /^(\s*)def\s+(\w+)\s*\(([^)]*)\)(?:\s*->\s*([^:]+))?\s*:/gm;
// `class Name(Bases):` or `class Name:` (captures: indent, name, base list).
const CLASS_RE = /^(\s*)class\s+(\w+)(?:\(([^)]*)\))?\s*:/gm;
// `import x` and `from pkg import x` lines (captures: module, imported names).
const IMPORT_RE = /^(?:from\s+([\w.]+)\s+)?import\s+(.+)$/gm;
// Leading triple-quoted docstring, either """...""" or '''...'''.
const DOCSTRING_RE = /^\s*(?:"""([\s\S]*?)"""|'''([\s\S]*?)''')/;
function parseParams(raw: string): string[] {
if (!raw.trim()) return [];
return raw
.split(",")
.map((p) => p.trim().split(":")[0].split("=")[0].trim())
.filter((p) => p && p !== "self" && p !== "cls");
}
export const pythonParser: LanguageParser = {
extensions: [".py"],
parse(content: string, filePath: string): FileNode {
const lines = content.split("\n");
const functions: FunctionNode[] = [];
const classes: ClassNode[] = [];
const imports: ImportNode[] = [];
const exports: ExportNode[] = [];
let match: RegExpExecArray | null;
FUNC_RE.lastIndex = 0;
while ((match = FUNC_RE.exec(content)) !== null) {
const indent = match[1].length;
const name = match[2];
const params = parseParams(match[3]);
const returnType = match[4]?.trim();
const lineStart =
content.substring(0, match.index).split("\n").length;
let lineEnd = lineStart;
for (let i = lineStart; i < lines.length; i++) {
const line = lines[i];
if (
i > lineStart &&
line.trim() &&
!line.startsWith(" ".repeat(indent + 1)) &&
!line.startsWith("\t".repeat(indent === 0 ? 1 : indent))
) {
lineEnd = i;
break;
}
lineEnd = i + 1;
}
let docstring: string | undefined;
if (lineStart < lines.length) {
const bodyStart = lines.slice(lineStart, lineStart + 5).join("\n");
const docMatch = DOCSTRING_RE.exec(bodyStart);
if (docMatch) {
docstring = (docMatch[1] ?? docMatch[2]).trim();
}
}
if (indent === 0) {
functions.push({
name,
params,
returnType,
lineStart,
lineEnd,
docstring,
calls: [],
});
}
}
CLASS_RE.lastIndex = 0;
while ((match = CLASS_RE.exec(content)) !== null) {
const name = match[2];
const methods: FunctionNode[] = [];
const classLineStart =
content.substring(0, match.index).split("\n").length;
const classBody = content.substring(match.index + match[0].length);
const methodRe = /^\s{2,}def\s+(\w+)\s*\(([^)]*)\)(?:\s*->\s*([^:]+))?\s*:/gm;
let methodMatch: RegExpExecArray | null;
while ((methodMatch = methodRe.exec(classBody)) !== null) {
const methodLineStart =
classLineStart +
classBody.substring(0, methodMatch.index).split("\n").length;
methods.push({
name: methodMatch[1],
params: parseParams(methodMatch[2]),
returnType: methodMatch[3]?.trim(),
lineStart: methodLineStart,
lineEnd: methodLineStart + 1,
calls: [],
});
}
classes.push({ name, methods, properties: [] });
}
IMPORT_RE.lastIndex = 0;
while ((match = IMPORT_RE.exec(content)) !== null) {
const fromModule = match[1];
const importedNames = match[2]
.split(",")
.map((s) => s.trim().split(" as ")[0].trim())
.filter(Boolean);
if (fromModule) {
imports.push({ source: fromModule, specifiers: importedNames });
} else {
for (const name of importedNames) {
imports.push({ source: name, specifiers: [name] });
}
}
}
const allRe = /^__all__\s*=\s*\[([^\]]*)\]/m;
const allMatch = allRe.exec(content);
if (allMatch) {
const names = allMatch[1]
.split(",")
.map((s) => s.trim().replace(/['"]/g, ""))
.filter(Boolean);
for (const name of names) {
exports.push({ name, isDefault: false });
}
}
let complexity = 0;
for (const line of lines) {
const trimmed = line.trim();
if (trimmed.startsWith("if ") || trimmed.startsWith("elif ")) complexity++;
if (trimmed.startsWith("for ") || trimmed.startsWith("while ")) complexity++;
if (trimmed.startsWith("except")) complexity++;
if (trimmed.includes(" and ") || trimmed.includes(" or ")) complexity++;
}
return {
path: filePath,
language: "python",
size: content.length,
functions,
classes,
imports,
exports,
complexity,
};
},
};

View File

@@ -0,0 +1,227 @@
import { parse as babelParse } from "@babel/parser";
import _traverse from "@babel/traverse";
import type {
FileNode,
FunctionNode,
ClassNode,
ImportNode,
ExportNode,
} from "@codeboard/shared";
import type { LanguageParser } from "./base.js";
// @babel/traverse ships as CommonJS; depending on the module-interop mode the
// callable may arrive either directly or nested under `.default`.
// Normalize to a callable once, at module load.
const traverse =
  typeof _traverse === "function"
    ? _traverse
    : (_traverse as unknown as { default: typeof _traverse }).default;
/**
 * Convert Babel parameter nodes into display names. A parameter with a
 * default value (AssignmentPattern) reports its left-hand identifier;
 * anything without a recoverable name becomes "unknown".
 */
function extractFunctionParams(
  params: Array<{ name?: string; left?: { name?: string }; type?: string }>
): string[] {
  const names: string[] = [];
  for (const param of params) {
    if (param.type === "AssignmentPattern" && param.left?.name) {
      names.push(param.left.name);
    } else {
      names.push(param.name ?? "unknown");
    }
  }
  return names;
}
/**
 * JavaScript/TypeScript parser built on the Babel AST (JSX/TSX included).
 * Extracts named function declarations, arrow functions bound to variables,
 * classes with methods/properties, imports, exports, file-wide call targets,
 * and a rough complexity score.
 */
export const typescriptParser: LanguageParser = {
  extensions: [".ts", ".tsx", ".js", ".jsx", ".mjs", ".cjs"],
  parse(content: string, filePath: string): FileNode {
    const functions: FunctionNode[] = [];
    const classes: ClassNode[] = [];
    const imports: ImportNode[] = [];
    const exports: ExportNode[] = [];
    // Call targets collected across the whole file; they are not attributed
    // to the enclosing function (every FunctionNode gets the same list below).
    const calls: Set<string> = new Set();
    let ast;
    try {
      ast = babelParse(content, {
        sourceType: "module",
        plugins: [
          "typescript",
          "jsx",
          "decorators-legacy",
          "classProperties",
          "classPrivateProperties",
          "classPrivateMethods",
          "optionalChaining",
          "nullishCoalescingOperator",
          "dynamicImport",
        ],
        errorRecovery: true,
      });
    } catch {
      // Unparseable even with errorRecovery: return an empty FileNode so one
      // bad file does not abort the whole repository analysis.
      // FIX: use the same extension check as the success path below. The old
      // code tested ".py" here — an extension this parser never registers —
      // and mislabelled plain .js files as "typescript" on parse failure.
      return {
        path: filePath,
        language: filePath.match(/\.tsx?$/) ? "typescript" : "javascript",
        size: content.length,
        functions: [],
        classes: [],
        imports: [],
        exports: [],
        complexity: 0,
      };
    }
    traverse(ast, {
      // Named `function` declarations (anonymous ones are skipped).
      FunctionDeclaration(path) {
        const node = path.node;
        if (!node.id) return;
        functions.push({
          name: node.id.name,
          params: extractFunctionParams(node.params as never[]),
          returnType: node.returnType
            ? content.slice(node.returnType.start!, node.returnType.end!)
            : undefined,
          lineStart: node.loc?.start.line ?? 0,
          lineEnd: node.loc?.end.line ?? 0,
          calls: [],
        });
      },
      // Arrow functions count only when bound to a named variable
      // (`const f = () => …`); inline callbacks are ignored.
      ArrowFunctionExpression(path) {
        const parent = path.parent;
        if (
          parent.type === "VariableDeclarator" &&
          parent.id.type === "Identifier"
        ) {
          const node = path.node;
          functions.push({
            name: parent.id.name,
            params: extractFunctionParams(node.params as never[]),
            returnType: node.returnType
              ? content.slice(node.returnType.start!, node.returnType.end!)
              : undefined,
            lineStart: node.loc?.start.line ?? 0,
            lineEnd: node.loc?.end.line ?? 0,
            calls: [],
          });
        }
      },
      // Named classes: collect identifier-keyed methods and properties
      // (computed/private keys are skipped).
      ClassDeclaration(path) {
        const node = path.node;
        if (!node.id) return;
        const methods: FunctionNode[] = [];
        const properties: Array<{ name: string; type?: string }> = [];
        for (const member of node.body.body) {
          if (
            member.type === "ClassMethod" &&
            member.key.type === "Identifier"
          ) {
            methods.push({
              name: member.key.name,
              params: extractFunctionParams(member.params as never[]),
              lineStart: member.loc?.start.line ?? 0,
              lineEnd: member.loc?.end.line ?? 0,
              calls: [],
            });
          } else if (
            member.type === "ClassProperty" &&
            member.key.type === "Identifier"
          ) {
            properties.push({
              name: member.key.name,
              // Type annotation text is sliced verbatim from the source.
              type: member.typeAnnotation
                ? content.slice(
                    member.typeAnnotation.start!,
                    member.typeAnnotation.end!
                  )
                : undefined,
            });
          }
        }
        classes.push({ name: node.id.name, methods, properties });
      },
      ImportDeclaration(path) {
        const node = path.node;
        // Local binding names (default, namespace, and named specifiers).
        const specifiers = node.specifiers.map((s) => s.local.name);
        imports.push({ source: node.source.value, specifiers });
      },
      ExportDefaultDeclaration() {
        exports.push({ name: "default", isDefault: true });
      },
      // `export function/class/const …` and `export { a, b }` forms.
      ExportNamedDeclaration(path) {
        const node = path.node;
        if (node.declaration) {
          if (
            node.declaration.type === "FunctionDeclaration" &&
            node.declaration.id
          ) {
            exports.push({
              name: node.declaration.id.name,
              isDefault: false,
            });
          } else if (
            node.declaration.type === "ClassDeclaration" &&
            node.declaration.id
          ) {
            exports.push({
              name: node.declaration.id.name,
              isDefault: false,
            });
          } else if (node.declaration.type === "VariableDeclaration") {
            for (const decl of node.declaration.declarations) {
              if (decl.id.type === "Identifier") {
                exports.push({ name: decl.id.name, isDefault: false });
              }
            }
          }
        }
        if (node.specifiers) {
          for (const spec of node.specifiers) {
            if (spec.exported.type === "Identifier") {
              exports.push({ name: spec.exported.name, isDefault: false });
            }
          }
        }
      },
      // Record bare calls (`foo()`) and method calls (`obj.foo()`) by name.
      CallExpression(path) {
        const callee = path.node.callee;
        if (callee.type === "Identifier") {
          calls.add(callee.name);
        } else if (
          callee.type === "MemberExpression" &&
          callee.property.type === "Identifier"
        ) {
          calls.add(callee.property.name);
        }
      },
    });
    // Attach the file-wide call set to every collected function (coarse).
    for (const fn of functions) {
      fn.calls = Array.from(calls);
    }
    // Complexity: one point per branch/loop/switch-case/ternary/logical-op/
    // catch node anywhere in the file.
    let complexity = 0;
    traverse(ast, {
      IfStatement() { complexity++; },
      ForStatement() { complexity++; },
      ForInStatement() { complexity++; },
      ForOfStatement() { complexity++; },
      WhileStatement() { complexity++; },
      DoWhileStatement() { complexity++; },
      SwitchCase() { complexity++; },
      ConditionalExpression() { complexity++; },
      LogicalExpression() { complexity++; },
      CatchClause() { complexity++; },
    });
    return {
      path: filePath,
      language: filePath.match(/\.tsx?$/) ? "typescript" : "javascript",
      size: content.length,
      functions,
      classes,
      imports,
      exports,
      complexity,
    };
  },
};

View File

@@ -0,0 +1,8 @@
{
  // Package-level build config: inherit repo-wide defaults, compile src/ → dist/.
  "extends": "../../tsconfig.json",
  "compilerOptions": {
    "outDir": "./dist",
    "rootDir": "./src"
  },
  "include": ["src"]
}

View File

@@ -0,0 +1,21 @@
{
"name": "@codeboard/shared",
"version": "0.0.1",
"private": true,
"main": "./dist/index.js",
"types": "./dist/index.d.ts",
"exports": {
".": {
"types": "./dist/index.d.ts",
"default": "./dist/index.js"
}
},
"scripts": {
"build": "tsc",
"clean": "rm -rf dist",
"dev": "tsc --watch"
},
"devDependencies": {
"typescript": "^5.7"
}
}

View File

@@ -0,0 +1,24 @@
// Single public entry point for @codeboard/shared: re-export every shared
// type so consumers write `import type { … } from "@codeboard/shared"`.
export type {
  CloneResult,
  CloneMetadata,
  FileNode,
  FunctionNode,
  ClassNode,
  ClassProperty,
  ImportNode,
  ExportNode,
  CodeStructure,
  ModuleNode,
  DetectedPattern,
  DependencyEdge,
  GeneratedDocs,
  DocsOverview,
  DocsModule,
  DocsPatterns,
  DocsGettingStarted,
  Generation,
  GenerationStatus,
  LLMMessage,
  LLMOptions,
  LLMProviderConfig,
} from "./types.js";

View File

@@ -0,0 +1,185 @@
// ── Repository Cloning ──────────────────────────────────────────────
/** Repository-level metadata captured when a repo is cloned. */
export interface CloneMetadata {
  name: string;
  description: string;
  defaultBranch: string;
  // Language name → weight (presumably bytes or file count — TODO confirm
  // against the clone job that populates this).
  languages: Record<string, number>;
  stars: number;
  // NOTE(review): format not established here — likely a hash or ISO date; verify.
  lastCommit: string;
  totalFiles: number;
  totalLines: number;
}
/** Outcome of a clone: local checkout path plus repository metadata. */
export interface CloneResult {
  localPath: string;
  metadata: CloneMetadata;
}
// ── AST Parsing ─────────────────────────────────────────────────────
/** A function or method extracted from a source file. */
export interface FunctionNode {
  name: string;
  /** Bare parameter names (annotations/defaults stripped by the parsers). */
  params: string[];
  returnType?: string;
  /** 1-based line of the definition header (as produced by the parsers). */
  lineStart: number;
  /** 1-based end line; heuristic — may be approximate for methods. */
  lineEnd: number;
  docstring?: string;
  // Call-target names. The TS parser fills this with the file-wide call set;
  // the Python parser leaves it empty.
  calls: string[];
}
/** A class field with its (optional) source-level type annotation text. */
export interface ClassProperty {
  name: string;
  type?: string;
}
/** A class with its extracted methods and properties. */
export interface ClassNode {
  name: string;
  methods: FunctionNode[];
  properties: ClassProperty[];
}
/** One import statement: the source module and the names bound locally. */
export interface ImportNode {
  source: string;
  specifiers: string[];
}
/** One exported symbol; `name` is "default" for default exports. */
export interface ExportNode {
  name: string;
  isDefault: boolean;
}
/** Parsed summary of a single source file. */
export interface FileNode {
  path: string;
  language: string;
  /** Content length in characters (parsers use `content.length`). */
  size: number;
  functions: FunctionNode[];
  classes: ClassNode[];
  imports: ImportNode[];
  exports: ExportNode[];
  /** Rough count of branching constructs (if/loops/catch/boolean ops). */
  complexity: number;
}
/** A logical module: a named group of files, optionally LLM-summarised. */
export interface ModuleNode {
  name: string;
  path: string;
  files: string[];
  summary?: string;
}
/** A recurring convention/pattern detected in the codebase, with examples. */
export interface DetectedPattern {
  name: string;
  description: string;
  examples: string[];
}
/** A directed edge in the dependency graph between two files/modules. */
export interface DependencyEdge {
  source: string;
  target: string;
  type: "import" | "call" | "extends";
}
/** Full structural analysis of a repository. */
export interface CodeStructure {
  files: FileNode[];
  modules: ModuleNode[];
  entryPoints: string[];
  exports: ExportNode[];
  dependencies: DependencyEdge[];
  patterns: DetectedPattern[];
}
// ── Generated Documentation ─────────────────────────────────────────
/** Top-level overview section of the generated docs. */
export interface DocsOverview {
  title: string;
  description: string;
  // NOTE(review): presumably Mermaid/diagram source text — confirm against
  // the diagrams package before relying on the format.
  architectureDiagram: string;
  techStack: string[];
  keyMetrics: {
    files: number;
    modules: number;
    languages: string[];
  };
}
/** Per-module documentation, including its dependency relationships. */
export interface DocsModule {
  name: string;
  path: string;
  summary: string;
  keyFiles: Array<{ path: string; purpose: string }>;
  publicApi: string[];
  dependsOn: string[];
  dependedBy: string[];
}
/** Conventions, design patterns, and architectural decisions found. */
export interface DocsPatterns {
  conventions: string[];
  designPatterns: string[];
  architecturalDecisions: string[];
}
/** Onboarding section: how a newcomer gets the project running. */
export interface DocsGettingStarted {
  prerequisites: string[];
  setupSteps: string[];
  firstTask: string;
}
/** Complete generated documentation bundle for one repository. */
export interface GeneratedDocs {
  id: string;
  repoUrl: string;
  repoName: string;
  /** Generation timestamp — serialized as a string (format set by producer). */
  generatedAt: string;
  sections: {
    overview: DocsOverview;
    modules: DocsModule[];
    patterns: DocsPatterns;
    gettingStarted: DocsGettingStarted;
    dependencyGraph: string;
  };
}
// ── Generation State ────────────────────────────────────────────────
/** Lifecycle states of a docs-generation run, in pipeline order. */
export type GenerationStatus =
  | "QUEUED"
  | "CLONING"
  | "PARSING"
  | "GENERATING"
  | "RENDERING"
  | "COMPLETED"
  | "FAILED";
/** Persisted record of one docs-generation run. */
export interface Generation {
  id: string;
  repoUrl: string;
  repoName: string;
  commitHash: string;
  status: GenerationStatus;
  // Presumably a 0–100 percentage — TODO confirm against the worker jobs.
  progress: number;
  /** Populated on COMPLETED; null until then or on failure. */
  result: GeneratedDocs | null;
  /** Populated on FAILED; null otherwise. */
  error: string | null;
  costUsd: number | null;
  // Units not established here (ms vs s) — verify against the producer.
  duration: number | null;
  createdAt: string;
  viewCount: number;
}
// ── LLM Configuration ───────────────────────────────────────────────
/** One chat message in an LLM conversation. */
export interface LLMMessage {
  role: "system" | "user" | "assistant";
  content: string;
}
/** Per-request generation options; unset fields use provider defaults. */
export interface LLMOptions {
  temperature?: number;
  maxTokens?: number;
  model?: string;
}
/** Static configuration for constructing an LLM provider client. */
export interface LLMProviderConfig {
  provider: "openai" | "anthropic";
  apiKey: string;
  /** Default model; can be overridden per request via LLMOptions.model. */
  model?: string;
  /** Optional API base URL override (e.g. for proxies). */
  baseUrl?: string;
}

View File

@@ -0,0 +1,8 @@
{
  // Package-level build config: inherit repo-wide defaults, compile src/ → dist/.
  "extends": "../../tsconfig.json",
  "compilerOptions": {
    "outDir": "./dist",
    "rootDir": "./src"
  },
  "include": ["src"]
}

16
tsconfig.json Normal file
View File

@@ -0,0 +1,16 @@
{
  // Repo-wide compiler defaults, extended by each package's tsconfig.
  "compilerOptions": {
    // Modern Node runtime: ES2022 output with NodeNext ESM resolution.
    "target": "ES2022",
    "module": "NodeNext",
    "moduleResolution": "NodeNext",
    "lib": ["ES2022"],
    "strict": true,
    "esModuleInterop": true,
    "skipLibCheck": true,
    "forceConsistentCasingInFileNames": true,
    "resolveJsonModule": true,
    // Emit .d.ts (+ maps) so workspace packages can type-check against dist/.
    "declaration": true,
    "declarationMap": true,
    "sourceMap": true
  }
}