feat: initial CodeBoard monorepo scaffold
Turborepo monorepo with npm workspaces: - apps/web: Next.js 14 frontend with Tailwind v4, SSE progress, doc viewer - apps/worker: BullMQ job processor (clone → parse → LLM generate) - packages/shared: TypeScript types - packages/parser: Babel-based AST parser (JS/TS) + regex (Python) - packages/llm: OpenAI/Anthropic provider abstraction + prompt pipeline - packages/diagrams: Mermaid architecture & dependency graph generators - packages/database: Prisma schema (PostgreSQL) - Docker multi-stage build (web + worker targets) All packages compile successfully with tsc and next build.
This commit is contained in:
29
packages/parser/package.json
Normal file
29
packages/parser/package.json
Normal file
@@ -0,0 +1,29 @@
|
||||
{
|
||||
"name": "@codeboard/parser",
|
||||
"version": "0.0.1",
|
||||
"private": true,
|
||||
"main": "./dist/index.js",
|
||||
"types": "./dist/index.d.ts",
|
||||
"exports": {
|
||||
".": {
|
||||
"types": "./dist/index.d.ts",
|
||||
"default": "./dist/index.js"
|
||||
}
|
||||
},
|
||||
"scripts": {
|
||||
"build": "tsc",
|
||||
"clean": "rm -rf dist",
|
||||
"dev": "tsc --watch"
|
||||
},
|
||||
"dependencies": {
|
||||
"@babel/parser": "^7.26.0",
|
||||
"@babel/traverse": "^7.26.0",
|
||||
"@babel/types": "^7.26.0",
|
||||
"@codeboard/shared": "*",
|
||||
"glob": "^11.0.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/babel__traverse": "^7.20.0",
|
||||
"typescript": "^5.7"
|
||||
}
|
||||
}
|
||||
150
packages/parser/src/analyzer.ts
Normal file
150
packages/parser/src/analyzer.ts
Normal file
@@ -0,0 +1,150 @@
|
||||
import { readFile } from "node:fs/promises";
|
||||
import { dirname, basename } from "node:path";
|
||||
import type {
|
||||
CodeStructure,
|
||||
FileNode,
|
||||
ModuleNode,
|
||||
DependencyEdge,
|
||||
ExportNode,
|
||||
} from "@codeboard/shared";
|
||||
import { walkFiles } from "./file-walker.js";
|
||||
import { typescriptParser } from "./languages/typescript.js";
|
||||
import { pythonParser } from "./languages/python.js";
|
||||
import type { LanguageParser } from "./languages/base.js";
|
||||
|
||||
// Hard cap on files parsed per repository, to bound analysis time and memory.
const MAX_FILES = 200;

// Registered language parsers, consulted in order by getParser().
const parsers: LanguageParser[] = [typescriptParser, pythonParser];
|
||||
|
||||
function getParser(language: string): LanguageParser | null {
|
||||
return (
|
||||
parsers.find((p) =>
|
||||
p.extensions.some((ext) => {
|
||||
const langMap: Record<string, string[]> = {
|
||||
typescript: [".ts", ".tsx"],
|
||||
javascript: [".js", ".jsx", ".mjs", ".cjs"],
|
||||
python: [".py"],
|
||||
};
|
||||
return langMap[language]?.includes(ext);
|
||||
})
|
||||
) ?? null
|
||||
);
|
||||
}
|
||||
|
||||
function buildModules(files: FileNode[]): ModuleNode[] {
|
||||
const dirMap = new Map<string, string[]>();
|
||||
|
||||
for (const file of files) {
|
||||
const dir = dirname(file.path);
|
||||
const existing = dirMap.get(dir);
|
||||
if (existing) {
|
||||
existing.push(file.path);
|
||||
} else {
|
||||
dirMap.set(dir, [file.path]);
|
||||
}
|
||||
}
|
||||
|
||||
return Array.from(dirMap.entries()).map(([dirPath, filePaths]) => ({
|
||||
name: basename(dirPath) || "root",
|
||||
path: dirPath,
|
||||
files: filePaths,
|
||||
}));
|
||||
}
|
||||
|
||||
function buildDependencies(files: FileNode[]): DependencyEdge[] {
|
||||
const edges: DependencyEdge[] = [];
|
||||
const filePathSet = new Set(files.map((f) => f.path));
|
||||
|
||||
for (const file of files) {
|
||||
for (const imp of file.imports) {
|
||||
let resolved = imp.source;
|
||||
|
||||
if (resolved.startsWith(".")) {
|
||||
const dir = dirname(file.path);
|
||||
const candidate = `${dir}/${resolved.replace(/^\.\//, "")}`;
|
||||
const extensions = [".ts", ".tsx", ".js", ".jsx", ".py", ""];
|
||||
for (const ext of extensions) {
|
||||
if (filePathSet.has(candidate + ext)) {
|
||||
resolved = candidate + ext;
|
||||
break;
|
||||
}
|
||||
if (filePathSet.has(`${candidate}/index${ext}`)) {
|
||||
resolved = `${candidate}/index${ext}`;
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
edges.push({
|
||||
source: file.path,
|
||||
target: resolved,
|
||||
type: "import",
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
return edges;
|
||||
}
|
||||
|
||||
function detectEntryPoints(files: FileNode[]): string[] {
|
||||
const entryNames = new Set([
|
||||
"index",
|
||||
"main",
|
||||
"app",
|
||||
"server",
|
||||
"mod",
|
||||
"lib",
|
||||
"__init__",
|
||||
]);
|
||||
|
||||
return files
|
||||
.filter((f) => {
|
||||
const name = basename(f.path).replace(/\.[^.]+$/, "");
|
||||
return entryNames.has(name);
|
||||
})
|
||||
.map((f) => f.path);
|
||||
}
|
||||
|
||||
function collectExports(files: FileNode[]): ExportNode[] {
|
||||
const allExports: ExportNode[] = [];
|
||||
for (const file of files) {
|
||||
allExports.push(...file.exports);
|
||||
}
|
||||
return allExports;
|
||||
}
|
||||
|
||||
export async function analyzeRepository(
|
||||
repoPath: string
|
||||
): Promise<CodeStructure> {
|
||||
const walkedFiles = await walkFiles(repoPath);
|
||||
const filesToAnalyze = walkedFiles.slice(0, MAX_FILES);
|
||||
|
||||
const parsedFiles: FileNode[] = [];
|
||||
|
||||
for (const walkedFile of filesToAnalyze) {
|
||||
const parser = getParser(walkedFile.language);
|
||||
if (!parser) continue;
|
||||
|
||||
try {
|
||||
const content = await readFile(walkedFile.absolutePath, "utf-8");
|
||||
const fileNode = parser.parse(content, walkedFile.relativePath);
|
||||
parsedFiles.push(fileNode);
|
||||
} catch {
|
||||
continue;
|
||||
}
|
||||
}
|
||||
|
||||
const modules = buildModules(parsedFiles);
|
||||
const dependencies = buildDependencies(parsedFiles);
|
||||
const entryPoints = detectEntryPoints(parsedFiles);
|
||||
const exports = collectExports(parsedFiles);
|
||||
|
||||
return {
|
||||
files: parsedFiles,
|
||||
modules,
|
||||
entryPoints,
|
||||
exports,
|
||||
dependencies,
|
||||
patterns: [],
|
||||
};
|
||||
}
|
||||
121
packages/parser/src/file-walker.ts
Normal file
121
packages/parser/src/file-walker.ts
Normal file
@@ -0,0 +1,121 @@
|
||||
import { readdir, stat, readFile } from "node:fs/promises";
|
||||
import { join, relative, extname, basename } from "node:path";
|
||||
|
||||
// Directory names never descended into: package managers, VCS metadata,
// build outputs, caches, virtualenvs and editor settings.
const IGNORED_DIRS = new Set([
  "node_modules",
  ".git",
  "dist",
  "build",
  "vendor",
  "__pycache__",
  ".next",
  ".turbo",
  "coverage",
  ".venv",
  "venv",
  ".tox",
  "target",
  ".cache",
  ".idea",
  ".vscode",
]);

// File extension -> language id. Only extensions listed here are collected.
// NOTE(review): the analyzer currently has parsers for TS/JS and Python only;
// the other languages are walked and counted but never parsed — confirm that
// is intentional.
const LANGUAGE_MAP: Record<string, string> = {
  ".ts": "typescript",
  ".tsx": "typescript",
  ".js": "javascript",
  ".jsx": "javascript",
  ".mjs": "javascript",
  ".cjs": "javascript",
  ".py": "python",
  ".go": "go",
  ".rs": "rust",
  ".java": "java",
  ".rb": "ruby",
  ".php": "php",
  ".cs": "csharp",
  ".cpp": "cpp",
  ".c": "c",
  ".h": "c",
  ".hpp": "cpp",
  ".swift": "swift",
  ".kt": "kotlin",
};

// Basenames (without extension) that conventionally mark an entry point.
const ENTRY_POINT_NAMES = new Set([
  "index",
  "main",
  "app",
  "server",
  "mod",
  "lib",
  "__init__",
  "manage",
]);

/** A source file discovered by the walker, before parsing. */
export interface WalkedFile {
  /** Absolute path on disk. */
  absolutePath: string;
  /** Path relative to the repository root. */
  relativePath: string;
  /** Language id from LANGUAGE_MAP, e.g. "typescript". */
  language: string;
  /** File size in bytes. */
  size: number;
  /** True when the basename is in ENTRY_POINT_NAMES. */
  isEntryPoint: boolean;
}
|
||||
|
||||
/**
 * Recursively collect source files under `dir`, appending to `results`.
 *
 * Skips well-known build/dependency directories, any dot-prefixed entry
 * (files and directories alike), files whose extension is not in
 * LANGUAGE_MAP, and files larger than 500 KB.
 *
 * @param dir - Directory currently being scanned.
 * @param rootDir - Repository root, used to compute relative paths.
 * @param results - Accumulator, mutated in place.
 */
async function walkDir(
  dir: string,
  rootDir: string,
  results: WalkedFile[]
): Promise<void> {
  const entries = await readdir(dir, { withFileTypes: true });

  for (const entry of entries) {
    if (IGNORED_DIRS.has(entry.name)) continue;
    // Dot-prefixed entries (.env, .github, …) are skipped wholesale.
    if (entry.name.startsWith(".")) continue;

    const fullPath = join(dir, entry.name);

    if (entry.isDirectory()) {
      await walkDir(fullPath, rootDir, results);
      continue;
    }

    // NOTE(review): symlinks are not followed (Dirent.isDirectory() is false
    // for them) but symlinked files may still be stat'ed below — confirm this
    // is acceptable for cloned repositories.
    const ext = extname(entry.name);
    const language = LANGUAGE_MAP[ext];
    if (!language) continue;

    const fileStat = await stat(fullPath);
    // Skip very large files (> 500 KB); likely generated or vendored.
    if (fileStat.size > 500_000) continue;

    const nameWithoutExt = basename(entry.name, ext);
    const isEntryPoint = ENTRY_POINT_NAMES.has(nameWithoutExt);

    results.push({
      absolutePath: fullPath,
      relativePath: relative(rootDir, fullPath),
      language,
      size: fileStat.size,
      isEntryPoint,
    });
  }
}
|
||||
|
||||
export async function walkFiles(repoPath: string): Promise<WalkedFile[]> {
|
||||
const results: WalkedFile[] = [];
|
||||
await walkDir(repoPath, repoPath, results);
|
||||
|
||||
results.sort((a, b) => {
|
||||
if (a.isEntryPoint && !b.isEntryPoint) return -1;
|
||||
if (!a.isEntryPoint && b.isEntryPoint) return 1;
|
||||
return a.relativePath.localeCompare(b.relativePath);
|
||||
});
|
||||
|
||||
return results;
|
||||
}
|
||||
|
||||
/** Read a file as UTF-8 text. Thin convenience wrapper over fs readFile. */
export async function readFileContent(filePath: string): Promise<string> {
  return readFile(filePath, "utf-8");
}
|
||||
|
||||
export function detectLanguage(filePath: string): string | null {
|
||||
return LANGUAGE_MAP[extname(filePath)] ?? null;
|
||||
}
|
||||
3
packages/parser/src/index.ts
Normal file
3
packages/parser/src/index.ts
Normal file
@@ -0,0 +1,3 @@
|
||||
// Public surface of @codeboard/parser.
export { analyzeRepository } from "./analyzer.js";
export { walkFiles } from "./file-walker.js";
export type { LanguageParser } from "./languages/base.js";
|
||||
6
packages/parser/src/languages/base.ts
Normal file
6
packages/parser/src/languages/base.ts
Normal file
@@ -0,0 +1,6 @@
|
||||
import type { FileNode } from "@codeboard/shared";

/** Contract implemented by every language-specific parser. */
export interface LanguageParser {
  /** File extensions (with leading dot) this parser can handle. */
  extensions: string[];
  /** Parse file text into a FileNode; `filePath` is the repo-relative path. */
  parse(content: string, filePath: string): FileNode;
}
|
||||
157
packages/parser/src/languages/python.ts
Normal file
157
packages/parser/src/languages/python.ts
Normal file
@@ -0,0 +1,157 @@
|
||||
import type {
|
||||
FileNode,
|
||||
FunctionNode,
|
||||
ClassNode,
|
||||
ImportNode,
|
||||
ExportNode,
|
||||
} from "@codeboard/shared";
|
||||
import type { LanguageParser } from "./base.js";
|
||||
|
||||
// `def name(params) -> ret:` headers at any indent (multiline mode).
// Groups: 1=indent, 2=name, 3=raw params, 4=return annotation.
const FUNC_RE = /^(\s*)def\s+(\w+)\s*\(([^)]*)\)(?:\s*->\s*([^:]+))?\s*:/gm;
// `class Name(Bases):` headers; the bases group is optional.
const CLASS_RE = /^(\s*)class\s+(\w+)(?:\(([^)]*)\))?\s*:/gm;
// Matches both `import x, y` and `from pkg import x, y` forms.
const IMPORT_RE = /^(?:from\s+([\w.]+)\s+)?import\s+(.+)$/gm;
// Leading triple-quoted docstring ("""…""" or '''…'''), non-greedy.
const DOCSTRING_RE = /^\s*(?:"""([\s\S]*?)"""|'''([\s\S]*?)''')/;
|
||||
|
||||
function parseParams(raw: string): string[] {
|
||||
if (!raw.trim()) return [];
|
||||
return raw
|
||||
.split(",")
|
||||
.map((p) => p.trim().split(":")[0].split("=")[0].trim())
|
||||
.filter((p) => p && p !== "self" && p !== "cls");
|
||||
}
|
||||
|
||||
/**
 * Regex-based Python parser (no real AST): extracts top-level functions,
 * classes with their methods, imports, `__all__` exports, and a crude
 * cyclomatic-complexity estimate. Heuristic by design — decorators,
 * async defs, and multi-line signatures are not fully handled.
 */
export const pythonParser: LanguageParser = {
  extensions: [".py"],

  parse(content: string, filePath: string): FileNode {
    const lines = content.split("\n");
    const functions: FunctionNode[] = [];
    const classes: ClassNode[] = [];
    const imports: ImportNode[] = [];
    const exports: ExportNode[] = [];

    let match: RegExpExecArray | null;

    // --- Functions: scan every `def`, but only record top-level ones. ---
    // Reset lastIndex: these regexes are module-level globals with /g.
    FUNC_RE.lastIndex = 0;
    while ((match = FUNC_RE.exec(content)) !== null) {
      const indent = match[1].length;
      const name = match[2];
      const params = parseParams(match[3]);
      const returnType = match[4]?.trim();
      // 1-based line of the `def` (count newlines before the match).
      const lineStart =
        content.substring(0, match.index).split("\n").length;

      // Walk forward to the first non-blank line that is NOT indented
      // deeper than the def — that line ends the body.
      // NOTE(review): the tab branch uses `indent` as a tab count, which
      // only approximates mixed-tab files — confirm acceptable.
      let lineEnd = lineStart;
      for (let i = lineStart; i < lines.length; i++) {
        const line = lines[i];
        if (
          i > lineStart &&
          line.trim() &&
          !line.startsWith(" ".repeat(indent + 1)) &&
          !line.startsWith("\t".repeat(indent === 0 ? 1 : indent))
        ) {
          lineEnd = i;
          break;
        }
        lineEnd = i + 1;
      }

      // Docstring: look at the first 5 lines after the signature.
      let docstring: string | undefined;
      if (lineStart < lines.length) {
        const bodyStart = lines.slice(lineStart, lineStart + 5).join("\n");
        const docMatch = DOCSTRING_RE.exec(bodyStart);
        if (docMatch) {
          docstring = (docMatch[1] ?? docMatch[2]).trim();
        }
      }

      // Methods/nested defs are handled by the class pass below.
      if (indent === 0) {
        functions.push({
          name,
          params,
          returnType,
          lineStart,
          lineEnd,
          docstring,
          calls: [],
        });
      }
    }

    // --- Classes and their methods. ---
    CLASS_RE.lastIndex = 0;
    while ((match = CLASS_RE.exec(content)) !== null) {
      const name = match[2];
      const methods: FunctionNode[] = [];
      const classLineStart =
        content.substring(0, match.index).split("\n").length;

      // NOTE(review): classBody runs from this class header to EOF, so
      // indented `def`s belonging to LATER classes are also matched and
      // attributed to this class — known over-approximation to verify.
      const classBody = content.substring(match.index + match[0].length);
      const methodRe = /^\s{2,}def\s+(\w+)\s*\(([^)]*)\)(?:\s*->\s*([^:]+))?\s*:/gm;
      let methodMatch: RegExpExecArray | null;
      while ((methodMatch = methodRe.exec(classBody)) !== null) {
        const methodLineStart =
          classLineStart +
          classBody.substring(0, methodMatch.index).split("\n").length;
        methods.push({
          name: methodMatch[1],
          params: parseParams(methodMatch[2]),
          returnType: methodMatch[3]?.trim(),
          lineStart: methodLineStart,
          // Method end lines are not tracked; placeholder of start + 1.
          lineEnd: methodLineStart + 1,
          calls: [],
        });
      }

      classes.push({ name, methods, properties: [] });
    }

    // --- Imports: `from pkg import a, b` or plain `import a, b`. ---
    IMPORT_RE.lastIndex = 0;
    while ((match = IMPORT_RE.exec(content)) !== null) {
      const fromModule = match[1];
      // Strip `as` aliases, keep the original imported names.
      const importedNames = match[2]
        .split(",")
        .map((s) => s.trim().split(" as ")[0].trim())
        .filter(Boolean);

      if (fromModule) {
        imports.push({ source: fromModule, specifiers: importedNames });
      } else {
        // `import a, b` — one edge per module.
        for (const name of importedNames) {
          imports.push({ source: name, specifiers: [name] });
        }
      }
    }

    // --- Exports: only an explicit single-line `__all__ = [...]`. ---
    const allRe = /^__all__\s*=\s*\[([^\]]*)\]/m;
    const allMatch = allRe.exec(content);
    if (allMatch) {
      const names = allMatch[1]
        .split(",")
        .map((s) => s.trim().replace(/['"]/g, ""))
        .filter(Boolean);
      for (const name of names) {
        exports.push({ name, isDefault: false });
      }
    }

    // --- Crude complexity: count branching keywords line by line. ---
    let complexity = 0;
    for (const line of lines) {
      const trimmed = line.trim();
      if (trimmed.startsWith("if ") || trimmed.startsWith("elif ")) complexity++;
      if (trimmed.startsWith("for ") || trimmed.startsWith("while ")) complexity++;
      if (trimmed.startsWith("except")) complexity++;
      // Boolean operators add a path; also counts "and"/"or" in strings —
      // acceptable noise for a heuristic metric.
      if (trimmed.includes(" and ") || trimmed.includes(" or ")) complexity++;
    }

    return {
      path: filePath,
      language: "python",
      size: content.length,
      functions,
      classes,
      imports,
      exports,
      complexity,
    };
  },
};
|
||||
227
packages/parser/src/languages/typescript.ts
Normal file
227
packages/parser/src/languages/typescript.ts
Normal file
@@ -0,0 +1,227 @@
|
||||
import { parse as babelParse } from "@babel/parser";
|
||||
import _traverse from "@babel/traverse";
|
||||
import type {
|
||||
FileNode,
|
||||
FunctionNode,
|
||||
ClassNode,
|
||||
ImportNode,
|
||||
ExportNode,
|
||||
} from "@codeboard/shared";
|
||||
import type { LanguageParser } from "./base.js";
|
||||
|
||||
// @babel/traverse is published as CommonJS; depending on the ESM interop in
// play, the imported value is either the traverse function itself or a
// `{ default: fn }` wrapper. Unwrap once at module load so call sites can
// invoke it uniformly.
const traverse =
  typeof _traverse === "function"
    ? _traverse
    : (_traverse as unknown as { default: typeof _traverse }).default;
|
||||
|
||||
function extractFunctionParams(
|
||||
params: Array<{ name?: string; left?: { name?: string }; type?: string }>
|
||||
): string[] {
|
||||
return params.map((p) => {
|
||||
if (p.type === "AssignmentPattern" && p.left?.name) return p.left.name;
|
||||
return p.name ?? "unknown";
|
||||
});
|
||||
}
|
||||
|
||||
/**
 * Babel-based parser for TypeScript / JavaScript (including JSX).
 * Collects named functions, arrow functions bound to variables, classes
 * with methods and typed properties, import/export declarations, call
 * names, and a cyclomatic-complexity count from a second AST pass.
 */
export const typescriptParser: LanguageParser = {
  extensions: [".ts", ".tsx", ".js", ".jsx", ".mjs", ".cjs"],

  parse(content: string, filePath: string): FileNode {
    const functions: FunctionNode[] = [];
    const classes: ClassNode[] = [];
    const imports: ImportNode[] = [];
    const exports: ExportNode[] = [];
    // File-wide set of every call name seen (see NOTE on fn.calls below).
    const calls: Set<string> = new Set();

    let ast;
    try {
      ast = babelParse(content, {
        sourceType: "module",
        plugins: [
          "typescript",
          "jsx",
          "decorators-legacy",
          "classProperties",
          "classPrivateProperties",
          "classPrivateMethods",
          "optionalChaining",
          "nullishCoalescingOperator",
          "dynamicImport",
        ],
        // Keep going past recoverable syntax errors.
        errorRecovery: true,
      });
    } catch {
      // Unparsable file: return an empty structure rather than failing.
      // NOTE(review): this fallback labels .js files "typescript" (and the
      // .py check can never be true for this parser's extensions), which is
      // inconsistent with the success path below — confirm intended.
      return {
        path: filePath,
        language: filePath.endsWith(".py") ? "python" : "typescript",
        size: content.length,
        functions: [],
        classes: [],
        imports: [],
        exports: [],
        complexity: 0,
      };
    }

    traverse(ast, {
      // Named `function foo(...)` declarations.
      FunctionDeclaration(path) {
        const node = path.node;
        if (!node.id) return;
        functions.push({
          name: node.id.name,
          params: extractFunctionParams(node.params as never[]),
          // Return annotation sliced from source text, e.g. ": Promise<T>".
          returnType: node.returnType
            ? content.slice(node.returnType.start!, node.returnType.end!)
            : undefined,
          lineStart: node.loc?.start.line ?? 0,
          lineEnd: node.loc?.end.line ?? 0,
          calls: [],
        });
      },

      // `const foo = (...) => ...` — only arrows bound directly to a
      // variable; anonymous callbacks are ignored.
      ArrowFunctionExpression(path) {
        const parent = path.parent;
        if (
          parent.type === "VariableDeclarator" &&
          parent.id.type === "Identifier"
        ) {
          const node = path.node;
          functions.push({
            name: parent.id.name,
            params: extractFunctionParams(node.params as never[]),
            returnType: node.returnType
              ? content.slice(node.returnType.start!, node.returnType.end!)
              : undefined,
            lineStart: node.loc?.start.line ?? 0,
            lineEnd: node.loc?.end.line ?? 0,
            calls: [],
          });
        }
      },

      // Classes: collect identifier-keyed methods and properties.
      // Computed/private keys are skipped by the Identifier checks.
      ClassDeclaration(path) {
        const node = path.node;
        if (!node.id) return;
        const methods: FunctionNode[] = [];
        const properties: Array<{ name: string; type?: string }> = [];

        for (const member of node.body.body) {
          if (
            member.type === "ClassMethod" &&
            member.key.type === "Identifier"
          ) {
            methods.push({
              name: member.key.name,
              params: extractFunctionParams(member.params as never[]),
              lineStart: member.loc?.start.line ?? 0,
              lineEnd: member.loc?.end.line ?? 0,
              calls: [],
            });
          } else if (
            member.type === "ClassProperty" &&
            member.key.type === "Identifier"
          ) {
            properties.push({
              name: member.key.name,
              // Type annotation sliced from source text (includes ": ").
              type: member.typeAnnotation
                ? content.slice(
                    member.typeAnnotation.start!,
                    member.typeAnnotation.end!
                  )
                : undefined,
            });
          }
        }

        classes.push({ name: node.id.name, methods, properties });
      },

      ImportDeclaration(path) {
        const node = path.node;
        // Local binding names (covers default, namespace and named forms).
        const specifiers = node.specifiers.map((s) => s.local.name);
        imports.push({ source: node.source.value, specifiers });
      },

      ExportDefaultDeclaration() {
        exports.push({ name: "default", isDefault: true });
      },

      // Named exports: inline declarations and `export { a, b }` lists.
      ExportNamedDeclaration(path) {
        const node = path.node;
        if (node.declaration) {
          if (
            node.declaration.type === "FunctionDeclaration" &&
            node.declaration.id
          ) {
            exports.push({
              name: node.declaration.id.name,
              isDefault: false,
            });
          } else if (
            node.declaration.type === "ClassDeclaration" &&
            node.declaration.id
          ) {
            exports.push({
              name: node.declaration.id.name,
              isDefault: false,
            });
          } else if (node.declaration.type === "VariableDeclaration") {
            for (const decl of node.declaration.declarations) {
              if (decl.id.type === "Identifier") {
                exports.push({ name: decl.id.name, isDefault: false });
              }
            }
          }
        }
        if (node.specifiers) {
          for (const spec of node.specifiers) {
            if (spec.exported.type === "Identifier") {
              exports.push({ name: spec.exported.name, isDefault: false });
            }
          }
        }
      },

      // Record callee names: `foo()` and `obj.method()` (method name only).
      CallExpression(path) {
        const callee = path.node.callee;
        if (callee.type === "Identifier") {
          calls.add(callee.name);
        } else if (
          callee.type === "MemberExpression" &&
          callee.property.type === "Identifier"
        ) {
          calls.add(callee.property.name);
        }
      },
    });

    // NOTE(review): every function receives the FILE-wide call set, not the
    // calls made inside its own body — a deliberate approximation? Confirm.
    for (const fn of functions) {
      fn.calls = Array.from(calls);
    }

    // Second pass: cyclomatic-complexity style count of branch points.
    let complexity = 0;
    traverse(ast, {
      IfStatement() { complexity++; },
      ForStatement() { complexity++; },
      ForInStatement() { complexity++; },
      ForOfStatement() { complexity++; },
      WhileStatement() { complexity++; },
      DoWhileStatement() { complexity++; },
      SwitchCase() { complexity++; },
      ConditionalExpression() { complexity++; },
      LogicalExpression() { complexity++; },
      CatchClause() { complexity++; },
    });

    return {
      path: filePath,
      language: filePath.match(/\.tsx?$/) ? "typescript" : "javascript",
      size: content.length,
      functions,
      classes,
      imports,
      exports,
      complexity,
    };
  },
};
|
||||
8
packages/parser/tsconfig.json
Normal file
8
packages/parser/tsconfig.json
Normal file
@@ -0,0 +1,8 @@
|
||||
{
|
||||
"extends": "../../tsconfig.json",
|
||||
"compilerOptions": {
|
||||
"outDir": "./dist",
|
||||
"rootDir": "./src"
|
||||
},
|
||||
"include": ["src"]
|
||||
}
|
||||
Reference in New Issue
Block a user