// parse_ts.js
const fs = require('fs');
const ts = require('typescript');
const { tsquery } = require('@phenomnomnominal/tsquery');

// Read the entire TypeScript source from stdin before parsing.
let data = '';
process.stdin.on('data', function (chunk) {
  data += chunk;
});

process.stdin.on('end', function () {
  const sourceFile = ts.createSourceFile(
    'temp.ts',
    data,
    ts.ScriptTarget.ES2015,
    /* setParentNodes */ true
  );

  // Define the node types that constitute a 'semantic chunk'.
  const semanticChunkNodeTypes = [
    'FunctionDeclaration',
    'ArrowFunction',
    'ClassDeclaration',
    'InterfaceDeclaration',
    'EnumDeclaration',
    'TypeAliasDeclaration',
    'MethodDeclaration',
  ];

  // Collect every node matching one of the chunk selectors.
  const semanticChunks = semanticChunkNodeTypes.flatMap(nodeType =>
    tsquery(sourceFile, nodeType)
  );

  const fullText = sourceFile.getFullText();

  const jsonl = semanticChunks.map(chunk => {
    // Grab any comments that sit immediately before the node.
    const comments = ts.getLeadingCommentRanges(fullText, chunk.getFullStart()) || [];
    const commentTexts = comments
      .map(comment => fullText.slice(comment.pos, comment.end))
      .join('\n');

    // Prepend the leading comments to the chunk's content (skip the extra
    // newline when there are no leading comments).
    const contentWithComments = commentTexts
      ? commentTexts + '\n' + chunk.getText(sourceFile)
      : chunk.getText(sourceFile);

    return JSON.stringify({
      type: ts.SyntaxKind[chunk.kind],
      content: contentWithComments,
    });
  }).join('\n');

  fs.writeFileSync('semantic_chunks.jsonl', jsonl);
});
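
// Example usage (a sketch, assuming Node.js with the 'typescript' and
// '@phenomnomnominal/tsquery' packages installed; file names are illustrative):
//
//   npm install typescript @phenomnomnominal/tsquery
//   cat src/example.ts | node parse_ts.js
//
// Each line of the resulting semantic_chunks.jsonl is a JSON object of the form
//   {"type":"<SyntaxKind name>","content":"<leading comments + node text>"}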