Datasets:
// parse_ts.js
const fs = require('fs');
const ts = require('typescript');
const { tsquery } = require('@phenomnomnominal/tsquery');

// Read the TypeScript source to chunk from stdin
let data = '';
process.stdin.on('data', function (chunk) {
  data += chunk;
});

process.stdin.on('end', function () {
  const sourceFile = ts.createSourceFile('temp.ts', data.toString(), ts.ScriptTarget.ES2015, true);

  // Define the node types that constitute a 'semantic chunk'
  const semanticChunkNodeTypes = [
    'FunctionDeclaration',
    'ArrowFunction',
    'ClassDeclaration',
    'InterfaceDeclaration',
    'EnumDeclaration',
    'TypeAliasDeclaration',
    'MethodDeclaration',
  ];

  // Collect every AST node matching one of the chunk node types
  const semanticChunks = semanticChunkNodeTypes.flatMap(nodeType =>
    tsquery(sourceFile, nodeType)
  );

  // Serialize each chunk, together with its leading comments, as one JSON line
  const jsonl = semanticChunks.map(chunk => {
    const comments = ts.getLeadingCommentRanges(sourceFile.getFullText(), chunk.getFullStart()) || [];
    const commentTexts = comments.map(comment => sourceFile.getFullText().slice(comment.pos, comment.end)).join('\n');
    // Append the leading comments to the front of the chunk's content
    const contentWithComments = commentTexts + '\n' + chunk.getText(sourceFile);
    return JSON.stringify({
      type: ts.SyntaxKind[chunk.kind],
      content: contentWithComments
    });
  }).join('\n');

  fs.writeFileSync('semantic_chunks.jsonl', jsonl);
});
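
To try the script, pipe a TypeScript file into it and inspect the resulting JSONL; a rough usage sketch is shown below (the input path src/example.ts and the sample function are hypothetical, the output format follows from the script above):

node parse_ts.js < src/example.ts
cat semantic_chunks.jsonl
{"type":"FunctionDeclaration","content":"// Adds two numbers\nfunction add(a: number, b: number): number {\n  return a + b;\n}"}

Each line is one semantic chunk, with any leading comments prepended to the declaration text so they stay attached to the code they document.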