index.js 1.6 KB

import { Markers } from '../primitives.js';
import { splitLines } from '../util.js';
import blockParser from './block-parser.js';
import sourceParser from './source-parser.js';
import specParser from './spec-parser.js';
import tokenizeTag from './tokenizers/tag.js';
import tokenizeType from './tokenizers/type.js';
import tokenizeName from './tokenizers/name.js';
import tokenizeDescription, { getJoiner as getDescriptionJoiner } from './tokenizers/description.js';

// Factory that builds a configured comment parser. The default tokenizer chain
// extracts the tag, type, name, and description from each tag line.
export default function getParser({
  startLine = 0,
  fence = '```',
  spacing = 'compact',
  markers = Markers,
  tokenizers = [
    tokenizeTag(),
    tokenizeType(spacing),
    tokenizeName(),
    tokenizeDescription(spacing),
  ],
} = {}) {
  if (startLine < 0 || startLine % 1 > 0)
    throw new Error('Invalid startLine');
  const parseSource = sourceParser({ startLine, markers });
  const parseBlock = blockParser({ fence });
  const parseSpec = specParser({ tokenizers });
  const joinDescription = getDescriptionJoiner(spacing);
  return function (source) {
    const blocks = [];
    for (const line of splitLines(source)) {
      // The source parser accumulates lines and returns a comment block's lines
      // once the block is complete; until then it returns null.
      const lines = parseSource(line);
      if (lines === null)
        continue;
      // The block parser splits the lines into a description section followed
      // by one section per tag; each tag section is run through the tokenizers.
      const sections = parseBlock(lines);
      const specs = sections.slice(1).map(parseSpec);
      blocks.push({
        description: joinDescription(sections[0], markers),
        tags: specs,
        source: lines,
        problems: specs.reduce((acc, spec) => acc.concat(spec.problems), []),
      });
    }
    return blocks;
  };
}
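
For context, a minimal usage sketch. The factory returns a parse function that takes a source string and yields the block objects built above ({ description, tags, source, problems }). The import path, the sample comment, and the printed values are illustrative, not part of this module.

// Minimal usage sketch (assumed path and sample input):
import getParser from './index.js';

const parse = getParser({ spacing: 'compact' });

const blocks = parse(`
/**
 * Adds two numbers.
 * @param {number} a - first operand
 * @param {number} b - second operand
 */
`);

// Each block carries the joined description and one spec per tag.
console.log(blocks[0].description);              // e.g. "Adds two numbers."
console.log(blocks[0].tags.map((t) => t.tag));   // e.g. ["param", "param"]
console.log(blocks[0].problems);                 // tokenizer problems, if any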