// content.js

/**
 * @typedef {import('micromark-util-types').Construct} Construct
 * @typedef {import('micromark-util-types').Resolver} Resolver
 * @typedef {import('micromark-util-types').State} State
 * @typedef {import('micromark-util-types').Token} Token
 * @typedef {import('micromark-util-types').TokenizeContext} TokenizeContext
 * @typedef {import('micromark-util-types').Tokenizer} Tokenizer
 */

import {factorySpace} from 'micromark-factory-space'
import {markdownLineEnding} from 'micromark-util-character'
import {subtokenize} from 'micromark-util-subtokenize'
import {codes, constants, types} from 'micromark-util-symbol'
import {ok as assert} from 'devlop'

/**
 * No name because it must not be turned off.
 *
 * @type {Construct}
 */
export const content = {tokenize: tokenizeContent, resolve: resolveContent}
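
// A partial construct: it is never matched from a construct list, only used
// through `effects.check` below to peek whether the content continues on the
// next line.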
/** @type {Construct} */
const continuationConstruct = {tokenize: tokenizeContinuation, partial: true}

/**
 * Content is transparent: it’s parsed right now. That way, definitions are also
 * parsed right now: before text in paragraphs (specifically, media) are parsed.
 *
 * @type {Resolver}
 */
function resolveContent(events) {
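  // `chunkContent` tokens carry a `contentType`; roughly speaking, `subtokenize`
  // replaces each of them with the events from tokenizing its text, so the
  // definitions in this content are available before paragraph text is parsed.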
  subtokenize(events)
  return events
}

/**
 * @this {TokenizeContext}
 * @type {Tokenizer}
 */
function tokenizeContent(effects, ok) {
  /** @type {Token | undefined} */
  let previous

  return chunkStart

  /**
   * Before a content chunk.
   *
   * ```markdown
   * > | abc
   *     ^
   * ```
   *
   * @type {State}
   */
  function chunkStart(code) {
    assert(
      code !== codes.eof && !markdownLineEnding(code),
      'expected no eof or eol'
    )
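    // Open one `content` token for the whole block and the first `chunkContent`
    // chunk; the `contentType` marks the chunk for the resolver to subtokenize.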
    effects.enter(types.content)
    previous = effects.enter(types.chunkContent, {
      contentType: constants.contentTypeContent
    })
    return chunkInside(code)
  }

  /**
   * In a content chunk.
   *
   * ```markdown
   * > | abc
   *     ^^^
   * ```
   *
   * @type {State}
   */
  function chunkInside(code) {
    if (code === codes.eof) {
      return contentEnd(code)
    }

    // To do: in `markdown-rs`, each line is parsed on its own, and everything
    // is stitched together when resolving.
    if (markdownLineEnding(code)) {
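      // `check` tokenizes the continuation construct and then rewinds; nothing
      // is consumed here, we only choose the next state: `contentContinue` if
      // the next line continues the content, `contentEnd` if it does not.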
      return effects.check(
        continuationConstruct,
        contentContinue,
        contentEnd
      )(code)
    }

    // Data.
    effects.consume(code)
    return chunkInside
  }

  /**
   * At the end of the content: at eof, or at a line ending that does not
   * continue the content.
   *
   * @type {State}
   */
  function contentEnd(code) {
    effects.exit(types.chunkContent)
    effects.exit(types.content)
    return ok(code)
  }

  /**
   * At a line ending that is followed by more content.
   *
   * @type {State}
   */
  function contentContinue(code) {
    assert(markdownLineEnding(code), 'expected eol')
    effects.consume(code)
    effects.exit(types.chunkContent)
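    // Start the next chunk and link it to the previous one; the chunks form a
    // linked list that the resolver (`subtokenize`) can walk as one stream.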
    assert(previous, 'expected previous token')
    previous.next = effects.enter(types.chunkContent, {
      contentType: constants.contentTypeContent,
      previous
    })
    previous = previous.next
    return chunkInside
  }
}

/**
 * @this {TokenizeContext}
 * @type {Tokenizer}
 */
function tokenizeContinuation(effects, ok, nok) {
  const self = this

  return startLookahead

  /**
   * At the line ending, in the lookahead that checks whether the content
   * continues on the next line.
   *
   * @type {State}
   */
  function startLookahead(code) {
    assert(markdownLineEnding(code), 'expected a line ending')
    effects.exit(types.chunkContent)
    effects.enter(types.lineEnding)
    effects.consume(code)
    effects.exit(types.lineEnding)
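    // `factorySpace` eats the optional indentation (spaces/tabs) of the next
    // line as a `linePrefix` token, then continues in `prefixed`.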
    return factorySpace(effects, prefixed, types.linePrefix)
  }

  /**
   * After the optional line prefix (indentation) of the next line.
   *
   * @type {State}
   */
  function prefixed(code) {
    if (code === codes.eof || markdownLineEnding(code)) {
      return nok(code)
    }

    // Always populated by defaults.
    assert(
      self.parser.constructs.disable.null,
      'expected `disable.null` to be populated'
    )

    const tail = self.events[self.events.length - 1]
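
    // A line indented by at least a tab size (4 spaces) cannot start another
    // flow construct (it would be indented code), so it continues the content,
    // unless `codeIndented` is disabled.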
    if (
      !self.parser.constructs.disable.null.includes('codeIndented') &&
      tail &&
      tail[1].type === types.linePrefix &&
      tail[2].sliceSerialize(tail[1], true).length >= constants.tabSize
    ) {
      return ok(code)
    }
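
    // Otherwise, see whether any other flow construct can interrupt here: if
    // one can, the content does not continue (`nok`); if none can, this line is
    // a plain continuation (`ok`).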
    return effects.interrupt(self.parser.constructs.flow, nok, ok)(code)
  }
}