From 92033e57c80656df104e1d1de50de311f6526c82 Mon Sep 17 00:00:00 2001
From: Tony Brix
Date: Fri, 13 Oct 2023 14:52:19 -0500
Subject: [PATCH] fix: trim newline from blockquote token.text (#3037)

---
 src/Tokenizer.ts        |  2 +-
 test/unit/Lexer-spec.js | 21 +++++++++++++++++++++
 2 files changed, 22 insertions(+), 1 deletion(-)

diff --git a/src/Tokenizer.ts b/src/Tokenizer.ts
index 66fdccb82f..34c8e7510b 100644
--- a/src/Tokenizer.ts
+++ b/src/Tokenizer.ts
@@ -156,7 +156,7 @@ export class _Tokenizer {
   blockquote(src: string): Tokens.Blockquote | undefined {
     const cap = this.rules.block.blockquote.exec(src);
     if (cap) {
-      const text = cap[0].replace(/^ *>[ \t]?/gm, '');
+      const text = rtrim(cap[0].replace(/^ *>[ \t]?/gm, ''), '\n');
       const top = this.lexer.state.top;
       this.lexer.state.top = true;
       const tokens = this.lexer.blockTokens(text);
diff --git a/test/unit/Lexer-spec.js b/test/unit/Lexer-spec.js
index a0d4c16d10..4d26da4629 100644
--- a/test/unit/Lexer-spec.js
+++ b/test/unit/Lexer-spec.js
@@ -382,6 +382,27 @@ a | b
       });
     });
 
+    it('trim newline in text', () => {
+      expectTokens({
+        md: '> blockquote\n',
+        tokens: [
+          {
+            type: 'blockquote',
+            raw: '> blockquote\n',
+            text: 'blockquote',
+            tokens: [{
+              type: 'paragraph',
+              raw: 'blockquote',
+              text: 'blockquote',
+              tokens: [
+                { type: 'text', raw: 'blockquote', text: 'blockquote' }
+              ]
+            }]
+          }
+        ]
+      });
+    });
+
     it('paragraph token in list', () => {
       expectTokens({
         md: '- > blockquote',
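
A minimal sketch of the observable change, assuming marked's public Lexer.lex
API (this snippet is illustrative and not part of the patch):

    import { Lexer } from 'marked';

    // With this fix, the trailing newline from the source markdown is
    // trimmed off the blockquote token's text before its child tokens
    // are lexed.
    const [blockquote] = Lexer.lex('> blockquote\n');
    console.log(blockquote.text); // 'blockquote' (previously 'blockquote\n')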