diff --git a/src/mfm/parse.ts b/src/mfm/parse.ts
index 71e3f25f0..0eb1f810f 100644
--- a/src/mfm/parse.ts
+++ b/src/mfm/parse.ts
@@ -1,4 +1,4 @@
-import parser, { plainParser } from './parser';
+import parser from './parser';
 import { MfmForest } from './types';
 import { normalize } from './normalize';
 
@@ -7,6 +7,6 @@ export default (source: string, plainText = false): MfmForest => {
 		return null;
 	}
 
-	const raw = plainText ? plainParser.root.tryParse(source) : parser.root.tryParse(source) as MfmForest;
+	const raw = plainText ? parser.plain.tryParse(source) : parser.root.tryParse(source) as MfmForest;
 	return normalize(raw);
 };
diff --git a/src/mfm/parser.ts b/src/mfm/parser.ts
index 6ff0ad396..cfa3f5262 100644
--- a/src/mfm/parser.ts
+++ b/src/mfm/parser.ts
@@ -28,29 +28,6 @@ const newline = P((input, i) => {
 	}
 });
 
-export const plainParser = P.createLanguage({
-	root: r => P.alt(
-		r.emoji,
-		r.text
-	).atLeast(1),
-
-	text: () => P.any.map(x => createLeaf('text', { text: x })),
-
-	//#region Emoji
-	emoji: r =>
-		P.alt(
-			P.regexp(/:([a-z0-9_+-]+):/i, 1)
-				.map(x => createLeaf('emoji', {
-					name: x
-				})),
-			P.regexp(emojiRegex)
-				.map(x => createLeaf('emoji', {
-					emoji: x
-				})),
-		),
-	//#endregion
-});
-
 const mfm = P.createLanguage({
 	root: r => P.alt(
 		r.big,
@@ -78,6 +55,11 @@ const mfm = P.createLanguage({
 		r.text
 	).atLeast(1),
 
+	plain: r => P.alt(
+		r.emoji,
+		r.text
+	).atLeast(1),
+
 	text: () => P.any.map(x => createLeaf('text', { text: x })),
 
 	//#region Big
diff --git a/test/mfm.ts b/test/mfm.ts
index 3497c6258..9532b7659 100644
--- a/test/mfm.ts
+++ b/test/mfm.ts
@@ -1091,6 +1091,38 @@ describe('MFM', () => {
 		});
 	});
 
+	describe('plainText', () => {
+		it('text', () => {
+			const tokens = analyze('foo', true);
+			assert.deepStrictEqual(tokens, [
+				text('foo'),
+			]);
+		});
+
+		it('emoji', () => {
+			const tokens = analyze(':foo:', true);
+			assert.deepStrictEqual(tokens, [
+				leaf('emoji', { name: 'foo' })
+			]);
+		});
+
+		it('emoji in text', () => {
+			const tokens = analyze('foo:bar:baz', true);
+			assert.deepStrictEqual(tokens, [
+				text('foo'),
+				leaf('emoji', { name: 'bar' }),
+				text('baz'),
+			]);
+		});
+
+		it('disallow other syntax', () => {
+			const tokens = analyze('foo **bar** baz', true);
+			assert.deepStrictEqual(tokens, [
+				text('foo **bar** baz'),
+			]);
+		});
+	});
+
 	describe('toHtml', () => {
 		it('br', () => {
 			const input = 'foo\nbar\nbaz';
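
For context, a minimal usage sketch of the consolidated grammar after this change. The `analyze` name mirrors the alias the tests use for the default export of src/mfm/parse.ts; the import path is an assumption, not part of the patch.

// Hypothetical usage sketch (assumed import path).
import analyze from '../src/mfm/parse';

// plainText omitted or false: the full MFM grammar (parser.root) applies.
analyze('foo **bar** :baz:');

// plainText = true: parser.plain only matches emoji and text, so any other
// MFM syntax stays literal. Per the new tests:
//   analyze(':foo:', true)           -> [leaf('emoji', { name: 'foo' })]
//   analyze('foo **bar** baz', true) -> [text('foo **bar** baz')]
analyze('foo **bar** baz', true);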