Rename analyze to parse in MFM tests
commit c12ccb2a15
parent e3b1d00e4c
1 changed file with 105 additions and 105 deletions
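The change is purely mechanical: the default export of ../src/mfm/parse, which the tests previously bound to the local name analyze, is now imported as parse, and every call site in test/mfm.ts is updated to match. A minimal sketch of how a call site reads after the rename (illustrative only, not part of the diff; it assumes the module's default and named exports are themselves unchanged):

```ts
import * as assert from 'assert';

// Only the local binding is renamed; the named export parsePlain is imported exactly as before.
import parse, { parsePlain } from '../src/mfm/parse';

// Call sites change in name only: parse(...) where the tests previously wrote analyze(...).
const tokens = parse('**foo**');
assert.strictEqual(Array.isArray(tokens), true);
```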
--- a/test/mfm.ts
+++ b/test/mfm.ts
@@ -10,7 +10,7 @@
 
 import * as assert from 'assert';
 
-import analyze, { parsePlain } from '../src/mfm/parse';
+import parse, { parsePlain } from '../src/mfm/parse';
 import toHtml from '../src/mfm/toHtml';
 import { createTree as tree, createLeaf as leaf, MfmTree } from '../src/mfm/types';
 import { removeOrphanedBrackets } from '../src/mfm/parser';
@@ -151,7 +151,7 @@ describe('removeOrphanedBrackets', () => {
 
 describe('MFM', () => {
 it('can be analyzed', () => {
-const tokens = analyze('@himawari @hima_sub@namori.net お腹ペコい :cat: #yryr');
+const tokens = parse('@himawari @hima_sub@namori.net お腹ペコい :cat: #yryr');
 assert.deepStrictEqual(tokens, [
 leaf('mention', {
 acct: '@himawari',
@@ -176,7 +176,7 @@ describe('MFM', () => {
 describe('elements', () => {
 describe('bold', () => {
 it('simple', () => {
-const tokens = analyze('**foo**');
+const tokens = parse('**foo**');
 assert.deepStrictEqual(tokens, [
 tree('bold', [
 text('foo')
@@ -185,7 +185,7 @@ describe('MFM', () => {
 });
 
 it('with other texts', () => {
-const tokens = analyze('bar**foo**bar');
+const tokens = parse('bar**foo**bar');
 assert.deepStrictEqual(tokens, [
 text('bar'),
 tree('bold', [
@@ -196,7 +196,7 @@ describe('MFM', () => {
 });
 
 it('with underscores', () => {
-const tokens = analyze('__foo__');
+const tokens = parse('__foo__');
 assert.deepStrictEqual(tokens, [
 tree('bold', [
 text('foo')
@@ -205,21 +205,21 @@ describe('MFM', () => {
 });
 
 it('with underscores (ensure it allows alphabet only)', () => {
-const tokens = analyze('(=^・__________・^=)');
+const tokens = parse('(=^・__________・^=)');
 assert.deepStrictEqual(tokens, [
 text('(=^・__________・^=)')
 ]);
 });
 
 it('mixed syntax', () => {
-const tokens = analyze('**foo__');
+const tokens = parse('**foo__');
 assert.deepStrictEqual(tokens, [
 text('**foo__'),
 ]);
 });
 
 it('mixed syntax', () => {
-const tokens = analyze('__foo**');
+const tokens = parse('__foo**');
 assert.deepStrictEqual(tokens, [
 text('__foo**'),
 ]);
@@ -227,7 +227,7 @@ describe('MFM', () => {
 });
 
 it('big', () => {
-const tokens = analyze('***Strawberry*** Pasta');
+const tokens = parse('***Strawberry*** Pasta');
 assert.deepStrictEqual(tokens, [
 tree('big', [
 text('Strawberry')
@@ -237,7 +237,7 @@ describe('MFM', () => {
 });
 
 it('small', () => {
-const tokens = analyze('<small>smaller</small>');
+const tokens = parse('<small>smaller</small>');
 assert.deepStrictEqual(tokens, [
 tree('small', [
 text('smaller')
@@ -246,7 +246,7 @@ describe('MFM', () => {
 });
 
 it('flip', () => {
-const tokens = analyze('<flip>foo</flip>');
+const tokens = parse('<flip>foo</flip>');
 assert.deepStrictEqual(tokens, [
 tree('flip', [
 text('foo')
@@ -256,7 +256,7 @@ describe('MFM', () => {
 
 describe('spin', () => {
 it('simple', () => {
-const tokens = analyze('<spin>:foo:</spin>');
+const tokens = parse('<spin>:foo:</spin>');
 assert.deepStrictEqual(tokens, [
 tree('spin', [
 leaf('emoji', { name: 'foo' })
@@ -267,7 +267,7 @@ describe('MFM', () => {
 });
 
 it('with attr', () => {
-const tokens = analyze('<spin left>:foo:</spin>');
+const tokens = parse('<spin left>:foo:</spin>');
 assert.deepStrictEqual(tokens, [
 tree('spin', [
 leaf('emoji', { name: 'foo' })
@@ -279,7 +279,7 @@ describe('MFM', () => {
 });
 
 it('jump', () => {
-const tokens = analyze('<jump>:foo:</jump>');
+const tokens = parse('<jump>:foo:</jump>');
 assert.deepStrictEqual(tokens, [
 tree('jump', [
 leaf('emoji', { name: 'foo' })
@@ -289,7 +289,7 @@ describe('MFM', () => {
 
 describe('motion', () => {
 it('by triple brackets', () => {
-const tokens = analyze('(((foo)))');
+const tokens = parse('(((foo)))');
 assert.deepStrictEqual(tokens, [
 tree('motion', [
 text('foo')
@@ -298,7 +298,7 @@ describe('MFM', () => {
 });
 
 it('by triple brackets (with other texts)', () => {
-const tokens = analyze('bar(((foo)))bar');
+const tokens = parse('bar(((foo)))bar');
 assert.deepStrictEqual(tokens, [
 text('bar'),
 tree('motion', [
@@ -309,7 +309,7 @@ describe('MFM', () => {
 });
 
 it('by <motion> tag', () => {
-const tokens = analyze('<motion>foo</motion>');
+const tokens = parse('<motion>foo</motion>');
 assert.deepStrictEqual(tokens, [
 tree('motion', [
 text('foo')
@@ -318,7 +318,7 @@ describe('MFM', () => {
 });
 
 it('by <motion> tag (with other texts)', () => {
-const tokens = analyze('bar<motion>foo</motion>bar');
+const tokens = parse('bar<motion>foo</motion>bar');
 assert.deepStrictEqual(tokens, [
 text('bar'),
 tree('motion', [
@@ -331,7 +331,7 @@ describe('MFM', () => {
 
 describe('mention', () => {
 it('local', () => {
-const tokens = analyze('@himawari foo');
+const tokens = parse('@himawari foo');
 assert.deepStrictEqual(tokens, [
 leaf('mention', {
 acct: '@himawari',
@@ -344,7 +344,7 @@ describe('MFM', () => {
 });
 
 it('remote', () => {
-const tokens = analyze('@hima_sub@namori.net foo');
+const tokens = parse('@hima_sub@namori.net foo');
 assert.deepStrictEqual(tokens, [
 leaf('mention', {
 acct: '@hima_sub@namori.net',
@@ -357,7 +357,7 @@ describe('MFM', () => {
 });
 
 it('remote punycode', () => {
-const tokens = analyze('@hima_sub@xn--q9j5bya.xn--zckzah foo');
+const tokens = parse('@hima_sub@xn--q9j5bya.xn--zckzah foo');
 assert.deepStrictEqual(tokens, [
 leaf('mention', {
 acct: '@hima_sub@xn--q9j5bya.xn--zckzah',
@@ -370,12 +370,12 @@ describe('MFM', () => {
 });
 
 it('ignore', () => {
-const tokens = analyze('idolm@ster');
+const tokens = parse('idolm@ster');
 assert.deepStrictEqual(tokens, [
 text('idolm@ster')
 ]);
 
-const tokens2 = analyze('@a\n@b\n@c');
+const tokens2 = parse('@a\n@b\n@c');
 assert.deepStrictEqual(tokens2, [
 leaf('mention', {
 acct: '@a',
@@ -399,7 +399,7 @@ describe('MFM', () => {
 })
 ]);
 
-const tokens3 = analyze('**x**@a');
+const tokens3 = parse('**x**@a');
 assert.deepStrictEqual(tokens3, [
 tree('bold', [
 text('x')
@@ -412,7 +412,7 @@ describe('MFM', () => {
 })
 ]);
 
-const tokens4 = analyze('@\n@v\n@veryverylongusername');
+const tokens4 = parse('@\n@v\n@veryverylongusername');
 assert.deepStrictEqual(tokens4, [
 text('@\n'),
 leaf('mention', {
@@ -434,14 +434,14 @@ describe('MFM', () => {
 
 describe('hashtag', () => {
 it('simple', () => {
-const tokens = analyze('#alice');
+const tokens = parse('#alice');
 assert.deepStrictEqual(tokens, [
 leaf('hashtag', { hashtag: 'alice' })
 ]);
 });
 
 it('after line break', () => {
-const tokens = analyze('foo\n#alice');
+const tokens = parse('foo\n#alice');
 assert.deepStrictEqual(tokens, [
 text('foo\n'),
 leaf('hashtag', { hashtag: 'alice' })
@@ -449,7 +449,7 @@ describe('MFM', () => {
 });
 
 it('with text', () => {
-const tokens = analyze('Strawberry Pasta #alice');
+const tokens = parse('Strawberry Pasta #alice');
 assert.deepStrictEqual(tokens, [
 text('Strawberry Pasta '),
 leaf('hashtag', { hashtag: 'alice' })
@@ -457,7 +457,7 @@ describe('MFM', () => {
 });
 
 it('with text (zenkaku)', () => {
-const tokens = analyze('こんにちは#世界');
+const tokens = parse('こんにちは#世界');
 assert.deepStrictEqual(tokens, [
 text('こんにちは'),
 leaf('hashtag', { hashtag: '世界' })
@@ -465,7 +465,7 @@ describe('MFM', () => {
 });
 
 it('ignore comma and period', () => {
-const tokens = analyze('Foo #bar, baz #piyo.');
+const tokens = parse('Foo #bar, baz #piyo.');
 assert.deepStrictEqual(tokens, [
 text('Foo '),
 leaf('hashtag', { hashtag: 'bar' }),
@@ -476,7 +476,7 @@ describe('MFM', () => {
 });
 
 it('ignore exclamation mark', () => {
-const tokens = analyze('#Foo!');
+const tokens = parse('#Foo!');
 assert.deepStrictEqual(tokens, [
 leaf('hashtag', { hashtag: 'Foo' }),
 text('!'),
@@ -484,7 +484,7 @@ describe('MFM', () => {
 });
 
 it('ignore colon', () => {
-const tokens = analyze('#Foo:');
+const tokens = parse('#Foo:');
 assert.deepStrictEqual(tokens, [
 leaf('hashtag', { hashtag: 'Foo' }),
 text(':'),
@@ -492,7 +492,7 @@ describe('MFM', () => {
 });
 
 it('ignore single quote', () => {
-const tokens = analyze('#foo\'');
+const tokens = parse('#foo\'');
 assert.deepStrictEqual(tokens, [
 leaf('hashtag', { hashtag: 'foo' }),
 text('\''),
@@ -500,7 +500,7 @@ describe('MFM', () => {
 });
 
 it('ignore double quote', () => {
-const tokens = analyze('#foo"');
+const tokens = parse('#foo"');
 assert.deepStrictEqual(tokens, [
 leaf('hashtag', { hashtag: 'foo' }),
 text('"'),
@@ -508,21 +508,21 @@ describe('MFM', () => {
 });
 
 it('allow including number', () => {
-const tokens = analyze('#foo123');
+const tokens = parse('#foo123');
 assert.deepStrictEqual(tokens, [
 leaf('hashtag', { hashtag: 'foo123' }),
 ]);
 });
 
 it('with brackets', () => {
-const tokens1 = analyze('(#foo)');
+const tokens1 = parse('(#foo)');
 assert.deepStrictEqual(tokens1, [
 text('('),
 leaf('hashtag', { hashtag: 'foo' }),
 text(')'),
 ]);
 
-const tokens2 = analyze('「#foo」');
+const tokens2 = parse('「#foo」');
 assert.deepStrictEqual(tokens2, [
 text('「'),
 leaf('hashtag', { hashtag: 'foo' }),
@@ -531,7 +531,7 @@ describe('MFM', () => {
 });
 
 it('with mixed brackets', () => {
-const tokens = analyze('「#foo(bar)」');
+const tokens = parse('「#foo(bar)」');
 assert.deepStrictEqual(tokens, [
 text('「'),
 leaf('hashtag', { hashtag: 'foo(bar)' }),
@@ -540,14 +540,14 @@ describe('MFM', () => {
 });
 
 it('with brackets (space before)', () => {
-const tokens1 = analyze('(bar #foo)');
+const tokens1 = parse('(bar #foo)');
 assert.deepStrictEqual(tokens1, [
 text('(bar '),
 leaf('hashtag', { hashtag: 'foo' }),
 text(')'),
 ]);
 
-const tokens2 = analyze('「bar #foo」');
+const tokens2 = parse('「bar #foo」');
 assert.deepStrictEqual(tokens2, [
 text('「bar '),
 leaf('hashtag', { hashtag: 'foo' }),
@@ -556,14 +556,14 @@ describe('MFM', () => {
 });
 
 it('disallow number only', () => {
-const tokens = analyze('#123');
+const tokens = parse('#123');
 assert.deepStrictEqual(tokens, [
 text('#123'),
 ]);
 });
 
 it('disallow number only (with brackets)', () => {
-const tokens = analyze('(#123)');
+const tokens = parse('(#123)');
 assert.deepStrictEqual(tokens, [
 text('(#123)'),
 ]);
@@ -572,14 +572,14 @@ describe('MFM', () => {
 
 describe('quote', () => {
 it('basic', () => {
-const tokens1 = analyze('> foo');
+const tokens1 = parse('> foo');
 assert.deepStrictEqual(tokens1, [
 tree('quote', [
 text('foo')
 ], {})
 ]);
 
-const tokens2 = analyze('>foo');
+const tokens2 = parse('>foo');
 assert.deepStrictEqual(tokens2, [
 tree('quote', [
 text('foo')
@@ -588,7 +588,7 @@ describe('MFM', () => {
 });
 
 it('series', () => {
-const tokens = analyze('> foo\n\n> bar');
+const tokens = parse('> foo\n\n> bar');
 assert.deepStrictEqual(tokens, [
 tree('quote', [
 text('foo')
@@ -601,14 +601,14 @@ describe('MFM', () => {
 });
 
 it('trailing line break', () => {
-const tokens1 = analyze('> foo\n');
+const tokens1 = parse('> foo\n');
 assert.deepStrictEqual(tokens1, [
 tree('quote', [
 text('foo')
 ], {}),
 ]);
 
-const tokens2 = analyze('> foo\n\n');
+const tokens2 = parse('> foo\n\n');
 assert.deepStrictEqual(tokens2, [
 tree('quote', [
 text('foo')
@@ -618,14 +618,14 @@ describe('MFM', () => {
 });
 
 it('multiline', () => {
-const tokens1 = analyze('>foo\n>bar');
+const tokens1 = parse('>foo\n>bar');
 assert.deepStrictEqual(tokens1, [
 tree('quote', [
 text('foo\nbar')
 ], {})
 ]);
 
-const tokens2 = analyze('> foo\n> bar');
+const tokens2 = parse('> foo\n> bar');
 assert.deepStrictEqual(tokens2, [
 tree('quote', [
 text('foo\nbar')
@@ -634,14 +634,14 @@ describe('MFM', () => {
 });
 
 it('multiline with trailing line break', () => {
-const tokens1 = analyze('> foo\n> bar\n');
+const tokens1 = parse('> foo\n> bar\n');
 assert.deepStrictEqual(tokens1, [
 tree('quote', [
 text('foo\nbar')
 ], {}),
 ]);
 
-const tokens2 = analyze('> foo\n> bar\n\n');
+const tokens2 = parse('> foo\n> bar\n\n');
 assert.deepStrictEqual(tokens2, [
 tree('quote', [
 text('foo\nbar')
@@ -651,7 +651,7 @@ describe('MFM', () => {
 });
 
 it('with before and after texts', () => {
-const tokens = analyze('before\n> foo\nafter');
+const tokens = parse('before\n> foo\nafter');
 assert.deepStrictEqual(tokens, [
 text('before\n'),
 tree('quote', [
@@ -662,7 +662,7 @@ describe('MFM', () => {
 });
 
 it('multiple quotes', () => {
-const tokens = analyze('> foo\nbar\n\n> foo\nbar\n\n> foo\nbar');
+const tokens = parse('> foo\nbar\n\n> foo\nbar\n\n> foo\nbar');
 assert.deepStrictEqual(tokens, [
 tree('quote', [
 text('foo')
@@ -680,14 +680,14 @@ describe('MFM', () => {
 });
 
 it('require line break before ">"', () => {
-const tokens = analyze('foo>bar');
+const tokens = parse('foo>bar');
 assert.deepStrictEqual(tokens, [
 text('foo>bar'),
 ]);
 });
 
 it('nested', () => {
-const tokens = analyze('>> foo\n> bar');
+const tokens = parse('>> foo\n> bar');
 assert.deepStrictEqual(tokens, [
 tree('quote', [
 tree('quote', [
@@ -699,7 +699,7 @@ describe('MFM', () => {
 });
 
 it('trim line breaks', () => {
-const tokens = analyze('foo\n\n>a\n>>b\n>>\n>>>\n>>>c\n>>>\n>d\n\n');
+const tokens = parse('foo\n\n>a\n>>b\n>>\n>>>\n>>>c\n>>>\n>d\n\n');
 assert.deepStrictEqual(tokens, [
 text('foo\n\n'),
 tree('quote', [
@@ -719,14 +719,14 @@ describe('MFM', () => {
 
 describe('url', () => {
 it('simple', () => {
-const tokens = analyze('https://example.com');
+const tokens = parse('https://example.com');
 assert.deepStrictEqual(tokens, [
 leaf('url', { url: 'https://example.com' })
 ]);
 });
 
 it('ignore trailing period', () => {
-const tokens = analyze('https://example.com.');
+const tokens = parse('https://example.com.');
 assert.deepStrictEqual(tokens, [
 leaf('url', { url: 'https://example.com' }),
 text('.')
@@ -734,14 +734,14 @@ describe('MFM', () => {
 });
 
 it('with comma', () => {
-const tokens = analyze('https://example.com/foo?bar=a,b');
+const tokens = parse('https://example.com/foo?bar=a,b');
 assert.deepStrictEqual(tokens, [
 leaf('url', { url: 'https://example.com/foo?bar=a,b' })
 ]);
 });
 
 it('ignore trailing comma', () => {
-const tokens = analyze('https://example.com/foo, bar');
+const tokens = parse('https://example.com/foo, bar');
 assert.deepStrictEqual(tokens, [
 leaf('url', { url: 'https://example.com/foo' }),
 text(', bar')
@@ -749,14 +749,14 @@ describe('MFM', () => {
 });
 
 it('with brackets', () => {
-const tokens = analyze('https://example.com/foo(bar)');
+const tokens = parse('https://example.com/foo(bar)');
 assert.deepStrictEqual(tokens, [
 leaf('url', { url: 'https://example.com/foo(bar)' })
 ]);
 });
 
 it('ignore parent brackets', () => {
-const tokens = analyze('(https://example.com/foo)');
+const tokens = parse('(https://example.com/foo)');
 assert.deepStrictEqual(tokens, [
 text('('),
 leaf('url', { url: 'https://example.com/foo' }),
@@ -765,7 +765,7 @@ describe('MFM', () => {
 });
 
 it('ignore parent brackets 2', () => {
-const tokens = analyze('(foo https://example.com/foo)');
+const tokens = parse('(foo https://example.com/foo)');
 assert.deepStrictEqual(tokens, [
 text('(foo '),
 leaf('url', { url: 'https://example.com/foo' }),
@@ -774,7 +774,7 @@ describe('MFM', () => {
 });
 
 it('ignore parent brackets with internal brackets', () => {
-const tokens = analyze('(https://example.com/foo(bar))');
+const tokens = parse('(https://example.com/foo(bar))');
 assert.deepStrictEqual(tokens, [
 text('('),
 leaf('url', { url: 'https://example.com/foo(bar)' }),
@@ -785,7 +785,7 @@ describe('MFM', () => {
 
 describe('link', () => {
 it('simple', () => {
-const tokens = analyze('[foo](https://example.com)');
+const tokens = parse('[foo](https://example.com)');
 assert.deepStrictEqual(tokens, [
 tree('link', [
 text('foo')
@@ -794,7 +794,7 @@ describe('MFM', () => {
 });
 
 it('simple (with silent flag)', () => {
-const tokens = analyze('?[foo](https://example.com)');
+const tokens = parse('?[foo](https://example.com)');
 assert.deepStrictEqual(tokens, [
 tree('link', [
 text('foo')
@@ -803,7 +803,7 @@ describe('MFM', () => {
 });
 
 it('in text', () => {
-const tokens = analyze('before[foo](https://example.com)after');
+const tokens = parse('before[foo](https://example.com)after');
 assert.deepStrictEqual(tokens, [
 text('before'),
 tree('link', [
@@ -814,7 +814,7 @@ describe('MFM', () => {
 });
 
 it('with brackets', () => {
-const tokens = analyze('[foo](https://example.com/foo(bar))');
+const tokens = parse('[foo](https://example.com/foo(bar))');
 assert.deepStrictEqual(tokens, [
 tree('link', [
 text('foo')
@@ -823,7 +823,7 @@ describe('MFM', () => {
 });
 
 it('with parent brackets', () => {
-const tokens = analyze('([foo](https://example.com/foo(bar)))');
+const tokens = parse('([foo](https://example.com/foo(bar)))');
 assert.deepStrictEqual(tokens, [
 text('('),
 tree('link', [
@@ -835,19 +835,19 @@ describe('MFM', () => {
 });
 
 it('emoji', () => {
-const tokens1 = analyze(':cat:');
+const tokens1 = parse(':cat:');
 assert.deepStrictEqual(tokens1, [
 leaf('emoji', { name: 'cat' })
 ]);
 
-const tokens2 = analyze(':cat::cat::cat:');
+const tokens2 = parse(':cat::cat::cat:');
 assert.deepStrictEqual(tokens2, [
 leaf('emoji', { name: 'cat' }),
 leaf('emoji', { name: 'cat' }),
 leaf('emoji', { name: 'cat' })
 ]);
 
-const tokens3 = analyze('🍎');
+const tokens3 = parse('🍎');
 assert.deepStrictEqual(tokens3, [
 leaf('emoji', { emoji: '🍎' })
 ]);
@@ -855,21 +855,21 @@ describe('MFM', () => {
 
 describe('block code', () => {
 it('simple', () => {
-const tokens = analyze('```\nvar x = "Strawberry Pasta";\n```');
+const tokens = parse('```\nvar x = "Strawberry Pasta";\n```');
 assert.deepStrictEqual(tokens, [
 leaf('blockCode', { code: 'var x = "Strawberry Pasta";', lang: null })
 ]);
 });
 
 it('can specify language', () => {
-const tokens = analyze('``` json\n{ "x": 42 }\n```');
+const tokens = parse('``` json\n{ "x": 42 }\n```');
 assert.deepStrictEqual(tokens, [
 leaf('blockCode', { code: '{ "x": 42 }', lang: 'json' })
 ]);
 });
 
 it('require line break before "```"', () => {
-const tokens = analyze('before```\nfoo\n```');
+const tokens = parse('before```\nfoo\n```');
 assert.deepStrictEqual(tokens, [
 text('before'),
 leaf('inlineCode', { code: '`' }),
@@ -879,7 +879,7 @@ describe('MFM', () => {
 });
 
 it('series', () => {
-const tokens = analyze('```\nfoo\n```\n```\nbar\n```\n```\nbaz\n```');
+const tokens = parse('```\nfoo\n```\n```\nbar\n```\n```\nbaz\n```');
 assert.deepStrictEqual(tokens, [
 leaf('blockCode', { code: 'foo', lang: null }),
 leaf('blockCode', { code: 'bar', lang: null }),
@@ -888,14 +888,14 @@ describe('MFM', () => {
 });
 
 it('ignore internal marker', () => {
-const tokens = analyze('```\naaa```bbb\n```');
+const tokens = parse('```\naaa```bbb\n```');
 assert.deepStrictEqual(tokens, [
 leaf('blockCode', { code: 'aaa```bbb', lang: null })
 ]);
 });
 
 it('trim after line break', () => {
-const tokens = analyze('```\nfoo\n```\nbar');
+const tokens = parse('```\nfoo\n```\nbar');
 assert.deepStrictEqual(tokens, [
 leaf('blockCode', { code: 'foo', lang: null }),
 text('bar')
@@ -905,21 +905,21 @@ describe('MFM', () => {
 
 describe('inline code', () => {
 it('simple', () => {
-const tokens = analyze('`var x = "Strawberry Pasta";`');
+const tokens = parse('`var x = "Strawberry Pasta";`');
 assert.deepStrictEqual(tokens, [
 leaf('inlineCode', { code: 'var x = "Strawberry Pasta";' })
 ]);
 });
 
 it('disallow line break', () => {
-const tokens = analyze('`foo\nbar`');
+const tokens = parse('`foo\nbar`');
 assert.deepStrictEqual(tokens, [
 text('`foo\nbar`')
 ]);
 });
 
 it('disallow ´', () => {
-const tokens = analyze('`foo´bar`');
+const tokens = parse('`foo´bar`');
 assert.deepStrictEqual(tokens, [
 text('`foo´bar`')
 ]);
@@ -929,7 +929,7 @@ describe('MFM', () => {
 it('mathInline', () => {
 const fomula = 'x = {-b \\pm \\sqrt{b^2-4ac} \\over 2a}';
 const content = `\\(${fomula}\\)`;
-const tokens = analyze(content);
+const tokens = parse(content);
 assert.deepStrictEqual(tokens, [
 leaf('mathInline', { formula: fomula })
 ]);
@@ -939,7 +939,7 @@ describe('MFM', () => {
 it('simple', () => {
 const fomula = 'x = {-b \\pm \\sqrt{b^2-4ac} \\over 2a}';
 const content = `\\[\n${fomula}\n\\]`;
-const tokens = analyze(content);
+const tokens = parse(content);
 assert.deepStrictEqual(tokens, [
 leaf('mathBlock', { formula: fomula })
 ]);
@@ -947,22 +947,22 @@ describe('MFM', () => {
 });
 
 it('search', () => {
-const tokens1 = analyze('a b c 検索');
+const tokens1 = parse('a b c 検索');
 assert.deepStrictEqual(tokens1, [
 leaf('search', { content: 'a b c 検索', query: 'a b c' })
 ]);
 
-const tokens2 = analyze('a b c Search');
+const tokens2 = parse('a b c Search');
 assert.deepStrictEqual(tokens2, [
 leaf('search', { content: 'a b c Search', query: 'a b c' })
 ]);
 
-const tokens3 = analyze('a b c search');
+const tokens3 = parse('a b c search');
 assert.deepStrictEqual(tokens3, [
 leaf('search', { content: 'a b c search', query: 'a b c' })
 ]);
 
-const tokens4 = analyze('a b c SEARCH');
+const tokens4 = parse('a b c SEARCH');
 assert.deepStrictEqual(tokens4, [
 leaf('search', { content: 'a b c SEARCH', query: 'a b c' })
 ]);
@@ -970,7 +970,7 @@ describe('MFM', () => {
 
 describe('title', () => {
 it('simple', () => {
-const tokens = analyze('【foo】');
+const tokens = parse('【foo】');
 assert.deepStrictEqual(tokens, [
 tree('title', [
 text('foo')
@@ -979,14 +979,14 @@ describe('MFM', () => {
 });
 
 it('require line break', () => {
-const tokens = analyze('a【foo】');
+const tokens = parse('a【foo】');
 assert.deepStrictEqual(tokens, [
 text('a【foo】')
 ]);
 });
 
 it('with before and after texts', () => {
-const tokens = analyze('before\n【foo】\nafter');
+const tokens = parse('before\n【foo】\nafter');
 assert.deepStrictEqual(tokens, [
 text('before\n'),
 tree('title', [
@@ -997,14 +997,14 @@ describe('MFM', () => {
 });
 
 it('ignore multiple title blocks', () => {
-const tokens = analyze('【foo】bar【baz】');
+const tokens = parse('【foo】bar【baz】');
 assert.deepStrictEqual(tokens, [
 text('【foo】bar【baz】')
 ]);
 });
 
 it('disallow linebreak in title', () => {
-const tokens = analyze('【foo\nbar】');
+const tokens = parse('【foo\nbar】');
 assert.deepStrictEqual(tokens, [
 text('【foo\nbar】')
 ]);
@@ -1013,7 +1013,7 @@ describe('MFM', () => {
 
 describe('center', () => {
 it('simple', () => {
-const tokens = analyze('<center>foo</center>');
+const tokens = parse('<center>foo</center>');
 assert.deepStrictEqual(tokens, [
 tree('center', [
 text('foo')
@@ -1024,7 +1024,7 @@ describe('MFM', () => {
 
 describe('strike', () => {
 it('simple', () => {
-const tokens = analyze('~~foo~~');
+const tokens = parse('~~foo~~');
 assert.deepStrictEqual(tokens, [
 tree('strike', [
 text('foo')
@@ -1035,7 +1035,7 @@ describe('MFM', () => {
 
 describe('italic', () => {
 it('<i>', () => {
-const tokens = analyze('<i>foo</i>');
+const tokens = parse('<i>foo</i>');
 assert.deepStrictEqual(tokens, [
 tree('italic', [
 text('foo')
@@ -1044,7 +1044,7 @@ describe('MFM', () => {
 });
 
 it('underscore', () => {
-const tokens = analyze('_foo_');
+const tokens = parse('_foo_');
 assert.deepStrictEqual(tokens, [
 tree('italic', [
 text('foo')
@@ -1053,7 +1053,7 @@ describe('MFM', () => {
 });
 
 it('simple with asterix', () => {
-const tokens = analyze('*foo*');
+const tokens = parse('*foo*');
 assert.deepStrictEqual(tokens, [
 tree('italic', [
 text('foo')
@@ -1062,28 +1062,28 @@ describe('MFM', () => {
 });
 
 it('exlude emotes', () => {
-const tokens = analyze('*.*');
+const tokens = parse('*.*');
 assert.deepStrictEqual(tokens, [
 text("*.*"),
 ]);
 });
 
 it('mixed', () => {
-const tokens = analyze('_foo*');
+const tokens = parse('_foo*');
 assert.deepStrictEqual(tokens, [
 text('_foo*'),
 ]);
 });
 
 it('mixed', () => {
-const tokens = analyze('*foo_');
+const tokens = parse('*foo_');
 assert.deepStrictEqual(tokens, [
 text('*foo_'),
 ]);
 });
 
 it('ignore snake_case string', () => {
-const tokens = analyze('foo_bar_baz');
+const tokens = parse('foo_bar_baz');
 assert.deepStrictEqual(tokens, [
 text('foo_bar_baz'),
 ]);
@@ -1127,18 +1127,18 @@ describe('MFM', () => {
 it('br', () => {
 const input = 'foo\nbar\nbaz';
 const output = '<p><span>foo<br>bar<br>baz</span></p>';
-assert.equal(toHtml(analyze(input)), output);
+assert.equal(toHtml(parse(input)), output);
 });
 
 it('br alt', () => {
 const input = 'foo\r\nbar\rbaz';
 const output = '<p><span>foo<br>bar<br>baz</span></p>';
-assert.equal(toHtml(analyze(input)), output);
+assert.equal(toHtml(parse(input)), output);
 });
 });
 
 it('code block with quote', () => {
-const tokens = analyze('> foo\n```\nbar\n```');
+const tokens = parse('> foo\n```\nbar\n```');
 assert.deepStrictEqual(tokens, [
 tree('quote', [
 text('foo')
@@ -1148,7 +1148,7 @@ describe('MFM', () => {
 });
 
 it('quote between two code blocks', () => {
-const tokens = analyze('```\nbefore\n```\n> foo\n```\nafter\n```');
+const tokens = parse('```\nbefore\n```\n> foo\n```\nafter\n```');
 assert.deepStrictEqual(tokens, [
 leaf('blockCode', { code: 'before', lang: null }),
 tree('quote', [