Change italic syntax to fit with other markdown implementations (#3916)
Currently italics require an HTML <i> syntax, unlike every other formatter. This is very confusing, especially as there doesn't seem to be any documentation for it. This change makes both _text_ and *text* work, which is what users would expect from other Markdown implementations such as Pleroma, Discord and GitHub. Tests are added.
This commit is contained in:
parent 11689e6d18
commit 361af34956
2 changed files with 38 additions and 7 deletions
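
To illustrate the change, here is a minimal standalone sketch (not part of the commit) that exercises only the regular expression introduced in the parser diff below; the expected results mirror the new tests.

    // Standalone check of the new italic pattern; the real parser wraps it in Parsimmon (see diff below).
    const italicPattern = /(\*|_)([a-zA-Z0-9]+?[\s\S]*?)\1/;

    for (const input of ['_foo_', '*foo*', '<i>foo</i>']) {
        const match = italicPattern.exec(input);
        console.log(input, '=>', match ? match[2] : 'no italic');
    }
    // '_foo_' and '*foo*' both capture 'foo'; the old '<i>foo</i>' form no longer matches this pattern.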
@@ -224,7 +224,7 @@ const mfm = P.createLanguage({
 
 	//#region Italic
 	italic: r =>
-		P.regexp(/<i>([\s\S]+?)<\/i>/, 1)
+		P.regexp(/(\*|_)([a-zA-Z0-9]+?[\s\S]*?)\1/, 2)
 		.map(x => createTree('italic', P.alt(
 			r.bold,
 			r.strike,
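
For context, Parsimmon's P.regexp(regexp, group) succeeds when the regexp matches at the current position and yields the given capture group. The new pattern puts the delimiter in group 1 and the text in group 2 (hence the index change from 1 to 2), and the \1 backreference forces the closing delimiter to match the opening one. A minimal sketch of this parser in isolation, assuming the parsimmon package and leaving out the rest of the MFM grammar:

    import * as P from 'parsimmon';

    // Isolated italic-text parser using the new pattern; group 2 is the text between the delimiters.
    const italicText = P.regexp(/(\*|_)([a-zA-Z0-9]+?[\s\S]*?)\1/, 2);

    console.log(italicText.parse('_foo_')); // { status: true, value: 'foo' }
    console.log(italicText.parse('*foo_')); // { status: false, ... } because the delimiters must match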
37	test/mfm.ts
@@ -288,7 +288,7 @@ describe('MFM', () => {
 			leaf('mention', { acct: '@a', canonical: '@a', username: 'a', host: null })
 		]);
 
-		const tokens4 = analyze('@\n@v\n@veryverylongusername' /* \n@toolongtobeasamention */ );
+		const tokens4 = analyze('@\n@v\n@veryverylongusername' /* \n@toolongtobeasamention */);
 		assert.deepStrictEqual(tokens4, [
 			text('@\n'),
 			leaf('mention', { acct: '@v', canonical: '@v', username: 'v', host: null }),
@@ -883,15 +883,46 @@ describe('MFM', () => {
 		});
 
 	describe('italic', () => {
-		it('simple', () => {
-			const tokens = analyze('<i>foo</i>');
+		it('underscore', () => {
+			const tokens = analyze('_foo_');
 			assert.deepStrictEqual(tokens, [
 				tree('italic', [
 					text('foo')
 				], {}),
 			]);
 		});
+
+		it('simple with asterix', () => {
+			const tokens = analyze('*foo*');
+			assert.deepStrictEqual(tokens, [
+				tree('italic', [
+					text('foo')
+				], {}),
+			]);
 		});
+
+		it('exlude emotes', () => {
+			const tokens = analyze('*.*');
+			assert.deepStrictEqual(tokens, [
+				text("*.*"),
+			]);
+		});
+
+		it('mixed', () => {
+			const tokens = analyze('_foo*');
+			assert.deepStrictEqual(tokens, [
+				text('_foo*'),
+			]);
+		});
+
+		it('mixed', () => {
+			const tokens = analyze('*foo_');
+			assert.deepStrictEqual(tokens, [
+				text('*foo_'),
+			]);
+		});
+	},
+	);
 	});
 
 	describe('toHtml', () => {
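
Two details of the new pattern explain the negative tests above (my reading of the regexp, not spelled out in the commit): the leading [a-zA-Z0-9]+? requires the italicized run to start with an alphanumeric character, which keeps emoticon-like text such as '*.*' as plain text, and the \1 backreference requires the closing delimiter to equal the opening one, which keeps '_foo*' and '*foo_' as plain text. A quick check:

    // Hypothetical quick check of the edge cases covered by the new tests (same pattern as in the parser diff).
    const pattern = /(\*|_)([a-zA-Z0-9]+?[\s\S]*?)\1/;
    console.log(pattern.test('*.*'));   // false: the run must start with [a-zA-Z0-9]
    console.log(pattern.test('_foo*')); // false: '*' cannot close an italic opened with '_'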