Make wikilinks for journals all render in ISO format
(very inefficient implementation for now)
Parent: 71f0a3c4e4
Commit: 7f93ee8173

3 changed files with 30 additions and 0 deletions
@@ -57,6 +57,11 @@ class Node:
         # i.e. if two users contribute subnodes titled [[foo]], they both show up when querying node [[foo]].
         self.wikilink = wikilink
         self.uri = wikilink
+        # ensure wikilinks to journal entries are all shown in iso format
+        # (important to do it after self.uri = wikilink to avoid breaking
+        # links)
+        if util.is_journal(wikilink):
+            self.wikilink = util.canonical_wikilink(wikilink)
         self.url = '/node/' + self.uri
         self.subnodes = []
 
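The ordering noted in the added comment is the load-bearing detail: self.uri is taken from the raw wikilink before normalization, so existing /node/<uri> URLs keep resolving and only the displayed wikilink is rewritten. Below is a minimal standalone sketch of just that guard, not the real Node constructor; it assumes app/util.py from this commit is importable as util and uses a hypothetical input.

# Sketch only: mirrors the guard added above, outside the Node constructor.
import util

raw = "2020-11-25"      # hypothetical wikilink; already matches the visible ISO regex
uri = raw               # the URI keeps the raw string, so existing links don't break
wikilink = raw
if util.is_journal(raw):
    # only the displayed form is normalized; a natural-language title such as
    # "november 25, 2020" would be rewritten to "2020-11-25" here, provided the
    # (partially elided) date_regexes list in is_journal matches it
    wikilink = util.canonical_wikilink(raw)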
app/util.py (23 additions)
@@ -12,8 +12,25 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 import re
+from dateparser import DateDataParser
+from functools import lru_cache
+
 
+@lru_cache(maxsize=None)
 def canonical_wikilink(wikilink):
+
+    if is_journal(wikilink):
+        try:
+            parser = DateDataParser(languages=['en'])
+            date = parser.get_date_data(wikilink).date_obj
+            new_wikilink = date.isoformat().split("T")[0]
+            if "nov" in wikilink:
+                print(f'>> Journal! "{wikilink}" -> "{new_wikilink}"')
+            wikilink = new_wikilink
+        except:
+            # TODO: if we add logging, maybe log that we couldn't parse a date here
+            pass
+
     # hack hack
     wikilink = (
         wikilink.lower()
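As a rough illustration of what the dateparser calls above return, here is the same pipeline in isolation: a sketch with a hypothetical input, not part of the commit, and the exact datetime depends on dateparser's parsing settings.

from dateparser import DateDataParser

parser = DateDataParser(languages=['en'])
date = parser.get_date_data("november 25, 2020").date_obj   # hypothetical journal title
print(date.isoformat())                  # e.g. '2020-11-25T00:00:00'
print(date.isoformat().split("T")[0])    # '2020-11-25' -- the canonical journal wikilink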
@@ -25,7 +42,10 @@ def canonical_wikilink(wikilink):
     return wikilink
 
 
+
+@lru_cache(maxsize=None)
 def is_journal(wikilink):
+
     date_regexes = [
         # iso format
         '[0-9]{4}-[0-9]{2}-[0-9]{2}',
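The @lru_cache decorators are what keeps the "very inefficient implementation" tolerable: a given wikilink is parsed at most once per process, and repeated lookups come out of the cache. A small standard-library illustration of that behaviour follows; it is a generic sketch, not code from this commit.

from functools import lru_cache

@lru_cache(maxsize=None)
def slow_normalize(s):
    print(f"normalizing {s!r}")   # only printed on a cache miss
    return s.lower()

slow_normalize("2020-11-25")        # miss: prints and computes
slow_normalize("2020-11-25")        # hit: returns the cached value silently
print(slow_normalize.cache_info())  # CacheInfo(hits=1, misses=1, maxsize=None, currsize=1)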
@@ -36,6 +56,9 @@ def is_journal(wikilink):
     ]
 
     # combine all the date regexes into one super regex
+    # TODO: it'd really be better to compile this regex once rather than on
+    # each request, but as the knuth would say premature optimization is the
+    # root of all evil, etc. etc.
     combined_date_regex = re.compile(f'^({"|".join(date_regexes)})$')
 
     return combined_date_regex.match(wikilink)
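To make the combined "super regex" concrete: with only the ISO pattern visible in this diff (date_regexes has more entries outside the hunk), the anchored ^(...)$ alternation behaves roughly like this sketch.

import re

date_regexes = [
    # iso format; the real list contains more patterns than this hunk shows
    '[0-9]{4}-[0-9]{2}-[0-9]{2}',
]
combined_date_regex = re.compile(f'^({"|".join(date_regexes)})$')

print(bool(combined_date_regex.match('2020-11-25')))    # True  -> treated as a journal node
print(bool(combined_date_regex.match('foo')))           # False -> regular node
print(bool(combined_date_regex.match('2020-11-25x')))   # False -- anchors require a full match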
@@ -20,3 +20,5 @@ webencodings==0.5.1
 Werkzeug==1.0.1
 WTForms==2.3.3
 zipp==3.4.0
+dateparser==1.0.0
+