# ~~strike through~~
from __future__ import annotations
from .state_inline import Delimiter, StateInline
def tokenize(state: StateInline, silent: bool) -> bool:
    """Insert each marker as a separate text token, and add it to delimiter list"""
    start = state.pos
    ch = state.src[start]

    if silent:
        return False

    if ch != "~":
        return False

    scanned = state.scanDelims(state.pos, True)
    length = scanned.length

    if length < 2:
        return False

    if length % 2:
        token = state.push("text", "", 0)
        token.content = ch
        length -= 1

    i = 0
    while i < length:
        token = state.push("text", "", 0)
        token.content = ch + ch
        state.delimiters.append(
            Delimiter(
                marker=ord(ch),
                length=0,  # disable "rule of 3" length checks meant for emphasis
                token=len(state.tokens) - 1,
                end=-1,
                open=scanned.can_open,
                close=scanned.can_close,
            )
        )
        i += 2

    state.pos += scanned.length
    return True
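
# Illustrative note (added for clarity, not part of the original rule): for an
# odd-length run such as ``~~~``, ``tokenize`` first pushes a lone "~" text
# token, then one "~~" text token per remaining pair, each paired with a
# Delimiter entry.  The input ``~~~strike~~~`` therefore produces the text
# tokens "~", "~~", "strike", "~", "~~", and the post-processing pass below
# rewrites the matched "~~" tokens into s_open / s_close tags.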
def _postProcess(state: StateInline, delimiters: list[Delimiter]) -> None:
    loneMarkers = []
    maximum = len(delimiters)

    i = 0
    while i < maximum:
        startDelim = delimiters[i]

        if startDelim.marker != 0x7E:  # /* ~ */
            i += 1
            continue

        if startDelim.end == -1:
            i += 1
            continue

        endDelim = delimiters[startDelim.end]

        token = state.tokens[startDelim.token]
        token.type = "s_open"
        token.tag = "s"
        token.nesting = 1
        token.markup = "~~"
        token.content = ""

        token = state.tokens[endDelim.token]
        token.type = "s_close"
        token.tag = "s"
        token.nesting = -1
        token.markup = "~~"
        token.content = ""

        if (
            state.tokens[endDelim.token - 1].type == "text"
            and state.tokens[endDelim.token - 1].content == "~"
        ):
            loneMarkers.append(endDelim.token - 1)

        i += 1

    # If a marker sequence has an odd number of characters, it's split
    # like this: `~~~~~` -> `~` + `~~` + `~~`, leaving one marker at the
    # start of the sequence.
    #
    # So, we have to move all those markers after subsequent s_close tags.
    #
    while loneMarkers:
        i = loneMarkers.pop()
        j = i + 1

        while (j < len(state.tokens)) and (state.tokens[j].type == "s_close"):
            j += 1

        j -= 1

        if i != j:
            token = state.tokens[j]
            state.tokens[j] = state.tokens[i]
            state.tokens[i] = token
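
# Illustrative note (added for clarity, not part of the original rule): for
# ``~~~strike~~~`` the token stream just before the lone-marker pass is
#     text("~"), s_open, text("strike"), text("~"), s_close
# and the swap above moves the trailing lone "~" past the s_close, giving
#     text("~"), s_open, text("strike"), s_close, text("~")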
def postProcess(state: StateInline) -> None:
    """Walk through delimiter list and replace text tokens with tags."""
    tokens_meta = state.tokens_meta
    maximum = len(state.tokens_meta)
    _postProcess(state, state.delimiters)

    curr = 0
    while curr < maximum:
        try:
            curr_meta = tokens_meta[curr]
        except IndexError:
            pass
        else:
            if curr_meta and "delimiters" in curr_meta:
                _postProcess(state, curr_meta["delimiters"])
        curr += 1
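
# Minimal usage sketch (an assumption about how this rule is exercised, not
# part of this module): with markdown-it-py installed, something along these
# lines should emit the <s> tag.  The preset name and whether the rule must be
# enabled explicitly depend on the markdown-it-py version in use.
#
#     from markdown_it import MarkdownIt
#
#     md = MarkdownIt("commonmark").enable("strikethrough")
#     print(md.render("~~strike through~~"))
#     # expected: <p><s>strike through</s></p>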