Fix missing lexer handling
This commit is contained in:
parent
084129c827
commit
81bf91532f
|
@ -1,12 +1,9 @@
|
|||
import re
|
||||
from html import escape
|
||||
from io import StringIO
|
||||
from eorg.const import Token, ESCAPE
|
||||
from eorg import tokens
|
||||
from eorg.tokens import Token
|
||||
from pygments import highlight
|
||||
from pygments.util import ClassNotFound
|
||||
from pygments.lexers import PythonLexer
|
||||
from pygments.lexers import get_lexer_by_name
|
||||
from pygments.formatters import HtmlFormatter
|
||||
|
||||
|
def src(doc, code, cls="", root=True):
    """Render a source-code token as syntax-highlighted HTML via pygments.

    Parameters:
        doc: the parsed document (unused by this renderer but part of the
            common renderer signature).
        code: token whose ``attrs`` mapping may carry a ``language`` hint
            and whose ``value`` holds the raw source text.
        cls: extra CSS class string — unused here.  # NOTE(review): confirm callers
        root: whether this is a top-level render call — unused here.

    Returns:
        str: HTML produced by ``HtmlFormatter`` with line numbers enabled.
    """
    try:
        # No language attribute → default to the shell lexer.
        lexer = get_lexer_by_name(code.attrs.get("language", "shell"))
    except ClassNotFound:
        # Unknown language name: fall back to the plain-text lexer, which
        # always exists, instead of retrying the same bad name (which would
        # raise ClassNotFound again).
        lexer = get_lexer_by_name("text")
    return highlight(code.value, lexer, HtmlFormatter(linenos=True))
|
||||
|
||||
|
||||
|
|
|
@ -115,7 +115,6 @@ def parsebody(text, rx):
|
|||
def parseline(text, stream):
|
||||
attrs = None
|
||||
for key, token in TOKENS.items():
|
||||
print(token)
|
||||
match = re.search(token.start, text)
|
||||
if not match:
|
||||
continue
|
||||
|
|
|
@ -1 +1 @@
|
|||
__version__=0.81
|
||||
__version__=0.82
|
||||
|
|
|
@ -200,3 +200,30 @@ def test_bullet_block():
|
|||
assert result[0].value == expected[0].value
|
||||
assert result[1].token == tokens.BULLET
|
||||
assert result[1].value == expected[1].value
|
||||
|
||||
|
||||
@pytest.mark.skip
def test_src_block_images():
    """Parse a LaTeX source block followed by a #+RESULTS: image link.

    Skipped: RESULTS-token handling is not implemented yet.
    """
    # Raw string keeps the LaTeX backslashes intact: in a plain string
    # "\b" is a backspace escape, so "\begin" would silently corrupt the
    # fixture (and "\s"/"\e" raise DeprecationWarnings on modern Python).
    text = StringIO(r"""
#+BEGIN_SRC latex :exports results :file test.png :results raw file
\begin{equation}
x=\sqrt{b}
\end{equation}
#+END_SRC

#+RESULTS:
[[file:test.png]]
""")
    expected = [
        Token(tokens.BLANK, ""),
        # Backslashes escaped explicitly so the expected value matches the
        # literal LaTeX text of the block body.
        Token(tokens.SOURCE, "\\begin{equation}\nx=\\sqrt{b}\n\\end{equation}"),
        Token(tokens.BLANK, ""),
    ]
    result = parse(text).doc
    assert result[0].token == tokens.BLANK
    assert result[0].value == expected[0].value
    assert result[1].token == tokens.SOURCE
    assert result[2].token == tokens.BLANK
    assert result[3].value == expected[0].value
    assert result[3].token == tokens.RESULTS
|
||||
|
|
Loading…
Reference in New Issue