From 003a5923193ec12ec201ba88971bfaa889e3073c Mon Sep 17 00:00:00 2001
From: Oliver Marks
Date: Sun, 4 Nov 2018 14:16:55 +0000
Subject: [PATCH] Basic table support

---
 eorg/const.py             |  3 +++
 eorg/parser.py            | 11 ++++++++---
 eorg/tokens.py            |  1 +
 eorg/version.py           |  2 +-
 tests/test_html.py        |  2 +-
 tests/test_token_types.py | 21 +++++++++++++++++++++
 6 files changed, 35 insertions(+), 5 deletions(-)
 create mode 100644 tests/test_token_types.py

diff --git a/eorg/const.py b/eorg/const.py
index 458b3a5..923ac2b 100755
--- a/eorg/const.py
+++ b/eorg/const.py
@@ -11,6 +11,8 @@ t_EXAMPLE_BEGIN = r"^\#\+BEGIN_EXAMPLE"
 t_EXAMPLE_END = r"^\#\+END_EXAMPLE"
 t_SRC_BEGIN = r"^\#\+BEGIN_SRC\s+"
 t_SRC_END = r"^\#\+END_SRC"
+t_TABLE_START = r"^\s*\|"
+t_TABLE_END = r"^(?!\s*\|).*$"
 t_RESULTS_START = r"^\#\+RESULTS:"
 t_CAPTIONS = r"^\#\+CAPTION:"
 t_IMG = r"^\[\[\s]]$"
@@ -26,6 +28,7 @@ TOKENS = {
     tokens.IMAGE: (t_IMG, False, 2, None, False),
     tokens.CAPTION: (t_CAPTIONS, False, 2, None, False),
     tokens.SOURCE: (t_SRC_BEGIN, t_SRC_END, 2, None, False),
+    tokens.TABLE: (t_TABLE_START, t_TABLE_END, 0, None, False),
     tokens.RESULTS: (t_SRC_BEGIN, t_SRC_END, 2, None, False),
     tokens.HEADER: (t_HEADER, False, 1, None, True),
 }
diff --git a/eorg/parser.py b/eorg/parser.py
index 3f4911f..b7245ba 100644
--- a/eorg/parser.py
+++ b/eorg/parser.py
@@ -76,7 +76,9 @@ class Document:
 def parse_attrs(text):
     attrs = {}
     value_list = text.split(':')
+    print(value_list)
     attrs['language'] = value_list.pop(0).strip()
+    #attrs['language'] = value_list.pop(0).strip()
     for row in value_list:
         values = row.strip().split(' ')
         attrs[values[0]] = values[1:]
@@ -94,17 +96,20 @@ def parseline(text):
         match = re.search(rx, text)
         if not match:
             continue
+        value = text[match.end() :]
         level = len(match.group(0))
         if count is True:
             attrs={'depth': level}
         if key == tokens.META:
             return (
                 block,
-                Token(token=match.group(0)[s:e], value=text[match.end() :]),
+                Token(token=match.group(0)[s:e], value=value),
             )
         if key == tokens.SOURCE:
-            return block, Token(token=key, attrs=parse_attrs(text[match.end():]))
-        return block, Token(token=key, value=text[match.end():], attrs=attrs)
+            return block, Token(token=key, attrs=parse_attrs(value))
+        if key == tokens.TABLE:
+            return block, Token(token=key, value=text+"\n")
+        return block, Token(token=key, value=value, attrs=attrs)
 
     text = text.strip()
     if text == "":
diff --git a/eorg/tokens.py b/eorg/tokens.py
index 0af1879..481b43c 100644
--- a/eorg/tokens.py
+++ b/eorg/tokens.py
@@ -14,6 +14,7 @@ SOURCE = 50
 EXAMPLE = 51
 RESULTS = 52
 COMMENT = 53
+TABLE = 54
 
 
 
diff --git a/eorg/version.py b/eorg/version.py
index 1e842aa..22ba738 100755
--- a/eorg/version.py
+++ b/eorg/version.py
@@ -1 +1 @@
-__version__=0.60
+__version__=0.65
diff --git a/tests/test_html.py b/tests/test_html.py
index 606723a..34a5588 100644
--- a/tests/test_html.py
+++ b/tests/test_html.py
@@ -99,7 +99,7 @@ head -n 5 examples/html-plain/example.py
         Token(tokens.SOURCE, """head -n 5 examples/html-plain/example.py\n"""),
     ]
     result = parse(text).doc
-    print(result)
+    assert result[0].token == tokens.BLANK
     assert result[0].value == expected[0].value
     assert result[1].attrs.get('language') == 'sh'
     assert result[1].value == expected[1].value
diff --git a/tests/test_token_types.py b/tests/test_token_types.py
new file mode 100644
index 0000000..67b19f8
--- /dev/null
+++ b/tests/test_token_types.py
@@ -0,0 +1,21 @@
+import os
+import pytest
+from io import StringIO
+from eorg import tokens
+from eorg.parser import parse
+from eorg.generate import html
+
+
+def test_basic():
+    document = """| Header 1 | Header 2 |
+| row 1 | row 2 |
+    """
+    doc = parse(StringIO(document))
+    assert doc.doc[0].token == tokens.TABLE
+    r1 = doc.doc[0].value.strip()
+    r2 = document.strip()
+    print(r1)
+    print(r2)
+    assert r1 == r2
+
+