Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion src/seqdiag/builder.py
Original file line number Diff line number Diff line change
Expand Up @@ -157,7 +157,7 @@ def instantiate(self, group, block, tree):
group.set_attribute(stmt)

elif isinstance(stmt, parser.Separator):
sep = EdgeSeparator(stmt.type, unquote(stmt.value))
sep = EdgeSeparator(stmt.type, unquote(stmt.value), stmt.href)
sep.group = group
self.diagram.separators.append(sep)
group.edges.append(sep)
Expand Down
13 changes: 9 additions & 4 deletions src/seqdiag/drawer.py
Original file line number Diff line number Diff line change
Expand Up @@ -152,17 +152,22 @@ def edge_label(self, edge):
fill=edge.color, halign=halign)

def separator(self, sep):
    """Draw a separator (its lines, label background, and label text).

    When the separator carries an ``href`` and the output format is
    SVG, all drawing is routed through an anchor wrapper so the
    rendered separator becomes a clickable link; other formats fall
    back to the plain drawer.
    """
    # Only SVG output can represent hyperlinks, so gate the anchor
    # wrapper on both the link target and the backend.
    if sep.href and self.format == 'SVG':
        drawer = self.drawer.anchor(sep.href)
    else:
        drawer = self.drawer

    m = self.metrics.separator(sep)
    for line in m.lines:
        drawer.line(line, fill=self.fill, style=sep.style)

    if sep.type == 'delay':
        # Blank out the area behind the label so the text stays readable
        # over the separator lines.
        drawer.rectangle(m.labelbox, fill='white', outline='white')
    elif sep.type == 'divider':
        drawer.rectangle(m.labelbox, fill=sep.color,
                         outline=sep.linecolor)

    drawer.textarea(m.labelbox, sep.label,
                    self.metrics.font_for(sep), fill=sep.textcolor)

def altblock(self, block):
Expand Down
3 changes: 2 additions & 1 deletion src/seqdiag/elements.py
Original file line number Diff line number Diff line change
Expand Up @@ -67,9 +67,10 @@ def clear(cls):
cls.basecolor = (208, 208, 208)
cls.linecolor = (0, 0, 0)

def __init__(self, _type, label):
def __init__(self, _type, label, href):
super(EdgeSeparator, self).__init__()
self.label = label
self.href = href
self.group = None
self.style = None
self.color = self.basecolor
Expand Down
34 changes: 21 additions & 13 deletions src/seqdiag/parser.py
Original file line number Diff line number Diff line change
Expand Up @@ -37,6 +37,7 @@

import io
from collections import namedtuple
import re
from re import DOTALL, MULTILINE

from blockdiag.parser import create_mapper, flatten, oneplus_to_list
Expand All @@ -51,7 +52,7 @@
Edge = namedtuple('Edge', ('from_node edge_type to_node '
'followers attrs edge_block'))
Statements = namedtuple('Statements', 'stmts')
# A separator statement: ``type`` is the marker string, ``value`` the
# label text, and ``href`` an optional link target (None when absent).
Separator = namedtuple('Separator', 'type value href')
Extension = namedtuple('Extension', 'type name attrs')
Fragment = namedtuple('Fragment', 'type id stmts')

Expand All @@ -60,20 +61,24 @@ class ParseException(Exception):
pass


# A separator token: an opening marker ('===' or '...'), a non-empty
# label, the matching closing marker (backreferenced via (?P=sep)), and
# an optional trailer captured as ``href`` (may be empty; the parser
# strips it and maps '' to None).
SEP_PATTERN = r'(?P<sep>===|\.\.\.)(?P<label>[^\r\n]+)(?P=sep)(?P<href>.*)'
SEP_RE = re.compile(SEP_PATTERN)


def tokenize(string):
"""str -> Sequence(Token)"""
# flake8: NOQA
specs = [ # NOQA
('Comment', (r'/\*(.|[\r\n])*?\*/', MULTILINE)), # NOQA
('Comment', (r'(//|#).*',)), # NOQA
('NL', (r'[\r\n]+',)), # NOQA
('Space', (r'[ \t\r\n]+',)), # NOQA
('Separator', (r'(?P<sep>===|\.\.\.)[^\r\n]+(?P=sep)',)), # NOQA
('Name', ('[A-Za-z_0-9\u0080-\uffff]' + # NOQA
'[A-Za-z_\\-.0-9\u0080-\uffff]*',)), # NOQA
('Op', (r'(=>)|[{};,=\[\]]|(<<?--?)|(--?>>?)',)), # NOQA
('Number', (r'-?(\.[0-9]+)|([0-9]+(\.[0-9]*)?)',)), # NOQA
('String', (r'(?P<quote>"|\').*?(?<!\\)(?P=quote)', DOTALL)), # NOQA
specs = [ # NOQA
('Comment', (r'/\*(.|[\r\n])*?\*/', MULTILINE)), # NOQA
('Comment', (r'(//|#).*',)), # NOQA
('NL', (r'[\r\n]+',)), # NOQA
('Space', (r'[ \t\r\n]+',)), # NOQA
('Separator', (SEP_PATTERN,)), # NOQA
('Name', ('[A-Za-z_0-9\u0080-\uffff]' + # NOQA
'[A-Za-z_\\-.0-9\u0080-\uffff]*',)), # NOQA
('Op', (r'(=>)|[{};,=\[\]]|(<<?--?)|(--?>>?)',)), # NOQA
('Number', (r'-?(\.[0-9]+)|([0-9]+(\.[0-9]*)?)',)), # NOQA
('String', (r'(?P<quote>"|\').*?(?<!\\)(?P=quote)', DOTALL)), # NOQA
]
useless = ['Comment', 'NL', 'Space']
t = make_tokenizer(specs)
Expand All @@ -90,7 +95,10 @@ def parse(seq):
separator = some(lambda t: t.type == 'Separator') >> tokval

def make_separator(sep):
    """Parse a raw separator token into a Separator tuple.

    The token has the shape ``<marker>label<marker>[href]`` where the
    marker is '===' or '...'; the trailing href is optional and is
    normalized to None when absent or blank.
    """
    m = SEP_RE.match(sep)
    if m is None:
        # The tokenizer matched this same pattern, so this should be
        # unreachable; raise a real error rather than ``assert``,
        # which is stripped under ``python -O``.
        raise ParseException('malformed separator: %r' % (sep,))
    return Separator(m.group('sep'),
                     m.group('label').strip(),
                     m.group('href').strip() or None)

#
# parts of syntax
Expand Down