"""
MoinMoin processor for dot.
Copyright (C) 2004  Alexandre Duret-Lutz

Usage:
 {{{#!dot [show[=0]] [debug[=0]] [name=IDENT] [help]
 <dot source, may use [[Set(..)]], [[Get(..)]], [[Include(..)]] and URL=".." >
 }}}
 show=0  define the graph only (for Include), do not render it
 debug   dump the preprocessed dot source instead of the rendered image
 name=X  label this block so other pages can [[Include(page, X)]] it
 help    print this usage text
--
"""
import os, re, sha, cStringIO

from MoinMoin.parser import wiki
from MoinMoin.Page import Page
from MoinMoin.action import AttachFile

NAME = __name__.split(".")[-1]

# dot attribute groups of the form [URL="target"]; the target gets rewritten
url_re = re.compile(r'\[ *URL *= *(?P<quote>[\'"])(?P<url>.+?)(?P=quote) *\]')
# targets with an explicit scheme (http:, mailto:, ...) are left untouched
notwiki_re = re.compile(r'^[a-zA-Z]+:')
# pseudo-macro arguments
p1_re = r"(?P<p1>[^,)]*?)"
p2_re = r"(?P<p2>[^)]*?)"
end_re = r" *$"
# include pseudo-macro
inc_re = re.compile(
    r'\[\[ *Include *\( *%s *(?:, *%s)? *\) *\]\]%s' %
    (p1_re, p2_re, end_re) )
# set pseudo-macro
set_re = re.compile(
    r'\[\[ *Set *\( *%s *, *(?P<quote>[\'"])%s(?P=quote) *\) *\]\]%s' %
    (p1_re, p2_re, end_re) )
# get pseudo-macro
get_re = re.compile(
    r'\[\[ *Get *\( *%s *\) *\]\]' % (p1_re) )
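# Illustrative sketch, not called by MoinMoin: the kind of lines the regexes
# above are meant to recognise.  The sample attribute and page names
# (fill, GraphLibrary, palette, FrontPage) are invented for illustration.
def _example_pseudo_macros():
    assert set_re.match("[[Set(fill, 'lightblue')]]")
    assert get_re.search("box [style=filled, fillcolor=[[Get(fill)]]]")
    assert inc_re.match("[[Include(GraphLibrary, palette)]]")
    assert inc_re.match("[[Include(GraphLibrary)]]")
    assert url_re.search('box [URL="FrontPage"]')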
def _usage (full=False):
    """Return the interesting part of the module's doc"""
    if full: return __doc__
    lines = __doc__.splitlines ()
    start = 0
    end = len (lines)
    for i in range (end):
        if lines [i].strip ().lower () == "usage:":
            start = i
            break
    for i in range (start, end):
        if lines [i].startswith ('--'):
            end = i
            break
    return '\n'.join (lines [start:end])
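# Illustrative sketch, not called by MoinMoin: _usage() returns the module
# docstring from its "Usage:" line up to, but excluding, the "--" terminator.
def _example_usage_excerpt():
    excerpt = _usage()
    assert excerpt.splitlines()[0].strip().lower() == "usage:"
    assert not excerpt.splitlines()[-1].startswith('--')
    return excerpt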
def _format(src_text, request, formatter):
    """Parse the source text (in wiki source format) and make HTML,
    after diverting sys.stdout to a string"""
    # create str to collect output and divert output to that string
    str_out = cStringIO.StringIO()
    request.redirect(str_out)
    # parse this line and restore output
    wiki.Parser(src_text, request).format(formatter)
    request.redirect()
    # return what was generated
    return str_out.getvalue().strip()
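# Illustrative sketch: _format() is only meaningful inside a running MoinMoin
# request; a caller with a live request/formatter pair could render a small
# piece of wiki markup (the caption text here is invented) like this.
def _example_format_caption(request, formatter):
    return _format("''dot graph'' rendered by this processor",
                   request, formatter)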
def _resolve_link(request, url, this_page):
    """Return external URL, anchor, or wiki link"""
    if notwiki_re.match(url) or url.startswith("#"):
        # return url as-is
        return url
    elif url.startswith("/"):
        # a wiki subpage
        return "%s/%s%s" % (request.getScriptname(), this_page, url)
    else:
        # a wiki page
        return "%s/%s" % (request.getScriptname(), url)
def _preprocess(request, formatter, lines, newlines, substs, recursions):
    """Resolve URLs and pseudo-macros (incl. includes)"""
    for line in lines:
        # Handle URLs to resolve Wiki links
        sline = line.strip()
        url_match = url_re.search(line)
        inc_match = inc_re.match(sline)
        set_match = set_re.match(sline)
        get_match = get_re.search(line)
        this_page = formatter.page.page_name
        if url_match:
            # Process URL; handle both normal URLs and wiki names
            url = url_match.group('url')
            newurl = _resolve_link(request, url, this_page)
            line = line[:url_match.start()] \
                   + '[URL="%s"]' % newurl \
                   + line[url_match.end():]
            newlines.append(line)
        elif inc_match:
            # Process [[Include(page[,ident])]]
            page = inc_match.group('p1')
            ident = inc_match.group('p2')
            # load page, search for named dot section, add it
            other_line = _get_include(page, ident, this_page)
            newlines.extend(other_line)
        elif set_match:
            # Process [[Set(var,'value')]]
            var = set_match.group('p1')
            val = set_match.group('p2')
            substs[var] = val
        elif get_match:
            # Process [[Get(var)]]
            var = get_match.group('p1')
            val = substs.get(var, None)
            if val is None:
                raise RuntimeError("Cannot resolve Variable '%s'" % var)
            line = line[:get_match.start()] + val + line[get_match.end():]
            newlines.append(line)
        else:
            # Process other lines
            newlines.append(line)
    return newlines
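# Illustrative sketch of one preprocessing pass.  It reuses the _FakeRequest
# stand-in defined above; _FakePage/_FakeFormatter are likewise invented here
# only to supply formatter.page.page_name, and the dot source is made up.
class _FakePage:
    page_name = "HomePage"

class _FakeFormatter:
    page = _FakePage()

def _example_preprocess():
    src = ["digraph G {",
           "[[Set(fill, 'lightblue')]]",
           "a [style=filled, fillcolor=[[Get(fill)]]]",
           'a [URL="OtherPage"]',
           "}"]
    out = _preprocess(_FakeRequest(), _FakeFormatter(), src, [], {}, 0)
    # the Set line is consumed, Get is substituted, the URL now points into the wiki
    assert out == ["digraph G {",
                   "a [style=filled, fillcolor=lightblue]",
                   'a [URL="/mywiki/OtherPage"]',
                   "}"]
    return out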
def _get_include(page, ident, this_page):
    """Return the content of the given page; if ident is not empty,
    extract the content of an enclosed section:
      {{{#!dot ... name=ident ...
      ...content...
      }}}
    """
    lines = _get_page_body(page, this_page)
    if not ident: return lines
    start_re = re.compile(r'{{{#!%s.* name=' % NAME)
    inside = False
    found = []
    for line in lines:
        if not inside:
            f = start_re.search(line)
            if f:
                name = line[f.end():].split()[0]
                inside = name == ident
        else:
            pos = line.find('}}}')
            if pos >= 0:
                found.append(line[:pos])
                inside = False
            else:
                found.append(line)
    if len(found) == 0:
        raise RuntimeError("Identifier '%s' not found in page '%s'" %
                           (ident, page))
    return found
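# Illustrative sketch: a "library" page layout that [[Include(page, ident)]]
# can pull from.  The page name GraphLibrary and the block content are
# invented; _get_include() scans the raw page for ' name=palette' on a
# {{{#!dot bangpath and returns the lines up to the closing }}}.
def _example_library_page():
    return ["{{{#!dot show=0 name=palette",
            "node [style=filled, fillcolor=lightyellow]",
            "}}}"]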
def _get_page_body(page, this_page):
    """Return the content of a named page; accepts relative pages"""
    if page.startswith("/") or len(page) == 0:
        page = this_page + page
    p = Page(page)
    if not p.exists ():
        raise RuntimeError("Page '%s' not found" % page)
    else:
        return p.get_raw_body().split('\n')
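# Illustrative sketch of the page-name convention understood above:
# "/Library" means a subpage of the current page, "" means the current page
# itself, anything else is an absolute page name.  The names are invented.
def _example_include_targets():
    return ["[[Include(GraphLibrary, palette)]]",   # absolute page
            "[[Include(/Library, palette)]]"]       # subpage of the current page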
def process(request, formatter, lines):
    """The processor's entry point"""
    # parse bangpath for arguments
    opt_show = 1
    opt_dbg = False
    opt_name = None
    opt_help = None
    bang = lines[0]
    for arg in bang.split()[1:]:
        if arg.startswith("show"): opt_show = arg[4:] != "=0"
        elif arg.startswith("debug"): opt_dbg = arg[5:] != "=0"
        elif arg.startswith("name="): opt_name = arg[5:]
        elif arg.startswith("help"): opt_help = arg[4:]
        else:
            request.write(formatter.rawHTML("""
<p><strong>Error: processor %s: invalid argument: %s</strong></p>
<p>Processor %s usage:</p>
<pre>
%s
</pre>
""" % (NAME, arg, NAME, _usage())))
            return

    # help requested ?
    if opt_help is not None and opt_help != "=0":
        request.write(formatter.rawHTML("""
<p>Processor %s usage:</p>
<pre>
%s
</pre>
""" % (NAME, _usage())))
        return

    # strip the bangpath, then resolve URLs and pseudo-macros
    lines = lines[1:]
    newlines = []
    substs = {}
    try:
        lines = _preprocess(request, formatter, lines, newlines, substs, 0)
    except RuntimeError, str:
        request.write(formatter.rawHTML("""
<p><strong>Error: macro %s: %s</strong></p>
""" % (NAME, str)))
        opt_dbg = True

    # debug ? pre-print and exit
    if opt_dbg:
        l = ["{{{%s" % bang]
        l.extend(lines)
        l.append("}}}")
        request.write(formatter.rawHTML(
            "<pre>\n%s\n</pre>" % '\n'.join(l)))
        return

    # show=0 : the block only defines a graph (e.g. for Include), render nothing
    if not opt_show:
        return

    # go !
    all = '\n'.join(lines).strip()
    name = 'autogenerated-' + sha.new(all).hexdigest()
    pngname = name + '.png'
    dotname = name + '.map'
    need_map = 0 <= all.find('URL')
    pagename = formatter.page.page_name
    attdir = AttachFile.getAttachDir(pagename, create=1) + '/'
    pngpath = attdir + pngname
    mappath = attdir + dotname
    dm2ri = attdir + "delete.me.to.regenerate.images"
    # delete autogenerated attachments if dm2ri attachment does not exist
    if not os.path.isfile(dm2ri):
        # create dm2ri attachment
        open(dm2ri, 'w').close()
        # delete autogenerated attachments
        for root, dirs, files in os.walk(attdir, topdown=False):
            for fname in files:
                if fname.startswith("autogenerated-"):
                    os.remove(os.path.join(root, fname))
    if not os.path.exists(pngpath):
        p = os.popen('dot -Tpng -Gbgcolor=transparent -o ' + pngpath, 'w')
        p.write(all)
        p.close()
    if need_map and not os.path.exists(mappath):
        p = os.popen('dot -Tcmap -o ' + mappath, 'w')
        p.write(all)
        p.close()
    url = AttachFile.getAttachUrl(pagename, pngname, request)
    if not need_map:
        request.write(formatter.image(src = url))
    else:
        request.write(formatter.image(src = url,
                                      usemap = '#' + name, border = 0))
        request.write(formatter.rawHTML('<map name="%s">\n%s\n</map>' %
                                        (name, open(mappath).read())))