"""
    MoinMoin - VisualSiteMap action

    Idea is based on the webdot.py action.

    More or less redid it from scratch. Differs from the webdot action in several ways:
    * Uses the dot executable, not webdot, since webdot is not available on windows.
    * All links up to the search depth are displayed.
    * There is no maximum limit on the number of displayed nodes.
    * Nodes are marked during the traversal, so each node is visited only once.
    * The output method of class LocalSiteMap works on the whole tree,
      so additional tree node information may be shown in the graph.
    * All edges between nodes contained in the graph are displayed, even if MAX_DEPTH is exceeded that way.
    * Optional depth controls
    * Nodes linked at least STRONG_LINK_NR times are highlighted using the STRONG_COLOR.
    * Search depth is configurable.

    Add the following line to your (apache?) webserver configuration:
      Alias /moin_cache/ /var/cache/moin/
    See CACHE_URL and CACHE_DIR below.
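    Depending on your Apache version you may also have to allow access to the
    cache directory, e.g. for Apache 2.4 (adjust the path to your setup):
      <Directory /var/cache/moin/>
        Require all granted
      </Directory>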

    Add this to your stylesheet:
    img.sitemap
    {
      border-width: 1px;
      border-style: solid;
      border-color: #000000;
    }

    The following settings may be worth a change:
    * DEPTH_CONTROL
    * OUTPUT_FORMAT
    * DEFAULT_DEPTH
    * MAX_DEPTH
    * LINK_TO_SITEMAP

    07.10.2004
    * Maximum image size can be configured
    * Output image format is configurable
    * David Linke changed the output code (print() -> request.write())
    * Changed link counting algorithm to get the depth controls right.

    08.10.2004
    * IE caching problem with depth controls resolved. Now the current search depth is part of the file names.
    * Problems with pagenames containing non ASCII characters fixed.

    14.03.2005
    * cleanup & adapted to moin 1.3.4 -- ThomasWaldmann
    * Fixed for utf-8 and sub pages

    16.3.2005
    * included patch from David Linke for Windows compatibility
    * FONTNAME and FONTSIZE
    * removed invalid print debug statements
    * use config.charset

    19.08.2014
    * Cleanup & adapted for moin 1.9.4
    * Changed default output type from PNG to SVG.
    * Use configured category regex instead of separate setting.
    * Configurable: node links point to the VisualSiteMap of the target node (instead of its wiki page).
    * Fixed FS/URL escaping inconsistency for pagenames with special characters.
    * Enabled DEPTH_CONTROL by default.

    01.09.2015 - v1.10
    * escape XML entities in URLs (unescaped entities broke the SVG output)
    * fixed a python3-incompatibility (old octal number format)
    * tested with moin 1.9.8
"""

##################################################################
# Be warned that calculating large graphs may block your server! #
# So be careful with the parameter settings.                     #
##################################################################

# This should be a public path on your web server. The dot files, images and map files are created in this directory and
# served from there.
#CACHE_DIR  = "C:/DocumentRoot/cache"
#CACHE_URL  = "http://my-server/cache"
CACHE_DIR  = "/var/cache/moin/"
CACHE_URL  = "/moin_cache"

# Absolute location of the dot (or neato) executable.
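# Note: dot produces a hierarchical (layered) layout, neato a spring-model layout.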
#DOT_EXE    = "C:/Programme/ATT/GraphViz/bin/dot.exe"
#DOT_EXE    = "/usr/bin/dot"
DOT_EXE    = "/usr/bin/neato"

# Graph controls.
DEFAULT_DEPTH = 2
STRONG_LINK_NR = 4

# Node links point to the node's VisualSiteMap (instead of its wiki page).
LINK_TO_SITEMAP = True

# Optional controls for interactive modification of the search depth.
DEPTH_CONTROL = True
MAX_DEPTH  = 4

# Desired image format (e.g. svg, png, jpg, gif - see the dot documentation)
OUTPUT_FORMAT = "svg"

# Maximum output size in inches. Set to None to disable size limitation,
# then the graph is made as big as needed (best for readability).
# OUTPUT_SIZE="8,12" sets maximum width to 8, maximum height to 12 inches.
OUTPUT_SIZE = None

# Name and size of the font used.
# Times, Helvetica, Courier and Symbol are supported on any platform.
# Others may NOT be supported.
# When selecting a font, make sure it supports Unicode chars (at least the
# ones you use, e.g. German umlauts or French accented chars).
FONTNAME = "Times"
FONTSIZE = "10"

# Colors of boxes and edges.
BOX_COLOR = "#E0F0FF"
ROOT_COLOR = "#FFE0E0"
STRONG_COLOR = "#E0FFE0"
EDGE_COLOR = "#888888"


import re
import os
import subprocess

from MoinMoin import config, wikiutil
from MoinMoin.Page import Page

# escape special characters ("&", "<", ">") for XML/SVG output;
# xml.sax.saxutils is available in both Python 2 and 3
from xml.sax.saxutils import escape as xml_escape


class LocalSiteMap:
    def __init__(self, name, maxdepth):
        self.name = name
        self.maxdepth = maxdepth
        self.result = []

    def output(self, request):
        pagebuilder = GraphBuilder(request, self.maxdepth)
        root = pagebuilder.build_graph(self.name)
        # count links
        for edge in pagebuilder.all_edges:
            edge[0].linkedfrom += 1
            edge[1].linkedto += 1
        # write nodes
        for node in pagebuilder.all_nodes:
            self.append('  "%s"'% node.name)
            if node.depth > 0:
                if node.linkedto >= STRONG_LINK_NR:
                    self.append('  [label="%s",color="%s"];\n' % (node.name, STRONG_COLOR))
                else:
                    self.append('  [label="%s"];\n' % (node.name))
            else:
                self.append('[label="%s",shape=box,style=filled,color="%s"];\n' % (node.name, ROOT_COLOR))
        # write edges
        for edge in pagebuilder.all_edges:
            self.append('  "%s"->"%s";\n' % (edge[0].name, edge[1].name))

        return ''.join(self.result)

    def append(self, text):
        self.result.append(text)


class GraphBuilder:

    def __init__(self, request, maxdepth):
        self.request = request
        self.maxdepth = maxdepth
        self.all_nodes = []
        self.all_edges = []

    def is_ok(self, child):
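        # A child page is included if the user may read it, it exists and it is not a category page.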
        if not self.request.user.may.read(child):
            return 0
        if Page(self.request, child).exists() and not re.search(self.request.cfg.page_category_regex, child):
            return 1
        return 0

    def build_graph(self, name):
        # map page names to their nodes, so each page gets exactly one node
        nodesMap = {}
        root = Node(name)
        nodesMap[name] = root
        root.visited = 1
        self.all_nodes.append(root)
        self.recurse_build([root], 1, nodesMap)
        return root

    def recurse_build(self, nodes, depth, nodesMap):
        # collect all nodes of the current search depth here for the next recursion step
        child_nodes = []
        # iterate over the nodes
        for node in nodes:
            for child in Page(self.request, node.name).getPageLinks(self.request):
                if self.is_ok(child):
                    # Create the node for this page or reuse the already existing one
                    if child not in nodesMap:
                        newNode = Node(child)
                        newNode.depth = depth
                    else:
                        newNode = nodesMap[child]
                    # If the current depth doesn't exceed the maximum depth, add newNode to the graph
                    if depth <= self.maxdepth:
                        # Visit each node only once; new nodes are scheduled for the next recursion step.
                        if not newNode.visited:
                            newNode.visited = 1
                            nodesMap[child] = newNode
                            self.all_nodes.append(newNode)
                            child_nodes.append(newNode)
                            node.append(newNode)
                        # Draw an edge (each edge is added only once).
                        edge = (node, newNode)
                        if edge not in self.all_edges:
                            self.all_edges.append(edge)
        # recurse, if the current recursion step yielded new children
        if child_nodes:
            self.recurse_build(child_nodes, depth + 1, nodesMap)


class Node:
    def __init__(self, name):
        self.name = name
        self.children = []
        self.visited = 0
        self.linkedfrom = 0
        self.linkedto = 0
        self.depth = 0

    def append(self, node):
        self.children.append(node)


def execute(pagename, request):
    _ = request.getText

    maxdepth = DEFAULT_DEPTH
    if DEPTH_CONTROL and 'depth' in request.values:
        maxdepth = int(request.values['depth'])

    if maxdepth > MAX_DEPTH:
        maxdepth = MAX_DEPTH

    baseurl = request.getBaseURL().rstrip("/")
    def get_page_link(pname, to_sitemap=LINK_TO_SITEMAP, **kwargs):
        if pname is None:
            pagelinkname = r'\N'
        else:
            pagelinkname = wikiutil.quoteWikinameURL(pname)
        if to_sitemap:
            link = "%s/%s?action=VisualSiteMap" % (baseurl, pagelinkname)
            for key, value in kwargs.iteritems():
                link += xml_escape("&%s=%s" % (key, value))
        else:
            link = "%s/%s" % (baseurl, pagelinkname)
        return link
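    # For illustration (page name is hypothetical): with LINK_TO_SITEMAP enabled,
    # get_page_link(u"FrontPage", depth=3) yields
    #   <baseurl>/FrontPage?action=VisualSiteMap&amp;depth=3
    # while get_page_link(u"FrontPage", to_sitemap=False) yields
    #   <baseurl>/FrontPage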

    request.theme.send_title(_('Visual Map of %s') % pagename, pagename=pagename)

    wikinamefs = wikiutil.quoteWikinameFS(pagename)
    fnprefix = os.path.join(CACHE_DIR, '%s_%s' % (wikinamefs, maxdepth))
    dotfilename = '%s.%s' % (fnprefix, 'dot')
    imagefilename = '%s.%s' % (fnprefix, OUTPUT_FORMAT)
    mapfilename = '%s.%s' % (fnprefix, 'cmap')
    imageurl = '%s/%s_%s.%s' % (CACHE_URL, wikinamefs, maxdepth, OUTPUT_FORMAT)

    lsm = LocalSiteMap(pagename, maxdepth).output(request).encode(config.charset)

    os.umask(0o22)
    dotfile = open(dotfilename, 'w')
    dotfile.write('digraph G {\n')
    if OUTPUT_SIZE:
        dotfile.write('  size="%s"\n' % OUTPUT_SIZE)
        dotfile.write('  ratio=compress;\n')
    dotfile.write('  URL="%s";\n' % get_page_link(pagename, to_sitemap=False))
    dotfile.write('  overlap=false;\n')
    dotfile.write('  concentrate=true;\n')
    dotfile.write('  edge [color="%s"];\n' % EDGE_COLOR)
    dotfile.write('  node [URL="%s", ' % get_page_link(None, depth=maxdepth))
    dotfile.write('fontcolor=black, fontname="%s", fontsize=%s, style=filled, color="%s"]\n' % (FONTNAME, FONTSIZE, BOX_COLOR))
    dotfile.write(lsm)
    dotfile.write('}\n')
    dotfile.close()
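    # For illustration, with the default settings and a hypothetical wiki where
    # FrontPage links to RecentChanges, the generated dot file looks roughly like:
    #   digraph G {
    #     URL="<baseurl>/FrontPage";
    #     overlap=false;
    #     concentrate=true;
    #     edge [color="#888888"];
    #     node [URL="<baseurl>/\N?action=VisualSiteMap&amp;depth=2", fontcolor=black, ...]
    #     "FrontPage"[label="FrontPage",shape=box,style=filled,color="#FFE0E0"];
    #     "RecentChanges"  [label="RecentChanges"];
    #     "FrontPage"->"RecentChanges";
    #   }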

    subprocess.call([DOT_EXE, "-T%s" % OUTPUT_FORMAT, "-o%s" % imagefilename, dotfilename])
    subprocess.call([DOT_EXE, "-Tcmap", "-o%s" % mapfilename, dotfilename])

    # show the image
    request.write('<center><img class="sitemap" border="1" src="%s" usemap="#map1"/></center>' % imageurl)

    # image map for links ("img" does not enable svg links)
    request.write('<map name="map1">')
    mapfile = open(mapfilename, 'r')
    for row in mapfile:
        request.write(row)
    mapfile.close()
    request.write('</map>')

    if DEPTH_CONTROL:
        links = []
        if maxdepth > 1:
            links.append('<a href="%s">Less</a>' % get_page_link(pagename, depth=maxdepth-1))
        if maxdepth < MAX_DEPTH:
            links.append('<a href="%s">More</a>' % get_page_link(pagename, depth=maxdepth+1))
        request.write('<p align="center">%s</p>' % ' | '.join(links))

    request.write('<p align="center"><small>Search depth is %s. Nodes linked at least %s times are highlighted.</small></p>' % (maxdepth, STRONG_LINK_NR))

    request.theme.send_footer(pagename)

