# HG changeset patch # User Alexander Schremmer # Date 1148774075 -7200 # Node ID 43b158d3cf225442ca342c124e428160f525b67c # Parent f78e361cb7862c392dc55afa8db0156f968c3d73 Clarified comment in the mailimportconf.py file diff -r f78e361cb786 -r 43b158d3cf22 wiki/config/mailimportconf.py --- a/wiki/config/mailimportconf.py Sat May 27 23:25:52 2006 +0200 +++ b/wiki/config/mailimportconf.py Sun May 28 01:54:35 2006 +0200 @@ -3,5 +3,5 @@ # This secret has to be known by the wiki server mailimport_secret = u"foo" -# Only needed for wiki farms -mailimport_url = u"http://localhost:81/?action=xmlrpc2" +# The target wiki URL +mailimport_url = u"http://localhost/?action=xmlrpc2" # HG changeset patch # User Alexander Schremmer # Date 1149497866 -7200 # Node ID 0ec051831d5277d7d7f413520fb239f67cdc2845 # Parent 8802d6935b6fad5ee15628e7f35a8508ba091bd8 Add listAttachments, getAttachment, and putAttachment to the xmlrpc api. (Thanks to Matthew Gilbert) diff -r 8802d6935b6f -r 0ec051831d52 MoinMoin/xmlrpc/__init__.py --- a/MoinMoin/xmlrpc/__init__.py Fri Jun 02 21:48:03 2006 +0200 +++ b/MoinMoin/xmlrpc/__init__.py Mon Jun 05 10:57:46 2006 +0200 @@ -27,12 +27,13 @@ from MoinMoin.util import pysupport modules = pysupport.getPackageModules(__file__) -import sys, time, xmlrpclib +import os, sys, time, xmlrpclib from MoinMoin import config, user, wikiutil from MoinMoin.Page import Page from MoinMoin.PageEditor import PageEditor from MoinMoin.logfile import editlog +from MoinMoin.action import AttachFile _debug = 0 @@ -396,6 +397,71 @@ class XmlRpcBase: return [(self._outstr(hit.page_name), self._outstr(results.formatContext(hit, 180, 1))) for hit in results.hits] + + def xmlrpc_listAttachments(self, pagename): + """ Get all attachments associated with pagename + + @param pagename: pagename (utf-8) + @rtype: list + @return: a list of utf-8 attachment names + """ + pagename = self._instr(pagename) + # User may read page? 
+ if not self.request.user.may.read(pagename): + return self.notAllowedFault() + + result = AttachFile._get_files(self.request, pagename) + return result + + def xmlrpc_getAttachment(self, pagename, attachname): + """ Get attachname associated with pagename + + @param pagename: pagename (utf-8) + @param attachname: attachment name (utf-8) + @rtype base64 + @return base64 data + """ + pagename = self._instr(pagename) + # User may read page? + if not self.request.user.may.read(pagename): + return self.notAllowedFault() + + filename = wikiutil.taintfilename(filename) + filename = AttachFile.getFilename(self.request, pagename, attachname) + if not os.path.isfile(filename): + return self.noSuchPageFault() + return self._outlob(open(filename, 'rb').read()) + + def xmlrpc_putAttachment(self, pagename, attachname, data): + """ Set attachname associated with pagename to data + + @param pagename: pagename (utf-8) + @param attachname: attachment name (utf-8) + @param data: file data (base64) + @rtype boolean + @return True if attachment was set + """ + pagename = self._instr(pagename) + # User may read page? 
+ if not self.request.user.may.read(pagename): + return self.notAllowedFault() + + if not self.request.cfg.xmlrpc_putpage_enabled: + return xmlrpclib.Boolean(0) + if self.request.cfg.xmlrpc_putpage_trusted_only and not self.request.user.trusted: + return xmlrpclib.Fault(1, "You are not allowed to edit this page") + # also check ACLs + if not self.request.user.may.write(pagename): + return xmlrpclib.Fault(1, "You are not allowed to edit this page") + + attachname = wikiutil.taintfilename(attachname) + filename = AttachFile.getFilename(self.request, pagename, attachname) + if os.path.exists(filename) and not os.path.isfile(filename): + return self.noSuchPageFault() + open(filename, 'wb+').write(data.data) + os.chmod(filename, 0666 & config.umask) + AttachFile._addLogEntry(self.request, 'ATTNEW', pagename, filename) + return xmlrpclib.Boolean(1) def process(self): """ xmlrpc v1 and v2 dispatcher """ diff -r 8802d6935b6f -r 0ec051831d52 docs/CHANGES --- a/docs/CHANGES Fri Jun 02 21:48:03 2006 +0200 +++ b/docs/CHANGES Mon Jun 05 10:57:46 2006 +0200 @@ -77,6 +77,7 @@ Version 1.6.current: * You can have a common cache_dir for your farm (will save a bit space and cpu time as it shares some stuff). You need to set "cache_dir = '/some/farm/cachedir' in your farmconfig. + * Added XMLRPC methods for attachment handling. Thanks to Matthew Gilbert. Bugfixes: * on action "info" page, "revert" link will not be displayed for empty page # HG changeset patch # User Alexander Schremmer # Date 1149498018 -7200 # Node ID 9595eaf676a716227726601d3dc887957a6fd505 # Parent 3fbbb954f275a353b6917c7d431e44950b240a98 Added note in CHANGES. diff -r 3fbbb954f275 -r 9595eaf676a7 docs/CHANGES --- a/docs/CHANGES Mon Jun 05 10:58:54 2006 +0200 +++ b/docs/CHANGES Mon Jun 05 11:00:18 2006 +0200 @@ -78,6 +78,7 @@ Version 1.6.current: and cpu time as it shares some stuff). You need to set "cache_dir = '/some/farm/cachedir' in your farmconfig. * Added XMLRPC methods for attachment handling. 
Thanks to Matthew Gilbert. + * Added TLS/SSL support to the standalone server. Thanks to Matthew Gilbert. Bugfixes: * on action "info" page, "revert" link will not be displayed for empty page # HG changeset patch # User Alexander Schremmer # Date 1149759673 -7200 # Node ID 0ba3bd233965b700f245b99a36df7eb42143cf67 # Parent 86ca6b17aba9aa5f12a5b4a9309fb08e5be4fa0c Added entry in the CHANGES file. diff -r 86ca6b17aba9 -r 0ba3bd233965 docs/CHANGES --- a/docs/CHANGES Thu Jun 08 11:37:36 2006 +0200 +++ b/docs/CHANGES Thu Jun 08 11:41:13 2006 +0200 @@ -103,6 +103,7 @@ Version 1.6.current: * allow "-" in usernames (fixes "Invalid user name" msg) * fixed smiley caching bug (smileys didn't change theme) * fixed backtrace when user removed css_url entry from user_form_fields + * Fixed the output of macro and "attachment:" usages of the rst parser. Other changes: * we use (again) the same browser compatibility check as FCKeditor uses # HG changeset patch # User Alexander Schremmer # Date 1149763762 -7200 # Node ID df1f57a8a286405944a95b9861ef83577bfcf327 # Parent 0ba3bd233965b700f245b99a36df7eb42143cf67 Fix to the getAttachment XMLRPC method. diff -r 0ba3bd233965 -r df1f57a8a286 MoinMoin/xmlrpc/__init__.py --- a/MoinMoin/xmlrpc/__init__.py Thu Jun 08 11:41:13 2006 +0200 +++ b/MoinMoin/xmlrpc/__init__.py Thu Jun 08 12:49:22 2006 +0200 @@ -412,7 +412,7 @@ class XmlRpcBase: result = AttachFile._get_files(self.request, pagename) return result - + def xmlrpc_getAttachment(self, pagename, attachname): """ Get attachname associated with pagename @@ -425,13 +425,13 @@ class XmlRpcBase: # User may read page? 
if not self.request.user.may.read(pagename): return self.notAllowedFault() - - filename = wikiutil.taintfilename(filename) - filename = AttachFile.getFilename(self.request, pagename, attachname) + + filename = wikiutil.taintfilename(self._instr(attachname)) + filename = AttachFile.getFilename(self.request, pagename, filename) if not os.path.isfile(filename): return self.noSuchPageFault() return self._outlob(open(filename, 'rb').read()) - + def xmlrpc_putAttachment(self, pagename, attachname, data): """ Set attachname associated with pagename to data # HG changeset patch # User Alexander Schremmer # Date 1149199636 -7200 # Node ID 9834dda36973b71a61d4d051f980b51d1b249ba3 # Parent 43b158d3cf225442ca342c124e428160f525b67c Added conflict icon in RecentChanges, refactored conflict handling in the code. Translators, esp. german translators!, there seems to be an outdated string. You will see it on the editing conflict. diff -r 43b158d3cf22 -r 9834dda36973 MoinMoin/Page.py --- a/MoinMoin/Page.py Sun May 28 01:54:35 2006 +0200 +++ b/MoinMoin/Page.py Fri Jun 02 00:07:16 2006 +0200 @@ -1681,4 +1681,23 @@ class Page: text = text.replace(u'\r', u'') return text - + def isConflict(self): + """ Returns true if there is a known editing conflict for that page. + + @return: true if there is a known conflict. + """ + + cache = caching.CacheEntry(self.request, self, 'conflict', scope='item') + return cache.exists() + + def setConflict(self, state): + """ Sets the editing conflict flag. + + @param state: bool, true if there is a conflict. + """ + + cache = caching.CacheEntry(self.request, self, 'conflict', scope='item') + if state: + cache.update("") # touch it! 
+ else: + cache.remove() diff -r 43b158d3cf22 -r 9834dda36973 MoinMoin/PageEditor.py --- a/MoinMoin/PageEditor.py Sun May 28 01:54:35 2006 +0200 +++ b/MoinMoin/PageEditor.py Fri Jun 02 00:07:16 2006 +0200 @@ -109,6 +109,7 @@ class PageEditor(Page): self.set_raw_body(verynewtext) return True + # this should never happen, except for empty pages return False def sendconfirmleaving(self): @@ -923,7 +924,10 @@ Try a different name.""") % (newpagename if newtext==saved_page.get_raw_body(): msg = _("You already saved this page!") return msg - + else: + msg = _("You already edited this page! Please do not use the back button.") + raise self.EditConflict, msg + msg = _("""Sorry, someone else saved the page while you edited it. Please do the following: Use the back button of your browser, and cut&paste diff -r 43b158d3cf22 -r 9834dda36973 MoinMoin/PageGraphicalEditor.py --- a/MoinMoin/PageGraphicalEditor.py Sun May 28 01:54:35 2006 +0200 +++ b/MoinMoin/PageGraphicalEditor.py Fri Jun 02 00:07:16 2006 +0200 @@ -141,6 +141,8 @@ Have a look at the diff of %(difflink)s # page creation rev = 0 + self.setConflict(bool(conflict_msg)) + # Page editing is done using user language self.request.setContentLanguage(self.request.lang) diff -r 43b158d3cf22 -r 9834dda36973 MoinMoin/action/__init__.py --- a/MoinMoin/action/__init__.py Sun May 28 01:54:35 2006 +0200 +++ b/MoinMoin/action/__init__.py Fri Jun 02 00:07:16 2006 +0200 @@ -427,26 +427,19 @@ def do_edit(pagename, request): # Save new text else: try: + still_conflict = "/!\ '''Edit conflict" in savetext + pg.setConflict(still_conflict) savemsg = pg.saveText(savetext, rev, trivial=trivial, comment=comment) - except pg.EditConflict, msg: + except pg.EditConflict, e: + msg = e.message + # Handle conflict and send editor - - # TODO: conflict messages are duplicated from PageEditor, - # refactor to one place only. 
- conflict_msg = _('Someone else changed this page while you were editing!') pg.set_raw_body(savetext, modified=1) - if pg.mergeEditConflict(rev): - conflict_msg = _("""Someone else saved this page while you were editing! -Please review the page and save then. Do not save this page as it is! -Have a look at the diff of %(difflink)s to see what has been changed.""") % { - 'difflink': pg.link_to(pg.request, - querystr='action=diff&rev=%d' % rev) - } - # We don't send preview when we do merge conflict - pg.sendEditor(msg=conflict_msg, comment=comment) - return - else: - savemsg = conflict_msg + + pg.mergeEditConflict(rev) + # We don't send preview when we do merge conflict + pg.sendEditor(msg=msg, comment=comment) + return except pg.SaveError, msg: # msg contain a unicode string diff -r 43b158d3cf22 -r 9834dda36973 MoinMoin/macro/RecentChanges.py --- a/MoinMoin/macro/RecentChanges.py Sun May 28 01:54:35 2006 +0200 +++ b/MoinMoin/macro/RecentChanges.py Fri Jun 02 00:07:16 2006 +0200 @@ -60,6 +60,11 @@ def format_page_edits(macro, lines, book if not page.exists(): # indicate page was deleted html_link = request.theme.make_icon('deleted') + elif page.isConflict(): + img = request.theme.make_icon('help') # XXX introduce new icon + html_link = wikiutil.link_tag(request, + wikiutil.quoteWikinameURL(pagename) + "?action=edit", + img, formatter=macro.formatter) elif is_new: # show "NEW" icon if page was created after the user's bookmark if hilite: # HG changeset patch # User Alexander Schremmer # Date 1149343693 -7200 # Node ID 3b6a22e4e2f9c0a611739f97c27e4f099efcea92 # Parent 9834dda36973b71a61d4d051f980b51d1b249ba3 Changed conflict icon from question mark to alert sign. 
diff -r 9834dda36973 -r 3b6a22e4e2f9 MoinMoin/macro/RecentChanges.py --- a/MoinMoin/macro/RecentChanges.py Fri Jun 02 00:07:16 2006 +0200 +++ b/MoinMoin/macro/RecentChanges.py Sat Jun 03 16:08:13 2006 +0200 @@ -61,7 +61,8 @@ def format_page_edits(macro, lines, book # indicate page was deleted html_link = request.theme.make_icon('deleted') elif page.isConflict(): - img = request.theme.make_icon('help') # XXX introduce new icon + img = macro.formatter.smiley("/!\\") + #img = request.theme.make_icon('help') html_link = wikiutil.link_tag(request, wikiutil.quoteWikinameURL(pagename) + "?action=edit", img, formatter=macro.formatter) # HG changeset patch # User Alexander Schremmer # Date 1149357391 -7200 # Node ID e46109ce944ee4e52a0b40d5e985e4a826905e21 # Parent 3b6a22e4e2f9c0a611739f97c27e4f099efcea92 Introduced multicall support. Refactored XmlRpcBase (moved methods, separated dispatcher). Added Python 2.3 support. diff -r 3b6a22e4e2f9 -r e46109ce944e MoinMoin/xmlrpc/__init__.py --- a/MoinMoin/xmlrpc/__init__.py Sat Jun 03 16:08:13 2006 +0200 +++ b/MoinMoin/xmlrpc/__init__.py Sat Jun 03 19:56:31 2006 +0200 @@ -107,6 +107,100 @@ class XmlRpcBase: '\n'.join(traceback.format_tb(sys.exc_info()[2])), ) + def process(self): + """ xmlrpc v1 and v2 dispatcher """ + try: + data = self.request.read() + params, method = xmlrpclib.loads(data) + + if _debug: + sys.stderr.write('- XMLRPC ' + '-' * 70 + '\n') + sys.stderr.write('%s(%s)\n\n' % (method, repr(params))) + + response = self.dispatch(method, params) + + except: + # report exception back to server + response = xmlrpclib.dumps(xmlrpclib.Fault(1, self._dump_exc())) + else: + # wrap response in a singleton tuple + response = (response,) + + # serialize it + response = xmlrpclib.dumps(response, methodresponse=1) + + self.request.http_headers([ + "Content-Type: text/xml;charset=utf-8", + "Content-Length: %d" % len(response), + ]) + self.request.write(response) + + if _debug: + sys.stderr.write('- XMLRPC ' + '-' * 70 + '\n') + 
sys.stderr.write(response + '\n\n') + + def dispatch(self, method, params): + method = method.replace(".", "_") + + try: + fn = getattr(self, 'xmlrpc_' + method) + except AttributeError: + try: + fn = wikiutil.importPlugin(self.request.cfg, 'xmlrpc', + method, 'execute') + except wikiutil.PluginMissingError: + response = xmlrpclib.Fault(1, "No such method: %s." % + method) + else: + response = fn(self, *params) + else: + response = fn(*params) + + return response + + # Common faults ----------------------------------------------------- + + def notAllowedFault(self): + return xmlrpclib.Fault(1, "You are not allowed to read this page.") + + def noSuchPageFault(self): + return xmlrpclib.Fault(1, "No such page was found.") + + ############################################################################# + ### System methods + ############################################################################# + + def xmlrpc_system_multicall(self, call_list): + """system.multicall([{'methodName': 'add', 'params': [2, 2]}, ...]) => [[4], ...] + + Allows the caller to package multiple XML-RPC calls into a single + request. + + See http://www.xmlrpc.com/discuss/msgReader$1208 + + Copied from SimpleXMLRPCServer.py + """ + + results = [] + for call in call_list: + method_name = call['methodName'] + params = call['params'] + + try: + # XXX A marshalling error in any response will fail the entire + # multicall. If someone cares they should fix this. 
+ results.append([self.dispatch(method_name, params)]) + except xmlrpclib.Fault, fault: + results.append( + {'faultCode' : fault.faultCode, + 'faultString' : fault.faultString} + ) + except: + results.append( + {'faultCode' : 1, + 'faultString' : "%s:%s" % (sys.exc_type, sys.exc_value)} + ) + return results ############################################################################# ### Interface implementation @@ -397,57 +491,6 @@ class XmlRpcBase: self._outstr(results.formatContext(hit, 180, 1))) for hit in results.hits] - def process(self): - """ xmlrpc v1 and v2 dispatcher """ - try: - data = self.request.read() - params, method = xmlrpclib.loads(data) - - if _debug: - sys.stderr.write('- XMLRPC ' + '-' * 70 + '\n') - sys.stderr.write('%s(%s)\n\n' % (method, repr(params))) - - try: - fn = getattr(self, 'xmlrpc_' + method) - except AttributeError: - try: - fn = wikiutil.importPlugin(self.request.cfg, 'xmlrpc', - method, 'execute') - except wikiutil.PluginMissingError: - response = xmlrpclib.Fault(1, "No such method: %s." 
% - method) - else: - response = fn(self, *params) - else: - response = fn(*params) - except: - # report exception back to server - response = xmlrpclib.dumps(xmlrpclib.Fault(1, self._dump_exc())) - else: - # wrap response in a singleton tuple - response = (response,) - - # serialize it - response = xmlrpclib.dumps(response, methodresponse=1) - - self.request.http_headers([ - "Content-Type: text/xml;charset=utf-8", - "Content-Length: %d" % len(response), - ]) - self.request.write(response) - - if _debug: - sys.stderr.write('- XMLRPC ' + '-' * 70 + '\n') - sys.stderr.write(response + '\n\n') - - # Common faults ----------------------------------------------------- - - def notAllowedFault(self): - return xmlrpclib.Fault(1, "You are not allowed to read this page.") - - def noSuchPageFault(self): - return xmlrpclib.Fault(1, "No such page was found.") - class XmlRpc1(XmlRpcBase): diff -r 3b6a22e4e2f9 -r e46109ce944e MoinMoin/support/multicall.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/MoinMoin/support/multicall.py Sat Jun 03 19:56:31 2006 +0200 @@ -0,0 +1,70 @@ +""" XMLRPC MultiCall support for Python 2.3. Copied from xmlrpclib.py of Python 2.4.3. """ + +try: + from xmlrpclib import MultiCall +except ImportError: + from xmlrpclib import Fault + + class _MultiCallMethod: + # some lesser magic to store calls made to a MultiCall object + # for batch execution + def __init__(self, call_list, name): + self.__call_list = call_list + self.__name = name + def __getattr__(self, name): + return _MultiCallMethod(self.__call_list, "%s.%s" % (self.__name, name)) + def __call__(self, *args): + self.__call_list.append((self.__name, args)) + + class MultiCallIterator: + """Iterates over the results of a multicall. 
Exceptions are + thrown in response to xmlrpc faults.""" + + def __init__(self, results): + self.results = results + + def __getitem__(self, i): + item = self.results[i] + if type(item) == type({}): + raise Fault(item['faultCode'], item['faultString']) + elif type(item) == type([]): + return item[0] + else: + raise ValueError,\ + "unexpected type in multicall result" + + class MultiCall: + """server -> a object used to boxcar method calls + + server should be a ServerProxy object. + + Methods can be added to the MultiCall using normal + method call syntax e.g.: + + multicall = MultiCall(server_proxy) + multicall.add(2,3) + multicall.get_address("Guido") + + To execute the multicall, call the MultiCall object e.g.: + + add_result, address = multicall() + """ + + def __init__(self, server): + self.__server = server + self.__call_list = [] + + def __repr__(self): + return "" % id(self) + + __str__ = __repr__ + + def __getattr__(self, name): + return _MultiCallMethod(self.__call_list, name) + + def __call__(self): + marshalled_list = [] + for name, args in self.__call_list: + marshalled_list.append({'methodName' : name, 'params' : args}) + + return MultiCallIterator(self.__server.system.multicall(marshalled_list)) # HG changeset patch # User Alexander Schremmer # Date 1149357979 -7200 # Node ID c4c66a5a2221ae3e15755dc0cf12f148a2e29959 # Parent e46109ce944ee4e52a0b40d5e985e4a826905e21 Added xmlrpc method to return the MoinMoin version. 
diff -r e46109ce944e -r c4c66a5a2221 MoinMoin/xmlrpc/__init__.py --- a/MoinMoin/xmlrpc/__init__.py Sat Jun 03 19:56:31 2006 +0200 +++ b/MoinMoin/xmlrpc/__init__.py Sat Jun 03 20:06:19 2006 +0200 @@ -491,6 +491,9 @@ class XmlRpcBase: self._outstr(results.formatContext(hit, 180, 1))) for hit in results.hits] + def xmlrpc_getMoinVersion(self): + from MoinMoin import version + return (version.project, version.release, version.revision) class XmlRpc1(XmlRpcBase): # HG changeset patch # User Alexander Schremmer # Date 1149454770 -7200 # Node ID d5c59be8b62c40372813c43161a430c72e958b94 # Parent c4c66a5a2221ae3e15755dc0cf12f148a2e29959 Added entries to my CHANGES file. diff -r c4c66a5a2221 -r d5c59be8b62c docs/CHANGES.aschremmer --- a/docs/CHANGES.aschremmer Sat Jun 03 20:06:19 2006 +0200 +++ b/docs/CHANGES.aschremmer Sun Jun 04 22:59:30 2006 +0200 @@ -1,6 +1,3 @@ Please use your CHANGES.$yourname for re -Please use your CHANGES.$yourname for recording your changes you do while -Google Summer of Code. - Branch moin/1.6-sync-aschremmer =============================== @@ -8,26 +5,28 @@ Branch moin/1.6-sync-aschremmer * ... ToDo: - * ... + * Implement actual syncronisation. + * Implement a cross-site authentication system. New Features: - * ... - + * XMLRPC method to return the Moin version + * XMLRPC multicall support + * Conflict icon in RecentChanges + Bugfixes (only stuff that is buggy in moin/1.6 main branch): * ... Other Changes: - * ... - + * Refactored conflict resolution and XMLRPC code. + Developer notes: * ... Diary ===== -Please make at least one entry per day (and commit it) about what your work was about. -2006-05-29 ... -2006-05-30 ... -2006-05-31 ... 
+Week 21: Basic Infrastructur setup (repos), initial talks to the mentor, started writing the design document, helped other students to get started, started evaluating Mercurial as a DVCS backend +Week 22: Tax forms, Fulfilled transcription request, written conflict icon support, refactored conflict handling, changed conflict icon, Added xmlrpc multicall support into the server and backported the client code from python 2.4 + # HG changeset patch # User Alexander Schremmer # Date 1149455653 -7200 # Node ID f3c1ea6ef86e75eb825ee77c68ed74b1071ae521 # Parent 73850527064e12653d23760028936e7045a7161b Refactored conflict messages, removed link to diff because it is not necessary for a 3-way merge. diff -r 73850527064e -r f3c1ea6ef86e MoinMoin/PageEditor.py --- a/MoinMoin/PageEditor.py Sun Jun 04 23:00:23 2006 +0200 +++ b/MoinMoin/PageEditor.py Sun Jun 04 23:14:13 2006 +0200 @@ -218,11 +218,7 @@ class PageEditor(Page): conflict_msg = _('Someone else changed this page while you were editing!') if self.mergeEditConflict(rev): conflict_msg = _("""Someone else saved this page while you were editing! -Please review the page and save then. Do not save this page as it is! -Have a look at the diff of %(difflink)s to see what has been changed.""") % { - 'difflink': self.link_to(self.request, - querystr='action=diff&rev=%d' % rev) - } +Please review the page and save then. Do not save this page as it is!""") rev = self.current_rev() if conflict_msg: # We don't show preview when in conflict @@ -928,16 +924,8 @@ Try a different name.""") % (newpagename msg = _("You already edited this page! Please do not use the back button.") raise self.EditConflict, msg - msg = _("""Sorry, someone else saved the page while you edited it. - -Please do the following: Use the back button of your browser, and cut&paste -your changes from there. Then go forward to here, and click EditText again. -Now re-add your changes to the current page contents. 
- -''Do not just replace -the content editbox with your version of the page, because that would -delete the changes of the other person, which is excessively rude!'' -""") + msg = _("""Someone else saved this page while you were editing! +Please review the page and save then. Do not save this page as it is!""") if backup_url: msg += "

%s

" % _( diff -r 73850527064e -r f3c1ea6ef86e MoinMoin/PageGraphicalEditor.py --- a/MoinMoin/PageGraphicalEditor.py Sun Jun 04 23:00:23 2006 +0200 +++ b/MoinMoin/PageGraphicalEditor.py Sun Jun 04 23:14:13 2006 +0200 @@ -124,11 +124,7 @@ class PageGraphicalEditor(PageEditor.Pag conflict_msg = _('Someone else changed this page while you were editing!') if self.mergeEditConflict(rev): conflict_msg = _("""Someone else saved this page while you were editing! -Please review the page and save then. Do not save this page as it is! -Have a look at the diff of %(difflink)s to see what has been changed.""") % { - 'difflink': self.link_to(self.request, - querystr='action=diff&rev=%d' % rev) - } +Please review the page and save then. Do not save this page as it is!""") rev = self.current_rev() if conflict_msg: # We don't show preview when in conflict # HG changeset patch # User Alexander Schremmer # Date 1151761199 -7200 # Node ID 62a67c72ca6080f204593dea0bede5a2bc2e673c # Parent 49d164e275f336ce82c01f63a844f042cd1dc6f3 Merged newest htmlmarkup.py changes from upstream (trac). diff -r 49d164e275f3 -r 62a67c72ca60 MoinMoin/support/htmlmarkup.py --- a/MoinMoin/support/htmlmarkup.py Fri Jun 30 23:16:45 2006 +0200 +++ b/MoinMoin/support/htmlmarkup.py Sat Jul 01 15:39:59 2006 +0200 @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# copied from trac.util.markup, version 2006-04-10 +# copied from trac.util.markup, revision 3446, merged on 2006-06-30 # # Copyright (C) 2003-2006 Edgewall Software # All rights reserved. @@ -103,7 +103,7 @@ class Markup(unicode): as is. Escaping quotes is generally only required for strings that are to be used in attribute values. 
""" - if isinstance(text, cls): + if isinstance(text, (cls, Element)): return text text = unicode(text) if not text: @@ -192,7 +192,7 @@ class HTMLSanitizer(HTMLParser): 'td', 'textarea', 'tfoot', 'th', 'thead', 'tr', 'tt', 'u', 'ul', 'var']) safe_attrs = frozenset(['abbr', 'accept', 'accept-charset', - 'accesskey', 'action', 'align', 'alt', 'axis', 'border', + 'accesskey', 'action', 'align', 'alt', 'axis', 'border', 'bgcolor', 'cellpadding', 'cellspacing', 'char', 'charoff', 'charset', 'checked', 'cite', 'class', 'clear', 'cols', 'colspan', 'color', 'compact', 'coords', 'datetime', 'dir', 'disabled', 'enctype', @@ -305,14 +305,9 @@ class Fragment(object): except TypeError: self.children.append(node) - def __getitem__(self, nodes): - """Add child nodes to the element.""" - if not isinstance(nodes, (basestring, Fragment)): - try: - nodes = iter(nodes) - except TypeError: - nodes = [str(nodes)] - self.append(nodes) + def __call__(self, *args): + for arg in args: + self.append(arg) return self def serialize(self): @@ -327,77 +322,78 @@ class Fragment(object): return Markup(''.join(self.serialize())) def __add__(self, other): - return Fragment()[self, other] + return Fragment()(self, other) class Element(Fragment): """Simple XHTML output generator based on the builder pattern. - + Construct XHTML elements by passing the tag name to the constructor: - + >>> print Element('strong') - + Attributes can be specified using keyword arguments. The values of the arguments will be converted to strings and any special XML characters escaped: - + >>> print Element('textarea', rows=10, cols=60) >>> print Element('span', title='1 < 2') >>> print Element('span', title='"baz"') - + The " character is escaped using a numerical entity. The order in which attributes are rendered is undefined. 
- + If an attribute value evaluates to `None`, that attribute is not included in the output: - + >>> print Element('a', name=None) - + Attribute names that conflict with Python keywords can be specified by appending an underscore: - + >>> print Element('div', class_='warning')
- + While the tag names and attributes are not restricted to the XHTML language, some HTML characteristics such as boolean (minimized) attributes and empty elements get special treatment. - + For compatibility with HTML user agents, some XHTML elements need to be closed using a separate closing tag even if they are empty. For this, the close tag is only ommitted for a small set of elements which are known be be safe for use as empty elements: - + >>> print Element('br')
- + Trying to add nested elements to such an element will cause an `AssertionError`: - >>> Element('br')['Oops'] + >>> Element('br')('Oops') Traceback (most recent call last): ... AssertionError: 'br' elements must not have content - + Furthermore, boolean attributes such as "selected" or "checked" are omitted if the value evaluates to `False`. Otherwise, the name of the attribute is used for the value: - + >>> print Element('option', value=0, selected=False) >>> print Element('option', selected='yeah') - - Nested elements can be added to an element using item access notation. - The call notation can also be used for this and for adding attributes - using keyword arguments, as one would do in the constructor. - - >>> print Element('ul')[Element('li'), Element('li')] + + + Nested elements can be added to an element by calling the instance using + positional arguments. The same technique can also be used for adding + attributes using keyword arguments, as one would do in the constructor: + + >>> print Element('ul')(Element('li'), Element('li'))
>>> print Element('a')('Label') Label @@ -407,16 +403,16 @@ class Element(Fragment): Text nodes can be nested in an element by adding strings instead of elements. Any special characters in the strings are escaped automatically: - >>> print Element('em')['Hello world'] + >>> print Element('em')('Hello world') Hello world - >>> print Element('em')[42] + >>> print Element('em')(42) 42 - >>> print Element('em')['1 < 2'] + >>> print Element('em')('1 < 2') 1 < 2 This technique also allows mixed content: - >>> print Element('p')['Hello ', Element('b')['world']] + >>> print Element('p')('Hello ', Element('b')('world'))

Hello world

Elements can also be combined with other elements or strings using the @@ -436,10 +432,8 @@ class Element(Fragment): self(**attr) def __call__(self, *args, **attr): - for arg in args: - self.append(arg) self.attr.update(attr) - return self + return Fragment.__call__(self, *args) def append(self, node): """Append an element or string as child node.""" # HG changeset patch # User Alexander Schremmer # Date 1152196961 -7200 # Node ID e2c03a3c79d65e4e79e712813b995d8cf5683690 # Parent 21095ba16b58c027af20fb1492636258e4e63174 Allow the NewPage macro to add pages below the user's home page. Thanks to Reimar Bauer. diff -r 21095ba16b58 -r e2c03a3c79d6 MoinMoin/macro/NewPage.py --- a/MoinMoin/macro/NewPage.py Tue Jul 04 10:31:25 2006 +0200 +++ b/MoinMoin/macro/NewPage.py Thu Jul 06 16:42:41 2006 +0200 @@ -9,6 +9,7 @@ @copyright: 2004 Vito Miliano (vito_moinnewpagewithtemplate@perilith.com) @copyright: 2004 by Nir Soffer @copyright: 2004 Alexander Schremmer + @copyright: 2006 MoinMoin:ReimarBauer @license: GNU GPL, see COPYING for details. """ @@ -75,6 +76,9 @@ class NewPage: label = self.args.get('buttonLabel') nametemplate = self.args.get('nameTemplate') or u'%s' + if parent == '@ME' and self.request.user.valid: + parent = self.request.user.name + requires_input = nametemplate.find('%s') != -1 if label: # HG changeset patch # User Alexander Schremmer # Date 1152202174 -7200 # Node ID a170df39355f78365b6740f2ff162029103544fb # Parent e2c03a3c79d65e4e79e712813b995d8cf5683690 @SIG@ etc. generate local time stamps now, no difference visible on page view. Thanks to anarcat. diff -r e2c03a3c79d6 -r a170df39355f MoinMoin/PageEditor.py --- a/MoinMoin/PageEditor.py Thu Jul 06 16:42:41 2006 +0200 +++ b/MoinMoin/PageEditor.py Thu Jul 06 18:09:34 2006 +0200 @@ -629,6 +629,34 @@ Try a different name.""") % (newpagename # No mail sent, no message. return '' + def _get_local_timestamp(self): + """ + Returns the string that can be used by the TIME substitution. 
+ + @return: str with a timestamp in it + """ + + now = time.time() + # default: UTC + zone = "Z" + user = self.request.user + + # setup the timezone + if user.valid and user.tz_offset: + tz = user.tz_offset + # round to minutes + tz -= tz % 60 + minutes = tz / 60 + hours = minutes / 60 + minutes -= hours * 60 + + # construct the offset + zone = "%+0.2d%02d" % (hours, minutes) + # correct the time by the offset we've found + now += tz + + return time.strftime("%Y-%m-%dT%H:%M:%S", timefuncs.tmtuple(now)) + zone + def _expand_variables(self, text): """ Expand @VARIABLE@ in `text`and return the expanded text. @@ -639,7 +667,7 @@ Try a different name.""") % (newpagename """ # TODO: Allow addition of variables via wikiconfig or a global # wiki dict. - now = time.strftime("%Y-%m-%dT%H:%M:%SZ", timefuncs.tmtuple()) + now = self._get_local_timestamp() user = self.request.user signature = user.signature() variables = { diff -r e2c03a3c79d6 -r a170df39355f docs/CHANGES --- a/docs/CHANGES Thu Jul 06 16:42:41 2006 +0200 +++ b/docs/CHANGES Thu Jul 06 18:09:34 2006 +0200 @@ -156,6 +156,8 @@ Version 1.6.current: engines don't fetch the targets (if they do, they will just get 403 and cause unnecessary traffic). * Included EmbedObject macro for embedding shockwave flash, mp3, visio, ... + * @SIG@ etc. generate local time stamps now, no difference visible on page + view. Bugfixes: * on action "info" page, "revert" link will not be displayed for empty page # HG changeset patch # User Alexander Schremmer # Date 1152211175 -7200 # Node ID 203686df2c6d17f7d5516bc3fdded4c1e8ad712f # Parent a170df39355f78365b6740f2ff162029103544fb Added a makefile target for coverage analysis of the unit tests (called coverage). 
diff -r a170df39355f -r 203686df2c6d Makefile --- a/Makefile Thu Jul 06 18:09:34 2006 +0200 +++ b/Makefile Thu Jul 06 20:39:35 2006 +0200 @@ -78,6 +78,11 @@ test: @python tests/maketestwiki.py @python tests/runtests.py +coverage: + @python tests/maketestwiki.py + @python -u -m trace --count --coverdir=cover --missing tests/runtests.py + + clean: clean-testwiki clean-pyc rm -rf build # HG changeset patch # User Alexander Schremmer # Date 1152221048 -7200 # Node ID 6cc24670fd0fb9ca4e20486133aad2678953c766 # Parent f719f5c0bd4e0fa23421dbb7c4f1ab6bad40bc4d Speeded up linkto: search by avoiding read locks on pagelinks retrieval. diff -r f719f5c0bd4e -r 6cc24670fd0f MoinMoin/Page.py --- a/MoinMoin/Page.py Thu Jul 06 20:40:56 2006 +0200 +++ b/MoinMoin/Page.py Thu Jul 06 23:24:08 2006 +0200 @@ -1535,7 +1535,7 @@ class Page: """ if not self.exists(): return [] - cache = caching.CacheEntry(request, self, 'pagelinks', scope='item') + cache = caching.CacheEntry(request, self, 'pagelinks', scope='item', do_locking=False) if cache.needsUpdate(self._text_filename()): links = self.parsePageLinks(request) cache.update('\n'.join(links) + '\n', True) diff -r f719f5c0bd4e -r 6cc24670fd0f MoinMoin/caching.py --- a/MoinMoin/caching.py Thu Jul 06 20:40:56 2006 +0200 +++ b/MoinMoin/caching.py Thu Jul 06 23:24:08 2006 +0200 @@ -15,12 +15,13 @@ if locking: from MoinMoin.util import lock class CacheEntry: - def __init__(self, request, arena, key, scope='page_or_wiki'): + def __init__(self, request, arena, key, scope='page_or_wiki', do_locking=True): """ init a cache entry @param request: the request object @param arena: either a string or a page object, when we want to use page local cache area @param key: under which key we access the cache content + @param lock: if there should be a lock, normally True @param scope: the scope where we are caching: 'item' - an item local cache 'wiki' - a wiki local cache @@ -43,7 +44,8 @@ class CacheEntry: self.arena_dir = 
os.path.join(request.cfg.cache_dir, '__common__', arena) filesys.makeDirs(self.arena_dir) self.key = key - if locking: + self.locking = do_locking and locking + if self.locking: self.lock_dir = os.path.join(self.arena_dir, '__lock__') self.rlock = lock.ReadLock(self.lock_dir, 60.0) self.wlock = lock.WriteLock(self.lock_dir, 60.0) @@ -84,7 +86,7 @@ class CacheEntry: def copyto(self, filename): import shutil - if not locking or locking and self.wlock.acquire(1.0): + if not self.locking or self.locking and self.wlock.acquire(1.0): try: shutil.copyfile(filename, self._filename()) try: @@ -92,7 +94,7 @@ class CacheEntry: except OSError: pass finally: - if locking: + if self.locking: self.wlock.release() else: self.request.log("Can't acquire write lock in %s" % self.lock_dir) @@ -100,7 +102,7 @@ class CacheEntry: def update(self, content, encode=False): if encode: content = content.encode(config.charset) - if not locking or locking and self.wlock.acquire(1.0): + if not self.locking or self.locking and self.wlock.acquire(1.0): try: f = open(self._filename(), 'wb') f.write(content) @@ -110,7 +112,7 @@ class CacheEntry: except OSError: pass finally: - if locking: + if self.locking: self.wlock.release() else: self.request.log("Can't acquire write lock in %s" % self.lock_dir) @@ -122,13 +124,13 @@ class CacheEntry: pass def content(self, decode=False): - if not locking or locking and self.rlock.acquire(1.0): + if not self.locking or self.locking and self.rlock.acquire(1.0): try: f = open(self._filename(), 'rb') data = f.read() f.close() finally: - if locking: + if self.locking: self.rlock.release() else: self.request.log("Can't acquire read lock in %s" % self.lock_dir) diff -r f719f5c0bd4e -r 6cc24670fd0f docs/CHANGES --- a/docs/CHANGES Thu Jul 06 20:40:56 2006 +0200 +++ b/docs/CHANGES Thu Jul 06 23:24:08 2006 +0200 @@ -158,6 +158,7 @@ Version 1.6.current: * Included EmbedObject macro for embedding shockwave flash, mp3, visio, ... * @SIG@ etc. 
generate local time stamps now, no difference visible on page view. + * Speeded up linkto search by avoiding read locks on the pagelinks file. Bugfixes: * on action "info" page, "revert" link will not be displayed for empty page # HG changeset patch # User Alexander Schremmer # Date 1152300520 -7200 # Node ID 8166d766c440edd8010f3ea5cf2edf48aac5a4d7 # Parent 51eae6ce2be786d7739e727d04ff3fbced619346 Unused method, not tested -> removed dynamic_content. diff -r 51eae6ce2be7 -r 8166d766c440 MoinMoin/formatter/__init__.py --- a/MoinMoin/formatter/__init__.py Thu Jul 06 19:36:50 2006 +0200 +++ b/MoinMoin/formatter/__init__.py Fri Jul 07 21:28:40 2006 +0200 @@ -325,14 +325,6 @@ class FormatterBase: del p return '' - def dynamic_content(self, parser, callback, arg_list=[], arg_dict={}, - returns_content=1): - content = parser[callback](*arg_list, **arg_dict) - if returns_content: - return content - else: - return '' - # Other ############################################################## def div(self, on, **kw): diff -r 51eae6ce2be7 -r 8166d766c440 MoinMoin/formatter/dom_xml.py --- a/MoinMoin/formatter/dom_xml.py Thu Jul 06 19:36:50 2006 +0200 +++ b/MoinMoin/formatter/dom_xml.py Fri Jul 07 21:28:40 2006 +0200 @@ -217,13 +217,6 @@ class Formatter(FormatterBase): self.text('\n'.join(lines)) + self._set_tag('parser', False)) - def dynamic_content(self, parser, callback, arg_list=[], arg_dict={}, returns_content=1): - content = parser[callback](*arg_list, **arg_dict) - if returns_content: - return content - else: - return '' - def url(self, on, url='', css=None, **kw): kw['href'] = str(url) if css: diff -r 51eae6ce2be7 -r 8166d766c440 MoinMoin/formatter/text_python.py --- a/MoinMoin/formatter/text_python.py Thu Jul 06 19:36:50 2006 +0200 +++ b/MoinMoin/formatter/text_python.py Fri Jul 07 21:28:40 2006 +0200 @@ -111,16 +111,6 @@ if moincode_timestamp > %d or request.cf self.__in_pre = self.formatter.in_pre return result - def dynamic_content(self, parser, callback, 
arg_list=[], arg_dict={}, - returns_content=1): - adjust = self.__adjust_formatter_state() - if returns_content: - return self.__insert_code('%srequest.write(%s.%s(*%r,**%r))' % - (adjust, self.__parser, callback, arg_list, arg_dict)) - else: - return self.__insert_code('%s%s.%s(*%r,**%r)' % - (adjust, self.__parser, callback, arg_list, arg_dict)) - # Public methods --------------------------------------------------- def pagelink(self, on, pagename='', page=None, **kw): # HG changeset patch # User Alexander Schremmer # Date 1152451820 -7200 # Node ID 7cf99d3ddb265693114c364268d851da66dbbc02 # Parent 14c71dafc9dd23741e3c9ee75c5e72741149e875 Added cover and testwiki directory to hgignore diff -r 14c71dafc9dd -r 7cf99d3ddb26 .hgignore --- a/.hgignore Fri Jul 07 21:29:10 2006 +0200 +++ b/.hgignore Sun Jul 09 15:30:20 2006 +0200 @@ -2,4 +2,6 @@ .*\.arch-ids/.* .*\.py[co] sa +cover +testwiki # HG changeset patch # User Alexander Schremmer # Date 1150631888 -7200 # Node ID cd3019c751e6afd49ec299ea75194c5aadc20d8e # Parent 9d74a2f53323788eb16e249c5ea2c758489ad474 Status update diff -r 9d74a2f53323 -r cd3019c751e6 docs/CHANGES.aschremmer --- a/docs/CHANGES.aschremmer Sat Jun 10 16:45:05 2006 +0200 +++ b/docs/CHANGES.aschremmer Sun Jun 18 13:58:08 2006 +0200 @@ -26,7 +26,19 @@ Diary Diary ===== -Week 21: Basic Infrastructur setup (repos), initial talks to the mentor, started writing the design document, helped other students to get started, started evaluating Mercurial as a DVCS backend -Week 22: Tax forms, Fulfilled transcription request, written conflict icon support, refactored conflict handling, changed conflict icon, Added xmlrpc multicall support into the server and backported the client code from python 2.4 +Week 21: Basic Infrastructur setup (repos), + initial talks to the mentor, started writing the design document, + helped other students to get started +Week 22: Tax forms, Fulfilled transcription request, + written conflict icon support, refactored conflict 
handling, + changed conflict icon, + Added xmlrpc multicall support into the server and + backported the client code from python 2.4 +Week 23: Initial thoughts about Mercurial as a base for syncronisation. +Week 24: Evaluation of OpenID as a base for authentication - +Time plan +========= +In July and August, most parts of the implementation will be finished +from 07-10 to 07-14 and from 08-03 to 08-19. Between those time spans, there +are exams. # HG changeset patch # User Alexander Schremmer # Date 1151577392 -7200 # Node ID a7e98fd10e973d0f9626d807cb4a4c637132684e # Parent 3abaabbe631ecd065e0c5270d1dfd502aeacd587 Added dummy auth token methods for XMLRPC, reworded comments. diff -r 3abaabbe631e -r a7e98fd10e97 MoinMoin/xmlrpc/__init__.py --- a/MoinMoin/xmlrpc/__init__.py Thu Jun 29 01:05:53 2006 +0200 +++ b/MoinMoin/xmlrpc/__init__.py Thu Jun 29 12:36:32 2006 +0200 @@ -493,18 +493,34 @@ class XmlRpcBase: for hit in results.hits] def xmlrpc_getMoinVersion(self): + """ Returns a tuple of the MoinMoin version: + (project, release, revision) + """ from MoinMoin import version return (version.project, version.release, version.revision) - + # authorization methods + + def xmlrpc_getAuthToken(self, username, password, *args): + """ Returns a token which can be used for authentication + in other XMLRPC calls. """ + return "foo" + + def xmlrpc_applyAuthToken(self, auth_token, method_name, *args): + # do something with token XXX + return self.dispatch(method_name, args) + # XXX BEGIN WARNING XXX # All xmlrpc_*Attachment* functions have to be considered as UNSTABLE API - # they are neither standard nor are they what we need when we have switched # attachments (1.5 style) to mimetype items (hopefully in 1.6). - # They are likely to get removed again when we remove AttachFile module. - # So use them on your own risk. + # They will be partly removed, esp. the semantics of the function "listAttachments" + # cannot be sensibly defined for items. 
+ # If the first beta or more stable release of 1.6 will have new item semantics, + # we will remove the functions before it is released. def xmlrpc_listAttachments(self, pagename): """ Get all attachments associated with pagename + Deprecated. @param pagename: pagename (utf-8) @rtype: list # HG changeset patch # User Alexander Schremmer # Date 1151602173 -7200 # Node ID 05b7c435a3d6b791733926618a8b23197f558de3 # Parent a7e98fd10e973d0f9626d807cb4a4c637132684e Implemented token functions that use the normal moin id diff -r a7e98fd10e97 -r 05b7c435a3d6 MoinMoin/xmlrpc/__init__.py --- a/MoinMoin/xmlrpc/__init__.py Thu Jun 29 12:36:32 2006 +0200 +++ b/MoinMoin/xmlrpc/__init__.py Thu Jun 29 19:29:33 2006 +0200 @@ -504,10 +504,18 @@ class XmlRpcBase: def xmlrpc_getAuthToken(self, username, password, *args): """ Returns a token which can be used for authentication in other XMLRPC calls. """ - return "foo" + u = user.User(request, name=username, password=password, auth_method='xmlrpc_gettoken') + if u.valid: + return u.id + else: + return None def xmlrpc_applyAuthToken(self, auth_token, method_name, *args): - # do something with token XXX + u = user.User(request, id=cookie[MOIN_ID].value, auth_method='xmlrpc_applytoken') + if u.valid: + self.request.user = u + else: + raise Exception("Invalid token.") # XXX make a distinct class return self.dispatch(method_name, args) # XXX BEGIN WARNING XXX # HG changeset patch # User Alexander Schremmer # Date 1151605433 -7200 # Node ID 413263beeafc47e08bf72bde635c76bea90ef686 # Parent 05b7c435a3d6b791733926618a8b23197f558de3 Changed auth token api to authenticate only (no calls because it would not work with MultiCall otherwise diff -r 05b7c435a3d6 -r 413263beeafc MoinMoin/xmlrpc/__init__.py --- a/MoinMoin/xmlrpc/__init__.py Thu Jun 29 19:29:33 2006 +0200 +++ b/MoinMoin/xmlrpc/__init__.py Thu Jun 29 20:23:53 2006 +0200 @@ -503,20 +503,22 @@ class XmlRpcBase: def xmlrpc_getAuthToken(self, username, password, *args): """ Returns a token 
which can be used for authentication - in other XMLRPC calls. """ - u = user.User(request, name=username, password=password, auth_method='xmlrpc_gettoken') + in other XMLRPC calls. If the token is empty, the username + or the password were wrong. """ + u = user.User(self.request, name=username, password=password, auth_method='xmlrpc_gettoken') if u.valid: return u.id else: - return None - - def xmlrpc_applyAuthToken(self, auth_token, method_name, *args): - u = user.User(request, id=cookie[MOIN_ID].value, auth_method='xmlrpc_applytoken') + return "" + + def xmlrpc_applyAuthToken(self, auth_token): + """ Applies the auth token and thereby authenticates the user. """ + u = user.User(self.request, id=auth_token, auth_method='xmlrpc_applytoken') if u.valid: self.request.user = u + return "SUCCESS" else: raise Exception("Invalid token.") # XXX make a distinct class - return self.dispatch(method_name, args) # XXX BEGIN WARNING XXX # All xmlrpc_*Attachment* functions have to be considered as UNSTABLE API - # HG changeset patch # User Alexander Schremmer # Date 1151605706 -7200 # Node ID 1b84b9a471295230dcc51842c5c6bce420bba73d # Parent 413263beeafc47e08bf72bde635c76bea90ef686 Added entries to my CHANGES file. diff -r 413263beeafc -r 1b84b9a47129 docs/CHANGES.aschremmer --- a/docs/CHANGES.aschremmer Thu Jun 29 20:23:53 2006 +0200 +++ b/docs/CHANGES.aschremmer Thu Jun 29 20:28:26 2006 +0200 @@ -6,7 +6,8 @@ Branch moin/1.6-sync-aschremmer ToDo: * Implement actual syncronisation. - * Implement a cross-site authentication system. + * Implement a cross-site authentication system, i.e. mainly an + identity storage. New Features: * XMLRPC method to return the Moin version @@ -36,6 +37,8 @@ Week 22: Tax forms, Fulfilled transcript backported the client code from python 2.4 Week 23: Initial thoughts about Mercurial as a base for syncronisation. Week 24: Evaluation of OpenID as a base for authentication +Week 25: Conference in Chile. 
+Week 26: Implementation of the XMLRPC authentication system. Time plan ========= # HG changeset patch # User Alexander Schremmer # Date 1151696071 -7200 # Node ID 7bb50cdd6fbe9ce71e1a78a7471eab7d7b58bd1e # Parent 1b84b9a471295230dcc51842c5c6bce420bba73d Reworded a few ideas. diff -r 1b84b9a47129 -r 7bb50cdd6fbe docs/CHANGES.aschremmer --- a/docs/CHANGES.aschremmer Thu Jun 29 20:28:26 2006 +0200 +++ b/docs/CHANGES.aschremmer Fri Jun 30 21:34:31 2006 +0200 @@ -13,6 +13,7 @@ Branch moin/1.6-sync-aschremmer * XMLRPC method to return the Moin version * XMLRPC multicall support * Conflict icon in RecentChanges + * XMLRPC Authentication System Bugfixes (only stuff that is buggy in moin/1.6 main branch): * ... @@ -23,6 +24,7 @@ Branch moin/1.6-sync-aschremmer Developer notes: * ... +Do not forget to check the related wiki page: http://moinmoin.wikiwikiweb.de/WikiSyncronisation Diary ===== @@ -35,8 +37,8 @@ Week 22: Tax forms, Fulfilled transcript changed conflict icon, Added xmlrpc multicall support into the server and backported the client code from python 2.4 -Week 23: Initial thoughts about Mercurial as a base for syncronisation. -Week 24: Evaluation of OpenID as a base for authentication +Week 23: Debian-Sprint in Extremadura, Spain. Initial thoughts about Mercurial as a base for syncronisation. +Week 24: Evaluation of OpenID as a base for authentication, written local testing scripts Week 25: Conference in Chile. Week 26: Implementation of the XMLRPC authentication system. # HG changeset patch # User Alexander Schremmer # Date 1151696085 -7200 # Node ID 5d308092d40e805f0ab9b74d83cda4ac4da520c5 # Parent 7bb50cdd6fbe9ce71e1a78a7471eab7d7b58bd1e Added dummy getDiff function. 
diff -r 7bb50cdd6fbe -r 5d308092d40e MoinMoin/xmlrpc/__init__.py --- a/MoinMoin/xmlrpc/__init__.py Fri Jun 30 21:34:31 2006 +0200 +++ b/MoinMoin/xmlrpc/__init__.py Fri Jun 30 21:34:45 2006 +0200 @@ -519,6 +519,9 @@ class XmlRpcBase: return "SUCCESS" else: raise Exception("Invalid token.") # XXX make a distinct class + + def xmlrpc_getDiff(self, pagename, from_rev, to_rev): + return "NOT_IMPLEMENTED_YET" # XXX BEGIN WARNING XXX # All xmlrpc_*Attachment* functions have to be considered as UNSTABLE API - # HG changeset patch # User Alexander Schremmer # Date 1151710126 -7200 # Node ID 21eb4cb11e2c8e6c0ce5eff46dd00da4486c3845 # Parent afb156d4caa537c2f766f367adaf5fe51a47d913 Added binary diffing! Not much left for the getDiff function. diff -r afb156d4caa5 -r 21eb4cb11e2c docs/CHANGES.aschremmer --- a/docs/CHANGES.aschremmer Fri Jun 30 21:35:59 2006 +0200 +++ b/docs/CHANGES.aschremmer Sat Jul 01 01:28:46 2006 +0200 @@ -14,6 +14,7 @@ Branch moin/1.6-sync-aschremmer * XMLRPC multicall support * Conflict icon in RecentChanges * XMLRPC Authentication System + * Binary Diffing Bugfixes (only stuff that is buggy in moin/1.6 main branch): * ... @@ -38,9 +39,13 @@ Week 22: Tax forms, Fulfilled transcript Added xmlrpc multicall support into the server and backported the client code from python 2.4 Week 23: Debian-Sprint in Extremadura, Spain. Initial thoughts about Mercurial as a base for syncronisation. -Week 24: Evaluation of OpenID as a base for authentication, written local testing scripts +Week 24: Evaluation of OpenID as a base for authentication, written local testing scripts Week 25: Conference in Chile. -Week 26: Implementation of the XMLRPC authentication system. +Week 26: Implementation of the XMLRPC authentication system, added binary + diffing (mainly taken from Mercurial, but had to merge 5 changesets, + remove some mercurial dependencies and document it. 
Currently, Mercurial + uses a module written in C to solve the problem, so the Python code + was not cared for anymore.) Time plan ========= diff -r afb156d4caa5 -r 21eb4cb11e2c MoinMoin/util/bdiff.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/MoinMoin/util/bdiff.py Sat Jul 01 01:28:46 2006 +0200 @@ -0,0 +1,79 @@ +# Binary patching and diffing +# +# Copyright 2005 Matt Mackall +# Copyright 2006 MoinMoin:AlexanderSchremmer +# +# Algorithm taken from mercurial's mdiff.py +# +# This software may be used and distributed according to the terms +# of the GNU General Public License, incorporated herein by reference. + +import zlib, difflib, struct + +BDIFF_PATT = ">lll" + +def compress(text): + return zlib.compress(text) # here we could tune the compression level + +def decompress(bin): + return zlib.decompress(bin) + +def diff(a, b): + """ Generates a binary diff of the passed strings. """ + if not a: + return b and (struct.pack(BDIFF_PATT, 0, 0, len(b)) + b) + + bin = [] + la = lb = 0 + + p = [0] + for i in a: p.append(p[-1] + len(i)) + + for am, bm, size in difflib.SequenceMatcher(None, a, b).get_matching_blocks(): + s = "".join(b[lb:bm]) + if am > la or s: + bin.append(struct.pack(BDIFF_PATT, p[la], p[am], len(s)) + s) + la = am + size + lb = bm + size + + return "".join(bin) + +def patchtext(bin): + """ Returns the new hunks that are contained in a binary diff.""" + pos = 0 + t = [] + while pos < len(bin): + p1, p2, l = struct.unpack(BDIFF_PATT, bin[pos:pos + 12]) + pos += 12 + t.append(bin[pos:pos + l]) + pos += l + return "".join(t) + +def patch(a, bin): + """ Patches the string a with the binary patch bin. 
""" + c = last = pos = 0 + r = [] + + while pos < len(bin): + p1, p2, l = struct.unpack(BDIFF_PATT, bin[pos:pos + 12]) + pos += 12 + r.append(a[last:p1]) + r.append(bin[pos:pos + l]) + pos += l + last = p2 + c += 1 + r.append(a[last:]) + + return "".join(r) + +def test(): + a = "föo" * 30 + b = "bär" * 30 + d = diff(a, b) + z = compress(d) + print `patchtext(d)` + #print `d` + print b == patch(a, d) + print len(d), len(z) + +test() \ No newline at end of file # HG changeset patch # User Alexander Schremmer # Date 1151711262 -7200 # Node ID 930c9e82a60b5fb4924105f3f6ff85c148d0042f # Parent 21eb4cb11e2c8e6c0ce5eff46dd00da4486c3845 Optimised diff for text files. diff -r 21eb4cb11e2c -r 930c9e82a60b MoinMoin/util/bdiff.py --- a/MoinMoin/util/bdiff.py Sat Jul 01 01:28:46 2006 +0200 +++ b/MoinMoin/util/bdiff.py Sat Jul 01 01:47:42 2006 +0200 @@ -19,9 +19,12 @@ def decompress(bin): return zlib.decompress(bin) def diff(a, b): - """ Generates a binary diff of the passed strings. """ + """ Generates a binary diff of the passed strings. + Note that you can pass arrays of strings as well. + This might give you better results for text files. """ if not a: - return b and (struct.pack(BDIFF_PATT, 0, 0, len(b)) + b) + s = "".join(b) + return s and (struct.pack(BDIFF_PATT, 0, 0, len(s)) + s) bin = [] la = lb = 0 @@ -37,6 +40,10 @@ def diff(a, b): lb = bm + size return "".join(bin) + +def textdiff(a, b): + """ A diff function optimised for text files. Works with binary files as well. 
""" + return diff(a.splitlines(1), b.splitlines(1)) def patchtext(bin): """ Returns the new hunks that are contained in a binary diff.""" @@ -67,13 +74,18 @@ def patch(a, bin): return "".join(r) def test(): - a = "föo" * 30 - b = "bär" * 30 + a = ("foo\n" * 30) + b = (" fao" * 30) + + a = file(r"C:\Dokumente und Einstellungen\Administrator\Eigene Dateien\Progra\Python\MoinMoin\moin-1.6-sync\MoinMoin\util\test.1").read() + b = file(r"C:\Dokumente und Einstellungen\Administrator\Eigene Dateien\Progra\Python\MoinMoin\moin-1.6-sync\MoinMoin\util\test.2").read() + a = a.splitlines(1) + b = b.splitlines(1) + d = diff(a, b) z = compress(d) print `patchtext(d)` - #print `d` - print b == patch(a, d) + print `d` + print "".join(b) == patch("".join(a), d) print len(d), len(z) -test() \ No newline at end of file # HG changeset patch # User Alexander Schremmer # Date 1151711431 -7200 # Node ID 1083861bd16cd2fd60ee3f6b7aeffd623c5d1869 # Parent 930c9e82a60b5fb4924105f3f6ff85c148d0042f Replaced size literals with computed constant. 
diff -r 930c9e82a60b -r 1083861bd16c MoinMoin/util/bdiff.py --- a/MoinMoin/util/bdiff.py Sat Jul 01 01:47:42 2006 +0200 +++ b/MoinMoin/util/bdiff.py Sat Jul 01 01:50:31 2006 +0200 @@ -11,6 +11,7 @@ import zlib, difflib, struct import zlib, difflib, struct BDIFF_PATT = ">lll" +BDIFF_PATT_SIZE = struct.calcsize(">lll") def compress(text): return zlib.compress(text) # here we could tune the compression level @@ -50,8 +51,8 @@ def patchtext(bin): pos = 0 t = [] while pos < len(bin): - p1, p2, l = struct.unpack(BDIFF_PATT, bin[pos:pos + 12]) - pos += 12 + p1, p2, l = struct.unpack(BDIFF_PATT, bin[pos:pos + BDIFF_PATT_SIZE]) + pos += BDIFF_PATT_SIZE t.append(bin[pos:pos + l]) pos += l return "".join(t) @@ -62,8 +63,8 @@ def patch(a, bin): r = [] while pos < len(bin): - p1, p2, l = struct.unpack(BDIFF_PATT, bin[pos:pos + 12]) - pos += 12 + p1, p2, l = struct.unpack(BDIFF_PATT, bin[pos:pos + BDIFF_PATT_SIZE]) + pos += BDIFF_PATT_SIZE r.append(a[last:p1]) r.append(bin[pos:pos + l]) pos += l # HG changeset patch # User Alexander Schremmer # Date 1151711561 -7200 # Node ID dede3773735c7ecef29ef9fac3e01954bcbd29e3 # Parent 1083861bd16cd2fd60ee3f6b7aeffd623c5d1869 Replaced the module doc string. diff -r 1083861bd16c -r dede3773735c MoinMoin/util/bdiff.py --- a/MoinMoin/util/bdiff.py Sat Jul 01 01:50:31 2006 +0200 +++ b/MoinMoin/util/bdiff.py Sat Jul 01 01:52:41 2006 +0200 @@ -1,12 +1,13 @@ -# Binary patching and diffing -# -# Copyright 2005 Matt Mackall -# Copyright 2006 MoinMoin:AlexanderSchremmer -# -# Algorithm taken from mercurial's mdiff.py -# -# This software may be used and distributed according to the terms -# of the GNU General Public License, incorporated herein by reference. +""" + MoinMoin - Binary patching and diffing + + @copyright: 2005 Matt Mackall + @copyright: 2006 MoinMoin:AlexanderSchremmer + + Algorithm taken from mercurial's mdiff.py + + @license: GNU GPL, see COPYING for details. 
+""" import zlib, difflib, struct # HG changeset patch # User Alexander Schremmer # Date 1152469928 -7200 # Node ID 69af0200534e09934d2a0a398c91c449dd880991 # Parent d53ad7bf13e680977cfec2658232e3eb6003c8f7 Added method to the Dict api. diff -r d53ad7bf13e6 -r 69af0200534e MoinMoin/wikidicts.py --- a/MoinMoin/wikidicts.py Sun Jul 09 18:31:19 2006 +0200 +++ b/MoinMoin/wikidicts.py Sun Jul 09 20:32:08 2006 +0200 @@ -59,6 +59,9 @@ class DictBase: def values(self): return self._dict.values() + + def get_dict(self): + return self._dict def has_key(self, key): return self._dict.has_key(key) # HG changeset patch # User Alexander Schremmer # Date 1152469966 -7200 # Node ID 675d01d7cb68fe680c5b91f640fe81b87c7fc477 # Parent 69af0200534e09934d2a0a398c91c449dd880991 Output xmlrpc fault in case of invalid token. diff -r 69af0200534e -r 675d01d7cb68 MoinMoin/xmlrpc/__init__.py --- a/MoinMoin/xmlrpc/__init__.py Sun Jul 09 20:32:08 2006 +0200 +++ b/MoinMoin/xmlrpc/__init__.py Sun Jul 09 20:32:46 2006 +0200 @@ -518,7 +518,7 @@ class XmlRpcBase: self.request.user = u return "SUCCESS" else: - raise Exception("Invalid token.") # XXX make a distinct class + return xmlrpclib.Fault(1, "Invalid token.") def xmlrpc_getDiff(self, pagename, from_rev, to_rev): return "NOT_IMPLEMENTED_YET" # HG changeset patch # User Alexander Schremmer # Date 1152469984 -7200 # Node ID 3108c8b2597ee088a84ec6f137ba17a64af6654a # Parent 675d01d7cb68fe680c5b91f640fe81b87c7fc477 Add preliminary SyncPages action. diff -r 675d01d7cb68 -r 3108c8b2597e MoinMoin/action/SyncPages.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/MoinMoin/action/SyncPages.py Sun Jul 09 20:33:04 2006 +0200 @@ -0,0 +1,76 @@ +# -*- coding: iso-8859-1 -*- +""" + MoinMoin - SyncPages action + + This action allows you to synchronise pages of two wikis. + + @copyright: 2006 MoinMoin:AlexanderSchremmer + @license: GNU GPL, see COPYING for details. 
+""" + +import os +import zipfile +from datetime import datetime + +from MoinMoin import wikiutil, config, user +from MoinMoin.PageEditor import PageEditor +from MoinMoin.Page import Page +from MoinMoin.wikidicts import Dict + +class ActionStatus(Exception): pass + +class RemoteWiki(object): + def __init__(self, interwikiname): + wikitag, wikiurl, wikitail, wikitag_bad = wikiutil.resolve_wiki(self.request, '%s:""' % (interwikiname, )) + self.wiki_url = wikiutil.mapURL(self.request, wikiurl) + self.valid = not wikitag_bad + self.xmlrpc_url = self.wiki_url + "?action=xmlrpc2" + + def __repr__(self): + return "" % (self.valid, self.wiki_url) + +class PackagePages: + def __init__(self, pagename, request): + self.request = request + self.pagename = pagename + self.page = Page(request, pagename) + + def parsePage(self): + defaults = { + "remotePrefix": "", + "localPrefix": "", + "remoteWiki": "" + } + + defaults.update(Dict(self.request, self.pagename).get_dict()) + return defaults + + def render(self): + """ Render action + + This action returns a wiki page with optional message, or + redirects to new page. + """ + _ = self.request.getText + + params = self.parsePage() + + try: + if not params["remoteWiki"]: + raise ActionStatus(_("Incorrect parameters. Please supply at least the ''remoteWiki'' parameter.")) + + remote = RemoteWiki(params["remoteWiki"]) + + if not remote.valid: + raise ActionStatus(_("The ''remoteWiki'' is unknown.")) + + + self.sync(params) + except ActionStatus, e: + return self.page.send_page(self.request, msg=u'

%s

\n' % (e.args[0], )) + + raise ActionStatus(_("Syncronisation finished.")) + +def execute(pagename, request): + """ Glue code for actions """ + PackagePages(pagename, request).render() # HG changeset patch # User Alexander Schremmer # Date 1152471893 -7200 # Node ID 2ec25306c4a036d3eb50aa3778c262e8d45ed405 # Parent 3108c8b2597ee088a84ec6f137ba17a64af6654a Added getDiff XMLRPC method. diff -r 3108c8b2597e -r 2ec25306c4a0 MoinMoin/xmlrpc/__init__.py --- a/MoinMoin/xmlrpc/__init__.py Sun Jul 09 20:33:04 2006 +0200 +++ b/MoinMoin/xmlrpc/__init__.py Sun Jul 09 21:04:53 2006 +0200 @@ -518,10 +518,49 @@ class XmlRpcBase: self.request.user = u return "SUCCESS" else: - return xmlrpclib.Fault(1, "Invalid token.") + return xmlrpclib.Fault("INVALID", "Invalid token.") def xmlrpc_getDiff(self, pagename, from_rev, to_rev): - return "NOT_IMPLEMENTED_YET" + from MoinMoin.util.bdiff import textdiff, compress + + pagename = self._instr(pagename) + + # User may read page? + if not self.request.user.may.read(pagename): + return self.notAllowedFault() + + def allowed_rev_type(data): + if data is None: + return true + return isinstance(data, int) and data > 0 + + if not allowed_rev_type(from_rev) or not allowed_rev_type(to_rev): + return xmlrpclib.Fault("FIXME", "Incorrect type for revision(s).") # XXX + + currentpage = Page(self.request, pagename) + revisions = currentpage.getRevList() + + if from_rev is not None and from_rev not in revisions: + return xmlrpclib.Fault("FIXME", "Unknown from_rev.") # XXX + if to_rev is not None and to_rev not in revisions: + return xmlrpclib.Fault("FIXME", "Unknown to_rev.") # XXX + + if from_rev is None: + oldcontents = "" + else: + oldpage = Page(request, pagename, rev=from_rev) + oldcontents = oldpage.get_raw_body() + + if to_rev is None: + newcontents = currentpage.get_raw_body() + else: + newpage = Page(request, pagename, rev=to_rev) + newcontents = newpage.get_raw_body() + newrev = newpage.get_real_rev() + + diffblob = 
xmlrpclib.Binary(compress(textdiff(oldcontents, newcontents))) + + return # XXX # XXX BEGIN WARNING XXX # All xmlrpc_*Attachment* functions have to be considered as UNSTABLE API - # HG changeset patch # User Alexander Schremmer # Date 1152472081 -7200 # Node ID f29c094483533d55933b7862f453aeb19af82467 # Parent 2ec25306c4a036d3eb50aa3778c262e8d45ed405 Minor modifications. diff -r 2ec25306c4a0 -r f29c09448353 MoinMoin/action/SyncPages.py --- a/MoinMoin/action/SyncPages.py Sun Jul 09 21:04:53 2006 +0200 +++ b/MoinMoin/action/SyncPages.py Sun Jul 09 21:08:01 2006 +0200 @@ -64,7 +64,7 @@ class PackagePages: if not remote.valid: raise ActionStatus(_("The ''remoteWiki'' is unknown.")) - + # ... self.sync(params) except ActionStatus, e: return self.page.send_page(self.request, msg=u'

%s

\n' % (e.args[0], )) diff -r 2ec25306c4a0 -r f29c09448353 MoinMoin/util/bdiff.py --- a/MoinMoin/util/bdiff.py Sun Jul 09 21:04:53 2006 +0200 +++ b/MoinMoin/util/bdiff.py Sun Jul 09 21:08:01 2006 +0200 @@ -12,7 +12,7 @@ import zlib, difflib, struct import zlib, difflib, struct BDIFF_PATT = ">lll" -BDIFF_PATT_SIZE = struct.calcsize(">lll") +BDIFF_PATT_SIZE = struct.calcsize(BDIFF_PATT) def compress(text): return zlib.compress(text) # here we could tune the compression level # HG changeset patch # User Alexander Schremmer # Date 1152656095 -7200 # Node ID d24f2b1d606a74eadae23006b09b4fb5253e6508 # Parent f29c094483533d55933b7862f453aeb19af82467 Refactoring, fixed Fault codes, fixed return format of the getdiff method. diff -r f29c09448353 -r d24f2b1d606a MoinMoin/action/SyncPages.py --- a/MoinMoin/action/SyncPages.py Sun Jul 09 21:08:01 2006 +0200 +++ b/MoinMoin/action/SyncPages.py Wed Jul 12 00:14:55 2006 +0200 @@ -29,7 +29,7 @@ class RemoteWiki(object): def __repr__(self): return "" % (self.valid, self.wiki_url) -class PackagePages: +class ActionClass: def __init__(self, pagename, request): self.request = request self.pagename = pagename @@ -72,5 +72,4 @@ class PackagePages: raise ActionStatus(_("Syncronisation finished.")) def execute(pagename, request): - """ Glue code for actions """ - PackagePages(pagename, request).render() + ActionClass(pagename, request).render() diff -r f29c09448353 -r d24f2b1d606a MoinMoin/action/__init__.py --- a/MoinMoin/action/__init__.py Sun Jul 09 21:08:01 2006 +0200 +++ b/MoinMoin/action/__init__.py Wed Jul 12 00:14:55 2006 +0200 @@ -427,7 +427,7 @@ def do_edit(pagename, request): # Save new text else: try: - still_conflict = "/!\ '''Edit conflict" in savetext + still_conflict = wikiutil.containsConflictMarker(savetext) pg.setConflict(still_conflict) savemsg = pg.saveText(savetext, rev, trivial=trivial, comment=comment) except pg.EditConflict, e: diff -r f29c09448353 -r d24f2b1d606a MoinMoin/wikiutil.py --- a/MoinMoin/wikiutil.py Sun 
Jul 09 21:08:01 2006 +0200 +++ b/MoinMoin/wikiutil.py Wed Jul 12 00:14:55 2006 +0200 @@ -1443,6 +1443,9 @@ def link_tag(request, params, text=None, else: return "%s%s" % (result, text) +def containsConflictMarker(text): + """ Returns true if there is a conflict marker in the text. """ + return "/!\ '''Edit conflict" in text def linediff(oldlines, newlines, **kw): """ diff -r f29c09448353 -r d24f2b1d606a MoinMoin/xmlrpc/__init__.py --- a/MoinMoin/xmlrpc/__init__.py Sun Jul 09 21:08:01 2006 +0200 +++ b/MoinMoin/xmlrpc/__init__.py Wed Jul 12 00:14:55 2006 +0200 @@ -521,6 +521,7 @@ class XmlRpcBase: return xmlrpclib.Fault("INVALID", "Invalid token.") def xmlrpc_getDiff(self, pagename, from_rev, to_rev): + """ Gets the binary difference between two page revisions. See MoinMoin:WikiSyncronisation. """ from MoinMoin.util.bdiff import textdiff, compress pagename = self._instr(pagename) @@ -531,36 +532,48 @@ class XmlRpcBase: def allowed_rev_type(data): if data is None: - return true + return True return isinstance(data, int) and data > 0 - if not allowed_rev_type(from_rev) or not allowed_rev_type(to_rev): - return xmlrpclib.Fault("FIXME", "Incorrect type for revision(s).") # XXX + if not allowed_rev_type(from_rev): + return xmlrpclib.Fault("FROMREV_INVALID", "Incorrect type for from_rev.") + + if not allowed_rev_type(to_rev): + return xmlrpclib.Fault("TOREV_INVALID", "Incorrect type for to_rev.") currentpage = Page(self.request, pagename) + if not currentpage.exists(): + return xmlrpclib.Fault("NOT_EXIST", "Page does not exist.") + revisions = currentpage.getRevList() if from_rev is not None and from_rev not in revisions: - return xmlrpclib.Fault("FIXME", "Unknown from_rev.") # XXX + return xmlrpclib.Fault("FROMREV_INVALID", "Unknown from_rev.") if to_rev is not None and to_rev not in revisions: - return xmlrpclib.Fault("FIXME", "Unknown to_rev.") # XXX - + return xmlrpclib.Fault("TOREV_INVALID", "Unknown to_rev.") + + # use lambda to defer execution in the next lines if 
from_rev is None: - oldcontents = "" + oldcontents = lambda: "" else: oldpage = Page(request, pagename, rev=from_rev) - oldcontents = oldpage.get_raw_body() + oldcontents = lambda: oldpage.get_raw_body() if to_rev is None: - newcontents = currentpage.get_raw_body() + newcontents = lambda: currentpage.get_raw_body() else: newpage = Page(request, pagename, rev=to_rev) - newcontents = newpage.get_raw_body() + newcontents = lambda: newpage.get_raw_body() newrev = newpage.get_real_rev() - diffblob = xmlrpclib.Binary(compress(textdiff(oldcontents, newcontents))) - - return # XXX + if oldcontents() and oldpage.get_real_rev() == newpage.get_real_rev(): + return xmlrpclib.Fault("ALREADY_CURRENT", "There are no changes.") + + newcontents = newcontents().encode("utf-8") + conflict = wikiutil.containsConflictMarker(newcontents) + diffblob = xmlrpclib.Binary(compress(textdiff(oldcontents().encode("utf-8"), newcontents))) + + return {"conflict": conflict, "diff": diffblob, "diffversion": 1, "current": currentpage.get_real_rev()} # XXX BEGIN WARNING XXX # All xmlrpc_*Attachment* functions have to be considered as UNSTABLE API - # HG changeset patch # User Alexander Schremmer # Date 1152889123 -7200 # Node ID 213776426ce2fd8350e14d790317b3c6960bdf81 # Parent d24f2b1d606a74eadae23006b09b4fb5253e6508 Added get_raw_body_str to the Page class diff -r d24f2b1d606a -r 213776426ce2 MoinMoin/Page.py --- a/MoinMoin/Page.py Wed Jul 12 00:14:55 2006 +0200 +++ b/MoinMoin/Page.py Fri Jul 14 16:58:43 2006 +0200 @@ -780,7 +780,7 @@ class Page: def get_raw_body(self): """ Load the raw markup from the page file. - @rtype: str + @rtype: unicode @return: raw page contents of this page """ if self._raw_body is None: @@ -806,7 +806,15 @@ class Page: file.close() return self._raw_body - + + def get_raw_body_str(self): + """ Returns the raw markup from the page file, as a string. 
+ + @rtype: str + @return: raw page contents of this page + """ + return self.get_raw_body().encode("utf-8") + def set_raw_body(self, body, modified=0): """ Set the raw body text (prevents loading from disk). # HG changeset patch # User Alexander Schremmer # Date 1152889175 -7200 # Node ID 49a9baa51a1f976a234423a41f2d7486d0907ef4 # Parent 213776426ce2fd8350e14d790317b3c6960bdf81 Fixed getDiff, added interwikiName, mergeContents (preliminary). Added entries to my CHANGES file. diff -r 213776426ce2 -r 49a9baa51a1f MoinMoin/xmlrpc/__init__.py --- a/MoinMoin/xmlrpc/__init__.py Fri Jul 14 16:58:43 2006 +0200 +++ b/MoinMoin/xmlrpc/__init__.py Fri Jul 14 16:59:35 2006 +0200 @@ -557,23 +557,75 @@ class XmlRpcBase: oldcontents = lambda: "" else: oldpage = Page(request, pagename, rev=from_rev) - oldcontents = lambda: oldpage.get_raw_body() + oldcontents = lambda: oldpage.get_raw_body_str() if to_rev is None: newcontents = lambda: currentpage.get_raw_body() else: newpage = Page(request, pagename, rev=to_rev) - newcontents = lambda: newpage.get_raw_body() + newcontents = lambda: newpage.get_raw_body_str() newrev = newpage.get_real_rev() if oldcontents() and oldpage.get_real_rev() == newpage.get_real_rev(): return xmlrpclib.Fault("ALREADY_CURRENT", "There are no changes.") - newcontents = newcontents().encode("utf-8") + newcontents = newcontents() conflict = wikiutil.containsConflictMarker(newcontents) - diffblob = xmlrpclib.Binary(compress(textdiff(oldcontents().encode("utf-8"), newcontents))) + diffblob = xmlrpclib.Binary(compress(textdiff(oldcontents(), newcontents))) return {"conflict": conflict, "diff": diffblob, "diffversion": 1, "current": currentpage.get_real_rev()} + + def xmlrpc_interwikiName(self): + """ Returns the interwiki name of the current wiki. 
""" + name = self.request.cfg.interwikiname + if name is None: + return None + else: + return self._outstr(name) + + def xmlrpc_mergeChanges(self, pagename, diff, local_rev, delta_remote_rev, last_remote_rev, interwiki_name): + """ Merges a diff sent by the remote machine and returns the number of the new revision. + Additionally, this method tags the new revision. + + @param pagename: The pagename that is currently dealt with. + @param diff: The diff that can be applied to the version specified by delta_remote_rev. + @param local_rev: The revno of the page on the other wiki system, used for the tag. + @param delta_remote_rev: The revno that the diff is taken against. + @param last_remote_rev: The last revno of the page `pagename` that is known by the other wiki site. + @param interwiki_name: Used to build the interwiki tag. + """ + from MoinMoin.util.bdiff import decompress, patch + + pagename = self._instr(pagename) + + # User may read page? + if not self.request.user.may.read(pagename) or not self.request.user.may.write(pagename): + return self.notAllowedFault() + + # XXX add locking here! + + # current version of the page + currentpage = Page(self.request, pagename) + + if currentpage.get_real_rev() != last_remote_rev: + return xmlrpclib.Fault("LASTREV_INVALID", "The page was changed") + + if not currentpage.exists() and diff is None: + return xmlrpclib.Fault("NOT_EXIST", "The page does not exist and no diff was supplied.") + + # base revision used for the diff + basepage = Page(self.request, pagename, rev=delta_remote_rev) + + # generate the new page revision by applying the diff + newcontents = patch(basepage.get_raw_body_str(), decompress(str(diff))) + + # write page + # XXX ... 
+ + # XXX add a tag (interwiki_name, local_rev, current rev) to the page + # XXX return current rev + # XXX finished + # XXX BEGIN WARNING XXX # All xmlrpc_*Attachment* functions have to be considered as UNSTABLE API - diff -r 213776426ce2 -r 49a9baa51a1f docs/CHANGES.aschremmer --- a/docs/CHANGES.aschremmer Fri Jul 14 16:58:43 2006 +0200 +++ b/docs/CHANGES.aschremmer Fri Jul 14 16:59:35 2006 +0200 @@ -8,6 +8,8 @@ Branch moin/1.6-sync-aschremmer * Implement actual syncronisation. * Implement a cross-site authentication system, i.e. mainly an identity storage. + * Clean up trailing whitespace. + * Add page locking. New Features: * XMLRPC method to return the Moin version @@ -15,12 +17,15 @@ Branch moin/1.6-sync-aschremmer * Conflict icon in RecentChanges * XMLRPC Authentication System * Binary Diffing + * XMLRPC method to get binary diffs + * Bugfixes (only stuff that is buggy in moin/1.6 main branch): - * ... + * Conflict resolution fixes. Other Changes: * Refactored conflict resolution and XMLRPC code. + * Enhanced API at some points. Developer notes: * ... @@ -40,12 +45,15 @@ Week 22: Tax forms, Fulfilled transcript backported the client code from python 2.4 Week 23: Debian-Sprint in Extremadura, Spain. Initial thoughts about Mercurial as a base for syncronisation. Week 24: Evaluation of OpenID as a base for authentication, written local testing scripts -Week 25: Conference in Chile. +Week 25: Conference in Chile (FET 2006). Week 26: Implementation of the XMLRPC authentication system, added binary diffing (mainly taken from Mercurial, but had to merge 5 changesets, remove some mercurial dependencies and document it. Currently, Mercurial uses a module written in C to solve the problem, so the Python code was not cared for anymore.) +Week 27: Europython, Geneva. +Week 28: Debian-Edu Developer Camp. Implemented getDiff XMLRPC method, added preliminary SyncPages action, + added interwikiName XMLRPC method, added mergeChanges XMLRPC method. 
Time plan ========= # HG changeset patch # User Alexander Schremmer # Date 1152984792 -7200 # Node ID 2563f22db43b1c3292d2ab3087e1f7afd0db0203 # Parent fbbee7e00ffeaaa3301cf6d148a90eaea51e3022 Updated my CHANGES file diff -r fbbee7e00ffe -r 2563f22db43b docs/CHANGES.aschremmer --- a/docs/CHANGES.aschremmer Sat Jul 15 19:17:39 2006 +0200 +++ b/docs/CHANGES.aschremmer Sat Jul 15 19:33:12 2006 +0200 @@ -43,7 +43,8 @@ Week 22: Tax forms, Fulfilled transcript changed conflict icon, Added xmlrpc multicall support into the server and backported the client code from python 2.4 -Week 23: Debian-Sprint in Extremadura, Spain. Initial thoughts about Mercurial as a base for syncronisation. +Week 23: Debian-Sprint in Extremadura, Spain. Initial thoughts about Mercurial as + a base for syncronisation. (See wiki) Week 24: Evaluation of OpenID as a base for authentication, written local testing scripts Week 25: Conference in Chile (FET 2006). Week 26: Implementation of the XMLRPC authentication system, added binary @@ -53,7 +54,8 @@ Week 26: Implementation of the XMLRPC au was not cared for anymore.) Week 27: Europython, Geneva. Week 28: Debian-Edu Developer Camp. Implemented getDiff XMLRPC method, added preliminary SyncPages action, - added interwikiName XMLRPC method, added mergeChanges XMLRPC method. + added interwikiName XMLRPC method, added mergeChanges XMLRPC method. Started analysis of the moinupdate + script written by Stefan Merten. 
Time plan ========= # HG changeset patch # User Alexander Schremmer # Date 1152985618 -7200 # Node ID a442ed63b1654d1b1e6204a96e68db8daf2a9581 # Parent 2563f22db43b1c3292d2ab3087e1f7afd0db0203 Reordered class hierachy of remote wikis, enhanced RemoteWiki interface diff -r 2563f22db43b -r a442ed63b165 MoinMoin/action/SyncPages.py --- a/MoinMoin/action/SyncPages.py Sat Jul 15 19:33:12 2006 +0200 +++ b/MoinMoin/action/SyncPages.py Sat Jul 15 19:46:58 2006 +0200 @@ -10,6 +10,7 @@ import os import zipfile +import xmlrpclib from datetime import datetime from MoinMoin import wikiutil, config, user @@ -20,11 +21,30 @@ class ActionStatus(Exception): pass class ActionStatus(Exception): pass class RemoteWiki(object): + """ This class should be the base for all implementations of remote wiki + classes. """ + def getInterwikiName(self): + """ Returns the interwiki name of the other wiki. """ + return NotImplemented + + def __repr__(self): + """ Returns a representation of the instance for debugging purposes. """ + return NotImplemented + +class MoinWiki(RemoteWiki): def __init__(self, interwikiname): wikitag, wikiurl, wikitail, wikitag_bad = wikiutil.resolve_wiki(self.request, '%s:""' % (interwikiname, )) self.wiki_url = wikiutil.mapURL(self.request, wikiurl) self.valid = not wikitag_bad self.xmlrpc_url = self.wiki_url + "?action=xmlrpc2" + self.connection = self.createConnection() + + def createConnection(self): + return xmlrpclib.ServerProxy(self.xmlrpc_url, allow_none=True) + + # Methods implementing the RemoteWiki interface + def getInterwikiName(self): + return self.connection.interwikiName() def __repr__(self): return "" % (self.valid, self.wiki_url) @@ -56,10 +76,13 @@ class ActionClass: params = self.parsePage() try: + if not self.request.cfg.interwikiname: + raise ActionStatus(_("Please set an interwikiname in your wikiconfig (see HelpOnConfiguration) to be able to use this action.")) + if not params["remoteWiki"]: raise ActionStatus(_("Incorrect parameters. 
Please supply at least the ''remoteWiki'' parameter.")) - remote = RemoteWiki(params["remoteWiki"]) + remote = MoinWiki(params["remoteWiki"]) if not remote.valid: raise ActionStatus(_("The ''remoteWiki'' is unknown.")) diff -r 2563f22db43b -r a442ed63b165 docs/CHANGES.aschremmer --- a/docs/CHANGES.aschremmer Sat Jul 15 19:33:12 2006 +0200 +++ b/docs/CHANGES.aschremmer Sat Jul 15 19:46:58 2006 +0200 @@ -10,6 +10,7 @@ Branch moin/1.6-sync-aschremmer identity storage. * Clean up trailing whitespace. * Add page locking. + * How about using unique IDs that just derive from the interwikiname? New Features: * XMLRPC method to return the Moin version # HG changeset patch # User Alexander Schremmer # Date 1153135776 -7200 # Node ID b258156f1288757d545763b6c147e62b68a94d03 # Parent a442ed63b1654d1b1e6204a96e68db8daf2a9581 Added question to my CHANGES file, fixed a bug in SyncPages diff -r a442ed63b165 -r b258156f1288 MoinMoin/action/SyncPages.py --- a/MoinMoin/action/SyncPages.py Sat Jul 15 19:46:58 2006 +0200 +++ b/MoinMoin/action/SyncPages.py Mon Jul 17 13:29:36 2006 +0200 @@ -92,7 +92,7 @@ class ActionClass: except ActionStatus, e: return self.page.send_page(self.request, msg=u'

%s

\n' % (e.args[0], )) - raise ActionStatus(_("Syncronisation finished.")) + return self.page.send_page(self.request, msg=_("Syncronisation finished.")) def execute(pagename, request): ActionClass(pagename, request).render() diff -r a442ed63b165 -r b258156f1288 docs/CHANGES.aschremmer --- a/docs/CHANGES.aschremmer Sat Jul 15 19:46:58 2006 +0200 +++ b/docs/CHANGES.aschremmer Mon Jul 17 13:29:36 2006 +0200 @@ -11,6 +11,7 @@ Branch moin/1.6-sync-aschremmer * Clean up trailing whitespace. * Add page locking. * How about using unique IDs that just derive from the interwikiname? + * How to handle renames? New Features: * XMLRPC method to return the Moin version # HG changeset patch # User Alexander Schremmer # Date 1153216769 -7200 # Node ID 175695a510f5ab6e87ffc28c0c87f44dacbdc710 # Parent eaae4bcf60f39ff242c272e9b0fcaa35fa154c30 Added umlaut for juergens name. diff -r eaae4bcf60f3 -r 175695a510f5 MoinMoin/action/__init__.py --- a/MoinMoin/action/__init__.py Tue Jul 18 11:48:53 2006 +0200 +++ b/MoinMoin/action/__init__.py Tue Jul 18 11:59:29 2006 +0200 @@ -20,7 +20,7 @@ actions_excluded, making and checking tickets, rendering some form, displaying errors and doing stuff after an action. - @copyright: 2000-2004 by Jrgen Hermann , + @copyright: 2000-2004 by Jürgen Hermann , 2006 MoinMoin:ThomasWaldmann @license: GNU GPL, see COPYING for details. """ # HG changeset patch # User Alexander Schremmer # Date 1153217319 -7200 # Node ID 95f57ffeb4e90939a5d71e9a40656d9823675f20 # Parent 175695a510f5ab6e87ffc28c0c87f44dacbdc710 added umlaut somewhere else. diff -r 175695a510f5 -r 95f57ffeb4e9 MoinMoin/wikiutil.py --- a/MoinMoin/wikiutil.py Tue Jul 18 11:59:29 2006 +0200 +++ b/MoinMoin/wikiutil.py Tue Jul 18 12:08:39 2006 +0200 @@ -2,7 +2,7 @@ """ MoinMoin - Wiki Utility Functions - @copyright: 2000 - 2004 by Jrgen Hermann + @copyright: 2000 - 2004 by Jürgen Hermann @license: GNU GPL, see COPYING for details. 
""" # HG changeset patch # User Alexander Schremmer # Date 1153263542 -7200 # Node ID 86f141856d2bf6b986b48abb69e4e153fc711917 # Parent f56db9746839718cd79615f16a7fed4c8d21953b Finished first version of the mergeChanges method. diff -r f56db9746839 -r 86f141856d2b MoinMoin/xmlrpc/__init__.py --- a/MoinMoin/xmlrpc/__init__.py Mon Jul 17 13:29:58 2006 +0200 +++ b/MoinMoin/xmlrpc/__init__.py Wed Jul 19 00:59:02 2006 +0200 @@ -498,6 +498,7 @@ class XmlRpcBase: from MoinMoin import version return (version.project, version.release, version.revision) + # authorization methods def xmlrpc_getAuthToken(self, username, password, *args): @@ -518,7 +519,10 @@ class XmlRpcBase: return "SUCCESS" else: return xmlrpclib.Fault("INVALID", "Invalid token.") - + + + # methods for wiki synchronization + def xmlrpc_getDiff(self, pagename, from_rev, to_rev): """ Gets the binary difference between two page revisions. See MoinMoin:WikiSyncronisation. """ from MoinMoin.util.bdiff import textdiff, compress @@ -594,9 +598,13 @@ class XmlRpcBase: @param interwiki_name: Used to build the interwiki tag. """ from MoinMoin.util.bdiff import decompress, patch + from MoinMoin.wikisync import TagStore + LASTREV_INVALID = xmlrpclib.Fault("LASTREV_INVALID", "The page was changed") pagename = self._instr(pagename) + comment = u"Remote - %r" % interwiki_name + # User may read page? if not self.request.user.may.read(pagename) or not self.request.user.may.write(pagename): return self.notAllowedFault() @@ -604,10 +612,10 @@ class XmlRpcBase: # XXX add locking here! 
# current version of the page - currentpage = Page(self.request, pagename) + currentpage = PageEditor(self.request, pagename, do_editor_backup=0) if currentpage.get_real_rev() != last_remote_rev: - return xmlrpclib.Fault("LASTREV_INVALID", "The page was changed") + return LASTREV_INVALID if not currentpage.exists() and diff is None: return xmlrpclib.Fault("NOT_EXIST", "The page does not exist and no diff was supplied.") @@ -619,13 +627,22 @@ class XmlRpcBase: newcontents = patch(basepage.get_raw_body_str(), decompress(str(diff))) # write page - # XXX ... - - # XXX add a tag (interwiki_name, local_rev, current rev) to the page - # XXX return current rev - # XXX finished - - + try: + page.saveText(newcontents.encode("utf-8"), last_remote_rev, comment=comment) + page.clean_acl_cache() + except PageEditor.EditConflict: + return LASTREV_INVALID + + current_rev = page.get_real_rev() + + tags = TagStore(currentpage) + tags.add(remote_wiki=interwiki_name, remote_rev=local_rev, current_rev=current_rev) + + # XXX unlock page + + return current_rev + + # XXX BEGIN WARNING XXX # All xmlrpc_*Attachment* functions have to be considered as UNSTABLE API - # they are neither standard nor are they what we need when we have switched diff -r f56db9746839 -r 86f141856d2b docs/CHANGES.aschremmer --- a/docs/CHANGES.aschremmer Mon Jul 17 13:29:58 2006 +0200 +++ b/docs/CHANGES.aschremmer Wed Jul 19 00:59:02 2006 +0200 @@ -2,7 +2,8 @@ Branch moin/1.6-sync-aschremmer =============================== Known main issues: - * ... + * How will we store tags? + * How to handle renames/deletes? ToDo: * Implement actual syncronisation. @@ -11,7 +12,6 @@ Branch moin/1.6-sync-aschremmer * Clean up trailing whitespace. * Add page locking. * How about using unique IDs that just derive from the interwikiname? - * How to handle renames? 
New Features: * XMLRPC method to return the Moin version @@ -20,7 +20,9 @@ Branch moin/1.6-sync-aschremmer * XMLRPC Authentication System * Binary Diffing * XMLRPC method to get binary diffs - * + * XMLRPC method to merge remote changes locally + * XMLRPC method to get the interwiki name + * TagStore class Bugfixes (only stuff that is buggy in moin/1.6 main branch): * Conflict resolution fixes. @@ -58,6 +60,8 @@ Week 28: Debian-Edu Developer Camp. Impl Week 28: Debian-Edu Developer Camp. Implemented getDiff XMLRPC method, added preliminary SyncPages action, added interwikiName XMLRPC method, added mergeChanges XMLRPC method. Started analysis of the moinupdate script written by Stefan Merten. +Week 29: Finished first version of the mergeChanges method. Added preliminary + Tag and TagStore classes. Time plan ========= # HG changeset patch # User Alexander Schremmer # Date 1153263668 -7200 # Node ID 4e65c6eabe9bd19c617b429fb65c324022a3c6ba # Parent 05629312d4d77d1c76a9c531fe50599b1222c714 Forgot to add the wikisync.py file containing the Tag classes. diff -r 05629312d4d7 -r 4e65c6eabe9b MoinMoin/wikisync.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/MoinMoin/wikisync.py Wed Jul 19 01:01:08 2006 +0200 @@ -0,0 +1,23 @@ +# -*- coding: iso-8859-1 -*- +""" + MoinMoin - Wiki Synchronisation + + @copyright: 2006 by MoinMoin:AlexanderSchremmer + @license: GNU GPL, see COPYING for details. +""" + +# XXX add some code here +class Tag(object): + """ This class is used to store information about merging state. """ + pass + + +class TagStore(object): + """ This class manages the storage of tags. 
""" + + def __init__(self, page): + self.page = page + + def add(self, **kwargs): + # XXX add some code here + print "Got tag for page %r: %r" % (self.page, kwargs) # HG changeset patch # User Thomas Waldmann # Date 1153294951 -7200 # Node ID 95b16d7c135cb728d64ae56c0731675805113e6d # Parent 4e65c6eabe9bd19c617b429fb65c324022a3c6ba added missing daily CHANGES entry diff -r 4e65c6eabe9b -r 95b16d7c135c docs/CHANGES.aschremmer --- a/docs/CHANGES.aschremmer Wed Jul 19 01:01:08 2006 +0200 +++ b/docs/CHANGES.aschremmer Wed Jul 19 09:42:31 2006 +0200 @@ -63,6 +63,8 @@ Week 29: Finished first version of the m Week 29: Finished first version of the mergeChanges method. Added preliminary Tag and TagStore classes. +2006-07-18: the requested daily entry is missing here, see http://moinmoin.wikiwikiweb.de/GoogleSoc2006/BetterProgress + Time plan ========= In July and August, most parts of the implementation will be finished # HG changeset patch # User Alexander Schremmer # Date 1153347154 -7200 # Node ID 608fbab8b6c389b57741b7fad25b269eb3c9cbf7 # Parent 95b16d7c135cb728d64ae56c0731675805113e6d Fixed typos. 
diff -r 95b16d7c135c -r 608fbab8b6c3 MoinMoin/xmlrpc/__init__.py --- a/MoinMoin/xmlrpc/__init__.py Wed Jul 19 09:42:31 2006 +0200 +++ b/MoinMoin/xmlrpc/__init__.py Thu Jul 20 00:12:34 2006 +0200 @@ -628,12 +628,12 @@ class XmlRpcBase: # write page try: - page.saveText(newcontents.encode("utf-8"), last_remote_rev, comment=comment) - page.clean_acl_cache() + currentpage.saveText(newcontents.encode("utf-8"), last_remote_rev, comment=comment) + currentpage.clean_acl_cache() except PageEditor.EditConflict: return LASTREV_INVALID - current_rev = page.get_real_rev() + current_rev = currentpage.get_real_rev() tags = TagStore(currentpage) tags.add(remote_wiki=interwiki_name, remote_rev=local_rev, current_rev=current_rev) # HG changeset patch # User Alexander Schremmer # Date 1153347168 -7200 # Node ID 8ee55064a4f2b38898067255bebbfa5d72dc468b # Parent 608fbab8b6c389b57741b7fad25b269eb3c9cbf7 Status update. diff -r 608fbab8b6c3 -r 8ee55064a4f2 docs/CHANGES.aschremmer --- a/docs/CHANGES.aschremmer Thu Jul 20 00:12:34 2006 +0200 +++ b/docs/CHANGES.aschremmer Thu Jul 20 00:12:48 2006 +0200 @@ -22,7 +22,7 @@ Branch moin/1.6-sync-aschremmer * XMLRPC method to get binary diffs * XMLRPC method to merge remote changes locally * XMLRPC method to get the interwiki name - * TagStore class + * TagStore/PickleTagStore class Bugfixes (only stuff that is buggy in moin/1.6 main branch): * Conflict resolution fixes. @@ -60,8 +60,8 @@ Week 28: Debian-Edu Developer Camp. Impl Week 28: Debian-Edu Developer Camp. Implemented getDiff XMLRPC method, added preliminary SyncPages action, added interwikiName XMLRPC method, added mergeChanges XMLRPC method. Started analysis of the moinupdate script written by Stefan Merten. -Week 29: Finished first version of the mergeChanges method. Added preliminary - Tag and TagStore classes. +Week 29: Finished first version of the mergeChanges method. Added Tag and TagStore classes which are currently + using pickle-based storage. 
2006-07-18: the requested daily entry is missing here, see http://moinmoin.wikiwikiweb.de/GoogleSoc2006/BetterProgress # HG changeset patch # User Alexander Schremmer # Date 1153347189 -7200 # Node ID fd0d86823c41e0893fba1fbf32e7ab4ac06554be # Parent 8ee55064a4f2b38898067255bebbfa5d72dc468b Added working TagStore and Tag classes. diff -r 8ee55064a4f2 -r fd0d86823c41 MoinMoin/wikisync.py --- a/MoinMoin/wikisync.py Thu Jul 20 00:12:48 2006 +0200 +++ b/MoinMoin/wikisync.py Thu Jul 20 00:13:09 2006 +0200 @@ -6,18 +6,58 @@ @license: GNU GPL, see COPYING for details. """ -# XXX add some code here +try: + import cpickle as pickle +except ImportError: + import pickle + + class Tag(object): """ This class is used to store information about merging state. """ - pass + + def __init__(self, remote_wiki, remote_rev, current_rev): + self.remote_wiki = remote_wiki + self.remote_rev = remote_rev + self.current_rev = current_rev -class TagStore(object): - """ This class manages the storage of tags. """ +class AbstractTagStore(object): + """ This class is an abstract base class that shows how to implement classes + that manage the storage of tags. """ + + def __init__(self, page): + pass + + def add(self, **kwargs): + print "Got tag for page %r: %r" % (self.page, kwargs) + + +class PickleTagStore(AbstractTagStore): + """ This class manages the storage of tags in pickle files. 
""" def __init__(self, page): self.page = page + self.filename = page.getPagePath('synctags', use_underlay=0, check_create=1, isfile=1) + self.load() + def load(self): + try: + datafile = file(self.filename, "rb") + except IOError: + self.tags = [] + else: + self.tags = pickle.load(datafile) + datafile.close() + + def commit(self): + datafile = file(self.filename, "wb") + pickle.dump(self.tags, datafile, protocol=pickle.HIGHEST_PROTOCOL) + datafile.close() + + # public methods def add(self, **kwargs): - # XXX add some code here print "Got tag for page %r: %r" % (self.page, kwargs) + self.tags.append(Tag(**kwargs)) + self.commit() + +TagStore = PickleTagStore \ No newline at end of file # HG changeset patch # User Alexander Schremmer # Date 1153347526 -7200 # Node ID 5fa60ea62b28d9c3c5710a3abd2ebe0d339327df # Parent fd0d86823c41e0893fba1fbf32e7ab4ac06554be Added another question. diff -r fd0d86823c41 -r 5fa60ea62b28 docs/CHANGES.aschremmer --- a/docs/CHANGES.aschremmer Thu Jul 20 00:13:09 2006 +0200 +++ b/docs/CHANGES.aschremmer Thu Jul 20 00:18:46 2006 +0200 @@ -4,6 +4,7 @@ Branch moin/1.6-sync-aschremmer Known main issues: * How will we store tags? * How to handle renames/deletes? + * How to handle colliding/empty interwiki names? ToDo: * Implement actual syncronisation. # HG changeset patch # User Alexander Schremmer # Date 1153439693 -7200 # Node ID 14d12dfdcd4c0de9c786c74b993170a71dea0829 # Parent 939a545df5167e1af91c643c879b20fc7ed18a48 Added new XMLRPC getAllPagesEx method. diff -r 939a545df516 -r 14d12dfdcd4c MoinMoin/xmlrpc/__init__.py --- a/MoinMoin/xmlrpc/__init__.py Thu Jul 20 00:19:22 2006 +0200 +++ b/MoinMoin/xmlrpc/__init__.py Fri Jul 21 01:54:53 2006 +0200 @@ -219,10 +219,37 @@ class XmlRpcBase: """ Get all pages readable by current user @rtype: list - @return: a list of all pages. The result is a list of utf-8 strings. - """ - pagelist = self.request.rootpage.getPageList() - return map(self._outstr, pagelist) + @return: a list of all pages. 
+ """ + + return [self._outstr(x) for x in self.request.rootpage.getPageList()] + + + def xmlrpc_getAllPagesEx(self, opts=None): + """ Get all pages readable by current user. Not an WikiRPC method. + + @param opts: dictionary that can contain the following arguments: + include_system:: set it to false if you do not want to see system pages + include_revno:: set it to True if you want to have lists with [pagename, revno] + include_deleted:: set it to True if you want to include deleted pages + @rtype: list + @return: a list of all pages. + """ + options = {"include_system": True, "include_revno": False, "include_deleted": False} + if opts is not None: + options.update(opts) + + if options["include_system"]: + filter = lambda name: not wikiutil.isSystemPage(self.request, name) + else: + filter = lambda name: True + + pagelist = self.request.rootpage.getPageList(filter=filter, exists=not options["include_deleted"]) + + if options['include_revno']: + return [[self._outstr(x), Page(self.request, x).get_real_rev()] for x in pagelist] + else: + return [self._outstr(x) for x in pagelist] def xmlrpc_getRecentChanges(self, date): """ Get RecentChanges since date # HG changeset patch # User Alexander Schremmer # Date 1153439766 -7200 # Node ID 807f80c76d810de5be16e48bb4bdddc47f0fda19 # Parent 14d12dfdcd4c0de9c786c74b993170a71dea0829 Work on SyncPages. Fixed a few bugs, added new method to RemoteWiki, build regexes from the RemotePageList. 
diff -r 14d12dfdcd4c -r 807f80c76d81 MoinMoin/action/SyncPages.py --- a/MoinMoin/action/SyncPages.py Fri Jul 21 01:54:53 2006 +0200 +++ b/MoinMoin/action/SyncPages.py Fri Jul 21 01:56:06 2006 +0200 @@ -9,26 +9,39 @@ """ import os +import re import zipfile import xmlrpclib from datetime import datetime from MoinMoin import wikiutil, config, user +from MoinMoin.packages import unpackLine from MoinMoin.PageEditor import PageEditor from MoinMoin.Page import Page from MoinMoin.wikidicts import Dict class ActionStatus(Exception): pass +class RemotePage(object): + """ This class represents a page in (another) wiki. """ + def __init__(self, name, revno): + self.name = name + self.revno = revno + class RemoteWiki(object): """ This class should be the base for all implementations of remote wiki classes. """ + + def __repr__(self): + """ Returns a representation of the instance for debugging purposes. """ + return NotImplemented + def getInterwikiName(self): """ Returns the interwiki name of the other wiki. """ return NotImplemented - - def __repr__(self): - """ Returns a representation of the instance for debugging purposes. """ + + def getRemotePages(self): + """ Returns a list of RemotePage instances. """ return NotImplemented class MoinWiki(RemoteWiki): @@ -38,6 +51,7 @@ class MoinWiki(RemoteWiki): self.valid = not wikitag_bad self.xmlrpc_url = self.wiki_url + "?action=xmlrpc2" self.connection = self.createConnection() + # XXX add version and interwiki name checking! 
def createConnection(self): return xmlrpclib.ServerProxy(self.xmlrpc_url, allow_none=True) @@ -45,6 +59,10 @@ class MoinWiki(RemoteWiki): # Methods implementing the RemoteWiki interface def getInterwikiName(self): return self.connection.interwikiName() + + def getRemotePages(self): + pages = self.connection.getAllPagesEx({"include_revno": True}) + return [RemotePage(unicode(name), revno) for name, revno in pages] def __repr__(self): return "" % (self.valid, self.wiki_url) @@ -55,44 +73,74 @@ class ActionClass: self.pagename = pagename self.page = Page(request, pagename) - def parsePage(self): - defaults = { + def parse_page(self): + options = { "remotePrefix": "", "localPrefix": "", - "remoteWiki": "" + "remoteWiki": "", + "localMatch": None, + "remoteMatch": None, + "pageList": None, + "groupList": None, } + + options.update(Dict(self.request, self.pagename).get_dict()) + + # Convert page and group list strings to lists + if options["pageList"] is not None: + options["pageList"] = unpackLine(options["pageList"], ",") + if options["groupList"] is not None: + options["groupList"] = unpackLine(options["groupList"], ",") + + return options + + def fix_params(self, params): + """ Does some fixup on the parameters. """ + + # merge the pageList case into the remoteMatch case + if params["pageList"] is not None: + params["remoteMatch"] = u'|'.join([r'^%s$' % re.escape(name) for name in params["pageList"]]) + + if params["localMatch"] is not None: + params["localMatch"] = re.compile(params["localMatch"], re.U) - defaults.update(Dict(self.request, self.pagename).get_dict()) - return defaults - + if params["remoteMatch"] is not None: + params["remoteMatch"] = re.compile(params["remoteMatch"], re.U) + + return params + def render(self): """ Render action - This action returns a wiki page with optional message, or - redirects to new page. + This action returns a status message. 
""" _ = self.request.getText - - params = self.parsePage() - + + params = self.fix_params(self.parse_page()) + + try: if not self.request.cfg.interwikiname: raise ActionStatus(_("Please set an interwikiname in your wikiconfig (see HelpOnConfiguration) to be able to use this action.")) if not params["remoteWiki"]: raise ActionStatus(_("Incorrect parameters. Please supply at least the ''remoteWiki'' parameter.")) - + remote = MoinWiki(params["remoteWiki"]) - + if not remote.valid: raise ActionStatus(_("The ''remoteWiki'' is unknown.")) - - # ... - self.sync(params) + + self.sync(params, remote) except ActionStatus, e: return self.page.send_page(self.request, msg=u'

%s

\n' % (e.args[0], )) return self.page.send_page(self.request, msg=_("Syncronisation finished.")) + def sync(self, params, remote): + """ This method does the syncronisation work. """ + + r_pages = remote.getRemotePages() + def execute(pagename, request): ActionClass(pagename, request).render() # HG changeset patch # User Alexander Schremmer # Date 1153439896 -7200 # Node ID 86999b189c45e9c1fd4988fc289a3bd4805466fb # Parent 807f80c76d810de5be16e48bb4bdddc47f0fda19 Updated my CHANGES file. diff -r 807f80c76d81 -r 86999b189c45 docs/CHANGES.aschremmer --- a/docs/CHANGES.aschremmer Fri Jul 21 01:56:06 2006 +0200 +++ b/docs/CHANGES.aschremmer Fri Jul 21 01:58:16 2006 +0200 @@ -11,7 +11,7 @@ Branch moin/1.6-sync-aschremmer * Implement a cross-site authentication system, i.e. mainly an identity storage. * Clean up trailing whitespace. - * Add page locking. + * Add page locking, i.e. use the one in the new storage layer. * How about using unique IDs that just derive from the interwikiname? New Features: @@ -24,6 +24,8 @@ Branch moin/1.6-sync-aschremmer * XMLRPC method to merge remote changes locally * XMLRPC method to get the interwiki name * TagStore/PickleTagStore class + * XMLRPC method to get the pagelist in a special way (revnos, + no system pages etc.) Bugfixes (only stuff that is buggy in moin/1.6 main branch): * Conflict resolution fixes. @@ -62,7 +64,7 @@ Week 28: Debian-Edu Developer Camp. Impl added interwikiName XMLRPC method, added mergeChanges XMLRPC method. Started analysis of the moinupdate script written by Stefan Merten. Week 29: Finished first version of the mergeChanges method. Added Tag and TagStore classes which are currently - using pickle-based storage. + using pickle-based storage. Added getAllPagesEx XMLRPC method. 
2006-07-18: the requested daily entry is missing here, see http://moinmoin.wikiwikiweb.de/GoogleSoc2006/BetterProgress # HG changeset patch # User Thomas Waldmann # Date 1153409068 -7200 # Node ID c51d2f5b04db736de668f5c105719fbd0afd3f65 # Parent 939a545df5167e1af91c643c879b20fc7ed18a48 added missing daily CHANGES entry diff -r 939a545df516 -r c51d2f5b04db docs/CHANGES.aschremmer --- a/docs/CHANGES.aschremmer Thu Jul 20 00:19:22 2006 +0200 +++ b/docs/CHANGES.aschremmer Thu Jul 20 17:24:28 2006 +0200 @@ -65,6 +65,7 @@ Week 29: Finished first version of the m using pickle-based storage. 2006-07-18: the requested daily entry is missing here, see http://moinmoin.wikiwikiweb.de/GoogleSoc2006/BetterProgress +2006-07-19: the requested daily entry is missing here, see http://moinmoin.wikiwikiweb.de/GoogleSoc2006/BetterProgress Time plan ========= # HG changeset patch # User Alexander Schremmer # Date 1153526325 -7200 # Node ID 742545e9916713547be251819a850494a2e65997 # Parent 06a2fd20eab79cd6753251b4a51f0764c9c0595e Removed unnecessary ACL cache clean call. diff -r 06a2fd20eab7 -r 742545e99167 MoinMoin/xmlrpc/__init__.py --- a/MoinMoin/xmlrpc/__init__.py Fri Jul 21 02:36:01 2006 +0200 +++ b/MoinMoin/xmlrpc/__init__.py Sat Jul 22 01:58:45 2006 +0200 @@ -656,7 +656,6 @@ class XmlRpcBase: # write page try: currentpage.saveText(newcontents.encode("utf-8"), last_remote_rev, comment=comment) - currentpage.clean_acl_cache() except PageEditor.EditConflict: return LASTREV_INVALID # HG changeset patch # User Alexander Schremmer # Date 1153526343 -7200 # Node ID eac4ea43088ea3298b929c544affdd640a560dd7 # Parent 742545e9916713547be251819a850494a2e65997 Documented code, fixed bugs. 
diff -r 742545e99167 -r eac4ea43088e MoinMoin/wikisync.py --- a/MoinMoin/wikisync.py Sat Jul 22 01:58:45 2006 +0200 +++ b/MoinMoin/wikisync.py Sat Jul 22 01:59:03 2006 +0200 @@ -7,7 +7,7 @@ """ try: - import cpickle as pickle + import cPickle as pickle except ImportError: import pickle @@ -16,9 +16,18 @@ class Tag(object): """ This class is used to store information about merging state. """ def __init__(self, remote_wiki, remote_rev, current_rev): + """ Creates a new Tag. + + @param remote_wiki: The identifier of the remote wiki. + @param remote_rev: The revision number on the remote end. + @param current_rev: The related local revision. + """ self.remote_wiki = remote_wiki self.remote_rev = remote_rev self.current_rev = current_rev + + def __repr__(self): + return u"" % (self.remote_wiki, self.remote_rev, self.current_rev) class AbstractTagStore(object): @@ -26,21 +35,35 @@ class AbstractTagStore(object): that manage the storage of tags. """ def __init__(self, page): + """ Subclasses don't need to call this method. It is just here to enforce + them having accept a page argument at least. """ pass def add(self, **kwargs): + """ Adds a Tag object to the current TagStore. """ print "Got tag for page %r: %r" % (self.page, kwargs) + return NotImplemented + + def get_all_tags(self): + """ Returns a list of all Tag objects associated to this page. """ + return NotImplemented class PickleTagStore(AbstractTagStore): """ This class manages the storage of tags in pickle files. """ def __init__(self, page): + """ Creates a new TagStore that uses pickle files. + + @param page: a Page object where the tags should be related to + """ + self.page = page self.filename = page.getPagePath('synctags', use_underlay=0, check_create=1, isfile=1) self.load() def load(self): + """ Loads the tags from the data file. 
""" try: datafile = file(self.filename, "rb") except IOError: @@ -50,14 +73,20 @@ class PickleTagStore(AbstractTagStore): datafile.close() def commit(self): + """ Writes the memory contents to the data file. """ datafile = file(self.filename, "wb") pickle.dump(self.tags, datafile, protocol=pickle.HIGHEST_PROTOCOL) datafile.close() - # public methods + # public methods --------------------------------------------------- def add(self, **kwargs): - print "Got tag for page %r: %r" % (self.page, kwargs) self.tags.append(Tag(**kwargs)) self.commit() + + def get_all_tags(self): + return self.tags + +# currently we just have one implementation, so we do not need +# a factory method TagStore = PickleTagStore \ No newline at end of file # HG changeset patch # User Alexander Schremmer # Date 1153526359 -7200 # Node ID e52ab62b3a0a9a65af614ec8d2593d8de7514ad1 # Parent eac4ea43088ea3298b929c544affdd640a560dd7 Added test for wikisync.py diff -r eac4ea43088e -r e52ab62b3a0a MoinMoin/_tests/test_wikisync.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/MoinMoin/_tests/test_wikisync.py Sat Jul 22 01:59:19 2006 +0200 @@ -0,0 +1,33 @@ +# -*- coding: iso-8859-1 -*- +""" +MoinMoin - MoinMoin.wikisync tests + +@copyright: 2006 MoinMoin:AlexanderSchremmer +@license: GNU GPL, see COPYING for details. +""" + +from unittest import TestCase +from MoinMoin.Page import Page +from MoinMoin.PageEditor import PageEditor +from MoinMoin._tests import TestConfig, TestSkipped + +from MoinMoin.wikisync import TagStore + + +class UnsafeSyncTestcase(TestCase): + """ Tests various things related to syncing. Note that it is not possible + to create pages without cluttering page revision currently, so we have to use + the testwiki. 
""" + + def setUp(self): + if not getattr(self.request.cfg, 'is_test_wiki', False): + raise TestSkipped('This test needs to be run using the test wiki.') + + def testBasicTagThings(self): + page = PageEditor(self.request, "FrontPage") + tags = TagStore(page) + self.assert_(not tags.get_all_tags()) + tags.add(remote_wiki="foo", remote_rev=1, current_rev=2) + tags = TagStore(page) # reload + self.assert_(tags.get_all_tags()[0].remote_rev == 1) + # HG changeset patch # User Thomas Waldmann # Date 1153503251 -7200 # Node ID b339cd4e61c9c8fd62e3255e1861ecfa02b361cc # Parent 06a2fd20eab79cd6753251b4a51f0764c9c0595e added missing daily CHANGES entry diff -r 06a2fd20eab7 -r b339cd4e61c9 docs/CHANGES.aschremmer --- a/docs/CHANGES.aschremmer Fri Jul 21 02:36:01 2006 +0200 +++ b/docs/CHANGES.aschremmer Fri Jul 21 19:34:11 2006 +0200 @@ -68,6 +68,7 @@ Week 29: Finished first version of the m 2006-07-18: the requested daily entry is missing here, see http://moinmoin.wikiwikiweb.de/GoogleSoc2006/BetterProgress 2006-07-19: the requested daily entry is missing here, see http://moinmoin.wikiwikiweb.de/GoogleSoc2006/BetterProgress +2006-07-20: the requested daily entry is missing here, see http://moinmoin.wikiwikiweb.de/GoogleSoc2006/BetterProgress Time plan ========= # HG changeset patch # User Alexander Schremmer # Date 1153528380 -7200 # Node ID ad101845642ba30d55310d9d791733679ecf9353 # Parent e39b47ed9e2a3995326be0653fde26c7310a1a43 Call the Ex method from the original method, fixed a bug in the Ex method (wikirpc) diff -r e39b47ed9e2a -r ad101845642b MoinMoin/xmlrpc/__init__.py --- a/MoinMoin/xmlrpc/__init__.py Sat Jul 22 02:00:03 2006 +0200 +++ b/MoinMoin/xmlrpc/__init__.py Sat Jul 22 02:33:00 2006 +0200 @@ -222,7 +222,9 @@ class XmlRpcBase: @return: a list of all pages. 
""" - return [self._outstr(x) for x in self.request.rootpage.getPageList()] + # the official WikiRPC interface is implemented by the extended method + # as well + return self.xmlrpc_getAllPagesEx() def xmlrpc_getAllPagesEx(self, opts=None): @@ -239,7 +241,7 @@ class XmlRpcBase: if opts is not None: options.update(opts) - if options["include_system"]: + if not options["include_system"]: filter = lambda name: not wikiutil.isSystemPage(self.request, name) else: filter = lambda name: True # HG changeset patch # User Alexander Schremmer # Date 1153528443 -7200 # Node ID f9568edf1ff01e282d32d40e7e0740d512b55e8e # Parent ad101845642ba30d55310d9d791733679ecf9353 Introduced MoinLocalWiki class, added code that compares page lists for testing. diff -r ad101845642b -r f9568edf1ff0 MoinMoin/action/SyncPages.py --- a/MoinMoin/action/SyncPages.py Sat Jul 22 02:33:00 2006 +0200 +++ b/MoinMoin/action/SyncPages.py Sat Jul 22 02:34:03 2006 +0200 @@ -14,19 +14,35 @@ import xmlrpclib import xmlrpclib from datetime import datetime +# Compatiblity to Python 2.3 +try: + set +except NameError: + from sets import Set as set + + from MoinMoin import wikiutil, config, user from MoinMoin.packages import unpackLine from MoinMoin.PageEditor import PageEditor from MoinMoin.Page import Page from MoinMoin.wikidicts import Dict + class ActionStatus(Exception): pass + class RemotePage(object): """ This class represents a page in (another) wiki. """ def __init__(self, name, revno): self.name = name self.revno = revno + + def __repr__(self): + return u"%s<%i>" % (self.name, self.revno) + + def __lt__(self, other): + return self.name > other.name + class RemoteWiki(object): """ This class should be the base for all implementations of remote wiki @@ -40,12 +56,15 @@ class RemoteWiki(object): """ Returns the interwiki name of the other wiki. """ return NotImplemented - def getRemotePages(self): + def getPages(self): """ Returns a list of RemotePage instances. 
""" return NotImplemented -class MoinWiki(RemoteWiki): - def __init__(self, interwikiname): + +class MoinRemoteWiki(RemoteWiki): + """ Used for MoinMoin wikis reachable via XMLRPC. """ + def __init__(self, request, interwikiname): + self.request = request wikitag, wikiurl, wikitail, wikitag_bad = wikiutil.resolve_wiki(self.request, '%s:""' % (interwikiname, )) self.wiki_url = wikiutil.mapURL(self.request, wikiurl) self.valid = not wikitag_bad @@ -54,18 +73,39 @@ class MoinWiki(RemoteWiki): # XXX add version and interwiki name checking! def createConnection(self): - return xmlrpclib.ServerProxy(self.xmlrpc_url, allow_none=True) + if self.valid: + return xmlrpclib.ServerProxy(self.xmlrpc_url, allow_none=True) + else: + return None # Methods implementing the RemoteWiki interface def getInterwikiName(self): return self.connection.interwikiName() - def getRemotePages(self): - pages = self.connection.getAllPagesEx({"include_revno": True}) + def getPages(self): + pages = self.connection.getAllPagesEx({"include_revno": True, "include_deleted": True}) return [RemotePage(unicode(name), revno) for name, revno in pages] def __repr__(self): - return "" % (self.valid, self.wiki_url) + return "" % (self.wiki_url, self.valid) + + +class MoinLocalWiki(RemoteWiki): + """ Used for the current MoinMoin wiki. """ + def __init__(self, request): + self.request = request + + # Methods implementing the RemoteWiki interface + def getInterwikiName(self): + return self.request.cfg.interwikiname + + def getPages(self): + l_pages = [[x, Page(self.request, x).get_real_rev()] for x in self.request.rootpage.getPageList(exists=0)] + return [RemotePage(unicode(name), revno) for name, revno in l_pages] + + def __repr__(self): + return "" + class ActionClass: def __init__(self, pagename, request): @@ -126,21 +166,28 @@ class ActionClass: if not params["remoteWiki"]: raise ActionStatus(_("Incorrect parameters. 
Please supply at least the ''remoteWiki'' parameter.")) - remote = MoinWiki(params["remoteWiki"]) + remote = MoinRemoteWiki(self.request, params["remoteWiki"]) + local = MoinLocalWiki(self.request) if not remote.valid: raise ActionStatus(_("The ''remoteWiki'' is unknown.")) - self.sync(params, remote) + self.sync(params, local, remote) except ActionStatus, e: return self.page.send_page(self.request, msg=u'

%s

\n' % (e.args[0], )) return self.page.send_page(self.request, msg=_("Syncronisation finished.")) - def sync(self, params, remote): + def sync(self, params, local, remote): """ This method does the syncronisation work. """ - r_pages = remote.getRemotePages() + r_pages = remote.getPages() + l_pages = local.getPages() + + # some initial test code + r_new_pages = u",".join(set([repr(x) for x in r_pages]) - set([repr(x) for x in l_pages])) + raise ActionStatus("These pages are in the remote wiki, but not local: " + r_new_pages) + def execute(pagename, request): ActionClass(pagename, request).render() # HG changeset patch # User Alexander Schremmer # Date 1153528593 -7200 # Node ID 9e5749f46b74060ac7714ae29ebbaabba49988aa # Parent f9568edf1ff01e282d32d40e7e0740d512b55e8e Minor bug in SyncPages. diff -r f9568edf1ff0 -r 9e5749f46b74 MoinMoin/action/SyncPages.py --- a/MoinMoin/action/SyncPages.py Sat Jul 22 02:34:03 2006 +0200 +++ b/MoinMoin/action/SyncPages.py Sat Jul 22 02:36:33 2006 +0200 @@ -41,7 +41,7 @@ class RemotePage(object): return u"%s<%i>" % (self.name, self.revno) def __lt__(self, other): - return self.name > other.name + return self.name < other.name class RemoteWiki(object): # HG changeset patch # User Alexander Schremmer # Date 1153567445 -7200 # Node ID 719c89b31850e674667bf2e5d0ff835e3e1c1d70 # Parent 9e5749f46b74060ac7714ae29ebbaabba49988aa Fixed Python 2.5 incompatibility issue. 
diff -r 9e5749f46b74 -r 719c89b31850 MoinMoin/action/SyncPages.py --- a/MoinMoin/action/SyncPages.py Sat Jul 22 02:36:33 2006 +0200 +++ b/MoinMoin/action/SyncPages.py Sat Jul 22 13:24:05 2006 +0200 @@ -38,7 +38,7 @@ class RemotePage(object): self.revno = revno def __repr__(self): - return u"%s<%i>" % (self.name, self.revno) + return repr(u"%s<%i>" % (self.name, self.revno)) def __lt__(self, other): return self.name < other.name # HG changeset patch # User Alexander Schremmer # Date 1153567479 -7200 # Node ID 0f7f98a19685e4d21b80c54751c64ef1c138c8d3 # Parent 719c89b31850e674667bf2e5d0ff835e3e1c1d70 Fixed sre unpickle issues seen on some machines by avoiding to pickle the regex. diff -r 719c89b31850 -r 0f7f98a19685 MoinMoin/wikidicts.py --- a/MoinMoin/wikidicts.py Sat Jul 22 13:24:05 2006 +0200 +++ b/MoinMoin/wikidicts.py Sat Jul 22 13:24:39 2006 +0200 @@ -26,7 +26,7 @@ from MoinMoin.logfile.editlog import Edi # Version of the internal data structure which is pickled # Please increment if you have changed the structure -DICTS_PICKLE_VERSION = 4 +DICTS_PICKLE_VERSION = 5 class DictBase: @@ -44,12 +44,17 @@ class DictBase: """ self.name = name - self.regex = re.compile(self.regex, re.MULTILINE | re.UNICODE) + self.initRegex() # Get text from page named 'name' p = Page.Page(request, name) text = p.get_raw_body() self.initFromText(text) + + def initRegex(cls): + """ Make it a class attribute to avoid it being pickled. """ + cls.regex = re.compile(cls.regex, re.MULTILINE | re.UNICODE) + initRegex = classmethod(initRegex) def initFromText(self, text): raise NotImplementedError('sub classes should override this') # HG changeset patch # User Alexander Schremmer # Date 1153567536 -7200 # Node ID 4725c8ac809af92b326fc79903b4fe4bc73cf24c # Parent 0f7f98a19685e4d21b80c54751c64ef1c138c8d3 Fixed Python 2.5 incompatibility issues (avoid str exceptions), made the bytecode more resistant for people using data directoriees with different MoinMoin versions. 
diff -r 0f7f98a19685 -r 4725c8ac809a MoinMoin/Page.py --- a/MoinMoin/Page.py Sat Jul 22 13:24:39 2006 +0200 +++ b/MoinMoin/Page.py Sat Jul 22 13:25:36 2006 +0200 @@ -1036,7 +1036,7 @@ class Page: except wikiutil.PluginMissingError: pass else: - raise "Plugin missing error!" # XXX what now? + raise NotImplementedError("Plugin missing error!") # XXX what now? request.formatter = self.formatter self.formatter.setPage(self) if self.hilite_re: @@ -1247,7 +1247,7 @@ class Page: except wikiutil.PluginMissingError: pass else: - raise "No matching parser" # XXX what do we use if nothing at all matches? + raise NotImplementedError("No matching parser") # XXX what do we use if nothing at all matches? # start wiki content div request.write(self.formatter.startContent(content_id)) @@ -1339,7 +1339,7 @@ class Page: except wikiutil.PluginMissingError: pass else: - raise "no matching parser" # XXX what now? + raise NotImplementedError("no matching parser") # XXX what now? return getattr(parser, 'caching', False) return False @@ -1362,11 +1362,15 @@ class Page: try: code = self.loadCache(request) self.execute(request, parser, code) - except 'CacheNeedsUpdate': + except Exception, (msg, ): + if msg != 'CacheNeedsUpdate': + raise try: code = self.makeCache(request, parser) self.execute(request, parser, code) - except 'CacheNeedsUpdate': + except Exception, (msg, ): + if msg != 'CacheNeedsUpdate': + raise request.log('page cache failed after creation') self.format(parser) @@ -1392,19 +1396,21 @@ class Page: cache = caching.CacheEntry(request, self, self.getFormatterName(), scope='item') attachmentsPath = self.getPagePath('attachments', check_create=0) if cache.needsUpdate(self._text_filename(), attachmentsPath): - raise 'CacheNeedsUpdate' + raise Exception('CacheNeedsUpdate') import marshal try: return marshal.loads(cache.content()) + except "CacheNeedsUpdate": # convert old exception into a new one + raise Exception('CacheNeedsUpdate') except (EOFError, ValueError, TypeError): # Bad 
marshal data, must update the cache. # See http://docs.python.org/lib/module-marshal.html - raise 'CacheNeedsUpdate' + raise Exception('CacheNeedsUpdate') except Exception, err: request.log('fail to load "%s" cache: %s' % (self.page_name, str(err))) - raise 'CacheNeedsUpdate' + raise Exception('CacheNeedsUpdate') def makeCache(self, request, parser): """ Format content into code, update cache and return code """ diff -r 0f7f98a19685 -r 4725c8ac809a MoinMoin/formatter/text_python.py --- a/MoinMoin/formatter/text_python.py Sat Jul 22 13:24:39 2006 +0200 +++ b/MoinMoin/formatter/text_python.py Sat Jul 22 13:25:36 2006 +0200 @@ -54,8 +54,9 @@ class Formatter: waspcode_timestamp = int(time.time()) source = [""" moincode_timestamp = int(os.path.getmtime(os.path.dirname(__file__))) -if moincode_timestamp > %d or request.cfg.cfg_mtime > %d: - raise "CacheNeedsUpdate" +cfg_mtime = getattr(request.cfg, "cfg_mtime", None) +if moincode_timestamp > %d or cfg_mtime is None or cfg_mtime > %d: + raise Exception("CacheNeedsUpdate") """ % (waspcode_timestamp, waspcode_timestamp)] # HG changeset patch # User Alexander Schremmer # Date 1153567599 -7200 # Node ID 9a124201327c08ade5cf57a425058ffc8a3a97df # Parent 4725c8ac809af92b326fc79903b4fe4bc73cf24c Documented changes. diff -r 4725c8ac809a -r 9a124201327c docs/CHANGES.aschremmer --- a/docs/CHANGES.aschremmer Sat Jul 22 13:25:36 2006 +0200 +++ b/docs/CHANGES.aschremmer Sat Jul 22 13:26:39 2006 +0200 @@ -28,7 +28,9 @@ Branch moin/1.6-sync-aschremmer no system pages etc.) Bugfixes (only stuff that is buggy in moin/1.6 main branch): - * Conflict resolution fixes. + * Conflict resolution fixes. (merged into main) + * Python 2.5 compatibility fixes in the Page caching logic + * sre pickle issues in the wikidicts code Other Changes: * Refactored conflict resolution and XMLRPC code. 
# HG changeset patch # User Alexander Schremmer # Date 1153568295 -7200 # Node ID 51086fe55b58048b0af0deb7eab41d6fd99ae4ea # Parent 9a124201327c08ade5cf57a425058ffc8a3a97df Added clearing to the TagStore, use it in the test. diff -r 9a124201327c -r 51086fe55b58 MoinMoin/_tests/test_wikisync.py --- a/MoinMoin/_tests/test_wikisync.py Sat Jul 22 13:26:39 2006 +0200 +++ b/MoinMoin/_tests/test_wikisync.py Sat Jul 22 13:38:15 2006 +0200 @@ -22,12 +22,15 @@ class UnsafeSyncTestcase(TestCase): def setUp(self): if not getattr(self.request.cfg, 'is_test_wiki', False): raise TestSkipped('This test needs to be run using the test wiki.') + self.page = PageEditor(self.request, "FrontPage") def testBasicTagThings(self): - page = PageEditor(self.request, "FrontPage") - tags = TagStore(page) + tags = TagStore(self.page) self.assert_(not tags.get_all_tags()) tags.add(remote_wiki="foo", remote_rev=1, current_rev=2) - tags = TagStore(page) # reload + tags = TagStore(self.page) # reload self.assert_(tags.get_all_tags()[0].remote_rev == 1) - + + def tearDown(self): + tags = TagStore(self.page) + tags.clear() diff -r 9a124201327c -r 51086fe55b58 MoinMoin/wikisync.py --- a/MoinMoin/wikisync.py Sat Jul 22 13:26:39 2006 +0200 +++ b/MoinMoin/wikisync.py Sat Jul 22 13:38:15 2006 +0200 @@ -46,6 +46,10 @@ class AbstractTagStore(object): def get_all_tags(self): """ Returns a list of all Tag objects associated to this page. """ + return NotImplemented + + def clear(self): + """ Removes all tags. """ return NotImplemented @@ -86,6 +90,9 @@ class PickleTagStore(AbstractTagStore): def get_all_tags(self): return self.tags + def clear(self): + self.tags = [] + self.commit() # currently we just have one implementation, so we do not need # a factory method # HG changeset patch # User Alexander Schremmer # Date 1153781181 -7200 # Node ID ac85f3b79216c8141f69a8a7d14c538b9bbcca05 # Parent b7544e3bd478c2f715cf777511a946cd6d1f516a Bugfix, convert the exception yielded by exec. 
diff -r b7544e3bd478 -r ac85f3b79216 MoinMoin/Page.py --- a/MoinMoin/Page.py Mon Jul 24 22:18:49 2006 +0200 +++ b/MoinMoin/Page.py Tue Jul 25 00:46:21 2006 +0200 @@ -1389,7 +1389,11 @@ class Page: import MoinMoin if hasattr(MoinMoin, '__loader__'): __file__ = os.path.join(MoinMoin.__loader__.archive, 'dummy') - exec code + + try: + exec code + except "CacheNeedsUpdate": # convert the exception + raise Exception("CacheNeedsUpdate") def loadCache(self, request): """ Return page content cache or raises 'CacheNeedsUpdate' """ # HG changeset patch # User Alexander Schremmer # Date 1153781201 -7200 # Node ID 018bb4266d57d7869e3b14dced252a7a8d59088b # Parent ac85f3b79216c8141f69a8a7d14c538b9bbcca05 Added random_string method. diff -r ac85f3b79216 -r 018bb4266d57 MoinMoin/util/__init__.py --- a/MoinMoin/util/__init__.py Tue Jul 25 00:46:21 2006 +0200 +++ b/MoinMoin/util/__init__.py Tue Jul 25 00:46:41 2006 +0200 @@ -7,7 +7,7 @@ @license: GNU GPL, see COPYING for details. """ -import os, sys, re +import os, sys, re, random ############################################################################# ### XML helper functions @@ -112,3 +112,7 @@ class simpleIO: def close(self): self.buffer = None + +def random_string(length): + chars = ''.join([chr(random.randint(0, 255)) for x in xrange(length)]) + return chars # HG changeset patch # User Alexander Schremmer # Date 1153781588 -7200 # Node ID 6632f9919a8993f83c11647fb23a2086195f7512 # Parent 018bb4266d57d7869e3b14dced252a7a8d59088b Implemented IWID system. 
diff -r 018bb4266d57 -r 6632f9919a89 MoinMoin/action/SyncPages.py --- a/MoinMoin/action/SyncPages.py Tue Jul 25 00:46:41 2006 +0200 +++ b/MoinMoin/action/SyncPages.py Tue Jul 25 00:53:08 2006 +0200 @@ -80,7 +80,7 @@ class MoinRemoteWiki(RemoteWiki): # Methods implementing the RemoteWiki interface def getInterwikiName(self): - return self.connection.interwikiName() + return self.connection.interwikiName()[0] def getPages(self): pages = self.connection.getAllPagesEx({"include_revno": True, "include_deleted": True}) diff -r 018bb4266d57 -r 6632f9919a89 MoinMoin/config/multiconfig.py --- a/MoinMoin/config/multiconfig.py Tue Jul 25 00:46:41 2006 +0200 +++ b/MoinMoin/config/multiconfig.py Tue Jul 25 00:53:08 2006 +0200 @@ -7,8 +7,12 @@ @license: GNU GPL, see COPYING for details. """ -import re, os, sys -from MoinMoin import error +import re +import os +import sys +import time + +from MoinMoin import error, util import MoinMoin.auth as authmodule _url_re_cache = None @@ -547,6 +551,37 @@ reStructuredText Quick Reference # check if mail is possible and set flag: self.mail_enabled = (self.mail_smarthost is not None or self.mail_sendmail is not None) and self.mail_from + + # interwiki ID processing + self.load_IWID() + + def load_IWID(self): + """ Loads the InterWikiID of this instance. It is used to identify the instance + globally. 
+ The data file can be found in data/IWID + The IWID is available as cfg.iwid + The full IWID containing the interwiki name is available as cfg.iwid_full + """ + iwid_path = os.path.join(self.data_dir, "IWID") + + try: + iwid_file = file(iwid_path, "rb") + iwid = iwid_file.readline().strip() + iwid_file.close() + except IOError: + iwid = None + + if iwid is None: + iwid = util.random_string(16).encode("hex") + "-" + str(int(time.time())) + iwid_file = file(iwid_path, "wb") + iwid_file.write(iwid) + iwid_file.close() + + self.iwid = iwid + if self.interwikiname is not None: + self.iwid_full = iwid + ":" + self.interwikiname + else: + self.iwid_full = iwid def _config_check(self): """ Check namespace and warn about unknown names diff -r 018bb4266d57 -r 6632f9919a89 MoinMoin/xmlrpc/__init__.py --- a/MoinMoin/xmlrpc/__init__.py Tue Jul 25 00:46:41 2006 +0200 +++ b/MoinMoin/xmlrpc/__init__.py Tue Jul 25 00:53:08 2006 +0200 @@ -20,7 +20,8 @@ when really necessary (like for transferring binary files like attachments maybe). - @copyright: 2003-2005 by Thomas Waldmann + @copyright: 2003-2006 MoinMoin:ThomasWaldmann + @copyright: 2004-2006 MoinMoin:AlexanderSchremmer @license: GNU GPL, see COPYING for details """ from MoinMoin.util import pysupport @@ -608,12 +609,13 @@ class XmlRpcBase: return {"conflict": conflict, "diff": diffblob, "diffversion": 1, "current": currentpage.get_real_rev()} def xmlrpc_interwikiName(self): - """ Returns the interwiki name of the current wiki. """ + """ Returns the interwiki name and the IWID of the current wiki. """ name = self.request.cfg.interwikiname + iwid = self.request.cfg.iwid if name is None: - return None - else: - return self._outstr(name) + return [None, iwid] + else: + return [self._outstr(name), iwid] def xmlrpc_mergeChanges(self, pagename, diff, local_rev, delta_remote_rev, last_remote_rev, interwiki_name): """ Merges a diff sent by the remote machine and returns the number of the new revision. 
# HG changeset patch # User Alexander Schremmer # Date 1153781597 -7200 # Node ID 59cc54eb48aba00cd9809dfbb6956300f91c6e0b # Parent 6632f9919a8993f83c11647fb23a2086195f7512 Updated my status file. diff -r 6632f9919a89 -r 59cc54eb48ab docs/CHANGES.aschremmer --- a/docs/CHANGES.aschremmer Tue Jul 25 00:53:08 2006 +0200 +++ b/docs/CHANGES.aschremmer Tue Jul 25 00:53:17 2006 +0200 @@ -26,6 +26,7 @@ Branch moin/1.6-sync-aschremmer * TagStore/PickleTagStore class * XMLRPC method to get the pagelist in a special way (revnos, no system pages etc.) + * IWID support - i.e. every instance has a unique ID Bugfixes (only stuff that is buggy in moin/1.6 main branch): * Conflict resolution fixes. (merged into main) @@ -67,6 +68,7 @@ Week 28: Debian-Edu Developer Camp. Impl script written by Stefan Merten. Week 29: Finished first version of the mergeChanges method. Added Tag and TagStore classes which are currently using pickle-based storage. Added getAllPagesEx XMLRPC method. +Week 30: Implemented IWID support, added function to generate random strings. 2006-07-18: the requested daily entry is missing here, see http://moinmoin.wikiwikiweb.de/GoogleSoc2006/BetterProgress 2006-07-19: the requested daily entry is missing here, see http://moinmoin.wikiwikiweb.de/GoogleSoc2006/BetterProgress # HG changeset patch # User Alexander Schremmer # Date 1153861134 -7200 # Node ID 91e8f75b006b6b74ab268f2f8b87694ab8e7e81b # Parent 61142a50c41b0d5ed694c1af0918be6f377eafa9 Fixed handling of anchors in the rst parser. diff -r 61142a50c41b -r 91e8f75b006b MoinMoin/parser/text_rst.py --- a/MoinMoin/parser/text_rst.py Tue Jul 25 00:55:19 2006 +0200 +++ b/MoinMoin/parser/text_rst.py Tue Jul 25 22:58:54 2006 +0200 @@ -381,8 +381,13 @@ class MoinTranslator(html4css1.HTMLTrans node['classes'].append(prefix) else: # Default case - make a link to a wiki page. 
- page = Page(self.request, refuri) - node['refuri'] = page.url(self.request) + pagename = refuri + anchor = '' + if refuri.find('#') != -1: + pagename, anchor = refuri.split('#', 1) + anchor = '#' + anchor + page = MoinMoin.Page.Page(self.request, pagename) + node['refuri'] = page.url(self.request) + anchor if not page.exists(): node['classes'].append('nonexistent') html4css1.HTMLTranslator.visit_reference(self, node) diff -r 61142a50c41b -r 91e8f75b006b docs/CHANGES --- a/docs/CHANGES Tue Jul 25 00:55:19 2006 +0200 +++ b/docs/CHANGES Tue Jul 25 22:58:54 2006 +0200 @@ -186,6 +186,7 @@ Version 1.6.current: * Added a (less broken) MoinMoin.support.difflib, details see there. * BadContent and LocalBadContent now get noindex,nofollow robots header, same as POSTs. + * Fixed handling of anchors in wiki links for the Restructured text parser. Other changes: * we use (again) the same browser compatibility check as FCKeditor uses # HG changeset patch # User Alexander Schremmer # Date 1153819562 -7200 # Node ID 40b708ecc33226eb7d30879789ea7afe96764e0a # Parent 61142a50c41b0d5ed694c1af0918be6f377eafa9 Fixed minor bug in XMLRPC code. 
diff -r 61142a50c41b -r 40b708ecc332 MoinMoin/xmlrpc/__init__.py --- a/MoinMoin/xmlrpc/__init__.py Tue Jul 25 00:55:19 2006 +0200 +++ b/MoinMoin/xmlrpc/__init__.py Tue Jul 25 11:26:02 2006 +0200 @@ -589,13 +589,13 @@ class XmlRpcBase: if from_rev is None: oldcontents = lambda: "" else: - oldpage = Page(request, pagename, rev=from_rev) + oldpage = Page(self.request, pagename, rev=from_rev) oldcontents = lambda: oldpage.get_raw_body_str() if to_rev is None: newcontents = lambda: currentpage.get_raw_body() else: - newpage = Page(request, pagename, rev=to_rev) + newpage = Page(self.request, pagename, rev=to_rev) newcontents = lambda: newpage.get_raw_body_str() newrev = newpage.get_real_rev() # HG changeset patch # User Alexander Schremmer # Date 1153826310 -7200 # Node ID 2ecd1e6c084df0b6c0babdfbf20a6626799bf56d # Parent 40b708ecc33226eb7d30879789ea7afe96764e0a Fixed security issues in MoinMoin.user (do not reveal the ID), added variable hiding to cgitb. diff -r 40b708ecc332 -r 2ecd1e6c084d MoinMoin/support/cgitb.py --- a/MoinMoin/support/cgitb.py Tue Jul 25 11:26:02 2006 +0200 +++ b/MoinMoin/support/cgitb.py Tue Jul 25 13:18:30 2006 +0200 @@ -69,6 +69,11 @@ Content-Type: text/html __UNDEF__ = [] # a special sentinel object + +class HiddenObject: + def __repr__(self): + return "" +HiddenObject = HiddenObject() class HTMLFormatter: """ Minimal html formatter """ @@ -295,7 +300,10 @@ class Frame: if ttype == tokenize.NAME and token not in keyword.kwlist: if lasttoken == '.': if parent is not __UNDEF__: - value = getattr(parent, token, __UNDEF__) + if self.unsafe_name(token): + value = HiddenObject + else: + value = getattr(parent, token, __UNDEF__) vars.append((prefix + token, prefix, value)) else: where, value = self.lookup(token) @@ -324,8 +332,12 @@ class Frame: value = builtins.get(name, __UNDEF__) else: value = getattr(builtins, name, __UNDEF__) + if self.unsafe_name(name): + value = HiddenObject return scope, value + def unsafe_name(self, name): + return name in 
self.frame.f_globals.get("unsafe_names", ()) class View: """ Traceback view """ diff -r 40b708ecc332 -r 2ecd1e6c084d MoinMoin/user.py --- a/MoinMoin/user.py Tue Jul 25 11:26:02 2006 +0200 +++ b/MoinMoin/user.py Tue Jul 25 13:18:30 2006 +0200 @@ -5,6 +5,9 @@ @copyright: 2000-2004 by Jürgen Hermann @license: GNU GPL, see COPYING for details. """ + +# add names here to hide them in the cgitb traceback +unsafe_names = ("id", "key", "val", "user_data", "enc_password") import os, time, sha, codecs @@ -289,9 +292,9 @@ class User: self.language = 'en' def __repr__(self): - return "<%s.%s at 0x%x name:%r id:%s valid:%r>" % ( + return "<%s.%s at 0x%x name:%r valid:%r>" % ( self.__class__.__module__, self.__class__.__name__, - id(self), self.name, self.id, self.valid) + id(self), self.name, self.valid) def make_id(self): """ make a new unique user id """ diff -r 40b708ecc332 -r 2ecd1e6c084d docs/CHANGES.aschremmer --- a/docs/CHANGES.aschremmer Tue Jul 25 11:26:02 2006 +0200 +++ b/docs/CHANGES.aschremmer Tue Jul 25 13:18:30 2006 +0200 @@ -30,8 +30,11 @@ Branch moin/1.6-sync-aschremmer Bugfixes (only stuff that is buggy in moin/1.6 main branch): * Conflict resolution fixes. (merged into main) - * Python 2.5 compatibility fixes in the Page caching logic - * sre pickle issues in the wikidicts code + * Python 2.5 compatibility fixes in the Page caching logic (merged) + * sre pickle issues in the wikidicts code (merged) + * cgitb can hide particular names, this avoids information leaks + if the user files cannot be parsed for example + * Fixed User.__repr__ - it is insane to put the ID in there Other Changes: * Refactored conflict resolution and XMLRPC code. 
# HG changeset patch # User Thomas Waldmann # Date 1153856624 -7200 # Node ID d5cb04aab48dd6d3e151b730104cc55f6905b4a8 # Parent 2ecd1e6c084df0b6c0babdfbf20a6626799bf56d added missing daily CHANGES entries diff -r 2ecd1e6c084d -r d5cb04aab48d docs/CHANGES.aschremmer --- a/docs/CHANGES.aschremmer Tue Jul 25 13:18:30 2006 +0200 +++ b/docs/CHANGES.aschremmer Tue Jul 25 21:43:44 2006 +0200 @@ -76,6 +76,10 @@ 2006-07-18: the requested daily entry is 2006-07-18: the requested daily entry is missing here, see http://moinmoin.wikiwikiweb.de/GoogleSoc2006/BetterProgress 2006-07-19: the requested daily entry is missing here, see http://moinmoin.wikiwikiweb.de/GoogleSoc2006/BetterProgress 2006-07-20: the requested daily entry is missing here, see http://moinmoin.wikiwikiweb.de/GoogleSoc2006/BetterProgress +2006-07-21: the requested daily entry is missing here, see http://moinmoin.wikiwikiweb.de/GoogleSoc2006/BetterProgress +2006-07-22: the requested daily entry is missing here, see http://moinmoin.wikiwikiweb.de/GoogleSoc2006/BetterProgress +2006-07-23: no work on SOC project +2006-07-24: the requested daily entry is missing here, see http://moinmoin.wikiwikiweb.de/GoogleSoc2006/BetterProgress Time plan ========= # HG changeset patch # User Alexander Schremmer # Date 1153905714 -7200 # Node ID 8c8b63ad1d17aed6cfaed9d4789db816c7e4dd13 # Parent 2ecd1e6c084df0b6c0babdfbf20a6626799bf56d Worked around the FastCGI problem on Lighttpd: empty lines in the error log Thanks to Jay Soffian diff -r 2ecd1e6c084d -r 8c8b63ad1d17 MoinMoin/support/thfcgi.py --- a/MoinMoin/support/thfcgi.py Tue Jul 25 13:18:30 2006 +0200 +++ b/MoinMoin/support/thfcgi.py Wed Jul 26 11:21:54 2006 +0200 @@ -327,17 +327,18 @@ class Request: self.have_finished = 1 # stderr - self.err.reset() - rec = Record() - rec.rec_type = FCGI_STDERR - rec.req_id = self.req_id - data = self.err.read() - while data: - chunk, data = self.getNextChunk(data) - rec.content = chunk - rec.writeRecord(self.conn) - rec.content = "" - 
rec.writeRecord(self.conn) # Terminate stream + if self.err.tell(): # just send err record if there is data on the err stream + self.err.reset() + rec = Record() + rec.rec_type = FCGI_STDERR + rec.req_id = self.req_id + data = self.err.read() + while data: + chunk, data = self.getNextChunk(data) + rec.content = chunk + rec.writeRecord(self.conn) + rec.content = "" + rec.writeRecord(self.conn) # Terminate stream # stdout self.out.reset() diff -r 2ecd1e6c084d -r 8c8b63ad1d17 docs/CHANGES.aschremmer --- a/docs/CHANGES.aschremmer Tue Jul 25 13:18:30 2006 +0200 +++ b/docs/CHANGES.aschremmer Wed Jul 26 11:21:54 2006 +0200 @@ -35,6 +35,7 @@ Branch moin/1.6-sync-aschremmer * cgitb can hide particular names, this avoids information leaks if the user files cannot be parsed for example * Fixed User.__repr__ - it is insane to put the ID in there + * Worked around the FastCGI problem on Lighttpd: empty lines in the error log, thanks to Jay Soffian Other Changes: * Refactored conflict resolution and XMLRPC code. # HG changeset patch # User Alexander Schremmer # Date 1154095279 -7200 # Node ID 029754c52b111c5dc41fc37719536fabd66c7ea8 # Parent e015da1436530cf41f6ac76aa987d5c6c3925ca0 Changed file format of meta file (\r\n line endings). Enhanced docstrings. diff -r e015da143653 -r 029754c52b11 MoinMoin/wikiutil.py --- a/MoinMoin/wikiutil.py Wed Jul 26 11:22:19 2006 +0200 +++ b/MoinMoin/wikiutil.py Fri Jul 28 16:01:19 2006 +0200 @@ -443,7 +443,7 @@ class MetaDict(dict): if key in INTEGER_METAS: value = str(value) meta.append("%s: %s" % (key, value)) - meta = '\n'.join(meta) + meta = '\r\n'.join(meta) # XXX what does happen if the metafile is being read or written to in another process? 
metafile = codecs.open(self.metafilename, "w", "utf-8") metafile.write(meta) @@ -452,7 +452,7 @@ class MetaDict(dict): self.dirty = False def sync(self, mtime_usecs=None): - """ sync the in-memory dict to disk (if dirty) """ + """ sync the in-memory dict to the persistent store (if dirty) """ if self.dirty: if not mtime_usecs is None: self.__setitem__('mtime', str(mtime_usecs)) @@ -469,6 +469,8 @@ class MetaDict(dict): raise def __setitem__(self, key, value): + """ Sets a dictionary entry. You actually have to call sync to write it + to the persistent store. """ try: oldvalue = dict.__getitem__(self, key) except KeyError: # HG changeset patch # User Alexander Schremmer # Date 1154095317 -7200 # Node ID f8ba3b0ca9f455a801fe8d1915efcca66d58e67a # Parent 029754c52b111c5dc41fc37719536fabd66c7ea8 Use the meta dict instead of a distinct file for the IWID. diff -r 029754c52b11 -r f8ba3b0ca9f4 MoinMoin/config/multiconfig.py --- a/MoinMoin/config/multiconfig.py Fri Jul 28 16:01:19 2006 +0200 +++ b/MoinMoin/config/multiconfig.py Fri Jul 28 16:01:57 2006 +0200 @@ -12,7 +12,7 @@ import sys import sys import time -from MoinMoin import error, util +from MoinMoin import error, util, wikiutil import MoinMoin.auth as authmodule _url_re_cache = None @@ -552,30 +552,24 @@ reStructuredText Quick Reference # check if mail is possible and set flag: self.mail_enabled = (self.mail_smarthost is not None or self.mail_sendmail is not None) and self.mail_from + self.meta_dict = wikiutil.MetaDict(os.path.join(data_dir, 'meta')) + # interwiki ID processing self.load_IWID() def load_IWID(self): """ Loads the InterWikiID of this instance. It is used to identify the instance globally. 
- The data file can be found in data/IWID The IWID is available as cfg.iwid The full IWID containing the interwiki name is available as cfg.iwid_full """ - iwid_path = os.path.join(self.data_dir, "IWID") try: - iwid_file = file(iwid_path, "rb") - iwid = iwid_file.readline().strip() - iwid_file.close() - except IOError: - iwid = None - - if iwid is None: + iwid = self.meta_dict['IWID'] + except KeyError: iwid = util.random_string(16).encode("hex") + "-" + str(int(time.time())) - iwid_file = file(iwid_path, "wb") - iwid_file.write(iwid) - iwid_file.close() + self.meta_dict['IWID'] = iwid + self.meta_dict.sync() self.iwid = iwid if self.interwikiname is not None: # HG changeset patch # User Alexander Schremmer # Date 1154098920 -7200 # Node ID 26ed423681e08f4a5e0e7106a91443259f89c5fb # Parent f8ba3b0ca9f455a801fe8d1915efcca66d58e67a Commented a block of debugging code diff -r f8ba3b0ca9f4 -r 26ed423681e0 MoinMoin/util/lock.py --- a/MoinMoin/util/lock.py Fri Jul 28 16:01:57 2006 +0200 +++ b/MoinMoin/util/lock.py Fri Jul 28 17:02:00 2006 +0200 @@ -11,9 +11,9 @@ import os, tempfile, time, errno # Temporary debugging aid, to be replaced with system wide debuging # in release 3000. -import sys -def log(msg): - sys.stderr.write('[%s] lock: %s' % (time.asctime(), msg)) +#import sys +#def log(msg): +# sys.stderr.write('[%s] lock: %s' % (time.asctime(), msg)) class Timer: # HG changeset patch # User Alexander Schremmer # Date 1154098933 -7200 # Node ID ba25ee4ea61de2d446cd8ddc5e60ee3829b7c4ff # Parent 26ed423681e08f4a5e0e7106a91443259f89c5fb Added locking to the MetaDict code. 
diff -r 26ed423681e0 -r ba25ee4ea61d MoinMoin/wikiutil.py --- a/MoinMoin/wikiutil.py Fri Jul 28 17:02:00 2006 +0200 +++ b/MoinMoin/wikiutil.py Fri Jul 28 17:02:13 2006 +0200 @@ -10,7 +10,7 @@ import codecs, types import codecs, types from MoinMoin import util, version, config -from MoinMoin.util import pysupport, filesys +from MoinMoin.util import pysupport, filesys, lock # Exceptions class InvalidFileNameError(Exception): @@ -410,6 +410,9 @@ class MetaDict(dict): self.metafilename = metafilename self.dirty = False self.loaded = False + lock_dir = os.path.join(self.metafilename, '..', 'cache', '__metalock__') + self.rlock = lock.ReadLock(lock_dir, 60.0) + self.wlock = lock.WriteLock(lock_dir, 60.0) def _get_meta(self): """ get the meta dict from an arbitrary filename. @@ -417,11 +420,15 @@ class MetaDict(dict): @param metafilename: the name of the file to read @return: dict with all values or {} if empty or error """ - # XXX what does happen if the metafile is being written to in another process? + try: - metafile = codecs.open(self.metafilename, "r", "utf-8") - meta = metafile.read() # this is much faster than the file's line-by-line iterator - metafile.close() + self.rlock.acquire(3.0) + try: + metafile = codecs.open(self.metafilename, "r", "utf-8") + meta = metafile.read() # this is much faster than the file's line-by-line iterator + metafile.close() + finally: + self.rlock.release() except IOError: meta = u'' for line in meta.splitlines(): @@ -445,9 +452,14 @@ class MetaDict(dict): meta.append("%s: %s" % (key, value)) meta = '\r\n'.join(meta) # XXX what does happen if the metafile is being read or written to in another process? - metafile = codecs.open(self.metafilename, "w", "utf-8") - metafile.write(meta) - metafile.close() + # XXX Then it is corrupted. We need locks. 
+ self.wlock.aquire(5.0) + try: + metafile = codecs.open(self.metafilename, "w", "utf-8") + metafile.write(meta) + metafile.close() + finally: + self.wlock.release() filesys.chmod(self.metafilename, 0666 & config.umask) self.dirty = False # HG changeset patch # User Alexander Schremmer # Date 1154099443 -7200 # Node ID 815f69e6c563997d3880d884049e634d77eeab79 # Parent cf4a2f11c95921275c99712162654b49e05fa654 Added note to my CHANGES file. diff -r cf4a2f11c959 -r 815f69e6c563 docs/CHANGES.aschremmer --- a/docs/CHANGES.aschremmer Fri Jul 28 17:03:03 2006 +0200 +++ b/docs/CHANGES.aschremmer Fri Jul 28 17:10:43 2006 +0200 @@ -36,6 +36,7 @@ Branch moin/1.6-sync-aschremmer if the user files cannot be parsed for example * Fixed User.__repr__ - it is insane to put the ID in there * Worked around the FastCGI problem on Lighttpd: empty lines in the error log, thanks to Jay Soffian + * Fixed the MetaDict code to use locks. Other Changes: * Refactored conflict resolution and XMLRPC code. # HG changeset patch # User Alexander Schremmer # Date 1154119559 -7200 # Node ID 2bcb7bdf94a2dd089317f78822a32e539085cf6e # Parent 815f69e6c563997d3880d884049e634d77eeab79 Implemented an editable InterWikiMap, updated my CHANGES file. diff -r 815f69e6c563 -r 2bcb7bdf94a2 MoinMoin/wikiutil.py --- a/MoinMoin/wikiutil.py Fri Jul 28 17:10:43 2006 +0200 +++ b/MoinMoin/wikiutil.py Fri Jul 28 22:45:59 2006 +0200 @@ -6,8 +6,13 @@ @license: GNU GPL, see COPYING for details. 
""" -import os, re, urllib, cgi -import codecs, types +import cgi +import codecs +import os +import re +import time +import types +import urllib from MoinMoin import util, version, config from MoinMoin.util import pysupport, filesys, lock @@ -497,8 +502,13 @@ class MetaDict(dict): ############################################################################# def load_wikimap(request): """ load interwiki map (once, and only on demand) """ + from MoinMoin.Page import Page + try: _interwiki_list = request.cfg._interwiki_list + now = int(time.time()) + if request.cfg._interwiki_ts + (3*60) < now: # 3 minutes caching time + raise AttributeError # refresh cache except AttributeError: _interwiki_list = {} lines = [] @@ -507,7 +517,7 @@ def load_wikimap(request): # precedence over the shared one, and is thus read AFTER # the shared one intermap_files = request.cfg.shared_intermap - if not isinstance(intermap_files, type([])): + if not isinstance(intermap_files, list): intermap_files = [intermap_files] intermap_files.append(os.path.join(request.cfg.data_dir, "intermap.txt")) @@ -516,6 +526,9 @@ def load_wikimap(request): f = open(filename, "r") lines.extend(f.readlines()) f.close() + + # add the contents of the InterWikiMap page + lines += Page(request, "InterWikiMap").get_raw_body().splitlines() for line in lines: if not line or line[0] == '#': continue @@ -536,6 +549,7 @@ def load_wikimap(request): # save for later request.cfg._interwiki_list = _interwiki_list + request.cfg._interwiki_ts = now return _interwiki_list diff -r 815f69e6c563 -r 2bcb7bdf94a2 docs/CHANGES.aschremmer --- a/docs/CHANGES.aschremmer Fri Jul 28 17:10:43 2006 +0200 +++ b/docs/CHANGES.aschremmer Fri Jul 28 22:45:59 2006 +0200 @@ -12,7 +12,7 @@ Branch moin/1.6-sync-aschremmer identity storage. * Clean up trailing whitespace. * Add page locking, i.e. use the one in the new storage layer. - * How about using unique IDs that just derive from the interwikiname? 
+ * Check what needs to be documented on MoinMaster. New Features: * XMLRPC method to return the Moin version @@ -27,6 +27,7 @@ Branch moin/1.6-sync-aschremmer * XMLRPC method to get the pagelist in a special way (revnos, no system pages etc.) * IWID support - i.e. every instance has a unique ID + * InterWiki page editable in the wiki Bugfixes (only stuff that is buggy in moin/1.6 main branch): * Conflict resolution fixes. (merged into main) @@ -73,14 +74,15 @@ Week 28: Debian-Edu Developer Camp. Impl script written by Stefan Merten. Week 29: Finished first version of the mergeChanges method. Added Tag and TagStore classes which are currently using pickle-based storage. Added getAllPagesEx XMLRPC method. -Week 30: Implemented IWID support, added function to generate random strings. +Week 30: Implemented IWID support, added function to generate random strings. Added support + for editing the InterWikiMap in the wiki. 2006-07-18: the requested daily entry is missing here, see http://moinmoin.wikiwikiweb.de/GoogleSoc2006/BetterProgress 2006-07-19: the requested daily entry is missing here, see http://moinmoin.wikiwikiweb.de/GoogleSoc2006/BetterProgress 2006-07-20: the requested daily entry is missing here, see http://moinmoin.wikiwikiweb.de/GoogleSoc2006/BetterProgress 2006-07-21: the requested daily entry is missing here, see http://moinmoin.wikiwikiweb.de/GoogleSoc2006/BetterProgress 2006-07-22: the requested daily entry is missing here, see http://moinmoin.wikiwikiweb.de/GoogleSoc2006/BetterProgress -2006-07-23: no work on SOC project +2006-07-23: no work on SOC project -- a Sunday 2006-07-24: the requested daily entry is missing here, see http://moinmoin.wikiwikiweb.de/GoogleSoc2006/BetterProgress Time plan # HG changeset patch # User Alexander Schremmer # Date 1154122055 -7200 # Node ID db7863d7a45ee9771dd899a00004df7f2eb1b1fd # Parent 2bcb7bdf94a2dd089317f78822a32e539085cf6e Fixed bug in request.py that avoided showing a traceback if there was a fault after the 
first headers were sent. diff -r 2bcb7bdf94a2 -r db7863d7a45e MoinMoin/request/__init__.py --- a/MoinMoin/request/__init__.py Fri Jul 28 22:45:59 2006 +0200 +++ b/MoinMoin/request/__init__.py Fri Jul 28 23:27:35 2006 +0200 @@ -17,7 +17,11 @@ from MoinMoin.util import IsWin9x class MoinMoinFinish(Exception): """ Raised to jump directly to end of run() function, where finish is called """ - pass + + +class HeadersAlreadySentException(Exception): + """ Is raised if the headers were already sent when emit_http_headers is called.""" + # Timing --------------------------------------------------------------- @@ -1142,7 +1146,7 @@ space between words. Group page name is sent_headers = getattr(self, 'sent_headers', 0) self.sent_headers = sent_headers + 1 if sent_headers: - raise error.InternalError("emit_http_headers called multiple times(%d)! Headers: %r" % (sent_headers, headers)) + raise HeadersAlreadySentException("emit_http_headers called multiple (%d) times! Headers: %r" % (sent_headers, headers)) #else: # self.log("Notice: emit_http_headers called first time. Headers: %r" % headers) @@ -1216,8 +1220,10 @@ space between words. Group page name is @param err: Exception instance or subclass. """ - self.failed = 1 # save state for self.run() - self.emit_http_headers(['Status: 500 MoinMoin Internal Error']) + self.failed = 1 # save state for self.run() + # we should not generate the headers two times + if not getattr(self, 'sent_headers', 0): + self.emit_http_headers(['Status: 500 MoinMoin Internal Error']) #self.setResponseCode(500) self.log('%s: %s' % (err.__class__.__name__, str(err))) from MoinMoin import failure # HG changeset patch # User Alexander Schremmer # Date 1154122127 -7200 # Node ID cdd4e9418547efefadbf5676d59e84f8c636e32a # Parent db7863d7a45ee9771dd899a00004df7f2eb1b1fd Added locking to the PickleTagStore. 
diff -r db7863d7a45e -r cdd4e9418547 MoinMoin/wikisync.py --- a/MoinMoin/wikisync.py Fri Jul 28 23:27:35 2006 +0200 +++ b/MoinMoin/wikisync.py Fri Jul 28 23:28:47 2006 +0200 @@ -6,10 +6,14 @@ @license: GNU GPL, see COPYING for details. """ +import os + try: import cPickle as pickle except ImportError: import pickle + +from MoinMoin.util import lock class Tag(object): @@ -64,23 +68,36 @@ class PickleTagStore(AbstractTagStore): self.page = page self.filename = page.getPagePath('synctags', use_underlay=0, check_create=1, isfile=1) + lock_dir = os.path.join(page.getPagePath('cache', use_underlay=0, check_create=1), '__taglock__') + self.rlock = lock.ReadLock(lock_dir, 60.0) + self.wlock = lock.WriteLock(lock_dir, 60.0) self.load() def load(self): """ Loads the tags from the data file. """ + if not self.rlock.acquire(3.0): + raise EnvironmentError("Could not lock in PickleTagStore") try: - datafile = file(self.filename, "rb") - except IOError: - self.tags = [] - else: - self.tags = pickle.load(datafile) - datafile.close() + try: + datafile = file(self.filename, "rb") + except IOError: + self.tags = [] + else: + self.tags = pickle.load(datafile) + datafile.close() + finally: + self.rlock.release() def commit(self): """ Writes the memory contents to the data file. """ - datafile = file(self.filename, "wb") - pickle.dump(self.tags, datafile, protocol=pickle.HIGHEST_PROTOCOL) - datafile.close() + if not self.wlock.acquire(3.0): + raise EnvironmentError("Could not lock in PickleTagStore") + try: + datafile = file(self.filename, "wb") + pickle.dump(self.tags, datafile, protocol=pickle.HIGHEST_PROTOCOL) + datafile.close() + finally: + self.wlock.release() # public methods --------------------------------------------------- def add(self, **kwargs): # HG changeset patch # User Alexander Schremmer # Date 1154122177 -7200 # Node ID 55ebe3c0867c97adbed1a0b330c4158e8ece36e6 # Parent cdd4e9418547efefadbf5676d59e84f8c636e32a Fixed bugs, raise exceptions on locking. 
diff -r cdd4e9418547 -r 55ebe3c0867c MoinMoin/wikiutil.py --- a/MoinMoin/wikiutil.py Fri Jul 28 23:28:47 2006 +0200 +++ b/MoinMoin/wikiutil.py Fri Jul 28 23:29:37 2006 +0200 @@ -408,14 +408,16 @@ INTEGER_METAS = ['current', 'revision', ] class MetaDict(dict): - """ store meta informations as a dict """ + """ store meta informations as a dict. + XXX It is not thread-safe, add locks! + """ def __init__(self, metafilename): """ create a MetaDict from metafilename """ dict.__init__(self) self.metafilename = metafilename self.dirty = False self.loaded = False - lock_dir = os.path.join(self.metafilename, '..', 'cache', '__metalock__') + lock_dir = os.path.join(metafilename, '..', 'cache', '__metalock__') self.rlock = lock.ReadLock(lock_dir, 60.0) self.wlock = lock.WriteLock(lock_dir, 60.0) @@ -427,7 +429,8 @@ class MetaDict(dict): """ try: - self.rlock.acquire(3.0) + if not self.rlock.acquire(3.0): + raise EnvironmentError("Could not lock in MetaDict") try: metafile = codecs.open(self.metafilename, "r", "utf-8") meta = metafile.read() # this is much faster than the file's line-by-line iterator @@ -456,9 +459,9 @@ class MetaDict(dict): value = str(value) meta.append("%s: %s" % (key, value)) meta = '\r\n'.join(meta) - # XXX what does happen if the metafile is being read or written to in another process? - # XXX Then it is corrupted. We need locks. 
- self.wlock.aquire(5.0) + + if not self.wlock.acquire(5.0): + raise EnvironmentError("Could not lock in MetaDict") try: metafile = codecs.open(self.metafilename, "w", "utf-8") metafile.write(meta) @@ -502,14 +505,16 @@ class MetaDict(dict): ############################################################################# def load_wikimap(request): """ load interwiki map (once, and only on demand) """ - from MoinMoin.Page import Page + + now = int(time.time()) try: _interwiki_list = request.cfg._interwiki_list - now = int(time.time()) if request.cfg._interwiki_ts + (3*60) < now: # 3 minutes caching time raise AttributeError # refresh cache except AttributeError: + from MoinMoin.Page import Page + _interwiki_list = {} lines = [] # HG changeset patch # User Alexander Schremmer # Date 1154122195 -7200 # Node ID b8142cb6aa7fe723079dbc33f1db33cba510dce3 # Parent 55ebe3c0867c97adbed1a0b330c4158e8ece36e6 Updated my CHANGES file. diff -r 55ebe3c0867c -r b8142cb6aa7f docs/CHANGES.aschremmer --- a/docs/CHANGES.aschremmer Fri Jul 28 23:29:37 2006 +0200 +++ b/docs/CHANGES.aschremmer Fri Jul 28 23:29:55 2006 +0200 @@ -2,7 +2,7 @@ Branch moin/1.6-sync-aschremmer =============================== Known main issues: - * How will we store tags? + * How will we store tags? (Metadata support would be handy) * How to handle renames/deletes? * How to handle colliding/empty interwiki names? @@ -38,6 +38,8 @@ Branch moin/1.6-sync-aschremmer * Fixed User.__repr__ - it is insane to put the ID in there * Worked around the FastCGI problem on Lighttpd: empty lines in the error log, thanks to Jay Soffian * Fixed the MetaDict code to use locks. + * Fixed bug in request.py that avoided showing a traceback if there was a fault + after the first headers were sent. Other Changes: * Refactored conflict resolution and XMLRPC code. @@ -75,7 +77,7 @@ Week 29: Finished first version of the m Week 29: Finished first version of the mergeChanges method. 
Added Tag and TagStore classes which are currently using pickle-based storage. Added getAllPagesEx XMLRPC method. Week 30: Implemented IWID support, added function to generate random strings. Added support - for editing the InterWikiMap in the wiki. + for editing the InterWikiMap in the wiki. Added locking to the PickleTagStore and the MetaDict classes. 2006-07-18: the requested daily entry is missing here, see http://moinmoin.wikiwikiweb.de/GoogleSoc2006/BetterProgress 2006-07-19: the requested daily entry is missing here, see http://moinmoin.wikiwikiweb.de/GoogleSoc2006/BetterProgress # HG changeset patch # User Alexander Schremmer # Date 1154203322 -7200 # Node ID 75f8c432385fa653593416cf06123997ccf5a5fd # Parent b8142cb6aa7fe723079dbc33f1db33cba510dce3 Fixed MetaDict on POSIX platforms. diff -r b8142cb6aa7f -r 75f8c432385f MoinMoin/wikiutil.py --- a/MoinMoin/wikiutil.py Fri Jul 28 23:29:55 2006 +0200 +++ b/MoinMoin/wikiutil.py Sat Jul 29 22:02:02 2006 +0200 @@ -417,7 +417,7 @@ class MetaDict(dict): self.metafilename = metafilename self.dirty = False self.loaded = False - lock_dir = os.path.join(metafilename, '..', 'cache', '__metalock__') + lock_dir = os.path.normpath(os.path.join(metafilename, '..', 'cache', '__metalock__')) self.rlock = lock.ReadLock(lock_dir, 60.0) self.wlock = lock.WriteLock(lock_dir, 60.0) # HG changeset patch # User Thomas Waldmann # Date 1154207854 -7200 # Node ID 4633e6aba73e42ee8c2fbe2d1d40374348bf50a5 # Parent 75f8c432385fa653593416cf06123997ccf5a5fd added missing daily entries diff -r 75f8c432385f -r 4633e6aba73e docs/CHANGES.aschremmer --- a/docs/CHANGES.aschremmer Sat Jul 29 22:02:02 2006 +0200 +++ b/docs/CHANGES.aschremmer Sat Jul 29 23:17:34 2006 +0200 @@ -86,6 +86,11 @@ 2006-07-22: the requested daily entry is 2006-07-22: the requested daily entry is missing here, see http://moinmoin.wikiwikiweb.de/GoogleSoc2006/BetterProgress 2006-07-23: no work on SOC project -- a Sunday 2006-07-24: the requested daily entry is missing here, 
see http://moinmoin.wikiwikiweb.de/GoogleSoc2006/BetterProgress +2006-07-25: the requested daily entry is missing here, see http://moinmoin.wikiwikiweb.de/GoogleSoc2006/BetterProgress +2006-07-26: student didnt work on project +2006-07-27: student didnt work on project +2006-07-28: the requested daily entry is missing here, see http://moinmoin.wikiwikiweb.de/GoogleSoc2006/BetterProgress +2006-07-29: the requested daily entry is missing here, see http://moinmoin.wikiwikiweb.de/GoogleSoc2006/BetterProgress Time plan ========= # HG changeset patch # User Alexander Schremmer # Date 1154208896 -7200 # Node ID 8b309674bf0699fa95adb4ed9b5c115e561dfc73 # Parent 75f8c432385fa653593416cf06123997ccf5a5fd Minor bugfix in the rst parser re incorrectly referenced Page class. diff -r 75f8c432385f -r 8b309674bf06 MoinMoin/parser/text_rst.py --- a/MoinMoin/parser/text_rst.py Sat Jul 29 22:02:02 2006 +0200 +++ b/MoinMoin/parser/text_rst.py Sat Jul 29 23:34:56 2006 +0200 @@ -386,7 +386,7 @@ class MoinTranslator(html4css1.HTMLTrans if refuri.find('#') != -1: pagename, anchor = refuri.split('#', 1) anchor = '#' + anchor - page = MoinMoin.Page.Page(self.request, pagename) + page = Page(self.request, pagename) node['refuri'] = page.url(self.request) + anchor if not page.exists(): node['classes'].append('nonexistent') # HG changeset patch # User Alexander Schremmer # Date 1154208933 -7200 # Node ID f80e88b33c9deed38230fef7cc19a51aa4c0cb22 # Parent 8b309674bf0699fa95adb4ed9b5c115e561dfc73 Added processing of local- and remoteMatch options to the SyncPages action. 
diff -r 8b309674bf06 -r f80e88b33c9d MoinMoin/action/SyncPages.py --- a/MoinMoin/action/SyncPages.py Sat Jul 29 23:34:56 2006 +0200 +++ b/MoinMoin/action/SyncPages.py Sat Jul 29 23:35:33 2006 +0200 @@ -25,12 +25,12 @@ from MoinMoin.packages import unpackLine from MoinMoin.packages import unpackLine from MoinMoin.PageEditor import PageEditor from MoinMoin.Page import Page -from MoinMoin.wikidicts import Dict +from MoinMoin.wikidicts import Dict, Group class ActionStatus(Exception): pass - +# Move these classes to MoinMoin.wikisync class RemotePage(object): """ This class represents a page in (another) wiki. """ def __init__(self, name, revno): @@ -38,10 +38,17 @@ class RemotePage(object): self.revno = revno def __repr__(self): - return repr(u"%s<%i>" % (self.name, self.revno)) + return repr(unicode(self)) + + def __unicode__(self): + return u"%s<%i>" % (self.name, self.revno) def __lt__(self, other): return self.name < other.name + + def filter(cls, rp_list, regex): + return [x for x in rp_list if regex.match(x.name)] + filter = classmethod(filter) class RemoteWiki(object): @@ -74,7 +81,7 @@ class MoinRemoteWiki(RemoteWiki): def createConnection(self): if self.valid: - return xmlrpclib.ServerProxy(self.xmlrpc_url, allow_none=True) + return xmlrpclib.ServerProxy(self.xmlrpc_url, allow_none=True, verbose=True) else: return None @@ -95,13 +102,21 @@ class MoinLocalWiki(RemoteWiki): def __init__(self, request): self.request = request + def getGroupItems(self, group_list): + pages = [] + for group_pagename in group_list: + pages.extend(Group(self.request, group_pagename).members()) + return [self.createRemotePage(x) for x in pages] + + def createRemotePage(self, page_name): + return RemotePage(page_name, Page(self.request, page_name).get_real_rev()) + # Methods implementing the RemoteWiki interface def getInterwikiName(self): return self.request.cfg.interwikiname def getPages(self): - l_pages = [[x, Page(self.request, x).get_real_rev()] for x in 
self.request.rootpage.getPageList(exists=0)] - return [RemotePage(unicode(name), revno) for name, revno in l_pages] + return [self.createRemotePage(x) for x in self.request.rootpage.getPageList(exists=0)] def __repr__(self): return "" @@ -139,7 +154,8 @@ class ActionClass: # merge the pageList case into the remoteMatch case if params["pageList"] is not None: - params["remoteMatch"] = u'|'.join([r'^%s$' % re.escape(name) for name in params["pageList"]]) + params["localMatch"] = params["remoteMatch"] = u'|'.join([r'^%s$' % re.escape(name) + for name in params["pageList"]]) if params["localMatch"] is not None: params["localMatch"] = re.compile(params["localMatch"], re.U) @@ -183,9 +199,23 @@ class ActionClass: r_pages = remote.getPages() l_pages = local.getPages() + print "Got %i local, %i remote pages" % (len(l_pages), len(r_pages)) + if params["localMatch"]: + l_pages = RemotePage.filter(l_pages, params["localMatch"]) + if params["remoteMatch"]: + print "Filtering remote pages using regex %r" % params["remoteMatch"].pattern + r_pages = RemotePage.filter(r_pages, params["remoteMatch"]) + print "After filtering: Got %i local, %i remote pages" % (len(l_pages), len(r_pages)) + + if params["groupList"]: + pages_from_groupList = local.getGroupItems(params["groupList"]) + if not params["localMatch"]: + l_pages = pages_from_groupList + else: + l_pages += pages_from_groupList # some initial test code - r_new_pages = u",".join(set([repr(x) for x in r_pages]) - set([repr(x) for x in l_pages])) + r_new_pages = u", ".join(set([unicode(x) for x in r_pages]) - set([unicode(x) for x in l_pages])) raise ActionStatus("These pages are in the remote wiki, but not local: " + r_new_pages) # HG changeset patch # User Alexander Schremmer # Date 1154210131 -7200 # Node ID e2cc6b5bed9655cafcbbdaa2504d5e5b82881f7f # Parent 5a9fe91c39d6afdb9de9ed61b698a50450ca4066 Added detection of anonymous wikis to the SyncPages action. 
diff -r 5a9fe91c39d6 -r e2cc6b5bed96 MoinMoin/action/SyncPages.py --- a/MoinMoin/action/SyncPages.py Sat Jul 29 23:37:54 2006 +0200 +++ b/MoinMoin/action/SyncPages.py Sat Jul 29 23:55:31 2006 +0200 @@ -29,6 +29,8 @@ from MoinMoin.wikidicts import Dict, Gro class ActionStatus(Exception): pass + +class UnsupportedWikiException(Exception): pass # Move these classes to MoinMoin.wikisync class RemotePage(object): @@ -72,18 +74,27 @@ class MoinRemoteWiki(RemoteWiki): """ Used for MoinMoin wikis reachable via XMLRPC. """ def __init__(self, request, interwikiname): self.request = request + _ = self.request.getText wikitag, wikiurl, wikitail, wikitag_bad = wikiutil.resolve_wiki(self.request, '%s:""' % (interwikiname, )) self.wiki_url = wikiutil.mapURL(self.request, wikiurl) self.valid = not wikitag_bad self.xmlrpc_url = self.wiki_url + "?action=xmlrpc2" + if not self.valid: + self.connection = None + return self.connection = self.createConnection() - # XXX add version and interwiki name checking! + version = self.connection.getMoinVersion() + if not isinstance(version, (tuple, list)): + raise UnsupportedWikiException(_("The remote version of MoinMoin is too old, the version 1.6 is required at least.")) + remote_interwikiname = self.getInterwikiName() + remote_iwid = self.connection.interwikiName()[1] + self.is_anonymous = remote_interwikiname is None + + if not self.is_anonymous and interwikiname != remote_interwikiname: + raise UnsupportedWikiException(_("The remote wiki uses a different InterWiki name internally than you specified.")) def createConnection(self): - if self.valid: - return xmlrpclib.ServerProxy(self.xmlrpc_url, allow_none=True, verbose=True) - else: - return None + return xmlrpclib.ServerProxy(self.xmlrpc_url, allow_none=True, verbose=True) # Methods implementing the RemoteWiki interface def getInterwikiName(self): @@ -182,8 +193,11 @@ class ActionClass: if not params["remoteWiki"]: raise ActionStatus(_("Incorrect parameters. 
Please supply at least the ''remoteWiki'' parameter.")) - remote = MoinRemoteWiki(self.request, params["remoteWiki"]) local = MoinLocalWiki(self.request) + try: + remote = MoinRemoteWiki(self.request, params["remoteWiki"]) + except UnsupportedWikiException, e: + raise ActionStatus(e.msg) if not remote.valid: raise ActionStatus(_("The ''remoteWiki'' is unknown.")) # HG changeset patch # User Alexander Schremmer # Date 1154210559 -7200 # Node ID 33927b0256ce749bcba60052a525d6d845b9bd35 # Parent e2cc6b5bed9655cafcbbdaa2504d5e5b82881f7f Changed marshalling format of the full IWID, load full IWID into the RemoteWiki object. diff -r e2cc6b5bed96 -r 33927b0256ce MoinMoin/action/SyncPages.py --- a/MoinMoin/action/SyncPages.py Sat Jul 29 23:55:31 2006 +0200 +++ b/MoinMoin/action/SyncPages.py Sun Jul 30 00:02:39 2006 +0200 @@ -22,7 +22,7 @@ except NameError: from MoinMoin import wikiutil, config, user -from MoinMoin.packages import unpackLine +from MoinMoin.packages import unpackLine, packLine from MoinMoin.PageEditor import PageEditor from MoinMoin.Page import Page from MoinMoin.wikidicts import Dict, Group @@ -89,9 +89,13 @@ class MoinRemoteWiki(RemoteWiki): remote_interwikiname = self.getInterwikiName() remote_iwid = self.connection.interwikiName()[1] self.is_anonymous = remote_interwikiname is None - if not self.is_anonymous and interwikiname != remote_interwikiname: raise UnsupportedWikiException(_("The remote wiki uses a different InterWiki name internally than you specified.")) + + if self.is_anonymous: + self.iwid_full = remote_iwid + else: + self.iwid_full = packLine([remote_iwid, interwikiname]) def createConnection(self): return xmlrpclib.ServerProxy(self.xmlrpc_url, allow_none=True, verbose=True) diff -r e2cc6b5bed96 -r 33927b0256ce MoinMoin/config/multiconfig.py --- a/MoinMoin/config/multiconfig.py Sat Jul 29 23:55:31 2006 +0200 +++ b/MoinMoin/config/multiconfig.py Sun Jul 30 00:02:39 2006 +0200 @@ -14,6 +14,7 @@ import time from MoinMoin import error, util, 
wikiutil import MoinMoin.auth as authmodule +from MoinMoin.packages import packLine _url_re_cache = None _farmconfig_mtime = None @@ -573,7 +574,7 @@ reStructuredText Quick Reference self.iwid = iwid if self.interwikiname is not None: - self.iwid_full = iwid + ":" + self.interwikiname + self.iwid_full = packLine([iwid, self.interwikiname]) else: self.iwid_full = iwid diff -r e2cc6b5bed96 -r 33927b0256ce docs/CHANGES.aschremmer --- a/docs/CHANGES.aschremmer Sat Jul 29 23:55:31 2006 +0200 +++ b/docs/CHANGES.aschremmer Sun Jul 30 00:02:39 2006 +0200 @@ -77,7 +77,8 @@ Week 29: Finished first version of the m Week 29: Finished first version of the mergeChanges method. Added Tag and TagStore classes which are currently using pickle-based storage. Added getAllPagesEx XMLRPC method. Week 30: Implemented IWID support, added function to generate random strings. Added support - for editing the InterWikiMap in the wiki. Added locking to the PickleTagStore and the MetaDict classes. + for editing the InterWikiMap in the wiki. Added locking to the PickleTagStore and the MetaDict classes. Added handling of + various options and detection of anonymous wikis to the SyncPages action. 2006-07-18: the requested daily entry is missing here, see http://moinmoin.wikiwikiweb.de/GoogleSoc2006/BetterProgress 2006-07-19: the requested daily entry is missing here, see http://moinmoin.wikiwikiweb.de/GoogleSoc2006/BetterProgress # HG changeset patch # User Alexander Schremmer # Date 1154211735 -7200 # Node ID e22024151c2c90d6e39e4bf9a5e225070141d136 # Parent 33927b0256ce749bcba60052a525d6d845b9bd35 Fixed some bugs in SyncPages. 
diff -r 33927b0256ce -r e22024151c2c MoinMoin/action/SyncPages.py --- a/MoinMoin/action/SyncPages.py Sun Jul 30 00:02:39 2006 +0200 +++ b/MoinMoin/action/SyncPages.py Sun Jul 30 00:22:15 2006 +0200 @@ -40,13 +40,18 @@ class RemotePage(object): self.revno = revno def __repr__(self): - return repr(unicode(self)) + return repr("" % unicode(self)) def __unicode__(self): return u"%s<%i>" % (self.name, self.revno) def __lt__(self, other): return self.name < other.name + + def __eq__(self, other): + if not isinstance(other, RemotePage): + return false + return self.name == other.name def filter(cls, rp_list, regex): return [x for x in rp_list if regex.match(x.name)] @@ -90,7 +95,9 @@ class MoinRemoteWiki(RemoteWiki): remote_iwid = self.connection.interwikiName()[1] self.is_anonymous = remote_interwikiname is None if not self.is_anonymous and interwikiname != remote_interwikiname: - raise UnsupportedWikiException(_("The remote wiki uses a different InterWiki name internally than you specified.")) + raise UnsupportedWikiException(_("The remote wiki uses a different InterWiki name (%(remotename)s)" + " internally than you specified (%(localname)s).") % { + "remotename": remote_interwikiname, "localname": interwikiname}) if self.is_anonymous: self.iwid_full = remote_iwid @@ -200,8 +207,8 @@ class ActionClass: local = MoinLocalWiki(self.request) try: remote = MoinRemoteWiki(self.request, params["remoteWiki"]) - except UnsupportedWikiException, e: - raise ActionStatus(e.msg) + except UnsupportedWikiException, (msg, ): + raise ActionStatus(msg) if not remote.valid: raise ActionStatus(_("The ''remoteWiki'' is unknown.")) @@ -233,7 +240,7 @@ class ActionClass: l_pages += pages_from_groupList # some initial test code - r_new_pages = u", ".join(set([unicode(x) for x in r_pages]) - set([unicode(x) for x in l_pages])) + r_new_pages = u", ".join([unicode(x) for x in (set(r_pages) - set(l_pages))]) raise ActionStatus("These pages are in the remote wiki, but not local: " + r_new_pages) # 
HG changeset patch # User Alexander Schremmer # Date 1154211978 -7200 # Node ID cdd1ea31f00edb73805b7acea6dd556b39e6f6fe # Parent e22024151c2c90d6e39e4bf9a5e225070141d136 Added sample code for set logic in SyncPages. diff -r e22024151c2c -r cdd1ea31f00e MoinMoin/action/SyncPages.py --- a/MoinMoin/action/SyncPages.py Sun Jul 30 00:22:15 2006 +0200 +++ b/MoinMoin/action/SyncPages.py Sun Jul 30 00:26:18 2006 +0200 @@ -239,9 +239,17 @@ class ActionClass: else: l_pages += pages_from_groupList + l_pages = set(l_pages) + r_pages = set(r_pages) + + # XXX this is not correct if matching is active + remote_but_not_local = r_pages - l_pages + local_but_not_remote = l_pages - r_pages + # some initial test code - r_new_pages = u", ".join([unicode(x) for x in (set(r_pages) - set(l_pages))]) - raise ActionStatus("These pages are in the remote wiki, but not local: " + r_new_pages) + r_new_pages = u", ".join([unicode(x) for x in remote_but_not_local]) + l_new_pages = u", ".join([unicode(x) for x in local_but_not_remote]) + raise ActionStatus("These pages are in the remote wiki, but not local: " + r_new_pages + "
These pages are in the local wiki, but not in the remote one: " + l_new_pages) def execute(pagename, request): # HG changeset patch # User Alexander Schremmer # Date 1154224558 -7200 # Node ID 92f5835853ae000b514f16295d3063c48f59b720 # Parent cdd1ea31f00edb73805b7acea6dd556b39e6f6fe Fixed full iwid for all cases. diff -r cdd1ea31f00e -r 92f5835853ae MoinMoin/action/SyncPages.py --- a/MoinMoin/action/SyncPages.py Sun Jul 30 00:26:18 2006 +0200 +++ b/MoinMoin/action/SyncPages.py Sun Jul 30 03:55:58 2006 +0200 @@ -100,7 +100,7 @@ class MoinRemoteWiki(RemoteWiki): "remotename": remote_interwikiname, "localname": interwikiname}) if self.is_anonymous: - self.iwid_full = remote_iwid + self.iwid_full = packLine([remote_iwid]) else: self.iwid_full = packLine([remote_iwid, interwikiname]) diff -r cdd1ea31f00e -r 92f5835853ae MoinMoin/config/multiconfig.py --- a/MoinMoin/config/multiconfig.py Sun Jul 30 00:26:18 2006 +0200 +++ b/MoinMoin/config/multiconfig.py Sun Jul 30 03:55:58 2006 +0200 @@ -576,7 +576,7 @@ reStructuredText Quick Reference if self.interwikiname is not None: self.iwid_full = packLine([iwid, self.interwikiname]) else: - self.iwid_full = iwid + self.iwid_full = packLine([iwid]) def _config_check(self): """ Check namespace and warn about unknown names # HG changeset patch # User Alexander Schremmer # Date 1154337622 -7200 # Node ID 2aa53ed0afa087c63264e85507ba0d7b17879fce # Parent a60c236bf0acafbdc6a1581263883d66b3b8751d Fix the unrespected cache_dir problem. 
diff -r a60c236bf0ac -r 2aa53ed0afa0 MoinMoin/config/multiconfig.py --- a/MoinMoin/config/multiconfig.py Sun Jul 30 03:59:50 2006 +0200 +++ b/MoinMoin/config/multiconfig.py Mon Jul 31 11:20:22 2006 +0200 @@ -553,7 +553,7 @@ reStructuredText Quick Reference # check if mail is possible and set flag: self.mail_enabled = (self.mail_smarthost is not None or self.mail_sendmail is not None) and self.mail_from - self.meta_dict = wikiutil.MetaDict(os.path.join(data_dir, 'meta')) + self.meta_dict = wikiutil.MetaDict(os.path.join(data_dir, 'meta'), self.cache_dir) # interwiki ID processing self.load_IWID() diff -r a60c236bf0ac -r 2aa53ed0afa0 MoinMoin/script/migration/data.py --- a/MoinMoin/script/migration/data.py Sun Jul 30 03:59:50 2006 +0200 +++ b/MoinMoin/script/migration/data.py Mon Jul 31 11:20:22 2006 +0200 @@ -39,7 +39,7 @@ class PluginScript(MoinScript): meta_fname = os.path.join(data_dir, 'meta') while True: try: - meta = wikiutil.MetaDict(meta_fname) + meta = wikiutil.MetaDict(meta_fname, request.cfg.cache_dir) try: curr_rev = meta['data_format_revision'] mig_name = str(curr_rev) diff -r a60c236bf0ac -r 2aa53ed0afa0 MoinMoin/wikiutil.py --- a/MoinMoin/wikiutil.py Sun Jul 30 03:59:50 2006 +0200 +++ b/MoinMoin/wikiutil.py Mon Jul 31 11:20:22 2006 +0200 @@ -411,13 +411,13 @@ class MetaDict(dict): """ store meta informations as a dict. XXX It is not thread-safe, add locks! 
""" - def __init__(self, metafilename): + def __init__(self, metafilename, cache_directory): """ create a MetaDict from metafilename """ dict.__init__(self) self.metafilename = metafilename self.dirty = False self.loaded = False - lock_dir = os.path.normpath(os.path.join(metafilename, '..', 'cache', '__metalock__')) + lock_dir = os.path.join(cache_directory, '__metalock__') self.rlock = lock.ReadLock(lock_dir, 60.0) self.wlock = lock.WriteLock(lock_dir, 60.0) # HG changeset patch # User Alexander Schremmer # Date 1154340117 -7200 # Node ID 156d160b1dd9374f573373b3a3b9a3c189282646 # Parent 2aa53ed0afa087c63264e85507ba0d7b17879fce Load the IWID and the meta dict lazily. diff -r 2aa53ed0afa0 -r 156d160b1dd9 MoinMoin/config/multiconfig.py --- a/MoinMoin/config/multiconfig.py Mon Jul 31 11:20:22 2006 +0200 +++ b/MoinMoin/config/multiconfig.py Mon Jul 31 12:01:57 2006 +0200 @@ -552,17 +552,33 @@ reStructuredText Quick Reference # check if mail is possible and set flag: self.mail_enabled = (self.mail_smarthost is not None or self.mail_sendmail is not None) and self.mail_from - - self.meta_dict = wikiutil.MetaDict(os.path.join(data_dir, 'meta'), self.cache_dir) - - # interwiki ID processing - self.load_IWID() + + # Cache variables for the properties below + self._iwid = self._iwid_full = self._meta_dict = None + + def load_meta_dict(self): + """ The meta_dict contains meta data about the wiki instance. """ + if getattr(self, "_meta_dict", None) is None: + self._meta_dict = wikiutil.MetaDict(os.path.join(self.data_dir, 'meta'), self.cache_dir) + return self._meta_dict + meta_dict = property(load_meta_dict) + + # lazily load iwid(_full) + def make_iwid_property(attr): + def getter(self): + if getattr(self, attr, None) is None: + self.load_IWID() + return getattr(self, attr) + return property(getter) + iwid = make_iwid_property("_iwid") + iwid_full = make_iwid_property("_iwid_full") def load_IWID(self): """ Loads the InterWikiID of this instance. 
It is used to identify the instance globally. The IWID is available as cfg.iwid The full IWID containing the interwiki name is available as cfg.iwid_full + This method is called by the property. """ try: @@ -572,11 +588,11 @@ reStructuredText Quick Reference self.meta_dict['IWID'] = iwid self.meta_dict.sync() - self.iwid = iwid + self._iwid = iwid if self.interwikiname is not None: - self.iwid_full = packLine([iwid, self.interwikiname]) + self._iwid_full = packLine([iwid, self.interwikiname]) else: - self.iwid_full = packLine([iwid]) + self._iwid_full = packLine([iwid]) def _config_check(self): """ Check namespace and warn about unknown names diff -r 2aa53ed0afa0 -r 156d160b1dd9 docs/CHANGES.aschremmer --- a/docs/CHANGES.aschremmer Mon Jul 31 11:20:22 2006 +0200 +++ b/docs/CHANGES.aschremmer Mon Jul 31 12:01:57 2006 +0200 @@ -79,6 +79,7 @@ Week 30: Implemented IWID support, added Week 30: Implemented IWID support, added function to generate random strings. Added support for editing the InterWikiMap in the wiki. Added locking to the PickleTagStore and the MetaDict classes. Added handling of various options and detection of anonymous wikis to the SyncPages action. +Week 31: Load the IWID and the meta dict lazily. 2006-07-18: the requested daily entry is missing here, see http://moinmoin.wikiwikiweb.de/GoogleSoc2006/BetterProgress 2006-07-19: the requested daily entry is missing here, see http://moinmoin.wikiwikiweb.de/GoogleSoc2006/BetterProgress # HG changeset patch # User Alexander Schremmer # Date 1154473962 -7200 # Node ID 397b97122ad9d1efa78660ccc88e998d1620c2f0 # Parent 156d160b1dd9374f573373b3a3b9a3c189282646 General SyncPages refactoring: Fixed option handling again, refined semantics of options, introduced direction option, replaced "localMatch"/"remoteMatch" by "pageMatch". 
diff -r 156d160b1dd9 -r 397b97122ad9 MoinMoin/action/SyncPages.py --- a/MoinMoin/action/SyncPages.py Mon Jul 31 12:01:57 2006 +0200 +++ b/MoinMoin/action/SyncPages.py Wed Aug 02 01:12:42 2006 +0200 @@ -27,36 +27,81 @@ from MoinMoin.Page import Page from MoinMoin.Page import Page from MoinMoin.wikidicts import Dict, Group +# directions +UP, DOWN, BOTH = range(3) +directions_map = {"up": UP, "down": DOWN, "both": BOTH} class ActionStatus(Exception): pass class UnsupportedWikiException(Exception): pass # Move these classes to MoinMoin.wikisync -class RemotePage(object): +class SyncPage(object): """ This class represents a page in (another) wiki. """ - def __init__(self, name, revno): + def __init__(self, name, local_rev=None, remote_rev=None): self.name = name - self.revno = revno + self.local_rev = local_rev + self.remote_rev = remote_rev + assert local_rev or remote_rev def __repr__(self): return repr("" % unicode(self)) def __unicode__(self): - return u"%s<%i>" % (self.name, self.revno) + return u"%s<%r:%r>" % (self.name, self.local_rev, self.remote_rev) def __lt__(self, other): return self.name < other.name + def __hash__(self): + return hash(self.name) + def __eq__(self, other): - if not isinstance(other, RemotePage): + if not isinstance(other, SyncPage): return false return self.name == other.name - def filter(cls, rp_list, regex): - return [x for x in rp_list if regex.match(x.name)] + def filter(cls, sp_list, func): + return [x for x in sp_list if func(x.name)] filter = classmethod(filter) + def merge(cls, local_list, remote_list): + # map page names to SyncPage objects :-) + d = dict(zip(local_list, local_list)) + for sp in remote_list: + if sp in d: + d[sp].remote_rev = sp.remote_rev + else: + d[sp] = sp + return d.keys() + merge = classmethod(merge) + + def is_only_local(self): + return not self.remote_rev + + def is_only_remote(self): + return not self.local_rev + + def is_local_and_remote(self): + return self.local_rev and self.remote_rev + + def 
iter_local_only(cls, sp_list): + for x in sp_list: + if x.is_only_local(): + yield x + iter_local_only = classmethod(iter_local_only) + + def iter_remote_only(cls, sp_list): + for x in sp_list: + if x.is_only_remote(): + yield x + iter_remote_only = classmethod(iter_remote_only) + + def iter_local_and_remote(cls, sp_list): + for x in sp_list: + if x.is_local_and_remote(): + yield x + iter_local_and_remote = classmethod(iter_local_and_remote) class RemoteWiki(object): """ This class should be the base for all implementations of remote wiki @@ -71,7 +116,7 @@ class RemoteWiki(object): return NotImplemented def getPages(self): - """ Returns a list of RemotePage instances. """ + """ Returns a list of SyncPage instances. """ return NotImplemented @@ -97,7 +142,7 @@ class MoinRemoteWiki(RemoteWiki): if not self.is_anonymous and interwikiname != remote_interwikiname: raise UnsupportedWikiException(_("The remote wiki uses a different InterWiki name (%(remotename)s)" " internally than you specified (%(localname)s).") % { - "remotename": remote_interwikiname, "localname": interwikiname}) + "remotename": wikiutil.escape(remote_interwikiname), "localname": wikiutil.escape(interwikiname)}) if self.is_anonymous: self.iwid_full = packLine([remote_iwid]) @@ -113,7 +158,7 @@ class MoinRemoteWiki(RemoteWiki): def getPages(self): pages = self.connection.getAllPagesEx({"include_revno": True, "include_deleted": True}) - return [RemotePage(unicode(name), revno) for name, revno in pages] + return [SyncPage(unicode(name), remote_rev=revno) for name, revno in pages] def __repr__(self): return "" % (self.wiki_url, self.valid) @@ -128,17 +173,17 @@ class MoinLocalWiki(RemoteWiki): pages = [] for group_pagename in group_list: pages.extend(Group(self.request, group_pagename).members()) - return [self.createRemotePage(x) for x in pages] - - def createRemotePage(self, page_name): - return RemotePage(page_name, Page(self.request, page_name).get_real_rev()) + return [self.createSyncPage(x) for x 
in pages] + + def createSyncPage(self, page_name): + return SyncPage(page_name, local_rev=Page(self.request, page_name).get_real_rev()) # Methods implementing the RemoteWiki interface def getInterwikiName(self): return self.request.cfg.interwikiname def getPages(self): - return [self.createRemotePage(x) for x in self.request.rootpage.getPageList(exists=0)] + return [self.createSyncPage(x) for x in self.request.rootpage.getPageList(exists=0)] def __repr__(self): return "" @@ -155,10 +200,10 @@ class ActionClass: "remotePrefix": "", "localPrefix": "", "remoteWiki": "", - "localMatch": None, - "remoteMatch": None, + "pageMatch": None, "pageList": None, "groupList": None, + "direction": "foo", # is defaulted below } options.update(Dict(self.request, self.pagename).get_dict()) @@ -169,21 +214,25 @@ class ActionClass: if options["groupList"] is not None: options["groupList"] = unpackLine(options["groupList"], ",") + options["direction"] = directions_map.get(options["direction"], BOTH) + return options def fix_params(self, params): """ Does some fixup on the parameters. 
""" - # merge the pageList case into the remoteMatch case + # merge the pageList case into the pageMatch case if params["pageList"] is not None: - params["localMatch"] = params["remoteMatch"] = u'|'.join([r'^%s$' % re.escape(name) - for name in params["pageList"]]) - - if params["localMatch"] is not None: - params["localMatch"] = re.compile(params["localMatch"], re.U) - - if params["remoteMatch"] is not None: - params["remoteMatch"] = re.compile(params["remoteMatch"], re.U) + params["pageMatch"] = u'|'.join([r'^%s$' % re.escape(name) + for name in params["pageList"]]) + del params["pageList"] + + if params["pageMatch"] is not None: + params["pageMatch"] = re.compile(params["pageMatch"], re.U) + + # we do not support matching or listing pages if there is a group of pages + if params["groupList"]: + params["pageMatch"] = None return params @@ -195,7 +244,6 @@ class ActionClass: _ = self.request.getText params = self.fix_params(self.parse_page()) - try: if not self.request.cfg.interwikiname: @@ -224,32 +272,32 @@ class ActionClass: r_pages = remote.getPages() l_pages = local.getPages() - print "Got %i local, %i remote pages" % (len(l_pages), len(r_pages)) - if params["localMatch"]: - l_pages = RemotePage.filter(l_pages, params["localMatch"]) - if params["remoteMatch"]: - print "Filtering remote pages using regex %r" % params["remoteMatch"].pattern - r_pages = RemotePage.filter(r_pages, params["remoteMatch"]) - print "After filtering: Got %i local, %i remote pages" % (len(l_pages), len(r_pages)) if params["groupList"]: - pages_from_groupList = local.getGroupItems(params["groupList"]) - if not params["localMatch"]: - l_pages = pages_from_groupList - else: - l_pages += pages_from_groupList - - l_pages = set(l_pages) - r_pages = set(r_pages) + pages_from_groupList = set(local.getGroupItems(params["groupList"])) + r_pages = SyncPage.filter(r_pages, pages_from_groupList.__contains__) + l_pages = SyncPage.filter(l_pages, pages_from_groupList.__contains__) + + m_pages = 
SyncPage.merge(l_pages, r_pages) + + print "Got %i local, %i remote pages, %i merged pages" % (len(l_pages), len(r_pages), len(m_pages)) - # XXX this is not correct if matching is active - remote_but_not_local = r_pages - l_pages - local_but_not_remote = l_pages - r_pages + if params["pageMatch"]: + m_pages = SyncPage.filter(m_pages, params["pageMatch"].match) + print "After filtering: Got %i merges pages" % (len(m_pages), ) + + on_both_sides = list(SyncPage.iter_local_and_remote(m_pages)) + remote_but_not_local = list(SyncPage.iter_remote_only(m_pages)) + local_but_not_remote = list(SyncPage.iter_local_only(m_pages)) # some initial test code r_new_pages = u", ".join([unicode(x) for x in remote_but_not_local]) l_new_pages = u", ".join([unicode(x) for x in local_but_not_remote]) - raise ActionStatus("These pages are in the remote wiki, but not local: " + r_new_pages + "
These pages are in the local wiki, but not in the remote one: " + l_new_pages) + raise ActionStatus("These pages are in the remote wiki, but not local: " + wikiutil.escape(r_new_pages) + "
These pages are in the local wiki, but not in the remote one: " + wikiutil.escape(l_new_pages)) + #if params["direction"] in (DOWN, BOTH): + # for rp in remote_but_not_local: + # XXX add locking, acquire read-lock on rp + def execute(pagename, request): diff -r 156d160b1dd9 -r 397b97122ad9 docs/CHANGES.aschremmer --- a/docs/CHANGES.aschremmer Mon Jul 31 12:01:57 2006 +0200 +++ b/docs/CHANGES.aschremmer Wed Aug 02 01:12:42 2006 +0200 @@ -79,7 +79,9 @@ Week 30: Implemented IWID support, added Week 30: Implemented IWID support, added function to generate random strings. Added support for editing the InterWikiMap in the wiki. Added locking to the PickleTagStore and the MetaDict classes. Added handling of various options and detection of anonymous wikis to the SyncPages action. -Week 31: Load the IWID and the meta dict lazily. +Week 31: Load the IWID and the meta dict lazily. Reworked RemotePage/SyncPage, + fixed option handling again, refined semantics of options, introduced + direction option, replaced "localMatch"/"remoteMatch" by "pageMatch". 2006-07-18: the requested daily entry is missing here, see http://moinmoin.wikiwikiweb.de/GoogleSoc2006/BetterProgress 2006-07-19: the requested daily entry is missing here, see http://moinmoin.wikiwikiweb.de/GoogleSoc2006/BetterProgress # HG changeset patch # User Alexander Schremmer # Date 1154622004 -7200 # Node ID 91ffa85ac616dd079014ffc1ae7beaa6e94ec45b # Parent 72a208bfe5798c42a9deb146b5c9902322ab14e4 Fixed severe bug in Standalone that referred to non-existant class attributes in a dynamic way. 
diff -r 72a208bfe579 -r 91ffa85ac616 MoinMoin/request/STANDALONE.py --- a/MoinMoin/request/STANDALONE.py Wed Aug 02 01:17:25 2006 +0200 +++ b/MoinMoin/request/STANDALONE.py Thu Aug 03 18:20:04 2006 +0200 @@ -33,10 +33,8 @@ class Request(RequestBase): self.http_user_agent = sa.headers.getheader('user-agent', '') co = filter(None, sa.headers.getheaders('cookie')) self.saved_cookie = ', '.join(co) or '' - self.if_modified_since = (sa.headers.getheader('if-modified-since') - or self.if_modified_since) - self.if_none_match = (sa.headers.getheader('if-none-match') - or self.if_none_match) + self.if_modified_since = sa.headers.getheader('if-modified-since') + self.if_none_match = sa.headers.getheader('if-none-match') # Copy rest from standalone request self.server_name = sa.server.server_name # HG changeset patch # User Alexander Schremmer # Date 1154622048 -7200 # Node ID 2be8ec7ba817f7b87b89b3fa24d4640dcd779563 # Parent 91ffa85ac616dd079014ffc1ae7beaa6e94ec45b Now the interwiki code caches the modification time of the interwiki files. diff -r 91ffa85ac616 -r 2be8ec7ba817 MoinMoin/wikiutil.py --- a/MoinMoin/wikiutil.py Thu Aug 03 18:20:04 2006 +0200 +++ b/MoinMoin/wikiutil.py Thu Aug 03 18:20:48 2006 +0200 @@ -503,37 +503,59 @@ class MetaDict(dict): ############################################################################# ### InterWiki ############################################################################# +INTERWIKI_PAGE = "InterWikiMap" + +def generate_file_list(request): + """ generates a list of all files. for internal use. 
""" + + # order is important here, the local intermap file takes + # precedence over the shared one, and is thus read AFTER + # the shared one + intermap_files = request.cfg.shared_intermap + if not isinstance(intermap_files, list): + intermap_files = [intermap_files] + else: + intermap_files = intermap_files[:] + intermap_files.append(os.path.join(request.cfg.data_dir, "intermap.txt")) + request.cfg.shared_intermap_files = [filename for filename in intermap_files + if filename and os.path.isfile(filename)] + + +def get_max_mtime(file_list, page): + """ Returns the highest modification time of the files in file_list and the + page page. """ + return max([os.stat(filename).st_mtime for filename in file_list] + + [version2timestamp(page.mtime_usecs())]) + + def load_wikimap(request): """ load interwiki map (once, and only on demand) """ + from MoinMoin.Page import Page now = int(time.time()) + if getattr(request.cfg, "shared_intermap_files", None) is None: + generate_file_list(request) try: _interwiki_list = request.cfg._interwiki_list - if request.cfg._interwiki_ts + (3*60) < now: # 3 minutes caching time - raise AttributeError # refresh cache + old_mtime = request.cfg._interwiki_mtime + if request.cfg._interwiki_ts + (1*60) < now: # 1 minutes caching time + max_mtime = get_max_mtime(request.cfg.shared_intermap_files, Page(request, INTERWIKI_PAGE)) + if max_mtime > old_mtime: + raise AttributeError # refresh cache + else: + request.cfg._interwiki_ts = now except AttributeError: - from MoinMoin.Page import Page - _interwiki_list = {} lines = [] - # order is important here, the local intermap file takes - # precedence over the shared one, and is thus read AFTER - # the shared one - intermap_files = request.cfg.shared_intermap - if not isinstance(intermap_files, list): - intermap_files = [intermap_files] - intermap_files.append(os.path.join(request.cfg.data_dir, "intermap.txt")) - - for filename in intermap_files: - if filename and os.path.isfile(filename): - f = 
open(filename, "r") - lines.extend(f.readlines()) - f.close() + for filename in request.cfg.shared_intermap_files: + f = open(filename, "r") + lines.extend(f.readlines()) + f.close() # add the contents of the InterWikiMap page - lines += Page(request, "InterWikiMap").get_raw_body().splitlines() + lines += Page(request, INTERWIKI_PAGE).get_raw_body().splitlines() for line in lines: if not line or line[0] == '#': continue @@ -555,6 +577,7 @@ def load_wikimap(request): # save for later request.cfg._interwiki_list = _interwiki_list request.cfg._interwiki_ts = now + request.cfg._interwiki_mtime = get_max_mtime(request.cfg.shared_intermap_files, Page(request, INTERWIKI_PAGE)) return _interwiki_list # HG changeset patch # User Alexander Schremmer # Date 1154622132 -7200 # Node ID 47df631cdc0586de81e60641ea15b4c4b4410554 # Parent 2be8ec7ba817f7b87b89b3fa24d4640dcd779563 Added entries to my CHANGES file. diff -r 2be8ec7ba817 -r 47df631cdc05 docs/CHANGES.aschremmer --- a/docs/CHANGES.aschremmer Thu Aug 03 18:20:48 2006 +0200 +++ b/docs/CHANGES.aschremmer Thu Aug 03 18:22:12 2006 +0200 @@ -27,7 +27,7 @@ Branch moin/1.6-sync-aschremmer * XMLRPC method to get the pagelist in a special way (revnos, no system pages etc.) * IWID support - i.e. every instance has a unique ID - * InterWiki page editable in the wiki + * InterWiki page editable in the wiki, modification detection based on mtimes Bugfixes (only stuff that is buggy in moin/1.6 main branch): * Conflict resolution fixes. (merged into main) @@ -82,6 +82,7 @@ Week 31: Load the IWID and the meta dict Week 31: Load the IWID and the meta dict lazily. Reworked RemotePage/SyncPage, fixed option handling again, refined semantics of options, introduced direction option, replaced "localMatch"/"remoteMatch" by "pageMatch". + Store mtime for InterWiki list updates and detect changes based on it. 
2006-07-18: the requested daily entry is missing here, see http://moinmoin.wikiwikiweb.de/GoogleSoc2006/BetterProgress 2006-07-19: the requested daily entry is missing here, see http://moinmoin.wikiwikiweb.de/GoogleSoc2006/BetterProgress # HG changeset patch # User Alexander Schremmer # Date 1154633734 -7200 # Node ID 0719653ce4fb6954a7f7f7ac1b4d997bd20ac0d8 # Parent 47df631cdc0586de81e60641ea15b4c4b4410554 Do not break on plugin exceptions in Page diff -r 47df631cdc05 -r 0719653ce4fb MoinMoin/Page.py --- a/MoinMoin/Page.py Thu Aug 03 18:22:12 2006 +0200 +++ b/MoinMoin/Page.py Thu Aug 03 21:35:34 2006 +0200 @@ -1360,14 +1360,14 @@ class Page: try: code = self.loadCache(request) self.execute(request, parser, code) - except Exception, (msg, ): - if msg != 'CacheNeedsUpdate': + except Exception, e: + if getattr(e, "message", None) != 'CacheNeedsUpdate': raise try: code = self.makeCache(request, parser) self.execute(request, parser, code) - except Exception, (msg, ): - if msg != 'CacheNeedsUpdate': + except Exception, e: + if getattr(e, "message", None) != 'CacheNeedsUpdate': raise request.log('page cache failed after creation') self.format(parser) # HG changeset patch # User Alexander Schremmer # Date 1154635512 -7200 # Node ID fd05b2dc86a496b7e01399b629e7e94d42e2bed5 # Parent 0719653ce4fb6954a7f7f7ac1b4d997bd20ac0d8 Now it even works on Python < 2.5 diff -r 0719653ce4fb -r fd05b2dc86a4 MoinMoin/Page.py --- a/MoinMoin/Page.py Thu Aug 03 21:35:34 2006 +0200 +++ b/MoinMoin/Page.py Thu Aug 03 22:05:12 2006 +0200 @@ -11,6 +11,10 @@ from MoinMoin import config, caching, us from MoinMoin import config, caching, user, util, wikiutil from MoinMoin.logfile import eventlog from MoinMoin.util import filesys, timefuncs + +def is_cache_exception(e): + args = e.args + return not (len(args) != 1 or args[0] != 'CacheNeedsUpdate') class Page: """Page - Manage an (immutable) page associated with a WikiName. 
@@ -1361,13 +1365,13 @@ class Page: code = self.loadCache(request) self.execute(request, parser, code) except Exception, e: - if getattr(e, "message", None) != 'CacheNeedsUpdate': + if not is_cache_exception(e): raise try: code = self.makeCache(request, parser) self.execute(request, parser, code) except Exception, e: - if getattr(e, "message", None) != 'CacheNeedsUpdate': + if not is_cache_exception(e): raise request.log('page cache failed after creation') self.format(parser) # HG changeset patch # User Alexander Schremmer # Date 1154638464 -7200 # Node ID 34abcbba032d5e0e6db2502ca50344c29cc362c3 # Parent fd05b2dc86a496b7e01399b629e7e94d42e2bed5 Refactored some names in SyncPages diff -r fd05b2dc86a4 -r 34abcbba032d MoinMoin/action/SyncPages.py --- a/MoinMoin/action/SyncPages.py Thu Aug 03 22:05:12 2006 +0200 +++ b/MoinMoin/action/SyncPages.py Thu Aug 03 22:54:24 2006 +0200 @@ -111,11 +111,11 @@ class RemoteWiki(object): """ Returns a representation of the instance for debugging purposes. """ return NotImplemented - def getInterwikiName(self): + def get_interwiki_name(self): """ Returns the interwiki name of the other wiki. """ return NotImplemented - def getPages(self): + def get_pages(self): """ Returns a list of SyncPage instances. 
""" return NotImplemented @@ -136,7 +136,7 @@ class MoinRemoteWiki(RemoteWiki): version = self.connection.getMoinVersion() if not isinstance(version, (tuple, list)): raise UnsupportedWikiException(_("The remote version of MoinMoin is too old, the version 1.6 is required at least.")) - remote_interwikiname = self.getInterwikiName() + remote_interwikiname = self.get_interwiki_name() remote_iwid = self.connection.interwikiName()[1] self.is_anonymous = remote_interwikiname is None if not self.is_anonymous and interwikiname != remote_interwikiname: @@ -153,10 +153,10 @@ class MoinRemoteWiki(RemoteWiki): return xmlrpclib.ServerProxy(self.xmlrpc_url, allow_none=True, verbose=True) # Methods implementing the RemoteWiki interface - def getInterwikiName(self): + def get_interwiki_name(self): return self.connection.interwikiName()[0] - def getPages(self): + def get_pages(self): pages = self.connection.getAllPagesEx({"include_revno": True, "include_deleted": True}) return [SyncPage(unicode(name), remote_rev=revno) for name, revno in pages] @@ -170,6 +170,7 @@ class MoinLocalWiki(RemoteWiki): self.request = request def getGroupItems(self, group_list): + """ Returns all page names that are listed on the page group_list. """ pages = [] for group_pagename in group_list: pages.extend(Group(self.request, group_pagename).members()) @@ -179,10 +180,10 @@ class MoinLocalWiki(RemoteWiki): return SyncPage(page_name, local_rev=Page(self.request, page_name).get_real_rev()) # Methods implementing the RemoteWiki interface - def getInterwikiName(self): + def get_interwiki_name(self): return self.request.cfg.interwikiname - def getPages(self): + def get_pages(self): return [self.createSyncPage(x) for x in self.request.rootpage.getPageList(exists=0)] def __repr__(self): @@ -252,6 +253,7 @@ class ActionClass: if not params["remoteWiki"]: raise ActionStatus(_("Incorrect parameters. 
Please supply at least the ''remoteWiki'' parameter.")) + # XXX prefix handling local = MoinLocalWiki(self.request) try: remote = MoinRemoteWiki(self.request, params["remoteWiki"]) @@ -270,8 +272,8 @@ class ActionClass: def sync(self, params, local, remote): """ This method does the syncronisation work. """ - r_pages = remote.getPages() - l_pages = local.getPages() + r_pages = remote.get_pages() + l_pages = local.get_pages() if params["groupList"]: pages_from_groupList = set(local.getGroupItems(params["groupList"])) # HG changeset patch # User Alexander Schremmer # Date 1154720076 -7200 # Node ID bf5f8afacf59dc6467a7cc15a237230728d01282 # Parent 34abcbba032d5e0e6db2502ca50344c29cc362c3 Added prefix handling to SyncPages. diff -r 34abcbba032d -r bf5f8afacf59 MoinMoin/action/SyncPages.py --- a/MoinMoin/action/SyncPages.py Thu Aug 03 22:54:24 2006 +0200 +++ b/MoinMoin/action/SyncPages.py Fri Aug 04 21:34:36 2006 +0200 @@ -31,6 +31,17 @@ UP, DOWN, BOTH = range(3) UP, DOWN, BOTH = range(3) directions_map = {"up": UP, "down": DOWN, "both": BOTH} + +def normalise_pagename(page_name, prefix): + if prefix: + if not page_name.startswith(prefix): + return None + else: + return page_name[len(prefix):] + else: + return page_name + + class ActionStatus(Exception): pass class UnsupportedWikiException(Exception): pass @@ -38,11 +49,14 @@ class UnsupportedWikiException(Exception # Move these classes to MoinMoin.wikisync class SyncPage(object): """ This class represents a page in (another) wiki. 
""" - def __init__(self, name, local_rev=None, remote_rev=None): + def __init__(self, name, local_rev=None, remote_rev=None, local_name=None, remote_name=None): self.name = name self.local_rev = local_rev self.remote_rev = remote_rev + self.local_name = local_name + self.remote_name = remote_name assert local_rev or remote_rev + assert local_name or remote_name def __repr__(self): return repr("" % unicode(self)) @@ -71,6 +85,7 @@ class SyncPage(object): for sp in remote_list: if sp in d: d[sp].remote_rev = sp.remote_rev + d[sp].remote_name = sp.remote_name else: d[sp] = sp return d.keys() @@ -122,9 +137,11 @@ class RemoteWiki(object): class MoinRemoteWiki(RemoteWiki): """ Used for MoinMoin wikis reachable via XMLRPC. """ - def __init__(self, request, interwikiname): + def __init__(self, request, interwikiname, prefix): self.request = request + self.prefix = prefix _ = self.request.getText + wikitag, wikiurl, wikitail, wikitag_bad = wikiutil.resolve_wiki(self.request, '%s:""' % (interwikiname, )) self.wiki_url = wikiutil.mapURL(self.request, wikiurl) self.valid = not wikitag_bad @@ -132,10 +149,13 @@ class MoinRemoteWiki(RemoteWiki): if not self.valid: self.connection = None return + self.connection = self.createConnection() + version = self.connection.getMoinVersion() if not isinstance(version, (tuple, list)): raise UnsupportedWikiException(_("The remote version of MoinMoin is too old, the version 1.6 is required at least.")) + remote_interwikiname = self.get_interwiki_name() remote_iwid = self.connection.interwikiName()[1] self.is_anonymous = remote_interwikiname is None @@ -158,7 +178,13 @@ class MoinRemoteWiki(RemoteWiki): def get_pages(self): pages = self.connection.getAllPagesEx({"include_revno": True, "include_deleted": True}) - return [SyncPage(unicode(name), remote_rev=revno) for name, revno in pages] + rpages = [] + for name, revno in pages: + normalised_name = normalise_pagename(name, self.prefix) + if normalised_name is None: + continue + 
rpages.append(SyncPage(normalised_name, remote_rev=revno, remote_name=name)) + return rpages def __repr__(self): return "" % (self.wiki_url, self.valid) @@ -166,8 +192,9 @@ class MoinRemoteWiki(RemoteWiki): class MoinLocalWiki(RemoteWiki): """ Used for the current MoinMoin wiki. """ - def __init__(self, request): + def __init__(self, request, prefix): self.request = request + self.prefix = prefix def getGroupItems(self, group_list): """ Returns all page names that are listed on the page group_list. """ @@ -177,14 +204,17 @@ class MoinLocalWiki(RemoteWiki): return [self.createSyncPage(x) for x in pages] def createSyncPage(self, page_name): - return SyncPage(page_name, local_rev=Page(self.request, page_name).get_real_rev()) + normalised_name = normalise_pagename(page_name, self.prefix) + if normalised_name is None: + return None + return SyncPage(normalised_name, local_rev=Page(self.request, page_name).get_real_rev(), local_name=page_name) # Methods implementing the RemoteWiki interface def get_interwiki_name(self): return self.request.cfg.interwikiname def get_pages(self): - return [self.createSyncPage(x) for x in self.request.rootpage.getPageList(exists=0)] + return [x for x in [self.createSyncPage(x) for x in self.request.rootpage.getPageList(exists=0)] if x] def __repr__(self): return "" @@ -253,10 +283,9 @@ class ActionClass: if not params["remoteWiki"]: raise ActionStatus(_("Incorrect parameters. 
Please supply at least the ''remoteWiki'' parameter.")) - # XXX prefix handling - local = MoinLocalWiki(self.request) + local = MoinLocalWiki(self.request, params["localPrefix"]) try: - remote = MoinRemoteWiki(self.request, params["remoteWiki"]) + remote = MoinRemoteWiki(self.request, params["remoteWiki"], params["remotePrefix"]) except UnsupportedWikiException, (msg, ): raise ActionStatus(msg) # HG changeset patch # User Alexander Schremmer # Date 1154722350 -7200 # Node ID 4ba6da7e23a460171d0dc574ab34f900b6c7c180 # Parent bf5f8afacf59dc6467a7cc15a237230728d01282 Started implementing the merging process. Not working yet. diff -r bf5f8afacf59 -r 4ba6da7e23a4 MoinMoin/action/SyncPages.py --- a/MoinMoin/action/SyncPages.py Fri Aug 04 21:34:36 2006 +0200 +++ b/MoinMoin/action/SyncPages.py Fri Aug 04 22:12:30 2006 +0200 @@ -26,6 +26,8 @@ from MoinMoin.PageEditor import PageEdit from MoinMoin.PageEditor import PageEditor from MoinMoin.Page import Page from MoinMoin.wikidicts import Dict, Group +from MoinMoin.wikisync import TagStore +from MoinMoin.util.bdiff import decompress, patch # directions UP, DOWN, BOTH = range(3) @@ -46,7 +48,7 @@ class ActionStatus(Exception): pass class UnsupportedWikiException(Exception): pass -# Move these classes to MoinMoin.wikisync +# XXX Move these classes to MoinMoin.wikisync class SyncPage(object): """ This class represents a page in (another) wiki. 
""" def __init__(self, name, local_rev=None, remote_rev=None, local_name=None, remote_name=None): @@ -172,6 +174,9 @@ class MoinRemoteWiki(RemoteWiki): def createConnection(self): return xmlrpclib.ServerProxy(self.xmlrpc_url, allow_none=True, verbose=True) + def get_diff(self, pagename, from_rev, to_rev): + return str(self.connection.getDiff(pagename, from_rev, to_rev)) + # Methods implementing the RemoteWiki interface def get_interwiki_name(self): return self.connection.interwikiName()[0] @@ -300,9 +305,9 @@ class ActionClass: def sync(self, params, local, remote): """ This method does the syncronisation work. """ - + + l_pages = local.get_pages() r_pages = remote.get_pages() - l_pages = local.get_pages() if params["groupList"]: pages_from_groupList = set(local.getGroupItems(params["groupList"])) @@ -321,14 +326,39 @@ class ActionClass: remote_but_not_local = list(SyncPage.iter_remote_only(m_pages)) local_but_not_remote = list(SyncPage.iter_local_only(m_pages)) - # some initial test code - r_new_pages = u", ".join([unicode(x) for x in remote_but_not_local]) - l_new_pages = u", ".join([unicode(x) for x in local_but_not_remote]) - raise ActionStatus("These pages are in the remote wiki, but not local: " + wikiutil.escape(r_new_pages) + "
These pages are in the local wiki, but not in the remote one: " + wikiutil.escape(l_new_pages)) + # some initial test code (XXX remove) + #r_new_pages = u", ".join([unicode(x) for x in remote_but_not_local]) + #l_new_pages = u", ".join([unicode(x) for x in local_but_not_remote]) + #raise ActionStatus("These pages are in the remote wiki, but not local: " + wikiutil.escape(r_new_pages) + "
These pages are in the local wiki, but not in the remote one: " + wikiutil.escape(l_new_pages)) #if params["direction"] in (DOWN, BOTH): # for rp in remote_but_not_local: - # XXX add locking, acquire read-lock on rp - + + # let's do the simple case first, can be refactored later to match all cases + for rp in on_both_sides: + # XXX add locking, acquire read-lock on rp + + local_pagename = rp.local_pagename + + tags = TagStore(Page(self.request, local_pagename)) + matching_tags = tags.fetch(iwid_full=remote.iwid_full) + matching_tags.sort() + + if not matching_tags: + remote_rev = None + local_rev = rp.local_rev # merge against the newest version + old_contents = "" + else: + newest_tag = matching_tags[-1] + local_rev = newest_tag.current_rev + remote_rev = newest_tag.remote_rev + old_contents = local_page.get_raw_body_str() + + local_page = Page(self.request, local_pagename, rev=local_rev) + + diff = remote.get_diff(rp.remote_pagename, remote_rev, None) + new_contents = patch(old_contents, decompress(diff)).decode("utf-8") + # XXX this is not finished yet + def execute(pagename, request): diff -r bf5f8afacf59 -r 4ba6da7e23a4 MoinMoin/wikisync.py --- a/MoinMoin/wikisync.py Fri Aug 04 21:34:36 2006 +0200 +++ b/MoinMoin/wikisync.py Fri Aug 04 22:12:30 2006 +0200 @@ -14,6 +14,7 @@ except ImportError: import pickle from MoinMoin.util import lock +from MoinMoin.packages import unpackLine class Tag(object): @@ -32,6 +33,11 @@ class Tag(object): def __repr__(self): return u"" % (self.remote_wiki, self.remote_rev, self.current_rev) + + def __cmp__(self, other): + if not isinstance(other, Tag): + return NotImplemented + return cmp(self.current_rev, other.current_rev) class AbstractTagStore(object): @@ -54,6 +60,10 @@ class AbstractTagStore(object): def clear(self): """ Removes all tags. """ + return NotImplemented + + def fetch(self, iwid_full=None, iw_name=None): + """ Fetches tags by a special IWID or interwiki name. 
""" return NotImplemented @@ -111,6 +121,17 @@ class PickleTagStore(AbstractTagStore): self.tags = [] self.commit() + def fetch(self, iwid_full=None, iw_name=None): + assert iwid_full ^ iw_name + if iwid_full: + iwid_full = unpackLine(iwid_full) + if len(iwid_full) == 1: + assert False, "This case is not supported yet" # XXX + iw_name = iwid_full[1] + + return [t for t in self.tags if t.remote_wiki == iw_name] + + # currently we just have one implementation, so we do not need # a factory method TagStore = PickleTagStore \ No newline at end of file diff -r bf5f8afacf59 -r 4ba6da7e23a4 docs/CHANGES.aschremmer --- a/docs/CHANGES.aschremmer Fri Aug 04 21:34:36 2006 +0200 +++ b/docs/CHANGES.aschremmer Fri Aug 04 22:12:30 2006 +0200 @@ -8,11 +8,13 @@ Branch moin/1.6-sync-aschremmer ToDo: * Implement actual syncronisation. + * Add correct IWID_full handling. * Implement a cross-site authentication system, i.e. mainly an identity storage. * Clean up trailing whitespace. * Add page locking, i.e. use the one in the new storage layer. * Check what needs to be documented on MoinMaster. + * Search for XXX New Features: * XMLRPC method to return the Moin version @@ -83,6 +85,7 @@ Week 31: Load the IWID and the meta dict fixed option handling again, refined semantics of options, introduced direction option, replaced "localMatch"/"remoteMatch" by "pageMatch". Store mtime for InterWiki list updates and detect changes based on it. + Added support for localPrefix and remotePrefix. 
2006-07-18: the requested daily entry is missing here, see http://moinmoin.wikiwikiweb.de/GoogleSoc2006/BetterProgress 2006-07-19: the requested daily entry is missing here, see http://moinmoin.wikiwikiweb.de/GoogleSoc2006/BetterProgress # HG changeset patch # User Thomas Waldmann # Date 1154941222 -7200 # Node ID 484b34dd3c234426fd955d1683cc4c0410398f60 # Parent 4ba6da7e23a460171d0dc574ab34f900b6c7c180 updated CHANGES.aschremmer diff -r 4ba6da7e23a4 -r 484b34dd3c23 docs/CHANGES.aschremmer --- a/docs/CHANGES.aschremmer Fri Aug 04 22:12:30 2006 +0200 +++ b/docs/CHANGES.aschremmer Mon Aug 07 11:00:22 2006 +0200 @@ -99,6 +99,14 @@ 2006-07-27: student didnt work on projec 2006-07-27: student didnt work on project 2006-07-28: the requested daily entry is missing here, see http://moinmoin.wikiwikiweb.de/GoogleSoc2006/BetterProgress 2006-07-29: the requested daily entry is missing here, see http://moinmoin.wikiwikiweb.de/GoogleSoc2006/BetterProgress +2006-07-30: the requested daily entry is missing here, see http://moinmoin.wikiwikiweb.de/GoogleSoc2006/BetterProgress +2006-07-31: the requested daily entry is missing here, see http://moinmoin.wikiwikiweb.de/GoogleSoc2006/BetterProgress +2006-08-01: student didn't work on project +2006-08-02: the requested daily entry is missing here, see http://moinmoin.wikiwikiweb.de/GoogleSoc2006/BetterProgress +2006-08-03: the requested daily entry is missing here, see http://moinmoin.wikiwikiweb.de/GoogleSoc2006/BetterProgress +2006-08-04: the requested daily entry is missing here, see http://moinmoin.wikiwikiweb.de/GoogleSoc2006/BetterProgress +2006-08-05: student didn't work on project +2006-08-06: student didn't work on project Time plan ========= # HG changeset patch # User Alexander Schremmer # Date 1154969351 -7200 # Node ID 2617803641bebdb2b081d17e33829afeaec3205d # Parent 484b34dd3c234426fd955d1683cc4c0410398f60 Merge with upstream. 
diff -r 484b34dd3c23 -r 2617803641be docs/CHANGES.aschremmer --- a/docs/CHANGES.aschremmer Mon Aug 07 11:00:22 2006 +0200 +++ b/docs/CHANGES.aschremmer Mon Aug 07 18:49:11 2006 +0200 @@ -86,6 +86,7 @@ Week 31: Load the IWID and the meta dict direction option, replaced "localMatch"/"remoteMatch" by "pageMatch". Store mtime for InterWiki list updates and detect changes based on it. Added support for localPrefix and remotePrefix. +Week 32: 2006-07-18: the requested daily entry is missing here, see http://moinmoin.wikiwikiweb.de/GoogleSoc2006/BetterProgress 2006-07-19: the requested daily entry is missing here, see http://moinmoin.wikiwikiweb.de/GoogleSoc2006/BetterProgress # HG changeset patch # User Alexander Schremmer # Date 1154980229 -7200 # Node ID 213b244cdf4ff1b6c79592e299d72dfda8e8a602 # Parent 2617803641bebdb2b081d17e33829afeaec3205d Renamed mergeChanges to mergeDiff. diff -r 2617803641be -r 213b244cdf4f MoinMoin/xmlrpc/__init__.py --- a/MoinMoin/xmlrpc/__init__.py Mon Aug 07 18:49:11 2006 +0200 +++ b/MoinMoin/xmlrpc/__init__.py Mon Aug 07 21:50:29 2006 +0200 @@ -617,7 +617,7 @@ class XmlRpcBase: else: return [self._outstr(name), iwid] - def xmlrpc_mergeChanges(self, pagename, diff, local_rev, delta_remote_rev, last_remote_rev, interwiki_name): + def xmlrpc_mergeDiff(self, pagename, diff, local_rev, delta_remote_rev, last_remote_rev, interwiki_name): """ Merges a diff sent by the remote machine and returns the number of the new revision. Additionally, this method tags the new revision. @@ -634,7 +634,7 @@ class XmlRpcBase: pagename = self._instr(pagename) - comment = u"Remote - %r" % interwiki_name + comment = u"Remote Merge - %r" % interwiki_name # User may read page? if not self.request.user.may.read(pagename) or not self.request.user.may.write(pagename): # HG changeset patch # User Alexander Schremmer # Date 1154980294 -7200 # Node ID 3155f908de850f2b4e69c40c15d2324851fd0338 # Parent 213b244cdf4ff1b6c79592e299d72dfda8e8a602 Refactored conflict markers. 
diff -r 213b244cdf4f -r 3155f908de85 MoinMoin/PageEditor.py --- a/MoinMoin/PageEditor.py Mon Aug 07 21:50:29 2006 +0200 +++ b/MoinMoin/PageEditor.py Mon Aug 07 21:51:34 2006 +0200 @@ -16,6 +16,12 @@ from MoinMoin.util import filesys, timef from MoinMoin.util import filesys, timefuncs import MoinMoin.util.web from MoinMoin.mail import sendmail + + +# used for merging +conflict_markers = ("\n---- /!\ '''Edit conflict - other version:''' ----\n", + "\n---- /!\ '''Edit conflict - your version:''' ----\n", + "\n---- /!\ '''End of edit conflict''' ----\n") ############################################################################# @@ -101,10 +107,7 @@ class PageEditor(Page): # And try to merge all into one with edit conflict separators verynewtext = diff3.text_merge(original_text, saved_text, savetext, - allow_conflicts, - "\n---- /!\ '''Edit conflict - other version:''' ----\n", - "\n---- /!\ '''Edit conflict - your version:''' ----\n", - "\n---- /!\ '''End of edit conflict''' ----\n") + allow_conflicts, *conflict_markers) if verynewtext: self.set_raw_body(verynewtext) return True # HG changeset patch # User Alexander Schremmer # Date 1154980346 -7200 # Node ID 0390d7857d87b752f8138a7b3222f1f6cefcf7c8 # Parent 3155f908de850f2b4e69c40c15d2324851fd0338 Work on SyncPages, fixed prefix handling, wrote initial merge loop. 
diff -r 3155f908de85 -r 0390d7857d87 MoinMoin/action/SyncPages.py --- a/MoinMoin/action/SyncPages.py Mon Aug 07 21:51:34 2006 +0200 +++ b/MoinMoin/action/SyncPages.py Mon Aug 07 21:52:26 2006 +0200 @@ -23,11 +23,12 @@ except NameError: from MoinMoin import wikiutil, config, user from MoinMoin.packages import unpackLine, packLine -from MoinMoin.PageEditor import PageEditor +from MoinMoin.PageEditor import PageEditor, conflict_markers from MoinMoin.Page import Page from MoinMoin.wikidicts import Dict, Group from MoinMoin.wikisync import TagStore -from MoinMoin.util.bdiff import decompress, patch +from MoinMoin.util.bdiff import decompress, patch, compress, textdiff +from MoinMoin.util import diff3 # directions UP, DOWN, BOTH = range(3) @@ -50,7 +51,7 @@ class UnsupportedWikiException(Exception # XXX Move these classes to MoinMoin.wikisync class SyncPage(object): - """ This class represents a page in (another) wiki. """ + """ This class represents a page in one or two wiki(s). """ def __init__(self, name, local_rev=None, remote_rev=None, local_name=None, remote_name=None): self.name = name self.local_rev = local_rev @@ -76,6 +77,18 @@ class SyncPage(object): if not isinstance(other, SyncPage): return false return self.name == other.name + + def add_missing_pagename(self, local, remote): + if self.local_name is None: + n_name = normalise_pagename(self.remote_name, remote.prefix) + assert n_name is not None + self.local_name = (local.prefix or "") + n_name + elif self.remote_name is None: + n_name = normalise_pagename(self.local_name, local.prefix) + assert n_name is not None + self.remote_name = (local.prefix or "") + n_name + + return self # makes using list comps easier def filter(cls, sp_list, func): return [x for x in sp_list if func(x.name)] @@ -132,6 +145,10 @@ class RemoteWiki(object): """ Returns the interwiki name of the other wiki. """ return NotImplemented + def get_iwid(self): + """ Returns the InterWiki ID. 
""" + return NotImplemented + def get_pages(self): """ Returns a list of SyncPage instances. """ return NotImplemented @@ -174,12 +191,16 @@ class MoinRemoteWiki(RemoteWiki): def createConnection(self): return xmlrpclib.ServerProxy(self.xmlrpc_url, allow_none=True, verbose=True) + # Public methods def get_diff(self, pagename, from_rev, to_rev): return str(self.connection.getDiff(pagename, from_rev, to_rev)) # Methods implementing the RemoteWiki interface def get_interwiki_name(self): return self.connection.interwikiName()[0] + + def get_iwid(self): + return self.connection.interwikiName()[1] def get_pages(self): pages = self.connection.getAllPagesEx({"include_revno": True, "include_deleted": True}) @@ -214,9 +235,14 @@ class MoinLocalWiki(RemoteWiki): return None return SyncPage(normalised_name, local_rev=Page(self.request, page_name).get_real_rev(), local_name=page_name) + # Public methods: + # Methods implementing the RemoteWiki interface def get_interwiki_name(self): return self.request.cfg.interwikiname + + def get_iwid(self): + return self.request.cfg.iwid def get_pages(self): return [x for x in [self.createSyncPage(x) for x in self.request.rootpage.getPageList(exists=0)] if x] @@ -314,7 +340,7 @@ class ActionClass: r_pages = SyncPage.filter(r_pages, pages_from_groupList.__contains__) l_pages = SyncPage.filter(l_pages, pages_from_groupList.__contains__) - m_pages = SyncPage.merge(l_pages, r_pages) + m_pages = [elem.add_missing_pagename(local, remote) for elem in SyncPage.merge(l_pages, r_pages)] print "Got %i local, %i remote pages, %i merged pages" % (len(l_pages), len(r_pages), len(m_pages)) @@ -334,12 +360,15 @@ class ActionClass: # for rp in remote_but_not_local: # let's do the simple case first, can be refactored later to match all cases + # XXX handle deleted pages for rp in on_both_sides: # XXX add locking, acquire read-lock on rp + current_page = Page(self.request, local_pagename) + current_rev = current_page.get_real_rev() local_pagename = 
rp.local_pagename - tags = TagStore(Page(self.request, local_pagename)) + tags = TagStore(current_page) matching_tags = tags.fetch(iwid_full=remote.iwid_full) matching_tags.sort() @@ -351,15 +380,38 @@ class ActionClass: newest_tag = matching_tags[-1] local_rev = newest_tag.current_rev remote_rev = newest_tag.remote_rev - old_contents = local_page.get_raw_body_str() - - local_page = Page(self.request, local_pagename, rev=local_rev) - - diff = remote.get_diff(rp.remote_pagename, remote_rev, None) + if remote_rev == rp.remote_rev and local_rev == current_rev: + continue # no changes done, next page + old_page = Page(self.request, local_pagename, rev=local_rev) + old_contents = old_page.get_raw_body_str() + + diff_result = remote.get_diff(rp.remote_pagename, remote_rev, None) + is_remote_conflict = diff_result["conflict"] + assert diff_result["diffversion"] == 1 + diff = diff_result["diff"] + current_remote_rev = diff_result["current"] + + if remote_rev is None: # set the remote_rev for the case without any tags + remote_rev = current_remote_rev + new_contents = patch(old_contents, decompress(diff)).decode("utf-8") - # XXX this is not finished yet - - + old_contents = old_contents.encode("utf-8") + + # here, the actual merge happens + verynewtext = diff3.text_merge(old_contents, new_contents, current_page.get_raw_body(), 1, *conflict_markers) + + new_local_rev = current_rev + 1 # XXX commit first? 
+ local_full_iwid = packLine([local.get_iwid(), local.get_interwiki_name()]) + remote_full_iwid = packLine([remote.get_iwid(), remote.get_interwiki_name()]) + # XXX add remote conflict handling + very_current_remote_rev = remote.merge_diff(rp.remote_pagename, compress(textdiff(new_contents, verynewtext)), new_local_rev, remote_rev, current_remote_rev, local_full_iwid) + tags.add(remote_wiki=remote_full_iwid, remote_rev=very_current_remote_rev, current_rev=new_local_rev) + comment = u"Local Merge - %r" % (remote.get_interwiki_name() or remote.get_iwid()) + try: + current_page.saveText(verynewtext, current_rev, comment=comment) + except PageEditor.EditConflict: + assert False, "You stumbled on a problem with the current storage system - I cannot lock pages" + # XXX untested def execute(pagename, request): ActionClass(pagename, request).render() # HG changeset patch # User Alexander Schremmer # Date 1154980360 -7200 # Node ID 0448272f0e5b4de44d4d7636594233f6547a3225 # Parent 0390d7857d87b752f8138a7b3222f1f6cefcf7c8 Added entries to my CHANGES file. diff -r 0390d7857d87 -r 0448272f0e5b docs/CHANGES.aschremmer --- a/docs/CHANGES.aschremmer Mon Aug 07 21:52:26 2006 +0200 +++ b/docs/CHANGES.aschremmer Mon Aug 07 21:52:40 2006 +0200 @@ -86,7 +86,7 @@ Week 31: Load the IWID and the meta dict direction option, replaced "localMatch"/"remoteMatch" by "pageMatch". Store mtime for InterWiki list updates and detect changes based on it. Added support for localPrefix and remotePrefix. -Week 32: +Week 32: Continued work on the merge logic, finished prefix handling. 
2006-07-18: the requested daily entry is missing here, see http://moinmoin.wikiwikiweb.de/GoogleSoc2006/BetterProgress 2006-07-19: the requested daily entry is missing here, see http://moinmoin.wikiwikiweb.de/GoogleSoc2006/BetterProgress @@ -107,7 +107,7 @@ 2006-08-03: the requested daily entry is 2006-08-03: the requested daily entry is missing here, see http://moinmoin.wikiwikiweb.de/GoogleSoc2006/BetterProgress 2006-08-04: the requested daily entry is missing here, see http://moinmoin.wikiwikiweb.de/GoogleSoc2006/BetterProgress 2006-08-05: student didn't work on project -2006-08-06: student didn't work on project +2006-08-06: student didn't work on project -- a Sunday Time plan ========= # HG changeset patch # User Alexander Schremmer # Date 1154983480 -7200 # Node ID 1b5093cfc6078c26b8578be80f8db96bece22314 # Parent 432c676186ba149a88ca4501baf394d38648c26e Streamlined Request.Clock, added support for recursive calls. diff -r 432c676186ba -r 1b5093cfc607 MoinMoin/request/__init__.py --- a/MoinMoin/request/__init__.py Mon Aug 07 21:53:20 2006 +0200 +++ b/MoinMoin/request/__init__.py Mon Aug 07 22:44:40 2006 +0200 @@ -28,42 +28,46 @@ class Clock: class Clock: """ Helper class for code profiling we do not use time.clock() as this does not work across threads + This is not thread-safe when it comes to multiple starts for one timer. 
+ It is possible to recursively call the start and stop methods, you + should just ensure that you call them often enough :) """ def __init__(self): self.timings = {} self.states = {} + def _get_name(timer, generation): + if generation == 0: + return timer + else: + return "%s|%i" % (timer, generation) + _get_name = staticmethod(_get_name) + def start(self, timer): - state = self.states.setdefault(timer, 'new') - if state == 'new': - self.timings[timer] = time.time() - self.states[timer] = 'running' - elif state == 'running': - pass # this timer is already running, do nothing - elif state == 'stopped': - # if a timer is stopped, timings has the sum of all times it was running - self.timings[timer] = time.time() - self.timings[timer] - self.states[timer] = 'running' + state = self.states.setdefault(timer, -1) + new_level = state + 1 + name = Clock._get_name(timer, new_level) + self.timings[name] = time.time() - self.timings.get(name, 0) + self.states[timer] = new_level def stop(self, timer): - state = self.states.setdefault(timer, 'neverstarted') - if state == 'running': - self.timings[timer] = time.time() - self.timings[timer] - self.states[timer] = 'stopped' - elif state == 'stopped': - pass # this timer already has been stopped, do nothing - elif state == 'neverstarted': - pass # this timer never has been started, do nothing + state = self.states.setdefault(timer, -1) + if state >= 0: # timer is active + name = Clock._get_name(timer, state) + self.timings[name] = time.time() - self.timings[name] + self.states[timer] = state - 1 def value(self, timer): - state = self.states.setdefault(timer, 'nosuchtimer') - if state == 'stopped': + base_timer = timer.split("|")[0] + state = self.states.get(base_timer, None) + if state == -1: result = "%.3fs" % self.timings[timer] - elif state == 'running': + elif state is None: + result = "- (%s)" % state + else: + print "Got state %r" % state result = "%.3fs (still running)" % (time.time() - self.timings[timer]) - else: - result 
= "- (%s)" % state return result def dump(self): # HG changeset patch # User Alexander Schremmer # Date 1154983516 -7200 # Node ID 2be5a05416bdb64e4bca6073057f8147de31e40e # Parent 1b5093cfc6078c26b8578be80f8db96bece22314 Added a few timers in Page, removed an except block that should be never used. diff -r 1b5093cfc607 -r 2be5a05416bd MoinMoin/Page.py --- a/MoinMoin/Page.py Mon Aug 07 22:44:40 2006 +0200 +++ b/MoinMoin/Page.py Mon Aug 07 22:45:16 2006 +0200 @@ -1385,6 +1385,7 @@ class Page: def execute(self, request, parser, code): """ Write page content by executing cache code """ formatter = self.formatter + request.clock.start("execute") from MoinMoin.macro import Macro macro_obj = Macro(parser) # Fix __file__ when running from a zip package @@ -1396,6 +1397,7 @@ class Page: exec code except "CacheNeedsUpdate": # convert the exception raise Exception("CacheNeedsUpdate") + request.clock.stop("execute") def loadCache(self, request): """ Return page content cache or raises 'CacheNeedsUpdate' """ @@ -1407,8 +1409,6 @@ class Page: import marshal try: return marshal.loads(cache.content()) - except "CacheNeedsUpdate": # convert old exception into a new one - raise Exception('CacheNeedsUpdate') except (EOFError, ValueError, TypeError): # Bad marshal data, must update the cache. # See http://docs.python.org/lib/module-marshal.html # HG changeset patch # User Alexander Schremmer # Date 1154983734 -7200 # Node ID 40059e38427ae2dbb55b9b27fefd6d9fdc4eafc0 # Parent 2be5a05416bdb64e4bca6073057f8147de31e40e Ensure that the timer is called in Page.execute. 
diff -r 2be5a05416bd -r 40059e38427a MoinMoin/Page.py --- a/MoinMoin/Page.py Mon Aug 07 22:45:16 2006 +0200 +++ b/MoinMoin/Page.py Mon Aug 07 22:48:54 2006 +0200 @@ -1385,19 +1385,21 @@ class Page: def execute(self, request, parser, code): """ Write page content by executing cache code """ formatter = self.formatter - request.clock.start("execute") - from MoinMoin.macro import Macro - macro_obj = Macro(parser) - # Fix __file__ when running from a zip package - import MoinMoin - if hasattr(MoinMoin, '__loader__'): - __file__ = os.path.join(MoinMoin.__loader__.archive, 'dummy') - + request.clock.start("Page.execute") try: - exec code - except "CacheNeedsUpdate": # convert the exception - raise Exception("CacheNeedsUpdate") - request.clock.stop("execute") + from MoinMoin.macro import Macro + macro_obj = Macro(parser) + # Fix __file__ when running from a zip package + import MoinMoin + if hasattr(MoinMoin, '__loader__'): + __file__ = os.path.join(MoinMoin.__loader__.archive, 'dummy') + + try: + exec code + except "CacheNeedsUpdate": # convert the exception + raise Exception("CacheNeedsUpdate") + finally: + request.clock.stop("Page.execute") def loadCache(self, request): """ Return page content cache or raises 'CacheNeedsUpdate' """ # HG changeset patch # User Alexander Schremmer # Date 1154985006 -7200 # Node ID 637d90f1209db17cdbb90e0b6ef771479e15b3c2 # Parent 40059e38427ae2dbb55b9b27fefd6d9fdc4eafc0 Oops, removed a debug print. 
diff -r 40059e38427a -r 637d90f1209d MoinMoin/request/__init__.py --- a/MoinMoin/request/__init__.py Mon Aug 07 22:48:54 2006 +0200 +++ b/MoinMoin/request/__init__.py Mon Aug 07 23:10:06 2006 +0200 @@ -66,7 +66,7 @@ class Clock: elif state is None: result = "- (%s)" % state else: - print "Got state %r" % state + #print "Got state %r" % state result = "%.3fs (still running)" % (time.time() - self.timings[timer]) return result # HG changeset patch # User Alexander Schremmer # Date 1155043640 -7200 # Node ID bd5b43d703992d3a95bfe169b76979d4de483481 # Parent 637d90f1209db17cdbb90e0b6ef771479e15b3c2 Refactored conflict flag detection from the edit action into the PageEditor class. diff -r 637d90f1209d -r bd5b43d70399 MoinMoin/PageEditor.py --- a/MoinMoin/PageEditor.py Mon Aug 07 23:10:06 2006 +0200 +++ b/MoinMoin/PageEditor.py Tue Aug 08 15:27:20 2006 +0200 @@ -918,6 +918,9 @@ Try a different name.""") % (newpagename backup_url = self._make_backup(newtext, **kw) action = kw.get('action', 'SAVE') + # remember conflict state + self.setConflict(wikiutil.containsConflictMarker(newtext)) + #!!! need to check if we still retain the lock here #!!! rev check is not enough since internal operations use "0" diff -r 637d90f1209d -r bd5b43d70399 MoinMoin/action/edit.py --- a/MoinMoin/action/edit.py Mon Aug 07 23:10:06 2006 +0200 +++ b/MoinMoin/action/edit.py Tue Aug 08 15:27:20 2006 +0200 @@ -135,8 +135,6 @@ def execute(pagename, request): # Save new text else: try: - still_conflict = wikiutil.containsConflictMarker(savetext) - pg.setConflict(still_conflict) savemsg = pg.saveText(savetext, rev, trivial=trivial, comment=comment) except pg.EditConflict, e: msg = e.message # HG changeset patch # User Alexander Schremmer # Date 1155043666 -7200 # Node ID 353c493dfb774418acd5fb51402697a54a986024 # Parent bd5b43d703992d3a95bfe169b76979d4de483481 Added local conflict detection, logging support to SyncPages, updated my CHANGES file. 
diff -r bd5b43d70399 -r 353c493dfb77 MoinMoin/action/SyncPages.py --- a/MoinMoin/action/SyncPages.py Tue Aug 08 15:27:20 2006 +0200 +++ b/MoinMoin/action/SyncPages.py Tue Aug 08 15:27:46 2006 +0200 @@ -252,10 +252,16 @@ class MoinLocalWiki(RemoteWiki): class ActionClass: + INFO, WARNING, ERROR = range(3) # used for logging + def __init__(self, pagename, request): self.request = request self.pagename = pagename self.page = Page(request, pagename) + self.status = [] + + def log_status(self, level, message): + self.status.append((level, message)) def parse_page(self): options = { @@ -325,9 +331,9 @@ class ActionClass: self.sync(params, local, remote) except ActionStatus, e: - return self.page.send_page(self.request, msg=u'

%s

\n' % (e.args[0], )) - - return self.page.send_page(self.request, msg=_("Syncronisation finished.")) + return self.page.send_page(self.request, msg=u'

%s

%s

\n' % (e.args[0], repr(self.status))) + + return self.page.send_page(self.request, msg=u"%s

%s

" % (_("Syncronisation finished."), repr(self.status))) def sync(self, params, local, remote): """ This method does the syncronisation work. """ @@ -365,6 +371,9 @@ class ActionClass: # XXX add locking, acquire read-lock on rp current_page = Page(self.request, local_pagename) + if wikiutil.containsConflictMarker(current_page.get_raw_body()): + self.log_status(ActionClass.WARN, _("Skipped page %(pagename)s because of a local unresolved conflict.") % {"pagename": local_pagename}) + continue current_rev = current_page.get_real_rev() local_pagename = rp.local_pagename @@ -385,6 +394,8 @@ class ActionClass: old_page = Page(self.request, local_pagename, rev=local_rev) old_contents = old_page.get_raw_body_str() + self.log_status(ActionClass.INFO, _("Synchronising page %(pagename)s with remote page %(remotepagename)s ...") % {"pagename": local_pagename, "remotepagename": rp.remote_pagename}) + diff_result = remote.get_diff(rp.remote_pagename, remote_rev, None) is_remote_conflict = diff_result["conflict"] assert diff_result["diffversion"] == 1 @@ -392,6 +403,7 @@ class ActionClass: current_remote_rev = diff_result["current"] if remote_rev is None: # set the remote_rev for the case without any tags + self.log_status(ActionClass.INFO, _("This is the first synchronisation between this page and the remote wiki.")) remote_rev = current_remote_rev new_contents = patch(old_contents, decompress(diff)).decode("utf-8") @@ -403,15 +415,26 @@ class ActionClass: new_local_rev = current_rev + 1 # XXX commit first? 
local_full_iwid = packLine([local.get_iwid(), local.get_interwiki_name()]) remote_full_iwid = packLine([remote.get_iwid(), remote.get_interwiki_name()]) - # XXX add remote conflict handling + very_current_remote_rev = remote.merge_diff(rp.remote_pagename, compress(textdiff(new_contents, verynewtext)), new_local_rev, remote_rev, current_remote_rev, local_full_iwid) - tags.add(remote_wiki=remote_full_iwid, remote_rev=very_current_remote_rev, current_rev=new_local_rev) comment = u"Local Merge - %r" % (remote.get_interwiki_name() or remote.get_iwid()) + + # XXX upgrade to write lock try: current_page.saveText(verynewtext, current_rev, comment=comment) except PageEditor.EditConflict: + # XXX remote rollback needed assert False, "You stumbled on a problem with the current storage system - I cannot lock pages" + tags.add(remote_wiki=remote_full_iwid, remote_rev=very_current_remote_rev, current_rev=new_local_rev) + + if not wikiutil.containsConflictMarker(verynewtext): + self.log_status(ActionClass.INFO, _("Page successfully merged.")) + else: + self.log_status(ActionClass.WARN, _("Page merged with conflicts.")) + + # XXX release lock # XXX untested + def execute(pagename, request): ActionClass(pagename, request).render() diff -r bd5b43d70399 -r 353c493dfb77 docs/CHANGES.aschremmer --- a/docs/CHANGES.aschremmer Tue Aug 08 15:27:20 2006 +0200 +++ b/docs/CHANGES.aschremmer Tue Aug 08 15:27:46 2006 +0200 @@ -86,7 +86,9 @@ Week 31: Load the IWID and the meta dict direction option, replaced "localMatch"/"remoteMatch" by "pageMatch". Store mtime for InterWiki list updates and detect changes based on it. Added support for localPrefix and remotePrefix. -Week 32: Continued work on the merge logic, finished prefix handling. +Week 32: Continued work on the merge logic, finished prefix handling. Added local conflict + detection in SyncPages. Added logging support to SyncPages. Refactored conflict flag + detection from the edit action into the PageEditor class. 
2006-07-18: the requested daily entry is missing here, see http://moinmoin.wikiwikiweb.de/GoogleSoc2006/BetterProgress 2006-07-19: the requested daily entry is missing here, see http://moinmoin.wikiwikiweb.de/GoogleSoc2006/BetterProgress # HG changeset patch # User Alexander Schremmer # Date 1155067559 -7200 # Node ID 0432e8a9ba2f9e0341b95a01b649e282cc9e2a8a # Parent 353c493dfb774418acd5fb51402697a54a986024 Added docstrings. diff -r 353c493dfb77 -r 0432e8a9ba2f MoinMoin/action/SyncPages.py --- a/MoinMoin/action/SyncPages.py Tue Aug 08 15:27:46 2006 +0200 +++ b/MoinMoin/action/SyncPages.py Tue Aug 08 22:05:59 2006 +0200 @@ -36,6 +36,9 @@ directions_map = {"up": UP, "down": DOWN def normalise_pagename(page_name, prefix): + """ Checks if the page_name starts with the prefix. + Returns None if it does not, otherwise strips the prefix. + """ if prefix: if not page_name.startswith(prefix): return None @@ -53,6 +56,13 @@ class SyncPage(object): class SyncPage(object): """ This class represents a page in one or two wiki(s). """ def __init__(self, name, local_rev=None, remote_rev=None, local_name=None, remote_name=None): + """ Creates a SyncPage instance. + @param name: The canonical name of the page, without prefixes. + @param local_rev: The revision of the page in the local wiki. + @param remote_rev: The revision of the page in the remote wiki. + @param local_name: The page name of the page in the local wiki. + @param remote_name: The page name of the page in the remote wiki. + """ self.name = name self.local_rev = local_rev self.remote_rev = remote_rev @@ -71,6 +81,7 @@ class SyncPage(object): return self.name < other.name def __hash__(self): + """ Ensures that the hash value of this page only depends on the canonical name. 
""" return hash(self.name) def __eq__(self, other): @@ -79,6 +90,9 @@ class SyncPage(object): return self.name == other.name def add_missing_pagename(self, local, remote): + """ Checks if the particular concrete page names are unknown and fills + them in. + """ if self.local_name is None: n_name = normalise_pagename(self.remote_name, remote.prefix) assert n_name is not None @@ -91,10 +105,14 @@ class SyncPage(object): return self # makes using list comps easier def filter(cls, sp_list, func): + """ Returns all pages in sp_list that let func return True + for the canonical page name. + """ return [x for x in sp_list if func(x.name)] filter = classmethod(filter) def merge(cls, local_list, remote_list): + """ Merges two lists of SyncPages into one, migrating attributes like the names. """ # map page names to SyncPage objects :-) d = dict(zip(local_list, local_list)) for sp in remote_list: @@ -107,27 +125,33 @@ class SyncPage(object): merge = classmethod(merge) def is_only_local(self): + """ Is true if the page is only in the local wiki. """ return not self.remote_rev def is_only_remote(self): + """ Is true if the page is only in the remote wiki. """ return not self.local_rev def is_local_and_remote(self): + """ Is true if the page is in both wikis. """ return self.local_rev and self.remote_rev def iter_local_only(cls, sp_list): + """ Iterates over all pages that are local only. """ for x in sp_list: if x.is_only_local(): yield x iter_local_only = classmethod(iter_local_only) def iter_remote_only(cls, sp_list): + """ Iterates over all pages that are remote only. """ for x in sp_list: if x.is_only_remote(): yield x iter_remote_only = classmethod(iter_remote_only) def iter_local_and_remote(cls, sp_list): + """ Iterates over all pages that are local and remote. 
""" for x in sp_list: if x.is_local_and_remote(): yield x @@ -193,6 +217,8 @@ class MoinRemoteWiki(RemoteWiki): # Public methods def get_diff(self, pagename, from_rev, to_rev): + """ Returns the binary diff of the remote page named pagename, given + from_rev and to_rev. """ return str(self.connection.getDiff(pagename, from_rev, to_rev)) # Methods implementing the RemoteWiki interface @@ -261,9 +287,11 @@ class ActionClass: self.status = [] def log_status(self, level, message): + """ Appends the message with a given importance level to the internal log. """ self.status.append((level, message)) def parse_page(self): + """ Parses the parameter page and returns the read arguments. """ options = { "remotePrefix": "", "localPrefix": "", # HG changeset patch # User Alexander Schremmer # Date 1155135574 -7200 # Node ID 3c80692b9b47e0449aee174d2ba96e03fef04858 # Parent 0432e8a9ba2f9e0341b95a01b649e282cc9e2a8a Added support for XMLRPC functions that return a Fault instance. diff -r 0432e8a9ba2f -r 3c80692b9b47 MoinMoin/xmlrpc/__init__.py --- a/MoinMoin/xmlrpc/__init__.py Tue Aug 08 22:05:59 2006 +0200 +++ b/MoinMoin/xmlrpc/__init__.py Wed Aug 09 16:59:34 2006 +0200 @@ -125,11 +125,14 @@ class XmlRpcBase: # report exception back to server response = xmlrpclib.dumps(xmlrpclib.Fault(1, self._dump_exc())) else: - # wrap response in a singleton tuple - response = (response,) - - # serialize it - response = xmlrpclib.dumps(response, methodresponse=1) + if isinstance(response, xmlrpclib.Fault): + response = xmlrpclib.dumps(response) + else: + # wrap response in a singleton tuple + response = (response,) + + # serialize it + response = xmlrpclib.dumps(response, methodresponse=1) self.request.emit_http_headers([ "Content-Type: text/xml; charset=utf-8", # HG changeset patch # User Alexander Schremmer # Date 1155136421 -7200 # Node ID 84ca2d77b8e4ed907132beb13af5b808f646e5b7 # Parent 3c80692b9b47e0449aee174d2ba96e03fef04858 Added support for "exclude_non_writable" in getAllPagesEx. 
diff -r 3c80692b9b47 -r 84ca2d77b8e4 MoinMoin/xmlrpc/__init__.py --- a/MoinMoin/xmlrpc/__init__.py Wed Aug 09 16:59:34 2006 +0200 +++ b/MoinMoin/xmlrpc/__init__.py Wed Aug 09 17:13:41 2006 +0200 @@ -238,10 +238,12 @@ class XmlRpcBase: include_system:: set it to false if you do not want to see system pages include_revno:: set it to True if you want to have lists with [pagename, revno] include_deleted:: set it to True if you want to include deleted pages + exclude_non_writable:: do not include pages that the current user may not write to @rtype: list @return: a list of all pages. """ - options = {"include_system": True, "include_revno": False, "include_deleted": False} + options = {"include_system": True, "include_revno": False, "include_deleted": False, + "exclude_non_writable": False} if opts is not None: options.update(opts) @@ -249,6 +251,9 @@ class XmlRpcBase: filter = lambda name: not wikiutil.isSystemPage(self.request, name) else: filter = lambda name: True + + if options["exclude_non_writable"]: + filter = lambda name, filter=filter: filter(name) and self.request.user.may.write(name) pagelist = self.request.rootpage.getPageList(filter=filter, exists=not options["include_deleted"]) # HG changeset patch # User Alexander Schremmer # Date 1155136967 -7200 # Node ID 3e2b4beb65ed8c400bd6df1f691dc43b6324cbe9 # Parent 84ca2d77b8e4ed907132beb13af5b808f646e5b7 Moved conflict state detection again into another function. diff -r 84ca2d77b8e4 -r 3e2b4beb65ed MoinMoin/PageEditor.py --- a/MoinMoin/PageEditor.py Wed Aug 09 17:13:41 2006 +0200 +++ b/MoinMoin/PageEditor.py Wed Aug 09 17:22:47 2006 +0200 @@ -807,6 +807,9 @@ Try a different name.""") % (newpagename was_deprecated = self._get_pragmas(self.get_raw_body()).has_key("deprecated") self.copypage() + + # remember conflict state + self.setConflict(wikiutil.containsConflictMarker(text)) # Write always on the standard directory, never change the # underlay directory copy! 
@@ -918,9 +921,6 @@ Try a different name.""") % (newpagename backup_url = self._make_backup(newtext, **kw) action = kw.get('action', 'SAVE') - # remember conflict state - self.setConflict(wikiutil.containsConflictMarker(newtext)) - #!!! need to check if we still retain the lock here #!!! rev check is not enough since internal operations use "0" # HG changeset patch # User Alexander Schremmer # Date 1155137031 -7200 # Node ID eb0700881a98128d69d3cd5c29745bc76faaf42e # Parent 3e2b4beb65ed8c400bd6df1f691dc43b6324cbe9 Fixed asserts/type checks. diff -r 3e2b4beb65ed -r eb0700881a98 MoinMoin/wikisync.py --- a/MoinMoin/wikisync.py Wed Aug 09 17:22:47 2006 +0200 +++ b/MoinMoin/wikisync.py Wed Aug 09 17:23:51 2006 +0200 @@ -27,6 +27,7 @@ class Tag(object): @param remote_rev: The revision number on the remote end. @param current_rev: The related local revision. """ + assert isinstance(remote_wiki, str) and isinstance(remote_rev, int) and isinstance(current_rev, int) self.remote_wiki = remote_wiki self.remote_rev = remote_rev self.current_rev = current_rev @@ -122,7 +123,7 @@ class PickleTagStore(AbstractTagStore): self.commit() def fetch(self, iwid_full=None, iw_name=None): - assert iwid_full ^ iw_name + assert bool(iwid_full) ^ bool(iw_name) if iwid_full: iwid_full = unpackLine(iwid_full) if len(iwid_full) == 1: # HG changeset patch # User Alexander Schremmer # Date 1155137073 -7200 # Node ID 32c936ab469e8e920935ab965d4c9338068d7e37 # Parent eb0700881a98128d69d3cd5c29745bc76faaf42e Made SyncPages working, first page could be syncronised. 
diff -r eb0700881a98 -r 32c936ab469e MoinMoin/action/SyncPages.py --- a/MoinMoin/action/SyncPages.py Wed Aug 09 17:23:51 2006 +0200 +++ b/MoinMoin/action/SyncPages.py Wed Aug 09 17:24:33 2006 +0200 @@ -75,7 +75,7 @@ class SyncPage(object): return repr("" % unicode(self)) def __unicode__(self): - return u"%s<%r:%r>" % (self.name, self.local_rev, self.remote_rev) + return u"%s[%s|%s]<%r:%r>" % (self.name, self.local_name, self.remote_name, self.local_rev, self.remote_rev) def __lt__(self, other): return self.name < other.name @@ -219,7 +219,19 @@ class MoinRemoteWiki(RemoteWiki): def get_diff(self, pagename, from_rev, to_rev): """ Returns the binary diff of the remote page named pagename, given from_rev and to_rev. """ - return str(self.connection.getDiff(pagename, from_rev, to_rev)) + result = self.connection.getDiff(pagename, from_rev, to_rev) + if isinstance(result, xmlrpclib.Fault): + raise Exception(result) + result["diff"] = str(result["diff"]) # unmarshal Binary object + return result + + def merge_diff(self, pagename, diff, local_rev, delta_remote_rev, last_remote_rev, interwiki_name): + """ Merges the diff into the page on the remote side. 
""" + result = self.connection.mergeDiff(pagename, xmlrpclib.Binary(diff), local_rev, delta_remote_rev, last_remote_rev, interwiki_name) + print result + if isinstance(result, xmlrpclib.Fault): + raise Exception(result) + return result # Methods implementing the RemoteWiki interface def get_interwiki_name(self): @@ -229,7 +241,8 @@ class MoinRemoteWiki(RemoteWiki): return self.connection.interwikiName()[1] def get_pages(self): - pages = self.connection.getAllPagesEx({"include_revno": True, "include_deleted": True}) + pages = self.connection.getAllPagesEx({"include_revno": True, "include_deleted": True, + "exclude_non_writable": True}) # XXX fix when all 3 directions are supported rpages = [] for name, revno in pages: normalised_name = normalise_pagename(name, self.prefix) @@ -278,7 +291,7 @@ class MoinLocalWiki(RemoteWiki): class ActionClass: - INFO, WARNING, ERROR = range(3) # used for logging + INFO, WARN, ERROR = range(3) # used for logging def __init__(self, pagename, request): self.request = request @@ -359,12 +372,16 @@ class ActionClass: self.sync(params, local, remote) except ActionStatus, e: - return self.page.send_page(self.request, msg=u'

%s

%s

\n' % (e.args[0], repr(self.status))) - - return self.page.send_page(self.request, msg=u"%s

%s

" % (_("Syncronisation finished."), repr(self.status))) + msg = u'

%s

%s

\n' % (e.args[0], repr(self.status)) + else: + msg = u"%s

%s

" % (_("Syncronisation finished."), repr(self.status)) + + # XXX append self.status to the job page + return self.page.send_page(self.request, msg=msg) def sync(self, params, local, remote): """ This method does the syncronisation work. """ + _ = self.request.getText l_pages = local.get_pages() r_pages = remote.get_pages() @@ -397,13 +414,14 @@ class ActionClass: # XXX handle deleted pages for rp in on_both_sides: # XXX add locking, acquire read-lock on rp - - current_page = Page(self.request, local_pagename) + print "Processing %r" % rp + + local_pagename = rp.local_name + current_page = PageEditor(self.request, local_pagename) if wikiutil.containsConflictMarker(current_page.get_raw_body()): self.log_status(ActionClass.WARN, _("Skipped page %(pagename)s because of a local unresolved conflict.") % {"pagename": local_pagename}) continue current_rev = current_page.get_real_rev() - local_pagename = rp.local_pagename tags = TagStore(current_page) matching_tags = tags.fetch(iwid_full=remote.iwid_full) @@ -422,9 +440,9 @@ class ActionClass: old_page = Page(self.request, local_pagename, rev=local_rev) old_contents = old_page.get_raw_body_str() - self.log_status(ActionClass.INFO, _("Synchronising page %(pagename)s with remote page %(remotepagename)s ...") % {"pagename": local_pagename, "remotepagename": rp.remote_pagename}) - - diff_result = remote.get_diff(rp.remote_pagename, remote_rev, None) + self.log_status(ActionClass.INFO, _("Synchronising page %(pagename)s with remote page %(remotepagename)s ...") % {"pagename": local_pagename, "remotepagename": rp.remote_name}) + + diff_result = remote.get_diff(rp.remote_name, remote_rev, None) is_remote_conflict = diff_result["conflict"] assert diff_result["diffversion"] == 1 diff = diff_result["diff"] @@ -444,7 +462,8 @@ class ActionClass: local_full_iwid = packLine([local.get_iwid(), local.get_interwiki_name()]) remote_full_iwid = packLine([remote.get_iwid(), remote.get_interwiki_name()]) - very_current_remote_rev = 
remote.merge_diff(rp.remote_pagename, compress(textdiff(new_contents, verynewtext)), new_local_rev, remote_rev, current_remote_rev, local_full_iwid) + diff = textdiff(new_contents.encode("utf-8"), verynewtext.encode("utf-8")) + very_current_remote_rev = remote.merge_diff(rp.remote_name, compress(diff), new_local_rev, remote_rev, current_remote_rev, local_full_iwid) comment = u"Local Merge - %r" % (remote.get_interwiki_name() or remote.get_iwid()) # XXX upgrade to write lock diff -r eb0700881a98 -r 32c936ab469e docs/CHANGES.aschremmer --- a/docs/CHANGES.aschremmer Wed Aug 09 17:23:51 2006 +0200 +++ b/docs/CHANGES.aschremmer Wed Aug 09 17:24:33 2006 +0200 @@ -88,7 +88,8 @@ Week 31: Load the IWID and the meta dict Added support for localPrefix and remotePrefix. Week 32: Continued work on the merge logic, finished prefix handling. Added local conflict detection in SyncPages. Added logging support to SyncPages. Refactored conflict flag - detection from the edit action into the PageEditor class. + detection from the edit action into the PageEditor class. Enhanced XMLRPC server in Moin to allow + XMLRPC functions to return Fault instances. 2006-07-18: the requested daily entry is missing here, see http://moinmoin.wikiwikiweb.de/GoogleSoc2006/BetterProgress 2006-07-19: the requested daily entry is missing here, see http://moinmoin.wikiwikiweb.de/GoogleSoc2006/BetterProgress # HG changeset patch # User Alexander Schremmer # Date 1155138119 -7200 # Node ID 97e9866e82c78e64a82fd73b835a9c09b2d5c89a # Parent 32c936ab469e8e920935ab965d4c9338068d7e37 Fixed minor bug in getDiff. 
diff -r 32c936ab469e -r 97e9866e82c7 MoinMoin/xmlrpc/__init__.py --- a/MoinMoin/xmlrpc/__init__.py Wed Aug 09 17:24:33 2006 +0200 +++ b/MoinMoin/xmlrpc/__init__.py Wed Aug 09 17:41:59 2006 +0200 @@ -601,6 +601,7 @@ class XmlRpcBase: oldcontents = lambda: oldpage.get_raw_body_str() if to_rev is None: + newpage = currentpage newcontents = lambda: currentpage.get_raw_body() else: newpage = Page(self.request, pagename, rev=to_rev) # HG changeset patch # User Alexander Schremmer # Date 1155138242 -7200 # Node ID 7b128ea29fc4e01302c57ef9f9bfa57859eb8527 # Parent 97e9866e82c78e64a82fd73b835a9c09b2d5c89a Further bug fixes in SyncPages and wikisync diff -r 97e9866e82c7 -r 7b128ea29fc4 MoinMoin/action/SyncPages.py --- a/MoinMoin/action/SyncPages.py Wed Aug 09 17:41:59 2006 +0200 +++ b/MoinMoin/action/SyncPages.py Wed Aug 09 17:44:02 2006 +0200 @@ -220,17 +220,12 @@ class MoinRemoteWiki(RemoteWiki): """ Returns the binary diff of the remote page named pagename, given from_rev and to_rev. """ result = self.connection.getDiff(pagename, from_rev, to_rev) - if isinstance(result, xmlrpclib.Fault): - raise Exception(result) result["diff"] = str(result["diff"]) # unmarshal Binary object return result def merge_diff(self, pagename, diff, local_rev, delta_remote_rev, last_remote_rev, interwiki_name): """ Merges the diff into the page on the remote side. 
""" result = self.connection.mergeDiff(pagename, xmlrpclib.Binary(diff), local_rev, delta_remote_rev, last_remote_rev, interwiki_name) - print result - if isinstance(result, xmlrpclib.Fault): - raise Exception(result) return result # Methods implementing the RemoteWiki interface @@ -418,14 +413,12 @@ class ActionClass: local_pagename = rp.local_name current_page = PageEditor(self.request, local_pagename) - if wikiutil.containsConflictMarker(current_page.get_raw_body()): - self.log_status(ActionClass.WARN, _("Skipped page %(pagename)s because of a local unresolved conflict.") % {"pagename": local_pagename}) - continue current_rev = current_page.get_real_rev() tags = TagStore(current_page) matching_tags = tags.fetch(iwid_full=remote.iwid_full) matching_tags.sort() + #print "------ TAGS: " + repr(matching_tags) + repr(tags.tags) if not matching_tags: remote_rev = None @@ -448,6 +441,13 @@ class ActionClass: diff = diff_result["diff"] current_remote_rev = diff_result["current"] + # do not sync if the conflict is remote and local, or if it is local + # and the page has never been syncronised + if (wikiutil.containsConflictMarker(current_page.get_raw_body()) + and (remote_rev is None or is_remote_conflict)): + self.log_status(ActionClass.WARN, _("Skipped page %(pagename)s because of a locally or remotely unresolved conflict.") % {"pagename": local_pagename}) + continue + if remote_rev is None: # set the remote_rev for the case without any tags self.log_status(ActionClass.INFO, _("This is the first synchronisation between this page and the remote wiki.")) remote_rev = current_remote_rev diff -r 97e9866e82c7 -r 7b128ea29fc4 MoinMoin/wikisync.py --- a/MoinMoin/wikisync.py Wed Aug 09 17:41:59 2006 +0200 +++ b/MoinMoin/wikisync.py Wed Aug 09 17:44:02 2006 +0200 @@ -130,7 +130,7 @@ class PickleTagStore(AbstractTagStore): assert False, "This case is not supported yet" # XXX iw_name = iwid_full[1] - return [t for t in self.tags if t.remote_wiki == iw_name] + return [t for t in 
self.tags if unpackLine(t.remote_wiki)[1] == iw_name] # currently we just have one implementation, so we do not need diff -r 97e9866e82c7 -r 7b128ea29fc4 docs/CHANGES.aschremmer --- a/docs/CHANGES.aschremmer Wed Aug 09 17:41:59 2006 +0200 +++ b/docs/CHANGES.aschremmer Wed Aug 09 17:44:02 2006 +0200 @@ -9,6 +9,7 @@ Branch moin/1.6-sync-aschremmer ToDo: * Implement actual syncronisation. * Add correct IWID_full handling. + * Reduce round-trip times by caching queries and using MultiCall objects. * Implement a cross-site authentication system, i.e. mainly an identity storage. * Clean up trailing whitespace. # HG changeset patch # User Alexander Schremmer # Date 1155141071 -7200 # Node ID e92c75fa13a96bb16f320b48d2ba7fc1e86f5a49 # Parent 7b128ea29fc4e01302c57ef9f9bfa57859eb8527 Introduced a new diff3 mode that skips equal lines and should produce less conflicts hopefully without any corruption. diff -r 7b128ea29fc4 -r e92c75fa13a9 MoinMoin/util/diff3.py --- a/MoinMoin/util/diff3.py Wed Aug 09 17:44:02 2006 +0200 +++ b/MoinMoin/util/diff3.py Wed Aug 09 18:31:11 2006 +0200 @@ -6,25 +6,28 @@ @license: GNU GPL, see COPYING for details. 
""" -def text_merge(old, other, new, allow_conflicts=1, - marker1='<<<<<<<<<<<<<<<<<<<<<<<<<\n', - marker2='=========================\n', - marker3='>>>>>>>>>>>>>>>>>>>>>>>>>\n'): +default_markers = ('<<<<<<<<<<<<<<<<<<<<<<<<<\n', + '=========================\n', + '>>>>>>>>>>>>>>>>>>>>>>>>>\n') + +def text_merge(old, other, new, allow_conflicts=1, *markers): """ do line by line diff3 merge with three strings """ result = merge(old.splitlines(1), other.splitlines(1), new.splitlines(1), - allow_conflicts, marker1, marker2, marker3) + allow_conflicts, *markers) return ''.join(result) -def merge(old, other, new, allow_conflicts=1, - marker1='<<<<<<<<<<<<<<<<<<<<<<<<<\n', - marker2='=========================\n', - marker3='>>>>>>>>>>>>>>>>>>>>>>>>>\n'): +def merge(old, other, new, allow_conflicts=1, *markers): """ do line by line diff3 merge input must be lists containing single lines """ + if not markers: + markers = default_markers + marker1, marker2, marker3 = markers + old_nr, other_nr, new_nr = 0, 0, 0 old_len, other_len, new_len = len(old), len(other), len(new) result = [] + while old_nr < old_len and other_nr < other_len and new_nr < new_len: # unchanged if old[old_nr] == other[other_nr] == new[new_nr]: @@ -33,6 +36,12 @@ def merge(old, other, new, allow_conflic other_nr += 1 new_nr += 1 else: + if allow_conflicts == 2: # experimental addition to the algorithm + if other[other_nr] == new[new_nr]: + result.append(new[new_nr]) + other_nr += 1 + new_nr += 1 + continue new_match = find_match(old, new, old_nr, new_nr) other_match = find_match(old, other, old_nr, other_nr) # new is changed @@ -100,13 +109,16 @@ def merge(old, other, new, allow_conflic pass # conflict else: - if not allow_conflicts: - return None - result.append(marker1) - result.extend(other[other_nr:]) - result.append(marker2) - result.extend(new[new_nr:]) - result.append(marker3) + if new == other: + result.extend(new[new_nr:]) + else: + if not allow_conflicts: + return None + result.append(marker1) 
+ result.extend(other[other_nr:]) + result.append(marker2) + result.extend(new[new_nr:]) + result.append(marker3) return result def tripple_match(old, other, new, other_match, new_match): @@ -255,4 +267,3 @@ AAA 014 if __name__ == '__main__': main() - # HG changeset patch # User Alexander Schremmer # Date 1155141179 -7200 # Node ID 9721b9e2a074434d1d3e499451332532460892f5 # Parent e92c75fa13a96bb16f320b48d2ba7fc1e86f5a49 Use the new diff3 mode in SyncPages. Updated my CHANGES file. diff -r e92c75fa13a9 -r 9721b9e2a074 MoinMoin/action/SyncPages.py --- a/MoinMoin/action/SyncPages.py Wed Aug 09 18:31:11 2006 +0200 +++ b/MoinMoin/action/SyncPages.py Wed Aug 09 18:32:59 2006 +0200 @@ -456,7 +456,7 @@ class ActionClass: old_contents = old_contents.encode("utf-8") # here, the actual merge happens - verynewtext = diff3.text_merge(old_contents, new_contents, current_page.get_raw_body(), 1, *conflict_markers) + verynewtext = diff3.text_merge(old_contents, new_contents, current_page.get_raw_body(), 2, *conflict_markers) new_local_rev = current_rev + 1 # XXX commit first? local_full_iwid = packLine([local.get_iwid(), local.get_interwiki_name()]) diff -r e92c75fa13a9 -r 9721b9e2a074 docs/CHANGES.aschremmer --- a/docs/CHANGES.aschremmer Wed Aug 09 18:31:11 2006 +0200 +++ b/docs/CHANGES.aschremmer Wed Aug 09 18:32:59 2006 +0200 @@ -31,6 +31,9 @@ Branch moin/1.6-sync-aschremmer no system pages etc.) * IWID support - i.e. every instance has a unique ID * InterWiki page editable in the wiki, modification detection based on mtimes + * SyncPages action + * XMLRPC functions may return Fault instances + * diff3 algorithm extenteded, a new mode should reduce the conflicts Bugfixes (only stuff that is buggy in moin/1.6 main branch): * Conflict resolution fixes. (merged into main) @@ -90,7 +93,8 @@ Week 32: Continued work on the merge log Week 32: Continued work on the merge logic, finished prefix handling. Added local conflict detection in SyncPages. Added logging support to SyncPages. 
Refactored conflict flag detection from the edit action into the PageEditor class. Enhanced XMLRPC server in Moin to allow - XMLRPC functions to return Fault instances. + XMLRPC functions to return Fault instances. Introduced a new diff3 mode that should reduce the + conflicts. 2006-07-18: the requested daily entry is missing here, see http://moinmoin.wikiwikiweb.de/GoogleSoc2006/BetterProgress 2006-07-19: the requested daily entry is missing here, see http://moinmoin.wikiwikiweb.de/GoogleSoc2006/BetterProgress # HG changeset patch # User Alexander Schremmer # Date 1155148345 -7200 # Node ID 7686daa0249c89309370aa6e97f141bc463d9b11 # Parent 9721b9e2a074434d1d3e499451332532460892f5 Small fixes, added direction support to get_pages. diff -r 9721b9e2a074 -r 7686daa0249c MoinMoin/action/SyncPages.py --- a/MoinMoin/action/SyncPages.py Wed Aug 09 18:32:59 2006 +0200 +++ b/MoinMoin/action/SyncPages.py Wed Aug 09 20:32:25 2006 +0200 @@ -173,7 +173,7 @@ class RemoteWiki(object): """ Returns the InterWiki ID. """ return NotImplemented - def get_pages(self): + def get_pages(self, **kwargs): """ Returns a list of SyncPage instances. 
""" return NotImplemented @@ -195,12 +195,14 @@ class MoinRemoteWiki(RemoteWiki): self.connection = self.createConnection() - version = self.connection.getMoinVersion() - if not isinstance(version, (tuple, list)): - raise UnsupportedWikiException(_("The remote version of MoinMoin is too old, the version 1.6 is required at least.")) - - remote_interwikiname = self.get_interwiki_name() - remote_iwid = self.connection.interwikiName()[1] + iw_list = self.connection.interwikiName() + + #version = self.connection.getMoinVersion() + #if not isinstance(version, (tuple, list)): + # raise UnsupportedWikiException(_("The remote version of MoinMoin is too old, the version 1.6 is required at least.")) + + self.remote_interwikiname = remote_interwikiname = iw_list[0] + self.remote_iwid = remote_iwid = iw_list[1] self.is_anonymous = remote_interwikiname is None if not self.is_anonymous and interwikiname != remote_interwikiname: raise UnsupportedWikiException(_("The remote wiki uses a different InterWiki name (%(remotename)s)" @@ -230,14 +232,16 @@ class MoinRemoteWiki(RemoteWiki): # Methods implementing the RemoteWiki interface def get_interwiki_name(self): - return self.connection.interwikiName()[0] + return self.remote_interwikiname def get_iwid(self): - return self.connection.interwikiName()[1] - - def get_pages(self): - pages = self.connection.getAllPagesEx({"include_revno": True, "include_deleted": True, - "exclude_non_writable": True}) # XXX fix when all 3 directions are supported + return self.remote_iwid + + def get_pages(self, **kwargs): + options = {"include_revno": True, + "include_deleted": True, + "exclude_non_writable": kwargs["exclude_non_writable"]} + pages = self.connection.getAllPagesEx(options) rpages = [] for name, revno in pages: normalised_name = normalise_pagename(name, self.prefix) @@ -278,7 +282,8 @@ class MoinLocalWiki(RemoteWiki): def get_iwid(self): return self.request.cfg.iwid - def get_pages(self): + def get_pages(self, **kwargs): + assert not kwargs 
return [x for x in [self.createSyncPage(x) for x in self.request.rootpage.getPageList(exists=0)] if x] def __repr__(self): @@ -379,7 +384,7 @@ class ActionClass: _ = self.request.getText l_pages = local.get_pages() - r_pages = remote.get_pages() + r_pages = remote.get_pages(exclude_non_writable=direction != DOWN) if params["groupList"]: pages_from_groupList = set(local.getGroupItems(params["groupList"])) @@ -402,8 +407,6 @@ class ActionClass: #r_new_pages = u", ".join([unicode(x) for x in remote_but_not_local]) #l_new_pages = u", ".join([unicode(x) for x in local_but_not_remote]) #raise ActionStatus("These pages are in the remote wiki, but not local: " + wikiutil.escape(r_new_pages) + "
These pages are in the local wiki, but not in the remote one: " + wikiutil.escape(l_new_pages)) - #if params["direction"] in (DOWN, BOTH): - # for rp in remote_but_not_local: # let's do the simple case first, can be refactored later to match all cases # XXX handle deleted pages @@ -435,7 +438,7 @@ class ActionClass: self.log_status(ActionClass.INFO, _("Synchronising page %(pagename)s with remote page %(remotepagename)s ...") % {"pagename": local_pagename, "remotepagename": rp.remote_name}) - diff_result = remote.get_diff(rp.remote_name, remote_rev, None) + diff_result = remote.get_diff(rp.remote_name, remote_rev, None) # XXX might raise ALREADY_CURRENT is_remote_conflict = diff_result["conflict"] assert diff_result["diffversion"] == 1 diff = diff_result["diff"] @@ -480,7 +483,6 @@ class ActionClass: self.log_status(ActionClass.WARN, _("Page merged with conflicts.")) # XXX release lock - # XXX untested def execute(pagename, request): # HG changeset patch # User Alexander Schremmer # Date 1155243799 -7200 # Node ID d1930ce1cc0f318746de769a65c9e27786b952ed # Parent 7686daa0249c89309370aa6e97f141bc463d9b11 Oops, classic unicode bug in mergeDiff. diff -r 7686daa0249c -r d1930ce1cc0f MoinMoin/xmlrpc/__init__.py --- a/MoinMoin/xmlrpc/__init__.py Wed Aug 09 20:32:25 2006 +0200 +++ b/MoinMoin/xmlrpc/__init__.py Thu Aug 10 23:03:19 2006 +0200 @@ -668,7 +668,7 @@ class XmlRpcBase: # write page try: - currentpage.saveText(newcontents.encode("utf-8"), last_remote_rev, comment=comment) + currentpage.saveText(newcontents.decode("utf-8"), last_remote_rev, comment=comment) except PageEditor.EditConflict: return LASTREV_INVALID # HG changeset patch # User Alexander Schremmer # Date 1155244781 -7200 # Node ID f3c8f750c1fec564d7c70704a4568185cc23f42d # Parent d1930ce1cc0f318746de769a65c9e27786b952ed Another minor unicode bug in getDiff. 
diff -r d1930ce1cc0f -r f3c8f750c1fe MoinMoin/xmlrpc/__init__.py --- a/MoinMoin/xmlrpc/__init__.py Thu Aug 10 23:03:19 2006 +0200 +++ b/MoinMoin/xmlrpc/__init__.py Thu Aug 10 23:19:41 2006 +0200 @@ -602,7 +602,7 @@ class XmlRpcBase: if to_rev is None: newpage = currentpage - newcontents = lambda: currentpage.get_raw_body() + newcontents = lambda: currentpage.get_raw_body_str() else: newpage = Page(self.request, pagename, rev=to_rev) newcontents = lambda: newpage.get_raw_body_str() # HG changeset patch # User Alexander Schremmer # Date 1155246082 -7200 # Node ID 5b015ced5609e0fa0c73631c2a6fdddcb50008c0 # Parent f3c8f750c1fec564d7c70704a4568185cc23f42d Fixed a corner case where both wikis' pages are equal. diff -r f3c8f750c1fe -r 5b015ced5609 MoinMoin/xmlrpc/__init__.py --- a/MoinMoin/xmlrpc/__init__.py Thu Aug 10 23:19:41 2006 +0200 +++ b/MoinMoin/xmlrpc/__init__.py Thu Aug 10 23:41:22 2006 +0200 @@ -669,6 +669,8 @@ class XmlRpcBase: # write page try: currentpage.saveText(newcontents.decode("utf-8"), last_remote_rev, comment=comment) + except PageEditor.Unchanged: # could happen in case of both wiki's pages being equal + pass except PageEditor.EditConflict: return LASTREV_INVALID # HG changeset patch # User Alexander Schremmer # Date 1155248374 -7200 # Node ID adebc2c73ef793a86992ac14732d1028f0853145 # Parent 5b015ced5609e0fa0c73631c2a6fdddcb50008c0 Added dump method to the AbstractTagStore. diff -r 5b015ced5609 -r adebc2c73ef7 MoinMoin/wikisync.py --- a/MoinMoin/wikisync.py Thu Aug 10 23:41:22 2006 +0200 +++ b/MoinMoin/wikisync.py Fri Aug 11 00:19:34 2006 +0200 @@ -49,6 +49,10 @@ class AbstractTagStore(object): """ Subclasses don't need to call this method. It is just here to enforce them having accept a page argument at least. """ pass + + def dump(self): + """ Returns all tags for a given item as a string. """ + return repr(self.get_all_tags()) def add(self, **kwargs): """ Adds a Tag object to the current TagStore. 
""" # HG changeset patch # User Alexander Schremmer # Date 1155248420 -7200 # Node ID a76ce7b951900e8bbe86afadf397887f94d0cc7c # Parent adebc2c73ef793a86992ac14732d1028f0853145 SyncPages fixes, allow for the case where only the local page is changed. diff -r adebc2c73ef7 -r a76ce7b95190 MoinMoin/action/SyncPages.py --- a/MoinMoin/action/SyncPages.py Fri Aug 11 00:19:34 2006 +0200 +++ b/MoinMoin/action/SyncPages.py Fri Aug 11 00:20:20 2006 +0200 @@ -284,7 +284,7 @@ class MoinLocalWiki(RemoteWiki): def get_pages(self, **kwargs): assert not kwargs - return [x for x in [self.createSyncPage(x) for x in self.request.rootpage.getPageList(exists=0)] if x] + return [x for x in [self.createSyncPage(x) for x in self.request.rootpage.getPageList(exists=1)] if x] def __repr__(self): return "" @@ -384,7 +384,7 @@ class ActionClass: _ = self.request.getText l_pages = local.get_pages() - r_pages = remote.get_pages(exclude_non_writable=direction != DOWN) + r_pages = remote.get_pages(exclude_non_writable=params["direction"] != DOWN) if params["groupList"]: pages_from_groupList = set(local.getGroupItems(params["groupList"])) @@ -438,11 +438,16 @@ class ActionClass: self.log_status(ActionClass.INFO, _("Synchronising page %(pagename)s with remote page %(remotepagename)s ...") % {"pagename": local_pagename, "remotepagename": rp.remote_name}) - diff_result = remote.get_diff(rp.remote_name, remote_rev, None) # XXX might raise ALREADY_CURRENT - is_remote_conflict = diff_result["conflict"] - assert diff_result["diffversion"] == 1 - diff = diff_result["diff"] - current_remote_rev = diff_result["current"] + if remote_rev != rp.remote_rev: + diff_result = remote.get_diff(rp.remote_name, remote_rev, None) # XXX might raise ALREADY_CURRENT + is_remote_conflict = diff_result["conflict"] + assert diff_result["diffversion"] == 1 + diff = diff_result["diff"] + current_remote_rev = diff_result["current"] + else: + current_remote_rev = remote_rev + is_remote_conflict = 
wikiutil.containsConflictMarker(old_contents.decode("utf-8")) + diff = None # do not sync if the conflict is remote and local, or if it is local # and the page has never been syncronised @@ -455,27 +460,38 @@ class ActionClass: self.log_status(ActionClass.INFO, _("This is the first synchronisation between this page and the remote wiki.")) remote_rev = current_remote_rev - new_contents = patch(old_contents, decompress(diff)).decode("utf-8") - old_contents = old_contents.encode("utf-8") + old_contents_dec = old_contents.decode("utf-8") + if diff is None: + new_contents = old_contents_dec + else: + new_contents = patch(old_contents, decompress(diff)).decode("utf-8") + old_contents = old_contents_dec # here, the actual merge happens + print "Merging %r, %r and %r" % (old_contents, new_contents, current_page.get_raw_body()) verynewtext = diff3.text_merge(old_contents, new_contents, current_page.get_raw_body(), 2, *conflict_markers) - new_local_rev = current_rev + 1 # XXX commit first? local_full_iwid = packLine([local.get_iwid(), local.get_interwiki_name()]) remote_full_iwid = packLine([remote.get_iwid(), remote.get_interwiki_name()]) diff = textdiff(new_contents.encode("utf-8"), verynewtext.encode("utf-8")) - very_current_remote_rev = remote.merge_diff(rp.remote_name, compress(diff), new_local_rev, remote_rev, current_remote_rev, local_full_iwid) comment = u"Local Merge - %r" % (remote.get_interwiki_name() or remote.get_iwid()) # XXX upgrade to write lock try: current_page.saveText(verynewtext, current_rev, comment=comment) + except PageEditor.Unchanged: + pass except PageEditor.EditConflict: # XXX remote rollback needed assert False, "You stumbled on a problem with the current storage system - I cannot lock pages" - tags.add(remote_wiki=remote_full_iwid, remote_rev=very_current_remote_rev, current_rev=new_local_rev) + new_local_rev = current_page.get_real_rev() + try: + very_current_remote_rev = remote.merge_diff(rp.remote_name, compress(diff), new_local_rev, 
remote_rev, current_remote_rev, local_full_iwid) + except Exception, e: + raise # XXX rollback + else: + tags.add(remote_wiki=remote_full_iwid, remote_rev=very_current_remote_rev, current_rev=new_local_rev) if not wikiutil.containsConflictMarker(verynewtext): self.log_status(ActionClass.INFO, _("Page successfully merged.")) # HG changeset patch # User Alexander Schremmer # Date 1155328094 -7200 # Node ID 5d555ec6b40a86e273810940b1c109e513e32dc4 # Parent a76ce7b951900e8bbe86afadf397887f94d0cc7c Fixed a bug that was hard to track down in SyncPages (some hours spent :-/). diff -r a76ce7b95190 -r 5d555ec6b40a MoinMoin/action/SyncPages.py --- a/MoinMoin/action/SyncPages.py Fri Aug 11 00:20:20 2006 +0200 +++ b/MoinMoin/action/SyncPages.py Fri Aug 11 22:28:14 2006 +0200 @@ -475,6 +475,8 @@ class ActionClass: remote_full_iwid = packLine([remote.get_iwid(), remote.get_interwiki_name()]) diff = textdiff(new_contents.encode("utf-8"), verynewtext.encode("utf-8")) + #print "Diff against %r" % new_contents.encode("utf-8") + comment = u"Local Merge - %r" % (remote.get_interwiki_name() or remote.get_iwid()) # XXX upgrade to write lock @@ -487,7 +489,7 @@ class ActionClass: assert False, "You stumbled on a problem with the current storage system - I cannot lock pages" new_local_rev = current_page.get_real_rev() try: - very_current_remote_rev = remote.merge_diff(rp.remote_name, compress(diff), new_local_rev, remote_rev, current_remote_rev, local_full_iwid) + very_current_remote_rev = remote.merge_diff(rp.remote_name, compress(diff), new_local_rev, current_remote_rev, current_remote_rev, local_full_iwid) except Exception, e: raise # XXX rollback else: diff -r a76ce7b95190 -r 5d555ec6b40a MoinMoin/xmlrpc/__init__.py --- a/MoinMoin/xmlrpc/__init__.py Fri Aug 11 00:20:20 2006 +0200 +++ b/MoinMoin/xmlrpc/__init__.py Fri Aug 11 22:28:14 2006 +0200 @@ -665,6 +665,7 @@ class XmlRpcBase: # generate the new page revision by applying the diff newcontents = patch(basepage.get_raw_body_str(), 
decompress(str(diff))) + #print "Diff against %r" % basepage.get_raw_body_str() # write page try: diff -r a76ce7b95190 -r 5d555ec6b40a docs/CHANGES.aschremmer --- a/docs/CHANGES.aschremmer Fri Aug 11 00:20:20 2006 +0200 +++ b/docs/CHANGES.aschremmer Fri Aug 11 22:28:14 2006 +0200 @@ -94,7 +94,7 @@ Week 32: Continued work on the merge log detection in SyncPages. Added logging support to SyncPages. Refactored conflict flag detection from the edit action into the PageEditor class. Enhanced XMLRPC server in Moin to allow XMLRPC functions to return Fault instances. Introduced a new diff3 mode that should reduce the - conflicts. + conflicts. Fixed hard to track down bugs in SyncPages. 2006-07-18: the requested daily entry is missing here, see http://moinmoin.wikiwikiweb.de/GoogleSoc2006/BetterProgress 2006-07-19: the requested daily entry is missing here, see http://moinmoin.wikiwikiweb.de/GoogleSoc2006/BetterProgress # HG changeset patch # User Alexander Schremmer # Date 1155414649 -7200 # Node ID ae9eb32b6899a0a68419d5270cfa22f3fff69134 # Parent 5d555ec6b40a86e273810940b1c109e513e32dc4 Refactored code, cleaned up some parts of the code, moved some classes to wikisync diff -r 5d555ec6b40a -r ae9eb32b6899 MoinMoin/action/SyncPages.py --- a/MoinMoin/action/SyncPages.py Fri Aug 11 22:28:14 2006 +0200 +++ b/MoinMoin/action/SyncPages.py Sat Aug 12 22:30:49 2006 +0200 @@ -10,9 +10,7 @@ import os import re -import zipfile import xmlrpclib -from datetime import datetime # Compatiblity to Python 2.3 try: @@ -26,7 +24,7 @@ from MoinMoin.PageEditor import PageEdit from MoinMoin.PageEditor import PageEditor, conflict_markers from MoinMoin.Page import Page from MoinMoin.wikidicts import Dict, Group -from MoinMoin.wikisync import TagStore +from MoinMoin.wikisync import TagStore, UnsupportedWikiException, SyncPage, MoinLocalWiki, MoinRemoteWiki from MoinMoin.util.bdiff import decompress, patch, compress, textdiff from MoinMoin.util import diff3 @@ -35,259 +33,7 @@ directions_map = 
{"up": UP, "down": DOWN directions_map = {"up": UP, "down": DOWN, "both": BOTH} -def normalise_pagename(page_name, prefix): - """ Checks if the page_name starts with the prefix. - Returns None if it does not, otherwise strips the prefix. - """ - if prefix: - if not page_name.startswith(prefix): - return None - else: - return page_name[len(prefix):] - else: - return page_name - - class ActionStatus(Exception): pass - -class UnsupportedWikiException(Exception): pass - -# XXX Move these classes to MoinMoin.wikisync -class SyncPage(object): - """ This class represents a page in one or two wiki(s). """ - def __init__(self, name, local_rev=None, remote_rev=None, local_name=None, remote_name=None): - """ Creates a SyncPage instance. - @param name: The canonical name of the page, without prefixes. - @param local_rev: The revision of the page in the local wiki. - @param remote_rev: The revision of the page in the remote wiki. - @param local_name: The page name of the page in the local wiki. - @param remote_name: The page name of the page in the remote wiki. - """ - self.name = name - self.local_rev = local_rev - self.remote_rev = remote_rev - self.local_name = local_name - self.remote_name = remote_name - assert local_rev or remote_rev - assert local_name or remote_name - - def __repr__(self): - return repr("" % unicode(self)) - - def __unicode__(self): - return u"%s[%s|%s]<%r:%r>" % (self.name, self.local_name, self.remote_name, self.local_rev, self.remote_rev) - - def __lt__(self, other): - return self.name < other.name - - def __hash__(self): - """ Ensures that the hash value of this page only depends on the canonical name. """ - return hash(self.name) - - def __eq__(self, other): - if not isinstance(other, SyncPage): - return false - return self.name == other.name - - def add_missing_pagename(self, local, remote): - """ Checks if the particular concrete page names are unknown and fills - them in. 
- """ - if self.local_name is None: - n_name = normalise_pagename(self.remote_name, remote.prefix) - assert n_name is not None - self.local_name = (local.prefix or "") + n_name - elif self.remote_name is None: - n_name = normalise_pagename(self.local_name, local.prefix) - assert n_name is not None - self.remote_name = (local.prefix or "") + n_name - - return self # makes using list comps easier - - def filter(cls, sp_list, func): - """ Returns all pages in sp_list that let func return True - for the canonical page name. - """ - return [x for x in sp_list if func(x.name)] - filter = classmethod(filter) - - def merge(cls, local_list, remote_list): - """ Merges two lists of SyncPages into one, migrating attributes like the names. """ - # map page names to SyncPage objects :-) - d = dict(zip(local_list, local_list)) - for sp in remote_list: - if sp in d: - d[sp].remote_rev = sp.remote_rev - d[sp].remote_name = sp.remote_name - else: - d[sp] = sp - return d.keys() - merge = classmethod(merge) - - def is_only_local(self): - """ Is true if the page is only in the local wiki. """ - return not self.remote_rev - - def is_only_remote(self): - """ Is true if the page is only in the remote wiki. """ - return not self.local_rev - - def is_local_and_remote(self): - """ Is true if the page is in both wikis. """ - return self.local_rev and self.remote_rev - - def iter_local_only(cls, sp_list): - """ Iterates over all pages that are local only. """ - for x in sp_list: - if x.is_only_local(): - yield x - iter_local_only = classmethod(iter_local_only) - - def iter_remote_only(cls, sp_list): - """ Iterates over all pages that are remote only. """ - for x in sp_list: - if x.is_only_remote(): - yield x - iter_remote_only = classmethod(iter_remote_only) - - def iter_local_and_remote(cls, sp_list): - """ Iterates over all pages that are local and remote. 
""" - for x in sp_list: - if x.is_local_and_remote(): - yield x - iter_local_and_remote = classmethod(iter_local_and_remote) - -class RemoteWiki(object): - """ This class should be the base for all implementations of remote wiki - classes. """ - - def __repr__(self): - """ Returns a representation of the instance for debugging purposes. """ - return NotImplemented - - def get_interwiki_name(self): - """ Returns the interwiki name of the other wiki. """ - return NotImplemented - - def get_iwid(self): - """ Returns the InterWiki ID. """ - return NotImplemented - - def get_pages(self, **kwargs): - """ Returns a list of SyncPage instances. """ - return NotImplemented - - -class MoinRemoteWiki(RemoteWiki): - """ Used for MoinMoin wikis reachable via XMLRPC. """ - def __init__(self, request, interwikiname, prefix): - self.request = request - self.prefix = prefix - _ = self.request.getText - - wikitag, wikiurl, wikitail, wikitag_bad = wikiutil.resolve_wiki(self.request, '%s:""' % (interwikiname, )) - self.wiki_url = wikiutil.mapURL(self.request, wikiurl) - self.valid = not wikitag_bad - self.xmlrpc_url = self.wiki_url + "?action=xmlrpc2" - if not self.valid: - self.connection = None - return - - self.connection = self.createConnection() - - iw_list = self.connection.interwikiName() - - #version = self.connection.getMoinVersion() - #if not isinstance(version, (tuple, list)): - # raise UnsupportedWikiException(_("The remote version of MoinMoin is too old, the version 1.6 is required at least.")) - - self.remote_interwikiname = remote_interwikiname = iw_list[0] - self.remote_iwid = remote_iwid = iw_list[1] - self.is_anonymous = remote_interwikiname is None - if not self.is_anonymous and interwikiname != remote_interwikiname: - raise UnsupportedWikiException(_("The remote wiki uses a different InterWiki name (%(remotename)s)" - " internally than you specified (%(localname)s).") % { - "remotename": wikiutil.escape(remote_interwikiname), "localname": 
wikiutil.escape(interwikiname)}) - - if self.is_anonymous: - self.iwid_full = packLine([remote_iwid]) - else: - self.iwid_full = packLine([remote_iwid, interwikiname]) - - def createConnection(self): - return xmlrpclib.ServerProxy(self.xmlrpc_url, allow_none=True, verbose=True) - - # Public methods - def get_diff(self, pagename, from_rev, to_rev): - """ Returns the binary diff of the remote page named pagename, given - from_rev and to_rev. """ - result = self.connection.getDiff(pagename, from_rev, to_rev) - result["diff"] = str(result["diff"]) # unmarshal Binary object - return result - - def merge_diff(self, pagename, diff, local_rev, delta_remote_rev, last_remote_rev, interwiki_name): - """ Merges the diff into the page on the remote side. """ - result = self.connection.mergeDiff(pagename, xmlrpclib.Binary(diff), local_rev, delta_remote_rev, last_remote_rev, interwiki_name) - return result - - # Methods implementing the RemoteWiki interface - def get_interwiki_name(self): - return self.remote_interwikiname - - def get_iwid(self): - return self.remote_iwid - - def get_pages(self, **kwargs): - options = {"include_revno": True, - "include_deleted": True, - "exclude_non_writable": kwargs["exclude_non_writable"]} - pages = self.connection.getAllPagesEx(options) - rpages = [] - for name, revno in pages: - normalised_name = normalise_pagename(name, self.prefix) - if normalised_name is None: - continue - rpages.append(SyncPage(normalised_name, remote_rev=revno, remote_name=name)) - return rpages - - def __repr__(self): - return "" % (self.wiki_url, self.valid) - - -class MoinLocalWiki(RemoteWiki): - """ Used for the current MoinMoin wiki. """ - def __init__(self, request, prefix): - self.request = request - self.prefix = prefix - - def getGroupItems(self, group_list): - """ Returns all page names that are listed on the page group_list. 
""" - pages = [] - for group_pagename in group_list: - pages.extend(Group(self.request, group_pagename).members()) - return [self.createSyncPage(x) for x in pages] - - def createSyncPage(self, page_name): - normalised_name = normalise_pagename(page_name, self.prefix) - if normalised_name is None: - return None - return SyncPage(normalised_name, local_rev=Page(self.request, page_name).get_real_rev(), local_name=page_name) - - # Public methods: - - # Methods implementing the RemoteWiki interface - def get_interwiki_name(self): - return self.request.cfg.interwikiname - - def get_iwid(self): - return self.request.cfg.iwid - - def get_pages(self, **kwargs): - assert not kwargs - return [x for x in [self.createSyncPage(x) for x in self.request.rootpage.getPageList(exists=1)] if x] - - def __repr__(self): - return "" class ActionClass: @@ -485,13 +231,14 @@ class ActionClass: except PageEditor.Unchanged: pass except PageEditor.EditConflict: - # XXX remote rollback needed assert False, "You stumbled on a problem with the current storage system - I cannot lock pages" + new_local_rev = current_page.get_real_rev() + try: very_current_remote_rev = remote.merge_diff(rp.remote_name, compress(diff), new_local_rev, current_remote_rev, current_remote_rev, local_full_iwid) except Exception, e: - raise # XXX rollback + raise # XXX rollback locally else: tags.add(remote_wiki=remote_full_iwid, remote_rev=very_current_remote_rev, current_rev=new_local_rev) diff -r 5d555ec6b40a -r ae9eb32b6899 MoinMoin/wikisync.py --- a/MoinMoin/wikisync.py Fri Aug 11 22:28:14 2006 +0200 +++ b/MoinMoin/wikisync.py Sat Aug 12 22:30:49 2006 +0200 @@ -7,14 +7,270 @@ """ import os +import xmlrpclib try: import cPickle as pickle except ImportError: import pickle + +from MoinMoin import wikiutil from MoinMoin.util import lock -from MoinMoin.packages import unpackLine +from MoinMoin.Page import Page +from MoinMoin.packages import unpackLine, packLine + + +def normalise_pagename(page_name, prefix): + """ Checks 
if the page_name starts with the prefix. + Returns None if it does not, otherwise strips the prefix. + """ + if prefix: + if not page_name.startswith(prefix): + return None + else: + return page_name[len(prefix):] + else: + return page_name + + +class UnsupportedWikiException(Exception): pass + + +class SyncPage(object): + """ This class represents a page in one or two wiki(s). """ + def __init__(self, name, local_rev=None, remote_rev=None, local_name=None, remote_name=None): + """ Creates a SyncPage instance. + @param name: The canonical name of the page, without prefixes. + @param local_rev: The revision of the page in the local wiki. + @param remote_rev: The revision of the page in the remote wiki. + @param local_name: The page name of the page in the local wiki. + @param remote_name: The page name of the page in the remote wiki. + """ + self.name = name + self.local_rev = local_rev + self.remote_rev = remote_rev + self.local_name = local_name + self.remote_name = remote_name + assert local_rev or remote_rev + assert local_name or remote_name + + def __repr__(self): + return repr("" % unicode(self)) + + def __unicode__(self): + return u"%s[%s|%s]<%r:%r>" % (self.name, self.local_name, self.remote_name, self.local_rev, self.remote_rev) + + def __lt__(self, other): + return self.name < other.name + + def __hash__(self): + """ Ensures that the hash value of this page only depends on the canonical name. """ + return hash(self.name) + + def __eq__(self, other): + if not isinstance(other, SyncPage): + return false + return self.name == other.name + + def add_missing_pagename(self, local, remote): + """ Checks if the particular concrete page names are unknown and fills + them in. 
+ """ + if self.local_name is None: + n_name = normalise_pagename(self.remote_name, remote.prefix) + assert n_name is not None + self.local_name = (local.prefix or "") + n_name + elif self.remote_name is None: + n_name = normalise_pagename(self.local_name, local.prefix) + assert n_name is not None + self.remote_name = (local.prefix or "") + n_name + + return self # makes using list comps easier + + def filter(cls, sp_list, func): + """ Returns all pages in sp_list that let func return True + for the canonical page name. + """ + return [x for x in sp_list if func(x.name)] + filter = classmethod(filter) + + def merge(cls, local_list, remote_list): + """ Merges two lists of SyncPages into one, migrating attributes like the names. """ + # map page names to SyncPage objects :-) + d = dict(zip(local_list, local_list)) + for sp in remote_list: + if sp in d: + d[sp].remote_rev = sp.remote_rev + d[sp].remote_name = sp.remote_name + else: + d[sp] = sp + return d.keys() + merge = classmethod(merge) + + def is_only_local(self): + """ Is true if the page is only in the local wiki. """ + return not self.remote_rev + + def is_only_remote(self): + """ Is true if the page is only in the remote wiki. """ + return not self.local_rev + + def is_local_and_remote(self): + """ Is true if the page is in both wikis. """ + return self.local_rev and self.remote_rev + + def iter_local_only(cls, sp_list): + """ Iterates over all pages that are local only. """ + for x in sp_list: + if x.is_only_local(): + yield x + iter_local_only = classmethod(iter_local_only) + + def iter_remote_only(cls, sp_list): + """ Iterates over all pages that are remote only. """ + for x in sp_list: + if x.is_only_remote(): + yield x + iter_remote_only = classmethod(iter_remote_only) + + def iter_local_and_remote(cls, sp_list): + """ Iterates over all pages that are local and remote. 
""" + for x in sp_list: + if x.is_local_and_remote(): + yield x + iter_local_and_remote = classmethod(iter_local_and_remote) + +class RemoteWiki(object): + """ This class should be the base for all implementations of remote wiki + classes. """ + + def __repr__(self): + """ Returns a representation of the instance for debugging purposes. """ + return NotImplemented + + def get_interwiki_name(self): + """ Returns the interwiki name of the other wiki. """ + return NotImplemented + + def get_iwid(self): + """ Returns the InterWiki ID. """ + return NotImplemented + + def get_pages(self, **kwargs): + """ Returns a list of SyncPage instances. """ + return NotImplemented + + +class MoinRemoteWiki(RemoteWiki): + """ Used for MoinMoin wikis reachable via XMLRPC. """ + def __init__(self, request, interwikiname, prefix): + self.request = request + self.prefix = prefix + _ = self.request.getText + + wikitag, wikiurl, wikitail, wikitag_bad = wikiutil.resolve_wiki(self.request, '%s:""' % (interwikiname, )) + self.wiki_url = wikiutil.mapURL(self.request, wikiurl) + self.valid = not wikitag_bad + self.xmlrpc_url = self.wiki_url + "?action=xmlrpc2" + if not self.valid: + self.connection = None + return + + self.connection = self.createConnection() + + try: + iw_list = self.connection.interwikiName() + except xmlrpclib.Fault, e: + raise UnsupportedWikiException(_("The remote version of MoinMoin is too old, the version 1.6 is required at least.")) + + self.remote_interwikiname = remote_interwikiname = iw_list[0] + self.remote_iwid = remote_iwid = iw_list[1] + self.is_anonymous = remote_interwikiname is None + if not self.is_anonymous and interwikiname != remote_interwikiname: + raise UnsupportedWikiException(_("The remote wiki uses a different InterWiki name (%(remotename)s)" + " internally than you specified (%(localname)s).") % { + "remotename": wikiutil.escape(remote_interwikiname), "localname": wikiutil.escape(interwikiname)}) + + if self.is_anonymous: + self.iwid_full = 
packLine([remote_iwid]) + else: + self.iwid_full = packLine([remote_iwid, interwikiname]) + + def createConnection(self): + return xmlrpclib.ServerProxy(self.xmlrpc_url, allow_none=True, verbose=True) + + # Public methods + def get_diff(self, pagename, from_rev, to_rev): + """ Returns the binary diff of the remote page named pagename, given + from_rev and to_rev. """ + result = self.connection.getDiff(pagename, from_rev, to_rev) + result["diff"] = str(result["diff"]) # unmarshal Binary object + return result + + def merge_diff(self, pagename, diff, local_rev, delta_remote_rev, last_remote_rev, interwiki_name): + """ Merges the diff into the page on the remote side. """ + result = self.connection.mergeDiff(pagename, xmlrpclib.Binary(diff), local_rev, delta_remote_rev, last_remote_rev, interwiki_name) + return result + + # Methods implementing the RemoteWiki interface + def get_interwiki_name(self): + return self.remote_interwikiname + + def get_iwid(self): + return self.remote_iwid + + def get_pages(self, **kwargs): + options = {"include_revno": True, + "include_deleted": True, + "exclude_non_writable": kwargs["exclude_non_writable"]} + pages = self.connection.getAllPagesEx(options) + rpages = [] + for name, revno in pages: + normalised_name = normalise_pagename(name, self.prefix) + if normalised_name is None: + continue + rpages.append(SyncPage(normalised_name, remote_rev=revno, remote_name=name)) + return rpages + + def __repr__(self): + return "" % (getattr(self, "wiki_url", Ellipsis), getattr(self, "valid", Ellipsis)) + + +class MoinLocalWiki(RemoteWiki): + """ Used for the current MoinMoin wiki. """ + def __init__(self, request, prefix): + self.request = request + self.prefix = prefix + + def getGroupItems(self, group_list): + """ Returns all page names that are listed on the page group_list. 
""" + pages = [] + for group_pagename in group_list: + pages.extend(Group(self.request, group_pagename).members()) + return [self.createSyncPage(x) for x in pages] + + def createSyncPage(self, page_name): + normalised_name = normalise_pagename(page_name, self.prefix) + if normalised_name is None: + return None + return SyncPage(normalised_name, local_rev=Page(self.request, page_name).get_real_rev(), local_name=page_name) + + # Public methods: + + # Methods implementing the RemoteWiki interface + def get_interwiki_name(self): + return self.request.cfg.interwikiname + + def get_iwid(self): + return self.request.cfg.iwid + + def get_pages(self, **kwargs): + assert not kwargs + return [x for x in [self.createSyncPage(x) for x in self.request.rootpage.getPageList(exists=1)] if x] + + def __repr__(self): + return "" class Tag(object): @@ -126,17 +382,17 @@ class PickleTagStore(AbstractTagStore): self.tags = [] self.commit() - def fetch(self, iwid_full=None, iw_name=None): - assert bool(iwid_full) ^ bool(iw_name) - if iwid_full: - iwid_full = unpackLine(iwid_full) - if len(iwid_full) == 1: - assert False, "This case is not supported yet" # XXX - iw_name = iwid_full[1] - - return [t for t in self.tags if unpackLine(t.remote_wiki)[1] == iw_name] + def fetch(self, iwid_full): + iwid_full = unpackLine(iwid_full) + matching_tags = [] + for t in self.tags: + t_iwid_full = unpackLine(t.remote_wiki) + if ((t_iwid_full[0] == iwid_full[0]) # either match IWID or IW name + or (len(t_iwid_full) == 2 and len(iwid_full) == 2 and t_iwid_full[1] == iwid_full[1])): + matching_tags.append(t) + return matching_tags # currently we just have one implementation, so we do not need # a factory method -TagStore = PickleTagStore \ No newline at end of file +TagStore = PickleTagStore diff -r 5d555ec6b40a -r ae9eb32b6899 docs/CHANGES.aschremmer --- a/docs/CHANGES.aschremmer Fri Aug 11 22:28:14 2006 +0200 +++ b/docs/CHANGES.aschremmer Sat Aug 12 22:30:49 2006 +0200 @@ -2,20 +2,21 @@ Branch 
moin/1.6-sync-aschremmer =============================== Known main issues: + * How to handle renames/deletes? * How will we store tags? (Metadata support would be handy) - * How to handle renames/deletes? - * How to handle colliding/empty interwiki names? + (currently done in Pickle files) ToDo: - * Implement actual syncronisation. - * Add correct IWID_full handling. + * Implement all syncronisation cases (all directions, all 3 page sets). * Reduce round-trip times by caching queries and using MultiCall objects. + * Attach the status information to the job page. * Implement a cross-site authentication system, i.e. mainly an identity storage. * Clean up trailing whitespace. * Add page locking, i.e. use the one in the new storage layer. * Check what needs to be documented on MoinMaster. * Search for XXX + * Put author names into the comment field, transmit mimetypes. New Features: * XMLRPC method to return the Moin version @@ -94,7 +95,8 @@ Week 32: Continued work on the merge log detection in SyncPages. Added logging support to SyncPages. Refactored conflict flag detection from the edit action into the PageEditor class. Enhanced XMLRPC server in Moin to allow XMLRPC functions to return Fault instances. Introduced a new diff3 mode that should reduce the - conflicts. Fixed hard to track down bugs in SyncPages. + conflicts. Fixed hard to track down bugs in SyncPages. Store full IWIDs and match by + either of both components when searching for tags. 
2006-07-18: the requested daily entry is missing here, see http://moinmoin.wikiwikiweb.de/GoogleSoc2006/BetterProgress 2006-07-19: the requested daily entry is missing here, see http://moinmoin.wikiwikiweb.de/GoogleSoc2006/BetterProgress # HG changeset patch # User Alexander Schremmer # Date 1155416028 -7200 # Node ID 16bed977b05479b463867b417c4ee5db435a7802 # Parent ae9eb32b6899a0a68419d5270cfa22f3fff69134 Added support for underlay page ignore and prefix filtering in getAllPagesEx, transmit the page list filtered by prefix. Ignore underlay pages in general. diff -r ae9eb32b6899 -r 16bed977b054 MoinMoin/Page.py --- a/MoinMoin/Page.py Sat Aug 12 22:30:49 2006 +0200 +++ b/MoinMoin/Page.py Sat Aug 12 22:53:48 2006 +0200 @@ -636,7 +636,7 @@ class Page: return count - def getPageList(self, user=None, exists=1, filter=None): + def getPageList(self, user=None, exists=1, filter=None, include_underlay=True): """ List user readable pages under current page Currently only request.rootpage is used to list pages, but if we @@ -693,8 +693,14 @@ class Page: if filter and not filter(name): continue + page = Page(request, name) + + # Filter underlay pages + if not include_underlay and page.getPageStatus()[0]: # is an underlay page + continue + # Filter deleted pages - if exists and not Page(request, name).exists(): + if exists and not page.exists(): continue # Filter out page user may not read. 
diff -r ae9eb32b6899 -r 16bed977b054 MoinMoin/action/SyncPages.py --- a/MoinMoin/action/SyncPages.py Sat Aug 12 22:30:49 2006 +0200 +++ b/MoinMoin/action/SyncPages.py Sat Aug 12 22:53:48 2006 +0200 @@ -139,11 +139,11 @@ class ActionClass: m_pages = [elem.add_missing_pagename(local, remote) for elem in SyncPage.merge(l_pages, r_pages)] - print "Got %i local, %i remote pages, %i merged pages" % (len(l_pages), len(r_pages), len(m_pages)) - + print "Got %i local, %i remote pages, %i merged pages" % (len(l_pages), len(r_pages), len(m_pages)) # XXX remove + if params["pageMatch"]: m_pages = SyncPage.filter(m_pages, params["pageMatch"].match) - print "After filtering: Got %i merges pages" % (len(m_pages), ) + print "After filtering: Got %i merges pages" % (len(m_pages), ) # XXX remove on_both_sides = list(SyncPage.iter_local_and_remote(m_pages)) remote_but_not_local = list(SyncPage.iter_remote_only(m_pages)) diff -r ae9eb32b6899 -r 16bed977b054 MoinMoin/wikisync.py --- a/MoinMoin/wikisync.py Sat Aug 12 22:30:49 2006 +0200 +++ b/MoinMoin/wikisync.py Sat Aug 12 22:53:48 2006 +0200 @@ -223,7 +223,9 @@ class MoinRemoteWiki(RemoteWiki): def get_pages(self, **kwargs): options = {"include_revno": True, "include_deleted": True, - "exclude_non_writable": kwargs["exclude_non_writable"]} + "exclude_non_writable": kwargs["exclude_non_writable"], + "include_underlay": False, + "prefix": self.prefix} pages = self.connection.getAllPagesEx(options) rpages = [] for name, revno in pages: @@ -267,10 +269,22 @@ class MoinLocalWiki(RemoteWiki): def get_pages(self, **kwargs): assert not kwargs - return [x for x in [self.createSyncPage(x) for x in self.request.rootpage.getPageList(exists=1)] if x] + if self.prefix: + page_filter = lambda name,prefix=self.prefix: name.startswith(prefix) + else: + page_filter = lambda x: True + pages = [] + for x in self.request.rootpage.getPageList(exists=1, include_underlay=False, filter=page_filter): + sp = self.createSyncPage(x) + if sp: + pages.append(sp) + 
return pages def __repr__(self): return "" + + +# ------------------ Tags ------------------ class Tag(object): diff -r ae9eb32b6899 -r 16bed977b054 MoinMoin/xmlrpc/__init__.py --- a/MoinMoin/xmlrpc/__init__.py Sat Aug 12 22:30:49 2006 +0200 +++ b/MoinMoin/xmlrpc/__init__.py Sat Aug 12 22:53:48 2006 +0200 @@ -239,11 +239,13 @@ class XmlRpcBase: include_revno:: set it to True if you want to have lists with [pagename, revno] include_deleted:: set it to True if you want to include deleted pages exclude_non_writable:: do not include pages that the current user may not write to + include_underlay:: return underlay pagenames as well + prefix:: the page name must begin with this prefix to be included @rtype: list @return: a list of all pages. """ options = {"include_system": True, "include_revno": False, "include_deleted": False, - "exclude_non_writable": False} + "exclude_non_writable": False, "include_underlay": True, "prefix": ""} if opts is not None: options.update(opts) @@ -255,7 +257,11 @@ class XmlRpcBase: if options["exclude_non_writable"]: filter = lambda name, filter=filter: filter(name) and self.request.user.may.write(name) - pagelist = self.request.rootpage.getPageList(filter=filter, exists=not options["include_deleted"]) + if options["prefix"]: + filter = lambda name, filter=filter, prefix=options["prefix"]: filter(name) and name.startswith(prefix) + + pagelist = self.request.rootpage.getPageList(filter=filter, exists=not options["include_deleted"], + include_underlay=options["include_underlay"]) if options['include_revno']: return [[self._outstr(x), Page(self.request, x).get_real_rev()] for x in pagelist] diff -r ae9eb32b6899 -r 16bed977b054 docs/CHANGES.aschremmer --- a/docs/CHANGES.aschremmer Sat Aug 12 22:30:49 2006 +0200 +++ b/docs/CHANGES.aschremmer Sat Aug 12 22:53:48 2006 +0200 @@ -8,6 +8,7 @@ Branch moin/1.6-sync-aschremmer ToDo: * Implement all syncronisation cases (all directions, all 3 page sets). 
+ * Test with prefixes * Reduce round-trip times by caching queries and using MultiCall objects. * Attach the status information to the job page. * Implement a cross-site authentication system, i.e. mainly an @@ -96,7 +97,8 @@ Week 32: Continued work on the merge log detection from the edit action into the PageEditor class. Enhanced XMLRPC server in Moin to allow XMLRPC functions to return Fault instances. Introduced a new diff3 mode that should reduce the conflicts. Fixed hard to track down bugs in SyncPages. Store full IWIDs and match by - either of both components when searching for tags. + either of both components when searching for tags. Ignore underlay pages. Filter the remote page list by + the prefix on the remote side. 2006-07-18: the requested daily entry is missing here, see http://moinmoin.wikiwikiweb.de/GoogleSoc2006/BetterProgress 2006-07-19: the requested daily entry is missing here, see http://moinmoin.wikiwikiweb.de/GoogleSoc2006/BetterProgress # HG changeset patch # User Alexander Schremmer # Date 1155417657 -7200 # Node ID e313c2187271ae752bd18e1d7125d4f937529c63 # Parent 16bed977b05479b463867b417c4ee5db435a7802 Show an error message for the direction UP. diff -r 16bed977b054 -r e313c2187271 MoinMoin/action/SyncPages.py --- a/MoinMoin/action/SyncPages.py Sat Aug 12 22:53:48 2006 +0200 +++ b/MoinMoin/action/SyncPages.py Sat Aug 12 23:20:57 2006 +0200 @@ -101,6 +101,9 @@ class ActionClass: params = self.fix_params(self.parse_page()) try: + if params["direction"] == UP: + raise ActionStatus(_("The only supported directions are BOTH and DOWN.")) + if not self.request.cfg.interwikiname: raise ActionStatus(_("Please set an interwikiname in your wikiconfig (see HelpOnConfiguration) to be able to use this action.")) # HG changeset patch # User Alexander Schremmer # Date 1155419654 -7200 # Node ID 064778edb38cbe40fc27e46c0c92368b1505c285 # Parent e313c2187271ae752bd18e1d7125d4f937529c63 Started integration of direction DOWN support. 
diff -r e313c2187271 -r 064778edb38c MoinMoin/action/SyncPages.py --- a/MoinMoin/action/SyncPages.py Sat Aug 12 23:20:57 2006 +0200 +++ b/MoinMoin/action/SyncPages.py Sat Aug 12 23:54:14 2006 +0200 @@ -27,6 +27,7 @@ from MoinMoin.wikisync import TagStore, from MoinMoin.wikisync import TagStore, UnsupportedWikiException, SyncPage, MoinLocalWiki, MoinRemoteWiki from MoinMoin.util.bdiff import decompress, patch, compress, textdiff from MoinMoin.util import diff3 + # directions UP, DOWN, BOTH = range(3) @@ -131,9 +132,10 @@ class ActionClass: def sync(self, params, local, remote): """ This method does the syncronisation work. """ _ = self.request.getText + direction = params["direction"] l_pages = local.get_pages() - r_pages = remote.get_pages(exclude_non_writable=params["direction"] != DOWN) + r_pages = remote.get_pages(exclude_non_writable=direction != DOWN) if params["groupList"]: pages_from_groupList = set(local.getGroupItems(params["groupList"])) @@ -180,7 +182,7 @@ class ActionClass: newest_tag = matching_tags[-1] local_rev = newest_tag.current_rev remote_rev = newest_tag.remote_rev - if remote_rev == rp.remote_rev and local_rev == current_rev: + if remote_rev == rp.remote_rev and (direction == DOWN or local_rev == current_rev): continue # no changes done, next page old_page = Page(self.request, local_pagename, rev=local_rev) old_contents = old_page.get_raw_body_str() # HG changeset patch # User Alexander Schremmer # Date 1155468330 -7200 # Node ID ff08338e67feccd3d01d844a8cbf1003588a0ee0 # Parent b34b3fad81616db1535107332c792742ee128660 Filter the pagelists by pageList on both sides (speedup). Marked a few operations as direct API accesses. 
diff -r b34b3fad8161 -r ff08338e67fe MoinMoin/action/SyncPages.py --- a/MoinMoin/action/SyncPages.py Sat Aug 12 23:57:39 2006 +0200 +++ b/MoinMoin/action/SyncPages.py Sun Aug 13 13:25:30 2006 +0200 @@ -81,7 +81,6 @@ class ActionClass: if params["pageList"] is not None: params["pageMatch"] = u'|'.join([r'^%s$' % re.escape(name) for name in params["pageList"]]) - del params["pageList"] if params["pageMatch"] is not None: params["pageMatch"] = re.compile(params["pageMatch"], re.U) @@ -89,6 +88,7 @@ class ActionClass: # we do not support matching or listing pages if there is a group of pages if params["groupList"]: params["pageMatch"] = None + params["pageList"] = None return params @@ -111,9 +111,9 @@ class ActionClass: if not params["remoteWiki"]: raise ActionStatus(_("Incorrect parameters. Please supply at least the ''remoteWiki'' parameter.")) - local = MoinLocalWiki(self.request, params["localPrefix"]) - try: - remote = MoinRemoteWiki(self.request, params["remoteWiki"], params["remotePrefix"]) + local = MoinLocalWiki(self.request, params["localPrefix"], params["pageList"]) + try: + remote = MoinRemoteWiki(self.request, params["remoteWiki"], params["remotePrefix"], params["pageList"]) except UnsupportedWikiException, (msg, ): raise ActionStatus(msg) @@ -166,7 +166,7 @@ class ActionClass: print "Processing %r" % rp local_pagename = rp.local_name - current_page = PageEditor(self.request, local_pagename) + current_page = PageEditor(self.request, local_pagename) # YYY direct access current_rev = current_page.get_real_rev() tags = TagStore(current_page) @@ -184,8 +184,7 @@ class ActionClass: remote_rev = newest_tag.remote_rev if remote_rev == rp.remote_rev and (direction == DOWN or local_rev == current_rev): continue # no changes done, next page - old_page = Page(self.request, local_pagename, rev=local_rev) - old_contents = old_page.get_raw_body_str() + old_contents = Page(self.request, local_pagename, rev=local_rev).get_raw_body_str() # YYY direct access 
self.log_status(ActionClass.INFO, _("Synchronising page %(pagename)s with remote page %(remotepagename)s ...") % {"pagename": local_pagename, "remotepagename": rp.remote_name}) @@ -232,7 +231,7 @@ class ActionClass: # XXX upgrade to write lock try: - current_page.saveText(verynewtext, current_rev, comment=comment) + current_page.saveText(verynewtext, current_rev, comment=comment) # YYY direct access except PageEditor.Unchanged: pass except PageEditor.EditConflict: diff -r b34b3fad8161 -r ff08338e67fe MoinMoin/wikisync.py --- a/MoinMoin/wikisync.py Sat Aug 12 23:57:39 2006 +0200 +++ b/MoinMoin/wikisync.py Sun Aug 13 13:25:30 2006 +0200 @@ -164,9 +164,10 @@ class RemoteWiki(object): class MoinRemoteWiki(RemoteWiki): """ Used for MoinMoin wikis reachable via XMLRPC. """ - def __init__(self, request, interwikiname, prefix): + def __init__(self, request, interwikiname, prefix, pagelist): self.request = request self.prefix = prefix + self.pagelist = pagelist _ = self.request.getText wikitag, wikiurl, wikitail, wikitag_bad = wikiutil.resolve_wiki(self.request, '%s:""' % (interwikiname, )) @@ -225,7 +226,8 @@ class MoinRemoteWiki(RemoteWiki): "include_deleted": True, "exclude_non_writable": kwargs["exclude_non_writable"], "include_underlay": False, - "prefix": self.prefix} + "prefix": self.prefix, + "pagelist": self.pagelist} pages = self.connection.getAllPagesEx(options) rpages = [] for name, revno in pages: @@ -241,9 +243,10 @@ class MoinRemoteWiki(RemoteWiki): class MoinLocalWiki(RemoteWiki): """ Used for the current MoinMoin wiki. """ - def __init__(self, request, prefix): + def __init__(self, request, prefix, pagelist): self.request = request self.prefix = prefix + self.pagelist = pagelist def getGroupItems(self, group_list): """ Returns all page names that are listed on the page group_list. 
""" @@ -269,8 +272,14 @@ class MoinLocalWiki(RemoteWiki): def get_pages(self, **kwargs): assert not kwargs - if self.prefix: - page_filter = lambda name,prefix=self.prefix: name.startswith(prefix) + if self.prefix or self.pagelist: + def page_filter(name, prefix=(self.prefix or ""), pagelist=self.pagelist): + n_name = normalise_pagename(name, prefix) + if not n_name: + return False + if not pagelist: + return True + return n_name in pagelist else: page_filter = lambda x: True pages = [] diff -r b34b3fad8161 -r ff08338e67fe MoinMoin/xmlrpc/__init__.py --- a/MoinMoin/xmlrpc/__init__.py Sat Aug 12 23:57:39 2006 +0200 +++ b/MoinMoin/xmlrpc/__init__.py Sun Aug 13 13:25:30 2006 +0200 @@ -244,23 +244,33 @@ class XmlRpcBase: @rtype: list @return: a list of all pages. """ + from MoinMoin.wikisync import normalise_pagename options = {"include_system": True, "include_revno": False, "include_deleted": False, - "exclude_non_writable": False, "include_underlay": True, "prefix": ""} + "exclude_non_writable": False, "include_underlay": True, "prefix": "", + "pagelist": None} if opts is not None: options.update(opts) if not options["include_system"]: - filter = lambda name: not wikiutil.isSystemPage(self.request, name) - else: - filter = lambda name: True + p_filter = lambda name: not wikiutil.isSystemPage(self.request, name) + else: + p_filter = lambda name: True if options["exclude_non_writable"]: - filter = lambda name, filter=filter: filter(name) and self.request.user.may.write(name) - - if options["prefix"]: - filter = lambda name, filter=filter, prefix=options["prefix"]: filter(name) and name.startswith(prefix) - - pagelist = self.request.rootpage.getPageList(filter=filter, exists=not options["include_deleted"], + p_filter = lambda name, p_filter=p_filter: p_filter(name) and self.request.user.may.write(name) + + if options["prefix"] or options["pagelist"]: + def p_filter(name, p_filter=p_filter, prefix=(options["prefix"] or ""), pagelist=options["pagelist"]): + if not 
p_filter(name): + return False + n_name = normalise_pagename(name, prefix) + if not n_name: + return False + if not pagelist: + return True + return n_name in pagelist + + pagelist = self.request.rootpage.getPageList(filter=p_filter, exists=not options["include_deleted"], include_underlay=options["include_underlay"]) if options['include_revno']: diff -r b34b3fad8161 -r ff08338e67fe docs/CHANGES.aschremmer --- a/docs/CHANGES.aschremmer Sat Aug 12 23:57:39 2006 +0200 +++ b/docs/CHANGES.aschremmer Sun Aug 13 13:25:30 2006 +0200 @@ -17,6 +17,7 @@ Branch moin/1.6-sync-aschremmer * Add page locking, i.e. use the one in the new storage layer. * Check what needs to be documented on MoinMaster. * Search for XXX + * Maybe refactor YYY into MoinLocalWiki * Put author names into the comment field, transmit mimetypes. New Features: @@ -98,7 +99,7 @@ Week 32: Continued work on the merge log XMLRPC functions to return Fault instances. Introduced a new diff3 mode that should reduce the conflicts. Fixed hard to track down bugs in SyncPages. Store full IWIDs and match by either of both components when searching for tags. Ignore underlay pages. Filter the remote page list by - the prefix on the remote side. + the prefix and the pageList on the remote side. 2006-07-18: the requested daily entry is missing here, see http://moinmoin.wikiwikiweb.de/GoogleSoc2006/BetterProgress 2006-07-19: the requested daily entry is missing here, see http://moinmoin.wikiwikiweb.de/GoogleSoc2006/BetterProgress # HG changeset patch # User Alexander Schremmer # Date 1155477928 -7200 # Node ID eb9e5e21b0e548ba50815a9cf74ce2f584c32151 # Parent ff08338e67feccd3d01d844a8cbf1003588a0ee0 Implemented the DOWN direction, refactored direction handling. The synctags of older syncs are invalid now, you have to delete them. 
diff -r ff08338e67fe -r eb9e5e21b0e5 MoinMoin/action/SyncPages.py --- a/MoinMoin/action/SyncPages.py Sun Aug 13 13:25:30 2006 +0200 +++ b/MoinMoin/action/SyncPages.py Sun Aug 13 16:05:28 2006 +0200 @@ -24,13 +24,13 @@ from MoinMoin.PageEditor import PageEdit from MoinMoin.PageEditor import PageEditor, conflict_markers from MoinMoin.Page import Page from MoinMoin.wikidicts import Dict, Group -from MoinMoin.wikisync import TagStore, UnsupportedWikiException, SyncPage, MoinLocalWiki, MoinRemoteWiki +from MoinMoin.wikisync import (TagStore, UnsupportedWikiException, SyncPage, + MoinLocalWiki, MoinRemoteWiki, UP, DOWN, BOTH) from MoinMoin.util.bdiff import decompress, patch, compress, textdiff from MoinMoin.util import diff3 -# directions -UP, DOWN, BOTH = range(3) +# map sync directions directions_map = {"up": UP, "down": DOWN, "both": BOTH} @@ -45,6 +45,7 @@ class ActionClass: self.pagename = pagename self.page = Page(request, pagename) self.status = [] + request.flush() def log_status(self, level, message): """ Appends the message with a given importance level to the internal log. 
""" @@ -70,7 +71,7 @@ class ActionClass: if options["groupList"] is not None: options["groupList"] = unpackLine(options["groupList"], ",") - options["direction"] = directions_map.get(options["direction"], BOTH) + options["direction"] = directions_map.get(options["direction"].lower(), BOTH) return options @@ -170,7 +171,11 @@ class ActionClass: current_rev = current_page.get_real_rev() tags = TagStore(current_page) - matching_tags = tags.fetch(iwid_full=remote.iwid_full) + if direction == BOTH: + match_direction = direction + else: + match_direction = None + matching_tags = tags.fetch(iwid_full=remote.iwid_full,direction=match_direction) matching_tags.sort() #print "------ TAGS: " + repr(matching_tags) + repr(tags.tags) @@ -188,6 +193,12 @@ class ActionClass: self.log_status(ActionClass.INFO, _("Synchronising page %(pagename)s with remote page %(remotepagename)s ...") % {"pagename": local_pagename, "remotepagename": rp.remote_name}) + if direction == DOWN: + remote_rev = None # always fetch the full page, ignore remote conflict check + patch_base_contents = "" + else: + patch_base_contents = old_contents + if remote_rev != rp.remote_rev: diff_result = remote.get_diff(rp.remote_name, remote_rev, None) # XXX might raise ALREADY_CURRENT is_remote_conflict = diff_result["conflict"] @@ -206,20 +217,17 @@ class ActionClass: self.log_status(ActionClass.WARN, _("Skipped page %(pagename)s because of a locally or remotely unresolved conflict.") % {"pagename": local_pagename}) continue - if remote_rev is None: # set the remote_rev for the case without any tags + if remote_rev is None and direction == BOTH: self.log_status(ActionClass.INFO, _("This is the first synchronisation between this page and the remote wiki.")) - remote_rev = current_remote_rev - - old_contents_dec = old_contents.decode("utf-8") + if diff is None: - new_contents = old_contents_dec - else: - new_contents = patch(old_contents, decompress(diff)).decode("utf-8") - old_contents = old_contents_dec + 
new_contents = old_contents.decode("utf-8") + else: + new_contents = patch(patch_base_contents, decompress(diff)).decode("utf-8") # here, the actual merge happens - print "Merging %r, %r and %r" % (old_contents, new_contents, current_page.get_raw_body()) - verynewtext = diff3.text_merge(old_contents, new_contents, current_page.get_raw_body(), 2, *conflict_markers) + print "Merging %r, %r and %r" % (old_contents.decode("utf-8"), new_contents, current_page.get_raw_body()) + verynewtext = diff3.text_merge(old_contents.decode("utf-8"), new_contents, current_page.get_raw_body(), 2, *conflict_markers) local_full_iwid = packLine([local.get_iwid(), local.get_interwiki_name()]) remote_full_iwid = packLine([remote.get_iwid(), remote.get_interwiki_name()]) @@ -239,12 +247,15 @@ class ActionClass: new_local_rev = current_page.get_real_rev() - try: - very_current_remote_rev = remote.merge_diff(rp.remote_name, compress(diff), new_local_rev, current_remote_rev, current_remote_rev, local_full_iwid) - except Exception, e: - raise # XXX rollback locally - else: - tags.add(remote_wiki=remote_full_iwid, remote_rev=very_current_remote_rev, current_rev=new_local_rev) + if direction == BOTH: + try: + very_current_remote_rev = remote.merge_diff(rp.remote_name, compress(diff), new_local_rev, current_remote_rev, current_remote_rev, local_full_iwid) + except Exception, e: + raise # XXX rollback locally and do not tag locally + else: + very_current_remote_rev = current_remote_rev + + tags.add(remote_wiki=remote_full_iwid, remote_rev=very_current_remote_rev, current_rev=new_local_rev, direction=direction) if not wikiutil.containsConflictMarker(verynewtext): self.log_status(ActionClass.INFO, _("Page successfully merged.")) diff -r ff08338e67fe -r eb9e5e21b0e5 MoinMoin/wikisync.py --- a/MoinMoin/wikisync.py Sun Aug 13 13:25:30 2006 +0200 +++ b/MoinMoin/wikisync.py Sun Aug 13 16:05:28 2006 +0200 @@ -19,6 +19,10 @@ from MoinMoin.util import lock from MoinMoin.util import lock from MoinMoin.Page 
import Page from MoinMoin.packages import unpackLine, packLine + + +# sync directions +UP, DOWN, BOTH = range(3) def normalise_pagename(page_name, prefix): @@ -299,17 +303,19 @@ class Tag(object): class Tag(object): """ This class is used to store information about merging state. """ - def __init__(self, remote_wiki, remote_rev, current_rev): + def __init__(self, remote_wiki, remote_rev, current_rev, direction): """ Creates a new Tag. @param remote_wiki: The identifier of the remote wiki. @param remote_rev: The revision number on the remote end. @param current_rev: The related local revision. + @param direction: The direction of the sync, encoded as an integer. """ assert isinstance(remote_wiki, str) and isinstance(remote_rev, int) and isinstance(current_rev, int) self.remote_wiki = remote_wiki self.remote_rev = remote_rev self.current_rev = current_rev + self.direction = direction def __repr__(self): return u"" % (self.remote_wiki, self.remote_rev, self.current_rev) @@ -405,13 +411,14 @@ class PickleTagStore(AbstractTagStore): self.tags = [] self.commit() - def fetch(self, iwid_full): + def fetch(self, iwid_full, direction=None): iwid_full = unpackLine(iwid_full) matching_tags = [] for t in self.tags: t_iwid_full = unpackLine(t.remote_wiki) if ((t_iwid_full[0] == iwid_full[0]) # either match IWID or IW name - or (len(t_iwid_full) == 2 and len(iwid_full) == 2 and t_iwid_full[1] == iwid_full[1])): + or (len(t_iwid_full) == 2 and len(iwid_full) == 2 and t_iwid_full[1] == iwid_full[1]) + ) and (direction is None or t.direction == direction): matching_tags.append(t) return matching_tags diff -r ff08338e67fe -r eb9e5e21b0e5 MoinMoin/xmlrpc/__init__.py --- a/MoinMoin/xmlrpc/__init__.py Sun Aug 13 13:25:30 2006 +0200 +++ b/MoinMoin/xmlrpc/__init__.py Sun Aug 13 16:05:28 2006 +0200 @@ -580,6 +580,7 @@ class XmlRpcBase: def xmlrpc_getDiff(self, pagename, from_rev, to_rev): """ Gets the binary difference between two page revisions. See MoinMoin:WikiSyncronisation. 
""" from MoinMoin.util.bdiff import textdiff, compress + from MoinMoin.wikisync import BOTH pagename = self._instr(pagename) @@ -694,7 +695,7 @@ class XmlRpcBase: current_rev = currentpage.get_real_rev() tags = TagStore(currentpage) - tags.add(remote_wiki=interwiki_name, remote_rev=local_rev, current_rev=current_rev) + tags.add(remote_wiki=interwiki_name, remote_rev=local_rev, current_rev=current_rev, direction=BOTH) # XXX unlock page diff -r ff08338e67fe -r eb9e5e21b0e5 docs/CHANGES.aschremmer --- a/docs/CHANGES.aschremmer Sun Aug 13 13:25:30 2006 +0200 +++ b/docs/CHANGES.aschremmer Sun Aug 13 16:05:28 2006 +0200 @@ -11,6 +11,7 @@ Branch moin/1.6-sync-aschremmer * Test with prefixes * Reduce round-trip times by caching queries and using MultiCall objects. * Attach the status information to the job page. + * Show tags in an action=info view? * Implement a cross-site authentication system, i.e. mainly an identity storage. * Clean up trailing whitespace. @@ -99,7 +100,7 @@ Week 32: Continued work on the merge log XMLRPC functions to return Fault instances. Introduced a new diff3 mode that should reduce the conflicts. Fixed hard to track down bugs in SyncPages. Store full IWIDs and match by either of both components when searching for tags. Ignore underlay pages. Filter the remote page list by - the prefix and the pageList on the remote side. + the prefix and the pageList on the remote side. Finished the direction==DOWN mode. 2006-07-18: the requested daily entry is missing here, see http://moinmoin.wikiwikiweb.de/GoogleSoc2006/BetterProgress 2006-07-19: the requested daily entry is missing here, see http://moinmoin.wikiwikiweb.de/GoogleSoc2006/BetterProgress # HG changeset patch # User Alexander Schremmer # Date 1155497247 -7200 # Node ID 7ef8046450702b7cc7c2ed2393ec1c0ecb11dc6f # Parent eb9e5e21b0e548ba50815a9cf74ce2f584c32151 Cleaned file. 
diff -r eb9e5e21b0e5 -r 7ef804645070 MoinMoin/util/bdiff.py --- a/MoinMoin/util/bdiff.py Sun Aug 13 16:05:28 2006 +0200 +++ b/MoinMoin/util/bdiff.py Sun Aug 13 21:27:27 2006 +0200 @@ -79,8 +79,8 @@ def test(): a = ("foo\n" * 30) b = (" fao" * 30) - a = file(r"C:\Dokumente und Einstellungen\Administrator\Eigene Dateien\Progra\Python\MoinMoin\moin-1.6-sync\MoinMoin\util\test.1").read() - b = file(r"C:\Dokumente und Einstellungen\Administrator\Eigene Dateien\Progra\Python\MoinMoin\moin-1.6-sync\MoinMoin\util\test.2").read() + a = file(r"test.1").read() + b = file(r"test.2").read() a = a.splitlines(1) b = b.splitlines(1) # HG changeset patch # User Alexander Schremmer # Date 1155497268 -7200 # Node ID 151da160dcea43759a574cdb4eb862d4230a0b2c # Parent 7ef8046450702b7cc7c2ed2393ec1c0ecb11dc6f Moved an import in the XMLRPC code. diff -r 7ef804645070 -r 151da160dcea MoinMoin/xmlrpc/__init__.py --- a/MoinMoin/xmlrpc/__init__.py Sun Aug 13 21:27:27 2006 +0200 +++ b/MoinMoin/xmlrpc/__init__.py Sun Aug 13 21:27:48 2006 +0200 @@ -580,7 +580,6 @@ class XmlRpcBase: def xmlrpc_getDiff(self, pagename, from_rev, to_rev): """ Gets the binary difference between two page revisions. See MoinMoin:WikiSyncronisation. """ from MoinMoin.util.bdiff import textdiff, compress - from MoinMoin.wikisync import BOTH pagename = self._instr(pagename) @@ -655,7 +654,7 @@ class XmlRpcBase: @param interwiki_name: Used to build the interwiki tag. """ from MoinMoin.util.bdiff import decompress, patch - from MoinMoin.wikisync import TagStore + from MoinMoin.wikisync import TagStore, BOTH LASTREV_INVALID = xmlrpclib.Fault("LASTREV_INVALID", "The page was changed") pagename = self._instr(pagename) diff -r 7ef804645070 -r 151da160dcea docs/CHANGES.aschremmer --- a/docs/CHANGES.aschremmer Sun Aug 13 21:27:27 2006 +0200 +++ b/docs/CHANGES.aschremmer Sun Aug 13 21:27:48 2006 +0200 @@ -13,12 +13,13 @@ Branch moin/1.6-sync-aschremmer * Attach the status information to the job page. 
* Show tags in an action=info view? * Implement a cross-site authentication system, i.e. mainly an - identity storage. + identity storage. (does OpenID make sense?) * Clean up trailing whitespace. * Add page locking, i.e. use the one in the new storage layer. * Check what needs to be documented on MoinMaster. * Search for XXX * Maybe refactor YYY into MoinLocalWiki + * Remove amount of "very" in the code * Put author names into the comment field, transmit mimetypes. New Features: # HG changeset patch # User Alexander Schremmer # Date 1155505251 -7200 # Node ID db1811c83ccca52a45484f73ed2e6de21b667c8a # Parent 151da160dcea43759a574cdb4eb862d4230a0b2c Fixed test for wikisync. diff -r 151da160dcea -r db1811c83ccc MoinMoin/_tests/test_wikisync.py --- a/MoinMoin/_tests/test_wikisync.py Sun Aug 13 21:27:48 2006 +0200 +++ b/MoinMoin/_tests/test_wikisync.py Sun Aug 13 23:40:51 2006 +0200 @@ -11,7 +11,7 @@ from MoinMoin.PageEditor import PageEdit from MoinMoin.PageEditor import PageEditor from MoinMoin._tests import TestConfig, TestSkipped -from MoinMoin.wikisync import TagStore +from MoinMoin.wikisync import TagStore, BOTH class UnsafeSyncTestcase(TestCase): @@ -27,7 +27,7 @@ class UnsafeSyncTestcase(TestCase): def testBasicTagThings(self): tags = TagStore(self.page) self.assert_(not tags.get_all_tags()) - tags.add(remote_wiki="foo", remote_rev=1, current_rev=2) + tags.add(remote_wiki="foo", remote_rev=1, current_rev=2, direction=BOTH) tags = TagStore(self.page) # reload self.assert_(tags.get_all_tags()[0].remote_rev == 1) # HG changeset patch # User Thomas Waldmann # Date 1155538835 -7200 # Node ID 32fac089f2a6b76977c80067a9a08ce0db27e84f # Parent db1811c83ccca52a45484f73ed2e6de21b667c8a added missing CHANGES entries diff -r db1811c83ccc -r 32fac089f2a6 docs/CHANGES.aschremmer --- a/docs/CHANGES.aschremmer Sun Aug 13 23:40:51 2006 +0200 +++ b/docs/CHANGES.aschremmer Mon Aug 14 09:00:35 2006 +0200 @@ -123,6 +123,13 @@ 2006-08-04: the requested daily entry is 2006-08-04: 
the requested daily entry is missing here, see http://moinmoin.wikiwikiweb.de/GoogleSoc2006/BetterProgress 2006-08-05: student didn't work on project 2006-08-06: student didn't work on project -- a Sunday +2006-08-07: entry missing +2006-08-08: entry missing +2006-08-09: entry missing +2006-08-10: entry missing +2006-08-11: entry missing +2006-08-12: entry missing +2006-08-13: entry missing Time plan ========= # HG changeset patch # User Alexander Schremmer # Date 1155586013 -7200 # Node ID bb2f70fc973465844b2041c2fb4f851e276741ae # Parent db1811c83ccca52a45484f73ed2e6de21b667c8a Have been thinking about a solution for the remaining sync cases, put the thoughts into a docstring." diff -r db1811c83ccc -r bb2f70fc9734 MoinMoin/action/SyncPages.py --- a/MoinMoin/action/SyncPages.py Sun Aug 13 23:40:51 2006 +0200 +++ b/MoinMoin/action/SyncPages.py Mon Aug 14 22:06:53 2006 +0200 @@ -131,7 +131,30 @@ class ActionClass: return self.page.send_page(self.request, msg=msg) def sync(self, params, local, remote): - """ This method does the syncronisation work. """ + """ This method does the syncronisation work. + Currently, it handles the case where the pages exist on both sides. + Now there are a few other cases left that have to be implemented: + Wiki A | Wiki B | Remark + ----------+----------+------------------------------ + exists | deleted | In this case, we do a normal merge if there + | | are no tags. If there were changes in + | | Wiki A, there is a merge with a conflict. + | | Otherwise (no changes past last merge), + | | the page is deleted in Wiki A. + ----------+----------+------------------------------- + exists | non- | Now the wiki knows that the page was renamed. + with tags | existant | There should be an RPC method that asks + | | for the new name (which could be recorded + | | on page rename). Then the page is + | | renamed in Wiki A as well and the sync + | | is done normally. 
+ ----------+----------+------------------------------- + exists | any case | Do a sync without considering tags + with tags | with non | to ensure data integrity. + | matching | + | tags | + ----------+----------+------------------------------- + """ _ = self.request.getText direction = params["direction"] diff -r db1811c83ccc -r bb2f70fc9734 docs/CHANGES.aschremmer --- a/docs/CHANGES.aschremmer Sun Aug 13 23:40:51 2006 +0200 +++ b/docs/CHANGES.aschremmer Mon Aug 14 22:06:53 2006 +0200 @@ -7,9 +7,10 @@ Branch moin/1.6-sync-aschremmer (currently done in Pickle files) ToDo: - * Implement all syncronisation cases (all directions, all 3 page sets). + * Tags should store the page name to recognise renaming scenarios. + * Implement all syncronisation cases (all 3 page sets). * Test with prefixes - * Reduce round-trip times by caching queries and using MultiCall objects. + * Reduce round-trip times by caching queries and using MultiCall objects (coroutines?) * Attach the status information to the job page. * Show tags in an action=info view? * Implement a cross-site authentication system, i.e. mainly an @@ -38,7 +39,7 @@ Branch moin/1.6-sync-aschremmer * InterWiki page editable in the wiki, modification detection based on mtimes * SyncPages action * XMLRPC functions may return Fault instances - * diff3 algorithm extenteded, a new mode should reduce the conflicts + * diff3 algorithm extended, a new mode should reduce the conflicts Bugfixes (only stuff that is buggy in moin/1.6 main branch): * Conflict resolution fixes. (merged into main) @@ -102,6 +103,7 @@ Week 32: Continued work on the merge log conflicts. Fixed hard to track down bugs in SyncPages. Store full IWIDs and match by either of both components when searching for tags. Ignore underlay pages. Filter the remote page list by the prefix and the pageList on the remote side. Finished the direction==DOWN mode. + Started designing the solutions for the other sync cases. 
2006-07-18: the requested daily entry is missing here, see http://moinmoin.wikiwikiweb.de/GoogleSoc2006/BetterProgress 2006-07-19: the requested daily entry is missing here, see http://moinmoin.wikiwikiweb.de/GoogleSoc2006/BetterProgress # HG changeset patch # User Alexander Schremmer # Date 1155587201 -7200 # Node ID e85ad6a95ae5912818a2d0fa345ae4acfcdaf120 # Parent bb2f70fc973465844b2041c2fb4f851e276741ae Documented new ideas in the docstring, changed tag format (store normalised name), transmit normalised name in mergeDiff. diff -r bb2f70fc9734 -r e85ad6a95ae5 MoinMoin/_tests/test_wikisync.py --- a/MoinMoin/_tests/test_wikisync.py Mon Aug 14 22:06:53 2006 +0200 +++ b/MoinMoin/_tests/test_wikisync.py Mon Aug 14 22:26:41 2006 +0200 @@ -27,7 +27,7 @@ class UnsafeSyncTestcase(TestCase): def testBasicTagThings(self): tags = TagStore(self.page) self.assert_(not tags.get_all_tags()) - tags.add(remote_wiki="foo", remote_rev=1, current_rev=2, direction=BOTH) + tags.add(remote_wiki="foo", remote_rev=1, current_rev=2, direction=BOTH, normalised_name="FrontPage") tags = TagStore(self.page) # reload self.assert_(tags.get_all_tags()[0].remote_rev == 1) diff -r bb2f70fc9734 -r e85ad6a95ae5 MoinMoin/action/SyncPages.py --- a/MoinMoin/action/SyncPages.py Mon Aug 14 22:06:53 2006 +0200 +++ b/MoinMoin/action/SyncPages.py Mon Aug 14 22:26:41 2006 +0200 @@ -141,6 +141,8 @@ class ActionClass: | | Wiki A, there is a merge with a conflict. | | Otherwise (no changes past last merge), | | the page is deleted in Wiki A. + | | This needs static info that could be + | | transferred with the pagelist. ----------+----------+------------------------------- exists | non- | Now the wiki knows that the page was renamed. with tags | existant | There should be an RPC method that asks @@ -148,8 +150,14 @@ class ActionClass: | | on page rename). Then the page is | | renamed in Wiki A as well and the sync | | is done normally. 
+ | | Every wiki retains a dict that maps + | | (IWID, oldname) => newname and that is + | | updated on every rename. oldname refers + | | to the pagename known by the old wiki (can be + | | gathered from tags). ----------+----------+------------------------------- - exists | any case | Do a sync without considering tags + exists | any case | Try a rename search first, then + | | do a sync without considering tags with tags | with non | to ensure data integrity. | matching | | tags | @@ -272,13 +280,13 @@ class ActionClass: if direction == BOTH: try: - very_current_remote_rev = remote.merge_diff(rp.remote_name, compress(diff), new_local_rev, current_remote_rev, current_remote_rev, local_full_iwid) + very_current_remote_rev = remote.merge_diff(rp.remote_name, compress(diff), new_local_rev, current_remote_rev, current_remote_rev, local_full_iwid, rp.name) except Exception, e: raise # XXX rollback locally and do not tag locally else: very_current_remote_rev = current_remote_rev - tags.add(remote_wiki=remote_full_iwid, remote_rev=very_current_remote_rev, current_rev=new_local_rev, direction=direction) + tags.add(remote_wiki=remote_full_iwid, remote_rev=very_current_remote_rev, current_rev=new_local_rev, direction=direction, normalised_name=rp.name) if not wikiutil.containsConflictMarker(verynewtext): self.log_status(ActionClass.INFO, _("Page successfully merged.")) diff -r bb2f70fc9734 -r e85ad6a95ae5 MoinMoin/wikisync.py --- a/MoinMoin/wikisync.py Mon Aug 14 22:06:53 2006 +0200 +++ b/MoinMoin/wikisync.py Mon Aug 14 22:26:41 2006 +0200 @@ -213,9 +213,9 @@ class MoinRemoteWiki(RemoteWiki): result["diff"] = str(result["diff"]) # unmarshal Binary object return result - def merge_diff(self, pagename, diff, local_rev, delta_remote_rev, last_remote_rev, interwiki_name): + def merge_diff(self, pagename, diff, local_rev, delta_remote_rev, last_remote_rev, interwiki_name, n_name): """ Merges the diff into the page on the remote side. 
""" - result = self.connection.mergeDiff(pagename, xmlrpclib.Binary(diff), local_rev, delta_remote_rev, last_remote_rev, interwiki_name) + result = self.connection.mergeDiff(pagename, xmlrpclib.Binary(diff), local_rev, delta_remote_rev, last_remote_rev, interwiki_name, n_name) return result # Methods implementing the RemoteWiki interface @@ -303,7 +303,7 @@ class Tag(object): class Tag(object): """ This class is used to store information about merging state. """ - def __init__(self, remote_wiki, remote_rev, current_rev, direction): + def __init__(self, remote_wiki, remote_rev, current_rev, direction, normalised_name): """ Creates a new Tag. @param remote_wiki: The identifier of the remote wiki. @@ -311,14 +311,17 @@ class Tag(object): @param current_rev: The related local revision. @param direction: The direction of the sync, encoded as an integer. """ - assert isinstance(remote_wiki, str) and isinstance(remote_rev, int) and isinstance(current_rev, int) + assert (isinstance(remote_wiki, basestring) and isinstance(remote_rev, int) + and isinstance(current_rev, int) and isinstance(direction, int) + and isinstance(normalised_name, basestring)) self.remote_wiki = remote_wiki self.remote_rev = remote_rev self.current_rev = current_rev self.direction = direction - - def __repr__(self): - return u"" % (self.remote_wiki, self.remote_rev, self.current_rev) + self.normalised_name = normalised_name + + def __repr__(self): + return u"" % (self.normalised_name, self.remote_wiki, self.remote_rev, self.current_rev) def __cmp__(self, other): if not isinstance(other, Tag): diff -r bb2f70fc9734 -r e85ad6a95ae5 MoinMoin/xmlrpc/__init__.py --- a/MoinMoin/xmlrpc/__init__.py Mon Aug 14 22:06:53 2006 +0200 +++ b/MoinMoin/xmlrpc/__init__.py Mon Aug 14 22:26:41 2006 +0200 @@ -642,7 +642,7 @@ class XmlRpcBase: else: return [self._outstr(name), iwid] - def xmlrpc_mergeDiff(self, pagename, diff, local_rev, delta_remote_rev, last_remote_rev, interwiki_name): + def xmlrpc_mergeDiff(self, 
pagename, diff, local_rev, delta_remote_rev, last_remote_rev, interwiki_name, normalised_name): """ Merges a diff sent by the remote machine and returns the number of the new revision. Additionally, this method tags the new revision. @@ -652,6 +652,7 @@ class XmlRpcBase: @param delta_remote_rev: The revno that the diff is taken against. @param last_remote_rev: The last revno of the page `pagename` that is known by the other wiki site. @param interwiki_name: Used to build the interwiki tag. + @param normalised_name: The normalised pagename that is common to both wikis. """ from MoinMoin.util.bdiff import decompress, patch from MoinMoin.wikisync import TagStore, BOTH @@ -694,7 +695,7 @@ class XmlRpcBase: current_rev = currentpage.get_real_rev() tags = TagStore(currentpage) - tags.add(remote_wiki=interwiki_name, remote_rev=local_rev, current_rev=current_rev, direction=BOTH) + tags.add(remote_wiki=interwiki_name, remote_rev=local_rev, current_rev=current_rev, direction=BOTH, normalised_name=normalised_name) # XXX unlock page diff -r bb2f70fc9734 -r e85ad6a95ae5 docs/CHANGES.aschremmer --- a/docs/CHANGES.aschremmer Mon Aug 14 22:06:53 2006 +0200 +++ b/docs/CHANGES.aschremmer Mon Aug 14 22:26:41 2006 +0200 @@ -21,6 +21,8 @@ Branch moin/1.6-sync-aschremmer * Search for XXX * Maybe refactor YYY into MoinLocalWiki * Remove amount of "very" in the code + * Do older tags of one wiki site have to be stored as well? Why don't we + keep just one tag? * Put author names into the comment field, transmit mimetypes. New Features: @@ -103,7 +105,8 @@ Week 32: Continued work on the merge log conflicts. Fixed hard to track down bugs in SyncPages. Store full IWIDs and match by either of both components when searching for tags. Ignore underlay pages. Filter the remote page list by the prefix and the pageList on the remote side. Finished the direction==DOWN mode. - Started designing the solutions for the other sync cases. + Started designing the solutions for the other sync cases. 
Store and transmit the + normalised name. 2006-07-18: the requested daily entry is missing here, see http://moinmoin.wikiwikiweb.de/GoogleSoc2006/BetterProgress 2006-07-19: the requested daily entry is missing here, see http://moinmoin.wikiwikiweb.de/GoogleSoc2006/BetterProgress # HG changeset patch # User Alexander Schremmer # Date 1155588734 -7200 # Node ID 9608758dca9ac72ece553fcf4c3ca19b1977af96 # Parent e083ea8c934e2690527bc83af6b8a1996df231d8 Fixed severe race conditions in the sync tags and the meta dict code. Before, multiple processes could destroy each other data by keeping two meta dicts instantiated and writing to them. diff -r e083ea8c934e -r 9608758dca9a MoinMoin/wikisync.py --- a/MoinMoin/wikisync.py Mon Aug 14 22:29:44 2006 +0200 +++ b/MoinMoin/wikisync.py Mon Aug 14 22:52:14 2006 +0200 @@ -368,51 +368,58 @@ class PickleTagStore(AbstractTagStore): @param page: a Page object where the tags should be related to """ - + self.page = page self.filename = page.getPagePath('synctags', use_underlay=0, check_create=1, isfile=1) lock_dir = os.path.join(page.getPagePath('cache', use_underlay=0, check_create=1), '__taglock__') self.rlock = lock.ReadLock(lock_dir, 60.0) self.wlock = lock.WriteLock(lock_dir, 60.0) - self.load() + + if not self.rlock.acquire(3.0): + raise EnvironmentError("Could not lock in PickleTagStore") + try: + self.load() + finally: + self.rlock.release() def load(self): """ Loads the tags from the data file. """ - if not self.rlock.acquire(3.0): - raise EnvironmentError("Could not lock in PickleTagStore") - try: - try: - datafile = file(self.filename, "rb") - except IOError: - self.tags = [] - else: - self.tags = pickle.load(datafile) - datafile.close() - finally: - self.rlock.release() + try: + datafile = file(self.filename, "rb") + except IOError: + self.tags = [] + else: + self.tags = pickle.load(datafile) + datafile.close() def commit(self): """ Writes the memory contents to the data file. 
""" + datafile = file(self.filename, "wb") + pickle.dump(self.tags, datafile, protocol=pickle.HIGHEST_PROTOCOL) + datafile.close() + + # public methods --------------------------------------------------- + def add(self, **kwargs): if not self.wlock.acquire(3.0): raise EnvironmentError("Could not lock in PickleTagStore") try: - datafile = file(self.filename, "wb") - pickle.dump(self.tags, datafile, protocol=pickle.HIGHEST_PROTOCOL) - datafile.close() + self.load() + self.tags.append(Tag(**kwargs)) + self.commit() finally: self.wlock.release() - # public methods --------------------------------------------------- - def add(self, **kwargs): - self.tags.append(Tag(**kwargs)) - self.commit() - def get_all_tags(self): return self.tags def clear(self): self.tags = [] - self.commit() + if not self.wlock.acquire(3.0): + raise EnvironmentError("Could not lock in PickleTagStore") + try: + self.commit() + finally: + self.wlock.release() def fetch(self, iwid_full, direction=None): iwid_full = unpackLine(iwid_full) diff -r e083ea8c934e -r 9608758dca9a MoinMoin/wikiutil.py --- a/MoinMoin/wikiutil.py Mon Aug 14 22:29:44 2006 +0200 +++ b/MoinMoin/wikiutil.py Mon Aug 14 22:52:14 2006 +0200 @@ -408,17 +408,22 @@ INTEGER_METAS = ['current', 'revision', class MetaDict(dict): """ store meta informations as a dict. - XXX It is not thread-safe, add locks! """ def __init__(self, metafilename, cache_directory): """ create a MetaDict from metafilename """ dict.__init__(self) self.metafilename = metafilename self.dirty = False - self.loaded = False lock_dir = os.path.join(cache_directory, '__metalock__') self.rlock = lock.ReadLock(lock_dir, 60.0) self.wlock = lock.WriteLock(lock_dir, 60.0) + + if not self.rlock.acquire(3.0): + raise EnvironmentError("Could not lock in MetaDict") + try: + self._get_meta() + finally: + self.rlock.release() def _get_meta(self): """ get the meta dict from an arbitrary filename. 
@@ -428,14 +433,9 @@ class MetaDict(dict): """ try: - if not self.rlock.acquire(3.0): - raise EnvironmentError("Could not lock in MetaDict") - try: - metafile = codecs.open(self.metafilename, "r", "utf-8") - meta = metafile.read() # this is much faster than the file's line-by-line iterator - metafile.close() - finally: - self.rlock.release() + metafile = codecs.open(self.metafilename, "r", "utf-8") + meta = metafile.read() # this is much faster than the file's line-by-line iterator + metafile.close() except IOError: meta = u'' for line in meta.splitlines(): @@ -444,7 +444,6 @@ class MetaDict(dict): if key in INTEGER_METAS: value = int(value) dict.__setitem__(self, key, value) - self.loaded = True def _put_meta(self): """ put the meta dict into an arbitrary filename. @@ -459,44 +458,37 @@ class MetaDict(dict): meta.append("%s: %s" % (key, value)) meta = '\r\n'.join(meta) + metafile = codecs.open(self.metafilename, "w", "utf-8") + metafile.write(meta) + metafile.close() + filesys.chmod(self.metafilename, 0666 & config.umask) + self.dirty = False + + def sync(self, mtime_usecs=None): + """ No-Op except for that parameter """ + if not mtime_usecs is None: + self.__setitem__('mtime', str(mtime_usecs)) + # otherwise no-op + + def __getitem__(self, key): + """ We don't care for cache coherency here. """ + return dict.__getitem__(self, key) + + def __setitem__(self, key, value): + """ Sets a dictionary entry. 
""" if not self.wlock.acquire(5.0): raise EnvironmentError("Could not lock in MetaDict") try: - metafile = codecs.open(self.metafilename, "w", "utf-8") - metafile.write(meta) - metafile.close() + self._get_meta() # refresh cache + try: + oldvalue = dict.__getitem__(self, key) + except KeyError: + oldvalue = None + if value != oldvalue: + dict.__setitem__(self, key, value) + self._put_meta() # sync cache finally: self.wlock.release() - filesys.chmod(self.metafilename, 0666 & config.umask) - self.dirty = False - - def sync(self, mtime_usecs=None): - """ sync the in-memory dict to the persistent store (if dirty) """ - if self.dirty: - if not mtime_usecs is None: - self.__setitem__('mtime', str(mtime_usecs)) - self._put_meta() - - def __getitem__(self, key): - try: - return dict.__getitem__(self, key) - except KeyError: - if not self.loaded: - self._get_meta() # lazy loading of metadata - return dict.__getitem__(self, key) - else: - raise - - def __setitem__(self, key, value): - """ Sets a dictionary entry. You actually have to call sync to write it - to the persistent store. """ - try: - oldvalue = dict.__getitem__(self, key) - except KeyError: - oldvalue = None - if value != oldvalue: - dict.__setitem__(self, key, value) - self.dirty = True ############################################################################# diff -r e083ea8c934e -r 9608758dca9a docs/CHANGES.aschremmer --- a/docs/CHANGES.aschremmer Mon Aug 14 22:29:44 2006 +0200 +++ b/docs/CHANGES.aschremmer Mon Aug 14 22:52:14 2006 +0200 @@ -54,6 +54,7 @@ Branch moin/1.6-sync-aschremmer * Fixed the MetaDict code to use locks. * Fixed bug in request.py that avoided showing a traceback if there was a fault after the first headers were sent. + * Fixed severe race conditions in the meta dict and the sync tags code. Other Changes: * Refactored conflict resolution and XMLRPC code. 
# HG changeset patch # User Alexander Schremmer # Date 1155671542 -7200 # Node ID 93ecff3f806f73b318589c103e6c1b8805d0d2a8 # Parent 9608758dca9ac72ece553fcf4c3ca19b1977af96 Check for local write permissions early, fixed Python 2.3 incompatibility. diff -r 9608758dca9a -r 93ecff3f806f MoinMoin/action/SyncPages.py --- a/MoinMoin/action/SyncPages.py Mon Aug 14 22:52:14 2006 +0200 +++ b/MoinMoin/action/SyncPages.py Tue Aug 15 21:52:22 2006 +0200 @@ -24,8 +24,8 @@ from MoinMoin.PageEditor import PageEdit from MoinMoin.PageEditor import PageEditor, conflict_markers from MoinMoin.Page import Page from MoinMoin.wikidicts import Dict, Group -from MoinMoin.wikisync import (TagStore, UnsupportedWikiException, SyncPage, - MoinLocalWiki, MoinRemoteWiki, UP, DOWN, BOTH) +from MoinMoin.wikisync import TagStore, UnsupportedWikiException, SyncPage +from MoinMoin.wikisync import MoinLocalWiki, MoinRemoteWiki, UP, DOWN, BOTH from MoinMoin.util.bdiff import decompress, patch, compress, textdiff from MoinMoin.util import diff3 diff -r 9608758dca9a -r 93ecff3f806f MoinMoin/wikisync.py --- a/MoinMoin/wikisync.py Mon Aug 14 22:52:14 2006 +0200 +++ b/MoinMoin/wikisync.py Tue Aug 15 21:52:22 2006 +0200 @@ -261,6 +261,8 @@ class MoinLocalWiki(RemoteWiki): def createSyncPage(self, page_name): normalised_name = normalise_pagename(page_name, self.prefix) + if not self.request.user.may.write(normalised_name): + return None if normalised_name is None: return None return SyncPage(normalised_name, local_rev=Page(self.request, page_name).get_real_rev(), local_name=page_name) # HG changeset patch # User Alexander Schremmer # Date 1155676738 -7200 # Node ID d1a4083fc36ea66efdd1a4083b0077b82aacc016 # Parent 2625857eabf0da4611606c3ae04cbb540fb577c4 Fixed some print statements. 
diff -r 2625857eabf0 -r d1a4083fc36e MoinMoin/action/SyncPages.py --- a/MoinMoin/action/SyncPages.py Tue Aug 15 22:37:09 2006 +0200 +++ b/MoinMoin/action/SyncPages.py Tue Aug 15 23:18:58 2006 +0200 @@ -176,11 +176,11 @@ class ActionClass: m_pages = [elem.add_missing_pagename(local, remote) for elem in SyncPage.merge(l_pages, r_pages)] - print "Got %i local, %i remote pages, %i merged pages" % (len(l_pages), len(r_pages), len(m_pages)) # XXX remove + self.log_status(INFO, "Got %i local, %i remote pages, %i merged pages" % (len(l_pages), len(r_pages), len(m_pages))) # XXX remove? if params["pageMatch"]: m_pages = SyncPage.filter(m_pages, params["pageMatch"].match) - print "After filtering: Got %i merges pages" % (len(m_pages), ) # XXX remove + self.log_status(INFO, "After filtering: Got %i merges pages" % (len(m_pages), )) # XXX remove on_both_sides = list(SyncPage.iter_local_and_remote(m_pages)) remote_but_not_local = list(SyncPage.iter_remote_only(m_pages)) @@ -195,7 +195,7 @@ class ActionClass: # XXX handle deleted pages for rp in on_both_sides: # XXX add locking, acquire read-lock on rp - print "Processing %r" % rp + #print "Processing %r" % rp local_pagename = rp.local_name current_page = PageEditor(self.request, local_pagename) # YYY direct access @@ -216,6 +216,7 @@ class ActionClass: old_contents = "" else: newest_tag = matching_tags[-1] + # XXX check the tag.normalised_name here local_rev = newest_tag.current_rev remote_rev = newest_tag.remote_rev if remote_rev == rp.remote_rev and (direction == DOWN or local_rev == current_rev): @@ -257,7 +258,7 @@ class ActionClass: new_contents = patch(patch_base_contents, decompress(diff)).decode("utf-8") # here, the actual merge happens - print "Merging %r, %r and %r" % (old_contents.decode("utf-8"), new_contents, current_page.get_raw_body()) + # XXX print "Merging %r, %r and %r" % (old_contents.decode("utf-8"), new_contents, current_page.get_raw_body()) verynewtext = diff3.text_merge(old_contents.decode("utf-8"), 
new_contents, current_page.get_raw_body(), 2, *conflict_markers) local_full_iwid = packLine([local.get_iwid(), local.get_interwiki_name()]) # HG changeset patch # User Alexander Schremmer # Date 1155677515 -7200 # Node ID 6bbd177f5b360d2428b4a4fb0e5010bb00e43cdb # Parent d1a4083fc36ea66efdd1a4083b0077b82aacc016 Oops, fixed minor NameError. diff -r d1a4083fc36e -r 6bbd177f5b36 MoinMoin/action/SyncPages.py --- a/MoinMoin/action/SyncPages.py Tue Aug 15 23:18:58 2006 +0200 +++ b/MoinMoin/action/SyncPages.py Tue Aug 15 23:31:55 2006 +0200 @@ -176,11 +176,11 @@ class ActionClass: m_pages = [elem.add_missing_pagename(local, remote) for elem in SyncPage.merge(l_pages, r_pages)] - self.log_status(INFO, "Got %i local, %i remote pages, %i merged pages" % (len(l_pages), len(r_pages), len(m_pages))) # XXX remove? + self.log_status(self.INFO, "Got %i local, %i remote pages, %i merged pages" % (len(l_pages), len(r_pages), len(m_pages))) # XXX remove? if params["pageMatch"]: m_pages = SyncPage.filter(m_pages, params["pageMatch"].match) - self.log_status(INFO, "After filtering: Got %i merges pages" % (len(m_pages), )) # XXX remove + self.log_status(self.INFO, "After filtering: Got %i merges pages" % (len(m_pages), )) # XXX remove on_both_sides = list(SyncPage.iter_local_and_remote(m_pages)) remote_but_not_local = list(SyncPage.iter_remote_only(m_pages)) # HG changeset patch # User Alexander Schremmer # Date 1155678654 -7200 # Node ID 59dca7bc1d0f1ecb748df67df29eb159eedf00da # Parent 6bbd177f5b360d2428b4a4fb0e5010bb00e43cdb Fixed IOError in the TagStore. diff -r 6bbd177f5b36 -r 59dca7bc1d0f MoinMoin/wikisync.py --- a/MoinMoin/wikisync.py Tue Aug 15 23:31:55 2006 +0200 +++ b/MoinMoin/wikisync.py Tue Aug 15 23:50:54 2006 +0200 @@ -388,10 +388,10 @@ class PickleTagStore(AbstractTagStore): """ Loads the tags from the data file. 
""" try: datafile = file(self.filename, "rb") + self.tags = pickle.load(datafile) except IOError: self.tags = [] else: - self.tags = pickle.load(datafile) datafile.close() def commit(self): # HG changeset patch # User Alexander Schremmer # Date 1155679157 -7200 # Node ID c53381d8527f3ecda5bc978348faba4a1d73a851 # Parent 59dca7bc1d0f1ecb748df67df29eb159eedf00da Fixed EOFError in the TagStore. diff -r 59dca7bc1d0f -r c53381d8527f MoinMoin/wikisync.py --- a/MoinMoin/wikisync.py Tue Aug 15 23:50:54 2006 +0200 +++ b/MoinMoin/wikisync.py Tue Aug 15 23:59:17 2006 +0200 @@ -389,7 +389,7 @@ class PickleTagStore(AbstractTagStore): try: datafile = file(self.filename, "rb") self.tags = pickle.load(datafile) - except IOError: + except (IOError, EOFError): self.tags = [] else: datafile.close() # HG changeset patch # User Alexander Schremmer # Date 1155679409 -7200 # Node ID 7e0faeed44bcc0491202e712ff59987d36c280f8 # Parent c53381d8527f3ecda5bc978348faba4a1d73a851 Fixed another Python 2.3 incompatibility. diff -r c53381d8527f -r 7e0faeed44bc MoinMoin/wikisync.py --- a/MoinMoin/wikisync.py Tue Aug 15 23:59:17 2006 +0200 +++ b/MoinMoin/wikisync.py Wed Aug 16 00:03:29 2006 +0200 @@ -397,7 +397,7 @@ class PickleTagStore(AbstractTagStore): def commit(self): """ Writes the memory contents to the data file. """ datafile = file(self.filename, "wb") - pickle.dump(self.tags, datafile, protocol=pickle.HIGHEST_PROTOCOL) + pickle.dump(self.tags, datafile, pickle.HIGHEST_PROTOCOL) datafile.close() # public methods --------------------------------------------------- # HG changeset patch # User Alexander Schremmer # Date 1155758962 -7200 # Node ID a0b8e78621d0cfe88779140c6555f58264f6b9eb # Parent 7e0faeed44bcc0491202e712ff59987d36c280f8 Preliminary support for items of different mime types. 
diff -r 7e0faeed44bc -r a0b8e78621d0 MoinMoin/action/SyncPages.py --- a/MoinMoin/action/SyncPages.py Wed Aug 16 00:03:29 2006 +0200 +++ b/MoinMoin/action/SyncPages.py Wed Aug 16 22:09:22 2006 +0200 @@ -25,7 +25,7 @@ from MoinMoin.Page import Page from MoinMoin.Page import Page from MoinMoin.wikidicts import Dict, Group from MoinMoin.wikisync import TagStore, UnsupportedWikiException, SyncPage -from MoinMoin.wikisync import MoinLocalWiki, MoinRemoteWiki, UP, DOWN, BOTH +from MoinMoin.wikisync import MoinLocalWiki, MoinRemoteWiki, UP, DOWN, BOTH, MIMETYPE_MOIN from MoinMoin.util.bdiff import decompress, patch, compress, textdiff from MoinMoin.util import diff3 @@ -162,9 +162,12 @@ class ActionClass: | matching | | tags | ----------+----------+------------------------------- + exists | exists | already handled. """ _ = self.request.getText direction = params["direction"] + local_full_iwid = packLine([local.get_iwid(), local.get_interwiki_name()]) + remote_full_iwid = packLine([remote.get_iwid(), remote.get_interwiki_name()]) l_pages = local.get_pages() r_pages = remote.get_pages(exclude_non_writable=direction != DOWN) @@ -221,6 +224,12 @@ class ActionClass: remote_rev = newest_tag.remote_rev if remote_rev == rp.remote_rev and (direction == DOWN or local_rev == current_rev): continue # no changes done, next page + if rp.local_mime_type != MIMETYPE_MOIN and not (remote_rev == rp.remote_rev ^ local_rev == current_rev): + self.log_status(ActionClass.WARN, _("The item %(pagename)s cannot be merged but was changed in both wikis. Please delete it in one of both wikis and try again.") % {"pagename": rp.name}) + continue + if rp.local_mime_type != rp.remote_mime_type: + self.log_status(ActionClass.WARN, _("The item %(pagename)s has different mime types in both wikis and cannot be merged. 
Please delete it in one of both wikis or unify the mime type, and try again.") % {"pagename": rp.name}) + continue old_contents = Page(self.request, local_pagename, rev=local_rev).get_raw_body_str() # YYY direct access self.log_status(ActionClass.INFO, _("Synchronising page %(pagename)s with remote page %(remotepagename)s ...") % {"pagename": local_pagename, "remotepagename": rp.remote_name}) @@ -239,12 +248,15 @@ class ActionClass: current_remote_rev = diff_result["current"] else: current_remote_rev = remote_rev - is_remote_conflict = wikiutil.containsConflictMarker(old_contents.decode("utf-8")) + if rp.local_mime_type == MIMETYPE_MOIN: + is_remote_conflict = wikiutil.containsConflictMarker(old_contents.decode("utf-8")) + else: + is_remote_conflict = NotImplemented diff = None # do not sync if the conflict is remote and local, or if it is local # and the page has never been syncronised - if (wikiutil.containsConflictMarker(current_page.get_raw_body()) + if (rp.local_mime_type == MIMETYPE_MOIN and wikiutil.containsConflictMarker(current_page.get_raw_body()) and (remote_rev is None or is_remote_conflict)): self.log_status(ActionClass.WARN, _("Skipped page %(pagename)s because of a locally or remotely unresolved conflict.") % {"pagename": local_pagename}) continue @@ -253,18 +265,23 @@ class ActionClass: self.log_status(ActionClass.INFO, _("This is the first synchronisation between this page and the remote wiki.")) if diff is None: - new_contents = old_contents.decode("utf-8") - else: - new_contents = patch(patch_base_contents, decompress(diff)).decode("utf-8") - - # here, the actual merge happens - # XXX print "Merging %r, %r and %r" % (old_contents.decode("utf-8"), new_contents, current_page.get_raw_body()) - verynewtext = diff3.text_merge(old_contents.decode("utf-8"), new_contents, current_page.get_raw_body(), 2, *conflict_markers) - - local_full_iwid = packLine([local.get_iwid(), local.get_interwiki_name()]) - remote_full_iwid = packLine([remote.get_iwid(), 
remote.get_interwiki_name()]) - - diff = textdiff(new_contents.encode("utf-8"), verynewtext.encode("utf-8")) + new_contents = old_contents + else: + new_contents = patch(patch_base_contents, decompress(diff)) + + if rp.local_mime_type == MIMETYPE_MOIN: + new_contents_unicode = new_contents.decode("utf-8") + # here, the actual merge happens + # XXX print "Merging %r, %r and %r" % (old_contents.decode("utf-8"), new_contents, current_page.get_raw_body()) + verynewtext = diff3.text_merge(old_contents.decode("utf-8"), new_contents_unicode, current_page.get_raw_body(), 2, *conflict_markers) + verynewtext_raw = verynewtext.encode("utf-8") + else: + if diff is None: + verynewtext_raw = new_contents + else: + verynewtext_raw = current_page.get_raw_body_str() + + diff = textdiff(new_contents, verynewtext_raw) #print "Diff against %r" % new_contents.encode("utf-8") comment = u"Local Merge - %r" % (remote.get_interwiki_name() or remote.get_iwid()) @@ -289,7 +306,7 @@ class ActionClass: tags.add(remote_wiki=remote_full_iwid, remote_rev=very_current_remote_rev, current_rev=new_local_rev, direction=direction, normalised_name=rp.name) - if not wikiutil.containsConflictMarker(verynewtext): + if rp.local_mime_type != MIMETYPE_MOIN or not wikiutil.containsConflictMarker(verynewtext): self.log_status(ActionClass.INFO, _("Page successfully merged.")) else: self.log_status(ActionClass.WARN, _("Page merged with conflicts.")) diff -r 7e0faeed44bc -r a0b8e78621d0 MoinMoin/wikisync.py --- a/MoinMoin/wikisync.py Wed Aug 16 00:03:29 2006 +0200 +++ b/MoinMoin/wikisync.py Wed Aug 16 22:09:22 2006 +0200 @@ -21,6 +21,7 @@ from MoinMoin.packages import unpackLine from MoinMoin.packages import unpackLine, packLine +MIMETYPE_MOIN = "text/wiki" # sync directions UP, DOWN, BOTH = range(3) @@ -58,6 +59,8 @@ class SyncPage(object): self.remote_name = remote_name assert local_rev or remote_rev assert local_name or remote_name + self.local_mime_type = MIMETYPE_MOIN # XXX no usable storage API yet + 
self.remote_mime_type = MIMETYPE_MOIN def __repr__(self): return repr("" % unicode(self)) @@ -107,6 +110,7 @@ class SyncPage(object): if sp in d: d[sp].remote_rev = sp.remote_rev d[sp].remote_name = sp.remote_name + # XXX merge mime type here else: d[sp] = sp return d.keys() diff -r 7e0faeed44bc -r a0b8e78621d0 docs/CHANGES.aschremmer --- a/docs/CHANGES.aschremmer Wed Aug 16 00:03:29 2006 +0200 +++ b/docs/CHANGES.aschremmer Wed Aug 16 22:09:22 2006 +0200 @@ -13,14 +13,14 @@ Branch moin/1.6-sync-aschremmer * Reduce round-trip times by caching queries and using MultiCall objects (coroutines?) * Attach the status information to the job page. * Show tags in an action=info view? + * Check what needs to be documented on MoinMaster. + * Search for XXX + * Maybe refactor YYY into MoinLocalWiki + * Remove amount of "very" in the code * Implement a cross-site authentication system, i.e. mainly an identity storage. (does OpenID make sense?) * Clean up trailing whitespace. * Add page locking, i.e. use the one in the new storage layer. - * Check what needs to be documented on MoinMaster. - * Search for XXX - * Maybe refactor YYY into MoinLocalWiki - * Remove amount of "very" in the code * Do older tags of one wiki site have to be stored as well? Why don't we keep just one tag? * Put author names into the comment field, transmit mimetypes. @@ -107,7 +107,8 @@ Week 32: Continued work on the merge log either of both components when searching for tags. Ignore underlay pages. Filter the remote page list by the prefix and the pageList on the remote side. Finished the direction==DOWN mode. Started designing the solutions for the other sync cases. Store and transmit the - normalised name. + normalised name. Implemented preliminary mime type support, only transmission of the mime type + and the new storage API is missing. 
2006-07-18: the requested daily entry is missing here, see http://moinmoin.wikiwikiweb.de/GoogleSoc2006/BetterProgress 2006-07-19: the requested daily entry is missing here, see http://moinmoin.wikiwikiweb.de/GoogleSoc2006/BetterProgress # HG changeset patch # User Alexander Schremmer # Date 1155765657 -7200 # Node ID f103cf7c371e16dd69512995f28820ca19e3f27b # Parent a0b8e78621d0cfe88779140c6555f58264f6b9eb Detect renamed pages. Changed the getDiff interface. diff -r a0b8e78621d0 -r f103cf7c371e MoinMoin/action/SyncPages.py --- a/MoinMoin/action/SyncPages.py Wed Aug 16 22:09:22 2006 +0200 +++ b/MoinMoin/action/SyncPages.py Thu Aug 17 00:00:57 2006 +0200 @@ -133,6 +133,7 @@ class ActionClass: def sync(self, params, local, remote): """ This method does the syncronisation work. Currently, it handles the case where the pages exist on both sides. + One of the major missing parts is rename handling. Now there are a few other cases left that have to be implemented: Wiki A | Wiki B | Remark ----------+----------+------------------------------ @@ -159,8 +160,8 @@ class ActionClass: exists | any case | Try a rename search first, then | | do a sync without considering tags with tags | with non | to ensure data integrity. - | matching | - | tags | + | matching | Hmm, how do we detect this + | tags | case if the unmatching tags are only on the remote side? ----------+----------+------------------------------- exists | exists | already handled. 
""" @@ -213,24 +214,31 @@ class ActionClass: matching_tags.sort() #print "------ TAGS: " + repr(matching_tags) + repr(tags.tags) - if not matching_tags: - remote_rev = None - local_rev = rp.local_rev # merge against the newest version - old_contents = "" - else: + # some default values for non matching tags + normalised_name = None + remote_rev = None + local_rev = rp.local_rev # merge against the newest version + old_contents = "" + + if matching_tags: newest_tag = matching_tags[-1] - # XXX check the tag.normalised_name here - local_rev = newest_tag.current_rev - remote_rev = newest_tag.remote_rev - if remote_rev == rp.remote_rev and (direction == DOWN or local_rev == current_rev): + + # handle some cases where we cannot continue for this page + if newest_tag.remote_rev == rp.remote_rev and (direction == DOWN or newest_tag.current_rev == current_rev): continue # no changes done, next page - if rp.local_mime_type != MIMETYPE_MOIN and not (remote_rev == rp.remote_rev ^ local_rev == current_rev): + if rp.local_mime_type != MIMETYPE_MOIN and not (newest_tag.remote_rev == rp.remote_rev ^ newest_tag.current_rev == current_rev): self.log_status(ActionClass.WARN, _("The item %(pagename)s cannot be merged but was changed in both wikis. Please delete it in one of both wikis and try again.") % {"pagename": rp.name}) continue if rp.local_mime_type != rp.remote_mime_type: self.log_status(ActionClass.WARN, _("The item %(pagename)s has different mime types in both wikis and cannot be merged. Please delete it in one of both wikis or unify the mime type, and try again.") % {"pagename": rp.name}) continue - old_contents = Page(self.request, local_pagename, rev=local_rev).get_raw_body_str() # YYY direct access + if newest_tag.normalised_name != rp.name: + self.log_status(ActionClass.WARN, _("The item %(pagename)s was renamed locally. This is not implemented yet. 
Therefore all syncronisation history is lost for this page.") % {"pagename": rp.name}) # XXX implement renames + else: + normalised_name = newest_tag.normalised_name + local_rev = newest_tag.current_rev + remote_rev = newest_tag.remote_rev + old_contents = Page(self.request, local_pagename, rev=newest_tag.current_rev).get_raw_body_str() # YYY direct access self.log_status(ActionClass.INFO, _("Synchronising page %(pagename)s with remote page %(remotepagename)s ...") % {"pagename": local_pagename, "remotepagename": rp.remote_name}) @@ -241,7 +249,10 @@ class ActionClass: patch_base_contents = old_contents if remote_rev != rp.remote_rev: - diff_result = remote.get_diff(rp.remote_name, remote_rev, None) # XXX might raise ALREADY_CURRENT + diff_result = remote.get_diff(rp.remote_name, remote_rev, None, normalised_name) + if diff_result is None: + self.log_status(ActionClass.ERROR, _("The page %(pagename)s could not be synced. The remote page was renamed. This is not supported yet. You may want to delete one of the pages to get it synced.") % {"pagename": rp.remote_name}) + continue is_remote_conflict = diff_result["conflict"] assert diff_result["diffversion"] == 1 diff = diff_result["diff"] diff -r a0b8e78621d0 -r f103cf7c371e MoinMoin/wikisync.py --- a/MoinMoin/wikisync.py Wed Aug 16 22:09:22 2006 +0200 +++ b/MoinMoin/wikisync.py Thu Aug 17 00:00:57 2006 +0200 @@ -210,10 +210,15 @@ class MoinRemoteWiki(RemoteWiki): return xmlrpclib.ServerProxy(self.xmlrpc_url, allow_none=True, verbose=True) # Public methods - def get_diff(self, pagename, from_rev, to_rev): + def get_diff(self, pagename, from_rev, to_rev, n_name=None): """ Returns the binary diff of the remote page named pagename, given from_rev and to_rev. 
""" - result = self.connection.getDiff(pagename, from_rev, to_rev) + try: + result = self.connection.getDiff(pagename, from_rev, to_rev, n_name) + except xmlrpclib.Fault, e: + if e.faultCode == "INVALID_TAG": + return None + raise result["diff"] = str(result["diff"]) # unmarshal Binary object return result @@ -356,7 +361,11 @@ class AbstractTagStore(object): def get_all_tags(self): """ Returns a list of all Tag objects associated to this page. """ return NotImplemented - + + def get_last_tag(self): + """ Returns the newest tag. """ + return NotImplemented + def clear(self): """ Removes all tags. """ return NotImplemented @@ -416,7 +425,14 @@ class PickleTagStore(AbstractTagStore): self.wlock.release() def get_all_tags(self): - return self.tags + return self.tags[:] + + def get_last_tag(self): + temp = self.tags[:] + temp.sort() + if not temp: + return None + return temp[-1] def clear(self): self.tags = [] diff -r a0b8e78621d0 -r f103cf7c371e MoinMoin/xmlrpc/__init__.py --- a/MoinMoin/xmlrpc/__init__.py Wed Aug 16 22:09:22 2006 +0200 +++ b/MoinMoin/xmlrpc/__init__.py Thu Aug 17 00:00:57 2006 +0200 @@ -577,11 +577,46 @@ class XmlRpcBase: # methods for wiki synchronization - def xmlrpc_getDiff(self, pagename, from_rev, to_rev): - """ Gets the binary difference between two page revisions. See MoinMoin:WikiSyncronisation. """ + def xmlrpc_getDiff(self, pagename, from_rev, to_rev, n_name=None): + """ Gets the binary difference between two page revisions. + + @param pagename: unicode string qualifying the page name + + @param fromRev: integer specifying the source revision. May be None to + refer to a virtual empty revision which leads to a diff + containing the whole page. + + @param toRev: integer specifying the target revision. May be None to + refer to the current revision. 
If the current revision is the same + as fromRev, there will be a special error condition "ALREADY_CURRENT" + + @param n_name: do a tag check verifying that n_name was the normalised + name of the last tag + + If both fromRev and toRev are None, this function acts similar to getPage, i.e. it will diff("",currentRev). + + @return Returns a dict: + * status (not a field, implicit, returned as Fault if not SUCCESS): + * "SUCCESS" - if the diff could be retrieved successfully + * "NOT_EXIST" - item does not exist + * "FROMREV_INVALID" - the source revision is invalid + * "TOREV_INVALID" - the target revision is invalid + * "INTERNAL_ERROR" - there was an internal error + * "INVALID_TAG" - the last tag does not match the supplied normalised name + * "ALREADY_CURRENT" - this not merely an error condition. It rather means that + there is no new revision to diff against which is a good thing while + synchronisation. + * current: the revision number of the current revision (not the one which was diff'ed against) + * diff: Binary object that transports a zlib-compressed binary diff (see bdiff.py, taken from Mercurial) + * conflict: if there is a conflict on the page currently + + """ from MoinMoin.util.bdiff import textdiff, compress + from MoinMoin.wikisync import TagStore pagename = self._instr(pagename) + if n_name is not None: + n_name = self._instr(n_name) # User may read page? 
if not self.request.user.may.read(pagename): @@ -626,6 +661,12 @@ class XmlRpcBase: if oldcontents() and oldpage.get_real_rev() == newpage.get_real_rev(): return xmlrpclib.Fault("ALREADY_CURRENT", "There are no changes.") + + if n_name is not None: + tags = TagStore(newpage) + last_tag = tags.get_last_tag() + if last_tag is not None and last_tag.normalised_name != n_name: + return xmlrpclib.Fault("INVALID_TAG", "The used tag is incorrect because the normalised name does not match.") newcontents = newcontents() conflict = wikiutil.containsConflictMarker(newcontents) diff -r a0b8e78621d0 -r f103cf7c371e docs/CHANGES.aschremmer --- a/docs/CHANGES.aschremmer Wed Aug 16 22:09:22 2006 +0200 +++ b/docs/CHANGES.aschremmer Thu Aug 17 00:00:57 2006 +0200 @@ -9,17 +9,20 @@ Branch moin/1.6-sync-aschremmer ToDo: * Tags should store the page name to recognise renaming scenarios. * Implement all syncronisation cases (all 3 page sets). - * Test with prefixes * Reduce round-trip times by caching queries and using MultiCall objects (coroutines?) * Attach the status information to the job page. + * Check what needs to be documented on MoinMaster. + * Show tags in an action=info view? - * Check what needs to be documented on MoinMaster. + + * Test with prefixes * Search for XXX * Maybe refactor YYY into MoinLocalWiki * Remove amount of "very" in the code + * Clean up trailing whitespace. + * Implement a cross-site authentication system, i.e. mainly an identity storage. (does OpenID make sense?) - * Clean up trailing whitespace. * Add page locking, i.e. use the one in the new storage layer. * Do older tags of one wiki site have to be stored as well? Why don't we keep just one tag? @@ -106,9 +109,9 @@ Week 32: Continued work on the merge log conflicts. Fixed hard to track down bugs in SyncPages. Store full IWIDs and match by either of both components when searching for tags. Ignore underlay pages. Filter the remote page list by the prefix and the pageList on the remote side. 
Finished the direction==DOWN mode. - Started designing the solutions for the other sync cases. Store and transmit the +Week 33: Started designing the solutions for the other sync cases. Store and transmit the normalised name. Implemented preliminary mime type support, only transmission of the mime type - and the new storage API is missing. + and the new storage API is missing. Changed getDiff interface. Detect renamed pages (and choke :). 2006-07-18: the requested daily entry is missing here, see http://moinmoin.wikiwikiweb.de/GoogleSoc2006/BetterProgress 2006-07-19: the requested daily entry is missing here, see http://moinmoin.wikiwikiweb.de/GoogleSoc2006/BetterProgress # HG changeset patch # User Alexander Schremmer # Date 1155767597 -7200 # Node ID 1b4b1e9aaa4cfa1ab890dec7c3a37502e45d1bf8 # Parent f103cf7c371e16dd69512995f28820ca19e3f27b Cleaned some parts of my CHANGES file. diff -r f103cf7c371e -r 1b4b1e9aaa4c docs/CHANGES.aschremmer --- a/docs/CHANGES.aschremmer Thu Aug 17 00:00:57 2006 +0200 +++ b/docs/CHANGES.aschremmer Thu Aug 17 00:33:17 2006 +0200 @@ -7,7 +7,6 @@ Branch moin/1.6-sync-aschremmer (currently done in Pickle files) ToDo: - * Tags should store the page name to recognise renaming scenarios. * Implement all syncronisation cases (all 3 page sets). * Reduce round-trip times by caching queries and using MultiCall objects (coroutines?) * Attach the status information to the job page. @@ -95,8 +94,9 @@ Week 29: Finished first version of the m Week 29: Finished first version of the mergeChanges method. Added Tag and TagStore classes which are currently using pickle-based storage. Added getAllPagesEx XMLRPC method. Week 30: Implemented IWID support, added function to generate random strings. Added support - for editing the InterWikiMap in the wiki. Added locking to the PickleTagStore and the MetaDict classes. Added handling of - various options and detection of anonymous wikis to the SyncPages action. + for editing the InterWikiMap in the wiki. 
Added locking to the PickleTagStore and + the MetaDict classes. Added handling of various options and detection of anonymous + wikis to the SyncPages action. Week 31: Load the IWID and the meta dict lazily. Reworked RemotePage/SyncPage, fixed option handling again, refined semantics of options, introduced direction option, replaced "localMatch"/"remoteMatch" by "pageMatch". # HG changeset patch # User Thomas Waldmann # Date 1155829049 -7200 # Node ID 4dc79658cdcde6ba3b8d27f5a17bed46822f0168 # Parent 1b4b1e9aaa4cfa1ab890dec7c3a37502e45d1bf8 added missing CHANGES entries diff -r 1b4b1e9aaa4c -r 4dc79658cdcd docs/CHANGES.aschremmer --- a/docs/CHANGES.aschremmer Thu Aug 17 00:33:17 2006 +0200 +++ b/docs/CHANGES.aschremmer Thu Aug 17 17:37:29 2006 +0200 @@ -140,6 +140,10 @@ 2006-08-11: entry missing 2006-08-11: entry missing 2006-08-12: entry missing 2006-08-13: entry missing +2006-08-14: entry missing +2006-08-15: entry missing +2006-08-16: entry missing +2006-08-17: entry missing Time plan ========= # HG changeset patch # User Alexander Schremmer # Date 1155682965 -7200 # Node ID e2ff18e85c37c2b4c554389b9ea4d08d9aa151e1 # Parent 277695e8e6f58df1b11d5514f3893c3b988a4431 Fixed interwiki bug in fckdialog, thanks to David Linke. 
diff -r 277695e8e6f5 -r e2ff18e85c37 MoinMoin/action/fckdialog.py --- a/MoinMoin/action/fckdialog.py Tue Aug 15 22:59:12 2006 +0200 +++ b/MoinMoin/action/fckdialog.py Wed Aug 16 01:02:45 2006 +0200 @@ -228,7 +228,7 @@ def link_dialog(request): interwiki_list = wikiutil.load_wikimap(request) interwiki = interwiki_list.keys() interwiki.sort() - iwpreferred = request.cfg.interwiki_preferred + iwpreferred = request.cfg.interwiki_preferred[:] if not iwpreferred or iwpreferred and iwpreferred[-1] is not None: resultlist = iwpreferred for iw in interwiki: diff -r 277695e8e6f5 -r e2ff18e85c37 docs/CHANGES --- a/docs/CHANGES Tue Aug 15 22:59:12 2006 +0200 +++ b/docs/CHANGES Wed Aug 16 01:02:45 2006 +0200 @@ -208,6 +208,8 @@ Version 1.6.current: same as POSTs. * Fixed handling of anchors in wiki links for the Restructured text parser. * Fixed http header output. + * Fixed request.cfg corruption in the fckdialog code that could lead + to e.g. stalled servers (thanks to David Linke) Other changes: * we use (again) the same browser compatibility check as FCKeditor uses # HG changeset patch # User Alexander Schremmer # Date 1155834630 -7200 # Node ID 25b82e0741bb84af8ac5018efac16caf480b5471 # Parent 05602636efa79bc0ee4b6d6a290bf5ffb360a6a0 Document the pages done in Page.getPageList, added return_objects kwarg. 
diff -r 05602636efa7 -r 25b82e0741bb MoinMoin/Page.py --- a/MoinMoin/Page.py Thu Aug 17 11:14:02 2006 +0200 +++ b/MoinMoin/Page.py Thu Aug 17 19:10:30 2006 +0200 @@ -636,7 +636,8 @@ class Page: return count - def getPageList(self, user=None, exists=1, filter=None, include_underlay=True): + def getPageList(self, user=None, exists=1, filter=None, include_underlay=True, + return_objects=False): """ List user readable pages under current page Currently only request.rootpage is used to list pages, but if we @@ -662,6 +663,9 @@ class Page: @param user: the user requesting the pages (MoinMoin.user.User) @param filter: filter function @param exists: filter existing pages + @param include_underlay: determines if underlay pages are returned as well + @param return_objects: lets it return a list of Page objects instead of + names @rtype: list of unicode strings @return: user readable wiki page names """ @@ -684,7 +688,7 @@ class Page: cache[pagename] = None - if user or exists or filter: + if user or exists or filter or not include_underlay or return_objects: # Filter names pages = [] for name in cache: @@ -707,7 +711,10 @@ class Page: if user and not user.may.read(name): continue - pages.append(name) + if return_objects: + pages.append(page) + else: + pages.append(name) else: pages = cache.keys() # HG changeset patch # User Alexander Schremmer # Date 1155837208 -7200 # Node ID ed3baf538cd543710bad816d2841c0c3106168a2 # Parent 62b8e7e8d5a54d84c33dfe277db994df7838ae34 Added infrastructure support for deleted pages in the sync framework. diff -r 62b8e7e8d5a5 -r ed3baf538cd5 MoinMoin/wikisync.py --- a/MoinMoin/wikisync.py Thu Aug 17 19:11:38 2006 +0200 +++ b/MoinMoin/wikisync.py Thu Aug 17 19:53:28 2006 +0200 @@ -44,7 +44,8 @@ class UnsupportedWikiException(Exception class SyncPage(object): """ This class represents a page in one or two wiki(s). 
""" - def __init__(self, name, local_rev=None, remote_rev=None, local_name=None, remote_name=None): + def __init__(self, name, local_rev=None, remote_rev=None, local_name=None, remote_name=None, + local_deleted=False, remote_deleted=False): """ Creates a SyncPage instance. @param name: The canonical name of the page, without prefixes. @param local_rev: The revision of the page in the local wiki. @@ -59,6 +60,8 @@ class SyncPage(object): self.remote_name = remote_name assert local_rev or remote_rev assert local_name or remote_name + self.local_deleted = local_deleted + self.remote_deleted = remote_deleted self.local_mime_type = MIMETYPE_MOIN # XXX no usable storage API yet self.remote_mime_type = MIMETYPE_MOIN @@ -110,6 +113,7 @@ class SyncPage(object): if sp in d: d[sp].remote_rev = sp.remote_rev d[sp].remote_name = sp.remote_name + d[sp].remote_deleted = sp.remote_deleted # XXX merge mime type here else: d[sp] = sp @@ -240,14 +244,15 @@ class MoinRemoteWiki(RemoteWiki): "exclude_non_writable": kwargs["exclude_non_writable"], "include_underlay": False, "prefix": self.prefix, - "pagelist": self.pagelist} + "pagelist": self.pagelist, + "mark_deleted": True} pages = self.connection.getAllPagesEx(options) rpages = [] for name, revno in pages: normalised_name = normalise_pagename(name, self.prefix) if normalised_name is None: continue - rpages.append(SyncPage(normalised_name, remote_rev=revno, remote_name=name)) + rpages.append(SyncPage(normalised_name, remote_rev=abs(revno), remote_name=name, remote_deleted=revno < 0)) return rpages def __repr__(self): @@ -274,7 +279,8 @@ class MoinLocalWiki(RemoteWiki): return None if normalised_name is None: return None - return SyncPage(normalised_name, local_rev=Page(self.request, page_name).get_real_rev(), local_name=page_name) + page = Page(self.request, page_name) + return SyncPage(normalised_name, local_rev=page.get_real_rev(), local_name=page_name, local_deleted=not page.exists()) # Public methods: @@ -298,7 +304,7 @@ class 
MoinLocalWiki(RemoteWiki): else: page_filter = lambda x: True pages = [] - for x in self.request.rootpage.getPageList(exists=1, include_underlay=False, filter=page_filter): + for x in self.request.rootpage.getPageList(exists=False, include_underlay=False, filter=page_filter): sp = self.createSyncPage(x) if sp: pages.append(sp) diff -r 62b8e7e8d5a5 -r ed3baf538cd5 MoinMoin/xmlrpc/__init__.py --- a/MoinMoin/xmlrpc/__init__.py Thu Aug 17 19:11:38 2006 +0200 +++ b/MoinMoin/xmlrpc/__init__.py Thu Aug 17 19:53:28 2006 +0200 @@ -241,13 +241,15 @@ class XmlRpcBase: exclude_non_writable:: do not include pages that the current user may not write to include_underlay:: return underlay pagenames as well prefix:: the page name must begin with this prefix to be included + mark_deleted:: returns the revision number -rev_no if the page was deleted. + Makes only sense if you enable include_revno and include_deleted. @rtype: list @return: a list of all pages. """ from MoinMoin.wikisync import normalise_pagename options = {"include_system": True, "include_revno": False, "include_deleted": False, "exclude_non_writable": False, "include_underlay": True, "prefix": "", - "pagelist": None} + "pagelist": None, "mark_deleted": False} if opts is not None: options.update(opts) @@ -271,10 +273,17 @@ class XmlRpcBase: return n_name in pagelist pagelist = self.request.rootpage.getPageList(filter=p_filter, exists=not options["include_deleted"], - include_underlay=options["include_underlay"]) + include_underlay=options["include_underlay"], + return_objects=options["include_revno"]) if options['include_revno']: - return [[self._outstr(x), Page(self.request, x).get_real_rev()] for x in pagelist] + pages = [] + for x in pagelist: + revno = x.get_real_rev() + if options["mark_deleted"] and not x.exists(): + revno = -revno + pages.append([self._outstr(x.page_name), revno]) + return pages else: return [self._outstr(x) for x in pagelist] diff -r 62b8e7e8d5a5 -r ed3baf538cd5 docs/CHANGES.aschremmer --- 
a/docs/CHANGES.aschremmer Thu Aug 17 19:11:38 2006 +0200 +++ b/docs/CHANGES.aschremmer Thu Aug 17 19:53:28 2006 +0200 @@ -112,6 +112,7 @@ Week 33: Started designing the solutions Week 33: Started designing the solutions for the other sync cases. Store and transmit the normalised name. Implemented preliminary mime type support, only transmission of the mime type and the new storage API is missing. Changed getDiff interface. Detect renamed pages (and choke :). + Added infrastructure support for detecting deleted pages (not used in the merging logic yet). 2006-07-18: the requested daily entry is missing here, see http://moinmoin.wikiwikiweb.de/GoogleSoc2006/BetterProgress 2006-07-19: the requested daily entry is missing here, see http://moinmoin.wikiwikiweb.de/GoogleSoc2006/BetterProgress @@ -144,9 +145,3 @@ 2006-08-15: entry missing 2006-08-15: entry missing 2006-08-16: entry missing 2006-08-17: entry missing - -Time plan -========= -In July and August, most parts of the implementation will be finished -from 07-10 to 07-14 and from 08-03 to 08-19. Between those time spans, there -are exams. # HG changeset patch # User Alexander Schremmer # Date 1155837532 -7200 # Node ID c4f6af5ee29562946601b9fa9188eb3be687f5f2 # Parent ed3baf538cd543710bad816d2841c0c3106168a2 Mute the tempnam warning in the caching module. 
diff -r ed3baf538cd5 -r c4f6af5ee295 MoinMoin/caching.py --- a/MoinMoin/caching.py Thu Aug 17 19:53:28 2006 +0200 +++ b/MoinMoin/caching.py Thu Aug 17 19:58:52 2006 +0200 @@ -7,8 +7,14 @@ """ import os +import warnings + from MoinMoin import config from MoinMoin.util import filesys, lock + +# filter the tempname warning because we create the tempfile only in directories +# where only we should have write access initially +warnings.filterwarnings("ignore", "tempnam.*security", RuntimeWarning, "MoinMoin.caching") class CacheEntry: def __init__(self, request, arena, key, scope='page_or_wiki', do_locking=True): diff -r ed3baf538cd5 -r c4f6af5ee295 docs/CHANGES.aschremmer --- a/docs/CHANGES.aschremmer Thu Aug 17 19:53:28 2006 +0200 +++ b/docs/CHANGES.aschremmer Thu Aug 17 19:58:52 2006 +0200 @@ -57,6 +57,7 @@ Branch moin/1.6-sync-aschremmer * Fixed bug in request.py that avoided showing a traceback if there was a fault after the first headers were sent. * Fixed severe race conditions in the meta dict and the sync tags code. + * Mute the tempnam warning in the caching module. Other Changes: * Refactored conflict resolution and XMLRPC code. # HG changeset patch # User Alexander Schremmer # Date 1155841523 -7200 # Node ID a36c70e5f0ddbdfc997efd936b1c5881d2f6a842 # Parent c4f6af5ee29562946601b9fa9188eb3be687f5f2 Reworked i18n and logging support, from now on, the log is attached to the job page. 
diff -r c4f6af5ee295 -r a36c70e5f0dd MoinMoin/action/SyncPages.py --- a/MoinMoin/action/SyncPages.py Thu Aug 17 19:58:52 2006 +0200 +++ b/MoinMoin/action/SyncPages.py Thu Aug 17 21:05:23 2006 +0200 @@ -37,19 +37,32 @@ class ActionStatus(Exception): pass class ActionStatus(Exception): pass -class ActionClass: - INFO, WARN, ERROR = range(3) # used for logging +class ActionClass(object): + INFO, WARN, ERROR = zip(range(3), ("", "", "/!\\")) # used for logging def __init__(self, pagename, request): self.request = request self.pagename = pagename - self.page = Page(request, pagename) + self.page = PageEditor(request, pagename) self.status = [] request.flush() - def log_status(self, level, message): + def log_status(self, level, message, substitutions=(), raw_suffix=""): """ Appends the message with a given importance level to the internal log. """ - self.status.append((level, message)) + self.status.append((level, message, substitutions, raw_suffix)) + + def generate_log_table(self): + """ Transforms self.status into a user readable table. """ + table_line = u"|| %(smiley)s || %(message)s%(raw_suffix)s ||" + table = [] + + for line in self.status: + macro_args = [line[1]] + list(line[2]) + table.append(table_line % {"smiley": line[0][1], "message": + u"[[GetText2(|%s)]]" % (packLine(macro_args), ), + "raw_suffix": line[3]}) + + return "\n".join(table) def parse_page(self): """ Parses the parameter page and returns the read arguments. """ @@ -102,6 +115,7 @@ class ActionClass: params = self.fix_params(self.parse_page()) + # XXX aquire readlock on self.page try: if params["direction"] == UP: raise ActionStatus(_("The only supported directions are BOTH and DOWN.")) @@ -123,11 +137,13 @@ class ActionClass: self.sync(params, local, remote) except ActionStatus, e: - msg = u'

%s

%s

\n' % (e.args[0], repr(self.status)) + msg = u'

%s

%s

\n' % (e.args[0], repr(self.status)) # XXX remove self.status else: - msg = u"%s

%s

" % (_("Syncronisation finished."), repr(self.status)) - - # XXX append self.status to the job page + msg = u"%s

%s

" % (_("Syncronisation finished."), repr(self.status)) # XXX remove self.status + + self.page.saveText(self.page.get_raw_body() + "\n\n" + self.generate_log_table(), 0) + + # XXX release readlock on self.page return self.page.send_page(self.request, msg=msg) def sync(self, params, local, remote): @@ -165,10 +181,13 @@ class ActionClass: ----------+----------+------------------------------- exists | exists | already handled. """ - _ = self.request.getText + _ = lambda x: x # we will translate it later + direction = params["direction"] local_full_iwid = packLine([local.get_iwid(), local.get_interwiki_name()]) remote_full_iwid = packLine([remote.get_iwid(), remote.get_interwiki_name()]) + + self.log_status(self.INFO, _("Syncronisation started -"), raw_suffix=" [[DateTime(%s)]]" % self.page._get_local_timestamp()) l_pages = local.get_pages() r_pages = remote.get_pages(exclude_non_writable=direction != DOWN) @@ -180,11 +199,12 @@ class ActionClass: m_pages = [elem.add_missing_pagename(local, remote) for elem in SyncPage.merge(l_pages, r_pages)] - self.log_status(self.INFO, "Got %i local, %i remote pages, %i merged pages" % (len(l_pages), len(r_pages), len(m_pages))) # XXX remove? + self.log_status(self.INFO, _("Got a list of %s local and %s remote pages. 
This results in %s different pages over-all."), + (str(len(l_pages)), str(len(r_pages)), str(len(m_pages)))) if params["pageMatch"]: m_pages = SyncPage.filter(m_pages, params["pageMatch"].match) - self.log_status(self.INFO, "After filtering: Got %i merges pages" % (len(m_pages), )) # XXX remove + self.log_status(self.INFO, _("After filtering: %s pages"), (str(len(m_pages)), )) on_both_sides = list(SyncPage.iter_local_and_remote(m_pages)) remote_but_not_local = list(SyncPage.iter_remote_only(m_pages)) @@ -199,7 +219,7 @@ class ActionClass: # XXX handle deleted pages for rp in on_both_sides: # XXX add locking, acquire read-lock on rp - #print "Processing %r" % rp + # XXX print "Processing %r" % rp local_pagename = rp.local_name current_page = PageEditor(self.request, local_pagename) # YYY direct access @@ -227,20 +247,20 @@ class ActionClass: if newest_tag.remote_rev == rp.remote_rev and (direction == DOWN or newest_tag.current_rev == current_rev): continue # no changes done, next page if rp.local_mime_type != MIMETYPE_MOIN and not (newest_tag.remote_rev == rp.remote_rev ^ newest_tag.current_rev == current_rev): - self.log_status(ActionClass.WARN, _("The item %(pagename)s cannot be merged but was changed in both wikis. Please delete it in one of both wikis and try again.") % {"pagename": rp.name}) + self.log_status(ActionClass.WARN, _("The item %s cannot be merged but was changed in both wikis. Please delete it in one of both wikis and try again."), (rp.name, )) continue if rp.local_mime_type != rp.remote_mime_type: - self.log_status(ActionClass.WARN, _("The item %(pagename)s has different mime types in both wikis and cannot be merged. Please delete it in one of both wikis or unify the mime type, and try again.") % {"pagename": rp.name}) + self.log_status(ActionClass.WARN, _("The item %s has different mime types in both wikis and cannot be merged. 
Please delete it in one of both wikis or unify the mime type, and try again."),(rp.name, )) continue if newest_tag.normalised_name != rp.name: - self.log_status(ActionClass.WARN, _("The item %(pagename)s was renamed locally. This is not implemented yet. Therefore all syncronisation history is lost for this page.") % {"pagename": rp.name}) # XXX implement renames + self.log_status(ActionClass.WARN, _("The item %s was renamed locally. This is not implemented yet. Therefore all syncronisation history is lost for this page."), (rp.name, )) # XXX implement renames else: normalised_name = newest_tag.normalised_name local_rev = newest_tag.current_rev remote_rev = newest_tag.remote_rev old_contents = Page(self.request, local_pagename, rev=newest_tag.current_rev).get_raw_body_str() # YYY direct access - self.log_status(ActionClass.INFO, _("Synchronising page %(pagename)s with remote page %(remotepagename)s ...") % {"pagename": local_pagename, "remotepagename": rp.remote_name}) + self.log_status(ActionClass.INFO, _("Synchronising page %s with remote page %s ..."), (local_pagename, rp.remote_name)) if direction == DOWN: remote_rev = None # always fetch the full page, ignore remote conflict check @@ -251,7 +271,7 @@ class ActionClass: if remote_rev != rp.remote_rev: diff_result = remote.get_diff(rp.remote_name, remote_rev, None, normalised_name) if diff_result is None: - self.log_status(ActionClass.ERROR, _("The page %(pagename)s could not be synced. The remote page was renamed. This is not supported yet. You may want to delete one of the pages to get it synced.") % {"pagename": rp.remote_name}) + self.log_status(ActionClass.ERROR, _("The page %s could not be synced. The remote page was renamed. This is not supported yet. 
You may want to delete one of the pages to get it synced."), (rp.remote_name, )) continue is_remote_conflict = diff_result["conflict"] assert diff_result["diffversion"] == 1 @@ -269,7 +289,7 @@ class ActionClass: # and the page has never been syncronised if (rp.local_mime_type == MIMETYPE_MOIN and wikiutil.containsConflictMarker(current_page.get_raw_body()) and (remote_rev is None or is_remote_conflict)): - self.log_status(ActionClass.WARN, _("Skipped page %(pagename)s because of a locally or remotely unresolved conflict.") % {"pagename": local_pagename}) + self.log_status(ActionClass.WARN, _("Skipped page %s because of a locally or remotely unresolved conflict."), (local_pagename, )) continue if remote_rev is None and direction == BOTH: @@ -293,7 +313,7 @@ class ActionClass: verynewtext_raw = current_page.get_raw_body_str() diff = textdiff(new_contents, verynewtext_raw) - #print "Diff against %r" % new_contents.encode("utf-8") + # XXX print "Diff against %r" % new_contents.encode("utf-8") comment = u"Local Merge - %r" % (remote.get_interwiki_name() or remote.get_iwid()) diff -r c4f6af5ee295 -r a36c70e5f0dd MoinMoin/mail/mailimport.py --- a/MoinMoin/mail/mailimport.py Thu Aug 17 19:58:52 2006 +0200 +++ b/MoinMoin/mail/mailimport.py Thu Aug 17 21:05:23 2006 +0200 @@ -269,6 +269,10 @@ def import_mail_from_message(request, me elif table_ends is not None and not line.startswith("||"): break + # in order to let the gettext system recognise the [[GetText]] calls used below, + # we must repeat them here: + [_("From"), _("To"), _("Content"), _("Date"), _("Attachments")] + table_header = (u"\n\n## mail_overview (don't delete this line)\n" + u"|| '''[[GetText(From)]] ''' || '''[[GetText(To)]] ''' || '''[[GetText(Content)]] ''' || '''[[GetText(Date)]] ''' || '''[[GetText(Attachments)]] ''' ||\n" ) diff -r c4f6af5ee295 -r a36c70e5f0dd MoinMoin/xmlrpc/__init__.py --- a/MoinMoin/xmlrpc/__init__.py Thu Aug 17 19:58:52 2006 +0200 +++ b/MoinMoin/xmlrpc/__init__.py Thu Aug 17 
21:05:23 2006 +0200 @@ -706,11 +706,12 @@ class XmlRpcBase: """ from MoinMoin.util.bdiff import decompress, patch from MoinMoin.wikisync import TagStore, BOTH + from MoinMoin.packages import unpackLine LASTREV_INVALID = xmlrpclib.Fault("LASTREV_INVALID", "The page was changed") pagename = self._instr(pagename) - comment = u"Remote Merge - %r" % interwiki_name + comment = u"Remote Merge - %r" % unpackLine(interwiki_name)[-1] # User may read page? if not self.request.user.may.read(pagename) or not self.request.user.may.write(pagename): diff -r c4f6af5ee295 -r a36c70e5f0dd docs/CHANGES.aschremmer --- a/docs/CHANGES.aschremmer Thu Aug 17 19:58:52 2006 +0200 +++ b/docs/CHANGES.aschremmer Thu Aug 17 21:05:23 2006 +0200 @@ -114,6 +114,7 @@ Week 33: Started designing the solutions normalised name. Implemented preliminary mime type support, only transmission of the mime type and the new storage API is missing. Changed getDiff interface. Detect renamed pages (and choke :). Added infrastructure support for detecting deleted pages (not used in the merging logic yet). + Reworked i18n and logging support. 2006-07-18: the requested daily entry is missing here, see http://moinmoin.wikiwikiweb.de/GoogleSoc2006/BetterProgress 2006-07-19: the requested daily entry is missing here, see http://moinmoin.wikiwikiweb.de/GoogleSoc2006/BetterProgress diff -r c4f6af5ee295 -r a36c70e5f0dd MoinMoin/macro/GetText2.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/MoinMoin/macro/GetText2.py Thu Aug 17 21:05:23 2006 +0200 @@ -0,0 +1,29 @@ +# -*- coding: iso-8859-1 -*- +""" + MoinMoin - Load I18N Text and substitute data. + + This macro has the main purpose of being used by extensions that write + data to wiki pages but want to ensure that it is properly translated. + + @copyright: 2006 by MoinMoin:AlexanderSchremmer + @license: GNU GPL, see COPYING for details. 
+""" + +from MoinMoin import wikiutil +from MoinMoin.packages import unpackLine + +Dependencies = ["language"] + +def execute(macro, args): + """ args consists of a character specifiying the separator and then a + packLine sequence describing a list. The first element of it is the message + and the remaining elements are substituted in the message using string + substitution. + """ + sep = args[0] + args = unpackLine(args[1:], sep) + translation = macro.request.getText(args[0], formatted=False) + message = translation % tuple(args[1:]) + + return macro.formatter.text(message) + # HG changeset patch # User Alexander Schremmer # Date 1155842523 -7200 # Node ID d0c171c168a8870c80f623aa1593904f3b26176c # Parent a36c70e5f0ddbdfc997efd936b1c5881d2f6a842 Minor cleanup, added entry to my CHANGES file. diff -r a36c70e5f0dd -r d0c171c168a8 MoinMoin/action/SyncPages.py --- a/MoinMoin/action/SyncPages.py Thu Aug 17 21:05:23 2006 +0200 +++ b/MoinMoin/action/SyncPages.py Thu Aug 17 21:22:03 2006 +0200 @@ -137,13 +137,13 @@ class ActionClass(object): self.sync(params, local, remote) except ActionStatus, e: - msg = u'

%s

%s

\n' % (e.args[0], repr(self.status)) # XXX remove self.status + msg = u'

%s

\n' % (e.args[0], ) else: - msg = u"%s

%s

" % (_("Syncronisation finished."), repr(self.status)) # XXX remove self.status + msg = u"%s" % (_("Syncronisation finished."), ) self.page.saveText(self.page.get_raw_body() + "\n\n" + self.generate_log_table(), 0) - # XXX release readlock on self.page + return self.page.send_page(self.request, msg=msg) def sync(self, params, local, remote): diff -r a36c70e5f0dd -r d0c171c168a8 docs/CHANGES.aschremmer --- a/docs/CHANGES.aschremmer Thu Aug 17 21:05:23 2006 +0200 +++ b/docs/CHANGES.aschremmer Thu Aug 17 21:22:03 2006 +0200 @@ -44,6 +44,7 @@ Branch moin/1.6-sync-aschremmer * SyncPages action * XMLRPC functions may return Fault instances * diff3 algorithm extended, a new mode should reduce the conflicts + * GetText2 macro Bugfixes (only stuff that is buggy in moin/1.6 main branch): * Conflict resolution fixes. (merged into main) @@ -114,7 +115,7 @@ Week 33: Started designing the solutions normalised name. Implemented preliminary mime type support, only transmission of the mime type and the new storage API is missing. Changed getDiff interface. Detect renamed pages (and choke :). Added infrastructure support for detecting deleted pages (not used in the merging logic yet). - Reworked i18n and logging support. + Reworked i18n and logging support. Added GetText2 macro. 2006-07-18: the requested daily entry is missing here, see http://moinmoin.wikiwikiweb.de/GoogleSoc2006/BetterProgress 2006-07-19: the requested daily entry is missing here, see http://moinmoin.wikiwikiweb.de/GoogleSoc2006/BetterProgress # HG changeset patch # User Alexander Schremmer # Date 1155842883 -7200 # Node ID cd150467ee30d8ac285762bc29d4ba0972c284eb # Parent d0c171c168a8870c80f623aa1593904f3b26176c Minor cleanups. 
diff -r d0c171c168a8 -r cd150467ee30 MoinMoin/action/SyncPages.py --- a/MoinMoin/action/SyncPages.py Thu Aug 17 21:22:03 2006 +0200 +++ b/MoinMoin/action/SyncPages.py Thu Aug 17 21:28:03 2006 +0200 @@ -184,6 +184,11 @@ class ActionClass(object): _ = lambda x: x # we will translate it later direction = params["direction"] + if direction == BOTH: + match_direction = direction + else: + match_direction = None + local_full_iwid = packLine([local.get_iwid(), local.get_interwiki_name()]) remote_full_iwid = packLine([remote.get_iwid(), remote.get_interwiki_name()]) @@ -209,7 +214,7 @@ class ActionClass(object): on_both_sides = list(SyncPage.iter_local_and_remote(m_pages)) remote_but_not_local = list(SyncPage.iter_remote_only(m_pages)) local_but_not_remote = list(SyncPage.iter_local_only(m_pages)) - + # some initial test code (XXX remove) #r_new_pages = u", ".join([unicode(x) for x in remote_but_not_local]) #l_new_pages = u", ".join([unicode(x) for x in local_but_not_remote]) @@ -226,11 +231,8 @@ class ActionClass(object): current_rev = current_page.get_real_rev() tags = TagStore(current_page) - if direction == BOTH: - match_direction = direction - else: - match_direction = None - matching_tags = tags.fetch(iwid_full=remote.iwid_full,direction=match_direction) + + matching_tags = tags.fetch(iwid_full=remote.iwid_full, direction=match_direction) matching_tags.sort() #print "------ TAGS: " + repr(matching_tags) + repr(tags.tags) @@ -250,7 +252,7 @@ class ActionClass(object): self.log_status(ActionClass.WARN, _("The item %s cannot be merged but was changed in both wikis. Please delete it in one of both wikis and try again."), (rp.name, )) continue if rp.local_mime_type != rp.remote_mime_type: - self.log_status(ActionClass.WARN, _("The item %s has different mime types in both wikis and cannot be merged. 
Please delete it in one of both wikis or unify the mime type, and try again."),(rp.name, )) + self.log_status(ActionClass.WARN, _("The item %s has different mime types in both wikis and cannot be merged. Please delete it in one of both wikis or unify the mime type, and try again."), (rp.name, )) continue if newest_tag.normalised_name != rp.name: self.log_status(ActionClass.WARN, _("The item %s was renamed locally. This is not implemented yet. Therefore all syncronisation history is lost for this page."), (rp.name, )) # XXX implement renames # HG changeset patch # User Alexander Schremmer # Date 1155846934 -7200 # Node ID 4a307ee59506fca24322b456df00daa3214d2867 # Parent cd150467ee30d8ac285762bc29d4ba0972c284eb Minor cleaning in my CHANGES file. diff -r cd150467ee30 -r 4a307ee59506 docs/CHANGES.aschremmer --- a/docs/CHANGES.aschremmer Thu Aug 17 21:28:03 2006 +0200 +++ b/docs/CHANGES.aschremmer Thu Aug 17 22:35:34 2006 +0200 @@ -9,7 +9,6 @@ Branch moin/1.6-sync-aschremmer ToDo: * Implement all syncronisation cases (all 3 page sets). * Reduce round-trip times by caching queries and using MultiCall objects (coroutines?) - * Attach the status information to the job page. * Check what needs to be documented on MoinMaster. * Show tags in an action=info view? # HG changeset patch # User Alexander Schremmer # Date 1155938404 -7200 # Node ID 2e29ef86be2681e1c1de86b2b50e7a6c5683d766 # Parent 4a307ee59506fca24322b456df00daa3214d2867 Reordered SyncPages, preparing for new modes. 
diff -r 4a307ee59506 -r 2e29ef86be26 MoinMoin/action/SyncPages.py --- a/MoinMoin/action/SyncPages.py Thu Aug 17 22:35:34 2006 +0200 +++ b/MoinMoin/action/SyncPages.py Sat Aug 19 00:00:04 2006 +0200 @@ -211,18 +211,9 @@ class ActionClass(object): m_pages = SyncPage.filter(m_pages, params["pageMatch"].match) self.log_status(self.INFO, _("After filtering: %s pages"), (str(len(m_pages)), )) - on_both_sides = list(SyncPage.iter_local_and_remote(m_pages)) - remote_but_not_local = list(SyncPage.iter_remote_only(m_pages)) - local_but_not_remote = list(SyncPage.iter_local_only(m_pages)) - - # some initial test code (XXX remove) - #r_new_pages = u", ".join([unicode(x) for x in remote_but_not_local]) - #l_new_pages = u", ".join([unicode(x) for x in local_but_not_remote]) - #raise ActionStatus("These pages are in the remote wiki, but not local: " + wikiutil.escape(r_new_pages) + "
These pages are in the local wiki, but not in the remote one: " + wikiutil.escape(l_new_pages)) - - # let's do the simple case first, can be refactored later to match all cases - # XXX handle deleted pages - for rp in on_both_sides: + def handle_page(rp): + # let's do the simple case first, can be refactored later to match all cases + # XXX handle deleted pages # XXX add locking, acquire read-lock on rp # XXX print "Processing %r" % rp @@ -247,13 +238,13 @@ class ActionClass(object): # handle some cases where we cannot continue for this page if newest_tag.remote_rev == rp.remote_rev and (direction == DOWN or newest_tag.current_rev == current_rev): - continue # no changes done, next page + return # no changes done, next page if rp.local_mime_type != MIMETYPE_MOIN and not (newest_tag.remote_rev == rp.remote_rev ^ newest_tag.current_rev == current_rev): self.log_status(ActionClass.WARN, _("The item %s cannot be merged but was changed in both wikis. Please delete it in one of both wikis and try again."), (rp.name, )) - continue + return if rp.local_mime_type != rp.remote_mime_type: self.log_status(ActionClass.WARN, _("The item %s has different mime types in both wikis and cannot be merged. Please delete it in one of both wikis or unify the mime type, and try again."), (rp.name, )) - continue + return if newest_tag.normalised_name != rp.name: self.log_status(ActionClass.WARN, _("The item %s was renamed locally. This is not implemented yet. Therefore all syncronisation history is lost for this page."), (rp.name, )) # XXX implement renames else: @@ -274,7 +265,7 @@ class ActionClass(object): diff_result = remote.get_diff(rp.remote_name, remote_rev, None, normalised_name) if diff_result is None: self.log_status(ActionClass.ERROR, _("The page %s could not be synced. The remote page was renamed. This is not supported yet. 
You may want to delete one of the pages to get it synced."), (rp.remote_name, )) - continue + return is_remote_conflict = diff_result["conflict"] assert diff_result["diffversion"] == 1 diff = diff_result["diff"] @@ -292,7 +283,7 @@ class ActionClass(object): if (rp.local_mime_type == MIMETYPE_MOIN and wikiutil.containsConflictMarker(current_page.get_raw_body()) and (remote_rev is None or is_remote_conflict)): self.log_status(ActionClass.WARN, _("Skipped page %s because of a locally or remotely unresolved conflict."), (local_pagename, )) - continue + return if remote_rev is None and direction == BOTH: self.log_status(ActionClass.INFO, _("This is the first synchronisation between this page and the remote wiki.")) @@ -346,6 +337,19 @@ class ActionClass(object): # XXX release lock + on_both_sides = list(SyncPage.iter_local_and_remote(m_pages)) + remote_but_not_local = list(SyncPage.iter_remote_only(m_pages)) + local_but_not_remote = list(SyncPage.iter_local_only(m_pages)) + + # some initial test code (XXX remove) + #r_new_pages = u", ".join([unicode(x) for x in remote_but_not_local]) + #l_new_pages = u", ".join([unicode(x) for x in local_but_not_remote]) + #raise ActionStatus("These pages are in the remote wiki, but not local: " + wikiutil.escape(r_new_pages) + "
These pages are in the local wiki, but not in the remote one: " + wikiutil.escape(l_new_pages)) + + for rp in on_both_sides: + handle_page(rp) + + def execute(pagename, request): ActionClass(pagename, request).render() # HG changeset patch # User Alexander Schremmer # Date 1155942063 -7200 # Node ID c48e57b1460a6f21eacc3e728a9825fa70cf7871 # Parent 2e29ef86be2681e1c1de86b2b50e7a6c5683d766 Allow passing None for some rev parameters of mergeDiff. diff -r 2e29ef86be26 -r c48e57b1460a MoinMoin/xmlrpc/__init__.py --- a/MoinMoin/xmlrpc/__init__.py Sat Aug 19 00:00:04 2006 +0200 +++ b/MoinMoin/xmlrpc/__init__.py Sat Aug 19 01:01:03 2006 +0200 @@ -722,14 +722,14 @@ class XmlRpcBase: # current version of the page currentpage = PageEditor(self.request, pagename, do_editor_backup=0) - if currentpage.get_real_rev() != last_remote_rev: + if last_remote_rev is not None and currentpage.get_real_rev() != last_remote_rev: return LASTREV_INVALID if not currentpage.exists() and diff is None: return xmlrpclib.Fault("NOT_EXIST", "The page does not exist and no diff was supplied.") # base revision used for the diff - basepage = Page(self.request, pagename, rev=delta_remote_rev) + basepage = Page(self.request, pagename, rev=(delta_remote_rev or 0)) # generate the new page revision by applying the diff newcontents = patch(basepage.get_raw_body_str(), decompress(str(diff))) @@ -737,7 +737,7 @@ class XmlRpcBase: # write page try: - currentpage.saveText(newcontents.decode("utf-8"), last_remote_rev, comment=comment) + currentpage.saveText(newcontents.decode("utf-8"), last_remote_rev or 0, comment=comment) except PageEditor.Unchanged: # could happen in case of both wiki's pages being equal pass except PageEditor.EditConflict: # HG changeset patch # User Alexander Schremmer # Date 1155942118 -7200 # Node ID 0c3927a3b8a2c1fe9e3c3448c3739753655e41ae # Parent c48e57b1460a6f21eacc3e728a9825fa70cf7871 Getting syncing of pages working that are just on one side, still faulty, though. 
diff -r c48e57b1460a -r 0c3927a3b8a2 MoinMoin/action/SyncPages.py --- a/MoinMoin/action/SyncPages.py Sat Aug 19 01:01:03 2006 +0200 +++ b/MoinMoin/action/SyncPages.py Sat Aug 19 01:01:58 2006 +0200 @@ -346,7 +346,7 @@ class ActionClass(object): #l_new_pages = u", ".join([unicode(x) for x in local_but_not_remote]) #raise ActionStatus("These pages are in the remote wiki, but not local: " + wikiutil.escape(r_new_pages) + "
These pages are in the local wiki, but not in the remote one: " + wikiutil.escape(l_new_pages)) - for rp in on_both_sides: + for rp in m_pages: #on_both_sides: handle_page(rp) diff -r c48e57b1460a -r 0c3927a3b8a2 MoinMoin/wikisync.py --- a/MoinMoin/wikisync.py Sat Aug 19 01:01:03 2006 +0200 +++ b/MoinMoin/wikisync.py Sat Aug 19 01:01:58 2006 +0200 @@ -64,6 +64,7 @@ class SyncPage(object): self.remote_deleted = remote_deleted self.local_mime_type = MIMETYPE_MOIN # XXX no usable storage API yet self.remote_mime_type = MIMETYPE_MOIN + assert remote_rev != 99999999 def __repr__(self): return repr("" % unicode(self)) @@ -94,7 +95,7 @@ class SyncPage(object): elif self.remote_name is None: n_name = normalise_pagename(self.local_name, local.prefix) assert n_name is not None - self.remote_name = (local.prefix or "") + n_name + self.remote_name = (remote.prefix or "") + n_name return self # makes using list comps easier @@ -252,7 +253,10 @@ class MoinRemoteWiki(RemoteWiki): normalised_name = normalise_pagename(name, self.prefix) if normalised_name is None: continue - rpages.append(SyncPage(normalised_name, remote_rev=abs(revno), remote_name=name, remote_deleted=revno < 0)) + if abs(revno) != 99999999: # I love sane in-band signalling + remote_rev = abs(revno) + remote_deleted = revno < 0 + rpages.append(SyncPage(normalised_name, remote_rev=remote_rev, remote_name=name, remote_deleted=remote_deleted)) return rpages def __repr__(self): @@ -280,7 +284,10 @@ class MoinLocalWiki(RemoteWiki): if normalised_name is None: return None page = Page(self.request, page_name) - return SyncPage(normalised_name, local_rev=page.get_real_rev(), local_name=page_name, local_deleted=not page.exists()) + revno = page.get_real_rev() + if revno == 99999999: # I love sane in-band signalling + revno = None + return SyncPage(normalised_name, local_rev=revno, local_name=page_name, local_deleted=not page.exists()) # Public methods: diff -r c48e57b1460a -r 0c3927a3b8a2 docs/CHANGES.aschremmer --- 
a/docs/CHANGES.aschremmer Sat Aug 19 01:01:03 2006 +0200 +++ b/docs/CHANGES.aschremmer Sat Aug 19 01:01:58 2006 +0200 @@ -114,7 +114,8 @@ Week 33: Started designing the solutions normalised name. Implemented preliminary mime type support, only transmission of the mime type and the new storage API is missing. Changed getDiff interface. Detect renamed pages (and choke :). Added infrastructure support for detecting deleted pages (not used in the merging logic yet). - Reworked i18n and logging support. Added GetText2 macro. + Reworked i18n and logging support. Added GetText2 macro. Getting syncing of pages + working that are just available on one side, still faulty though. 2006-07-18: the requested daily entry is missing here, see http://moinmoin.wikiwikiweb.de/GoogleSoc2006/BetterProgress 2006-07-19: the requested daily entry is missing here, see http://moinmoin.wikiwikiweb.de/GoogleSoc2006/BetterProgress # HG changeset patch # User Alexander Schremmer # Date 1155991518 -7200 # Node ID f6801f7e4d59e293422e570ddc5d5eeae4613ffc # Parent 506d04a9ceee89e014712d6ca0836001da0de533 Fixed standalone server that was broken by the prefix changes. diff -r 506d04a9ceee -r f6801f7e4d59 MoinMoin/action/SyncPages.py --- a/MoinMoin/action/SyncPages.py Sat Aug 19 01:02:56 2006 +0200 +++ b/MoinMoin/action/SyncPages.py Sat Aug 19 14:45:18 2006 +0200 @@ -350,6 +350,5 @@ class ActionClass(object): handle_page(rp) - def execute(pagename, request): ActionClass(pagename, request).render() diff -r 506d04a9ceee -r f6801f7e4d59 MoinMoin/server/standalone.py --- a/MoinMoin/server/standalone.py Sat Aug 19 01:02:56 2006 +0200 +++ b/MoinMoin/server/standalone.py Sat Aug 19 14:45:18 2006 +0200 @@ -293,8 +293,9 @@ class MoinRequestHandler(SimpleHTTPServe request is not available at this time. Should be fixed by having url_prefix_static in a server config. 
""" - if self.path.startswith('/moin_static160/'): # XXX - self.path = self.path[5:] + PREFIX = '/moin_static160/' + if self.path.startswith(PREFIX): # XXX + self.path = self.path[len(PREFIX)-1:] self.serve_static_file() elif self.path in ['/favicon.ico', '/robots.txt']: self.serve_static_file() # HG changeset patch # User Alexander Schremmer # Date 1155996412 -7200 # Node ID 4a8c4849d162dfc0da8daf318c396892aef6c7b9 # Parent f6801f7e4d59e293422e570ddc5d5eeae4613ffc Got syncing working with pages that are just in one wiki. diff -r f6801f7e4d59 -r 4a8c4849d162 MoinMoin/_tests/test_wikisync.py --- a/MoinMoin/_tests/test_wikisync.py Sat Aug 19 14:45:18 2006 +0200 +++ b/MoinMoin/_tests/test_wikisync.py Sat Aug 19 16:06:52 2006 +0200 @@ -29,6 +29,7 @@ class UnsafeSyncTestcase(TestCase): self.assert_(not tags.get_all_tags()) tags.add(remote_wiki="foo", remote_rev=1, current_rev=2, direction=BOTH, normalised_name="FrontPage") tags = TagStore(self.page) # reload + dummy = repr(tags.get_all_tags()) # this should not raise self.assert_(tags.get_all_tags()[0].remote_rev == 1) def tearDown(self): diff -r f6801f7e4d59 -r 4a8c4849d162 MoinMoin/action/SyncPages.py --- a/MoinMoin/action/SyncPages.py Sat Aug 19 14:45:18 2006 +0200 +++ b/MoinMoin/action/SyncPages.py Sat Aug 19 16:06:52 2006 +0200 @@ -225,7 +225,7 @@ class ActionClass(object): matching_tags = tags.fetch(iwid_full=remote.iwid_full, direction=match_direction) matching_tags.sort() - #print "------ TAGS: " + repr(matching_tags) + repr(tags.tags) + # print "------ TAGS: " + repr(matching_tags) + repr(tags.tags) XXX # some default values for non matching tags normalised_name = None @@ -296,7 +296,8 @@ class ActionClass(object): if rp.local_mime_type == MIMETYPE_MOIN: new_contents_unicode = new_contents.decode("utf-8") # here, the actual merge happens - # XXX print "Merging %r, %r and %r" % (old_contents.decode("utf-8"), new_contents, current_page.get_raw_body()) + # XXX + # print "Merging %r, %r and %r" % 
(old_contents.decode("utf-8"), new_contents, current_page.get_raw_body()) verynewtext = diff3.text_merge(old_contents.decode("utf-8"), new_contents_unicode, current_page.get_raw_body(), 2, *conflict_markers) verynewtext_raw = verynewtext.encode("utf-8") else: @@ -306,7 +307,7 @@ class ActionClass(object): verynewtext_raw = current_page.get_raw_body_str() diff = textdiff(new_contents, verynewtext_raw) - # XXX print "Diff against %r" % new_contents.encode("utf-8") + # print "Diff against %r" % new_contents.encode("utf-8") # XXX comment = u"Local Merge - %r" % (remote.get_interwiki_name() or remote.get_iwid()) diff -r f6801f7e4d59 -r 4a8c4849d162 MoinMoin/wikisync.py --- a/MoinMoin/wikisync.py Sat Aug 19 14:45:18 2006 +0200 +++ b/MoinMoin/wikisync.py Sat Aug 19 16:06:52 2006 +0200 @@ -279,14 +279,14 @@ class MoinLocalWiki(RemoteWiki): def createSyncPage(self, page_name): normalised_name = normalise_pagename(page_name, self.prefix) + if normalised_name is None: + return None if not self.request.user.may.write(normalised_name): - return None - if normalised_name is None: return None page = Page(self.request, page_name) revno = page.get_real_rev() if revno == 99999999: # I love sane in-band signalling - revno = None + return None return SyncPage(normalised_name, local_rev=revno, local_name=page_name, local_deleted=not page.exists()) # Public methods: @@ -345,7 +345,7 @@ class Tag(object): self.normalised_name = normalised_name def __repr__(self): - return u"" % (self.normalised_name, self.remote_wiki, self.remote_rev, self.current_rev) + return u"" % (getattr(self, "normalised_name", "UNDEF"), self.remote_wiki, self.remote_rev, self.current_rev) def __cmp__(self, other): if not isinstance(other, Tag): diff -r f6801f7e4d59 -r 4a8c4849d162 docs/CHANGES.aschremmer --- a/docs/CHANGES.aschremmer Sat Aug 19 14:45:18 2006 +0200 +++ b/docs/CHANGES.aschremmer Sat Aug 19 16:06:52 2006 +0200 @@ -15,6 +15,7 @@ Branch moin/1.6-sync-aschremmer * Test with prefixes * Search for XXX + * 
Delete iters? * Maybe refactor YYY into MoinLocalWiki * Remove amount of "very" in the code * Clean up trailing whitespace. # HG changeset patch # User Alexander Schremmer # Date 1155997099 -7200 # Node ID df88b475c1f0b8b62adebd5c36a17162226a24c3 # Parent 4a8c4849d162dfc0da8daf318c396892aef6c7b9 Reworked debugging. diff -r 4a8c4849d162 -r df88b475c1f0 MoinMoin/action/SyncPages.py --- a/MoinMoin/action/SyncPages.py Sat Aug 19 16:06:52 2006 +0200 +++ b/MoinMoin/action/SyncPages.py Sat Aug 19 16:18:19 2006 +0200 @@ -29,6 +29,8 @@ from MoinMoin.util.bdiff import decompre from MoinMoin.util.bdiff import decompress, patch, compress, textdiff from MoinMoin.util import diff3 + +debug = True # map sync directions directions_map = {"up": UP, "down": DOWN, "both": BOTH} @@ -128,7 +130,7 @@ class ActionClass(object): local = MoinLocalWiki(self.request, params["localPrefix"], params["pageList"]) try: - remote = MoinRemoteWiki(self.request, params["remoteWiki"], params["remotePrefix"], params["pageList"]) + remote = MoinRemoteWiki(self.request, params["remoteWiki"], params["remotePrefix"], params["pageList"], verbose=debug) except UnsupportedWikiException, (msg, ): raise ActionStatus(msg) @@ -215,7 +217,8 @@ class ActionClass(object): # let's do the simple case first, can be refactored later to match all cases # XXX handle deleted pages # XXX add locking, acquire read-lock on rp - # XXX print "Processing %r" % rp + if debug: + self.log_status(ActionClass.INFO, "Processing %r" % rp) local_pagename = rp.local_name current_page = PageEditor(self.request, local_pagename) # YYY direct access @@ -225,7 +228,8 @@ class ActionClass(object): matching_tags = tags.fetch(iwid_full=remote.iwid_full, direction=match_direction) matching_tags.sort() - # print "------ TAGS: " + repr(matching_tags) + repr(tags.tags) XXX + if debug: + self.log_status(ActionClass.INFO, "Tags: %r [[BR]] All: %r" % (matching_tags, tags.tags)) # some default values for non matching tags normalised_name = None @@ 
-296,8 +300,8 @@ class ActionClass(object): if rp.local_mime_type == MIMETYPE_MOIN: new_contents_unicode = new_contents.decode("utf-8") # here, the actual merge happens - # XXX - # print "Merging %r, %r and %r" % (old_contents.decode("utf-8"), new_contents, current_page.get_raw_body()) + if debug: + self.log_status(ActionClass.INFO, "Merging %r, %r and %r" % (old_contents.decode("utf-8"), new_contents_unicode, current_page.get_raw_body())) verynewtext = diff3.text_merge(old_contents.decode("utf-8"), new_contents_unicode, current_page.get_raw_body(), 2, *conflict_markers) verynewtext_raw = verynewtext.encode("utf-8") else: @@ -307,7 +311,8 @@ class ActionClass(object): verynewtext_raw = current_page.get_raw_body_str() diff = textdiff(new_contents, verynewtext_raw) - # print "Diff against %r" % new_contents.encode("utf-8") # XXX + if debug: + self.log_status(ActionClass.INFO, "Diff against %r" % new_contents.encode("utf-8")) comment = u"Local Merge - %r" % (remote.get_interwiki_name() or remote.get_iwid()) @@ -338,9 +343,10 @@ class ActionClass(object): # XXX release lock - on_both_sides = list(SyncPage.iter_local_and_remote(m_pages)) - remote_but_not_local = list(SyncPage.iter_remote_only(m_pages)) - local_but_not_remote = list(SyncPage.iter_local_only(m_pages)) + # XXX remove? + #on_both_sides = SyncPage.iter_local_and_remote(m_pages) + #remote_but_not_local = SyncPage.iter_remote_only(m_pages) + #local_but_not_remote = SyncPage.iter_local_only(m_pages) # some initial test code (XXX remove) #r_new_pages = u", ".join([unicode(x) for x in remote_but_not_local]) diff -r 4a8c4849d162 -r df88b475c1f0 MoinMoin/wikisync.py --- a/MoinMoin/wikisync.py Sat Aug 19 16:06:52 2006 +0200 +++ b/MoinMoin/wikisync.py Sat Aug 19 16:18:19 2006 +0200 @@ -177,10 +177,11 @@ class RemoteWiki(object): class MoinRemoteWiki(RemoteWiki): """ Used for MoinMoin wikis reachable via XMLRPC. 
""" - def __init__(self, request, interwikiname, prefix, pagelist): + def __init__(self, request, interwikiname, prefix, pagelist, verbose=False): self.request = request self.prefix = prefix self.pagelist = pagelist + self.verbose = verbose _ = self.request.getText wikitag, wikiurl, wikitail, wikitag_bad = wikiutil.resolve_wiki(self.request, '%s:""' % (interwikiname, )) @@ -212,7 +213,7 @@ class MoinRemoteWiki(RemoteWiki): self.iwid_full = packLine([remote_iwid, interwikiname]) def createConnection(self): - return xmlrpclib.ServerProxy(self.xmlrpc_url, allow_none=True, verbose=True) + return xmlrpclib.ServerProxy(self.xmlrpc_url, allow_none=True, verbose=self.verbose) # Public methods def get_diff(self, pagename, from_rev, to_rev, n_name=None): # HG changeset patch # User Alexander Schremmer # Date 1156014413 -7200 # Node ID b7352ce359006a1cf89a2a8dbdbb990fa47f8310 # Parent df88b475c1f0b8b62adebd5c36a17162226a24c3 Fix GetText2 for incorrect parameters. diff -r df88b475c1f0 -r b7352ce35900 MoinMoin/macro/GetText2.py --- a/MoinMoin/macro/GetText2.py Sat Aug 19 16:18:19 2006 +0200 +++ b/MoinMoin/macro/GetText2.py Sat Aug 19 21:06:53 2006 +0200 @@ -22,7 +22,10 @@ def execute(macro, args): """ sep = args[0] args = unpackLine(args[1:], sep) - translation = macro.request.getText(args[0], formatted=False) + if args: + translation = macro.request.getText(args[0], formatted=False) + else: + translation = u"" message = translation % tuple(args[1:]) return macro.formatter.text(message) # HG changeset patch # User Alexander Schremmer # Date 1156014455 -7200 # Node ID 16a854a172c91dfb2e1636854efb75e568c150ed # Parent b7352ce359006a1cf89a2a8dbdbb990fa47f8310 Implemented support for deleted pages, finished support for pages that are just in one wiki. 
diff -r b7352ce35900 -r 16a854a172c9 MoinMoin/action/SyncPages.py --- a/MoinMoin/action/SyncPages.py Sat Aug 19 21:06:53 2006 +0200 +++ b/MoinMoin/action/SyncPages.py Sat Aug 19 21:07:35 2006 +0200 @@ -32,6 +32,7 @@ from MoinMoin.util import diff3 debug = True + # map sync directions directions_map = {"up": UP, "down": DOWN, "both": BOTH} @@ -49,7 +50,7 @@ class ActionClass(object): self.status = [] request.flush() - def log_status(self, level, message, substitutions=(), raw_suffix=""): + def log_status(self, level, message="", substitutions=(), raw_suffix=""): """ Appends the message with a given importance level to the internal log. """ self.status.append((level, message, substitutions, raw_suffix)) @@ -61,7 +62,7 @@ class ActionClass(object): for line in self.status: macro_args = [line[1]] + list(line[2]) table.append(table_line % {"smiley": line[0][1], "message": - u"[[GetText2(|%s)]]" % (packLine(macro_args), ), + macro_args and u"[[GetText2(|%s)]]" % (packLine(macro_args), ), "raw_suffix": line[3]}) return "\n".join(table) @@ -155,14 +156,6 @@ class ActionClass(object): Now there are a few other cases left that have to be implemented: Wiki A | Wiki B | Remark ----------+----------+------------------------------ - exists | deleted | In this case, we do a normal merge if there - | | are no tags. If there were changes in - | | Wiki A, there is a merge with a conflict. - | | Otherwise (no changes past last merge), - | | the page is deleted in Wiki A. - | | This needs static info that could be - | | transferred with the pagelist. - ----------+----------+------------------------------- exists | non- | Now the wiki knows that the page was renamed. with tags | existant | There should be an RPC method that asks | | for the new name (which could be recorded @@ -181,7 +174,6 @@ class ActionClass(object): | matching | Hmm, how do we detect this | tags | case if the unmatching tags are only on the remote side? 
----------+----------+------------------------------- - exists | exists | already handled. """ _ = lambda x: x # we will translate it later @@ -214,22 +206,19 @@ class ActionClass(object): self.log_status(self.INFO, _("After filtering: %s pages"), (str(len(m_pages)), )) def handle_page(rp): - # let's do the simple case first, can be refactored later to match all cases - # XXX handle deleted pages # XXX add locking, acquire read-lock on rp if debug: - self.log_status(ActionClass.INFO, "Processing %r" % rp) + self.log_status(ActionClass.INFO, raw_suffix="Processing %r" % rp) local_pagename = rp.local_name current_page = PageEditor(self.request, local_pagename) # YYY direct access - current_rev = current_page.get_real_rev() tags = TagStore(current_page) matching_tags = tags.fetch(iwid_full=remote.iwid_full, direction=match_direction) matching_tags.sort() if debug: - self.log_status(ActionClass.INFO, "Tags: %r [[BR]] All: %r" % (matching_tags, tags.tags)) + self.log_status(ActionClass.INFO, raw_suffix="Tags: %r [[BR]] All: %r" % (matching_tags, tags.tags)) # some default values for non matching tags normalised_name = None @@ -239,18 +228,31 @@ class ActionClass(object): if matching_tags: newest_tag = matching_tags[-1] - + + local_change = newest_tag.current_rev != rp.local_rev + remote_change = newest_tag.remote_rev != rp.remote_rev + # handle some cases where we cannot continue for this page - if newest_tag.remote_rev == rp.remote_rev and (direction == DOWN or newest_tag.current_rev == current_rev): + if not remote_change and (direction == DOWN or not local_change): return # no changes done, next page - if rp.local_mime_type != MIMETYPE_MOIN and not (newest_tag.remote_rev == rp.remote_rev ^ newest_tag.current_rev == current_rev): + if rp.local_deleted and rp.remote_deleted: + return + if rp.remote_deleted and not local_change: + self.log_status(ActionClass.ERROR, "Nothing done, I should have deleted %r locally" % rp) + # XXX delete rp locally + return + if 
rp.local_deleted and not remote_change: + self.log_status(ActionClass.ERROR, "Nothing done, I should have deleted %r remotely" % rp) + # XXX delete rp remotely + return + if rp.local_mime_type != MIMETYPE_MOIN and not (local_change ^ remote_change): self.log_status(ActionClass.WARN, _("The item %s cannot be merged but was changed in both wikis. Please delete it in one of both wikis and try again."), (rp.name, )) return if rp.local_mime_type != rp.remote_mime_type: self.log_status(ActionClass.WARN, _("The item %s has different mime types in both wikis and cannot be merged. Please delete it in one of both wikis or unify the mime type, and try again."), (rp.name, )) return if newest_tag.normalised_name != rp.name: - self.log_status(ActionClass.WARN, _("The item %s was renamed locally. This is not implemented yet. Therefore all syncronisation history is lost for this page."), (rp.name, )) # XXX implement renames + self.log_status(ActionClass.WARN, _("The item %s was renamed locally. This is not implemented yet. Therefore the full syncronisation history is lost for this page."), (rp.name, )) # XXX implement renames else: normalised_name = newest_tag.normalised_name local_rev = newest_tag.current_rev @@ -266,14 +268,20 @@ class ActionClass(object): patch_base_contents = old_contents if remote_rev != rp.remote_rev: - diff_result = remote.get_diff(rp.remote_name, remote_rev, None, normalised_name) - if diff_result is None: - self.log_status(ActionClass.ERROR, _("The page %s could not be synced. The remote page was renamed. This is not supported yet. 
You may want to delete one of the pages to get it synced."), (rp.remote_name, )) - return - is_remote_conflict = diff_result["conflict"] - assert diff_result["diffversion"] == 1 - diff = diff_result["diff"] - current_remote_rev = diff_result["current"] + if rp.remote_deleted: # ignore remote changes + current_remote_rev = rp.remote_rev + is_remote_conflict = False + diff = None + self.log_status(ActionClass.WARN, _("The page %s was deleted remotely but changed locally."), (rp.name, )) + else: + diff_result = remote.get_diff(rp.remote_name, remote_rev, None, normalised_name) + if diff_result is None: + self.log_status(ActionClass.ERROR, _("The page %s could not be synced. The remote page was renamed. This is not supported yet. You may want to delete one of the pages to get it synced."), (rp.remote_name, )) + return + is_remote_conflict = diff_result["conflict"] + assert diff_result["diffversion"] == 1 + diff = diff_result["diff"] + current_remote_rev = diff_result["current"] else: current_remote_rev = remote_rev if rp.local_mime_type == MIMETYPE_MOIN: @@ -292,16 +300,18 @@ class ActionClass(object): if remote_rev is None and direction == BOTH: self.log_status(ActionClass.INFO, _("This is the first synchronisation between this page and the remote wiki.")) - if diff is None: + if rp.remote_deleted: + new_contents = "" + elif diff is None: new_contents = old_contents else: new_contents = patch(patch_base_contents, decompress(diff)) if rp.local_mime_type == MIMETYPE_MOIN: new_contents_unicode = new_contents.decode("utf-8") - # here, the actual merge happens + # here, the actual 3-way merge happens if debug: - self.log_status(ActionClass.INFO, "Merging %r, %r and %r" % (old_contents.decode("utf-8"), new_contents_unicode, current_page.get_raw_body())) + self.log_status(ActionClass.INFO, raw_suffix="Merging %r, %r and %r" % (old_contents.decode("utf-8"), new_contents_unicode, current_page.get_raw_body())) verynewtext = diff3.text_merge(old_contents.decode("utf-8"), 
new_contents_unicode, current_page.get_raw_body(), 2, *conflict_markers) verynewtext_raw = verynewtext.encode("utf-8") else: @@ -312,13 +322,13 @@ class ActionClass(object): diff = textdiff(new_contents, verynewtext_raw) if debug: - self.log_status(ActionClass.INFO, "Diff against %r" % new_contents.encode("utf-8")) + self.log_status(ActionClass.INFO, raw_suffix="Diff against %r" % new_contents) comment = u"Local Merge - %r" % (remote.get_interwiki_name() or remote.get_iwid()) # XXX upgrade to write lock try: - current_page.saveText(verynewtext, current_rev, comment=comment) # YYY direct access + current_page.saveText(verynewtext, rp.local_rev, comment=comment) # YYY direct access except PageEditor.Unchanged: pass except PageEditor.EditConflict: @@ -353,7 +363,7 @@ class ActionClass(object): #l_new_pages = u", ".join([unicode(x) for x in local_but_not_remote]) #raise ActionStatus("These pages are in the remote wiki, but not local: " + wikiutil.escape(r_new_pages) + "
These pages are in the local wiki, but not in the remote one: " + wikiutil.escape(l_new_pages)) - for rp in m_pages: #on_both_sides: + for rp in m_pages: handle_page(rp) diff -r b7352ce35900 -r 16a854a172c9 docs/CHANGES.aschremmer --- a/docs/CHANGES.aschremmer Sat Aug 19 21:06:53 2006 +0200 +++ b/docs/CHANGES.aschremmer Sat Aug 19 21:07:35 2006 +0200 @@ -116,7 +116,7 @@ Week 33: Started designing the solutions and the new storage API is missing. Changed getDiff interface. Detect renamed pages (and choke :). Added infrastructure support for detecting deleted pages (not used in the merging logic yet). Reworked i18n and logging support. Added GetText2 macro. Getting syncing of pages - working that are just available on one side, still faulty though. + working that are just available on one side. Working synchronisation of deleted pages. 2006-07-18: the requested daily entry is missing here, see http://moinmoin.wikiwikiweb.de/GoogleSoc2006/BetterProgress 2006-07-19: the requested daily entry is missing here, see http://moinmoin.wikiwikiweb.de/GoogleSoc2006/BetterProgress # HG changeset patch # User Alexander Schremmer # Date 1156014586 -7200 # Node ID 13955987ef54f8bd41e15adf0eeebbb6d0f00c42 # Parent 16a854a172c91dfb2e1636854efb75e568c150ed Updated my CHANGES file. diff -r 16a854a172c9 -r 13955987ef54 docs/CHANGES.aschremmer --- a/docs/CHANGES.aschremmer Sat Aug 19 21:07:35 2006 +0200 +++ b/docs/CHANGES.aschremmer Sat Aug 19 21:09:46 2006 +0200 @@ -2,12 +2,11 @@ Branch moin/1.6-sync-aschremmer =============================== Known main issues: - * How to handle renames/deletes? + * How to handle renames? * How will we store tags? (Metadata support would be handy) (currently done in Pickle files) ToDo: - * Implement all syncronisation cases (all 3 page sets). * Reduce round-trip times by caching queries and using MultiCall objects (coroutines?) * Check what needs to be documented on MoinMaster. 
@@ -26,6 +25,7 @@ Branch moin/1.6-sync-aschremmer * Do older tags of one wiki site have to be stored as well? Why don't we keep just one tag? * Put author names into the comment field, transmit mimetypes. + * Implement renamed pages. New Features: * XMLRPC method to return the Moin version # HG changeset patch # User Alexander Schremmer # Date 1156019673 -7200 # Node ID a4efeab7a10f41bc42b2f4df948bb00578685df5 # Parent 13955987ef54f8bd41e15adf0eeebbb6d0f00c42 Added security check to PageEditor.deletePage. diff -r 13955987ef54 -r a4efeab7a10f MoinMoin/PageEditor.py --- a/MoinMoin/PageEditor.py Sat Aug 19 21:09:46 2006 +0200 +++ b/MoinMoin/PageEditor.py Sat Aug 19 22:34:33 2006 +0200 @@ -509,6 +509,11 @@ Try a different name.""") % (newpagename """ _ = self._ success = True + if not (self.request.user.may.write(self.page_name) + and self.request.user.may.delete(self.page_name)): + msg = _('You are not allowed to delete this page!') + raise self.AccessDenied, msg + try: # First save a final backup copy of the current page # (recreating the page allows access to the backups again) diff -r 13955987ef54 -r a4efeab7a10f MoinMoin/action/DeletePage.py --- a/MoinMoin/action/DeletePage.py Sat Aug 19 21:09:46 2006 +0200 +++ b/MoinMoin/action/DeletePage.py Sat Aug 19 22:34:33 2006 +0200 @@ -25,6 +25,8 @@ class DeletePage(ActionBase): self.form_trigger_label = _('Delete') def is_allowed(self): + # this is not strictly necessary because the underlying storage code checks + # as well may = self.request.user.may return may.write(self.pagename) and may.delete(self.pagename) # HG changeset patch # User Alexander Schremmer # Date 1156020801 -7200 # Node ID 2cd1b40ea3e7b7721df61114e429a33304004c27 # Parent a4efeab7a10f41bc42b2f4df948bb00578685df5 Added support for deletion of local pages. 
diff -r a4efeab7a10f -r 2cd1b40ea3e7 MoinMoin/action/SyncPages.py --- a/MoinMoin/action/SyncPages.py Sat Aug 19 22:34:33 2006 +0200 +++ b/MoinMoin/action/SyncPages.py Sat Aug 19 22:53:21 2006 +0200 @@ -212,6 +212,7 @@ class ActionClass(object): local_pagename = rp.local_name current_page = PageEditor(self.request, local_pagename) # YYY direct access + comment = u"Local Merge - %r" % (remote.get_interwiki_name() or remote.get_iwid()) tags = TagStore(current_page) @@ -238,12 +239,18 @@ class ActionClass(object): if rp.local_deleted and rp.remote_deleted: return if rp.remote_deleted and not local_change: - self.log_status(ActionClass.ERROR, "Nothing done, I should have deleted %r locally" % rp) - # XXX delete rp locally + msg = local.delete_page(rp.local_name, comment) + if not msg: + self.log_status(ActionClass.INFO, _("Deleted page %s locally."), (rp.name, )) + else: + self.log_status(ActionClass.ERROR, _("Error while deleting page %s locally:"), (rp.name, ), msg) return if rp.local_deleted and not remote_change: - self.log_status(ActionClass.ERROR, "Nothing done, I should have deleted %r remotely" % rp) - # XXX delete rp remotely + if direction == DOWN: + return + self.log_status(ActionClass.ERROR, "Nothing done, I should have deleted %r remotely" % rp) # XXX add + msg = remote.delete_page(rp.remote_name) + self.log_status(ActionClass.INFO, _("Deleted page %s remotely."), (rp.name, )) return if rp.local_mime_type != MIMETYPE_MOIN and not (local_change ^ remote_change): self.log_status(ActionClass.WARN, _("The item %s cannot be merged but was changed in both wikis. 
Please delete it in one of both wikis and try again."), (rp.name, )) @@ -324,8 +331,6 @@ class ActionClass(object): if debug: self.log_status(ActionClass.INFO, raw_suffix="Diff against %r" % new_contents) - comment = u"Local Merge - %r" % (remote.get_interwiki_name() or remote.get_iwid()) - # XXX upgrade to write lock try: current_page.saveText(verynewtext, rp.local_rev, comment=comment) # YYY direct access diff -r a4efeab7a10f -r 2cd1b40ea3e7 MoinMoin/wikisync.py --- a/MoinMoin/wikisync.py Sat Aug 19 22:34:33 2006 +0200 +++ b/MoinMoin/wikisync.py Sat Aug 19 22:53:21 2006 +0200 @@ -18,6 +18,7 @@ from MoinMoin import wikiutil from MoinMoin import wikiutil from MoinMoin.util import lock from MoinMoin.Page import Page +from MoinMoin.PageEditor import PageEditor from MoinMoin.packages import unpackLine, packLine @@ -67,7 +68,7 @@ class SyncPage(object): assert remote_rev != 99999999 def __repr__(self): - return repr("" % unicode(self)) + return repr("" % unicode(self)) def __unicode__(self): return u"%s[%s|%s]<%r:%r>" % (self.name, self.local_name, self.remote_name, self.local_rev, self.remote_rev) @@ -174,6 +175,10 @@ class RemoteWiki(object): """ Returns a list of SyncPage instances. """ return NotImplemented + def delete_page(self, pagename): + """ Deletes the page called pagename. """ + return NotImplemented + class MoinRemoteWiki(RemoteWiki): """ Used for MoinMoin wikis reachable via XMLRPC. """ @@ -232,6 +237,9 @@ class MoinRemoteWiki(RemoteWiki): """ Merges the diff into the page on the remote side. 
""" result = self.connection.mergeDiff(pagename, xmlrpclib.Binary(diff), local_rev, delta_remote_rev, last_remote_rev, interwiki_name, n_name) return result + + def delete_page(self, pagename): + return # XXX not implemented yet # Methods implementing the RemoteWiki interface def get_interwiki_name(self): @@ -293,6 +301,14 @@ class MoinLocalWiki(RemoteWiki): # Public methods: # Methods implementing the RemoteWiki interface + def delete_page(self, page_name, comment): + page = PageEditor(self.request, page_name) + try: + page.deletePage(comment) + except PageEditor.AccessDenied, (msg, ): + return msg + return "" + def get_interwiki_name(self): return self.request.cfg.interwikiname diff -r a4efeab7a10f -r 2cd1b40ea3e7 docs/CHANGES.aschremmer --- a/docs/CHANGES.aschremmer Sat Aug 19 22:34:33 2006 +0200 +++ b/docs/CHANGES.aschremmer Sat Aug 19 22:53:21 2006 +0200 @@ -2,11 +2,13 @@ Branch moin/1.6-sync-aschremmer =============================== Known main issues: + * Do I need to tag delete operations? * How to handle renames? * How will we store tags? (Metadata support would be handy) (currently done in Pickle files) ToDo: + * Delete remote pages. * Reduce round-trip times by caching queries and using MultiCall objects (coroutines?) * Check what needs to be documented on MoinMaster. # HG changeset patch # User Alexander Schremmer # Date 1156021444 -7200 # Node ID bf59bd23289b493a85a0da269712fd1d1e84233e # Parent 2cd1b40ea3e7b7721df61114e429a33304004c27 Some cleanup, fixed packages test because the default test user is not allowed to delete pages. diff -r 2cd1b40ea3e7 -r bf59bd23289b MoinMoin/_tests/test_packages.py --- a/MoinMoin/_tests/test_packages.py Sat Aug 19 22:53:21 2006 +0200 +++ b/MoinMoin/_tests/test_packages.py Sat Aug 19 23:04:04 2006 +0200 @@ -21,13 +21,11 @@ class DebugPackage(Package, ScriptEngine self.script = script or u"""moinmoinpackage|1 print|foo ReplaceUnderlay|testdatei|TestSeite2 -DeletePage|TestSeite2|Test ... 
IgnoreExceptions|True DeletePage|TestSeiteDoesNotExist|Test ... IgnoreExceptions|False AddRevision|foofile|FooPage AddRevision|foofile|FooPage -DeletePage|FooPage|Test ... setthemename|foo #foobar installplugin|foo|local|parser|testy @@ -60,7 +58,6 @@ class UnsafePackageTestcase(TestCase): testseite2 = Page(self.request, 'TestSeite2') self.assertEqual(testseite2.getPageText(), "Hello world, I am the file testdatei") self.assert_(testseite2.isUnderlayPage()) - self.assert_(not Page(self.request, 'FooPage').exists()) class QuotingTestCase(TestCase): def testQuoting(self): diff -r 2cd1b40ea3e7 -r bf59bd23289b MoinMoin/action/SyncPages.py --- a/MoinMoin/action/SyncPages.py Sat Aug 19 22:53:21 2006 +0200 +++ b/MoinMoin/action/SyncPages.py Sat Aug 19 23:04:04 2006 +0200 @@ -172,7 +172,8 @@ class ActionClass(object): | | do a sync without considering tags with tags | with non | to ensure data integrity. | matching | Hmm, how do we detect this - | tags | case if the unmatching tags are only on the remote side? + | tags | case if the unmatching tags are only + | | on the remote side? ----------+----------+------------------------------- """ _ = lambda x: x # we will translate it later @@ -358,16 +359,6 @@ class ActionClass(object): # XXX release lock - # XXX remove? - #on_both_sides = SyncPage.iter_local_and_remote(m_pages) - #remote_but_not_local = SyncPage.iter_remote_only(m_pages) - #local_but_not_remote = SyncPage.iter_local_only(m_pages) - - # some initial test code (XXX remove) - #r_new_pages = u", ".join([unicode(x) for x in remote_but_not_local]) - #l_new_pages = u", ".join([unicode(x) for x in local_but_not_remote]) - #raise ActionStatus("These pages are in the remote wiki, but not local: " + wikiutil.escape(r_new_pages) + "
These pages are in the local wiki, but not in the remote one: " + wikiutil.escape(l_new_pages)) - for rp in m_pages: handle_page(rp) diff -r 2cd1b40ea3e7 -r bf59bd23289b MoinMoin/wikisync.py --- a/MoinMoin/wikisync.py Sat Aug 19 22:53:21 2006 +0200 +++ b/MoinMoin/wikisync.py Sat Aug 19 23:04:04 2006 +0200 @@ -134,26 +134,6 @@ class SyncPage(object): """ Is true if the page is in both wikis. """ return self.local_rev and self.remote_rev - def iter_local_only(cls, sp_list): - """ Iterates over all pages that are local only. """ - for x in sp_list: - if x.is_only_local(): - yield x - iter_local_only = classmethod(iter_local_only) - - def iter_remote_only(cls, sp_list): - """ Iterates over all pages that are remote only. """ - for x in sp_list: - if x.is_only_remote(): - yield x - iter_remote_only = classmethod(iter_remote_only) - - def iter_local_and_remote(cls, sp_list): - """ Iterates over all pages that are local and remote. """ - for x in sp_list: - if x.is_local_and_remote(): - yield x - iter_local_and_remote = classmethod(iter_local_and_remote) class RemoteWiki(object): """ This class should be the base for all implementations of remote wiki # HG changeset patch # User Alexander Schremmer # Date 1156021495 -7200 # Node ID 5ffa0d1ae026309be41d31d83f6579b625f6d94e # Parent bf59bd23289b493a85a0da269712fd1d1e84233e Removed newlocking test because it had never been working. We can revive the code later if necessary. diff -r bf59bd23289b -r 5ffa0d1ae026 MoinMoin/_tests/test_newlocking.py --- a/MoinMoin/_tests/test_newlocking.py Sat Aug 19 23:04:04 2006 +0200 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,74 +0,0 @@ -# -*- coding: utf-8 -*- -""" - MoinMoin - MoinMoin test for new style "locking" (== mostly avoid locking) - - The idea is to not have to lock files when we just want to read them. - When we never overwrite file content with new stuff, locking is not needed. 
- We can just write the new content into a new file (with tmpfname) and then - rename it to the original filename. Files that opened the original filename - before the rename will still read old content after the rename (until they - are closed). - - @copyright: 2006 by Thomas Waldmann (idea: Bastian Blank) - @license: GNU GPL, see COPYING for details. -""" - -import unittest, tempfile, os, shutil -from MoinMoin._tests import TestConfig, TestSkipped - -def rename(oldname, newname): - """ Multiplatform rename - - Move to MoinMoin.util.filesys when done. - - TODO: - Test/Fix win32 stuff. - - Check: MoveFileEx: If the new filename is None, it deletes the file (needs very recent pywin32 binding). - This is documented for the "on reboot" stuff, does this also work when not doing it on next reboot? - Maybe we can use this at another place. - - API doc: http://msdn.microsoft.com/library/default.asp?url=/library/en-us/fileio/fs/movefileex.asp - - Windows 95/98/ME do not implement MoveFileEx(). - Either have some other working code or document we drop support for less-than-NT. - Document pywin32 extension dependency. - - """ - # this nt specific code should be replaced by better stuff - if os.name == 'nt': - # uses mark hammond's pywin32 extension - # there seems to be also stuff in win32api.MoveFileEx and win32con.MOVEFILE_REPLACE_EXISTING - # what's the difference to them in win32file? - from win32file import MoveFileEx, MOVEFILE_REPLACE_EXISTING - ret = MoveFileEx(oldname, newname, MOVEFILE_REPLACE_EXISTING) - # If the function succeeds, the return value is nonzero. - # If the function fails, the return value is 0 (zero). To get extended error information, call GetLastError. 
- if ret == 0: - raise OSError # emulate os.rename behaviour - else: - os.rename(oldname, newname) # rename has no return value, but raises OSError in case of failure - - -class NewLockTests(unittest.TestCase): - - def setUp(self): - self.test_dir = tempfile.mkdtemp('', 'lock_') - - def tearDown(self): - shutil.rmtree(self.test_dir) - - def testNoLockingForReading(self): - """ new locking: NoLockingForReading tests if files still work when filename is target of a rename """ - fname = os.path.join(self.test_dir, 'readtest') - tmpfname = os.path.join(self.test_dir, '__readtest') - origdata = "precious content" - newdata = "new content" - f = file(fname, "w") ; f.write(origdata) ; f.close() - f = file(fname, "r") - ftmp = file(tmpfname, "w") ; ftmp.write(newdata) ; ftmp.close() - rename(tmpfname, fname) - read1data = f.read() ; f.close() # we should still get origdata here! - f = file(fname, "r") ; read2data = f.read() ; f.close() # we should newdata now. - self.failUnless(origdata == read1data and newdata == read2data, "got wrong data when reading") - # HG changeset patch # User Alexander Schremmer # Date 1156074496 -7200 # Node ID b8ee9f0cb30e90b607da5ec5008d208aef3c4210 # Parent 5ffa0d1ae026309be41d31d83f6579b625f6d94e Merged upstream changes to htmlmarkup.py (partly) diff -r 5ffa0d1ae026 -r b8ee9f0cb30e MoinMoin/support/htmlmarkup.py --- a/MoinMoin/support/htmlmarkup.py Sat Aug 19 23:04:55 2006 +0200 +++ b/MoinMoin/support/htmlmarkup.py Sun Aug 20 13:48:16 2006 +0200 @@ -1,7 +1,8 @@ # -*- coding: utf-8 -*- -# copied from trac.util.markup, revision 3446, merged on 2006-06-30 +# copied from trac.util.html, revision 3609, merged on 2006-08-20 # # Copyright (C) 2003-2006 Edgewall Software +# Copyright 2006 MoinMoin:AlexanderSchremmer # All rights reserved. 
# # This software is licensed as described in the file COPYING, which @@ -318,8 +319,11 @@ class Fragment(object): else: yield escape(child, quotes=False) + def __unicode__(self): + return u''.join(self.serialize()) + def __str__(self): - return Markup(''.join(self.serialize())) + return ''.join(self.serialize()) def __add__(self, other): return Fragment()(self, other) # HG changeset patch # User Alexander Schremmer # Date 1156081752 -7200 # Node ID 144c3281e88ff8a1b504b100a783595b8138e300 # Parent b8ee9f0cb30e90b607da5ec5008d208aef3c4210 Now the deletion of remote pages is working. diff -r b8ee9f0cb30e -r 144c3281e88f MoinMoin/action/SyncPages.py --- a/MoinMoin/action/SyncPages.py Sun Aug 20 13:48:16 2006 +0200 +++ b/MoinMoin/action/SyncPages.py Sun Aug 20 15:49:12 2006 +0200 @@ -48,7 +48,6 @@ class ActionClass(object): self.pagename = pagename self.page = PageEditor(request, pagename) self.status = [] - request.flush() def log_status(self, level, message="", substitutions=(), raw_suffix=""): """ Appends the message with a given importance level to the internal log. """ @@ -147,7 +146,7 @@ class ActionClass(object): self.page.saveText(self.page.get_raw_body() + "\n\n" + self.generate_log_table(), 0) # XXX release readlock on self.page - return self.page.send_page(self.request, msg=msg) + self.page.send_page(self.request, msg=msg) def sync(self, params, local, remote): """ This method does the syncronisation work. 
@@ -249,9 +248,11 @@ class ActionClass(object): if rp.local_deleted and not remote_change: if direction == DOWN: return - self.log_status(ActionClass.ERROR, "Nothing done, I should have deleted %r remotely" % rp) # XXX add - msg = remote.delete_page(rp.remote_name) - self.log_status(ActionClass.INFO, _("Deleted page %s remotely."), (rp.name, )) + msg = remote.delete_page(rp.remote_name, rp.remote_rev, local_full_iwid) + if not msg: + self.log_status(ActionClass.INFO, _("Deleted page %s remotely."), (rp.name, )) + else: + self.log_status(ActionClass.ERROR, _("Error while deleting page %s remotely:"), (rp.name, ), msg) return if rp.local_mime_type != MIMETYPE_MOIN and not (local_change ^ remote_change): self.log_status(ActionClass.WARN, _("The item %s cannot be merged but was changed in both wikis. Please delete it in one of both wikis and try again."), (rp.name, )) diff -r b8ee9f0cb30e -r 144c3281e88f MoinMoin/wikisync.py --- a/MoinMoin/wikisync.py Sun Aug 20 13:48:16 2006 +0200 +++ b/MoinMoin/wikisync.py Sun Aug 20 15:49:12 2006 +0200 @@ -155,10 +155,6 @@ class RemoteWiki(object): """ Returns a list of SyncPage instances. """ return NotImplemented - def delete_page(self, pagename): - """ Deletes the page called pagename. """ - return NotImplemented - class MoinRemoteWiki(RemoteWiki): """ Used for MoinMoin wikis reachable via XMLRPC. 
""" @@ -218,10 +214,17 @@ class MoinRemoteWiki(RemoteWiki): result = self.connection.mergeDiff(pagename, xmlrpclib.Binary(diff), local_rev, delta_remote_rev, last_remote_rev, interwiki_name, n_name) return result - def delete_page(self, pagename): - return # XXX not implemented yet + def delete_page(self, pagename, last_remote_rev, interwiki_name): + try: + result = self.connection.mergeDiff(pagename, None, None, None, last_remote_rev, interwiki_name, None) + except xmlrpclib.Fault, e: + if e.faultCode == "NOT_ALLOWED": + return e.faultString + raise + return "" # Methods implementing the RemoteWiki interface + def get_interwiki_name(self): return self.remote_interwikiname @@ -281,7 +284,7 @@ class MoinLocalWiki(RemoteWiki): # Public methods: # Methods implementing the RemoteWiki interface - def delete_page(self, page_name, comment): + def delete_page(self, pagename, comment): page = PageEditor(self.request, page_name) try: page.deletePage(comment) diff -r b8ee9f0cb30e -r 144c3281e88f MoinMoin/xmlrpc/__init__.py --- a/MoinMoin/xmlrpc/__init__.py Sun Aug 20 13:48:16 2006 +0200 +++ b/MoinMoin/xmlrpc/__init__.py Sun Aug 20 15:49:12 2006 +0200 @@ -698,11 +698,19 @@ class XmlRpcBase: @param pagename: The pagename that is currently dealt with. @param diff: The diff that can be applied to the version specified by delta_remote_rev. + If it is None, the page is deleted. @param local_rev: The revno of the page on the other wiki system, used for the tag. @param delta_remote_rev: The revno that the diff is taken against. @param last_remote_rev: The last revno of the page `pagename` that is known by the other wiki site. @param interwiki_name: Used to build the interwiki tag. @param normalised_name: The normalised pagename that is common to both wikis. + + @return Returns the current revision number after the merge was done. Or one of the following errors: + * "SUCCESS" - the page could be merged and tagged successfully. 
+ * "NOT_EXIST" - item does not exist and there was not any content supplied. + * "LASTREV_INVALID" - the page was changed and the revision got invalid + * "INTERNAL_ERROR" - there was an internal error + * "NOT_ALLOWED" - you are not allowed to do the merge operation on the page """ from MoinMoin.util.bdiff import decompress, patch from MoinMoin.wikisync import TagStore, BOTH @@ -727,6 +735,13 @@ class XmlRpcBase: if not currentpage.exists() and diff is None: return xmlrpclib.Fault("NOT_EXIST", "The page does not exist and no diff was supplied.") + + if diff is None: # delete the page + try: + currentpage.deletePage(comment) + except PageEditor.AccessDenied, (msg, ): + return xmlrpclib.Fault("NOT_ALLOWED", msg) + return currentpage.get_real_rev() # base revision used for the diff basepage = Page(self.request, pagename, rev=(delta_remote_rev or 0)) diff -r b8ee9f0cb30e -r 144c3281e88f docs/CHANGES.aschremmer --- a/docs/CHANGES.aschremmer Sun Aug 20 13:48:16 2006 +0200 +++ b/docs/CHANGES.aschremmer Sun Aug 20 15:49:12 2006 +0200 @@ -8,7 +8,6 @@ Branch moin/1.6-sync-aschremmer (currently done in Pickle files) ToDo: - * Delete remote pages. * Reduce round-trip times by caching queries and using MultiCall objects (coroutines?) * Check what needs to be documented on MoinMaster. @@ -16,9 +15,8 @@ Branch moin/1.6-sync-aschremmer * Test with prefixes * Search for XXX - * Delete iters? * Maybe refactor YYY into MoinLocalWiki - * Remove amount of "very" in the code + * Remove amount of "very" in the code, rename rp into sp * Clean up trailing whitespace. * Implement a cross-site authentication system, i.e. mainly an # HG changeset patch # User Alexander Schremmer # Date 1156082158 -7200 # Node ID 564cc2b53ea9f0286f08a815a00425513fc491fc # Parent 144c3281e88ff8a1b504b100a783595b8138e300 Reworded a docstring in SyncPages.sync, whitespace cleanup. 
diff -r 144c3281e88f -r 564cc2b53ea9 MoinMoin/action/SyncPages.py --- a/MoinMoin/action/SyncPages.py Sun Aug 20 15:49:12 2006 +0200 +++ b/MoinMoin/action/SyncPages.py Sun Aug 20 15:55:58 2006 +0200 @@ -147,12 +147,12 @@ class ActionClass(object): # XXX release readlock on self.page self.page.send_page(self.request, msg=msg) - + def sync(self, params, local, remote): """ This method does the syncronisation work. - Currently, it handles the case where the pages exist on both sides. - One of the major missing parts is rename handling. - Now there are a few other cases left that have to be implemented: + Currently, it handles nearly all cases. + The major missing part is rename handling. + There are a few other cases left that have to be implemented: Wiki A | Wiki B | Remark ----------+----------+------------------------------ exists | non- | Now the wiki knows that the page was renamed. @@ -366,3 +366,4 @@ class ActionClass(object): def execute(pagename, request): ActionClass(pagename, request).render() + diff -r 144c3281e88f -r 564cc2b53ea9 MoinMoin/wikisync.py --- a/MoinMoin/wikisync.py Sun Aug 20 15:49:12 2006 +0200 +++ b/MoinMoin/wikisync.py Sun Aug 20 15:55:58 2006 +0200 @@ -321,15 +321,15 @@ class MoinLocalWiki(RemoteWiki): return "" -# ------------------ Tags ------------------ +# ------------------ Tags ------------------ class Tag(object): """ This class is used to store information about merging state. """ - + def __init__(self, remote_wiki, remote_rev, current_rev, direction, normalised_name): """ Creates a new Tag. - + @param remote_wiki: The identifier of the remote wiki. @param remote_rev: The revision number on the remote end. @param current_rev: The related local revision. @@ -393,7 +393,7 @@ class PickleTagStore(AbstractTagStore): def __init__(self, page): """ Creates a new TagStore that uses pickle files. 
- + @param page: a Page object where the tags should be related to """ @@ -419,7 +419,7 @@ class PickleTagStore(AbstractTagStore): self.tags = [] else: datafile.close() - + def commit(self): """ Writes the memory contents to the data file. """ datafile = file(self.filename, "wb") @@ -471,3 +471,4 @@ class PickleTagStore(AbstractTagStore): # currently we just have one implementation, so we do not need # a factory method TagStore = PickleTagStore + diff -r 144c3281e88f -r 564cc2b53ea9 MoinMoin/xmlrpc/__init__.py --- a/MoinMoin/xmlrpc/__init__.py Sun Aug 20 15:49:12 2006 +0200 +++ b/MoinMoin/xmlrpc/__init__.py Sun Aug 20 15:55:58 2006 +0200 @@ -130,7 +130,7 @@ class XmlRpcBase: else: # wrap response in a singleton tuple response = (response,) - + # serialize it response = xmlrpclib.dumps(response, methodresponse=1) @@ -182,7 +182,7 @@ class XmlRpcBase: request. See http://www.xmlrpc.com/discuss/msgReader$1208 - + Copied from SimpleXMLRPCServer.py """ @@ -275,7 +275,7 @@ class XmlRpcBase: pagelist = self.request.rootpage.getPageList(filter=p_filter, exists=not options["include_deleted"], include_underlay=options["include_underlay"], return_objects=options["include_revno"]) - + if options['include_revno']: pages = [] for x in pagelist: @@ -289,7 +289,7 @@ class XmlRpcBase: def xmlrpc_getRecentChanges(self, date): """ Get RecentChanges since date - + @param date: date since when rc will be listed @rtype: list @return: a list of changed pages since date, which should be in @@ -695,7 +695,7 @@ class XmlRpcBase: def xmlrpc_mergeDiff(self, pagename, diff, local_rev, delta_remote_rev, last_remote_rev, interwiki_name, normalised_name): """ Merges a diff sent by the remote machine and returns the number of the new revision. Additionally, this method tags the new revision. - + @param pagename: The pagename that is currently dealt with. @param diff: The diff that can be applied to the version specified by delta_remote_rev. If it is None, the page is deleted. 
@@ -704,7 +704,7 @@ class XmlRpcBase: @param last_remote_rev: The last revno of the page `pagename` that is known by the other wiki site. @param interwiki_name: Used to build the interwiki tag. @param normalised_name: The normalised pagename that is common to both wikis. - + @return Returns the current revision number after the merge was done. Or one of the following errors: * "SUCCESS" - the page could be merged and tagged successfully. * "NOT_EXIST" - item does not exist and there was not any content supplied. @@ -720,7 +720,7 @@ class XmlRpcBase: pagename = self._instr(pagename) comment = u"Remote Merge - %r" % unpackLine(interwiki_name)[-1] - + # User may read page? if not self.request.user.may.read(pagename) or not self.request.user.may.write(pagename): return self.notAllowedFault() @@ -759,7 +759,7 @@ class XmlRpcBase: return LASTREV_INVALID current_rev = currentpage.get_real_rev() - + tags = TagStore(currentpage) tags.add(remote_wiki=interwiki_name, remote_rev=local_rev, current_rev=current_rev, direction=BOTH, normalised_name=normalised_name) # HG changeset patch # User Alexander Schremmer # Date 1156082313 -7200 # Node ID 70e69aad10286d29d3a0ae5120f8ea463478f65e # Parent 564cc2b53ea9f0286f08a815a00425513fc491fc Minor change in my CHANGES file. diff -r 564cc2b53ea9 -r 70e69aad1028 docs/CHANGES.aschremmer --- a/docs/CHANGES.aschremmer Sun Aug 20 15:55:58 2006 +0200 +++ b/docs/CHANGES.aschremmer Sun Aug 20 15:58:33 2006 +0200 @@ -17,7 +17,6 @@ Branch moin/1.6-sync-aschremmer * Search for XXX * Maybe refactor YYY into MoinLocalWiki * Remove amount of "very" in the code, rename rp into sp - * Clean up trailing whitespace. * Implement a cross-site authentication system, i.e. mainly an identity storage. (does OpenID make sense?) # HG changeset patch # User Alexander Schremmer # Date 1156083016 -7200 # Node ID 4d2dd952a513b7dbcc060d7f57cb3b0afc02cff3 # Parent baafe28d8037610538dbb56373f1687cccd5611f Refactoring: Renamed rp to sp in SyncPages. 
diff -r baafe28d8037 -r 4d2dd952a513 MoinMoin/action/SyncPages.py --- a/MoinMoin/action/SyncPages.py Sun Aug 20 15:58:49 2006 +0200 +++ b/MoinMoin/action/SyncPages.py Sun Aug 20 16:10:16 2006 +0200 @@ -205,12 +205,12 @@ class ActionClass(object): m_pages = SyncPage.filter(m_pages, params["pageMatch"].match) self.log_status(self.INFO, _("After filtering: %s pages"), (str(len(m_pages)), )) - def handle_page(rp): - # XXX add locking, acquire read-lock on rp + def handle_page(sp): + # XXX add locking, acquire read-lock on sp if debug: - self.log_status(ActionClass.INFO, raw_suffix="Processing %r" % rp) - - local_pagename = rp.local_name + self.log_status(ActionClass.INFO, raw_suffix="Processing %r" % sp) + + local_pagename = sp.local_name current_page = PageEditor(self.request, local_pagename) # YYY direct access comment = u"Local Merge - %r" % (remote.get_interwiki_name() or remote.get_iwid()) @@ -224,51 +224,51 @@ class ActionClass(object): # some default values for non matching tags normalised_name = None remote_rev = None - local_rev = rp.local_rev # merge against the newest version + local_rev = sp.local_rev # merge against the newest version old_contents = "" if matching_tags: newest_tag = matching_tags[-1] - local_change = newest_tag.current_rev != rp.local_rev - remote_change = newest_tag.remote_rev != rp.remote_rev + local_change = newest_tag.current_rev != sp.local_rev + remote_change = newest_tag.remote_rev != sp.remote_rev # handle some cases where we cannot continue for this page if not remote_change and (direction == DOWN or not local_change): return # no changes done, next page - if rp.local_deleted and rp.remote_deleted: - return - if rp.remote_deleted and not local_change: - msg = local.delete_page(rp.local_name, comment) + if sp.local_deleted and sp.remote_deleted: + return + if sp.remote_deleted and not local_change: + msg = local.delete_page(sp.local_name, comment) if not msg: - self.log_status(ActionClass.INFO, _("Deleted page %s locally."), 
(rp.name, )) + self.log_status(ActionClass.INFO, _("Deleted page %s locally."), (sp.name, )) else: - self.log_status(ActionClass.ERROR, _("Error while deleting page %s locally:"), (rp.name, ), msg) - return - if rp.local_deleted and not remote_change: + self.log_status(ActionClass.ERROR, _("Error while deleting page %s locally:"), (sp.name, ), msg) + return + if sp.local_deleted and not remote_change: if direction == DOWN: return - msg = remote.delete_page(rp.remote_name, rp.remote_rev, local_full_iwid) + msg = remote.delete_page(sp.remote_name, sp.remote_rev, local_full_iwid) if not msg: - self.log_status(ActionClass.INFO, _("Deleted page %s remotely."), (rp.name, )) + self.log_status(ActionClass.INFO, _("Deleted page %s remotely."), (sp.name, )) else: - self.log_status(ActionClass.ERROR, _("Error while deleting page %s remotely:"), (rp.name, ), msg) - return - if rp.local_mime_type != MIMETYPE_MOIN and not (local_change ^ remote_change): - self.log_status(ActionClass.WARN, _("The item %s cannot be merged but was changed in both wikis. Please delete it in one of both wikis and try again."), (rp.name, )) - return - if rp.local_mime_type != rp.remote_mime_type: - self.log_status(ActionClass.WARN, _("The item %s has different mime types in both wikis and cannot be merged. Please delete it in one of both wikis or unify the mime type, and try again."), (rp.name, )) - return - if newest_tag.normalised_name != rp.name: - self.log_status(ActionClass.WARN, _("The item %s was renamed locally. This is not implemented yet. Therefore the full syncronisation history is lost for this page."), (rp.name, )) # XXX implement renames + self.log_status(ActionClass.ERROR, _("Error while deleting page %s remotely:"), (sp.name, ), msg) + return + if sp.local_mime_type != MIMETYPE_MOIN and not (local_change ^ remote_change): + self.log_status(ActionClass.WARN, _("The item %s cannot be merged but was changed in both wikis. 
Please delete it in one of both wikis and try again."), (sp.name, )) + return + if sp.local_mime_type != sp.remote_mime_type: + self.log_status(ActionClass.WARN, _("The item %s has different mime types in both wikis and cannot be merged. Please delete it in one of both wikis or unify the mime type, and try again."), (sp.name, )) + return + if newest_tag.normalised_name != sp.name: + self.log_status(ActionClass.WARN, _("The item %s was renamed locally. This is not implemented yet. Therefore the full syncronisation history is lost for this page."), (sp.name, )) # XXX implement renames else: normalised_name = newest_tag.normalised_name local_rev = newest_tag.current_rev remote_rev = newest_tag.remote_rev old_contents = Page(self.request, local_pagename, rev=newest_tag.current_rev).get_raw_body_str() # YYY direct access - self.log_status(ActionClass.INFO, _("Synchronising page %s with remote page %s ..."), (local_pagename, rp.remote_name)) + self.log_status(ActionClass.INFO, _("Synchronising page %s with remote page %s ..."), (local_pagename, sp.remote_name)) if direction == DOWN: remote_rev = None # always fetch the full page, ignore remote conflict check @@ -276,16 +276,16 @@ class ActionClass(object): else: patch_base_contents = old_contents - if remote_rev != rp.remote_rev: - if rp.remote_deleted: # ignore remote changes - current_remote_rev = rp.remote_rev + if remote_rev != sp.remote_rev: + if sp.remote_deleted: # ignore remote changes + current_remote_rev = sp.remote_rev is_remote_conflict = False diff = None - self.log_status(ActionClass.WARN, _("The page %s was deleted remotely but changed locally."), (rp.name, )) - else: - diff_result = remote.get_diff(rp.remote_name, remote_rev, None, normalised_name) + self.log_status(ActionClass.WARN, _("The page %s was deleted remotely but changed locally."), (sp.name, )) + else: + diff_result = remote.get_diff(sp.remote_name, remote_rev, None, normalised_name) if diff_result is None: - self.log_status(ActionClass.ERROR, 
_("The page %s could not be synced. The remote page was renamed. This is not supported yet. You may want to delete one of the pages to get it synced."), (rp.remote_name, )) + self.log_status(ActionClass.ERROR, _("The page %s could not be synced. The remote page was renamed. This is not supported yet. You may want to delete one of the pages to get it synced."), (sp.remote_name, )) return is_remote_conflict = diff_result["conflict"] assert diff_result["diffversion"] == 1 @@ -293,7 +293,7 @@ class ActionClass(object): current_remote_rev = diff_result["current"] else: current_remote_rev = remote_rev - if rp.local_mime_type == MIMETYPE_MOIN: + if sp.local_mime_type == MIMETYPE_MOIN: is_remote_conflict = wikiutil.containsConflictMarker(old_contents.decode("utf-8")) else: is_remote_conflict = NotImplemented @@ -301,7 +301,7 @@ class ActionClass(object): # do not sync if the conflict is remote and local, or if it is local # and the page has never been syncronised - if (rp.local_mime_type == MIMETYPE_MOIN and wikiutil.containsConflictMarker(current_page.get_raw_body()) + if (sp.local_mime_type == MIMETYPE_MOIN and wikiutil.containsConflictMarker(current_page.get_raw_body()) and (remote_rev is None or is_remote_conflict)): self.log_status(ActionClass.WARN, _("Skipped page %s because of a locally or remotely unresolved conflict."), (local_pagename, )) return @@ -309,14 +309,14 @@ class ActionClass(object): if remote_rev is None and direction == BOTH: self.log_status(ActionClass.INFO, _("This is the first synchronisation between this page and the remote wiki.")) - if rp.remote_deleted: + if sp.remote_deleted: new_contents = "" elif diff is None: new_contents = old_contents else: new_contents = patch(patch_base_contents, decompress(diff)) - if rp.local_mime_type == MIMETYPE_MOIN: + if sp.local_mime_type == MIMETYPE_MOIN: new_contents_unicode = new_contents.decode("utf-8") # here, the actual 3-way merge happens if debug: @@ -335,7 +335,7 @@ class ActionClass(object): # XXX 
upgrade to write lock try: - current_page.saveText(verynewtext, rp.local_rev, comment=comment) # YYY direct access + current_page.saveText(verynewtext, sp.local_rev, comment=comment) # YYY direct access except PageEditor.Unchanged: pass except PageEditor.EditConflict: @@ -345,23 +345,23 @@ class ActionClass(object): if direction == BOTH: try: - very_current_remote_rev = remote.merge_diff(rp.remote_name, compress(diff), new_local_rev, current_remote_rev, current_remote_rev, local_full_iwid, rp.name) + very_current_remote_rev = remote.merge_diff(sp.remote_name, compress(diff), new_local_rev, current_remote_rev, current_remote_rev, local_full_iwid, sp.name) except Exception, e: raise # XXX rollback locally and do not tag locally else: very_current_remote_rev = current_remote_rev - tags.add(remote_wiki=remote_full_iwid, remote_rev=very_current_remote_rev, current_rev=new_local_rev, direction=direction, normalised_name=rp.name) - - if rp.local_mime_type != MIMETYPE_MOIN or not wikiutil.containsConflictMarker(verynewtext): + tags.add(remote_wiki=remote_full_iwid, remote_rev=very_current_remote_rev, current_rev=new_local_rev, direction=direction, normalised_name=sp.name) + + if sp.local_mime_type != MIMETYPE_MOIN or not wikiutil.containsConflictMarker(verynewtext): self.log_status(ActionClass.INFO, _("Page successfully merged.")) else: self.log_status(ActionClass.WARN, _("Page merged with conflicts.")) # XXX release lock - for rp in m_pages: - handle_page(rp) + for sp in m_pages: + handle_page(sp) def execute(pagename, request): diff -r baafe28d8037 -r 4d2dd952a513 docs/CHANGES.aschremmer --- a/docs/CHANGES.aschremmer Sun Aug 20 15:58:49 2006 +0200 +++ b/docs/CHANGES.aschremmer Sun Aug 20 16:10:16 2006 +0200 @@ -16,7 +16,7 @@ Branch moin/1.6-sync-aschremmer * Test with prefixes * Search for XXX * Maybe refactor YYY into MoinLocalWiki - * Remove amount of "very" in the code, rename rp into sp + * Remove amount of "very" in the code * Implement a cross-site authentication 
system, i.e. mainly an identity storage. (does OpenID make sense?) # HG changeset patch # User Alexander Schremmer # Date 1156083551 -7200 # Node ID 650a2dc16b19ac639aae47dfd14c3ef82deb49dc # Parent 4d2dd952a513b7dbcc060d7f57cb3b0afc02cff3 Renamed some variables in SyncPages, added more YYY markers. diff -r 4d2dd952a513 -r 650a2dc16b19 MoinMoin/action/SyncPages.py --- a/MoinMoin/action/SyncPages.py Sun Aug 20 16:10:16 2006 +0200 +++ b/MoinMoin/action/SyncPages.py Sun Aug 20 16:19:11 2006 +0200 @@ -301,7 +301,7 @@ class ActionClass(object): # do not sync if the conflict is remote and local, or if it is local # and the page has never been syncronised - if (sp.local_mime_type == MIMETYPE_MOIN and wikiutil.containsConflictMarker(current_page.get_raw_body()) + if (sp.local_mime_type == MIMETYPE_MOIN and wikiutil.containsConflictMarker(current_page.get_raw_body()) # YYY direct access and (remote_rev is None or is_remote_conflict)): self.log_status(ActionClass.WARN, _("Skipped page %s because of a locally or remotely unresolved conflict."), (local_pagename, )) return @@ -310,38 +310,38 @@ class ActionClass(object): self.log_status(ActionClass.INFO, _("This is the first synchronisation between this page and the remote wiki.")) if sp.remote_deleted: - new_contents = "" + remote_contents = "" elif diff is None: - new_contents = old_contents - else: - new_contents = patch(patch_base_contents, decompress(diff)) + remote_contents = old_contents + else: + remote_contents = patch(patch_base_contents, decompress(diff)) if sp.local_mime_type == MIMETYPE_MOIN: - new_contents_unicode = new_contents.decode("utf-8") + remote_contents_unicode = remote_contents.decode("utf-8") # here, the actual 3-way merge happens if debug: - self.log_status(ActionClass.INFO, raw_suffix="Merging %r, %r and %r" % (old_contents.decode("utf-8"), new_contents_unicode, current_page.get_raw_body())) - verynewtext = diff3.text_merge(old_contents.decode("utf-8"), new_contents_unicode, 
current_page.get_raw_body(), 2, *conflict_markers) - verynewtext_raw = verynewtext.encode("utf-8") + self.log_status(ActionClass.INFO, raw_suffix="Merging %r, %r and %r" % (old_contents.decode("utf-8"), remote_contents_unicode, current_page.get_raw_body())) + merged_text = diff3.text_merge(old_contents.decode("utf-8"), remote_contents_unicode, current_page.get_raw_body(), 2, *conflict_markers) # YYY direct access + merged_text_raw = merged_text.encode("utf-8") else: if diff is None: - verynewtext_raw = new_contents - else: - verynewtext_raw = current_page.get_raw_body_str() - - diff = textdiff(new_contents, verynewtext_raw) + merged_text_raw = remote_contents + else: + merged_text_raw = current_page.get_raw_body_str() # YYY direct access + + diff = textdiff(remote_contents, merged_text_raw) if debug: - self.log_status(ActionClass.INFO, raw_suffix="Diff against %r" % new_contents) + self.log_status(ActionClass.INFO, raw_suffix="Diff against %r" % remote_contents) # XXX upgrade to write lock try: - current_page.saveText(verynewtext, sp.local_rev, comment=comment) # YYY direct access + current_page.saveText(merged_text, sp.local_rev, comment=comment) # YYY direct access except PageEditor.Unchanged: pass except PageEditor.EditConflict: assert False, "You stumbled on a problem with the current storage system - I cannot lock pages" - new_local_rev = current_page.get_real_rev() + new_local_rev = current_page.get_real_rev() # YYY direct access if direction == BOTH: try: @@ -353,7 +353,7 @@ class ActionClass(object): tags.add(remote_wiki=remote_full_iwid, remote_rev=very_current_remote_rev, current_rev=new_local_rev, direction=direction, normalised_name=sp.name) - if sp.local_mime_type != MIMETYPE_MOIN or not wikiutil.containsConflictMarker(verynewtext): + if sp.local_mime_type != MIMETYPE_MOIN or not wikiutil.containsConflictMarker(merged_text): self.log_status(ActionClass.INFO, _("Page successfully merged.")) else: self.log_status(ActionClass.WARN, _("Page merged with 
conflicts.")) diff -r 4d2dd952a513 -r 650a2dc16b19 docs/CHANGES.aschremmer --- a/docs/CHANGES.aschremmer Sun Aug 20 16:10:16 2006 +0200 +++ b/docs/CHANGES.aschremmer Sun Aug 20 16:19:11 2006 +0200 @@ -15,11 +15,10 @@ Branch moin/1.6-sync-aschremmer * Test with prefixes * Search for XXX - * Maybe refactor YYY into MoinLocalWiki - * Remove amount of "very" in the code * Implement a cross-site authentication system, i.e. mainly an identity storage. (does OpenID make sense?) + * Maybe refactor YYY into MoinLocalWiki * Add page locking, i.e. use the one in the new storage layer. * Do older tags of one wiki site have to be stored as well? Why don't we keep just one tag? # HG changeset patch # User Alexander Schremmer # Date 1156087657 -7200 # Node ID aebf6f7ab57e21a3787f4b3f1777ef16a0991489 # Parent 650a2dc16b19ac639aae47dfd14c3ef82deb49dc Rescue the umlauts. diff -r 650a2dc16b19 -r aebf6f7ab57e MoinMoin/Page.py --- a/MoinMoin/Page.py Sun Aug 20 16:19:11 2006 +0200 +++ b/MoinMoin/Page.py Sun Aug 20 17:27:37 2006 +0200 @@ -2,7 +2,7 @@ """ MoinMoin - Page class - @copyright: 2000-2004 by Jrgen Hermann + @copyright: 2000-2004 by Jürgen Hermann @license: GNU GPL, see COPYING for details. """ diff -r 650a2dc16b19 -r aebf6f7ab57e MoinMoin/PageEditor.py --- a/MoinMoin/PageEditor.py Sun Aug 20 16:19:11 2006 +0200 +++ b/MoinMoin/PageEditor.py Sun Aug 20 17:27:37 2006 +0200 @@ -2,7 +2,7 @@ """ MoinMoin - PageEditor class - @copyright: 2000-2004 by Jrgen Hermann + @copyright: 2000-2004 by Jürgen Hermann @license: GNU GPL, see COPYING for details. """ diff -r 650a2dc16b19 -r aebf6f7ab57e MoinMoin/xmlrpc/__init__.py --- a/MoinMoin/xmlrpc/__init__.py Sun Aug 20 16:19:11 2006 +0200 +++ b/MoinMoin/xmlrpc/__init__.py Sun Aug 20 17:27:37 2006 +0200 @@ -5,7 +5,7 @@ If you want to use wikirpc function "putPage", read the comments in xmlrpc_putPage or it won't work! 
- Parts of this code are based on Jrgen Hermann's wikirpc.py, + Parts of this code are based on Jürgen Hermann's wikirpc.py, Les Orchard's "xmlrpc.cgi" and further work by Gustavo Niemeyer. See http://www.ecyrd.com/JSPWiki/Wiki.jsp?page=WikiRPCInterface # HG changeset patch # User Alexander Schremmer # Date 1156087676 -7200 # Node ID dfed953d5e9f4f20dabe5e9beedbbf993ad182cf # Parent aebf6f7ab57e21a3787f4b3f1777ef16a0991489 Added showtags action, minor changes in SyncPages. diff -r aebf6f7ab57e -r dfed953d5e9f MoinMoin/action/SyncPages.py --- a/MoinMoin/action/SyncPages.py Sun Aug 20 17:27:37 2006 +0200 +++ b/MoinMoin/action/SyncPages.py Sun Aug 20 17:27:56 2006 +0200 @@ -61,7 +61,7 @@ class ActionClass(object): for line in self.status: macro_args = [line[1]] + list(line[2]) table.append(table_line % {"smiley": line[0][1], "message": - macro_args and u"[[GetText2(|%s)]]" % (packLine(macro_args), ), + line[1] and (u"[[GetText2(|%s)]]" % (packLine(macro_args), )), "raw_suffix": line[3]}) return "\n".join(table) @@ -141,7 +141,7 @@ class ActionClass(object): except ActionStatus, e: msg = u'

%s

\n' % (e.args[0], ) else: - msg = u"%s" % (_("Syncronisation finished."), ) + msg = u"%s" % (_("Syncronisation finished. Look below for the status messages."), ) self.page.saveText(self.page.get_raw_body() + "\n\n" + self.generate_log_table(), 0) # XXX release readlock on self.page diff -r aebf6f7ab57e -r dfed953d5e9f docs/CHANGES.aschremmer --- a/docs/CHANGES.aschremmer Sun Aug 20 17:27:37 2006 +0200 +++ b/docs/CHANGES.aschremmer Sun Aug 20 17:27:56 2006 +0200 @@ -4,20 +4,18 @@ Branch moin/1.6-sync-aschremmer Known main issues: * Do I need to tag delete operations? * How to handle renames? - * How will we store tags? (Metadata support would be handy) + * How should we store tags? (Metadata support would be handy) (currently done in Pickle files) ToDo: + * Implement rollback * Reduce round-trip times by caching queries and using MultiCall objects (coroutines?) * Check what needs to be documented on MoinMaster. - - * Show tags in an action=info view? - * Test with prefixes * Search for XXX - * Implement a cross-site authentication system, i.e. mainly an identity storage. (does OpenID make sense?) + * Maybe refactor YYY into MoinLocalWiki * Add page locking, i.e. use the one in the new storage layer. * Do older tags of one wiki site have to be stored as well? Why don't we @@ -43,6 +41,7 @@ Branch moin/1.6-sync-aschremmer * XMLRPC functions may return Fault instances * diff3 algorithm extended, a new mode should reduce the conflicts * GetText2 macro + * showtags action Bugfixes (only stuff that is buggy in moin/1.6 main branch): * Conflict resolution fixes. (merged into main) diff -r aebf6f7ab57e -r dfed953d5e9f MoinMoin/action/showtags.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/MoinMoin/action/showtags.py Sun Aug 20 17:27:56 2006 +0200 @@ -0,0 +1,23 @@ +# -*- coding: iso-8859-1 -*- +""" + MoinMoin - "showtags" action + + This action shows all sync tags related to a specific page. 
+ + @copyright: 2006 by MoinMoin:AlexanderSchremmer + @license: GNU GPL, see COPYING for details. +""" + +from MoinMoin import config +from MoinMoin.Page import Page +from MoinMoin.wikisync import TagStore + +def execute(pagename, request): + mimetype = "text/plain" + + request.emit_http_headers(["Content-Type: %s; charset=%s" % (mimetype, config.charset)]) + + page = Page(request, pagename) + tags = TagStore(page) + request.write(tags.dump()) + # HG changeset patch # User Alexander Schremmer # Date 1156091846 -7200 # Node ID 8bf6e48c6236e1670aeb3e8da99d47ace5555d00 # Parent 4a71075e6d39688f719ac994c2d96ba9f2e7e165 Added rollback and exception logging to SyncPages. diff -r 4a71075e6d39 -r 8bf6e48c6236 MoinMoin/action/SyncPages.py --- a/MoinMoin/action/SyncPages.py Sun Aug 20 17:29:00 2006 +0200 +++ b/MoinMoin/action/SyncPages.py Sun Aug 20 18:37:26 2006 +0200 @@ -24,7 +24,7 @@ from MoinMoin.PageEditor import PageEdit from MoinMoin.PageEditor import PageEditor, conflict_markers from MoinMoin.Page import Page from MoinMoin.wikidicts import Dict, Group -from MoinMoin.wikisync import TagStore, UnsupportedWikiException, SyncPage +from MoinMoin.wikisync import TagStore, UnsupportedWikiException, SyncPage, NotAllowedException from MoinMoin.wikisync import MoinLocalWiki, MoinRemoteWiki, UP, DOWN, BOTH, MIMETYPE_MOIN from MoinMoin.util.bdiff import decompress, patch, compress, textdiff from MoinMoin.util import diff3 @@ -136,15 +136,20 @@ class ActionClass(object): if not remote.valid: raise ActionStatus(_("The ''remoteWiki'' is unknown.")) - - self.sync(params, local, remote) except ActionStatus, e: msg = u'

%s

\n' % (e.args[0], ) - else: - msg = u"%s" % (_("Syncronisation finished. Look below for the status messages."), ) - - self.page.saveText(self.page.get_raw_body() + "\n\n" + self.generate_log_table(), 0) - # XXX release readlock on self.page + + try: + try: + self.sync(params, local, remote) + except Exception, e: + self.log_status(self.ERROR, _("A severe error occured:"), raw_suffix=repr(e)) + raise + else: + msg = u"%s" % (_("Syncronisation finished. Look below for the status messages."), ) + finally: + self.page.saveText(self.page.get_raw_body() + "\n\n" + self.generate_log_table(), 0) + # XXX release readlock on self.page self.page.send_page(self.request, msg=msg) @@ -335,21 +340,37 @@ class ActionClass(object): # XXX upgrade to write lock try: + local_change_done = True current_page.saveText(merged_text, sp.local_rev, comment=comment) # YYY direct access except PageEditor.Unchanged: - pass + local_change_done = False except PageEditor.EditConflict: + local_change_done = False assert False, "You stumbled on a problem with the current storage system - I cannot lock pages" new_local_rev = current_page.get_real_rev() # YYY direct access - if direction == BOTH: - try: - very_current_remote_rev = remote.merge_diff(sp.remote_name, compress(diff), new_local_rev, current_remote_rev, current_remote_rev, local_full_iwid, sp.name) - except Exception, e: - raise # XXX rollback locally and do not tag locally - else: - very_current_remote_rev = current_remote_rev + def rollback_local_change(): # YYY direct local access + rev = new_local_rev - 1 + revstr = '%08d' % rev + oldpg = Page(self.request, sp.local_name, rev=rev) + pg = PageEditor(self.request, sp.local_name) + savemsg = pg.saveText(oldpg.get_raw_body(), 0, comment=u"Wikisync rollback", extra=revstr, action="SAVE/REVERT") + + try: + if direction == BOTH: + try: + very_current_remote_rev = remote.merge_diff(sp.remote_name, compress(diff), new_local_rev, current_remote_rev, current_remote_rev, local_full_iwid, sp.name) + 
except NotAllowedException: + self.log_status(ActionClass.ERROR, _("Page could not be merged because you are not allowed to modify the page in the remote wiki.")) + return + else: + very_current_remote_rev = current_remote_rev + + local_change_done = False # changes are committed remotely, all is fine + finally: + if local_change_done: + rollback_local_change() tags.add(remote_wiki=remote_full_iwid, remote_rev=very_current_remote_rev, current_rev=new_local_rev, direction=direction, normalised_name=sp.name) diff -r 4a71075e6d39 -r 8bf6e48c6236 MoinMoin/wikisync.py --- a/MoinMoin/wikisync.py Sun Aug 20 17:29:00 2006 +0200 +++ b/MoinMoin/wikisync.py Sun Aug 20 18:37:26 2006 +0200 @@ -41,6 +41,9 @@ def normalise_pagename(page_name, prefix class UnsupportedWikiException(Exception): pass + + +class NotAllowedException(Exception): pass class SyncPage(object): @@ -211,7 +214,12 @@ class MoinRemoteWiki(RemoteWiki): def merge_diff(self, pagename, diff, local_rev, delta_remote_rev, last_remote_rev, interwiki_name, n_name): """ Merges the diff into the page on the remote side. """ - result = self.connection.mergeDiff(pagename, xmlrpclib.Binary(diff), local_rev, delta_remote_rev, last_remote_rev, interwiki_name, n_name) + try: + result = self.connection.mergeDiff(pagename, xmlrpclib.Binary(diff), local_rev, delta_remote_rev, last_remote_rev, interwiki_name, n_name) + except xmlrpclib.Fault, e: + if e.faultCode == "NOT_ALLOWED": + raise NotAllowedException + raise return result def delete_page(self, pagename, last_remote_rev, interwiki_name): diff -r 4a71075e6d39 -r 8bf6e48c6236 MoinMoin/xmlrpc/__init__.py --- a/MoinMoin/xmlrpc/__init__.py Sun Aug 20 17:29:00 2006 +0200 +++ b/MoinMoin/xmlrpc/__init__.py Sun Aug 20 18:37:26 2006 +0200 @@ -723,7 +723,7 @@ class XmlRpcBase: # User may read page? 
if not self.request.user.may.read(pagename) or not self.request.user.may.write(pagename): - return self.notAllowedFault() + return xmlrpclib.Fault("NOT_ALLOWED", "You are not allowed to write to this page.") # XXX add locking here! diff -r 4a71075e6d39 -r 8bf6e48c6236 docs/CHANGES.aschremmer --- a/docs/CHANGES.aschremmer Sun Aug 20 17:29:00 2006 +0200 +++ b/docs/CHANGES.aschremmer Sun Aug 20 18:37:26 2006 +0200 @@ -8,7 +8,6 @@ Branch moin/1.6-sync-aschremmer (currently done in Pickle files) ToDo: - * Implement rollback * Reduce round-trip times by caching queries and using MultiCall objects (coroutines?) * Check what needs to be documented on MoinMaster. * Test with prefixes @@ -114,6 +113,7 @@ Week 33: Started designing the solutions Added infrastructure support for detecting deleted pages (not used in the merging logic yet). Reworked i18n and logging support. Added GetText2 macro. Getting syncing of pages working that are just available on one side. Working synchronisation of deleted pages. + Implemented rollback in case of remote problems and exception logging. 2006-07-18: the requested daily entry is missing here, see http://moinmoin.wikiwikiweb.de/GoogleSoc2006/BetterProgress 2006-07-19: the requested daily entry is missing here, see http://moinmoin.wikiwikiweb.de/GoogleSoc2006/BetterProgress # HG changeset patch # User Alexander Schremmer # Date 1156110366 -7200 # Node ID 0559fa036536d4ce15b8470e90f9b41a12e09819 # Parent 8bf6e48c6236e1670aeb3e8da99d47ace5555d00 Added authentication support, fixed a few messages (added pagename). 
diff -r 8bf6e48c6236 -r 0559fa036536 MoinMoin/action/SyncPages.py --- a/MoinMoin/action/SyncPages.py Sun Aug 20 18:37:26 2006 +0200 +++ b/MoinMoin/action/SyncPages.py Sun Aug 20 23:46:06 2006 +0200 @@ -76,6 +76,8 @@ class ActionClass(object): "pageList": None, "groupList": None, "direction": "foo", # is defaulted below + "user": None, # this should be refactored into a password agent + "password": None, # or OpenID like solution (XXX) } options.update(Dict(self.request, self.pagename).get_dict()) @@ -117,7 +119,6 @@ class ActionClass(object): params = self.fix_params(self.parse_page()) - # XXX aquire readlock on self.page try: if params["direction"] == UP: raise ActionStatus(_("The only supported directions are BOTH and DOWN.")) @@ -130,8 +131,8 @@ class ActionClass(object): local = MoinLocalWiki(self.request, params["localPrefix"], params["pageList"]) try: - remote = MoinRemoteWiki(self.request, params["remoteWiki"], params["remotePrefix"], params["pageList"], verbose=debug) - except UnsupportedWikiException, (msg, ): + remote = MoinRemoteWiki(self.request, params["remoteWiki"], params["remotePrefix"], params["pageList"], params["user"], params["password"], verbose=debug) + except (UnsupportedWikiException, NotAllowedException), (msg, ): raise ActionStatus(msg) if not remote.valid: @@ -148,6 +149,7 @@ class ActionClass(object): else: msg = u"%s" % (_("Syncronisation finished. 
Look below for the status messages."), ) finally: + # XXX aquire readlock on self.page self.page.saveText(self.page.get_raw_body() + "\n\n" + self.generate_log_table(), 0) # XXX release readlock on self.page @@ -312,7 +314,7 @@ class ActionClass(object): return if remote_rev is None and direction == BOTH: - self.log_status(ActionClass.INFO, _("This is the first synchronisation between this page and the remote wiki.")) + self.log_status(ActionClass.INFO, _("This is the first synchronisation between the local and the remote wiki for the page %s."), (sp.name, )) if sp.remote_deleted: remote_contents = "" @@ -362,7 +364,7 @@ class ActionClass(object): try: very_current_remote_rev = remote.merge_diff(sp.remote_name, compress(diff), new_local_rev, current_remote_rev, current_remote_rev, local_full_iwid, sp.name) except NotAllowedException: - self.log_status(ActionClass.ERROR, _("Page could not be merged because you are not allowed to modify the page in the remote wiki.")) + self.log_status(ActionClass.ERROR, _("The page %s could not be merged because you are not allowed to modify the page in the remote wiki."), (sp.name, )) return else: very_current_remote_rev = current_remote_rev @@ -375,9 +377,9 @@ class ActionClass(object): tags.add(remote_wiki=remote_full_iwid, remote_rev=very_current_remote_rev, current_rev=new_local_rev, direction=direction, normalised_name=sp.name) if sp.local_mime_type != MIMETYPE_MOIN or not wikiutil.containsConflictMarker(merged_text): - self.log_status(ActionClass.INFO, _("Page successfully merged.")) - else: - self.log_status(ActionClass.WARN, _("Page merged with conflicts.")) + self.log_status(ActionClass.INFO, _("Page %s successfully merged."), (sp.name, )) + else: + self.log_status(ActionClass.WARN, _("Page %s merged with conflicts."), (sp.name, )) # XXX release lock diff -r 8bf6e48c6236 -r 0559fa036536 MoinMoin/wikisync.py --- a/MoinMoin/wikisync.py Sun Aug 20 18:37:26 2006 +0200 +++ b/MoinMoin/wikisync.py Sun Aug 20 23:46:06 2006 +0200 
@@ -20,6 +20,7 @@ from MoinMoin.Page import Page from MoinMoin.Page import Page from MoinMoin.PageEditor import PageEditor from MoinMoin.packages import unpackLine, packLine +from MoinMoin.support.multicall import MultiCall MIMETYPE_MOIN = "text/wiki" @@ -161,7 +162,7 @@ class RemoteWiki(object): class MoinRemoteWiki(RemoteWiki): """ Used for MoinMoin wikis reachable via XMLRPC. """ - def __init__(self, request, interwikiname, prefix, pagelist, verbose=False): + def __init__(self, request, interwikiname, prefix, pagelist, user, password, verbose=False): self.request = request self.prefix = prefix self.pagelist = pagelist @@ -181,7 +182,16 @@ class MoinRemoteWiki(RemoteWiki): try: iw_list = self.connection.interwikiName() except xmlrpclib.Fault, e: - raise UnsupportedWikiException(_("The remote version of MoinMoin is too old, the version 1.6 is required at least.")) + raise UnsupportedWikiException(_("The remote version of MoinMoin is too old, version 1.6 is required at least.")) + + if user and password: + token = self.connection.getAuthToken(user, password) + if token: + self.token = token + else: + raise NotAllowedException(_("Invalid username or password.")) + else: + self.token = None self.remote_interwikiname = remote_interwikiname = iw_list[0] self.remote_iwid = remote_iwid = iw_list[1] @@ -247,7 +257,13 @@ class MoinRemoteWiki(RemoteWiki): "prefix": self.prefix, "pagelist": self.pagelist, "mark_deleted": True} - pages = self.connection.getAllPagesEx(options) + if self.token: + m = MultiCall(self.connection) + m.applyAuthToken(self.token) + m.getAllPagesEx(options) + tokres, pages = m() + else: + pages = self.connection.getAllPagesEx(options) rpages = [] for name, revno in pages: normalised_name = normalise_pagename(name, self.prefix) # HG changeset patch # User Alexander Schremmer # Date 1156110587 -7200 # Node ID fd3ceaad98d1c358d471419920e76b553c0dc7d2 # Parent 0559fa036536d4ce15b8470e90f9b41a12e09819 Fixed the new remote page scenario. 
diff -r 0559fa036536 -r fd3ceaad98d1 MoinMoin/action/SyncPages.py --- a/MoinMoin/action/SyncPages.py Sun Aug 20 23:46:06 2006 +0200 +++ b/MoinMoin/action/SyncPages.py Sun Aug 20 23:49:47 2006 +0200 @@ -343,7 +343,7 @@ class ActionClass(object): # XXX upgrade to write lock try: local_change_done = True - current_page.saveText(merged_text, sp.local_rev, comment=comment) # YYY direct access + current_page.saveText(merged_text, sp.local_rev or 0, comment=comment) # YYY direct access except PageEditor.Unchanged: local_change_done = False except PageEditor.EditConflict: # HG changeset patch # User Alexander Schremmer # Date 1156159244 -7200 # Node ID 46812497775e57fa208cb14b3d971bee939eeb9f # Parent fd3ceaad98d1c358d471419920e76b553c0dc7d2 Refactored my CHANGES file. diff -r fd3ceaad98d1 -r 46812497775e docs/CHANGES.aschremmer --- a/docs/CHANGES.aschremmer Sun Aug 20 23:49:47 2006 +0200 +++ b/docs/CHANGES.aschremmer Mon Aug 21 13:20:44 2006 +0200 @@ -7,7 +7,7 @@ Branch moin/1.6-sync-aschremmer * How should we store tags? (Metadata support would be handy) (currently done in Pickle files) - ToDo: + ToDo: (this should not go into CHANGES) * Reduce round-trip times by caching queries and using MultiCall objects (coroutines?) * Check what needs to be documented on MoinMaster. * Test with prefixes @@ -23,45 +23,49 @@ Branch moin/1.6-sync-aschremmer * Implement renamed pages. New Features: - * XMLRPC method to return the Moin version - * XMLRPC multicall support * Conflict icon in RecentChanges - * XMLRPC Authentication System * Binary Diffing - * XMLRPC method to get binary diffs - * XMLRPC method to merge remote changes locally - * XMLRPC method to get the interwiki name - * TagStore/PickleTagStore class - * XMLRPC method to get the pagelist in a special way (revnos, - no system pages etc.) - * IWID support - i.e. 
every instance has a unique ID - * InterWiki page editable in the wiki, modification detection based on mtimes - * SyncPages action - * XMLRPC functions may return Fault instances - * diff3 algorithm extended, a new mode should reduce the conflicts - * GetText2 macro - * showtags action + * New XMLRPC methods (see doc strings for details): + * getMoinVersion + * system.multicall -- multicall support + * Authentication System: getAuthToken/applyAuthToken + * getDiff -- method to get binary diffs + * mergeDiff -- method to merge local changes remotely + * interwikiName -- method to get the IWID and the interwiki moniker + * getAllPagesEx -- method to get the pagelist in a special way (revnos, + no system pages etc.) + * IWID support - i.e. every wiki instance has a unique ID + * The list of InterWiki sites is editable in the wiki (page InterWikiMap), + it is getting reloaded every minute + * Syncronisation of wikis using the SyncPages action + * GetText2 macro that allows to translate messages that contain data + * showtags action that lists all tags related to a page Bugfixes (only stuff that is buggy in moin/1.6 main branch): - * Conflict resolution fixes. (merged into main) - * Python 2.5 compatibility fixes in the Page caching logic (merged) - * sre pickle issues in the wikidicts code (merged) + * Conflict resolution fixes. + * Python 2.5 compatibility fixes in the Page caching logic + * sre pickle issues in the wikidicts code * cgitb can hide particular names, this avoids information leaks if the user files cannot be parsed for example * Fixed User.__repr__ - it is insane to put the ID in there - * Worked around the FastCGI problem on Lighttpd: empty lines in the error log, thanks to Jay Soffian + * Worked around the FastCGI problem on Lighttpd: empty lines in the error + log, thanks to Jay Soffian * Fixed the MetaDict code to use locks. * Fixed bug in request.py that avoided showing a traceback if there was a fault after the first headers were sent. 
* Fixed severe race conditions in the meta dict and the sync tags code. * Mute the tempnam warning in the caching module. + * diff3 algorithm extended, a new mode should reduce the conflicts Other Changes: * Refactored conflict resolution and XMLRPC code. - * Enhanced API at some points. Developer notes: - * ... + * There is a new Page method called Page.get_raw_body_str that returns + the encoded page body. This is useful if you just deal with byte data + (e.g. while generating binary diffs). + * The TagStore/PickleTagStore system is used to store the syncronisation tags. + * XMLRPC functions may return Fault instances Do not forget to check the related wiki page: http://moinmoin.wikiwikiweb.de/WikiSyncronisation @@ -110,10 +114,11 @@ Week 33: Started designing the solutions Week 33: Started designing the solutions for the other sync cases. Store and transmit the normalised name. Implemented preliminary mime type support, only transmission of the mime type and the new storage API is missing. Changed getDiff interface. Detect renamed pages (and choke :). - Added infrastructure support for detecting deleted pages (not used in the merging logic yet). + Added infrastructure support for detecting deleted pages. Reworked i18n and logging support. Added GetText2 macro. Getting syncing of pages working that are just available on one side. Working synchronisation of deleted pages. Implemented rollback in case of remote problems and exception logging. + Documented this system on MoinMaster. 
2006-07-18: the requested daily entry is missing here, see http://moinmoin.wikiwikiweb.de/GoogleSoc2006/BetterProgress 2006-07-19: the requested daily entry is missing here, see http://moinmoin.wikiwikiweb.de/GoogleSoc2006/BetterProgress # HG changeset patch # User Alexander Schremmer # Date 1156162927 -7200 # Node ID 7a37f25b539952b99457e7b9115a902644b78f90 # Parent 09aa290e4a8894c67be33381d3418eb48cf88279 Let the standalone server open the logfile unbuffered, thanks to Carsten Grohmann. diff -r 09aa290e4a88 -r 7a37f25b5399 MoinMoin/server/standalone.py --- a/MoinMoin/server/standalone.py Mon Aug 21 13:22:05 2006 +0200 +++ b/MoinMoin/server/standalone.py Mon Aug 21 14:22:07 2006 +0200 @@ -575,7 +575,7 @@ def run(configClass): MoinRequestHandler.serve_moin, config.memoryProfile) if config.logPath: - sys.stderr = file(config.logPath, 'at') + sys.stderr = file(config.logPath, 'at', 0) registerSignalHandlers(quit) httpd = makeServer(config) diff -r 09aa290e4a88 -r 7a37f25b5399 docs/CHANGES.aschremmer --- a/docs/CHANGES.aschremmer Mon Aug 21 13:22:05 2006 +0200 +++ b/docs/CHANGES.aschremmer Mon Aug 21 14:22:07 2006 +0200 @@ -56,6 +56,8 @@ Branch moin/1.6-sync-aschremmer * Fixed severe race conditions in the meta dict and the sync tags code. * Mute the tempnam warning in the caching module. * diff3 algorithm extended, a new mode should reduce the conflicts + * Standalone opens it logfile unbuffered from now on, thanks to + Carsten Grohmann Other Changes: * Refactored conflict resolution and XMLRPC code. # HG changeset patch # User Alexander Schremmer # Date 1156163013 -7200 # Node ID bcabe48fc2f6454be9a88f0941360f68a37ee19e # Parent 7a37f25b539952b99457e7b9115a902644b78f90 Fixed bug in diff3.py that silently truncated pages if there was content added on one side at the end. 
diff -r 7a37f25b5399 -r bcabe48fc2f6 MoinMoin/action/SyncPages.py --- a/MoinMoin/action/SyncPages.py Mon Aug 21 14:22:07 2006 +0200 +++ b/MoinMoin/action/SyncPages.py Mon Aug 21 14:23:33 2006 +0200 @@ -326,9 +326,9 @@ class ActionClass(object): if sp.local_mime_type == MIMETYPE_MOIN: remote_contents_unicode = remote_contents.decode("utf-8") # here, the actual 3-way merge happens + merged_text = diff3.text_merge(old_contents.decode("utf-8"), remote_contents_unicode, current_page.get_raw_body(), 1, *conflict_markers) # YYY direct access if debug: - self.log_status(ActionClass.INFO, raw_suffix="Merging %r, %r and %r" % (old_contents.decode("utf-8"), remote_contents_unicode, current_page.get_raw_body())) - merged_text = diff3.text_merge(old_contents.decode("utf-8"), remote_contents_unicode, current_page.get_raw_body(), 2, *conflict_markers) # YYY direct access + self.log_status(ActionClass.INFO, raw_suffix="Merging %r, %r and %r into %r" % (old_contents.decode("utf-8"), remote_contents_unicode, current_page.get_raw_body(), merged_text)) merged_text_raw = merged_text.encode("utf-8") else: if diff is None: diff -r 7a37f25b5399 -r bcabe48fc2f6 MoinMoin/util/diff3.py --- a/MoinMoin/util/diff3.py Mon Aug 21 14:22:07 2006 +0200 +++ b/MoinMoin/util/diff3.py Mon Aug 21 14:23:33 2006 +0200 @@ -98,7 +98,7 @@ def merge(old, other, new, allow_conflic result.extend(new[new_nr:]) # other added lines elif old_nr == old_len and new_nr == new_len: - result.extend(other[other_nr]) + result.extend(other[other_nr:]) # new deleted lines elif (new_nr == new_len and (old_len - old_nr == other_len - other_nr) and match(old, other, old_nr, other_nr, old_len-old_nr) == old_len - old_nr): diff -r 7a37f25b5399 -r bcabe48fc2f6 docs/CHANGES.aschremmer --- a/docs/CHANGES.aschremmer Mon Aug 21 14:22:07 2006 +0200 +++ b/docs/CHANGES.aschremmer Mon Aug 21 14:23:33 2006 +0200 @@ -55,7 +55,9 @@ Branch moin/1.6-sync-aschremmer after the first headers were sent. 
* Fixed severe race conditions in the meta dict and the sync tags code. * Mute the tempnam warning in the caching module. - * diff3 algorithm extended, a new mode should reduce the conflicts + * diff3 algorithm extended, a new mode should reduce the conflicts, + fixed a bug that silently truncated pages if there was content added + on one side at the end * Standalone opens it logfile unbuffered from now on, thanks to Carsten Grohmann # HG changeset patch # User Alexander Schremmer # Date 1156163333 -7200 # Node ID 6b83abc85e83f56191c005a3f31474e0833ea458 # Parent bcabe48fc2f6454be9a88f0941360f68a37ee19e Cleaned my CHANGES file a bit. diff -r bcabe48fc2f6 -r 6b83abc85e83 docs/CHANGES.aschremmer --- a/docs/CHANGES.aschremmer Mon Aug 21 14:23:33 2006 +0200 +++ b/docs/CHANGES.aschremmer Mon Aug 21 14:28:53 2006 +0200 @@ -10,16 +10,15 @@ Branch moin/1.6-sync-aschremmer ToDo: (this should not go into CHANGES) * Reduce round-trip times by caching queries and using MultiCall objects (coroutines?) * Check what needs to be documented on MoinMaster. - * Test with prefixes - * Search for XXX * Implement a cross-site authentication system, i.e. mainly an identity storage. (does OpenID make sense?) * Maybe refactor YYY into MoinLocalWiki - * Add page locking, i.e. use the one in the new storage layer. + * Add page locking, i.e. use the one in the new storage layer (see XXX). * Do older tags of one wiki site have to be stored as well? Why don't we keep just one tag? - * Put author names into the comment field, transmit mimetypes. + * Put author names into the comment field + * Transmit mimetypes (see XXX). Needs new storage system. * Implement renamed pages. New Features: # HG changeset patch # User Alexander Schremmer # Date 1156169352 -7200 # Node ID e6e054247a58b74937607d2b705a7758e3fb2aed # Parent 6b83abc85e83f56191c005a3f31474e0833ea458 Disabled debug mode, output GetText as well if GetText2 is not necessary, refactored my CHANGES file. 
diff -r 6b83abc85e83 -r e6e054247a58 MoinMoin/action/SyncPages.py --- a/MoinMoin/action/SyncPages.py Mon Aug 21 14:28:53 2006 +0200 +++ b/MoinMoin/action/SyncPages.py Mon Aug 21 16:09:12 2006 +0200 @@ -30,7 +30,7 @@ from MoinMoin.util import diff3 from MoinMoin.util import diff3 -debug = True +debug = False # map sync directions @@ -59,10 +59,17 @@ class ActionClass(object): table = [] for line in self.status: - macro_args = [line[1]] + list(line[2]) - table.append(table_line % {"smiley": line[0][1], "message": - line[1] and (u"[[GetText2(|%s)]]" % (packLine(macro_args), )), - "raw_suffix": line[3]}) + if line[1]: + if line[2]: + macro_args = [line[1]] + list(line[2]) + message = u"[[GetText2(|%s)]]" % (packLine(macro_args), ) + else: + message = u"[[GetText(%s)]]" % (line[1], ) + else: + message = u"" + table.append(table_line % {"smiley": line[0][1], + "message": message, + "raw_suffix": line[3]}) return "\n".join(table) diff -r 6b83abc85e83 -r e6e054247a58 docs/CHANGES.aschremmer --- a/docs/CHANGES.aschremmer Mon Aug 21 14:28:53 2006 +0200 +++ b/docs/CHANGES.aschremmer Mon Aug 21 16:09:12 2006 +0200 @@ -8,11 +8,11 @@ Branch moin/1.6-sync-aschremmer (currently done in Pickle files) ToDo: (this should not go into CHANGES) + * Add authentication to the MoinRemoteWiki methods that are left. * Reduce round-trip times by caching queries and using MultiCall objects (coroutines?) - * Check what needs to be documented on MoinMaster. + * Implement a cross-site authentication system, i.e. mainly an identity storage. (does OpenID make sense?) - * Maybe refactor YYY into MoinLocalWiki * Add page locking, i.e. use the one in the new storage layer (see XXX). * Do older tags of one wiki site have to be stored as well? 
Why don't we @@ -23,7 +23,6 @@ Branch moin/1.6-sync-aschremmer New Features: * Conflict icon in RecentChanges - * Binary Diffing * New XMLRPC methods (see doc strings for details): * getMoinVersion * system.multicall -- multicall support @@ -62,6 +61,7 @@ Branch moin/1.6-sync-aschremmer Other Changes: * Refactored conflict resolution and XMLRPC code. + * Added a module for binary diffs Developer notes: * There is a new Page method called Page.get_raw_body_str that returns @@ -69,20 +69,21 @@ Branch moin/1.6-sync-aschremmer (e.g. while generating binary diffs). * The TagStore/PickleTagStore system is used to store the syncronisation tags. * XMLRPC functions may return Fault instances + * Moin got multicall support, including a module that makes it usable on the + client-side without requiring Python 2.4 Do not forget to check the related wiki page: http://moinmoin.wikiwikiweb.de/WikiSyncronisation Diary ===== -Week 21: Basic Infrastructur setup (repos), +Week 21: Basic infrastructure setup (repos), initial talks to the mentor, started writing the design document, helped other students to get started -Week 22: Tax forms, Fulfilled transcription request, +Week 22: Tax forms, fulfilled transcription request, written conflict icon support, refactored conflict handling, - changed conflict icon, - Added xmlrpc multicall support into the server and - backported the client code from python 2.4 + changed conflict icon, added xmlrpc multicall support into the server + and backported the client code from Python 2.4 Week 23: Debian-Sprint in Extremadura, Spain. Initial thoughts about Mercurial as a base for syncronisation. (See wiki) Week 24: Evaluation of OpenID as a base for authentication, written local testing scripts @@ -116,7 +117,8 @@ Week 32: Continued work on the merge log the prefix and the pageList on the remote side. Finished the direction==DOWN mode. Week 33: Started designing the solutions for the other sync cases. Store and transmit the normalised name. 
Implemented preliminary mime type support, only transmission of the mime type - and the new storage API is missing. Changed getDiff interface. Detect renamed pages (and choke :). + and the new storage API is missing. Changed getDiff interface. Detect renamed pages (and inform + the user about the missing support for them). Added infrastructure support for detecting deleted pages. Reworked i18n and logging support. Added GetText2 macro. Getting syncing of pages working that are just available on one side. Working synchronisation of deleted pages.