# -*- coding: iso-8859-1 -*-
"""
    MoinMoin - Wiki Synchronisation

    @copyright: 2006 MoinMoin:AlexanderSchremmer
    @license: GNU GPL, see COPYING for details.
"""

import os
import socket
import xmlrpclib

from MoinMoin import wikiutil
from MoinMoin.util import lock, pickle
from MoinMoin.Page import Page
from MoinMoin.PageEditor import PageEditor
from MoinMoin.packages import unpackLine, packLine


MIMETYPE_MOIN = "text/wiki"
# sync directions
UP, DOWN, BOTH = range(3)


def normalise_pagename(page_name, prefix):
    """ Checks if the page_name starts with the prefix.
        Returns None if it does not, otherwise strips the prefix.
    """
    if prefix:
        if not page_name.startswith(prefix):
            return None
        else:
            return page_name[len(prefix):]
    else:
        return page_name
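
# Illustrative behaviour of normalise_pagename (the prefix u"Sync/" is made up,
# not taken from a real configuration):
#   normalise_pagename(u"Sync/SomePage", u"Sync/") -> u"SomePage"
#   normalise_pagename(u"OtherPage", u"Sync/")     -> None (not below the prefix)
#   normalise_pagename(u"OtherPage", u"")          -> u"OtherPage" (empty prefix)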


class UnsupportedWikiException(Exception):
    pass


class NotAllowedException(Exception):
    pass


class SyncPage(object):
    """ This class represents a page in one or two wiki(s). """
    def __init__(self, name, local_rev=None, remote_rev=None, local_name=None, remote_name=None,
                 local_deleted=False, remote_deleted=False):
        """ Creates a SyncPage instance.
            @param name: The canonical name of the page, without prefixes.
            @param local_rev: The revision of the page in the local wiki.
            @param remote_rev: The revision of the page in the remote wiki.
            @param local_name: The page name of the page in the local wiki.
            @param remote_name: The page name of the page in the remote wiki.
            @param local_deleted: True if the local page is marked as deleted.
            @param remote_deleted: True if the remote page is marked as deleted.
        """
        self.name = name
        self.local_rev = local_rev
        self.remote_rev = remote_rev
        self.local_name = local_name
        self.remote_name = remote_name
        assert local_rev or remote_rev
        assert local_name or remote_name
        self.local_deleted = local_deleted
        self.remote_deleted = remote_deleted
        self.local_mime_type = MIMETYPE_MOIN   # XXX no usable storage API yet
        self.remote_mime_type = MIMETYPE_MOIN
        assert remote_rev != 99999999

    def __repr__(self):
        return repr("<Sync Page %r>" % unicode(self))

    def __unicode__(self):
        return u"%s[%s|%s]<%r:%r>" % (self.name, self.local_name, self.remote_name, self.local_rev, self.remote_rev)

    def __lt__(self, other):
        return self.name < other.name

    def __hash__(self):
        """ Ensures that the hash value of this page only depends on the canonical name. """
        return hash(self.name)

    def __eq__(self, other):
        if not isinstance(other, SyncPage):
            return False
        return self.name == other.name

    def add_missing_pagename(self, local, remote):
        """ Fills in the concrete page name (local or remote) if it is not
            known yet, deriving it from the other name and the wiki prefixes.
        """
        if self.local_name is None:
            n_name = normalise_pagename(self.remote_name, remote.prefix)
            assert n_name is not None
            self.local_name = (local.prefix or "") + n_name
        elif self.remote_name is None:
            n_name = normalise_pagename(self.local_name, local.prefix)
            assert n_name is not None
            self.remote_name = (remote.prefix or "") + n_name

        return self # makes using list comps easier
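
    # Illustrative example (local/remote stand for MoinLocalWiki/MoinRemoteWiki
    # instances; the prefixes are made up): a SyncPage with remote_name
    # u"Sync/SomePage", remote.prefix u"Sync/" and local.prefix u"" gets its
    # local_name filled in as u"SomePage".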

    def filter(cls, sp_list, func):
        """ Returns all pages in sp_list that let func return True
            for the canonical page name.
        """
        return [x for x in sp_list if func(x.name)]
    filter = classmethod(filter)

    def merge(cls, local_list, remote_list):
        """ Merges two lists of SyncPages into one, migrating attributes like the names. """
        # map page names to SyncPage objects :-)
        d = dict(zip(local_list, local_list))
        for sp in remote_list:
            if sp in d:
                d[sp].remote_rev = sp.remote_rev
                d[sp].remote_name = sp.remote_name
                d[sp].remote_deleted = sp.remote_deleted
                # XXX merge mime type here
            else:
                d[sp] = sp
        return d.keys()
    merge = classmethod(merge)
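
    # Illustrative merge behaviour (SyncPage hashes and compares by the
    # canonical name, so entries for the same page collapse into one object):
    #   local:  SyncPage(u"Foo", local_rev=3, local_name=u"Foo")
    #   remote: SyncPage(u"Foo", remote_rev=5, remote_name=u"Sync/Foo")
    #   merge([local], [remote]) -> one SyncPage u"Foo" carrying local_rev=3
    #   and remote_rev=5; pages present in only one list pass through unchanged.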

    def is_only_local(self):
        """ True if the page exists only in the local wiki. """
        return not self.remote_rev

    def is_only_remote(self):
        """ True if the page exists only in the remote wiki. """
        return not self.local_rev

    def is_local_and_remote(self):
        """ True if the page exists in both wikis. """
        return self.local_rev and self.remote_rev


class RemoteWiki(object):
    """ Base class for all remote wiki implementations. """

    def __repr__(self):
        """ Returns a representation of the instance for debugging purposes. """
        return NotImplemented

    def get_interwiki_name(self):
        """ Returns the interwiki name of the other wiki. """
        return NotImplemented

    def get_iwid(self):
        """ Returns the InterWiki ID. """
        return NotImplemented

    def get_pages(self, **kw):
        """ Returns a list of SyncPage instances. """
        return NotImplemented


class MoinRemoteWiki(RemoteWiki):
    """ Used for MoinMoin wikis reachable via XMLRPC. """
    def __init__(self, request, interwikiname, prefix, pagelist, user, password, verbose=False):
        self.request = request
        self.prefix = prefix
        self.pagelist = pagelist
        self.verbose = verbose
        _ = self.request.getText

        wikitag, wikiurl, wikitail, wikitag_bad = wikiutil.resolve_interwiki(self.request, interwikiname, '')
        self.wiki_url = wikiutil.mapURL(self.request, wikiurl)
        self.valid = not wikitag_bad
        self.xmlrpc_url = str(self.wiki_url + "?action=xmlrpc2")
        if not self.valid:
            self.connection = None
            return

        self.connection = self.createConnection()

        try:
            iw_list = self.connection.interwikiName()
        except socket.error:
            raise UnsupportedWikiException(_("The wiki is currently not reachable."))
        except xmlrpclib.Fault, err:
            raise UnsupportedWikiException("xmlrpclib.Fault: %s" % str(err))

        if user and password:
            token = self.connection.getAuthToken(user, password)
            if token:
                self.token = token
            else:
                raise NotAllowedException(_("Invalid username or password."))
        else:
            self.token = None

        self.remote_interwikiname = remote_interwikiname = iw_list[0]
        self.remote_iwid = remote_iwid = iw_list[1]
        self.is_anonymous = remote_interwikiname is None
        if not self.is_anonymous and interwikiname != remote_interwikiname:
            raise UnsupportedWikiException(_("The remote wiki uses a different InterWiki name (%(remotename)s)"
                                             " internally than you specified (%(localname)s).") % {
                "remotename": wikiutil.escape(remote_interwikiname), "localname": wikiutil.escape(interwikiname)})

        if self.is_anonymous:
            self.iwid_full = packLine([remote_iwid])
        else:
            self.iwid_full = packLine([remote_iwid, interwikiname])

    def createConnection(self):
        return xmlrpclib.ServerProxy(self.xmlrpc_url, allow_none=True, verbose=self.verbose)

    # Public methods
    def get_diff_pre(self, pagename, from_rev, to_rev, n_name=None):
        """ Generates the XMLRPC call that fetches the binary diff of the
            remote page pagename between from_rev and to_rev. """
        return "getDiff", (pagename, from_rev, to_rev, n_name)

    def get_diff_post(self, value):
        """ Processes the return value of the call generated by get_diff_pre. """
        if isinstance(value, xmlrpclib.Fault):
            if value.faultCode == "INVALID_TAG":
                return None
            raise value
        value["diff"] = str(value["diff"]) # unmarshal Binary object
        return value

    def merge_diff_pre(self, pagename, diff, local_rev, delta_remote_rev, last_remote_rev, interwiki_name, n_name):
        """ Merges the diff into the page on the remote side. Generates the call. """
        return "mergeDiff", (pagename, xmlrpclib.Binary(diff), local_rev, delta_remote_rev, last_remote_rev, interwiki_name, n_name)

    def merge_diff_post(self, result):
        """ Processes the return value of the call generated by merge_diff_pre. """
        if isinstance(result, xmlrpclib.Fault):
            if result.faultCode == "NOT_ALLOWED":
                raise NotAllowedException
            raise result
        return result

    def delete_page_pre(self, pagename, last_remote_rev, interwiki_name):
        """ Deletes a remote page. Generates the call. """
        return "mergeDiff", (pagename, None, None, None, last_remote_rev, interwiki_name, None)

    def delete_page_post(self, result):
        """ Processes the return value of the call generated by delete_page_pre. """
        if isinstance(result, xmlrpclib.Fault):
            if result.faultCode == "NOT_ALLOWED":
                return result.faultString
            raise result
        return ""

    def create_multicall_object(self):
        """ Generates an object that can be used like a MultiCall instance. """
        return xmlrpclib.MultiCall(self.connection)

    def prepare_multicall(self):
        """ Can be used to return initial calls that e.g. authenticate the user.
            @return: a list of (funcname, args) tuples
        """
        if self.token:
            return [("applyAuthToken", (self.token, ))]
        return []
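
    # Sketch of how a caller might combine the *_pre/*_post pairs with a
    # multicall object (illustrative only; the real driver lives in the sync
    # action code, and error handling is omitted here):
    #
    #   m = remote.create_multicall_object()
    #   for funcname, args in remote.prepare_multicall():   # e.g. applyAuthToken
    #       getattr(m, funcname)(*args)
    #   funcname, args = remote.get_diff_pre(pagename, from_rev, to_rev)
    #   getattr(m, funcname)(*args)
    #   results = m()                                        # execute the batched calls
    #   diff = remote.get_diff_post(results[-1])             # unmarshal the diff result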

    def delete_auth_token(self):
        if self.token:
            self.connection.deleteAuthToken(self.token)
            self.token = None

    # Methods implementing the RemoteWiki interface

    def get_interwiki_name(self):
        return self.remote_interwikiname

    def get_iwid(self):
        return self.remote_iwid

    def get_pages(self, **kwargs):
        options = {"include_revno": True,
                   "include_deleted": True,
                   "exclude_non_writable": kwargs["exclude_non_writable"],
                   "include_underlay": False,
                   "prefix": self.prefix,
                   "pagelist": self.pagelist,
                   "mark_deleted": True}
        if self.token:
            m = xmlrpclib.MultiCall(self.connection)
            m.applyAuthToken(self.token)
            m.getAllPagesEx(options)
            tokres, pages = m()
        else:
            pages = self.connection.getAllPagesEx(options)
        rpages = []
        for name, revno in pages:
            normalised_name = normalise_pagename(name, self.prefix)
            if normalised_name is None:
                continue
            if abs(revno) != 99999999: # I love sane in-band signalling
                remote_rev = abs(revno)
                remote_deleted = revno < 0
                rpages.append(SyncPage(normalised_name, remote_rev=remote_rev, remote_name=name, remote_deleted=remote_deleted))
        return rpages
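
    # The revno values returned by getAllPagesEx use in-band signalling, which
    # the loop above decodes:
    #   revno == 99999999  -> page has no real revision (does not exist), skip it
    #   revno == -5        -> page exists as revision 5 but is marked deleted
    #   revno == 5         -> ordinary page at revision 5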

    def __repr__(self):
        return "<MoinRemoteWiki wiki_url=%r valid=%r>" % (getattr(self, "wiki_url", Ellipsis), getattr(self, "valid", Ellipsis))


class MoinLocalWiki(RemoteWiki):
    """ Used for the current MoinMoin wiki. """
    def __init__(self, request, prefix, pagelist):
        self.request = request
        self.prefix = prefix
        self.pagelist = pagelist

    def getGroupItems(self, group_list):
        """ Returns SyncPage instances for all pages that are members of the
            given group pages. """
        pages = []
        for group_pagename in group_list:
            pages.extend(self.request.groups.get(group_pagename, []))
        return [self.createSyncPage(x) for x in pages]

    def createSyncPage(self, page_name):
        normalised_name = normalise_pagename(page_name, self.prefix)
        if normalised_name is None:
            return None
        page = Page(self.request, page_name)
        revno = page.get_real_rev()
        if revno == 99999999: # I love sane in-band signalling
            return None
        return SyncPage(normalised_name, local_rev=revno, local_name=page_name, local_deleted=not page.exists())

    # Public methods:

    # Methods implementing the RemoteWiki interface
    def delete_page(self, pagename, comment):
        page = PageEditor(self.request, pagename)
        try:
            page.deletePage(comment)
        except PageEditor.AccessDenied, (msg, ):
            return msg
        return ""

    def get_interwiki_name(self):
        return self.request.cfg.interwikiname

    def get_iwid(self):
        return self.request.cfg.iwid

    def get_pages(self, **kwargs):
        assert not kwargs
        if self.prefix or self.pagelist:
            def page_filter(name, prefix=(self.prefix or ""), pagelist=self.pagelist):
                n_name = normalise_pagename(name, prefix)
                if not n_name:
                    return False
                if not pagelist:
                    return True
                return n_name in pagelist
        else:
            page_filter = lambda x: True
        pages = []
        for x in self.request.rootpage.getPageList(exists=False, include_underlay=False, filter=page_filter):
            sp = self.createSyncPage(x)
            if sp:
                pages.append(sp)
        return pages

    def __repr__(self):
        return "<MoinLocalWiki>"


# ------------------ Tags ------------------


class Tag(object):
    """ This class is used to store information about merging state. """

    def __init__(self, remote_wiki, remote_rev, current_rev, direction, normalised_name):
        """ Creates a new Tag.

        @param remote_wiki: The identifier of the remote wiki.
        @param remote_rev: The revision number on the remote end.
        @param current_rev: The related local revision.
        @param direction: The direction of the sync, encoded as an integer.
        @param normalised_name: The normalised (prefix-stripped) page name.
        """
        assert (isinstance(remote_wiki, basestring) and isinstance(remote_rev, int)
                and isinstance(current_rev, int) and isinstance(direction, int)
                and isinstance(normalised_name, basestring))
        self.remote_wiki = remote_wiki
        self.remote_rev = remote_rev
        self.current_rev = current_rev
        self.direction = direction
        self.normalised_name = normalised_name

    def __repr__(self):
        return u"<Tag normalised_pagename=%r remote_wiki=%r remote_rev=%r current_rev=%r>" % (getattr(self, "normalised_name", "UNDEF"), self.remote_wiki, self.remote_rev, self.current_rev)

    def __cmp__(self, other):
        if not isinstance(other, Tag):
            return NotImplemented
        return cmp(self.current_rev, other.current_rev)


class AbstractTagStore(object):
    """ Abstract base class for classes that manage the storage of tags. """

    def __init__(self, page):
        """ Subclasses do not need to call this method. It only documents that
            they have to accept at least a page argument. """
        pass

    def dump(self):
        """ Returns all tags for a given item as a string. """
        return repr(self.get_all_tags())

    def add(self, **kwargs):
        """ Adds a Tag object to the current TagStore. """
        print "Got tag for page %r: %r" % (self.page, kwargs)
        return NotImplemented

    def get_all_tags(self):
        """ Returns a list of all Tag objects associated with this page. """
        return NotImplemented

    def get_last_tag(self):
        """ Returns the newest tag. """
        return NotImplemented

    def clear(self):
        """ Removes all tags. """
        return NotImplemented

    def fetch(self, iwid_full=None, direction=None):
        """ Fetches tags that match the given full IWID (or interwiki name)
            and, optionally, sync direction. """
        return NotImplemented


class PickleTagStore(AbstractTagStore):
    """ This class manages the storage of tags in pickle files. """

    def __init__(self, page):
        """ Creates a new TagStore that uses pickle files.

        @param page: the Page object that the tags should be related to
        """

        self.page = page
        self.filename = page.getPagePath('synctags', use_underlay=0, check_create=1, isfile=1)
        lock_dir = os.path.join(page.getPagePath('cache', use_underlay=0, check_create=1), '__taglock__')
        self.rlock = lock.ReadLock(lock_dir, 60.0)
        self.wlock = lock.WriteLock(lock_dir, 60.0)

        if not self.rlock.acquire(3.0):
            raise EnvironmentError("Could not lock in PickleTagStore")
        try:
            self.load()
        finally:
            self.rlock.release()

    def load(self):
        """ Loads the tags from the data file. """
        try:
            datafile = file(self.filename, "rb")
            self.tags = pickle.load(datafile)
        except (IOError, EOFError):
            self.tags = []
        else:
            datafile.close()

    def commit(self):
        """ Writes the memory contents to the data file. """
        datafile = file(self.filename, "wb")
        pickle.dump(self.tags, datafile, pickle.HIGHEST_PROTOCOL)
        datafile.close()

    # public methods ---------------------------------------------------
    def add(self, **kwargs):
        if not self.wlock.acquire(3.0):
            raise EnvironmentError("Could not lock in PickleTagStore")
        try:
            self.load()
            self.tags.append(Tag(**kwargs))
            self.commit()
        finally:
            self.wlock.release()

    def get_all_tags(self):
        return self.tags[:]

    def get_last_tag(self):
        temp = self.tags[:]
        temp.sort()
        if not temp:
            return None
        return temp[-1]

    def clear(self):
        self.tags = []
        if not self.wlock.acquire(3.0):
            raise EnvironmentError("Could not lock in PickleTagStore")
        try:
            self.commit()
        finally:
            self.wlock.release()

    def fetch(self, iwid_full, direction=None):
        iwid_full = unpackLine(iwid_full)
        matching_tags = []
        for t in self.tags:
            t_iwid_full = unpackLine(t.remote_wiki)
            if ((t_iwid_full[0] == iwid_full[0]) # either match IWID or IW name
                or (len(t_iwid_full) == 2 and len(iwid_full) == 2 and t_iwid_full[1] == iwid_full[1])
                ) and (direction is None or t.direction == direction):
                matching_tags.append(t)
        return matching_tags
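
    # Illustrative fetch semantics: both the stored Tag.remote_wiki value and
    # the iwid_full argument are packed lines of [IWID] or [IWID, interwiki name]
    # (see packLine/unpackLine). Two entries match if their IWIDs are equal or
    # if both carry the same interwiki name; passing direction=UP (or DOWN, BOTH)
    # additionally restricts the result to tags synced in that direction.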


# currently we just have one implementation, so we do not need
# a factory method
TagStore = PickleTagStore
