collab: Update MoinMoin to 1.9.11
commit 795a00e5c4
parent f2bc7f5d82

3 changed files with 181 additions and 532 deletions
@@ -1,4 +1,4 @@
 ---
-moin_version: 1.9.8
+moin_version: 1.9.11
 tmpfs_context: system_u:object_r:httpd_sys_rw_content_t:s0
 srcdir: /usr/local/src
@@ -1,6 +1,143 @@
---- ./MoinMoin/auth/__init__.py.orig 2014-10-17 22:45:32.000000000 +0300
-+++ ./MoinMoin/auth/__init__.py 2014-10-20 11:53:32.869284981 +0300
-@@ -371,7 +371,7 @@
+--- ./MoinMoin/Page.py.orig 2014-10-17 22:45:32.000000000 +0300
++++ ./MoinMoin/Page.py 2016-02-05 20:20:23.598923780 +0200
+@@ -108,8 +108,7 @@ class ItemCache:
+             (for 'meta') or the complete cache ('pagelists').
+             @param request: the request object
+         """
+-        from MoinMoin.logfile import editlog
+-        elog = editlog.EditLog(request)
++        elog = request.editlog
+         old_pos = self.log_pos
+         new_pos, items = elog.news(old_pos)
+         if items:
+@@ -626,7 +625,12 @@ class Page(object):
+         """
+         return self.exists(domain='standard', includeDeleted=includeDeleted)
+ 
+-    def exists(self, rev=0, domain=None, includeDeleted=False):
++    def _in_backend(self):
++        if self.page_name in self.request.graphdata:
++            return self.request.graphdata.is_saved(self.page_name)
++        return 0
++
++    def exists(self, rev=0, domain=None, includeDeleted=False, includeBackend=True):
+         """ Does this page exist?
+ 
+         This is the lower level method for checking page existence. Use
+@@ -656,6 +660,12 @@ class Page(object):
+                     return True
+             return False
+         else:
++            # If it's in the backend, it exists
++            if self._in_backend():
++                return True
++            elif includeBackend:
++                return False
++
+             # Look for non-deleted pages only, using get_rev
+             if not rev and self.rev:
+                 rev = self.rev
+@@ -789,13 +799,20 @@ class Page(object):
+         @rtype: string
+         @return: formatted link
+         """
++        # Optimising closing of links
++        if kw.get('on', None) == 0:
++            formatter=getattr(self, 'formatter', None)
++            if formatter:
++                return formatter.url(0, '', None)
++
+         if not text:
+             text = self.split_title()
+         text = wikiutil.escape(text)
+ 
+-        # Add css class for non existing page
+-        if not self.exists():
+-            kw['css_class'] = 'nonexistent'
++        # Add css class for non existing page (if not done by formatter.pagelink)
++        if not kw.has_key('css_class'):
++            if not self.exists():
++                kw['css_class'] = 'nonexistent'
+ 
+         attachment_indicator = kw.get('attachment_indicator')
+         if attachment_indicator is None:
+@@ -1826,7 +1843,7 @@ class RootPage(Page):
+ 
+         return underlay, path
+ 
+-    def getPageList(self, user=None, exists=1, filter=None, include_underlay=True, return_objects=False):
++    def getPageList(self, user=None, exists=1, filter=None, include_underlay=True, return_objects=False, includeBackend=True):
+         """ List user readable pages under current page
+ 
+         Currently only request.rootpage is used to list pages, but if we
+@@ -1895,7 +1912,7 @@ class RootPage(Page):
+                 continue
+ 
+             # Filter deleted pages
+-            if exists and not page.exists():
++            if exists and not page.exists(includeBackend=includeBackend):
+                 continue
+ 
+             # Filter out page user may not read.
+--- MoinMoin/PageEditor.py.orig 2020-11-08 19:23:14.000000000 +0200
++++ MoinMoin/PageEditor.py 2020-11-11 09:13:04.000000000 +0200
+@@ -17,7 +17,7 @@
+ """
+ 
+ import os, time, codecs, errno
+-
++import unicodedata
+ 
+ from MoinMoin import caching, config, wikiutil, error
+ from MoinMoin.Page import Page
+@@ -53,6 +53,17 @@
+ </script>
+ """
+ 
++#############################################################################
++### Filtering unprintable characters from page content
++#############################################################################
++
++ALLOWED_CONTROL_CHARS = '\t\n\r'
++
++def filter_unprintable(text):
++    return ''.join(x for x in text
++                   if (not unicodedata.category(x) in ['Cc', 'Cn', 'Cs']
++                       or x in ALLOWED_CONTROL_CHARS))
++
+ 
+ #############################################################################
+ ### PageEditor - Edit pages
+@@ -1065,6 +1076,26 @@
+         """
+         request = self.request
+         _ = self._
++
++        # Depending on the configuration, filter unprintable
++        # characters from text content or warn of them. Unprintable
++        # characters are often undesired, and result from
++        # eg. copy-pasting text from productivity tools.
++        _handle_unprintable = getattr(self.request.cfg,
++                                      'gwiki_handle_unprintable', '')
++        if _handle_unprintable in ['warn', 'filter']:
++            _newtext = filter_unprintable(newtext)
++            if _handle_unprintable == 'filter':
++                newtext = _newtext
++            elif _newtext != newtext:
++                _pos = 0
++                for i in len(_newtext):
++                    _pos = i
++                    if _newtext[i] != newtext[i]:
++                        break
++                raise self.SaveError(_("Bad character in text at position %s.")%
++                                     (_pos))
++
+         self._save_draft(newtext, rev, **kw)
+         action = kw.get('action', 'SAVE')
+         deleted = kw.get('deleted', False)
+--- MoinMoin/auth/__init__.py.orig 2020-11-08 19:23:14.000000000 +0200
++++ MoinMoin/auth/__init__.py 2020-11-11 09:28:06.000000000 +0200
+@@ -374,7 +374,7 @@
          auth_username = self.transform_username(auth_username)
          logging.debug("auth_username (after decode/transform) = %r" % auth_username)
          u = user.User(request, auth_username=auth_username,
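
Note: the PageEditor.py hunk above introduces filter_unprintable(), which drops Unicode control (Cc), unassigned (Cn) and surrogate (Cs) characters while keeping tab, newline and carriage return. Below is a standalone sketch of the same logic (my illustration, not code from the commit; Python 2 semantics as in MoinMoin 1.9). Note also that the hunk's own warning branch reads "for i in len(_newtext):", which iterates over an int and would raise TypeError at save time; range(len(_newtext)) is presumably what was intended, as the sketch shows.

    # -*- coding: utf-8 -*-
    # Illustration only: mirrors the filter_unprintable() helper added by the
    # patch above, plus a corrected version of its "warn" position scan.
    import unicodedata

    ALLOWED_CONTROL_CHARS = '\t\n\r'

    def filter_unprintable(text):
        # Keep tab/newline/CR; drop other control, unassigned and
        # surrogate code points.
        return ''.join(x for x in text
                       if (unicodedata.category(x) not in ('Cc', 'Cn', 'Cs')
                           or x in ALLOWED_CONTROL_CHARS))

    def first_bad_position(text):
        # Corrected equivalent of the hunk's warning branch: report where
        # the first filtered-out character sits in the original text.
        filtered = filter_unprintable(text)
        if filtered == text:
            return -1
        for i in range(len(filtered)):
            if filtered[i] != text[i]:
                return i
        return len(filtered)

    assert filter_unprintable(u'foo\x00bar\n') == u'foobar\n'
    assert first_bad_position(u'foo\x00bar') == 3
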
@@ -30,9 +167,9 @@
     # Bidi control characters
     \u202A | # LRE
     \u202B | # RLE
---- ./MoinMoin/formatter/__init__.py.orig 2014-11-03 20:24:17.000000000 +0200
-+++ ./MoinMoin/formatter/__init__.py 2014-11-07 17:05:23.360806970 +0200
-@@ -135,7 +135,10 @@ class FormatterBase:
+--- MoinMoin/formatter/__init__.py.orig 2020-11-08 19:23:14.000000000 +0200
++++ MoinMoin/formatter/__init__.py 2020-11-11 09:18:45.000000000 +0200
+@@ -137,7 +137,10 @@
         # Try to decode text. It might return junk, but we don't
         # have enough information with attachments.
         content = wikiutil.decodeUnknownInput(content)
@@ -44,9 +181,8 @@
             colorizer.format(self)
         except IOError:
             pass
---- ./MoinMoin/formatter/text_html.py.orig 2014-10-17 22:45:32.000000000 +0300
-+++ ./MoinMoin/formatter/text_html.py 2015-08-20 12:16:01.940528662 +0300
-
+--- MoinMoin/formatter/text_html.py.orig 2020-11-08 19:23:14.000000000 +0200
++++ MoinMoin/formatter/text_html.py 2020-11-11 09:19:27.000000000 +0200
 @@ -6,12 +6,14 @@
     @license: GNU GPL, see COPYING for details.
 """
@@ -62,8 +198,8 @@
 +from MoinMoin import wikiutil, i18n, config
  from MoinMoin.Page import Page
  from MoinMoin.action import AttachFile
- from MoinMoin.support.python_compatibility import set
-@@ -474,16 +476,17 @@ class Formatter(FormatterBase):
+@@ -473,16 +475,17 @@
+ 
          del kw['generated']
          if page is None:
              page = Page(self.request, pagename, formatter=self)
@@ -88,7 +224,7 @@
 
     def interwikilink(self, on, interwiki='', pagename='', **kw):
         """
-@@ -534,12 +537,25 @@ class Formatter(FormatterBase):
+@@ -533,12 +536,25 @@
             logging.warning("Deprecation warning: MoinMoin.formatter.text_html.url being called with do_escape=1/True parameter, please review caller.")
         else:
             logging.warning("Deprecation warning: MoinMoin.formatter.text_html.url being called with do_escape=0/False parameter, please remove it from the caller.")
@@ -406,146 +542,9 @@
 
         package = ZipPackage(request, packagefile)
         if not package.isPackage():
---- ./MoinMoin/PageEditor.py.orig 2014-10-17 22:45:32.000000000 +0300
-+++ ./MoinMoin/PageEditor.py 2014-10-20 11:53:32.880284974 +0300
-@@ -17,7 +17,7 @@
- """
- 
- import os, time, codecs, errno
--
-+import unicodedata
- 
- from MoinMoin import caching, config, wikiutil, error
- from MoinMoin.Page import Page
-@@ -54,6 +54,17 @@
- </script>
- """
- 
-+#############################################################################
-+### Filtering unprintable characters from page content
-+#############################################################################
-+
-+ALLOWED_CONTROL_CHARS = '\t\n\r'
-+
-+def filter_unprintable(text):
-+    return ''.join(x for x in text
-+                   if (not unicodedata.category(x) in ['Cc', 'Cn', 'Cs']
-+                       or x in ALLOWED_CONTROL_CHARS))
-+
- 
- #############################################################################
- ### PageEditor - Edit pages
-@@ -1066,6 +1077,26 @@
-         """
-         request = self.request
-         _ = self._
-+
-+        # Depending on the configuration, filter unprintable
-+        # characters from text content or warn of them. Unprintable
-+        # characters are often undesired, and result from
-+        # eg. copy-pasting text from productivity tools.
-+        _handle_unprintable = getattr(self.request.cfg,
-+                                      'gwiki_handle_unprintable', '')
-+        if _handle_unprintable in ['warn', 'filter']:
-+            _newtext = filter_unprintable(newtext)
-+            if _handle_unprintable == 'filter':
-+                newtext = _newtext
-+            elif _newtext != newtext:
-+                _pos = 0
-+                for i in len(_newtext):
-+                    _pos = i
-+                    if _newtext[i] != newtext[i]:
-+                        break
-+                raise self.SaveError(_("Bad character in text at position %s.")%
-+                                     (_pos))
-+
-         self._save_draft(newtext, rev, **kw)
-         action = kw.get('action', 'SAVE')
-         deleted = kw.get('deleted', False)
---- ./MoinMoin/Page.py.orig 2014-10-17 22:45:32.000000000 +0300
-+++ ./MoinMoin/Page.py 2016-02-05 20:20:23.598923780 +0200
-@@ -108,8 +108,7 @@ class ItemCache:
-             (for 'meta') or the complete cache ('pagelists').
-             @param request: the request object
-         """
--        from MoinMoin.logfile import editlog
--        elog = editlog.EditLog(request)
-+        elog = request.editlog
-         old_pos = self.log_pos
-         new_pos, items = elog.news(old_pos)
-         if items:
-@@ -626,7 +625,12 @@ class Page(object):
-         """
-         return self.exists(domain='standard', includeDeleted=includeDeleted)
- 
--    def exists(self, rev=0, domain=None, includeDeleted=False):
-+    def _in_backend(self):
-+        if self.page_name in self.request.graphdata:
-+            return self.request.graphdata.is_saved(self.page_name)
-+        return 0
-+
-+    def exists(self, rev=0, domain=None, includeDeleted=False, includeBackend=True):
-         """ Does this page exist?
- 
-         This is the lower level method for checking page existence. Use
-@@ -656,6 +660,12 @@ class Page(object):
-                     return True
-             return False
-         else:
-+            # If it's in the backend, it exists
-+            if self._in_backend():
-+                return True
-+            elif includeBackend:
-+                return False
-+
-             # Look for non-deleted pages only, using get_rev
-             if not rev and self.rev:
-                 rev = self.rev
-@@ -789,13 +799,20 @@ class Page(object):
-         @rtype: string
-         @return: formatted link
-         """
-+        # Optimising closing of links
-+        if kw.get('on', None) == 0:
-+            formatter=getattr(self, 'formatter', None)
-+            if formatter:
-+                return formatter.url(0, '', None)
-+
-         if not text:
-             text = self.split_title()
-         text = wikiutil.escape(text)
- 
--        # Add css class for non existing page
--        if not self.exists():
--            kw['css_class'] = 'nonexistent'
-+        # Add css class for non existing page (if not done by formatter.pagelink)
-+        if not kw.has_key('css_class'):
-+            if not self.exists():
-+                kw['css_class'] = 'nonexistent'
- 
-         attachment_indicator = kw.get('attachment_indicator')
-         if attachment_indicator is None:
-@@ -1826,7 +1843,7 @@ class RootPage(Page):
- 
-         return underlay, path
- 
--    def getPageList(self, user=None, exists=1, filter=None, include_underlay=True, return_objects=False):
-+    def getPageList(self, user=None, exists=1, filter=None, include_underlay=True, return_objects=False, includeBackend=True):
-         """ List user readable pages under current page
- 
-         Currently only request.rootpage is used to list pages, but if we
-@@ -1895,7 +1912,7 @@ class RootPage(Page):
-                 continue
- 
-             # Filter deleted pages
--            if exists and not page.exists():
-+            if exists and not page.exists(includeBackend=includeBackend):
-                 continue
- 
-             # Filter out page user may not read.
---- ./MoinMoin/parser/text_moin_wiki.py.orig 2014-10-17 22:45:33.000000000 +0300
-+++ ./MoinMoin/parser/text_moin_wiki.py 2015-04-29 14:40:41.284018265 +0300
-@@ -728,8 +728,12 @@ class Parser:
+--- MoinMoin/parser/text_moin_wiki.py.orig 2020-11-08 19:23:14.000000000 +0200
++++ MoinMoin/parser/text_moin_wiki.py 2020-11-11 09:23:34.000000000 +0200
+@@ -727,8 +727,12 @@
         if scheme == 'attachment':
             mt = wikiutil.MimeType(filename=url)
             if mt.major == 'text':
@@ -560,7 +559,7 @@
             # destinguishs if browser need a plugin in place
         elif mt.major == 'image' and mt.minor in config.browser_supported_images:
             desc = self._transclude_description(desc, url)
-@@ -873,9 +877,10 @@ class Parser:
+@@ -872,9 +876,10 @@
                 tag_attrs, query_args = self._get_params(params,
                                                          tag_attrs={},
                                                          acceptable_attrs=acceptable_attrs)
@@ -573,65 +572,9 @@
         else: # interwiki link
             page_name, anchor = wikiutil.split_anchor(page_name)
             tag_attrs, query_args = self._get_params(params,
---- ./MoinMoin/support/werkzeug/formparser.py.orig 2014-10-17 22:45:32.000000000 +0300
-+++ ./MoinMoin/support/werkzeug/formparser.py 2014-10-20 11:53:32.882284972 +0300
-@@ -33,13 +33,50 @@
- #: for multipart messages.
- _supported_multipart_encodings = frozenset(['base64', 'quoted-printable'])
- 
-+class SmartStream(object):
-+    """A file-like stream that dynamically switches from memory-based
-+    to file-based storage when the total amount of data is larger
-+    than 500 kilobytes."""
-+
-+    def __init__(self, threshold=1024*500):
-+        self._is_file = False
-+        self._threshold = threshold
-+        self._stream = StringIO()
-+
-+    def __getattr__(self, key):
-+        return getattr(self._stream, key)
-+
-+    def _check(self):
-+        if self._is_file:
-+            return
-+
-+        pos = self._stream.tell()
-+        if pos <= self._threshold:
-+            return
-+
-+        stream = TemporaryFile('wb+')
-+        stream.write(self._stream.getvalue())
-+        stream.flush()
-+        stream.seek(pos)
-+
-+        self._stream.close()
-+        self._stream = stream
-+        self._is_file = True
-+
-+    def write(self, *args, **kw):
-+        result = self._stream.write(*args, **kw)
-+        self._check()
-+        return result
-+
-+    def writelines(self, *args, **kw):
-+        result = self._stream.writelines(*args, **kw)
-+        self._check()
-+        return result
- 
- def default_stream_factory(total_content_length, filename, content_type,
-                            content_length=None):
-     """The stream factory that is used per default."""
--    if total_content_length > 1024 * 500:
--        return TemporaryFile('wb+')
--    return StringIO()
-+    return SmartStream()
- 
- 
- def parse_form_data(environ, stream_factory=None, charset='utf-8',
---- ./MoinMoin/theme/__init__.py.orig 2014-10-17 22:45:32.000000000 +0300
-+++ ./MoinMoin/theme/__init__.py 2014-10-20 11:53:32.884284973 +0300
-@@ -48,6 +48,7 @@
+--- MoinMoin/theme/__init__.py.orig 2020-11-08 19:23:14.000000000 +0200
++++ MoinMoin/theme/__init__.py 2020-11-11 09:24:34.000000000 +0200
+@@ -46,6 +46,7 @@
         'diff': (_("Diffs"), "moin-diff.png", 15, 11),
         'info': (_("Info"), "moin-info.png", 12, 11),
         'edit': (_("Edit"), "moin-edit.png", 12, 12),
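
Note: the deleted werkzeug hunk above wrapped form uploads in a hand-rolled SmartStream that buffers in memory and switches to a temporary file past 500 kB. The standard library's tempfile.SpooledTemporaryFile (available since Python 2.6) provides the same spill-to-disk behaviour; a minimal sketch of the idea (my illustration, not code from the commit):

    # Memory-then-disk buffering, as the dropped SmartStream patch did by hand.
    from tempfile import SpooledTemporaryFile

    stream = SpooledTemporaryFile(max_size=1024 * 500, mode='w+b')
    stream.write(b'x' * (1024 * 500 + 1))  # exceeding max_size rolls over to disk
    stream.seek(0)
    assert stream.read(1) == b'x'
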
@@ -650,10 +593,10 @@
         'attachimg': (_("[ATTACH]"), "attach.png", 32, 32),
         # RecentChanges
         'rss': (_("[RSS]"), "moin-rss.png", 16, 16),
---- ./MoinMoin/user.py.orig 2014-10-17 22:45:32.000000000 +0300
-+++ ./MoinMoin/user.py 2014-10-20 11:53:32.887284976 +0300
-@@ -23,6 +23,9 @@
- import os, time, codecs, base64
+--- MoinMoin/user.py.orig 2020-11-08 19:23:14.000000000 +0200
++++ MoinMoin/user.py 2020-11-11 09:25:51.000000000 +0200
+@@ -25,6 +25,9 @@
+ import hmac
  from copy import deepcopy
  import md5crypt
 +import errno
|
@ -680,7 +623,7 @@
|
||||||
def getUserList(request):
|
def getUserList(request):
|
||||||
""" Get a list of all (numerical) user IDs.
|
""" Get a list of all (numerical) user IDs.
|
||||||
@@ -288,6 +293,10 @@
|
@@ -288,6 +293,10 @@
|
||||||
hash = hash_new('sha1', pwd)
|
hash = hashlib.new('sha1', pwd)
|
||||||
hash.update(salt)
|
hash.update(salt)
|
||||||
return '{SSHA}' + base64.encodestring(hash.digest() + salt).rstrip()
|
return '{SSHA}' + base64.encodestring(hash.digest() + salt).rstrip()
|
||||||
+ elif scheme == '{SHA}':
|
+ elif scheme == '{SHA}':
|
||||||
|
@@ -778,9 +721,9 @@
 
         if not self.disabled:
             self.valid = 1
---- ./MoinMoin/util/filesys.py.orig 2014-10-17 22:45:32.000000000 +0300
-+++ ./MoinMoin/util/filesys.py 2014-10-20 11:53:32.888284976 +0300
-@@ -217,7 +217,6 @@
+--- MoinMoin/util/filesys.py.orig 2020-11-08 19:23:14.000000000 +0200
++++ MoinMoin/util/filesys.py 2020-11-11 09:26:25.000000000 +0200
+@@ -220,7 +220,6 @@
     """
     names = os.listdir(src)
     os.mkdir(dst)
@@ -788,9 +731,9 @@
     errors = []
     for name in names:
         srcname = os.path.join(src, name)
---- ./MoinMoin/web/contexts.py.orig 2014-10-17 22:45:32.000000000 +0300
-+++ ./MoinMoin/web/contexts.py 2014-10-20 11:53:32.889284977 +0300
-@@ -218,6 +218,12 @@
+--- MoinMoin/web/contexts.py.orig 2020-11-08 19:23:14.000000000 +0200
++++ MoinMoin/web/contexts.py 2020-11-11 09:27:00.000000000 +0200
+@@ -221,6 +221,12 @@
 
     # proxy further attribute lookups to the underlying request first
     def __getattr__(self, name):
@@ -803,8 +746,8 @@
         try:
             return getattr(self.request, name)
         except AttributeError, e:
---- ./MoinMoin/xmlrpc/__init__.py.orig 2014-10-17 22:45:32.000000000 +0300
-+++ ./MoinMoin/xmlrpc/__init__.py 2014-10-20 11:53:32.891284977 +0300
+--- MoinMoin/xmlrpc/__init__.py.orig 2020-11-08 19:23:14.000000000 +0200
++++ MoinMoin/xmlrpc/__init__.py 2020-11-11 09:44:33.000000000 +0200
 @@ -38,6 +38,32 @@
 from MoinMoin.action import AttachFile
 from MoinMoin import caching
@@ -838,21 +781,21 @@
 
 logging_tearline = '- XMLRPC %s ' + '-' * 40
 
-@@ -132,7 +158,12 @@
-        else:
+@@ -133,7 +159,12 @@
             # overwrite any user there might be, if you need a valid user for
             # xmlrpc, you have to use multicall and getAuthToken / applyAuthToken
+            if request.cfg.xmlrpc_overwrite_user:
 -        request.user = user.User(request, auth_method='xmlrpc:invalid')
 +        login_required = is_login_required(self.request)
 +        if (not self.request.user or
 +            not self.request.user.valid or
 +            login_required):
 +            self.request.user = user.User(self.request,
 +                                          auth_method='xmlrpc:invalid')
 
     data = request.read()
 
-@@ -767,7 +798,14 @@
+@@ -768,7 +799,14 @@
     request.session = request.cfg.session_service.get_session(request)
 
     u = auth.setup_from_session(request, request.session)
@@ -868,298 +811,3 @@
         if u and u.valid:
             request.user = u
---- ./MoinMoin/action/newaccount.py.orig 2014-10-17 22:45:32.000000000 +0300
-+++ ./MoinMoin/action/newaccount.py 2014-10-20 12:06:36.348542933 +0300
-@@ -31,7 +31,8 @@
- 
-     # Require non-empty name
-     try:
--        theuser.name = form['name']
-+        name = wikiutil.clean_input(form.get('email', ['']))
-+        theuser.name = name.strip()
-     except KeyError:
-         return _("Empty user name. Please enter a user name.")
- 
-@@ -104,12 +105,9 @@
- 
-     row = html.TR()
-     tbl.append(row)
--    row.append(html.TD().append(html.STRONG().append(
--        html.Text(_("Name")))))
--    cell = html.TD()
--    row.append(cell)
--    cell.append(html.INPUT(type="text", size="36", name="name"))
--    cell.append(html.Text(' ' + _("(Use FirstnameLastname)")))
-+    row.append(html.TD().append(html.STRONG().append(html.Text(_("Email")))))
-+    row.append(html.TD().append(html.INPUT(type="text", size="36",
-+                                           name="email")))
- 
-     row = html.TR()
-     tbl.append(row)
-@@ -125,12 +123,6 @@
-     row.append(html.TD().append(html.INPUT(type="password", size="36",
-                                            name="password2")))
- 
--    row = html.TR()
--    tbl.append(row)
--    row.append(html.TD().append(html.STRONG().append(html.Text(_("Email")))))
--    row.append(html.TD().append(html.INPUT(type="text", size="36",
--                                           name="email")))
--
-     textcha = TextCha(request)
-     if textcha.is_enabled():
-         row = html.TR()
-@@ -159,7 +151,7 @@
-             found = True
-             break
- 
--    if not found:
-+    if not found and False:
-         # we will not have linked, so forbid access
-         request.makeForbidden(403, 'No MoinAuth in auth list')
-         return
---- ./MoinMoin/action/recoverpass.py.orig 2014-10-17 22:45:32.000000000 +0300
-+++ ./MoinMoin/action/recoverpass.py 2014-10-20 12:06:36.379542936 +0300
-@@ -70,15 +70,9 @@
- 
-     row = html.TR()
-     tbl.append(row)
--    row.append(html.TD().append(html.STRONG().append(html.Text(_("Username")))))
--    row.append(html.TD().append(html.INPUT(type="text", size="36",
--                                           name="name")))
--
--    row = html.TR()
--    tbl.append(row)
-     row.append(html.TD().append(html.STRONG().append(html.Text(_("Email")))))
-     row.append(html.TD().append(html.INPUT(type="text", size="36",
--                                           name="email")))
-+                                           name="name")))
- 
-     row = html.TR()
-     tbl.append(row)
-@@ -111,7 +105,7 @@
- 
-     row = html.TR()
-     tbl.append(row)
--    row.append(html.TD().append(html.STRONG().append(html.Text(_("Username")))))
-+    row.append(html.TD().append(html.STRONG().append(html.Text(_("Email")))))
-     value = name or ''
-     row.append(html.TD().append(html.INPUT(type='text', size="36",
-                                            name="name", value=value)))
---- ./MoinMoin/config/multiconfig.py.orig 2014-10-17 22:45:32.000000000 +0300
-+++ ./MoinMoin/config/multiconfig.py 2016-08-15 22:34:12.813289705 +0300
-@@ -12,6 +12,7 @@ import re
- import os
- import sys
- import time
-+import imp
- 
- from MoinMoin import log
- logging = log.getLogger(__name__)
-@@ -34,6 +35,25 @@ _farmconfig_mtime = None
- _config_cache = {}
- 
- 
-+def _findConfigModule(name):
-+    """ Try to find config module or raise ImportError
-+
-+    Return first module that is a single file, skipping packages with
-+    colliding names.
-+    """
-+    for path in sys.path:
-+        if not path:
-+            continue
-+        try:
-+            fp, pathname, description = imp.find_module(name, [path])
-+            if not fp:
-+                continue
-+            return fp, pathname, description
-+        except ImportError:
-+            continue
-+    raise ImportError('No module named %s' % name)
-+
-+
- def _importConfigModule(name):
-     """ Import and return configuration module and its modification time
- 
-@@ -45,7 +65,8 @@ def _importConfigModule(name):
-     @return: module, modification time
-     """
-     try:
--        module = __import__(name, globals(), {})
-+        fp, pathname, description = _findConfigModule(name)
-+        module = imp.load_module(name, fp, pathname, description)
-         mtime = os.path.getmtime(module.__file__)
-     except ImportError:
-         raise
---- ./MoinMoin/macro/RecentChanges.py.orig 2014-10-17 22:45:33.000000000 +0300
-+++ ./MoinMoin/macro/RecentChanges.py 2014-10-20 12:06:36.381542941 +0300
-@@ -110,8 +110,12 @@
-     if request.cfg.show_names:
-         if len(lines) > 1:
-             counters = {}
-+            editorcache = {}
-             for idx in range(len(lines)):
--                name = lines[idx].getEditor(request)
-+                editorkey = lines[idx].addr, lines[idx].hostname, lines[idx].userid
-+                if editorkey not in editorcache:
-+                    editorcache[editorkey] = line.getEditor(request)
-+                name = editorcache[editorkey]
-                 if not name in counters:
-                     counters[name] = []
-                 counters[name].append(idx+1)
-@@ -228,6 +232,9 @@
-     output.append(request.theme.recentchanges_footer(d))
-     return ''.join(output)
- 
-+def filter_pages(request, pages):
-+    readable = request.user.may.read
-+    return filter(lambda lines: readable(lines[0].pagename), pages)
- 
- def macro_RecentChanges(macro, abandoned=False):
-     # handle abandoned keyword
-@@ -291,37 +298,36 @@
-     day_count = 0
- 
-     for line in log.reverse():
--
--        if not request.user.may.read(line.pagename):
--            continue
--
-         line.time_tuple = request.user.getTime(wikiutil.version2timestamp(line.ed_time_usecs))
-         day = line.time_tuple[0:3]
-         hilite = line.ed_time_usecs > (bookmark_usecs or line.ed_time_usecs)
- 
--        if ((this_day != day or (not hilite and not max_days))) and len(pages) > 0:
-+        if this_day != day or (not hilite and not max_days):
-             # new day or bookmark reached: print out stuff
-             this_day = day
-             for p in pages:
-                 ignore_pages[p] = None
--            pages = pages.values()
-+            pages = filter_pages(request, pages.values())
-             pages.sort(cmp_lines)
-             pages.reverse()
- 
--            if request.user.valid:
--                bmtime = pages[0][0].ed_time_usecs
--                d['bookmark_link_html'] = page.link_to(request, _("Set bookmark"), querystr={'action': 'bookmark', 'time': '%d' % bmtime}, rel='nofollow')
--            else:
--                d['bookmark_link_html'] = None
--            d['date'] = request.user.getFormattedDate(wikiutil.version2timestamp(pages[0][0].ed_time_usecs))
--            output.append(request.theme.recentchanges_daybreak(d))
-+            if len(pages) > 0:
-+                if request.user.valid:
-+                    bmtime = pages[0][0].ed_time_usecs
-+                    d['bookmark_link_html'] = page.link_to(request, _("Set bookmark"), querystr={'action': 'bookmark', 'time': '%d' % bmtime}, rel='nofollow')
-+                else:
-+                    d['bookmark_link_html'] = None
-+                d['date'] = request.user.getFormattedDate(wikiutil.version2timestamp(pages[0][0].ed_time_usecs))
-+                output.append(request.theme.recentchanges_daybreak(d))
-+
-+                for p in pages:
-+                    output.append(format_page_edits(macro, p, bookmark_usecs))
-+
-+                day_count += 1
-+                if max_days and (day_count >= max_days):
-+                    break
- 
--            for p in pages:
--                output.append(format_page_edits(macro, p, bookmark_usecs))
-             pages = {}
--            day_count += 1
--            if max_days and (day_count >= max_days):
--                break
- 
-         elif this_day != day:
-             # new day but no changes
-@@ -340,16 +346,16 @@
-         else:
-             pages[line.pagename] = [line]
-     else:
--        if len(pages) > 0:
--            # end of loop reached: print out stuff
--            # XXX duplicated code from above
--            # but above does not trigger if we have the first day in wiki history
--            for p in pages:
--                ignore_pages[p] = None
--            pages = pages.values()
--            pages.sort(cmp_lines)
--            pages.reverse()
-+        # end of loop reached: print out stuff
-+        # XXX duplicated code from above
-+        # but above does not trigger if we have the first day in wiki history
-+        for p in pages:
-+            ignore_pages[p] = None
-+        pages = filter_pages(request, pages.values())
-+        pages.sort(cmp_lines)
-+        pages.reverse()
- 
-+        if len(pages) > 0:
-             if request.user.valid:
-                 bmtime = pages[0][0].ed_time_usecs
-                 d['bookmark_link_html'] = page.link_to(request, _("Set bookmark"), querystr={'action': 'bookmark', 'time': '%d' % bmtime}, rel='nofollow')
---- ./MoinMoin/wikiutil.py.orig 2014-10-17 22:45:32.000000000 +0300
-+++ ./MoinMoin/wikiutil.py 2014-10-20 12:06:36.382542942 +0300
-@@ -471,15 +471,15 @@
-         generate_file_list(request)
- 
-     try:
--        _interwiki_list = request.cfg.cache.interwiki_list
--        old_mtime = request.cfg.cache.interwiki_mtime
--        if request.cfg.cache.interwiki_ts + (1*60) < now: # 1 minutes caching time
-+        _interwiki_list = request.cfg.cache.interwiki_list[request.user.id]
-+        old_mtime = request.cfg.cache.interwiki_mtime[request.user.id]
-+        if request.cfg.cache.interwiki_ts[request.user.id] + (1*60) < now: # 1 minutes caching time
-             max_mtime = get_max_mtime(request.cfg.shared_intermap_files, Page(request, INTERWIKI_PAGE))
-             if max_mtime > old_mtime:
-                 raise AttributeError # refresh cache
-             else:
--                request.cfg.cache.interwiki_ts = now
--    except AttributeError:
-+                request.cfg.cache.interwiki_ts[request.user.id] = now
-+    except (AttributeError, KeyError):
-         _interwiki_list = {}
-         lines = []
- 
-@@ -509,10 +509,28 @@
-     if request.cfg.interwikiname:
-         _interwiki_list[request.cfg.interwikiname] = request.script_root + '/'
- 
-+    # collab list
-+    if hasattr(request.cfg, 'collab_basedir'):
-+        from collabbackend import listCollabs
-+        user = request.user.name
-+        active = request.cfg.interwikiname
-+        path = request.cfg.collab_basedir
-+        baseurl = request.cfg.collab_baseurl
-+        collablist = listCollabs(baseurl, user, path, active)
-+
-+        for collab in collablist:
-+            _interwiki_list[collab[0]] = collab[3]
-+
-     # save for later
--    request.cfg.cache.interwiki_list = _interwiki_list
--    request.cfg.cache.interwiki_ts = now
--    request.cfg.cache.interwiki_mtime = get_max_mtime(request.cfg.shared_intermap_files, Page(request, INTERWIKI_PAGE))
-+    if not getattr(request.cfg.cache, 'interwiki_list', None):
-+        request.cfg.cache.interwiki_list = dict()
-+    if not getattr(request.cfg.cache, 'interwiki_ts', None):
-+        request.cfg.cache.interwiki_ts = dict()
-+    if not getattr(request.cfg.cache, 'interwiki_mtime', None):
-+        request.cfg.cache.interwiki_mtime = dict()
-+    request.cfg.cache.interwiki_list[request.user.id] = _interwiki_list
-+    request.cfg.cache.interwiki_ts[request.user.id] = now
-+    request.cfg.cache.interwiki_mtime[request.user.id] = get_max_mtime(request.cfg.shared_intermap_files, Page(request, INTERWIKI_PAGE))
- 
-     return _interwiki_list
- 
-@@ -2269,7 +2287,7 @@
-     """
-     # note: filenames containing ../ (or ..\) are made safe by replacing
-     # the / (or the \). the .. will be kept, but is harmless then.
--    basename = re.sub('[\x00-\x1f:/\\\\<>"*?%|]', '_', basename)
-+    basename = re.sub('[\x00-\x1f:/\\\\<>"*?|]', '_', basename)
-     return basename
 
@@ -16,7 +16,8 @@
   get_url:
     url: "https://static.moinmo.in/files/moin-{{ moin_version }}.tar.gz"
     dest: "{{ srcdir }}"
-    checksum: sha1:bead31f53152395aa93c31dc3e0a8a417be39ccd
+    checksum: sha1:3eb13b4730bd97259a41c4cd500f8433778ff8cf
+#   checksum: sha1:bead31f53152395aa93c31dc3e0a8a417be39ccd
 
 - name: extract moin package
   unarchive:
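
Note: the download task now pins the MoinMoin 1.9.11 tarball by its sha1 digest, keeping the superseded 1.9.8 digest as a comment. On the next version bump, the value for the checksum: field can be computed from a locally fetched tarball; a sketch (my illustration; the filename is hypothetical and follows the moin-{{ moin_version }}.tar.gz pattern used above):

    # Print the sha1 line for the get_url "checksum:" field.
    import hashlib

    def sha1_of(path, chunk_size=1 << 20):
        digest = hashlib.sha1()
        with open(path, 'rb') as fobj:
            for chunk in iter(lambda: fobj.read(chunk_size), b''):
                digest.update(chunk)
        return digest.hexdigest()

    print('checksum: sha1:%s' % sha1_of('moin-1.9.11.tar.gz'))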