]>
git.ipfire.org Git - ipfire.org.git/blob - src/backend/wiki.py
12 from .decorators
import *
class Wiki(misc.Object):
	"""High-level interface to the wiki: pages, files, ACLs and search."""

	def _get_pages(self, query, *args):
		"""Run *query* and yield a Page object for every returned row."""
		res = self.db.query(query, *args)

		# NOTE(review): the loop header was lost in extraction; a generator
		# over all result rows is the only shape consistent with the yield.
		for row in res:
			yield Page(self.backend, row.id, data=row)
def _get_page(self, query, *args):
	"""Run *query* and return a single Page, or None when no row matched."""
	res = self.db.get(query, *args)

	# NOTE(review): the guard was lost in extraction; returning None for an
	# empty result matches how callers test the return value for truthiness.
	if res:
		return Page(self.backend, res.id, data=res)
def make_path(self, page, path):
	"""Resolve the link target *path* relative to the page *page*.

	Absolute targets are kept, "./x" is resolved below the page itself,
	anything else is resolved against the page's parent directory.
	"""
	# Nothing to do for absolute links
	if path.startswith("/"):
		pass

	# Relative links (one-level down)
	elif path.startswith("./"):
		path = os.path.join(page, path)

	# All other relative links resolve against the parent directory
	else:
		p = os.path.dirname(page)
		path = os.path.join(p, path)

	# Normalise the result (collapses "." and "..")
	return os.path.normpath(path)
def get_page_title(self, page, default=None):
	"""Return the title of *page*, caching the result in memcache.

	Falls back to *default* (or the last path component) when the page
	does not exist.
	"""
	# Try to retrieve title from cache
	title = self.memcache.get("wiki:title:%s" % page)
	if title:
		return title

	# If the title has not been in the cache, we have to load the page
	doc = self.get_page(page)
	if doc:
		title = doc.title
	else:
		# Fixed: honor the otherwise-unused "default" parameter
		title = default if default is not None else os.path.basename(page)

	# Save in cache for forever
	self.memcache.set("wiki:title:%s" % page, title)

	return title
def get_page(self, page, revision=None):
	"""Fetch a page by name — a specific revision, or the latest one."""
	page = Page.sanitise_page_name(page)

	if revision:
		return self._get_page("SELECT * FROM wiki WHERE page = %s \
			AND timestamp = %s", page, revision)

	return self._get_page("SELECT * FROM wiki WHERE page = %s \
		ORDER BY timestamp DESC LIMIT 1", page)
def get_recent_changes(self, account, limit=None):
	"""Yield the most recently changed pages readable by *account*."""
	pages = self._get_pages("SELECT * FROM wiki \
		ORDER BY timestamp DESC")

	for page in pages:
		# Skip pages this user may not read
		if not page.check_acl(account):
			continue

		yield page

		# NOTE(review): the limit handling was lost in extraction; guarded
		# here so that limit=None means "no limit" (a bare "limit -= 1"
		# would raise TypeError for the default argument).
		if limit is not None:
			limit -= 1
			if limit <= 0:
				break
def create_page(self, page, author, content, changes=None, address=None):
	"""Insert a new revision of *page* and return the new Page object.

	Empty *content* is stored as NULL, which marks a deleted revision.
	"""
	page = Page.sanitise_page_name(page)

	# Write page to the database
	page = self._get_page("INSERT INTO wiki(page, author_uid, markdown, changes, address) \
		VALUES(%s, %s, %s, %s, %s) RETURNING *", page, author.uid, content or None, changes, address)

	# Update the title cache
	self.memcache.set("wiki:title:%s" % page.page, page.title)

	# Send email to all watchers (but not to the author)
	page._send_watcher_emails(excludes=[author])

	return page
def delete_page(self, page, author, **kwargs):
	"""Delete *page* by storing an empty (NULL) revision on top of it."""
	# Do nothing if the page does not exist
	if not self.get_page(page):
		return

	# Just creates a blank last version of the page
	self.create_page(page, author=author, content=None, **kwargs)
def make_breadcrumbs(self, url):
	"""Return (url, title) tuples for every ancestor of *url*."""
	# Split and strip all empty elements (double slashes)
	parts = list(e for e in url.split("/") if e)

	ret = []
	# All proper prefixes of the path (the page itself is excluded)
	for part in ("/".join(parts[:i]) for i in range(1, len(parts))):
		ret.append(("/%s" % part, self.get_page_title(part, os.path.basename(part))))

	return ret
def search(self, query, account=None, limit=None):
	"""Full-text search; return up to *limit* pages readable by *account*."""
	query = util.parse_search_query(query)

	res = self._get_pages("SELECT wiki.* FROM wiki_search_index search_index \
		LEFT JOIN wiki ON search_index.wiki_id = wiki.id \
		WHERE search_index.document @@ to_tsquery('english', %s) \
		ORDER BY ts_rank(search_index.document, to_tsquery('english', %s)) DESC",
		query, query)

	pages = []
	for page in res:
		# Skip any pages the user doesn't have permission for
		if not page.check_acl(account):
			continue

		# Return any other pages
		pages.append(page)

		# Break when we have found enough pages
		if limit and len(pages) >= limit:
			break

	return pages
def refresh(self):
	"""
		Needs to be called after a page has been changed
	"""
	# NOTE(review): the def line was lost in extraction; the name "refresh"
	# is assumed from the docstring and body — confirm against upstream.
	self.db.execute("REFRESH MATERIALIZED VIEW wiki_search_index")
def get_watchlist(self, account):
	"""Return all pages on *account*'s watchlist."""
	pages = self._get_pages(
		"WITH pages AS (SELECT * FROM wiki_current \
			LEFT JOIN wiki ON wiki_current.id = wiki.id) \
		SELECT * FROM wiki_watchlist watchlist \
			LEFT JOIN pages ON watchlist.page = pages.page \
		WHERE watchlist.uid = %s",
		account.uid,
	)

	# NOTE(review): the tail of this method was lost in extraction;
	# returning the pages sorted is assumed — confirm against upstream.
	return sorted(pages)
def check_acl(self, page, account):
	"""Check whether *account* may read *page*.

	The longest matching ACL path wins (LIMIT 1 below); when no ACL
	matches at all, access is granted.
	"""
	res = self.db.query("SELECT * FROM wiki_acls \
		WHERE %s ILIKE (path || '%%') ORDER BY LENGTH(path) DESC LIMIT 1", page)

	for row in res:
		# Access not permitted when user is not logged in
		if not account:
			return False

		# If user is in a matching group, we grant permission
		for group in row.groups:
			if group in account.groups:
				return True

		# Otherwise access is not permitted
		return False

	# If no ACLs are found, we permit access
	return True
def _get_files(self, query, *args):
	"""Run *query* and yield a File object for every returned row."""
	res = self.db.query(query, *args)

	# NOTE(review): the loop header was lost in extraction; a generator
	# over all result rows is the only shape consistent with the yield.
	for row in res:
		yield File(self.backend, row.id, data=row)
def _get_file(self, query, *args):
	"""Run *query* and return a single File, or None when no row matched."""
	res = self.db.get(query, *args)

	if res:
		return File(self.backend, res.id, data=res)
def get_files(self, path):
	"""Return all non-deleted files in *path*, sorted by filename."""
	files = self._get_files("SELECT * FROM wiki_files \
		WHERE path = %s AND deleted_at IS NULL ORDER BY filename", path)

	return list(files)
def get_file_by_path(self, path):
	"""Split *path* into directory and filename and look the file up."""
	path, filename = os.path.dirname(path), os.path.basename(path)

	return self._get_file("SELECT * FROM wiki_files \
		WHERE path = %s AND filename = %s AND deleted_at IS NULL", path, filename)
def upload(self, path, filename, data, mimetype, author, address):
	"""Store *data* as a blob and create a wiki_files entry for it."""
	# Upload the blob first
	blob = self.db.get("INSERT INTO wiki_blobs(data) VALUES(%s) RETURNING id", data)

	# Create entry for file
	return self._get_file("INSERT INTO wiki_files(path, filename, author_uid, address, \
		mimetype, blob_id, size) VALUES(%s, %s, %s, %s, %s, %s, %s) RETURNING *", path,
		filename, author.uid, address, mimetype, blob.id, len(data))
def render(self, path, text):
	"""Render wiki markdown *text* in the context of page *path*."""
	r = WikiRenderer(self.backend, path)

	return r.render(text)
class Page(misc.Object):
	"""One revision of a wiki page (a row from the wiki table)."""

	def init(self, id, data=None):
		# Called by misc.Object.__init__ with the constructor arguments.
		# NOTE(review): the body was lost in extraction; storing id/data is
		# the only state the accessors below read.
		self.id = id
		self.data = data

	def __repr__(self):
		return "<%s %s %s>" % (self.__class__.__name__, self.page, self.timestamp)

	def __eq__(self, other):
		if isinstance(other, self.__class__):
			return self.id == other.id

		# Fixed: delegate to the other operand instead of returning None
		return NotImplemented

	def __lt__(self, other):
		if isinstance(other, self.__class__):
			# Same page: older revision sorts first
			if self.page == other.page:
				return self.timestamp < other.timestamp

			return self.page < other.page

		return NotImplemented
@staticmethod
def sanitise_page_name(page):
	"""Normalise a page name to the canonical /foo/bar form."""
	if not page:
		return "/"

	# Make sure that the page name does NOT end with a /
	if page.endswith("/"):
		page = page[:-1]

	# Make sure the page name starts with a /
	if not page.startswith("/"):
		page = "/%s" % page

	# Remove any double slashes
	page = page.replace("//", "/")

	return page
@property
def full_url(self):
	# NOTE(review): the decorator/def lines of these accessors were lost in
	# extraction; the names follow the attribute each one returns — confirm
	# against upstream.
	return "https://wiki.ipfire.org%s" % self.url

@property
def page(self):
	return self.data.page

@property
def title(self):
	# First headline of the content, or the last path component
	return self._title or os.path.basename(self.page[1:])

@property
def _title(self):
	"""The first H1 headline of the markdown, if any."""
	if not self.markdown:
		return

	# Find first H1 headline in markdown
	markdown = self.markdown.splitlines()

	m = re.match(r"^# (.*)( #)?$", markdown[0])
	if m:
		return m.group(1)

@lazy_property
def author(self):
	if self.data.author_uid:
		return self.backend.accounts.get_by_uid(self.data.author_uid)

@property
def markdown(self):
	# NULL content (a deleted revision) is coerced to ""
	return self.data.markdown or ""

@property
def html(self):
	return self.backend.wiki.render(self.page, self.markdown)

@property
def timestamp(self):
	return self.data.timestamp
def was_deleted(self):
	"""Return True when this revision marks a deletion.

	Fixed: the original compared self.markdown against None, but the
	markdown property coerces NULL to "" — so the check could never be
	True. Compare the raw database column instead.
	"""
	return self.data.markdown is None
@lazy_property
def breadcrumbs(self):
	# NOTE(review): the decorator line was lost in extraction; templates
	# access this as an attribute, so a (cached) property is assumed.
	return self.backend.wiki.make_breadcrumbs(self.page)
def get_latest_revision(self):
	"""Return the newest revision of this page, or None when none exist."""
	revisions = self.get_revisions()

	# Return first object
	for rev in revisions:
		return rev
def get_revisions(self):
	"""Return all revisions of this page, newest first."""
	return self.backend.wiki._get_pages("SELECT * FROM wiki \
		WHERE page = %s ORDER BY timestamp DESC", self.page)
@lazy_property
def previous_revision(self):
	# Accessed as an attribute elsewhere ("if self.previous_revision"),
	# so this must be a (cached) property.
	return self.backend.wiki._get_page("SELECT * FROM wiki \
		WHERE page = %s AND timestamp < %s ORDER BY timestamp DESC \
		LIMIT 1", self.page, self.timestamp)
@property
def changes(self):
	# NOTE(review): the decorator/def lines were lost in extraction;
	# accessor named after the column it returns — confirm against upstream.
	return self.data.changes
def check_acl(self, account):
	"""Delegate the ACL check for this page to the wiki backend."""
	return self.backend.wiki.check_acl(self.page, account)
@lazy_property
def sidebar(self):
	"""Find the nearest sidebar page up the page hierarchy, if any."""
	parts = self.page.split("/")

	# NOTE(review): the loop around this lookup was lost in extraction;
	# walking up the hierarchy until a sidebar page is found is assumed —
	# confirm against upstream.
	while parts:
		sidebar = self.backend.wiki.get_page("%s/sidebar" % os.path.join(*parts))
		if sidebar:
			return sidebar

		parts.pop()
@property
def diff(self):
	"""Unified diff between the previous revision and this one, if any."""
	if self.previous_revision:
		diff = difflib.unified_diff(
			self.previous_revision.markdown.splitlines(),
			self.markdown.splitlines(),
		)

		return "\n".join(diff)
@property
def watchers(self):
	"""Yield all accounts watching this page."""
	res = self.db.query("SELECT uid FROM wiki_watchlist \
		WHERE page = %s", self.page)

	for row in res:
		# Search for account by UID and skip if none was found
		account = self.backend.accounts.get_by_uid(row.uid)
		if not account:
			continue

		yield account
def is_watched_by(self, account):
	"""Return True when *account* has this page on its watchlist."""
	res = self.db.get("SELECT 1 FROM wiki_watchlist \
		WHERE page = %s AND uid = %s", self.page, account.uid)

	if res:
		return True

	return False
def add_watcher(self, account):
	"""Put this page on *account*'s watchlist (idempotent)."""
	if self.is_watched_by(account):
		return

	self.db.execute("INSERT INTO wiki_watchlist(page, uid) \
		VALUES(%s, %s)", self.page, account.uid)
def remove_watcher(self, account):
	"""Remove this page from *account*'s watchlist."""
	self.db.execute("DELETE FROM wiki_watchlist \
		WHERE page = %s AND uid = %s", self.page, account.uid)
def _send_watcher_emails(self, excludes=None):
	"""Notify all watchers of this page about the change.

	excludes: accounts that must not be notified (e.g. the author).
	"""
	# Fixed: the original used a mutable default argument ([])
	excludes = excludes or []

	# Nothing to do if there was no previous revision
	if not self.previous_revision:
		return

	for watcher in self.watchers:
		# Skip everyone who is excluded
		if watcher in excludes:
			logging.debug("Excluding %s" % watcher)
			continue

		# Skip watchers who do not have read permission
		if not self.backend.wiki.check_acl(self.page, watcher):
			logging.debug("Watcher %s does not have permissions" % watcher)
			continue

		logging.debug("Sending watcher email to %s" % watcher)

		self.backend.messages.send_template("wiki/messages/page-changed",
			sender="IPFire Wiki <wiki@ipfire.org>", recipients=[watcher],
			page=self, priority=-10)
class File(misc.Object):
	"""A file attached to the wiki (a row from wiki_files)."""

	def init(self, id, data):
		# Called by misc.Object.__init__ with the constructor arguments.
		# NOTE(review): the body was lost in extraction; storing id/data is
		# the only state the accessors below read.
		self.id = id
		self.data = data

	@property
	def url(self):
		return os.path.join(self.path, self.filename)

	# NOTE(review): the decorator/def lines of the following accessors were
	# lost in extraction; names follow the returned column — confirm upstream.

	@property
	def path(self):
		return self.data.path

	@property
	def filename(self):
		return self.data.filename

	@property
	def mimetype(self):
		return self.data.mimetype

	@property
	def size(self):
		return self.data.size

	@lazy_property
	def author(self):
		if self.data.author_uid:
			return self.backend.accounts.get_by_uid(self.data.author_uid)

	@property
	def created_at(self):
		return self.data.created_at
def is_pdf(self):
	"""True when the file is a PDF (either registered MIME type)."""
	return self.mimetype in ("application/pdf", "application/x-pdf")
def is_image(self):
	"""True when the file has any image/* MIME type."""
	return self.mimetype.startswith("image/")
@lazy_property
def blob(self):
	"""The raw file data, fetched once from the blobs table."""
	res = self.db.get("SELECT data FROM wiki_blobs \
		WHERE id = %s", self.data.blob_id)

	if res:
		return bytes(res.data)
def get_thumbnail(self, size):
	"""Return a thumbnail of *size* pixels, using memcache as store.

	The cache key includes the creation timestamp so a re-uploaded file
	invalidates old thumbnails automatically.
	"""
	cache_key = "-".join((self.path, util.normalize(self.filename), self.created_at.isoformat(), "%spx" % size))

	# Try to fetch the data from the cache
	thumbnail = self.memcache.get(cache_key)
	if thumbnail:
		return thumbnail

	# Generate the thumbnail
	thumbnail = util.generate_thumbnail(self.blob, size)

	# Put it into the cache for forever
	self.memcache.set(cache_key, thumbnail)

	return thumbnail
class WikiRenderer(misc.Object):
	"""Post-processes rendered markdown: rewrites wiki links and images."""

	# NOTE(review): a "schemas" class attribute (read by _render_link) was
	# lost in extraction and is defined elsewhere in this class.

	# Matches all anchors produced by the markdown renderer
	links = re.compile(r"<a href=\"(.*?)\">(.*?)</a>")
# Matches all image tags produced by the markdown renderer; groups are
# (alt text, src URL, optional title/caption)
images = re.compile(r"<img alt(?:=\"(.*?)\")? src=\"(.*?)\" (?:title=\"(.*?)\" )?/>")
def init(self, path):
	# Called by misc.Object.__init__. NOTE(review): the body was lost in
	# extraction; storing the page path is the only state the render
	# helpers read (self.path).
	self.path = path
def _render_link(self, m):
	"""Rewrite one <a> tag: external links get a CSS class, everything
	else is resolved as an internal wiki path."""
	url, text = m.groups()

	# Emails
	if url.startswith("mailto:"):
		# Fixed: strip the scheme so it is not duplicated in the href
		# (the template below prepends "mailto:" itself).
		address = url[len("mailto:"):]
		return """<a class="link-external" href="mailto:%s">%s</a>""" % \
			(address, text or address)

	# External links
	for schema in self.schemas:
		if url.startswith(schema):
			return """<a class="link-external" href="%s">%s</a>""" % \
				(url, text or url)

	# Everything else must be an internal link
	path = self.backend.wiki.make_path(self.path, url)

	return """<a href="%s">%s</a>""" % \
		(path, text or self.backend.wiki.get_page_title(path))
def _render_image(self, m):
	"""Rewrite one <img> tag into a Bootstrap <figure>, resolving internal
	image paths and adding a scaling query argument."""
	alt_text, url, caption = m.groups()

	# Skip any absolute and external URLs
	if url.startswith("/") or url.startswith("https://") or url.startswith("http://"):
		return """<figure class="figure"><img src="%s" class="figure-img img-fluid rounded" alt="%s">
			<figcaption class="figure-caption">%s</figcaption></figure>
		""" % (url, alt_text, caption or "")

	# Try to split query string
	url, delimiter, qs = url.partition("?")

	# Parse query arguments
	args = urllib.parse.parse_qs(qs)

	# Build absolute path
	url = self.backend.wiki.make_path(self.path, url)

	# Check if the file exists and really is an image
	file = self.backend.wiki.get_file_by_path(url)
	if not file or not file.is_image():
		return "<!-- Could not find image %s in %s -->" % (url, self.path)

	# Scale down the image if not already done
	# NOTE(review): the original size handling was lost in extraction;
	# a default width is assumed — confirm against upstream.
	if "s" not in args:
		args["s"] = "768"

	# Fixed: alt fell back to the raw caption group, which renders the
	# literal string "None" when no title was given.
	return """<figure class="figure"><img src="%s?%s" class="figure-img img-fluid rounded" alt="%s">
		<figcaption class="figure-caption">%s</figcaption></figure>
	""" % (url, urllib.parse.urlencode(args), caption or "", caption or "")
565 def render(self, text):
566 logging.debug("Rendering
%s" % self.path)
568 # Borrow this from the blog
569 text = self.backend.blog._render_text(text, lang="markdown
")
572 text = self.links.sub(self._render_link, text)
574 # Postprocess images to <figure>
575 text = self.images.sub(self._render_image, text)