]>
Commit | Line | Data |
---|---|---|
181d08f3 MT |
1 | #!/usr/bin/python3 |
2 | ||
4ed1dadb | 3 | import difflib |
181d08f3 | 4 | import logging |
6ac7e934 | 5 | import os.path |
181d08f3 | 6 | import re |
addc18d5 | 7 | import tornado.gen |
9e90e800 | 8 | import urllib.parse |
181d08f3 MT |
9 | |
10 | from . import misc | |
9523790a | 11 | from . import util |
181d08f3 MT |
12 | from .decorators import * |
13 | ||
c683a1ff MT |
14 | INTERWIKIS = { |
15 | "google" : ("https://www.google.com/search?q=%(url)s", None, "fab fa-google"), | |
16 | "rfc" : ("https://tools.ietf.org/html/rfc%(name)s", "RFC %s", None), | |
17 | "wp" : ("https://en.wikipedia.org/wiki/%(name)s", None, "fab fa-wikipedia-w"), | |
18 | } | |
19 | ||
181d08f3 MT |
class Wiki(misc.Object):
	def _get_pages(self, query, *args):
		"""Run *query* and yield each result row wrapped in a Page object."""
		res = self.db.query(query, *args)

		for row in res:
			yield Page(self.backend, row.id, data=row)

	def _get_page(self, query, *args):
		"""Run *query* and return the single result row as a Page, or None."""
		res = self.db.get(query, *args)

		if res:
			return Page(self.backend, res.id, data=res)

	def get_page_title(self, page, default=None):
		"""
			Return the title of *page*.

			Falls back to *default* (or the basename of the page path
			when no default is given) if the page does not exist.
		"""
		# Try to retrieve title from cache
		title = self.memcache.get("wiki:title:%s" % page)
		if title:
			return title

		# If the title has not been in the cache, we will
		# have to look it up
		doc = self.get_page(page)
		if doc:
			title = doc.title
		else:
			# Honour the caller-supplied default (previously this
			# argument was accepted but silently ignored)
			title = default if default is not None else os.path.basename(page)

		# Save in cache for forever
		self.memcache.set("wiki:title:%s" % page, title)

		return title

	def get_page(self, page, revision=None):
		"""
			Return the given *revision* of *page*, or the most recent
			revision when *revision* is None.
		"""
		page = Page.sanitise_page_name(page)
		assert page

		if revision:
			return self._get_page("SELECT * FROM wiki WHERE page = %s \
				AND timestamp = %s", page, revision)
		else:
			return self._get_page("SELECT * FROM wiki WHERE page = %s \
				ORDER BY timestamp DESC LIMIT 1", page)

	def get_recent_changes(self, account, limit=None):
		"""
			Yield pages changed within the last four weeks that
			*account* is allowed to read, at most *limit* of them
			(unlimited when *limit* is None).
		"""
		pages = self._get_pages("SELECT * FROM wiki \
			WHERE timestamp >= NOW() - INTERVAL '4 weeks' \
			ORDER BY timestamp DESC")

		for page in pages:
			# Skip any pages the account has no permission for
			if not page.check_acl(account):
				continue

			yield page

			# Stop after limit pages have been returned
			# (this used to raise TypeError when limit was None)
			if limit is not None:
				limit -= 1
				if not limit:
					break

	def create_page(self, page, author, content, changes=None, address=None):
		"""Store a new revision of *page* and notify all watchers."""
		page = Page.sanitise_page_name(page)

		# Write page to the database
		page = self._get_page("INSERT INTO wiki(page, author_uid, markdown, changes, address) \
			VALUES(%s, %s, %s, %s, %s) RETURNING *", page, author.uid, content or None, changes, address)

		# Update cache
		self.memcache.set("wiki:title:%s" % page.page, page.title)

		# Send email to all watchers
		page._send_watcher_emails(excludes=[author])

		return page

	def delete_page(self, page, author, **kwargs):
		"""Delete *page* by storing an empty revision on top of it."""
		# Do nothing if the page does not exist
		if not self.get_page(page):
			return

		# Just creates a blank last version of the page
		self.create_page(page, author=author, content=None, **kwargs)

	def make_breadcrumbs(self, url):
		"""
			Return a list of (path, title) tuples for every ancestor
			of *url* (the page itself is not included).
		"""
		# Split and strip all empty elements (double slashes)
		parts = list(e for e in url.split("/") if e)

		ret = []
		for part in ("/".join(parts[:i]) for i in range(1, len(parts))):
			ret.append(("/%s" % part, self.get_page_title(part, os.path.basename(part))))

		return ret

	def search(self, query, account=None, limit=None):
		"""
			Full-text search over the wiki, honouring ACLs for
			*account* and returning at most *limit* pages.
		"""
		query = util.parse_search_query(query)

		res = self._get_pages("SELECT wiki.* FROM wiki_search_index search_index \
			LEFT JOIN wiki ON search_index.wiki_id = wiki.id \
			WHERE search_index.document @@ to_tsquery('english', %s) \
			ORDER BY ts_rank(search_index.document, to_tsquery('english', %s)) DESC",
			query, query)

		pages = []
		for page in res:
			# Skip any pages the user doesn't have permission for
			if not page.check_acl(account):
				continue

			# Return any other pages
			pages.append(page)

			# Break when we have found enough pages
			if limit and len(pages) >= limit:
				break

		return pages

	def refresh(self):
		"""
		Needs to be called after a page has been changed
		"""
		self.db.execute("REFRESH MATERIALIZED VIEW wiki_search_index")

	# ACL

	def check_acl(self, page, account):
		"""
			Return True when *account* may read *page*.

			The longest matching ACL path wins; pages without any
			matching ACL are world-readable.
		"""
		res = self.db.query("SELECT * FROM wiki_acls \
			WHERE %s ILIKE (path || '%%') ORDER BY LENGTH(path) DESC LIMIT 1", page)

		for row in res:
			# Access not permitted when user is not logged in
			if not account:
				return False

			# If user is in a matching group, we grant permission
			for group in row.groups:
				if group in account.groups:
					return True

			# Otherwise access is not permitted
			return False

		# If no ACLs are found, we permit access
		return True

	# Files

	def _get_files(self, query, *args):
		"""Run *query* and yield each result row wrapped in a File object."""
		res = self.db.query(query, *args)

		for row in res:
			yield File(self.backend, row.id, data=row)

	def _get_file(self, query, *args):
		"""Run *query* and return the single result row as a File, or None."""
		res = self.db.get(query, *args)

		if res:
			return File(self.backend, res.id, data=res)

	def get_files(self, path):
		"""Return all non-deleted files stored directly under *path*."""
		files = self._get_files("SELECT * FROM wiki_files \
			WHERE path = %s AND deleted_at IS NULL ORDER BY filename", path)

		return list(files)

	def get_file_by_path(self, path):
		"""Return the file at *path* (directory + filename), or None."""
		path, filename = os.path.dirname(path), os.path.basename(path)

		return self._get_file("SELECT * FROM wiki_files \
			WHERE path = %s AND filename = %s AND deleted_at IS NULL", path, filename)

	def upload(self, path, filename, data, mimetype, author, address):
		"""Store *data* as a blob and register it as a file at *path*."""
		# Upload the blob first
		blob = self.db.get("INSERT INTO wiki_blobs(data) VALUES(%s) RETURNING id", data)

		# Create entry for file
		return self._get_file("INSERT INTO wiki_files(path, filename, author_uid, address, \
			mimetype, blob_id, size) VALUES(%s, %s, %s, %s, %s, %s, %s) RETURNING *", path,
			filename, author.uid, address, mimetype, blob.id, len(data))

	def find_image(self, path, filename):
		"""
			Look for an image called *filename* in *path* or its
			parent directory and return it, or None.
		"""
		for p in (path, os.path.dirname(path)):
			file = self.get_file_by_path(os.path.join(p, filename))

			if file and file.is_image():
				return file
181d08f3 MT |
205 | |
class Page(misc.Object):
	# Wiki links
	wiki_link = re.compile(r"\[\[([\w\d\/\-\.]+)(?:\|(.+?))?\]\]")

	# External links
	external_link = re.compile(r"\[\[((?:ftp|git|https?|rsync|sftp|ssh|webcal)\:\/\/.+?)(?:\|(.+?))?\]\]")

	# Interwiki links e.g. [[wp>IPFire]]
	interwiki_link = re.compile(r"\[\[(\w+)>(.+?)(?:\|(.+?))?\]\]")

	# Mail link
	email_link = re.compile(r"\[\[([a-zA-Z0-9_.+-]+@[a-zA-Z0-9-]+\.[a-zA-Z0-9-.]+)(?:\|(.+?))?\]\]")

	def init(self, id, data=None):
		self.id = id
		self.data = data

	def __repr__(self):
		return "<%s %s %s>" % (self.__class__.__name__, self.page, self.timestamp)

	def __eq__(self, other):
		if isinstance(other, self.__class__):
			return self.id == other.id

		# Let Python fall back to the reflected comparison instead of
		# silently returning None for foreign types
		return NotImplemented

	def __hash__(self):
		# Defining __eq__ disables the inherited __hash__; restore it
		# consistently with equality (pages compare by id)
		return hash(self.id)

	def __lt__(self, other):
		if isinstance(other, self.__class__):
			if self.page == other.page:
				return self.timestamp < other.timestamp

			return self.page < other.page

		# Signal an unsupported comparison instead of returning None
		return NotImplemented

	@staticmethod
	def sanitise_page_name(page):
		"""Normalise *page* to '/a/b' form ('/' for an empty name)."""
		if not page:
			return "/"

		# Make sure that the page name does NOT end with a /
		if page.endswith("/"):
			page = page[:-1]

		# Make sure the page name starts with a /
		if not page.startswith("/"):
			page = "/%s" % page

		# Remove any double slashes
		page = page.replace("//", "/")

		return page

	@property
	def url(self):
		return self.page

	@property
	def full_url(self):
		return "https://wiki.ipfire.org%s" % self.url

	@property
	def page(self):
		return self.data.page

	@property
	def title(self):
		# Fall back to the basename of the page path if there is no H1
		return self._title or os.path.basename(self.page[1:])

	@property
	def _title(self):
		"""The first H1 headline of the markdown, or None."""
		if not self.markdown:
			return

		# Find first H1 headline in markdown
		markdown = self.markdown.splitlines()

		# Non-greedy match so an optional closing " #" is stripped
		# from the headline instead of being included in it
		m = re.match(r"^# (.*?)( #)?$", markdown[0])
		if m:
			return m.group(1)

	@lazy_property
	def author(self):
		if self.data.author_uid:
			return self.backend.accounts.get_by_uid(self.data.author_uid)

	def _render_wiki_link(self, m):
		"""Render a [[path|alias]] match as an HTML link."""
		path, alias = m.groups()

		# Allow relative links
		if not path.startswith("/"):
			path = os.path.join(self.page, path)

		# Normalise links
		path = os.path.normpath(path)

		return """<a href="%s">%s</a>""" % (
			path,
			alias or self.backend.wiki.get_page_title(path),
		)

	def _render_external_link(self, m):
		"""Render a [[url|alias]] match as an external HTML link."""
		url, alias = m.groups()

		return """<a class="link-external" href="%s">%s</a>""" % (url, alias or url)

	def _render_interwiki_link(self, m):
		"""Render a [[wiki>Name|alias]] match using the INTERWIKIS table."""
		wiki = m.group(1)
		if not wiki:
			return

		# Retrieve URL
		try:
			url, repl, icon = INTERWIKIS[wiki]
		except KeyError:
			logging.warning("Invalid interwiki: %s" % wiki)
			return

		# Name of the page
		name = m.group(2)

		# Expand URL
		url = url % {
			"name" : name,
			"url" : urllib.parse.quote(name),
		}

		# Get alias (if present)
		alias = m.group(3)

		if not alias and repl:
			alias = repl % name

		# Put everything together
		s = []

		if icon:
			s.append("<span class=\"%s\"></span>" % icon)

		s.append("""<a class="link-external" href="%s">%s</a>""" % (url, alias or name))

		return " ".join(s)

	def _render_email_link(self, m):
		"""Render a [[user@host|alias]] match as a mailto link."""
		address, alias = m.groups()

		return """<a class="link-external" href="mailto:%s">%s</a>""" \
			% (address, alias or address)

	def _render(self, text):
		"""Expand all wiki-specific syntax in *text* and render it as HTML."""
		logging.debug("Rendering %s" % self)

		# Link images
		replacements = []
		for match in re.finditer(r"!\[(.*?)\]\((.*?)\)", text):
			alt_text, url = match.groups()

			# Skip any absolute and external URLs
			if url.startswith("/") or url.startswith("https://") or url.startswith("http://"):
				continue

			# Try to split query string
			url, delimiter, qs = url.partition("?")

			# Parse query arguments
			args = urllib.parse.parse_qs(qs)

			# Find image
			file = self.backend.wiki.find_image(self.page, url)
			if not file:
				continue

			# Scale down the image if not already done
			if not "s" in args:
				args["s"] = "768"

			# Format URL
			url = "%s?%s" % (file.url, urllib.parse.urlencode(args))

			replacements.append((match.span(), file, alt_text, url))

		# Apply all replacements (back to front so spans stay valid)
		for (start, end), file, alt_text, url in reversed(replacements):
			text = text[:start] + "[![%s](%s)](%s?action=detail)" % (alt_text, url, file.url) + text[end:]

		# Handle wiki links
		text = self.wiki_link.sub(self._render_wiki_link, text)

		# Handle interwiki links
		text = self.interwiki_link.sub(self._render_interwiki_link, text)

		# Handle external links
		text = self.external_link.sub(self._render_external_link, text)

		# Handle email links
		text = self.email_link.sub(self._render_email_link, text)

		# Borrow this from the blog
		return self.backend.blog._render_text(text, lang="markdown")

	@property
	def markdown(self):
		# Normalises NULL (deleted page) to an empty string
		return self.data.markdown or ""

	@property
	def html(self):
		return self._render(self.markdown)

	@property
	def timestamp(self):
		return self.data.timestamp

	def was_deleted(self):
		# The markdown property maps NULL to "", so comparing with
		# "is None" could never be true; test for emptiness instead
		return not self.markdown

	@lazy_property
	def breadcrumbs(self):
		return self.backend.wiki.make_breadcrumbs(self.page)

	def get_latest_revision(self):
		revisions = self.get_revisions()

		# Return first object
		for rev in revisions:
			return rev

	def get_revisions(self):
		return self.backend.wiki._get_pages("SELECT * FROM wiki \
			WHERE page = %s ORDER BY timestamp DESC", self.page)

	@lazy_property
	def previous_revision(self):
		return self.backend.wiki._get_page("SELECT * FROM wiki \
			WHERE page = %s AND timestamp < %s ORDER BY timestamp DESC \
			LIMIT 1", self.page, self.timestamp)

	@property
	def changes(self):
		return self.data.changes

	# ACL

	def check_acl(self, account):
		return self.backend.wiki.check_acl(self.page, account)

	# Sidebar

	@lazy_property
	def sidebar(self):
		"""The closest sidebar page up the page hierarchy, or None."""
		parts = self.page.split("/")

		while parts:
			sidebar = self.backend.wiki.get_page("%s/sidebar" % os.path.join(*parts))
			if sidebar:
				return sidebar

			parts.pop()

	# Watchers

	@lazy_property
	def diff(self):
		"""Unified diff against the previous revision, or None."""
		if self.previous_revision:
			diff = difflib.unified_diff(
				self.previous_revision.markdown.splitlines(),
				self.markdown.splitlines(),
			)

			return "\n".join(diff)

	@property
	def watchers(self):
		"""Yield all accounts watching this page."""
		res = self.db.query("SELECT uid FROM wiki_watchlist \
			WHERE page = %s", self.page)

		for row in res:
			# Search for account by UID and skip if none was found
			account = self.backend.accounts.get_by_uid(row.uid)
			if not account:
				continue

			# Return the account
			yield account

	def is_watched_by(self, account):
		res = self.db.get("SELECT 1 FROM wiki_watchlist \
			WHERE page = %s AND uid = %s", self.page, account.uid)

		if res:
			return True

		return False

	def add_watcher(self, account):
		if self.is_watched_by(account):
			return

		self.db.execute("INSERT INTO wiki_watchlist(page, uid) \
			VALUES(%s, %s)", self.page, account.uid)

	def remove_watcher(self, account):
		self.db.execute("DELETE FROM wiki_watchlist \
			WHERE page = %s AND uid = %s", self.page, account.uid)

	def _send_watcher_emails(self, excludes=None):
		"""Mail every watcher except those in *excludes* about the change."""
		# Nothing to do if there was no previous revision
		if not self.previous_revision:
			return

		# Avoid a mutable default argument
		excludes = excludes or []

		for watcher in self.watchers:
			# Skip everyone who is excluded
			if watcher in excludes:
				logging.debug("Excluding %s" % watcher)
				continue

			logging.debug("Sending watcher email to %s" % watcher)

			# Compose message
			self.backend.messages.send_template("wiki/messages/page-changed",
				recipients=[watcher], page=self, priority=-10)
aba5e58a | 522 | |
f2cfd873 MT |
523 | |
class File(misc.Object):
	def init(self, id, data):
		self.id = id
		self.data = data

	@property
	def url(self):
		"""Path of this file below the wiki root."""
		return os.path.join(self.path, self.filename)

	@property
	def path(self):
		"""The directory this file lives in."""
		return self.data.path

	@property
	def filename(self):
		"""The bare file name without any path."""
		return self.data.filename

	@property
	def mimetype(self):
		"""MIME type as recorded at upload time."""
		return self.data.mimetype

	@property
	def size(self):
		"""Size of the file content in bytes."""
		return self.data.size

	@lazy_property
	def author(self):
		"""Account of the uploader, or None when unknown."""
		uid = self.data.author_uid
		if uid:
			return self.backend.accounts.get_by_uid(uid)

	@property
	def created_at(self):
		"""Timestamp of the upload."""
		return self.data.created_at

	def is_pdf(self):
		"""Does this file contain a PDF document?"""
		return self.mimetype in ("application/pdf", "application/x-pdf")

	def is_image(self):
		"""Is this file any kind of image?"""
		return self.mimetype.startswith("image/")

	@lazy_property
	def blob(self):
		"""The raw file content, fetched once from the blob store."""
		row = self.db.get("SELECT data FROM wiki_blobs \
			WHERE id = %s", self.data.blob_id)

		if row:
			return bytes(row.data)

	def get_thumbnail(self, size):
		"""Return this image scaled down to *size* pixels, cached."""
		key = "-".join((self.path, util.normalize(self.filename),
			self.created_at.isoformat(), "%spx" % size))

		# Serve straight from the cache whenever we can
		data = self.memcache.get(key)

		if not data:
			# Not cached yet - scale down the original blob
			data = util.generate_thumbnail(self.blob, size)

			# Keep the result cached indefinitely
			self.memcache.set(key, data)

		return data