#! /usr/bin/perl

# Copyright (C) 2011
# Jérémie Nikaes <jeremie.nikaes@ensimag.imag.fr>
# Arnaud Lacurie <arnaud.lacurie@ensimag.imag.fr>
# Claire Fousse <claire.fousse@ensimag.imag.fr>
# David Amouyal <david.amouyal@ensimag.imag.fr>
# Matthieu Moy <matthieu.moy@grenoble-inp.fr>
# License: GPL v2 or later

# Gateway between Git and MediaWiki.
# https://github.com/Bibzball/Git-Mediawiki/wiki
#
# Known limitations:
#
# - Poor performance in the best case: it takes forever to check
#   whether we're up-to-date (on fetch or push) or to fetch a few
#   revisions from a large wiki, because we use exclusively a
#   page-based synchronization. We could switch to a wiki-wide
#   synchronization when the synchronization involves few revisions
#   but the wiki is large.
#
# - Git renames could be turned into MediaWiki renames (see TODO
#   below).
#
# - Login/password support requires the user to write the password
#   in cleartext in a file (see TODO below).
#
# - No way to import "one page, and all pages included in it".
#
# - Multiple remote MediaWikis have not been very well tested.

use strict;
use MediaWiki::API;
use DateTime::Format::ISO8601;

# By default, use UTF-8 to communicate with Git and the user
binmode STDERR, ":utf8";
binmode STDOUT, ":utf8";

use URI::Escape;
use IPC::Open2;

use warnings;

# MediaWiki filenames can contain forward slashes. This variable
# decides by which pattern they should be replaced.
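# For example, a wiki page named "Foo/Bar" is stored in Git as the
# file "Foo%2FBar.mw" (see mediawiki_clean_filename() and
# mediawiki_smudge_filename() below).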
use constant SLASH_REPLACEMENT => "%2F";

# It's not always possible to delete pages (this may require special
# privileges). Deleted pages are replaced with this content.
use constant DELETED_CONTENT => "[[Category:Deleted]]\n";

# It's not possible to create empty pages. New empty files in Git are
# sent with this content instead.
use constant EMPTY_CONTENT => "<!-- empty page -->\n";

# Used to reflect file creation or deletion in diff.
use constant NULL_SHA1 => "0000000000000000000000000000000000000000";

my $remotename = $ARGV[0];
my $url = $ARGV[1];

# Accept both space-separated and multiple keys in config file.
# Spaces should be written as _ anyway because we'll use chomp.
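# For example, for a remote named "origin" (the name is just an
# example):
#   git config --add remote.origin.pages "Main_Page Sandbox"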
my @tracked_pages = split(/[ \n]/, run_git("config --get-all remote.". $remotename .".pages"));
chomp(@tracked_pages);

# Just like @tracked_pages, but for MediaWiki categories.
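# (e.g. "git config --add remote.origin.categories Some_Category",
# again with a hypothetical remote named "origin")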
my @tracked_categories = split(/[ \n]/, run_git("config --get-all remote.". $remotename .".categories"));
chomp(@tracked_categories);

# Import media files too.
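# (enabled with e.g. "git config --bool remote.origin.mediaimport true")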
my $import_media = run_git("config --get --bool remote.". $remotename .".mediaimport");
chomp($import_media);
$import_media = ($import_media eq "true");

my $wiki_login = run_git("config --get remote.". $remotename .".mwLogin");
# TODO: ideally, this should be able to read from the keyboard, but
# we're inside a remote helper, so our stdin is connected to git, not
# to a terminal.
my $wiki_passwd = run_git("config --get remote.". $remotename .".mwPassword");
my $wiki_domain = run_git("config --get remote.". $remotename .".mwDomain");
chomp($wiki_login);
chomp($wiki_passwd);
chomp($wiki_domain);

# Import only the last revisions (both for clone and fetch)
my $shallow_import = run_git("config --get --bool remote.". $remotename .".shallow");
chomp($shallow_import);
$shallow_import = ($shallow_import eq "true");

# Dumb push: don't update notes and mediawiki ref to reflect the last push.
#
# Configurable with mediawiki.dumbPush, or per-remote with
# remote.<remotename>.dumbPush.
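# (For example: "git config --bool remote.origin.dumbPush true",
# assuming a remote named "origin".)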
#
# This means the user will have to re-import the just-pushed
# revisions. On the other hand, this means that the Git revisions
# corresponding to MediaWiki revisions are all imported from the wiki,
# regardless of whether they were initially created in Git or from the
# web interface, hence all users will get the same history (i.e. if
# the push from Git to MediaWiki loses some information, everybody
# will get the history with information lost). If the import is
# deterministic, this means everybody gets the same sha1 for each
# MediaWiki revision.
my $dumb_push = run_git("config --get --bool remote.$remotename.dumbPush");
unless ($dumb_push) {
	$dumb_push = run_git("config --get --bool mediawiki.dumbPush");
}
chomp($dumb_push);
$dumb_push = ($dumb_push eq "true");

my $wiki_name = $url;
$wiki_name =~ s/[^\/]*:\/\///;
# If URL is like http://user:password@example.com/, we clearly don't
# want the password in $wiki_name. While we're there, also remove the
# user and the '@' sign, to avoid author names like
# MWUser@HTTPUser@host.com.
$wiki_name =~ s/^.*@//;

# Commands parser
my $entry;
my @cmd;
while (<STDIN>) {
	chomp;
	@cmd = split(/ /);
	if (defined($cmd[0])) {
		# Line not blank
		if ($cmd[0] eq "capabilities") {
			die("Too many arguments for capabilities") unless (!defined($cmd[1]));
			mw_capabilities();
		} elsif ($cmd[0] eq "list") {
			die("Too many arguments for list") unless (!defined($cmd[2]));
			mw_list($cmd[1]);
		} elsif ($cmd[0] eq "import") {
			die("Invalid arguments for import") unless ($cmd[1] ne "" && !defined($cmd[2]));
			mw_import($cmd[1]);
		} elsif ($cmd[0] eq "option") {
			die("Too many arguments for option") unless ($cmd[1] ne "" && $cmd[2] ne "" && !defined($cmd[3]));
			mw_option($cmd[1],$cmd[2]);
		} elsif ($cmd[0] eq "push") {
			mw_push($cmd[1]);
		} else {
			print STDERR "Unknown command. Aborting...\n";
			last;
		}
	} else {
		# Blank line: we should terminate.
		last;
	}

	BEGIN { $| = 1 } # flush STDOUT, to make sure the previous
			 # command is fully processed.
}

########################## Functions ##############################

## credential API management (generic functions)

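# The functions below speak the "git credential" protocol: a
# credential is described as one "key=value" attribute per line
# (protocol, host, path, username, password), terminated by a blank
# line. "git credential fill" completes such a description, while
# "approve" and "reject" update the credential store.
# credential_from_url() builds the description from the parts of a URL.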
sub credential_from_url {
	my $url = shift;
	my $parsed = URI->new($url);
	my %credential;

	if ($parsed->scheme) {
		$credential{protocol} = $parsed->scheme;
	}
	if ($parsed->host) {
		$credential{host} = $parsed->host;
	}
	if ($parsed->path) {
		$credential{path} = $parsed->path;
	}
	if ($parsed->userinfo) {
		if ($parsed->userinfo =~ /([^:]*):(.*)/) {
			$credential{username} = $1;
			$credential{password} = $2;
		} else {
			$credential{username} = $parsed->userinfo;
		}
	}

	return %credential;
}

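# Read "key=value" attribute lines written by "git credential $op"
# on $reader into a hash.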
sub credential_read {
	my %credential;
	my $reader = shift;
	my $op = shift;
	while (<$reader>) {
		my ($key, $value) = /([^=]*)=(.*)/;
		if (not defined $key) {
			die "ERROR receiving response from git credential $op:\n$_\n";
		}
		$credential{$key} = $value;
	}
	return %credential;
}

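# Write the attributes of %$credential to $writer, one "key=value"
# pair per line, skipping empty values.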
sub credential_write {
	my $credential = shift;
	my $writer = shift;
	while (my ($key, $value) = each(%$credential)) {
		if ($value) {
			print $writer "$key=$value\n";
		}
	}
}

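# Run "git credential $op" ($op is "fill", "approve" or "reject"),
# feeding it %$credential on its stdin. For "fill", %$credential is
# replaced by the completed credential read back from the command.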
sub credential_run {
	my $op = shift;
	my $credential = shift;
	my $pid = open2(my $reader, my $writer, "git credential $op");
	credential_write($credential, $writer);
	print $writer "\n";
	close($writer);

	if ($op eq "fill") {
		%$credential = credential_read($reader, $op);
	} else {
		if (<$reader>) {
			die "ERROR while running git credential $op:\n$_";
		}
	}
	close($reader);
	waitpid($pid, 0);
	my $child_exit_status = $? >> 8;
	if ($child_exit_status != 0) {
		die "'git credential $op' failed with code $child_exit_status.";
	}
}

# MediaWiki API instance, created lazily.
my $mediawiki;

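# Connect to the wiki on first use: create the MediaWiki::API object
# and, if a login is configured, authenticate (letting "git
# credential" fill in and store the password).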
sub mw_connect_maybe {
	if ($mediawiki) {
		return;
	}
	$mediawiki = MediaWiki::API->new;
	$mediawiki->{config}->{api_url} = "$url/api.php";
	if ($wiki_login) {
		my %credential = credential_from_url($url);
		$credential{username} = $wiki_login;
		$credential{password} = $wiki_passwd;
		credential_run("fill", \%credential);
		my $request = {lgname => $credential{username},
			       lgpassword => $credential{password},
			       lgdomain => $wiki_domain};
		if ($mediawiki->login($request)) {
			credential_run("approve", \%credential);
			print STDERR "Logged in mediawiki user \"$credential{username}\".\n";
		} else {
			print STDERR "Failed to log in mediawiki user \"$credential{username}\" on $url\n";
			print STDERR "  (error " .
				$mediawiki->{error}->{code} . ': ' .
				$mediawiki->{error}->{details} . ")\n";
			credential_run("reject", \%credential);
			exit 1;
		}
	}
}

## Functions for listing pages on the remote wiki
sub get_mw_tracked_pages {
	my $pages = shift;
	get_mw_page_list(\@tracked_pages, $pages);
}

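# Query the wiki for the pages whose titles are listed in @$page_list,
# in slices of 50 titles per request (the limit for a single 'titles'
# query), and record the result in %$pages.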
sub get_mw_page_list {
	my $page_list = shift;
	my $pages = shift;
	my @some_pages = @$page_list;
	while (@some_pages) {
		my $last = 50;
		if ($#some_pages < $last) {
			$last = $#some_pages;
		}
		my @slice = @some_pages[0..$last];
		get_mw_first_pages(\@slice, $pages);
		@some_pages = @some_pages[51..$#some_pages];
	}
}

sub get_mw_tracked_categories {
	my $pages = shift;
	foreach my $category (@tracked_categories) {
		if (index($category, ':') < 0) {
			# MediaWiki requires the Category
			# prefix, but let's not force the user
			# to specify it.
			$category = "Category:" . $category;
		}
		my $mw_pages = $mediawiki->list( {
			action => 'query',
			list => 'categorymembers',
			cmtitle => $category,
			cmlimit => 'max' } )
			|| die $mediawiki->{error}->{code} . ': '
				. $mediawiki->{error}->{details};
		foreach my $page (@{$mw_pages}) {
			$pages->{$page->{title}} = $page;
		}
	}
}

sub get_mw_all_pages {
	my $pages = shift;
	# No user-provided list, get the list of pages from the API.
	my $mw_pages = $mediawiki->list({
		action => 'query',
		list => 'allpages',
		aplimit => 'max'
	});
	if (!defined($mw_pages)) {
		print STDERR "fatal: could not get the list of wiki pages.\n";
		print STDERR "fatal: '$url' does not appear to be a mediawiki\n";
		print STDERR "fatal: make sure '$url/api.php' is a valid page.\n";
		exit 1;
	}
	foreach my $page (@{$mw_pages}) {
		$pages->{$page->{title}} = $page;
	}
}

# Queries the wiki for a set of pages. Meant to be used within a loop
# querying the wiki for slices of a page list.
sub get_mw_first_pages {
	my $some_pages = shift;
	my @some_pages = @{$some_pages};

	my $pages = shift;

	# pattern 'page1|page2|...' required by the API
	my $titles = join('|', @some_pages);

	my $mw_pages = $mediawiki->api({
		action => 'query',
		titles => $titles,
	});
	if (!defined($mw_pages)) {
		print STDERR "fatal: could not query the list of wiki pages.\n";
		print STDERR "fatal: '$url' does not appear to be a mediawiki\n";
		print STDERR "fatal: make sure '$url/api.php' is a valid page.\n";
		exit 1;
	}
	while (my ($id, $page) = each(%{$mw_pages->{query}->{pages}})) {
		if ($id < 0) {
			print STDERR "Warning: page $page->{title} not found on wiki\n";
		} else {
			$pages->{$page->{title}} = $page;
		}
	}
}

# Get the list of pages to be fetched according to configuration.
sub get_mw_pages {
	mw_connect_maybe();

	my %pages; # hash on page titles to avoid duplicates
	my $user_defined;
	if (@tracked_pages) {
		$user_defined = 1;
		# The user provided a list of page titles, but we
		# still need to query the API to get the page IDs.
		get_mw_tracked_pages(\%pages);
	}
	if (@tracked_categories) {
		$user_defined = 1;
		get_mw_tracked_categories(\%pages);
	}
	if (!$user_defined) {
		get_mw_all_pages(\%pages);
	}
	if ($import_media) {
		print STDERR "Getting media files for selected pages...\n";
		if ($user_defined) {
			get_linked_mediafiles(\%pages);
		} else {
			get_all_mediafiles(\%pages);
		}
	}
	return values(%pages);
}

# usage: $out = run_git("command args");
#        $out = run_git("command args", "raw"); # don't interpret output as UTF-8.
sub run_git {
	my $args = shift;
	my $encoding = (shift || "encoding(UTF-8)");
	open(my $git, "-|:$encoding", "git " . $args);
	my $res = do { local $/; <$git> };
	close($git);

	return $res;
}


sub get_all_mediafiles {
	my $pages = shift;
	# Attach the list of all pages for media files from the API;
	# they are in a different namespace, and only one namespace
	# can be queried at a time.
	my $mw_pages = $mediawiki->list({
		action => 'query',
		list => 'allpages',
		apnamespace => get_mw_namespace_id("File"),
		aplimit => 'max'
	});
	if (!defined($mw_pages)) {
		print STDERR "fatal: could not get the list of pages for media files.\n";
		print STDERR "fatal: '$url' does not appear to be a mediawiki\n";
		print STDERR "fatal: make sure '$url/api.php' is a valid page.\n";
		exit 1;
	}
	foreach my $page (@{$mw_pages}) {
		$pages->{$page->{title}} = $page;
	}
}

sub get_linked_mediafiles {
	my $pages = shift;
	my @titles = map $_->{title}, values(%{$pages});

	# The query is split into small batches because of the MW API
	# limit on the number of links to be returned (500 links max).
	my $batch = 10;
	while (@titles) {
		if ($#titles < $batch) {
			$batch = $#titles;
		}
		my @slice = @titles[0..$batch];

		# pattern 'page1|page2|...' required by the API
		my $mw_titles = join('|', @slice);

		# Media files could be included in or linked from
		# a page; get all of them.
		my $query = {
			action => 'query',
			prop => 'links|images',
			titles => $mw_titles,
			plnamespace => get_mw_namespace_id("File"),
			pllimit => 'max'
		};
		my $result = $mediawiki->api($query);

		while (my ($id, $page) = each(%{$result->{query}->{pages}})) {
			my @media_titles;
			if (defined($page->{links})) {
				my @link_titles = map $_->{title}, @{$page->{links}};
				push(@media_titles, @link_titles);
			}
			if (defined($page->{images})) {
				my @image_titles = map $_->{title}, @{$page->{images}};
				push(@media_titles, @image_titles);
			}
			if (@media_titles) {
				get_mw_page_list(\@media_titles, $pages);
			}
		}

		@titles = @titles[($batch+1)..$#titles];
	}
}

sub get_mw_mediafile_for_page_revision {
	# Name of the file on the wiki, with the prefix.
	my $filename = shift;
	my $timestamp = shift;
	my %mediafile;

	# Search if a media file with the given timestamp exists on
	# MediaWiki. In that case, download the file.
	my $query = {
		action => 'query',
		prop => 'imageinfo',
		titles => "File:" . $filename,
		iistart => $timestamp,
		iiend => $timestamp,
		iiprop => 'timestamp|archivename|url',
		iilimit => 1
	};
	my $result = $mediawiki->api($query);

	my ($fileid, $file) = each( %{$result->{query}->{pages}} );
	# If not defined it means there is no revision of the file for
	# the given timestamp.
	if (defined($file->{imageinfo})) {
		$mediafile{title} = $filename;

		my $fileinfo = pop(@{$file->{imageinfo}});
		$mediafile{timestamp} = $fileinfo->{timestamp};
		# MediaWiki::API's download function doesn't support https URLs
		# and can't download old versions of files.
		print STDERR "\tDownloading file $mediafile{title}, version $mediafile{timestamp}\n";
		$mediafile{content} = download_mw_mediafile($fileinfo->{url});
	}
	return %mediafile;
}

sub download_mw_mediafile {
	my $url = shift;

	my $response = $mediawiki->{ua}->get($url);
	if ($response->code == 200) {
		return $response->decoded_content;
	} else {
		print STDERR "Error downloading media file from:\n";
		print STDERR "URL: $url\n";
		print STDERR "Server response: " . $response->code . " " . $response->message . "\n";
		exit 1;
	}
}

sub get_last_local_revision {
	# Get note regarding the last mediawiki revision
	my $note = run_git("notes --ref=$remotename/mediawiki show refs/mediawiki/$remotename/master 2>/dev/null");
	my @note_info = split(/ /, $note);

	my $lastrevision_number;
	if (!(defined($note_info[0]) && $note_info[0] eq "mediawiki_revision:")) {
		print STDERR "No previous mediawiki revision found";
		$lastrevision_number = 0;
	} else {
		# Notes are formatted: mediawiki_revision: #number
		$lastrevision_number = $note_info[1];
		chomp($lastrevision_number);
		print STDERR "Last local mediawiki revision found is $lastrevision_number";
	}
	return $lastrevision_number;
}

# Remember the timestamp corresponding to a revision id.
my %basetimestamps;

sub get_last_remote_revision {
	mw_connect_maybe();

	my @pages = get_mw_pages();

	my $max_rev_num = 0;

	foreach my $page (@pages) {
		my $id = $page->{pageid};

		my $query = {
			action => 'query',
			prop => 'revisions',
			rvprop => 'ids|timestamp',
			pageids => $id,
		};

		my $result = $mediawiki->api($query);

		my $lastrev = pop(@{$result->{query}->{pages}->{$id}->{revisions}});

		$basetimestamps{$lastrev->{revid}} = $lastrev->{timestamp};

		$max_rev_num = ($lastrev->{revid} > $max_rev_num ? $lastrev->{revid} : $max_rev_num);
	}

	print STDERR "Last remote revision found is $max_rev_num.\n";
	return $max_rev_num;
}

# Clean content before sending it to MediaWiki
sub mediawiki_clean {
	my $string = shift;
	my $page_created = shift;
	# MediaWiki does not allow blank space at the end of a page,
	# and a page must end with a single \n. This function
	# right-trims a string and adds a \n at the end to follow
	# this rule.
	$string =~ s/\s+$//;
	if ($string eq "" && $page_created) {
		# Creating empty pages is forbidden.
		$string = EMPTY_CONTENT;
	}
	return $string."\n";
}

# Filter applied on MediaWiki data before adding them to Git
sub mediawiki_smudge {
	my $string = shift;
	if ($string eq EMPTY_CONTENT) {
		$string = "";
	}
	# This \n is important. This is due to mediawiki's way to handle end of files.
	return $string."\n";
}

sub mediawiki_clean_filename {
	my $filename = shift;
	$filename =~ s/@{[SLASH_REPLACEMENT]}/\//g;
	# [, ], |, {, and } are forbidden by MediaWiki, even URL-encoded.
	# Do a variant of URL-encoding, i.e. one that looks like
	# URL-encoding, but with _ added to prevent MediaWiki from
	# thinking this is an actual special character.
	$filename =~ s/[\[\]\{\}\|]/sprintf("_%%_%x", ord($&))/ge;
	# If we use the uri escape before
	# we should unescape here, before anything

	return $filename;
}

sub mediawiki_smudge_filename {
	my $filename = shift;
	$filename =~ s/\//@{[SLASH_REPLACEMENT]}/g;
	$filename =~ s/ /_/g;
	# Decode forbidden characters encoded in mediawiki_clean_filename
	$filename =~ s/_%_([0-9a-fA-F][0-9a-fA-F])/sprintf("%c", hex($1))/ge;
	return $filename;
}

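# Send a fast-import "data" command for $content: the length in bytes
# on one line, followed by the raw content.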
sub literal_data {
	my ($content) = @_;
	print STDOUT "data ", bytes::length($content), "\n", $content;
}

sub literal_data_raw {
	# Output possibly binary content.
	my ($content) = @_;
	# Avoid confusion between size in bytes and in characters
	utf8::downgrade($content);
	binmode STDOUT, ":raw";
	print STDOUT "data ", bytes::length($content), "\n", $content;
	binmode STDOUT, ":utf8";
}

sub mw_capabilities {
	# Revisions are imported to the private namespace
	# refs/mediawiki/$remotename/ by the helper and fetched into
	# refs/remotes/$remotename later by fetch.
	print STDOUT "refspec refs/heads/*:refs/mediawiki/$remotename/*\n";
	print STDOUT "import\n";
	print STDOUT "list\n";
	print STDOUT "push\n";
	print STDOUT "\n";
}

sub mw_list {
	# MediaWiki does not have branches, so we consider one branch
	# arbitrarily called master, with HEAD pointing to it.
	print STDOUT "? refs/heads/master\n";
	print STDOUT "\@refs/heads/master HEAD\n";
	print STDOUT "\n";
}

sub mw_option {
	print STDERR "remote-helper command 'option $_[0]' not yet implemented\n";
	print STDOUT "unsupported\n";
}

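# Fetch the ids of all revisions of one page since revision
# $fetch_from, following the API's 'query-continue' pagination
# (or only the latest revision for a shallow import).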
sub fetch_mw_revisions_for_page {
	my $page = shift;
	my $id = shift;
	my $fetch_from = shift;
	my @page_revs = ();
	my $query = {
		action => 'query',
		prop => 'revisions',
		rvprop => 'ids',
		rvdir => 'newer',
		rvstartid => $fetch_from,
		rvlimit => 500,
		pageids => $id,
	};

	my $revnum = 0;
	# Get 500 revisions at a time due to the mediawiki api limit
	while (1) {
		my $result = $mediawiki->api($query);

		# Parse each of those 500 revisions
		foreach my $revision (@{$result->{query}->{pages}->{$id}->{revisions}}) {
			my $page_rev_ids;
			$page_rev_ids->{pageid} = $page->{pageid};
			$page_rev_ids->{revid} = $revision->{revid};
			push(@page_revs, $page_rev_ids);
			$revnum++;
		}
		last unless $result->{'query-continue'};
		$query->{rvstartid} = $result->{'query-continue'}->{revisions}->{rvstartid};
	}
	if ($shallow_import && @page_revs) {
		print STDERR "  Found 1 revision (shallow import).\n";
		@page_revs = sort {$b->{revid} <=> $a->{revid}} (@page_revs);
		return $page_revs[0];
	}
	print STDERR "  Found ", $revnum, " revision(s).\n";
	return @page_revs;
}

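# Fetch the revision ids of every page in @$pages, newer than
# $fetch_from.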
sub fetch_mw_revisions {
	my $pages = shift; my @pages = @{$pages};
	my $fetch_from = shift;

	my @revisions = ();
	my $n = 1;
	foreach my $page (@pages) {
		my $id = $page->{pageid};

		print STDERR "page $n/", scalar(@pages), ": ". $page->{title} ."\n";
		$n++;
		my @page_revs = fetch_mw_revisions_for_page($page, $id, $fetch_from);
		@revisions = (@page_revs, @revisions);
	}

	return ($n, @revisions);
}

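# Emit one revision on the fast-import stream: a commit on
# refs/mediawiki/$remotename/master containing the page (and its
# media file, if any), plus a note recording the mediawiki revision
# number.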
sub import_file_revision {
	my $commit = shift;
	my %commit = %{$commit};
	my $full_import = shift;
	my $n = shift;
	my $mediafile = shift;
	my %mediafile;
	if ($mediafile) {
		%mediafile = %{$mediafile};
	}

	my $title = $commit{title};
	my $comment = $commit{comment};
	my $content = $commit{content};
	my $author = $commit{author};
	my $date = $commit{date};

	print STDOUT "commit refs/mediawiki/$remotename/master\n";
	print STDOUT "mark :$n\n";
	print STDOUT "committer $author <$author\@$wiki_name> ", $date->epoch, " +0000\n";
	literal_data($comment);

	# If it's not a clone, we need to know where to start from
	if (!$full_import && $n == 1) {
		print STDOUT "from refs/mediawiki/$remotename/master^0\n";
	}
	if ($content ne DELETED_CONTENT) {
		print STDOUT "M 644 inline $title.mw\n";
		literal_data($content);
		if (%mediafile) {
			print STDOUT "M 644 inline $mediafile{title}\n";
			literal_data_raw($mediafile{content});
		}
		print STDOUT "\n\n";
	} else {
		print STDOUT "D $title.mw\n";
	}

	# mediawiki revision number in the git note
	if ($full_import && $n == 1) {
		print STDOUT "reset refs/notes/$remotename/mediawiki\n";
	}
	print STDOUT "commit refs/notes/$remotename/mediawiki\n";
	print STDOUT "committer $author <$author\@$wiki_name> ", $date->epoch, " +0000\n";
	literal_data("Note added by git-mediawiki during import");
	if (!$full_import && $n == 1) {
		print STDOUT "from refs/notes/$remotename/mediawiki^0\n";
	}
	print STDOUT "N inline :$n\n";
	literal_data("mediawiki_revision: " . $commit{mw_revision});
	print STDOUT "\n\n";
}

# Parse a sequence of
#   <cmd> <arg1>
#   <cmd> <arg2>
#   \n
# (like a batch sequence of import or push statements).
sub get_more_refs {
	my $cmd = shift;
	my @refs;
	while (1) {
		my $line = <STDIN>;
		if ($line =~ m/^$cmd (.*)$/) {
			push(@refs, $1);
		} elsif ($line eq "\n") {
			return @refs;
		} else {
			die("Invalid command in a '$cmd' batch: ". $line);
		}
	}
}

sub mw_import {
	# Multiple import commands can follow each other.
	my @refs = (shift, get_more_refs("import"));
	foreach my $ref (@refs) {
		mw_import_ref($ref);
	}
	print STDOUT "done\n";
}

sub mw_import_ref {
	my $ref = shift;
	# The remote helper will call "import HEAD" and
	# "import refs/heads/master".
	# Since HEAD is a symbolic ref to master (by convention,
	# followed by the output of the command "list" that we gave),
	# we don't need to do anything in this case.
	if ($ref eq "HEAD") {
		return;
	}

	mw_connect_maybe();

	my @pages = get_mw_pages();

	print STDERR "Searching revisions...\n";
	my $last_local = get_last_local_revision();
	my $fetch_from = $last_local + 1;
	if ($fetch_from == 1) {
		print STDERR ", fetching from beginning.\n";
	} else {
		print STDERR ", fetching from here.\n";
	}
	my ($n, @revisions) = fetch_mw_revisions(\@pages, $fetch_from);

	# Creation of the fast-import stream
	print STDERR "Fetching & writing export data...\n";

	$n = 0;
	my $last_timestamp = 0; # Placeholder in case $rev->{timestamp} is undefined

	foreach my $pagerevid (sort {$a->{revid} <=> $b->{revid}} @revisions) {
		# Fetch the content of the pages.
		my $query = {
			action => 'query',
			prop => 'revisions',
			rvprop => 'content|timestamp|comment|user|ids',
			revids => $pagerevid->{revid},
		};

		my $result = $mediawiki->api($query);

		my $rev = pop(@{$result->{query}->{pages}->{$pagerevid->{pageid}}->{revisions}});

		$n++;

		my $page_title = $result->{query}->{pages}->{$pagerevid->{pageid}}->{title};
		my %commit;
		$commit{author} = $rev->{user} || 'Anonymous';
		$commit{comment} = $rev->{comment} || '*Empty MediaWiki Message*';
		$commit{title} = mediawiki_smudge_filename($page_title);
		$commit{mw_revision} = $pagerevid->{revid};
		$commit{content} = mediawiki_smudge($rev->{'*'});

		if (!defined($rev->{timestamp})) {
			$last_timestamp++;
		} else {
			$last_timestamp = $rev->{timestamp};
		}
		$commit{date} = DateTime::Format::ISO8601->parse_datetime($last_timestamp);

		# Differentiate classic pages and media files.
		my ($namespace, $filename) = $page_title =~ /^([^:]*):(.*)$/;
		my %mediafile;
		if ($namespace && get_mw_namespace_id($namespace) == get_mw_namespace_id("File")) {
			%mediafile = get_mw_mediafile_for_page_revision($filename, $rev->{timestamp});
		}
		# If this is a revision of the media page for a new version
		# of a file, do one common commit for both the file and the
		# media page. Otherwise, commit only that page.
		print STDERR "$n/", scalar(@revisions), ": Revision #$pagerevid->{revid} of $commit{title}\n";
		import_file_revision(\%commit, ($fetch_from == 1), $n, \%mediafile);
	}

	if ($fetch_from == 1 && $n == 0) {
		print STDERR "You appear to have cloned an empty MediaWiki.\n";
		# Something has to be done remote-helper side. If nothing is
		# done, an error is thrown saying that HEAD is referring to
		# an unknown object 0000000000000000000 and the clone fails.
	}
}

sub error_non_fast_forward {
	my $advice = run_git("config --bool advice.pushNonFastForward");
	chomp($advice);
	if ($advice ne "false") {
		# Native git-push would show this after the summary.
		# We can't ask it to display it cleanly, so print it
		# ourselves before.
		print STDERR "To prevent you from losing history, non-fast-forward updates were rejected\n";
		print STDERR "Merge the remote changes (e.g. 'git pull') before pushing again. See the\n";
		print STDERR "'Note about fast-forwards' section of 'git push --help' for details.\n";
	}
	print STDOUT "error $_[0] \"non-fast-forward\"\n";
	return 0;
}

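# Upload (or delete) a media file on the wiki. Returns the new
# revision id of its "File:" page, or undef if no new revision was
# created.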
sub mw_upload_file {
	my $complete_file_name = shift;
	my $new_sha1 = shift;
	my $extension = shift;
	my $file_deleted = shift;
	my $summary = shift;
	my $newrevid;
	my $path = "File:" . $complete_file_name;
	my %hashFiles = get_allowed_file_extensions();
	if (!exists($hashFiles{$extension})) {
		print STDERR "$complete_file_name is not a permitted file on this wiki.\n";
		print STDERR "Check the configuration of file uploads in your mediawiki.\n";
		return $newrevid;
	}
	# Deleting and uploading a file requires a privileged user
	if ($file_deleted) {
		mw_connect_maybe();
		my $query = {
			action => 'delete',
			title => $path,
			reason => $summary
		};
		if (!$mediawiki->edit($query)) {
			print STDERR "Failed to delete file on remote wiki\n";
			print STDERR "Check your permissions on the remote site. Error code:\n";
			print STDERR $mediawiki->{error}->{code} . ':' . $mediawiki->{error}->{details};
			exit 1;
		}
	} else {
		# Don't let perl try to interpret file content as UTF-8 => use "raw"
		my $content = run_git("cat-file blob $new_sha1", "raw");
		if ($content ne "") {
			mw_connect_maybe();
			$mediawiki->{config}->{upload_url} =
				"$url/index.php/Special:Upload";
			$mediawiki->edit({
				action => 'upload',
				filename => $complete_file_name,
				comment => $summary,
				file => [undef,
					 $complete_file_name,
					 Content => $content],
				ignorewarnings => 1,
			}, {
				skip_encoding => 1
			} ) || die $mediawiki->{error}->{code} . ':'
				 . $mediawiki->{error}->{details};
			my $last_file_page = $mediawiki->get_page({title => $path});
			$newrevid = $last_file_page->{revid};
			print STDERR "Pushed file: $new_sha1 - $complete_file_name.\n";
		} else {
			print STDERR "Empty file $complete_file_name not pushed.\n";
		}
	}
	return $newrevid;
}

sub mw_push_file {
	my $diff_info = shift;
	# $diff_info contains a string in this format:
	# 100644 100644 <sha1_of_blob_before_commit> <sha1_of_blob_now> <status>
	my @diff_info_split = split(/[ \t]/, $diff_info);

	# Filename, including .mw extension
	my $complete_file_name = shift;
	# Commit message
	my $summary = shift;
	# MediaWiki revision number. Keep the previous one by default,
	# in case there's no edit to perform.
	my $oldrevid = shift;
	my $newrevid;

	my $new_sha1 = $diff_info_split[3];
	my $old_sha1 = $diff_info_split[2];
	my $page_created = ($old_sha1 eq NULL_SHA1);
	my $page_deleted = ($new_sha1 eq NULL_SHA1);
	$complete_file_name = mediawiki_clean_filename($complete_file_name);

	my ($title, $extension) = $complete_file_name =~ /^(.*)\.([^\.]*)$/;
	if (!defined($extension)) {
		$extension = "";
	}
	if ($extension eq "mw") {
		my $file_content;
		if ($page_deleted) {
			# Deleting a page usually requires
			# special privileges. A common
			# convention is to replace the page
			# with this content instead:
			$file_content = DELETED_CONTENT;
		} else {
			$file_content = run_git("cat-file blob $new_sha1");
		}

		mw_connect_maybe();

		my $result = $mediawiki->edit( {
			action => 'edit',
			summary => $summary,
			title => $title,
			basetimestamp => $basetimestamps{$oldrevid},
			text => mediawiki_clean($file_content, $page_created),
		}, {
			skip_encoding => 1 # Helps with names with accented characters
		});
		if (!$result) {
			if ($mediawiki->{error}->{code} == 3) {
				# edit conflicts, considered as non-fast-forward
				print STDERR 'Warning: Error ' .
					$mediawiki->{error}->{code} .
					' from mediawiki: ' . $mediawiki->{error}->{details} .
					".\n";
				return ($oldrevid, "non-fast-forward");
			} else {
				# Other errors. Shouldn't happen => just die()
				die 'Fatal: Error ' .
					$mediawiki->{error}->{code} .
					' from mediawiki: ' . $mediawiki->{error}->{details};
			}
		}
		$newrevid = $result->{edit}->{newrevid};
		print STDERR "Pushed file: $new_sha1 - $title\n";
	} else {
		$newrevid = mw_upload_file($complete_file_name, $new_sha1,
					   $extension, $page_deleted,
					   $summary);
	}
	$newrevid = ($newrevid or $oldrevid);
	return ($newrevid, "ok");
}

sub mw_push {
	# Multiple push statements can follow each other.
	my @refsspecs = (shift, get_more_refs("push"));
	my $pushed;
	for my $refspec (@refsspecs) {
		my ($force, $local, $remote) = $refspec =~ /^(\+)?([^:]*):([^:]*)$/
			or die("Invalid refspec for push. Expected <src>:<dst> or +<src>:<dst>");
		if ($force) {
			print STDERR "Warning: forced push not allowed on a MediaWiki.\n";
		}
		if ($local eq "") {
			print STDERR "Cannot delete remote branch on a MediaWiki\n";
			print STDOUT "error $remote cannot delete\n";
			next;
		}
		if ($remote ne "refs/heads/master") {
			print STDERR "Only push to the branch 'master' is supported on a MediaWiki\n";
			print STDOUT "error $remote only master allowed\n";
			next;
		}
		if (mw_push_revision($local, $remote)) {
			$pushed = 1;
		}
	}

	# Notify Git that the push is done
	print STDOUT "\n";

	if ($pushed && $dumb_push) {
		print STDERR "Just pushed some revisions to MediaWiki.\n";
		print STDERR "The pushed revisions now have to be re-imported, and your current branch\n";
		print STDERR "needs to be updated with these re-imported commits. You can do this with\n";
		print STDERR "\n";
		print STDERR "  git pull --rebase\n";
		print STDERR "\n";
	}
}

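# Push the commits between the last known MediaWiki state and $local
# to the wiki, one wiki edit per changed file. Returns 1 on success,
# 0 if there was nothing to push or the push was rejected as
# non-fast-forward.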
sub mw_push_revision {
	my $local = shift;
	my $remote = shift; # actually, this has to be "refs/heads/master" at this point.
	my $last_local_revid = get_last_local_revision();
	print STDERR ".\n"; # Finish sentence started by get_last_local_revision()
	my $last_remote_revid = get_last_remote_revision();
	my $mw_revision = $last_remote_revid;

	# Get sha1 of commit pointed by local HEAD
	my $HEAD_sha1 = run_git("rev-parse $local 2>/dev/null"); chomp($HEAD_sha1);
	# Get sha1 of commit pointed by remotes/$remotename/master
	my $remoteorigin_sha1 = run_git("rev-parse refs/remotes/$remotename/master 2>/dev/null");
	chomp($remoteorigin_sha1);

	if ($last_local_revid > 0 &&
	    $last_local_revid < $last_remote_revid) {
		return error_non_fast_forward($remote);
	}

	if ($HEAD_sha1 eq $remoteorigin_sha1) {
		# nothing to push
		return 0;
	}

	# Get every commit in between HEAD and refs/remotes/origin/master,
	# including HEAD and refs/remotes/origin/master
	my @commit_pairs = ();
	if ($last_local_revid > 0) {
		my $parsed_sha1 = $remoteorigin_sha1;
		# Find a path from the last MediaWiki commit to the pushed commit
		while ($parsed_sha1 ne $HEAD_sha1) {
			my @commit_info = grep(/^$parsed_sha1/, split(/\n/, run_git("rev-list --children $local")));
			if (!@commit_info) {
				return error_non_fast_forward($remote);
			}
			my @commit_info_split = split(/ |\n/, $commit_info[0]);
			# $commit_info_split[1] is the sha1 of the commit to export
			# $commit_info_split[0] is the sha1 of its direct child
			push(@commit_pairs, \@commit_info_split);
			$parsed_sha1 = $commit_info_split[1];
		}
	} else {
		# No remote mediawiki revision. Export the whole
		# history (linearized with --first-parent)
		print STDERR "Warning: no common ancestor, pushing complete history\n";
		my $history = run_git("rev-list --first-parent --children $local");
		my @history = split('\n', $history);
		@history = @history[1..$#history];
		foreach my $line (reverse @history) {
			my @commit_info_split = split(/ |\n/, $line);
			push(@commit_pairs, \@commit_info_split);
		}
	}

	foreach my $commit_info_split (@commit_pairs) {
		my $sha1_child = @{$commit_info_split}[0];
		my $sha1_commit = @{$commit_info_split}[1];
		my $diff_infos = run_git("diff-tree -r --raw -z $sha1_child $sha1_commit");
		# TODO: we could detect renames, and encode them with a #redirect on the wiki.
		# TODO: for now, it's just a delete+add
		my @diff_info_list = split(/\0/, $diff_infos);
		# Keep the subject line of the commit message as the mediawiki comment for the revision
		my $commit_msg = run_git("log --no-walk --format=\"%s\" $sha1_commit");
		chomp($commit_msg);
		# Push every blob
		while (@diff_info_list) {
			my $status;
			# git diff-tree -z gives an output like
			# <metadata>\0<filename1>\0
			# <metadata>\0<filename2>\0
			# and we've split on \0.
			my $info = shift(@diff_info_list);
			my $file = shift(@diff_info_list);
			($mw_revision, $status) = mw_push_file($info, $file, $commit_msg, $mw_revision);
			if ($status eq "non-fast-forward") {
				# we may already have sent part of the
				# commit to MediaWiki, but it's too
				# late to cancel it. Stop the push in
				# the middle, but still give an
				# accurate error message.
				return error_non_fast_forward($remote);
			}
			if ($status ne "ok") {
				die("Unknown error from mw_push_file()");
			}
		}
		unless ($dumb_push) {
			run_git("notes --ref=$remotename/mediawiki add -m \"mediawiki_revision: $mw_revision\" $sha1_commit");
			run_git("update-ref -m \"Git-MediaWiki push\" refs/mediawiki/$remotename/master $sha1_commit $sha1_child");
		}
	}

	print STDOUT "ok $remote\n";
	return 1;
}

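# Ask the wiki (meta=siteinfo) which file extensions may be uploaded;
# returns a hash with one key per allowed extension.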
sub get_allowed_file_extensions {
	mw_connect_maybe();

	my $query = {
		action => 'query',
		meta => 'siteinfo',
		siprop => 'fileextensions'
	};
	my $result = $mediawiki->api($query);
	my @file_extensions = map $_->{ext}, @{$result->{query}->{fileextensions}};
	my %hashFile = map {$_ => 1} @file_extensions;

	return %hashFile;
}

# In-memory cache for MediaWiki namespace ids.
my %namespace_id;

# Namespaces whose id is cached in the configuration file
# (to avoid duplicates)
my %cached_mw_namespace_id;

# Return the MediaWiki id for a canonical namespace name.
# Ex.: "File", "Project".
sub get_mw_namespace_id {
	mw_connect_maybe();
	my $name = shift;

	if (!exists $namespace_id{$name}) {
		# Look at the configuration file, if the record for that
		# namespace is already cached. Namespaces are stored in
		# the form "Name_of_namespace:Id_namespace", e.g. "File:6".
		my @temp = split(/[ \n]/, run_git("config --get-all remote."
						. $remotename .".namespaceCache"));
		chomp(@temp);
		foreach my $ns (@temp) {
			my ($n, $id) = split(/:/, $ns);
			$namespace_id{$n} = $id;
			$cached_mw_namespace_id{$n} = 1;
		}
	}

	if (!exists $namespace_id{$name}) {
		print STDERR "Namespace $name not found in cache, querying the wiki ...\n";
		# NS not found => get the namespace id from MW and store it
		# in the configuration file.
		my $query = {
			action => 'query',
			meta => 'siteinfo',
			siprop => 'namespaces'
		};
		my $result = $mediawiki->api($query);

		while (my ($id, $ns) = each(%{$result->{query}->{namespaces}})) {
			if (defined($ns->{id}) && defined($ns->{canonical})) {
				$namespace_id{$ns->{canonical}} = $ns->{id};
				if ($ns->{'*'}) {
					# alias (e.g. French Fichier: as alias for canonical File:)
					$namespace_id{$ns->{'*'}} = $ns->{id};
				}
			}
		}
	}

	my $id = $namespace_id{$name};

	if (defined $id) {
		# Store explicitly requested namespaces on disk
		if (!exists $cached_mw_namespace_id{$name}) {
			run_git("config --add remote.". $remotename
				.".namespaceCache \"". $name .":". $id ."\"");
			$cached_mw_namespace_id{$name} = 1;
		}
		return $id;
	} else {
		die "No such namespace $name on MediaWiki.";
	}
}