git.ipfire.org Git - thirdparty/bugzilla.git/commitdiff
Bug 1386336 - Generate robots.txt at checksetup.pl from a template
author: Mary Umoh <umohm12@gmail.com>
Tue, 8 Aug 2017 21:12:08 +0000 (17:12 -0400)
committer: Dylan William Hardison <dylan@hardison.net>
Tue, 8 Aug 2017 21:12:08 +0000 (17:12 -0400)
Bugzilla/Install/Filesystem.pm
extensions/SiteMapIndex/Extension.pm
extensions/SiteMapIndex/robots.txt [deleted file]
extensions/SiteMapIndex/template/en/default/hook/robots-end.txt.tmpl [new file with mode: 0644]
template/en/default/robots.txt.tmpl [moved from robots.txt with 75% similarity]

index adc1815c1050dae2ab20e2149c8d6c041fa4994f..3114d64be0180ee4d4602f2f95734342e2ccb93b 100644 (file)
@@ -214,11 +214,10 @@ sub FILESYSTEM {
 
         'Bugzilla.pm'    => { perms => CGI_READ },
         "$localconfig*"  => { perms => CGI_READ },
-        'META.*'        => { perms => CGI_READ },
-        'MYMETA.*'      => { perms => CGI_READ },
+        'META.*'         => { perms => CGI_READ },
+        'MYMETA.*'       => { perms => CGI_READ },
         'bugzilla.dtd'   => { perms => WS_SERVE },
         'mod_perl.pl'    => { perms => WS_SERVE },
-        'robots.txt'     => { perms => WS_SERVE },
         '.htaccess'      => { perms => WS_SERVE },
         'cvs-update.log' => { perms => WS_SERVE },
         'scripts/sendunsentbugmail.pl' => { perms => WS_EXECUTE },
@@ -405,6 +404,9 @@ sub FILESYSTEM {
         "skins/yui3.css"          => { perms     => CGI_READ,
                                        overwrite => 1,
                                        contents  => $yui3_all_css },
+        "robots.txt"              => { perms     => CGI_READ,
+                                       overwrite => 1,
+                                       contents  => \&robots_txt},
     );
 
     # Because checksetup controls the creation of index.html separately
@@ -952,6 +954,16 @@ sub _check_web_server_group {
     return $group_id;
 }
 
+sub robots_txt {
+    my $output = '';
+    my %vars;
+    Bugzilla::Hook::process("before_robots_txt", { vars => \%vars });
+    Bugzilla->template->process("robots.txt.tmpl", \%vars, \$output)
+    or die Bugzilla->template->error;
+    return $output;
+}
+
+
 1;
 
 __END__
index 1e2010adbc9f82c6f3069f274ababa5ed46b9c1b..a3f09348567ee4339abcf84695d2f185df6b249c 100644 (file)
@@ -90,9 +90,6 @@ sub install_before_final_checks {
     }
 
     return if (correct_urlbase() ne 'https://bugzilla.mozilla.org/');
-
-
-    $self->_fix_robots_txt();
 }
 
 sub install_filesystem {
@@ -127,38 +124,9 @@ EOT
     };
 }
 
-sub _fix_robots_txt {
-    my ($self) = @_;
-    my $cgi_path = bz_locations()->{'cgi_path'};
-    my $robots_file = "$cgi_path/robots.txt";
-    my $current_fh = new IO::File("$cgi_path/robots.txt", 'r');
-    if (!$current_fh) {
-        warn "$robots_file: $!";
-        return;
-    }
-
-    my $current_contents;
-    { local $/; $current_contents = <$current_fh> }
-    $current_fh->close();
-
-    return if $current_contents =~ /^Sitemap:/m;
-    my $backup_name = "$cgi_path/robots.txt.old";
-    print get_text('sitemap_fixing_robots', { current => $robots_file,
-                                              backup  => $backup_name }), "\n";
-    rename $robots_file, $backup_name or die "backup failed: $!";
-
-    my $new_fh = new IO::File($self->package_dir . '/robots.txt', 'r');
-    $new_fh || die "Could not open new robots.txt template file: $!";
-    my $new_contents;
-    { local $/; $new_contents = <$new_fh> }
-    $new_fh->close() || die "Could not close new robots.txt template file: $!";
-
-    my $sitemap_url = correct_urlbase() . SITEMAP_URL;
-    $new_contents =~ s/SITEMAP_URL/$sitemap_url/;
-    $new_fh = new IO::File("$cgi_path/robots.txt", 'w');
-    $new_fh || die "Could not open new robots.txt file: $!";
-    print $new_fh $new_contents;
-    $new_fh->close() || die "Could not close new robots.txt file: $!";
+sub before_robots_txt {
+    my ($self, $args) = @_;
+    $args->{vars}{SITEMAP_URL} = correct_urlbase() . SITEMAP_URL;
 }
 
 __PACKAGE__->NAME;
diff --git a/extensions/SiteMapIndex/robots.txt b/extensions/SiteMapIndex/robots.txt
deleted file mode 100644 (file)
index 74cc630..0000000
+++ /dev/null
@@ -1,10 +0,0 @@
-User-agent: *
-Disallow: /*.cgi
-Disallow: /show_bug.cgi*ctype=*
-Allow: /$
-Allow: /index.cgi
-Allow: /page.cgi
-Allow: /show_bug.cgi
-Allow: /describecomponents.cgi
-Allow: /data/SiteMapIndex/sitemap*.xml.gz
-Sitemap: SITEMAP_URL
diff --git a/extensions/SiteMapIndex/template/en/default/hook/robots-end.txt.tmpl b/extensions/SiteMapIndex/template/en/default/hook/robots-end.txt.tmpl
new file mode 100644 (file)
index 0000000..818afb1
--- /dev/null
@@ -0,0 +1,2 @@
+Allow: /data/SiteMapIndex/sitemap*.xml.gz
+Sitemap: [% SITEMAP_URL %]
similarity index 75%
rename from robots.txt
rename to template/en/default/robots.txt.tmpl
index cececd63263718467a6ae109ae217b4b975643db..762b000ecbb16f0db28c53474723866bc69077fa 100644 (file)
@@ -16,5 +16,4 @@ Disallow: /show_bug.cgi*format=multiple*
 Allow: /describecomponents.cgi
 Allow: /describekeywords.cgi
 
-Allow: /data/SiteMapIndex/sitemap*.xml.gz
-Sitemap: http://bugzilla.mozilla.org/page.cgi?id=sitemap/sitemap.xml
+[% Hook.process("end") %]