'Bugzilla.pm' => { perms => CGI_READ },
"$localconfig*" => { perms => CGI_READ },
- 'META.*' => { perms => CGI_READ },
- 'MYMETA.*' => { perms => CGI_READ },
+ 'META.*' => { perms => CGI_READ },
+ 'MYMETA.*' => { perms => CGI_READ },
'bugzilla.dtd' => { perms => WS_SERVE },
'mod_perl.pl' => { perms => WS_SERVE },
- 'robots.txt' => { perms => WS_SERVE },
'.htaccess' => { perms => WS_SERVE },
'cvs-update.log' => { perms => WS_SERVE },
'scripts/sendunsentbugmail.pl' => { perms => WS_EXECUTE },
"skins/yui3.css" => { perms => CGI_READ,
overwrite => 1,
contents => $yui3_all_css },
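+ # Rendered by the robots_txt() helper below; overwrite => 1 means
+ # checksetup rewrites this file on every run, so edit the template instead.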
+ "robots.txt" => { perms => CGI_READ,
+ overwrite => 1,
+ contents => \&robots_txt },
);
# Because checksetup controls the creation of index.html separately
return $group_id;
}
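+# Build the contents of robots.txt from robots.txt.tmpl, letting
+# extensions add template variables via the before_robots_txt hook.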
+sub robots_txt {
+ my $output = '';
+ my %vars;
+ Bugzilla::Hook::process("before_robots_txt", { vars => \%vars });
+ Bugzilla->template->process("robots.txt.tmpl", \%vars, \$output)
+ or die Bugzilla->template->error;
+ return $output;
+}
+
1;
__END__
}
return if (correct_urlbase() ne 'https://bugzilla.mozilla.org/');
-
- $self->_fix_robots_txt();
}
sub install_filesystem {
};
}
-sub _fix_robots_txt {
- my ($self) = @_;
- my $cgi_path = bz_locations()->{'cgi_path'};
- my $robots_file = "$cgi_path/robots.txt";
- my $current_fh = new IO::File("$cgi_path/robots.txt", 'r');
- if (!$current_fh) {
- warn "$robots_file: $!";
- return;
- }
-
- my $current_contents;
- { local $/; $current_contents = <$current_fh> }
- $current_fh->close();
-
- return if $current_contents =~ /^Sitemap:/m;
- my $backup_name = "$cgi_path/robots.txt.old";
- print get_text('sitemap_fixing_robots', { current => $robots_file,
- backup => $backup_name }), "\n";
- rename $robots_file, $backup_name or die "backup failed: $!";
-
- my $new_fh = new IO::File($self->package_dir . '/robots.txt', 'r');
- $new_fh || die "Could not open new robots.txt template file: $!";
- my $new_contents;
- { local $/; $new_contents = <$new_fh> }
- $new_fh->close() || die "Could not close new robots.txt template file: $!";
-
- my $sitemap_url = correct_urlbase() . SITEMAP_URL;
- $new_contents =~ s/SITEMAP_URL/$sitemap_url/;
- $new_fh = new IO::File("$cgi_path/robots.txt", 'w');
- $new_fh || die "Could not open new robots.txt file: $!";
- print $new_fh $new_contents;
- $new_fh->close() || die "Could not close new robots.txt file: $!";
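+# before_robots_txt hook: supply the sitemap URL so the generated
+# robots.txt can point crawlers at the SiteMapIndex output.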
+sub before_robots_txt {
+ my ($self, $args) = @_;
+ $args->{vars}{SITEMAP_URL} = correct_urlbase() . SITEMAP_URL;
}
__PACKAGE__->NAME;
+++ /dev/null
-User-agent: *
-Disallow: /*.cgi
-Disallow: /show_bug.cgi*ctype=*
-Allow: /$
-Allow: /index.cgi
-Allow: /page.cgi
-Allow: /show_bug.cgi
-Allow: /describecomponents.cgi
-Allow: /data/SiteMapIndex/sitemap*.xml.gz
-Sitemap: SITEMAP_URL
--- /dev/null
+Allow: /data/SiteMapIndex/sitemap*.xml.gz
+Sitemap: [% SITEMAP_URL %]
Allow: /describecomponents.cgi
Allow: /describekeywords.cgi
-Allow: /data/SiteMapIndex/sitemap*.xml.gz
-Sitemap: http://bugzilla.mozilla.org/page.cgi?id=sitemap/sitemap.xml
+[% Hook.process("end") %]