ikiwiki / CVE-2019-9187
Attached is my proposed patch for ikiwiki.
The upstream patches applied with no dramas: I had to apply the changes
at the end of doc/security.mdwn by hand, but everything else applied
cleanly.
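
A dry run is enough to spot the hunks that need manual attention. Just
for illustration (this exact command is my own example, not part of the
debdiff):

    # hypothetical example: dry-run one backported patch from the unpacked source
    patch -p1 --dry-run < debian/patches/CVE-2019-9187-4.patch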
I wasn't sure what version number to use. Previous jessie-security
releases use the convention of "3.20141016.x", so I was wondering if I
should be using "3.20141016.5". For the moment I have used
"3.20141016.4+deb8u1" instead.
--
Brian May <brian@linuxpenguins.xyz>
https://linuxpenguins.xyz/brian/
diff -Nru ikiwiki-3.20141016.4+deb8u1/CHANGELOG ikiwiki-3.20141016.4/CHANGELOG
--- ikiwiki-3.20141016.4+deb8u1/CHANGELOG 2019-03-07 17:35:55.000000000 +1100
+++ ikiwiki-3.20141016.4/CHANGELOG 2017-01-12 05:18:52.000000000 +1100
@@ -1,10 +1,3 @@
-ikiwiki (3.20141016.4+deb8u1) jessie-security; urgency=high
-
- * Non-maintainer upload by the LTS Team.
- * CVE-2019-9187: Fix server-side request forgery via aggregate plugin.
-
- -- Brian May <bam@debian.org> Thu, 07 Mar 2019 17:35:55 +1100
-
ikiwiki (3.20141016.4) jessie-security; urgency=high

 * Reference CVE-2016-4561 in 3.20141016.3 changelog
diff -Nru ikiwiki-3.20141016.4+deb8u1/CVE-2019-9187-1.patch ikiwiki-3.20141016.4/CVE-2019-9187-1.patch
--- ikiwiki-3.20141016.4+deb8u1/CVE-2019-9187-1.patch 2019-03-07 17:25:37.000000000 +1100
+++ ikiwiki-3.20141016.4/CVE-2019-9187-1.patch 1970-01-01 10:00:00.000000000 +1000
@@ -1,28 +0,0 @@
-From e7b0d4a0fff8ed45a90c2efe8ef294bdf7c9bdac Mon Sep 17 00:00:00 2001
-From: Simon McVittie <smcv@debian.org>
-Date: Sun, 10 Feb 2019 16:29:19 +0000
-Subject: [PATCH] useragent: Raise an exception if the LWP module can't be
- loaded
-
-Signed-off-by: Simon McVittie <smcv@debian.org>
----
- IkiWiki.pm | 3 +++
- 1 file changed, 3 insertions(+)
-
-diff --git a/IkiWiki.pm b/IkiWiki.pm
-index 90cb96e58..dc047b08a 100644
---- a/IkiWiki.pm
-+++ b/IkiWiki.pm
-@@ -2470,6 +2470,9 @@ sub add_autofile ($$$) {
- }
-
- sub useragent () {
-+ eval q{use LWP};
-+ error($@) if $@;
-+
- return LWP::UserAgent->new(
- cookie_jar => $config{cookiejar},
- env_proxy => 1, # respect proxy env vars
---
-2.11.0
-
diff -Nru ikiwiki-3.20141016.4+deb8u1/CVE-2019-9187-2.patch ikiwiki-3.20141016.4/CVE-2019-9187-2.patch
--- ikiwiki-3.20141016.4+deb8u1/CVE-2019-9187-2.patch 2019-03-07 17:26:25.000000000 +1100
+++ ikiwiki-3.20141016.4/CVE-2019-9187-2.patch 1970-01-01 10:00:00.000000000 +1000
@@ -1,238 +0,0 @@
-From 67543ce1d62161fdef9dca198289d7dd7dceacc0 Mon Sep 17 00:00:00 2001
-From: Simon McVittie <smcv@debian.org>
-Date: Sun, 10 Feb 2019 16:30:07 +0000
-Subject: [PATCH] useragent: Don't allow non-HTTP protocols to be used
-
-This prevents the aggregate plugin from being used to read the contents
-of local files via file:/// URLs.
-
-Signed-off-by: Simon McVittie <smcv@debian.org>
----
- IkiWiki.pm | 1 +
- t/aggregate-file.t | 173 +++++++++++++++++++++++++++++++++++++
- t/noparanoia/LWPx/ParanoidAgent.pm | 2 +
- t/secret.rss | 11 +++
- 4 files changed, 187 insertions(+)
- create mode 100755 t/aggregate-file.t
- create mode 100644 t/noparanoia/LWPx/ParanoidAgent.pm
- create mode 100644 t/secret.rss
-
-diff --git a/IkiWiki.pm b/IkiWiki.pm
-index dc047b08a..d5d1af56c 100644
---- a/IkiWiki.pm
-+++ b/IkiWiki.pm
-@@ -2477,6 +2477,7 @@ sub useragent () {
- cookie_jar => $config{cookiejar},
- env_proxy => 1, # respect proxy env vars
- agent => $config{useragent},
-+ protocols_allowed => [qw(http https)],
- );
- }
-
-diff --git a/t/aggregate-file.t b/t/aggregate-file.t
-new file mode 100755
-index 000000000..f00743dac
---- /dev/null
-+++ b/t/aggregate-file.t
-@@ -0,0 +1,173 @@
-+#!/usr/bin/perl
-+use utf8;
-+use warnings;
-+use strict;
-+
-+use Encode;
-+use Test::More;
-+
-+BEGIN {
-+ plan(skip_all => "CGI not available")
-+ unless eval q{
-+ use CGI qw();
-+ 1;
-+ };
-+
-+ plan(skip_all => "IPC::Run not available")
-+ unless eval q{
-+ use IPC::Run qw(run);
-+ 1;
-+ };
-+
-+ use_ok('IkiWiki');
-+ use_ok('YAML::XS');
-+}
-+
-+# We check for English error messages
-+$ENV{LC_ALL} = 'C';
-+
-+use Cwd qw(getcwd);
-+use Errno qw(ENOENT);
-+
-+my $installed = $ENV{INSTALLED_TESTS};
-+
-+my @command;
-+if ($installed) {
-+ @command = qw(ikiwiki --plugin inline);
-+}
-+else {
-+ ok(! system("make -s ikiwiki.out"));
-+ @command = ("perl", "-I".getcwd."/blib/lib", './ikiwiki.out',
-+ '--underlaydir='.getcwd.'/underlays/basewiki',
-+ '--set', 'underlaydirbase='.getcwd.'/underlays',
-+ '--templatedir='.getcwd.'/templates');
-+}
-+
-+sub write_old_file {
-+ my $name = shift;
-+ my $dir = shift;
-+ my $content = shift;
-+ writefile($name, $dir, $content);
-+ ok(utime(333333333, 333333333, "$dir/$name"));
-+}
-+
-+sub write_setup_file {
-+ my %params = @_;
-+ my %setup = (
-+ wikiname => 'this is the name of my wiki',
-+ srcdir => getcwd.'/t/tmp/in',
-+ destdir => getcwd.'/t/tmp/out',
-+ url => 'http://example.com',
-+ cgiurl => 'http://example.com/cgi-bin/ikiwiki.cgi',
-+ cgi_wrapper => getcwd.'/t/tmp/ikiwiki.cgi',
-+ cgi_wrappermode => '0751',
-+ add_plugins => [qw(aggregate)],
-+ disable_plugins => [qw(emailauth openid passwordauth)],
-+ aggregate_webtrigger => 1,
-+ );
-+ if ($params{without_paranoia}) {
-+ $setup{libdirs} = [getcwd.'/t/noparanoia'];
-+ }
-+ unless ($installed) {
-+ $setup{ENV} = { 'PERL5LIB' => getcwd.'/blib/lib' };
-+ }
-+ writefile("test.setup", "t/tmp",
-+ "# IkiWiki::Setup::Yaml - YAML formatted setup file\n" .
-+ Dump(\%setup));
-+}
-+
-+sub thoroughly_rebuild {
-+ ok(unlink("t/tmp/ikiwiki.cgi") || $!{ENOENT});
-+ ok(! system(@command, qw(--setup t/tmp/test.setup --rebuild --wrappers)));
-+}
-+
-+sub run_cgi {
-+ my (%args) = @_;
-+ my ($in, $out);
-+ my $method = $args{method} || 'GET';
-+ my $environ = $args{environ} || {};
-+ my $params = $args{params} || { do => 'prefs' };
-+
-+ my %defaults = (
-+ SCRIPT_NAME => '/cgi-bin/ikiwiki.cgi',
-+ HTTP_HOST => 'example.com',
-+ );
-+
-+ my $cgi = CGI->new($args{params});
-+ my $query_string = $cgi->query_string();
-+ diag $query_string;
-+
-+ if ($method eq 'POST') {
-+ $defaults{REQUEST_METHOD} = 'POST';
-+ $in = $query_string;
-+ $defaults{CONTENT_LENGTH} = length $in;
-+ } else {
-+ $defaults{REQUEST_METHOD} = 'GET';
-+ $defaults{QUERY_STRING} = $query_string;
-+ }
-+
-+ my %envvars = (
-+ %defaults,
-+ %$environ,
-+ );
-+ run(["./t/tmp/ikiwiki.cgi"], \$in, \$out, init => sub {
-+ map {
-+ $ENV{$_} = $envvars{$_}
-+ } keys(%envvars);
-+ });
-+
-+ return decode_utf8($out);
-+}
-+
-+sub test {
-+ my $content;
-+
-+ ok(! system(qw(rm -rf t/tmp)));
-+ ok(! system(qw(mkdir t/tmp)));
-+
-+ write_old_file('aggregator.mdwn', 't/tmp/in',
-+ '[[!aggregate name="ssrf" url="file://'.getcwd.'/t/secret.rss"]]'
-+ .'[[!inline pages="internal(aggregator/*)"]]');
-+
-+ write_setup_file();
-+ thoroughly_rebuild();
-+
-+ $content = run_cgi(
-+ method => 'GET',
-+ params => {
-+ do => 'aggregate_webtrigger',
-+ },
-+ );
-+ unlike($content, qr{creating new page});
-+ unlike($content, qr{Secrets});
-+ ok(! -e 't/tmp/in/.ikiwiki/transient/aggregator/ssrf');
-+ ok(! -e 't/tmp/in/.ikiwiki/transient/aggregator/ssrf/Secrets_go_here._aggregated');
-+
-+ thoroughly_rebuild();
-+ $content = readfile('t/tmp/out/aggregator/index.html');
-+ unlike($content, qr{Secrets});
-+
-+ diag('Trying test again with LWPx::ParanoidAgent disabled');
-+
-+ write_setup_file(without_paranoia => 1);
-+ thoroughly_rebuild();
-+
-+ $content = run_cgi(
-+ method => 'GET',
-+ params => {
-+ do => 'aggregate_webtrigger',
-+ },
-+ );
-+ unlike($content, qr{creating new page});
-+ unlike($content, qr{Secrets});
-+ ok(! -e 't/tmp/in/.ikiwiki/transient/aggregator/ssrf');
-+ ok(! -e 't/tmp/in/.ikiwiki/transient/aggregator/ssrf/Secrets_go_here._aggregated');
-+
-+ thoroughly_rebuild();
-+ $content = readfile('t/tmp/out/aggregator/index.html');
-+ unlike($content, qr{Secrets});
-+}
-+
-+test();
-+
-+done_testing();
-diff --git a/t/noparanoia/LWPx/ParanoidAgent.pm b/t/noparanoia/LWPx/ParanoidAgent.pm
-new file mode 100644
-index 000000000..751e80ce6
---- /dev/null
-+++ b/t/noparanoia/LWPx/ParanoidAgent.pm
-@@ -0,0 +1,2 @@
-+# make import fail
-+0;
-diff --git a/t/secret.rss b/t/secret.rss
-new file mode 100644
-index 000000000..11202e9ed
---- /dev/null
-+++ b/t/secret.rss
-@@ -0,0 +1,11 @@
-+<?xml version="1.0"?>
-+<rss version="2.0">
-+<channel>
-+<title>Secrets go here</title>
-+<description>Secrets go here</description>
-+<item>
-+ <title>Secrets go here</title>
-+ <description>Secrets go here</description>
-+</item>
-+</channel>
-+</rss>
---
-2.11.0
-
diff -Nru ikiwiki-3.20141016.4+deb8u1/CVE-2019-9187-3.patch ikiwiki-3.20141016.4/CVE-2019-9187-3.patch
--- ikiwiki-3.20141016.4+deb8u1/CVE-2019-9187-3.patch 2019-03-07 17:26:41.000000000 +1100
+++ ikiwiki-3.20141016.4/CVE-2019-9187-3.patch 1970-01-01 10:00:00.000000000 +1000
@@ -1,590 +0,0 @@
-From d283e4ca1aeb6ca8cc0951c8495f778071076013 Mon Sep 17 00:00:00 2001
-From: Simon McVittie <smcv@debian.org>
-Date: Sun, 10 Feb 2019 17:22:06 +0000
-Subject: [PATCH] useragent: Automatically choose whether to use
- LWPx::ParanoidAgent
-
-The simple implementation of this, which I'd prefer to use, would be:
-if we can import LWPx::ParanoidAgent, use it; otherwise, use
-LWP::UserAgent.
-
-However, aggregate has historically worked with proxies, and
-LWPx::ParanoidAgent quite reasonably refuses to work with proxies
-(because it can't know whether those proxies are going to do the same
-filtering that LWPx::ParanoidAgent would).
-
-Signed-off-by: Simon McVittie <smcv@debian.org>
----
- IkiWiki.pm | 123 ++++++++++++++++-
- IkiWiki/Plugin/aggregate.pm | 5 +-
- IkiWiki/Plugin/blogspam.pm | 16 +--
- IkiWiki/Plugin/openid.pm | 12 +-
- IkiWiki/Plugin/pinger.pm | 21 ++-
- t/useragent.t | 317 ++++++++++++++++++++++++++++++++++++++++++++
- 6 files changed, 458 insertions(+), 36 deletions(-)
- create mode 100755 t/useragent.t
-
-diff --git a/IkiWiki.pm b/IkiWiki.pm
-index d5d1af56c..efb48293a 100644
---- a/IkiWiki.pm
-+++ b/IkiWiki.pm
-@@ -2469,16 +2469,131 @@ sub add_autofile ($$$) {
- $autofiles{$file}{generator}=$generator;
- }
-
--sub useragent () {
-+sub useragent (@) {
-+ my %params = @_;
-+ my $for_url = delete $params{for_url};
-+ # Fail safe, in case a plugin calling this function is relying on
-+ # a future parameter to make the UA more strict
-+ foreach my $key (keys %params) {
-+ error "Internal error: useragent(\"$key\" => ...) not understood";
-+ }
-+
- eval q{use LWP};
- error($@) if $@;
-
-- return LWP::UserAgent->new(
-- cookie_jar => $config{cookiejar},
-- env_proxy => 1, # respect proxy env vars
-+ my %args = (
- agent => $config{useragent},
-+ cookie_jar => $config{cookiejar},
-+ env_proxy => 0,
- protocols_allowed => [qw(http https)],
- );
-+ my %proxies;
-+
-+ if (defined $for_url) {
-+ # We know which URL we're going to fetch, so we can choose
-+ # whether it's going to go through a proxy or not.
-+ #
-+ # We reimplement http_proxy, https_proxy and no_proxy here, so
-+ # that we are not relying on LWP implementing them exactly the
-+ # same way we do.
-+
-+ eval q{use URI};
-+ error($@) if $@;
-+
-+ my $proxy;
-+ my $uri = URI->new($for_url);
-+
-+ if ($uri->scheme eq 'http') {
-+ $proxy = $ENV{http_proxy};
-+ # HTTP_PROXY is deliberately not implemented
-+ # because the HTTP_* namespace is also used by CGI
-+ }
-+ elsif ($uri->scheme eq 'https') {
-+ $proxy = $ENV{https_proxy};
-+ $proxy = $ENV{HTTPS_PROXY} unless defined $proxy;
-+ }
-+ else {
-+ $proxy = undef;
-+ }
-+
-+ foreach my $var (qw(no_proxy NO_PROXY)) {
-+ my $no_proxy = $ENV{$var};
-+ if (defined $no_proxy) {
-+ foreach my $domain (split /\s*,\s*/, $no_proxy) {
-+ if ($domain =~ s/^\*?\.//) {
-+ # no_proxy="*.example.com" or
-+ # ".example.com": match suffix
-+ # against .example.com
-+ if ($uri->host =~ m/(^|\.)\Q$domain\E$/i) {
-+ $proxy = undef;
-+ }
-+ }
-+ else {
-+ # no_proxy="example.com":
-+ # match exactly example.com
-+ if (lc $uri->host eq lc $domain) {
-+ $proxy = undef;
-+ }
-+ }
-+ }
-+ }
-+ }
-+
-+ if (defined $proxy) {
-+ $proxies{$uri->scheme} = $proxy;
-+ # Paranoia: make sure we can't bypass the proxy
-+ $args{protocols_allowed} = [$uri->scheme];
-+ }
-+ }
-+ else {
-+ # The plugin doesn't know yet which URL(s) it's going to
-+ # fetch, so we have to make some conservative assumptions.
-+ my $http_proxy = $ENV{http_proxy};
-+ my $https_proxy = $ENV{https_proxy};
-+ $https_proxy = $ENV{HTTPS_PROXY} unless defined $https_proxy;
-+
-+ # We don't respect no_proxy here: if we are not using the
-+ # paranoid user-agent, then we need to give the proxy the
-+ # opportunity to reject undesirable requests.
-+
-+ # If we have one, we need the other: otherwise, neither
-+ # LWPx::ParanoidAgent nor the proxy would have the
-+ # opportunity to filter requests for the other protocol.
-+ if (defined $https_proxy && defined $http_proxy) {
-+ %proxies = (http => $http_proxy, https => $https_proxy);
-+ }
-+ elsif (defined $https_proxy) {
-+ %proxies = (http => $https_proxy, https => $https_proxy);
-+ }
-+ elsif (defined $http_proxy) {
-+ %proxies = (http => $http_proxy, https => $http_proxy);
-+ }
-+
-+ }
-+
-+ if (scalar keys %proxies) {
-+ # The configured proxy is responsible for deciding which
-+ # URLs are acceptable to fetch and which URLs are not.
-+ my $ua = LWP::UserAgent->new(%args);
-+ foreach my $scheme (@{$ua->protocols_allowed}) {
-+ unless ($proxies{$scheme}) {
-+ error "internal error: $scheme is allowed but has no proxy";
-+ }
-+ }
-+ # We can't pass the proxies in %args because that only
-+ # works since LWP 6.24.
-+ foreach my $scheme (keys %proxies) {
-+ $ua->proxy($scheme, $proxies{$scheme});
-+ }
-+ return $ua;
-+ }
-+
-+ eval q{use LWPx::ParanoidAgent};
-+ if ($@) {
-+ print STDERR "warning: installing LWPx::ParanoidAgent is recommended\n";
-+ return LWP::UserAgent->new(%args);
-+ }
-+ return LWPx::ParanoidAgent->new(%args);
- }
-
- sub sortspec_translate ($$) {
-diff --git a/IkiWiki/Plugin/aggregate.pm b/IkiWiki/Plugin/aggregate.pm
-index 05e22a290..8f0870e2e 100644
---- a/IkiWiki/Plugin/aggregate.pm
-+++ b/IkiWiki/Plugin/aggregate.pm
-@@ -513,7 +513,10 @@ sub aggregate (@) {
- }
- $feed->{feedurl}=pop @urls;
- }
-- my $ua=useragent();
-+ # Using the for_url parameter makes sure we crash if used
-+ # with an older IkiWiki.pm that didn't automatically try
-+ # to use LWPx::ParanoidAgent.
-+ my $ua=useragent(for_url => $feed->{feedurl});
- my $res=URI::Fetch->fetch($feed->{feedurl}, UserAgent=>$ua);
- if (! $res) {
- $feed->{message}=URI::Fetch->errstr;
-diff --git a/IkiWiki/Plugin/blogspam.pm b/IkiWiki/Plugin/blogspam.pm
-index 3eb4cf8b3..3835f52ca 100644
---- a/IkiWiki/Plugin/blogspam.pm
-+++ b/IkiWiki/Plugin/blogspam.pm
-@@ -57,18 +57,10 @@ sub checkconfig () {
- };
- error $@ if $@;
-
-- eval q{use LWPx::ParanoidAgent};
-- if (!$@) {
-- $client=LWPx::ParanoidAgent->new(agent => $config{useragent});
-- }
-- else {
-- eval q{use LWP};
-- if ($@) {
-- error $@;
-- return;
-- }
-- $client=useragent();
-- }
-+ # Using the for_url parameter makes sure we crash if used
-+ # with an older IkiWiki.pm that didn't automatically try
-+ # to use LWPx::ParanoidAgent.
-+ $client=useragent(for_url => $config{blogspam_server});
- }
-
- sub checkcontent (@) {
-diff --git a/IkiWiki/Plugin/openid.pm b/IkiWiki/Plugin/openid.pm
-index 35ef52a58..eb21955e9 100644
---- a/IkiWiki/Plugin/openid.pm
-+++ b/IkiWiki/Plugin/openid.pm
-@@ -219,14 +219,10 @@ sub getobj ($$) {
- eval q{use Net::OpenID::Consumer};
- error($@) if $@;
-
-- my $ua;
-- eval q{use LWPx::ParanoidAgent};
-- if (! $@) {
-- $ua=LWPx::ParanoidAgent->new(agent => $config{useragent});
-- }
-- else {
-- $ua=useragent();
-- }
-+ # We pass the for_url parameter, even though it's undef, because
-+ # that will make sure we crash if used with an older IkiWiki.pm
-+ # that didn't automatically try to use LWPx::ParanoidAgent.
-+ my $ua=useragent(for_url => undef);
-
- # Store the secret in the session.
- my $secret=$session->param("openid_secret");
-diff --git a/IkiWiki/Plugin/pinger.pm b/IkiWiki/Plugin/pinger.pm
-index b2d54af8a..ec764caee 100644
---- a/IkiWiki/Plugin/pinger.pm
-+++ b/IkiWiki/Plugin/pinger.pm
-@@ -70,17 +70,16 @@ sub ping {
- eval q{use Net::INET6Glue::INET_is_INET6}; # may not be available
-
- my $ua;
-- eval q{use LWPx::ParanoidAgent};
-- if (!$@) {
-- $ua=LWPx::ParanoidAgent->new(agent => $config{useragent});
-- }
-- else {
-- eval q{use LWP};
-- if ($@) {
-- debug(gettext("LWP not found, not pinging"));
-- return;
-- }
-- $ua=useragent();
-+ eval {
-+ # We pass the for_url parameter, even though it's
-+ # undef, because that will make sure we crash if used
-+ # with an older IkiWiki.pm that didn't automatically
-+ # try to use LWPx::ParanoidAgent.
-+ $ua=useragent(for_url => undef);
-+ };
-+ if ($@) {
-+ debug(gettext("LWP not found, not pinging").": $@");
-+ return;
- }
- $ua->timeout($config{pinger_timeout} || 15);
-
-diff --git a/t/useragent.t b/t/useragent.t
-new file mode 100755
-index 000000000..195a86521
---- /dev/null
-+++ b/t/useragent.t
-@@ -0,0 +1,317 @@
-+#!/usr/bin/perl
-+use warnings;
-+use strict;
-+use Test::More;
-+
-+my $have_paranoid_agent;
-+BEGIN {
-+ plan(skip_all => 'LWP not available')
-+ unless eval q{
-+ use LWP qw(); 1;
-+ };
-+ use_ok("IkiWiki");
-+ $have_paranoid_agent = eval q{
-+ use LWPx::ParanoidAgent qw(); 1;
-+ };
-+}
-+
-+eval { useragent(future_feature => 1); };
-+ok($@, 'future features should cause useragent to fail');
-+
-+diag "==== No proxy ====";
-+delete $ENV{http_proxy};
-+delete $ENV{https_proxy};
-+delete $ENV{no_proxy};
-+delete $ENV{HTTPS_PROXY};
-+delete $ENV{NO_PROXY};
-+
-+diag "---- Unspecified URL ----";
-+my $ua = useragent(for_url => undef);
-+SKIP: {
-+ skip 'paranoid agent not available', 1 unless $have_paranoid_agent;
-+ ok($ua->isa('LWPx::ParanoidAgent'), 'uses ParanoidAgent if possible');
-+}
-+is_deeply([sort @{$ua->protocols_allowed}], [sort qw(http https)]);
-+is($ua->proxy('http'), undef, 'No http proxy');
-+is($ua->proxy('https'), undef, 'No https proxy');
-+
-+diag "---- Specified URL ----";
-+$ua = useragent(for_url => 'http://example.com');
-+SKIP: {
-+ skip 'paranoid agent not available', 1 unless $have_paranoid_agent;
-+ ok($ua->isa('LWPx::ParanoidAgent'), 'uses ParanoidAgent if possible');
-+}
-+is_deeply([sort @{$ua->protocols_allowed}], [sort qw(http https)]);
-+is($ua->proxy('http'), undef, 'No http proxy');
-+is($ua->proxy('https'), undef, 'No https proxy');
-+
-+diag "==== Proxy for everything ====";
-+$ENV{http_proxy} = 'http://proxy:8080';
-+$ENV{https_proxy} = 'http://sproxy:8080';
-+delete $ENV{no_proxy};
-+delete $ENV{HTTPS_PROXY};
-+delete $ENV{NO_PROXY};
-+
-+diag "---- Unspecified URL ----";
-+$ua = useragent(for_url => undef);
-+ok(! $ua->isa('LWPx::ParanoidAgent'), 'should use proxy instead of ParanoidAgent');
-+is_deeply([sort @{$ua->protocols_allowed}], [sort qw(http https)]);
-+is($ua->proxy('http'), 'http://proxy:8080', 'should use proxy');
-+is($ua->proxy('https'), 'http://sproxy:8080', 'should use CONNECT proxy');
-+$ua = useragent(for_url => 'http://example.com');
-+ok(! $ua->isa('LWPx::ParanoidAgent'), 'should use proxy instead of ParanoidAgent');
-+is_deeply([sort @{$ua->protocols_allowed}], [sort qw(http)]);
-+is($ua->proxy('http'), 'http://proxy:8080', 'should use proxy');
-+# We don't care what $ua->proxy('https') is, because it won't be used
-+$ua = useragent(for_url => 'https://example.com');
-+ok(! $ua->isa('LWPx::ParanoidAgent'), 'should use proxy instead of ParanoidAgent');
-+is_deeply([sort @{$ua->protocols_allowed}], [sort qw(https)]);
-+# We don't care what $ua->proxy('http') is, because it won't be used
-+is($ua->proxy('https'), 'http://sproxy:8080', 'should use CONNECT proxy');
-+
-+diag "==== Selective proxy ====";
-+$ENV{http_proxy} = 'http://proxy:8080';
-+$ENV{https_proxy} = 'http://sproxy:8080';
-+$ENV{no_proxy} = '*.example.net,example.com,.example.org';
-+delete $ENV{HTTPS_PROXY};
-+delete $ENV{NO_PROXY};
-+
-+diag "---- Unspecified URL ----";
-+$ua = useragent(for_url => undef);
-+ok(! $ua->isa('LWPx::ParanoidAgent'), 'should use proxy instead of ParanoidAgent');
-+is_deeply([sort @{$ua->protocols_allowed}], [sort qw(http https)]);
-+is($ua->proxy('http'), 'http://proxy:8080', 'should use proxy');
-+is($ua->proxy('https'), 'http://sproxy:8080', 'should use CONNECT proxy');
-+
-+diag "---- Exact match for no_proxy ----";
-+$ua = useragent(for_url => 'http://example.com');
-+SKIP: {
-+ skip 'paranoid agent not available', 1 unless $have_paranoid_agent;
-+ ok($ua->isa('LWPx::ParanoidAgent'), 'uses ParanoidAgent if possible');
-+}
-+is_deeply([sort @{$ua->protocols_allowed}], [sort qw(http https)]);
-+is($ua->proxy('http'), undef);
-+is($ua->proxy('https'), undef);
-+
-+diag "---- Subdomain of exact domain in no_proxy ----";
-+$ua = useragent(for_url => 'http://sub.example.com');
-+ok(! $ua->isa('LWPx::ParanoidAgent'), 'should use proxy instead of ParanoidAgent');
-+is_deeply([sort @{$ua->protocols_allowed}], [sort qw(http)]);
-+is($ua->proxy('http'), 'http://proxy:8080', 'should use proxy');
-+
-+diag "---- example.net matches *.example.net ----";
-+$ua = useragent(for_url => 'https://example.net');
-+SKIP: {
-+ skip 'paranoid agent not available', 1 unless $have_paranoid_agent;
-+ ok($ua->isa('LWPx::ParanoidAgent'), 'uses ParanoidAgent if possible');
-+}
-+is_deeply([sort @{$ua->protocols_allowed}], [sort qw(http https)]);
-+is($ua->proxy('http'), undef);
-+is($ua->proxy('https'), undef);
-+
-+diag "---- sub.example.net matches *.example.net ----";
-+$ua = useragent(for_url => 'https://sub.example.net');
-+SKIP: {
-+ skip 'paranoid agent not available', 1 unless $have_paranoid_agent;
-+ ok($ua->isa('LWPx::ParanoidAgent'), 'uses ParanoidAgent if possible');
-+}
-+is_deeply([sort @{$ua->protocols_allowed}], [sort qw(http https)]);
-+is($ua->proxy('http'), undef);
-+is($ua->proxy('https'), undef);
-+
-+diag "---- badexample.net does not match *.example.net ----";
-+$ua = useragent(for_url => 'https://badexample.net');
-+ok(! $ua->isa('LWPx::ParanoidAgent'), 'should use proxy instead of ParanoidAgent');
-+is_deeply([sort @{$ua->protocols_allowed}], [sort qw(https)]);
-+is($ua->proxy('https'), 'http://sproxy:8080', 'should use proxy');
-+
-+diag "---- example.org matches .example.org ----";
-+$ua = useragent(for_url => 'https://example.org');
-+SKIP: {
-+ skip 'paranoid agent not available', 1 unless $have_paranoid_agent;
-+ ok($ua->isa('LWPx::ParanoidAgent'), 'uses ParanoidAgent if possible');
-+}
-+is_deeply([sort @{$ua->protocols_allowed}], [sort qw(http https)]);
-+is($ua->proxy('http'), undef);
-+is($ua->proxy('https'), undef);
-+
-+diag "---- sub.example.org matches .example.org ----";
-+$ua = useragent(for_url => 'https://sub.example.org');
-+SKIP: {
-+ skip 'paranoid agent not available', 1 unless $have_paranoid_agent;
-+ ok($ua->isa('LWPx::ParanoidAgent'), 'uses ParanoidAgent if possible');
-+}
-+is_deeply([sort @{$ua->protocols_allowed}], [sort qw(http https)]);
-+is($ua->proxy('http'), undef);
-+is($ua->proxy('https'), undef);
-+
-+diag "---- badexample.org does not match .example.org ----";
-+$ua = useragent(for_url => 'https://badexample.org');
-+ok(! $ua->isa('LWPx::ParanoidAgent'), 'should use proxy instead of ParanoidAgent');
-+is_deeply([sort @{$ua->protocols_allowed}], [sort qw(https)]);
-+is($ua->proxy('https'), 'http://sproxy:8080', 'should use proxy');
-+
-+diag "==== Selective proxy (alternate variables) ====";
-+$ENV{http_proxy} = 'http://proxy:8080';
-+delete $ENV{https_proxy};
-+$ENV{HTTPS_PROXY} = 'http://sproxy:8080';
-+delete $ENV{no_proxy};
-+$ENV{NO_PROXY} = '*.example.net,example.com,.example.org';
-+
-+diag "---- Unspecified URL ----";
-+$ua = useragent(for_url => undef);
-+ok(! $ua->isa('LWPx::ParanoidAgent'), 'should use proxy instead of ParanoidAgent');
-+is_deeply([sort @{$ua->protocols_allowed}], [sort qw(http https)]);
-+is($ua->proxy('http'), 'http://proxy:8080', 'should use proxy');
-+is($ua->proxy('https'), 'http://sproxy:8080', 'should use CONNECT proxy');
-+
-+diag "---- Exact match for no_proxy ----";
-+$ua = useragent(for_url => 'http://example.com');
-+SKIP: {
-+ skip 'paranoid agent not available', 1 unless $have_paranoid_agent;
-+ ok($ua->isa('LWPx::ParanoidAgent'), 'uses ParanoidAgent if possible');
-+}
-+is_deeply([sort @{$ua->protocols_allowed}], [sort qw(http https)]);
-+is($ua->proxy('http'), undef);
-+is($ua->proxy('https'), undef);
-+
-+diag "---- Subdomain of exact domain in no_proxy ----";
-+$ua = useragent(for_url => 'http://sub.example.com');
-+ok(! $ua->isa('LWPx::ParanoidAgent'), 'should use proxy instead of ParanoidAgent');
-+is_deeply([sort @{$ua->protocols_allowed}], [sort qw(http)]);
-+is($ua->proxy('http'), 'http://proxy:8080', 'should use proxy');
-+
-+diag "---- example.net matches *.example.net ----";
-+$ua = useragent(for_url => 'https://example.net');
-+SKIP: {
-+ skip 'paranoid agent not available', 1 unless $have_paranoid_agent;
-+ ok($ua->isa('LWPx::ParanoidAgent'), 'uses ParanoidAgent if possible');
-+}
-+is_deeply([sort @{$ua->protocols_allowed}], [sort qw(http https)]);
-+is($ua->proxy('http'), undef);
-+is($ua->proxy('https'), undef);
-+
-+diag "---- sub.example.net matches *.example.net ----";
-+$ua = useragent(for_url => 'https://sub.example.net');
-+SKIP: {
-+ skip 'paranoid agent not available', 1 unless $have_paranoid_agent;
-+ ok($ua->isa('LWPx::ParanoidAgent'), 'uses ParanoidAgent if possible');
-+}
-+is_deeply([sort @{$ua->protocols_allowed}], [sort qw(http https)]);
-+is($ua->proxy('http'), undef);
-+is($ua->proxy('https'), undef);
-+
-+diag "---- badexample.net does not match *.example.net ----";
-+$ua = useragent(for_url => 'https://badexample.net');
-+ok(! $ua->isa('LWPx::ParanoidAgent'), 'should use proxy instead of ParanoidAgent');
-+is_deeply([sort @{$ua->protocols_allowed}], [sort qw(https)]);
-+is($ua->proxy('https'), 'http://sproxy:8080', 'should use proxy');
-+
-+diag "---- example.org matches .example.org ----";
-+$ua = useragent(for_url => 'https://example.org');
-+SKIP: {
-+ skip 'paranoid agent not available', 1 unless $have_paranoid_agent;
-+ ok($ua->isa('LWPx::ParanoidAgent'), 'uses ParanoidAgent if possible');
-+}
-+is_deeply([sort @{$ua->protocols_allowed}], [sort qw(http https)]);
-+is($ua->proxy('http'), undef);
-+is($ua->proxy('https'), undef);
-+
-+diag "---- sub.example.org matches .example.org ----";
-+$ua = useragent(for_url => 'https://sub.example.org');
-+SKIP: {
-+ skip 'paranoid agent not available', 1 unless $have_paranoid_agent;
-+ ok($ua->isa('LWPx::ParanoidAgent'), 'uses ParanoidAgent if possible');
-+}
-+is_deeply([sort @{$ua->protocols_allowed}], [sort qw(http https)]);
-+is($ua->proxy('http'), undef);
-+is($ua->proxy('https'), undef);
-+
-+diag "---- badexample.org does not match .example.org ----";
-+$ua = useragent(for_url => 'https://badexample.org');
-+ok(! $ua->isa('LWPx::ParanoidAgent'), 'should use proxy instead of ParanoidAgent');
-+is_deeply([sort @{$ua->protocols_allowed}], [sort qw(https)]);
-+is($ua->proxy('https'), 'http://sproxy:8080', 'should use proxy');
-+
-+diag "==== Selective proxy (many variables) ====";
-+$ENV{http_proxy} = 'http://proxy:8080';
-+$ENV{https_proxy} = 'http://sproxy:8080';
-+# This one should be ignored in favour of https_proxy
-+$ENV{HTTPS_PROXY} = 'http://not.preferred.proxy:3128';
-+# These two should be merged
-+$ENV{no_proxy} = '*.example.net,example.com';
-+$ENV{NO_PROXY} = '.example.org';
-+
-+diag "---- Unspecified URL ----";
-+$ua = useragent(for_url => undef);
-+ok(! $ua->isa('LWPx::ParanoidAgent'), 'should use proxy instead of ParanoidAgent');
-+is_deeply([sort @{$ua->protocols_allowed}], [sort qw(http https)]);
-+is($ua->proxy('http'), 'http://proxy:8080', 'should use proxy');
-+is($ua->proxy('https'), 'http://sproxy:8080', 'should use CONNECT proxy');
-+
-+diag "---- Exact match for no_proxy ----";
-+$ua = useragent(for_url => 'http://example.com');
-+SKIP: {
-+ skip 'paranoid agent not available', 1 unless $have_paranoid_agent;
-+ ok($ua->isa('LWPx::ParanoidAgent'), 'uses ParanoidAgent if possible');
-+}
-+is_deeply([sort @{$ua->protocols_allowed}], [sort qw(http https)]);
-+is($ua->proxy('http'), undef);
-+is($ua->proxy('https'), undef);
-+
-+diag "---- Subdomain of exact domain in no_proxy ----";
-+$ua = useragent(for_url => 'http://sub.example.com');
-+ok(! $ua->isa('LWPx::ParanoidAgent'), 'should use proxy instead of ParanoidAgent');
-+is_deeply([sort @{$ua->protocols_allowed}], [sort qw(http)]);
-+is($ua->proxy('http'), 'http://proxy:8080', 'should use proxy');
-+
-+diag "---- example.net matches *.example.net ----";
-+$ua = useragent(for_url => 'https://example.net');
-+SKIP: {
-+ skip 'paranoid agent not available', 1 unless $have_paranoid_agent;
-+ ok($ua->isa('LWPx::ParanoidAgent'), 'uses ParanoidAgent if possible');
-+}
-+is_deeply([sort @{$ua->protocols_allowed}], [sort qw(http https)]);
-+is($ua->proxy('http'), undef);
-+is($ua->proxy('https'), undef);
-+
-+diag "---- sub.example.net matches *.example.net ----";
-+$ua = useragent(for_url => 'https://sub.example.net');
-+SKIP: {
-+ skip 'paranoid agent not available', 1 unless $have_paranoid_agent;
-+ ok($ua->isa('LWPx::ParanoidAgent'), 'uses ParanoidAgent if possible');
-+}
-+is_deeply([sort @{$ua->protocols_allowed}], [sort qw(http https)]);
-+is($ua->proxy('http'), undef);
-+is($ua->proxy('https'), undef);
-+
-+diag "---- badexample.net does not match *.example.net ----";
-+$ua = useragent(for_url => 'https://badexample.net');
-+ok(! $ua->isa('LWPx::ParanoidAgent'), 'should use proxy instead of ParanoidAgent');
-+is_deeply([sort @{$ua->protocols_allowed}], [sort qw(https)]);
-+is($ua->proxy('https'), 'http://sproxy:8080', 'should use proxy');
-+
-+diag "==== One but not the other ====\n";
-+$ENV{http_proxy} = 'http://proxy:8080';
-+delete $ENV{https_proxy};
-+delete $ENV{HTTPS_PROXY};
-+delete $ENV{no_proxy};
-+delete $ENV{NO_PROXY};
-+$ua = useragent(for_url => undef);
-+ok(! $ua->isa('LWPx::ParanoidAgent'), 'should use proxy instead of ParanoidAgent');
-+is_deeply([sort @{$ua->protocols_allowed}], [sort qw(http https)]);
-+is($ua->proxy('http'), 'http://proxy:8080', 'should use proxy');
-+is($ua->proxy('https'), 'http://proxy:8080', 'should use proxy');
-+
-+delete $ENV{http_proxy};
-+$ENV{https_proxy} = 'http://sproxy:8080';
-+delete $ENV{HTTPS_PROXY};
-+delete $ENV{no_proxy};
-+delete $ENV{NO_PROXY};
-+$ua = useragent(for_url => undef);
-+ok(! $ua->isa('LWPx::ParanoidAgent'), 'should use proxy instead of ParanoidAgent');
-+is_deeply([sort @{$ua->protocols_allowed}], [sort qw(http https)]);
-+is($ua->proxy('http'), 'http://sproxy:8080', 'should use proxy');
-+is($ua->proxy('https'), 'http://sproxy:8080', 'should use proxy');
-+
-+done_testing;
---
-2.11.0
-
diff -Nru ikiwiki-3.20141016.4+deb8u1/CVE-2019-9187-4.patch ikiwiki-3.20141016.4/CVE-2019-9187-4.patch
--- ikiwiki-3.20141016.4+deb8u1/CVE-2019-9187-4.patch 2019-03-07 17:26:55.000000000 +1100
+++ ikiwiki-3.20141016.4/CVE-2019-9187-4.patch 1970-01-01 10:00:00.000000000 +1000
@@ -1,175 +0,0 @@
-From 9a275b2f1846d7268c71a740975447e269383849 Mon Sep 17 00:00:00 2001
-From: Simon McVittie <smcv@debian.org>
-Date: Sun, 10 Feb 2019 16:56:41 +0000
-Subject: [PATCH] doc: Document security issues involving LWP::UserAgent
-
-Recommend the LWPx::ParanoidAgent module where appropriate.
-It is particularly important for openid, since unauthenticated users
-can control which URLs that plugin will contact. Conversely, it is
-non-critical for blogspam, since the URL to be contacted is under
-the wiki administrator's control.
-
-Signed-off-by: Simon McVittie <smcv@debian.org>
----
- doc/plugins/aggregate.mdwn | 4 ++++
- doc/plugins/blogspam.mdwn | 2 ++
- doc/plugins/openid.mdwn | 7 +++++--
- doc/plugins/pinger.mdwn | 8 +++++---
- doc/security.mdwn | 49 +++++++++++++++++++++++++++++++++++++++++++++
- doc/tips/using_a_proxy.mdwn | 22 ++++++++++++++++++++
- 6 files changed, 87 insertions(+), 5 deletions(-)
- create mode 100644 doc/tips/using_a_proxy.mdwn
-
-diff --git a/doc/plugins/aggregate.mdwn b/doc/plugins/aggregate.mdwn
-index 75123d923..b1db828d1 100644
---- a/doc/plugins/aggregate.mdwn
-+++ b/doc/plugins/aggregate.mdwn
-@@ -11,6 +11,10 @@ The [[meta]] and [[tag]] plugins are also recommended to be used with this
- one. Either the [[htmltidy]] or [[htmlbalance]] plugin is suggested, since
- feeds can easily contain html problems, some of which these plugins can fix.
-
-+Installing the [[!cpan LWPx::ParanoidAgent]] Perl module is strongly
-+recommended. The [[!cpan LWP]] module can also be used, but is susceptible
-+to server-side request forgery.
-+
- ## triggering aggregation
-
- You will need to run ikiwiki periodically from a cron job, passing it the
-diff --git a/doc/plugins/blogspam.mdwn b/doc/plugins/blogspam.mdwn
-index 745fc48e2..0ebae7d84 100644
---- a/doc/plugins/blogspam.mdwn
-+++ b/doc/plugins/blogspam.mdwn
-@@ -11,6 +11,8 @@ To check for and moderate comments, log in to the wiki as an admin,
- go to your Preferences page, and click the "Comment Moderation" button.
-
- The plugin requires the [[!cpan JSON]] perl module.
-+The [[!cpan LWPx::ParanoidAgent]] Perl module is recommended,
-+although this plugin can also fall back to [[!cpan LWP]].
-
- You can control how content is tested via the `blogspam_options` setting.
- The list of options is [here](http://blogspam.net/api/2.0/testComment.html#options).
-diff --git a/doc/plugins/openid.mdwn b/doc/plugins/openid.mdwn
-index 4c8e0d381..a061cb43f 100644
---- a/doc/plugins/openid.mdwn
-+++ b/doc/plugins/openid.mdwn
-@@ -7,8 +7,11 @@ into the wiki.
- The plugin needs the [[!cpan Net::OpenID::Consumer]] perl module.
- Version 1.x is needed in order for OpenID v2 to work.
-
--The [[!cpan LWPx::ParanoidAgent]] perl module is used if available, for
--added security. Finally, the [[!cpan Crypt::SSLeay]] perl module is needed
-+The [[!cpan LWPx::ParanoidAgent]] Perl module is strongly recommended.
-+The [[!cpan LWP]] module can also be used, but is susceptible to
-+server-side request forgery.
-+
-+The [[!cpan Crypt::SSLeay]] Perl module is needed
- to support users entering "https" OpenID urls.
-
- This plugin is enabled by default, but can be turned off if you want to
-diff --git a/doc/plugins/pinger.mdwn b/doc/plugins/pinger.mdwn
-index 00d83e1bb..f37979ac6 100644
---- a/doc/plugins/pinger.mdwn
-+++ b/doc/plugins/pinger.mdwn
-@@ -10,9 +10,11 @@ can be kept up-to-date.
- To configure what URLs to ping, use the [[ikiwiki/directive/ping]]
- [[ikiwiki/directive]].
-
--The [[!cpan LWP]] perl module is used for pinging. Or the [[!cpan
--LWPx::ParanoidAgent]] perl module is used if available, for added security.
--Finally, the [[!cpan Crypt::SSLeay]] perl module is needed to support pinging
-+The [[!cpan LWPx::ParanoidAgent]] Perl module is strongly recommended.
-+The [[!cpan LWP]] module can also be used, but is susceptible
-+to server-side request forgery.
-+
-+The [[!cpan Crypt::SSLeay]] perl module is needed to support pinging
- "https" urls.
-
- By default the pinger will try to ping a site for 15 seconds before timing
-diff --git a/doc/security.mdwn b/doc/security.mdwn
-index e7770dd27..378a2e4bc 100644
---- a/doc/security.mdwn
-+++ b/doc/security.mdwn
-@@ -611,3 +611,52 @@ This was fixed in ikiwiki 3.20170111, with fixes backported to Debian 8
- in version 3.20141016.4.
-
- ([[!debcve CVE-2017-0356]]/OVE-20170111-0001)
-+
-+## Server-side request forgery via aggregate plugin
-+
-+The ikiwiki maintainers discovered that the [[plugins/aggregate]] plugin
-+did not use [[!cpan LWPx::ParanoidAgent]]. On sites where the
-+aggregate plugin is enabled, authorized wiki editors could tell ikiwiki
-+to fetch potentially undesired URIs even if LWPx::ParanoidAgent was
-+installed:
-+
-+* local files via `file:` URIs
-+* other URI schemes that might be misused by attackers, such as `gopher:`
-+* hosts that resolve to loopback IP addresses (127.x.x.x)
-+* hosts that resolve to RFC 1918 IP addresses (192.168.x.x etc.)
-+
-+This could be used by an attacker to publish information that should not have
-+been accessible, cause denial of service by requesting "tarpit" URIs that are
-+slow to respond, or cause undesired side-effects if local web servers implement
-+["unsafe"](https://tools.ietf.org/html/rfc7231#section-4.2.1) GET requests.
-+([[!debcve CVE-2019-9187]])
-+
-+Additionally, if the LWPx::ParanoidAgent module was not installed, the
-+[[plugins/blogspam]], [[plugins/openid]] and [[plugins/pinger]] plugins
-+would fall back to [[!cpan LWP]], which is susceptible to similar attacks.
-+This is unlikely to be a practical problem for the blogspam plugin because
-+the URL it requests is under the control of the wiki administrator, but
-+the openid plugin can request URLs controlled by unauthenticated remote
-+users, and the pinger plugin can request URLs controlled by authorized
-+wiki editors.
-+
-+This is addressed in ikiwiki 3.20190228 as follows, with the same fixes
-+backported to Debian 9 in version 3.20170111.1:
-+
-+* URI schemes other than `http:` and `https:` are not accepted, preventing
-+ access to `file:`, `gopher:`, etc.
-+
-+* If a proxy is [[configured in the ikiwiki setup file|tips/using_a_proxy]],
-+ it is used for all outgoing `http:` and `https:` requests. In this case
-+ the proxy is responsible for blocking any requests that are undesired,
-+ including loopback or RFC 1918 addresses.
-+
-+* If a proxy is not configured, and LWPx::ParanoidAgent is installed,
-+ it will be used. This prevents loopback and RFC 1918 IP addresses, and
-+ sets a timeout to avoid denial of service via "tarpit" URIs.
-+
-+* Otherwise, the ordinary LWP user-agent will be used. This allows requests
-+ to loopback and RFC 1918 IP addresses, and has less robust timeout
-+ behaviour. We are not treating this as a vulnerability: if this
-+ behaviour is not acceptable for your site, please make sure to install
-+ LWPx::ParanoidAgent or disable the affected plugins.
-diff --git a/doc/tips/using_a_proxy.mdwn b/doc/tips/using_a_proxy.mdwn
-new file mode 100644
-index 000000000..39df3c42a
---- /dev/null
-+++ b/doc/tips/using_a_proxy.mdwn
-@@ -0,0 +1,22 @@
-+Some ikiwiki plugins make outgoing HTTP requests from the web server:
-+
-+* [[plugins/aggregate]] (to download Atom and RSS feeds)
-+* [[plugins/blogspam]] (to check whether a comment or edit is spam)
-+* [[plugins/openid]] (to authenticate users)
-+* [[plugins/pinger]] (to ping other ikiwiki installations)
-+
-+If your ikiwiki installation cannot contact the Internet without going
-+through a proxy, you can configure this in the [[setup file|setup]] by
-+setting environment variables:
-+
-+ ENV:
-+ http_proxy: "http://proxy.example.com:8080"
-+ https_proxy: "http://proxy.example.com:8080"
-+ # optional
-+ no_proxy: ".example.com,www.example.org"
-+
-+Note that some plugins will use the configured proxy for all destinations,
-+even if they are listed in `no_proxy`.
-+
-+To avoid server-side request forgery attacks, ensure that your proxy does
-+not allow requests to addresses that are considered to be internal.
---
-2.11.0
-
diff -Nru ikiwiki-3.20141016.4+deb8u1/debian/changelog ikiwiki-3.20141016.4/debian/changelog
--- ikiwiki-3.20141016.4+deb8u1/debian/changelog 2019-03-07 17:35:55.000000000 +1100
+++ ikiwiki-3.20141016.4/debian/changelog 2017-01-12 05:18:52.000000000 +1100
@@ -1,10 +1,3 @@
-ikiwiki (3.20141016.4+deb8u1) jessie-security; urgency=high
-
- * Non-maintainer upload by the LTS Team.
- * CVE-2019-9187: Fix server-side request forgery via aggregate plugin.
-
- -- Brian May <bam@debian.org> Thu, 07 Mar 2019 17:35:55 +1100
-
ikiwiki (3.20141016.4) jessie-security; urgency=high

 * Reference CVE-2016-4561 in 3.20141016.3 changelog
diff -Nru ikiwiki-3.20141016.4+deb8u1/debian/control ikiwiki-3.20141016.4/debian/control
--- ikiwiki-3.20141016.4+deb8u1/debian/control 2019-03-07 17:35:55.000000000 +1100
+++ ikiwiki-3.20141016.4/debian/control 2017-01-12 05:18:52.000000000 +1100
@@ -17,8 +17,7 @@
libnet-openid-consumer-perl,
libxml-feed-perl,
libxml-parser-perl,
- libxml-twig-perl,
- liblwpx-paranoidagent-perl,
+ libxml-twig-perl
Maintainer: Simon McVittie <smcv@debian.org>
Uploaders: Josh Triplett <josh@freedesktop.org>
Standards-Version: 3.9.5
diff -Nru ikiwiki-3.20141016.4+deb8u1/debian/patches/CVE-2019-9187-1.patch ikiwiki-3.20141016.4/debian/patches/CVE-2019-9187-1.patch
--- ikiwiki-3.20141016.4+deb8u1/debian/patches/CVE-2019-9187-1.patch 2019-03-07 17:32:31.000000000 +1100
+++ ikiwiki-3.20141016.4/debian/patches/CVE-2019-9187-1.patch 1970-01-01 10:00:00.000000000 +1000
@@ -1,23 +0,0 @@
-From e7b0d4a0fff8ed45a90c2efe8ef294bdf7c9bdac Mon Sep 17 00:00:00 2001
-From: Simon McVittie <smcv@debian.org>
-Date: Sun, 10 Feb 2019 16:29:19 +0000
-Subject: [PATCH] useragent: Raise an exception if the LWP module can't be
- loaded
-
-Signed-off-by: Simon McVittie <smcv@debian.org>
----
- IkiWiki.pm | 3 +++
- 1 file changed, 3 insertions(+)
-
---- a/IkiWiki.pm
-+++ b/IkiWiki.pm
-@@ -2368,6 +2368,9 @@
- }
-
- sub useragent () {
-+ eval q{use LWP};
-+ error($@) if $@;
-+
- return LWP::UserAgent->new(
- cookie_jar => $config{cookiejar},
- env_proxy => 1, # respect proxy env vars
diff -Nru ikiwiki-3.20141016.4+deb8u1/debian/patches/CVE-2019-9187-2.patch ikiwiki-3.20141016.4/debian/patches/CVE-2019-9187-2.patch
--- ikiwiki-3.20141016.4+deb8u1/debian/patches/CVE-2019-9187-2.patch 2019-03-07 17:32:43.000000000 +1100
+++ ikiwiki-3.20141016.4/debian/patches/CVE-2019-9187-2.patch 1970-01-01 10:00:00.000000000 +1000
@@ -1,224 +0,0 @@
-From 67543ce1d62161fdef9dca198289d7dd7dceacc0 Mon Sep 17 00:00:00 2001
-From: Simon McVittie <smcv@debian.org>
-Date: Sun, 10 Feb 2019 16:30:07 +0000
-Subject: [PATCH] useragent: Don't allow non-HTTP protocols to be used
-
-This prevents the aggregate plugin from being used to read the contents
-of local files via file:/// URLs.
-
-Signed-off-by: Simon McVittie <smcv@debian.org>
----
- IkiWiki.pm | 1 +
- t/aggregate-file.t | 173 +++++++++++++++++++++++++++++++++++++
- t/noparanoia/LWPx/ParanoidAgent.pm | 2 +
- t/secret.rss | 11 +++
- 4 files changed, 187 insertions(+)
- create mode 100755 t/aggregate-file.t
- create mode 100644 t/noparanoia/LWPx/ParanoidAgent.pm
- create mode 100644 t/secret.rss
-
---- a/IkiWiki.pm
-+++ b/IkiWiki.pm
-@@ -2375,6 +2375,7 @@
- cookie_jar => $config{cookiejar},
- env_proxy => 1, # respect proxy env vars
- agent => $config{useragent},
-+ protocols_allowed => [qw(http https)],
- );
- }
-
---- /dev/null
-+++ b/t/aggregate-file.t
-@@ -0,0 +1,173 @@
-+#!/usr/bin/perl
-+use utf8;
-+use warnings;
-+use strict;
-+
-+use Encode;
-+use Test::More;
-+
-+BEGIN {
-+ plan(skip_all => "CGI not available")
-+ unless eval q{
-+ use CGI qw();
-+ 1;
-+ };
-+
-+ plan(skip_all => "IPC::Run not available")
-+ unless eval q{
-+ use IPC::Run qw(run);
-+ 1;
-+ };
-+
-+ use_ok('IkiWiki');
-+ use_ok('YAML::XS');
-+}
-+
-+# We check for English error messages
-+$ENV{LC_ALL} = 'C';
-+
-+use Cwd qw(getcwd);
-+use Errno qw(ENOENT);
-+
-+my $installed = $ENV{INSTALLED_TESTS};
-+
-+my @command;
-+if ($installed) {
-+ @command = qw(ikiwiki --plugin inline);
-+}
-+else {
-+ ok(! system("make -s ikiwiki.out"));
-+ @command = ("perl", "-I".getcwd."/blib/lib", './ikiwiki.out',
-+ '--underlaydir='.getcwd.'/underlays/basewiki',
-+ '--set', 'underlaydirbase='.getcwd.'/underlays',
-+ '--templatedir='.getcwd.'/templates');
-+}
-+
-+sub write_old_file {
-+ my $name = shift;
-+ my $dir = shift;
-+ my $content = shift;
-+ writefile($name, $dir, $content);
-+ ok(utime(333333333, 333333333, "$dir/$name"));
-+}
-+
-+sub write_setup_file {
-+ my %params = @_;
-+ my %setup = (
-+ wikiname => 'this is the name of my wiki',
-+ srcdir => getcwd.'/t/tmp/in',
-+ destdir => getcwd.'/t/tmp/out',
-+ url => 'http://example.com',
-+ cgiurl => 'http://example.com/cgi-bin/ikiwiki.cgi',
-+ cgi_wrapper => getcwd.'/t/tmp/ikiwiki.cgi',
-+ cgi_wrappermode => '0751',
-+ add_plugins => [qw(aggregate)],
-+ disable_plugins => [qw(emailauth openid passwordauth)],
-+ aggregate_webtrigger => 1,
-+ );
-+ if ($params{without_paranoia}) {
-+ $setup{libdirs} = [getcwd.'/t/noparanoia'];
-+ }
-+ unless ($installed) {
-+ $setup{ENV} = { 'PERL5LIB' => getcwd.'/blib/lib' };
-+ }
-+ writefile("test.setup", "t/tmp",
-+ "# IkiWiki::Setup::Yaml - YAML formatted setup file\n" .
-+ Dump(\%setup));
-+}
-+
-+sub thoroughly_rebuild {
-+ ok(unlink("t/tmp/ikiwiki.cgi") || $!{ENOENT});
-+ ok(! system(@command, qw(--setup t/tmp/test.setup --rebuild --wrappers)));
-+}
-+
-+sub run_cgi {
-+ my (%args) = @_;
-+ my ($in, $out);
-+ my $method = $args{method} || 'GET';
-+ my $environ = $args{environ} || {};
-+ my $params = $args{params} || { do => 'prefs' };
-+
-+ my %defaults = (
-+ SCRIPT_NAME => '/cgi-bin/ikiwiki.cgi',
-+ HTTP_HOST => 'example.com',
-+ );
-+
-+ my $cgi = CGI->new($args{params});
-+ my $query_string = $cgi->query_string();
-+ diag $query_string;
-+
-+ if ($method eq 'POST') {
-+ $defaults{REQUEST_METHOD} = 'POST';
-+ $in = $query_string;
-+ $defaults{CONTENT_LENGTH} = length $in;
-+ } else {
-+ $defaults{REQUEST_METHOD} = 'GET';
-+ $defaults{QUERY_STRING} = $query_string;
-+ }
-+
-+ my %envvars = (
-+ %defaults,
-+ %$environ,
-+ );
-+ run(["./t/tmp/ikiwiki.cgi"], \$in, \$out, init => sub {
-+ map {
-+ $ENV{$_} = $envvars{$_}
-+ } keys(%envvars);
-+ });
-+
-+ return decode_utf8($out);
-+}
-+
-+sub test {
-+ my $content;
-+
-+ ok(! system(qw(rm -rf t/tmp)));
-+ ok(! system(qw(mkdir t/tmp)));
-+
-+ write_old_file('aggregator.mdwn', 't/tmp/in',
-+ '[[!aggregate name="ssrf" url="file://'.getcwd.'/t/secret.rss"]]'
-+ .'[[!inline pages="internal(aggregator/*)"]]');
-+
-+ write_setup_file();
-+ thoroughly_rebuild();
-+
-+ $content = run_cgi(
-+ method => 'GET',
-+ params => {
-+ do => 'aggregate_webtrigger',
-+ },
-+ );
-+ unlike($content, qr{creating new page});
-+ unlike($content, qr{Secrets});
-+ ok(! -e 't/tmp/in/.ikiwiki/transient/aggregator/ssrf');
-+ ok(! -e 't/tmp/in/.ikiwiki/transient/aggregator/ssrf/Secrets_go_here._aggregated');
-+
-+ thoroughly_rebuild();
-+ $content = readfile('t/tmp/out/aggregator/index.html');
-+ unlike($content, qr{Secrets});
-+
-+ diag('Trying test again with LWPx::ParanoidAgent disabled');
-+
-+ write_setup_file(without_paranoia => 1);
-+ thoroughly_rebuild();
-+
-+ $content = run_cgi(
-+ method => 'GET',
-+ params => {
-+ do => 'aggregate_webtrigger',
-+ },
-+ );
-+ unlike($content, qr{creating new page});
-+ unlike($content, qr{Secrets});
-+ ok(! -e 't/tmp/in/.ikiwiki/transient/aggregator/ssrf');
-+ ok(! -e 't/tmp/in/.ikiwiki/transient/aggregator/ssrf/Secrets_go_here._aggregated');
-+
-+ thoroughly_rebuild();
-+ $content = readfile('t/tmp/out/aggregator/index.html');
-+ unlike($content, qr{Secrets});
-+}
-+
-+test();
-+
-+done_testing();
---- /dev/null
-+++ b/t/noparanoia/LWPx/ParanoidAgent.pm
-@@ -0,0 +1,2 @@
-+# make import fail
-+0;
---- /dev/null
-+++ b/t/secret.rss
-@@ -0,0 +1,11 @@
-+<?xml version="1.0"?>
-+<rss version="2.0">
-+<channel>
-+<title>Secrets go here</title>
-+<description>Secrets go here</description>
-+<item>
-+ <title>Secrets go here</title>
-+ <description>Secrets go here</description>
-+</item>
-+</channel>
-+</rss>
diff -Nru ikiwiki-3.20141016.4+deb8u1/debian/patches/CVE-2019-9187-3.patch ikiwiki-3.20141016.4/debian/patches/CVE-2019-9187-3.patch
--- ikiwiki-3.20141016.4+deb8u1/debian/patches/CVE-2019-9187-3.patch 2019-03-07 17:32:58.000000000 +1100
+++ ikiwiki-3.20141016.4/debian/patches/CVE-2019-9187-3.patch 1970-01-01 10:00:00.000000000 +1000
@@ -1,574 +0,0 @@
-From d283e4ca1aeb6ca8cc0951c8495f778071076013 Mon Sep 17 00:00:00 2001
-From: Simon McVittie <smcv@debian.org>
-Date: Sun, 10 Feb 2019 17:22:06 +0000
-Subject: [PATCH] useragent: Automatically choose whether to use
- LWPx::ParanoidAgent
-
-The simple implementation of this, which I'd prefer to use, would be:
-if we can import LWPx::ParanoidAgent, use it; otherwise, use
-LWP::UserAgent.
-
-However, aggregate has historically worked with proxies, and
-LWPx::ParanoidAgent quite reasonably refuses to work with proxies
-(because it can't know whether those proxies are going to do the same
-filtering that LWPx::ParanoidAgent would).
-
-Signed-off-by: Simon McVittie <smcv@debian.org>
----
- IkiWiki.pm | 123 ++++++++++++++++-
- IkiWiki/Plugin/aggregate.pm | 5 +-
- IkiWiki/Plugin/blogspam.pm | 16 +--
- IkiWiki/Plugin/openid.pm | 12 +-
- IkiWiki/Plugin/pinger.pm | 21 ++-
- t/useragent.t | 317 ++++++++++++++++++++++++++++++++++++++++++++
- 6 files changed, 458 insertions(+), 36 deletions(-)
- create mode 100755 t/useragent.t
-
---- a/IkiWiki.pm
-+++ b/IkiWiki.pm
-@@ -2367,16 +2367,131 @@
- $autofiles{$file}{generator}=$generator;
- }
-
--sub useragent () {
-+sub useragent (@) {
-+ my %params = @_;
-+ my $for_url = delete $params{for_url};
-+ # Fail safe, in case a plugin calling this function is relying on
-+ # a future parameter to make the UA more strict
-+ foreach my $key (keys %params) {
-+ error "Internal error: useragent(\"$key\" => ...) not understood";
-+ }
-+
- eval q{use LWP};
- error($@) if $@;
-
-- return LWP::UserAgent->new(
-- cookie_jar => $config{cookiejar},
-- env_proxy => 1, # respect proxy env vars
-+ my %args = (
- agent => $config{useragent},
-+ cookie_jar => $config{cookiejar},
-+ env_proxy => 0,
- protocols_allowed => [qw(http https)],
- );
-+ my %proxies;
-+
-+ if (defined $for_url) {
-+ # We know which URL we're going to fetch, so we can choose
-+ # whether it's going to go through a proxy or not.
-+ #
-+ # We reimplement http_proxy, https_proxy and no_proxy here, so
-+ # that we are not relying on LWP implementing them exactly the
-+ # same way we do.
-+
-+ eval q{use URI};
-+ error($@) if $@;
-+
-+ my $proxy;
-+ my $uri = URI->new($for_url);
-+
-+ if ($uri->scheme eq 'http') {
-+ $proxy = $ENV{http_proxy};
-+ # HTTP_PROXY is deliberately not implemented
-+ # because the HTTP_* namespace is also used by CGI
-+ }
-+ elsif ($uri->scheme eq 'https') {
-+ $proxy = $ENV{https_proxy};
-+ $proxy = $ENV{HTTPS_PROXY} unless defined $proxy;
-+ }
-+ else {
-+ $proxy = undef;
-+ }
-+
-+ foreach my $var (qw(no_proxy NO_PROXY)) {
-+ my $no_proxy = $ENV{$var};
-+ if (defined $no_proxy) {
-+ foreach my $domain (split /\s*,\s*/, $no_proxy) {
-+ if ($domain =~ s/^\*?\.//) {
-+ # no_proxy="*.example.com" or
-+ # ".example.com": match suffix
-+ # against .example.com
-+ if ($uri->host =~ m/(^|\.)\Q$domain\E$/i) {
-+ $proxy = undef;
-+ }
-+ }
-+ else {
-+ # no_proxy="example.com":
-+ # match exactly example.com
-+ if (lc $uri->host eq lc $domain) {
-+ $proxy = undef;
-+ }
-+ }
-+ }
-+ }
-+ }
-+
-+ if (defined $proxy) {
-+ $proxies{$uri->scheme} = $proxy;
-+ # Paranoia: make sure we can't bypass the proxy
-+ $args{protocols_allowed} = [$uri->scheme];
-+ }
-+ }
-+ else {
-+ # The plugin doesn't know yet which URL(s) it's going to
-+ # fetch, so we have to make some conservative assumptions.
-+ my $http_proxy = $ENV{http_proxy};
-+ my $https_proxy = $ENV{https_proxy};
-+ $https_proxy = $ENV{HTTPS_PROXY} unless defined $https_proxy;
-+
-+ # We don't respect no_proxy here: if we are not using the
-+ # paranoid user-agent, then we need to give the proxy the
-+ # opportunity to reject undesirable requests.
-+
-+ # If we have one, we need the other: otherwise, neither
-+ # LWPx::ParanoidAgent nor the proxy would have the
-+ # opportunity to filter requests for the other protocol.
-+ if (defined $https_proxy && defined $http_proxy) {
-+ %proxies = (http => $http_proxy, https => $https_proxy);
-+ }
-+ elsif (defined $https_proxy) {
-+ %proxies = (http => $https_proxy, https => $https_proxy);
-+ }
-+ elsif (defined $http_proxy) {
-+ %proxies = (http => $http_proxy, https => $http_proxy);
-+ }
-+
-+ }
-+
-+ if (scalar keys %proxies) {
-+ # The configured proxy is responsible for deciding which
-+ # URLs are acceptable to fetch and which URLs are not.
-+ my $ua = LWP::UserAgent->new(%args);
-+ foreach my $scheme (@{$ua->protocols_allowed}) {
-+ unless ($proxies{$scheme}) {
-+ error "internal error: $scheme is allowed but has no proxy";
-+ }
-+ }
-+ # We can't pass the proxies in %args because that only
-+ # works since LWP 6.24.
-+ foreach my $scheme (keys %proxies) {
-+ $ua->proxy($scheme, $proxies{$scheme});
-+ }
-+ return $ua;
-+ }
-+
-+ eval q{use LWPx::ParanoidAgent};
-+ if ($@) {
-+ print STDERR "warning: installing LWPx::ParanoidAgent is recommended\n";
-+ return LWP::UserAgent->new(%args);
-+ }
-+ return LWPx::ParanoidAgent->new(%args);
- }
-
- sub sortspec_translate ($$) {
---- a/IkiWiki/Plugin/aggregate.pm
-+++ b/IkiWiki/Plugin/aggregate.pm
-@@ -513,7 +513,10 @@
- }
- $feed->{feedurl}=pop @urls;
- }
-- my $ua=useragent();
-+ # Using the for_url parameter makes sure we crash if used
-+ # with an older IkiWiki.pm that didn't automatically try
-+ # to use LWPx::ParanoidAgent.
-+ my $ua=useragent(for_url => $feed->{feedurl});
- my $res=URI::Fetch->fetch($feed->{feedurl}, UserAgent=>$ua);
- if (! $res) {
- $feed->{message}=URI::Fetch->errstr;
---- a/IkiWiki/Plugin/blogspam.pm
-+++ b/IkiWiki/Plugin/blogspam.pm
-@@ -57,18 +57,10 @@
- };
- error $@ if $@;
-
-- eval q{use LWPx::ParanoidAgent};
-- if (!$@) {
-- $client=LWPx::ParanoidAgent->new(agent => $config{useragent});
-- }
-- else {
-- eval q{use LWP};
-- if ($@) {
-- error $@;
-- return;
-- }
-- $client=useragent();
-- }
-+ # Using the for_url parameter makes sure we crash if used
-+ # with an older IkiWiki.pm that didn't automatically try
-+ # to use LWPx::ParanoidAgent.
-+ $client=useragent(for_url => $config{blogspam_server});
- }
-
- sub checkcontent (@) {
---- a/IkiWiki/Plugin/openid.pm
-+++ b/IkiWiki/Plugin/openid.pm
-@@ -237,14 +237,10 @@
- eval q{use Net::OpenID::Consumer};
- error($@) if $@;
-
-- my $ua;
-- eval q{use LWPx::ParanoidAgent};
-- if (! $@) {
-- $ua=LWPx::ParanoidAgent->new(agent => $config{useragent});
-- }
-- else {
-- $ua=useragent();
-- }
-+ # We pass the for_url parameter, even though it's undef, because
-+ # that will make sure we crash if used with an older IkiWiki.pm
-+ # that didn't automatically try to use LWPx::ParanoidAgent.
-+ my $ua=useragent(for_url => undef);
-
- # Store the secret in the session.
- my $secret=$session->param("openid_secret");
---- a/IkiWiki/Plugin/pinger.pm
-+++ b/IkiWiki/Plugin/pinger.pm
-@@ -70,17 +70,16 @@
- eval q{use Net::INET6Glue::INET_is_INET6}; # may not be available
-
- my $ua;
-- eval q{use LWPx::ParanoidAgent};
-- if (!$@) {
-- $ua=LWPx::ParanoidAgent->new(agent => $config{useragent});
-- }
-- else {
-- eval q{use LWP};
-- if ($@) {
-- debug(gettext("LWP not found, not pinging"));
-- return;
-- }
-- $ua=useragent();
-+ eval {
-+ # We pass the for_url parameter, even though it's
-+ # undef, because that will make sure we crash if used
-+ # with an older IkiWiki.pm that didn't automatically
-+ # try to use LWPx::ParanoidAgent.
-+ $ua=useragent(for_url => undef);
-+ };
-+ if ($@) {
-+ debug(gettext("LWP not found, not pinging").": $@");
-+ return;
- }
- $ua->timeout($config{pinger_timeout} || 15);
-
---- /dev/null
-+++ b/t/useragent.t
-@@ -0,0 +1,317 @@
-+#!/usr/bin/perl
-+use warnings;
-+use strict;
-+use Test::More;
-+
-+my $have_paranoid_agent;
-+BEGIN {
-+ plan(skip_all => 'LWP not available')
-+ unless eval q{
-+ use LWP qw(); 1;
-+ };
-+ use_ok("IkiWiki");
-+ $have_paranoid_agent = eval q{
-+ use LWPx::ParanoidAgent qw(); 1;
-+ };
-+}
-+
-+eval { useragent(future_feature => 1); };
-+ok($@, 'future features should cause useragent to fail');
-+
-+diag "==== No proxy ====";
-+delete $ENV{http_proxy};
-+delete $ENV{https_proxy};
-+delete $ENV{no_proxy};
-+delete $ENV{HTTPS_PROXY};
-+delete $ENV{NO_PROXY};
-+
-+diag "---- Unspecified URL ----";
-+my $ua = useragent(for_url => undef);
-+SKIP: {
-+ skip 'paranoid agent not available', 1 unless $have_paranoid_agent;
-+ ok($ua->isa('LWPx::ParanoidAgent'), 'uses ParanoidAgent if possible');
-+}
-+is_deeply([sort @{$ua->protocols_allowed}], [sort qw(http https)]);
-+is($ua->proxy('http'), undef, 'No http proxy');
-+is($ua->proxy('https'), undef, 'No https proxy');
-+
-+diag "---- Specified URL ----";
-+$ua = useragent(for_url => 'http://example.com');
-+SKIP: {
-+ skip 'paranoid agent not available', 1 unless $have_paranoid_agent;
-+ ok($ua->isa('LWPx::ParanoidAgent'), 'uses ParanoidAgent if possible');
-+}
-+is_deeply([sort @{$ua->protocols_allowed}], [sort qw(http https)]);
-+is($ua->proxy('http'), undef, 'No http proxy');
-+is($ua->proxy('https'), undef, 'No https proxy');
-+
-+diag "==== Proxy for everything ====";
-+$ENV{http_proxy} = 'http://proxy:8080';
-+$ENV{https_proxy} = 'http://sproxy:8080';
-+delete $ENV{no_proxy};
-+delete $ENV{HTTPS_PROXY};
-+delete $ENV{NO_PROXY};
-+
-+diag "---- Unspecified URL ----";
-+$ua = useragent(for_url => undef);
-+ok(! $ua->isa('LWPx::ParanoidAgent'), 'should use proxy instead of ParanoidAgent');
-+is_deeply([sort @{$ua->protocols_allowed}], [sort qw(http https)]);
-+is($ua->proxy('http'), 'http://proxy:8080', 'should use proxy');
-+is($ua->proxy('https'), 'http://sproxy:8080', 'should use CONNECT proxy');
-+$ua = useragent(for_url => 'http://example.com');
-+ok(! $ua->isa('LWPx::ParanoidAgent'), 'should use proxy instead of ParanoidAgent');
-+is_deeply([sort @{$ua->protocols_allowed}], [sort qw(http)]);
-+is($ua->proxy('http'), 'http://proxy:8080', 'should use proxy');
-+# We don't care what $ua->proxy('https') is, because it won't be used
-+$ua = useragent(for_url => 'https://example.com');
-+ok(! $ua->isa('LWPx::ParanoidAgent'), 'should use proxy instead of ParanoidAgent');
-+is_deeply([sort @{$ua->protocols_allowed}], [sort qw(https)]);
-+# We don't care what $ua->proxy('http') is, because it won't be used
-+is($ua->proxy('https'), 'http://sproxy:8080', 'should use CONNECT proxy');
-+
-+diag "==== Selective proxy ====";
-+$ENV{http_proxy} = 'http://proxy:8080';
-+$ENV{https_proxy} = 'http://sproxy:8080';
-+$ENV{no_proxy} = '*.example.net,example.com,.example.org';
-+delete $ENV{HTTPS_PROXY};
-+delete $ENV{NO_PROXY};
-+
-+diag "---- Unspecified URL ----";
-+$ua = useragent(for_url => undef);
-+ok(! $ua->isa('LWPx::ParanoidAgent'), 'should use proxy instead of ParanoidAgent');
-+is_deeply([sort @{$ua->protocols_allowed}], [sort qw(http https)]);
-+is($ua->proxy('http'), 'http://proxy:8080', 'should use proxy');
-+is($ua->proxy('https'), 'http://sproxy:8080', 'should use CONNECT proxy');
-+
-+diag "---- Exact match for no_proxy ----";
-+$ua = useragent(for_url => 'http://example.com');
-+SKIP: {
-+ skip 'paranoid agent not available', 1 unless $have_paranoid_agent;
-+ ok($ua->isa('LWPx::ParanoidAgent'), 'uses ParanoidAgent if possible');
-+}
-+is_deeply([sort @{$ua->protocols_allowed}], [sort qw(http https)]);
-+is($ua->proxy('http'), undef);
-+is($ua->proxy('https'), undef);
-+
-+diag "---- Subdomain of exact domain in no_proxy ----";
-+$ua = useragent(for_url => 'http://sub.example.com');
-+ok(! $ua->isa('LWPx::ParanoidAgent'), 'should use proxy instead of ParanoidAgent');
-+is_deeply([sort @{$ua->protocols_allowed}], [sort qw(http)]);
-+is($ua->proxy('http'), 'http://proxy:8080', 'should use proxy');
-+
-+diag "---- example.net matches *.example.net ----";
-+$ua = useragent(for_url => 'https://example.net');
-+SKIP: {
-+ skip 'paranoid agent not available', 1 unless $have_paranoid_agent;
-+ ok($ua->isa('LWPx::ParanoidAgent'), 'uses ParanoidAgent if possible');
-+}
-+is_deeply([sort @{$ua->protocols_allowed}], [sort qw(http https)]);
-+is($ua->proxy('http'), undef);
-+is($ua->proxy('https'), undef);
-+
-+diag "---- sub.example.net matches *.example.net ----";
-+$ua = useragent(for_url => 'https://sub.example.net');
-+SKIP: {
-+ skip 'paranoid agent not available', 1 unless $have_paranoid_agent;
-+ ok($ua->isa('LWPx::ParanoidAgent'), 'uses ParanoidAgent if possible');
-+}
-+is_deeply([sort @{$ua->protocols_allowed}], [sort qw(http https)]);
-+is($ua->proxy('http'), undef);
-+is($ua->proxy('https'), undef);
-+
-+diag "---- badexample.net does not match *.example.net ----";
-+$ua = useragent(for_url => 'https://badexample.net');
-+ok(! $ua->isa('LWPx::ParanoidAgent'), 'should use proxy instead of ParanoidAgent');
-+is_deeply([sort @{$ua->protocols_allowed}], [sort qw(https)]);
-+is($ua->proxy('https'), 'http://sproxy:8080', 'should use proxy');
-+
-+diag "---- example.org matches .example.org ----";
-+$ua = useragent(for_url => 'https://example.org');
-+SKIP: {
-+ skip 'paranoid agent not available', 1 unless $have_paranoid_agent;
-+ ok($ua->isa('LWPx::ParanoidAgent'), 'uses ParanoidAgent if possible');
-+}
-+is_deeply([sort @{$ua->protocols_allowed}], [sort qw(http https)]);
-+is($ua->proxy('http'), undef);
-+is($ua->proxy('https'), undef);
-+
-+diag "---- sub.example.org matches .example.org ----";
-+$ua = useragent(for_url => 'https://sub.example.org');
-+SKIP: {
-+ skip 'paranoid agent not available', 1 unless $have_paranoid_agent;
-+ ok($ua->isa('LWPx::ParanoidAgent'), 'uses ParanoidAgent if possible');
-+}
-+is_deeply([sort @{$ua->protocols_allowed}], [sort qw(http https)]);
-+is($ua->proxy('http'), undef);
-+is($ua->proxy('https'), undef);
-+
-+diag "---- badexample.org does not match .example.org ----";
-+$ua = useragent(for_url => 'https://badexample.org');
-+ok(! $ua->isa('LWPx::ParanoidAgent'), 'should use proxy instead of ParanoidAgent');
-+is_deeply([sort @{$ua->protocols_allowed}], [sort qw(https)]);
-+is($ua->proxy('https'), 'http://sproxy:8080', 'should use proxy');
-+
-+diag "==== Selective proxy (alternate variables) ====";
-+$ENV{http_proxy} = 'http://proxy:8080';
-+delete $ENV{https_proxy};
-+$ENV{HTTPS_PROXY} = 'http://sproxy:8080';
-+delete $ENV{no_proxy};
-+$ENV{NO_PROXY} = '*.example.net,example.com,.example.org';
-+
-+diag "---- Unspecified URL ----";
-+$ua = useragent(for_url => undef);
-+ok(! $ua->isa('LWPx::ParanoidAgent'), 'should use proxy instead of ParanoidAgent');
-+is_deeply([sort @{$ua->protocols_allowed}], [sort qw(http https)]);
-+is($ua->proxy('http'), 'http://proxy:8080', 'should use proxy');
-+is($ua->proxy('https'), 'http://sproxy:8080', 'should use CONNECT proxy');
-+
-+diag "---- Exact match for no_proxy ----";
-+$ua = useragent(for_url => 'http://example.com');
-+SKIP: {
-+ skip 'paranoid agent not available', 1 unless $have_paranoid_agent;
-+ ok($ua->isa('LWPx::ParanoidAgent'), 'uses ParanoidAgent if possible');
-+}
-+is_deeply([sort @{$ua->protocols_allowed}], [sort qw(http https)]);
-+is($ua->proxy('http'), undef);
-+is($ua->proxy('https'), undef);
-+
-+diag "---- Subdomain of exact domain in no_proxy ----";
-+$ua = useragent(for_url => 'http://sub.example.com');
-+ok(! $ua->isa('LWPx::ParanoidAgent'), 'should use proxy instead of ParanoidAgent');
-+is_deeply([sort @{$ua->protocols_allowed}], [sort qw(http)]);
-+is($ua->proxy('http'), 'http://proxy:8080', 'should use proxy');
-+
-+diag "---- example.net matches *.example.net ----";
-+$ua = useragent(for_url => 'https://example.net');
-+SKIP: {
-+ skip 'paranoid agent not available', 1 unless $have_paranoid_agent;
-+ ok($ua->isa('LWPx::ParanoidAgent'), 'uses ParanoidAgent if possible');
-+}
-+is_deeply([sort @{$ua->protocols_allowed}], [sort qw(http https)]);
-+is($ua->proxy('http'), undef);
-+is($ua->proxy('https'), undef);
-+
-+diag "---- sub.example.net matches *.example.net ----";
-+$ua = useragent(for_url => 'https://sub.example.net');
-+SKIP: {
-+ skip 'paranoid agent not available', 1 unless $have_paranoid_agent;
-+ ok($ua->isa('LWPx::ParanoidAgent'), 'uses ParanoidAgent if possible');
-+}
-+is_deeply([sort @{$ua->protocols_allowed}], [sort qw(http https)]);
-+is($ua->proxy('http'), undef);
-+is($ua->proxy('https'), undef);
-+
-+diag "---- badexample.net does not match *.example.net ----";
-+$ua = useragent(for_url => 'https://badexample.net');
-+ok(! $ua->isa('LWPx::ParanoidAgent'), 'should use proxy instead of ParanoidAgent');
-+is_deeply([sort @{$ua->protocols_allowed}], [sort qw(https)]);
-+is($ua->proxy('https'), 'http://sproxy:8080', 'should use proxy');
-+
-+diag "---- example.org matches .example.org ----";
-+$ua = useragent(for_url => 'https://example.org');
-+SKIP: {
-+ skip 'paranoid agent not available', 1 unless $have_paranoid_agent;
-+ ok($ua->isa('LWPx::ParanoidAgent'), 'uses ParanoidAgent if possible');
-+}
-+is_deeply([sort @{$ua->protocols_allowed}], [sort qw(http https)]);
-+is($ua->proxy('http'), undef);
-+is($ua->proxy('https'), undef);
-+
-+diag "---- sub.example.org matches .example.org ----";
-+$ua = useragent(for_url => 'https://sub.example.org');
-+SKIP: {
-+ skip 'paranoid agent not available', 1 unless $have_paranoid_agent;
-+ ok($ua->isa('LWPx::ParanoidAgent'), 'uses ParanoidAgent if possible');
-+}
-+is_deeply([sort @{$ua->protocols_allowed}], [sort qw(http https)]);
-+is($ua->proxy('http'), undef);
-+is($ua->proxy('https'), undef);
-+
-+diag "---- badexample.org does not match .example.org ----";
-+$ua = useragent(for_url => 'https://badexample.org');
-+ok(! $ua->isa('LWPx::ParanoidAgent'), 'should use proxy instead of ParanoidAgent');
-+is_deeply([sort @{$ua->protocols_allowed}], [sort qw(https)]);
-+is($ua->proxy('https'), 'http://sproxy:8080', 'should use proxy');
-+
-+diag "==== Selective proxy (many variables) ====";
-+$ENV{http_proxy} = 'http://proxy:8080';
-+$ENV{https_proxy} = 'http://sproxy:8080';
-+# This one should be ignored in favour of https_proxy
-+$ENV{HTTPS_PROXY} = 'http://not.preferred.proxy:3128';
-+# These two should be merged
-+$ENV{no_proxy} = '*.example.net,example.com';
-+$ENV{NO_PROXY} = '.example.org';
-+
-+diag "---- Unspecified URL ----";
-+$ua = useragent(for_url => undef);
-+ok(! $ua->isa('LWPx::ParanoidAgent'), 'should use proxy instead of ParanoidAgent');
-+is_deeply([sort @{$ua->protocols_allowed}], [sort qw(http https)]);
-+is($ua->proxy('http'), 'http://proxy:8080', 'should use proxy');
-+is($ua->proxy('https'), 'http://sproxy:8080', 'should use CONNECT proxy');
-+
-+diag "---- Exact match for no_proxy ----";
-+$ua = useragent(for_url => 'http://example.com');
-+SKIP: {
-+ skip 'paranoid agent not available', 1 unless $have_paranoid_agent;
-+ ok($ua->isa('LWPx::ParanoidAgent'), 'uses ParanoidAgent if possible');
-+}
-+is_deeply([sort @{$ua->protocols_allowed}], [sort qw(http https)]);
-+is($ua->proxy('http'), undef);
-+is($ua->proxy('https'), undef);
-+
-+diag "---- Subdomain of exact domain in no_proxy ----";
-+$ua = useragent(for_url => 'http://sub.example.com');
-+ok(! $ua->isa('LWPx::ParanoidAgent'), 'should use proxy instead of ParanoidAgent');
-+is_deeply([sort @{$ua->protocols_allowed}], [sort qw(http)]);
-+is($ua->proxy('http'), 'http://proxy:8080', 'should use proxy');
-+
-+diag "---- example.net matches *.example.net ----";
-+$ua = useragent(for_url => 'https://example.net');
-+SKIP: {
-+ skip 'paranoid agent not available', 1 unless $have_paranoid_agent;
-+ ok($ua->isa('LWPx::ParanoidAgent'), 'uses ParanoidAgent if possible');
-+}
-+is_deeply([sort @{$ua->protocols_allowed}], [sort qw(http https)]);
-+is($ua->proxy('http'), undef);
-+is($ua->proxy('https'), undef);
-+
-+diag "---- sub.example.net matches *.example.net ----";
-+$ua = useragent(for_url => 'https://sub.example.net');
-+SKIP: {
-+ skip 'paranoid agent not available', 1 unless $have_paranoid_agent;
-+ ok($ua->isa('LWPx::ParanoidAgent'), 'uses ParanoidAgent if possible');
-+}
-+is_deeply([sort @{$ua->protocols_allowed}], [sort qw(http https)]);
-+is($ua->proxy('http'), undef);
-+is($ua->proxy('https'), undef);
-+
-+diag "---- badexample.net does not match *.example.net ----";
-+$ua = useragent(for_url => 'https://badexample.net');
-+ok(! $ua->isa('LWPx::ParanoidAgent'), 'should use proxy instead of ParanoidAgent');
-+is_deeply([sort @{$ua->protocols_allowed}], [sort qw(https)]);
-+is($ua->proxy('https'), 'http://sproxy:8080', 'should use proxy');
-+
-+diag "==== One but not the other ====";
-+$ENV{http_proxy} = 'http://proxy:8080';
-+delete $ENV{https_proxy};
-+delete $ENV{HTTPS_PROXY};
-+delete $ENV{no_proxy};
-+delete $ENV{NO_PROXY};
-+$ua = useragent(for_url => undef);
-+ok(! $ua->isa('LWPx::ParanoidAgent'), 'should use proxy instead of ParanoidAgent');
-+is_deeply([sort @{$ua->protocols_allowed}], [sort qw(http https)]);
-+is($ua->proxy('http'), 'http://proxy:8080', 'should use proxy');
-+is($ua->proxy('https'), 'http://proxy:8080', 'should use proxy');
-+
-+delete $ENV{http_proxy};
-+$ENV{https_proxy} = 'http://sproxy:8080';
-+delete $ENV{HTTPS_PROXY};
-+delete $ENV{no_proxy};
-+delete $ENV{NO_PROXY};
-+$ua = useragent(for_url => undef);
-+ok(! $ua->isa('LWPx::ParanoidAgent'), 'should use proxy instead of ParanoidAgent');
-+is_deeply([sort @{$ua->protocols_allowed}], [sort qw(http https)]);
-+is($ua->proxy('http'), 'http://sproxy:8080', 'should use proxy');
-+is($ua->proxy('https'), 'http://sproxy:8080', 'should use proxy');
-+
-+done_testing;
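For anyone double-checking this backport: once the patches are applied, the
test above can be run on its own from the top of the source tree. Something
like the following should work (assuming a perl with Test::More; the
ParanoidAgent cases are skipped automatically if LWPx::ParanoidAgent is not
installed):

    $ perl -I. t/useragent.t
    $ prove -v -I. t/useragent.t   # same tests, with a summary

The proxy cases only set http_proxy etc. in the test process's own
environment, so no real proxy is needed to run them.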
diff -Nru ikiwiki-3.20141016.4+deb8u1/debian/patches/CVE-2019-9187-4.patch ikiwiki-3.20141016.4/debian/patches/CVE-2019-9187-4.patch
--- ikiwiki-3.20141016.4+deb8u1/debian/patches/CVE-2019-9187-4.patch 2019-03-07 17:35:45.000000000 +1100
+++ ikiwiki-3.20141016.4/debian/patches/CVE-2019-9187-4.patch 1970-01-01 10:00:00.000000000 +1000
@@ -1,159 +0,0 @@
-From 9a275b2f1846d7268c71a740975447e269383849 Mon Sep 17 00:00:00 2001
-From: Simon McVittie <smcv@debian.org>
-Date: Sun, 10 Feb 2019 16:56:41 +0000
-Subject: [PATCH] doc: Document security issues involving LWP::UserAgent
-
-Recommend the LWPx::ParanoidAgent module where appropriate.
-It is particularly important for openid, since unauthenticated users
-can control which URLs that plugin will contact. Conversely, it is
-non-critical for blogspam, since the URL to be contacted is under
-the wiki administrator's control.
-
-Signed-off-by: Simon McVittie <smcv@debian.org>
----
- doc/plugins/aggregate.mdwn | 4 ++++
- doc/plugins/blogspam.mdwn | 2 ++
- doc/plugins/openid.mdwn | 7 +++++--
- doc/plugins/pinger.mdwn | 8 +++++---
- doc/security.mdwn | 49 +++++++++++++++++++++++++++++++++++++++++++++
- doc/tips/using_a_proxy.mdwn | 22 ++++++++++++++++++++
- 6 files changed, 87 insertions(+), 5 deletions(-)
- create mode 100644 doc/tips/using_a_proxy.mdwn
-
---- a/doc/plugins/aggregate.mdwn
-+++ b/doc/plugins/aggregate.mdwn
-@@ -11,6 +11,10 @@
- one. Either the [[htmltidy]] or [[htmlbalance]] plugin is suggested, since
- feeds can easily contain html problems, some of which these plugins can fix.
-
-+Installing the [[!cpan LWPx::ParanoidAgent]] Perl module is strongly
-+recommended. The [[!cpan LWP]] module can also be used, but is susceptible
-+to server-side request forgery.
-+
- ## triggering aggregation
-
- You will need to run ikiwiki periodically from a cron job, passing it the
---- a/doc/plugins/blogspam.mdwn
-+++ b/doc/plugins/blogspam.mdwn
-@@ -11,6 +11,8 @@
- go to your Preferences page, and click the "Comment Moderation" button.
-
- The plugin requires the [[!cpan JSON]] perl module.
-+The [[!cpan LWPx::ParanoidAgent]] Perl module is recommended,
-+although this plugin can also fall back to [[!cpan LWP]].
-
- You can control how content is tested via the `blogspam_options` setting.
- The list of options is [here](http://blogspam.net/api/testComment.html#options).
---- a/doc/plugins/openid.mdwn
-+++ b/doc/plugins/openid.mdwn
-@@ -7,8 +7,11 @@
- The plugin needs the [[!cpan Net::OpenID::Consumer]] perl module.
- Version 1.x is needed in order for OpenID v2 to work.
-
--The [[!cpan LWPx::ParanoidAgent]] perl module is used if available, for
--added security. Finally, the [[!cpan Crypt::SSLeay]] perl module is needed
-+The [[!cpan LWPx::ParanoidAgent]] Perl module is strongly recommended.
-+The [[!cpan LWP]] module can also be used, but is susceptible to
-+server-side request forgery.
-+
-+The [[!cpan Crypt::SSLeay]] Perl module is needed
- to support users entering "https" OpenID urls.
-
- This plugin is enabled by default, but can be turned off if you want to
---- a/doc/plugins/pinger.mdwn
-+++ b/doc/plugins/pinger.mdwn
-@@ -10,9 +10,11 @@
- To configure what URLs to ping, use the [[ikiwiki/directive/ping]]
- [[ikiwiki/directive]].
-
--The [[!cpan LWP]] perl module is used for pinging. Or the [[!cpan
--LWPx::ParanoidAgent]] perl module is used if available, for added security.
--Finally, the [[!cpan Crypt::SSLeay]] perl module is needed to support pinging
-+The [[!cpan LWPx::ParanoidAgent]] Perl module is strongly recommended.
-+The [[!cpan LWP]] module can also be used, but is susceptible
-+to server-side request forgery.
-+
-+The [[!cpan Crypt::SSLeay]] perl module is needed to support pinging
- "https" urls.
-
- By default the pinger will try to ping a site for 15 seconds before timing
---- a/doc/security.mdwn
-+++ b/doc/security.mdwn
-@@ -526,3 +526,52 @@
- able to attach images. Upgrading ImageMagick to a version where
- CVE-2016-3714 has been fixed is also recommended, but at the time of
- writing no such version is available.
-+
-+## Server-side request forgery via aggregate plugin
-+
-+The ikiwiki maintainers discovered that the [[plugins/aggregate]] plugin
-+did not use [[!cpan LWPx::ParanoidAgent]]. On sites where the
-+aggregate plugin is enabled, authorized wiki editors could tell ikiwiki
-+to fetch potentially undesired URIs even if LWPx::ParanoidAgent was
-+installed:
-+
-+* local files via `file:` URIs
-+* other URI schemes that might be misused by attackers, such as `gopher:`
-+* hosts that resolve to loopback IP addresses (127.x.x.x)
-+* hosts that resolve to RFC 1918 IP addresses (192.168.x.x etc.)
-+
-+This could be used by an attacker to publish information that should not have
-+been accessible, cause denial of service by requesting "tarpit" URIs that are
-+slow to respond, or cause undesired side-effects if local web servers implement
-+["unsafe"](https://tools.ietf.org/html/rfc7231#section-4.2.1) GET requests.
-+([[!debcve CVE-2019-9187]])
-+
-+Additionally, if the LWPx::ParanoidAgent module was not installed, the
-+[[plugins/blogspam]], [[plugins/openid]] and [[plugins/pinger]] plugins
-+would fall back to [[!cpan LWP]], which is susceptible to similar attacks.
-+This is unlikely to be a practical problem for the blogspam plugin because
-+the URL it requests is under the control of the wiki administrator, but
-+the openid plugin can request URLs controlled by unauthenticated remote
-+users, and the pinger plugin can request URLs controlled by authorized
-+wiki editors.
-+
-+This is addressed in ikiwiki 3.20190228 as follows, with the same fixes
-+backported to Debian 9 in version 3.20170111.1:
-+
-+* URI schemes other than `http:` and `https:` are not accepted, preventing
-+ access to `file:`, `gopher:`, etc.
-+
-+* If a proxy is [[configured in the ikiwiki setup file|tips/using_a_proxy]],
-+ it is used for all outgoing `http:` and `https:` requests. In this case
-+ the proxy is responsible for blocking any requests that are undesired,
-+ including loopback or RFC 1918 addresses.
-+
-+* If a proxy is not configured, and LWPx::ParanoidAgent is installed,
-+ it will be used. This prevents loopback and RFC 1918 IP addresses, and
-+ sets a timeout to avoid denial of service via "tarpit" URIs.
-+
-+* Otherwise, the ordinary LWP user-agent will be used. This allows requests
-+ to loopback and RFC 1918 IP addresses, and has less robust timeout
-+ behaviour. We are not treating this as a vulnerability: if this
-+ behaviour is not acceptable for your site, please make sure to install
-+ LWPx::ParanoidAgent or disable the affected plugins.
---- /dev/null
-+++ b/doc/tips/using_a_proxy.mdwn
-@@ -0,0 +1,22 @@
-+Some ikiwiki plugins make outgoing HTTP requests from the web server:
-+
-+* [[plugins/aggregate]] (to download Atom and RSS feeds)
-+* [[plugins/blogspam]] (to check whether a comment or edit is spam)
-+* [[plugins/openid]] (to authenticate users)
-+* [[plugins/pinger]] (to ping other ikiwiki installations)
-+
-+If your ikiwiki installation cannot contact the Internet without going
-+through a proxy, you can configure this in the [[setup file|setup]] by
-+setting environment variables:
-+
-+ ENV:
-+ http_proxy: "http://proxy.example.com:8080"
-+ https_proxy: "http://proxy.example.com:8080"
-+ # optional
-+ no_proxy: ".example.com,www.example.org"
-+
-+Note that some plugins will use the configured proxy for all destinations,
-+even if they are listed in `no_proxy`.
-+
-+To avoid server-side request forgery attacks, ensure that your proxy does
-+not allow requests to addresses that are considered to be internal.
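As a quick way to see which proxy (if any) the patched useragent() picks for
a given URL, it can be called directly from a one-off script. This is a rough
sketch, not part of the patch: it assumes it is run from the top of a patched
source tree, and proxy-check.pl is just a hypothetical name.

    #!/usr/bin/perl
    # proxy-check.pl: report the agent class and the proxy that the
    # patched useragent() selects for a URL under the current http_proxy,
    # https_proxy and no_proxy environment variables.
    use warnings;
    use strict;
    use lib '.';    # assumes the patched IkiWiki.pm is in the cwd
    use IkiWiki;    # exports useragent()

    my $url = shift or die "usage: $0 URL\n";
    my ($scheme) = $url =~ m{^(https?):}i or die "http(s) URLs only\n";
    my $ua = useragent(for_url => $url);
    my $proxy = $ua->proxy(lc $scheme);
    print "agent class: ", ref($ua), "\n";
    print "proxy for $scheme: ", (defined $proxy ? $proxy : "(none)"), "\n";

t/useragent.t in the second patch exercises the same behaviour much more
thoroughly.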
diff -Nru ikiwiki-3.20141016.4+deb8u1/debian/patches/series ikiwiki-3.20141016.4/debian/patches/series
--- ikiwiki-3.20141016.4+deb8u1/debian/patches/series 2019-03-07 17:33:02.000000000 +1100
+++ ikiwiki-3.20141016.4/debian/patches/series 1970-01-01 10:00:00.000000000 +1000
@@ -1,4 +0,0 @@
-CVE-2019-9187-1.patch
-CVE-2019-9187-2.patch
-CVE-2019-9187-3.patch
-CVE-2019-9187-4.patch
diff -Nru ikiwiki-3.20141016.4+deb8u1/doc/plugins/aggregate.mdwn ikiwiki-3.20141016.4/doc/plugins/aggregate.mdwn
--- ikiwiki-3.20141016.4+deb8u1/doc/plugins/aggregate.mdwn 2019-03-07 17:33:15.000000000 +1100
+++ ikiwiki-3.20141016.4/doc/plugins/aggregate.mdwn 2017-01-12 05:18:52.000000000 +1100
@@ -11,10 +11,6 @@
one. Either the [[htmltidy]] or [[htmlbalance]] plugin is suggested, since
feeds can easily contain html problems, some of which these plugins can fix.
-Installing the [[!cpan LWPx::ParanoidAgent]] Perl module is strongly
-recommended. The [[!cpan LWP]] module can also be used, but is susceptible
-to server-side request forgery.
-
## triggering aggregation
You will need to run ikiwiki periodically from a cron job, passing it the
diff -Nru ikiwiki-3.20141016.4+deb8u1/doc/plugins/blogspam.mdwn ikiwiki-3.20141016.4/doc/plugins/blogspam.mdwn
--- ikiwiki-3.20141016.4+deb8u1/doc/plugins/blogspam.mdwn 2019-03-07 17:33:15.000000000 +1100
+++ ikiwiki-3.20141016.4/doc/plugins/blogspam.mdwn 2017-01-12 05:18:52.000000000 +1100
@@ -11,8 +11,6 @@
go to your Preferences page, and click the "Comment Moderation" button.
The plugin requires the [[!cpan JSON]] perl module.
-The [[!cpan LWPx::ParanoidAgent]] Perl module is recommended,
-although this plugin can also fall back to [[!cpan LWP]].
You can control how content is tested via the `blogspam_options` setting.
The list of options is [here](http://blogspam.net/api/testComment.html#options).
diff -Nru ikiwiki-3.20141016.4+deb8u1/doc/plugins/openid.mdwn ikiwiki-3.20141016.4/doc/plugins/openid.mdwn
--- ikiwiki-3.20141016.4+deb8u1/doc/plugins/openid.mdwn 2019-03-07 17:33:15.000000000 +1100
+++ ikiwiki-3.20141016.4/doc/plugins/openid.mdwn 2017-01-12 05:18:52.000000000 +1100
@@ -7,11 +7,8 @@
The plugin needs the [[!cpan Net::OpenID::Consumer]] perl module.
Version 1.x is needed in order for OpenID v2 to work.
-The [[!cpan LWPx::ParanoidAgent]] Perl module is strongly recommended.
-The [[!cpan LWP]] module can also be used, but is susceptible to
-server-side request forgery.
-
-The [[!cpan Crypt::SSLeay]] Perl module is needed
+The [[!cpan LWPx::ParanoidAgent]] perl module is used if available, for
+added security. Finally, the [[!cpan Crypt::SSLeay]] perl module is needed
to support users entering "https" OpenID urls.
This plugin is enabled by default, but can be turned off if you want to
diff -Nru ikiwiki-3.20141016.4+deb8u1/doc/plugins/pinger.mdwn ikiwiki-3.20141016.4/doc/plugins/pinger.mdwn
--- ikiwiki-3.20141016.4+deb8u1/doc/plugins/pinger.mdwn 2019-03-07 17:33:15.000000000 +1100
+++ ikiwiki-3.20141016.4/doc/plugins/pinger.mdwn 2017-01-12 05:18:52.000000000 +1100
@@ -10,11 +10,9 @@
To configure what URLs to ping, use the [[ikiwiki/directive/ping]]
[[ikiwiki/directive]].
-The [[!cpan LWPx::ParanoidAgent]] Perl module is strongly recommended.
-The [[!cpan LWP]] module can also be used, but is susceptible
-to server-side request forgery.
-
-The [[!cpan Crypt::SSLeay]] perl module is needed to support pinging
+The [[!cpan LWP]] perl module is used for pinging. Or the [[!cpan
+LWPx::ParanoidAgent]] perl module is used if available, for added security.
+Finally, the [[!cpan Crypt::SSLeay]] perl module is needed to support pinging
"https" urls.
By default the pinger will try to ping a site for 15 seconds before timing
diff -Nru ikiwiki-3.20141016.4+deb8u1/doc/security.mdwn ikiwiki-3.20141016.4/doc/security.mdwn
--- ikiwiki-3.20141016.4+deb8u1/doc/security.mdwn 2019-03-07 17:35:19.000000000 +1100
+++ ikiwiki-3.20141016.4/doc/security.mdwn 2017-01-12 05:18:52.000000000 +1100
@@ -526,52 +526,3 @@
able to attach images. Upgrading ImageMagick to a version where
CVE-2016-3714 has been fixed is also recommended, but at the time of
writing no such version is available.
-
-## Server-side request forgery via aggregate plugin
-
-The ikiwiki maintainers discovered that the [[plugins/aggregate]] plugin
-did not use [[!cpan LWPx::ParanoidAgent]]. On sites where the
-aggregate plugin is enabled, authorized wiki editors could tell ikiwiki
-to fetch potentially undesired URIs even if LWPx::ParanoidAgent was
-installed:
-
-* local files via `file:` URIs
-* other URI schemes that might be misused by attackers, such as `gopher:`
-* hosts that resolve to loopback IP addresses (127.x.x.x)
-* hosts that resolve to RFC 1918 IP addresses (192.168.x.x etc.)
-
-This could be used by an attacker to publish information that should not have
-been accessible, cause denial of service by requesting "tarpit" URIs that are
-slow to respond, or cause undesired side-effects if local web servers implement
-["unsafe"](https://tools.ietf.org/html/rfc7231#section-4.2.1) GET requests.
-([[!debcve CVE-2019-9187]])
-
-Additionally, if the LWPx::ParanoidAgent module was not installed, the
-[[plugins/blogspam]], [[plugins/openid]] and [[plugins/pinger]] plugins
-would fall back to [[!cpan LWP]], which is susceptible to similar attacks.
-This is unlikely to be a practical problem for the blogspam plugin because
-the URL it requests is under the control of the wiki administrator, but
-the openid plugin can request URLs controlled by unauthenticated remote
-users, and the pinger plugin can request URLs controlled by authorized
-wiki editors.
-
-This is addressed in ikiwiki 3.20190228 as follows, with the same fixes
-backported to Debian 9 in version 3.20170111.1:
-
-* URI schemes other than `http:` and `https:` are not accepted, preventing
- access to `file:`, `gopher:`, etc.
-
-* If a proxy is [[configured in the ikiwiki setup file|tips/using_a_proxy]],
- it is used for all outgoing `http:` and `https:` requests. In this case
- the proxy is responsible for blocking any requests that are undesired,
- including loopback or RFC 1918 addresses.
-
-* If a proxy is not configured, and LWPx::ParanoidAgent is installed,
- it will be used. This prevents loopback and RFC 1918 IP addresses, and
- sets a timeout to avoid denial of service via "tarpit" URIs.
-
-* Otherwise, the ordinary LWP user-agent will be used. This allows requests
- to loopback and RFC 1918 IP addresses, and has less robust timeout
- behaviour. We are not treating this as a vulnerability: if this
- behaviour is not acceptable for your site, please make sure to install
- LWPx::ParanoidAgent or disable the affected plugins.
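Of the four layers described above, the first is the one that directly kills
the file:// vector from the original report, and it is stock LWP::UserAgent
behaviour rather than anything ikiwiki-specific. A minimal sketch (assuming
any reasonably recent libwww-perl):

    use warnings;
    use strict;
    use LWP::UserAgent;

    # With protocols_allowed set, LWP refuses disallowed schemes outright,
    # before any connection is attempted.
    my $ua = LWP::UserAgent->new(protocols_allowed => [qw(http https)]);
    my $res = $ua->get('file:///etc/passwd');
    print $res->status_line, "\n";  # a 400, "Access to 'file' URIs has
                                    # been disabled" or similar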
diff -Nru ikiwiki-3.20141016.4+deb8u1/doc/security.mdwn.rej ikiwiki-3.20141016.4/doc/security.mdwn.rej
--- ikiwiki-3.20141016.4+deb8u1/doc/security.mdwn.rej 2019-03-07 17:33:15.000000000 +1100
+++ ikiwiki-3.20141016.4/doc/security.mdwn.rej 1970-01-01 10:00:00.000000000 +1000
@@ -1,55 +0,0 @@
---- doc/security.mdwn
-+++ doc/security.mdwn
-@@ -611,3 +611,52 @@ This was fixed in ikiwiki 3.20170111, with fixes backported to Debian 8
- in version 3.20141016.4.
-
- ([[!debcve CVE-2017-0356]]/OVE-20170111-0001)
-+
-+## Server-side request forgery via aggregate plugin
-+
-+The ikiwiki maintainers discovered that the [[plugins/aggregate]] plugin
-+did not use [[!cpan LWPx::ParanoidAgent]]. On sites where the
-+aggregate plugin is enabled, authorized wiki editors could tell ikiwiki
-+to fetch potentially undesired URIs even if LWPx::ParanoidAgent was
-+installed:
-+
-+* local files via `file:` URIs
-+* other URI schemes that might be misused by attackers, such as `gopher:`
-+* hosts that resolve to loopback IP addresses (127.x.x.x)
-+* hosts that resolve to RFC 1918 IP addresses (192.168.x.x etc.)
-+
-+This could be used by an attacker to publish information that should not have
-+been accessible, cause denial of service by requesting "tarpit" URIs that are
-+slow to respond, or cause undesired side-effects if local web servers implement
-+["unsafe"](https://tools.ietf.org/html/rfc7231#section-4.2.1) GET requests.
-+([[!debcve CVE-2019-9187]])
-+
-+Additionally, if the LWPx::ParanoidAgent module was not installed, the
-+[[plugins/blogspam]], [[plugins/openid]] and [[plugins/pinger]] plugins
-+would fall back to [[!cpan LWP]], which is susceptible to similar attacks.
-+This is unlikely to be a practical problem for the blogspam plugin because
-+the URL it requests is under the control of the wiki administrator, but
-+the openid plugin can request URLs controlled by unauthenticated remote
-+users, and the pinger plugin can request URLs controlled by authorized
-+wiki editors.
-+
-+This is addressed in ikiwiki 3.20190228 as follows, with the same fixes
-+backported to Debian 9 in version 3.20170111.1:
-+
-+* URI schemes other than `http:` and `https:` are not accepted, preventing
-+ access to `file:`, `gopher:`, etc.
-+
-+* If a proxy is [[configured in the ikiwiki setup file|tips/using_a_proxy]],
-+ it is used for all outgoing `http:` and `https:` requests. In this case
-+ the proxy is responsible for blocking any requests that are undesired,
-+ including loopback or RFC 1918 addresses.
-+
-+* If a proxy is not configured, and LWPx::ParanoidAgent is installed,
-+ it will be used. This prevents loopback and RFC 1918 IP addresses, and
-+ sets a timeout to avoid denial of service via "tarpit" URIs.
-+
-+* Otherwise, the ordinary LWP user-agent will be used. This allows requests
-+ to loopback and RFC 1918 IP addresses, and has less robust timeout
-+ behaviour. We are not treating this as a vulnerability: if this
-+ behaviour is not acceptable for your site, please make sure to install
-+ LWPx::ParanoidAgent or disable the affected plugins.
diff -Nru ikiwiki-3.20141016.4+deb8u1/doc/tips/using_a_proxy.mdwn ikiwiki-3.20141016.4/doc/tips/using_a_proxy.mdwn
--- ikiwiki-3.20141016.4+deb8u1/doc/tips/using_a_proxy.mdwn 2019-03-07 17:33:15.000000000 +1100
+++ ikiwiki-3.20141016.4/doc/tips/using_a_proxy.mdwn 1970-01-01 10:00:00.000000000 +1000
@@ -1,22 +0,0 @@
-Some ikiwiki plugins make outgoing HTTP requests from the web server:
-
-* [[plugins/aggregate]] (to download Atom and RSS feeds)
-* [[plugins/blogspam]] (to check whether a comment or edit is spam)
-* [[plugins/openid]] (to authenticate users)
-* [[plugins/pinger]] (to ping other ikiwiki installations)
-
-If your ikiwiki installation cannot contact the Internet without going
-through a proxy, you can configure this in the [[setup file|setup]] by
-setting environment variables:
-
- ENV:
- http_proxy: "http://proxy.example.com:8080"
- https_proxy: "http://proxy.example.com:8080"
- # optional
- no_proxy: ".example.com,www.example.org"
-
-Note that some plugins will use the configured proxy for all destinations,
-even if they are listed in `no_proxy`.
-
-To avoid server-side request forgery attacks, ensure that your proxy does
-not allow requests to addresses that are considered to be internal.
diff -Nru ikiwiki-3.20141016.4+deb8u1/IkiWiki/Plugin/aggregate.pm ikiwiki-3.20141016.4/IkiWiki/Plugin/aggregate.pm
--- ikiwiki-3.20141016.4+deb8u1/IkiWiki/Plugin/aggregate.pm 2019-03-07 17:32:54.000000000 +1100
+++ ikiwiki-3.20141016.4/IkiWiki/Plugin/aggregate.pm 2017-01-12 05:18:52.000000000 +1100
@@ -513,10 +513,7 @@
}
$feed->{feedurl}=pop @urls;
}
- # Using the for_url parameter makes sure we crash if used
- # with an older IkiWiki.pm that didn't automatically try
- # to use LWPx::ParanoidAgent.
- my $ua=useragent(for_url => $feed->{feedurl});
+ my $ua=useragent();
my $res=URI::Fetch->fetch($feed->{feedurl}, UserAgent=>$ua);
if (! $res) {
$feed->{message}=URI::Fetch->errstr;
diff -Nru ikiwiki-3.20141016.4+deb8u1/IkiWiki/Plugin/blogspam.pm ikiwiki-3.20141016.4/IkiWiki/Plugin/blogspam.pm
--- ikiwiki-3.20141016.4+deb8u1/IkiWiki/Plugin/blogspam.pm 2019-03-07 17:32:54.000000000 +1100
+++ ikiwiki-3.20141016.4/IkiWiki/Plugin/blogspam.pm 2017-01-12 05:18:52.000000000 +1100
@@ -57,10 +57,18 @@
};
error $@ if $@;
- # Using the for_url parameter makes sure we crash if used
- # with an older IkiWiki.pm that didn't automatically try
- # to use LWPx::ParanoidAgent.
- $client=useragent(for_url => $config{blogspam_server});
+ eval q{use LWPx::ParanoidAgent};
+ if (!$@) {
+ $client=LWPx::ParanoidAgent->new(agent => $config{useragent});
+ }
+ else {
+ eval q{use LWP};
+ if ($@) {
+ error $@;
+ return;
+ }
+ $client=useragent();
+ }
}
sub checkcontent (@) {
diff -Nru ikiwiki-3.20141016.4+deb8u1/IkiWiki/Plugin/openid.pm ikiwiki-3.20141016.4/IkiWiki/Plugin/openid.pm
--- ikiwiki-3.20141016.4+deb8u1/IkiWiki/Plugin/openid.pm 2019-03-07 17:32:54.000000000 +1100
+++ ikiwiki-3.20141016.4/IkiWiki/Plugin/openid.pm 2017-01-12 05:18:52.000000000 +1100
@@ -237,10 +237,14 @@
eval q{use Net::OpenID::Consumer};
error($@) if $@;
- # We pass the for_url parameter, even though it's undef, because
- # that will make sure we crash if used with an older IkiWiki.pm
- # that didn't automatically try to use LWPx::ParanoidAgent.
- my $ua=useragent(for_url => undef);
+ my $ua;
+ eval q{use LWPx::ParanoidAgent};
+ if (! $@) {
+ $ua=LWPx::ParanoidAgent->new(agent => $config{useragent});
+ }
+ else {
+ $ua=useragent();
+ }
# Store the secret in the session.
my $secret=$session->param("openid_secret");
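A word on the "make sure we crash" comments in the openid and pinger hunks:
as I read it, the guard works because the old useragent() was declared with
an empty prototype, so a call that passes for_url fails as soon as the
calling plugin is compiled against an unpatched IkiWiki.pm, instead of
silently getting a non-paranoid agent. A standalone sketch of the mechanism
(sub names hypothetical, not from the patch):

    use warnings;
    use strict;

    sub old_useragent () { }     # empty prototype, as in the old IkiWiki.pm
    sub new_useragent (@) { }    # patched version accepts key/value options

    eval q{ old_useragent(for_url => undef) };
    print $@;   # "Too many arguments for main::old_useragent at ..."
    eval q{ new_useragent(for_url => undef) };
    print $@ ? $@ : "patched prototype accepts for_url\n";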
diff -Nru ikiwiki-3.20141016.4+deb8u1/IkiWiki/Plugin/pinger.pm ikiwiki-3.20141016.4/IkiWiki/Plugin/pinger.pm
--- ikiwiki-3.20141016.4+deb8u1/IkiWiki/Plugin/pinger.pm 2019-03-07 17:32:54.000000000 +1100
+++ ikiwiki-3.20141016.4/IkiWiki/Plugin/pinger.pm 2017-01-12 05:18:52.000000000 +1100
@@ -70,16 +70,17 @@
eval q{use Net::INET6Glue::INET_is_INET6}; # may not be available
my $ua;
- eval {
- # We pass the for_url parameter, even though it's
- # undef, because that will make sure we crash if used
- # with an older IkiWiki.pm that didn't automatically
- # try to use LWPx::ParanoidAgent.
- $ua=useragent(for_url => undef);
- };
- if ($@) {
- debug(gettext("LWP not found, not pinging").": $@");
- return;
+ eval q{use LWPx::ParanoidAgent};
+ if (!$@) {
+ $ua=LWPx::ParanoidAgent->new(agent => $config{useragent});
+ }
+ else {
+ eval q{use LWP};
+ if ($@) {
+ debug(gettext("LWP not found, not pinging"));
+ return;
+ }
+ $ua=useragent();
}
$ua->timeout($config{pinger_timeout} || 15);
diff -Nru ikiwiki-3.20141016.4+deb8u1/IkiWiki.pm ikiwiki-3.20141016.4/IkiWiki.pm
--- ikiwiki-3.20141016.4+deb8u1/IkiWiki.pm 2019-03-07 17:32:54.000000000 +1100
+++ ikiwiki-3.20141016.4/IkiWiki.pm 2017-01-12 05:18:52.000000000 +1100
@@ -2367,131 +2367,12 @@
$autofiles{$file}{generator}=$generator;
}
-sub useragent (@) {
- my %params = @_;
- my $for_url = delete $params{for_url};
- # Fail safe, in case a plugin calling this function is relying on
- # a future parameter to make the UA more strict
- foreach my $key (keys %params) {
- error "Internal error: useragent(\"$key\" => ...) not understood";
- }
-
- eval q{use LWP};
- error($@) if $@;
-
- my %args = (
- agent => $config{useragent},
+sub useragent () {
+ return LWP::UserAgent->new(
cookie_jar => $config{cookiejar},
- env_proxy => 0,
- protocols_allowed => [qw(http https)],
+ env_proxy => 1, # respect proxy env vars
+ agent => $config{useragent},
);
- my %proxies;
-
- if (defined $for_url) {
- # We know which URL we're going to fetch, so we can choose
- # whether it's going to go through a proxy or not.
- #
- # We reimplement http_proxy, https_proxy and no_proxy here, so
- # that we are not relying on LWP implementing them exactly the
- # same way we do.
-
- eval q{use URI};
- error($@) if $@;
-
- my $proxy;
- my $uri = URI->new($for_url);
-
- if ($uri->scheme eq 'http') {
- $proxy = $ENV{http_proxy};
- # HTTP_PROXY is deliberately not implemented
- # because the HTTP_* namespace is also used by CGI
- }
- elsif ($uri->scheme eq 'https') {
- $proxy = $ENV{https_proxy};
- $proxy = $ENV{HTTPS_PROXY} unless defined $proxy;
- }
- else {
- $proxy = undef;
- }
-
- foreach my $var (qw(no_proxy NO_PROXY)) {
- my $no_proxy = $ENV{$var};
- if (defined $no_proxy) {
- foreach my $domain (split /\s*,\s*/, $no_proxy) {
- if ($domain =~ s/^\*?\.//) {
- # no_proxy="*.example.com" or
- # ".example.com": match suffix
- # against .example.com
- if ($uri->host =~ m/(^|\.)\Q$domain\E$/i) {
- $proxy = undef;
- }
- }
- else {
- # no_proxy="example.com":
- # match exactly example.com
- if (lc $uri->host eq lc $domain) {
- $proxy = undef;
- }
- }
- }
- }
- }
-
- if (defined $proxy) {
- $proxies{$uri->scheme} = $proxy;
- # Paranoia: make sure we can't bypass the proxy
- $args{protocols_allowed} = [$uri->scheme];
- }
- }
- else {
- # The plugin doesn't know yet which URL(s) it's going to
- # fetch, so we have to make some conservative assumptions.
- my $http_proxy = $ENV{http_proxy};
- my $https_proxy = $ENV{https_proxy};
- $https_proxy = $ENV{HTTPS_PROXY} unless defined $https_proxy;
-
- # We don't respect no_proxy here: if we are not using the
- # paranoid user-agent, then we need to give the proxy the
- # opportunity to reject undesirable requests.
-
- # If we have one, we need the other: otherwise, neither
- # LWPx::ParanoidAgent nor the proxy would have the
- # opportunity to filter requests for the other protocol.
- if (defined $https_proxy && defined $http_proxy) {
- %proxies = (http => $http_proxy, https => $https_proxy);
- }
- elsif (defined $https_proxy) {
- %proxies = (http => $https_proxy, https => $https_proxy);
- }
- elsif (defined $http_proxy) {
- %proxies = (http => $http_proxy, https => $http_proxy);
- }
-
- }
-
- if (scalar keys %proxies) {
- # The configured proxy is responsible for deciding which
- # URLs are acceptable to fetch and which URLs are not.
- my $ua = LWP::UserAgent->new(%args);
- foreach my $scheme (@{$ua->protocols_allowed}) {
- unless ($proxies{$scheme}) {
- error "internal error: $scheme is allowed but has no proxy";
- }
- }
- # We can't pass the proxies in %args because that only
- # works since LWP 6.24.
- foreach my $scheme (keys %proxies) {
- $ua->proxy($scheme, $proxies{$scheme});
- }
- return $ua;
- }
-
- eval q{use LWPx::ParanoidAgent};
- if ($@) {
- print STDERR "warning: installing LWPx::ParanoidAgent is recommended\n";
- return LWP::UserAgent->new(%args);
- }
- return LWPx::ParanoidAgent->new(%args);
}
sub sortspec_translate ($$) {
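Since the hunk above shows that no_proxy/NO_PROXY handling is reimplemented
in useragent() rather than delegated to LWP, the matching rule is worth
spelling out: "*.example.com" and ".example.com" both match example.com and
any of its subdomains, a bare "example.com" matches only exactly that host,
and the two environment variables are merged. A standalone sketch of the
same rule (my paraphrase, not code from the patch):

    use warnings;
    use strict;

    # True if $host should bypass the proxy under no_proxy-style lists.
    sub bypass_proxy {
        my ($host, @no_proxy) = @_;
        foreach my $list (grep { defined } @no_proxy) {
            foreach my $domain (split /\s*,\s*/, $list) {
                if ($domain =~ s/^\*?\.//) {
                    # "*.example.com" / ".example.com": suffix match
                    return 1 if $host =~ m/(^|\.)\Q$domain\E$/i;
                }
                else {
                    # "example.com": exact match only
                    return 1 if lc $host eq lc $domain;
                }
            }
        }
        return 0;
    }

    # Mirrors two of the cases in t/useragent.t:
    print bypass_proxy('sub.example.net', '*.example.net,example.com')
        ? "bypass\n" : "proxy\n";    # bypass
    print bypass_proxy('badexample.net', '*.example.net,example.com')
        ? "bypass\n" : "proxy\n";    # proxy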
diff -Nru ikiwiki-3.20141016.4+deb8u1/.pc/applied-patches ikiwiki-3.20141016.4/.pc/applied-patches
--- ikiwiki-3.20141016.4+deb8u1/.pc/applied-patches 2019-03-07 17:33:15.000000000 +1100
+++ ikiwiki-3.20141016.4/.pc/applied-patches 1970-01-01 10:00:00.000000000 +1000
@@ -1,4 +0,0 @@
-CVE-2019-9187-1.patch
-CVE-2019-9187-2.patch
-CVE-2019-9187-3.patch
-CVE-2019-9187-4.patch
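(.pc is quilt's working state, and .pc/applied-patches is simply its record
of which entries from debian/patches/series are currently applied, which is
why the two listings should match entry for entry. If they ever get out of
sync, the usual remedy is to rewind and re-apply the series with standard
quilt usage:

    $ quilt pop -a    # unapply all patches, rewinding .pc
    $ quilt push -a   # re-apply debian/patches/series in order
)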
diff -Nru ikiwiki-3.20141016.4+deb8u1/.pc/CVE-2019-9187-1.patch/IkiWiki.pm ikiwiki-3.20141016.4/.pc/CVE-2019-9187-1.patch/IkiWiki.pm
--- ikiwiki-3.20141016.4+deb8u1/.pc/CVE-2019-9187-1.patch/IkiWiki.pm 2019-03-07 17:32:28.000000000 +1100
+++ ikiwiki-3.20141016.4/.pc/CVE-2019-9187-1.patch/IkiWiki.pm 1970-01-01 10:00:00.000000000 +1000
@@ -1,3028 +0,0 @@
-#!/usr/bin/perl
-
-package IkiWiki;
-
-use warnings;
-use strict;
-use Encode;
-use URI::Escape q{uri_escape_utf8};
-use POSIX ();
-use Storable;
-use open qw{:utf8 :std};
-
-use vars qw{%config %links %oldlinks %pagemtime %pagectime %pagecase
- %pagestate %wikistate %renderedfiles %oldrenderedfiles
- %pagesources %delpagesources %destsources %depends %depends_simple
- @mass_depends %hooks %forcerebuild %loaded_plugins %typedlinks
- %oldtypedlinks %autofiles @underlayfiles $lastrev $phase};
-
-use Exporter q{import};
-our @EXPORT = qw(hook debug error htmlpage template template_depends
- deptype add_depends pagespec_match pagespec_match_list bestlink
- htmllink readfile writefile pagetype srcfile pagename
- displaytime strftime_utf8 will_render gettext ngettext urlto targetpage
- add_underlay pagetitle titlepage linkpage newpagefile
- inject add_link add_autofile useragent
- %config %links %pagestate %wikistate %renderedfiles
- %pagesources %destsources %typedlinks);
-our $VERSION = 3.00; # plugin interface version, next is ikiwiki version
-our $version='unknown'; # VERSION_AUTOREPLACE done by Makefile, DNE
-our $installdir='/usr'; # INSTALLDIR_AUTOREPLACE done by Makefile, DNE
-
-# Page dependency types.
-our $DEPEND_CONTENT=1;
-our $DEPEND_PRESENCE=2;
-our $DEPEND_LINKS=4;
-
-# Phases of processing.
-sub PHASE_SCAN () { 0 }
-sub PHASE_RENDER () { 1 }
-$phase = PHASE_SCAN;
-
-# Optimisation.
-use Memoize;
-memoize("abs2rel");
-memoize("sortspec_translate");
-memoize("pagespec_translate");
-memoize("template_file");
-
-sub getsetup () {
- wikiname => {
- type => "string",
- default => "wiki",
- description => "name of the wiki",
- safe => 1,
- rebuild => 1,
- },
- adminemail => {
- type => "string",
- default => undef,
- example => 'me@example.com',
- description => "contact email for wiki",
- safe => 1,
- rebuild => 0,
- },
- adminuser => {
- type => "string",
- default => [],
- description => "users who are wiki admins",
- safe => 1,
- rebuild => 0,
- },
- banned_users => {
- type => "string",
- default => [],
- description => "users who are banned from the wiki",
- safe => 1,
- rebuild => 0,
- },
- srcdir => {
- type => "string",
- default => undef,
- example => "$ENV{HOME}/wiki",
- description => "where the source of the wiki is located",
- safe => 0, # path
- rebuild => 1,
- },
- destdir => {
- type => "string",
- default => undef,
- example => "/var/www/wiki",
- description => "where to build the wiki",
- safe => 0, # path
- rebuild => 1,
- },
- url => {
- type => "string",
- default => '',
- example => "http://example.com/wiki",
- description => "base url to the wiki",
- safe => 1,
- rebuild => 1,
- },
- cgiurl => {
- type => "string",
- default => '',
- example => "http://example.com/wiki/ikiwiki.cgi",
- description => "url to the ikiwiki.cgi",
- safe => 1,
- rebuild => 1,
- },
- reverse_proxy => {
- type => "boolean",
- default => 0,
- description => "do not adjust cgiurl if CGI is accessed via different URL",
- advanced => 0,
- safe => 1,
- rebuild => 0, # only affects CGI requests
- },
- cgi_wrapper => {
- type => "string",
- default => '',
- example => "/var/www/wiki/ikiwiki.cgi",
- description => "filename of cgi wrapper to generate",
- safe => 0, # file
- rebuild => 0,
- },
- cgi_wrappermode => {
- type => "string",
- default => '06755',
- description => "mode for cgi_wrapper (can safely be made suid)",
- safe => 0,
- rebuild => 0,
- },
- cgi_overload_delay => {
- type => "string",
- default => '',
- example => "10",
- description => "number of seconds to delay CGI requests when overloaded",
- safe => 1,
- rebuild => 0,
- },
- cgi_overload_message => {
- type => "string",
- default => '',
- example => "Please wait",
- description => "message to display when overloaded (may contain html)",
- safe => 1,
- rebuild => 0,
- },
- only_committed_changes => {
- type => "boolean",
- default => 0,
- description => "enable optimization of only refreshing committed changes?",
- safe => 1,
- rebuild => 0,
- },
- rcs => {
- type => "string",
- default => '',
- description => "rcs backend to use",
- safe => 0, # don't allow overriding
- rebuild => 0,
- },
- default_plugins => {
- type => "internal",
- default => [qw{mdwn link inline meta htmlscrubber passwordauth
- openid signinedit lockedit conditional
- recentchanges parentlinks editpage
- templatebody}],
- description => "plugins to enable by default",
- safe => 0,
- rebuild => 1,
- },
- add_plugins => {
- type => "string",
- default => [],
- description => "plugins to add to the default configuration",
- safe => 1,
- rebuild => 1,
- },
- disable_plugins => {
- type => "string",
- default => [],
- description => "plugins to disable",
- safe => 1,
- rebuild => 1,
- },
- templatedir => {
- type => "string",
- default => "$installdir/share/ikiwiki/templates",
- description => "additional directory to search for template files",
- advanced => 1,
- safe => 0, # path
- rebuild => 1,
- },
- underlaydir => {
- type => "string",
- default => "$installdir/share/ikiwiki/basewiki",
- description => "base wiki source location",
- advanced => 1,
- safe => 0, # path
- rebuild => 0,
- },
- underlaydirbase => {
- type => "internal",
- default => "$installdir/share/ikiwiki",
- description => "parent directory containing additional underlays",
- safe => 0,
- rebuild => 0,
- },
- wrappers => {
- type => "internal",
- default => [],
- description => "wrappers to generate",
- safe => 0,
- rebuild => 0,
- },
- underlaydirs => {
- type => "internal",
- default => [],
- description => "additional underlays to use",
- safe => 0,
- rebuild => 0,
- },
- verbose => {
- type => "boolean",
- example => 1,
- description => "display verbose messages?",
- safe => 1,
- rebuild => 0,
- },
- syslog => {
- type => "boolean",
- example => 1,
- description => "log to syslog?",
- safe => 1,
- rebuild => 0,
- },
- usedirs => {
- type => "boolean",
- default => 1,
- description => "create output files named page/index.html?",
- safe => 0, # changing requires manual transition
- rebuild => 1,
- },
- prefix_directives => {
- type => "boolean",
- default => 1,
- description => "use '!'-prefixed preprocessor directives?",
- safe => 0, # changing requires manual transition
- rebuild => 1,
- },
- indexpages => {
- type => "boolean",
- default => 0,
- description => "use page/index.mdwn source files",
- safe => 1,
- rebuild => 1,
- },
- discussion => {
- type => "boolean",
- default => 1,
- description => "enable Discussion pages?",
- safe => 1,
- rebuild => 1,
- },
- discussionpage => {
- type => "string",
- default => gettext("Discussion"),
- description => "name of Discussion pages",
- safe => 1,
- rebuild => 1,
- },
- html5 => {
- type => "boolean",
- default => 0,
- description => "generate HTML5?",
- advanced => 0,
- safe => 1,
- rebuild => 1,
- },
- sslcookie => {
- type => "boolean",
- default => 0,
- description => "only send cookies over SSL connections?",
- advanced => 1,
- safe => 1,
- rebuild => 0,
- },
- default_pageext => {
- type => "string",
- default => "mdwn",
- description => "extension to use for new pages",
- safe => 0, # not sanitized
- rebuild => 0,
- },
- htmlext => {
- type => "string",
- default => "html",
- description => "extension to use for html files",
- safe => 0, # not sanitized
- rebuild => 1,
- },
- timeformat => {
- type => "string",
- default => '%c',
- description => "strftime format string to display date",
- advanced => 1,
- safe => 1,
- rebuild => 1,
- },
- locale => {
- type => "string",
- default => undef,
- example => "en_US.UTF-8",
- description => "UTF-8 locale to use",
- advanced => 1,
- safe => 0,
- rebuild => 1,
- },
- userdir => {
- type => "string",
- default => "",
- example => "users",
- description => "put user pages below specified page",
- safe => 1,
- rebuild => 1,
- },
- numbacklinks => {
- type => "integer",
- default => 10,
- description => "how many backlinks to show before hiding excess (0 to show all)",
- safe => 1,
- rebuild => 1,
- },
- hardlink => {
- type => "boolean",
- default => 0,
- description => "attempt to hardlink source files? (optimisation for large files)",
- advanced => 1,
- safe => 0, # paranoia
- rebuild => 0,
- },
- umask => {
- type => "string",
- example => "public",
- description => "force ikiwiki to use a particular umask (keywords public, group or private, or a number)",
- advanced => 1,
- safe => 0, # paranoia
- rebuild => 0,
- },
- wrappergroup => {
- type => "string",
- example => "ikiwiki",
- description => "group for wrappers to run in",
- advanced => 1,
- safe => 0, # paranoia
- rebuild => 0,
- },
- libdir => {
- type => "string",
- default => "",
- example => "$ENV{HOME}/.ikiwiki/",
- description => "extra library and plugin directory",
- advanced => 1,
- safe => 0, # directory
- rebuild => 0,
- },
- ENV => {
- type => "string",
- default => {},
- description => "environment variables",
- safe => 0, # paranoia
- rebuild => 0,
- },
- timezone => {
- type => "string",
- default => "",
- example => "US/Eastern",
- description => "time zone name",
- safe => 1,
- rebuild => 1,
- },
- include => {
- type => "string",
- default => undef,
- example => '^\.htaccess$',
- description => "regexp of normally excluded files to include",
- advanced => 1,
- safe => 0, # regexp
- rebuild => 1,
- },
- exclude => {
- type => "string",
- default => undef,
- example => '^(*\.private|Makefile)$',
- description => "regexp of files that should be skipped",
- advanced => 1,
- safe => 0, # regexp
- rebuild => 1,
- },
- wiki_file_prune_regexps => {
- type => "internal",
- default => [qr/(^|\/)\.\.(\/|$)/, qr/^\//, qr/^\./, qr/\/\./,
- qr/\.x?html?$/, qr/\.ikiwiki-new$/,
- qr/(^|\/).svn\//, qr/.arch-ids\//, qr/{arch}\//,
- qr/(^|\/)_MTN\//, qr/(^|\/)_darcs\//,
- qr/(^|\/)CVS\//, qr/\.dpkg-tmp$/],
- description => "regexps of source files to ignore",
- safe => 0,
- rebuild => 1,
- },
- wiki_file_chars => {
- type => "string",
- description => "specifies the characters that are allowed in source filenames",
- default => "-[:alnum:]+/.:_",
- safe => 0,
- rebuild => 1,
- },
- wiki_file_regexp => {
- type => "internal",
- description => "regexp of legal source files",
- safe => 0,
- rebuild => 1,
- },
- web_commit_regexp => {
- type => "internal",
- default => qr/^web commit (by (.*?(?=: |$))|from ([0-9a-fA-F:.]+[0-9a-fA-F])):?(.*)/,
- description => "regexp to parse web commits from logs",
- safe => 0,
- rebuild => 0,
- },
- cgi => {
- type => "internal",
- default => 0,
- description => "run as a cgi",
- safe => 0,
- rebuild => 0,
- },
- cgi_disable_uploads => {
- type => "internal",
- default => 1,
- description => "whether CGI should accept file uploads",
- safe => 0,
- rebuild => 0,
- },
- post_commit => {
- type => "internal",
- default => 0,
- description => "run as a post-commit hook",
- safe => 0,
- rebuild => 0,
- },
- rebuild => {
- type => "internal",
- default => 0,
- description => "running in rebuild mode",
- safe => 0,
- rebuild => 0,
- },
- setup => {
- type => "internal",
- default => undef,
- description => "running in setup mode",
- safe => 0,
- rebuild => 0,
- },
- clean => {
- type => "internal",
- default => 0,
- description => "running in clean mode",
- safe => 0,
- rebuild => 0,
- },
- refresh => {
- type => "internal",
- default => 0,
- description => "running in refresh mode",
- safe => 0,
- rebuild => 0,
- },
- test_receive => {
- type => "internal",
- default => 0,
- description => "running in receive test mode",
- safe => 0,
- rebuild => 0,
- },
- wrapper_background_command => {
- type => "internal",
- default => '',
- description => "background shell command to run",
- safe => 0,
- rebuild => 0,
- },
- gettime => {
- type => "internal",
- description => "running in gettime mode",
- safe => 0,
- rebuild => 0,
- },
- w3mmode => {
- type => "internal",
- default => 0,
- description => "running in w3mmode",
- safe => 0,
- rebuild => 0,
- },
- wikistatedir => {
- type => "internal",
- default => undef,
- description => "path to the .ikiwiki directory holding ikiwiki state",
- safe => 0,
- rebuild => 0,
- },
- setupfile => {
- type => "internal",
- default => undef,
- description => "path to setup file",
- safe => 0,
- rebuild => 0,
- },
- setuptype => {
- type => "internal",
- default => "Yaml",
- description => "perl class to use to dump setup file",
- safe => 0,
- rebuild => 0,
- },
- allow_symlinks_before_srcdir => {
- type => "boolean",
- default => 0,
- description => "allow symlinks in the path leading to the srcdir (potentially insecure)",
- safe => 0,
- rebuild => 0,
- },
- cookiejar => {
- type => "string",
- default => { file => "$ENV{HOME}/.ikiwiki/cookies" },
- description => "cookie control",
- safe => 0, # hooks into perl module internals
- rebuild => 0,
- },
- useragent => {
- type => "string",
- default => "ikiwiki/$version",
- example => "Wget/1.13.4 (linux-gnu)",
- description => "set custom user agent string for outbound HTTP requests e.g. when fetching aggregated RSS feeds",
- safe => 0,
- rebuild => 0,
- },
-}
-
-sub defaultconfig () {
- my %s=getsetup();
- my @ret;
- foreach my $key (keys %s) {
- push @ret, $key, $s{$key}->{default};
- }
- return @ret;
-}
-
-# URL to top of wiki as a path starting with /, valid from any wiki page or
-# the CGI; if that's not possible, an absolute URL. Either way, it ends with /
-my $local_url;
-# URL to CGI script, similar to $local_url
-my $local_cgiurl;
-
-sub checkconfig () {
- # locale stuff; avoid LC_ALL since it overrides everything
- if (defined $ENV{LC_ALL}) {
- $ENV{LANG} = $ENV{LC_ALL};
- delete $ENV{LC_ALL};
- }
- if (defined $config{locale}) {
- if (POSIX::setlocale(&POSIX::LC_ALL, $config{locale})) {
- $ENV{LANG}=$config{locale};
- define_gettext();
- }
- }
-
- if (! defined $config{wiki_file_regexp}) {
- $config{wiki_file_regexp}=qr/(^[$config{wiki_file_chars}]+$)/;
- }
-
- if (ref $config{ENV} eq 'HASH') {
- foreach my $val (keys %{$config{ENV}}) {
- $ENV{$val}=$config{ENV}{$val};
- }
- }
- if (defined $config{timezone} && length $config{timezone}) {
- $ENV{TZ}=$config{timezone};
- }
- else {
- $config{timezone}=$ENV{TZ};
- }
-
- if ($config{w3mmode}) {
- eval q{use Cwd q{abs_path}};
- error($@) if $@;
- $config{srcdir}=possibly_foolish_untaint(abs_path($config{srcdir}));
- $config{destdir}=possibly_foolish_untaint(abs_path($config{destdir}));
- $config{cgiurl}="file:///\$LIB/ikiwiki-w3m.cgi/".$config{cgiurl}
- unless $config{cgiurl} =~ m!file:///!;
- $config{url}="file://".$config{destdir};
- }
-
- if ($config{cgi} && ! length $config{url}) {
- error(gettext("Must specify url to wiki with --url when using --cgi"));
- }
-
- if (defined $config{url} && length $config{url}) {
- eval q{use URI};
- my $baseurl = URI->new($config{url});
-
- $local_url = $baseurl->path . "/";
- $local_cgiurl = undef;
-
- if (length $config{cgiurl}) {
- my $cgiurl = URI->new($config{cgiurl});
-
- $local_cgiurl = $cgiurl->path;
-
- if ($cgiurl->scheme eq 'https' &&
- $baseurl->scheme eq 'http') {
- # We assume that the same content is available
- # over both http and https, because if it
- # wasn't, accessing the static content
- # from the CGI would be mixed-content,
- # which would be a security flaw.
-
- if ($cgiurl->authority ne $baseurl->authority) {
- # use protocol-relative URL for
- # static content
- $local_url = "$config{url}/";
- $local_url =~ s{^http://}{//};
- }
- # else use host-relative URL for static content
-
- # either way, CGI needs to be absolute
- $local_cgiurl = $config{cgiurl};
- }
- elsif ($cgiurl->scheme ne $baseurl->scheme) {
- # too far apart, fall back to absolute URLs
- $local_url = "$config{url}/";
- $local_cgiurl = $config{cgiurl};
- }
- elsif ($cgiurl->authority ne $baseurl->authority) {
- # slightly too far apart, fall back to
- # protocol-relative URLs
- $local_url = "$config{url}/";
- $local_url =~ s{^https?://}{//};
- $local_cgiurl = $config{cgiurl};
- $local_cgiurl =~ s{^https?://}{//};
- }
- # else keep host-relative URLs
- }
-
- $local_url =~ s{//$}{/};
- }
- else {
- $local_cgiurl = $config{cgiurl};
- }
-
- $config{wikistatedir}="$config{srcdir}/.ikiwiki"
- unless exists $config{wikistatedir} && defined $config{wikistatedir};
-
- if (defined $config{umask}) {
- my $u = possibly_foolish_untaint($config{umask});
-
- if ($u =~ m/^\d+$/) {
- umask($u);
- }
- elsif ($u eq 'private') {
- umask(077);
- }
- elsif ($u eq 'group') {
- umask(027);
- }
- elsif ($u eq 'public') {
- umask(022);
- }
- else {
- error(sprintf(gettext("unsupported umask setting %s"), $u));
- }
- }
-
- run_hooks(checkconfig => sub { shift->() });
-
- return 1;
-}
-
-sub listplugins () {
- my %ret;
-
- foreach my $dir (@INC, $config{libdir}) {
- next unless defined $dir && length $dir;
- foreach my $file (glob("$dir/IkiWiki/Plugin/*.pm")) {
- my ($plugin)=$file=~/.*\/(.*)\.pm$/;
- $ret{$plugin}=1;
- }
- }
- foreach my $dir ($config{libdir}, "$installdir/lib/ikiwiki") {
- next unless defined $dir && length $dir;
- foreach my $file (glob("$dir/plugins/*")) {
- $ret{basename($file)}=1 if -x $file;
- }
- }
-
- return keys %ret;
-}
-
-sub loadplugins () {
- if (defined $config{libdir} && length $config{libdir}) {
- unshift @INC, possibly_foolish_untaint($config{libdir});
- }
-
- foreach my $plugin (@{$config{default_plugins}}, @{$config{add_plugins}}) {
- loadplugin($plugin);
- }
-
- if ($config{rcs}) {
- if (exists $hooks{rcs}) {
- error(gettext("cannot use multiple rcs plugins"));
- }
- loadplugin($config{rcs});
- }
- if (! exists $hooks{rcs}) {
- loadplugin("norcs");
- }
-
- run_hooks(getopt => sub { shift->() });
- if (grep /^-/, @ARGV) {
- print STDERR "Unknown option (or missing parameter): $_\n"
- foreach grep /^-/, @ARGV;
- usage();
- }
-
- return 1;
-}
-
-sub loadplugin ($;$) {
- my $plugin=shift;
- my $force=shift;
-
- return if ! $force && grep { $_ eq $plugin} @{$config{disable_plugins}};
-
- foreach my $dir (defined $config{libdir} ? possibly_foolish_untaint($config{libdir}) : undef,
- "$installdir/lib/ikiwiki") {
- if (defined $dir && -x "$dir/plugins/$plugin") {
- eval { require IkiWiki::Plugin::external };
- if ($@) {
- my $reason=$@;
- error(sprintf(gettext("failed to load external plugin needed for %s plugin: %s"), $plugin, $reason));
- }
- import IkiWiki::Plugin::external "$dir/plugins/$plugin";
- $loaded_plugins{$plugin}=1;
- return 1;
- }
- }
-
- my $mod="IkiWiki::Plugin::".possibly_foolish_untaint($plugin);
- eval qq{use $mod};
- if ($@) {
- error("Failed to load plugin $mod: $@");
- }
- $loaded_plugins{$plugin}=1;
- return 1;
-}
-
-sub error ($;$) {
- my $message=shift;
- my $cleaner=shift;
- log_message('err' => $message) if $config{syslog};
- if (defined $cleaner) {
- $cleaner->();
- }
- die $message."\n";
-}
-
-sub debug ($) {
- return unless $config{verbose};
- return log_message(debug => @_);
-}
-
-my $log_open=0;
-my $log_failed=0;
-sub log_message ($$) {
- my $type=shift;
-
- if ($config{syslog}) {
- require Sys::Syslog;
- if (! $log_open) {
- Sys::Syslog::setlogsock('unix');
- Sys::Syslog::openlog('ikiwiki', '', 'user');
- $log_open=1;
- }
- eval {
- # keep a copy to avoid editing the original config repeatedly
- my $wikiname = $config{wikiname};
- utf8::encode($wikiname);
- Sys::Syslog::syslog($type, "[$wikiname] %s", join(" ", @_));
- };
- if ($@) {
- print STDERR "failed to syslog: $@" unless $log_failed;
- $log_failed=1;
- print STDERR "@_\n";
- }
- return $@;
- }
- elsif (! $config{cgi}) {
- return print "@_\n";
- }
- else {
- return print STDERR "@_\n";
- }
-}
-
-sub possibly_foolish_untaint ($) {
- my $tainted=shift;
- my ($untainted)=$tainted=~/(.*)/s;
- return $untainted;
-}
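
possibly_foolish_untaint is ikiwiki's escape hatch for Perl's taint mode: under perl -T, data from outside the program cannot reach the filesystem or exec until it has been laundered through a regex capture, and this helper launders a value without validating it at all (hence the name). A minimal sketch of the same idiom, with a whitelist pattern added, which is the safer form; the pattern and variable names here are illustrative only:

	#!/usr/bin/perl -T
	use strict;
	use warnings;
	use Scalar::Util qw(tainted);

	my $dir = $ENV{HOME};    # tainted under -T: comes from the environment
	if (defined $dir && $dir =~ m{^([-/\w.]+)$}) {
		my $untainted = $1;    # regex captures are never tainted
		print "usable: $untainted\n" unless tainted($untainted);
	}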
-
-sub basename ($) {
- my $file=shift;
-
- $file=~s!.*/+!!;
- return $file;
-}
-
-sub dirname ($) {
- my $file=shift;
-
- $file=~s!/*[^/]+$!!;
- return $file;
-}
-
-sub isinternal ($) {
- my $page=shift;
- return exists $pagesources{$page} &&
- $pagesources{$page} =~ /\._([^.]+)$/;
-}
-
-sub pagetype ($) {
- my $file=shift;
-
- if ($file =~ /\.([^.]+)$/) {
- return $1 if exists $hooks{htmlize}{$1};
- }
- my $base=basename($file);
- if (exists $hooks{htmlize}{$base} &&
- $hooks{htmlize}{$base}{noextension}) {
- return $base;
- }
- return;
-}
-
-my %pagename_cache;
-
-sub pagename ($) {
- my $file=shift;
-
- if (exists $pagename_cache{$file}) {
- return $pagename_cache{$file};
- }
-
- my $type=pagetype($file);
- my $page=$file;
- $page=~s/\Q.$type\E*$//
- if defined $type && !$hooks{htmlize}{$type}{keepextension}
- && !$hooks{htmlize}{$type}{noextension};
- if ($config{indexpages} && $page=~/(.*)\/index$/) {
- $page=$1;
- }
-
- $pagename_cache{$file} = $page;
- return $page;
-}
-
-sub newpagefile ($$) {
- my $page=shift;
- my $type=shift;
-
- if (! $config{indexpages} || $page eq 'index') {
- return $page.".".$type;
- }
- else {
- return $page."/index.".$type;
- }
-}
-
-sub targetpage ($$;$) {
- my $page=shift;
- my $ext=shift;
- my $filename=shift;
-
- if (defined $filename) {
- return $page."/".$filename.".".$ext;
- }
- elsif (! $config{usedirs} || $page eq 'index') {
- return $page.".".$ext;
- }
- else {
- return $page."/index.".$ext;
- }
-}
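
targetpage is where the usedirs setting takes effect: with usedirs enabled (the default), a page renders to page/index.html so its URL ends in a clean trailing slash; with it disabled, the page renders to page.html. A small illustrative reimplementation of just that mapping (not ikiwiki's API):

	sub target {
		my ($page, $ext, $usedirs) = @_;
		return ($usedirs && $page ne 'index')
			? "$page/index.$ext"
			: "$page.$ext";
	}
	print target("posts/hello", "html", 1), "\n";   # posts/hello/index.html
	print target("posts/hello", "html", 0), "\n";   # posts/hello.html
	print target("index", "html", 1), "\n";         # index.html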
-
-sub htmlpage ($) {
- my $page=shift;
-
- return targetpage($page, $config{htmlext});
-}
-
-sub srcfile_stat {
- my $file=shift;
- my $nothrow=shift;
-
- return "$config{srcdir}/$file", stat(_) if -e "$config{srcdir}/$file";
- foreach my $dir (@{$config{underlaydirs}}, $config{underlaydir}) {
- return "$dir/$file", stat(_) if -e "$dir/$file";
- }
- error("internal error: $file cannot be found in $config{srcdir} or underlay") unless $nothrow;
- return;
-}
-
-sub srcfile ($;$) {
- return (srcfile_stat(@_))[0];
-}
-
-sub add_literal_underlay ($) {
- my $dir=shift;
-
- if (! grep { $_ eq $dir } @{$config{underlaydirs}}) {
- unshift @{$config{underlaydirs}}, $dir;
- }
-}
-
-sub add_underlay ($) {
- my $dir = shift;
-
- if ($dir !~ /^\//) {
- $dir="$config{underlaydirbase}/$dir";
- }
-
- add_literal_underlay($dir);
- # why does it return 1? we just don't know
- return 1;
-}
-
-sub readfile ($;$$) {
- my $file=shift;
- my $binary=shift;
- my $wantfd=shift;
-
- if (-l $file) {
- error("cannot read a symlink ($file)");
- }
-
- local $/=undef;
- open (my $in, "<", $file) || error("failed to read $file: $!");
- binmode($in) if ($binary);
- return \*$in if $wantfd;
- my $ret=<$in>;
- # check for invalid utf-8, and toss it back to avoid crashes
- if (! utf8::valid($ret)) {
- $ret=encode_utf8($ret);
- }
- close $in || error("failed to read $file: $!");
- return $ret;
-}
-
-sub prep_writefile ($$) {
- my $file=shift;
- my $destdir=shift;
-
- my $test=$file;
- while (length $test) {
- if (-l "$destdir/$test") {
- error("cannot write to a symlink ($test)");
- }
- if (-f _ && $test ne $file) {
- # Remove conflicting file.
- foreach my $p (keys %renderedfiles, keys %oldrenderedfiles) {
- foreach my $f (@{$renderedfiles{$p}}, @{$oldrenderedfiles{$p}}) {
- if ($f eq $test) {
- unlink("$destdir/$test");
- last;
- }
- }
- }
- }
- $test=dirname($test);
- }
-
- my $dir=dirname("$destdir/$file");
- if (! -d $dir) {
- my $d="";
- foreach my $s (split(m!/+!, $dir)) {
- $d.="$s/";
- if (! -d $d) {
- mkdir($d) || error("failed to create directory $d: $!");
- }
- }
- }
-
- return 1;
-}
-
-sub writefile ($$$;$$) {
- my $file=shift; # can include subdirs
- my $destdir=shift; # directory to put file in
- my $content=shift;
- my $binary=shift;
- my $writer=shift;
-
- prep_writefile($file, $destdir);
-
- my $newfile="$destdir/$file.ikiwiki-new";
- if (-l $newfile) {
- error("cannot write to a symlink ($newfile)");
- }
-
- my $cleanup = sub { unlink($newfile) };
- open (my $out, '>', $newfile) || error("failed to write $newfile: $!", $cleanup);
- binmode($out) if ($binary);
- if ($writer) {
- $writer->(\*$out, $cleanup);
- }
- else {
- print $out $content or error("failed writing to $newfile: $!", $cleanup);
- }
- close $out || error("failed saving $newfile: $!", $cleanup);
- rename($newfile, "$destdir/$file") ||
- error("failed renaming $newfile to $destdir/$file: $!", $cleanup);
-
- return 1;
-}
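
Note the write pattern above: writefile never writes the destination in place, but writes $file.ikiwiki-new and then rename()s it over the target, so a reader (or the web server) sees either the old file or the complete new one, never a torn write. The same pattern in isolation, assuming a POSIX filesystem where rename() within a directory is atomic:

	sub write_atomically {
		my ($path, $content) = @_;
		my $tmp = "$path.new";
		open(my $out, '>', $tmp) or die "cannot write $tmp: $!";
		print $out $content      or die "failed writing $tmp: $!";
		close($out)              or die "failed closing $tmp: $!";
		rename($tmp, $path)      or die "failed renaming $tmp: $!";
	}
	write_atomically("/tmp/demo.txt", "hello\n");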
-
-my %cleared;
-sub will_render ($$;$) {
- my $page=shift;
- my $dest=shift;
- my $clear=shift;
-
- # Important security check for independently created files.
- if (-e "$config{destdir}/$dest" && ! $config{rebuild} &&
- ! grep { $_ eq $dest } (@{$renderedfiles{$page}}, @{$oldrenderedfiles{$page}}, @{$wikistate{editpage}{previews}})) {
- my $from_other_page=0;
- # Expensive, but rarely runs.
- foreach my $p (keys %renderedfiles, keys %oldrenderedfiles) {
- if (grep {
- $_ eq $dest ||
- dirname($_) eq $dest
- } @{$renderedfiles{$p}}, @{$oldrenderedfiles{$p}}) {
- $from_other_page=1;
- last;
- }
- }
-
- error("$config{destdir}/$dest independently created, not overwriting with version from $page")
- unless $from_other_page;
- }
-
- # If $dest exists as a directory, remove conflicting files in it
- # rendered from other pages.
- if (-d _) {
- foreach my $p (keys %renderedfiles, keys %oldrenderedfiles) {
- foreach my $f (@{$renderedfiles{$p}}, @{$oldrenderedfiles{$p}}) {
- if (dirname($f) eq $dest) {
- unlink("$config{destdir}/$f");
- rmdir(dirname("$config{destdir}/$f"));
- }
- }
- }
- }
-
- if (! $clear || $cleared{$page}) {
- $renderedfiles{$page}=[$dest, grep { $_ ne $dest } @{$renderedfiles{$page}}];
- }
- else {
- foreach my $old (@{$renderedfiles{$page}}) {
- delete $destsources{$old};
- }
- $renderedfiles{$page}=[$dest];
- $cleared{$page}=1;
- }
- $destsources{$dest}=$page;
-
- return 1;
-}
-
-sub bestlink ($$) {
- my $page=shift;
- my $link=shift;
-
- my $cwd=$page;
- if ($link=~s/^\/+//) {
- # absolute links
- $cwd="";
- }
- $link=~s/\/$//;
-
- do {
- my $l=$cwd;
- $l.="/" if length $l;
- $l.=$link;
-
- if (exists $pagesources{$l}) {
- return $l;
- }
- elsif (exists $pagecase{lc $l}) {
- return $pagecase{lc $l};
- }
- } while $cwd=~s{/?[^/]+$}{};
-
- if (length $config{userdir}) {
- my $l = "$config{userdir}/".lc($link);
- if (exists $pagesources{$l}) {
- return $l;
- }
- elsif (exists $pagecase{lc $l}) {
- return $pagecase{lc $l};
- }
- }
-
- #print STDERR "warning: page $page, broken link: $link\n";
- return "";
-}
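
bestlink implements ikiwiki's closest-wins rule for resolving wikilinks: starting beside the linking page, it walks up toward the wiki root and returns the first existing page (falling back to a case-insensitive match, then to userdir). So a link to "d" on page a/b/c tries a/b/c/d, a/b/d, a/d, then d. A self-contained sketch of just the upward walk, with a stand-in %pages existence hash:

	my %pages = map { $_ => 1 } ("a/d", "d");
	sub best {
		my ($page, $link) = @_;
		my $cwd = $page;
		do {
			my $try = length $cwd ? "$cwd/$link" : $link;
			return $try if $pages{$try};
		} while $cwd =~ s{/?[^/]+$}{};
		return "";
	}
	print best("a/b/c", "d"), "\n";   # "a/d" wins over the root-level "d"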
-
-sub isinlinableimage ($) {
- my $file=shift;
-
- return $file =~ /\.(png|gif|jpg|jpeg|svg)$/i;
-}
-
-sub pagetitle ($;$) {
- my $page=shift;
- my $unescaped=shift;
-
- if ($unescaped) {
- $page=~s/(__(\d+)__|_)/$1 eq '_' ? ' ' : chr($2)/eg;
- }
- else {
- $page=~s/(__(\d+)__|_)/$1 eq '_' ? ' ' : "&#$2;"/eg;
- }
-
- return $page;
-}
-
-sub titlepage ($) {
- my $title=shift;
- # support use w/o %config set
- my $chars = defined $config{wiki_file_chars} ? $config{wiki_file_chars} : "-[:alnum:]+/.:_";
- $title=~s/([^$chars]|_)/$1 eq ' ' ? '_' : "__".ord($1)."__"/eg;
- return $title;
-}
-
-sub linkpage ($) {
- my $link=shift;
- my $chars = defined $config{wiki_file_chars} ? $config{wiki_file_chars} : "-[:alnum:]+/.:_";
- $link=~s/([^$chars])/$1 eq ' ' ? '_' : "__".ord($1)."__"/eg;
- return $link;
-}
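
titlepage and pagetitle are near-inverses: titlepage encodes characters that are not safe in source filenames as __NNN__ (decimal character code) and spaces as _, while pagetitle decodes that form again for display. A round trip using the same default character class, as a standalone sketch:

	my $chars = "-[:alnum:]+/.:_";

	sub escape {      # as titlepage does
		my $t = shift;
		$t =~ s/([^$chars]|_)/$1 eq ' ' ? '_' : '__'.ord($1).'__'/eg;
		return $t;
	}
	sub unescape {    # as pagetitle does, in its unescaped form
		my $p = shift;
		$p =~ s/(__(\d+)__|_)/$1 eq '_' ? ' ' : chr($2)/eg;
		return $p;
	}
	print escape("hello world?"), "\n";          # hello_world__63__
	print unescape("hello_world__63__"), "\n";   # hello world?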
-
-sub cgiurl (@) {
- my %params=@_;
-
- my $cgiurl=$local_cgiurl;
-
- if (exists $params{cgiurl}) {
- $cgiurl=$params{cgiurl};
- delete $params{cgiurl};
- }
-
- unless (%params) {
- return $cgiurl;
- }
-
- return $cgiurl."?".
- join("&", map $_."=".uri_escape_utf8($params{$_}), keys %params);
-}
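
cgiurl appends URL-escaped key=value pairs to the CGI URL; the parameters come from a hash, so their order is unspecified. A standalone sketch of the same query-string construction (keys are sorted here for a deterministic result, and the base URL is hypothetical):

	use URI::Escape qw(uri_escape_utf8);

	my $base   = "https://example.org/ikiwiki.cgi";
	my %params = (do => "edit", page => "posts/hello world");
	my $url    = $base."?".join("&",
		map { $_."=".uri_escape_utf8($params{$_}) } sort keys %params);
	print "$url\n";
	# https://example.org/ikiwiki.cgi?do=edit&page=posts%2Fhello%20world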
-
-sub cgiurl_abs (@) {
- eval q{use URI};
- URI->new_abs(cgiurl(@_), $config{cgiurl});
-}
-
-sub baseurl (;$) {
- my $page=shift;
-
- return $local_url if ! defined $page;
-
- $page=htmlpage($page);
- $page=~s/[^\/]+$//;
- $page=~s/[^\/]+\//..\//g;
- return $page;
-}
-
-sub urlabs ($$) {
- my $url=shift;
- my $urlbase=shift;
-
- return $url unless defined $urlbase && length $urlbase;
-
- eval q{use URI};
- URI->new_abs($url, $urlbase)->as_string;
-}
-
-sub abs2rel ($$) {
-	# Work around very inefficient behavior in File::Spec if abs2rel
-	# is passed two relative paths. It's much faster if paths are
-	# absolute! (Debian bug #376658; fixed in Debian unstable now)
- my $path="/".shift;
- my $base="/".shift;
-
- require File::Spec;
- my $ret=File::Spec->abs2rel($path, $base);
- $ret=~s/^// if defined $ret;
- return $ret;
-}
-
-sub displaytime ($;$$) {
- # Plugins can override this function to mark up the time to
- # display.
- my $time=formattime($_[0], $_[1]);
- if ($config{html5}) {
- return '<time datetime="'.date_3339($_[0]).'"'.
- ($_[2] ? ' pubdate="pubdate"' : '').
- '>'.$time.'</time>';
- }
- else {
- return '<span class="date">'.$time.'</span>';
- }
-}
-
-sub formattime ($;$) {
- # Plugins can override this function to format the time.
- my $time=shift;
- my $format=shift;
- if (! defined $format) {
- $format=$config{timeformat};
- }
-
- return strftime_utf8($format, localtime($time));
-}
-
-my $strftime_encoding;
-sub strftime_utf8 {
- # strftime doesn't know about encodings, so make sure
- # its output is properly treated as utf8.
- # Note that this does not handle utf-8 in the format string.
- ($strftime_encoding) = POSIX::setlocale(&POSIX::LC_TIME) =~ m#\.([^@]+)#
- unless defined $strftime_encoding;
- $strftime_encoding
- ? Encode::decode($strftime_encoding, POSIX::strftime(@_))
- : POSIX::strftime(@_);
-}
-
-sub date_3339 ($) {
- my $time=shift;
-
- my $lc_time=POSIX::setlocale(&POSIX::LC_TIME);
- POSIX::setlocale(&POSIX::LC_TIME, "C");
- my $ret=POSIX::strftime("%Y-%m-%dT%H:%M:%SZ", gmtime($time));
- POSIX::setlocale(&POSIX::LC_TIME, $lc_time);
- return $ret;
-}
-
-sub beautify_urlpath ($) {
- my $url=shift;
-
- # Ensure url is not an empty link, and if necessary,
- # add ./ to avoid colon confusion.
- if ($url !~ /^\// && $url !~ /^\.\.?\//) {
- $url="./$url";
- }
-
- if ($config{usedirs}) {
- $url =~ s!/index.$config{htmlext}$!/!;
- }
-
- return $url;
-}
-
-sub urlto ($;$$) {
- my $to=shift;
- my $from=shift;
- my $absolute=shift;
-
- if (! length $to) {
- $to = 'index';
- }
-
- if (! $destsources{$to}) {
- $to=htmlpage($to);
- }
-
- if ($absolute) {
- return $config{url}.beautify_urlpath("/".$to);
- }
-
- if (! defined $from) {
- my $u = $local_url || '';
- $u =~ s{/$}{};
- return $u.beautify_urlpath("/".$to);
- }
-
- my $link = abs2rel($to, dirname(htmlpage($from)));
-
- return beautify_urlpath($link);
-}
-
-sub isselflink ($$) {
- # Plugins can override this function to support special types
- # of selflinks.
- my $page=shift;
- my $link=shift;
-
- return $page eq $link;
-}
-
-sub htmllink ($$$;@) {
- my $lpage=shift; # the page doing the linking
- my $page=shift; # the page that will contain the link (different for inline)
- my $link=shift;
- my %opts=@_;
-
- $link=~s/\/$//;
-
- my $bestlink;
- if (! $opts{forcesubpage}) {
- $bestlink=bestlink($lpage, $link);
- }
- else {
- $bestlink="$lpage/".lc($link);
- }
-
- my $linktext;
- if (defined $opts{linktext}) {
- $linktext=$opts{linktext};
- }
- else {
- $linktext=pagetitle(basename($link));
- }
-
- return "<span class=\"selflink\">$linktext</span>"
- if length $bestlink && isselflink($page, $bestlink) &&
- ! defined $opts{anchor};
-
- if (! $destsources{$bestlink}) {
- $bestlink=htmlpage($bestlink);
-
- if (! $destsources{$bestlink}) {
- my $cgilink = "";
- if (length $config{cgiurl}) {
- $cgilink = "<a href=\"".
- cgiurl(
- do => "create",
- page => $link,
- from => $lpage
- )."\" rel=\"nofollow\">?</a>";
- }
- return "<span class=\"createlink\">$cgilink$linktext</span>"
- }
- }
-
- $bestlink=abs2rel($bestlink, dirname(htmlpage($page)));
- $bestlink=beautify_urlpath($bestlink);
-
- if (! $opts{noimageinline} && isinlinableimage($bestlink)) {
- return "<img src=\"$bestlink\" alt=\"$linktext\" />";
- }
-
- if (defined $opts{anchor}) {
- $bestlink.="#".$opts{anchor};
- }
-
- my @attrs;
- foreach my $attr (qw{rel class title}) {
- if (defined $opts{$attr}) {
- push @attrs, " $attr=\"$opts{$attr}\"";
- }
- }
-
- return "<a href=\"$bestlink\"@attrs>$linktext</a>";
-}
-
-sub userpage ($) {
- my $user=shift;
- return length $config{userdir} ? "$config{userdir}/$user" : $user;
-}
-
-sub openiduser ($) {
- my $user=shift;
-
- if (defined $user && $user =~ m!^https?://! &&
- eval q{use Net::OpenID::VerifiedIdentity; 1} && !$@) {
- my $display;
-
- if (Net::OpenID::VerifiedIdentity->can("DisplayOfURL")) {
- $display = Net::OpenID::VerifiedIdentity::DisplayOfURL($user);
- }
- else {
- # backcompat with old version
- my $oid=Net::OpenID::VerifiedIdentity->new(identity => $user);
- $display=$oid->display;
- }
-
- # Convert "user.somehost.com" to "user [somehost.com]"
- # (also "user.somehost.co.uk")
- if ($display !~ /\[/) {
- $display=~s/^([-a-zA-Z0-9]+?)\.([-.a-zA-Z0-9]+\.[a-z]+)$/$1 [$2]/;
- }
- # Convert "http://somehost.com/user" to "user [somehost.com]".
- # (also "https://somehost.com/user/")
- if ($display !~ /\[/) {
- $display=~s/^https?:\/\/(.+)\/([^\/#?]+)\/?(?:[#?].*)?$/$2 [$1]/;
- }
- $display=~s!^https?://!!; # make sure this is removed
- eval q{use CGI 'escapeHTML'};
- error($@) if $@;
- return escapeHTML($display);
- }
- return;
-}
-
-sub htmlize ($$$$) {
- my $page=shift;
- my $destpage=shift;
- my $type=shift;
- my $content=shift;
-
- my $oneline = $content !~ /\n/;
-
- if (exists $hooks{htmlize}{$type}) {
- $content=$hooks{htmlize}{$type}{call}->(
- page => $page,
- content => $content,
- );
- }
- else {
- error("htmlization of $type not supported");
- }
-
- run_hooks(sanitize => sub {
- $content=shift->(
- page => $page,
- destpage => $destpage,
- content => $content,
- );
- });
-
- if ($oneline) {
- # hack to get rid of enclosing junk added by markdown
- # and other htmlizers/sanitizers
- $content=~s/^<p>//i;
- $content=~s/<\/p>\n*$//i;
- }
-
- return $content;
-}
-
-sub linkify ($$$) {
- my $page=shift;
- my $destpage=shift;
- my $content=shift;
-
- run_hooks(linkify => sub {
- $content=shift->(
- page => $page,
- destpage => $destpage,
- content => $content,
- );
- });
-
- return $content;
-}
-
-our %preprocessing;
-our $preprocess_preview=0;
-sub preprocess ($$$;$$) {
- my $page=shift; # the page the data comes from
- my $destpage=shift; # the page the data will appear in (different for inline)
- my $content=shift;
- my $scan=shift;
- my $preview=shift;
-
- # Using local because it needs to be set within any nested calls
- # of this function.
- local $preprocess_preview=$preview if defined $preview;
-
- my $handle=sub {
- my $escape=shift;
- my $prefix=shift;
- my $command=shift;
- my $params=shift;
- $params="" if ! defined $params;
-
- if (length $escape) {
- return "[[$prefix$command $params]]";
- }
- elsif (exists $hooks{preprocess}{$command}) {
- return "" if $scan && ! $hooks{preprocess}{$command}{scan};
- # Note: preserve order of params, some plugins may
- # consider it significant.
- my @params;
- while ($params =~ m{
- (?:([-.\w]+)=)? # 1: named parameter key?
- (?:
- """(.*?)""" # 2: triple-quoted value
- |
- "([^"]*?)" # 3: single-quoted value
- |
- '''(.*?)''' # 4: triple-single-quote
- |
- <<([a-zA-Z]+)\n # 5: heredoc start
- (.*?)\n\5 # 6: heredoc value
- |
- (\S+) # 7: unquoted value
- )
- (?:\s+|$) # delimiter to next param
- }msgx) {
- my $key=$1;
- my $val;
- if (defined $2) {
- $val=$2;
- $val=~s/\r\n/\n/mg;
- $val=~s/^\n+//g;
- $val=~s/\n+$//g;
- }
- elsif (defined $3) {
- $val=$3;
- }
- elsif (defined $4) {
- $val=$4;
- }
- elsif (defined $7) {
- $val=$7;
- }
- elsif (defined $6) {
- $val=$6;
- }
-
- if (defined $key) {
- push @params, $key, $val;
- }
- else {
- push @params, $val, '';
- }
- }
- if ($preprocessing{$page}++ > 8) {
- # Avoid loops of preprocessed pages preprocessing
- # other pages that preprocess them, etc.
- return "[[!$command <span class=\"error\">".
- sprintf(gettext("preprocessing loop detected on %s at depth %i"),
- $page, $preprocessing{$page}).
- "</span>]]";
- }
- my $ret;
- if (! $scan) {
- $ret=eval {
- $hooks{preprocess}{$command}{call}->(
- @params,
- page => $page,
- destpage => $destpage,
- preview => $preprocess_preview,
- );
- };
- if ($@) {
- my $error=$@;
- chomp $error;
- eval q{use HTML::Entities};
- $error = encode_entities($error);
- $ret="[[!$command <span class=\"error\">".
- gettext("Error").": $error"."</span>]]";
- }
- }
- else {
- # use void context during scan pass
- eval {
- $hooks{preprocess}{$command}{call}->(
- @params,
- page => $page,
- destpage => $destpage,
- preview => $preprocess_preview,
- );
- };
- $ret="";
- }
- $preprocessing{$page}--;
- return $ret;
- }
- else {
- return "[[$prefix$command $params]]";
- }
- };
-
- my $regex;
- if ($config{prefix_directives}) {
- $regex = qr{
- (\\?) # 1: escape?
- \[\[(!) # directive open; 2: prefix
- ([-\w]+) # 3: command
- ( # 4: the parameters..
- \s+ # Must have space if parameters present
- (?:
- (?:[-.\w]+=)? # named parameter key?
- (?:
- """.*?""" # triple-quoted value
- |
- "[^"]*?" # single-quoted value
- |
- '''.*?''' # triple-single-quote
- |
- <<([a-zA-Z]+)\n # 5: heredoc start
- (?:.*?)\n\5 # heredoc value
- |
- [^"\s\]]+ # unquoted value
- )
- \s* # whitespace or end
- # of directive
- )
- *)? # 0 or more parameters
- \]\] # directive closed
- }sx;
- }
- else {
- $regex = qr{
- (\\?) # 1: escape?
- \[\[(!?) # directive open; 2: optional prefix
- ([-\w]+) # 3: command
- \s+
- ( # 4: the parameters..
- (?:
- (?:[-.\w]+=)? # named parameter key?
- (?:
- """.*?""" # triple-quoted value
- |
- "[^"]*?" # single-quoted value
- |
- '''.*?''' # triple-single-quote
- |
- <<([a-zA-Z]+)\n # 5: heredoc start
- (?:.*?)\n\5 # heredoc value
- |
- [^"\s\]]+ # unquoted value
- )
- \s* # whitespace or end
- # of directive
- )
- *) # 0 or more parameters
- \]\] # directive closed
- }sx;
- }
-
- $content =~ s{$regex}{$handle->($1, $2, $3, $4)}eg;
- return $content;
-}
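
The directive regexes above accept several value syntaxes for parameters: bare words, "double-quoted" values, triple-quoted and triple-single-quoted multi-line values, and heredocs. A few directive forms this parser accepts (with prefix_directives enabled; meta, tag, and template are standard ikiwiki directives):

	[[!meta title="My page"]]
	[[!tag announce]]
	[[!template id=note text="""
	A triple-quoted value may span lines and contain "embedded" quotes.
	"""]]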
-
-sub filter ($$$) {
- my $page=shift;
- my $destpage=shift;
- my $content=shift;
-
- run_hooks(filter => sub {
- $content=shift->(page => $page, destpage => $destpage,
- content => $content);
- });
-
- return $content;
-}
-
-sub check_canedit ($$$;$) {
- my $page=shift;
- my $q=shift;
- my $session=shift;
- my $nonfatal=shift;
-
- my $canedit;
- run_hooks(canedit => sub {
- return if defined $canedit;
- my $ret=shift->($page, $q, $session);
- if (defined $ret) {
- if ($ret eq "") {
- $canedit=1;
- }
- elsif (ref $ret eq 'CODE') {
- $ret->() unless $nonfatal;
- $canedit=0;
- }
- elsif (defined $ret) {
- error($ret) unless $nonfatal;
- $canedit=0;
- }
- }
- });
- return defined $canedit ? $canedit : 1;
-}
-
-sub check_content (@) {
- my %params=@_;
-
- return 1 if ! exists $hooks{checkcontent}; # optimisation
-
- if (exists $pagesources{$params{page}}) {
- my @diff;
- my %old=map { $_ => 1 }
- split("\n", readfile(srcfile($pagesources{$params{page}})));
- foreach my $line (split("\n", $params{content})) {
- push @diff, $line if ! exists $old{$line};
- }
- $params{diff}=join("\n", @diff);
- }
-
- my $ok;
- run_hooks(checkcontent => sub {
- return if defined $ok;
- my $ret=shift->(%params);
- if (defined $ret) {
- if ($ret eq "") {
- $ok=1;
- }
- elsif (ref $ret eq 'CODE') {
- $ret->() unless $params{nonfatal};
- $ok=0;
- }
- elsif (defined $ret) {
- error($ret) unless $params{nonfatal};
- $ok=0;
- }
- }
-
- });
- return defined $ok ? $ok : 1;
-}
-
-sub check_canchange (@) {
- my %params = @_;
- my $cgi = $params{cgi};
- my $session = $params{session};
- my @changes = @{$params{changes}};
-
- my %newfiles;
- foreach my $change (@changes) {
- # This untaint is safe because we check file_pruned and
- # wiki_file_regexp.
- my ($file)=$change->{file}=~/$config{wiki_file_regexp}/;
- $file=possibly_foolish_untaint($file);
- if (! defined $file || ! length $file ||
- file_pruned($file)) {
-			error(sprintf(gettext("bad file name %s"), $file));
- }
-
-		my $type=pagetype($file);
-		my $page;
-		$page=pagename($file) if defined $type;
-
- if ($change->{action} eq 'add') {
- $newfiles{$file}=1;
- }
-
- if ($change->{action} eq 'change' ||
- $change->{action} eq 'add') {
- if (defined $page) {
- check_canedit($page, $cgi, $session);
- next;
- }
- else {
- if (IkiWiki::Plugin::attachment->can("check_canattach")) {
- IkiWiki::Plugin::attachment::check_canattach($session, $file, $change->{path});
- check_canedit($file, $cgi, $session);
- next;
- }
- }
- }
- elsif ($change->{action} eq 'remove') {
- # check_canremove tests to see if the file is present
- # on disk. This will fail when a single commit adds a
- # file and then removes it again. Avoid the problem
- # by not testing the removal in such pairs of changes.
- # (The add is still tested, just to make sure that
- # no data is added to the repo that a web edit
- # could not add.)
- next if $newfiles{$file};
-
- if (IkiWiki::Plugin::remove->can("check_canremove")) {
- IkiWiki::Plugin::remove::check_canremove(defined $page ? $page : $file, $cgi, $session);
- check_canedit(defined $page ? $page : $file, $cgi, $session);
- next;
- }
- }
- else {
- error "unknown action ".$change->{action};
- }
-
- error sprintf(gettext("you are not allowed to change %s"), $file);
- }
-}
-
-
-my $wikilock;
-
-sub lockwiki () {
- # Take an exclusive lock on the wiki to prevent multiple concurrent
- # run issues. The lock will be dropped on program exit.
- if (! -d $config{wikistatedir}) {
- mkdir($config{wikistatedir});
- }
- open($wikilock, '>', "$config{wikistatedir}/lockfile") ||
- error ("cannot write to $config{wikistatedir}/lockfile: $!");
- if (! flock($wikilock, 2)) { # LOCK_EX
- error("failed to get lock");
- }
- return 1;
-}
-
-sub unlockwiki () {
- POSIX::close($ENV{IKIWIKI_CGILOCK_FD}) if exists $ENV{IKIWIKI_CGILOCK_FD};
- return close($wikilock) if $wikilock;
- return;
-}
-
-my $commitlock;
-
-sub commit_hook_enabled () {
- open($commitlock, '+>', "$config{wikistatedir}/commitlock") ||
- error("cannot write to $config{wikistatedir}/commitlock: $!");
- if (! flock($commitlock, 1 | 4)) { # LOCK_SH | LOCK_NB to test
- close($commitlock) || error("failed closing commitlock: $!");
- return 0;
- }
- close($commitlock) || error("failed closing commitlock: $!");
- return 1;
-}
-
-sub disable_commit_hook () {
- open($commitlock, '>', "$config{wikistatedir}/commitlock") ||
- error("cannot write to $config{wikistatedir}/commitlock: $!");
- if (! flock($commitlock, 2)) { # LOCK_EX
- error("failed to get commit lock");
- }
- return 1;
-}
-
-sub enable_commit_hook () {
- return close($commitlock) if $commitlock;
- return;
-}
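
The locking helpers above use flock()'s numeric constants directly: 2 is LOCK_EX, 1 is LOCK_SH, and 4 is LOCK_NB. The non-blocking shared-lock probe from commit_hook_enabled, written with the named Fcntl constants for comparison:

	use Fcntl qw(:flock);    # LOCK_SH == 1, LOCK_EX == 2, LOCK_NB == 4

	open(my $fh, '+>', "/tmp/demo.commitlock") or die "open: $!";
	if (flock($fh, LOCK_SH | LOCK_NB)) {   # same as flock($fh, 1 | 4)
		print "no one holds the exclusive lock\n";
	}
	else {
		print "locked: the commit hook is disabled\n";
	}
	close($fh) or die "close: $!";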
-
-sub loadindex () {
- %oldrenderedfiles=%pagectime=();
- my $rebuild=$config{rebuild};
- if (! $rebuild) {
- %pagesources=%pagemtime=%oldlinks=%links=%depends=
- %destsources=%renderedfiles=%pagecase=%pagestate=
- %depends_simple=%typedlinks=%oldtypedlinks=();
- }
- my $in;
- if (! open ($in, "<", "$config{wikistatedir}/indexdb")) {
- if (-e "$config{wikistatedir}/index") {
- system("ikiwiki-transition", "indexdb", $config{srcdir});
- open ($in, "<", "$config{wikistatedir}/indexdb") || return;
- }
- else {
- # gettime on first build
- $config{gettime}=1 unless defined $config{gettime};
- return;
- }
- }
-
- my $index=Storable::fd_retrieve($in);
- if (! defined $index) {
- return 0;
- }
-
- my $pages;
- if (exists $index->{version} && ! ref $index->{version}) {
- $pages=$index->{page};
- %wikistate=%{$index->{state}};
- # Handle plugins that got disabled by loading a new setup.
- if (exists $config{setupfile}) {
- require IkiWiki::Setup;
- IkiWiki::Setup::disabled_plugins(
- grep { ! $loaded_plugins{$_} } keys %wikistate);
- }
- }
- else {
- $pages=$index;
- %wikistate=();
- }
-
- foreach my $src (keys %$pages) {
- my $d=$pages->{$src};
- my $page;
- if (exists $d->{page} && ! $rebuild) {
- $page=$d->{page};
- }
- else {
- $page=pagename($src);
- }
- $pagectime{$page}=$d->{ctime};
- $pagesources{$page}=$src;
- if (! $rebuild) {
- $pagemtime{$page}=$d->{mtime};
- $renderedfiles{$page}=$d->{dest};
- if (exists $d->{links} && ref $d->{links}) {
- $links{$page}=$d->{links};
- $oldlinks{$page}=[@{$d->{links}}];
- }
- if (ref $d->{depends_simple} eq 'ARRAY') {
- # old format
- $depends_simple{$page}={
- map { $_ => 1 } @{$d->{depends_simple}}
- };
- }
- elsif (exists $d->{depends_simple}) {
- $depends_simple{$page}=$d->{depends_simple};
- }
- if (exists $d->{dependslist}) {
- # old format
- $depends{$page}={
- map { $_ => $DEPEND_CONTENT }
- @{$d->{dependslist}}
- };
- }
- elsif (exists $d->{depends} && ! ref $d->{depends}) {
- # old format
- $depends{$page}={$d->{depends} => $DEPEND_CONTENT };
- }
- elsif (exists $d->{depends}) {
- $depends{$page}=$d->{depends};
- }
- if (exists $d->{state}) {
- $pagestate{$page}=$d->{state};
- }
- if (exists $d->{typedlinks}) {
- $typedlinks{$page}=$d->{typedlinks};
-
- while (my ($type, $links) = each %{$typedlinks{$page}}) {
- next unless %$links;
- $oldtypedlinks{$page}{$type} = {%$links};
- }
- }
- }
- $oldrenderedfiles{$page}=[@{$d->{dest}}];
- }
- foreach my $page (keys %pagesources) {
- $pagecase{lc $page}=$page;
- }
- foreach my $page (keys %renderedfiles) {
- $destsources{$_}=$page foreach @{$renderedfiles{$page}};
- }
- $lastrev=$index->{lastrev};
- @underlayfiles=@{$index->{underlayfiles}} if ref $index->{underlayfiles};
- return close($in);
-}
-
-sub saveindex () {
- run_hooks(savestate => sub { shift->() });
-
- my @plugins=keys %loaded_plugins;
-
- if (! -d $config{wikistatedir}) {
- mkdir($config{wikistatedir});
- }
- my $newfile="$config{wikistatedir}/indexdb.new";
- my $cleanup = sub { unlink($newfile) };
- open (my $out, '>', $newfile) || error("cannot write to $newfile: $!", $cleanup);
-
- my %index;
- foreach my $page (keys %pagemtime) {
- next unless $pagemtime{$page};
- my $src=$pagesources{$page};
-
- $index{page}{$src}={
- page => $page,
- ctime => $pagectime{$page},
- mtime => $pagemtime{$page},
- dest => $renderedfiles{$page},
- links => $links{$page},
- };
-
- if (exists $depends{$page}) {
- $index{page}{$src}{depends} = $depends{$page};
- }
-
- if (exists $depends_simple{$page}) {
- $index{page}{$src}{depends_simple} = $depends_simple{$page};
- }
-
- if (exists $typedlinks{$page} && %{$typedlinks{$page}}) {
- $index{page}{$src}{typedlinks} = $typedlinks{$page};
- }
-
- if (exists $pagestate{$page}) {
- $index{page}{$src}{state}=$pagestate{$page};
- }
- }
-
- $index{state}={};
- foreach my $id (@plugins) {
- $index{state}{$id}={}; # used to detect disabled plugins
- foreach my $key (keys %{$wikistate{$id}}) {
- $index{state}{$id}{$key}=$wikistate{$id}{$key};
- }
- }
-
- $index{lastrev}=$lastrev;
- $index{underlayfiles}=\@underlayfiles;
-
- $index{version}="3";
- my $ret=Storable::nstore_fd(\%index, $out);
- return if ! defined $ret || ! $ret;
- close $out || error("failed saving to $newfile: $!", $cleanup);
- rename($newfile, "$config{wikistatedir}/indexdb") ||
- error("failed renaming $newfile to $config{wikistatedir}/indexdb", $cleanup);
-
- return 1;
-}
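
saveindex serializes the index with Storable's nstore_fd, which writes in network byte order so an indexdb built on one architecture can be read on another, and it reuses the same temp-file-plus-rename pattern as writefile. A minimal round trip through the same pair of Storable calls, with a throwaway path:

	use Storable qw(nstore_fd fd_retrieve);

	my %index = (version => "3", page => { "foo.mdwn" => { page => "foo" } });

	open(my $out, '>', "/tmp/demo.indexdb") or die "open: $!";
	binmode($out);
	nstore_fd(\%index, $out) or die "store failed";
	close($out) or die "close: $!";

	open(my $in, '<', "/tmp/demo.indexdb") or die "open: $!";
	binmode($in);
	my $got = fd_retrieve($in);
	print $got->{page}{"foo.mdwn"}{page}, "\n";   # foo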
-
-sub template_file ($) {
- my $name=shift;
-
- my $tpage=($name =~ s/^\///) ? $name : "templates/$name";
- my $template;
- if ($name !~ /\.tmpl$/ && exists $pagesources{$tpage}) {
- $template=srcfile($pagesources{$tpage}, 1);
- $name.=".tmpl";
- }
- else {
- $template=srcfile($tpage, 1);
- }
-
- if (defined $template) {
- return $template, $tpage, 1 if wantarray;
- return $template;
- }
- else {
- $name=~s:/::; # avoid path traversal
- foreach my $dir ($config{templatedir},
- "$installdir/share/ikiwiki/templates") {
- if (-e "$dir/$name") {
- $template="$dir/$name";
- last;
- }
- }
- if (defined $template) {
- return $template, $tpage if wantarray;
- return $template;
- }
- }
-
- return;
-}
-
-sub template_depends ($$;@) {
- my $name=shift;
- my $page=shift;
-
- my ($filename, $tpage, $untrusted)=template_file($name);
- if (! defined $filename) {
- error(sprintf(gettext("template %s not found"), $name))
- }
-
- if (defined $page && defined $tpage) {
- add_depends($page, $tpage);
- }
-
- my @opts=(
- filter => sub {
- my $text_ref = shift;
- ${$text_ref} = decode_utf8(${$text_ref});
- run_hooks(readtemplate => sub {
- ${$text_ref} = shift->(
- id => $name,
- page => $tpage,
- content => ${$text_ref},
- untrusted => $untrusted,
- );
- });
- },
- loop_context_vars => 1,
- die_on_bad_params => 0,
- parent_global_vars => 1,
- filename => $filename,
- @_,
- ($untrusted ? (no_includes => 1) : ()),
- );
- return @opts if wantarray;
-
- require HTML::Template;
- return HTML::Template->new(@opts);
-}
-
-sub template ($;@) {
- template_depends(shift, undef, @_);
-}
-
-sub templateactions ($$) {
- my $template=shift;
- my $page=shift;
-
- my $have_actions=0;
- my @actions;
- run_hooks(pageactions => sub {
- push @actions, map { { action => $_ } }
- grep { defined } shift->(page => $page);
- });
- $template->param(actions => \@actions);
-
- if ($config{cgiurl} && exists $hooks{auth}) {
- $template->param(prefsurl => cgiurl(do => "prefs"));
- $have_actions=1;
- }
-
- if ($have_actions || @actions) {
- $template->param(have_actions => 1);
- }
-}
-
-sub hook (@) {
- my %param=@_;
-
- if (! exists $param{type} || ! ref $param{call} || ! exists $param{id}) {
- error 'hook requires type, call, and id parameters';
- }
-
- return if $param{no_override} && exists $hooks{$param{type}}{$param{id}};
-
- $hooks{$param{type}}{$param{id}}=\%param;
- return 1;
-}
-
-sub run_hooks ($$) {
- # Calls the given sub for each hook of the given type,
- # passing it the hook function to call.
- my $type=shift;
- my $sub=shift;
-
- if (exists $hooks{$type}) {
- my (@first, @middle, @last);
- foreach my $id (keys %{$hooks{$type}}) {
- if ($hooks{$type}{$id}{first}) {
- push @first, $id;
- }
- elsif ($hooks{$type}{$id}{last}) {
- push @last, $id;
- }
- else {
- push @middle, $id;
- }
- }
- foreach my $id (@first, @middle, @last) {
- $sub->($hooks{$type}{$id}{call});
- }
- }
-
- return 1;
-}
-
-sub rcs_update () {
- $hooks{rcs}{rcs_update}{call}->(@_);
-}
-
-sub rcs_prepedit ($) {
- $hooks{rcs}{rcs_prepedit}{call}->(@_);
-}
-
-sub rcs_commit (@) {
- $hooks{rcs}{rcs_commit}{call}->(@_);
-}
-
-sub rcs_commit_staged (@) {
- $hooks{rcs}{rcs_commit_staged}{call}->(@_);
-}
-
-sub rcs_add ($) {
- $hooks{rcs}{rcs_add}{call}->(@_);
-}
-
-sub rcs_remove ($) {
- $hooks{rcs}{rcs_remove}{call}->(@_);
-}
-
-sub rcs_rename ($$) {
- $hooks{rcs}{rcs_rename}{call}->(@_);
-}
-
-sub rcs_recentchanges ($) {
- $hooks{rcs}{rcs_recentchanges}{call}->(@_);
-}
-
-sub rcs_diff ($;$) {
- $hooks{rcs}{rcs_diff}{call}->(@_);
-}
-
-sub rcs_getctime ($) {
- $hooks{rcs}{rcs_getctime}{call}->(@_);
-}
-
-sub rcs_getmtime ($) {
- $hooks{rcs}{rcs_getmtime}{call}->(@_);
-}
-
-sub rcs_receive () {
- $hooks{rcs}{rcs_receive}{call}->();
-}
-
-sub add_depends ($$;$) {
- my $page=shift;
- my $pagespec=shift;
- my $deptype=shift || $DEPEND_CONTENT;
-
- # Is the pagespec a simple page name?
- if ($pagespec =~ /$config{wiki_file_regexp}/ &&
- $pagespec !~ /[\s*?()!]/) {
- $depends_simple{$page}{lc $pagespec} |= $deptype;
- return 1;
- }
-
- # Add explicit dependencies for influences.
- my $sub=pagespec_translate($pagespec);
- return unless defined $sub;
- foreach my $p (keys %pagesources) {
- my $r=$sub->($p, location => $page);
- my $i=$r->influences;
- my $static=$r->influences_static;
- foreach my $k (keys %$i) {
- next unless $r || $static || $k eq $page;
- $depends_simple{$page}{lc $k} |= $i->{$k};
- }
- last if $static;
- }
-
- $depends{$page}{$pagespec} |= $deptype;
- return 1;
-}
-
-sub deptype (@) {
- my $deptype=0;
- foreach my $type (@_) {
- if ($type eq 'presence') {
- $deptype |= $DEPEND_PRESENCE;
- }
- elsif ($type eq 'links') {
- $deptype |= $DEPEND_LINKS;
- }
- elsif ($type eq 'content') {
- $deptype |= $DEPEND_CONTENT;
- }
- }
- return $deptype;
-}
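
Dependency types are a bitmask, so one page can depend on another in several ways at once; deptype() ORs the requested flags together, e.g. deptype("presence", "links") returns 2 | 4 == 6. Testing a combined mask:

	# Values as defined near the top of IkiWiki.pm.
	my ($CONTENT, $PRESENCE, $LINKS) = (1, 2, 4);

	my $mask = $PRESENCE | $LINKS;    # what deptype("presence", "links") yields
	printf "content=%d presence=%d links=%d\n",
		($mask & $CONTENT)  ? 1 : 0,
		($mask & $PRESENCE) ? 1 : 0,
		($mask & $LINKS)    ? 1 : 0;  # content=0 presence=1 links=1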
-
-my $file_prune_regexp;
-sub file_pruned ($) {
- my $file=shift;
-
- if (defined $config{include} && length $config{include}) {
- return 0 if $file =~ m/$config{include}/;
- }
-
- if (! defined $file_prune_regexp) {
- $file_prune_regexp='('.join('|', @{$config{wiki_file_prune_regexps}}).')';
- $file_prune_regexp=qr/$file_prune_regexp/;
- }
- return $file =~ m/$file_prune_regexp/;
-}
-
-sub define_gettext () {
- # If translation is needed, redefine the gettext function to do it.
- # Otherwise, it becomes a quick no-op.
- my $gettext_obj;
- my $getobj;
- if ((exists $ENV{LANG} && length $ENV{LANG}) ||
- (exists $ENV{LC_ALL} && length $ENV{LC_ALL}) ||
- (exists $ENV{LC_MESSAGES} && length $ENV{LC_MESSAGES})) {
- $getobj=sub {
- $gettext_obj=eval q{
- use Locale::gettext q{textdomain};
- Locale::gettext->domain('ikiwiki')
- };
- };
- }
-
- no warnings 'redefine';
- *gettext=sub {
- $getobj->() if $getobj;
- if ($gettext_obj) {
- $gettext_obj->get(shift);
- }
- else {
- return shift;
- }
- };
- *ngettext=sub {
- $getobj->() if $getobj;
- if ($gettext_obj) {
- $gettext_obj->nget(@_);
- }
- else {
- return ($_[2] == 1 ? $_[0] : $_[1])
- }
- };
-}
-
-sub gettext {
- define_gettext();
- gettext(@_);
-}
-
-sub ngettext {
- define_gettext();
- ngettext(@_);
-}
-
-sub yesno ($) {
- my $val=shift;
-
- return (defined $val && (lc($val) eq gettext("yes") || lc($val) eq "yes" || $val eq "1"));
-}
-
-sub inject {
- # Injects a new function into the symbol table to replace an
- # exported function.
- my %params=@_;
-
- # This is deep ugly perl foo, beware.
- no strict;
- no warnings;
- if (! defined $params{parent}) {
- $params{parent}='::';
- $params{old}=\&{$params{name}};
- $params{name}=~s/.*:://;
- }
- my $parent=$params{parent};
- foreach my $ns (grep /^\w+::/, keys %{$parent}) {
- $ns = $params{parent} . $ns;
- inject(%params, parent => $ns) unless $ns eq '::main::';
- *{$ns . $params{name}} = $params{call}
- if exists ${$ns}{$params{name}} &&
- \&{${$ns}{$params{name}}} == $params{old};
- }
- use strict;
- use warnings;
-}
-
-sub add_link ($$;$) {
- my $page=shift;
- my $link=shift;
- my $type=shift;
-
- push @{$links{$page}}, $link
- unless grep { $_ eq $link } @{$links{$page}};
-
- if (defined $type) {
- $typedlinks{$page}{$type}{$link} = 1;
- }
-}
-
-sub add_autofile ($$$) {
- my $file=shift;
- my $plugin=shift;
- my $generator=shift;
-
- $autofiles{$file}{plugin}=$plugin;
- $autofiles{$file}{generator}=$generator;
-}
-
-sub useragent () {
- return LWP::UserAgent->new(
- cookie_jar => $config{cookiejar},
- env_proxy => 1, # respect proxy env vars
- agent => $config{useragent},
- );
-}
-
-sub sortspec_translate ($$) {
- my $spec = shift;
- my $reverse = shift;
-
- my $code = "";
- my @data;
- while ($spec =~ m{
- \s*
- (-?) # group 1: perhaps negated
- \s*
- ( # group 2: a word
- \w+\([^\)]*\) # command(params)
- |
- [^\s]+ # or anything else
- )
- \s*
- }gx) {
- my $negated = $1;
- my $word = $2;
- my $params = undef;
-
- if ($word =~ m/^(\w+)\((.*)\)$/) {
- # command with parameters
- $params = $2;
- $word = $1;
- }
- elsif ($word !~ m/^\w+$/) {
- error(sprintf(gettext("invalid sort type %s"), $word));
- }
-
- if (length $code) {
- $code .= " || ";
- }
-
- if ($negated) {
- $code .= "-";
- }
-
- if (exists $IkiWiki::SortSpec::{"cmp_$word"}) {
- if (defined $params) {
- push @data, $params;
- $code .= "IkiWiki::SortSpec::cmp_$word(\$data[$#data])";
- }
- else {
- $code .= "IkiWiki::SortSpec::cmp_$word(undef)";
- }
- }
- else {
- error(sprintf(gettext("unknown sort type %s"), $word));
- }
- }
-
- if (! length $code) {
- # undefined sorting method... sort arbitrarily
- return sub { 0 };
- }
-
- if ($reverse) {
- $code="-($code)";
- }
-
- no warnings;
- return eval 'sub { '.$code.' }';
-}
-
-sub pagespec_translate ($) {
- my $spec=shift;
-
- # Convert spec to perl code.
- my $code="";
- my @data;
- while ($spec=~m{
- \s* # ignore whitespace
- ( # 1: match a single word
- \! # !
- |
- \( # (
- |
- \) # )
- |
- \w+\([^\)]*\) # command(params)
- |
- [^\s()]+ # any other text
- )
- \s* # ignore whitespace
- }gx) {
- my $word=$1;
- if (lc $word eq 'and') {
- $code.=' &';
- }
- elsif (lc $word eq 'or') {
- $code.=' |';
- }
- elsif ($word eq "(" || $word eq ")" || $word eq "!") {
- $code.=' '.$word;
- }
- elsif ($word =~ /^(\w+)\((.*)\)$/) {
- if (exists $IkiWiki::PageSpec::{"match_$1"}) {
- push @data, $2;
- $code.="IkiWiki::PageSpec::match_$1(\$page, \$data[$#data], \@_)";
- }
- else {
- push @data, qq{unknown function in pagespec "$word"};
- $code.="IkiWiki::ErrorReason->new(\$data[$#data])";
- }
- }
- else {
- push @data, $word;
- $code.=" IkiWiki::PageSpec::match_glob(\$page, \$data[$#data], \@_)";
- }
- }
-
- if (! length $code) {
- $code="IkiWiki::FailReason->new('empty pagespec')";
- }
-
- no warnings;
- return eval 'sub { my $page=shift; '.$code.' }';
-}
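
pagespec_translate compiles a pagespec to Perl source and evals it into a closure: "and" and "or" become the overloaded & and | operators, ! negates, and each glob or function call becomes a match_* call over the @data array the closure captures. So "blog/* and !*/Discussion" compiles to roughly match_glob($page, $data[0], @_) & ! match_glob($page, $data[1], @_). A toy version of the same compile-to-closure technique, using plain booleans where ikiwiki uses overloaded reason objects so that influence metadata merges:

	sub compile_spec {
		my $spec = shift;
		my @data;
		my $code = "";
		foreach my $word (split ' ', $spec) {
			if    (lc $word eq 'and') { $code .= ' &&' }
			elsif (lc $word eq 'or')  { $code .= ' ||' }
			elsif ($word eq '!')      { $code .= ' !'  }
			else {
				push @data, $word;
				$code .= " (\$_[0] eq \$data[$#data])";
			}
		}
		return eval "sub {".$code." }";   # closes over @data
	}
	my $match = compile_spec("foo or bar");
	print $match->("bar") ? "match\n" : "no match\n";   # match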
-
-sub pagespec_match ($$;@) {
- my $page=shift;
- my $spec=shift;
- my @params=@_;
-
-	# Backwards compatibility with old calling convention.
- if (@params == 1) {
- unshift @params, 'location';
- }
-
- my $sub=pagespec_translate($spec);
- return IkiWiki::ErrorReason->new("syntax error in pagespec \"$spec\"")
- if ! defined $sub;
- return $sub->($page, @params);
-}
-
-# e.g. @pages = sort_pages("title", \@pages, reverse => "yes")
-#
-# Not exported yet, but could be in future if it is generally useful.
-# Note that this signature is not the same as IkiWiki::SortSpec::sort_pages,
-# which is "more internal".
-sub sort_pages ($$;@) {
- my $sort = shift;
- my $list = shift;
- my %params = @_;
- $sort = sortspec_translate($sort, $params{reverse});
- return IkiWiki::SortSpec::sort_pages($sort, @$list);
-}
-
-sub pagespec_match_list ($$;@) {
- my $page=shift;
- my $pagespec=shift;
- my %params=@_;
-
-	# Backwards compatibility with old calling convention.
- if (ref $page) {
- print STDERR "warning: a plugin (".caller().") is using pagespec_match_list in an obsolete way, and needs to be updated\n";
- $params{list}=$page;
- $page=$params{location}; # ugh!
- }
-
- my $sub=pagespec_translate($pagespec);
- error "syntax error in pagespec \"$pagespec\""
- if ! defined $sub;
- my $sort=sortspec_translate($params{sort}, $params{reverse})
- if defined $params{sort};
-
- my @candidates;
- if (exists $params{list}) {
- @candidates=exists $params{filter}
- ? grep { ! $params{filter}->($_) } @{$params{list}}
- : @{$params{list}};
- }
- else {
- @candidates=exists $params{filter}
- ? grep { ! $params{filter}->($_) } keys %pagesources
- : keys %pagesources;
- }
-
- # clear params, remainder is passed to pagespec
- $depends{$page}{$pagespec} |= ($params{deptype} || $DEPEND_CONTENT);
- my $num=$params{num};
- delete @params{qw{num deptype reverse sort filter list}};
-
-	# when only the top matches will be returned, it's efficient to
-	# sort before matching against the pagespec.
- if (defined $num && defined $sort) {
- @candidates=IkiWiki::SortSpec::sort_pages(
- $sort, @candidates);
- }
-
- my @matches;
- my $firstfail;
- my $count=0;
- my $accum=IkiWiki::SuccessReason->new();
- foreach my $p (@candidates) {
- my $r=$sub->($p, %params, location => $page);
- error(sprintf(gettext("cannot match pages: %s"), $r))
- if $r->isa("IkiWiki::ErrorReason");
- unless ($r || $r->influences_static) {
- $r->remove_influence($p);
- }
- $accum |= $r;
- if ($r) {
- push @matches, $p;
- last if defined $num && ++$count == $num;
- }
- }
-
- # Add simple dependencies for accumulated influences.
- my $i=$accum->influences;
- foreach my $k (keys %$i) {
- $depends_simple{$page}{lc $k} |= $i->{$k};
- }
-
- # when all matches will be returned, it's efficient to
- # sort after matching
- if (! defined $num && defined $sort) {
- return IkiWiki::SortSpec::sort_pages(
- $sort, @matches);
- }
- else {
- return @matches;
- }
-}
-
-sub pagespec_valid ($) {
- my $spec=shift;
-
- return defined pagespec_translate($spec);
-}
-
-sub glob2re ($) {
- my $re=quotemeta(shift);
- $re=~s/\\\*/.*/g;
- $re=~s/\\\?/./g;
- return qr/^$re$/i;
-}
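
glob2re turns a wildcard glob into an anchored, case-insensitive regex: quotemeta escapes everything, then the escaped * and ? are rewritten to .* and . respectively, the only two metacharacters pagespec globs support. For example:

	sub g2re {    # same approach as glob2re above
		my $re = quotemeta(shift);
		$re =~ s/\\\*/.*/g;
		$re =~ s/\\\?/./g;
		return qr/^$re$/i;
	}
	print "posts/2019/hello" =~ g2re("posts/*/h?llo") ? "match\n" : "no\n";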
-
-package IkiWiki::FailReason;
-
-use overload (
- '""' => sub { $_[0][0] },
- '0+' => sub { 0 },
- '!' => sub { bless $_[0], 'IkiWiki::SuccessReason'},
- '&' => sub { $_[0]->merge_influences($_[1], 1); $_[0] },
- '|' => sub { $_[1]->merge_influences($_[0]); $_[1] },
- fallback => 1,
-);
-
-our @ISA = 'IkiWiki::SuccessReason';
-
-package IkiWiki::SuccessReason;
-
-# A blessed array-ref:
-#
-# [0]: human-readable reason for success (or, in FailReason subclass, failure)
-# [1]{""}:
-# - if absent or false, the influences of this evaluation are "static",
-# see the influences_static method
-# - if true, they are dynamic (not static)
-# [1]{any other key}:
-# the dependency types of influences, as returned by the influences method
-
-use overload (
- # in string context, it's the human-readable reason
- '""' => sub { $_[0][0] },
- # in boolean context, SuccessReason is 1 and FailReason is 0
- '0+' => sub { 1 },
- # negating a result gives the opposite result with the same influences
- '!' => sub { bless $_[0], 'IkiWiki::FailReason'},
- # A & B = (A ? B : A) with the influences of both
- '&' => sub { $_[1]->merge_influences($_[0], 1); $_[1] },
- # A | B = (A ? A : B) with the influences of both
- '|' => sub { $_[0]->merge_influences($_[1]); $_[0] },
- fallback => 1,
-);
-
-# SuccessReason->new("human-readable reason", page => deptype, ...)
-
-sub new {
- my $class = shift;
- my $value = shift;
- return bless [$value, {@_}], $class;
-}
-
-# influences(): return a reference to a copy of the hash
-# { page => dependency type } describing the pages that indirectly influenced
-# this result, but would not cause a dependency through ikiwiki's core
-# dependency logic.
-#
-# See [[todo/dependency_types]] for extensive discussion of what this means.
-#
-# influences(page => deptype, ...): remove all influences, replace them
-# with the arguments, and return a reference to a copy of the new influences.
-
-sub influences {
- my $this=shift;
- $this->[1]={@_} if @_;
- my %i=%{$this->[1]};
- delete $i{""};
- return \%i;
-}
-
-# True if this result has the same influences whichever page it matches.
-# For instance, whether bar matches backlink(foo) is influenced only by
-# the set of links in foo, so its only influence is { foo => DEPEND_LINKS },
-# which does not mention bar anywhere.
-#
-# False if this result would have different influences when matching
-# different pages. For instance, when testing whether link(foo) matches bar,
-# { bar => DEPEND_LINKS } is an influence on that result, because changing
-# bar's links could change the outcome; so its influences are not the same
-# as when testing whether link(foo) matches baz.
-#
-# Static influences are one of the things that make pagespec_match_list
-# more efficient than repeated calls to pagespec_match.
-
-sub influences_static {
- return ! $_[0][1]->{""};
-}
-
-# Change the influences of $this to be the influences of "$this & $other"
-# or "$this | $other".
-#
-# If both $this and $other are either successful or have influences,
-# or this is an "or" operation, the result has all the influences from
-# either of the arguments. It has dynamic influences if either argument
-# has dynamic influences.
-#
-# If this is an "and" operation, and at least one argument is a
-# FailReason with no influences, the result has no influences, and they
-# are not dynamic. For instance, link(foo) matching bar is influenced
-# by bar, but enabled(ddate) has no influences. Suppose ddate is disabled;
-# then (link(foo) and enabled(ddate)) not matching bar is not influenced by
-# bar, because it would be false however often you edit bar.
-
-sub merge_influences {
- my $this=shift;
- my $other=shift;
- my $anded=shift;
-
- # This "if" is odd because it needs to avoid negating $this
- # or $other, which would alter the objects in-place. Be careful.
- if (! $anded || (($this || %{$this->[1]}) &&
- ($other || %{$other->[1]}))) {
- foreach my $influence (keys %{$other->[1]}) {
- $this->[1]{$influence} |= $other->[1]{$influence};
- }
- }
- else {
- # influence blocker
- $this->[1]={};
- }
-}
-
-# Change $this so it is not considered to be influenced by $torm.
-
-sub remove_influence {
- my $this=shift;
- my $torm=shift;
-
- delete $this->[1]{$torm};
-}
-
-package IkiWiki::ErrorReason;
-
-our @ISA = 'IkiWiki::FailReason';
-
-package IkiWiki::PageSpec;
-
-sub derel ($$) {
- my $path=shift;
- my $from=shift;
-
- if ($path =~ m!^\.(/|$)!) {
- if ($1) {
- $from=~s#/?[^/]+$## if defined $from;
- $path=~s#^\./##;
- $path="$from/$path" if defined $from && length $from;
- }
- else {
- $path = $from;
- $path = "" unless defined $path;
- }
- }
-
- return $path;
-}
-
-my %glob_cache;
-
-sub match_glob ($$;@) {
- my $page=shift;
- my $glob=shift;
- my %params=@_;
-
- $glob=derel($glob, $params{location});
-
- # Instead of converting the glob to a regex every time,
- # cache the compiled regex to save time.
- my $re=$glob_cache{$glob};
- unless (defined $re) {
- $glob_cache{$glob} = $re = IkiWiki::glob2re($glob);
- }
- if ($page =~ $re) {
- if (! IkiWiki::isinternal($page) || $params{internal}) {
- return IkiWiki::SuccessReason->new("$glob matches $page");
- }
- else {
- return IkiWiki::FailReason->new("$glob matches $page, but the page is an internal page");
- }
- }
- else {
- return IkiWiki::FailReason->new("$glob does not match $page");
- }
-}
-
-sub match_internal ($$;@) {
- return match_glob(shift, shift, @_, internal => 1)
-}
-
-sub match_page ($$;@) {
- my $page=shift;
- my $match=match_glob($page, shift, @_);
- if ($match) {
- my $source=exists $IkiWiki::pagesources{$page} ?
- $IkiWiki::pagesources{$page} :
- $IkiWiki::delpagesources{$page};
- my $type=defined $source ? IkiWiki::pagetype($source) : undef;
- if (! defined $type) {
- return IkiWiki::FailReason->new("$page is not a page");
- }
- }
- return $match;
-}
-
-sub match_link ($$;@) {
- my $page=shift;
- my $link=lc(shift);
- my %params=@_;
-
- $link=derel($link, $params{location});
- my $from=exists $params{location} ? $params{location} : '';
- my $linktype=$params{linktype};
- my $qualifier='';
- if (defined $linktype) {
- $qualifier=" with type $linktype";
- }
-
- my $links = $IkiWiki::links{$page};
- return IkiWiki::FailReason->new("$page has no links", $page => $IkiWiki::DEPEND_LINKS, "" => 1)
- unless $links && @{$links};
- my $bestlink = IkiWiki::bestlink($from, $link);
- foreach my $p (@{$links}) {
- next unless (! defined $linktype || exists $IkiWiki::typedlinks{$page}{$linktype}{$p});
-
- if (length $bestlink) {
- if ($bestlink eq IkiWiki::bestlink($page, $p)) {
- return IkiWiki::SuccessReason->new("$page links to $link$qualifier", $page => $IkiWiki::DEPEND_LINKS, "" => 1)
- }
- }
- else {
- if (match_glob($p, $link, %params)) {
- return IkiWiki::SuccessReason->new("$page links to page $p$qualifier, matching $link", $page => $IkiWiki::DEPEND_LINKS, "" => 1)
- }
- my ($p_rel)=$p=~/^\/?(.*)/;
- $link=~s/^\///;
- if (match_glob($p_rel, $link, %params)) {
- return IkiWiki::SuccessReason->new("$page links to page $p_rel$qualifier, matching $link", $page => $IkiWiki::DEPEND_LINKS, "" => 1)
- }
- }
- }
- return IkiWiki::FailReason->new("$page does not link to $link$qualifier", $page => $IkiWiki::DEPEND_LINKS, "" => 1);
-}
-
-sub match_backlink ($$;@) {
- my $page=shift;
- my $testpage=shift;
- my %params=@_;
- if ($testpage eq '.') {
- $testpage = $params{'location'}
- }
- my $ret=match_link($testpage, $page, @_);
- $ret->influences($testpage => $IkiWiki::DEPEND_LINKS);
- return $ret;
-}
-
-sub match_created_before ($$;@) {
- my $page=shift;
- my $testpage=shift;
- my %params=@_;
-
- $testpage=derel($testpage, $params{location});
-
- if (exists $IkiWiki::pagectime{$testpage}) {
- if ($IkiWiki::pagectime{$page} < $IkiWiki::pagectime{$testpage}) {
- return IkiWiki::SuccessReason->new("$page created before $testpage", $testpage => $IkiWiki::DEPEND_PRESENCE);
- }
- else {
- return IkiWiki::FailReason->new("$page not created before $testpage", $testpage => $IkiWiki::DEPEND_PRESENCE);
- }
- }
- else {
- return IkiWiki::ErrorReason->new("$testpage does not exist", $testpage => $IkiWiki::DEPEND_PRESENCE);
- }
-}
-
-sub match_created_after ($$;@) {
- my $page=shift;
- my $testpage=shift;
- my %params=@_;
-
- $testpage=derel($testpage, $params{location});
-
- if (exists $IkiWiki::pagectime{$testpage}) {
- if ($IkiWiki::pagectime{$page} > $IkiWiki::pagectime{$testpage}) {
- return IkiWiki::SuccessReason->new("$page created after $testpage", $testpage => $IkiWiki::DEPEND_PRESENCE);
- }
- else {
- return IkiWiki::FailReason->new("$page not created after $testpage", $testpage => $IkiWiki::DEPEND_PRESENCE);
- }
- }
- else {
- return IkiWiki::ErrorReason->new("$testpage does not exist", $testpage => $IkiWiki::DEPEND_PRESENCE);
- }
-}
-
-sub match_creation_day ($$;@) {
- my $page=shift;
- my $d=shift;
- if ($d !~ /^\d+$/) {
- return IkiWiki::ErrorReason->new("invalid day $d");
- }
- if ((localtime($IkiWiki::pagectime{$page}))[3] == $d) {
- return IkiWiki::SuccessReason->new('creation_day matched');
- }
- else {
- return IkiWiki::FailReason->new('creation_day did not match');
- }
-}
-
-sub match_creation_month ($$;@) {
- my $page=shift;
- my $m=shift;
- if ($m !~ /^\d+$/) {
- return IkiWiki::ErrorReason->new("invalid month $m");
- }
- if ((localtime($IkiWiki::pagectime{$page}))[4] + 1 == $m) {
- return IkiWiki::SuccessReason->new('creation_month matched');
- }
- else {
- return IkiWiki::FailReason->new('creation_month did not match');
- }
-}
-
-sub match_creation_year ($$;@) {
- my $page=shift;
- my $y=shift;
- if ($y !~ /^\d+$/) {
- return IkiWiki::ErrorReason->new("invalid year $y");
- }
- if ((localtime($IkiWiki::pagectime{$page}))[5] + 1900 == $y) {
- return IkiWiki::SuccessReason->new('creation_year matched');
- }
- else {
- return IkiWiki::FailReason->new('creation_year did not match');
- }
-}
-
-sub match_user ($$;@) {
- shift;
- my $user=shift;
- my %params=@_;
-
- if (! exists $params{user}) {
- return IkiWiki::ErrorReason->new("no user specified");
- }
-
- my $regexp=IkiWiki::glob2re($user);
-
- if (defined $params{user} && $params{user}=~$regexp) {
- return IkiWiki::SuccessReason->new("user is $user");
- }
- elsif (! defined $params{user}) {
- return IkiWiki::FailReason->new("not logged in");
- }
- else {
- return IkiWiki::FailReason->new("user is $params{user}, not $user");
- }
-}
-
-sub match_admin ($$;@) {
- shift;
- shift;
- my %params=@_;
-
- if (! exists $params{user}) {
- return IkiWiki::ErrorReason->new("no user specified");
- }
-
- if (defined $params{user} && IkiWiki::is_admin($params{user})) {
- return IkiWiki::SuccessReason->new("user is an admin");
- }
- elsif (! defined $params{user}) {
- return IkiWiki::FailReason->new("not logged in");
- }
- else {
- return IkiWiki::FailReason->new("user is not an admin");
- }
-}
-
-sub match_ip ($$;@) {
- shift;
- my $ip=shift;
- my %params=@_;
-
- if (! exists $params{ip}) {
- return IkiWiki::ErrorReason->new("no IP specified");
- }
-
- my $regexp=IkiWiki::glob2re(lc $ip);
-
- if (defined $params{ip} && lc $params{ip}=~$regexp) {
- return IkiWiki::SuccessReason->new("IP is $ip");
- }
- else {
- return IkiWiki::FailReason->new("IP is $params{ip}, not $ip");
- }
-}
-
-package IkiWiki::SortSpec;
-
-# This is in the SortSpec namespace so that the $a and $b that sort() uses
-# are easily available for the cmp_* functions to use.
-sub sort_pages {
- my $f=shift;
- sort $f @_
-}
-
-sub cmp_title {
- IkiWiki::pagetitle(IkiWiki::basename($a))
- cmp
- IkiWiki::pagetitle(IkiWiki::basename($b))
-}
-
-sub cmp_path { IkiWiki::pagetitle($a) cmp IkiWiki::pagetitle($b) }
-sub cmp_mtime { $IkiWiki::pagemtime{$b} <=> $IkiWiki::pagemtime{$a} }
-sub cmp_age { $IkiWiki::pagectime{$b} <=> $IkiWiki::pagectime{$a} }
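
Because sortspec_translate resolves a sort type name like "title" to a cmp_title sub in this package, a plugin can add a new sort type simply by defining another cmp_* sub here, comparing the $a and $b that sort() sets. A hypothetical sort=length type, with a local stand-in for sort_pages so the sketch is self-contained:

	package IkiWiki::SortSpec;
	# Hypothetical extra sort type: a sortspec of "length" would resolve here.
	sub cmp_length { length($a) <=> length($b) }
	sub demo_sort { my $f = shift; sort $f @_ }   # mirrors sort_pages above

	package main;
	print join(" ", IkiWiki::SortSpec::demo_sort(
		\&IkiWiki::SortSpec::cmp_length, qw(ccc a bb))), "\n";   # a bb ccc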
-
-1
diff -Nru ikiwiki-3.20141016.4+deb8u1/.pc/CVE-2019-9187-2.patch/IkiWiki.pm ikiwiki-3.20141016.4/.pc/CVE-2019-9187-2.patch/IkiWiki.pm
--- ikiwiki-3.20141016.4+deb8u1/.pc/CVE-2019-9187-2.patch/IkiWiki.pm 2019-03-07 17:32:38.000000000 +1100
+++ ikiwiki-3.20141016.4/.pc/CVE-2019-9187-2.patch/IkiWiki.pm 1970-01-01 10:00:00.000000000 +1000
@@ -1,3031 +0,0 @@
-#!/usr/bin/perl
-
-package IkiWiki;
-
-use warnings;
-use strict;
-use Encode;
-use URI::Escape q{uri_escape_utf8};
-use POSIX ();
-use Storable;
-use open qw{:utf8 :std};
-
-use vars qw{%config %links %oldlinks %pagemtime %pagectime %pagecase
- %pagestate %wikistate %renderedfiles %oldrenderedfiles
- %pagesources %delpagesources %destsources %depends %depends_simple
- @mass_depends %hooks %forcerebuild %loaded_plugins %typedlinks
- %oldtypedlinks %autofiles @underlayfiles $lastrev $phase};
-
-use Exporter q{import};
-our @EXPORT = qw(hook debug error htmlpage template template_depends
- deptype add_depends pagespec_match pagespec_match_list bestlink
- htmllink readfile writefile pagetype srcfile pagename
- displaytime strftime_utf8 will_render gettext ngettext urlto targetpage
- add_underlay pagetitle titlepage linkpage newpagefile
- inject add_link add_autofile useragent
- %config %links %pagestate %wikistate %renderedfiles
- %pagesources %destsources %typedlinks);
-our $VERSION = 3.00; # plugin interface version, next is ikiwiki version
-our $version='unknown'; # VERSION_AUTOREPLACE done by Makefile, DNE
-our $installdir='/usr'; # INSTALLDIR_AUTOREPLACE done by Makefile, DNE
-
-# Page dependency types.
-our $DEPEND_CONTENT=1;
-our $DEPEND_PRESENCE=2;
-our $DEPEND_LINKS=4;
-
-# Phases of processing.
-sub PHASE_SCAN () { 0 }
-sub PHASE_RENDER () { 1 }
-$phase = PHASE_SCAN;
-
-# Optimisation.
-use Memoize;
-memoize("abs2rel");
-memoize("sortspec_translate");
-memoize("pagespec_translate");
-memoize("template_file");
-
-sub getsetup () {
- wikiname => {
- type => "string",
- default => "wiki",
- description => "name of the wiki",
- safe => 1,
- rebuild => 1,
- },
- adminemail => {
- type => "string",
- default => undef,
- example => 'me@example.com',
- description => "contact email for wiki",
- safe => 1,
- rebuild => 0,
- },
- adminuser => {
- type => "string",
- default => [],
- description => "users who are wiki admins",
- safe => 1,
- rebuild => 0,
- },
- banned_users => {
- type => "string",
- default => [],
- description => "users who are banned from the wiki",
- safe => 1,
- rebuild => 0,
- },
- srcdir => {
- type => "string",
- default => undef,
- example => "$ENV{HOME}/wiki",
- description => "where the source of the wiki is located",
- safe => 0, # path
- rebuild => 1,
- },
- destdir => {
- type => "string",
- default => undef,
- example => "/var/www/wiki",
- description => "where to build the wiki",
- safe => 0, # path
- rebuild => 1,
- },
- url => {
- type => "string",
- default => '',
- example => "http://example.com/wiki",
- description => "base url to the wiki",
- safe => 1,
- rebuild => 1,
- },
- cgiurl => {
- type => "string",
- default => '',
- example => "http://example.com/wiki/ikiwiki.cgi",
- description => "url to the ikiwiki.cgi",
- safe => 1,
- rebuild => 1,
- },
- reverse_proxy => {
- type => "boolean",
- default => 0,
- description => "do not adjust cgiurl if CGI is accessed via different URL",
- advanced => 0,
- safe => 1,
- rebuild => 0, # only affects CGI requests
- },
- cgi_wrapper => {
- type => "string",
- default => '',
- example => "/var/www/wiki/ikiwiki.cgi",
- description => "filename of cgi wrapper to generate",
- safe => 0, # file
- rebuild => 0,
- },
- cgi_wrappermode => {
- type => "string",
- default => '06755',
- description => "mode for cgi_wrapper (can safely be made suid)",
- safe => 0,
- rebuild => 0,
- },
- cgi_overload_delay => {
- type => "string",
- default => '',
- example => "10",
- description => "number of seconds to delay CGI requests when overloaded",
- safe => 1,
- rebuild => 0,
- },
- cgi_overload_message => {
- type => "string",
- default => '',
- example => "Please wait",
- description => "message to display when overloaded (may contain html)",
- safe => 1,
- rebuild => 0,
- },
- only_committed_changes => {
- type => "boolean",
- default => 0,
- description => "enable optimization of only refreshing committed changes?",
- safe => 1,
- rebuild => 0,
- },
- rcs => {
- type => "string",
- default => '',
- description => "rcs backend to use",
- safe => 0, # don't allow overriding
- rebuild => 0,
- },
- default_plugins => {
- type => "internal",
- default => [qw{mdwn link inline meta htmlscrubber passwordauth
- openid signinedit lockedit conditional
- recentchanges parentlinks editpage
- templatebody}],
- description => "plugins to enable by default",
- safe => 0,
- rebuild => 1,
- },
- add_plugins => {
- type => "string",
- default => [],
- description => "plugins to add to the default configuration",
- safe => 1,
- rebuild => 1,
- },
- disable_plugins => {
- type => "string",
- default => [],
- description => "plugins to disable",
- safe => 1,
- rebuild => 1,
- },
- templatedir => {
- type => "string",
- default => "$installdir/share/ikiwiki/templates",
- description => "additional directory to search for template files",
- advanced => 1,
- safe => 0, # path
- rebuild => 1,
- },
- underlaydir => {
- type => "string",
- default => "$installdir/share/ikiwiki/basewiki",
- description => "base wiki source location",
- advanced => 1,
- safe => 0, # path
- rebuild => 0,
- },
- underlaydirbase => {
- type => "internal",
- default => "$installdir/share/ikiwiki",
- description => "parent directory containing additional underlays",
- safe => 0,
- rebuild => 0,
- },
- wrappers => {
- type => "internal",
- default => [],
- description => "wrappers to generate",
- safe => 0,
- rebuild => 0,
- },
- underlaydirs => {
- type => "internal",
- default => [],
- description => "additional underlays to use",
- safe => 0,
- rebuild => 0,
- },
- verbose => {
- type => "boolean",
- example => 1,
- description => "display verbose messages?",
- safe => 1,
- rebuild => 0,
- },
- syslog => {
- type => "boolean",
- example => 1,
- description => "log to syslog?",
- safe => 1,
- rebuild => 0,
- },
- usedirs => {
- type => "boolean",
- default => 1,
- description => "create output files named page/index.html?",
- safe => 0, # changing requires manual transition
- rebuild => 1,
- },
- prefix_directives => {
- type => "boolean",
- default => 1,
- description => "use '!'-prefixed preprocessor directives?",
- safe => 0, # changing requires manual transition
- rebuild => 1,
- },
- indexpages => {
- type => "boolean",
- default => 0,
- description => "use page/index.mdwn source files",
- safe => 1,
- rebuild => 1,
- },
- discussion => {
- type => "boolean",
- default => 1,
- description => "enable Discussion pages?",
- safe => 1,
- rebuild => 1,
- },
- discussionpage => {
- type => "string",
- default => gettext("Discussion"),
- description => "name of Discussion pages",
- safe => 1,
- rebuild => 1,
- },
- html5 => {
- type => "boolean",
- default => 0,
- description => "generate HTML5?",
- advanced => 0,
- safe => 1,
- rebuild => 1,
- },
- sslcookie => {
- type => "boolean",
- default => 0,
- description => "only send cookies over SSL connections?",
- advanced => 1,
- safe => 1,
- rebuild => 0,
- },
- default_pageext => {
- type => "string",
- default => "mdwn",
- description => "extension to use for new pages",
- safe => 0, # not sanitized
- rebuild => 0,
- },
- htmlext => {
- type => "string",
- default => "html",
- description => "extension to use for html files",
- safe => 0, # not sanitized
- rebuild => 1,
- },
- timeformat => {
- type => "string",
- default => '%c',
- description => "strftime format string to display date",
- advanced => 1,
- safe => 1,
- rebuild => 1,
- },
- locale => {
- type => "string",
- default => undef,
- example => "en_US.UTF-8",
- description => "UTF-8 locale to use",
- advanced => 1,
- safe => 0,
- rebuild => 1,
- },
- userdir => {
- type => "string",
- default => "",
- example => "users",
- description => "put user pages below specified page",
- safe => 1,
- rebuild => 1,
- },
- numbacklinks => {
- type => "integer",
- default => 10,
- description => "how many backlinks to show before hiding excess (0 to show all)",
- safe => 1,
- rebuild => 1,
- },
- hardlink => {
- type => "boolean",
- default => 0,
- description => "attempt to hardlink source files? (optimisation for large files)",
- advanced => 1,
- safe => 0, # paranoia
- rebuild => 0,
- },
- umask => {
- type => "string",
- example => "public",
- description => "force ikiwiki to use a particular umask (keywords public, group or private, or a number)",
- advanced => 1,
- safe => 0, # paranoia
- rebuild => 0,
- },
- wrappergroup => {
- type => "string",
- example => "ikiwiki",
- description => "group for wrappers to run in",
- advanced => 1,
- safe => 0, # paranoia
- rebuild => 0,
- },
- libdir => {
- type => "string",
- default => "",
- example => "$ENV{HOME}/.ikiwiki/",
- description => "extra library and plugin directory",
- advanced => 1,
- safe => 0, # directory
- rebuild => 0,
- },
- ENV => {
- type => "string",
- default => {},
- description => "environment variables",
- safe => 0, # paranoia
- rebuild => 0,
- },
- timezone => {
- type => "string",
- default => "",
- example => "US/Eastern",
- description => "time zone name",
- safe => 1,
- rebuild => 1,
- },
- include => {
- type => "string",
- default => undef,
- example => '^\.htaccess$',
- description => "regexp of normally excluded files to include",
- advanced => 1,
- safe => 0, # regexp
- rebuild => 1,
- },
- exclude => {
- type => "string",
- default => undef,
-		example => '^(.*\.private|Makefile)$',
- description => "regexp of files that should be skipped",
- advanced => 1,
- safe => 0, # regexp
- rebuild => 1,
- },
- wiki_file_prune_regexps => {
- type => "internal",
- default => [qr/(^|\/)\.\.(\/|$)/, qr/^\//, qr/^\./, qr/\/\./,
- qr/\.x?html?$/, qr/\.ikiwiki-new$/,
- qr/(^|\/).svn\//, qr/.arch-ids\//, qr/{arch}\//,
- qr/(^|\/)_MTN\//, qr/(^|\/)_darcs\//,
- qr/(^|\/)CVS\//, qr/\.dpkg-tmp$/],
- description => "regexps of source files to ignore",
- safe => 0,
- rebuild => 1,
- },
- wiki_file_chars => {
- type => "string",
- description => "specifies the characters that are allowed in source filenames",
- default => "-[:alnum:]+/.:_",
- safe => 0,
- rebuild => 1,
- },
- wiki_file_regexp => {
- type => "internal",
- description => "regexp of legal source files",
- safe => 0,
- rebuild => 1,
- },
- web_commit_regexp => {
- type => "internal",
- default => qr/^web commit (by (.*?(?=: |$))|from ([0-9a-fA-F:.]+[0-9a-fA-F])):?(.*)/,
- description => "regexp to parse web commits from logs",
- safe => 0,
- rebuild => 0,
- },
- cgi => {
- type => "internal",
- default => 0,
- description => "run as a cgi",
- safe => 0,
- rebuild => 0,
- },
- cgi_disable_uploads => {
- type => "internal",
- default => 1,
- description => "whether CGI should accept file uploads",
- safe => 0,
- rebuild => 0,
- },
- post_commit => {
- type => "internal",
- default => 0,
- description => "run as a post-commit hook",
- safe => 0,
- rebuild => 0,
- },
- rebuild => {
- type => "internal",
- default => 0,
- description => "running in rebuild mode",
- safe => 0,
- rebuild => 0,
- },
- setup => {
- type => "internal",
- default => undef,
- description => "running in setup mode",
- safe => 0,
- rebuild => 0,
- },
- clean => {
- type => "internal",
- default => 0,
- description => "running in clean mode",
- safe => 0,
- rebuild => 0,
- },
- refresh => {
- type => "internal",
- default => 0,
- description => "running in refresh mode",
- safe => 0,
- rebuild => 0,
- },
- test_receive => {
- type => "internal",
- default => 0,
- description => "running in receive test mode",
- safe => 0,
- rebuild => 0,
- },
- wrapper_background_command => {
- type => "internal",
- default => '',
- description => "background shell command to run",
- safe => 0,
- rebuild => 0,
- },
- gettime => {
- type => "internal",
- description => "running in gettime mode",
- safe => 0,
- rebuild => 0,
- },
- w3mmode => {
- type => "internal",
- default => 0,
- description => "running in w3mmode",
- safe => 0,
- rebuild => 0,
- },
- wikistatedir => {
- type => "internal",
- default => undef,
- description => "path to the .ikiwiki directory holding ikiwiki state",
- safe => 0,
- rebuild => 0,
- },
- setupfile => {
- type => "internal",
- default => undef,
- description => "path to setup file",
- safe => 0,
- rebuild => 0,
- },
- setuptype => {
- type => "internal",
- default => "Yaml",
- description => "perl class to use to dump setup file",
- safe => 0,
- rebuild => 0,
- },
- allow_symlinks_before_srcdir => {
- type => "boolean",
- default => 0,
- description => "allow symlinks in the path leading to the srcdir (potentially insecure)",
- safe => 0,
- rebuild => 0,
- },
- cookiejar => {
- type => "string",
- default => { file => "$ENV{HOME}/.ikiwiki/cookies" },
- description => "cookie control",
- safe => 0, # hooks into perl module internals
- rebuild => 0,
- },
- useragent => {
- type => "string",
- default => "ikiwiki/$version",
- example => "Wget/1.13.4 (linux-gnu)",
- description => "set custom user agent string for outbound HTTP requests e.g. when fetching aggregated RSS feeds",
- safe => 0,
- rebuild => 0,
- },
-}
-
-sub defaultconfig () {
- my %s=getsetup();
- my @ret;
- foreach my $key (keys %s) {
- push @ret, $key, $s{$key}->{default};
- }
- return @ret;
-}
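-
-# For illustration (a hypothetical setup loader, not a call made in this
-# file), the defaults can seed %config before site-specific overrides:
-#
-#	%config = defaultconfig();
-#	$config{wikiname} = "demo wiki";	# hypothetical override
-#	checkconfig();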
-
-# URL to top of wiki as a path starting with /, valid from any wiki page or
-# the CGI; if that's not possible, an absolute URL. Either way, it ends with /
-my $local_url;
-# URL to CGI script, similar to $local_url
-my $local_cgiurl;
-
-sub checkconfig () {
- # locale stuff; avoid LC_ALL since it overrides everything
- if (defined $ENV{LC_ALL}) {
- $ENV{LANG} = $ENV{LC_ALL};
- delete $ENV{LC_ALL};
- }
- if (defined $config{locale}) {
- if (POSIX::setlocale(&POSIX::LC_ALL, $config{locale})) {
- $ENV{LANG}=$config{locale};
- define_gettext();
- }
- }
-
- if (! defined $config{wiki_file_regexp}) {
- $config{wiki_file_regexp}=qr/(^[$config{wiki_file_chars}]+$)/;
- }
-
- if (ref $config{ENV} eq 'HASH') {
- foreach my $val (keys %{$config{ENV}}) {
- $ENV{$val}=$config{ENV}{$val};
- }
- }
- if (defined $config{timezone} && length $config{timezone}) {
- $ENV{TZ}=$config{timezone};
- }
- else {
- $config{timezone}=$ENV{TZ};
- }
-
- if ($config{w3mmode}) {
- eval q{use Cwd q{abs_path}};
- error($@) if $@;
- $config{srcdir}=possibly_foolish_untaint(abs_path($config{srcdir}));
- $config{destdir}=possibly_foolish_untaint(abs_path($config{destdir}));
- $config{cgiurl}="file:///\$LIB/ikiwiki-w3m.cgi/".$config{cgiurl}
- unless $config{cgiurl} =~ m!file:///!;
- $config{url}="file://".$config{destdir};
- }
-
- if ($config{cgi} && ! length $config{url}) {
- error(gettext("Must specify url to wiki with --url when using --cgi"));
- }
-
- if (defined $config{url} && length $config{url}) {
- eval q{use URI};
- my $baseurl = URI->new($config{url});
-
- $local_url = $baseurl->path . "/";
- $local_cgiurl = undef;
-
- if (length $config{cgiurl}) {
- my $cgiurl = URI->new($config{cgiurl});
-
- $local_cgiurl = $cgiurl->path;
-
- if ($cgiurl->scheme eq 'https' &&
- $baseurl->scheme eq 'http') {
- # We assume that the same content is available
- # over both http and https, because if it
- # wasn't, accessing the static content
- # from the CGI would be mixed-content,
- # which would be a security flaw.
-
- if ($cgiurl->authority ne $baseurl->authority) {
- # use protocol-relative URL for
- # static content
- $local_url = "$config{url}/";
- $local_url =~ s{^http://}{//};
- }
- # else use host-relative URL for static content
-
- # either way, CGI needs to be absolute
- $local_cgiurl = $config{cgiurl};
- }
- elsif ($cgiurl->scheme ne $baseurl->scheme) {
- # too far apart, fall back to absolute URLs
- $local_url = "$config{url}/";
- $local_cgiurl = $config{cgiurl};
- }
- elsif ($cgiurl->authority ne $baseurl->authority) {
- # slightly too far apart, fall back to
- # protocol-relative URLs
- $local_url = "$config{url}/";
- $local_url =~ s{^https?://}{//};
- $local_cgiurl = $config{cgiurl};
- $local_cgiurl =~ s{^https?://}{//};
- }
- # else keep host-relative URLs
- }
-
- $local_url =~ s{//$}{/};
- }
- else {
- $local_cgiurl = $config{cgiurl};
- }
-
- $config{wikistatedir}="$config{srcdir}/.ikiwiki"
- unless exists $config{wikistatedir} && defined $config{wikistatedir};
-
- if (defined $config{umask}) {
- my $u = possibly_foolish_untaint($config{umask});
-
- if ($u =~ m/^\d+$/) {
- umask($u);
- }
- elsif ($u eq 'private') {
- umask(077);
- }
- elsif ($u eq 'group') {
- umask(027);
- }
- elsif ($u eq 'public') {
- umask(022);
- }
- else {
- error(sprintf(gettext("unsupported umask setting %s"), $u));
- }
- }
-
- run_hooks(checkconfig => sub { shift->() });
-
- return 1;
-}
-
-sub listplugins () {
- my %ret;
-
- foreach my $dir (@INC, $config{libdir}) {
- next unless defined $dir && length $dir;
- foreach my $file (glob("$dir/IkiWiki/Plugin/*.pm")) {
- my ($plugin)=$file=~/.*\/(.*)\.pm$/;
- $ret{$plugin}=1;
- }
- }
- foreach my $dir ($config{libdir}, "$installdir/lib/ikiwiki") {
- next unless defined $dir && length $dir;
- foreach my $file (glob("$dir/plugins/*")) {
- $ret{basename($file)}=1 if -x $file;
- }
- }
-
- return keys %ret;
-}
-
-sub loadplugins () {
- if (defined $config{libdir} && length $config{libdir}) {
- unshift @INC, possibly_foolish_untaint($config{libdir});
- }
-
- foreach my $plugin (@{$config{default_plugins}}, @{$config{add_plugins}}) {
- loadplugin($plugin);
- }
-
- if ($config{rcs}) {
- if (exists $hooks{rcs}) {
- error(gettext("cannot use multiple rcs plugins"));
- }
- loadplugin($config{rcs});
- }
- if (! exists $hooks{rcs}) {
- loadplugin("norcs");
- }
-
- run_hooks(getopt => sub { shift->() });
- if (grep /^-/, @ARGV) {
- print STDERR "Unknown option (or missing parameter): $_\n"
- foreach grep /^-/, @ARGV;
- usage();
- }
-
- return 1;
-}
-
-sub loadplugin ($;$) {
- my $plugin=shift;
- my $force=shift;
-
- return if ! $force && grep { $_ eq $plugin} @{$config{disable_plugins}};
-
- foreach my $dir (defined $config{libdir} ? possibly_foolish_untaint($config{libdir}) : undef,
- "$installdir/lib/ikiwiki") {
- if (defined $dir && -x "$dir/plugins/$plugin") {
- eval { require IkiWiki::Plugin::external };
- if ($@) {
- my $reason=$@;
- error(sprintf(gettext("failed to load external plugin needed for %s plugin: %s"), $plugin, $reason));
- }
- import IkiWiki::Plugin::external "$dir/plugins/$plugin";
- $loaded_plugins{$plugin}=1;
- return 1;
- }
- }
-
- my $mod="IkiWiki::Plugin::".possibly_foolish_untaint($plugin);
- eval qq{use $mod};
- if ($@) {
- error("Failed to load plugin $mod: $@");
- }
- $loaded_plugins{$plugin}=1;
- return 1;
-}
-
-sub error ($;$) {
- my $message=shift;
- my $cleaner=shift;
- log_message('err' => $message) if $config{syslog};
- if (defined $cleaner) {
- $cleaner->();
- }
- die $message."\n";
-}
-
-sub debug ($) {
- return unless $config{verbose};
- return log_message(debug => @_);
-}
-
-my $log_open=0;
-my $log_failed=0;
-sub log_message ($$) {
- my $type=shift;
-
- if ($config{syslog}) {
- require Sys::Syslog;
- if (! $log_open) {
- Sys::Syslog::setlogsock('unix');
- Sys::Syslog::openlog('ikiwiki', '', 'user');
- $log_open=1;
- }
- eval {
- # keep a copy to avoid editing the original config repeatedly
- my $wikiname = $config{wikiname};
- utf8::encode($wikiname);
- Sys::Syslog::syslog($type, "[$wikiname] %s", join(" ", @_));
- };
- if ($@) {
- print STDERR "failed to syslog: $@" unless $log_failed;
- $log_failed=1;
- print STDERR "@_\n";
- }
- return $@;
- }
- elsif (! $config{cgi}) {
- return print "@_\n";
- }
- else {
- return print STDERR "@_\n";
- }
-}
-
-sub possibly_foolish_untaint ($) {
- my $tainted=shift;
- my ($untainted)=$tainted=~/(.*)/s;
- return $untainted;
-}
-
-sub basename ($) {
- my $file=shift;
-
- $file=~s!.*/+!!;
- return $file;
-}
-
-sub dirname ($) {
- my $file=shift;
-
- $file=~s!/*[^/]+$!!;
- return $file;
-}
-
-sub isinternal ($) {
- my $page=shift;
- return exists $pagesources{$page} &&
- $pagesources{$page} =~ /\._([^.]+)$/;
-}
-
-sub pagetype ($) {
- my $file=shift;
-
- if ($file =~ /\.([^.]+)$/) {
- return $1 if exists $hooks{htmlize}{$1};
- }
- my $base=basename($file);
- if (exists $hooks{htmlize}{$base} &&
- $hooks{htmlize}{$base}{noextension}) {
- return $base;
- }
- return;
-}
-
-my %pagename_cache;
-
-sub pagename ($) {
- my $file=shift;
-
- if (exists $pagename_cache{$file}) {
- return $pagename_cache{$file};
- }
-
- my $type=pagetype($file);
- my $page=$file;
- $page=~s/\Q.$type\E*$//
- if defined $type && !$hooks{htmlize}{$type}{keepextension}
- && !$hooks{htmlize}{$type}{noextension};
- if ($config{indexpages} && $page=~/(.*)\/index$/) {
- $page=$1;
- }
-
- $pagename_cache{$file} = $page;
- return $page;
-}
-
-sub newpagefile ($$) {
- my $page=shift;
- my $type=shift;
-
- if (! $config{indexpages} || $page eq 'index') {
- return $page.".".$type;
- }
- else {
- return $page."/index.".$type;
- }
-}
-
-sub targetpage ($$;$) {
- my $page=shift;
- my $ext=shift;
- my $filename=shift;
-
- if (defined $filename) {
- return $page."/".$filename.".".$ext;
- }
- elsif (! $config{usedirs} || $page eq 'index') {
- return $page.".".$ext;
- }
- else {
- return $page."/index.".$ext;
- }
-}
-
-sub htmlpage ($) {
- my $page=shift;
-
- return targetpage($page, $config{htmlext});
-}
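-
-# For example, with the default usedirs => 1:
-#	targetpage("posts/hello", "html")	-> "posts/hello/index.html"
-# and with usedirs => 0:
-#	targetpage("posts/hello", "html")	-> "posts/hello.html"
-# (the page name here is purely illustrative)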
-
-sub srcfile_stat {
- my $file=shift;
- my $nothrow=shift;
-
- return "$config{srcdir}/$file", stat(_) if -e "$config{srcdir}/$file";
- foreach my $dir (@{$config{underlaydirs}}, $config{underlaydir}) {
- return "$dir/$file", stat(_) if -e "$dir/$file";
- }
- error("internal error: $file cannot be found in $config{srcdir} or underlay") unless $nothrow;
- return;
-}
-
-sub srcfile ($;$) {
- return (srcfile_stat(@_))[0];
-}
-
-sub add_literal_underlay ($) {
- my $dir=shift;
-
- if (! grep { $_ eq $dir } @{$config{underlaydirs}}) {
- unshift @{$config{underlaydirs}}, $dir;
- }
-}
-
-sub add_underlay ($) {
- my $dir = shift;
-
- if ($dir !~ /^\//) {
- $dir="$config{underlaydirbase}/$dir";
- }
-
- add_literal_underlay($dir);
- # why does it return 1? we just don't know
- return 1;
-}
-
-sub readfile ($;$$) {
- my $file=shift;
- my $binary=shift;
- my $wantfd=shift;
-
- if (-l $file) {
- error("cannot read a symlink ($file)");
- }
-
- local $/=undef;
- open (my $in, "<", $file) || error("failed to read $file: $!");
- binmode($in) if ($binary);
- return \*$in if $wantfd;
- my $ret=<$in>;
- # check for invalid utf-8, and toss it back to avoid crashes
- if (! utf8::valid($ret)) {
- $ret=encode_utf8($ret);
- }
- close $in || error("failed to read $file: $!");
- return $ret;
-}
-
-sub prep_writefile ($$) {
- my $file=shift;
- my $destdir=shift;
-
- my $test=$file;
- while (length $test) {
- if (-l "$destdir/$test") {
- error("cannot write to a symlink ($test)");
- }
- if (-f _ && $test ne $file) {
- # Remove conflicting file.
- foreach my $p (keys %renderedfiles, keys %oldrenderedfiles) {
- foreach my $f (@{$renderedfiles{$p}}, @{$oldrenderedfiles{$p}}) {
- if ($f eq $test) {
- unlink("$destdir/$test");
- last;
- }
- }
- }
- }
- $test=dirname($test);
- }
-
- my $dir=dirname("$destdir/$file");
- if (! -d $dir) {
- my $d="";
- foreach my $s (split(m!/+!, $dir)) {
- $d.="$s/";
- if (! -d $d) {
- mkdir($d) || error("failed to create directory $d: $!");
- }
- }
- }
-
- return 1;
-}
-
-sub writefile ($$$;$$) {
- my $file=shift; # can include subdirs
- my $destdir=shift; # directory to put file in
- my $content=shift;
- my $binary=shift;
- my $writer=shift;
-
- prep_writefile($file, $destdir);
-
- my $newfile="$destdir/$file.ikiwiki-new";
- if (-l $newfile) {
- error("cannot write to a symlink ($newfile)");
- }
-
- my $cleanup = sub { unlink($newfile) };
- open (my $out, '>', $newfile) || error("failed to write $newfile: $!", $cleanup);
- binmode($out) if ($binary);
- if ($writer) {
- $writer->(\*$out, $cleanup);
- }
- else {
- print $out $content or error("failed writing to $newfile: $!", $cleanup);
- }
- close $out || error("failed saving $newfile: $!", $cleanup);
- rename($newfile, "$destdir/$file") ||
- error("failed renaming $newfile to $destdir/$file: $!", $cleanup);
-
- return 1;
-}
-
-my %cleared;
-sub will_render ($$;$) {
- my $page=shift;
- my $dest=shift;
- my $clear=shift;
-
- # Important security check for independently created files.
- if (-e "$config{destdir}/$dest" && ! $config{rebuild} &&
- ! grep { $_ eq $dest } (@{$renderedfiles{$page}}, @{$oldrenderedfiles{$page}}, @{$wikistate{editpage}{previews}})) {
- my $from_other_page=0;
- # Expensive, but rarely runs.
- foreach my $p (keys %renderedfiles, keys %oldrenderedfiles) {
- if (grep {
- $_ eq $dest ||
- dirname($_) eq $dest
- } @{$renderedfiles{$p}}, @{$oldrenderedfiles{$p}}) {
- $from_other_page=1;
- last;
- }
- }
-
- error("$config{destdir}/$dest independently created, not overwriting with version from $page")
- unless $from_other_page;
- }
-
- # If $dest exists as a directory, remove conflicting files in it
- # rendered from other pages.
- if (-d _) {
- foreach my $p (keys %renderedfiles, keys %oldrenderedfiles) {
- foreach my $f (@{$renderedfiles{$p}}, @{$oldrenderedfiles{$p}}) {
- if (dirname($f) eq $dest) {
- unlink("$config{destdir}/$f");
- rmdir(dirname("$config{destdir}/$f"));
- }
- }
- }
- }
-
- if (! $clear || $cleared{$page}) {
- $renderedfiles{$page}=[$dest, grep { $_ ne $dest } @{$renderedfiles{$page}}];
- }
- else {
- foreach my $old (@{$renderedfiles{$page}}) {
- delete $destsources{$old};
- }
- $renderedfiles{$page}=[$dest];
- $cleared{$page}=1;
- }
- $destsources{$dest}=$page;
-
- return 1;
-}
-
-sub bestlink ($$) {
- my $page=shift;
- my $link=shift;
-
- my $cwd=$page;
- if ($link=~s/^\/+//) {
- # absolute links
- $cwd="";
- }
- $link=~s/\/$//;
-
- do {
- my $l=$cwd;
- $l.="/" if length $l;
- $l.=$link;
-
- if (exists $pagesources{$l}) {
- return $l;
- }
- elsif (exists $pagecase{lc $l}) {
- return $pagecase{lc $l};
- }
- } while $cwd=~s{/?[^/]+$}{};
-
- if (length $config{userdir}) {
- my $l = "$config{userdir}/".lc($link);
- if (exists $pagesources{$l}) {
- return $l;
- }
- elsif (exists $pagecase{lc $l}) {
- return $pagecase{lc $l};
- }
- }
-
- #print STDERR "warning: page $page, broken link: $link\n";
- return "";
-}
-
-sub isinlinableimage ($) {
- my $file=shift;
-
- return $file =~ /\.(png|gif|jpg|jpeg|svg)$/i;
-}
-
-sub pagetitle ($;$) {
- my $page=shift;
- my $unescaped=shift;
-
- if ($unescaped) {
- $page=~s/(__(\d+)__|_)/$1 eq '_' ? ' ' : chr($2)/eg;
- }
- else {
- $page=~s/(__(\d+)__|_)/$1 eq '_' ? ' ' : "&#$2;"/eg;
- }
-
- return $page;
-}
-
-sub titlepage ($) {
- my $title=shift;
- # support use w/o %config set
- my $chars = defined $config{wiki_file_chars} ? $config{wiki_file_chars} : "-[:alnum:]+/.:_";
- $title=~s/([^$chars]|_)/$1 eq ' ' ? '_' : "__".ord($1)."__"/eg;
- return $title;
-}
-
-sub linkpage ($) {
- my $link=shift;
- my $chars = defined $config{wiki_file_chars} ? $config{wiki_file_chars} : "-[:alnum:]+/.:_";
- $link=~s/([^$chars])/$1 eq ' ' ? '_' : "__".ord($1)."__"/eg;
- return $link;
-}
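-
-# Illustrative round trip, assuming the default wiki_file_chars:
-#	titlepage("foo bar?")	-> "foo_bar__63__"
-#	pagetitle("foo_bar__63__")	-> "foo bar&#63;"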
-
-sub cgiurl (@) {
- my %params=@_;
-
- my $cgiurl=$local_cgiurl;
-
- if (exists $params{cgiurl}) {
- $cgiurl=$params{cgiurl};
- delete $params{cgiurl};
- }
-
- unless (%params) {
- return $cgiurl;
- }
-
- return $cgiurl."?".
- join("&", map $_."=".uri_escape_utf8($params{$_}), keys %params);
-}
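-
-# For instance (parameter order follows hash ordering, so it can vary):
-#	cgiurl(do => "edit", page => "index")
-# produces something like "/ikiwiki.cgi?do=edit&page=index", relative
-# to whatever $local_cgiurl was computed by checkconfig().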
-
-sub cgiurl_abs (@) {
- eval q{use URI};
- URI->new_abs(cgiurl(@_), $config{cgiurl});
-}
-
-sub baseurl (;$) {
- my $page=shift;
-
- return $local_url if ! defined $page;
-
- $page=htmlpage($page);
- $page=~s/[^\/]+$//;
- $page=~s/[^\/]+\//..\//g;
- return $page;
-}
-
-sub urlabs ($$) {
- my $url=shift;
- my $urlbase=shift;
-
- return $url unless defined $urlbase && length $urlbase;
-
- eval q{use URI};
- URI->new_abs($url, $urlbase)->as_string;
-}
-
-sub abs2rel ($$) {
-	# Work around very inefficient behavior in File::Spec if abs2rel
-	# is passed two relative paths. It's much faster if paths are
-	# absolute! (Debian bug #376658; fixed in Debian unstable now)
- my $path="/".shift;
- my $base="/".shift;
-
- require File::Spec;
- my $ret=File::Spec->abs2rel($path, $base);
- $ret=~s/^// if defined $ret;
- return $ret;
-}
-
-sub displaytime ($;$$) {
- # Plugins can override this function to mark up the time to
- # display.
- my $time=formattime($_[0], $_[1]);
- if ($config{html5}) {
- return '<time datetime="'.date_3339($_[0]).'"'.
- ($_[2] ? ' pubdate="pubdate"' : '').
- '>'.$time.'</time>';
- }
- else {
- return '<span class="date">'.$time.'</span>';
- }
-}
-
-sub formattime ($;$) {
- # Plugins can override this function to format the time.
- my $time=shift;
- my $format=shift;
- if (! defined $format) {
- $format=$config{timeformat};
- }
-
- return strftime_utf8($format, localtime($time));
-}
-
-my $strftime_encoding;
-sub strftime_utf8 {
- # strftime doesn't know about encodings, so make sure
- # its output is properly treated as utf8.
- # Note that this does not handle utf-8 in the format string.
- ($strftime_encoding) = POSIX::setlocale(&POSIX::LC_TIME) =~ m#\.([^@]+)#
- unless defined $strftime_encoding;
- $strftime_encoding
- ? Encode::decode($strftime_encoding, POSIX::strftime(@_))
- : POSIX::strftime(@_);
-}
-
-sub date_3339 ($) {
- my $time=shift;
-
- my $lc_time=POSIX::setlocale(&POSIX::LC_TIME);
- POSIX::setlocale(&POSIX::LC_TIME, "C");
- my $ret=POSIX::strftime("%Y-%m-%dT%H:%M:%SZ", gmtime($time));
- POSIX::setlocale(&POSIX::LC_TIME, $lc_time);
- return $ret;
-}
-
-sub beautify_urlpath ($) {
- my $url=shift;
-
- # Ensure url is not an empty link, and if necessary,
- # add ./ to avoid colon confusion.
- if ($url !~ /^\// && $url !~ /^\.\.?\//) {
- $url="./$url";
- }
-
- if ($config{usedirs}) {
-		$url =~ s!/index\.$config{htmlext}$!/!;
- }
-
- return $url;
-}
-
-sub urlto ($;$$) {
- my $to=shift;
- my $from=shift;
- my $absolute=shift;
-
- if (! length $to) {
- $to = 'index';
- }
-
- if (! $destsources{$to}) {
- $to=htmlpage($to);
- }
-
- if ($absolute) {
- return $config{url}.beautify_urlpath("/".$to);
- }
-
- if (! defined $from) {
- my $u = $local_url || '';
- $u =~ s{/$}{};
- return $u.beautify_urlpath("/".$to);
- }
-
- my $link = abs2rel($to, dirname(htmlpage($from)));
-
- return beautify_urlpath($link);
-}
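-
-# Illustrative calls, assuming usedirs and hypothetical page names:
-#	urlto("posts/hello", "posts/other")	-> "../hello/"
-#	urlto("posts/hello", undef, 1)	-> "$config{url}/posts/hello/"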
-
-sub isselflink ($$) {
- # Plugins can override this function to support special types
- # of selflinks.
- my $page=shift;
- my $link=shift;
-
- return $page eq $link;
-}
-
-sub htmllink ($$$;@) {
- my $lpage=shift; # the page doing the linking
- my $page=shift; # the page that will contain the link (different for inline)
- my $link=shift;
- my %opts=@_;
-
- $link=~s/\/$//;
-
- my $bestlink;
- if (! $opts{forcesubpage}) {
- $bestlink=bestlink($lpage, $link);
- }
- else {
- $bestlink="$lpage/".lc($link);
- }
-
- my $linktext;
- if (defined $opts{linktext}) {
- $linktext=$opts{linktext};
- }
- else {
- $linktext=pagetitle(basename($link));
- }
-
- return "<span class=\"selflink\">$linktext</span>"
- if length $bestlink && isselflink($page, $bestlink) &&
- ! defined $opts{anchor};
-
- if (! $destsources{$bestlink}) {
- $bestlink=htmlpage($bestlink);
-
- if (! $destsources{$bestlink}) {
- my $cgilink = "";
- if (length $config{cgiurl}) {
- $cgilink = "<a href=\"".
- cgiurl(
- do => "create",
- page => $link,
- from => $lpage
- )."\" rel=\"nofollow\">?</a>";
- }
- return "<span class=\"createlink\">$cgilink$linktext</span>"
- }
- }
-
- $bestlink=abs2rel($bestlink, dirname(htmlpage($page)));
- $bestlink=beautify_urlpath($bestlink);
-
- if (! $opts{noimageinline} && isinlinableimage($bestlink)) {
- return "<img src=\"$bestlink\" alt=\"$linktext\" />";
- }
-
- if (defined $opts{anchor}) {
- $bestlink.="#".$opts{anchor};
- }
-
- my @attrs;
- foreach my $attr (qw{rel class title}) {
- if (defined $opts{$attr}) {
- push @attrs, " $attr=\"$opts{$attr}\"";
- }
- }
-
- return "<a href=\"$bestlink\"@attrs>$linktext</a>";
-}
-
-sub userpage ($) {
- my $user=shift;
- return length $config{userdir} ? "$config{userdir}/$user" : $user;
-}
-
-sub openiduser ($) {
- my $user=shift;
-
- if (defined $user && $user =~ m!^https?://! &&
- eval q{use Net::OpenID::VerifiedIdentity; 1} && !$@) {
- my $display;
-
- if (Net::OpenID::VerifiedIdentity->can("DisplayOfURL")) {
- $display = Net::OpenID::VerifiedIdentity::DisplayOfURL($user);
- }
- else {
- # backcompat with old version
- my $oid=Net::OpenID::VerifiedIdentity->new(identity => $user);
- $display=$oid->display;
- }
-
- # Convert "user.somehost.com" to "user [somehost.com]"
- # (also "user.somehost.co.uk")
- if ($display !~ /\[/) {
- $display=~s/^([-a-zA-Z0-9]+?)\.([-.a-zA-Z0-9]+\.[a-z]+)$/$1 [$2]/;
- }
- # Convert "http://somehost.com/user" to "user [somehost.com]".
- # (also "https://somehost.com/user/")
- if ($display !~ /\[/) {
- $display=~s/^https?:\/\/(.+)\/([^\/#?]+)\/?(?:[#?].*)?$/$2 [$1]/;
- }
- $display=~s!^https?://!!; # make sure this is removed
- eval q{use CGI 'escapeHTML'};
- error($@) if $@;
- return escapeHTML($display);
- }
- return;
-}
-
-sub htmlize ($$$$) {
- my $page=shift;
- my $destpage=shift;
- my $type=shift;
- my $content=shift;
-
- my $oneline = $content !~ /\n/;
-
- if (exists $hooks{htmlize}{$type}) {
- $content=$hooks{htmlize}{$type}{call}->(
- page => $page,
- content => $content,
- );
- }
- else {
- error("htmlization of $type not supported");
- }
-
- run_hooks(sanitize => sub {
- $content=shift->(
- page => $page,
- destpage => $destpage,
- content => $content,
- );
- });
-
- if ($oneline) {
- # hack to get rid of enclosing junk added by markdown
- # and other htmlizers/sanitizers
- $content=~s/^<p>//i;
- $content=~s/<\/p>\n*$//i;
- }
-
- return $content;
-}
-
-sub linkify ($$$) {
- my $page=shift;
- my $destpage=shift;
- my $content=shift;
-
- run_hooks(linkify => sub {
- $content=shift->(
- page => $page,
- destpage => $destpage,
- content => $content,
- );
- });
-
- return $content;
-}
-
-our %preprocessing;
-our $preprocess_preview=0;
-sub preprocess ($$$;$$) {
- my $page=shift; # the page the data comes from
- my $destpage=shift; # the page the data will appear in (different for inline)
- my $content=shift;
- my $scan=shift;
- my $preview=shift;
-
- # Using local because it needs to be set within any nested calls
- # of this function.
- local $preprocess_preview=$preview if defined $preview;
-
- my $handle=sub {
- my $escape=shift;
- my $prefix=shift;
- my $command=shift;
- my $params=shift;
- $params="" if ! defined $params;
-
- if (length $escape) {
- return "[[$prefix$command $params]]";
- }
- elsif (exists $hooks{preprocess}{$command}) {
- return "" if $scan && ! $hooks{preprocess}{$command}{scan};
- # Note: preserve order of params, some plugins may
- # consider it significant.
- my @params;
- while ($params =~ m{
- (?:([-.\w]+)=)? # 1: named parameter key?
- (?:
- """(.*?)""" # 2: triple-quoted value
- |
- "([^"]*?)" # 3: single-quoted value
- |
- '''(.*?)''' # 4: triple-single-quote
- |
- <<([a-zA-Z]+)\n # 5: heredoc start
- (.*?)\n\5 # 6: heredoc value
- |
- (\S+) # 7: unquoted value
- )
- (?:\s+|$) # delimiter to next param
- }msgx) {
- my $key=$1;
- my $val;
- if (defined $2) {
- $val=$2;
- $val=~s/\r\n/\n/mg;
- $val=~s/^\n+//g;
- $val=~s/\n+$//g;
- }
- elsif (defined $3) {
- $val=$3;
- }
- elsif (defined $4) {
- $val=$4;
- }
- elsif (defined $7) {
- $val=$7;
- }
- elsif (defined $6) {
- $val=$6;
- }
-
- if (defined $key) {
- push @params, $key, $val;
- }
- else {
- push @params, $val, '';
- }
- }
- if ($preprocessing{$page}++ > 8) {
- # Avoid loops of preprocessed pages preprocessing
- # other pages that preprocess them, etc.
- return "[[!$command <span class=\"error\">".
- sprintf(gettext("preprocessing loop detected on %s at depth %i"),
- $page, $preprocessing{$page}).
- "</span>]]";
- }
- my $ret;
- if (! $scan) {
- $ret=eval {
- $hooks{preprocess}{$command}{call}->(
- @params,
- page => $page,
- destpage => $destpage,
- preview => $preprocess_preview,
- );
- };
- if ($@) {
- my $error=$@;
- chomp $error;
- eval q{use HTML::Entities};
- $error = encode_entities($error);
- $ret="[[!$command <span class=\"error\">".
- gettext("Error").": $error"."</span>]]";
- }
- }
- else {
- # use void context during scan pass
- eval {
- $hooks{preprocess}{$command}{call}->(
- @params,
- page => $page,
- destpage => $destpage,
- preview => $preprocess_preview,
- );
- };
- $ret="";
- }
- $preprocessing{$page}--;
- return $ret;
- }
- else {
- return "[[$prefix$command $params]]";
- }
- };
-
- my $regex;
- if ($config{prefix_directives}) {
- $regex = qr{
- (\\?) # 1: escape?
- \[\[(!) # directive open; 2: prefix
- ([-\w]+) # 3: command
- ( # 4: the parameters..
- \s+ # Must have space if parameters present
- (?:
- (?:[-.\w]+=)? # named parameter key?
- (?:
- """.*?""" # triple-quoted value
- |
- "[^"]*?" # single-quoted value
- |
- '''.*?''' # triple-single-quote
- |
- <<([a-zA-Z]+)\n # 5: heredoc start
- (?:.*?)\n\5 # heredoc value
- |
- [^"\s\]]+ # unquoted value
- )
- \s* # whitespace or end
- # of directive
- )
- *)? # 0 or more parameters
- \]\] # directive closed
- }sx;
- }
- else {
- $regex = qr{
- (\\?) # 1: escape?
- \[\[(!?) # directive open; 2: optional prefix
- ([-\w]+) # 3: command
- \s+
- ( # 4: the parameters..
- (?:
- (?:[-.\w]+=)? # named parameter key?
- (?:
- """.*?""" # triple-quoted value
- |
- "[^"]*?" # single-quoted value
- |
- '''.*?''' # triple-single-quote
- |
- <<([a-zA-Z]+)\n # 5: heredoc start
- (?:.*?)\n\5 # heredoc value
- |
- [^"\s\]]+ # unquoted value
- )
- \s* # whitespace or end
- # of directive
- )
- *) # 0 or more parameters
- \]\] # directive closed
- }sx;
- }
-
- $content =~ s{$regex}{$handle->($1, $2, $3, $4)}eg;
- return $content;
-}
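-
-# As an example, with prefix_directives enabled, a source page containing
-#	[[!meta title="My page"]]
-# is matched with $2 = "!", $3 = "meta" and $4 = 'title="My page"',
-# and dispatched to the preprocess hook that the meta plugin registered.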
-
-sub filter ($$$) {
- my $page=shift;
- my $destpage=shift;
- my $content=shift;
-
- run_hooks(filter => sub {
- $content=shift->(page => $page, destpage => $destpage,
- content => $content);
- });
-
- return $content;
-}
-
-sub check_canedit ($$$;$) {
- my $page=shift;
- my $q=shift;
- my $session=shift;
- my $nonfatal=shift;
-
- my $canedit;
- run_hooks(canedit => sub {
- return if defined $canedit;
- my $ret=shift->($page, $q, $session);
- if (defined $ret) {
- if ($ret eq "") {
- $canedit=1;
- }
- elsif (ref $ret eq 'CODE') {
- $ret->() unless $nonfatal;
- $canedit=0;
- }
- elsif (defined $ret) {
- error($ret) unless $nonfatal;
- $canedit=0;
- }
- }
- });
- return defined $canedit ? $canedit : 1;
-}
-
-sub check_content (@) {
- my %params=@_;
-
- return 1 if ! exists $hooks{checkcontent}; # optimisation
-
- if (exists $pagesources{$params{page}}) {
- my @diff;
- my %old=map { $_ => 1 }
- split("\n", readfile(srcfile($pagesources{$params{page}})));
- foreach my $line (split("\n", $params{content})) {
- push @diff, $line if ! exists $old{$line};
- }
- $params{diff}=join("\n", @diff);
- }
-
- my $ok;
- run_hooks(checkcontent => sub {
- return if defined $ok;
- my $ret=shift->(%params);
- if (defined $ret) {
- if ($ret eq "") {
- $ok=1;
- }
- elsif (ref $ret eq 'CODE') {
- $ret->() unless $params{nonfatal};
- $ok=0;
- }
- elsif (defined $ret) {
- error($ret) unless $params{nonfatal};
- $ok=0;
- }
- }
-
- });
- return defined $ok ? $ok : 1;
-}
-
-sub check_canchange (@) {
- my %params = @_;
- my $cgi = $params{cgi};
- my $session = $params{session};
- my @changes = @{$params{changes}};
-
- my %newfiles;
- foreach my $change (@changes) {
- # This untaint is safe because we check file_pruned and
- # wiki_file_regexp.
- my ($file)=$change->{file}=~/$config{wiki_file_regexp}/;
- $file=possibly_foolish_untaint($file);
- if (! defined $file || ! length $file ||
- file_pruned($file)) {
-			error(sprintf(gettext("bad file name %s"), $file));
- }
-
- my $type=pagetype($file);
- my $page=pagename($file) if defined $type;
-
- if ($change->{action} eq 'add') {
- $newfiles{$file}=1;
- }
-
- if ($change->{action} eq 'change' ||
- $change->{action} eq 'add') {
- if (defined $page) {
- check_canedit($page, $cgi, $session);
- next;
- }
- else {
- if (IkiWiki::Plugin::attachment->can("check_canattach")) {
- IkiWiki::Plugin::attachment::check_canattach($session, $file, $change->{path});
- check_canedit($file, $cgi, $session);
- next;
- }
- }
- }
- elsif ($change->{action} eq 'remove') {
- # check_canremove tests to see if the file is present
- # on disk. This will fail when a single commit adds a
- # file and then removes it again. Avoid the problem
- # by not testing the removal in such pairs of changes.
- # (The add is still tested, just to make sure that
- # no data is added to the repo that a web edit
- # could not add.)
- next if $newfiles{$file};
-
- if (IkiWiki::Plugin::remove->can("check_canremove")) {
- IkiWiki::Plugin::remove::check_canremove(defined $page ? $page : $file, $cgi, $session);
- check_canedit(defined $page ? $page : $file, $cgi, $session);
- next;
- }
- }
- else {
- error "unknown action ".$change->{action};
- }
-
- error sprintf(gettext("you are not allowed to change %s"), $file);
- }
-}
-
-
-my $wikilock;
-
-sub lockwiki () {
- # Take an exclusive lock on the wiki to prevent multiple concurrent
- # run issues. The lock will be dropped on program exit.
- if (! -d $config{wikistatedir}) {
- mkdir($config{wikistatedir});
- }
- open($wikilock, '>', "$config{wikistatedir}/lockfile") ||
- error ("cannot write to $config{wikistatedir}/lockfile: $!");
- if (! flock($wikilock, 2)) { # LOCK_EX
- error("failed to get lock");
- }
- return 1;
-}
-
-sub unlockwiki () {
- POSIX::close($ENV{IKIWIKI_CGILOCK_FD}) if exists $ENV{IKIWIKI_CGILOCK_FD};
- return close($wikilock) if $wikilock;
- return;
-}
-
-my $commitlock;
-
-sub commit_hook_enabled () {
- open($commitlock, '+>', "$config{wikistatedir}/commitlock") ||
- error("cannot write to $config{wikistatedir}/commitlock: $!");
- if (! flock($commitlock, 1 | 4)) { # LOCK_SH | LOCK_NB to test
- close($commitlock) || error("failed closing commitlock: $!");
- return 0;
- }
- close($commitlock) || error("failed closing commitlock: $!");
- return 1;
-}
-
-sub disable_commit_hook () {
- open($commitlock, '>', "$config{wikistatedir}/commitlock") ||
- error("cannot write to $config{wikistatedir}/commitlock: $!");
- if (! flock($commitlock, 2)) { # LOCK_EX
- error("failed to get commit lock");
- }
- return 1;
-}
-
-sub enable_commit_hook () {
- return close($commitlock) if $commitlock;
- return;
-}
-
-sub loadindex () {
- %oldrenderedfiles=%pagectime=();
- my $rebuild=$config{rebuild};
- if (! $rebuild) {
- %pagesources=%pagemtime=%oldlinks=%links=%depends=
- %destsources=%renderedfiles=%pagecase=%pagestate=
- %depends_simple=%typedlinks=%oldtypedlinks=();
- }
- my $in;
- if (! open ($in, "<", "$config{wikistatedir}/indexdb")) {
- if (-e "$config{wikistatedir}/index") {
- system("ikiwiki-transition", "indexdb", $config{srcdir});
- open ($in, "<", "$config{wikistatedir}/indexdb") || return;
- }
- else {
- # gettime on first build
- $config{gettime}=1 unless defined $config{gettime};
- return;
- }
- }
-
- my $index=Storable::fd_retrieve($in);
- if (! defined $index) {
- return 0;
- }
-
- my $pages;
- if (exists $index->{version} && ! ref $index->{version}) {
- $pages=$index->{page};
- %wikistate=%{$index->{state}};
- # Handle plugins that got disabled by loading a new setup.
- if (exists $config{setupfile}) {
- require IkiWiki::Setup;
- IkiWiki::Setup::disabled_plugins(
- grep { ! $loaded_plugins{$_} } keys %wikistate);
- }
- }
- else {
- $pages=$index;
- %wikistate=();
- }
-
- foreach my $src (keys %$pages) {
- my $d=$pages->{$src};
- my $page;
- if (exists $d->{page} && ! $rebuild) {
- $page=$d->{page};
- }
- else {
- $page=pagename($src);
- }
- $pagectime{$page}=$d->{ctime};
- $pagesources{$page}=$src;
- if (! $rebuild) {
- $pagemtime{$page}=$d->{mtime};
- $renderedfiles{$page}=$d->{dest};
- if (exists $d->{links} && ref $d->{links}) {
- $links{$page}=$d->{links};
- $oldlinks{$page}=[@{$d->{links}}];
- }
- if (ref $d->{depends_simple} eq 'ARRAY') {
- # old format
- $depends_simple{$page}={
- map { $_ => 1 } @{$d->{depends_simple}}
- };
- }
- elsif (exists $d->{depends_simple}) {
- $depends_simple{$page}=$d->{depends_simple};
- }
- if (exists $d->{dependslist}) {
- # old format
- $depends{$page}={
- map { $_ => $DEPEND_CONTENT }
- @{$d->{dependslist}}
- };
- }
- elsif (exists $d->{depends} && ! ref $d->{depends}) {
- # old format
- $depends{$page}={$d->{depends} => $DEPEND_CONTENT };
- }
- elsif (exists $d->{depends}) {
- $depends{$page}=$d->{depends};
- }
- if (exists $d->{state}) {
- $pagestate{$page}=$d->{state};
- }
- if (exists $d->{typedlinks}) {
- $typedlinks{$page}=$d->{typedlinks};
-
- while (my ($type, $links) = each %{$typedlinks{$page}}) {
- next unless %$links;
- $oldtypedlinks{$page}{$type} = {%$links};
- }
- }
- }
- $oldrenderedfiles{$page}=[@{$d->{dest}}];
- }
- foreach my $page (keys %pagesources) {
- $pagecase{lc $page}=$page;
- }
- foreach my $page (keys %renderedfiles) {
- $destsources{$_}=$page foreach @{$renderedfiles{$page}};
- }
- $lastrev=$index->{lastrev};
- @underlayfiles=@{$index->{underlayfiles}} if ref $index->{underlayfiles};
- return close($in);
-}
-
-sub saveindex () {
- run_hooks(savestate => sub { shift->() });
-
- my @plugins=keys %loaded_plugins;
-
- if (! -d $config{wikistatedir}) {
- mkdir($config{wikistatedir});
- }
- my $newfile="$config{wikistatedir}/indexdb.new";
- my $cleanup = sub { unlink($newfile) };
- open (my $out, '>', $newfile) || error("cannot write to $newfile: $!", $cleanup);
-
- my %index;
- foreach my $page (keys %pagemtime) {
- next unless $pagemtime{$page};
- my $src=$pagesources{$page};
-
- $index{page}{$src}={
- page => $page,
- ctime => $pagectime{$page},
- mtime => $pagemtime{$page},
- dest => $renderedfiles{$page},
- links => $links{$page},
- };
-
- if (exists $depends{$page}) {
- $index{page}{$src}{depends} = $depends{$page};
- }
-
- if (exists $depends_simple{$page}) {
- $index{page}{$src}{depends_simple} = $depends_simple{$page};
- }
-
- if (exists $typedlinks{$page} && %{$typedlinks{$page}}) {
- $index{page}{$src}{typedlinks} = $typedlinks{$page};
- }
-
- if (exists $pagestate{$page}) {
- $index{page}{$src}{state}=$pagestate{$page};
- }
- }
-
- $index{state}={};
- foreach my $id (@plugins) {
- $index{state}{$id}={}; # used to detect disabled plugins
- foreach my $key (keys %{$wikistate{$id}}) {
- $index{state}{$id}{$key}=$wikistate{$id}{$key};
- }
- }
-
- $index{lastrev}=$lastrev;
- $index{underlayfiles}=\@underlayfiles;
-
- $index{version}="3";
- my $ret=Storable::nstore_fd(\%index, $out);
- return if ! defined $ret || ! $ret;
- close $out || error("failed saving to $newfile: $!", $cleanup);
- rename($newfile, "$config{wikistatedir}/indexdb") ||
- error("failed renaming $newfile to $config{wikistatedir}/indexdb", $cleanup);
-
- return 1;
-}
-
-sub template_file ($) {
- my $name=shift;
-
- my $tpage=($name =~ s/^\///) ? $name : "templates/$name";
- my $template;
- if ($name !~ /\.tmpl$/ && exists $pagesources{$tpage}) {
- $template=srcfile($pagesources{$tpage}, 1);
- $name.=".tmpl";
- }
- else {
- $template=srcfile($tpage, 1);
- }
-
- if (defined $template) {
- return $template, $tpage, 1 if wantarray;
- return $template;
- }
- else {
- $name=~s:/::; # avoid path traversal
- foreach my $dir ($config{templatedir},
- "$installdir/share/ikiwiki/templates") {
- if (-e "$dir/$name") {
- $template="$dir/$name";
- last;
- }
- }
- if (defined $template) {
- return $template, $tpage if wantarray;
- return $template;
- }
- }
-
- return;
-}
-
-sub template_depends ($$;@) {
- my $name=shift;
- my $page=shift;
-
- my ($filename, $tpage, $untrusted)=template_file($name);
- if (! defined $filename) {
- error(sprintf(gettext("template %s not found"), $name))
- }
-
- if (defined $page && defined $tpage) {
- add_depends($page, $tpage);
- }
-
- my @opts=(
- filter => sub {
- my $text_ref = shift;
- ${$text_ref} = decode_utf8(${$text_ref});
- run_hooks(readtemplate => sub {
- ${$text_ref} = shift->(
- id => $name,
- page => $tpage,
- content => ${$text_ref},
- untrusted => $untrusted,
- );
- });
- },
- loop_context_vars => 1,
- die_on_bad_params => 0,
- parent_global_vars => 1,
- filename => $filename,
- @_,
- ($untrusted ? (no_includes => 1) : ()),
- );
- return @opts if wantarray;
-
- require HTML::Template;
- return HTML::Template->new(@opts);
-}
-
-sub template ($;@) {
- template_depends(shift, undef, @_);
-}
-
-sub templateactions ($$) {
- my $template=shift;
- my $page=shift;
-
- my $have_actions=0;
- my @actions;
- run_hooks(pageactions => sub {
- push @actions, map { { action => $_ } }
- grep { defined } shift->(page => $page);
- });
- $template->param(actions => \@actions);
-
- if ($config{cgiurl} && exists $hooks{auth}) {
- $template->param(prefsurl => cgiurl(do => "prefs"));
- $have_actions=1;
- }
-
- if ($have_actions || @actions) {
- $template->param(have_actions => 1);
- }
-}
-
-sub hook (@) {
- my %param=@_;
-
- if (! exists $param{type} || ! ref $param{call} || ! exists $param{id}) {
- error 'hook requires type, call, and id parameters';
- }
-
- return if $param{no_override} && exists $hooks{$param{type}}{$param{id}};
-
- $hooks{$param{type}}{$param{id}}=\%param;
- return 1;
-}
-
-sub run_hooks ($$) {
- # Calls the given sub for each hook of the given type,
- # passing it the hook function to call.
- my $type=shift;
- my $sub=shift;
-
- if (exists $hooks{$type}) {
- my (@first, @middle, @last);
- foreach my $id (keys %{$hooks{$type}}) {
- if ($hooks{$type}{$id}{first}) {
- push @first, $id;
- }
- elsif ($hooks{$type}{$id}{last}) {
- push @last, $id;
- }
- else {
- push @middle, $id;
- }
- }
- foreach my $id (@first, @middle, @last) {
- $sub->($hooks{$type}{$id}{call});
- }
- }
-
- return 1;
-}
-
-sub rcs_update () {
- $hooks{rcs}{rcs_update}{call}->(@_);
-}
-
-sub rcs_prepedit ($) {
- $hooks{rcs}{rcs_prepedit}{call}->(@_);
-}
-
-sub rcs_commit (@) {
- $hooks{rcs}{rcs_commit}{call}->(@_);
-}
-
-sub rcs_commit_staged (@) {
- $hooks{rcs}{rcs_commit_staged}{call}->(@_);
-}
-
-sub rcs_add ($) {
- $hooks{rcs}{rcs_add}{call}->(@_);
-}
-
-sub rcs_remove ($) {
- $hooks{rcs}{rcs_remove}{call}->(@_);
-}
-
-sub rcs_rename ($$) {
- $hooks{rcs}{rcs_rename}{call}->(@_);
-}
-
-sub rcs_recentchanges ($) {
- $hooks{rcs}{rcs_recentchanges}{call}->(@_);
-}
-
-sub rcs_diff ($;$) {
- $hooks{rcs}{rcs_diff}{call}->(@_);
-}
-
-sub rcs_getctime ($) {
- $hooks{rcs}{rcs_getctime}{call}->(@_);
-}
-
-sub rcs_getmtime ($) {
- $hooks{rcs}{rcs_getmtime}{call}->(@_);
-}
-
-sub rcs_receive () {
- $hooks{rcs}{rcs_receive}{call}->();
-}
-
-sub add_depends ($$;$) {
- my $page=shift;
- my $pagespec=shift;
- my $deptype=shift || $DEPEND_CONTENT;
-
- # Is the pagespec a simple page name?
- if ($pagespec =~ /$config{wiki_file_regexp}/ &&
- $pagespec !~ /[\s*?()!]/) {
- $depends_simple{$page}{lc $pagespec} |= $deptype;
- return 1;
- }
-
- # Add explicit dependencies for influences.
- my $sub=pagespec_translate($pagespec);
- return unless defined $sub;
- foreach my $p (keys %pagesources) {
- my $r=$sub->($p, location => $page);
- my $i=$r->influences;
- my $static=$r->influences_static;
- foreach my $k (keys %$i) {
- next unless $r || $static || $k eq $page;
- $depends_simple{$page}{lc $k} |= $i->{$k};
- }
- last if $static;
- }
-
- $depends{$page}{$pagespec} |= $deptype;
- return 1;
-}
-
-sub deptype (@) {
- my $deptype=0;
- foreach my $type (@_) {
- if ($type eq 'presence') {
- $deptype |= $DEPEND_PRESENCE;
- }
- elsif ($type eq 'links') {
- $deptype |= $DEPEND_LINKS;
- }
- elsif ($type eq 'content') {
- $deptype |= $DEPEND_CONTENT;
- }
- }
- return $deptype;
-}
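-
-# The types combine as a bitmask, so for example:
-#	deptype("presence", "links")	-> 6 ($DEPEND_PRESENCE | $DEPEND_LINKS)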
-
-my $file_prune_regexp;
-sub file_pruned ($) {
- my $file=shift;
-
- if (defined $config{include} && length $config{include}) {
- return 0 if $file =~ m/$config{include}/;
- }
-
- if (! defined $file_prune_regexp) {
- $file_prune_regexp='('.join('|', @{$config{wiki_file_prune_regexps}}).')';
- $file_prune_regexp=qr/$file_prune_regexp/;
- }
- return $file =~ m/$file_prune_regexp/;
-}
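-
-# With the default wiki_file_prune_regexps, for instance:
-#	file_pruned("index.mdwn")	-> false
-#	file_pruned(".ikiwiki/lockfile")	-> true (dotfile)
-# unless an include regexp such as '^\.htaccess$' matches the file first.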
-
-sub define_gettext () {
- # If translation is needed, redefine the gettext function to do it.
- # Otherwise, it becomes a quick no-op.
- my $gettext_obj;
- my $getobj;
- if ((exists $ENV{LANG} && length $ENV{LANG}) ||
- (exists $ENV{LC_ALL} && length $ENV{LC_ALL}) ||
- (exists $ENV{LC_MESSAGES} && length $ENV{LC_MESSAGES})) {
- $getobj=sub {
- $gettext_obj=eval q{
- use Locale::gettext q{textdomain};
- Locale::gettext->domain('ikiwiki')
- };
- };
- }
-
- no warnings 'redefine';
- *gettext=sub {
- $getobj->() if $getobj;
- if ($gettext_obj) {
- $gettext_obj->get(shift);
- }
- else {
- return shift;
- }
- };
- *ngettext=sub {
- $getobj->() if $getobj;
- if ($gettext_obj) {
- $gettext_obj->nget(@_);
- }
- else {
- return ($_[2] == 1 ? $_[0] : $_[1])
- }
- };
-}
-
-sub gettext {
- define_gettext();
- gettext(@_);
-}
-
-sub ngettext {
- define_gettext();
- ngettext(@_);
-}
-
-sub yesno ($) {
- my $val=shift;
-
- return (defined $val && (lc($val) eq gettext("yes") || lc($val) eq "yes" || $val eq "1"));
-}
-
-sub inject {
- # Injects a new function into the symbol table to replace an
- # exported function.
- my %params=@_;
-
- # This is deep ugly perl foo, beware.
- no strict;
- no warnings;
- if (! defined $params{parent}) {
- $params{parent}='::';
- $params{old}=\&{$params{name}};
- $params{name}=~s/.*:://;
- }
- my $parent=$params{parent};
- foreach my $ns (grep /^\w+::/, keys %{$parent}) {
- $ns = $params{parent} . $ns;
- inject(%params, parent => $ns) unless $ns eq '::main::';
- *{$ns . $params{name}} = $params{call}
- if exists ${$ns}{$params{name}} &&
- \&{${$ns}{$params{name}}} == $params{old};
- }
- use strict;
- use warnings;
-}
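-
-# A plugin could use this to replace an exported function wiki-wide;
-# a hypothetical override of formattime might look like:
-#
-#	inject(name => "IkiWiki::formattime", call => sub {
-#		my $time = shift;
-#		return scalar gmtime($time);	# ignore timeformat
-#	});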
-
-sub add_link ($$;$) {
- my $page=shift;
- my $link=shift;
- my $type=shift;
-
- push @{$links{$page}}, $link
- unless grep { $_ eq $link } @{$links{$page}};
-
- if (defined $type) {
- $typedlinks{$page}{$type}{$link} = 1;
- }
-}
-
-sub add_autofile ($$$) {
- my $file=shift;
- my $plugin=shift;
- my $generator=shift;
-
- $autofiles{$file}{plugin}=$plugin;
- $autofiles{$file}{generator}=$generator;
-}
-
-sub useragent () {
- eval q{use LWP};
- error($@) if $@;
-
- return LWP::UserAgent->new(
- cookie_jar => $config{cookiejar},
- env_proxy => 1, # respect proxy env vars
- agent => $config{useragent},
- );
-}
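-
-# Callers such as the aggregate plugin fetch remote feeds through this,
-# along the lines of (hypothetical URL):
-#	my $res = useragent()->get("http://example.com/feed.rss");
-#	error($res->status_line) unless $res->is_success;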
-
-sub sortspec_translate ($$) {
- my $spec = shift;
- my $reverse = shift;
-
- my $code = "";
- my @data;
- while ($spec =~ m{
- \s*
- (-?) # group 1: perhaps negated
- \s*
- ( # group 2: a word
- \w+\([^\)]*\) # command(params)
- |
- [^\s]+ # or anything else
- )
- \s*
- }gx) {
- my $negated = $1;
- my $word = $2;
- my $params = undef;
-
- if ($word =~ m/^(\w+)\((.*)\)$/) {
- # command with parameters
- $params = $2;
- $word = $1;
- }
- elsif ($word !~ m/^\w+$/) {
- error(sprintf(gettext("invalid sort type %s"), $word));
- }
-
- if (length $code) {
- $code .= " || ";
- }
-
- if ($negated) {
- $code .= "-";
- }
-
- if (exists $IkiWiki::SortSpec::{"cmp_$word"}) {
- if (defined $params) {
- push @data, $params;
- $code .= "IkiWiki::SortSpec::cmp_$word(\$data[$#data])";
- }
- else {
- $code .= "IkiWiki::SortSpec::cmp_$word(undef)";
- }
- }
- else {
- error(sprintf(gettext("unknown sort type %s"), $word));
- }
- }
-
- if (! length $code) {
- # undefined sorting method... sort arbitrarily
- return sub { 0 };
- }
-
- if ($reverse) {
- $code="-($code)";
- }
-
- no warnings;
- return eval 'sub { '.$code.' }';
-}
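-
-# For example, the spec "age title" compiles to a comparator roughly like
-#	sub { IkiWiki::SortSpec::cmp_age(undef)
-#		|| IkiWiki::SortSpec::cmp_title(undef) }
-# and a leading "-" on a word wraps its comparison in a negation.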
-
-sub pagespec_translate ($) {
- my $spec=shift;
-
- # Convert spec to perl code.
- my $code="";
- my @data;
- while ($spec=~m{
- \s* # ignore whitespace
- ( # 1: match a single word
- \! # !
- |
- \( # (
- |
- \) # )
- |
- \w+\([^\)]*\) # command(params)
- |
- [^\s()]+ # any other text
- )
- \s* # ignore whitespace
- }gx) {
- my $word=$1;
- if (lc $word eq 'and') {
- $code.=' &';
- }
- elsif (lc $word eq 'or') {
- $code.=' |';
- }
- elsif ($word eq "(" || $word eq ")" || $word eq "!") {
- $code.=' '.$word;
- }
- elsif ($word =~ /^(\w+)\((.*)\)$/) {
- if (exists $IkiWiki::PageSpec::{"match_$1"}) {
- push @data, $2;
- $code.="IkiWiki::PageSpec::match_$1(\$page, \$data[$#data], \@_)";
- }
- else {
- push @data, qq{unknown function in pagespec "$word"};
- $code.="IkiWiki::ErrorReason->new(\$data[$#data])";
- }
- }
- else {
- push @data, $word;
- $code.=" IkiWiki::PageSpec::match_glob(\$page, \$data[$#data], \@_)";
- }
- }
-
- if (! length $code) {
- $code="IkiWiki::FailReason->new('empty pagespec')";
- }
-
- no warnings;
- return eval 'sub { my $page=shift; '.$code.' }';
-}
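-
-# As a sketch, the pagespec "blog/* and !*/Discussion" compiles to
-# (modulo whitespace):
-#	sub { my $page=shift;
-#		IkiWiki::PageSpec::match_glob($page, $data[0], @_)
-#		& ! IkiWiki::PageSpec::match_glob($page, $data[1], @_) }
-# with @data holding ("blog/*", "*/Discussion").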
-
-sub pagespec_match ($$;@) {
- my $page=shift;
- my $spec=shift;
- my @params=@_;
-
-	# Backwards compatibility with old calling convention.
- if (@params == 1) {
- unshift @params, 'location';
- }
-
- my $sub=pagespec_translate($spec);
- return IkiWiki::ErrorReason->new("syntax error in pagespec \"$spec\"")
- if ! defined $sub;
- return $sub->($page, @params);
-}
-
-# e.g. @pages = sort_pages("title", \@pages, reverse => "yes")
-#
-# Not exported yet, but could be in future if it is generally useful.
-# Note that this signature is not the same as IkiWiki::SortSpec::sort_pages,
-# which is "more internal".
-sub sort_pages ($$;@) {
- my $sort = shift;
- my $list = shift;
- my %params = @_;
- $sort = sortspec_translate($sort, $params{reverse});
- return IkiWiki::SortSpec::sort_pages($sort, @$list);
-}
-
-sub pagespec_match_list ($$;@) {
- my $page=shift;
- my $pagespec=shift;
- my %params=@_;
-
-	# Backwards compatibility with old calling convention.
- if (ref $page) {
- print STDERR "warning: a plugin (".caller().") is using pagespec_match_list in an obsolete way, and needs to be updated\n";
- $params{list}=$page;
- $page=$params{location}; # ugh!
- }
-
- my $sub=pagespec_translate($pagespec);
- error "syntax error in pagespec \"$pagespec\""
- if ! defined $sub;
- my $sort=sortspec_translate($params{sort}, $params{reverse})
- if defined $params{sort};
-
- my @candidates;
- if (exists $params{list}) {
- @candidates=exists $params{filter}
- ? grep { ! $params{filter}->($_) } @{$params{list}}
- : @{$params{list}};
- }
- else {
- @candidates=exists $params{filter}
- ? grep { ! $params{filter}->($_) } keys %pagesources
- : keys %pagesources;
- }
-
- # clear params, remainder is passed to pagespec
- $depends{$page}{$pagespec} |= ($params{deptype} || $DEPEND_CONTENT);
- my $num=$params{num};
- delete @params{qw{num deptype reverse sort filter list}};
-
- # when only the top matches will be returned, it's efficient to
- # sort before matching to pagespec,
- if (defined $num && defined $sort) {
- @candidates=IkiWiki::SortSpec::sort_pages(
- $sort, @candidates);
- }
-
- my @matches;
- my $firstfail;
- my $count=0;
- my $accum=IkiWiki::SuccessReason->new();
- foreach my $p (@candidates) {
- my $r=$sub->($p, %params, location => $page);
- error(sprintf(gettext("cannot match pages: %s"), $r))
- if $r->isa("IkiWiki::ErrorReason");
- unless ($r || $r->influences_static) {
- $r->remove_influence($p);
- }
- $accum |= $r;
- if ($r) {
- push @matches, $p;
- last if defined $num && ++$count == $num;
- }
- }
-
- # Add simple dependencies for accumulated influences.
- my $i=$accum->influences;
- foreach my $k (keys %$i) {
- $depends_simple{$page}{lc $k} |= $i->{$k};
- }
-
- # when all matches will be returned, it's efficient to
- # sort after matching
- if (! defined $num && defined $sort) {
- return IkiWiki::SortSpec::sort_pages(
- $sort, @matches);
- }
- else {
- return @matches;
- }
-}
-
-sub pagespec_valid ($) {
- my $spec=shift;
-
- return defined pagespec_translate($spec);
-}
-
-sub glob2re ($) {
- my $re=quotemeta(shift);
- $re=~s/\\\*/.*/g;
- $re=~s/\\\?/./g;
- return qr/^$re$/i;
-}
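-
-# e.g. glob2re("blog/*") yields qr/^blog\/.*$/i, so globs match
-# case-insensitively and "*" can span "/" boundaries.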
-
-package IkiWiki::FailReason;
-
-use overload (
- '""' => sub { $_[0][0] },
- '0+' => sub { 0 },
- '!' => sub { bless $_[0], 'IkiWiki::SuccessReason'},
- '&' => sub { $_[0]->merge_influences($_[1], 1); $_[0] },
- '|' => sub { $_[1]->merge_influences($_[0]); $_[1] },
- fallback => 1,
-);
-
-our @ISA = 'IkiWiki::SuccessReason';
-
-package IkiWiki::SuccessReason;
-
-# A blessed array-ref:
-#
-# [0]: human-readable reason for success (or, in FailReason subclass, failure)
-# [1]{""}:
-# - if absent or false, the influences of this evaluation are "static",
-# see the influences_static method
-# - if true, they are dynamic (not static)
-# [1]{any other key}:
-# the dependency types of influences, as returned by the influences method
-
-use overload (
- # in string context, it's the human-readable reason
- '""' => sub { $_[0][0] },
- # in boolean context, SuccessReason is 1 and FailReason is 0
- '0+' => sub { 1 },
- # negating a result gives the opposite result with the same influences
- '!' => sub { bless $_[0], 'IkiWiki::FailReason'},
- # A & B = (A ? B : A) with the influences of both
- '&' => sub { $_[1]->merge_influences($_[0], 1); $_[1] },
- # A | B = (A ? A : B) with the influences of both
- '|' => sub { $_[0]->merge_influences($_[1]); $_[0] },
- fallback => 1,
-);
-
-# SuccessReason->new("human-readable reason", page => deptype, ...)
-
-sub new {
- my $class = shift;
- my $value = shift;
- return bless [$value, {@_}], $class;
-}
-
-# influences(): return a reference to a copy of the hash
-# { page => dependency type } describing the pages that indirectly influenced
-# this result, but would not cause a dependency through ikiwiki's core
-# dependency logic.
-#
-# See [[todo/dependency_types]] for extensive discussion of what this means.
-#
-# influences(page => deptype, ...): remove all influences, replace them
-# with the arguments, and return a reference to a copy of the new influences.
-
-sub influences {
- my $this=shift;
- $this->[1]={@_} if @_;
- my %i=%{$this->[1]};
- delete $i{""};
- return \%i;
-}
-
-# True if this result has the same influences whichever page it matches.
-# For instance, whether bar matches backlink(foo) is influenced only by
-# the set of links in foo, so its only influence is { foo => DEPEND_LINKS },
-# which does not mention bar anywhere.
-#
-# False if this result would have different influences when matching
-# different pages. For instance, when testing whether link(foo) matches bar,
-# { bar => DEPEND_LINKS } is an influence on that result, because changing
-# bar's links could change the outcome; so its influences are not the same
-# as when testing whether link(foo) matches baz.
-#
-# Static influences are one of the things that make pagespec_match_list
-# more efficient than repeated calls to pagespec_match.
-
-sub influences_static {
- return ! $_[0][1]->{""};
-}
-
-# Change the influences of $this to be the influences of "$this & $other"
-# or "$this | $other".
-#
-# If both $this and $other are either successful or have influences,
-# or this is an "or" operation, the result has all the influences from
-# either of the arguments. It has dynamic influences if either argument
-# has dynamic influences.
-#
-# If this is an "and" operation, and at least one argument is a
-# FailReason with no influences, the result has no influences, and they
-# are not dynamic. For instance, link(foo) matching bar is influenced
-# by bar, but enabled(ddate) has no influences. Suppose ddate is disabled;
-# then (link(foo) and enabled(ddate)) not matching bar is not influenced by
-# bar, because it would be false however often you edit bar.
-
-sub merge_influences {
- my $this=shift;
- my $other=shift;
- my $anded=shift;
-
- # This "if" is odd because it needs to avoid negating $this
- # or $other, which would alter the objects in-place. Be careful.
- if (! $anded || (($this || %{$this->[1]}) &&
- ($other || %{$other->[1]}))) {
- foreach my $influence (keys %{$other->[1]}) {
- $this->[1]{$influence} |= $other->[1]{$influence};
- }
- }
- else {
- # influence blocker
- $this->[1]={};
- }
-}
-
-# Change $this so it is not considered to be influenced by $torm.
-
-sub remove_influence {
- my $this=shift;
- my $torm=shift;
-
- delete $this->[1]{$torm};
-}
-
-package IkiWiki::ErrorReason;
-
-our @ISA = 'IkiWiki::FailReason';
-
-package IkiWiki::PageSpec;
-
-sub derel ($$) {
- my $path=shift;
- my $from=shift;
-
- if ($path =~ m!^\.(/|$)!) {
- if ($1) {
- $from=~s#/?[^/]+$## if defined $from;
- $path=~s#^\./##;
- $path="$from/$path" if defined $from && length $from;
- }
- else {
- $path = $from;
- $path = "" unless defined $path;
- }
- }
-
- return $path;
-}
-
-my %glob_cache;
-
-sub match_glob ($$;@) {
- my $page=shift;
- my $glob=shift;
- my %params=@_;
-
- $glob=derel($glob, $params{location});
-
- # Instead of converting the glob to a regex every time,
- # cache the compiled regex to save time.
- my $re=$glob_cache{$glob};
- unless (defined $re) {
- $glob_cache{$glob} = $re = IkiWiki::glob2re($glob);
- }
- if ($page =~ $re) {
- if (! IkiWiki::isinternal($page) || $params{internal}) {
- return IkiWiki::SuccessReason->new("$glob matches $page");
- }
- else {
- return IkiWiki::FailReason->new("$glob matches $page, but the page is an internal page");
- }
- }
- else {
- return IkiWiki::FailReason->new("$glob does not match $page");
- }
-}
-
-sub match_internal ($$;@) {
- return match_glob(shift, shift, @_, internal => 1)
-}
-
-sub match_page ($$;@) {
- my $page=shift;
- my $match=match_glob($page, shift, @_);
- if ($match) {
- my $source=exists $IkiWiki::pagesources{$page} ?
- $IkiWiki::pagesources{$page} :
- $IkiWiki::delpagesources{$page};
- my $type=defined $source ? IkiWiki::pagetype($source) : undef;
- if (! defined $type) {
- return IkiWiki::FailReason->new("$page is not a page");
- }
- }
- return $match;
-}
-
-sub match_link ($$;@) {
- my $page=shift;
- my $link=lc(shift);
- my %params=@_;
-
- $link=derel($link, $params{location});
- my $from=exists $params{location} ? $params{location} : '';
- my $linktype=$params{linktype};
- my $qualifier='';
- if (defined $linktype) {
- $qualifier=" with type $linktype";
- }
-
- my $links = $IkiWiki::links{$page};
- return IkiWiki::FailReason->new("$page has no links", $page => $IkiWiki::DEPEND_LINKS, "" => 1)
- unless $links && @{$links};
- my $bestlink = IkiWiki::bestlink($from, $link);
- foreach my $p (@{$links}) {
- next unless (! defined $linktype || exists $IkiWiki::typedlinks{$page}{$linktype}{$p});
-
- if (length $bestlink) {
- if ($bestlink eq IkiWiki::bestlink($page, $p)) {
- return IkiWiki::SuccessReason->new("$page links to $link$qualifier", $page => $IkiWiki::DEPEND_LINKS, "" => 1)
- }
- }
- else {
- if (match_glob($p, $link, %params)) {
- return IkiWiki::SuccessReason->new("$page links to page $p$qualifier, matching $link", $page => $IkiWiki::DEPEND_LINKS, "" => 1)
- }
- my ($p_rel)=$p=~/^\/?(.*)/;
- $link=~s/^\///;
- if (match_glob($p_rel, $link, %params)) {
- return IkiWiki::SuccessReason->new("$page links to page $p_rel$qualifier, matching $link", $page => $IkiWiki::DEPEND_LINKS, "" => 1)
- }
- }
- }
- return IkiWiki::FailReason->new("$page does not link to $link$qualifier", $page => $IkiWiki::DEPEND_LINKS, "" => 1);
-}
-
-sub match_backlink ($$;@) {
- my $page=shift;
- my $testpage=shift;
- my %params=@_;
- if ($testpage eq '.') {
- $testpage = $params{'location'}
- }
- my $ret=match_link($testpage, $page, @_);
- $ret->influences($testpage => $IkiWiki::DEPEND_LINKS);
- return $ret;
-}
-
-sub match_created_before ($$;@) {
- my $page=shift;
- my $testpage=shift;
- my %params=@_;
-
- $testpage=derel($testpage, $params{location});
-
- if (exists $IkiWiki::pagectime{$testpage}) {
- if ($IkiWiki::pagectime{$page} < $IkiWiki::pagectime{$testpage}) {
- return IkiWiki::SuccessReason->new("$page created before $testpage", $testpage => $IkiWiki::DEPEND_PRESENCE);
- }
- else {
- return IkiWiki::FailReason->new("$page not created before $testpage", $testpage => $IkiWiki::DEPEND_PRESENCE);
- }
- }
- else {
- return IkiWiki::ErrorReason->new("$testpage does not exist", $testpage => $IkiWiki::DEPEND_PRESENCE);
- }
-}
-
-sub match_created_after ($$;@) {
- my $page=shift;
- my $testpage=shift;
- my %params=@_;
-
- $testpage=derel($testpage, $params{location});
-
- if (exists $IkiWiki::pagectime{$testpage}) {
- if ($IkiWiki::pagectime{$page} > $IkiWiki::pagectime{$testpage}) {
- return IkiWiki::SuccessReason->new("$page created after $testpage", $testpage => $IkiWiki::DEPEND_PRESENCE);
- }
- else {
- return IkiWiki::FailReason->new("$page not created after $testpage", $testpage => $IkiWiki::DEPEND_PRESENCE);
- }
- }
- else {
- return IkiWiki::ErrorReason->new("$testpage does not exist", $testpage => $IkiWiki::DEPEND_PRESENCE);
- }
-}
-
-sub match_creation_day ($$;@) {
- my $page=shift;
- my $d=shift;
- if ($d !~ /^\d+$/) {
- return IkiWiki::ErrorReason->new("invalid day $d");
- }
- if ((localtime($IkiWiki::pagectime{$page}))[3] == $d) {
- return IkiWiki::SuccessReason->new('creation_day matched');
- }
- else {
- return IkiWiki::FailReason->new('creation_day did not match');
- }
-}
-
-sub match_creation_month ($$;@) {
- my $page=shift;
- my $m=shift;
- if ($m !~ /^\d+$/) {
- return IkiWiki::ErrorReason->new("invalid month $m");
- }
- if ((localtime($IkiWiki::pagectime{$page}))[4] + 1 == $m) {
- return IkiWiki::SuccessReason->new('creation_month matched');
- }
- else {
- return IkiWiki::FailReason->new('creation_month did not match');
- }
-}
-
-sub match_creation_year ($$;@) {
- my $page=shift;
- my $y=shift;
- if ($y !~ /^\d+$/) {
- return IkiWiki::ErrorReason->new("invalid year $y");
- }
- if ((localtime($IkiWiki::pagectime{$page}))[5] + 1900 == $y) {
- return IkiWiki::SuccessReason->new('creation_year matched');
- }
- else {
- return IkiWiki::FailReason->new('creation_year did not match');
- }
-}
-
-sub match_user ($$;@) {
- shift;
- my $user=shift;
- my %params=@_;
-
- if (! exists $params{user}) {
- return IkiWiki::ErrorReason->new("no user specified");
- }
-
- my $regexp=IkiWiki::glob2re($user);
-
- if (defined $params{user} && $params{user}=~$regexp) {
- return IkiWiki::SuccessReason->new("user is $user");
- }
- elsif (! defined $params{user}) {
- return IkiWiki::FailReason->new("not logged in");
- }
- else {
- return IkiWiki::FailReason->new("user is $params{user}, not $user");
- }
-}
-
-sub match_admin ($$;@) {
- shift;
- shift;
- my %params=@_;
-
- if (! exists $params{user}) {
- return IkiWiki::ErrorReason->new("no user specified");
- }
-
- if (defined $params{user} && IkiWiki::is_admin($params{user})) {
- return IkiWiki::SuccessReason->new("user is an admin");
- }
- elsif (! defined $params{user}) {
- return IkiWiki::FailReason->new("not logged in");
- }
- else {
- return IkiWiki::FailReason->new("user is not an admin");
- }
-}
-
-sub match_ip ($$;@) {
- shift;
- my $ip=shift;
- my %params=@_;
-
- if (! exists $params{ip}) {
- return IkiWiki::ErrorReason->new("no IP specified");
- }
-
- my $regexp=IkiWiki::glob2re(lc $ip);
-
- if (defined $params{ip} && lc $params{ip}=~$regexp) {
- return IkiWiki::SuccessReason->new("IP is $ip");
- }
- else {
- return IkiWiki::FailReason->new("IP is $params{ip}, not $ip");
- }
-}
-
-package IkiWiki::SortSpec;
-
-# This is in the SortSpec namespace so that the $a and $b that sort() uses
-# are easily available in this namespace, for cmp functions to use them.
-sub sort_pages {
- my $f=shift;
- sort $f @_
-}
-
-sub cmp_title {
- IkiWiki::pagetitle(IkiWiki::basename($a))
- cmp
- IkiWiki::pagetitle(IkiWiki::basename($b))
-}
-
-sub cmp_path { IkiWiki::pagetitle($a) cmp IkiWiki::pagetitle($b) }
-sub cmp_mtime { $IkiWiki::pagemtime{$b} <=> $IkiWiki::pagemtime{$a} }
-sub cmp_age { $IkiWiki::pagectime{$b} <=> $IkiWiki::pagectime{$a} }
-
-1
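
(Reviewer's aside, not part of the diff: the pagespec matchers above all
funnel globs through glob2re(), which is compact enough to sketch
standalone. Sub and variable names below are illustrative only.)

use strict;
use warnings;

# Mirror of the glob2re() idiom above: quotemeta escapes every regex
# metacharacter, then the escaped wildcards are turned back into
# regex operators.
sub my_glob2re {
	my $re = quotemeta(shift);
	$re =~ s/\\\*/.*/g;	# '*' matches any run of characters
	$re =~ s/\\\?/./g;	# '?' matches exactly one character
	return qr/^$re$/i;	# anchored and case-insensitive
}

my $re = my_glob2re("blog/*");
print "blog/entry1 matches\n" if "blog/entry1" =~ $re;
print "sandbox does not\n" unless "sandbox" =~ $re;
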
diff -Nru ikiwiki-3.20141016.4+deb8u1/.pc/CVE-2019-9187-3.patch/IkiWiki/Plugin/aggregate.pm ikiwiki-3.20141016.4/.pc/CVE-2019-9187-3.patch/IkiWiki/Plugin/aggregate.pm
--- ikiwiki-3.20141016.4+deb8u1/.pc/CVE-2019-9187-3.patch/IkiWiki/Plugin/aggregate.pm 2019-03-07 17:32:54.000000000 +1100
+++ ikiwiki-3.20141016.4/.pc/CVE-2019-9187-3.patch/IkiWiki/Plugin/aggregate.pm 1970-01-01 10:00:00.000000000 +1000
@@ -1,789 +0,0 @@
-#!/usr/bin/perl
-# Feed aggregation plugin.
-package IkiWiki::Plugin::aggregate;
-
-use warnings;
-use strict;
-use IkiWiki 3.00;
-use HTML::Parser;
-use HTML::Tagset;
-use HTML::Entities;
-use open qw{:utf8 :std};
-
-my %feeds;
-my %guids;
-
-sub import {
- hook(type => "getopt", id => "aggregate", call => \&getopt);
- hook(type => "getsetup", id => "aggregate", call => \&getsetup);
- hook(type => "checkconfig", id => "aggregate", call => \&checkconfig,
- last => 1);
- hook(type => "needsbuild", id => "aggregate", call => \&needsbuild);
- hook(type => "preprocess", id => "aggregate", call => \&preprocess);
- hook(type => "delete", id => "aggregate", call => \&delete);
- hook(type => "savestate", id => "aggregate", call => \&savestate);
- hook(type => "htmlize", id => "_aggregated", call => \&htmlize);
- if (exists $config{aggregate_webtrigger} && $config{aggregate_webtrigger}) {
- hook(type => "cgi", id => "aggregate", call => \&cgi);
- }
-}
-
-sub getopt () {
- eval q{use Getopt::Long};
- error($@) if $@;
- Getopt::Long::Configure('pass_through');
- GetOptions(
- "aggregate" => \$config{aggregate},
- "aggregateinternal!" => \$config{aggregateinternal},
- );
-}
-
-sub getsetup () {
- return
- plugin => {
- safe => 1,
- rebuild => undef,
- },
- aggregateinternal => {
- type => "boolean",
- example => 1,
- description => "enable aggregation to internal pages?",
- safe => 0, # enabling needs manual transition
- rebuild => 0,
- },
- aggregate_webtrigger => {
- type => "boolean",
- example => 0,
- description => "allow aggregation to be triggered via the web?",
- safe => 1,
- rebuild => 0,
- },
-}
-
-sub checkconfig () {
- if (! defined $config{aggregateinternal}) {
- $config{aggregateinternal}=1;
- }
-
- # This is done here rather than in a refresh hook because it
- # needs to run before the wiki is locked.
- if ($config{aggregate} && ! ($config{post_commit} &&
- IkiWiki::commit_hook_enabled())) {
- launchaggregation();
- }
-}
-
-sub cgi ($) {
- my $cgi=shift;
-
- if (defined $cgi->param('do') &&
- $cgi->param("do") eq "aggregate_webtrigger") {
- $|=1;
- print "Content-Type: text/plain\n\n";
- $config{cgi}=0;
- $config{verbose}=1;
- $config{syslog}=0;
- print gettext("Aggregation triggered via web.")."\n\n";
- if (launchaggregation()) {
- IkiWiki::lockwiki();
- IkiWiki::loadindex();
- require IkiWiki::Render;
- IkiWiki::refresh();
- IkiWiki::saveindex();
- }
- else {
- print gettext("Nothing to do right now, all feeds are up-to-date!")."\n";
- }
- exit 0;
- }
-}
-
-sub launchaggregation () {
- # See if any feeds need aggregation.
- loadstate();
- my @feeds=needsaggregate();
- return unless @feeds;
- if (! lockaggregate()) {
- error("an aggregation process is already running");
- }
- # force a later rebuild of source pages
- $IkiWiki::forcerebuild{$_->{sourcepage}}=1
- foreach @feeds;
-
- # Fork a child process to handle the aggregation.
- # The parent process will then handle building the
- # result. This avoids messy code to clear state
- # accumulated while aggregating.
- defined(my $pid = fork) or error("Can't fork: $!");
- if (! $pid) {
- IkiWiki::loadindex();
- # Aggregation happens without the main wiki lock
- # being held. This allows editing pages etc while
- # aggregation is running.
- aggregate(@feeds);
-
- IkiWiki::lockwiki;
- # Merge changes, since aggregation state may have
- # changed on disk while the aggregation was happening.
- mergestate();
- expire();
- savestate();
- IkiWiki::unlockwiki;
- exit 0;
- }
- waitpid($pid,0);
- if ($?) {
- error "aggregation failed with code $?";
- }
-
- clearstate();
- unlockaggregate();
-
- return 1;
-}
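# (Illustrative aside, not part of this file: the fork/waitpid pattern
# launchaggregation() uses above, reduced to a standalone sketch. The
# child does the isolated work so any state it accumulates dies with
# it, and $? carries the child's exit status back to the parent.)
use strict;
use warnings;

defined(my $pid = fork) or die "Can't fork: $!";
if (! $pid) {
	# child: do the work that must not pollute the parent, then exit
	exit 0;
}
waitpid($pid, 0);
die "child failed with code $?" if $?;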
-
-# Pages with the _aggregated extension have plain html markup; pass them through.
-sub htmlize (@) {
- my %params=@_;
- return $params{content};
-}
-
-# Used by ikiwiki-transition aggregateinternal.
-sub migrate_to_internal {
- if (! lockaggregate()) {
- error("an aggregation process is currently running");
- }
-
- IkiWiki::lockwiki();
- loadstate();
- $config{verbose}=1;
-
- foreach my $data (values %guids) {
- next unless $data->{page};
- next if $data->{expired};
-
- $config{aggregateinternal} = 0;
- my $oldname = "$config{srcdir}/".htmlfn($data->{page});
- if (! -e $oldname) {
- $oldname = $IkiWiki::Plugin::transient::transientdir."/".htmlfn($data->{page});
- }
-
- my $oldoutput = $config{destdir}."/".IkiWiki::htmlpage($data->{page});
-
- $config{aggregateinternal} = 1;
- my $newname = $IkiWiki::Plugin::transient::transientdir."/".htmlfn($data->{page});
-
- debug "moving $oldname -> $newname";
- if (-e $newname) {
- if (-e $oldname) {
- error("$newname already exists");
- }
- else {
- debug("already renamed to $newname?");
- }
- }
- elsif (-e $oldname) {
- rename($oldname, $newname) || error("$!");
- }
- else {
- debug("$oldname not found");
- }
- if (-e $oldoutput) {
- require IkiWiki::Render;
- debug("removing output file $oldoutput");
- IkiWiki::prune($oldoutput, $config{destdir});
- }
- }
-
- savestate();
- IkiWiki::unlockwiki;
-
- unlockaggregate();
-}
-
-sub needsbuild (@) {
- my $needsbuild=shift;
-
- loadstate();
-
- foreach my $feed (values %feeds) {
- if (exists $pagesources{$feed->{sourcepage}} &&
- grep { $_ eq $pagesources{$feed->{sourcepage}} } @$needsbuild) {
- # Mark all feeds originating on this page as
- # not yet seen; preprocess will unmark those that
- # still exist.
- markunseen($feed->{sourcepage});
- }
- }
-
- return $needsbuild;
-}
-
-sub preprocess (@) {
- my %params=@_;
-
- foreach my $required (qw{name url}) {
- if (! exists $params{$required}) {
- error sprintf(gettext("missing %s parameter"), $required)
- }
- }
-
- my $feed={};
- my $name=$params{name};
- if (exists $feeds{$name}) {
- $feed=$feeds{$name};
- }
- else {
- $feeds{$name}=$feed;
- }
- $feed->{name}=$name;
- $feed->{sourcepage}=$params{page};
- $feed->{url}=$params{url};
- my $dir=exists $params{dir} ? $params{dir} : $params{page}."/".titlepage($params{name});
- $dir=~s/^\/+//;
- ($dir)=$dir=~/$config{wiki_file_regexp}/;
- $feed->{dir}=$dir;
- $feed->{feedurl}=defined $params{feedurl} ? $params{feedurl} : "";
- $feed->{updateinterval}=defined $params{updateinterval} ? $params{updateinterval} * 60 : 15 * 60;
- $feed->{expireage}=defined $params{expireage} ? $params{expireage} : 0;
- $feed->{expirecount}=defined $params{expirecount} ? $params{expirecount} : 0;
- if (exists $params{template}) {
- $params{template}=~s/[^-_a-zA-Z0-9]+//g;
- }
- else {
- $params{template} = "aggregatepost"
- }
- $feed->{template}=$params{template} . ".tmpl";
- delete $feed->{unseen};
- $feed->{lastupdate}=0 unless defined $feed->{lastupdate};
- $feed->{lasttry}=$feed->{lastupdate} unless defined $feed->{lasttry};
- $feed->{numposts}=0 unless defined $feed->{numposts};
- $feed->{newposts}=0 unless defined $feed->{newposts};
- $feed->{message}=gettext("new feed") unless defined $feed->{message};
- $feed->{error}=0 unless defined $feed->{error};
- $feed->{tags}=[];
- while (@_) {
- my $key=shift;
- my $value=shift;
- if ($key eq 'tag') {
- push @{$feed->{tags}}, $value;
- }
- }
-
- return "<a href=\"".$feed->{url}."\">".$feed->{name}."</a>: ".
- ($feed->{error} ? "<em>" : "").$feed->{message}.
- ($feed->{error} ? "</em>" : "").
- " (".$feed->{numposts}." ".gettext("posts").
- ($feed->{newposts} ? "; ".$feed->{newposts}.
- " ".gettext("new") : "").
- ")";
-}
-
-sub delete (@) {
- my @files=@_;
-
- # Remove feed data for removed pages.
- foreach my $file (@files) {
- my $page=pagename($file);
- markunseen($page);
- }
-}
-
-sub markunseen ($) {
- my $page=shift;
-
- foreach my $id (keys %feeds) {
- if ($feeds{$id}->{sourcepage} eq $page) {
- $feeds{$id}->{unseen}=1;
- }
- }
-}
-
-my $state_loaded=0;
-
-sub loadstate () {
- return if $state_loaded;
- $state_loaded=1;
- if (-e "$config{wikistatedir}/aggregate") {
- open(IN, "<", "$config{wikistatedir}/aggregate") ||
- die "$config{wikistatedir}/aggregate: $!";
- while (<IN>) {
- $_=IkiWiki::possibly_foolish_untaint($_);
- chomp;
- my $data={};
- foreach my $i (split(/ /, $_)) {
- my ($field, $val)=split(/=/, $i, 2);
- if ($field eq "name" || $field eq "feed" ||
- $field eq "guid" || $field eq "message") {
- $data->{$field}=decode_entities($val, " \t\n");
- }
- elsif ($field eq "tag") {
- push @{$data->{tags}}, $val;
- }
- else {
- $data->{$field}=$val;
- }
- }
-
- if (exists $data->{name}) {
- $feeds{$data->{name}}=$data;
- }
- elsif (exists $data->{guid}) {
- $guids{$data->{guid}}=$data;
- }
- }
-
- close IN;
- }
-}
-
-sub savestate () {
- return unless $state_loaded;
- garbage_collect();
- my $newfile="$config{wikistatedir}/aggregate.new";
- my $cleanup = sub { unlink($newfile) };
- open (OUT, ">", $newfile) || error("open $newfile: $!", $cleanup);
- foreach my $data (values %feeds, values %guids) {
- my @line;
- foreach my $field (keys %$data) {
- if ($field eq "name" || $field eq "feed" ||
- $field eq "guid" || $field eq "message") {
- push @line, "$field=".encode_entities($data->{$field}, " \t\n");
- }
- elsif ($field eq "tags") {
- push @line, "tag=$_" foreach @{$data->{tags}};
- }
- else {
- push @line, "$field=".$data->{$field}
- if defined $data->{$field};
- }
- }
- print OUT join(" ", @line)."\n" || error("write $newfile: $!", $cleanup);
- }
- close OUT || error("save $newfile: $!", $cleanup);
- rename($newfile, "$config{wikistatedir}/aggregate") ||
- error("rename $newfile: $!", $cleanup);
-
- my $timestamp=undef;
- foreach my $feed (keys %feeds) {
- my $t=$feeds{$feed}->{lastupdate}+$feeds{$feed}->{updateinterval};
- if (! defined $timestamp || $timestamp > $t) {
- $timestamp=$t;
- }
- }
- $newfile=~s/\.new$/time/;
- open (OUT, ">", $newfile) || error("open $newfile: $!", $cleanup);
- if (defined $timestamp) {
- print OUT $timestamp."\n";
- }
- close OUT || error("save $newfile: $!", $cleanup);
-}
-
-sub garbage_collect () {
- foreach my $name (keys %feeds) {
- # remove any feeds that were not seen while building the pages
- # that used to contain them
- if ($feeds{$name}->{unseen}) {
- delete $feeds{$name};
- }
- }
-
- foreach my $guid (values %guids) {
- # any guid whose feed is gone should be removed
- if (! exists $feeds{$guid->{feed}}) {
- if (exists $guid->{page}) {
- unlink $IkiWiki::Plugin::transient::transientdir."/".htmlfn($guid->{page})
- || unlink "$config{srcdir}/".htmlfn($guid->{page});
- }
- delete $guids{$guid->{guid}};
- }
- # handle expired guids
- elsif ($guid->{expired} && exists $guid->{page}) {
- unlink "$config{srcdir}/".htmlfn($guid->{page});
- unlink $IkiWiki::Plugin::transient::transientdir."/".htmlfn($guid->{page});
- delete $guid->{page};
- delete $guid->{md5};
- }
- }
-}
-
-sub mergestate () {
- # Load the current state in from disk, and merge into it
- # values from the state in memory that might have changed
- # during aggregation.
- my %myfeeds=%feeds;
- my %myguids=%guids;
- clearstate();
- loadstate();
-
- # All that can change in feed state during aggregation is a few
- # fields.
- foreach my $name (keys %myfeeds) {
- if (exists $feeds{$name}) {
- foreach my $field (qw{message lastupdate lasttry
- numposts newposts error}) {
- $feeds{$name}->{$field}=$myfeeds{$name}->{$field};
- }
- }
- }
-
- # New guids can be created during aggregation.
- # Guids have a few fields that may be updated during aggregation.
- # It's also possible that guids were removed from the on-disk state
-	# while the aggregation was in progress. That would only happen if
- # their feed was also removed, so any removed guids added back here
- # will be garbage collected later.
- foreach my $guid (keys %myguids) {
- if (! exists $guids{$guid}) {
- $guids{$guid}=$myguids{$guid};
- }
- else {
- foreach my $field (qw{md5}) {
- $guids{$guid}->{$field}=$myguids{$guid}->{$field};
- }
- }
- }
-}
-
-sub clearstate () {
- %feeds=();
- %guids=();
- $state_loaded=0;
-}
-
-sub expire () {
- foreach my $feed (values %feeds) {
- next unless $feed->{expireage} || $feed->{expirecount};
- my $count=0;
- my %seen;
- foreach my $item (sort { ($IkiWiki::pagectime{$b->{page}} || 0) <=> ($IkiWiki::pagectime{$a->{page}} || 0) }
- grep { exists $_->{page} && $_->{feed} eq $feed->{name} }
- values %guids) {
- if ($feed->{expireage}) {
- my $days_old = (time - ($IkiWiki::pagectime{$item->{page}} || 0)) / 60 / 60 / 24;
- if ($days_old > $feed->{expireage}) {
- debug(sprintf(gettext("expiring %s (%s days old)"),
- $item->{page}, int($days_old)));
- $item->{expired}=1;
- }
- }
- elsif ($feed->{expirecount} &&
- $count >= $feed->{expirecount}) {
- debug(sprintf(gettext("expiring %s"), $item->{page}));
- $item->{expired}=1;
- }
- else {
- if (! $seen{$item->{page}}) {
- $seen{$item->{page}}=1;
- $count++;
- }
- }
- }
- }
-}
-
-sub needsaggregate () {
- return values %feeds if $config{rebuild};
- return grep { time - $_->{lastupdate} >= $_->{updateinterval} } values %feeds;
-}
-
-sub aggregate (@) {
- eval q{use Net::INET6Glue::INET_is_INET6}; # may not be available
- eval q{use XML::Feed};
- error($@) if $@;
- eval q{use URI::Fetch};
- error($@) if $@;
-
- foreach my $feed (@_) {
- $feed->{lasttry}=time;
- $feed->{newposts}=0;
- $feed->{message}=sprintf(gettext("last checked %s"),
- displaytime($feed->{lasttry}));
- $feed->{error}=0;
-
- debug(sprintf(gettext("checking feed %s ..."), $feed->{name}));
-
- if (! length $feed->{feedurl}) {
- my @urls=XML::Feed->find_feeds($feed->{url});
- if (! @urls) {
- $feed->{message}=sprintf(gettext("could not find feed at %s"), $feed->{url});
- $feed->{error}=1;
- debug($feed->{message});
- next;
- }
- $feed->{feedurl}=pop @urls;
- }
- my $ua=useragent();
- my $res=URI::Fetch->fetch($feed->{feedurl}, UserAgent=>$ua);
- if (! $res) {
- $feed->{message}=URI::Fetch->errstr;
- $feed->{error}=1;
- debug($feed->{message});
- next;
- }
-
- # lastupdate is only set if we were able to contact the server
- $feed->{lastupdate}=$feed->{lasttry};
-
- if ($res->status == URI::Fetch::URI_GONE()) {
- $feed->{message}=gettext("feed not found");
- $feed->{error}=1;
- debug($feed->{message});
- next;
- }
- my $content=$res->content;
- my $f=eval{XML::Feed->parse(\$content)};
- if ($@) {
- # One common cause of XML::Feed crashing is a feed
- # that contains invalid UTF-8 sequences. Convert
-			# feed to ascii to try to work around the problem.
- $feed->{message}.=" ".sprintf(gettext("(invalid UTF-8 stripped from feed)"));
- $f=eval {
- $content=Encode::decode_utf8($content, 0);
- XML::Feed->parse(\$content)
- };
- }
- if ($@) {
- # Another possibility is badly escaped entities.
- $feed->{message}.=" ".sprintf(gettext("(feed entities escaped)"));
- $content=~s/\&(?!amp)(\w+);/&$1;/g;
- $f=eval {
- $content=Encode::decode_utf8($content, 0);
- XML::Feed->parse(\$content)
- };
- }
- if ($@) {
- # gettext can clobber $@
- my $error = $@;
- $feed->{message}=gettext("feed crashed XML::Feed!")." ($error)";
- $feed->{error}=1;
- debug($feed->{message});
- next;
- }
- if (! $f) {
- $feed->{message}=XML::Feed->errstr;
- $feed->{error}=1;
- debug($feed->{message});
- next;
- }
-
- foreach my $entry ($f->entries) {
- # XML::Feed doesn't work around XML::Atom's bizarre
- # API, so we will. Real unicode strings? Yes please.
- # See [[bugs/Aggregated_Atom_feeds_are_double-encoded]]
- local $XML::Atom::ForceUnicode = 1;
-
- my $c=$entry->content;
- # atom feeds may have no content, only a summary
- if (! defined $c && ref $entry->summary) {
- $c=$entry->summary;
- }
-
- add_page(
- feed => $feed,
- copyright => $f->copyright,
- title => defined $entry->title ? decode_entities($entry->title) : "untitled",
- author => defined $entry->author ? decode_entities($entry->author) : "",
- link => $entry->link,
- content => (defined $c && defined $c->body) ? $c->body : "",
- guid => defined $entry->id ? $entry->id : time."_".$feed->{name},
- ctime => $entry->issued ? ($entry->issued->epoch || time) : time,
- base => (defined $c && $c->can("base")) ? $c->base : undef,
- );
- }
- }
-}
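# (Illustrative aside, not part of this file: the cascading-recovery
# idiom aggregate() uses above, as a self-contained sketch. Note that
# XML::Feed->parse takes a scalar reference, and that decode_utf8 with
# a check value of 0 substitutes malformed byte sequences instead of
# croaking.)
use strict;
use warnings;
use Encode ();
use XML::Feed;

my $content = do { local $/; <STDIN> };	# raw feed bytes, for the sketch
my $f = eval { XML::Feed->parse(\$content) };
if ($@) {
	$f = eval {
		$content = Encode::decode_utf8($content, 0);
		XML::Feed->parse(\$content);
	};
}
die "feed could not be parsed\n" if $@ || ! $f;
print $f->title, "\n";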
-
-sub add_page (@) {
- my %params=@_;
-
- my $feed=$params{feed};
- my $guid={};
- my $mtime;
- if (exists $guids{$params{guid}}) {
- # updating an existing post
- $guid=$guids{$params{guid}};
- return if $guid->{expired};
- write_page($feed, $guid, $mtime, \%params);
- }
- else {
- # new post
- $guid->{guid}=$params{guid};
- $guids{$params{guid}}=$guid;
- $mtime=$params{ctime};
- $feed->{numposts}++;
- $feed->{newposts}++;
-
- # assign it an unused page
- my $page=titlepage($params{title});
-		# escape slashes and periods in the title so it can't specify a
-		# directory name or trigger the ".."-disallowing code.
- $page=~s!([/.])!"__".ord($1)."__"!eg;
- $page=$feed->{dir}."/".$page;
- ($page)=$page=~/$config{wiki_file_regexp}/;
- if (! defined $page || ! length $page) {
- $page=$feed->{dir}."/item";
- }
- my $c="";
- while (exists $IkiWiki::pagecase{lc $page.$c} ||
- -e $IkiWiki::Plugin::transient::transientdir."/".htmlfn($page.$c) ||
- -e "$config{srcdir}/".htmlfn($page.$c)) {
- $c++
- }
- $page=$page.$c;
-
- $guid->{page}=$page;
- eval { write_page($feed, $guid, $mtime, \%params) };
- if ($@) {
- # assume failure was due to a too long filename
- $c="";
- $page=$feed->{dir}."/item";
- while (exists $IkiWiki::pagecase{lc $page.$c} ||
- -e $IkiWiki::Plugin::transient::transientdir."/".htmlfn($page.$c) ||
- -e "$config{srcdir}/".htmlfn($page.$c)) {
- $c++
- }
- $page=$page.$c;
-
- $guid->{page}=$page;
- write_page($feed, $guid, $mtime, \%params);
- }
-
- debug(sprintf(gettext("creating new page %s"), $page));
- }
-}
-
-sub write_page ($$$$$) {
- my $feed=shift;
- my $guid=shift;
- my $mtime=shift;
- my %params=%{shift()};
-
- $guid->{feed}=$feed->{name};
-
-	# To write or not to write? Avoid writing unchanged pages,
-	# to prevent unnecessary rebuilding. The mtime from rss cannot be
- # trusted; let's use a digest.
- eval q{use Digest::MD5 'md5_hex'};
- error($@) if $@;
- require Encode;
- my $digest=md5_hex(Encode::encode_utf8($params{content}));
- return unless ! exists $guid->{md5} || $guid->{md5} ne $digest || $config{rebuild};
- $guid->{md5}=$digest;
-
- # Create the page.
- my $template;
- eval {
- $template=template($feed->{template}, blind_cache => 1);
- };
- if ($@) {
- # gettext can clobber $@
- my $error = $@;
- print STDERR gettext("failed to process template:")." $error";
- return;
- }
- $template->param(title => $params{title})
- if defined $params{title} && length($params{title});
- $template->param(author => $params{author})
-		if defined $params{author} && length($params{author})
-		&& $params{author} ne $feed->{name};
- $template->param(content => wikiescape(htmlabs($params{content},
- defined $params{base} ? $params{base} : $feed->{feedurl})));
- $template->param(name => $feed->{name});
- $template->param(url => $feed->{url});
- $template->param(copyright => $params{copyright})
- if defined $params{copyright} && length $params{copyright};
- $template->param(permalink => IkiWiki::urlabs($params{link}, $feed->{feedurl}))
- if defined $params{link};
- if (ref $feed->{tags}) {
- $template->param(tags => [map { tag => $_ }, @{$feed->{tags}}]);
- }
- writefile(htmlfn($guid->{page}),
- $IkiWiki::Plugin::transient::transientdir, $template->output);
-
- if (defined $mtime && $mtime <= time) {
-		# Set the mtime; this lets the build process get the right
- # creation time on record for the new page.
- utime $mtime, $mtime,
- $IkiWiki::Plugin::transient::transientdir."/".htmlfn($guid->{page});
- # Store it in pagectime for expiry code to use also.
- $IkiWiki::pagectime{$guid->{page}}=$mtime
- unless exists $IkiWiki::pagectime{$guid->{page}};
- }
- else {
- # Dummy value for expiry code.
- $IkiWiki::pagectime{$guid->{page}}=time
- unless exists $IkiWiki::pagectime{$guid->{page}};
- }
-}
-
-sub wikiescape ($) {
- # escape accidental wikilinks and preprocessor stuff
- return encode_entities(shift, '\[\]');
-}
-
-sub htmlabs ($$) {
- # Convert links in html from relative to absolute.
- # Note that this is a heuristic, which is not specified by the rss
- # spec and may not be right for all feeds. Also, see Debian
- # bug #381359.
- my $html=shift;
- my $urlbase=shift;
-
- my $ret="";
- my $p = HTML::Parser->new(api_version => 3);
- $p->handler(default => sub { $ret.=join("", @_) }, "text");
- $p->handler(start => sub {
- my ($tagname, $pos, $text) = @_;
- if (ref $HTML::Tagset::linkElements{$tagname}) {
- while (4 <= @$pos) {
- # use attribute sets from right to left
- # to avoid invalidating the offsets
- # when replacing the values
- my($k_offset, $k_len, $v_offset, $v_len) =
- splice(@$pos, -4);
- my $attrname = lc(substr($text, $k_offset, $k_len));
- next unless grep { $_ eq $attrname } @{$HTML::Tagset::linkElements{$tagname}};
- next unless $v_offset; # 0 v_offset means no value
- my $v = substr($text, $v_offset, $v_len);
- $v =~ s/^([\'\"])(.*)\1$/$2/;
- my $new_v=IkiWiki::urlabs($v, $urlbase);
- $new_v =~ s/\"/"/g; # since we quote with ""
- substr($text, $v_offset, $v_len) = qq("$new_v");
- }
- }
- $ret.=$text;
- }, "tagname, tokenpos, text");
- $p->parse($html);
- $p->eof;
-
- return $ret;
-}
-
-sub htmlfn ($) {
- return shift().".".($config{aggregateinternal} ? "_aggregated" : $config{htmlext});
-}
-
-my $aggregatelock;
-
-sub lockaggregate () {
- # Take an exclusive lock to prevent multiple concurrent aggregators.
-	# Returns true if the lock was acquired.
- if (! -d $config{wikistatedir}) {
- mkdir($config{wikistatedir});
- }
- open($aggregatelock, '>', "$config{wikistatedir}/aggregatelock") ||
-		error ("cannot open $config{wikistatedir}/aggregatelock: $!");
- if (! flock($aggregatelock, 2 | 4)) { # LOCK_EX | LOCK_NB
- close($aggregatelock) || error("failed closing aggregatelock: $!");
- return 0;
- }
- return 1;
-}
-
-sub unlockaggregate () {
- return close($aggregatelock) if $aggregatelock;
- return;
-}
-
-1
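
(Reviewer's aside, not part of the diff: aggregate() above fetches
remote feed URLs through whatever useragent() returns, which is where
the CVE in this upload bites. A standalone sketch of an LWP::UserAgent
limited to web schemes; the URL is illustrative only.)

use strict;
use warnings;
use LWP::UserAgent;

# protocols_allowed is a stock LWP::UserAgent option; any scheme not
# listed (file, ftp, gopher, ...) is refused before a request is made.
my $ua = LWP::UserAgent->new(
	env_proxy         => 1,
	protocols_allowed => [qw(http https)],
);
my $res = $ua->get("file:///etc/passwd");
print $res->status_line, "\n";	# an error status, not the file contents
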
diff -Nru ikiwiki-3.20141016.4+deb8u1/.pc/CVE-2019-9187-3.patch/IkiWiki/Plugin/blogspam.pm ikiwiki-3.20141016.4/.pc/CVE-2019-9187-3.patch/IkiWiki/Plugin/blogspam.pm
--- ikiwiki-3.20141016.4+deb8u1/.pc/CVE-2019-9187-3.patch/IkiWiki/Plugin/blogspam.pm 2019-03-07 17:32:54.000000000 +1100
+++ ikiwiki-3.20141016.4/.pc/CVE-2019-9187-3.patch/IkiWiki/Plugin/blogspam.pm 1970-01-01 10:00:00.000000000 +1000
@@ -1,150 +0,0 @@
-#!/usr/bin/perl
-package IkiWiki::Plugin::blogspam;
-
-use warnings;
-use strict;
-use IkiWiki 3.00;
-use Encode;
-
-my $defaulturl='http://test.blogspam.net:9999/';
-my $client;
-
-sub import {
- hook(type => "getsetup", id => "blogspam", call => \&getsetup);
- hook(type => "checkconfig", id => "blogspam", call => \&checkconfig);
- hook(type => "checkcontent", id => "blogspam", call => \&checkcontent);
-}
-
-sub getsetup () {
- return
- plugin => {
- safe => 1,
- rebuild => 0,
- section => "auth",
- },
- blogspam_pagespec => {
- type => 'pagespec',
- example => 'postcomment(*)',
- description => 'PageSpec of pages to check for spam',
- link => 'ikiwiki/PageSpec',
- safe => 1,
- rebuild => 0,
- },
- blogspam_options => {
- type => "string",
- example => "blacklist=1.2.3.4,blacklist=8.7.6.5,max-links=10",
- description => "options to send to blogspam server",
- link => "http://blogspam.net/api/2.0/testComment.html#options",
- safe => 1,
- rebuild => 0,
- },
- blogspam_server => {
- type => "string",
- default => $defaulturl,
- description => "blogspam server JSON url",
- safe => 1,
- rebuild => 0,
- },
-}
-
-sub checkconfig () {
- # This is done at checkconfig time because printing an error
- # if the module is missing when a spam is posted would not
- # let the admin know about the problem.
- eval q{
- use JSON;
- use HTTP::Request;
- };
- error $@ if $@;
-
- eval q{use LWPx::ParanoidAgent};
- if (!$@) {
- $client=LWPx::ParanoidAgent->new(agent => $config{useragent});
- }
- else {
- eval q{use LWP};
- if ($@) {
- error $@;
- return;
- }
- $client=useragent();
- }
-}
-
-sub checkcontent (@) {
- my %params=@_;
- my $session=$params{session};
-
- my $spec='!admin()';
- if (exists $config{blogspam_pagespec} &&
- length $config{blogspam_pagespec}) {
- $spec.=" and (".$config{blogspam_pagespec}.")";
- }
-
- my $user=$session->param("name");
- return undef unless pagespec_match($params{page}, $spec,
- (defined $user ? (user => $user) : ()),
- (defined $session->remote_addr() ? (ip => $session->remote_addr()) : ()),
- location => $params{page});
-
- my $url=$defaulturl;
- $url = $config{blogspam_server} if exists $config{blogspam_server};
-
- my @options = split(",", $config{blogspam_options})
- if exists $config{blogspam_options};
-
- # Allow short comments and whitespace-only edits, unless the user
- # has overridden min-words themselves.
- push @options, "min-words=0"
- unless grep /^min-words=/i, @options;
- # Wiki pages can have a lot of urls, unless the user specifically
- # wants to limit them.
- push @options, "exclude=lotsaurls"
- unless grep /^max-links/i, @options;
- # Unless the user specified a size check, disable such checking.
- push @options, "exclude=size"
- unless grep /^(?:max|min)-size/i, @options;
- # This test has absurd false positives on words like "alpha"
- # and "buy".
- push @options, "exclude=stopwords";
-
- my %req=(
- ip => $session->remote_addr(),
- comment => encode_utf8(defined $params{diff} ? $params{diff} : $params{content}),
- subject => encode_utf8(defined $params{subject} ? $params{subject} : ""),
- name => encode_utf8(defined $params{author} ? $params{author} : ""),
- link => encode_utf8(exists $params{url} ? $params{url} : ""),
- options => join(",", @options),
- site => encode_utf8($config{url}),
- version => "ikiwiki ".$IkiWiki::version,
- );
- eval q{use JSON; use HTTP::Request}; # errors handled in checkconfig()
- my $res = $client->request(
- HTTP::Request->new(
- 'POST',
- $url,
- [ 'Content-Type' => 'application/json' ],
- to_json(\%req),
- ),
- );
-
- if (! ref $res || ! $res->is_success()) {
- debug("failed to get response from blogspam server ($url)");
- return undef;
- }
- my $details = from_json($res->content);
- if ($details->{result} eq 'SPAM') {
- eval q{use Data::Dumper};
- debug("blogspam server reports $details->{reason}: ".Dumper(\%req));
- return gettext("Sorry, but that looks like spam to <a href=\"http://blogspam.net/\">blogspam</a>: ").$details->{reason};
- }
- elsif ($details->{result} ne 'OK') {
- debug("blogspam server failure: ".$res->content);
- return undef;
- }
- else {
- return undef;
- }
-}
-
-1
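
(Reviewer's aside, not part of the diff: checkcontent() above builds its
JSON request by hand; here is the same wire format reduced to a runnable
sketch. The endpoint and field names are copied from the plugin code
above, not independently verified, and the IP is a documentation
address, not a real client.)

use strict;
use warnings;
use JSON;
use LWP::UserAgent;
use HTTP::Request;

my $ua  = LWP::UserAgent->new;
my $req = HTTP::Request->new(
	'POST',
	'http://test.blogspam.net:9999/',
	[ 'Content-Type' => 'application/json' ],
	to_json({
		ip      => '192.0.2.1',
		comment => 'example comment',
		options => 'min-words=0',
	}),
);
my $res = $ua->request($req);
print from_json($res->content)->{result}, "\n" if $res->is_success;
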
diff -Nru ikiwiki-3.20141016.4+deb8u1/.pc/CVE-2019-9187-3.patch/IkiWiki/Plugin/openid.pm ikiwiki-3.20141016.4/.pc/CVE-2019-9187-3.patch/IkiWiki/Plugin/openid.pm
--- ikiwiki-3.20141016.4+deb8u1/.pc/CVE-2019-9187-3.patch/IkiWiki/Plugin/openid.pm 2019-03-07 17:32:54.000000000 +1100
+++ ikiwiki-3.20141016.4/.pc/CVE-2019-9187-3.patch/IkiWiki/Plugin/openid.pm 1970-01-01 10:00:00.000000000 +1000
@@ -1,286 +0,0 @@
-#!/usr/bin/perl
-# OpenID support.
-package IkiWiki::Plugin::openid;
-
-use warnings;
-use strict;
-use IkiWiki 3.00;
-
-sub import {
- add_underlay("openid-selector");
- add_underlay("jquery");
- hook(type => "checkconfig", id => "openid", call => \&checkconfig);
- hook(type => "getsetup", id => "openid", call => \&getsetup);
- hook(type => "auth", id => "openid", call => \&auth);
- hook(type => "formbuilder_setup", id => "openid",
- call => \&formbuilder_setup, last => 1);
-}
-
-sub checkconfig () {
- if ($config{cgi}) {
- # Intercept normal signin form, so the openid selector
- # can be displayed.
- #
- # When other auth hooks are registered, give the selector
- # a reference to the normal signin form.
- require IkiWiki::CGI;
- my $real_cgi_signin;
- if (keys %{$IkiWiki::hooks{auth}} > 1) {
- $real_cgi_signin=\&IkiWiki::cgi_signin;
- }
- inject(name => "IkiWiki::cgi_signin", call => sub ($$) {
- openid_selector($real_cgi_signin, @_);
- });
- }
-}
-
-sub getsetup () {
- return
- plugin => {
- safe => 1,
- rebuild => 0,
- section => "auth",
- },
- openid_realm => {
- type => "string",
- description => "url pattern of openid realm (default is cgiurl)",
- safe => 0,
- rebuild => 0,
- },
- openid_cgiurl => {
- type => "string",
- description => "url to ikiwiki cgi to use for openid authentication (default is cgiurl)",
- safe => 0,
- rebuild => 0,
- },
-}
-
-sub openid_selector {
- my $real_cgi_signin=shift;
- my $q=shift;
- my $session=shift;
-
- my $openid_url=$q->param('openid_identifier');
- my $openid_error;
-
- if (! load_openid_module()) {
- if ($real_cgi_signin) {
- $real_cgi_signin->($q, $session);
- exit;
- }
- error(sprintf(gettext("failed to load openid module: "), @_));
- }
- elsif (defined $q->param("action") && $q->param("action") eq "verify") {
- validate($q, $session, $openid_url, sub {
- $openid_error=shift;
- });
- }
-
- my $template=IkiWiki::template("openid-selector.tmpl");
- $template->param(
- cgiurl => IkiWiki::cgiurl(),
- (defined $openid_error ? (openid_error => $openid_error) : ()),
- (defined $openid_url ? (openid_url => $openid_url) : ()),
- ($real_cgi_signin ? (nonopenidform => $real_cgi_signin->($q, $session, 1)) : ()),
- );
-
- IkiWiki::printheader($session);
- print IkiWiki::cgitemplate($q, "signin", $template->output);
- exit;
-}
-
-sub formbuilder_setup (@) {
- my %params=@_;
-
- my $form=$params{form};
- my $session=$params{session};
- my $cgi=$params{cgi};
-
- if ($form->title eq "preferences" &&
- IkiWiki::openiduser($session->param("name"))) {
- $form->field(name => "openid_identifier", disabled => 1,
- label => htmllink("", "", "ikiwiki/OpenID", noimageinline => 1),
- value => "",
- size => 1, force => 1,
- fieldset => "login",
- comment => $session->param("name"));
- $form->field(name => "email", type => "hidden");
- }
-}
-
-sub validate ($$$;$) {
- my $q=shift;
- my $session=shift;
- my $openid_url=shift;
- my $errhandler=shift;
-
- my $csr=getobj($q, $session);
-
- my $claimed_identity = $csr->claimed_identity($openid_url);
- if (! $claimed_identity) {
- if ($errhandler) {
- if (ref($errhandler) eq 'CODE') {
- $errhandler->($csr->err);
- }
- return 0;
- }
- else {
- error($csr->err);
- }
- }
-
- # Ask for client to provide a name and email, if possible.
- # Try sreg and ax
- if ($claimed_identity->can("set_extension_args")) {
- $claimed_identity->set_extension_args(
- 'http://openid.net/extensions/sreg/1.1',
- {
- optional => 'email,fullname,nickname',
- },
- );
- $claimed_identity->set_extension_args(
- 'http://openid.net/srv/ax/1.0',
- {
- mode => 'fetch_request',
- 'required' => 'email,fullname,nickname,firstname',
- 'type.email' => "http://schema.openid.net/contact/email",
- 'type.fullname' => "http://axschema.org/namePerson",
- 'type.nickname' => "http://axschema.org/namePerson/friendly",
- 'type.firstname' => "http://axschema.org/namePerson/first",
- },
- );
- }
-
- my $cgiurl=$config{openid_cgiurl};
- $cgiurl=$q->url if ! defined $cgiurl;
-
- my $trust_root=$config{openid_realm};
- $trust_root=$cgiurl if ! defined $trust_root;
-
- my $check_url = $claimed_identity->check_url(
- return_to => auto_upgrade_https($q, "$cgiurl?do=postsignin"),
- trust_root => auto_upgrade_https($q, $trust_root),
- delayed_return => 1,
- );
- # Redirect the user to the OpenID server, which will
- # eventually bounce them back to auth()
- IkiWiki::redirect($q, $check_url);
- exit 0;
-}
-
-sub auth ($$) {
- my $q=shift;
- my $session=shift;
-
- if (defined $q->param('openid.mode')) {
- my $csr=getobj($q, $session);
-
- if (my $setup_url = $csr->user_setup_url) {
- IkiWiki::redirect($q, $setup_url);
- }
- elsif ($csr->user_cancel) {
- IkiWiki::redirect($q, IkiWiki::baseurl(undef));
- }
- elsif (my $vident = $csr->verified_identity) {
- $session->param(name => $vident->url);
-
- my @extensions;
- if ($vident->can("signed_extension_fields")) {
- @extensions=grep { defined } (
- $vident->signed_extension_fields('http://openid.net/extensions/sreg/1.1'),
- $vident->signed_extension_fields('http://openid.net/srv/ax/1.0'),
- );
- }
- my $nickname;
- foreach my $ext (@extensions) {
- foreach my $field (qw{value.email email}) {
- if (exists $ext->{$field} &&
- defined $ext->{$field} &&
- length $ext->{$field}) {
- $session->param(email => $ext->{$field});
- if (! defined $nickname &&
- $ext->{$field}=~/(.+)@.+/) {
- $nickname = $1;
- }
- last;
- }
- }
- foreach my $field (qw{value.nickname nickname value.fullname fullname value.firstname}) {
- if (exists $ext->{$field} &&
- defined $ext->{$field} &&
- length $ext->{$field}) {
- $nickname=$ext->{$field};
- last;
- }
- }
- }
- if (defined $nickname) {
- $session->param(nickname =>
- Encode::decode_utf8($nickname));
- }
- }
- else {
- error("OpenID failure: ".$csr->err);
- }
- }
- elsif (defined $q->param('openid_identifier')) {
- # myopenid.com affiliate support
- validate($q, $session, scalar $q->param('openid_identifier'));
- }
-}
-
-sub getobj ($$) {
- my $q=shift;
- my $session=shift;
-
- eval q{use Net::INET6Glue::INET_is_INET6}; # may not be available
- eval q{use Net::OpenID::Consumer};
- error($@) if $@;
-
- my $ua;
- eval q{use LWPx::ParanoidAgent};
- if (! $@) {
- $ua=LWPx::ParanoidAgent->new(agent => $config{useragent});
- }
- else {
- $ua=useragent();
- }
-
- # Store the secret in the session.
- my $secret=$session->param("openid_secret");
- if (! defined $secret) {
- $secret=rand;
- $session->param(openid_secret => $secret);
- }
-
- my $cgiurl=$config{openid_cgiurl};
- $cgiurl=$q->url if ! defined $cgiurl;
-
- return Net::OpenID::Consumer->new(
- ua => $ua,
- args => $q,
- consumer_secret => sub { return shift()+$secret },
- required_root => auto_upgrade_https($q, $cgiurl),
- );
-}
-
-sub auto_upgrade_https {
- my $q=shift;
- my $url=shift;
- if ($q->https()) {
- $url=~s/^http:/https:/i;
- }
- return $url;
-}
-
-sub load_openid_module {
- # Give up if module is unavailable to avoid needing to depend on it.
- eval q{use Net::OpenID::Consumer};
- if ($@) {
- debug("unable to load Net::OpenID::Consumer, not enabling OpenID login ($@)");
- return;
- }
- return 1;
-}
-
-1
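
(Reviewer's aside, not part of the diff: getobj() above prefers
LWPx::ParanoidAgent and quietly falls back to plain LWP, a pattern the
other plugins in this diff repeat. In isolation, with an illustrative
agent string:)

use strict;
use warnings;

my $ua;
eval q{use LWPx::ParanoidAgent};
if (! $@) {
	# ParanoidAgent refuses connections to private/loopback addresses.
	$ua = LWPx::ParanoidAgent->new(agent => 'example/1.0');
}
else {
	eval q{use LWP::UserAgent};
	die $@ if $@;
	$ua = LWP::UserAgent->new(agent => 'example/1.0');
}
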
diff -Nru ikiwiki-3.20141016.4+deb8u1/.pc/CVE-2019-9187-3.patch/IkiWiki/Plugin/pinger.pm ikiwiki-3.20141016.4/.pc/CVE-2019-9187-3.patch/IkiWiki/Plugin/pinger.pm
--- ikiwiki-3.20141016.4+deb8u1/.pc/CVE-2019-9187-3.patch/IkiWiki/Plugin/pinger.pm 2019-03-07 17:32:54.000000000 +1100
+++ ikiwiki-3.20141016.4/.pc/CVE-2019-9187-3.patch/IkiWiki/Plugin/pinger.pm 1970-01-01 10:00:00.000000000 +1000
@@ -1,121 +0,0 @@
-#!/usr/bin/perl
-package IkiWiki::Plugin::pinger;
-
-use warnings;
-use strict;
-use IkiWiki 3.00;
-
-my %pages;
-my $pinged=0;
-
-sub import {
- hook(type => "getsetup", id => "pinger", call => \&getsetup);
- hook(type => "needsbuild", id => "pinger", call => \&needsbuild);
- hook(type => "preprocess", id => "ping", call => \&preprocess);
- hook(type => "delete", id => "pinger", call => \&ping);
- hook(type => "rendered", id => "pinger", call => \&ping);
-}
-
-sub getsetup () {
- return
- plugin => {
- safe => 1,
- rebuild => 0,
- },
- pinger_timeout => {
- type => "integer",
- example => 15,
- description => "how many seconds to try pinging before timing out",
- safe => 1,
- rebuild => 0,
- },
-}
-
-sub needsbuild (@) {
- my $needsbuild=shift;
- foreach my $page (keys %pagestate) {
- if (exists $pagestate{$page}{pinger}) {
- $pages{$page}=1;
- if (exists $pagesources{$page} &&
- grep { $_ eq $pagesources{$page} } @$needsbuild) {
- # remove state, will be re-added if
- # the ping directive is still present
- # on rebuild.
- delete $pagestate{$page}{pinger};
- }
- }
- }
- return $needsbuild;
-}
-
-sub preprocess (@) {
- my %params=@_;
- if (! exists $params{from} || ! exists $params{to}) {
- error gettext("requires 'from' and 'to' parameters");
- }
- if ($params{from} eq $config{url}) {
- $pagestate{$params{destpage}}{pinger}{$params{to}}=1;
- $pages{$params{destpage}}=1;
- return sprintf(gettext("Will ping %s"), $params{to});
- }
- else {
- return sprintf(gettext("Ignoring ping directive for wiki %s (this wiki is %s)"), $params{from}, $config{url});
- }
-}
-
-sub ping {
- if (! $pinged && %pages) {
- $pinged=1;
-
- eval q{use Net::INET6Glue::INET_is_INET6}; # may not be available
-
- my $ua;
- eval q{use LWPx::ParanoidAgent};
- if (!$@) {
- $ua=LWPx::ParanoidAgent->new(agent => $config{useragent});
- }
- else {
- eval q{use LWP};
- if ($@) {
- debug(gettext("LWP not found, not pinging"));
- return;
- }
- $ua=useragent();
- }
- $ua->timeout($config{pinger_timeout} || 15);
-
- # daemonise here so slow pings don't slow down wiki updates
- defined(my $pid = fork) or error("Can't fork: $!");
- return if $pid;
- chdir '/';
- open STDIN, '/dev/null';
- open STDOUT, '>/dev/null';
- POSIX::setsid() or error("Can't start a new session: $!");
- open STDERR, '>&STDOUT' or error("Can't dup stdout: $!");
-
- # Don't need to keep a lock on the wiki as a daemon.
- IkiWiki::unlockwiki();
-
- my %urls;
-		foreach my $page (keys %pages) {
- if (exists $pagestate{$page}{pinger}) {
- $urls{$_}=1 foreach keys %{$pagestate{$page}{pinger}};
- }
- }
- foreach my $url (keys %urls) {
- # Try to avoid pinging ourselves. If this check
- # fails, it's not the end of the world, since we
- # only ping when a page was changed, so a ping loop
- # will still be avoided.
- next if $url=~/^\Q$config{cgiurl}\E/;
- my $local_cgiurl = IkiWiki::cgiurl();
- next if $url=~/^\Q$local_cgiurl\E/;
-
- $ua->get($url);
- }
-
- exit 0;
- }
-}
-
-1
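
(Reviewer's aside, not part of the diff: ping() above detaches a daemon
before making its slow requests. The same dance as a standalone sketch,
assuming POSIX semantics; a production daemon would also reset its
umask and signal handlers.)

use strict;
use warnings;
use POSIX ();

defined(my $pid = fork) or die "Can't fork: $!";
exit 0 if $pid;	# parent returns immediately
chdir '/' or die "chdir: $!";
open STDIN,  '<', '/dev/null' or die "stdin: $!";
open STDOUT, '>', '/dev/null' or die "stdout: $!";
POSIX::setsid() or die "Can't start a new session: $!";
open STDERR, '>&', \*STDOUT or die "Can't dup stdout: $!";
# ... slow work happens here, then ...
exit 0;
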
diff -Nru ikiwiki-3.20141016.4+deb8u1/.pc/CVE-2019-9187-3.patch/IkiWiki.pm ikiwiki-3.20141016.4/.pc/CVE-2019-9187-3.patch/IkiWiki.pm
--- ikiwiki-3.20141016.4+deb8u1/.pc/CVE-2019-9187-3.patch/IkiWiki.pm 2019-03-07 17:32:54.000000000 +1100
+++ ikiwiki-3.20141016.4/.pc/CVE-2019-9187-3.patch/IkiWiki.pm 1970-01-01 10:00:00.000000000 +1000
@@ -1,3032 +0,0 @@
-#!/usr/bin/perl
-
-package IkiWiki;
-
-use warnings;
-use strict;
-use Encode;
-use URI::Escape q{uri_escape_utf8};
-use POSIX ();
-use Storable;
-use open qw{:utf8 :std};
-
-use vars qw{%config %links %oldlinks %pagemtime %pagectime %pagecase
- %pagestate %wikistate %renderedfiles %oldrenderedfiles
- %pagesources %delpagesources %destsources %depends %depends_simple
- @mass_depends %hooks %forcerebuild %loaded_plugins %typedlinks
- %oldtypedlinks %autofiles @underlayfiles $lastrev $phase};
-
-use Exporter q{import};
-our @EXPORT = qw(hook debug error htmlpage template template_depends
- deptype add_depends pagespec_match pagespec_match_list bestlink
- htmllink readfile writefile pagetype srcfile pagename
- displaytime strftime_utf8 will_render gettext ngettext urlto targetpage
- add_underlay pagetitle titlepage linkpage newpagefile
- inject add_link add_autofile useragent
- %config %links %pagestate %wikistate %renderedfiles
- %pagesources %destsources %typedlinks);
-our $VERSION = 3.00; # plugin interface version, next is ikiwiki version
-our $version='unknown'; # VERSION_AUTOREPLACE done by Makefile, DNE
-our $installdir='/usr'; # INSTALLDIR_AUTOREPLACE done by Makefile, DNE
-
-# Page dependency types.
-our $DEPEND_CONTENT=1;
-our $DEPEND_PRESENCE=2;
-our $DEPEND_LINKS=4;
-
-# Phases of processing.
-sub PHASE_SCAN () { 0 }
-sub PHASE_RENDER () { 1 }
-$phase = PHASE_SCAN;
-
-# Optimisation.
-use Memoize;
-memoize("abs2rel");
-memoize("sortspec_translate");
-memoize("pagespec_translate");
-memoize("template_file");
-
-sub getsetup () {
- wikiname => {
- type => "string",
- default => "wiki",
- description => "name of the wiki",
- safe => 1,
- rebuild => 1,
- },
- adminemail => {
- type => "string",
- default => undef,
- example => 'me@example.com',
- description => "contact email for wiki",
- safe => 1,
- rebuild => 0,
- },
- adminuser => {
- type => "string",
- default => [],
- description => "users who are wiki admins",
- safe => 1,
- rebuild => 0,
- },
- banned_users => {
- type => "string",
- default => [],
- description => "users who are banned from the wiki",
- safe => 1,
- rebuild => 0,
- },
- srcdir => {
- type => "string",
- default => undef,
- example => "$ENV{HOME}/wiki",
- description => "where the source of the wiki is located",
- safe => 0, # path
- rebuild => 1,
- },
- destdir => {
- type => "string",
- default => undef,
- example => "/var/www/wiki",
- description => "where to build the wiki",
- safe => 0, # path
- rebuild => 1,
- },
- url => {
- type => "string",
- default => '',
- example => "http://example.com/wiki",
- description => "base url to the wiki",
- safe => 1,
- rebuild => 1,
- },
- cgiurl => {
- type => "string",
- default => '',
- example => "http://example.com/wiki/ikiwiki.cgi",
- description => "url to the ikiwiki.cgi",
- safe => 1,
- rebuild => 1,
- },
- reverse_proxy => {
- type => "boolean",
- default => 0,
- description => "do not adjust cgiurl if CGI is accessed via different URL",
- advanced => 0,
- safe => 1,
- rebuild => 0, # only affects CGI requests
- },
- cgi_wrapper => {
- type => "string",
- default => '',
- example => "/var/www/wiki/ikiwiki.cgi",
- description => "filename of cgi wrapper to generate",
- safe => 0, # file
- rebuild => 0,
- },
- cgi_wrappermode => {
- type => "string",
- default => '06755',
- description => "mode for cgi_wrapper (can safely be made suid)",
- safe => 0,
- rebuild => 0,
- },
- cgi_overload_delay => {
- type => "string",
- default => '',
- example => "10",
- description => "number of seconds to delay CGI requests when overloaded",
- safe => 1,
- rebuild => 0,
- },
- cgi_overload_message => {
- type => "string",
- default => '',
- example => "Please wait",
- description => "message to display when overloaded (may contain html)",
- safe => 1,
- rebuild => 0,
- },
- only_committed_changes => {
- type => "boolean",
- default => 0,
- description => "enable optimization of only refreshing committed changes?",
- safe => 1,
- rebuild => 0,
- },
- rcs => {
- type => "string",
- default => '',
- description => "rcs backend to use",
- safe => 0, # don't allow overriding
- rebuild => 0,
- },
- default_plugins => {
- type => "internal",
- default => [qw{mdwn link inline meta htmlscrubber passwordauth
- openid signinedit lockedit conditional
- recentchanges parentlinks editpage
- templatebody}],
- description => "plugins to enable by default",
- safe => 0,
- rebuild => 1,
- },
- add_plugins => {
- type => "string",
- default => [],
- description => "plugins to add to the default configuration",
- safe => 1,
- rebuild => 1,
- },
- disable_plugins => {
- type => "string",
- default => [],
- description => "plugins to disable",
- safe => 1,
- rebuild => 1,
- },
- templatedir => {
- type => "string",
- default => "$installdir/share/ikiwiki/templates",
- description => "additional directory to search for template files",
- advanced => 1,
- safe => 0, # path
- rebuild => 1,
- },
- underlaydir => {
- type => "string",
- default => "$installdir/share/ikiwiki/basewiki",
- description => "base wiki source location",
- advanced => 1,
- safe => 0, # path
- rebuild => 0,
- },
- underlaydirbase => {
- type => "internal",
- default => "$installdir/share/ikiwiki",
- description => "parent directory containing additional underlays",
- safe => 0,
- rebuild => 0,
- },
- wrappers => {
- type => "internal",
- default => [],
- description => "wrappers to generate",
- safe => 0,
- rebuild => 0,
- },
- underlaydirs => {
- type => "internal",
- default => [],
- description => "additional underlays to use",
- safe => 0,
- rebuild => 0,
- },
- verbose => {
- type => "boolean",
- example => 1,
- description => "display verbose messages?",
- safe => 1,
- rebuild => 0,
- },
- syslog => {
- type => "boolean",
- example => 1,
- description => "log to syslog?",
- safe => 1,
- rebuild => 0,
- },
- usedirs => {
- type => "boolean",
- default => 1,
- description => "create output files named page/index.html?",
- safe => 0, # changing requires manual transition
- rebuild => 1,
- },
- prefix_directives => {
- type => "boolean",
- default => 1,
- description => "use '!'-prefixed preprocessor directives?",
- safe => 0, # changing requires manual transition
- rebuild => 1,
- },
- indexpages => {
- type => "boolean",
- default => 0,
- description => "use page/index.mdwn source files",
- safe => 1,
- rebuild => 1,
- },
- discussion => {
- type => "boolean",
- default => 1,
- description => "enable Discussion pages?",
- safe => 1,
- rebuild => 1,
- },
- discussionpage => {
- type => "string",
- default => gettext("Discussion"),
- description => "name of Discussion pages",
- safe => 1,
- rebuild => 1,
- },
- html5 => {
- type => "boolean",
- default => 0,
- description => "generate HTML5?",
- advanced => 0,
- safe => 1,
- rebuild => 1,
- },
- sslcookie => {
- type => "boolean",
- default => 0,
- description => "only send cookies over SSL connections?",
- advanced => 1,
- safe => 1,
- rebuild => 0,
- },
- default_pageext => {
- type => "string",
- default => "mdwn",
- description => "extension to use for new pages",
- safe => 0, # not sanitized
- rebuild => 0,
- },
- htmlext => {
- type => "string",
- default => "html",
- description => "extension to use for html files",
- safe => 0, # not sanitized
- rebuild => 1,
- },
- timeformat => {
- type => "string",
- default => '%c',
- description => "strftime format string to display date",
- advanced => 1,
- safe => 1,
- rebuild => 1,
- },
- locale => {
- type => "string",
- default => undef,
- example => "en_US.UTF-8",
- description => "UTF-8 locale to use",
- advanced => 1,
- safe => 0,
- rebuild => 1,
- },
- userdir => {
- type => "string",
- default => "",
- example => "users",
- description => "put user pages below specified page",
- safe => 1,
- rebuild => 1,
- },
- numbacklinks => {
- type => "integer",
- default => 10,
- description => "how many backlinks to show before hiding excess (0 to show all)",
- safe => 1,
- rebuild => 1,
- },
- hardlink => {
- type => "boolean",
- default => 0,
- description => "attempt to hardlink source files? (optimisation for large files)",
- advanced => 1,
- safe => 0, # paranoia
- rebuild => 0,
- },
- umask => {
- type => "string",
- example => "public",
- description => "force ikiwiki to use a particular umask (keywords public, group or private, or a number)",
- advanced => 1,
- safe => 0, # paranoia
- rebuild => 0,
- },
- wrappergroup => {
- type => "string",
- example => "ikiwiki",
- description => "group for wrappers to run in",
- advanced => 1,
- safe => 0, # paranoia
- rebuild => 0,
- },
- libdir => {
- type => "string",
- default => "",
- example => "$ENV{HOME}/.ikiwiki/",
- description => "extra library and plugin directory",
- advanced => 1,
- safe => 0, # directory
- rebuild => 0,
- },
- ENV => {
- type => "string",
- default => {},
- description => "environment variables",
- safe => 0, # paranoia
- rebuild => 0,
- },
- timezone => {
- type => "string",
- default => "",
- example => "US/Eastern",
- description => "time zone name",
- safe => 1,
- rebuild => 1,
- },
- include => {
- type => "string",
- default => undef,
- example => '^\.htaccess$',
- description => "regexp of normally excluded files to include",
- advanced => 1,
- safe => 0, # regexp
- rebuild => 1,
- },
- exclude => {
- type => "string",
- default => undef,
-		example => '^(.*\.private|Makefile)$',
- description => "regexp of files that should be skipped",
- advanced => 1,
- safe => 0, # regexp
- rebuild => 1,
- },
- wiki_file_prune_regexps => {
- type => "internal",
- default => [qr/(^|\/)\.\.(\/|$)/, qr/^\//, qr/^\./, qr/\/\./,
- qr/\.x?html?$/, qr/\.ikiwiki-new$/,
- qr/(^|\/).svn\//, qr/.arch-ids\//, qr/{arch}\//,
- qr/(^|\/)_MTN\//, qr/(^|\/)_darcs\//,
- qr/(^|\/)CVS\//, qr/\.dpkg-tmp$/],
- description => "regexps of source files to ignore",
- safe => 0,
- rebuild => 1,
- },
- wiki_file_chars => {
- type => "string",
- description => "specifies the characters that are allowed in source filenames",
- default => "-[:alnum:]+/.:_",
- safe => 0,
- rebuild => 1,
- },
- wiki_file_regexp => {
- type => "internal",
- description => "regexp of legal source files",
- safe => 0,
- rebuild => 1,
- },
- web_commit_regexp => {
- type => "internal",
- default => qr/^web commit (by (.*?(?=: |$))|from ([0-9a-fA-F:.]+[0-9a-fA-F])):?(.*)/,
- description => "regexp to parse web commits from logs",
- safe => 0,
- rebuild => 0,
- },
- cgi => {
- type => "internal",
- default => 0,
- description => "run as a cgi",
- safe => 0,
- rebuild => 0,
- },
- cgi_disable_uploads => {
- type => "internal",
- default => 1,
- description => "whether CGI should accept file uploads",
- safe => 0,
- rebuild => 0,
- },
- post_commit => {
- type => "internal",
- default => 0,
- description => "run as a post-commit hook",
- safe => 0,
- rebuild => 0,
- },
- rebuild => {
- type => "internal",
- default => 0,
- description => "running in rebuild mode",
- safe => 0,
- rebuild => 0,
- },
- setup => {
- type => "internal",
- default => undef,
- description => "running in setup mode",
- safe => 0,
- rebuild => 0,
- },
- clean => {
- type => "internal",
- default => 0,
- description => "running in clean mode",
- safe => 0,
- rebuild => 0,
- },
- refresh => {
- type => "internal",
- default => 0,
- description => "running in refresh mode",
- safe => 0,
- rebuild => 0,
- },
- test_receive => {
- type => "internal",
- default => 0,
- description => "running in receive test mode",
- safe => 0,
- rebuild => 0,
- },
- wrapper_background_command => {
- type => "internal",
- default => '',
- description => "background shell command to run",
- safe => 0,
- rebuild => 0,
- },
- gettime => {
- type => "internal",
- description => "running in gettime mode",
- safe => 0,
- rebuild => 0,
- },
- w3mmode => {
- type => "internal",
- default => 0,
- description => "running in w3mmode",
- safe => 0,
- rebuild => 0,
- },
- wikistatedir => {
- type => "internal",
- default => undef,
- description => "path to the .ikiwiki directory holding ikiwiki state",
- safe => 0,
- rebuild => 0,
- },
- setupfile => {
- type => "internal",
- default => undef,
- description => "path to setup file",
- safe => 0,
- rebuild => 0,
- },
- setuptype => {
- type => "internal",
- default => "Yaml",
- description => "perl class to use to dump setup file",
- safe => 0,
- rebuild => 0,
- },
- allow_symlinks_before_srcdir => {
- type => "boolean",
- default => 0,
- description => "allow symlinks in the path leading to the srcdir (potentially insecure)",
- safe => 0,
- rebuild => 0,
- },
- cookiejar => {
- type => "string",
- default => { file => "$ENV{HOME}/.ikiwiki/cookies" },
- description => "cookie control",
- safe => 0, # hooks into perl module internals
- rebuild => 0,
- },
- useragent => {
- type => "string",
- default => "ikiwiki/$version",
- example => "Wget/1.13.4 (linux-gnu)",
- description => "set custom user agent string for outbound HTTP requests e.g. when fetching aggregated RSS feeds",
- safe => 0,
- rebuild => 0,
- },
-}
-
-sub defaultconfig () {
- my %s=getsetup();
- my @ret;
- foreach my $key (keys %s) {
- push @ret, $key, $s{$key}->{default};
- }
- return @ret;
-}
-
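-# Illustrative sketch (not upstream code): defaultconfig() flattens the
-# defaults above into a key/value list, so callers can overlay their own
-# settings, e.g. %config=(defaultconfig(), srcdir => "src", destdir => "dest");
-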
-# URL to top of wiki as a path starting with /, valid from any wiki page or
-# the CGI; if that's not possible, an absolute URL. Either way, it ends with /
-my $local_url;
-# URL to CGI script, similar to $local_url
-my $local_cgiurl;
-
-sub checkconfig () {
- # locale stuff; avoid LC_ALL since it overrides everything
- if (defined $ENV{LC_ALL}) {
- $ENV{LANG} = $ENV{LC_ALL};
- delete $ENV{LC_ALL};
- }
- if (defined $config{locale}) {
- if (POSIX::setlocale(&POSIX::LC_ALL, $config{locale})) {
- $ENV{LANG}=$config{locale};
- define_gettext();
- }
- }
-
- if (! defined $config{wiki_file_regexp}) {
- $config{wiki_file_regexp}=qr/(^[$config{wiki_file_chars}]+$)/;
- }
-
- if (ref $config{ENV} eq 'HASH') {
- foreach my $val (keys %{$config{ENV}}) {
- $ENV{$val}=$config{ENV}{$val};
- }
- }
- if (defined $config{timezone} && length $config{timezone}) {
- $ENV{TZ}=$config{timezone};
- }
- else {
- $config{timezone}=$ENV{TZ};
- }
-
- if ($config{w3mmode}) {
- eval q{use Cwd q{abs_path}};
- error($@) if $@;
- $config{srcdir}=possibly_foolish_untaint(abs_path($config{srcdir}));
- $config{destdir}=possibly_foolish_untaint(abs_path($config{destdir}));
- $config{cgiurl}="file:///\$LIB/ikiwiki-w3m.cgi/".$config{cgiurl}
- unless $config{cgiurl} =~ m!file:///!;
- $config{url}="file://".$config{destdir};
- }
-
- if ($config{cgi} && ! length $config{url}) {
- error(gettext("Must specify url to wiki with --url when using --cgi"));
- }
-
- if (defined $config{url} && length $config{url}) {
- eval q{use URI};
- my $baseurl = URI->new($config{url});
-
- $local_url = $baseurl->path . "/";
- $local_cgiurl = undef;
-
- if (length $config{cgiurl}) {
- my $cgiurl = URI->new($config{cgiurl});
-
- $local_cgiurl = $cgiurl->path;
-
- if ($cgiurl->scheme eq 'https' &&
- $baseurl->scheme eq 'http') {
- # We assume that the same content is available
- # over both http and https, because if it
- # wasn't, accessing the static content
- # from the CGI would be mixed-content,
- # which would be a security flaw.
-
- if ($cgiurl->authority ne $baseurl->authority) {
- # use protocol-relative URL for
- # static content
- $local_url = "$config{url}/";
- $local_url =~ s{^http://}{//};
- }
- # else use host-relative URL for static content
-
- # either way, CGI needs to be absolute
- $local_cgiurl = $config{cgiurl};
- }
- elsif ($cgiurl->scheme ne $baseurl->scheme) {
- # too far apart, fall back to absolute URLs
- $local_url = "$config{url}/";
- $local_cgiurl = $config{cgiurl};
- }
- elsif ($cgiurl->authority ne $baseurl->authority) {
- # slightly too far apart, fall back to
- # protocol-relative URLs
- $local_url = "$config{url}/";
- $local_url =~ s{^https?://}{//};
- $local_cgiurl = $config{cgiurl};
- $local_cgiurl =~ s{^https?://}{//};
- }
- # else keep host-relative URLs
- }
-
- $local_url =~ s{//$}{/};
- }
- else {
- $local_cgiurl = $config{cgiurl};
- }
-
- $config{wikistatedir}="$config{srcdir}/.ikiwiki"
- unless exists $config{wikistatedir} && defined $config{wikistatedir};
-
- if (defined $config{umask}) {
- my $u = possibly_foolish_untaint($config{umask});
-
- if ($u =~ m/^\d+$/) {
- umask($u);
- }
- elsif ($u eq 'private') {
- umask(077);
- }
- elsif ($u eq 'group') {
- umask(027);
- }
- elsif ($u eq 'public') {
- umask(022);
- }
- else {
- error(sprintf(gettext("unsupported umask setting %s"), $u));
- }
- }
-
- run_hooks(checkconfig => sub { shift->() });
-
- return 1;
-}
-
-sub listplugins () {
- my %ret;
-
- foreach my $dir (@INC, $config{libdir}) {
- next unless defined $dir && length $dir;
- foreach my $file (glob("$dir/IkiWiki/Plugin/*.pm")) {
- my ($plugin)=$file=~/.*\/(.*)\.pm$/;
- $ret{$plugin}=1;
- }
- }
- foreach my $dir ($config{libdir}, "$installdir/lib/ikiwiki") {
- next unless defined $dir && length $dir;
- foreach my $file (glob("$dir/plugins/*")) {
- $ret{basename($file)}=1 if -x $file;
- }
- }
-
- return keys %ret;
-}
-
-sub loadplugins () {
- if (defined $config{libdir} && length $config{libdir}) {
- unshift @INC, possibly_foolish_untaint($config{libdir});
- }
-
- foreach my $plugin (@{$config{default_plugins}}, @{$config{add_plugins}}) {
- loadplugin($plugin);
- }
-
- if ($config{rcs}) {
- if (exists $hooks{rcs}) {
- error(gettext("cannot use multiple rcs plugins"));
- }
- loadplugin($config{rcs});
- }
- if (! exists $hooks{rcs}) {
- loadplugin("norcs");
- }
-
- run_hooks(getopt => sub { shift->() });
- if (grep /^-/, @ARGV) {
- print STDERR "Unknown option (or missing parameter): $_\n"
- foreach grep /^-/, @ARGV;
- usage();
- }
-
- return 1;
-}
-
-sub loadplugin ($;$) {
- my $plugin=shift;
- my $force=shift;
-
- return if ! $force && grep { $_ eq $plugin} @{$config{disable_plugins}};
-
- foreach my $dir (defined $config{libdir} ? possibly_foolish_untaint($config{libdir}) : undef,
- "$installdir/lib/ikiwiki") {
- if (defined $dir && -x "$dir/plugins/$plugin") {
- eval { require IkiWiki::Plugin::external };
- if ($@) {
- my $reason=$@;
- error(sprintf(gettext("failed to load external plugin needed for %s plugin: %s"), $plugin, $reason));
- }
- import IkiWiki::Plugin::external "$dir/plugins/$plugin";
- $loaded_plugins{$plugin}=1;
- return 1;
- }
- }
-
- my $mod="IkiWiki::Plugin::".possibly_foolish_untaint($plugin);
- eval qq{use $mod};
- if ($@) {
- error("Failed to load plugin $mod: $@");
- }
- $loaded_plugins{$plugin}=1;
- return 1;
-}
-
-sub error ($;$) {
- my $message=shift;
- my $cleaner=shift;
- log_message('err' => $message) if $config{syslog};
- if (defined $cleaner) {
- $cleaner->();
- }
- die $message."\n";
-}
-
-sub debug ($) {
- return unless $config{verbose};
- return log_message(debug => @_);
-}
-
-my $log_open=0;
-my $log_failed=0;
-sub log_message ($$) {
- my $type=shift;
-
- if ($config{syslog}) {
- require Sys::Syslog;
- if (! $log_open) {
- Sys::Syslog::setlogsock('unix');
- Sys::Syslog::openlog('ikiwiki', '', 'user');
- $log_open=1;
- }
- eval {
- # keep a copy to avoid editing the original config repeatedly
- my $wikiname = $config{wikiname};
- utf8::encode($wikiname);
- Sys::Syslog::syslog($type, "[$wikiname] %s", join(" ", @_));
- };
- if ($@) {
- print STDERR "failed to syslog: $@" unless $log_failed;
- $log_failed=1;
- print STDERR "@_\n";
- }
- return $@;
- }
- elsif (! $config{cgi}) {
- return print "@_\n";
- }
- else {
- return print STDERR "@_\n";
- }
-}
-
-sub possibly_foolish_untaint ($) {
- my $tainted=shift;
- my ($untainted)=$tainted=~/(.*)/s;
- return $untainted;
-}
-
-sub basename ($) {
- my $file=shift;
-
- $file=~s!.*/+!!;
- return $file;
-}
-
-sub dirname ($) {
- my $file=shift;
-
- $file=~s!/*[^/]+$!!;
- return $file;
-}
-
-sub isinternal ($) {
- my $page=shift;
- return exists $pagesources{$page} &&
- $pagesources{$page} =~ /\._([^.]+)$/;
-}
-
-sub pagetype ($) {
- my $file=shift;
-
- if ($file =~ /\.([^.]+)$/) {
- return $1 if exists $hooks{htmlize}{$1};
- }
- my $base=basename($file);
- if (exists $hooks{htmlize}{$base} &&
- $hooks{htmlize}{$base}{noextension}) {
- return $base;
- }
- return;
-}
-
-my %pagename_cache;
-
-sub pagename ($) {
- my $file=shift;
-
- if (exists $pagename_cache{$file}) {
- return $pagename_cache{$file};
- }
-
- my $type=pagetype($file);
- my $page=$file;
- $page=~s/\Q.$type\E*$//
- if defined $type && !$hooks{htmlize}{$type}{keepextension}
- && !$hooks{htmlize}{$type}{noextension};
- if ($config{indexpages} && $page=~/(.*)\/index$/) {
- $page=$1;
- }
-
- $pagename_cache{$file} = $page;
- return $page;
-}
-
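-# e.g. pagename("blog/foo.mdwn") is "blog/foo", assuming the mdwn htmlize
-# hook is registered; with indexpages enabled, "blog/foo/index.mdwn" also
-# maps to "blog/foo".
-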
-sub newpagefile ($$) {
- my $page=shift;
- my $type=shift;
-
- if (! $config{indexpages} || $page eq 'index') {
- return $page.".".$type;
- }
- else {
- return $page."/index.".$type;
- }
-}
-
-sub targetpage ($$;$) {
- my $page=shift;
- my $ext=shift;
- my $filename=shift;
-
- if (defined $filename) {
- return $page."/".$filename.".".$ext;
- }
- elsif (! $config{usedirs} || $page eq 'index') {
- return $page.".".$ext;
- }
- else {
- return $page."/index.".$ext;
- }
-}
-
-sub htmlpage ($) {
- my $page=shift;
-
- return targetpage($page, $config{htmlext});
-}
-
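-# e.g. with usedirs enabled, htmlpage("blog/foo") is "blog/foo/index.html";
-# with usedirs disabled it is "blog/foo.html" (assuming the default htmlext).
-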
-sub srcfile_stat {
- my $file=shift;
- my $nothrow=shift;
-
- return "$config{srcdir}/$file", stat(_) if -e "$config{srcdir}/$file";
- foreach my $dir (@{$config{underlaydirs}}, $config{underlaydir}) {
- return "$dir/$file", stat(_) if -e "$dir/$file";
- }
- error("internal error: $file cannot be found in $config{srcdir} or underlay") unless $nothrow;
- return;
-}
-
-sub srcfile ($;$) {
- return (srcfile_stat(@_))[0];
-}
-
-sub add_literal_underlay ($) {
- my $dir=shift;
-
- if (! grep { $_ eq $dir } @{$config{underlaydirs}}) {
- unshift @{$config{underlaydirs}}, $dir;
- }
-}
-
-sub add_underlay ($) {
- my $dir = shift;
-
- if ($dir !~ /^\//) {
- $dir="$config{underlaydirbase}/$dir";
- }
-
- add_literal_underlay($dir);
- # why does it return 1? we just don't know
- return 1;
-}
-
-sub readfile ($;$$) {
- my $file=shift;
- my $binary=shift;
- my $wantfd=shift;
-
- if (-l $file) {
- error("cannot read a symlink ($file)");
- }
-
- local $/=undef;
- open (my $in, "<", $file) || error("failed to read $file: $!");
- binmode($in) if ($binary);
- return \*$in if $wantfd;
- my $ret=<$in>;
- # check for invalid utf-8, and toss it back to avoid crashes
- if (! utf8::valid($ret)) {
- $ret=encode_utf8($ret);
- }
- close $in || error("failed to read $file: $!");
- return $ret;
-}
-
-sub prep_writefile ($$) {
- my $file=shift;
- my $destdir=shift;
-
- my $test=$file;
- while (length $test) {
- if (-l "$destdir/$test") {
- error("cannot write to a symlink ($test)");
- }
- if (-f _ && $test ne $file) {
- # Remove conflicting file.
- foreach my $p (keys %renderedfiles, keys %oldrenderedfiles) {
- foreach my $f (@{$renderedfiles{$p}}, @{$oldrenderedfiles{$p}}) {
- if ($f eq $test) {
- unlink("$destdir/$test");
- last;
- }
- }
- }
- }
- $test=dirname($test);
- }
-
- my $dir=dirname("$destdir/$file");
- if (! -d $dir) {
- my $d="";
- foreach my $s (split(m!/+!, $dir)) {
- $d.="$s/";
- if (! -d $d) {
- mkdir($d) || error("failed to create directory $d: $!");
- }
- }
- }
-
- return 1;
-}
-
-sub writefile ($$$;$$) {
- my $file=shift; # can include subdirs
- my $destdir=shift; # directory to put file in
- my $content=shift;
- my $binary=shift;
- my $writer=shift;
-
- prep_writefile($file, $destdir);
-
- my $newfile="$destdir/$file.ikiwiki-new";
- if (-l $newfile) {
- error("cannot write to a symlink ($newfile)");
- }
-
- my $cleanup = sub { unlink($newfile) };
- open (my $out, '>', $newfile) || error("failed to write $newfile: $!", $cleanup);
- binmode($out) if ($binary);
- if ($writer) {
- $writer->(\*$out, $cleanup);
- }
- else {
- print $out $content or error("failed writing to $newfile: $!", $cleanup);
- }
- close $out || error("failed saving $newfile: $!", $cleanup);
- rename($newfile, "$destdir/$file") ||
- error("failed renaming $newfile to $destdir/$file: $!", $cleanup);
-
- return 1;
-}
-
-my %cleared;
-sub will_render ($$;$) {
- my $page=shift;
- my $dest=shift;
- my $clear=shift;
-
- # Important security check for independently created files.
- if (-e "$config{destdir}/$dest" && ! $config{rebuild} &&
- ! grep { $_ eq $dest } (@{$renderedfiles{$page}}, @{$oldrenderedfiles{$page}}, @{$wikistate{editpage}{previews}})) {
- my $from_other_page=0;
- # Expensive, but rarely runs.
- foreach my $p (keys %renderedfiles, keys %oldrenderedfiles) {
- if (grep {
- $_ eq $dest ||
- dirname($_) eq $dest
- } @{$renderedfiles{$p}}, @{$oldrenderedfiles{$p}}) {
- $from_other_page=1;
- last;
- }
- }
-
- error("$config{destdir}/$dest independently created, not overwriting with version from $page")
- unless $from_other_page;
- }
-
- # If $dest exists as a directory, remove conflicting files in it
- # rendered from other pages.
- if (-d _) {
- foreach my $p (keys %renderedfiles, keys %oldrenderedfiles) {
- foreach my $f (@{$renderedfiles{$p}}, @{$oldrenderedfiles{$p}}) {
- if (dirname($f) eq $dest) {
- unlink("$config{destdir}/$f");
- rmdir(dirname("$config{destdir}/$f"));
- }
- }
- }
- }
-
- if (! $clear || $cleared{$page}) {
- $renderedfiles{$page}=[$dest, grep { $_ ne $dest } @{$renderedfiles{$page}}];
- }
- else {
- foreach my $old (@{$renderedfiles{$page}}) {
- delete $destsources{$old};
- }
- $renderedfiles{$page}=[$dest];
- $cleared{$page}=1;
- }
- $destsources{$dest}=$page;
-
- return 1;
-}
-
-sub bestlink ($$) {
- my $page=shift;
- my $link=shift;
-
- my $cwd=$page;
- if ($link=~s/^\/+//) {
- # absolute links
- $cwd="";
- }
- $link=~s/\/$//;
-
- do {
- my $l=$cwd;
- $l.="/" if length $l;
- $l.=$link;
-
- if (exists $pagesources{$l}) {
- return $l;
- }
- elsif (exists $pagecase{lc $l}) {
- return $pagecase{lc $l};
- }
- } while $cwd=~s{/?[^/]+$}{};
-
- if (length $config{userdir}) {
- my $l = "$config{userdir}/".lc($link);
- if (exists $pagesources{$l}) {
- return $l;
- }
- elsif (exists $pagecase{lc $l}) {
- return $pagecase{lc $l};
- }
- }
-
- #print STDERR "warning: page $page, broken link: $link\n";
- return "";
-}
-
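-# e.g. bestlink("blog/post", "meta") tries "blog/post/meta", then
-# "blog/meta", then "meta", and finally "$config{userdir}/meta" before
-# giving up and returning "".
-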
-sub isinlinableimage ($) {
- my $file=shift;
-
- return $file =~ /\.(png|gif|jpg|jpeg|svg)$/i;
-}
-
-sub pagetitle ($;$) {
- my $page=shift;
- my $unescaped=shift;
-
- if ($unescaped) {
- $page=~s/(__(\d+)__|_)/$1 eq '_' ? ' ' : chr($2)/eg;
- }
- else {
- $page=~s/(__(\d+)__|_)/$1 eq '_' ? ' ' : "&#$2;"/eg;
- }
-
- return $page;
-}
-
-sub titlepage ($) {
- my $title=shift;
- # support use w/o %config set
- my $chars = defined $config{wiki_file_chars} ? $config{wiki_file_chars} : "-[:alnum:]+/.:_";
- $title=~s/([^$chars]|_)/$1 eq ' ' ? '_' : "__".ord($1)."__"/eg;
- return $title;
-}
-
-sub linkpage ($) {
- my $link=shift;
- my $chars = defined $config{wiki_file_chars} ? $config{wiki_file_chars} : "-[:alnum:]+/.:_";
- $link=~s/([^$chars])/$1 eq ' ' ? '_' : "__".ord($1)."__"/eg;
- return $link;
-}
-
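-# e.g. titlepage("hello world?") escapes to "hello_world__63__", and
-# pagetitle("hello_world__63__", 1) round-trips it back to "hello world?".
-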
-sub cgiurl (@) {
- my %params=@_;
-
- my $cgiurl=$local_cgiurl;
-
- if (exists $params{cgiurl}) {
- $cgiurl=$params{cgiurl};
- delete $params{cgiurl};
- }
-
- unless (%params) {
- return $cgiurl;
- }
-
- return $cgiurl."?".
- join("&", map $_."=".uri_escape_utf8($params{$_}), keys %params);
-}
-
-sub cgiurl_abs (@) {
- eval q{use URI};
- URI->new_abs(cgiurl(@_), $config{cgiurl});
-}
-
-sub baseurl (;$) {
- my $page=shift;
-
- return $local_url if ! defined $page;
-
- $page=htmlpage($page);
- $page=~s/[^\/]+$//;
- $page=~s/[^\/]+\//..\//g;
- return $page;
-}
-
-sub urlabs ($$) {
- my $url=shift;
- my $urlbase=shift;
-
- return $url unless defined $urlbase && length $urlbase;
-
- eval q{use URI};
- URI->new_abs($url, $urlbase)->as_string;
-}
-
-sub abs2rel ($$) {
- # Work around very inefficient behavior in File::Spec if abs2rel
- # is passed two relative paths. It's much faster if paths are
- # absolute! (Debian bug #376658; fixed in Debian unstable now)
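- # e.g. abs2rel("posts/hello/index.html", "posts") is "hello/index.html".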
- my $path="/".shift;
- my $base="/".shift;
-
- require File::Spec;
- my $ret=File::Spec->abs2rel($path, $base);
- $ret=~s/^// if defined $ret;
- return $ret;
-}
-
-sub displaytime ($;$$) {
- # Plugins can override this function to mark up the time to
- # display.
- my $time=formattime($_[0], $_[1]);
- if ($config{html5}) {
- return '<time datetime="'.date_3339($_[0]).'"'.
- ($_[2] ? ' pubdate="pubdate"' : '').
- '>'.$time.'</time>';
- }
- else {
- return '<span class="date">'.$time.'</span>';
- }
-}
-
-sub formattime ($;$) {
- # Plugins can override this function to format the time.
- my $time=shift;
- my $format=shift;
- if (! defined $format) {
- $format=$config{timeformat};
- }
-
- return strftime_utf8($format, localtime($time));
-}
-
-my $strftime_encoding;
-sub strftime_utf8 {
- # strftime doesn't know about encodings, so make sure
- # its output is properly treated as utf8.
- # Note that this does not handle utf-8 in the format string.
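- # e.g. in a de_DE.UTF-8 locale, POSIX::strftime("%B", ...) for March
- # returns the five bytes "M\xc3\xa4rz"; Encode::decode turns them into
- # the four-character string "März".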
- ($strftime_encoding) = POSIX::setlocale(&POSIX::LC_TIME) =~ m#\.([^@]+)#
- unless defined $strftime_encoding;
- $strftime_encoding
- ? Encode::decode($strftime_encoding, POSIX::strftime(@_))
- : POSIX::strftime(@_);
-}
-
-sub date_3339 ($) {
- my $time=shift;
-
- my $lc_time=POSIX::setlocale(&POSIX::LC_TIME);
- POSIX::setlocale(&POSIX::LC_TIME, "C");
- my $ret=POSIX::strftime("%Y-%m-%dT%H:%M:%SZ", gmtime($time));
- POSIX::setlocale(&POSIX::LC_TIME, $lc_time);
- return $ret;
-}
-
-sub beautify_urlpath ($) {
- my $url=shift;
-
- # Ensure url is not an empty link, and if necessary,
- # add ./ to avoid colon confusion.
- if ($url !~ /^\// && $url !~ /^\.\.?\//) {
- $url="./$url";
- }
-
- if ($config{usedirs}) {
- $url =~ s!/index.$config{htmlext}$!/!;
- }
-
- return $url;
-}
-
-sub urlto ($;$$) {
- my $to=shift;
- my $from=shift;
- my $absolute=shift;
-
- if (! length $to) {
- $to = 'index';
- }
-
- if (! $destsources{$to}) {
- $to=htmlpage($to);
- }
-
- if ($absolute) {
- return $config{url}.beautify_urlpath("/".$to);
- }
-
- if (! defined $from) {
- my $u = $local_url || '';
- $u =~ s{/$}{};
- return $u.beautify_urlpath("/".$to);
- }
-
- my $link = abs2rel($to, dirname(htmlpage($from)));
-
- return beautify_urlpath($link);
-}
-
-sub isselflink ($$) {
- # Plugins can override this function to support special types
- # of selflinks.
- my $page=shift;
- my $link=shift;
-
- return $page eq $link;
-}
-
-sub htmllink ($$$;@) {
- my $lpage=shift; # the page doing the linking
- my $page=shift; # the page that will contain the link (different for inline)
- my $link=shift;
- my %opts=@_;
-
- $link=~s/\/$//;
-
- my $bestlink;
- if (! $opts{forcesubpage}) {
- $bestlink=bestlink($lpage, $link);
- }
- else {
- $bestlink="$lpage/".lc($link);
- }
-
- my $linktext;
- if (defined $opts{linktext}) {
- $linktext=$opts{linktext};
- }
- else {
- $linktext=pagetitle(basename($link));
- }
-
- return "<span class=\"selflink\">$linktext</span>"
- if length $bestlink && isselflink($page, $bestlink) &&
- ! defined $opts{anchor};
-
- if (! $destsources{$bestlink}) {
- $bestlink=htmlpage($bestlink);
-
- if (! $destsources{$bestlink}) {
- my $cgilink = "";
- if (length $config{cgiurl}) {
- $cgilink = "<a href=\"".
- cgiurl(
- do => "create",
- page => $link,
- from => $lpage
- )."\" rel=\"nofollow\">?</a>";
- }
- return "<span class=\"createlink\">$cgilink$linktext</span>"
- }
- }
-
- $bestlink=abs2rel($bestlink, dirname(htmlpage($page)));
- $bestlink=beautify_urlpath($bestlink);
-
- if (! $opts{noimageinline} && isinlinableimage($bestlink)) {
- return "<img src=\"$bestlink\" alt=\"$linktext\" />";
- }
-
- if (defined $opts{anchor}) {
- $bestlink.="#".$opts{anchor};
- }
-
- my @attrs;
- foreach my $attr (qw{rel class title}) {
- if (defined $opts{$attr}) {
- push @attrs, " $attr=\"$opts{$attr}\"";
- }
- }
-
- return "<a href=\"$bestlink\"@attrs>$linktext</a>";
-}
-
-sub userpage ($) {
- my $user=shift;
- return length $config{userdir} ? "$config{userdir}/$user" : $user;
-}
-
-sub openiduser ($) {
- my $user=shift;
-
- if (defined $user && $user =~ m!^https?://! &&
- eval q{use Net::OpenID::VerifiedIdentity; 1} && !$@) {
- my $display;
-
- if (Net::OpenID::VerifiedIdentity->can("DisplayOfURL")) {
- $display = Net::OpenID::VerifiedIdentity::DisplayOfURL($user);
- }
- else {
- # backcompat with old version
- my $oid=Net::OpenID::VerifiedIdentity->new(identity => $user);
- $display=$oid->display;
- }
-
- # Convert "user.somehost.com" to "user [somehost.com]"
- # (also "user.somehost.co.uk")
- if ($display !~ /\[/) {
- $display=~s/^([-a-zA-Z0-9]+?)\.([-.a-zA-Z0-9]+\.[a-z]+)$/$1 [$2]/;
- }
- # Convert "http://somehost.com/user" to "user [somehost.com]".
- # (also "https://somehost.com/user/")
- if ($display !~ /\[/) {
- $display=~s/^https?:\/\/(.+)\/([^\/#?]+)\/?(?:[#?].*)?$/$2 [$1]/;
- }
- $display=~s!^https?://!!; # make sure this is removed
- eval q{use CGI 'escapeHTML'};
- error($@) if $@;
- return escapeHTML($display);
- }
- return;
-}
-
-sub htmlize ($$$$) {
- my $page=shift;
- my $destpage=shift;
- my $type=shift;
- my $content=shift;
-
- my $oneline = $content !~ /\n/;
-
- if (exists $hooks{htmlize}{$type}) {
- $content=$hooks{htmlize}{$type}{call}->(
- page => $page,
- content => $content,
- );
- }
- else {
- error("htmlization of $type not supported");
- }
-
- run_hooks(sanitize => sub {
- $content=shift->(
- page => $page,
- destpage => $destpage,
- content => $content,
- );
- });
-
- if ($oneline) {
- # hack to get rid of enclosing junk added by markdown
- # and other htmlizers/sanitizers
- $content=~s/^<p>//i;
- $content=~s/<\/p>\n*$//i;
- }
-
- return $content;
-}
-
-sub linkify ($$$) {
- my $page=shift;
- my $destpage=shift;
- my $content=shift;
-
- run_hooks(linkify => sub {
- $content=shift->(
- page => $page,
- destpage => $destpage,
- content => $content,
- );
- });
-
- return $content;
-}
-
-our %preprocessing;
-our $preprocess_preview=0;
-sub preprocess ($$$;$$) {
- my $page=shift; # the page the data comes from
- my $destpage=shift; # the page the data will appear in (different for inline)
- my $content=shift;
- my $scan=shift;
- my $preview=shift;
-
- # Using local because it needs to be set within any nested calls
- # of this function.
- local $preprocess_preview=$preview if defined $preview;
-
- my $handle=sub {
- my $escape=shift;
- my $prefix=shift;
- my $command=shift;
- my $params=shift;
- $params="" if ! defined $params;
-
- if (length $escape) {
- return "[[$prefix$command $params]]";
- }
- elsif (exists $hooks{preprocess}{$command}) {
- return "" if $scan && ! $hooks{preprocess}{$command}{scan};
- # Note: preserve the order of params; some plugins may
- # consider it significant.
- my @params;
- while ($params =~ m{
- (?:([-.\w]+)=)? # 1: named parameter key?
- (?:
- """(.*?)""" # 2: triple-quoted value
- |
- "([^"]*?)" # 3: single-quoted value
- |
- '''(.*?)''' # 4: triple-single-quote
- |
- <<([a-zA-Z]+)\n # 5: heredoc start
- (.*?)\n\5 # 6: heredoc value
- |
- (\S+) # 7: unquoted value
- )
- (?:\s+|$) # delimiter to next param
- }msgx) {
- my $key=$1;
- my $val;
- if (defined $2) {
- $val=$2;
- $val=~s/\r\n/\n/mg;
- $val=~s/^\n+//g;
- $val=~s/\n+$//g;
- }
- elsif (defined $3) {
- $val=$3;
- }
- elsif (defined $4) {
- $val=$4;
- }
- elsif (defined $7) {
- $val=$7;
- }
- elsif (defined $6) {
- $val=$6;
- }
-
- if (defined $key) {
- push @params, $key, $val;
- }
- else {
- push @params, $val, '';
- }
- }
- if ($preprocessing{$page}++ > 8) {
- # Avoid loops of preprocessed pages preprocessing
- # other pages that preprocess them, etc.
- return "[[!$command <span class=\"error\">".
- sprintf(gettext("preprocessing loop detected on %s at depth %i"),
- $page, $preprocessing{$page}).
- "</span>]]";
- }
- my $ret;
- if (! $scan) {
- $ret=eval {
- $hooks{preprocess}{$command}{call}->(
- @params,
- page => $page,
- destpage => $destpage,
- preview => $preprocess_preview,
- );
- };
- if ($@) {
- my $error=$@;
- chomp $error;
- eval q{use HTML::Entities};
- $error = encode_entities($error);
- $ret="[[!$command <span class=\"error\">".
- gettext("Error").": $error"."</span>]]";
- }
- }
- else {
- # use void context during scan pass
- eval {
- $hooks{preprocess}{$command}{call}->(
- @params,
- page => $page,
- destpage => $destpage,
- preview => $preprocess_preview,
- );
- };
- $ret="";
- }
- $preprocessing{$page}--;
- return $ret;
- }
- else {
- return "[[$prefix$command $params]]";
- }
- };
-
- my $regex;
- if ($config{prefix_directives}) {
- $regex = qr{
- (\\?) # 1: escape?
- \[\[(!) # directive open; 2: prefix
- ([-\w]+) # 3: command
- ( # 4: the parameters..
- \s+ # Must have space if parameters present
- (?:
- (?:[-.\w]+=)? # named parameter key?
- (?:
- """.*?""" # triple-quoted value
- |
- "[^"]*?" # single-quoted value
- |
- '''.*?''' # triple-single-quote
- |
- <<([a-zA-Z]+)\n # 5: heredoc start
- (?:.*?)\n\5 # heredoc value
- |
- [^"\s\]]+ # unquoted value
- )
- \s* # whitespace or end
- # of directive
- )
- *)? # 0 or more parameters
- \]\] # directive closed
- }sx;
- }
- else {
- $regex = qr{
- (\\?) # 1: escape?
- \[\[(!?) # directive open; 2: optional prefix
- ([-\w]+) # 3: command
- \s+
- ( # 4: the parameters..
- (?:
- (?:[-.\w]+=)? # named parameter key?
- (?:
- """.*?""" # triple-quoted value
- |
- "[^"]*?" # single-quoted value
- |
- '''.*?''' # triple-single-quote
- |
- <<([a-zA-Z]+)\n # 5: heredoc start
- (?:.*?)\n\5 # heredoc value
- |
- [^"\s\]]+ # unquoted value
- )
- \s* # whitespace or end
- # of directive
- )
- *) # 0 or more parameters
- \]\] # directive closed
- }sx;
- }
-
- $content =~ s{$regex}{$handle->($1, $2, $3, $4)}eg;
- return $content;
-}
-
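-# e.g. with prefix_directives enabled, preprocess() expands
-# [[!meta title="Hello"]] by calling the registered meta preprocess hook
-# as $hook->(title => "Hello", page => $page, destpage => $destpage,
-# preview => $preprocess_preview).
-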
-sub filter ($$$) {
- my $page=shift;
- my $destpage=shift;
- my $content=shift;
-
- run_hooks(filter => sub {
- $content=shift->(page => $page, destpage => $destpage,
- content => $content);
- });
-
- return $content;
-}
-
-sub check_canedit ($$$;$) {
- my $page=shift;
- my $q=shift;
- my $session=shift;
- my $nonfatal=shift;
-
- my $canedit;
- run_hooks(canedit => sub {
- return if defined $canedit;
- my $ret=shift->($page, $q, $session);
- if (defined $ret) {
- if ($ret eq "") {
- $canedit=1;
- }
- elsif (ref $ret eq 'CODE') {
- $ret->() unless $nonfatal;
- $canedit=0;
- }
- elsif (defined $ret) {
- error($ret) unless $nonfatal;
- $canedit=0;
- }
- }
- });
- return defined $canedit ? $canedit : 1;
-}
-
-sub check_content (@) {
- my %params=@_;
-
- return 1 if ! exists $hooks{checkcontent}; # optimisation
-
- if (exists $pagesources{$params{page}}) {
- my @diff;
- my %old=map { $_ => 1 }
- split("\n", readfile(srcfile($pagesources{$params{page}})));
- foreach my $line (split("\n", $params{content})) {
- push @diff, $line if ! exists $old{$line};
- }
- $params{diff}=join("\n", @diff);
- }
-
- my $ok;
- run_hooks(checkcontent => sub {
- return if defined $ok;
- my $ret=shift->(%params);
- if (defined $ret) {
- if ($ret eq "") {
- $ok=1;
- }
- elsif (ref $ret eq 'CODE') {
- $ret->() unless $params{nonfatal};
- $ok=0;
- }
- elsif (defined $ret) {
- error($ret) unless $params{nonfatal};
- $ok=0;
- }
- }
-
- });
- return defined $ok ? $ok : 1;
-}
-
-sub check_canchange (@) {
- my %params = @_;
- my $cgi = $params{cgi};
- my $session = $params{session};
- my @changes = @{$params{changes}};
-
- my %newfiles;
- foreach my $change (@changes) {
- # This untaint is safe because we check file_pruned and
- # wiki_file_regexp.
- my ($file)=$change->{file}=~/$config{wiki_file_regexp}/;
- $file=possibly_foolish_untaint($file);
- if (! defined $file || ! length $file ||
- file_pruned($file)) {
- error(gettext("bad file name %s"), $file);
- }
-
- my $type=pagetype($file);
- my $page=pagename($file) if defined $type;
-
- if ($change->{action} eq 'add') {
- $newfiles{$file}=1;
- }
-
- if ($change->{action} eq 'change' ||
- $change->{action} eq 'add') {
- if (defined $page) {
- check_canedit($page, $cgi, $session);
- next;
- }
- else {
- if (IkiWiki::Plugin::attachment->can("check_canattach")) {
- IkiWiki::Plugin::attachment::check_canattach($session, $file, $change->{path});
- check_canedit($file, $cgi, $session);
- next;
- }
- }
- }
- elsif ($change->{action} eq 'remove') {
- # check_canremove tests to see if the file is present
- # on disk. This will fail when a single commit adds a
- # file and then removes it again. Avoid the problem
- # by not testing the removal in such pairs of changes.
- # (The add is still tested, just to make sure that
- # no data is added to the repo that a web edit
- # could not add.)
- next if $newfiles{$file};
-
- if (IkiWiki::Plugin::remove->can("check_canremove")) {
- IkiWiki::Plugin::remove::check_canremove(defined $page ? $page : $file, $cgi, $session);
- check_canedit(defined $page ? $page : $file, $cgi, $session);
- next;
- }
- }
- else {
- error "unknown action ".$change->{action};
- }
-
- error sprintf(gettext("you are not allowed to change %s"), $file);
- }
-}
-
-
-my $wikilock;
-
-sub lockwiki () {
- # Take an exclusive lock on the wiki to prevent issues from
- # multiple concurrent runs. The lock will be dropped on program exit.
- if (! -d $config{wikistatedir}) {
- mkdir($config{wikistatedir});
- }
- open($wikilock, '>', "$config{wikistatedir}/lockfile") ||
- error ("cannot write to $config{wikistatedir}/lockfile: $!");
- if (! flock($wikilock, 2)) { # LOCK_EX
- error("failed to get lock");
- }
- return 1;
-}
-
-sub unlockwiki () {
- POSIX::close($ENV{IKIWIKI_CGILOCK_FD}) if exists $ENV{IKIWIKI_CGILOCK_FD};
- return close($wikilock) if $wikilock;
- return;
-}
-
-my $commitlock;
-
-sub commit_hook_enabled () {
- open($commitlock, '+>', "$config{wikistatedir}/commitlock") ||
- error("cannot write to $config{wikistatedir}/commitlock: $!");
- if (! flock($commitlock, 1 | 4)) { # LOCK_SH | LOCK_NB to test
- close($commitlock) || error("failed closing commitlock: $!");
- return 0;
- }
- close($commitlock) || error("failed closing commitlock: $!");
- return 1;
-}
-
-sub disable_commit_hook () {
- open($commitlock, '>', "$config{wikistatedir}/commitlock") ||
- error("cannot write to $config{wikistatedir}/commitlock: $!");
- if (! flock($commitlock, 2)) { # LOCK_EX
- error("failed to get commit lock");
- }
- return 1;
-}
-
-sub enable_commit_hook () {
- return close($commitlock) if $commitlock;
- return;
-}
-
-sub loadindex () {
- %oldrenderedfiles=%pagectime=();
- my $rebuild=$config{rebuild};
- if (! $rebuild) {
- %pagesources=%pagemtime=%oldlinks=%links=%depends=
- %destsources=%renderedfiles=%pagecase=%pagestate=
- %depends_simple=%typedlinks=%oldtypedlinks=();
- }
- my $in;
- if (! open ($in, "<", "$config{wikistatedir}/indexdb")) {
- if (-e "$config{wikistatedir}/index") {
- system("ikiwiki-transition", "indexdb", $config{srcdir});
- open ($in, "<", "$config{wikistatedir}/indexdb") || return;
- }
- else {
- # gettime on first build
- $config{gettime}=1 unless defined $config{gettime};
- return;
- }
- }
-
- my $index=Storable::fd_retrieve($in);
- if (! defined $index) {
- return 0;
- }
-
- my $pages;
- if (exists $index->{version} && ! ref $index->{version}) {
- $pages=$index->{page};
- %wikistate=%{$index->{state}};
- # Handle plugins that got disabled by loading a new setup.
- if (exists $config{setupfile}) {
- require IkiWiki::Setup;
- IkiWiki::Setup::disabled_plugins(
- grep { ! $loaded_plugins{$_} } keys %wikistate);
- }
- }
- else {
- $pages=$index;
- %wikistate=();
- }
-
- foreach my $src (keys %$pages) {
- my $d=$pages->{$src};
- my $page;
- if (exists $d->{page} && ! $rebuild) {
- $page=$d->{page};
- }
- else {
- $page=pagename($src);
- }
- $pagectime{$page}=$d->{ctime};
- $pagesources{$page}=$src;
- if (! $rebuild) {
- $pagemtime{$page}=$d->{mtime};
- $renderedfiles{$page}=$d->{dest};
- if (exists $d->{links} && ref $d->{links}) {
- $links{$page}=$d->{links};
- $oldlinks{$page}=[@{$d->{links}}];
- }
- if (ref $d->{depends_simple} eq 'ARRAY') {
- # old format
- $depends_simple{$page}={
- map { $_ => 1 } @{$d->{depends_simple}}
- };
- }
- elsif (exists $d->{depends_simple}) {
- $depends_simple{$page}=$d->{depends_simple};
- }
- if (exists $d->{dependslist}) {
- # old format
- $depends{$page}={
- map { $_ => $DEPEND_CONTENT }
- @{$d->{dependslist}}
- };
- }
- elsif (exists $d->{depends} && ! ref $d->{depends}) {
- # old format
- $depends{$page}={$d->{depends} => $DEPEND_CONTENT };
- }
- elsif (exists $d->{depends}) {
- $depends{$page}=$d->{depends};
- }
- if (exists $d->{state}) {
- $pagestate{$page}=$d->{state};
- }
- if (exists $d->{typedlinks}) {
- $typedlinks{$page}=$d->{typedlinks};
-
- while (my ($type, $links) = each %{$typedlinks{$page}}) {
- next unless %$links;
- $oldtypedlinks{$page}{$type} = {%$links};
- }
- }
- }
- $oldrenderedfiles{$page}=[@{$d->{dest}}];
- }
- foreach my $page (keys %pagesources) {
- $pagecase{lc $page}=$page;
- }
- foreach my $page (keys %renderedfiles) {
- $destsources{$_}=$page foreach @{$renderedfiles{$page}};
- }
- $lastrev=$index->{lastrev};
- @underlayfiles=@{$index->{underlayfiles}} if ref $index->{underlayfiles};
- return close($in);
-}
-
-sub saveindex () {
- run_hooks(savestate => sub { shift->() });
-
- my @plugins=keys %loaded_plugins;
-
- if (! -d $config{wikistatedir}) {
- mkdir($config{wikistatedir});
- }
- my $newfile="$config{wikistatedir}/indexdb.new";
- my $cleanup = sub { unlink($newfile) };
- open (my $out, '>', $newfile) || error("cannot write to $newfile: $!", $cleanup);
-
- my %index;
- foreach my $page (keys %pagemtime) {
- next unless $pagemtime{$page};
- my $src=$pagesources{$page};
-
- $index{page}{$src}={
- page => $page,
- ctime => $pagectime{$page},
- mtime => $pagemtime{$page},
- dest => $renderedfiles{$page},
- links => $links{$page},
- };
-
- if (exists $depends{$page}) {
- $index{page}{$src}{depends} = $depends{$page};
- }
-
- if (exists $depends_simple{$page}) {
- $index{page}{$src}{depends_simple} = $depends_simple{$page};
- }
-
- if (exists $typedlinks{$page} && %{$typedlinks{$page}}) {
- $index{page}{$src}{typedlinks} = $typedlinks{$page};
- }
-
- if (exists $pagestate{$page}) {
- $index{page}{$src}{state}=$pagestate{$page};
- }
- }
-
- $index{state}={};
- foreach my $id (@plugins) {
- $index{state}{$id}={}; # used to detect disabled plugins
- foreach my $key (keys %{$wikistate{$id}}) {
- $index{state}{$id}{$key}=$wikistate{$id}{$key};
- }
- }
-
- $index{lastrev}=$lastrev;
- $index{underlayfiles}=\@underlayfiles;
-
- $index{version}="3";
- my $ret=Storable::nstore_fd(\%index, $out);
- return if ! defined $ret || ! $ret;
- close $out || error("failed saving to $newfile: $!", $cleanup);
- rename($newfile, "$config{wikistatedir}/indexdb") ||
- error("failed renaming $newfile to $config{wikistatedir}/indexdb", $cleanup);
-
- return 1;
-}
-
-sub template_file ($) {
- my $name=shift;
-
- my $tpage=($name =~ s/^\///) ? $name : "templates/$name";
- my $template;
- if ($name !~ /\.tmpl$/ && exists $pagesources{$tpage}) {
- $template=srcfile($pagesources{$tpage}, 1);
- $name.=".tmpl";
- }
- else {
- $template=srcfile($tpage, 1);
- }
-
- if (defined $template) {
- return $template, $tpage, 1 if wantarray;
- return $template;
- }
- else {
- $name=~s:/::; # avoid path traversal
- foreach my $dir ($config{templatedir},
- "$installdir/share/ikiwiki/templates") {
- if (-e "$dir/$name") {
- $template="$dir/$name";
- last;
- }
- }
- if (defined $template) {
- return $template, $tpage if wantarray;
- return $template;
- }
- }
-
- return;
-}
-
-sub template_depends ($$;@) {
- my $name=shift;
- my $page=shift;
-
- my ($filename, $tpage, $untrusted)=template_file($name);
- if (! defined $filename) {
- error(sprintf(gettext("template %s not found"), $name))
- }
-
- if (defined $page && defined $tpage) {
- add_depends($page, $tpage);
- }
-
- my @opts=(
- filter => sub {
- my $text_ref = shift;
- ${$text_ref} = decode_utf8(${$text_ref});
- run_hooks(readtemplate => sub {
- ${$text_ref} = shift->(
- id => $name,
- page => $tpage,
- content => ${$text_ref},
- untrusted => $untrusted,
- );
- });
- },
- loop_context_vars => 1,
- die_on_bad_params => 0,
- parent_global_vars => 1,
- filename => $filename,
- @_,
- ($untrusted ? (no_includes => 1) : ()),
- );
- return @opts if wantarray;
-
- require HTML::Template;
- return HTML::Template->new(@opts);
-}
-
-sub template ($;@) {
- template_depends(shift, undef, @_);
-}
-
-sub templateactions ($$) {
- my $template=shift;
- my $page=shift;
-
- my $have_actions=0;
- my @actions;
- run_hooks(pageactions => sub {
- push @actions, map { { action => $_ } }
- grep { defined } shift->(page => $page);
- });
- $template->param(actions => \@actions);
-
- if ($config{cgiurl} && exists $hooks{auth}) {
- $template->param(prefsurl => cgiurl(do => "prefs"));
- $have_actions=1;
- }
-
- if ($have_actions || @actions) {
- $template->param(have_actions => 1);
- }
-}
-
-sub hook (@) {
- my %param=@_;
-
- if (! exists $param{type} || ! ref $param{call} || ! exists $param{id}) {
- error 'hook requires type, call, and id parameters';
- }
-
- return if $param{no_override} && exists $hooks{$param{type}}{$param{id}};
-
- $hooks{$param{type}}{$param{id}}=\%param;
- return 1;
-}
-
-sub run_hooks ($$) {
- # Calls the given sub for each hook of the given type,
- # passing it the hook function to call.
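- # e.g. run_hooks(savestate => sub { shift->() }), as saveindex()
- # does, invokes every registered savestate hook with no arguments.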
- my $type=shift;
- my $sub=shift;
-
- if (exists $hooks{$type}) {
- my (@first, @middle, @last);
- foreach my $id (keys %{$hooks{$type}}) {
- if ($hooks{$type}{$id}{first}) {
- push @first, $id;
- }
- elsif ($hooks{$type}{$id}{last}) {
- push @last, $id;
- }
- else {
- push @middle, $id;
- }
- }
- foreach my $id (@first, @middle, @last) {
- $sub->($hooks{$type}{$id}{call});
- }
- }
-
- return 1;
-}
-
-sub rcs_update () {
- $hooks{rcs}{rcs_update}{call}->(@_);
-}
-
-sub rcs_prepedit ($) {
- $hooks{rcs}{rcs_prepedit}{call}->(@_);
-}
-
-sub rcs_commit (@) {
- $hooks{rcs}{rcs_commit}{call}->(@_);
-}
-
-sub rcs_commit_staged (@) {
- $hooks{rcs}{rcs_commit_staged}{call}->(@_);
-}
-
-sub rcs_add ($) {
- $hooks{rcs}{rcs_add}{call}->(@_);
-}
-
-sub rcs_remove ($) {
- $hooks{rcs}{rcs_remove}{call}->(@_);
-}
-
-sub rcs_rename ($$) {
- $hooks{rcs}{rcs_rename}{call}->(@_);
-}
-
-sub rcs_recentchanges ($) {
- $hooks{rcs}{rcs_recentchanges}{call}->(@_);
-}
-
-sub rcs_diff ($;$) {
- $hooks{rcs}{rcs_diff}{call}->(@_);
-}
-
-sub rcs_getctime ($) {
- $hooks{rcs}{rcs_getctime}{call}->(@_);
-}
-
-sub rcs_getmtime ($) {
- $hooks{rcs}{rcs_getmtime}{call}->(@_);
-}
-
-sub rcs_receive () {
- $hooks{rcs}{rcs_receive}{call}->();
-}
-
-sub add_depends ($$;$) {
- my $page=shift;
- my $pagespec=shift;
- my $deptype=shift || $DEPEND_CONTENT;
-
- # Is the pagespec a simple page name?
- if ($pagespec =~ /$config{wiki_file_regexp}/ &&
- $pagespec !~ /[\s*?()!]/) {
- $depends_simple{$page}{lc $pagespec} |= $deptype;
- return 1;
- }
-
- # Add explicit dependencies for influences.
- my $sub=pagespec_translate($pagespec);
- return unless defined $sub;
- foreach my $p (keys %pagesources) {
- my $r=$sub->($p, location => $page);
- my $i=$r->influences;
- my $static=$r->influences_static;
- foreach my $k (keys %$i) {
- next unless $r || $static || $k eq $page;
- $depends_simple{$page}{lc $k} |= $i->{$k};
- }
- last if $static;
- }
-
- $depends{$page}{$pagespec} |= $deptype;
- return 1;
-}
-
-sub deptype (@) {
- my $deptype=0;
- foreach my $type (@_) {
- if ($type eq 'presence') {
- $deptype |= $DEPEND_PRESENCE;
- }
- elsif ($type eq 'links') {
- $deptype |= $DEPEND_LINKS;
- }
- elsif ($type eq 'content') {
- $deptype |= $DEPEND_CONTENT;
- }
- }
- return $deptype;
-}
-
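-# e.g. add_depends($page, "blog/*", deptype("presence", "links")) records
-# a dependency that is not invalidated by content-only changes to the
-# matched pages.
-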
-my $file_prune_regexp;
-sub file_pruned ($) {
- my $file=shift;
-
- if (defined $config{include} && length $config{include}) {
- return 0 if $file =~ m/$config{include}/;
- }
-
- if (! defined $file_prune_regexp) {
- $file_prune_regexp='('.join('|', @{$config{wiki_file_prune_regexps}}).')';
- $file_prune_regexp=qr/$file_prune_regexp/;
- }
- return $file =~ m/$file_prune_regexp/;
-}
-
-sub define_gettext () {
- # If translation is needed, redefine the gettext function to do it.
- # Otherwise, it becomes a quick no-op.
- my $gettext_obj;
- my $getobj;
- if ((exists $ENV{LANG} && length $ENV{LANG}) ||
- (exists $ENV{LC_ALL} && length $ENV{LC_ALL}) ||
- (exists $ENV{LC_MESSAGES} && length $ENV{LC_MESSAGES})) {
- $getobj=sub {
- $gettext_obj=eval q{
- use Locale::gettext q{textdomain};
- Locale::gettext->domain('ikiwiki')
- };
- };
- }
-
- no warnings 'redefine';
- *gettext=sub {
- $getobj->() if $getobj;
- if ($gettext_obj) {
- $gettext_obj->get(shift);
- }
- else {
- return shift;
- }
- };
- *ngettext=sub {
- $getobj->() if $getobj;
- if ($gettext_obj) {
- $gettext_obj->nget(@_);
- }
- else {
- return ($_[2] == 1 ? $_[0] : $_[1])
- }
- };
-}
-
-sub gettext {
- define_gettext();
- gettext(@_);
-}
-
-sub ngettext {
- define_gettext();
- ngettext(@_);
-}
-
-sub yesno ($) {
- my $val=shift;
-
- return (defined $val && (lc($val) eq gettext("yes") || lc($val) eq "yes" || $val eq "1"));
-}
-
-sub inject {
- # Injects a new function into the symbol table to replace an
- # exported function.
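- # e.g. a plugin can replace bestlink wiki-wide with
- # inject(name => "IkiWiki::bestlink", call => \&mybestlink),
- # where mybestlink is the plugin's replacement sub.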
- my %params=@_;
-
- # This is deep ugly perl foo, beware.
- no strict;
- no warnings;
- if (! defined $params{parent}) {
- $params{parent}='::';
- $params{old}=\&{$params{name}};
- $params{name}=~s/.*:://;
- }
- my $parent=$params{parent};
- foreach my $ns (grep /^\w+::/, keys %{$parent}) {
- $ns = $params{parent} . $ns;
- inject(%params, parent => $ns) unless $ns eq '::main::';
- *{$ns . $params{name}} = $params{call}
- if exists ${$ns}{$params{name}} &&
- \&{${$ns}{$params{name}}} == $params{old};
- }
- use strict;
- use warnings;
-}
-
-sub add_link ($$;$) {
- my $page=shift;
- my $link=shift;
- my $type=shift;
-
- push @{$links{$page}}, $link
- unless grep { $_ eq $link } @{$links{$page}};
-
- if (defined $type) {
- $typedlinks{$page}{$type}{$link} = 1;
- }
-}
-
-sub add_autofile ($$$) {
- my $file=shift;
- my $plugin=shift;
- my $generator=shift;
-
- $autofiles{$file}{plugin}=$plugin;
- $autofiles{$file}{generator}=$generator;
-}
-
-sub useragent () {
- eval q{use LWP};
- error($@) if $@;
-
- return LWP::UserAgent->new(
- cookie_jar => $config{cookiejar},
- env_proxy => 1, # respect proxy env vars
- agent => $config{useragent},
- protocols_allowed => [qw(http https)],
- );
-}
-
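-# With protocols_allowed set, a request such as $ua->get("file:///etc/passwd")
-# is refused by LWP rather than reading the local file; this is the
-# server-side request forgery fixed by CVE-2019-9187.
-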
-sub sortspec_translate ($$) {
- my $spec = shift;
- my $reverse = shift;
-
- my $code = "";
- my @data;
- while ($spec =~ m{
- \s*
- (-?) # group 1: perhaps negated
- \s*
- ( # group 2: a word
- \w+\([^\)]*\) # command(params)
- |
- [^\s]+ # or anything else
- )
- \s*
- }gx) {
- my $negated = $1;
- my $word = $2;
- my $params = undef;
-
- if ($word =~ m/^(\w+)\((.*)\)$/) {
- # command with parameters
- $params = $2;
- $word = $1;
- }
- elsif ($word !~ m/^\w+$/) {
- error(sprintf(gettext("invalid sort type %s"), $word));
- }
-
- if (length $code) {
- $code .= " || ";
- }
-
- if ($negated) {
- $code .= "-";
- }
-
- if (exists $IkiWiki::SortSpec::{"cmp_$word"}) {
- if (defined $params) {
- push @data, $params;
- $code .= "IkiWiki::SortSpec::cmp_$word(\$data[$#data])";
- }
- else {
- $code .= "IkiWiki::SortSpec::cmp_$word(undef)";
- }
- }
- else {
- error(sprintf(gettext("unknown sort type %s"), $word));
- }
- }
-
- if (! length $code) {
- # undefined sorting method... sort arbitrarily
- return sub { 0 };
- }
-
- if ($reverse) {
- $code="-($code)";
- }
-
- no warnings;
- return eval 'sub { '.$code.' }';
-}
-
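-# e.g. sortspec_translate("-age title", 0) compiles to a comparator
-# equivalent to sub { -IkiWiki::SortSpec::cmp_age(undef) ||
-# IkiWiki::SortSpec::cmp_title(undef) }, assuming cmp_age and cmp_title
-# are defined in IkiWiki::SortSpec.
-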
-sub pagespec_translate ($) {
- my $spec=shift;
-
- # Convert spec to perl code.
- my $code="";
- my @data;
- while ($spec=~m{
- \s* # ignore whitespace
- ( # 1: match a single word
- \! # !
- |
- \( # (
- |
- \) # )
- |
- \w+\([^\)]*\) # command(params)
- |
- [^\s()]+ # any other text
- )
- \s* # ignore whitespace
- }gx) {
- my $word=$1;
- if (lc $word eq 'and') {
- $code.=' &';
- }
- elsif (lc $word eq 'or') {
- $code.=' |';
- }
- elsif ($word eq "(" || $word eq ")" || $word eq "!") {
- $code.=' '.$word;
- }
- elsif ($word =~ /^(\w+)\((.*)\)$/) {
- if (exists $IkiWiki::PageSpec::{"match_$1"}) {
- push @data, $2;
- $code.="IkiWiki::PageSpec::match_$1(\$page, \$data[$#data], \@_)";
- }
- else {
- push @data, qq{unknown function in pagespec "$word"};
- $code.="IkiWiki::ErrorReason->new(\$data[$#data])";
- }
- }
- else {
- push @data, $word;
- $code.=" IkiWiki::PageSpec::match_glob(\$page, \$data[$#data], \@_)";
- }
- }
-
- if (! length $code) {
- $code="IkiWiki::FailReason->new('empty pagespec')";
- }
-
- no warnings;
- return eval 'sub { my $page=shift; '.$code.' }';
-}
-
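-# e.g. pagespec_translate("blog/* and !*/Discussion") compiles to roughly
-# sub { my $page=shift; IkiWiki::PageSpec::match_glob($page, "blog/*", @_)
-# & ! IkiWiki::PageSpec::match_glob($page, "*/Discussion", @_) }
-# (the literal strings actually live in the @data closure).
-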
-sub pagespec_match ($$;@) {
- my $page=shift;
- my $spec=shift;
- my @params=@_;
-
- # Backwards compatibility with old calling convention.
- if (@params == 1) {
- unshift @params, 'location';
- }
-
- my $sub=pagespec_translate($spec);
- return IkiWiki::ErrorReason->new("syntax error in pagespec \"$spec\"")
- if ! defined $sub;
- return $sub->($page, @params);
-}
-
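-# e.g. pagespec_match("blog/foo", "blog/*") returns an
-# IkiWiki::SuccessReason that is true in boolean context and stringifies
-# to "blog/* matches blog/foo" (assuming blog/foo is not internal).
-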
-# e.g. @pages = sort_pages("title", \@pages, reverse => "yes")
-#
-# Not exported yet, but could be in future if it is generally useful.
-# Note that this signature is not the same as IkiWiki::SortSpec::sort_pages,
-# which is "more internal".
-sub sort_pages ($$;@) {
- my $sort = shift;
- my $list = shift;
- my %params = @_;
- $sort = sortspec_translate($sort, $params{reverse});
- return IkiWiki::SortSpec::sort_pages($sort, @$list);
-}
-
-sub pagespec_match_list ($$;@) {
- my $page=shift;
- my $pagespec=shift;
- my %params=@_;
-
- # Backwards compatibility with old calling convention.
- if (ref $page) {
- print STDERR "warning: a plugin (".caller().") is using pagespec_match_list in an obsolete way, and needs to be updated\n";
- $params{list}=$page;
- $page=$params{location}; # ugh!
- }
-
- my $sub=pagespec_translate($pagespec);
- error "syntax error in pagespec \"$pagespec\""
- if ! defined $sub;
- my $sort=sortspec_translate($params{sort}, $params{reverse})
- if defined $params{sort};
-
- my @candidates;
- if (exists $params{list}) {
- @candidates=exists $params{filter}
- ? grep { ! $params{filter}->($_) } @{$params{list}}
- : @{$params{list}};
- }
- else {
- @candidates=exists $params{filter}
- ? grep { ! $params{filter}->($_) } keys %pagesources
- : keys %pagesources;
- }
-
- # clear params, remainder is passed to pagespec
- $depends{$page}{$pagespec} |= ($params{deptype} || $DEPEND_CONTENT);
- my $num=$params{num};
- delete @params{qw{num deptype reverse sort filter list}};
-
- # when only the top matches will be returned, it's efficient to
- # sort before matching to the pagespec.
- if (defined $num && defined $sort) {
- @candidates=IkiWiki::SortSpec::sort_pages(
- $sort, @candidates);
- }
-
- my @matches;
- my $firstfail;
- my $count=0;
- my $accum=IkiWiki::SuccessReason->new();
- foreach my $p (@candidates) {
- my $r=$sub->($p, %params, location => $page);
- error(sprintf(gettext("cannot match pages: %s"), $r))
- if $r->isa("IkiWiki::ErrorReason");
- unless ($r || $r->influences_static) {
- $r->remove_influence($p);
- }
- $accum |= $r;
- if ($r) {
- push @matches, $p;
- last if defined $num && ++$count == $num;
- }
- }
-
- # Add simple dependencies for accumulated influences.
- my $i=$accum->influences;
- foreach my $k (keys %$i) {
- $depends_simple{$page}{lc $k} |= $i->{$k};
- }
-
- # when all matches will be returned, it's efficient to
- # sort after matching.
- if (! defined $num && defined $sort) {
- return IkiWiki::SortSpec::sort_pages(
- $sort, @matches);
- }
- else {
- return @matches;
- }
-}
-
-sub pagespec_valid ($) {
- my $spec=shift;
-
- return defined pagespec_translate($spec);
-}
-
-sub glob2re ($) {
- my $re=quotemeta(shift);
- $re=~s/\\\*/.*/g;
- $re=~s/\\\?/./g;
- return qr/^$re$/i;
-}
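-
-# e.g. glob2re("blog/*") compiles to qr/^blog\/.*$/i.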
-
-package IkiWiki::FailReason;
-
-use overload (
- '""' => sub { $_[0][0] },
- '0+' => sub { 0 },
- '!' => sub { bless $_[0], 'IkiWiki::SuccessReason'},
- '&' => sub { $_[0]->merge_influences($_[1], 1); $_[0] },
- '|' => sub { $_[1]->merge_influences($_[0]); $_[1] },
- fallback => 1,
-);
-
-our @ISA = 'IkiWiki::SuccessReason';
-
-package IkiWiki::SuccessReason;
-
-# A blessed array-ref:
-#
-# [0]: human-readable reason for success (or, in FailReason subclass, failure)
-# [1]{""}:
-# - if absent or false, the influences of this evaluation are "static",
-# see the influences_static method
-# - if true, they are dynamic (not static)
-# [1]{any other key}:
-# the dependency types of influences, as returned by the influences method
-
-use overload (
- # in string context, it's the human-readable reason
- '""' => sub { $_[0][0] },
- # in boolean context, SuccessReason is 1 and FailReason is 0
- '0+' => sub { 1 },
- # negating a result gives the opposite result with the same influences
- '!' => sub { bless $_[0], 'IkiWiki::FailReason'},
- # A & B = (A ? B : A) with the influences of both
- '&' => sub { $_[1]->merge_influences($_[0], 1); $_[1] },
- # A | B = (A ? A : B) with the influences of both
- '|' => sub { $_[0]->merge_influences($_[1]); $_[0] },
- fallback => 1,
-);
-
-# SuccessReason->new("human-readable reason", page => deptype, ...)
-
-sub new {
- my $class = shift;
- my $value = shift;
- return bless [$value, {@_}], $class;
-}
-
-# influences(): return a reference to a copy of the hash
-# { page => dependency type } describing the pages that indirectly influenced
-# this result, but would not cause a dependency through ikiwiki's core
-# dependency logic.
-#
-# See [[todo/dependency_types]] for extensive discussion of what this means.
-#
-# influences(page => deptype, ...): remove all influences, replace them
-# with the arguments, and return a reference to a copy of the new influences.
-
-sub influences {
- my $this=shift;
- $this->[1]={@_} if @_;
- my %i=%{$this->[1]};
- delete $i{""};
- return \%i;
-}
-
-# True if this result has the same influences whichever page it matches,
-# For instance, whether bar matches backlink(foo) is influenced only by
-# the set of links in foo, so its only influence is { foo => DEPEND_LINKS },
-# which does not mention bar anywhere.
-#
-# False if this result would have different influences when matching
-# different pages. For instance, when testing whether link(foo) matches bar,
-# { bar => DEPEND_LINKS } is an influence on that result, because changing
-# bar's links could change the outcome; so its influences are not the same
-# as when testing whether link(foo) matches baz.
-#
-# Static influences are one of the things that make pagespec_match_list
-# more efficient than repeated calls to pagespec_match.
-
-sub influences_static {
- return ! $_[0][1]->{""};
-}
-
-# Change the influences of $this to be the influences of "$this & $other"
-# or "$this | $other".
-#
-# If both $this and $other are either successful or have influences,
-# or this is an "or" operation, the result has all the influences from
-# either of the arguments. It has dynamic influences if either argument
-# has dynamic influences.
-#
-# If this is an "and" operation, and at least one argument is a
-# FailReason with no influences, the result has no influences, and they
-# are not dynamic. For instance, link(foo) matching bar is influenced
-# by bar, but enabled(ddate) has no influences. Suppose ddate is disabled;
-# then (link(foo) and enabled(ddate)) not matching bar is not influenced by
-# bar, because it would be false however often you edit bar.
-
-sub merge_influences {
- my $this=shift;
- my $other=shift;
- my $anded=shift;
-
- # This "if" is odd because it needs to avoid negating $this
- # or $other, which would alter the objects in-place. Be careful.
- if (! $anded || (($this || %{$this->[1]}) &&
- ($other || %{$other->[1]}))) {
- foreach my $influence (keys %{$other->[1]}) {
- $this->[1]{$influence} |= $other->[1]{$influence};
- }
- }
- else {
- # influence blocker
- $this->[1]={};
- }
-}
-
-# Change $this so it is not considered to be influenced by $torm.
-
-sub remove_influence {
- my $this=shift;
- my $torm=shift;
-
- delete $this->[1]{$torm};
-}
-
-package IkiWiki::ErrorReason;
-
-our @ISA = 'IkiWiki::FailReason';
-
-package IkiWiki::PageSpec;
-
-sub derel ($$) {
- my $path=shift;
- my $from=shift;
-
- if ($path =~ m!^\.(/|$)!) {
- if ($1) {
- $from=~s#/?[^/]+$## if defined $from;
- $path=~s#^\./##;
- $path="$from/$path" if defined $from && length $from;
- }
- else {
- $path = $from;
- $path = "" unless defined $path;
- }
- }
-
- return $path;
-}
-
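-# e.g. derel("./peers", "blog/post") is "blog/peers", and
-# derel(".", "blog/post") is "blog/post".
-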
-my %glob_cache;
-
-sub match_glob ($$;@) {
- my $page=shift;
- my $glob=shift;
- my %params=@_;
-
- $glob=derel($glob, $params{location});
-
- # Instead of converting the glob to a regex every time,
- # cache the compiled regex to save time.
- my $re=$glob_cache{$glob};
- unless (defined $re) {
- $glob_cache{$glob} = $re = IkiWiki::glob2re($glob);
- }
- if ($page =~ $re) {
- if (! IkiWiki::isinternal($page) || $params{internal}) {
- return IkiWiki::SuccessReason->new("$glob matches $page");
- }
- else {
- return IkiWiki::FailReason->new("$glob matches $page, but the page is an internal page");
- }
- }
- else {
- return IkiWiki::FailReason->new("$glob does not match $page");
- }
-}
-
-sub match_internal ($$;@) {
- return match_glob(shift, shift, @_, internal => 1)
-}
-
-sub match_page ($$;@) {
- my $page=shift;
- my $match=match_glob($page, shift, @_);
- if ($match) {
- my $source=exists $IkiWiki::pagesources{$page} ?
- $IkiWiki::pagesources{$page} :
- $IkiWiki::delpagesources{$page};
- my $type=defined $source ? IkiWiki::pagetype($source) : undef;
- if (! defined $type) {
- return IkiWiki::FailReason->new("$page is not a page");
- }
- }
- return $match;
-}
-
-sub match_link ($$;@) {
- my $page=shift;
- my $link=lc(shift);
- my %params=@_;
-
- $link=derel($link, $params{location});
- my $from=exists $params{location} ? $params{location} : '';
- my $linktype=$params{linktype};
- my $qualifier='';
- if (defined $linktype) {
- $qualifier=" with type $linktype";
- }
-
- my $links = $IkiWiki::links{$page};
- return IkiWiki::FailReason->new("$page has no links", $page => $IkiWiki::DEPEND_LINKS, "" => 1)
- unless $links && @{$links};
- my $bestlink = IkiWiki::bestlink($from, $link);
- foreach my $p (@{$links}) {
- next unless (! defined $linktype || exists $IkiWiki::typedlinks{$page}{$linktype}{$p});
-
- if (length $bestlink) {
- if ($bestlink eq IkiWiki::bestlink($page, $p)) {
- return IkiWiki::SuccessReason->new("$page links to $link$qualifier", $page => $IkiWiki::DEPEND_LINKS, "" => 1)
- }
- }
- else {
- if (match_glob($p, $link, %params)) {
- return IkiWiki::SuccessReason->new("$page links to page $p$qualifier, matching $link", $page => $IkiWiki::DEPEND_LINKS, "" => 1)
- }
- my ($p_rel)=$p=~/^\/?(.*)/;
- $link=~s/^\///;
- if (match_glob($p_rel, $link, %params)) {
- return IkiWiki::SuccessReason->new("$page links to page $p_rel$qualifier, matching $link", $page => $IkiWiki::DEPEND_LINKS, "" => 1)
- }
- }
- }
- return IkiWiki::FailReason->new("$page does not link to $link$qualifier", $page => $IkiWiki::DEPEND_LINKS, "" => 1);
-}
-
-sub match_backlink ($$;@) {
- my $page=shift;
- my $testpage=shift;
- my %params=@_;
- if ($testpage eq '.') {
- $testpage = $params{'location'}
- }
- my $ret=match_link($testpage, $page, @_);
- $ret->influences($testpage => $IkiWiki::DEPEND_LINKS);
- return $ret;
-}
-
-sub match_created_before ($$;@) {
- my $page=shift;
- my $testpage=shift;
- my %params=@_;
-
- $testpage=derel($testpage, $params{location});
-
- if (exists $IkiWiki::pagectime{$testpage}) {
- if ($IkiWiki::pagectime{$page} < $IkiWiki::pagectime{$testpage}) {
- return IkiWiki::SuccessReason->new("$page created before $testpage", $testpage => $IkiWiki::DEPEND_PRESENCE);
- }
- else {
- return IkiWiki::FailReason->new("$page not created before $testpage", $testpage => $IkiWiki::DEPEND_PRESENCE);
- }
- }
- else {
- return IkiWiki::ErrorReason->new("$testpage does not exist", $testpage => $IkiWiki::DEPEND_PRESENCE);
- }
-}
-
-sub match_created_after ($$;@) {
- my $page=shift;
- my $testpage=shift;
- my %params=@_;
-
- $testpage=derel($testpage, $params{location});
-
- if (exists $IkiWiki::pagectime{$testpage}) {
- if ($IkiWiki::pagectime{$page} > $IkiWiki::pagectime{$testpage}) {
- return IkiWiki::SuccessReason->new("$page created after $testpage", $testpage => $IkiWiki::DEPEND_PRESENCE);
- }
- else {
- return IkiWiki::FailReason->new("$page not created after $testpage", $testpage => $IkiWiki::DEPEND_PRESENCE);
- }
- }
- else {
- return IkiWiki::ErrorReason->new("$testpage does not exist", $testpage => $IkiWiki::DEPEND_PRESENCE);
- }
-}
-
-sub match_creation_day ($$;@) {
- my $page=shift;
- my $d=shift;
- if ($d !~ /^\d+$/) {
- return IkiWiki::ErrorReason->new("invalid day $d");
- }
- if ((localtime($IkiWiki::pagectime{$page}))[3] == $d) {
- return IkiWiki::SuccessReason->new('creation_day matched');
- }
- else {
- return IkiWiki::FailReason->new('creation_day did not match');
- }
-}
-
-sub match_creation_month ($$;@) {
- my $page=shift;
- my $m=shift;
- if ($m !~ /^\d+$/) {
- return IkiWiki::ErrorReason->new("invalid month $m");
- }
- if ((localtime($IkiWiki::pagectime{$page}))[4] + 1 == $m) {
- return IkiWiki::SuccessReason->new('creation_month matched');
- }
- else {
- return IkiWiki::FailReason->new('creation_month did not match');
- }
-}
-
-sub match_creation_year ($$;@) {
- my $page=shift;
- my $y=shift;
- if ($y !~ /^\d+$/) {
- return IkiWiki::ErrorReason->new("invalid year $y");
- }
- if ((localtime($IkiWiki::pagectime{$page}))[5] + 1900 == $y) {
- return IkiWiki::SuccessReason->new('creation_year matched');
- }
- else {
- return IkiWiki::FailReason->new('creation_year did not match');
- }
-}
-
-sub match_user ($$;@) {
- shift;
- my $user=shift;
- my %params=@_;
-
- if (! exists $params{user}) {
- return IkiWiki::ErrorReason->new("no user specified");
- }
-
- my $regexp=IkiWiki::glob2re($user);
-
- if (defined $params{user} && $params{user}=~$regexp) {
- return IkiWiki::SuccessReason->new("user is $user");
- }
- elsif (! defined $params{user}) {
- return IkiWiki::FailReason->new("not logged in");
- }
- else {
- return IkiWiki::FailReason->new("user is $params{user}, not $user");
- }
-}
-
-sub match_admin ($$;@) {
- shift;
- shift;
- my %params=@_;
-
- if (! exists $params{user}) {
- return IkiWiki::ErrorReason->new("no user specified");
- }
-
- if (defined $params{user} && IkiWiki::is_admin($params{user})) {
- return IkiWiki::SuccessReason->new("user is an admin");
- }
- elsif (! defined $params{user}) {
- return IkiWiki::FailReason->new("not logged in");
- }
- else {
- return IkiWiki::FailReason->new("user is not an admin");
- }
-}
-
-sub match_ip ($$;@) {
- shift;
- my $ip=shift;
- my %params=@_;
-
- if (! exists $params{ip}) {
- return IkiWiki::ErrorReason->new("no IP specified");
- }
-
- my $regexp=IkiWiki::glob2re(lc $ip);
-
- if (defined $params{ip} && lc $params{ip}=~$regexp) {
- return IkiWiki::SuccessReason->new("IP is $ip");
- }
- else {
- return IkiWiki::FailReason->new("IP is $params{ip}, not $ip");
- }
-}
-
-package IkiWiki::SortSpec;
-
-# This is in the SortSpec namespace so that the $a and $b that sort() uses
-# are easily available in this namespace, for cmp functions to use them.
-sub sort_pages {
- my $f=shift;
- sort $f @_
-}
-
-sub cmp_title {
- IkiWiki::pagetitle(IkiWiki::basename($a))
- cmp
- IkiWiki::pagetitle(IkiWiki::basename($b))
-}
-
-sub cmp_path { IkiWiki::pagetitle($a) cmp IkiWiki::pagetitle($b) }
-sub cmp_mtime { $IkiWiki::pagemtime{$b} <=> $IkiWiki::pagemtime{$a} }
-sub cmp_age { $IkiWiki::pagectime{$b} <=> $IkiWiki::pagectime{$a} }
-
-1
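
The PageSpec functions above share one pattern: compile the glob once,
cache it in %glob_cache, and return an IkiWiki::SuccessReason or
IkiWiki::FailReason object rather than a plain boolean. A minimal
standalone sketch of the caching half of that pattern -- glob_to_re here
is a simplified stand-in for IkiWiki::glob2re, not the real
implementation:

    #!/usr/bin/perl
    use strict;
    use warnings;

    my %glob_cache;

    sub glob_to_re {
        my $glob = shift;
        my $re = quotemeta $glob;   # escape everything first
        $re =~ s/\\\*/.*/g;         # then let * match any run of characters
        $re =~ s/\\\?/./g;          # and ? match a single character
        return qr/^$re$/;
    }

    sub matches {
        my ($page, $glob) = @_;
        # Compile each distinct glob once; reuse the compiled regex after.
        my $re = $glob_cache{$glob} //= glob_to_re($glob);
        return $page =~ $re;
    }

    print matches('blog/post1', 'blog/*') ? "match\n" : "no match\n";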
diff -Nru ikiwiki-3.20141016.4+deb8u1/.pc/CVE-2019-9187-4.patch/doc/plugins/aggregate.mdwn ikiwiki-3.20141016.4/.pc/CVE-2019-9187-4.patch/doc/plugins/aggregate.mdwn
--- ikiwiki-3.20141016.4+deb8u1/.pc/CVE-2019-9187-4.patch/doc/plugins/aggregate.mdwn 2019-03-07 17:33:15.000000000 +1100
+++ ikiwiki-3.20141016.4/.pc/CVE-2019-9187-4.patch/doc/plugins/aggregate.mdwn 1970-01-01 10:00:00.000000000 +1000
@@ -1,57 +0,0 @@
-[[!template id=plugin name=aggregate author="[[Joey]]"]]
-[[!tag type/special-purpose]]
-
-This plugin allows content from other feeds to be aggregated into the
-wiki. To specify feeds to aggregate, use the
-[[ikiwiki/directive/aggregate]] [[ikiwiki/directive]].
-
-## requirements
-
-The [[meta]] and [[tag]] plugins are also recommended for use with this
-one. Either the [[htmltidy]] or [[htmlbalance]] plugin is suggested, since
-feeds can easily contain html problems, some of which these plugins can fix.
-
-## triggering aggregation
-
-You will need to run ikiwiki periodically from a cron job, passing it the
---aggregate parameter, to make it check for new posts. Here's an example
-crontab entry:
-
- */15 * * * * ikiwiki --setup my.wiki --aggregate --refresh
-
-The plugin updates a file `.ikiwiki/aggregatetime` with the unix time stamp
-when the next aggregation run could occur. (The file may be empty, if no
-aggregation is required.) This can be integrated into more complex cron
-jobs or systems to trigger aggregation only when needed.
-
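For illustration, a tiny wrapper that a cron job could call instead of
running ikiwiki unconditionally, skipping the run unless the timestamp
says it is due. The paths and setup file name are hypothetical:

    #!/usr/bin/perl
    use strict;
    use warnings;

    # Hypothetical locations; adjust for the wiki being aggregated.
    my $stamp = "$ENV{HOME}/wiki/.ikiwiki/aggregatetime";

    open(my $fh, '<', $stamp) or exit 0;  # no file yet: nothing to do
    my $line = <$fh>;
    # An empty file means no aggregation is currently required.
    exit 0 unless defined $line && $line =~ /(\d+)/;
    exit 0 if time < $1;                  # next run is still in the future
    exec 'ikiwiki', '--setup', "$ENV{HOME}/my.wiki", '--aggregate', '--refresh';
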
-Alternatively, you can allow `ikiwiki.cgi` to trigger the aggregation. You
-should only need this if for some reason you cannot use cron, and instead
-want to use a service such as [WebCron](http://webcron.org). To enable
-this, turn on `aggregate_webtrigger` in your setup file. The url to
-visit is `http://whatever/ikiwiki.cgi?do=aggregate_webtrigger`. Anyone
-can visit the url to trigger an aggregation run, but it will only check
-each feed if its `updateinterval` has passed.
-
-## aggregated pages
-
-This plugin creates a page for each aggregated item.
-
-If the `aggregateinternal` option is enabled in the setup file (which is
-the default), aggregated pages are stored in the source directory with a
-"._aggregated" extension. These pages cannot be edited by web users, and
-do not generate first-class wiki pages. They can still be inlined into a
-blog, but you have to use `internal` in [[PageSpecs|IkiWiki/PageSpec]],
-like `internal(blog/*)`.
-
-If `aggregateinternal` is disabled, you will need to enable the [[html]]
-plugin as well as aggregate itself, since feed entries will be stored as
-HTML, and as first-class wiki pages -- each one generates
-a separate HTML page in the output, and they can even be edited. This
-option is provided only for backwards compatibility.
-
-## cookies
-
-The `cookiejar` option can be used to configure how [[!cpan LWP::UserAgent]]
-handles cookies. The default is to read them from a file
-`~/.ikiwiki/cookies`, which can be populated using standard perl cookie
-tools like [[!cpan HTTP::Cookies]].
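
For illustration, populating that file with HTTP::Cookies might look
like the following; the cookie name, value and domain are placeholders:

    #!/usr/bin/perl
    use strict;
    use warnings;
    use HTTP::Cookies;

    my $jar = HTTP::Cookies->new(
        file     => "$ENV{HOME}/.ikiwiki/cookies",
        autosave => 1,            # written out when $jar is destroyed
    );
    # set_cookie($version, $key, $value, $path, $domain, $port,
    #            $path_spec, $secure, $maxage, $discard)
    $jar->set_cookie(0, 'session', 'placeholder-value', '/',
        '.example.com', undef, 1, 0, 86400, 0);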
diff -Nru ikiwiki-3.20141016.4+deb8u1/.pc/CVE-2019-9187-4.patch/doc/plugins/blogspam.mdwn ikiwiki-3.20141016.4/.pc/CVE-2019-9187-4.patch/doc/plugins/blogspam.mdwn
--- ikiwiki-3.20141016.4+deb8u1/.pc/CVE-2019-9187-4.patch/doc/plugins/blogspam.mdwn 2019-03-07 17:33:15.000000000 +1100
+++ ikiwiki-3.20141016.4/.pc/CVE-2019-9187-4.patch/doc/plugins/blogspam.mdwn 1970-01-01 10:00:00.000000000 +1000
@@ -1,32 +0,0 @@
-[[!template id=plugin name=blogspam author="[[Joey]]"]]
-[[!tag type/auth type/comments]]
-
-This plugin adds antispam support to ikiwiki, using the
-[blogspam.net](http://blogspam.net/) API. Both page edits and
-[[comment|comments]] postings can be checked for spam. Page edits that
-appear to contain spam will be rejected; comments that look spammy will be
-stored in a queue for moderation by an admin.
-
-To check for and moderate comments, log in to the wiki as an admin,
-go to your Preferences page, and click the "Comment Moderation" button.
-
-The plugin requires the [[!cpan JSON]] perl module.
-
-You can control how content is tested via the `blogspam_options` setting.
-The list of options is [here](http://blogspam.net/api/testComment.html#options).
-By default, the options are configured in a way that is appropriate for
-wiki content. This includes turning off some of the more problematic tests.
-An interesting option for testing is `fail`; by setting it (e.g.,
-`blogspam_options => 'fail'`), *all* comments will be marked as SPAM, so that
-you can check whether the interaction with blogspam.net works.
-
-The `blogspam_pagespec` setting is a [[ikiwiki/PageSpec]] that can be
-used to configure which pages are checked for spam. The default is to check
-all edits. If you only want to check [[comments]] (not wiki page edits),
-set it to "postcomment(*)". Posts by admins are never checked for spam.
-
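A hypothetical setup-file excerpt (Perl-format setup) combining the two
settings just described, useful for a quick end-to-end test:

    #!/usr/bin/perl
    use IkiWiki::Setup::Standard {
        wikiname          => "testwiki",
        add_plugins       => [qw(blogspam)],
        blogspam_options  => 'fail',            # every comment flagged as spam
        blogspam_pagespec => 'postcomment(*)',  # check comments only
    };
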
-By default, the blogspam.net server is used to do the spam checking. To
-change this, the `blogspam_server` option can be set to the url for a
-different server implementing the same API. Note that content is sent
-unencrypted over the internet to the server, and the server sees
-the full text of the content.
diff -Nru ikiwiki-3.20141016.4+deb8u1/.pc/CVE-2019-9187-4.patch/doc/plugins/openid.mdwn ikiwiki-3.20141016.4/.pc/CVE-2019-9187-4.patch/doc/plugins/openid.mdwn
--- ikiwiki-3.20141016.4+deb8u1/.pc/CVE-2019-9187-4.patch/doc/plugins/openid.mdwn 2019-03-07 17:33:15.000000000 +1100
+++ ikiwiki-3.20141016.4/.pc/CVE-2019-9187-4.patch/doc/plugins/openid.mdwn 1970-01-01 10:00:00.000000000 +1000
@@ -1,37 +0,0 @@
-[[!template id=plugin name=openid core=1 author="[[Joey]]"]]
-[[!tag type/auth]]
-
-This plugin allows users to use their [OpenID](http://openid.net/) to log
-into the wiki.
-
-The plugin needs the [[!cpan Net::OpenID::Consumer]] perl module.
-Version 1.x is needed in order for OpenID v2 to work.
-
-The [[!cpan LWPx::ParanoidAgent]] perl module is used if available, for
-added security. Finally, the [[!cpan Crypt::SSLeay]] perl module is needed
-to support users entering "https" OpenID urls.
-
-This plugin is enabled by default, but can be turned off if you want to
-only use some other form of authentication, such as [[passwordauth]].
-
-## options
-
-These options do not normally need to be set, but can be useful in
-certain setups.
-
-* `openid_realm` can be used to control the scope of the openid request.
-  It defaults to the `cgiurl` (or `openid_cgiurl` if set), only allowing
-  ikiwiki's [[CGI]] to authenticate. If you have multiple ikiwiki instances,
-  or other things using openid on the same site, you may choose to put them
-  all in the same realm to improve the user's openid experience. It is a
-  url pattern, so can be set to eg "http://*.example.com/" (see the sketch
-  after this list).
-
-* `openid_cgiurl` can be used to cause a different `cgiurl` than usual
-  to be used when doing openid authentication. The `openid_cgiurl` must
- point to an ikiwiki [[CGI]], and it will need to match the `openid_realm`
- to work.
-
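The sketch referred to above -- a hypothetical Perl-format setup excerpt
putting a wiki under a shared realm; all domains are examples:

    #!/usr/bin/perl
    use IkiWiki::Setup::Standard {
        cgiurl        => "http://wiki.example.com/ikiwiki.cgi",
        # One realm for every openid consumer under *.example.com:
        openid_realm  => "http://*.example.com/",
        openid_cgiurl => "http://wiki.example.com/ikiwiki.cgi",
    };
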
-## troubleshooting
-
-See [[plugins/openid/troubleshooting]] for a number of issues that may
-need to be addressed when setting up ikiwiki to accept OpenID logins reliably.
diff -Nru ikiwiki-3.20141016.4+deb8u1/.pc/CVE-2019-9187-4.patch/doc/plugins/pinger.mdwn ikiwiki-3.20141016.4/.pc/CVE-2019-9187-4.patch/doc/plugins/pinger.mdwn
--- ikiwiki-3.20141016.4+deb8u1/.pc/CVE-2019-9187-4.patch/doc/plugins/pinger.mdwn 2019-03-07 17:33:15.000000000 +1100
+++ ikiwiki-3.20141016.4/.pc/CVE-2019-9187-4.patch/doc/plugins/pinger.mdwn 1970-01-01 10:00:00.000000000 +1000
@@ -1,20 +0,0 @@
-[[!template id=plugin name=pinger author="[[Joey]]"]]
-[[!tag type/special-purpose]]
-
-This plugin allows ikiwiki to be configured to hit a URL each time it
-updates the wiki. One way to use this is in conjunction with the [[pingee]]
-plugin to set up a loosely coupled mirror network, or a branched version of
-a wiki. By pinging the mirror or branch each time the main wiki changes, it
-can be kept up-to-date.
-
-To configure what URLs to ping, use the [[ikiwiki/directive/ping]]
-[[ikiwiki/directive]].
-
-The [[!cpan LWP]] perl module is used for pinging. Or the [[!cpan
-LWPx::ParanoidAgent]] perl module is used if available, for added security.
-Finally, the [[!cpan Crypt::SSLeay]] perl module is needed to support pinging
-"https" urls.
-
-By default the pinger will try to ping a site for 15 seconds before timing
-out. This timeout can be changed by setting the `pinger_timeout`
-configuration setting in the setup file.
diff -Nru ikiwiki-3.20141016.4+deb8u1/.pc/CVE-2019-9187-4.patch/doc/security.mdwn ikiwiki-3.20141016.4/.pc/CVE-2019-9187-4.patch/doc/security.mdwn
--- ikiwiki-3.20141016.4+deb8u1/.pc/CVE-2019-9187-4.patch/doc/security.mdwn 2019-03-07 17:34:53.000000000 +1100
+++ ikiwiki-3.20141016.4/.pc/CVE-2019-9187-4.patch/doc/security.mdwn 1970-01-01 10:00:00.000000000 +1000
@@ -1,528 +0,0 @@
-Let's do an ikiwiki security analysis.
-
-If you are using ikiwiki to render pages that only you can edit, do not
-generate any wrappers, and do not use the cgi, then there are no more
-security issues with this program than with cat(1). If, however, you let
-others edit pages in your wiki, then some possible security issues do need
-to be kept in mind.
-
-[[!toc levels=2]]
-
-----
-
-# Probable holes
-
-_(The list of things to fix.)_
-
-## commit spoofing
-
-Anyone with direct commit access can forge "web commit from foo" and
-make it appear on [[RecentChanges]] like foo committed. One way to avoid
-this would be to limit web commits to those done by a certain user.
-
-## other stuff to look at
-
-I have been meaning to see if any CRLF injection type things can be
-done in the CGI code.
-
-----
-
-# Potential gotchas
-
-_(Things not to do.)_
-
-## image file etc attacks
-
-If it encounters a file type it does not understand, ikiwiki just copies it
-into place. So if you let users add any kind of file they like, they can
-upload images, movies, windows executables, css files, etc (though not html
-files). If these files exploit security holes in the browser of someone
-who's viewing the wiki, that can be a security problem.
-
-Of course nobody else seems to worry about this in other wikis, so should we?
-
-People with direct commit access can upload such files
-(and if you wanted to you could block that with a pre-commit hook).
-
-The attachments plugin is not enabled by default. If you choose to
-enable it, you should make use of its powerful abilities to filter allowed
-types of attachments, and only let trusted users upload.
-
-It is possible to embed an image in a page edited over the web, by using
-`img src="data:image/png;"`. Ikiwiki's htmlscrubber only allows `data:`
-urls to be used for `image/*` mime types. It's possible that some broken
-browser might ignore the mime type and if the data provided is not an
-image, instead run it as javascript, or something evil like that. Hopefully
-not many browsers are that broken.
-
-## multiple accessors of wiki directory
-
-If multiple people can directly write to the source directory ikiwiki is
-using, or to the destination directory it writes files to, then one can
-cause trouble for the other when they run ikiwiki through symlink attacks.
-
-So it's best if only one person can ever directly write to those directories.
-
-## setup files
-
-Setup files are not safe to keep in the same revision control repository
-with the rest of the wiki. Just don't do it.
-
-## page locking can be bypassed via direct commits
-
-A locked page can only be edited on the web by an admin, but anyone who is
-allowed to commit directly to the repository can bypass this. This is by
-design, although a pre-commit hook could be used to prevent editing of
-locked pages, if you really need to.
-
-## web server attacks
-
-If your web server does any parsing of special sorts of files (for example,
-server parsed html files), then if you let anyone else add files to the wiki,
-they can try to use this to exploit your web server.
-
-----
-
-# Hopefully non-holes
-
-_(AKA, the assumptions that will be the root of most security holes...)_
-
-## exploiting ikiwiki with bad content
-
-Someone could add bad content to the wiki and hope to exploit ikiwiki.
-Note that ikiwiki runs with perl taint checks on, so this is unlikely.
-
-One fun thing in ikiwiki is its handling of a PageSpec, which involves
-translating it into perl and running the perl. Of course, this is done
-*very* carefully to guard against injecting arbitrary perl code.
-
-## publishing cgi scripts
-
-ikiwiki does not allow cgi scripts to be published as part of the wiki. Or
-rather, the script is published, but it's not marked executable (except in
-the case of "destination directory file replacement" below), so hopefully
-your web server will not run it.
-
-## suid wrappers
-
-`ikiwiki --wrapper` is intended to generate a wrapper program that
-runs ikiwiki to update a given wiki. The wrapper can in turn be made suid,
-for example to be used in a [[post-commit]] hook by people who cannot write
-to the html pages, etc.
-
-If the wrapper program is made suid, then any bugs in this wrapper would be
-security holes. The wrapper is written as securely as I know how, is based
-on code that has a history of security use long before ikiwiki, and there's
-been no problem yet.
-
-## shell exploits
-
-ikiwiki does not expose untrusted data to the shell. In fact it doesn't use
-`system(3)` at all, and the only use of backticks is on data supplied by the
-wiki admin and untainted filenames.
-
-Ikiwiki was developed and used for a long time with perl's taint checking
-turned on as a second layer of defense against shell and other exploits. Due
-to a strange [bug](http://bugs.debian.org/411786) in perl, taint checking
-is currently disabled for production builds of ikiwiki.
-
-## cgi data security
-
-When ikiwiki runs as a cgi to edit a page, it is passed the name of the
-page to edit. It has to make sure to sanitise this page, to prevent eg,
-editing of ../../../foo, or editing of files that are not part of the wiki,
-such as subversion dotfiles. This is done by sanitising the filename,
-removing disallowed characters, then making sure it doesn't start with "/"
-or contain ".." or "/.svn/", etc. Annoyingly ad-hoc, this kind of code is
-where security holes breed. It needs a test suite at the very least.
-
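To make the shape of those checks concrete, a standalone sketch --
illustrative only, not ikiwiki's actual sanitising code:

    #!/usr/bin/perl
    use strict;
    use warnings;

    sub sanitize_pagename {
        my $page = shift;
        $page =~ s/[^-[:alnum:]+\/._]//g;    # drop disallowed characters
        return undef if $page =~ m{^/};      # no absolute paths
        return undef if $page =~ m{\.\.};    # no parent-directory escapes
        return undef if $page =~ m{/\.svn/}; # no VCS dotdirs
        return $page;
    }

    print sanitize_pagename('../../../foo') // 'rejected', "\n"; # rejected
    print sanitize_pagename('blog/post1')   // 'rejected', "\n"; # blog/post1
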
-## CGI::Session security
-
-I've audited this module and it is massively insecure by default. ikiwiki
-uses it in one of the few secure ways: by forcing it to write to a
-directory it controls (and not /tmp) and by setting a umask that makes the
-file not be world readable.
-
-## cgi password security
-
-Login to the wiki using [[plugins/passwordauth]] involves sending a password
-in cleartext over the net. Cracking the password only allows editing the wiki
-as that user though. If you care, you can use https, I suppose. If you do use
-https either for all of the wiki, or just the cgi access, then consider using
-the sslcookie option. Using [[plugins/openid]] is a potentially better option.
-
-## XSS holes in CGI output
-
-ikiwiki has been audited to ensure that all cgi script input/output
-is sanitised to prevent XSS attacks. For example, a user can't register
-with a username containing html code (anymore).
-
-It's difficult to know for sure if all such avenues have really been
-closed though.
-
-## HTML::Template security
-
-If the [[plugins/template]] plugin is enabled, all users can modify templates
-like any other part of the wiki. Some trusted users can modify templates
-without it too. This assumes that HTML::Template is secure
-when used with untrusted/malicious templates. (Note that includes are not
-allowed.)
-
-----
-
-# Plugins
-
-The security of [[plugins]] depends on how well they're written and what
-external tools they use. The plugins included in ikiwiki are all held to
-the same standards as the rest of ikiwiki, but with that said, here are
-some security notes for them.
-
-* The [[plugins/img]] plugin assumes that imagemagick/perlmagick are secure
- from malformed image attacks for at least the formats listed in
- `img_allowed_formats`. Imagemagick has had security holes in the
- past. To be able to exploit such a hole, a user would need to be able to
- upload images to the wiki.
-
-----
-
-# Fixed holes
-
-_(Unless otherwise noted, these were discovered and immediately fixed by the
-ikiwiki developers.)_
-
-## destination directory file replacement
-
-Any file in the destination directory that is a valid page filename can be
-replaced, even if it was not originally rendered from a page. For example,
-ikiwiki.cgi could be edited in the wiki, and it would write out a
-replacement. File permissions are preserved. Yipes!
-
-This was fixed by making ikiwiki check if the file it's writing to exists;
-if it does then it has to be a file that it's aware of creating before, or
-it will refuse to create it.
-
-Still, this sort of attack is something to keep in mind.
-
-## symlink attacks
-
-Could a committer trick ikiwiki into following a symlink and operating on
-some other tree that it shouldn't? svn supports symlinks, so a symlink can
-get into the repo. ikiwiki uses File::Find to traverse the repo, and does not
-tell it to follow symlinks, but it might be possible to race replacing a
-directory with a symlink and trick it into following the link.
-
-Also, if someone checks in a symlink to /etc/passwd, ikiwiki would read and
-publish that, which could be used to expose files a committer otherwise
-wouldn't see.
-
-To avoid this, ikiwiki will skip over symlinks when scanning for pages, and
-uses locking to prevent more than one instance running at a time. The lock
-prevents one ikiwiki from running a svn up/git pull/etc at the wrong time
-to race another ikiwiki. So only attackers who can write to the working
-copy on their own can race it.
-
-## symlink + cgi attacks
-
-Similarly, a commit of a symlink could be made; ikiwiki ignores it
-because of the above, but the symlink is still there. Then you edit the
-page from the web, which follows the symlink when reading the page
-(exposing the content), and again when saving the changed page (changing
-the content).
-
-This was fixed for page saving by making ikiwiki refuse to write to files
-that are symlinks, or that are in subdirectories that are symlinks,
-combined with the above locking.
-
-For page editing, it's fixed by ikiwiki checking to make sure that it
-already has found a page by scanning the tree, before loading it for
-editing, which as described above, also is done in a way that avoids
-symlink attacks.
-
-## underlaydir override attacks
-
-ikiwiki also scans an underlaydir for pages; this is used to provide stock
-pages to all wikis without needing to copy them into the wiki. Since ikiwiki
-internally stores only the base filename from the underlaydir or srcdir,
-and searches for a file in either directory when reading a page source,
-there is the potential for ikiwiki's scanner to reject a file from the
-srcdir for some reason (such as it being contained in a directory that is
-symlinked in), find a valid copy of the file in the underlaydir, and then
-when loading the file, mistakenly load the bad file from the srcdir.
-
-This attack is avoided by making ikiwiki refuse to add any files from the
-underlaydir if a file also exists in the srcdir with the same name.
-
-## multiple page source issues
-
-Note that I previously worried that underlay override attacks could also be
-accomplished if ikiwiki were extended to support other page markup
-languages besides markdown. However, a closer look indicates that this is
-not a problem: ikiwiki does preserve the file extension when storing the
-source filename of a page, so a file with another extension that renders to
-the same page name can't bypass the check. Ie, ikiwiki won't skip foo.rst
-in the srcdir, find foo.mdwn in the underlay, decide to render page foo and
-then read the bad foo.mdwn. Instead it will remember the .rst extension and
-only render a file with that extension.
-
-## XSS attacks in page content
-
-ikiwiki supports protecting users from their own broken browsers via the
-[[plugins/htmlscrubber]] plugin, which is enabled by default.
-
-## svn commit logs
-
-It was possible to force a whole series of svn commits to appear to
-have come just before yours, by forging svn log output. This was
-guarded against by using svn log --xml.
-
-ikiwiki escapes any html in svn commit logs to prevent other mischief.
-
-## XML::Parser
-
-XML::Parser is used by the aggregation plugin, and has some security holes.
-Bug #[378411](http://bugs.debian.org/378411) does not
-seem to affect our use, since the data is not encoded as utf-8 at that
-point. #[378412](http://bugs.debian.org/378412) could affect us, although it
-doesn't seem very exploitable. It has a simple fix, and has been fixed in
-Debian unstable.
-
-## include loops
-
-Various directives that cause one page to be included into another could
-be exploited to DOS the wiki, by causing a loop. Ikiwiki has always guarded
-against this one way or another; the current solution should detect all
-types of loops involving preprocessor directives.
-
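The general technique is the classic re-entrancy guard: keep a set of
pages currently being expanded and refuse to expand one twice. A toy
sketch with a hypothetical page graph:

    #!/usr/bin/perl
    use strict;
    use warnings;

    my %includes = (a => ['b'], b => ['a']);  # hypothetical inline graph
    my %on_stack;

    sub expand {
        my $page = shift;
        die "include loop detected at $page\n" if $on_stack{$page}++;
        expand($_) for @{ $includes{$page} || [] };
        delete $on_stack{$page};
    }

    eval { expand('a') };
    print $@;   # "include loop detected at a"
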
-## Online editing of existing css and images
-
-A bug in ikiwiki allowed the web-based editor to edit any file that was in
-the wiki, not just files that are page sources. So an attacker (or a
-genuinely helpful user, which is how the hole came to light) could edit
-files like style.css. It is also theoretically possible that an attacker
-could have used this hole to edit images or other files in the wiki, with
-some difficulty, since all editing would happen in a textarea.
-
-This hole was discovered on 10 Feb 2007 and fixed the same day with the
-release of ikiwiki 1.42. A fix was also backported to Debian etch, as
-version 1.33.1. I recommend upgrading to one of these versions if your wiki
-allows web editing.
-
-## html insertion via title
-
-Missing html escaping of the title contents allowed a web-based editor to
-insert arbitrary html inside the title tag of a page. Since that part of
-the page is not processed by the htmlscrubber, evil html could be injected.
-
-This hole was discovered on 21 March 2007 and fixed the same day (er, hour)
-with the release of ikiwiki 1.46. A fix was also backported to Debian etch,
-as version 1.33.2. I recommend upgrading to one of these versions if your
-wiki allows web editing or aggregates feeds.
-
-## javascript insertion via meta tags
-
-It was possible to use the meta plugin's meta tags to insert arbitrary
-url contents, which could be used to insert stylesheet information
-containing javascript. This was fixed by sanitising meta tags.
-
-This hole was discovered on 21 March 2007 and fixed the same day
-with the release of ikiwiki 1.47. A fix was also backported to Debian etch,
-as version 1.33.3. I recommend upgrading to one of these versions if your
-wiki can be edited by third parties.
-
-## insufficient checking for symlinks in srcdir path
-
-Ikiwiki did not check if the path to the srcdir contained a symlink. If an
-attacker had commit access to the directories in the path, they could
-change it to a symlink, causing ikiwiki to read and publish files that were
-not intended to be published. (But not write to them due to other checks.)
-
-In most configurations, this is not exploitable, because the srcdir is
-checked out of revision control, but the directories leading up to it are
-not. Or, the srcdir is a single subdirectory of a project in revision
-control (ie, `ikiwiki/doc`), and if the subdirectory were a symlink,
-ikiwiki would still typically not follow it.
-
-There are at least two configurations where this is exploitable:
-
-* If the srcdir is a deeper subdirectory of a project. For example, if it is
-  `project/foo/doc`, an attacker can replace `foo` with a symlink to a
-  directory containing a `doc` directory (not a symlink), and ikiwiki
-  would follow the symlink.
-* If the path to the srcdir in ikiwiki's configuration ended in "/",
- and the srcdir is a single subdirectory of a project, (ie,
- `ikiwiki/doc/`), the srcdir could be a symlink and ikiwiki would not
- notice.
-
-This security hole was discovered on 26 November 2007 and fixed the same
-day with the release of ikiwiki 2.14. I recommend upgrading to this version
-if your wiki can be committed to by third parties. Alternatively, don't use
-a trailing slash in the srcdir, and avoid the (unusual) configurations that
-allow the security hole to be exploited.
-
-## javascript insertion via uris
-
-The htmlscrubber did not block javascript in uris. This was fixed by adding
-a whitelist of valid uri types, which does not include javascript.
-([[!cve CVE-2008-0809]]) Some urls specifiable by the meta plugin could also
-theoretically have been used to inject javascript; this was also blocked
-([[!cve CVE-2008-0808]]).
-
-This hole was discovered on 10 February 2008 and fixed the same day
-with the release of ikiwiki 2.31.1. (And a few subsequent versions..)
-A fix was also backported to Debian etch, as version 1.33.4. I recommend
-upgrading to one of these versions if your wiki can be edited by third
-parties.
-
-## Cross Site Request Forging
-
-Cross Site Request Forging could be used to construct a link that would
-change a logged-in user's password or other preferences if they clicked on
-the link. It could also be used to construct a link that would cause a wiki
-page to be modified by a logged-in user. ([[!cve CVE-2008-0165]])
-
-These holes were discovered on 10 April 2008 and fixed the same day with
-the release of ikiwiki 2.42. A fix was also backported to Debian etch, as
-version 1.33.5. I recommend upgrading to one of these versions.
-
-## Cleartext passwords
-
-Until version 2.48, ikiwiki stored passwords in cleartext in the `userdb`.
-That risks exposing all users' passwords if the file is somehow exposed. To
-pre-emptively guard against that, current versions of ikiwiki store password
-hashes (using Eksblowfish).
-
-If you use the [[plugins/passwordauth]] plugin, I recommend upgrading to
-ikiwiki 2.48, installing the [[!cpan Authen::Passphrase]] perl module, and running
-`ikiwiki-transition hashpassword` to replace all existing cleartext passwords
-with strong blowfish hashes.
-
-You might also consider changing to [[plugins/openid]], which does not
-require ikiwiki to deal with passwords at all, and does not involve users sending
-passwords in cleartext over the net to log in, either.
-
-## Empty password security hole
-
-This hole allowed ikiwiki to accept logins using empty passwords to openid
-accounts that didn't use a password. It was introduced in version 1.34, and
-fixed in version 2.48. The [bug](http://bugs.debian.org/483770) was
-discovered on 30 May 2008 and fixed the same day. ([[!cve CVE-2008-0169]])
-
-I recommend upgrading to 2.48 immediately if your wiki allows both password
-and openid logins.
-
-## Malformed UTF-8 DOS
-
-Feeding ikiwiki page sources containing certain forms of malformed UTF-8
-can cause it to crash. This can potentially be used for a denial of service
-attack.
-
-intrigeri discovered this problem on 12 Nov 2008, and a patch was put in place
-later that day, in version 2.70. The fix was backported to testing as version
-2.53.3, and to stable as version 1.33.7.
-
-## Insufficient blacklisting in teximg plugin
-
-Josh Triplett discovered on 28 Aug 2009 that the teximg plugin's
-blacklisting of insecure TeX commands was insufficient; it could be
-bypassed and used to read arbitrary files. This was fixed by
-enabling TeX configuration options that disallow unsafe TeX commands.
-The fix was released on 30 Aug 2009 in version 3.1415926, and was
-backported to stable in version 2.53.4. If you use the teximg plugin,
-I recommend upgrading. ([[!cve CVE-2009-2944]])
-
-## javascript insertion via svg uris
-
-Ivan Shmakov pointed out that the htmlscrubber allowed `data:image/*` urls,
-including `data:image/svg+xml`. But svg can contain javascript, so that is
-unsafe.
-
-This hole was discovered on 12 March 2010 and fixed the same day
-with the release of ikiwiki 3.20100312.
-A fix was also backported to Debian etch, as version 2.53.5. I recommend
-upgrading to one of these versions if your wiki can be edited by third
-parties.
-
-## javascript insertion via insufficient htmlscrubbing of comments
-
-Kevin Riggle noticed that it was not possible to configure
-`htmlscrubber_skip` to scrub comments while leaving unscrubbed the text
-of eg, blog posts. Confusingly, setting it to "* and !comment(*)" did not
-scrub comments.
-
-Additionally, it was discovered that comments' html was never scrubbed during
-preview or moderation of comments with such a configuration.
-
-These problems were discovered on 12 November 2010 and fixed the same
-hour with the release of ikiwiki 3.20101112. ([[!cve CVE-2010-1673]])
-
-## javascript insertion via insufficient checking in comments
-
-Dave B noticed that attempting to comment on an illegal page name could be
-used for an XSS attack.
-
-This hole was discovered on 22 Jan 2011 and fixed the same day with
-the release of ikiwiki 3.20110122. A fix was backported to Debian squeeze,
-as version 3.20100815.5. An upgrade is recommended for sites
-with the comments plugin enabled. ([[!cve CVE-2011-0428]])
-
-## possible javascript insertion via insufficient htmlscrubbing of alternate stylesheets
-
-Giuseppe Bilotta noticed that `meta stylesheet` directives allowed anyone
-who could upload a malicious stylesheet to a site to add it to a
-page as an alternate stylesheet, or to replace the default stylesheet.
-
-This hole was discovered on 28 Mar 2011 and fixed the same hour with
-the release of ikiwiki 3.20110328. A fix was backported to Debian squeeze,
-as version 3.20100815.6. An upgrade is recommended for sites that have
-untrusted committers, or have the attachments plugin enabled.
-([[!cve CVE-2011-1401]])
-
-## tty hijacking via ikiwiki-mass-rebuild
-
-Ludwig Nussel discovered a way for users to hijack root's tty when
-ikiwiki-mass-rebuild was run. Additionally, there was some potential
-for information disclosure via symlinks. ([[!cve CVE-2011-1408]])
-
-This hole was discovered on 8 June 2011 and fixed the same day with
-the release of ikiwiki 3.20110608. Note that the fix is dependent on
-a version of su that has a similar hole fixed. Version 4.1.5 of the shadow
-package contains the fixed su; [[!debbug 628843]] tracks fixing the hole in
-Debian. An upgrade is a must for any sites that have `ikiwiki-update-wikilist`
-installed suid (not the default), and whose admins run `ikiwiki-mass-rebuild`.
-
-## javascript insertion via meta tags
-
-Raúl Benencia discovered an additional XSS exposure in the meta plugin.
-([[!cve CVE-2012-0220]])
-
-This hole was discovered on 16 May 2012 and fixed the same day with
-the release of ikiwiki 3.20120516. A fix was backported to Debian squeeze,
-as version 3.20100815.9. An upgrade is recommended for all sites.
-
-## XSS via openid selector
-
-Raghav Bisht discovered this XSS in the openid selector. ([[!cve CVE-2015-2793]])
-
-The hole was reported on March 24th, a fix was developed on March 27th,
-and the fixed version 3.20150329 was released on the 29th. A fix was backported
-to Debian jessie as version 3.20141016.2 and to Debian wheezy as version
-3.20120629.2. An upgrade is recommended for sites using CGI and openid.
-
-## XSS via error messages
-
-CGI error messages did not escape HTML meta-characters, potentially
-allowing an attacker to carry out cross-site scripting by directing a
-user to a URL that would result in a crafted ikiwiki error message. This
-was discovered on 4 May by the ikiwiki developers, and the fixed version
-3.20160506 was released on 6 May. An upgrade is recommended for sites using
-the CGI.
-
-## ImageMagick CVE-2016-3714 ("ImageTragick")
-
-ikiwiki 3.20160506 attempts to mitigate [[!cve CVE-2016-3714]] and any
-future ImageMagick vulnerabilities that resemble it, by restricting the
-image formats that the [[ikiwiki/directive/img]] directive is willing to
-resize. An upgrade is recommended for sites where an untrusted user is
-able to attach images. Upgrading ImageMagick to a version where
-CVE-2016-3714 has been fixed is also recommended, but at the time of
-writing no such version is available.
diff -Nru ikiwiki-3.20141016.4+deb8u1/.pc/.quilt_patches ikiwiki-3.20141016.4/.pc/.quilt_patches
--- ikiwiki-3.20141016.4+deb8u1/.pc/.quilt_patches 2019-03-07 17:31:01.000000000 +1100
+++ ikiwiki-3.20141016.4/.pc/.quilt_patches 1970-01-01 10:00:00.000000000 +1000
@@ -1 +0,0 @@
-debian/patches
diff -Nru ikiwiki-3.20141016.4+deb8u1/.pc/.quilt_series ikiwiki-3.20141016.4/.pc/.quilt_series
--- ikiwiki-3.20141016.4+deb8u1/.pc/.quilt_series 2019-03-07 17:31:01.000000000 +1100
+++ ikiwiki-3.20141016.4/.pc/.quilt_series 1970-01-01 10:00:00.000000000 +1000
@@ -1 +0,0 @@
-series
diff -Nru ikiwiki-3.20141016.4+deb8u1/.pc/.version ikiwiki-3.20141016.4/.pc/.version
--- ikiwiki-3.20141016.4+deb8u1/.pc/.version 2019-03-07 17:31:01.000000000 +1100
+++ ikiwiki-3.20141016.4/.pc/.version 1970-01-01 10:00:00.000000000 +1000
@@ -1 +0,0 @@
-2
diff -Nru ikiwiki-3.20141016.4+deb8u1/t/aggregate-file.t ikiwiki-3.20141016.4/t/aggregate-file.t
--- ikiwiki-3.20141016.4+deb8u1/t/aggregate-file.t 2019-03-07 17:32:38.000000000 +1100
+++ ikiwiki-3.20141016.4/t/aggregate-file.t 1970-01-01 10:00:00.000000000 +1000
@@ -1,173 +0,0 @@
-#!/usr/bin/perl
-use utf8;
-use warnings;
-use strict;
-
-use Encode;
-use Test::More;
-
-BEGIN {
- plan(skip_all => "CGI not available")
- unless eval q{
- use CGI qw();
- 1;
- };
-
- plan(skip_all => "IPC::Run not available")
- unless eval q{
- use IPC::Run qw(run);
- 1;
- };
-
- use_ok('IkiWiki');
- use_ok('YAML::XS');
-}
-
-# We check for English error messages
-$ENV{LC_ALL} = 'C';
-
-use Cwd qw(getcwd);
-use Errno qw(ENOENT);
-
-my $installed = $ENV{INSTALLED_TESTS};
-
-my @command;
-if ($installed) {
- @command = qw(ikiwiki --plugin inline);
-}
-else {
- ok(! system("make -s ikiwiki.out"));
- @command = ("perl", "-I".getcwd."/blib/lib", './ikiwiki.out',
- '--underlaydir='.getcwd.'/underlays/basewiki',
- '--set', 'underlaydirbase='.getcwd.'/underlays',
- '--templatedir='.getcwd.'/templates');
-}
-
-sub write_old_file {
- my $name = shift;
- my $dir = shift;
- my $content = shift;
- writefile($name, $dir, $content);
- ok(utime(333333333, 333333333, "$dir/$name"));
-}
-
-sub write_setup_file {
- my %params = @_;
- my %setup = (
- wikiname => 'this is the name of my wiki',
- srcdir => getcwd.'/t/tmp/in',
- destdir => getcwd.'/t/tmp/out',
- url => 'http://example.com',
- cgiurl => 'http://example.com/cgi-bin/ikiwiki.cgi',
- cgi_wrapper => getcwd.'/t/tmp/ikiwiki.cgi',
- cgi_wrappermode => '0751',
- add_plugins => [qw(aggregate)],
- disable_plugins => [qw(emailauth openid passwordauth)],
- aggregate_webtrigger => 1,
- );
- if ($params{without_paranoia}) {
- $setup{libdirs} = [getcwd.'/t/noparanoia'];
- }
- unless ($installed) {
- $setup{ENV} = { 'PERL5LIB' => getcwd.'/blib/lib' };
- }
- writefile("test.setup", "t/tmp",
- "# IkiWiki::Setup::Yaml - YAML formatted setup file\n" .
- Dump(\%setup));
-}
-
-sub thoroughly_rebuild {
- ok(unlink("t/tmp/ikiwiki.cgi") || $!{ENOENT});
- ok(! system(@command, qw(--setup t/tmp/test.setup --rebuild --wrappers)));
-}
-
-sub run_cgi {
- my (%args) = @_;
- my ($in, $out);
- my $method = $args{method} || 'GET';
- my $environ = $args{environ} || {};
- my $params = $args{params} || { do => 'prefs' };
-
- my %defaults = (
- SCRIPT_NAME => '/cgi-bin/ikiwiki.cgi',
- HTTP_HOST => 'example.com',
- );
-
- my $cgi = CGI->new($args{params});
- my $query_string = $cgi->query_string();
- diag $query_string;
-
- if ($method eq 'POST') {
- $defaults{REQUEST_METHOD} = 'POST';
- $in = $query_string;
- $defaults{CONTENT_LENGTH} = length $in;
- } else {
- $defaults{REQUEST_METHOD} = 'GET';
- $defaults{QUERY_STRING} = $query_string;
- }
-
- my %envvars = (
- %defaults,
- %$environ,
- );
- run(["./t/tmp/ikiwiki.cgi"], \$in, \$out, init => sub {
- map {
- $ENV{$_} = $envvars{$_}
- } keys(%envvars);
- });
-
- return decode_utf8($out);
-}
-
-sub test {
- my $content;
-
- ok(! system(qw(rm -rf t/tmp)));
- ok(! system(qw(mkdir t/tmp)));
-
- write_old_file('aggregator.mdwn', 't/tmp/in',
- '[[!aggregate name="ssrf" url="file://'.getcwd.'/t/secret.rss"]]'
- .'[[!inline pages="internal(aggregator/*)"]]');
-
- write_setup_file();
- thoroughly_rebuild();
-
- $content = run_cgi(
- method => 'GET',
- params => {
- do => 'aggregate_webtrigger',
- },
- );
- unlike($content, qr{creating new page});
- unlike($content, qr{Secrets});
- ok(! -e 't/tmp/in/.ikiwiki/transient/aggregator/ssrf');
- ok(! -e 't/tmp/in/.ikiwiki/transient/aggregator/ssrf/Secrets_go_here._aggregated');
-
- thoroughly_rebuild();
- $content = readfile('t/tmp/out/aggregator/index.html');
- unlike($content, qr{Secrets});
-
- diag('Trying test again with LWPx::ParanoidAgent disabled');
-
- write_setup_file(without_paranoia => 1);
- thoroughly_rebuild();
-
- $content = run_cgi(
- method => 'GET',
- params => {
- do => 'aggregate_webtrigger',
- },
- );
- unlike($content, qr{creating new page});
- unlike($content, qr{Secrets});
- ok(! -e 't/tmp/in/.ikiwiki/transient/aggregator/ssrf');
- ok(! -e 't/tmp/in/.ikiwiki/transient/aggregator/ssrf/Secrets_go_here._aggregated');
-
- thoroughly_rebuild();
- $content = readfile('t/tmp/out/aggregator/index.html');
- unlike($content, qr{Secrets});
-}
-
-test();
-
-done_testing();
diff -Nru ikiwiki-3.20141016.4+deb8u1/t/noparanoia/LWPx/ParanoidAgent.pm ikiwiki-3.20141016.4/t/noparanoia/LWPx/ParanoidAgent.pm
--- ikiwiki-3.20141016.4+deb8u1/t/noparanoia/LWPx/ParanoidAgent.pm 2019-03-07 17:32:38.000000000 +1100
+++ ikiwiki-3.20141016.4/t/noparanoia/LWPx/ParanoidAgent.pm 1970-01-01 10:00:00.000000000 +1000
@@ -1,2 +0,0 @@
-# make import fail
-0;
diff -Nru ikiwiki-3.20141016.4+deb8u1/t/secret.rss ikiwiki-3.20141016.4/t/secret.rss
--- ikiwiki-3.20141016.4+deb8u1/t/secret.rss 2019-03-07 17:32:38.000000000 +1100
+++ ikiwiki-3.20141016.4/t/secret.rss 1970-01-01 10:00:00.000000000 +1000
@@ -1,11 +0,0 @@
-<?xml version="1.0"?>
-<rss version="2.0">
-<channel>
-<title>Secrets go here</title>
-<description>Secrets go here</description>
-<item>
- <title>Secrets go here</title>
- <description>Secrets go here</description>
-</item>
-</channel>
-</rss>
diff -Nru ikiwiki-3.20141016.4+deb8u1/t/useragent.t ikiwiki-3.20141016.4/t/useragent.t
--- ikiwiki-3.20141016.4+deb8u1/t/useragent.t 2019-03-07 17:32:54.000000000 +1100
+++ ikiwiki-3.20141016.4/t/useragent.t 1970-01-01 10:00:00.000000000 +1000
@@ -1,317 +0,0 @@
-#!/usr/bin/perl
-use warnings;
-use strict;
-use Test::More;
-
-my $have_paranoid_agent;
-BEGIN {
- plan(skip_all => 'LWP not available')
- unless eval q{
- use LWP qw(); 1;
- };
- use_ok("IkiWiki");
- $have_paranoid_agent = eval q{
- use LWPx::ParanoidAgent qw(); 1;
- };
-}
-
-eval { useragent(future_feature => 1); };
-ok($@, 'future features should cause useragent to fail');
-
-diag "==== No proxy ====";
-delete $ENV{http_proxy};
-delete $ENV{https_proxy};
-delete $ENV{no_proxy};
-delete $ENV{HTTPS_PROXY};
-delete $ENV{NO_PROXY};
-
-diag "---- Unspecified URL ----";
-my $ua = useragent(for_url => undef);
-SKIP: {
- skip 'paranoid agent not available', 1 unless $have_paranoid_agent;
- ok($ua->isa('LWPx::ParanoidAgent'), 'uses ParanoidAgent if possible');
-}
-is_deeply([sort @{$ua->protocols_allowed}], [sort qw(http https)]);
-is($ua->proxy('http'), undef, 'No http proxy');
-is($ua->proxy('https'), undef, 'No https proxy');
-
-diag "---- Specified URL ----";
-$ua = useragent(for_url => 'http://example.com');
-SKIP: {
- skip 'paranoid agent not available', 1 unless $have_paranoid_agent;
- ok($ua->isa('LWPx::ParanoidAgent'), 'uses ParanoidAgent if possible');
-}
-is_deeply([sort @{$ua->protocols_allowed}], [sort qw(http https)]);
-is($ua->proxy('http'), undef, 'No http proxy');
-is($ua->proxy('https'), undef, 'No https proxy');
-
-diag "==== Proxy for everything ====";
-$ENV{http_proxy} = 'http://proxy:8080';
-$ENV{https_proxy} = 'http://sproxy:8080';
-delete $ENV{no_proxy};
-delete $ENV{HTTPS_PROXY};
-delete $ENV{NO_PROXY};
-
-diag "---- Unspecified URL ----";
-$ua = useragent(for_url => undef);
-ok(! $ua->isa('LWPx::ParanoidAgent'), 'should use proxy instead of ParanoidAgent');
-is_deeply([sort @{$ua->protocols_allowed}], [sort qw(http https)]);
-is($ua->proxy('http'), 'http://proxy:8080', 'should use proxy');
-is($ua->proxy('https'), 'http://sproxy:8080', 'should use CONNECT proxy');
-$ua = useragent(for_url => 'http://example.com');
-ok(! $ua->isa('LWPx::ParanoidAgent'), 'should use proxy instead of ParanoidAgent');
-is_deeply([sort @{$ua->protocols_allowed}], [sort qw(http)]);
-is($ua->proxy('http'), 'http://proxy:8080', 'should use proxy');
-# We don't care what $ua->proxy('https') is, because it won't be used
-$ua = useragent(for_url => 'https://example.com');
-ok(! $ua->isa('LWPx::ParanoidAgent'), 'should use proxy instead of ParanoidAgent');
-is_deeply([sort @{$ua->protocols_allowed}], [sort qw(https)]);
-# We don't care what $ua->proxy('http') is, because it won't be used
-is($ua->proxy('https'), 'http://sproxy:8080', 'should use CONNECT proxy');
-
-diag "==== Selective proxy ====";
-$ENV{http_proxy} = 'http://proxy:8080';
-$ENV{https_proxy} = 'http://sproxy:8080';
-$ENV{no_proxy} = '*.example.net,example.com,.example.org';
-delete $ENV{HTTPS_PROXY};
-delete $ENV{NO_PROXY};
-
-diag "---- Unspecified URL ----";
-$ua = useragent(for_url => undef);
-ok(! $ua->isa('LWPx::ParanoidAgent'), 'should use proxy instead of ParanoidAgent');
-is_deeply([sort @{$ua->protocols_allowed}], [sort qw(http https)]);
-is($ua->proxy('http'), 'http://proxy:8080', 'should use proxy');
-is($ua->proxy('https'), 'http://sproxy:8080', 'should use CONNECT proxy');
-
-diag "---- Exact match for no_proxy ----";
-$ua = useragent(for_url => 'http://example.com');
-SKIP: {
- skip 'paranoid agent not available', 1 unless $have_paranoid_agent;
- ok($ua->isa('LWPx::ParanoidAgent'), 'uses ParanoidAgent if possible');
-}
-is_deeply([sort @{$ua->protocols_allowed}], [sort qw(http https)]);
-is($ua->proxy('http'), undef);
-is($ua->proxy('https'), undef);
-
-diag "---- Subdomain of exact domain in no_proxy ----";
-$ua = useragent(for_url => 'http://sub.example.com');
-ok(! $ua->isa('LWPx::ParanoidAgent'), 'should use proxy instead of ParanoidAgent');
-is_deeply([sort @{$ua->protocols_allowed}], [sort qw(http)]);
-is($ua->proxy('http'), 'http://proxy:8080', 'should use proxy');
-
-diag "---- example.net matches *.example.net ----";
-$ua = useragent(for_url => 'https://example.net');
-SKIP: {
- skip 'paranoid agent not available', 1 unless $have_paranoid_agent;
- ok($ua->isa('LWPx::ParanoidAgent'), 'uses ParanoidAgent if possible');
-}
-is_deeply([sort @{$ua->protocols_allowed}], [sort qw(http https)]);
-is($ua->proxy('http'), undef);
-is($ua->proxy('https'), undef);
-
-diag "---- sub.example.net matches *.example.net ----";
-$ua = useragent(for_url => 'https://sub.example.net');
-SKIP: {
- skip 'paranoid agent not available', 1 unless $have_paranoid_agent;
- ok($ua->isa('LWPx::ParanoidAgent'), 'uses ParanoidAgent if possible');
-}
-is_deeply([sort @{$ua->protocols_allowed}], [sort qw(http https)]);
-is($ua->proxy('http'), undef);
-is($ua->proxy('https'), undef);
-
-diag "---- badexample.net does not match *.example.net ----";
-$ua = useragent(for_url => 'https://badexample.net');
-ok(! $ua->isa('LWPx::ParanoidAgent'), 'should use proxy instead of ParanoidAgent');
-is_deeply([sort @{$ua->protocols_allowed}], [sort qw(https)]);
-is($ua->proxy('https'), 'http://sproxy:8080', 'should use proxy');
-
-diag "---- example.org matches .example.org ----";
-$ua = useragent(for_url => 'https://example.org');
-SKIP: {
- skip 'paranoid agent not available', 1 unless $have_paranoid_agent;
- ok($ua->isa('LWPx::ParanoidAgent'), 'uses ParanoidAgent if possible');
-}
-is_deeply([sort @{$ua->protocols_allowed}], [sort qw(http https)]);
-is($ua->proxy('http'), undef);
-is($ua->proxy('https'), undef);
-
-diag "---- sub.example.org matches .example.org ----";
-$ua = useragent(for_url => 'https://sub.example.org');
-SKIP: {
- skip 'paranoid agent not available', 1 unless $have_paranoid_agent;
- ok($ua->isa('LWPx::ParanoidAgent'), 'uses ParanoidAgent if possible');
-}
-is_deeply([sort @{$ua->protocols_allowed}], [sort qw(http https)]);
-is($ua->proxy('http'), undef);
-is($ua->proxy('https'), undef);
-
-diag "---- badexample.org does not match .example.org ----";
-$ua = useragent(for_url => 'https://badexample.org');
-ok(! $ua->isa('LWPx::ParanoidAgent'), 'should use proxy instead of ParanoidAgent');
-is_deeply([sort @{$ua->protocols_allowed}], [sort qw(https)]);
-is($ua->proxy('https'), 'http://sproxy:8080', 'should use proxy');
-
-diag "==== Selective proxy (alternate variables) ====";
-$ENV{http_proxy} = 'http://proxy:8080';
-delete $ENV{https_proxy};
-$ENV{HTTPS_PROXY} = 'http://sproxy:8080';
-delete $ENV{no_proxy};
-$ENV{NO_PROXY} = '*.example.net,example.com,.example.org';
-
-diag "---- Unspecified URL ----";
-$ua = useragent(for_url => undef);
-ok(! $ua->isa('LWPx::ParanoidAgent'), 'should use proxy instead of ParanoidAgent');
-is_deeply([sort @{$ua->protocols_allowed}], [sort qw(http https)]);
-is($ua->proxy('http'), 'http://proxy:8080', 'should use proxy');
-is($ua->proxy('https'), 'http://sproxy:8080', 'should use CONNECT proxy');
-
-diag "---- Exact match for no_proxy ----";
-$ua = useragent(for_url => 'http://example.com');
-SKIP: {
- skip 'paranoid agent not available', 1 unless $have_paranoid_agent;
- ok($ua->isa('LWPx::ParanoidAgent'), 'uses ParanoidAgent if possible');
-}
-is_deeply([sort @{$ua->protocols_allowed}], [sort qw(http https)]);
-is($ua->proxy('http'), undef);
-is($ua->proxy('https'), undef);
-
-diag "---- Subdomain of exact domain in no_proxy ----";
-$ua = useragent(for_url => 'http://sub.example.com');
-ok(! $ua->isa('LWPx::ParanoidAgent'), 'should use proxy instead of ParanoidAgent');
-is_deeply([sort @{$ua->protocols_allowed}], [sort qw(http)]);
-is($ua->proxy('http'), 'http://proxy:8080', 'should use proxy');
-
-diag "---- example.net matches *.example.net ----";
-$ua = useragent(for_url => 'https://example.net');
-SKIP: {
- skip 'paranoid agent not available', 1 unless $have_paranoid_agent;
- ok($ua->isa('LWPx::ParanoidAgent'), 'uses ParanoidAgent if possible');
-}
-is_deeply([sort @{$ua->protocols_allowed}], [sort qw(http https)]);
-is($ua->proxy('http'), undef);
-is($ua->proxy('https'), undef);
-
-diag "---- sub.example.net matches *.example.net ----";
-$ua = useragent(for_url => 'https://sub.example.net');
-SKIP: {
- skip 'paranoid agent not available', 1 unless $have_paranoid_agent;
- ok($ua->isa('LWPx::ParanoidAgent'), 'uses ParanoidAgent if possible');
-}
-is_deeply([sort @{$ua->protocols_allowed}], [sort qw(http https)]);
-is($ua->proxy('http'), undef);
-is($ua->proxy('https'), undef);
-
-diag "---- badexample.net does not match *.example.net ----";
-$ua = useragent(for_url => 'https://badexample.net');
-ok(! $ua->isa('LWPx::ParanoidAgent'), 'should use proxy instead of ParanoidAgent');
-is_deeply([sort @{$ua->protocols_allowed}], [sort qw(https)]);
-is($ua->proxy('https'), 'http://sproxy:8080', 'should use proxy');
-
-diag "---- example.org matches .example.org ----";
-$ua = useragent(for_url => 'https://example.org');
-SKIP: {
- skip 'paranoid agent not available', 1 unless $have_paranoid_agent;
- ok($ua->isa('LWPx::ParanoidAgent'), 'uses ParanoidAgent if possible');
-}
-is_deeply([sort @{$ua->protocols_allowed}], [sort qw(http https)]);
-is($ua->proxy('http'), undef);
-is($ua->proxy('https'), undef);
-
-diag "---- sub.example.org matches .example.org ----";
-$ua = useragent(for_url => 'https://sub.example.org');
-SKIP: {
- skip 'paranoid agent not available', 1 unless $have_paranoid_agent;
- ok($ua->isa('LWPx::ParanoidAgent'), 'uses ParanoidAgent if possible');
-}
-is_deeply([sort @{$ua->protocols_allowed}], [sort qw(http https)]);
-is($ua->proxy('http'), undef);
-is($ua->proxy('https'), undef);
-
-diag "---- badexample.org does not match .example.org ----";
-$ua = useragent(for_url => 'https://badexample.org');
-ok(! $ua->isa('LWPx::ParanoidAgent'), 'should use proxy instead of ParanoidAgent');
-is_deeply([sort @{$ua->protocols_allowed}], [sort qw(https)]);
-is($ua->proxy('https'), 'http://sproxy:8080', 'should use proxy');
-
-diag "==== Selective proxy (many variables) ====";
-$ENV{http_proxy} = 'http://proxy:8080';
-$ENV{https_proxy} = 'http://sproxy:8080';
-# This one should be ignored in favour of https_proxy
-$ENV{HTTPS_PROXY} = 'http://not.preferred.proxy:3128';
-# These two should be merged
-$ENV{no_proxy} = '*.example.net,example.com';
-$ENV{NO_PROXY} = '.example.org';
-
-diag "---- Unspecified URL ----";
-$ua = useragent(for_url => undef);
-ok(! $ua->isa('LWPx::ParanoidAgent'), 'should use proxy instead of ParanoidAgent');
-is_deeply([sort @{$ua->protocols_allowed}], [sort qw(http https)]);
-is($ua->proxy('http'), 'http://proxy:8080', 'should use proxy');
-is($ua->proxy('https'), 'http://sproxy:8080', 'should use CONNECT proxy');
-
-diag "---- Exact match for no_proxy ----";
-$ua = useragent(for_url => 'http://example.com');
-SKIP: {
- skip 'paranoid agent not available', 1 unless $have_paranoid_agent;
- ok($ua->isa('LWPx::ParanoidAgent'), 'uses ParanoidAgent if possible');
-}
-is_deeply([sort @{$ua->protocols_allowed}], [sort qw(http https)]);
-is($ua->proxy('http'), undef);
-is($ua->proxy('https'), undef);
-
-diag "---- Subdomain of exact domain in no_proxy ----";
-$ua = useragent(for_url => 'http://sub.example.com');
-ok(! $ua->isa('LWPx::ParanoidAgent'), 'should use proxy instead of ParanoidAgent');
-is_deeply([sort @{$ua->protocols_allowed}], [sort qw(http)]);
-is($ua->proxy('http'), 'http://proxy:8080', 'should use proxy');
-
-diag "---- example.net matches *.example.net ----";
-$ua = useragent(for_url => 'https://example.net');
-SKIP: {
- skip 'paranoid agent not available', 1 unless $have_paranoid_agent;
- ok($ua->isa('LWPx::ParanoidAgent'), 'uses ParanoidAgent if possible');
-}
-is_deeply([sort @{$ua->protocols_allowed}], [sort qw(http https)]);
-is($ua->proxy('http'), undef);
-is($ua->proxy('https'), undef);
-
-diag "---- sub.example.net matches *.example.net ----";
-$ua = useragent(for_url => 'https://sub.example.net');
-SKIP: {
- skip 'paranoid agent not available', 1 unless $have_paranoid_agent;
- ok($ua->isa('LWPx::ParanoidAgent'), 'uses ParanoidAgent if possible');
-}
-is_deeply([sort @{$ua->protocols_allowed}], [sort qw(http https)]);
-is($ua->proxy('http'), undef);
-is($ua->proxy('https'), undef);
-
-diag "---- badexample.net does not match *.example.net ----";
-$ua = useragent(for_url => 'https://badexample.net');
-ok(! $ua->isa('LWPx::ParanoidAgent'), 'should use proxy instead of ParanoidAgent');
-is_deeply([sort @{$ua->protocols_allowed}], [sort qw(https)]);
-is($ua->proxy('https'), 'http://sproxy:8080', 'should use proxy');
-
-diag "==== One but not the other ====\n";
-$ENV{http_proxy} = 'http://proxy:8080';
-delete $ENV{https_proxy};
-delete $ENV{HTTPS_PROXY};
-delete $ENV{no_proxy};
-delete $ENV{NO_PROXY};
-$ua = useragent(for_url => undef);
-ok(! $ua->isa('LWPx::ParanoidAgent'), 'should use proxy instead of ParanoidAgent');
-is_deeply([sort @{$ua->protocols_allowed}], [sort qw(http https)]);
-is($ua->proxy('http'), 'http://proxy:8080', 'should use proxy');
-is($ua->proxy('https'), 'http://proxy:8080', 'should use proxy');
-
-delete $ENV{http_proxy};
-$ENV{https_proxy} = 'http://sproxy:8080';
-delete $ENV{HTTPS_PROXY};
-delete $ENV{no_proxy};
-delete $ENV{NO_PROXY};
-$ua = useragent(for_url => undef);
-ok(! $ua->isa('LWPx::ParanoidAgent'), 'should use proxy instead of ParanoidAgent');
-is_deeply([sort @{$ua->protocols_allowed}], [sort qw(http https)]);
-is($ua->proxy('http'), 'http://sproxy:8080', 'should use proxy');
-is($ua->proxy('https'), 'http://sproxy:8080', 'should use proxy');
-
-done_testing;