Hello Salvatore and Stable Release Managers, Am Dienstag, den 05.07.2016, 15:44 +0200 schrieb Salvatore Bonaccorso: > > Package: release.debian.org > > Severity: normal > > Tags: jessie > > User: release.debian.org@packages.debian.org > > Usertags: pu > > wget in stable is affected by CVE-2016-4971, an issue where wget ... > JFTR, if actually Noël Köthe <noel@debian.org> would like to do the > upload himself, I can happily hand over. I wasn't aware of this release bug report (sorry). Thanks for the CC:. DSA informed me that there will be no DSA for this CVE and would like to see this fixed by a jessie point release. https://security-tracker.debian.org/tracker/CVE-2016-4971 (at the end "no DSA"). Attached is the slightly modified debdiff based on the backported patch from Salvatore. I tested the resulting wget 1.16-1+deb8u1 package on an amd64 jessie machine. If the Stable Release Managers accept the changes, I will upload the package. Regards Noel
diff -Nru wget-1.16/debian/changelog wget-1.16/debian/changelog
--- wget-1.16/debian/changelog 2014-10-27 11:41:18.000000000 +0100
+++ wget-1.16/debian/changelog 2016-07-05 16:21:21.000000000 +0200
@@ -1,3 +1,15 @@
+wget (1.16-1+deb8u1) jessie; urgency=medium
+
+ * added patch for CVE-2016-4971. closes: #827003, #829130
+ By default, on server redirects to a FTP resource, use the original
+ URL to get the local file name. Close CVE-2016-4971. This
+ introduces a backward-incompatibility for HTTP->FTP redirects and
+ any script that relies on the old behaviour must use
+ --trust-server-names.
+ * debian/rules fixed clean target
+
+ -- Noël Köthe <noel@debian.org> Mon, 04 Jul 2016 18:37:47 +0200
+
wget (1.16-1) unstable; urgency=medium
* new upstream release from 2014-10-27
diff -Nru wget-1.16/debian/patches/series wget-1.16/debian/patches/series
--- wget-1.16/debian/patches/series 2014-10-16 11:32:22.000000000 +0200
+++ wget-1.16/debian/patches/series 2016-06-30 17:21:45.000000000 +0200
@@ -1,4 +1,5 @@
wget-doc-remove-usr-local-in-sample.wgetrc
wget-doc-remove-usr-local-in-wget.texi
wget-passive_ftp-default
+wget-CVE-2016-4971.patch
diff -Nru wget-1.16/debian/patches/wget-CVE-2016-4971.patch wget-1.16/debian/patches/wget-CVE-2016-4971.patch
--- wget-1.16/debian/patches/wget-CVE-2016-4971.patch 1970-01-01 01:00:00.000000000 +0100
+++ wget-1.16/debian/patches/wget-CVE-2016-4971.patch 2016-07-05 16:09:10.000000000 +0200
@@ -0,0 +1,270 @@
+Description: ftp: understand --trust-server-names on a HTTP->FTP redirect
+ If not --trust-server-names is used, FTP will also get the destination
+ file name from the original url specified by the user instead of the
+ redirected url. Closes CVE-2016-4971.
+Origin: backport, http://git.savannah.gnu.org/cgit/wget.git/commit/?id=e996e322ffd42aaa051602da182d03178d0f13e1
+Bug-Debian: https://bugs.debian.org/827003
+Forwarded: not-needed
+Author: Giuseppe Scrivano <gscrivan@redhat.com>
+Reviewed-by: Salvatore Bonaccorso <carnil@debian.org>
+Last-Update: 2016-06-30
+Applied-Upstream: 1.18
+---
+
+--- a/src/ftp.c
++++ b/src/ftp.c
+@@ -235,14 +235,15 @@ print_length (wgint size, wgint start, b
+ logputs (LOG_VERBOSE, !authoritative ? _(" (unauthoritative)\n") : "\n");
+ }
+
+-static uerr_t ftp_get_listing (struct url *, ccon *, struct fileinfo **);
++static uerr_t ftp_get_listing (struct url *, struct url *, ccon *, struct fileinfo **);
+
+ /* Retrieves a file with denoted parameters through opening an FTP
+ connection to the server. It always closes the data connection,
+ and closes the control connection in case of error. If warc_tmp
+ is non-NULL, the downloaded data will be written there as well. */
+ static uerr_t
+-getftp (struct url *u, wgint passed_expected_bytes, wgint *qtyread,
++getftp (struct url *u, struct url *original_url,
++ wgint passed_expected_bytes, wgint *qtyread,
+ wgint restval, ccon *con, int count, wgint *last_expected_bytes,
+ FILE *warc_tmp)
+ {
+@@ -992,7 +993,7 @@ Error in server response, closing contro
+ {
+ bool exists = false;
+ struct fileinfo *f;
+- uerr_t _res = ftp_get_listing (u, con, &f);
++ uerr_t _res = ftp_get_listing (u, original_url, con, &f);
+ /* Set the DO_RETR command flag again, because it gets unset when
+ calling ftp_get_listing() and would otherwise cause an assertion
+ failure earlier on when this function gets repeatedly called
+@@ -1536,7 +1537,8 @@ Error in server response, closing contro
+ This loop either gets commands from con, or (if ON_YOUR_OWN is
+ set), makes them up to retrieve the file given by the URL. */
+ static uerr_t
+-ftp_loop_internal (struct url *u, struct fileinfo *f, ccon *con, char **local_file)
++ftp_loop_internal (struct url *u, struct url *original_url, struct fileinfo *f,
++ ccon *con, char **local_file)
+ {
+ int count, orig_lp;
+ wgint restval, len = 0, qtyread = 0;
+@@ -1560,7 +1562,7 @@ ftp_loop_internal (struct url *u, struct
+ else
+ {
+ /* URL-derived file. Consider "-O file" name. */
+- con->target = url_file_name (u, NULL);
++ con->target = url_file_name (opt.trustservernames || !original_url ? u : original_url, NULL);
+ if (!opt.output_document)
+ locf = con->target;
+ else
+@@ -1676,8 +1678,8 @@ ftp_loop_internal (struct url *u, struct
+
+ /* If we are working on a WARC record, getftp should also write
+ to the warc_tmp file. */
+- err = getftp (u, len, &qtyread, restval, con, count, &last_expected_bytes,
+- warc_tmp);
++ err = getftp (u, original_url, len, &qtyread, restval, con, count,
++ &last_expected_bytes, warc_tmp);
+
+ if (con->csock == -1)
+ con->st &= ~DONE_CWD;
+@@ -1830,7 +1832,8 @@ Removing file due to --delete-after in f
+ /* Return the directory listing in a reusable format. The directory
+ is specifed in u->dir. */
+ static uerr_t
+-ftp_get_listing (struct url *u, ccon *con, struct fileinfo **f)
++ftp_get_listing (struct url *u, struct url *original_url, ccon *con,
++ struct fileinfo **f)
+ {
+ uerr_t err;
+ char *uf; /* url file name */
+@@ -1851,7 +1854,7 @@ ftp_get_listing (struct url *u, ccon *co
+
+ con->target = xstrdup (lf);
+ xfree (lf);
+- err = ftp_loop_internal (u, NULL, con, NULL);
++ err = ftp_loop_internal (u, original_url, NULL, con, NULL);
+ lf = xstrdup (con->target);
+ xfree (con->target);
+ con->target = old_target;
+@@ -1874,8 +1877,9 @@ ftp_get_listing (struct url *u, ccon *co
+ return err;
+ }
+
+-static uerr_t ftp_retrieve_dirs (struct url *, struct fileinfo *, ccon *);
+-static uerr_t ftp_retrieve_glob (struct url *, ccon *, int);
++static uerr_t ftp_retrieve_dirs (struct url *, struct url *,
++ struct fileinfo *, ccon *);
++static uerr_t ftp_retrieve_glob (struct url *, struct url *, ccon *, int);
+ static struct fileinfo *delelement (struct fileinfo *, struct fileinfo **);
+ static void freefileinfo (struct fileinfo *f);
+
+@@ -1887,7 +1891,8 @@ static void freefileinfo (struct fileinf
+ If opt.recursive is set, after all files have been retrieved,
+ ftp_retrieve_dirs will be called to retrieve the directories. */
+ static uerr_t
+-ftp_retrieve_list (struct url *u, struct fileinfo *f, ccon *con)
++ftp_retrieve_list (struct url *u, struct url *original_url,
++ struct fileinfo *f, ccon *con)
+ {
+ static int depth = 0;
+ uerr_t err;
+@@ -2046,7 +2051,9 @@ Already have correct symlink %s -> %s\n\
+ else /* opt.retr_symlinks */
+ {
+ if (dlthis)
+- err = ftp_loop_internal (u, f, con, NULL);
++ {
++ err = ftp_loop_internal (u, original_url, f, con, NULL);
++ }
+ } /* opt.retr_symlinks */
+ break;
+ case FT_DIRECTORY:
+@@ -2057,7 +2064,9 @@ Already have correct symlink %s -> %s\n\
+ case FT_PLAINFILE:
+ /* Call the retrieve loop. */
+ if (dlthis)
+- err = ftp_loop_internal (u, f, con, NULL);
++ {
++ err = ftp_loop_internal (u, original_url, f, con, NULL);
++ }
+ break;
+ case FT_UNKNOWN:
+ logprintf (LOG_NOTQUIET, _("%s: unknown/unsupported file type.\n"),
+@@ -2122,7 +2131,7 @@ Already have correct symlink %s -> %s\n\
+ /* We do not want to call ftp_retrieve_dirs here */
+ if (opt.recursive &&
+ !(opt.reclevel != INFINITE_RECURSION && depth >= opt.reclevel))
+- err = ftp_retrieve_dirs (u, orig, con);
++ err = ftp_retrieve_dirs (u, original_url, orig, con);
+ else if (opt.recursive)
+ DEBUGP ((_("Will not retrieve dirs since depth is %d (max %d).\n"),
+ depth, opt.reclevel));
+@@ -2135,7 +2144,8 @@ Already have correct symlink %s -> %s\n\
+ ftp_retrieve_glob on each directory entry. The function knows
+ about excluded directories. */
+ static uerr_t
+-ftp_retrieve_dirs (struct url *u, struct fileinfo *f, ccon *con)
++ftp_retrieve_dirs (struct url *u, struct url *original_url,
++ struct fileinfo *f, ccon *con)
+ {
+ char *container = NULL;
+ int container_size = 0;
+@@ -2185,7 +2195,7 @@ Not descending to %s as it is excluded/n
+ odir = xstrdup (u->dir); /* because url_set_dir will free
+ u->dir. */
+ url_set_dir (u, newdir);
+- ftp_retrieve_glob (u, con, GLOB_GETALL);
++ ftp_retrieve_glob (u, original_url, con, GLOB_GETALL);
+ url_set_dir (u, odir);
+ xfree (odir);
+
+@@ -2244,14 +2254,15 @@ is_invalid_entry (struct fileinfo *f)
+ GLOB_GLOBALL, use globbing; if it's GLOB_GETALL, download the whole
+ directory. */
+ static uerr_t
+-ftp_retrieve_glob (struct url *u, ccon *con, int action)
++ftp_retrieve_glob (struct url *u, struct url *original_url,
++ ccon *con, int action)
+ {
+ struct fileinfo *f, *start;
+ uerr_t res;
+
+ con->cmd |= LEAVE_PENDING;
+
+- res = ftp_get_listing (u, con, &start);
++ res = ftp_get_listing (u, original_url, con, &start);
+ if (res != RETROK)
+ return res;
+ /* First: weed out that do not conform the global rules given in
+@@ -2347,7 +2358,7 @@ ftp_retrieve_glob (struct url *u, ccon *
+ if (start)
+ {
+ /* Just get everything. */
+- res = ftp_retrieve_list (u, start, con);
++ res = ftp_retrieve_list (u, original_url, start, con);
+ }
+ else
+ {
+@@ -2363,7 +2374,7 @@ ftp_retrieve_glob (struct url *u, ccon *
+ {
+ /* Let's try retrieving it anyway. */
+ con->st |= ON_YOUR_OWN;
+- res = ftp_loop_internal (u, NULL, con, NULL);
++ res = ftp_loop_internal (u, original_url, NULL, con, NULL);
+ return res;
+ }
+
+@@ -2383,8 +2394,8 @@ ftp_retrieve_glob (struct url *u, ccon *
+ of URL. Inherently, its capabilities are limited on what can be
+ encoded into a URL. */
+ uerr_t
+-ftp_loop (struct url *u, char **local_file, int *dt, struct url *proxy,
+- bool recursive, bool glob)
++ftp_loop (struct url *u, struct url *original_url, char **local_file, int *dt,
++ struct url *proxy, bool recursive, bool glob)
+ {
+ ccon con; /* FTP connection */
+ uerr_t res;
+@@ -2405,16 +2416,17 @@ ftp_loop (struct url *u, char **local_fi
+ if (!*u->file && !recursive)
+ {
+ struct fileinfo *f;
+- res = ftp_get_listing (u, &con, &f);
++ res = ftp_get_listing (u, original_url, &con, &f);
+
+ if (res == RETROK)
+ {
+ if (opt.htmlify && !opt.spider)
+ {
++ struct url *url_file = opt.trustservernames ? u : original_url;
+ char *filename = (opt.output_document
+ ? xstrdup (opt.output_document)
+ : (con.target ? xstrdup (con.target)
+- : url_file_name (u, NULL)));
++ : url_file_name (url_file, NULL)));
+ res = ftp_index (filename, u, f);
+ if (res == FTPOK && opt.verbose)
+ {
+@@ -2459,11 +2471,13 @@ ftp_loop (struct url *u, char **local_fi
+ /* ftp_retrieve_glob is a catch-all function that gets called
+ if we need globbing, time-stamping, recursion or preserve
+ permissions. Its third argument is just what we really need. */
+- res = ftp_retrieve_glob (u, &con,
++ res = ftp_retrieve_glob (u, original_url, &con,
+ ispattern ? GLOB_GLOBALL : GLOB_GETONE);
+ }
+ else
+- res = ftp_loop_internal (u, NULL, &con, local_file);
++ {
++ res = ftp_loop_internal (u, original_url, NULL, &con, local_file);
++ }
+ }
+ if (res == FTPOK)
+ res = RETROK;
+--- a/src/ftp.h
++++ b/src/ftp.h
+@@ -152,7 +152,8 @@ enum wget_ftp_fstatus
+ };
+
+ struct fileinfo *ftp_parse_ls (const char *, const enum stype);
+-uerr_t ftp_loop (struct url *, char **, int *, struct url *, bool, bool);
++uerr_t ftp_loop (struct url *, struct url *, char **, int *, struct url *,
++ bool, bool);
+
+ uerr_t ftp_index (const char *, struct url *, struct fileinfo *);
+
+--- a/src/retr.c
++++ b/src/retr.c
+@@ -803,7 +803,8 @@ retrieve_url (struct url * orig_parsed,
+ if (redirection_count)
+ oldrec = glob = false;
+
+- result = ftp_loop (u, &local_file, dt, proxy_url, recursive, glob);
++ result = ftp_loop (u, orig_parsed, &local_file, dt, proxy_url,
++ recursive, glob);
+ recursive = oldrec;
+
+ /* There is a possibility of having HTTP being redirected to
diff -Nru wget-1.16/debian/rules wget-1.16/debian/rules
--- wget-1.16/debian/rules 2013-11-08 12:58:33.000000000 +0100
+++ wget-1.16/debian/rules 2016-07-05 16:09:56.000000000 +0200
@@ -61,12 +61,12 @@
doc/sample.wgetrc.munged_for_texi_inclusion
rm -f config.guess config.sub
- dh_prep
+ dh_clean
install: build
dh_testdir
dh_testroot
- dh_clean -k
+ dh_prep
dh_installdirs
# Add here commands to install the package into debian/wget.
diff -Nru wget-1.16/debian/wget.debhelper.log wget-1.16/debian/wget.debhelper.log
--- wget-1.16/debian/wget.debhelper.log 2014-10-27 11:43:02.000000000 +0100
+++ wget-1.16/debian/wget.debhelper.log 1970-01-01 01:00:00.000000000 +0100
@@ -1,6 +0,0 @@
-dh_prep
-dh_prep
-dh_prep
-dh_prep
-dh_prep
-dh_prep
Attachment:
signature.asc
Description: This is a digitally signed message part