Script 'mail_helper' called by obssrc
Hello community,

Here is the log from the commit of package wget for openSUSE:Factory, checked in at 2021-06-05 23:30:19
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Comparing /work/SRC/openSUSE:Factory/wget (Old)
 and      /work/SRC/openSUSE:Factory/.wget.new.1898 (New)
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++

Package is "wget"

Sat Jun  5 23:30:19 2021 rev:64 rq:896315 version:1.21.1

Changes:
--------
--- /work/SRC/openSUSE:Factory/wget/wget.changes        2021-02-04 20:21:45.274614362 +0100
+++ /work/SRC/openSUSE:Factory/.wget.new.1898/wget.changes      2021-06-05 23:30:38.596345233 +0200
@@ -1,0 +2,19 @@
+Mon May 10 13:15:35 UTC 2021 - Josef Möllers <[email protected]>
+
+- When running recursively, wget verifies the length of the whole
+  URL when saving the files. This can make it overwrite files with
+  truncated names, emitting the "The name is too long, ... trying to
+  shorten" messages. The patch moves the length check into a
+  separate function and calls it from append_dir_structure() for each
+  path element.
+  [bsc#1181173, 0001-src-main.c-Introduce-truncate_filename-option.patch]
+
+-------------------------------------------------------------------
+Mon May 10 13:13:14 UTC 2021 - Josef Möllers <[email protected]>
+
+- If a request for an http URL is redirected to a different site (the
+  hostname parts of the URLs differ), any user-supplied "Authorization"
+  and "Cookie" header entries are discarded.
+  [bsc#1175551, wget-do-not-propagate-credentials.patch]
+
+-------------------------------------------------------------------

New:
----
  0001-src-main.c-Introduce-truncate_filename-option.patch
  wget-do-not-propagate-credentials.patch

++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++

Other differences:
------------------
++++++ wget.spec ++++++
--- /var/tmp/diff_new_pack.bjklPV/_old  2021-06-05 23:30:39.164346220 +0200
+++ /var/tmp/diff_new_pack.bjklPV/_new  2021-06-05 23:30:39.168346228 +0200
@@ -34,6 +34,8 @@
 Patch7:         wget-fix-pod-syntax.diff
 Patch8:         wget-errno-clobber.patch
 Patch9:         remove-env-from-shebang.patch
+Patch10:        wget-do-not-propagate-credentials.patch
+Patch11:        0001-src-main.c-Introduce-truncate_filename-option.patch
 BuildRequires:  automake
 BuildRequires:  gpgme-devel >= 0.4.2
 BuildRequires:  libcares-devel
@@ -79,6 +81,8 @@
 %patch7 -p1
 %patch8 -p1
 %patch9 -p1
+%patch10 -p1
+%patch11 -p1
 
 %build
 %if 0%{?suse_version} > 1110

++++++ 0001-src-main.c-Introduce-truncate_filename-option.patch ++++++
Index: wget-1.21.1/src/url.c
===================================================================
--- wget-1.21.1.orig/src/url.c
+++ wget-1.21.1/src/url.c
@@ -1652,6 +1652,50 @@ convert_fname (char *fname)
 }
 #endif
 
+/* Check if the length of path element is acceptable.
+   If it's longer than OS-defined maximum, truncate it. */
+static void
+possibly_truncate_pathel (char *pathel) {
+  size_t len = strlen (pathel);
+  size_t max_length;
+
+#ifdef WINDOWS
+  if (MAX_PATH > (len + CHOMP_BUFFER + 2))
+    {
+      max_length = MAX_PATH - (len + CHOMP_BUFFER + 2);
+      /* FIXME: In Windows a filename is usually limited to 255 characters.
+      To really be accurate you could call GetVolumeInformation() to get
+      lpMaximumComponentLength
+
+      Only FAT16 actually uses the 8.3 standard; this shouldn't be worrisome.
+      */
+      if (max_length > 255)
+        {
+          max_length = 255;
+        }
+    }
+  else
+    {
+      max_length = 0;
+    }
+#else
+  max_length = get_max_length (pathel, len, _PC_NAME_MAX) - CHOMP_BUFFER;
+#endif
+  if (max_length > 0 && len > max_length)
+    {
+      logprintf (LOG_NOTQUIET, "The name is too long, %lu chars total.\n",
+          (unsigned long) len);
+      logprintf (LOG_NOTQUIET, "Trying to shorten...\n");
+
+      /* Truncate path element. */
+      pathel[max_length] = '\0';
+
+      logprintf (LOG_NOTQUIET, "New name is %s.\n", pathel);
+    }
+
+  return;
+}
+
 /* Append to DEST the directory structure that corresponds the
    directory part of URL's path.  For example, if the URL is
    http://server/dir1/dir2/file, this appends "/dir1/dir2".
@@ -1686,7 +1730,11 @@ append_dir_structure (const struct url *
 
       if (dest->tail)
         append_char ('/', dest);
+
+      *next = '\0';   /* temporarily isolate the next element */
+      possibly_truncate_pathel(pathel);
       append_uri_pathel (pathel, next, true, dest);
+      *next = '/';
     }
 }
 
@@ -1796,41 +1844,8 @@ url_file_name (const struct url *u, char
   temp_fnres.size = 0;
   temp_fnres.tail = 0;
   append_string (fname, &temp_fnres);
-  xfree (fname);
-
-  /* Check that the length of the file name is acceptable. */
-#ifdef WINDOWS
-  if (MAX_PATH > (fnres.tail + CHOMP_BUFFER + 2))
-    {
-      max_length = MAX_PATH - (fnres.tail + CHOMP_BUFFER + 2);
-      /* FIXME: In Windows a filename is usually limited to 255 characters.
-      To really be accurate you could call GetVolumeInformation() to get
-      lpMaximumComponentLength
-      */
-      if (max_length > 255)
-        {
-          max_length = 255;
-        }
-    }
-  else
-    {
-      max_length = 0;
-    }
-#else
-  max_length = get_max_length (fnres.base, fnres.tail, _PC_NAME_MAX) - CHOMP_BUFFER;
-#endif
-  if (max_length > 0 && strlen (temp_fnres.base) > max_length)
-    {
-      logprintf (LOG_NOTQUIET, "The name is too long, %lu chars total.\n",
-          (unsigned long) strlen (temp_fnres.base));
-      logprintf (LOG_NOTQUIET, "Trying to shorten...\n");
-
-      /* Shorten the file name. */
-      temp_fnres.base[max_length] = '\0';
-
-      logprintf (LOG_NOTQUIET, "New name is %s.\n", temp_fnres.base);
-    }
 
+  xfree (fname);
   xfree (fname_len_check);
 
   /* The filename has already been 'cleaned' by append_uri_pathel() above.  So,
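
For illustration, here is a minimal standalone sketch of the per-element approach the patch takes: instead of checking the length of the assembled file name once, every path component is checked and, if necessary, shortened on its own. The DEMO_NAME_MAX limit, the helper name and the sample path are invented for this sketch; the actual patch derives the limit from _PC_NAME_MAX / MAX_PATH and CHOMP_BUFFER as shown above.

/* truncate_demo.c -- standalone sketch, not part of wget. */
#include <stdio.h>
#include <string.h>

#define DEMO_NAME_MAX 16   /* artificial per-element limit for the demo */

/* Shorten a single path element in place if it exceeds the limit,
   mimicking the messages printed by the patch above. */
static void
truncate_pathel (char *pathel)
{
  size_t len = strlen (pathel);
  if (len > DEMO_NAME_MAX)
    {
      fprintf (stderr, "The name is too long, %lu chars total.\n",
               (unsigned long) len);
      fprintf (stderr, "Trying to shorten...\n");
      pathel[DEMO_NAME_MAX] = '\0';
      fprintf (stderr, "New name is %s.\n", pathel);
    }
}

int
main (void)
{
  char path[] = "short/a_very_very_long_directory_name/index.html";
  char *elem = strtok (path, "/");

  /* Check every element on its own rather than the joined path, so a
     single over-long component is shortened without clipping the rest
     of the path behind it. */
  while (elem != NULL)
    {
      char *next = strtok (NULL, "/");
      truncate_pathel (elem);
      fputs (elem, stdout);
      if (next != NULL)
        putchar ('/');
      elem = next;
    }
  putchar ('\n');
  return 0;
}

Compiled as plain C99, this prints short/a_very_very_long/index.html plus the shortening messages for the over-long directory element.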

++++++ wget-do-not-propagate-credentials.patch ++++++
Index: wget-1.21.1/src/http.c
===================================================================
--- wget-1.21.1.orig/src/http.c
+++ wget-1.21.1/src/http.c
@@ -3155,6 +3155,33 @@ fail:
 }
 #endif /* HAVE_METALINK */
 
+/*
+ * Check if the corresponding header line should not
+ * be sent after a redirect
+ */
+static inline int
+unredirectable_headerline(char *line)
+{
+    static struct {
+        size_t len;
+       char *name;
+    } field_name[] = {
+        { 14, "Authorization:" },
+       { 7, "Cookie:" },
+       { 0, NULL }
+    };
+    int i;
+
+    /*
+     * Note: According to RFC 2616, Field names are case-insensitive.
+     */
+    for (i = 0; field_name[i].name != NULL; i++)
+        if (strncasecmp(line, field_name[i].name, field_name[i].len) == 0)
+           return 1;
+
+    return 0;
+}
+
 /* Retrieve a document through HTTP protocol.  It recognizes status
    code, and correctly handles redirections.  It closes the network
    socket.  If it receives an error from the functions below it, it
@@ -3167,7 +3194,7 @@ fail:
    server, and u->url will be requested.  */
 static uerr_t
 gethttp (const struct url *u, struct url *original_url, struct http_stat *hs,
-         int *dt, struct url *proxy, struct iri *iri, int count)
+         int *dt, struct url *proxy, struct iri *iri, int count, int location_changed)
 {
   struct request *req = NULL;
 
@@ -3314,7 +3341,16 @@ gethttp (const struct url *u, struct url
     {
       int i;
       for (i = 0; opt.user_headers[i]; i++)
-        request_set_user_header (req, opt.user_headers[i]);
+       {
+        /*
+         * IF we have been redirected
+         * AND the user-supplied header line should NOT be sent to the new host
+         * DO NOT append that header line
+         */
+        if (location_changed && unredirectable_headerline(opt.user_headers[i]))
+          continue;
+        request_set_user_header (req, opt.user_headers[i]);
+       }
     }
 
   proxyauth = NULL;
@@ -4232,7 +4268,7 @@ check_retry_on_http_error (const int sta
 uerr_t
 http_loop (const struct url *u, struct url *original_url, char **newloc,
            char **local_file, const char *referer, int *dt, struct url *proxy,
-           struct iri *iri)
+           struct iri *iri, int location_changed)
 {
   int count;
   bool got_head = false;         /* used for time-stamping and filename detection */
@@ -4424,7 +4460,7 @@ http_loop (const struct url *u, struct u
         *dt &= ~SEND_NOCACHE;
 
       /* Try fetching the document, or at least its head.  */
-      err = gethttp (u, original_url, &hstat, dt, proxy, iri, count);
+      err = gethttp (u, original_url, &hstat, dt, proxy, iri, count, location_changed);
 
       /* Time?  */
       tms = datetime_str (time (NULL));
Index: wget-1.21.1/src/http.h
===================================================================
--- wget-1.21.1.orig/src/http.h
+++ wget-1.21.1/src/http.h
@@ -36,7 +36,7 @@ as that of the covered work.  */
 struct url;
 
 uerr_t http_loop (const struct url *, struct url *, char **, char **, const char *,
-                  int *, struct url *, struct iri *);
+                  int *, struct url *, struct iri *, int);
 void save_cookies (void);
 void http_cleanup (void);
 time_t http_atotm (const char *);
Index: wget-1.21.1/src/retr.c
===================================================================
--- wget-1.21.1.orig/src/retr.c
+++ wget-1.21.1/src/retr.c
@@ -886,7 +886,7 @@ retrieve_url (struct url * orig_parsed,
 {
   uerr_t result;
   char *url;
-  bool location_changed;
+  bool location_changed = 0;
   bool iri_fallbacked = 0;
   int dummy;
   char *mynewloc, *proxy;
@@ -985,7 +985,7 @@ retrieve_url (struct url * orig_parsed,
        }
 #endif
       result = http_loop (u, orig_parsed, &mynewloc, &local_file, refurl, dt,
-                          proxy_url, iri);
+                          proxy_url, iri, location_changed);
     }
   else if (u->scheme == SCHEME_FTP
 #ifdef HAVE_SSL
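
For illustration, here is a minimal standalone sketch of the check this patch adds: a user-supplied header line is compared case-insensitively against a short list of field names that must not be re-sent after a redirect to a different host. The function name drop_after_redirect(), the driver and the sample header lines are invented for this sketch; in the patch the same comparison lives in unredirectable_headerline() and is consulted in gethttp() only when location_changed is set.

/* redirect_headers_demo.c -- standalone sketch, not part of wget. */
#include <stdio.h>
#include <string.h>
#include <strings.h>   /* strncasecmp */

/* Return 1 if a user-supplied header line must not be re-sent after a
   redirect to a different host, 0 otherwise.  Field names are matched
   case-insensitively, as RFC 2616 requires. */
static int
drop_after_redirect (const char *line)
{
  static const struct { size_t len; const char *name; } fields[] = {
    { 14, "Authorization:" },
    { 7,  "Cookie:" },
    { 0,  NULL }
  };

  for (int i = 0; fields[i].name != NULL; i++)
    if (strncasecmp (line, fields[i].name, fields[i].len) == 0)
      return 1;
  return 0;
}

int
main (void)
{
  const char *headers[] = {
    "Authorization: Basic dXNlcjpwYXNz",
    "cookie: session=abc123",
    "Accept-Language: de",
    NULL
  };

  /* After a cross-host redirect, credential-bearing headers are skipped;
     everything else is still forwarded. */
  for (int i = 0; headers[i] != NULL; i++)
    printf ("%-35s -> %s\n", headers[i],
            drop_after_redirect (headers[i]) ? "dropped" : "kept");
  return 0;
}

Run on its own, this marks the first two sample lines as dropped and keeps the Accept-Language header, matching the behaviour the patch gives gethttp() once location_changed is set.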
