Hello community,

here is the log from the commit of package perl-WWW-Mechanize for 
openSUSE:Factory checked in at 2013-10-21 15:15:05
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Comparing /work/SRC/openSUSE:Factory/perl-WWW-Mechanize (Old)
 and      /work/SRC/openSUSE:Factory/.perl-WWW-Mechanize.new (New)
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++

Package is "perl-WWW-Mechanize"

Changes:
--------
--- /work/SRC/openSUSE:Factory/perl-WWW-Mechanize/perl-WWW-Mechanize.changes    2013-06-21 13:37:26.000000000 +0200
+++ /work/SRC/openSUSE:Factory/.perl-WWW-Mechanize.new/perl-WWW-Mechanize.changes       2013-10-21 15:15:06.000000000 +0200
@@ -1,0 +2,17 @@
+Fri Oct  4 09:15:25 UTC 2013 - [email protected]
+
+- updated to 1.73
+  [TESTS]
+  
+  - Update t/local/back.t to use LocalServer for 404 checking to avoid fails
+  on win32. Fix by Matt S Trout, patient diagnostics and testing provided
+  by jayefuu of freenode #perl
+  
+  - Blow away more proxy env vars in LocalServer, and do it on load so that
+  the LWP env checking doesn't happen before we've done it.
+  
+  [OTHER CHANGES]
+  
+  - Better error when passing only one parameter to follow_link
+
+-------------------------------------------------------------------

Old:
----
  WWW-Mechanize-1.72.tar.gz

New:
----
  WWW-Mechanize-1.73.tar.gz

++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++

Other differences:
------------------
++++++ perl-WWW-Mechanize.spec ++++++
--- /var/tmp/diff_new_pack.j485rC/_old  2013-10-21 15:15:08.000000000 +0200
+++ /var/tmp/diff_new_pack.j485rC/_new  2013-10-21 15:15:08.000000000 +0200
@@ -17,14 +17,14 @@
 
 
 Name:           perl-WWW-Mechanize
-Version:        1.72
+Version:        1.73
 Release:        0
 %define cpan_name WWW-Mechanize
 Summary:        Handy web browsing in a Perl object
 License:        Artistic-1.0 or GPL-1.0+
 Group:          Development/Libraries/Perl
 Url:            http://search.cpan.org/dist/WWW-Mechanize/
-Source:         http://www.cpan.org/authors/id/J/JE/JESSE/%{cpan_name}-%{version}.tar.gz
+Source:         http://www.cpan.org/authors/id/E/ET/ETHER/%{cpan_name}-%{version}.tar.gz
 BuildArch:      noarch
 BuildRoot:      %{_tmppath}/%{name}-%{version}-build
 BuildRequires:  perl
@@ -74,12 +74,7 @@
 %{perl_requires}
 
 %description
-WWW::Mechanize, or Mech for short, helps you automate interaction with a
-website. It supports performing a sequence of page fetches including following
-links and submitting forms. Each fetched page is parsed and its links and
-forms are extracted. A link or a form can be selected, form fields can be
-filled and the next page can be fetched. Mech also stores a history of the
-URLs you've visited, which can be queried and revisited.
+Handy web browsing in a Perl object
 
 %prep
 %setup -q -n %{cpan_name}-%{version}

++++++ WWW-Mechanize-1.72.tar.gz -> WWW-Mechanize-1.73.tar.gz ++++++
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/WWW-Mechanize-1.72/Changes new/WWW-Mechanize-1.73/Changes
--- old/WWW-Mechanize-1.72/Changes      2012-02-03 00:37:59.000000000 +0100
+++ new/WWW-Mechanize-1.73/Changes      2013-08-24 06:23:37.000000000 +0200
@@ -3,7 +3,22 @@
 Mech now has its own mailing list at Google Groups:
 http://groups.google.com/group/www-mechanize-users
 
-[CHANGES]
+
+1.73        2013-08-24
+========================================
+[TESTS]
+
+- Update t/local/back.t to use LocalServer for 404 checking to avoid fails
+on win32. Fix by Matt S Trout, patient diagnostics and testing provided
+by jayefuu of freenode #perl
+
+- Blow away more proxy env vars in LocalServer, and do it on load so that
+the LWP env checking doesn't happen before we've done it.
+
+[OTHER CHANGES]
+
+- Better error when passing only one parameter to follow_link
+
 
 1.72        Thu Feb  2 18:37:28 EST 2012
 ========================================
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/WWW-Mechanize-1.72/MANIFEST new/WWW-Mechanize-1.73/MANIFEST
--- old/WWW-Mechanize-1.72/MANIFEST     2012-02-03 00:39:27.000000000 +0100
+++ new/WWW-Mechanize-1.73/MANIFEST     2013-08-24 06:33:06.000000000 +0200
@@ -23,6 +23,7 @@
 t/credentials-api.t
 t/credentials.t
 t/die.t
+t/dump.t
 t/field.html
 t/field.t
 t/find_frame.html
@@ -73,6 +74,7 @@
 t/pod-coverage.t
 t/pod.t
 t/regex-error.t
+t/save_content.html
 t/save_content.t
 t/select.html
 t/select.t
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/WWW-Mechanize-1.72/MANIFEST.SKIP new/WWW-Mechanize-1.73/MANIFEST.SKIP
--- old/WWW-Mechanize-1.72/MANIFEST.SKIP        2011-08-05 22:48:08.000000000 +0200
+++ new/WWW-Mechanize-1.73/MANIFEST.SKIP        2013-08-24 06:10:43.000000000 +0200
@@ -9,3 +9,5 @@
 .gitignore$
 .shipit$
 ^tags$
+^MYMETA
+^WWW-Mechanize
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/WWW-Mechanize-1.72/META.json new/WWW-Mechanize-1.73/META.json
--- old/WWW-Mechanize-1.72/META.json    2012-02-03 00:39:27.000000000 +0100
+++ new/WWW-Mechanize-1.73/META.json    2013-08-24 06:33:06.000000000 +0200
@@ -4,7 +4,7 @@
       "Jesse Vincent <[email protected]>"
    ],
    "dynamic_config" : 1,
-   "generated_by" : "ExtUtils::MakeMaker version 6.59, CPAN::Meta::Converter version 2.112150",
+   "generated_by" : "ExtUtils::MakeMaker version 6.72, CPAN::Meta::Converter version 2.132140",
    "license" : [
       "perl_5"
    ],
@@ -22,38 +22,38 @@
    "prereqs" : {
       "build" : {
          "requires" : {
-            "ExtUtils::MakeMaker" : 0
+            "ExtUtils::MakeMaker" : "0"
          }
       },
       "configure" : {
          "requires" : {
-            "ExtUtils::MakeMaker" : 0
+            "ExtUtils::MakeMaker" : "0"
          }
       },
       "runtime" : {
          "requires" : {
-            "Carp" : 0,
-            "File::Temp" : 0,
-            "FindBin" : 0,
-            "Getopt::Long" : 0,
+            "Carp" : "0",
+            "File::Temp" : "0",
+            "FindBin" : "0",
+            "Getopt::Long" : "0",
             "HTML::Form" : "6",
-            "HTML::HeadParser" : 0,
+            "HTML::HeadParser" : "0",
             "HTML::Parser" : "3.33",
             "HTML::TokeParser" : "2.28",
-            "HTML::TreeBuilder" : 0,
-            "HTTP::Daemon" : 0,
+            "HTML::TreeBuilder" : "0",
+            "HTTP::Daemon" : "0",
             "HTTP::Request" : "1.3",
             "HTTP::Server::Simple" : "0.35",
-            "HTTP::Server::Simple::CGI" : 0,
-            "HTTP::Status" : 0,
+            "HTTP::Server::Simple::CGI" : "0",
+            "HTTP::Status" : "0",
             "LWP" : "5.829",
             "LWP::UserAgent" : "5.829",
-            "Pod::Usage" : 0,
+            "Pod::Usage" : "0",
             "Test::More" : "0.34",
             "Test::Warn" : "0.11",
             "URI" : "1.36",
-            "URI::URL" : 0,
-            "URI::file" : 0,
+            "URI::URL" : "0",
+            "URI::file" : "0",
             "perl" : "5.008"
          }
       }
@@ -70,5 +70,5 @@
       "x_MailingList" : "http://groups.google.com/group/www-mechanize-users";,
       "x_Repository" : "https://github.com/bestpractical/www-mechanize";
    },
-   "version" : "1.72"
+   "version" : "1.73"
 }
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/WWW-Mechanize-1.72/META.yml new/WWW-Mechanize-1.73/META.yml
--- old/WWW-Mechanize-1.72/META.yml     2012-02-03 00:39:27.000000000 +0100
+++ new/WWW-Mechanize-1.73/META.yml     2013-08-24 06:33:05.000000000 +0200
@@ -7,7 +7,7 @@
 configure_requires:
   ExtUtils::MakeMaker: 0
 dynamic_config: 1
-generated_by: 'ExtUtils::MakeMaker version 6.59, CPAN::Meta::Converter version 2.112150'
+generated_by: 'ExtUtils::MakeMaker version 6.72, CPAN::Meta::Converter version 2.132140'
 license: perl
 meta-spec:
   url: http://module-build.sourceforge.net/META-spec-v1.4.html
@@ -42,9 +42,9 @@
   URI::file: 0
   perl: 5.008
 resources:
+  MailingList: http://groups.google.com/group/www-mechanize-users
+  Repository: https://github.com/bestpractical/www-mechanize
   bugtracker: http://code.google.com/p/www-mechanize/issues/list
   homepage: https://github.com/bestpractical/www-mechanize
   license: http://dev.perl.org/licenses/
-  x_MailingList: http://groups.google.com/group/www-mechanize-users
-  x_Repository: https://github.com/bestpractical/www-mechanize
-version: 1.72
+version: 1.73
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/WWW-Mechanize-1.72/lib/WWW/Mechanize/Examples.pod new/WWW-Mechanize-1.73/lib/WWW/Mechanize/Examples.pod
--- old/WWW-Mechanize-1.72/lib/WWW/Mechanize/Examples.pod       2011-08-05 22:48:08.000000000 +0200
+++ new/WWW-Mechanize-1.73/lib/WWW/Mechanize/Examples.pod       2013-08-24 06:10:59.000000000 +0200
@@ -214,7 +214,7 @@
 =head2 listmod, by Ian Langworth
 
 Ian Langworth contributes this little gem that will bring joy to
-beleagured mailing list admins.  It discards spam messages through
+beleaguered mailing list admins.  It discards spam messages through
 mailman's web interface.
 
 
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/WWW-Mechanize-1.72/lib/WWW/Mechanize/FAQ.pod new/WWW-Mechanize-1.73/lib/WWW/Mechanize/FAQ.pod
--- old/WWW-Mechanize-1.72/lib/WWW/Mechanize/FAQ.pod    2011-10-09 19:58:05.000000000 +0200
+++ new/WWW-Mechanize-1.73/lib/WWW/Mechanize/FAQ.pod    2013-08-24 06:10:59.000000000 +0200
@@ -46,7 +46,7 @@
 long?). There is probably some function with one or more arguments which
 calculates the new URL. Step one: using your favorite browser, get the
 before and after URLs and save them to files. Edit each file, converting
-the the argument separators ('?', '&' or ';') into newlines. Now it is
+the argument separators ('?', '&' or ';') into newlines. Now it is
 easy to use diff or comm to find out what Javascript did to the URL.
 Step 2 - find the function call which created the URL - you will need
 to parse and interpret its argument list. The Javascript Debugger in the
@@ -54,7 +54,7 @@
 fairly trivial to write your own function which emulates the Javascript
 for the pages you want to process.
 
-Here's annother approach that answers the question, "It works in Firefox,
+Here's another approach that answers the question, "It works in Firefox,
 but why not Mech?"  Everything the web server knows about the client is
 present in the HTTP request. If two requests are identical, the results
 should be identical. So the real question is "What is different between
@@ -147,7 +147,7 @@
 
 =head2 How do I check a checkbox that doesn't have a value defined?
 
-Set it to to the value of "on".
+Set it to the value of "on".
 
     $mech->field( my_checkbox => 'on' );
 
@@ -166,7 +166,7 @@
 
     my $mech = WWW::Mechanize->new( autocheck => 1 );
     $mech->get( 'http://my.site.com' );
-    my $res = $mech->response();
+    my $response = $mech->response();
     for my $key ( $response->header_field_names() ) {
         print $key, " : ", $response->header( $key ), "\n";
     }
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/WWW-Mechanize-1.72/lib/WWW/Mechanize.pm new/WWW-Mechanize-1.73/lib/WWW/Mechanize.pm
--- old/WWW-Mechanize-1.72/lib/WWW/Mechanize.pm 2012-02-03 00:38:14.000000000 +0100
+++ new/WWW-Mechanize-1.73/lib/WWW/Mechanize.pm 2013-08-24 06:24:11.000000000 +0200
@@ -6,11 +6,11 @@
 
 =head1 VERSION
 
-Version 1.70
+Version 1.73
 
 =cut
 
-our $VERSION = '1.72';
+our $VERSION = '1.73';
 
 =head1 SYNOPSIS
 
@@ -591,7 +591,8 @@
 =head2 $mech->content(...)
 
 Returns the content that the mech uses internally for the last page
-fetched. Ordinarily this is the same as $mech->response()->content(),
+fetched. Ordinarily this is the same as
+C<< $mech->response()->decoded_content() >>,
 but this may differ for HTML documents if L</update_html> is
 overloaded (in which case the value passed to the base-class
 implementation of same will be returned), and/or extra named arguments
@@ -603,31 +604,56 @@
 
 Returns a text-only version of the page, with all HTML markup
 stripped. This feature requires I<HTML::TreeBuilder> to be installed,
-or a fatal error will be thrown.
+or a fatal error will be thrown. This works only if the contents are
+HTML.
 
 =item I<< $mech->content( base_href => [$base_href|undef] ) >>
 
 Returns the HTML document, modified to contain a
 C<< <base href="$base_href"> >> mark-up in the header.
 I<$base_href> is C<< $mech->base() >> if not specified. This is
-handy to pass the HTML to e.g. L<HTML::Display>.
+handy to pass the HTML to e.g. L<HTML::Display>. This works only if
+the contents are HTML.
+
+
+=item I<< $mech->content( raw => 1 ) >>
+
+Returns C<< $self->response()->content() >>, i.e. the raw contents from the
+response.
+
+=item I<< $mech->content( decoded_by_headers => 1 ) >>
+
+Returns the content after applying all C<Content-Encoding> headers but
+with no additional mangling.
+
+=item I<< $mech->content( charset => $charset ) >>
+
+Returns C<< $self->response()->decoded_content(charset => $charset) >>
+(see L<HTTP::Response> for details).
 
 =back
 
-Passing arguments to C<content()> if the current document is not
-HTML has no effect now (i.e. the return value is the same as
-C<< $self->response()->content() >>. This may change in the future,
-but will likely be backwards-compatible when it does.
+To preserve backwards compatibility, additional parameters will be
+ignored unless none of C<< raw | decoded_by_headers | charset >> is
+specified and the text is HTML, in which case an error will be triggered.
 
 =cut
 
 sub content {
     my $self = shift;
-    my $content = $self->{content};
-
-    if ( $self->is_html ) {
-        my %parms = @_;
+    my %parms = @_;
 
+    my $content = $self->{content};
+    if (delete $parms{raw}) {
+        $content = $self->response()->content();
+    }
+    elsif (delete $parms{decoded_by_headers}) {
+        $content = $self->response()->decoded_content(charset => 'none');
+    }
+    elsif (my $charset = delete $parms{charset}) {
+        $content = $self->response()->decoded_content(charset => $charset);
+    }
+    elsif ( $self->is_html ) {
         if ( exists $parms{base_href} ) {
             my $base_href = (delete $parms{base_href}) || $self->base;
             $content=~s/<head>/<head>\n<base href="$base_href">/i;
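
For reference, a minimal usage sketch of the content() options documented in the hunk above; the URL and the charset are placeholders, not taken from the package:

    use strict;
    use warnings;
    use WWW::Mechanize;

    my $mech = WWW::Mechanize->new();
    $mech->get( 'http://www.example.com/' );

    # raw bytes exactly as received, with no decoding at all
    my $raw   = $mech->content( raw => 1 );

    # Content-Encoding (e.g. gzip) undone, charset left untouched
    my $bytes = $mech->content( decoded_by_headers => 1 );

    # decode with an explicit charset instead of the detected one
    my $text  = $mech->content( charset => 'iso-8859-1' );
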
@@ -728,6 +754,14 @@
 
     $mech->follow_link( n => 3 );
 
+=item * the link with the url
+
+    $mech->follow_link( url => '/other/page' );
+
+or
+
+    $mech->follow_link( url => 'http://example.com/page' );
+
 =back
 
 Returns the result of the GET method (an HTTP::Response object) if
@@ -738,6 +772,7 @@
 
 sub follow_link {
     my $self = shift;
+    $self->die( qq{Needs to get key-value pairs of parameters.} ) if @_ % 2;
     my %parms = ( n=>1, @_ );
 
     if ( $parms{n} eq 'all' ) {
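
A short sketch of the follow_link() changes described above; the URLs are placeholders, and autocheck is disabled so a missing link simply returns false:

    use strict;
    use warnings;
    use WWW::Mechanize;

    my $mech = WWW::Mechanize->new( autocheck => 0 );
    $mech->get( 'http://www.example.com/' );

    # select a link by its href (relative or absolute), as documented above;
    # returns the HTTP::Response on success, false if no such link exists
    my $res = $mech->follow_link( url => '/other/page' );

    # new in 1.73: a lone argument dies with a clear message instead of
    # being folded into a broken key/value list
    eval { $mech->follow_link( '/other/page' ) };
    print $@ if $@;   # "Needs to get key-value pairs of parameters. ..."
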
@@ -2025,23 +2060,70 @@
     return $self->{stack_depth};
 }
 
-=head2 $mech->save_content( $filename )
+=head2 $mech->save_content( $filename, %opts )
 
 Dumps the contents of C<< $mech->content >> into I<$filename>.
 I<$filename> will be overwritten.  Dies if there are any errors.
 
 If the content type does not begin with "text/", then the content
-is saved in binary mode.
+is saved in binary mode (i.e. C<binmode()> is set on the output
+filehandle).
+
+Additional arguments can be passed as I<key>/I<value> pairs:
+
+=over
+
+=item I<< $mech->save_content( $filename, binary => 1 ) >>
+
+Filehandle is set with C<binmode> to C<:raw> and contents are taken
+calling C<< $self->content(decoded_by_headers => 1) >>. Same as calling:
+
+    $mech->save_content( $filename, binmode => ':raw',
+                         decoded_by_headers => 1 );
+
+This I<should> be the safest way to save contents verbatim.
+
+=item I<< $mech->save_content( $filename, binmode => $binmode ) >>
+
+Filehandle is set to binary mode. If C<$binmode> begins with ':', it is
+passed as a parameter to C<binmode>:
+
+    binmode $fh, $binmode;
+
+otherwise the filehandle is set to binary mode if C<$binmode> is true:
+
+    binmode $fh;
+
+=item I<all other arguments>
+
+are passed as-is to C<< $mech->content(%opts) >>. In particular,
+C<decoded_by_headers> might come in handy if you want to revert the effect
+of line compression performed by the web server but without further
+interpreting the contents (e.g. decoding it according to the charset).
+
+=back
 
 =cut
 
 sub save_content {
     my $self = shift;
     my $filename = shift;
+    my %opts = @_;
+    if (delete $opts{binary}) {
+        $opts{binmode} = ':raw';
+        $opts{decoded_by_headers} = 1;
+    }
 
     open( my $fh, '>', $filename ) or $self->die( "Unable to create $filename: $!" );
-    binmode $fh unless $self->content_type =~ m{^text/};
-    print {$fh} $self->content or $self->die( "Unable to write to $filename: $!" );
+    if ((my $binmode = delete($opts{binmode}) || '') || ($self->content_type() !~ m{^text/})) {
+        if (length($binmode) && (substr($binmode, 0, 1) eq ':')) {
+            binmode $fh, $binmode;
+        }
+        else {
+            binmode $fh;
+        }
+    }
+    print {$fh} $self->content(%opts) or $self->die( "Unable to write to $filename: $!" );
     close $fh or $self->die( "Unable to close $filename: $!" );
 
     return;
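
A usage sketch for the extended save_content() shown above, again with a placeholder URL and file name:

    use strict;
    use warnings;
    use WWW::Mechanize;

    my $mech = WWW::Mechanize->new();
    $mech->get( 'http://www.example.com/' );

    # binary => 1 is shorthand for the two options in the long form below
    # and should be the safest way to save the payload byte-for-byte
    $mech->save_content( 'page.html', binary => 1 );

    # equivalent long form
    $mech->save_content( 'page.html',
                         binmode            => ':raw',
                         decoded_by_headers => 1 );
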
@@ -2054,14 +2136,27 @@
 response.  If I<$fh> is not specified or is undef, it dumps to
 STDOUT.
 
-Unlike the rest of the dump_* methods, you cannot specify a filehandle
-to print to.
+Unlike the rest of the dump_* methods, $fh can be a scalar. It
+will be used as a file name.
 
 =cut
 
+sub _get_fh_default_stdout {
+    my $self = shift;
+    my $p = shift || '';
+    if ( !$p ) {
+        return \*STDOUT;
+    } elsif ( !ref($p) ) {
+        open my $fh, '>', $p or $self->die( "Unable to write to $p: $!" );;
+        return $fh;
+    } else {
+        return $p;
+    }
+}
+
 sub dump_headers {
     my $self = shift;
-    my $fh   = shift || \*STDOUT;
+    my $fh   = $self->_get_fh_default_stdout(shift);
 
     print {$fh} $self->response->headers_as_string;
 
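
And a brief sketch of the dump_headers() change above; 'headers.txt' is a placeholder file name:

    use strict;
    use warnings;
    use WWW::Mechanize;

    my $mech = WWW::Mechanize->new();
    $mech->get( 'http://www.example.com/' );

    $mech->dump_headers();                  # to STDOUT, as before
    $mech->dump_headers( 'headers.txt' );   # a plain scalar is now a file name
    $mech->dump_headers( \*STDERR );        # a filehandle still works
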
@@ -2278,7 +2373,7 @@
 sub credentials {
     my $self = shift;
 
-    # The lastest LWP::UserAgent also supports 2 arguments,
+    # The latest LWP::UserAgent also supports 2 arguments,
     # in which case the first is host:port
     if (@_ == 4 || (@_ == 2 && $_[0] =~ /:\d+$/)) {
         return $self->SUPER::credentials(@_);
@@ -2316,7 +2411,7 @@
 
 =head1 INHERITED UNCHANGED LWP::UserAgent METHODS
 
-As a sublass of L<LWP::UserAgent>, WWW::Mechanize inherits all of
+As a subclass of L<LWP::UserAgent>, WWW::Mechanize inherits all of
 L<LWP::UserAgent>'s methods.  Many of which are overridden or
 extended. The following methods are inherited unchanged. View the
 L<LWP::UserAgent> documentation for their implementation descriptions.
@@ -2760,8 +2855,8 @@
 WWW::Mechanize is hosted at GitHub, though the bug tracker still
 lives at Google Code.
 
-Repository: https://github.com/bestpractical/www-mechanize/.  
-Bugs: http://code.google.com/p/www-mechanize/issues
+Repository: L<https://github.com/libwww-perl/WWW-Mechanize>.
+Bugs: L<http://code.google.com/p/www-mechanize/issues>.
 
 =head1 OTHER DOCUMENTATION
 
@@ -2909,6 +3004,8 @@
 
 =item * L<WWW::Yahoo::Groups>
 
+=item * L<WWW::Scripter>
+
 =back
 
 =head1 ACKNOWLEDGEMENTS
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/WWW-Mechanize-1.72/t/content.t new/WWW-Mechanize-1.73/t/content.t
--- old/WWW-Mechanize-1.72/t/content.t  2011-08-05 22:48:08.000000000 +0200
+++ new/WWW-Mechanize-1.73/t/content.t  2012-09-19 23:41:54.000000000 +0200
@@ -2,7 +2,7 @@
 
 use warnings;
 use strict;
-use Test::More tests => 5;
+use Test::More tests => 8;
 
 =head1 NAME
 
@@ -63,3 +63,35 @@
 $content = $mech->content(base_href => undef);
 like($content, qr[base href="http://example.com/"], 'Found the new base href');
 
+$mech->{res} = Test::MockResponse->new(
+   raw_content => 'this is the raw content',
+   charset_none => 'this is a slightly decoded content',
+   charset_whatever => 'this is charset whatever',
+);
+
+$content = $mech->content(raw => 1);
+is($content, 'this is the raw content', 'raw => 1');
+
+$content = $mech->content(decoded_by_headers => 1);
+is($content, 'this is a slightly decoded content', 'decoded_by_headers => 1');
+
+$content = $mech->content(charset => 'whatever');
+is($content, 'this is charset whatever', 'charset => ...');
+
+package Test::MockResponse;
+
+sub new {
+   my $package = shift;
+   return bless { @_ }, $package;
+}
+
+sub content {
+   my ($self) = @_;
+   return $self->{raw_content};
+}
+
+sub decoded_content {
+   my ($self, %opts) = @_;
+   return $self->{decoded_content} unless exists $opts{charset};
+   return $self->{"charset_$opts{charset}"};
+}
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/WWW-Mechanize-1.72/t/dump.t new/WWW-Mechanize-1.73/t/dump.t
--- old/WWW-Mechanize-1.72/t/dump.t     1970-01-01 01:00:00.000000000 +0100
+++ new/WWW-Mechanize-1.73/t/dump.t     2012-09-19 23:41:54.000000000 +0200
@@ -0,0 +1,32 @@
+#!perl -Tw
+
+use warnings;
+use strict;
+use Test::More tests => 5;
+use URI::file;
+
+BEGIN {
+    use_ok( 'WWW::Mechanize' );
+}
+
+my $mech = WWW::Mechanize->new( cookie_jar => undef );
+isa_ok( $mech, 'WWW::Mechanize' );
+
+my $uri = URI::file->new_abs( 't/find_inputs.html' )->as_string;
+
+$mech->get( $uri );
+ok( $mech->success, "Fetched $uri" ) or die q{Can't get test page};
+my $fn = 'headers.tmp';
+$mech->dump_headers($fn);
+ok( -e $fn );
+unlink('headers.tmp');
+
+my $content;
+open my $fh, '>', \$content;
+$mech->dump_headers( $fh );
+like( $content, qr/Content-Length/ );
+close $fh;
+
+END {
+    unlink('headers.tmp');
+}
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/WWW-Mechanize-1.72/t/local/LocalServer.pm new/WWW-Mechanize-1.73/t/local/LocalServer.pm
--- old/WWW-Mechanize-1.72/t/local/LocalServer.pm       2011-08-05 22:48:08.000000000 +0200
+++ new/WWW-Mechanize-1.73/t/local/LocalServer.pm       2012-09-19 23:41:54.000000000 +0200
@@ -3,6 +3,14 @@
 # start a fake webserver, fork, and connect to ourselves
 use warnings;
 use strict;
+# this has to happen here because LWP::Simple creates a $ua
+# on load so any time after this is too late.
+BEGIN {
+  delete @ENV{qw(
+    HTTP_PROXY http_proxy CGI_HTTP_PROXY
+    HTTPS_PROXY https_proxy HTTP_PROXY_ALL http_proxy_all
+  )};
+}
 use LWP::Simple;
 use FindBin;
 use File::Spec;
@@ -63,6 +71,10 @@
     HTTP_PROXY
     http_proxy
     CGI_HTTP_PROXY
+    HTTPS_PROXY
+    https_proxy
+    HTTP_PROXY_ALL
+    http_proxy_all
 
 =cut
 
@@ -75,8 +87,6 @@
   $ENV{TEST_HTTP_VERBOSE} = 1
     if (delete $args{debug});
 
-  delete @ENV{qw(HTTP_PROXY http_proxy CGI_HTTP_PROXY)};
-
   $self->{delete} = [];
   if (my $html = delete $args{html}) {
     # write the html to a temp file
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/WWW-Mechanize-1.72/t/local/back.t new/WWW-Mechanize-1.73/t/local/back.t
--- old/WWW-Mechanize-1.72/t/local/back.t       2011-08-05 22:48:08.000000000 +0200
+++ new/WWW-Mechanize-1.73/t/local/back.t       2012-09-19 23:41:54.000000000 +0200
@@ -23,6 +23,7 @@
 }
 
 BEGIN {
+    delete @ENV{ qw( IFS CDPATH ENV BASH_ENV ) };
     use_ok( 'WWW::Mechanize' );
 }
 
@@ -128,24 +129,7 @@
 
 is( scalar @{$mech->{page_stack}}, 0, 'Pre-404 check' );
 
-my $server404 = HTTP::Daemon->new(LocalAddr => 'localhost') or die;
-my $server404url = $server404->url;
-
-die 'Cannot fork' if (! defined (my $pid404 = fork()));
-END {
-    local $?;
-    kill KILL => $pid404 if $pid404; # Extreme prejudice intended, because we do not
-    # want the global cleanup to be done twice.
-}
-
-if (! $pid404) { # Fake HTTP server code: a true 404-compliant server!
-    while ( my $c = $server404->accept() ) {
-        while ( $c->get_request() ) {
-            $c->send_response( HTTP::Response->new(404) );
-            $c->close();
-        }
-    }
-}
+my $server404url = $server->error_notfound('404check');
 
 $mech->get($server404url);
 is( $mech->status, 404 , '404 check') or
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/WWW-Mechanize-1.72/t/local/click.t new/WWW-Mechanize-1.73/t/local/click.t
--- old/WWW-Mechanize-1.72/t/local/click.t      2011-08-05 22:48:08.000000000 +0200
+++ new/WWW-Mechanize-1.73/t/local/click.t      2012-09-19 23:41:54.000000000 +0200
@@ -7,7 +7,6 @@
 use Test::More tests => 9;
 
 BEGIN {
-    delete @ENV{ grep { lc eq 'http_proxy' } keys %ENV };
     delete @ENV{ qw( IFS CDPATH ENV BASH_ENV ) };
     use_ok( 'WWW::Mechanize' );
 }
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/WWW-Mechanize-1.72/t/local/click_button.t new/WWW-Mechanize-1.73/t/local/click_button.t
--- old/WWW-Mechanize-1.72/t/local/click_button.t       2011-08-05 22:48:08.000000000 +0200
+++ new/WWW-Mechanize-1.73/t/local/click_button.t       2012-09-19 23:41:54.000000000 +0200
@@ -7,7 +7,6 @@
 use Test::More tests => 19;
 
 BEGIN {
-    delete @ENV{ grep { lc eq 'http_proxy' } keys %ENV };
     delete @ENV{ qw( IFS CDPATH ENV BASH_ENV ) };
     use_ok( 'WWW::Mechanize' );
 }
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/WWW-Mechanize-1.72/t/local/content.t new/WWW-Mechanize-1.73/t/local/content.t
--- old/WWW-Mechanize-1.72/t/local/content.t    2011-10-09 19:58:05.000000000 +0200
+++ new/WWW-Mechanize-1.73/t/local/content.t    2012-09-19 23:41:54.000000000 +0200
@@ -7,7 +7,6 @@
 use Test::More tests => 10;
 
 BEGIN {
-    delete @ENV{ grep { lc eq 'http_proxy' } keys %ENV };
     delete @ENV{ qw( IFS CDPATH ENV BASH_ENV ) };
     use_ok( 'WWW::Mechanize' );
 }
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/WWW-Mechanize-1.72/t/local/encoding.t new/WWW-Mechanize-1.73/t/local/encoding.t
--- old/WWW-Mechanize-1.72/t/local/encoding.t   2011-11-25 19:21:42.000000000 +0100
+++ new/WWW-Mechanize-1.73/t/local/encoding.t   2012-09-19 23:41:54.000000000 +0200
@@ -7,7 +7,6 @@
 use LocalServer;
 
 BEGIN {
-    delete @ENV{ grep { lc eq 'http_proxy' } keys %ENV };
     delete @ENV{qw( IFS CDPATH ENV BASH_ENV )};
     use_ok('WWW::Mechanize');
 }
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/WWW-Mechanize-1.72/t/local/failure.t new/WWW-Mechanize-1.73/t/local/failure.t
--- old/WWW-Mechanize-1.72/t/local/failure.t    2011-08-05 22:48:08.000000000 +0200
+++ new/WWW-Mechanize-1.73/t/local/failure.t    2012-09-19 23:41:54.000000000 +0200
@@ -9,7 +9,6 @@
 
 
 BEGIN {
-    delete @ENV{ grep { lc eq 'http_proxy' } keys %ENV };
     delete @ENV{ qw( IFS CDPATH ENV BASH_ENV ) };
 }
 
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/WWW-Mechanize-1.72/t/local/follow.t new/WWW-Mechanize-1.73/t/local/follow.t
--- old/WWW-Mechanize-1.72/t/local/follow.t     2011-08-05 22:48:08.000000000 +0200
+++ new/WWW-Mechanize-1.73/t/local/follow.t     2013-08-24 06:10:59.000000000 +0200
@@ -2,13 +2,12 @@
 
 use warnings;
 use strict;
-use Test::More tests => 22;
+use Test::More tests => 28;
 use lib 't/local';
 use LocalServer;
 use encoding 'iso-8859-1';
 
 BEGIN {
-    delete @ENV{ grep { lc eq 'http_proxy' } keys %ENV };
     delete @ENV{ qw( IFS CDPATH ENV BASH_ENV ) };
     use_ok( 'WWW::Mechanize' );
 }
@@ -54,3 +53,13 @@
 
 $response = $agent->follow_link( text_regex => qr/Snargle/ );
 ok( !$response, q{Couldn't find it} );
+
+ok($agent->follow_link( url => '/foo' ), 'can follow url');
+isnt( $agent->uri, $server->url, 'Need to be on a separate page' );
+ok($agent->back(), 'Can still go back');
+
+ok(!$agent->follow_link( url => '/notfoo' ), "can't follow wrong url");
+is( $agent->uri, $server->url, 'Needs to be on the same page' );
+eval {$agent->follow_link( '/foo' )};
+like($@, qr/Needs to get key-value pairs of parameters.*follow\.t/, "Invalid parameter passing gets better error message");
+
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/WWW-Mechanize-1.72/t/local/form.t new/WWW-Mechanize-1.73/t/local/form.t
--- old/WWW-Mechanize-1.72/t/local/form.t       2011-08-05 22:48:08.000000000 +0200
+++ new/WWW-Mechanize-1.73/t/local/form.t       2012-09-19 23:41:54.000000000 +0200
@@ -8,7 +8,6 @@
 use LocalServer;
 
 BEGIN {
-    delete @ENV{ grep { lc eq 'http_proxy' } keys %ENV };
     delete @ENV{ qw( IFS CDPATH ENV BASH_ENV ) };
     use_ok( 'WWW::Mechanize' );
 }
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/WWW-Mechanize-1.72/t/local/get.t new/WWW-Mechanize-1.73/t/local/get.t
--- old/WWW-Mechanize-1.72/t/local/get.t        2011-08-05 22:48:08.000000000 +0200
+++ new/WWW-Mechanize-1.73/t/local/get.t        2012-09-19 23:41:54.000000000 +0200
@@ -12,6 +12,7 @@
 }
 
 BEGIN {
+    delete @ENV{ qw( IFS CDPATH ENV BASH_ENV ) };
     use_ok( 'WWW::Mechanize' );
 }
 
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/WWW-Mechanize-1.72/t/local/nonascii.t new/WWW-Mechanize-1.73/t/local/nonascii.t
--- old/WWW-Mechanize-1.72/t/local/nonascii.t   2011-08-05 22:48:08.000000000 +0200
+++ new/WWW-Mechanize-1.73/t/local/nonascii.t   2012-09-19 23:41:54.000000000 +0200
@@ -8,7 +8,6 @@
 use LocalServer;
 
 BEGIN {
-    delete @ENV{ grep { lc eq 'http_proxy' } keys %ENV };
     delete @ENV{ qw( IFS CDPATH ENV BASH_ENV ) };
     use_ok( 'WWW::Mechanize' );
 }
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/WWW-Mechanize-1.72/t/local/overload.t new/WWW-Mechanize-1.73/t/local/overload.t
--- old/WWW-Mechanize-1.72/t/local/overload.t   2011-08-05 22:48:08.000000000 +0200
+++ new/WWW-Mechanize-1.73/t/local/overload.t   2012-09-19 23:41:54.000000000 +0200
@@ -31,7 +31,6 @@
 =cut
 
 BEGIN {
-    delete @ENV{ grep { lc eq 'http_proxy' } keys %ENV };
     delete @ENV{ qw( IFS CDPATH ENV BASH_ENV ) };
     use_ok( 'WWW::Mechanize' );
 }
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/WWW-Mechanize-1.72/t/local/page_stack.t new/WWW-Mechanize-1.73/t/local/page_stack.t
--- old/WWW-Mechanize-1.72/t/local/page_stack.t 2011-08-05 22:48:08.000000000 +0200
+++ new/WWW-Mechanize-1.73/t/local/page_stack.t 2012-09-19 23:41:54.000000000 +0200
@@ -8,7 +8,6 @@
 use LocalServer;
 
 BEGIN {
-    delete @ENV{ grep { lc eq 'http_proxy' } keys %ENV };
     delete @ENV{ qw( IFS CDPATH ENV BASH_ENV ) };
     use_ok( 'WWW::Mechanize' );
 }
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/WWW-Mechanize-1.72/t/local/referer.t new/WWW-Mechanize-1.73/t/local/referer.t
--- old/WWW-Mechanize-1.72/t/local/referer.t    2011-08-05 22:48:08.000000000 +0200
+++ new/WWW-Mechanize-1.73/t/local/referer.t    2012-09-19 23:41:54.000000000 +0200
@@ -12,6 +12,7 @@
 }
 
 BEGIN {
+    delete @ENV{ qw( IFS CDPATH ENV BASH_ENV ) };
     use_ok( 'WWW::Mechanize' );
 }
 
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/WWW-Mechanize-1.72/t/local/reload.t new/WWW-Mechanize-1.73/t/local/reload.t
--- old/WWW-Mechanize-1.72/t/local/reload.t     2011-08-05 22:48:08.000000000 +0200
+++ new/WWW-Mechanize-1.73/t/local/reload.t     2012-09-19 23:41:54.000000000 +0200
@@ -12,6 +12,7 @@
 }
 
 BEGIN {
+    delete @ENV{ qw( IFS CDPATH ENV BASH_ENV ) };
     use_ok( 'WWW::Mechanize' );
 }
 
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/WWW-Mechanize-1.72/t/local/submit.t new/WWW-Mechanize-1.73/t/local/submit.t
--- old/WWW-Mechanize-1.72/t/local/submit.t     2011-08-05 22:48:08.000000000 +0200
+++ new/WWW-Mechanize-1.73/t/local/submit.t     2012-09-19 23:41:54.000000000 +0200
@@ -11,6 +11,7 @@
 }
 
 BEGIN {
+    delete @ENV{ qw( IFS CDPATH ENV BASH_ENV ) };
     use_ok( 'WWW::Mechanize' );
 }
 
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/WWW-Mechanize-1.72/t/save_content.html new/WWW-Mechanize-1.73/t/save_content.html
--- old/WWW-Mechanize-1.72/t/save_content.html  1970-01-01 01:00:00.000000000 +0100
+++ new/WWW-Mechanize-1.73/t/save_content.html  2012-09-19 23:41:54.000000000 +0200
@@ -0,0 +1,8 @@
+<html>
+<head>
+    <META http-equiv="Content-Type" content="text/html; charset=UTF-8">
+</head>
+<body>
+   Però poi si vedrà!!!
+</body>
+</html>
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/WWW-Mechanize-1.72/t/save_content.t new/WWW-Mechanize-1.73/t/save_content.t
--- old/WWW-Mechanize-1.72/t/save_content.t     2011-08-05 22:48:08.000000000 +0200
+++ new/WWW-Mechanize-1.73/t/save_content.t     2012-09-19 23:41:54.000000000 +0200
@@ -3,7 +3,7 @@
 use warnings;
 use strict;
 
-use Test::More tests => 5;
+use Test::More tests => 8;
 use URI::file;
 
 BEGIN {
@@ -15,7 +15,7 @@
 isa_ok( $mech, 'WWW::Mechanize' );
 
 my $original = 't/find_inputs.html';
-my $saved = 'saved.test.txt';
+my $saved = 'saved1.test.txt';
 
 my $uri = URI::file->new_abs( $original )->as_string;
 
@@ -31,6 +31,25 @@
 
 ok( $old_text eq $new_text, 'Saved copy matches the original' ) && unlink $saved;
 
+{
+    my $original = 't/save_content.html';
+    my $saved = 'saved2.test.txt';
+
+    my $uri = URI::file->new_abs( $original )->as_string;
+
+    $mech->get( $uri );
+    ok( $mech->success, "Fetched $uri" ) or die q{Can't get test page};
+
+    #unlink $saved;
+    ok( !-e $saved, "$saved does not exist" );
+    $mech->save_content( $saved, binary => 1 );
+
+    my $old_text = slurp( $original );
+    my $new_text = slurp( $saved );
+
+    ok( $old_text eq $new_text, 'Saved copy matches the original' ) && unlink $saved;
+}
+
 sub slurp {
     my $name = shift;
 

-- 
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]
