Dzahn has submitted this change and it was merged.

Change subject: split out apache sync scripts from mw deploy
......................................................................


split out apache sync scripts from mw deploy

move the Apache sync scripts out of the general
deployment class, in order to:

move them away from fenari and put them on tin
without touching mw deployment or fenari

RT #6145: removes a blocker to shutting down fenari

Change-Id: Idb4ced99644c1692c93673ceb15d690a01b7e7d7
---
M manifests/misc/deployment.pp
A manifests/role/apachesync.pp
M manifests/site.pp
A modules/apachesync/files/apache-fast-test
A modules/apachesync/files/apache-graceful-all
A modules/apachesync/files/sync-apache
A modules/apachesync/manifests/init.pp
7 files changed, 286 insertions(+), 21 deletions(-)

Approvals:
  Matanya: Looks good to me, but someone else must approve
  jenkins-bot: Verified
  Dzahn: Looks good to me, approved


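For context, any host can be opted into these scripts with a one-line include
of the new role class in site.pp. A minimal sketch, assuming a hypothetical
node name (the real include added by this change appears in the site.pp hunk
below):

    node 'example.eqiad.wmnet' {
        # brings in /usr/local/bin/sync-apache, sync-apache-simulated,
        # apache-graceful-all and apache-fast-test, plus misc::dsh
        include role::apachesync
    }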

diff --git a/manifests/misc/deployment.pp b/manifests/misc/deployment.pp
index f9cf741..fe9d84e 100644
--- a/manifests/misc/deployment.pp
+++ b/manifests/misc/deployment.pp
@@ -119,22 +119,6 @@
                        group => root,
                        mode => 0555,
                        source => "puppet:///files/misc/scripts/sqldump";
-               "${scriptpath}/sync-apache":
-                       owner => root,
-                       group => root,
-                       mode => 0555,
-                       source => "puppet:///files/misc/scripts/sync-apache";
-               "${scriptpath}/sync-apache-simulated":
-                       owner => root,
-                       group => root,
-                       mode => 0555,
-                       ensure => link,
-                       target => "${scriptpath}/sync-apache";
-               "${scriptpath}/apache-graceful-all":
-                       owner  => 'root',
-                       group  => 'root',
-                       mode   => '0554',
-                       source => 'puppet:///files/misc/scripts/apache-graceful-all';
                "${scriptpath}/udprec":
                        owner => root,
                        group => root,
@@ -145,11 +129,6 @@
                        group => root,
                        mode => 0555,
                        source => "puppet:///files/misc/scripts/set-group-write2";
-               "${scriptpath}/apache-fast-test":
-                       owner => root,
-                       group => root,
-                       mode => 0555,
-                       source => "puppet:///files/misc/scripts/apache-fast-test";
                "${scriptpath}/updateinterwikicache":
                        owner => root,
                        group => root,
diff --git a/manifests/role/apachesync.pp b/manifests/role/apachesync.pp
new file mode 100644
index 0000000..5485bf6
--- /dev/null
+++ b/manifests/role/apachesync.pp
@@ -0,0 +1,11 @@
+# role for a host with apache sync scripts
+class role::apachesync {
+
+    system::role { 'apachesync':
+        description => 'apache sync server',
+    }
+
+    include ::apachesync
+    include misc::dsh
+}
+
diff --git a/manifests/site.pp b/manifests/site.pp
index 444d801..d5478ed 100644
--- a/manifests/site.pp
+++ b/manifests/site.pp
@@ -2467,6 +2467,7 @@
     include mysql
     include role::labsdb::manager
     include ssh::hostkeys-collect
+    include role::apachesync
 
     # for reedy RT #6322
     package { 'unzip':
diff --git a/modules/apachesync/files/apache-fast-test b/modules/apachesync/files/apache-fast-test
new file mode 100755
index 0000000..cfdcf33
--- /dev/null
+++ b/modules/apachesync/files/apache-fast-test
@@ -0,0 +1,200 @@
+#!/usr/bin/perl
+#
+# Copyright (c) 2012 Jeff Green <jgr...@wikimedia.org>
+#
+# Permission is hereby granted, free of charge, to any person obtaining a copy of
+# this software and associated documentation files (the "Software"), to deal in
+# the Software without restriction, including without limitation the rights to
+# use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
+# of the Software, and to permit persons to whom the Software is furnished to do
+# so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice shall be included in all
+# copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+# SOFTWARE.
+#
+use strict;
+no warnings 'threads';
+use threads;
+use threads::shared;
+use LWP::UserAgent;
+use Net::DNS::Resolver;
+use Time::HiRes;
+
+my $threads = 50; # how many concurrent threads?
+my $timeout_limit = 3; # how many timeouts to tolerate before dropping a server
+my $timeout = 10; # seconds before LWP gives up on a web request
+my $pad_length = 25; # server hostname column width in report
+my $default_staging_server = 'mw1017.eqiad.wmnet';
+
+# get a list of servers to test
+my $servers;
+if ($ARGV[1]) {
+       # explicit list of servers
+       for (@ARGV[1..$#ARGV]) {
+               chomp;
+               if (/^pybal$/) {
+                       # list of production servers from pybal config
+                       $servers = get_server_list_from_pybal_config(qw(
+                               /home/wikipedia/conf/pybal/eqiad/apaches
+                               /home/wikipedia/conf/pybal/eqiad/api
+                       ));
+               } elsif (/^([\w\.]+)$/) {
+                       $servers->{$1} = 1;
+               }
+       }
+} else {
+       $servers->{$default_staging_server} = 1;
+}
+
+# thread-shared variables used for test/result
+my @queue : shared; # job_ids grouped in per-thread batches
+my %result :shared; # threads return stuff here
+my %timeouts :shared; # server timeout counter
+
+# read in the list of urls
+my $urls;
+open URL, $ARGV[0];
+while (<URL>) {
+       chomp;
+       if (/^\s*(http\S+)/) {
+               $urls->{$1}++;
+       }
+}
+close URL;
+
+unless (keys %{$urls}) {
+       print "no urls found in file $ARGV[0]\n";
+}
+
+# do the server DNS lookups in advance, threading DNS lookups is fraught with peril
+my $resolver = Net::DNS::Resolver->new;
+for my $host (sort keys %{$servers}) {
+       my $answer = $resolver->search($host);
+       if ($answer) {
+               for my $r ($answer->answer) {
+                       if ($r->type eq 'A') {
+                               my $batch;
+                               $servers->{$host} = $r->address; # grab the first A record
+                               for my $url (sort keys %{$urls}) {
+                                       $batch .= "$servers->{$host}\t$url\n";
+                               }
+                               push @queue, $batch if defined $batch;
+                               last;
+                       }
+               }
+       }
+       unless ($servers->{$host} > 1) {
+               print "no IP found for $host\n";
+               delete $servers->{$host};
+       }
+}
+
+# informative output
+if (scalar @queue) {
+       print "testing " . (keys %{$urls}) . ' urls on ' . (keys %{$servers}) . ' servers, totalling ' .
+               ((keys %{$urls}) * (keys %{$servers}))  . " requests\n";
+} else {
+       print "\n  usage: $0 url_file [server spec]\n\n" .
+               "   url_file format is one URL per line, http/https schemes supported\n\n" .
+               "   server spec:\n" .
+               "     (none)   use default $default_staging_server\n" .
+               "     pybal    fetch webserver list from local pybal conf\n" .
+               "     host1 host2 host3\n\n";
+       exit;
+}
+
+# spawn worker threads to do api web requests, then reap them
+print "spawning threads";
+for (1..$threads) {
+       last unless scalar @queue;
+       threads->new(sub { process_queue_task() });
+}
+while (threads->list) {
+       sleep 0.1;
+       for my $thr (threads->list(threads::joinable)) {
+               $thr->join;
+       }
+}
+print "\n";
+
+# can't nest hashes with threads::shared, so we retrieve from shared %result hash
+print "\n";
+for my $url (sort keys %{$urls}) {
+       my ($highlight, $previous_result, $report);
+       for my $host (sort keys %{$servers}) {
+               my $ip = $servers->{$host};
+               next unless defined $result{"$ip\t$url"};
+               $report .= "  " . sprintf("%-${pad_length}s",$host) . $result{"$ip\t$url"} . "\n";
+               $highlight++ if defined($previous_result) and ($previous_result ne $result{"$ip\t$url"});
+               $previous_result = $result{"$ip\t$url"};
+       }
+       if (defined $highlight) {
+               print "$url\n$report";
+       } else {
+               print "$url\n * $previous_result\n"; # short form output
+       }
+}
+
+exit;
+
+
+
+
+
+
+# SUBROUTINES
+sub process_queue_task {
+       my $name = 'thread' . threads->self->tid();
+       print '.';
+       while (defined $queue[0]) {
+               for my $job (split("\n", shift @queue)) {
+                       my ($ip,$url) = split("\t", $job);
+                       if (($timeouts{$ip} < $timeout_limit) and ($url =~ /^(https?):\/\/([^\/]+)(.*)$/)) {
+                               my ($protocol,$host,$path) = ($1,$2,$3);
+                               $path = (defined $path) ? $path : '';
+                               my $ua = LWP::UserAgent->new(timeout => $timeout);
+                               my $request = HTTP::Request->new(GET => "http://$ip$path");
+                               $request->header(
+                                       'Host' => $host,
+                                       'User_Agent' => $0,
+                                       'X-Forwarded-Proto' => $protocol, # proxies add this
+                               );
+                               my $r = $ua->simple_request($request);
+                               if ($r->is_success) {
+                                       $result{"$ip\t$url"} = $r->status_line . ' ' . length($r->content);
+                               } elsif (($r->is_error) and ($r->status_line =~ /^500 can't connect/i)) { # simple timeout, drop the data
+                                       $timeouts{$ip}++;
+                                       print "$name dropped $ip, too many timeouts\n" if $timeouts{$ip} >= $timeout_limit;
+                               } elsif ($r->is_error) { # report other errors
+                                       $result{"$ip\t$url"} = $r->status_line;
+                               } else {
+                                       $result{"$ip\t$url"} = $r->status_line . ' ' . $r->header('Location');
+                               }
+                       }
+               }
+       }
+}
+
+
+sub get_server_list_from_pybal_config {
+       my $servers;
+       for my $pyfile (@_) {
+               open PYFILE, $pyfile;
+               while (<PYFILE>) {
+                       next if /^\s*#/;
+                       if ( (/'enabled':\s+True/i) and (/'host':\s+'([\.\w]+)'/) ) {
+                               $servers->{$1} = 1;
+                       }
+               }
+               close PYFILE;
+       }
+       return $servers;
+}
diff --git a/modules/apachesync/files/apache-graceful-all b/modules/apachesync/files/apache-graceful-all
new file mode 100755
index 0000000..bddd1bb
--- /dev/null
+++ b/modules/apachesync/files/apache-graceful-all
@@ -0,0 +1,8 @@
+#!/bin/bash
+if [ `cat /etc/cluster` == pmtpa ]; then
+       /home/wikipedia/bin/dologmsg "$USER is doing a graceful restart of all apaches"
+fi
+ddsh -F20 -g apaches -cM 'if [ -x /home/wikipedia/bin/apache-graceful ]; then echo "/home-mounted apache $(hostname)"; /home/wikipedia/bin/apache-graceful; else /usr/bin/apache-sanity-check && sudo /usr/sbin/apache2ctl graceful; fi'
+if [ `cat /etc/cluster` == pmtpa ]; then
+       /home/wikipedia/bin/dologmsg "!log $USER gracefulled all apaches"
+fi
diff --git a/modules/apachesync/files/sync-apache b/modules/apachesync/files/sync-apache
new file mode 100755
index 0000000..b3aa737
--- /dev/null
+++ b/modules/apachesync/files/sync-apache
@@ -0,0 +1,31 @@
+#!/bin/bash
+
+echo "Synchronizing /home/wikipedia/conf/httpd to /usr/local/apache/conf..."
+
+# Options to pass to the rsync command
+RSYNC_OPTIONS="-a"
+
+# Add the dry-run option to rsync when this script is called
+# as `sync-apache-simulated`. Hacky, but it avoids code duplication.
+if [ `basename $0` = "sync-apache-simulated" ]; then
+       echo "Simulating rsync with --dry-run..."
+       RSYNC_OPTIONS="$RSYNC_OPTIONS -n"
+fi;
+
+# This sync works with both puppet and non-puppet hosts
+dsh -cM -g apaches -o-lroot -o-oSetupTimeout=30 -F30 -- "
+       rsync $RSYNC_OPTIONS 10.0.5.8::httpdconf/ /usr/local/apache/conf
+"
+# don't forget the image rendering cluster
+dsh -cM -g image_scalers -o-lroot -o-oSetupTimeout=30 -F30 -- "
+       rsync $RSYNC_OPTIONS 10.0.5.8::httpdconf/ /usr/local/apache/conf
+"
+# while we are at it, might as well do the snapshot boxen
+dsh -cM -g snapshot -o-lroot -o-oSetupTimeout=30 -F30 -- "
+       rsync $RSYNC_OPTIONS 10.0.5.8::httpdconf/ /usr/local/apache/conf
+"
+
+# decided we want search indexers too
+dsh -cM -g searchidx -o-lroot -o-oSetupTimeout=30 -F30 -- "
+       rsync $RSYNC_OPTIONS 10.0.5.8::httpdconf/ /usr/local/apache/conf
+"
diff --git a/modules/apachesync/manifests/init.pp b/modules/apachesync/manifests/init.pp
new file mode 100644
index 0000000..177af69
--- /dev/null
+++ b/modules/apachesync/manifests/init.pp
@@ -0,0 +1,35 @@
+# scripts for syncing apache changes
+class apachesync {
+
+    $scriptpath = '/usr/local/bin'
+
+    file { "${scriptpath}/sync-apache":
+        owner  => 'root',
+        group  => 'root',
+        mode   => '0555',
+        source => 'puppet:///modules/apachesync/sync-apache',
+    }
+
+    file { "${scriptpath}/sync-apache-simulated":
+        ensure => link,
+        owner  => 'root',
+        group  => 'root',
+        mode   => '0555',
+        target => "${scriptpath}/sync-apache",
+    }
+
+    file { "${scriptpath}/apache-graceful-all":
+        owner  => 'root',
+        group  => 'root',
+        mode   => '0554',
+        source => 'puppet:///modules/apachesync/apache-graceful-all',
+    }
+
+    file  { "${scriptpath}/apache-fast-test":
+        owner  => 'root',
+        group  => 'root',
+        mode   => '0555',
+        source => 'puppet:///modules/apachesync/apache-fast-test',
+    }
+
+}

-- 
To view, visit https://gerrit.wikimedia.org/r/129399
To unsubscribe, visit https://gerrit.wikimedia.org/r/settings

Gerrit-MessageType: merged
Gerrit-Change-Id: Idb4ced99644c1692c93673ceb15d690a01b7e7d7
Gerrit-PatchSet: 7
Gerrit-Project: operations/puppet
Gerrit-Branch: production
Gerrit-Owner: Dzahn <dz...@wikimedia.org>
Gerrit-Reviewer: Dzahn <dz...@wikimedia.org>
Gerrit-Reviewer: Matanya <mata...@foss.co.il>
Gerrit-Reviewer: jenkins-bot <>
