Ori.livneh has uploaded a new change for review.
https://gerrit.wikimedia.org/r/111351
Change subject: Copy scap scripts from operations/puppet repository
......................................................................
Copy scap scripts from operations/puppet repository
Change-Id: I1d4c66afd83a68813cb2c38073777d73cd6d0ae0
---
A find-nearest-rsync
A mergeCdbFileUpdates
A mw-update-l10n
A mwversionsinuse
A refreshCdbJsonFiles
A restart-twemproxy
A scap
A scap-1
A scap-2
A scap-rebuild-cdbs
A scap-recompile
A scappy
A sync-common
A sync-common-all
A sync-common-file
A sync-dblist
A sync-dir
A sync-docroot
A sync-file
A sync-wikiversions
20 files changed, 1,231 insertions(+), 0 deletions(-)
git pull ssh://gerrit.wikimedia.org:29418/mediawiki/tools/scap refs/changes/51/111351/1
diff --git a/find-nearest-rsync b/find-nearest-rsync
new file mode 100755
index 0000000..a9eff89
--- /dev/null
+++ b/find-nearest-rsync
@@ -0,0 +1,40 @@
+#!/usr/bin/perl
+
+use strict;
+use Net::Ping;
+
+my $verbose = 0;
+if ( $#ARGV >= 0 && $ARGV[0] eq '--verbose' ) {
+ $verbose = 1;
+ shift;
+}
+
+if ( $#ARGV < 0 ) {
+ print STDERR "Usage: find-nearest-rsync [--verbose] <host> [<host>
...]\n";
+ exit( 1 );
+}
+
+my( $bestHost, $bestRTT );
+my $p = Net::Ping->new( "icmp", 2 );
+$p->hires();
+
+foreach my $host ( @ARGV ) {
+ my ( $success, $rtt, $ip ) = $p->ping( $host );
+ if ( not defined( $bestHost ) ) {
+ $bestHost = $host;
+ $bestRTT = $rtt;
+ } elsif ( $rtt < $bestRTT ) {
+ $bestRTT = $rtt;
+ $bestHost = $host;
+ }
+ if ( $verbose ) {
+ printf STDERR "%s: %.6f\n", ( $host, $rtt * 1000 );
+ }
+}
+
+if ( not defined( $bestHost ) ) {
+ print STDERR "find-nearest-rsync: No hosts replied!\n";
+ exit( 1 );
+}
+print "$bestHost\n";
+
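
find-nearest-rsync pings every candidate host over ICMP and prints the one with the lowest round-trip time; scap-1 later in this change feeds it the scap-proxies list. A minimal invocation sketch (the hostnames are illustrative placeholders, not the production proxy list):

    # --verbose prints each host's RTT in milliseconds to stderr
    SERVER=$(find-nearest-rsync --verbose rsync-proxy1.example.org rsync-proxy2.example.org)
    echo "Syncing from $SERVER"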
diff --git a/mergeCdbFileUpdates b/mergeCdbFileUpdates
new file mode 100755
index 0000000..6d7903f
--- /dev/null
+++ b/mergeCdbFileUpdates
@@ -0,0 +1,208 @@
+#!/usr/bin/env php
+<?php
+
+/**
+ * Merge CDB file updates via JSON blobs and push-based MD5 files.
+ *
+ * This program is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation; either version 2 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License along
+ * with this program; if not, write to the Free Software Foundation, Inc.,
+ * 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+ * http://www.gnu.org/copyleft/gpl.html
+ *
+ * @file
+ * @author Aaron Schulz
+ */
+
+if ( PHP_SAPI !== 'cli' ) {
+ die( "This is not a valid entry point.\n" );
+}
+
+$script = new MergeCdbFileUpdates(
+ getopt( '', array( 'directory:', 'threads:', 'trustmtime' ) )
+);
+$script->execute();
+
+/**
+ * Script to pull in updates to CDB files.
+ *
+ * This will check if the /upstream .MD5 files for each CDB file match their hash.
+ * If they do not, then the JSON files in that directory will be converted
+ * to CDB files, replacing the current ones.
+ */
+class MergeCdbFileUpdates {
+ /** @var array */
+ protected $params = array();
+
+ public function __construct( array $params ) {
+ foreach ( array( 'directory' ) as $par ) {
+ if ( !isset( $params[$par] ) ) {
+ die( "Usage: mergeCdbFileUpdates " .
+ "--directory <directory> --threads
<integer> [--trustmtime]\n\n" .
+ "The --trustmtime option assumes that
the CDB files match the " .
+ "upstream JSON files if their mtimes
match.\nWithout it, the hash " .
+ "of the CDB files is checked against
the upstream hash files.\n"
+ );
+ }
+ }
+ $this->params = $params;
+ $this->params['threads'] = isset( $params['threads'] ) ? $params['threads'] : 1;
+ $this->params['trustmtime'] = !empty( $params['trustmtime'] );
+ }
+
+ public function execute() {
+ $threads = $this->params['threads'];
+ if ( $threads < 1 || $threads != intval( $threads ) ) {
+ $this->output( "Invalid thread count specified; running
single-threaded.\n" );
+ $threads = 1;
+ }
+ if ( $threads > 1 && !function_exists( 'pcntl_fork' ) ) {
+ $this->output( "PHP pcntl extension is not present;
running single-threaded.\n" );
+ $threads = 1;
+ }
+ $directory = realpath( $this->params['directory'] );
+
+ // Get the list of JSON files to make CDB files from...
+ $files = array();
+ $nHandle = opendir( "$directory/upstream" );
+ if ( !$nHandle ) {
+ $this->error( "Could not open directory
'$directory/upstream'.", 1 ); // bail
+ }
+ while ( false !== ( $entry = readdir( $nHandle ) ) ) {
+ if ( $this->extensionFromPath( $entry ) === 'json' ) {
+ // Get the filename ending in CDB instead of JSON
+ $files[] = substr( $entry, 0, strrpos( $entry, '.' ) );
+ }
+ }
+ closedir( $nHandle );
+ if ( !count( $files ) ) {
+ $this->output( "Directory '$directory/upstream' is
empty.\n" );
+ return;
+ }
+
+ // Make results file
+ $tmpFileName = tempnam( "/tmp", get_class( $this ) );
+ if ( $tmpFileName === false ) {
+ $this->error( "Could not create temp file with
tempnam().", 1 ); // bail
+ }
+
+ // Initialise and split into chunks
+ $chunks = array_chunk( $files, ceil( count( $files ) / $threads ) );
+ $pids = array();
+ foreach ( $chunks as $fileBatch ) {
+ // Do not fork for only one thread
+ $pid = ( $threads > 1 ) ? pcntl_fork() : -1;
+ if ( $pid === 0 ) {
+ // Child, reseed due to http://bugs.php.net/bug.php?id=42465
+ mt_srand( getmypid() );
+ $this->doUpdate( $directory, $fileBatch, $tmpFileName );
+ return;
+ } elseif ( $pid === -1 ) {
+ // Fork failed or one thread, do it serialized
+ $this->doUpdate( $directory, $fileBatch, $tmpFileName );
+ } else {
+ // Main thread
+ $pids[] = $pid;
+ }
+ }
+ // Wait for all children
+ foreach ( $pids as $pid ) {
+ $status = 0;
+ pcntl_waitpid( $pid, $status );
+ }
+
+ // Collect results file
+ $numRebuilt = array_sum( explode( "\n", file_get_contents( $tmpFileName ) ) );
+ unlink( $tmpFileName );
+
+ if ( $numRebuilt > 0 ) {
+ $this->output( "Updated $numRebuilt CDB file(s) in
'$directory'.\n" );
+ }
+ }
+
+ /**
+ * @param string $directory
+ * @param array $files
+ * @param string $resFile
+ */
+ protected function doUpdate( $directory, array $files, $resFile ) {
+ $rebuilt = 0;
+ foreach ( $files as $file ) {
+ // Get the MD5 to match the CDB file upstream requests
+ if ( !is_file( "$directory/upstream/$file.MD5" ) ) {
+ $this->error( "Skipped file '$file'; no MD5
file." );
+ continue;
+ }
+ $cdbPath = "$directory/$file";
+ $jsonPath = "$directory/upstream/$file.json";
+ // Get the upstream JSON file timestamp and local CDB file timestamp
+ $jsonTimestamp = is_file( $jsonPath ) ? filemtime( $jsonPath ) : 0;
+ // Check if a rebuild is needed
+ if ( $this->params['trustmtime'] ) {
+ $cdbTimestamp = is_file( $cdbPath ) ? filemtime( $cdbPath ) : 0;
+ $needRebuild = ( $cdbTimestamp !== $jsonTimestamp );
+ } else {
+ $md5Upstream = file_get_contents( "$directory/upstream/$file.MD5" );
+ $md5Local = is_file( $cdbPath ) ? md5_file( $cdbPath ) : '';
+ $needRebuild = ( $md5Local !== $md5Upstream );
+ }
+ // If the hashes do not match, fetch the diff needed to update the CDB
+ if ( $needRebuild ) {
+ // @FIXME: stream this instead of loading to RAM
+ $data = json_decode( file_get_contents( $jsonPath ), true );
+ if ( $data === null ) {
+ $this->error( "Could not read
'$jsonPath'.", 1 ); // bail
+ }
+ // Open a temporary new CDB file
+ $tmpFileName = tempnam( "/tmp", $file );
+ if ( $tmpFileName === false ) {
+ $this->error( "Could not create temp
file with tempnam().", 1 ); // bail
+ }
+ $handle = dba_open( $tmpFileName, 'n', 'cdb_make' );
+ if ( $handle === false ) {
+ $this->error( "Could not open temp CDB
file '$tmpFileName'.", 1 ); // bail
+ }
+ // Convert the JSON data to CDB...
+ foreach ( $data as $key => $value ) {
+ dba_insert( $key, $value, $handle );
+ }
+ dba_close( $handle );
+ chmod( $tmpFileName, 0644 );
+ // Move the temporary CDB file over the old one..
+ if ( !rename( $tmpFileName, $cdbPath ) ) {
+ $this->error( "Unable to move the new
CDB file to '$file'.", 1 );
+ }
+ // Set its timestamp to match the upstream JSON file
+ touch( $cdbPath, $jsonTimestamp );
+ ++$rebuilt;
+ }
+ }
+ file_put_contents( $resFile, "$rebuilt\n", LOCK_EX | FILE_APPEND );
+ }
+
+ protected function extensionFromPath( $path ) {
+ $i = strrpos( $path, '.' );
+ return strtolower( $i ? substr( $path, $i + 1 ) : '' );
+ }
+
+ protected function output( $s ) {
+ print $s;
+ }
+
+ protected function error( $s, $die = 0 ) {
+ fwrite( STDERR, $s . "\n" );
+ $die = intval( $die );
+ if ( $die > 0 ) {
+ die( $die );
+ }
+ }
+}
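
mergeCdbFileUpdates expects the layout that refreshCdbJsonFiles (below) produces: the live CDB files in the target directory, plus a .json and .MD5 copy of each under its upstream/ subdirectory. A hedged sketch of that layout and of the invocation used by scap-rebuild-cdbs later in this change (the branch number and cache file name are illustrative):

    # cache/l10n/
    #   l10n_cache-en.cdb              <- live CDB file read by MediaWiki
    #   upstream/l10n_cache-en.json    <- JSON copy shipped by rsync
    #   upstream/l10n_cache-en.MD5     <- hash of the CDB the JSON was built from
    mergeCdbFileUpdates --directory="$MW_COMMON/php-1.23wmf12/cache/l10n" \
        --trustmtime --threads=4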
diff --git a/mw-update-l10n b/mw-update-l10n
new file mode 100755
index 0000000..81d5c40
--- /dev/null
+++ b/mw-update-l10n
@@ -0,0 +1,104 @@
+#!/bin/bash
+
+# WARNING: make sure to run any PHP scripts coming from MediaWiki under a
+# dedicated user such as `mwdeploy`. This script is running as user `root` on
+# the beta cluster.
+
+. /usr/local/lib/mw-deployment-vars.sh
+BINDIR=/usr/local/bin
+
+function die() {
+ if [ -n "$*" ]; then
+ echo >&2 "$*"
+ else
+ echo >&2 "Update of MediaWiki localisation messages failed"
+ fi
+ exit 1
+}
+
+if [ "`uname -s`" != Linux ]; then
+ die "ERROR: This script requires the Linux operating system to function
correctly"
+fi
+
+QUIET=--quiet
+TEMP=`getopt -o '' -l verbose -- "$@"`
+if [ $? -ne 0 ]; then
+ exit 1
+fi
+eval set -- "$TEMP"
+while true; do
+ case "$1" in
+ --verbose)
+ QUIET=
+ shift
+ ;;
+ --)
+ shift
+ break
+ esac
+done
+
+# Calculate the number of parallel threads
+# Leave a couple of cores free for other stuff
+CPUS=`grep -c 'model name' /proc/cpuinfo`
+THREADS=`expr $CPUS - 2`
+
+if [ $THREADS -lt 1 ]; then
+ THREADS=1
+fi
+
+mwExtVerDbSets=$($BINDIR/mwversionsinuse --extended --withdb) || die
+# Regenerate the extension message file list for all active MediaWiki versions
+for i in ${mwExtVerDbSets[@]}; do
+ mwVerNum=${i%=*}
+ mwDbName=${i#*=}
+
+ # Only look at certain MW versions if specified
+ if [ -n "$MW_VERSIONS_SYNC" ]; then
+ skip=1
+ for v in ${MW_VERSIONS_SYNC[@]}; do
+ if [ "$mwVerNum" == "$v" ]; then
+ skip=0
+ fi
+ done
+ if [ "$skip" -eq "1" ]; then
+ continue
+ fi
+ fi
+
+ echo -n "Updating ExtensionMessages-$mwVerNum.php..."
+ mwTempDest=$(sudo -u apache mktemp) || die
+ sudo -u apache $BINDIR/mwscript mergeMessageFileList.php --wiki="$mwDbName" \
+ --list-file=$MW_COMMON_SOURCE/wmf-config/extension-list $QUIET --output="$mwTempDest" \
+ || die
+ sudo -u apache chmod 664 "$mwTempDest" || die
+ cp "$mwTempDest"
$MW_COMMON_SOURCE/wmf-config/ExtensionMessages-"$mwVerNum".php || die
+ sudo -u apache rm "$mwTempDest" || die
+ echo "done"
+
+ # Update ExtensionMessages-*.php in the local copy
+ # Though only when commons are different files. Beta has everything on the same
+ # shared filesystem and thus cp would complain about copying to the same file.
+ if [ `readlink -f $MW_COMMON_SOURCE` != `readlink -f $MW_COMMON` ]; then
+ echo "Copying to local copy..."
+ sudo -u mwdeploy \
+ cp $MW_COMMON_SOURCE/wmf-config/ExtensionMessages-"$mwVerNum".php $MW_COMMON/wmf-config/ \
+ || die
+ echo "done"
+ fi
+
+ # Rebuild all the CDB files for each language
+ echo -n "Updating LocalisationCache for $mwVerNum... "
+ sudo -u l10nupdate $BINDIR/mwscript rebuildLocalisationCache.php --wiki="$mwDbName" \
+ --outdir=$MW_COMMON_SOURCE/php-$mwVerNum/cache/l10n $QUIET \
+ --threads=$THREADS \
+ || die
+
+ # Include JSON versions of the CDB files and add MD5 files
+ sudo -u l10nupdate $BINDIR/refreshCdbJsonFiles --directory="$MW_COMMON_SOURCE/php-$mwVerNum/cache/l10n" \
+ --threads=$THREADS \
+ || die
+
+ echo "done"
+done
+
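
mw-update-l10n assumes that `mwversionsinuse --extended --withdb` prints one version=dbname pair per active branch, which the `${i%=*}` and `${i#*=}` expansions then split. A small sketch of that parsing with made-up values:

    # Hypothetical output of "mwversionsinuse --extended --withdb"
    mwExtVerDbSets="1.23wmf11=testwiki 1.23wmf12=aawiki"
    for i in $mwExtVerDbSets; do
        mwVerNum=${i%=*}   # drop the shortest "=*" suffix  -> 1.23wmf11
        mwDbName=${i#*=}   # drop the shortest "*=" prefix  -> testwiki
        echo "branch $mwVerNum will be scripted against $mwDbName"
    done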
diff --git a/mwversionsinuse b/mwversionsinuse
new file mode 100755
index 0000000..e09c219
--- /dev/null
+++ b/mwversionsinuse
@@ -0,0 +1,7 @@
+#!/bin/bash
+# Shell wrapper for the local version of multiversion/activeMWVersions.
+# This script belongs in /usr/bin/ and should be in PATH.
+. /usr/local/lib/mw-deployment-vars.sh
+if ! "${MW_COMMON}/multiversion/activeMWVersions" "$@"; then
+ exit 1
+fi
diff --git a/refreshCdbJsonFiles b/refreshCdbJsonFiles
new file mode 100755
index 0000000..9fd4c13
--- /dev/null
+++ b/refreshCdbJsonFiles
@@ -0,0 +1,231 @@
+#!/usr/bin/env php
+<?php
+
+/**
+ * Create JSON/MD5 files for all CDB files in a directory
+ *
+ * This program is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation; either version 2 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License along
+ * with this program; if not, write to the Free Software Foundation, Inc.,
+ * 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+ * http://www.gnu.org/copyleft/gpl.html
+ *
+ * @file
+ * @author Aaron Schulz
+ */
+
+if ( PHP_SAPI !== 'cli' ) {
+ die( "This is not a valid entry point.\n" );
+}
+
+$script = new RefreshCdbJsonFiles(
+ getopt( '', array( 'directory:', 'threads:' ) )
+);
+$script->execute();
+
+/**
+ * Create JSON/MD5 files for all CDB files in a directory
+ *
+ * This will put a JSON and MD5 file in /upstream for each CDB file.
+ *
+ * This can be combined with rsync and the mergeCdbFileUpdates.php script to push out
+ * large CDB files with minimal traffic. CDB files change drastically with small key/value
+ * changes, whereas JSON files do not, and thus they diff/rdiff much better.
+ *
+ * When pushing updates with rsync, this should be run before running rsync.
+ * The rsync command should exclude CDB files or at least use --ignore-existing.
+ * After the rsync is done, mergeCdbFileUpdates.php can be run on each server to
+ * apply the updates to the CDB files.
+ */
+class RefreshCdbJsonFiles {
+ /** @var array */
+ protected $params = array();
+
+ public function __construct( array $params ) {
+ foreach ( array( 'directory' ) as $par ) {
+ if ( !isset( $params[$par] ) ) {
+ die( "Usage: refreshCdbJsonFiles " .
+ "--directory <directory> --threads
<integer>\n"
+ );
+ }
+ }
+ $this->params = $params;
+ $this->params['threads'] = isset( $params['threads'] ) ? $params['threads'] : 1;
+ }
+
+ public function execute() {
+ $threads = $this->params['threads'];
+ if ( $threads < 1 || $threads != intval( $threads ) ) {
+ $this->output( "Invalid thread count specified; running
single-threaded.\n" );
+ $threads = 1;
+ }
+ if ( $threads > 1 && !function_exists( 'pcntl_fork' ) ) {
+ $this->output( "PHP pcntl extension is not present;
running single-threaded.\n" );
+ $threads = 1;
+ }
+
+ $directory = realpath( $this->params['directory'] );
+ if ( !is_dir( $directory ) ) {
+ $this->error( "Invalid directory '$directory'.", 1 );
// bail
+ }
+
+ if ( !is_dir( "$directory/upstream" ) && !mkdir( "$directory/upstream" ) ) {
+ $this->error( "Could not make directory '$directory/upstream'.", 1 ); // bail
+ }
+
+ $files = array();
+ $handle = opendir( $directory );
+ if ( !$handle ) {
+ $this->error( "Could not open directory '$directory'.",
1 ); // bail
+ }
+ while ( false !== ( $entry = readdir( $handle ) ) ) {
+ if ( $this->extensionFromPath( $entry ) === 'cdb' ) {
+ $files[] = $entry;
+ }
+ }
+ closedir( $handle );
+ if ( !count( $files ) ) {
+ $this->output( "Directory '$directory' is empty.\n" );
+ return;
+ }
+
+ // Make results file
+ $tmpFileName = tempnam( "/tmp", get_class( $this ) );
+ if ( $tmpFileName === false ) {
+ $this->error( "Could not create temp file with
tempnam().", 1 ); // bail
+ }
+
+ // Initialise and split into chunks
+ $chunks = array_chunk( $files, ceil( count( $files ) / $threads ) );
+ $pids = array();
+ foreach ( $chunks as $fileBatch ) {
+ // Do not fork for only one thread
+ $pid = ( $threads > 1 ) ? pcntl_fork() : -1;
+ if ( $pid === 0 ) {
+ // Child, reseed due to http://bugs.php.net/bug.php?id=42465
+ mt_srand( getmypid() );
+ $this->doUpdate( $directory, $fileBatch, $tmpFileName );
+ return;
+ } elseif ( $pid === -1 ) {
+ // Fork failed or one thread, do it serialized
+ $this->doUpdate( $directory, $fileBatch, $tmpFileName );
+ } else {
+ // Main thread
+ $pids[] = $pid;
+ }
+ }
+ // Wait for all children
+ foreach ( $pids as $pid ) {
+ $status = 0;
+ pcntl_waitpid( $pid, $status );
+ }
+
+ // Collect results file
+ $numUpdated = array_sum( explode( "\n", file_get_contents( $tmpFileName ) ) );
+ unlink( $tmpFileName );
+
+ if ( $numUpdated > 0 ) {
+ $this->output( "Updated $numUpdated JSON file(s) in
'$directory'.\n" );
+ }
+ }
+
+ /**
+ * @param string $directory
+ * @param array $files
+ * @param string $resFile
+ */
+ protected function doUpdate( $directory, array $files, $resFile ) {
+ $rebuilt = 0;
+ foreach ( $files as $file ) {
+ $newCdbMd5 = md5_file( "$directory/$file" );
+ if ( is_file( "$directory/upstream/$file.MD5" ) &&
+ is_file( "$directory/upstream/$file.json" ) &&
+ file_get_contents( "$directory/upstream/$file.MD5" ) === $newCdbMd5
+ ) {
+ continue;
+ }
+
+ // Open the CDB file for iteration
+ $cdbHandle = dba_open( "$directory/$file", 'r-', 'cdb' );
+ if ( $cdbHandle === false ) {
+ $this->error( "Could not open CDB file
'$directory/$file'." );
+ continue;
+ }
+ // Convert the CDB file to JSON...
+ $tmpFileName = tempnam( "/tmp", get_class( $this ) );
+ if ( $tmpFileName === false ) {
+ $this->error( "Could not create temp file with
tempnam()." );
+ continue;
+ }
+ $jsonHandle = fopen( $tmpFileName, 'wb+' );
+ if ( $jsonHandle === false ) {
+ $this->error( "Could not open '$tmpFileName'."
);
+ continue;
+ }
+ $bytes = 0;
+ $first = true;
+ fwrite( $jsonHandle, '{' );
+ for ( $k = dba_firstkey( $cdbHandle ); $k !== false; $k = dba_nextkey( $cdbHandle ) ) {
+ $key = json_encode( $k );
+ $val = json_encode( dba_fetch( $k, $cdbHandle ) );
+ $data = "$key:$val";
+ if ( $first ) {
+ $first = false;
+ } else {
+ $data = ",\n" . $data;
+ }
+ fwrite( $jsonHandle, $data );
+ $bytes += strlen( $data );
+ }
+ fwrite( $jsonHandle, '}' );
+ fclose( $jsonHandle );
+ dba_close( $cdbHandle );
+ // Sanity check file size
+ if ( filesize( $tmpFileName ) !== ( $bytes + 2 ) ) {
+ unlink( $tmpFileName );
+ $this->error( "Failed to write '$tmpFileName'."
);
+ continue;
+ }
+ // Create the MD5 and JSON files...
+ $bytes = file_put_contents( "$directory/upstream/$file.MD5", $newCdbMd5, LOCK_EX );
+ if ( $bytes !== strlen( $newCdbMd5 ) ) {
+ unlink( $tmpFileName );
+ $this->error( "Could not write '$file.MD5'." );
+ continue;
+ }
+ chmod( $tmpFileName, 0644 );
+ if ( !rename( $tmpFileName, "$directory/upstream/$file.json" ) ) {
+ $this->error( "Could not write '$directory/upstream/$file.json'." );
+ continue;
+ }
+ ++$rebuilt;
+ }
+ file_put_contents( $resFile, "$rebuilt\n", LOCK_EX | FILE_APPEND );
+ }
+
+ protected function extensionFromPath( $path ) {
+ $i = strrpos( $path, '.' );
+ return strtolower( $i ? substr( $path, $i + 1 ) : '' );
+ }
+
+ protected function output( $s ) {
+ print $s;
+ }
+
+ protected function error( $s, $die = 0 ) {
+ fwrite( STDERR, $s . "\n" );
+ $die = intval( $die );
+ if ( $die > 0 ) {
+ die( $die );
+ }
+ }
+}
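
The docblock above describes the intended push pipeline: generate small JSON/MD5 companions on the deployment host, rsync those instead of the large CDB files, and rebuild the CDBs locally on every target. A hedged outline of the sequence (paths and the host are placeholders; in this change the transport is actually scap-1/scap-2 and the rebuild step is scap-rebuild-cdbs):

    # 1. Deployment host: write upstream/*.json and upstream/*.MD5 next to the CDBs
    refreshCdbJsonFiles --directory=/path/to/cache/l10n --threads=4
    # 2. Ship the diff-friendly JSON files, leaving the bulky CDBs out of the transfer
    rsync -a --exclude='*.cdb' /path/to/cache/l10n/ target.example.org::common/cache/l10n/
    # 3. Each target: rebuild only the CDBs whose upstream MD5 no longer matches
    mergeCdbFileUpdates --directory=/path/to/cache/l10n --threads=4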
diff --git a/restart-twemproxy b/restart-twemproxy
new file mode 100755
index 0000000..41af489
--- /dev/null
+++ b/restart-twemproxy
@@ -0,0 +1,24 @@
+#!/bin/bash
+. /usr/local/lib/mw-deployment-vars.sh
+BINDIR="/usr/local/bin"
+
+if [ ! -S "$SSH_AUTH_SOCK" ]; then
+ echo >&2 "SSH_AUTH_SOCK not set or not pointing to a socket."
+ echo >&2 "Did you start your ssh-agent?"
+ exit 1
+fi
+
+# No use going any further if we can't complete the sync
+[ $(which dsh 2>/dev/null) ] || {
+ echo >&2 "Error: no dsh on this host, aborting"
+ exit 1
+}
+
+# Restart twemproxy
+echo 'Restarting twemproxy on all servers'
+dsh -cM -g mediawiki-installation -o -oSetupTimeout=30 -F8 -- "sudo /sbin/restart twemproxy"
+
+echo 'Finished'
+
+# Notify
+$BINDIR/dologmsg "!log $USER restarted twemproxy on all servers"
diff --git a/scap b/scap
new file mode 100755
index 0000000..d43c97c
--- /dev/null
+++ b/scap
@@ -0,0 +1,121 @@
+#!/bin/bash
+
+# Acquire a non-blocking lock to prevent parallel runs
+exec 200>/var/lock/scap || exit 1
+flock -n 200 || { echo "WARNING: could not acquire /var/lock/scap; scap is already running." >&2; exit 1; }
+
+SCAP_START=$(date +%s)
+NODEFILE=
+
+function cleanup() {
+ if [ -n "$NODEFILE" ]; then
+ rm -f "$NODEFILE"
+ fi
+ # The lock is automatically released on exit, but do it here for good measure
+ flock -u 200
+}
+
+function die() {
+ cleanup
+ if [ -n "$*" ]; then
+ echo >&2 "$*"
+ else
+ echo >&2 "sync failed"
+ fi
+ exit 1
+}
+
+. /usr/local/lib/mw-deployment-vars.sh
+
+BINDIR=/usr/local/bin
+
+if [ ! -S "$SSH_AUTH_SOCK" ]; then
+ die "SSH_AUTH_SOCK not set or not pointing to a socket. Did you start
your ssh-agent?"
+fi
+
+DSH_EXPORTS=
+# Only sync the active version(s) if requested
+if [[ "$1" == --versions=?* ]]; then
+ versions="${1#--versions=}"
+ shift
+ if [ "$versions" == "active" ]; then
+ # All active MW versions
+ export MW_VERSIONS_SYNC=$($BINDIR/mwversionsinuse --home)
+ elif [ -d "$MW_COMMON_SOURCE/php-$versions" ]; then
+ # A specific MW version
+ export MW_VERSIONS_SYNC="$versions"
+ else
+ die "Invalid MediaWiki version \"$versions\""
+ fi
+ unset versions
+ # This will export MW_VERSIONS_SYNC to scap-1 on the proxies/servers
+ echo "MediaWiki versions selected for sync (via --versions):
$MW_VERSIONS_SYNC"
+ DSH_EXPORTS="export MW_VERSIONS_SYNC=\"$MW_VERSIONS_SYNC\";"
+else
+ echo "Syncing all versions."
+fi
+
+if [[ "$MW_SCAP_BETA" == "1" ]]; then
+ DSH_EXPORTS="export MW_SCAP_BETA=1; $DSH_EXPORTS"
+fi
+
+# Perform syntax check
+echo -n "Checking syntax of wmf-config and multiversion..."
+if ( ! ( $BINDIR/lint $MW_COMMON_SOURCE/wmf-config && $BINDIR/lint $MW_COMMON_SOURCE/multiversion ) ); then
+ die "Found syntax errors, cannot sync."
+fi
+echo " done"
+
+# Update the current machine so that serialization works.
+# Push wikiversions.dat changes so mwversionsinuse, set-group-write,
+# and mwscript work with the right version of the files.
+/usr/local/bin/sync-common || die
+
+# Update list of extension message files and regenerate
+# the localisation cache
+/usr/local/bin/mw-update-l10n || die
+
+# Notify
+$BINDIR/dologmsg "!log $USER started scap${*:+: }$*"
+
+# Disable logging
+export DOLOGMSGNOLOG=1
+
+echo 'Updating rsync proxies...'
+dsh -cM -g scap-proxies -o -oSetupTimeout=10 -- "$DSH_EXPORTS /usr/local/bin/scap-1"
+echo 'Finished'
+
+# Do the main code update in random order to avoid overloading any given rsync server
+NODEFILE=$(mktemp)
+shuf < /etc/dsh/group/mediawiki-installation > "$NODEFILE" || die
+
+RSYNC_SERVERS=`sed 's/^#.*//' /etc/dsh/group/scap-proxies` || die
+# Condense whitespace
+RSYNC_SERVERS=`echo $RSYNC_SERVERS`
+
+echo 'Copying code to apaches...'
+dsh -F40 -cM -f "$NODEFILE" -o -oSetupTimeout=10 -- "$DSH_EXPORTS /usr/local/bin/scap-1 \""$RSYNC_SERVERS"\""
+echo 'Finished'
+
+echo 'Rebuilding CDB files from /upstream...'
+dsh -cM -g mediawiki-installation -o -oSetupTimeout=10 -- "$DSH_EXPORTS /usr/local/bin/scap-rebuild-cdbs"
+echo 'Finished'
+
+# Builds wikiversions.cdb and syncs it to the apaches with the dat file.
+# This is done after all else so that deploying new MW versions is easier.
+sync-wikiversions || die
+
+SCAP_END=$(date +%s)
+DIFF=$((SCAP_END-SCAP_START))
+MINS=$(((DIFF/60)%60))
+SECS=$((DIFF%60))
+printf -v HUMAN_DIFF "%02dm %02ds" $MINS $SECS
+
+echo "scap completed in $HUMAN_DIFF."
+
+export DOLOGMSGNOLOG=""
+$BINDIR/dologmsg "!log $USER finished scap${*:+: }$* (duration: $HUMAN_DIFF)"
+$BINDIR/deploy2graphite scap $DIFF
+
+cleanup
+exit 0
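
The lock at the top of scap uses the usual flock idiom: open a dedicated file descriptor with exec, take a non-blocking lock on it, and rely on the kernel to release the lock when the process exits for any reason. A minimal sketch of the same pattern (the lock path here is an example, not the one scap uses):

    exec 200>/var/lock/example || exit 1   # keep fd 200 open for the life of the script
    if ! flock -n 200; then                # -n: fail at once instead of waiting for the lock
        echo "another instance is already running" >&2
        exit 1
    fi
    # ... critical section ...
    flock -u 200                           # optional; exiting drops the lock anyway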
diff --git a/scap-1 b/scap-1
new file mode 100755
index 0000000..913cc2f
--- /dev/null
+++ b/scap-1
@@ -0,0 +1,31 @@
+#!/bin/bash
+
+. /usr/local/lib/mw-deployment-vars.sh
+
+if [ ! -d "${MW_COMMON}" ];then
+ if ! install -d -o mwdeploy -g mwdeploy "${MW_COMMON}"; then
+ echo "Unable to create ${MW_COMMON}, please re-run this script
as root."
+ exit 1
+ fi
+fi
+
+if [ ! -d /usr/local/apache/uncommon ];then
+ if ! install -d -o mwdeploy -g mwdeploy /usr/local/apache/uncommon; then
+ echo "Unable to create /usr/local/apache/uncommon, please
re-run this script as root."
+ exit 1
+ fi
+fi
+
+RSYNC_SERVERS="$1"
+SERVER=
+if [ -n "$RSYNC_SERVERS" ]; then
+ SERVER=$(sudo /usr/local/bin/find-nearest-rsync $RSYNC_SERVERS)
+fi
+if [ -z "$SERVER" ]; then
+ SERVER="${MW_RSYNC_HOST}"
+fi
+
+sudo -u mwdeploy MW_VERSIONS_SYNC="$MW_VERSIONS_SYNC" MW_SCAP_BETA="$MW_SCAP_BETA" /usr/local/bin/scap-2 "$SERVER"
+
+echo Done
+exit 0
diff --git a/scap-2 b/scap-2
new file mode 100755
index 0000000..7e16985
--- /dev/null
+++ b/scap-2
@@ -0,0 +1,31 @@
+#!/bin/bash
+
+BINDIR=/usr/local/bin
+PATH=/bin:/usr/bin:/sbin:/usr/sbin:
+TERM=dumb
+
+. /usr/local/lib/mw-deployment-vars.sh
+
+SERVER="$1"
+if [ -z "$SERVER" ]; then
+ SERVER="${MW_RSYNC_HOST}"
+fi
+
+RSYNC_ARGS=("${MW_RSYNC_ARGS[@]}")
+# Only look at certain MW versions if specified
+if [ -n "$MW_VERSIONS_SYNC" ]; then
+ for v in ${MW_VERSIONS_SYNC[@]}; do
+ RSYNC_ARGS+=("--include='php-$v/'")
+ done
+ RSYNC_ARGS+=("--exclude='php-*/'")
+fi
+
+echo -n Copying to `hostname -s` from "$SERVER"...
+if rsync "${RSYNC_ARGS[@]}" "$SERVER"::common/ "${MW_COMMON}"
+then
+ echo "ok"
+else
+ echo "failed"
+ exit 1
+fi
+
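
When MW_VERSIONS_SYNC is set, scap-2 narrows the rsync to the selected branches by listing include rules for those directories ahead of a blanket exclude of php-*/; rsync filter rules are first-match, so everything outside the php-* trees still syncs normally. A hedged standalone equivalent (branch number, host and destination are placeholders):

    rsync -a --include='php-1.23wmf12/' --exclude='php-*/' \
        rsync-proxy.example.org::common/ /usr/local/apache/common-local/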
diff --git a/scap-rebuild-cdbs b/scap-rebuild-cdbs
new file mode 100755
index 0000000..01845b3
--- /dev/null
+++ b/scap-rebuild-cdbs
@@ -0,0 +1,25 @@
+#!/bin/bash
+
+BINDIR=/usr/local/bin
+PATH=/bin:/usr/bin:/sbin:/usr/sbin:
+TERM=dumb
+
+. /usr/local/lib/mw-deployment-vars.sh
+
+# Leave some of the cores free for apache processes
+CPUS=`grep -c 'model name' /proc/cpuinfo`
+THREADS=`expr $CPUS / 2`
+
+# Only look at certain MW versions if specified
+if [ -n "$MW_VERSIONS_SYNC" ]; then
+ mwVersions=$MW_VERSIONS_SYNC
+else
+ mwVersions=$($BINDIR/mwversionsinuse)
+fi
+
+# Rebuild the CDB files from the JSON versions
+# Regenerate the extension message file list for all active MediaWiki versions
+for mwVerNum in ${mwVersions[@]}; do
+ sudo -u mwdeploy TERM="$TERM" $BINDIR/mergeCdbFileUpdates
--directory="$MW_COMMON/php-$mwVerNum/cache/l10n" \
+ --trustmtime --threads=$THREADS
+done
diff --git a/scap-recompile b/scap-recompile
new file mode 100644
index 0000000..60a1d4c
--- /dev/null
+++ b/scap-recompile
@@ -0,0 +1,60 @@
+#!/bin/bash
+
+PATH=/bin:/usr/bin:/sbin:/usr/sbin:
+TERM=dumb
+
+# wikiversions.dat just synced above
+mwVersionNums=$(mwversionsinuse)
+if [ -z "$mwVersionNums" ]; then
+ echo "Unable to read wikiversions.dat or it is empty"
+ exit 1
+fi
+
+arr=($mwVersionNums)
+mwVerNum=${arr[1]}
+
+echo -n "MediaWiki: Compiling texvc..."
+builddir=`mktemp -dt texvc-build.XXXXXXXXXX`
+if [ -z "$builddir" ]; then
+ echo "Unable to create temporary directory"
+ exit 1
+fi
+
+mwIP=/usr/local/apache/common-local/php-"$mwVerNum"
+MATHPATH=$mwIP/extensions/Math/math
+rsync -r --exclude=.git/ $MATHPATH/ "$builddir"
+cd "$builddir"
+if make -f Makefile texvc >/dev/null 2>/dev/null; then
+ echo "ok"
+ install -d /usr/local/apache/uncommon/bin
+ install -m 755 "$builddir"/texvc /usr/local/apache/uncommon/bin
+else
+ echo "failed"
+ exit 1
+fi
+
+rm -r "$builddir"
+
+echo -n "MediaWiki: Compiling texvcheck..."
+builddir=`mktemp -dt texvccheck-build.XXXXXXXXXX`
+cd "$builddir"
+
+if [ -z "$builddir" ]; then
+ echo "Unable to create temporary directory"
+ exit 1
+fi
+
+MATHPATH=$mwIP/extensions/Math/texvccheck
+rsync -r --exclude=.git/ $MATHPATH/ "$builddir"
+
+if make -f Makefile texvccheck >/dev/null 2>/dev/null; then
+ echo "ok"
+ install -d /usr/local/apache/uncommon/bin
+ install -m 755 "$builddir"/texvccheck /usr/local/apache/uncommon/bin
+else
+ echo "failed"
+ exit 1
+fi
+
+rm -r "$builddir"
+cd /
diff --git a/scappy b/scappy
new file mode 100755
index 0000000..55de8fd
--- /dev/null
+++ b/scappy
@@ -0,0 +1,181 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+"""
+ Heterogeneous deployment script
+ Deploys MediaWiki code and configuration to a group of servers via rsync.
+
+"""
+import argparse
+import contextlib
+import fcntl
+import imp
+import logging
+import os
+import pipes
+import random
+import socket
+import struct
+import subprocess
+import tempfile
+import time
+
+
+
+class IRCSocketHandler(logging.Handler):
+ """Log handler for logmsgbot on #wikimedia-operation."""
+
+ def __init__(self, host, port, timeout=1.0):
+ super(IRCSocketHandler, self).__init__()
+ self.addr = (host, port)
+ self.level = logging.INFO
+ self.timeout = timeout
+
+ def emit(self, record):
+ message = '!log %s %s' % (os.getlogin(), record.getMessage())
+ try:
+ sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+ sock.settimeout(self.timeout)
+ sock.connect(self.addr)
+ sock.sendall(message.encode('utf-8'))
+ sock.close()
+ except (socket.timeout, socket.error, socket.gaierror):
+ self.handleError(record)
+
+
+log = logging.getLogger('scap')
+log.setLevel(logging.DEBUG)
+log.addHandler(IRCSocketHandler('neon.wikimedia.org', 9200))
+log.addHandler(logging.StreamHandler())
+
+
+def shell_map(mapping):
+ """Convert a map to a string of space-separated KEY=VALUE pairs."""
+ return ' '.join('%s=%s' % (k, pipes.quote(v)) for k, v in mapping.items())
+
+
+def get_config():
+ """Load environment variables from mw-deployment-vars.sh."""
+ dep_env = imp.load_source('__env', '/usr/local/lib/mw-deployment-vars.sh')
+ return {k: v for k, v in dep_env.__dict__.items() if k.startswith('MW_')}
+
+
[email protected]
+def lock(filename):
+ """Context manager. Acquires a file lock on entry, releases on exit."""
+ with open(filename, 'w+') as lock_fd:
+ fcntl.lockf(lock_fd, fcntl.LOCK_EX | fcntl.LOCK_NB)
+ try:
+ yield
+ finally:
+ fcntl.lockf(lock_fd, fcntl.LOCK_UN)
+
+
+def cdb_items(buf):
+ """Iterates over CDB key/value pairs."""
+ table_start, = struct.unpack_from('<L', buf)
+ offset = 2048
+ while offset < table_start:
+ lengths = struct.unpack_from('<LL', buf, offset)
+ offset += 8
+ yield struct.unpack_from('%ds %ds' % lengths, buf, offset)
+ offset += sum(lengths)
+
+
+def get_branches(wikiversions_cdb_path):
+ """Get the set of active branches from a wikiversions.cdb file."""
+ with open(wikiversions_cdb_path, 'rb') as cdb_file:
+ cdb = cdb_file.read()
+ return {v for k, v in cdb_items(cdb) if k.startswith('ver:')}
+
+
+def dsh(command, group, exports=None):
+ """Run a command on multiple hosts via DSH."""
+ if exports:
+ command = '%s %s' % (shell_map(exports), command)
+ group_file = os.path.join('/etc/dsh/group', group)
+ return subprocess.check_call(['/usr/bin/dsh', '-F40', '-cM', '-f',
+ group_file, '-o', '-oSetupTimeout=10', '--',
+ command.strip()])
+
+
+def check_syntax(*paths):
+ """Run lint.php on `paths`; raise CalledProcessError if nonzero exit."""
+ command = ['/usr/bin/php', '-n', '-dextension=parsekit.so',
+ '/usr/local/bin/lint.php'] + list(paths)
+ return subprocess.check_call(command)
+
+
+def parse_args():
+ """Parse command-line arguments."""
+ parser = argparse.ArgumentParser(description='Deploy MediaWiki')
+ parser.add_argument('--active', action='store_true', default=False,
+ help='only sync active branches')
+ parser.add_argument('message', nargs=argparse.REMAINDER)
+ return parser.parse_args()
+
+
+def scap():
+ """Deploy MediaWiki code and configuration."""
+ if 'SSH_AUTH_SOCK' not in os.environ:
+ raise RuntimeError('SSH_AUTH_SOCK is unset. Is your agent running?')
+
+ start = time.time()
+ config = get_config()
+ env = {}
+ args = parse_args()
+ message = ' '.join(args.message) or '(no message)'
+ if args.active:
+ branches = get_branches('%(MW_COMMON)s/wikiversions.cdb' % config)
+ env['MW_VERSIONS_SYNC'] = ' '.join(branches)
+
+ with lock('/var/lock/scap'):
+ # Perform syntax check
+ log.debug('Checking syntax of wmf-config and multiversion')
+ check_syntax('%(MW_COMMON_SOURCE)s/wmf-config' % config)
+ check_syntax('%(MW_COMMON_SOURCE)s/multiversion' % config)
+
+ # Update the current machine so that serialization works.
+ # Push wikiversions.dat changes so mwversionsinuse, set-group-write,
+ # and mwscript work with the right version of the files.
+ subprocess.check_call('/usr/local/bin/sync-common')
+
+ # Update list of extension message files and regenerate
+ # the localisation cache
+ subprocess.check_call('/usr/local/bin/mw-update-l10n')
+
+ # Notify
+ log.info('started scap: %s', message)
+
+ log.debug('updating rsync proxies')
+ dsh('/usr/local/bin/scap-1', 'scap-proxies', env)
+
+ with open('/etc/dsh/group/scap-proxies') as f:
+ rsync_servers = ' '.join(
+ ln.strip() for ln in f if not ln.startswith('#'))
+
+ with open('/etc/dsh/group/mediawiki-installation', 'rt') as f:
+ # Randomize the order of target machines
+ hosts = [ln for ln in f if not ln.startswith('#')]
+ random.shuffle(hosts)
+ with tempfile.NamedTemporaryFile(prefix='scap') as tmp:
+ tmp.write(''.join(hosts))
+ tmp.flush()
+ log.debug('copying code to apaches')
+ dsh('/usr/local/bin/scap-1 "%s"' % rsync_servers,
+ tmp.name, env)
+
+ log.debug('rebuilding CDB files from /upstream')
+ dsh('/usr/local/bin/scap-rebuild-cdbs', tmp.name, env)
+
+ # Builds wikiversions.cdb and syncs it to the apaches with the dat
+ # file. This is done after all else so that deploying new MW versions
+ # is easier.
+ subprocess.check_call('sync-wikiversions')
+
+ stop = time.time()
+ duration = '%02dm %02ds' % divmod(stop - start, 60)
+ log.info('finished scap: %s (duration: %s)', message, duration)
+ # TODO: record scap in graphite
+
+
+if __name__ == '__main__':
+ scap()
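
scappy is the in-progress Python port of the scap entry point above: same lock, lint, sync-common, mw-update-l10n, dsh fan-out and wikiversions sync, with --active reading wikiversions.cdb and exporting MW_VERSIONS_SYNC to the child scripts. A hedged invocation sketch based on the argparse definition (the log messages are placeholders):

    scappy --active "config change, active branches only"
    scappy "full scap of all branches"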
diff --git a/sync-common b/sync-common
new file mode 100755
index 0000000..b3f1056
--- /dev/null
+++ b/sync-common
@@ -0,0 +1,2 @@
+#!/bin/bash
+/usr/local/bin/scap-1
diff --git a/sync-common-all b/sync-common-all
new file mode 100755
index 0000000..c860b3f
--- /dev/null
+++ b/sync-common-all
@@ -0,0 +1,2 @@
+#!/bin/bash
+/usr/local/bin/scap
diff --git a/sync-common-file b/sync-common-file
new file mode 100755
index 0000000..cd95b16
--- /dev/null
+++ b/sync-common-file
@@ -0,0 +1,55 @@
+#!/bin/bash
+## synchronizes a single file from common on all apaches
+
+. /usr/local/lib/mw-deployment-vars.sh
+BINDIR=/usr/local/bin
+
+[ $# -lt 1 ] && {
+ echo >&2 "usage: sync-common-file file [message]"
+ exit 1
+}
+
+if [ ! -S "$SSH_AUTH_SOCK" ]; then
+ echo >&2 "SSH_AUTH_SOCK not set or not pointing to a socket."
+ echo >&2 "Did you start your ssh-agent?"
+ exit 1
+fi
+
+DIR=$1
+shift
+if [ -n "$1" ]; then
+ MESSAGE=" '"$@"'"
+else
+ MESSAGE=
+fi
+
+[ -e "$MW_COMMON_SOURCE/$DIR" ] || {
+ echo >&2 "$MW_COMMON_SOURCE/$DIR does not exist!"
+ exit 1
+}
+
+# No use going any further if we can't complete the sync
+[ $(which dsh 2>/dev/null) ] || {
+ echo >&2 "no dsh on this host, aborting"
+ exit 1
+}
+
+# copy file to all apaches in node mediawiki-installation
+echo >&2 "copying to apaches"
+
+if [ -d "$MW_COMMON_SOURCE/$DIR" ]; then
+ DESTDIR=$MW_COMMON/$DIR
+ dsh "${MW_DSH_ARGS[@]}" -- "
+ sudo -u mwdeploy rsync ${MW_RSYNC_ARGS[*]} --exclude=cache/l10n $MW_RSYNC_HOST::common/$DIR/ $DESTDIR
+ "
+else
+ DESTDIR=$(dirname $MW_COMMON/$DIR)
+ dsh "${MW_DSH_ARGS[@]}" -- "
+ sudo -u mwdeploy rsync -l $MW_RSYNC_HOST::common/$DIR $DESTDIR
+ "
+fi
+
+# Notify (one line only)
+$BINDIR/dologmsg "!log $USER synchronized $DIR $MESSAGE"
+$BINDIR/deploy2graphite sync-common-file
+exit 0
diff --git a/sync-dblist b/sync-dblist
new file mode 100755
index 0000000..a9e847f
--- /dev/null
+++ b/sync-dblist
@@ -0,0 +1,12 @@
+#!/bin/bash
+
+. /usr/local/lib/mw-deployment-vars.sh
+BINDIR=/usr/local/bin
+echo "Synchronizing $MW_COMMON_SOURCE/*.dblist to $MW_COMMON/*.dblist..."
+echo "mediawiki-installation:"
+
+dsh "${MW_DSH_ARGS[@]}" -- "
+ sudo -u mwdeploy rsync -a $MW_RSYNC_HOST::common/*.dblist $MW_COMMON
+"
+
+$BINDIR/dologmsg "!log $USER synchronized database lists files: $*"
diff --git a/sync-dir b/sync-dir
new file mode 100755
index 0000000..a9413e1
--- /dev/null
+++ b/sync-dir
@@ -0,0 +1,28 @@
+#!/bin/bash
+## synchronizes a directory from common/ on all apaches
+
+. /usr/local/lib/mw-deployment-vars.sh
+BINDIR=/usr/local/bin
+
+[ $# -lt 1 ] && {
+ echo >&2 "Usage: sync-dir directory [message]"
+ exit 1
+}
+
+FILE=$1
+shift
+
+# Sanity check
+if [ ! -d "$MW_COMMON_SOURCE/$FILE" ]; then
+ echo >&2 "Target file is not a directory"
+ exit 1
+fi
+
+# Perform syntax check
+$BINDIR/lint $MW_COMMON_SOURCE/$FILE || {
+ echo >&2 "Aborted due to syntax errors"
+ exit 1
+}
+
+# Actually sync the dir...
+$BINDIR/sync-common-file $FILE "$@"
diff --git a/sync-docroot b/sync-docroot
new file mode 100755
index 0000000..2c86916
--- /dev/null
+++ b/sync-docroot
@@ -0,0 +1,11 @@
+#!/bin/bash
+
+. /usr/local/lib/mw-deployment-vars.sh
+echo "Synchronizing $MW_COMMON_SOURCE/docroot to $MW_COMMON/docroot..."
+dsh "${MW_DSH_ARGS[@]}" -- "sudo -u mwdeploy rsync -a --no-perms
$MW_RSYNC_HOST::common/docroot $MW_RSYNC_HOST::common/w $MW_COMMON"
+echo "done!"
+
+BINDIR=/usr/local/bin
+$BINDIR/dologmsg "!log $USER synchronized docroot and w"
+$BINDIR/deploy2graphite sync-docroot
+exit 0
diff --git a/sync-file b/sync-file
new file mode 100755
index 0000000..eefabf2
--- /dev/null
+++ b/sync-file
@@ -0,0 +1,28 @@
+#!/bin/bash
+## synchronizes a single file from common/ on all apaches
+
+. /usr/local/lib/mw-deployment-vars.sh
+
+BINDIR=/usr/local/bin
+
+[ $# -lt 1 ] && {
+ echo >&2 "Usage: sync-file file [message]"
+ exit 1
+}
+
+FILE=$1
+shift
+
+# Sanity check
+if [ -d "$MW_COMMON_SOURCE/$FILE" ]; then
+ echo >&2 "Target file is a directory, not a single file"
+ exit 1
+fi
+
+# Perform syntax check
+php -l $MW_COMMON_SOURCE/$FILE || {
+ echo >&2 "Aborted due to syntax errors"
+ exit 1
+}
+
+$BINDIR/sync-common-file $FILE "$@"
diff --git a/sync-wikiversions b/sync-wikiversions
new file mode 100755
index 0000000..a7b9102
--- /dev/null
+++ b/sync-wikiversions
@@ -0,0 +1,30 @@
+#!/bin/bash
+. /usr/local/lib/mw-deployment-vars.sh
+BINDIR="/usr/local/bin"
+
+if [ ! -S "$SSH_AUTH_SOCK" ]; then
+ echo >&2 "SSH_AUTH_SOCK not set or not pointing to a socket."
+ echo >&2 "Did you start your ssh-agent?"
+ exit 1
+fi
+
+if ! $MW_COMMON_SOURCE/multiversion/refreshWikiversionsCDB; then
+ echo 'Error: could not rebuild the wikiversions.cdb file'
+fi
+
+# No use going any further if we can't complete the sync
+[ $(which dsh 2>/dev/null) ] || {
+ echo >&2 "Error: no dsh on this host, aborting"
+ exit 1
+}
+
+# Copy files to apaches
+echo -n 'Copying wikiversions dat and cdb files to apaches...'
+dsh -cM -g mediawiki-installation -o -oSetupTimeout=10 -- "sudo -u mwdeploy
rsync -l $MW_RSYNC_HOST::common/wikiversions.{dat,cdb} $MW_COMMON"
+
+echo 'Finished'
+
+# Notify
+$BINDIR/dologmsg "!log $USER rebuilt wikiversions.cdb and synchronized
wikiversions files: $*"
+$BINDIR/deploy2graphite sync-wikiversions
+exit 0
--
To view, visit https://gerrit.wikimedia.org/r/111351
To unsubscribe, visit https://gerrit.wikimedia.org/r/settings
Gerrit-MessageType: newchange
Gerrit-Change-Id: I1d4c66afd83a68813cb2c38073777d73cd6d0ae0
Gerrit-PatchSet: 1
Gerrit-Project: mediawiki/tools/scap
Gerrit-Branch: master
Gerrit-Owner: Ori.livneh <[email protected]>
_______________________________________________
MediaWiki-commits mailing list
[email protected]
https://lists.wikimedia.org/mailman/listinfo/mediawiki-commits