ArielGlenn has submitted this change and it was merged.

Change subject: fix outliers from camelcase conversion and from a bad merge
......................................................................


fix outliers from camelcase conversion and from a bad merge

Change-Id: I8378729593ae28a4e055aa5e209bf58ad19a18f2
---
M xmldumps-backup/dumps/jobs.py
M xmldumps-backup/dumps/utils.py
M xmldumps-backup/worker.py
M xmldumps-backup/xmlabstracts.py
M xmldumps-backup/xmllogs.py
M xmldumps-backup/xmlstreams.py
M xmldumps-backup/xmlstubs.py
7 files changed, 26 insertions(+), 25 deletions(-)

Approvals:
  ArielGlenn: Verified; Looks good to me, approved
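
For context: the camelCase-to-snake_case conversion this change completes renames methods such as MWScriptAsArray and chunksEnabled, and the removed runner.forceNormalOption() arguments below are presumably the bad-merge leftovers named in the subject. A minimal before/after sketch of the rename pattern, using names taken from the diff (the config and chunk_info objects are stand-ins for the real ones):

    # before the conversion (camelCase)
    command_list = MultiVersion.MWScriptAsArray(config, "getSlaveServer.php")
    if chunk_info.chunksEnabled():
        ...

    # after the conversion (snake_case)
    command_list = MultiVersion.mw_script_as_array(config, "getSlaveServer.php")
    if chunk_info.chunks_enabled():
        ...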



diff --git a/xmldumps-backup/dumps/jobs.py b/xmldumps-backup/dumps/jobs.py
index c94357e..b3b1f94 100644
--- a/xmldumps-backup/dumps/jobs.py
+++ b/xmldumps-backup/dumps/jobs.py
@@ -761,7 +761,7 @@
         script_command = MultiVersion.mw_script_as_array(runner.wiki.config, "dumpBackup.php")
 
         command = ["/usr/bin/python", "xmlstubs.py", "--config", 
runner.wiki.config.files[0], "--wiki", runner.db_name,
-                    runner.forceNormalOption(), "--articles", articles_file,
+                    "--articles", articles_file,
                     "--history", history_file, "--current", current_file]
 
         if outf.chunk:
@@ -891,7 +891,7 @@
         logging = runner.dump_dir.filename_public_path(output_file_obj)
 
         command = ["/usr/bin/python", "xmllogs.py", "--config", 
runner.wiki.config.files[0], "--wiki", runner.db_name,
-                    runner.forceNormalOption(), "--outfile", logging]
+                   "--outfile", logging]
 
         pipeline = [command]
         series = [pipeline]
@@ -1120,7 +1120,6 @@
         dump_command.extend(["--wiki=%s" % runner.db_name,
                     "%s" % stub_option,
                     "%s" % prefetch,
-                    "%s" % runner.forceNormalOption(),
                     "%s" % checkpoint_time,
                     "%s" % checkpoint_file,
                     "--report=1000",
@@ -1695,7 +1694,7 @@
 
     def build_command(self, runner, fname):
         command = ["/usr/bin/python", "xmlabstracts.py", "--config", 
runner.wiki.config.files[0],
-                    "--wiki", self.db_name, runner.forceNormalOption()]
+                    "--wiki", self.db_name]
 
         outputs = []
         variants = []
@@ -1878,7 +1877,7 @@
         """Pass some SQL commands to the server for this DB and save output to 
a gzipped file."""
         if not exists(runner.wiki.config.gzip):
             raise BackupError("gzip command %s not found" % 
runner.wiki.config.gzip)
-        command = runner.db_server_info.buildSqlCommand(query, runner.wiki.config.gzip)
+        command = runner.db_server_info.build_sql_command(query, runner.wiki.config.gzip)
         return runner.save_command(command, outfile)
 
 class AllTitleDump(TitleDump):
diff --git a/xmldumps-backup/dumps/utils.py b/xmldumps-backup/dumps/utils.py
index f5b7df7..ffa5163 100644
--- a/xmldumps-backup/dumps/utils.py
+++ b/xmldumps-backup/dumps/utils.py
@@ -50,7 +50,7 @@
         the prefix for all tables for the specific wiki ($wgDBprefix)"""
         if not exists(self.wiki.config.php):
             raise BackupError("php command %s not found" % 
self.wiki.config.php)
-        command_list = MultiVersion.MWScriptAsArray(self.wiki.config, "getSlaveServer.php")
+        command_list = MultiVersion.mw_script_as_array(self.wiki.config, "getSlaveServer.php")
         php_command = MiscUtils.shellEscape(self.wiki.config.php)
         db_name = MiscUtils.shellEscape(self.db_name)
         for i in range(0, len(command_list)):
diff --git a/xmldumps-backup/worker.py b/xmldumps-backup/worker.py
index a974cf0..e8b9ec4 100644
--- a/xmldumps-backup/worker.py
+++ b/xmldumps-backup/worker.py
@@ -133,11 +133,11 @@
 
                            AbstractDump("abstractsdump", "Extracted page 
abstracts for Yahoo", self._get_chunk_to_do("abstractsdump"), self.wiki.dbName, 
self.chunk_info.get_pages_per_chunk_abstract())]
 
-        if self.chunk_info.chunksEnabled():
+        if self.chunk_info.chunks_enabled():
             self.dump_items.append(RecombineAbstractDump("abstractsdumprecombine", "Recombine extracted page abstracts for Yahoo", self.find_item_by_name('abstractsdump')))
 
         self.dump_items.append(XmlStub("xmlstubsdump", "First-pass for page 
XML data dumps", self._get_chunk_to_do("xmlstubsdump"), 
self.chunk_info.get_pages_per_chunk_history()))
-        if self.chunk_info.chunksEnabled():
+        if self.chunk_info.chunks_enabled():
             self.dump_items.append(RecombineXmlStub("xmlstubsdumprecombine", "Recombine first-pass for page XML data dumps", self.find_item_by_name('xmlstubsdump')))
 
         # NOTE that the chunk_info thing passed here is irrelevant, these get generated from the stubs which are all done in one pass
@@ -146,7 +146,7 @@
                     "articlesdump",
                     "<big><b>Articles, templates, media/file descriptions, and 
primary meta-pages.</b></big>",
                     "This contains current versions of article content, and is 
the archive most mirror sites will probably want.", 
self.find_item_by_name('xmlstubsdump'), self._prefetch, self._spawn, self.wiki, 
self._get_chunk_to_do("articlesdump"), 
self.chunk_info.get_pages_per_chunk_history(), checkpoints, 
self.checkpoint_file, self.page_id_range))
-        if self.chunk_info.chunksEnabled():
+        if self.chunk_info.chunks_enabled():
             self.dump_items.append(RecombineXmlDump("articlesdumprecombine", 
"<big><b>Recombine articles, templates, media/file descriptions, and primary 
meta-pages.</b></big>", "This contains current versions of article content, and 
is the archive most mirror sites will probably want.", 
self.find_item_by_name('articlesdump')))
 
         self.dump_items.append(
@@ -155,7 +155,7 @@
                     "All pages, current versions only.",
                     "Discussion and user pages are included in this complete 
archive. Most mirrors won't want this extra material.", 
self.find_item_by_name('xmlstubsdump'), self._prefetch, self._spawn, self.wiki, 
self._get_chunk_to_do("metacurrentdump"), 
self.chunk_info.get_pages_per_chunk_history(), checkpoints, 
self.checkpoint_file, self.page_id_range))
 
-        if self.chunk_info.chunksEnabled():
+        if self.chunk_info.chunks_enabled():
             self.dump_items.append(RecombineXmlDump("metacurrentdumprecombine", "Recombine all pages, current versions only.", "Discussion and user pages are included in this complete archive. Most mirrors won't want this extra material.", self.find_item_by_name('metacurrentdump')))
 
         self.dump_items.append(
@@ -191,7 +191,7 @@
                        "All pages with complete page edit history (.bz2)",
                        "These dumps can be *very* large, uncompressing up to 
20 times the archive download size. " +
                        "Suitable for archival and statistical use, most mirror 
sites won't want or need this.", self.find_item_by_name('xmlstubsdump'), 
self._prefetch, self._spawn, self.wiki, 
self._get_chunk_to_do("metahistorybz2dump"), 
self.chunk_info.get_pages_per_chunk_history(), checkpoints, 
self.checkpoint_file, self.page_id_range))
-        if self.chunk_info.chunksEnabled() and self.chunk_info.recombine_history():
+        if self.chunk_info.chunks_enabled() and self.chunk_info.recombine_history():
             self.dump_items.append(
                 RecombineXmlDump("metahistorybz2dumprecombine",
                                  "Recombine all pages with complete edit 
history (.bz2)",
@@ -203,7 +203,7 @@
                               "All pages with complete edit history (.7z)",
                               "These dumps can be *very* large, uncompressing 
up to 100 times the archive download size. " +
                               "Suitable for archival and statistical use, most 
mirror sites won't want or need this.", 
self.find_item_by_name('metahistorybz2dump'), self.wiki, 
self._get_chunk_to_do("metahistory7zdump"), 
self.chunk_info.get_pages_per_chunk_history(), checkpoints, 
self.checkpoint_file))
-        if self.chunk_info.chunksEnabled() and self.chunk_info.recombine_history():
+        if self.chunk_info.chunks_enabled() and self.chunk_info.recombine_history():
             self.dump_items.append(
                 RecombineXmlRecompressDump("metahistory7zdumprecombine",
                                            "Recombine all pages with complete 
edit history (.7z)",
@@ -211,7 +211,7 @@
                                            "Suitable for archival and 
statistical use, most mirror sites won't want or need this.", 
self.find_item_by_name('metahistory7zdump'), self.wiki))
         # doing this only for recombined/full articles dump
         if self.wiki.config.multistreamEnabled:
-            if self.chunk_info.chunksEnabled():
+            if self.chunk_info.chunks_enabled():
                 input_for_multistream = "articlesdumprecombine"
             else:
                 input_for_multistream = "articlesdump"
diff --git a/xmldumps-backup/xmlabstracts.py b/xmldumps-backup/xmlabstracts.py
index fe2779d..aa5fba6 100644
--- a/xmldumps-backup/xmlabstracts.py
+++ b/xmldumps-backup/xmlabstracts.py
@@ -10,7 +10,8 @@
 import os
 import sys
 import worker
-import WikiDump
+from dumps.WikiDump import Config
+from dumps.utils import MultiVersion
 import getopt
 from xmlstreams import do_xml_stream, catit
 
@@ -37,10 +38,10 @@
         else:
             outfiles[filetype]['compr'] = catit(outfiles[filetype]['name'])
 
-    script_command = worker.MultiVersion.MWScriptAsArray(wikiconf,
-                                                         "dumpBackup.php")
+    script_command = MultiVersion.mw_script_as_array(wikiconf,
+                                                     "dumpBackup.php")
     command = [wikiconf.php, "-q"] + script_command
-    version = worker.MultiVersion.MWVersion(wikiconf, wikidb)
+    version = MultiVersion.mw_version(wikiconf, wikidb)
     abstract_cmd_dir = wikiconf.wikiDir
     if version:
         abstract_cmd_dir = abstract_cmd_dir + "/" + version
@@ -169,7 +170,7 @@
             usage("each variant must correspond to outfile, "
                   "different number supplied")
 
-    wikiconf = WikiDump.Config(configfile)
+    wikiconf = Config(configfile)
     wikiconf.parseConfFilePerProject(wiki)
     do_abstractsbackup(wiki, output_files, variants, wikiconf,
                        start, end, dryrun)
diff --git a/xmldumps-backup/xmllogs.py b/xmldumps-backup/xmllogs.py
index e7b3149..5cd6965 100644
--- a/xmldumps-backup/xmllogs.py
+++ b/xmldumps-backup/xmllogs.py
@@ -11,7 +11,8 @@
 import sys
 import time
 import worker
-import WikiDump
+from dumps.WikiDump import Config
+from dumps.utils import MultiVersion
 import getopt
 from xmlstreams import run_script, catfile, gzippit, get_max_id, do_xml_piece, do_xml_stream
 
@@ -31,7 +32,7 @@
         else:
             outfiles[filetype]['compr'] = gzippit(outfiles[filetype]['name'])
 
-    script_command = worker.MultiVersion.MWScriptAsArray(wikiconf, "dumpBackup.php")
+    script_command = MultiVersion.mw_script_as_array(wikiconf, "dumpBackup.php")
     command = [wikiconf.php, "-q"] + script_command
 
     command.extend(["--wiki=%s" % wikidb,
@@ -132,7 +133,7 @@
     if not os.path.exists(configfile):
         usage("no such file found: " + configfile)
 
-    wikiconf = WikiDump.Config(configfile)
+    wikiconf = Config(configfile)
     wikiconf.parseConfFilePerProject(wiki)
     dologsbackup(wiki, output_file, wikiconf, start, end, dryrun)
 
diff --git a/xmldumps-backup/xmlstreams.py b/xmldumps-backup/xmlstreams.py
index 54b1caf..7840d32 100644
--- a/xmldumps-backup/xmlstreams.py
+++ b/xmldumps-backup/xmlstreams.py
@@ -11,7 +11,7 @@
 import sys
 import time
 import worker
-import WikiDump
+from dumps.WikiDump import Wiki
 import getopt
 
 from subprocess import Popen, PIPE
@@ -147,7 +147,7 @@
     retrieve the largest id for this wiki from the db for specific table
     pass in name of id field, name of table
     '''
-    wiki = WikiDump.Wiki(wikiconf, wikidb)
+    wiki = Wiki(wikiconf, wikidb)
 
     db_info = worker.DbServerInfo(wiki, wikidb)
     query = "select MAX(%s) from %s%s;" % (id_field, db_info.db_table_prefix, 
table)
diff --git a/xmldumps-backup/xmlstubs.py b/xmldumps-backup/xmlstubs.py
index eb4c995..7117b93 100644
--- a/xmldumps-backup/xmlstubs.py
+++ b/xmldumps-backup/xmlstubs.py
@@ -11,7 +11,7 @@
 import sys
 import time
 import worker
-import WikiDump
+from dumps.WikiDump import Config
 import getopt
 from xmlstreams import run_script, catfile, gzippit, get_max_id, do_xml_piece, do_xml_stream
 
@@ -151,7 +151,7 @@
     if not os.path.exists(configfile):
         usage("no such file found: " + configfile)
 
-    wikiconf = WikiDump.Config(configfile)
+    wikiconf = Config(configfile)
     wikiconf.parseConfFilePerProject(wiki)
     dostubsbackup(wiki, history_file, current_file, articles_file, wikiconf, start, end, dryrun)
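
A note on the import changes in the four scripts above: they stop going through worker.MultiVersion and the top-level WikiDump module, pulling Config, Wiki, and MultiVersion from the dumps package instead. A minimal sketch of the new style, assuming the dumps package is importable from the working directory; the config file path and wiki db name here are hypothetical placeholders:

    from dumps.WikiDump import Config, Wiki
    from dumps.utils import MultiVersion

    wikiconf = Config("wikidump.conf")          # hypothetical config file path
    wikiconf.parseConfFilePerProject("enwiki")  # hypothetical wiki db name
    script_command = MultiVersion.mw_script_as_array(wikiconf, "dumpBackup.php")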
 

-- 
To view, visit https://gerrit.wikimedia.org/r/242503
To unsubscribe, visit https://gerrit.wikimedia.org/r/settings

Gerrit-MessageType: merged
Gerrit-Change-Id: I8378729593ae28a4e055aa5e209bf58ad19a18f2
Gerrit-PatchSet: 3
Gerrit-Project: operations/dumps
Gerrit-Branch: ariel
Gerrit-Owner: ArielGlenn <ar...@wikimedia.org>
Gerrit-Reviewer: ArielGlenn <ar...@wikimedia.org>
Gerrit-Reviewer: jenkins-bot <>
