jenkins-bot has submitted this change and it was merged.
Change subject: Archive and takeover namespaces with Flow
......................................................................
Archive and takeover namespaces with Flow
Change-Id: Ie785403748cc22bc28ff52a172bfaafad8aaf18b
---
M autoload.php
M i18n/en.json
M i18n/qqq.json
M includes/Import/Converter.php
M includes/Import/Importer.php
A includes/Import/Plain/ImportHeader.php
A includes/Import/Plain/ObjectRevision.php
A includes/Import/Wikitext/ConversionStrategy.php
A includes/Import/Wikitext/ImportSource.php
A includes/Utils/NamespaceIterator.php
M maintenance/convertLqt.php
A maintenance/convertNamespaceFromWikitext.php
A tests/phpunit/Import/Wikitext/ConversionStrategyTest.php
A tests/phpunit/Import/Wikitext/ImportSourceTest.php
14 files changed, 599 insertions(+), 7 deletions(-)
Approvals:
EBernhardson: Looks good to me, approved
jenkins-bot: Verified
diff --git a/autoload.php b/autoload.php
index 22f7779..1177875 100644
--- a/autoload.php
+++ b/autoload.php
@@ -199,12 +199,16 @@
'Flow\\Import\\LiquidThreadsApi\\TopicIterator' => __DIR__ .
'/includes/Import/LiquidThreadsApi/Iterators.php',
'Flow\\Import\\NullImportSourceStore' => __DIR__ .
'/includes/Import/ImportSourceStore.php',
'Flow\\Import\\PageImportState' => __DIR__ .
'/includes/Import/Importer.php',
+ 'Flow\\Import\\Plain\\ImportHeader' => __DIR__ .
'/includes/Import/Plain/ImportHeader.php',
+ 'Flow\\Import\\Plain\\ObjectRevision' => __DIR__ .
'/includes/Import/Plain/ObjectRevision.php',
'Flow\\Import\\Postprocessor\\LqtRedirector' => __DIR__ .
'/includes/Import/Postprocessor/LqtRedirector.php',
'Flow\\Import\\Postprocessor\\PostprocessingException' => __DIR__ .
'/includes/Import/Postprocessor/PostprocessingException.php',
'Flow\\Import\\Postprocessor\\Postprocessor' => __DIR__ .
'/includes/Import/Postprocessor/Postprocessor.php',
'Flow\\Import\\Postprocessor\\ProcessorGroup' => __DIR__ .
'/includes/Import/Postprocessor/ProcessorGroup.php',
'Flow\\Import\\TalkpageImportOperation' => __DIR__ .
'/includes/Import/Importer.php',
'Flow\\Import\\TopicImportState' => __DIR__ .
'/includes/Import/Importer.php',
+ 'Flow\\Import\\Wikitext\\ConversionStrategy' => __DIR__ .
'/includes/Import/Wikitext/ConversionStrategy.php',
+ 'Flow\\Import\\Wikitext\\ImportSource' => __DIR__ .
'/includes/Import/Wikitext/ImportSource.php',
'Flow\\LinksTableUpdater' => __DIR__ .
'/includes/LinksTableUpdater.php',
'Flow\\Log\\Formatter' => __DIR__ . '/includes/Log/Formatter.php',
'Flow\\Log\\Logger' => __DIR__ . '/includes/Log/Logger.php',
@@ -304,6 +308,8 @@
'Flow\\Tests\\Import\\LiquidThreadsApi\\ConversionStrategyTest' =>
__DIR__ . '/tests/phpunit/Import/LiquidThreadsApi/ConversionStrategyTest.php',
'Flow\\Tests\\Import\\PageImportStateTest' => __DIR__ .
'/tests/phpunit/Import/PageImportStateTest.php',
'Flow\\Tests\\Import\\TalkpageImportOperationTest' => __DIR__ .
'/tests/phpunit/Import/TalkpageImportOperationTest.php',
+ 'Flow\\Tests\\Import\\Wikitext\\ConversionStrategyTest' => __DIR__ .
'/tests/phpunit/Import/Wikitext/ConversionStrategyTest.php',
+ 'Flow\\Tests\\Import\\Wikitext\\ImportSourceTest' => __DIR__ .
'/tests/phpunit/Import/Wikitext/ImportSourceTest.php',
'Flow\\Tests\\LinksTableTest' => __DIR__ .
'/tests/phpunit/LinksTableTest.php',
'Flow\\Tests\\LocalBufferedBagOStuffTest' => __DIR__ .
'/tests/phpunit/Data/BagOStuff/LocalBufferedBagOStuffTest.php',
'Flow\\Tests\\Mock\\MockImportHeader' => __DIR__ .
'/tests/phpunit/Mock/MockImportHeader.php',
@@ -335,6 +341,7 @@
'Flow\\Tests\\UrlGeneratorTest' => __DIR__ .
'/tests/phpunit/UrlGeneratorTest.php',
'Flow\\Tests\\WatchedTopicItemTest' => __DIR__ .
'/tests/phpunit/WatchedTopicItemsTest.php',
'Flow\\UrlGenerator' => __DIR__ . '/includes/UrlGenerator.php',
+ 'Flow\\Utils\\NamespaceIterator' => __DIR__ .
'/includes/Utils/NamespaceIterator.php',
'Flow\\Utils\\PagesWithPropertyIterator' => __DIR__ .
'/includes/Utils/PagesWithPropertyIterator.php',
'Flow\\View' => __DIR__ . '/includes/View.php',
'Flow\\WatchedTopicItems' => __DIR__ .
'/includes/WatchedTopicItems.php',
diff --git a/i18n/en.json b/i18n/en.json
index 5902b9b..b0aee75 100644
--- a/i18n/en.json
+++ b/i18n/en.json
@@ -381,6 +381,8 @@
"flow-post-undo-suppress": "(undo suppress)",
"flow-importer-lqt-converted-template": "LQT page converted to Flow",
"flow-importer-lqt-converted-archive-template": "Archive for converted
LQT page",
+ "flow-importer-wt-converted-template": "Wikitext talk page converted to
Flow",
+ "flow-importer-wt-converted-archive-template": "Archive for converted
wikitext talk page",
"apihelp-flow-description": "Allows actions to be taken on Flow pages.",
"apihelp-flow-param-submodule": "The Flow submodule to invoke.",
"apihelp-flow-param-page": "The page to take the action on.",
@@ -461,5 +463,7 @@
"apihelp-query+flowinfo-example-1": "Fetch Flow information about
[[Talk:Sandbox]], [[Main Page]], and [[Talk:Flow]]",
"flow-edited": "Edited",
"flow-edited-by": "Edited by $1",
- "flow-lqt-redirect-reason": "Redirecting retired LiquidThreads post to
its converted Flow post"
+ "flow-lqt-redirect-reason": "Redirecting retired LiquidThreads post to
its converted Flow post",
+ "flow-talk-conversion-move-reason": "Conversion of wikitext talk to
Flow from $1",
+ "flow-talk-conversion-archive-edit-reason": "Wikitext talk to Flow
conversion"
}
diff --git a/i18n/qqq.json b/i18n/qqq.json
index acf091f..8593a52 100644
--- a/i18n/qqq.json
+++ b/i18n/qqq.json
@@ -385,6 +385,8 @@
"flow-post-undo-suppress": "Automatic moderation summary when undoing a
suppress that was just performed.",
"flow-importer-lqt-converted-template": "Name of a wikitext template
that is added to the header of Flow boards that were converted from
LiquidThreads",
"flow-importer-lqt-converted-archive-template": "Name of a wikitext
template that is added to the archived copy of a LiquidThreads page converted
to Flow.",
+    "flow-importer-wt-converted-template": "Name of a wikitext template
that is added to the header of a Flow board that was converted from wikitext",
+ "flow-importer-wt-converted-archive-template": "Name of a wikitext
template that is added to the archived copy of a wikitext talk page converted
to Flow.",
"apihelp-flow-description": "{{doc-apihelp-description|flow}}",
"apihelp-flow-param-submodule": "{{doc-apihelp-param|flow|submodule}}",
"apihelp-flow-param-page": "{{doc-apihelp-param|flow|page}}",
@@ -465,5 +467,7 @@
"apihelp-query+flowinfo-example-1":
"{{doc-apihelp-example|query+flowinfo}}",
"flow-edited": "Message displayed below a post to indicate it has last
been edited by the original author\n{{Identical|Edited}}",
"flow-edited-by": "Message displayed below a post to indicate it has
last been edited by a user other than the original author",
- "flow-lqt-redirect-reason": "Edit summary used to redirect old LQT
thread pages to Flow topics"
+ "flow-lqt-redirect-reason": "Edit summary used to redirect old LQT
thread pages to Flow topics",
+ "flow-talk-conversion-move-reason": "Message used as an edit summary
when moving an existing talk page to an archive location in preparation for
enabling flow on that page.\nParameters:\n* $1 - Title the page was moved from",
+ "flow-talk-conversion-archive-edit-reason": "Message used as an edit
summary when appending a template to a wikitext talk page after archiving it in
preparation for conversion to Flow."
}
diff --git a/includes/Import/Converter.php b/includes/Import/Converter.php
index 43b21bf..f2738df 100644
--- a/includes/Import/Converter.php
+++ b/includes/Import/Converter.php
@@ -88,8 +88,12 @@
$postprocessor = $strategy->getPostprocessor();
if ( $postprocessor !== null ) {
+			// @todo assert we can't cause duplicate postprocessors
$this->importer->addPostprocessor( $postprocessor );
}
+
+ // Force the importer to use our logger for consistent output.
+ $this->importer->setLogger( $logger );
}
/**
@@ -103,7 +107,10 @@
continue;
}
- // Only convert sub pages if we made them sub
pages
+ // Filter out sub pages unless we moved them
there. This
+ // matches the behaviour of
$wgFlowOccupyNamespaces, where
+ // the main pages get converted to Flow but the
sub pages
+ // remain wikitext.
$movedFrom = $this->getPageMovedFrom( $title );
if ( $movedFrom === null && $title->isSubpage()
) {
continue;
diff --git a/includes/Import/Importer.php b/includes/Import/Importer.php
index c30ffa1..2b1f1a1 100644
--- a/includes/Import/Importer.php
+++ b/includes/Import/Importer.php
@@ -5,7 +5,6 @@
use Flow\Data\BufferedCache;
use Flow\Data\ManagerGroup;
use Flow\DbFactory;
-use Flow\Exception\FlowException;
use Flow\Import\Postprocessor\Postprocessor;
use Flow\Import\Postprocessor\ProcessorGroup;
use Flow\Model\AbstractRevision;
@@ -488,7 +487,7 @@
$this->importHeader( $state, $header );
$state->commit();
$imported++;
- } catch ( FlowException $e ) {
+ } catch ( \Exception $e ) {
$state->rollback();
\MWExceptionHandler::logException( $e );
$state->logger->error( 'Failed importing
header' );
@@ -504,7 +503,7 @@
$this->importTopic( $state, $topic );
$state->commit();
$imported++;
- } catch ( FlowException $e ) {
+ } catch ( \Exception $e ) {
$state->rollback();
\MWExceptionHandler::logException( $e );
$state->logger->error( 'Failed importing topic'
);
diff --git a/includes/Import/Plain/ImportHeader.php
b/includes/Import/Plain/ImportHeader.php
new file mode 100644
index 0000000..33df3b6
--- /dev/null
+++ b/includes/Import/Plain/ImportHeader.php
@@ -0,0 +1,31 @@
+<?php
+
+namespace Flow\Import\Plain;
+
+use ArrayIterator;
+use Flow\Import\IImportHeader;
+use Flow\Import\IObjectRevision;
+
+class ImportHeader implements IImportHeader {
+ /** @var IObjectRevision[] */
+ protected $revisions;
+ /** @var string */
+ protected $objectKey;
+
+ /**
+ * @param IObjectRevision[] $revisions
+ * @param string $objectKey
+ */
+ public function __construct( array $revisions, $objectKey ) {
+ $this->revisions = $revisions;
+ $this->objectKey = $objectKey;
+ }
+
+ public function getRevisions() {
+ return new ArrayIterator( $this->revisions );
+ }
+
+ public function getObjectKey() {
+ return $this->objectKey;
+ }
+}
diff --git a/includes/Import/Plain/ObjectRevision.php
b/includes/Import/Plain/ObjectRevision.php
new file mode 100644
index 0000000..0e94b2a
--- /dev/null
+++ b/includes/Import/Plain/ObjectRevision.php
@@ -0,0 +1,45 @@
+<?php
+
+namespace Flow\Import\Plain;
+
+use Flow\Import\IObjectRevision;
+
+class ObjectRevision implements IObjectRevision {
+ /** @var string */
+ protected $text;
+ /** @var string */
+ protected $timestamp;
+ /** @var string */
+ protected $author;
+ /** @var string */
+ protected $objectKey;
+
+ /**
+ * @param string $text The content of the revision
+ * @param string $timestamp wfTimestamp() compatible creation timestamp
+ * @param string $author Name of the user that created the revision
+ * @param string $objectKey Unique key identifying this revision
+ */
+ public function __construct( $text, $timestamp, $author, $objectKey ) {
+ $this->text = $text;
+ $this->timestamp = $timestamp;
+ $this->author = $author;
+ $this->objectKey = $objectKey;
+ }
+
+ public function getText() {
+ return $this->text;
+ }
+
+ public function getTimestamp() {
+ return $this->timestamp;
+ }
+
+ public function getAuthor() {
+ return $this->author;
+ }
+
+ public function getObjectKey() {
+ return $this->objectKey;
+ }
+}
diff --git a/includes/Import/Wikitext/ConversionStrategy.php
b/includes/Import/Wikitext/ConversionStrategy.php
new file mode 100644
index 0000000..546c9d8
--- /dev/null
+++ b/includes/Import/Wikitext/ConversionStrategy.php
@@ -0,0 +1,123 @@
+<?php
+
+namespace Flow\Import\Wikitext;
+
+use DateTime;
+use DateTimeZone;
+use Flow\Import\Converter;
+use Flow\Import\IConversionStrategy;
+use Flow\Import\ImportSourceStore;
+use Parser;
+use StubObject;
+use Title;
+use WikitextContent;
+
+/**
+ * Does not really convert. Archives wikitext pages out of the way and puts
+ * a new flow board in place. We take either the entire page, or the page up
+ * to the first section and put it into the header of the flow board. We
+ * additionally edit both the flow header and the archived page to include
+ * a localized template containing the reciprocal title and the conversion
+ * date in GMT.
+ *
+ * It is plausible something with the EchoDiscussionParser could be worked up
+ * to do an import of topics and posts. We know it won't work for everything,
+ * but we don't know if it works for 90%, 99%, or 99.99% of topics. We know
+ * for sure that it does not currently understand anything about editing an
+ * existing comment.
+ */
+class ConversionStrategy implements IConversionStrategy {
+ /**
+ * @var ImportSourceStore
+ */
+ protected $sourceStore;
+
+ /**
+ * @var Parser|StubObject
+ */
+ protected $parser;
+
+ /**
+ * @param Parser|StubObject $parser
+ * @param ImportSourceStore $sourceStore
+ */
+ public function __construct( $parser, ImportSourceStore $sourceStore ) {
+ $this->parser = $parser;
+ $this->sourceStore = $sourceStore;
+ }
+
+ /**
+ * {@inheritDoc}
+ */
+ public function getSourceStore() {
+ return $this->sourceStore;
+ }
+
+ /**
+ * {@inheritDoc}
+ */
+ public function getMoveComment( Title $from, Title $to ) {
+ return wfMessage( 'flow-talk-conversion-move-reason',
$from->getPrefixedText() )->plain();
+ }
+
+ /**
+ * {@inheritDoc}
+ */
+ public function getCleanupComment( Title $from, Title $to ) {
+ return wfMessage( 'flow-talk-conversion-archive-edit-reason'
)->plain();
+ }
+
+ /**
+	 * {@inheritDoc}
+ */
+ public function isConversionFinished( Title $title, Title $movedFrom =
null ) {
+ if ( $movedFrom ) {
+ // no good way to pick up where we left off
+ return true;
+ } else {
+ return false;
+ }
+ }
+
+ /**
+ * {@inheritDoc}
+ */
+ public function createImportSource( Title $title ) {
+ return new ImportSource( $title, $this->parser );
+ }
+
+ /**
+ * {@inheritDoc}
+ */
+ public function decideArchiveTitle( Title $source ) {
+ return Converter::decideArchiveTitle( $source, array(
+ '%s/Archive %d',
+ '%s/Archive%d',
+ '%s/archive %d',
+ '%s/archive%d',
+ ) );
+ }
+
+ /**
+ * {@inheritDoc}
+ */
+ public function getPostprocessor() {
+ return null;
+ }
+
+ /**
+ * {@inheritDoc}
+ */
+ public function createArchiveCleanupRevisionContent( WikitextContent
$content, Title $title ) {
+ $now = new DateTime( "now", new DateTimeZone( "GMT" ) );
+ $arguments = implode( '|', array(
+ 'from=' . $title->getPrefixedText(),
+ 'date=' . $now->format( 'Y-m-d' ),
+ ) );
+
+ $template = wfMessage(
'flow-importer-wt-converted-archive-template' )->inContentLanguage()->plain();
+ $newWikitext = "{{{$template}|$arguments}}" . "\n\n" .
$content->getNativeData();
+
+ return new WikitextContent( $newWikitext );
+ }
+}
diff --git a/includes/Import/Wikitext/ImportSource.php
b/includes/Import/Wikitext/ImportSource.php
new file mode 100644
index 0000000..013a748
--- /dev/null
+++ b/includes/Import/Wikitext/ImportSource.php
@@ -0,0 +1,90 @@
+<?php
+
+namespace Flow\Import\Wikitext;
+
+use ArrayIterator;
+use DateTime;
+use DateTimeZone;
+use FlowHooks;
+use Flow\Exception\FlowException;
+use Flow\Import\Plain\ImportHeader;
+use Flow\Import\Plain\ObjectRevision;
+use Flow\Import\IImportSource;
+use Parser;
+use ParserOptions;
+use Revision;
+use StubObject;
+use Title;
+
+/**
+ * Imports the header of a wikitext talk page. Does not attempt to
+ * parse out and return individual topics. See the wikitext
+ * ConversionStrategy for more details.
+ */
+class ImportSource implements IImportSource {
+
+ /**
+ * @param Title $title
+ * @param Parser|StubObject $parser
+ * @throws FlowException When $title is an external title
+ */
+ public function __construct( Title $title, $parser ) {
+ if ( $title->isExternal() ) {
+ throw new FlowException( "Invalid non-local title:
{$title->getPrefixedText()}" );
+ }
+ $this->title = $title;
+ $this->parser = $parser;
+ }
+
+ /**
+ * Converts the existing wikitext talk page into a flow board header.
+ * If sections exist the header only receives the content up to the
+ * first section. Appends a template to the output indicating conversion
+ * occurred parameterized with the page the source lives at and the date
+ * of conversion in GMT.
+ *
+ * @return ImportHeader|null
+ */
+ public function getHeader() {
+ $revision = Revision::newFromTitle( $this->title );
+ if ( !$revision ) {
+ return null;
+ }
+
+
+ // If sections exist only take the content from the top of the
page
+ // to the first section.
+ $content = $revision->getContent()->getNativeData();
+ $output = $this->parser->parse( $content, $this->title, new
ParserOptions );
+ $sections = $output->getSections();
+ if ( $sections ) {
+ $content = substr( $content, 0,
$sections[0]['byteoffset'] );
+ }
+
+ $template = wfMessage( 'flow-importer-wt-converted-template'
)->inContentLanguage()->plain();
+ $now = new DateTime( "now", new DateTimeZone( "GMT" ) );
+ $arguments = implode( '|', array(
+ 'from=' . $this->title->getPrefixedText(),
+ 'date=' . $now->format( 'Y-m-d' ),
+ ) );
+ $content .= "\n\n{{{$template}|$arguments}}";
+
+ return new ImportHeader(
+ array( new ObjectRevision(
+ $content,
+ wfTimestampNow(),
+
FlowHooks::getOccupationController()->getTalkpageManager()->getName(),
+
"wikitext-import:header-revision:{$revision->getId()}"
+ ) ),
+
"wikitext-import:header:{$this->title->getPrefixedText()}"
+ );
+ }
+
+ /**
+ * {@inheritDoc}
+ */
+ public function getTopics() {
+ return new ArrayIterator( array() );
+ }
+}
+
diff --git a/includes/Utils/NamespaceIterator.php
b/includes/Utils/NamespaceIterator.php
new file mode 100644
index 0000000..a52a356
--- /dev/null
+++ b/includes/Utils/NamespaceIterator.php
@@ -0,0 +1,58 @@
+<?php
+
+namespace Flow\Utils;
+
+use DatabaseBase;
+use EchoBatchRowIterator;
+use EchoCallbackIterator;
+use Iterator;
+use IteratorAggregate;
+use RecursiveIteratorIterator;
+use Title;
+
+/**
+ * Iterates over all titles within the specified namespace. Batches
+ * queries into 500 titles at a time starting with the lowest page id.
+ */
+class NamespaceIterator implements IteratorAggregate {
+ /**
+ * @var DatabaseBase A wiki database to read from
+ */
+ protected $db;
+
+ /**
+ * @var int An NS_* namespace to iterate over
+ */
+ protected $namespace;
+
+ /**
+ * @param DatabaseBase $db A wiki database to read from
+ * @param int $namespace An NS_* namespace to iterate over
+ */
+ public function __construct( DatabaseBase $db, $namespace ) {
+ $this->db = $db;
+ $this->namespace = $namespace;
+ }
+
+ /**
+ * @return Iterator<Title>
+ */
+ public function getIterator() {
+ $it = new EchoBatchRowIterator(
+ $this->db,
+ /* tables */ array( 'page' ),
+ /* pk */ 'page_id',
+ /* rows per batch */ 500
+ );
+ $it->addConditions( array(
+ 'page_namespace' => $this->namespace,
+ ) );
+ $it->setFetchColumns( array( 'page_title' ) );
+ $it = new RecursiveIteratorIterator( $it );
+
+ $namespace = $this->namespace;
+ return new EchoCallbackIterator( $it, function( $row ) use (
$namespace ) {
+ return Title::makeTitle( $namespace, $row->page_title );
+ } );
+ }
+}
diff --git a/maintenance/convertLqt.php b/maintenance/convertLqt.php
index c19c49b..38f06eb 100644
--- a/maintenance/convertLqt.php
+++ b/maintenance/convertLqt.php
@@ -29,7 +29,6 @@
? new MaintenanceDebugLogger( $this )
: new NullLogger;
$importer = Flow\Container::get( 'importer' );
- $importer->setLogger( $logger );
$talkpageManagerUser =
FlowHooks::getOccupationController()->getTalkpageManager();
$dbr = wfGetDB( DB_SLAVE );
diff --git a/maintenance/convertNamespaceFromWikitext.php
b/maintenance/convertNamespaceFromWikitext.php
new file mode 100644
index 0000000..297e05c
--- /dev/null
+++ b/maintenance/convertNamespaceFromWikitext.php
@@ -0,0 +1,79 @@
+<?php
+
+use Flow\Utils\NamespaceIterator;
+use Psr\Log\NullLogger;
+
+require_once ( getenv( 'MW_INSTALL_PATH' ) !== false
+ ? getenv( 'MW_INSTALL_PATH' ) . '/maintenance/Maintenance.php'
+ : dirname( __FILE__ ) . '/../../../maintenance/Maintenance.php' );
+
+/**
+ * Converts a single namespace from wikitext talk pages to flow talk pages.
Does not
+ * modify liquid threads pages it comes across, use convertLqt.php for that.
Does not
+ * modify sub-pages. Does not modify LiquidThreads enabled pages.
+ */
+class ConvertNamespaceFromWikitext extends Maintenance {
+ public function __construct() {
+ parent::__construct();
+ $this->mDescription = "Converts a single namespace of wikitext
talk pages to Flow";
+ $this->addArg( 'namespace', 'Name of the namespace to convert'
);
+ $this->addOption( 'verbose', 'Report on import progress to
stdout' );
+ }
+
+ public function execute() {
+ global $wgLang, $wgParser;
+
+ $provided = $this->getArg( 0 );
+ $namespace = $wgLang->getNsIndex( $provided );
+ if ( !$namespace ) {
+ $this->error( "Invalid namespace provided: $provided" );
+ return;
+ }
+
+ // @todo send to prod logger?
+ $logger = $this->getOption( 'verbose' )
+ ? new MaintenanceDebugLogger( $this )
+ : new NullLogger();
+
+ $dbr = wfGetDB( DB_SLAVE );
+ $converter = new \Flow\Import\Converter(
+ $dbr,
+ Flow\Container::get( 'importer' ),
+ $logger,
+
FlowHooks::getOccupationController()->getTalkpageManager(),
+ new Flow\Import\Wikitext\ConversionStrategy(
+ $wgParser,
+ new Flow\Import\NullImportSourceStore()
+ )
+ );
+
+ $namespaceName = $wgLang->getNsText( $namespace );
+ $logger->info( "Starting conversion of $namespaceName
namespace" );
+
+ // Iterate over all existing pages of the namespace.
+ $it = new NamespaceIterator( $dbr, $namespace );
+ // NamespaceIterator is an IteratorAggregate. Get an Iterator
+ // so we can wrap that.
+ $it = $it->getIterator();
+
+
+ // if we have liquid threads filter out any pages with that
enabled. They should
+ // be converted separately.
+ if ( class_exists( 'LqtDispatch' ) ) {
+ $it = new CallbackFilterIterator( $it, function( $title
) use ( $logger ) {
+ if ( LqtDispatch::isLqtPage( $title ) ) {
+ $logger->info( "Skipping LQT enabled
page, conversion must be done with convertLqt.php or convertLqtPage.php:
$title" );
+ return false;
+ } else {
+ return true;
+ }
+ } );
+ }
+
+ $converter->convert( $it );
+ }
+}
+
+$maintClass = "ConvertNamespaceFromWikitext";
+require_once ( RUN_MAINTENANCE_IF_MAIN );
+
diff --git a/tests/phpunit/Import/Wikitext/ConversionStrategyTest.php
b/tests/phpunit/Import/Wikitext/ConversionStrategyTest.php
new file mode 100644
index 0000000..4208a35
--- /dev/null
+++ b/tests/phpunit/Import/Wikitext/ConversionStrategyTest.php
@@ -0,0 +1,96 @@
+<?php
+
+namespace Flow\Tests\Import\Wikitext;
+
+use DateTime;
+use DateTimeZone;
+use Flow\Import\ImportSourceStore;
+use Flow\Import\NullImportSourceStore;
+use Flow\Import\Wikitext\ConversionStrategy;
+use Parser;
+use Title;
+use WikitextContent;
+
+/**
+ * @group Flow
+ */
+class ConversionStrategyTest extends \MediaWikiTestCase {
+ public function testCanConstruct() {
+ $this->assertInstanceOf(
+ 'Flow\Import\Wikitext\ConversionStrategy',
+ $this->createStrategy()
+ );
+ }
+
+ public function testGeneratesMoveComment() {
+ $from = Title::newFromText( 'Talk:Blue_birds' );
+ $to = Title::newFromText( 'Talk:Blue_birds/Archive 4' );
+ $this->assertGreaterThan(
+ 1,
+ strlen( $this->createStrategy()->getMoveComment( $from,
$to ) )
+ );
+ }
+
+ public function testGeneratesCleanupComment() {
+ $from = Title::newFromText( 'Talk:Blue_birds' );
+ $to = Title::newFromText( 'Talk:Blue_birds/Archive 4' );
+ $this->assertGreaterThan(
+ 1,
+ strlen( $this->createStrategy()->getCleanupComment(
$from, $to ) )
+ );
+ }
+
+ public function testCreatesValidImportSource() {
+ $this->assertInstanceOf(
+ 'Flow\Import\IImportSource',
+ $this->createStrategy()->createImportSource(
Title::newFromText( 'Talk:Blue_birds' ) )
+ );
+ }
+
+ public function testReturnsValidSourceStore() {
+ $this->assertInstanceOf(
+ 'Flow\Import\ImportSourceStore',
+ $this->createStrategy()->getSourceStore()
+ );
+ }
+
+ public function testDecidesArchiveTitle() {
+ // we don't have control of the Title::exists() calls that are
made here,
+ // so just assume the page doesn't exist and we get format = 0
n = 1
+ $this->assertEquals(
+ 'Talk:Blue birds/Archive 1',
+ $this->createStrategy()
+ ->decideArchiveTitle( Title::newFromText(
'Talk:Blue_birds' ) )
+ ->getPrefixedText()
+ );
+ }
+
+ public function testCreateArchiveCleanupRevisionContent() {
+ // @todo superm401 suggested finding library that lets us
control time during tests,
+ // would probably be better
+ $now = new DateTime( "now", new DateTimeZone( "GMT" ) );
+ $date = $now->format( 'Y-m-d' );
+
+ $result =
$this->createStrategy()->createArchiveCleanupRevisionContent(
+ new WikitextContent( "Four score and..." ),
+ Title::newFromText( 'Talk:Blue_birds' )
+ );
+ $this->assertInstanceOf( 'WikitextContent', $result );
+ $this->assertEquals(
+ "{{Archive for converted wikitext talk
page|from=Talk:Blue birds|date=$date}}\n\nFour score and...",
+ $result->getNativeData()
+ );
+ }
+
+ protected function createStrategy(
+ Parser $parser = null,
+ ImportSourceStore $sourceStore = null
+ ) {
+ global $wgParser;
+
+ return new ConversionStrategy(
+ $parser ?: $wgParser,
+ $sourceStore ?: new NullImportSourceStore
+ );
+ }
+}
diff --git a/tests/phpunit/Import/Wikitext/ImportSourceTest.php
b/tests/phpunit/Import/Wikitext/ImportSourceTest.php
new file mode 100644
index 0000000..4b15f51
--- /dev/null
+++ b/tests/phpunit/Import/Wikitext/ImportSourceTest.php
@@ -0,0 +1,50 @@
+<?php
+
+namespace Flow\Tests\Import\Wikitext;
+
+use DateTime;
+use DateTimeZone;
+use Flow\Import\Wikitext\ImportSource;
+use Parser;
+use Title;
+use WikiPage;
+use WikitextContent;
+
+/**
+ * @group Flow
+ * @group Database
+ */
+class ImportSourceTest extends \MediaWikiTestCase {
+
+ protected $tablesUsed = array( 'page', 'revision' );
+
+ public function testGetHeader() {
+ $now = new DateTime( "now", new DateTimeZone( "GMT" ) );
+ $date = $now->format( 'Y-m-d' );
+
+ // create a page with some content
+ $status = WikiPage::factory( Title::newMainPage() )
+ ->doEditContent(
+ new WikitextContent( "This is some content\n" ),
+ "and an edit summary"
+ );
+ if ( !$status->isGood() ) {
+ $this->fail( $status->getMessage()->plain() );
+ }
+
+ $source = new ImportSource( Title::newMainPage(), new Parser );
+ $header = $source->getHeader();
+ $this->assertNotNull( $header );
+ $this->assertGreaterThan( 1, strlen( $header->getObjectKey() )
);
+
+ $revisions = iterator_to_array( $header->getRevisions() );
+ $this->assertCount( 1, $revisions );
+
+ $revision = reset( $revisions );
+ $this->assertInstanceOf( 'Flow\Import\IObjectRevision',
$revision );
+ $this->assertEquals(
+ "This is some content\n\n{{Wikitext talk page converted
to Flow|from=Main Page|date=$date}}",
+ $revision->getText()
+ );
+ }
+}
--
To view, visit https://gerrit.wikimedia.org/r/174861
To unsubscribe, visit https://gerrit.wikimedia.org/r/settings
Gerrit-MessageType: merged
Gerrit-Change-Id: Ie785403748cc22bc28ff52a172bfaafad8aaf18b
Gerrit-PatchSet: 22
Gerrit-Project: mediawiki/extensions/Flow
Gerrit-Branch: master
Gerrit-Owner: EBernhardson <[email protected]>
Gerrit-Reviewer: EBernhardson <[email protected]>
Gerrit-Reviewer: Mattflaschen <[email protected]>
Gerrit-Reviewer: Siebrand <[email protected]>
Gerrit-Reviewer: Werdna <[email protected]>
Gerrit-Reviewer: jenkins-bot <>
_______________________________________________
MediaWiki-commits mailing list
[email protected]
https://lists.wikimedia.org/mailman/listinfo/mediawiki-commits