Aaron Schulz has uploaded a new change for review.

  https://gerrit.wikimedia.org/r/247190

Change subject: SquidUpdate cleanups
......................................................................

SquidUpdate cleanups

* Actually implement DeferrableUpdate (contract sketched below)
* Remove the unused $maxTitles parameter from __construct()
* Remove the redundant $wgMaxSquidPurgeTitles cap from newFromTitles()
* Short-circuit purge() if $wgSquidServers is empty
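
For the first bullet, a minimal sketch of the contract being adopted,
assuming MediaWiki core's single-method DeferrableUpdate interface; the
doUpdate() body shown is illustrative, delegating to the existing static
purge() helper rather than quoting the patch itself:

    interface DeferrableUpdate {
        /** Perform the actual work of the deferred update */
        public function doUpdate();
    }

    // Inside SquidUpdate, an implementation along these lines lets
    // DeferredUpdates::addUpdate( new SquidUpdate( $urls ) ) queue the
    // purge and run it later via doUpdate():
    public function doUpdate() {
        SquidUpdate::purge( $this->urlArr );
    }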

Change-Id: I23b1f712001d72305dfa1c49063c3384ea67fb2f
---
M includes/deferred/SquidUpdate.php
1 file changed, 30 insertions(+), 37 deletions(-)


  git pull ssh://gerrit.wikimedia.org:29418/mediawiki/core refs/changes/90/247190/1

diff --git a/includes/deferred/SquidUpdate.php b/includes/deferred/SquidUpdate.php
index e148f56..8d535b1 100644
--- a/includes/deferred/SquidUpdate.php
+++ b/includes/deferred/SquidUpdate.php
@@ -25,7 +25,7 @@
  * Handles purging appropriate Squid URLs given a title (or titles)
  * @ingroup Cache
  */
-class SquidUpdate {
+class SquidUpdate implements DeferrableUpdate {
        /**
         * Collection of URLs to purge.
         * @var array
@@ -34,19 +34,15 @@
 
        /**
         * @param array $urlArr Collection of URLs to purge
-        * @param bool|int $maxTitles Maximum number of unique URLs to purge
         */
-       public function __construct( $urlArr = array(), $maxTitles = false ) {
+       public function __construct( array $urlArr ) {
                global $wgMaxSquidPurgeTitles;
-               if ( $maxTitles === false ) {
-                       $maxTitles = $wgMaxSquidPurgeTitles;
-               }
 
                // Remove duplicate URLs from list
                $urlArr = array_unique( $urlArr );
-               if ( count( $urlArr ) > $maxTitles ) {
+               if ( count( $urlArr ) > $wgMaxSquidPurgeTitles ) {
                        // Truncate to desired maximum URL count
-                       $urlArr = array_slice( $urlArr, 0, $maxTitles );
+                       $urlArr = array_slice( $urlArr, 0, $wgMaxSquidPurgeTitles );
                }
                $this->urlArr = $urlArr;
        }
@@ -59,14 +55,9 @@
         * @return SquidUpdate
         */
        public static function newFromTitles( $titles, $urlArr = array() ) {
-               global $wgMaxSquidPurgeTitles;
-               $i = 0;
                /** @var Title $title */
                foreach ( $titles as $title ) {
                        $urlArr[] = $title->getInternalURL();
-                       if ( $i++ > $wgMaxSquidPurgeTitles ) {
-                               break;
-                       }
                }
 
                return new SquidUpdate( $urlArr );
@@ -97,7 +88,7 @@
         *
         * @param array $urlArr List of full URLs to purge
         */
-       public static function purge( $urlArr ) {
+       public static function purge( array $urlArr ) {
                global $wgSquidServers, $wgHTCPRouting;
 
                if ( !$urlArr ) {
@@ -110,31 +101,33 @@
                        self::HTCPPurge( $urlArr );
                }
 
-               // Remove duplicate URLs
-               $urlArr = array_unique( $urlArr );
-               // Maximum number of parallel connections per squid
-               $maxSocketsPerSquid = 8;
-               // Number of requests to send per socket
-               // 400 seems to be a good tradeoff, opening a socket takes a while
-               $urlsPerSocket = 400;
-               $socketsPerSquid = ceil( count( $urlArr ) / $urlsPerSocket );
-               if ( $socketsPerSquid > $maxSocketsPerSquid ) {
-                       $socketsPerSquid = $maxSocketsPerSquid;
-               }
-
-               $pool = new SquidPurgeClientPool;
-               $chunks = array_chunk( $urlArr, ceil( count( $urlArr ) / $socketsPerSquid ) );
-               foreach ( $wgSquidServers as $server ) {
-                       foreach ( $chunks as $chunk ) {
-                               $client = new SquidPurgeClient( $server );
-                               foreach ( $chunk as $url ) {
-                                       $client->queuePurge( $url );
-                               }
-                               $pool->addClient( $client );
+               if ( $wgSquidServers ) {
+                       // Remove duplicate URLs
+                       $urlArr = array_unique( $urlArr );
+                       // Maximum number of parallel connections per squid
+                       $maxSocketsPerSquid = 8;
+                       // Number of requests to send per socket
+                       // 400 seems to be a good tradeoff, opening a socket takes a while
+                       $urlsPerSocket = 400;
+                       $socketsPerSquid = ceil( count( $urlArr ) / $urlsPerSocket );
+                       if ( $socketsPerSquid > $maxSocketsPerSquid ) {
+                               $socketsPerSquid = $maxSocketsPerSquid;
                        }
-               }
-               $pool->run();
 
+                       $pool = new SquidPurgeClientPool;
+                       $chunks = array_chunk( $urlArr, ceil( count( $urlArr ) / $socketsPerSquid ) );
+                       foreach ( $wgSquidServers as $server ) {
+                               foreach ( $chunks as $chunk ) {
+                                       $client = new SquidPurgeClient( $server );
+                                       foreach ( $chunk as $url ) {
+                                               $client->queuePurge( $url );
+                                       }
+                                       $pool->addClient( $client );
+                               }
+                       }
+
+                       $pool->run();
+               }
        }
 
        /**

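For context on the final purge() hunk above: the batching arithmetic is
unchanged, only moved inside the new $wgSquidServers guard. A hypothetical
worked example of the chunking, assuming an illustrative list of 1000 URLs:

    // With 1000 URLs, $urlsPerSocket = 400 and $maxSocketsPerSquid = 8:
    $socketsPerSquid = ceil( 1000 / 400 );        // 3, under the cap of 8
    $chunkSize = ceil( 1000 / $socketsPerSquid ); // 334 URLs per chunk
    // array_chunk() then yields chunks of 334, 334 and 332 URLs, and one
    // SquidPurgeClient is opened per chunk for every server in
    // $wgSquidServers before $pool->run() sends the purges.
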
-- 
To view, visit https://gerrit.wikimedia.org/r/247190

Gerrit-MessageType: newchange
Gerrit-Change-Id: I23b1f712001d72305dfa1c49063c3384ea67fb2f
Gerrit-PatchSet: 1
Gerrit-Project: mediawiki/core
Gerrit-Branch: master
Gerrit-Owner: Aaron Schulz <[email protected]>
