[MediaWiki-commits] [Gerrit] mediawiki...FileAnnotations[master]: Handle API request errors and improve caching
jenkins-bot has submitted this change and it was merged. Change subject: Handle API request errors and improve caching .. Handle API request errors and improve caching * Use adaptive TTLs so cache entries that change more often have lower TTLs than those that change less often. * Use "minAsOf" together with getChronologyProtectorTouched() to opportunistically purge the cache. This triggers when a user makes changes to one of the shared wiki DBs and then hits this API. * Increase the TTL to 1 hour, given the above. Depends-On: Ia1168cf0d46cfdee046838ce4c5a6294e4d81760 Change-Id: Id71bef631888ba3b12b9db59bd62bc2fe3647ea8 --- M ApiFileAnnotations.php 1 file changed, 135 insertions(+), 35 deletions(-) Approvals: MarkTraceur: Looks good to me, approved jenkins-bot: Verified diff --git a/ApiFileAnnotations.php b/ApiFileAnnotations.php index d50a063..73dafbd 100644 --- a/ApiFileAnnotations.php +++ b/ApiFileAnnotations.php @@ -23,13 +23,11 @@ * @copyright 2015 Mark Holmquist * @license GNU General Public License version 2.0 */ +use MediaWiki\MediaWikiServices; class ApiFileAnnotations extends ApiQueryBase { - // 5 minutes - long enough to avoid crashing the servers with a lot - // of repeated requests for the same data, but not long enough so it's - // hard to update information quickly. Cache not invalidated by changes - // to Wikidata, Wikipedia, or Commons.
- const CACHE_TTL = 300; + const MIN_CACHE_TTL = WANObjectCache::TTL_MINUTE; + const MAX_CACHE_TTL = WANObjectCache::TTL_DAY; public function __construct( $query, $moduleName ) { parent::__construct( $query, $moduleName, 'fa' ); @@ -86,12 +84,16 @@ protected function renderCommonsAnnotation( $commonsMatches ) { $categoryName = $commonsMatches[1]; + $safeAsOf = $this->getSafeCacheAsOfForUser( 'commonswiki' ); + $cache = ObjectCache::getMainWANInstance(); + $cacheKey = $cache->makeKey( 'fileannotations', 'commonscategory', $categoryName ); return $cache->getWithSetCallback( - $cache->makeKey( 'fileannotations', 'commonscategory', $categoryName ), - self::CACHE_TTL, - function ( $oldValue, &$ttl, array &$setOpts ) use ( $categoryName ) { + $cacheKey, + self::MAX_CACHE_TTL, + function ( $oldValue, &$ttl, array &$setOpts, $oldAsOf ) + use ( $cache, $categoryName, $cacheKey, $safeAsOf ) { $client = new MultiHttpClient( [] ); $response = $client->run( [ @@ -111,9 +113,14 @@ ], ] ); - $imagesApiData = json_decode( $response['body'], true ); + if ( $response['code'] == 200 ) { + $imagesApiData = json_decode( $response['body'], true ); + $pages = $imagesApiData['query']['pages']; + } else { + $pages = []; - $pages = $imagesApiData['query']['pages']; + $ttl = $cache::TTL_UNCACHEABLE; + } $imagesHtml = ''; @@ -136,25 +143,38 @@ ? '' . 'See more images' . '' : ''; - return + $html = '' . $imagesHtml . $seeMoreHtml . ''; - } + + $setOpts['staleTTL'] = self::MAX_CACHE_TTL; + if ( self::maybePurge( $safeAsOf, $oldValue, $html, $cache, $cacheKey ) ) { + $ttl = $cache::TTL_UNCACHEABLE; // don't bother; tombstoned by delete() + } else { + $ttl = self::elasticCacheTTL( $oldValue, $html, $oldAsOf, $ttl ); + } + + return $html; + }, + [ 'minAsOf' => $safeAsOf ] ); } protected function renderWikipediaAnnotation( $wpMatches ) { $articleName = $wpMatches[2]; $language = $wpMatches[1]; + $safeAsOf = $this->getSafeCacheAsOfForUser( 'enwiki' ); $cache = ObjectCache::getMainWANInstance(); +
[MediaWiki-commits] [Gerrit] mediawiki...FileAnnotations[master]: Handle API request errors and improve caching
Aaron Schulz has uploaded a new change for review. https://gerrit.wikimedia.org/r/309412 Change subject: Handle API request errors and improve caching .. Handle API request errors and improve caching * Use adaptive TTLs so cache entries that change more often have lower TTLs than those that change less often. * Also use "minAsOf" together with getChronologyProtectorTouched() to opportunistically purge the cache. This triggers when a user makes changes to one of the shared wiki DBs and then hits this API. * Increase the TTL to 1 hour, given the above. Depends-On: Ia1168cf0d46cfdee046838ce4c5a6294e4d81760 Change-Id: Id71bef631888ba3b12b9db59bd62bc2fe3647ea8 --- M ApiFileAnnotations.php 1 file changed, 106 insertions(+), 25 deletions(-) git pull ssh://gerrit.wikimedia.org:29418/mediawiki/extensions/FileAnnotations refs/changes/12/309412/1 diff --git a/ApiFileAnnotations.php b/ApiFileAnnotations.php index d50a063..8c65ad1 100644 --- a/ApiFileAnnotations.php +++ b/ApiFileAnnotations.php @@ -23,13 +23,11 @@ * @copyright 2015 Mark Holmquist * @license GNU General Public License version 2.0 */ +use MediaWiki\MediaWikiServices; class ApiFileAnnotations extends ApiQueryBase { - // 5 minutes - long enough to avoid crashing the servers with a lot - // of repeated requests for the same data, but not long enough so it's - // hard to update information quickly. Cache not invalidated by changes - // to Wikidata, Wikipedia, or Commons.
- const CACHE_TTL = 300; + const MIN_CACHE_TTL = WANObjectCache::TTL_MINUTE; + const MAX_CACHE_TTL = WANObjectCache::TTL_HOUR; public function __construct( $query, $moduleName ) { parent::__construct( $query, $moduleName, 'fa' ); @@ -86,12 +84,16 @@ protected function renderCommonsAnnotation( $commonsMatches ) { $categoryName = $commonsMatches[1]; + $safeAsOf = $this->getSafeCacheAsOfForUser( 'commonswiki' ); + $cache = ObjectCache::getMainWANInstance(); + $cacheKey = $cache->makeKey( 'fileannotations', 'commonscategory', $categoryName ); return $cache->getWithSetCallback( - $cache->makeKey( 'fileannotations', 'commonscategory', $categoryName ), - self::CACHE_TTL, - function ( $oldValue, &$ttl, array &$setOpts ) use ( $categoryName ) { + $cacheKey, + self::MAX_CACHE_TTL, + function ( $oldValue, &$ttl, array &$setOpts, $oldAsOf ) + use ( $cache, $categoryName, $cacheKey, $safeAsOf ) { $client = new MultiHttpClient( [] ); $response = $client->run( [ @@ -111,9 +113,14 @@ ], ] ); - $imagesApiData = json_decode( $response['body'], true ); + if ( $response['code'] == 200 ) { + $imagesApiData = json_decode( $response['body'], true ); + $pages = $imagesApiData['query']['pages']; + } else { + $pages = []; - $pages = $imagesApiData['query']['pages']; + $ttl = $cache::TTL_UNCACHEABLE; + } $imagesHtml = ''; @@ -136,25 +143,39 @@ ? '' . 'See more images' . '' : ''; - return + $html = '' . $imagesHtml . $seeMoreHtml . ''; - } + + if ( $safeAsOf && $oldValue && $oldValue !== $html ) { + $cache->delete( $cacheKey ); // update all datacenters + $ttl = $cache::TTL_UNCACHEABLE; // don't bother due to delete() tombstone + } else { + $virtualMtime = ( $oldValue === $html ) ? $oldAsOf : false; + $ttl = $cache->adaptiveTTL( $virtualMtime, $ttl, self::MIN_CACHE_TTL ); + } + + return $html; + }, + [ 'minAsOf' => $safeAsOf ] ); } protected function renderWikipediaAnnotation( $wpMatches ) { $articleName = $wpMatches[2];