Giuseppe Lavagetto has uploaded a new change for review.
https://gerrit.wikimedia.org/r/243139
Change subject: Add support for http_status to ProxyFetch
......................................................................
Add support for http_status to ProxyFetch
Now ProxyFetch accepts a configuration parameter, http_status, which
allows defining whether redirects are an expected outcome of the fetch.
Bug: T102393
Change-Id: If6a44a86741eeba13eee714f689d9b6b4f6d76c1
---
M pybal/monitors/proxyfetch.py
1 file changed, 70 insertions(+), 43 deletions(-)
git pull ssh://gerrit.wikimedia.org:29418/operations/debs/pybal
refs/changes/39/243139/1
diff --git a/pybal/monitors/proxyfetch.py b/pybal/monitors/proxyfetch.py
index c0c1e9e..4fa2c91 100644
--- a/pybal/monitors/proxyfetch.py
+++ b/pybal/monitors/proxyfetch.py
@@ -11,97 +11,121 @@
from twisted.web import client
from twisted.python.runtime import seconds
+
class RedirHTTPPageGetter(client.HTTPPageGetter):
    """PageGetter that accepts HTTP redirects as valid responses.

    getProxyPage selects this protocol for any expected status in the
    301-303 range, so all three redirect statuses must be accepted as
    success here, not only 301.
    """

    def handleStatus_301(self):
        """Treat a redirect status as if it were a 200 OK."""
        return self.handleStatus_200()

    # BUG FIX: getProxyPage routes expected statuses 301-303 to this
    # protocol (status > 300 and status < 304), but only 301 was
    # handled; an expected 302/303 response would still be reported
    # as a fetch failure.
    handleStatus_302 = handleStatus_301
    handleStatus_303 = handleStatus_301
+
+
class RedirHTTPClientFactory(client.HTTPClientFactory):
    """Client factory whose pages treat redirect responses as success."""

    # Swap in the redirect-tolerant page getter for the default protocol.
    protocol = RedirHTTPPageGetter
+
+
class ProxyFetchMonitoringProtocol(monitor.MonitoringProtocol):
"""
Monitor that checks server uptime by repeatedly fetching a certain URL
- """
-
+ """
+
INTV_CHECK = 10
-
+
TIMEOUT_GET = 5
-
+
+ HTTP_STATUS = 200
+
__name__ = 'ProxyFetch'
-
+
from twisted.internet import error
from twisted.web import error as weberror
catchList = ( defer.TimeoutError, weberror.Error, error.ConnectError,
error.DNSLookupError )
-
+
def __init__(self, coordinator, server, configuration=None):
    """
    Constructor.

    Arguments:
        coordinator: the pybal coordinator this monitor reports to
        server: the server object being monitored
        configuration: optional dict of monitor settings
            ('interval', 'timeout', 'http_status', 'url')
    """
    # Avoid the shared mutable-default-argument pitfall; behavior for
    # existing callers (who pass a dict or nothing) is unchanged.
    if configuration is None:
        configuration = {}

    # Call ancestor constructor
    super(ProxyFetchMonitoringProtocol, self).__init__(coordinator,
        server, configuration)

    self.intvCheck = self._getConfigInt('interval', self.INTV_CHECK)
    self.toGET = self._getConfigInt('timeout', self.TIMEOUT_GET)
    # BUG FIX: the patch called self._getConfigInit(), which does not
    # exist; the integer accessor used for the other options above is
    # _getConfigInt().
    self.expectedStatus = self._getConfigInt('http_status',
                                             self.HTTP_STATUS)

    self.checkCall = None
    self.getPageDeferred = defer.Deferred()

    # Timestamp (twisted seconds()) of the check currently in flight,
    # or None when no check is running.
    self.checkStartTime = None

    self.URL = self._getConfigStringList('url')
-
+
def run(self):
    """Begin periodic monitoring of the server."""
    super(ProxyFetchMonitoringProtocol, self).run()

    # A check is already scheduled; don't queue a second one.
    if self.checkCall and self.checkCall.active():
        return
    self.checkCall = reactor.callLater(self.intvCheck, self.check)
-
+
def stop(self):
    """Cancel any scheduled check and abort an in-flight fetch."""
    super(ProxyFetchMonitoringProtocol, self).stop()

    pending = self.checkCall
    # Cancel the upcoming check if one is still scheduled.
    if pending and pending.active():
        pending.cancel()

    # Abort the fetch currently in progress; _fetchFailed ignores the
    # resulting CancelledError, so this is not reported as a failure.
    self.getPageDeferred.cancel()
-
+
def check(self):
"""Periodically called method that does a single uptime check."""
-
+
if not self.active:
print "WARNING: ProxyFetchMonitoringProtocol.check() called while
active == False"
return
-
+
# FIXME: Use GET as a workaround for a Twisted bug with
HEAD/Content-length
# where it expects a body and throws a PartialDownload failure
-
+
import random
url = random.choice(self.URL)
try:
host = random.choice(self.server.ip4_addresses)
except (TypeError, IndexError):
host = self.server.host
-
+
self.checkStartTime = seconds()
- self.getPageDeferred = self.getProxyPage(url, method='GET', host=host,
port=self.server.port,
- timeout=self.toGET, followRedirect=False
- ).addCallbacks(self._fetchSuccessful, self._fetchFailed
- ).addBoth(self._checkFinished)
-
+ self.getPageDeferred = self.getProxyPage(
+ url, method='GET', host=host,
+ port=self.server.port,
+ status=self.expectedStatus,
+ timeout=self.toGET,
+ followRedirect=False
+ ).addCallbacks(
+ self._fetchSuccessful,
+ self._fetchFailed
+ ).addBoth(self._checkFinished)
+
def _fetchSuccessful(self, result):
    """Callback fired when getProxyPage completed successfully."""
    elapsed = seconds() - self.checkStartTime
    self.report('Fetch successful, %.3f s' % (elapsed,))
    self._resultUp()

    # Pass the fetched page through to any further callbacks.
    return result
-
+
def _fetchFailed(self, failure):
    """Errback fired when getProxyPage finished with a failure."""
    # A cancelled check (e.g. via stop()) is not a server failure.
    if failure.check(defer.CancelledError):
        return None

    duration = seconds() - self.checkStartTime
    self.report('Fetch failed, %.3f s' % (duration,))

    self._resultDown(failure.getErrorMessage())

    # Swallow only the expected error types; anything else propagates.
    failure.trap(*self.catchList)
def _checkFinished(self, result):
    """
    Common cleanup after every check, successful or not: reset the
    timing state and schedule the next check while still active.
    """
    self.checkStartTime = None

    if self.active:
        # Queue the next periodic check.
        self.checkCall = reactor.callLater(self.intvCheck, self.check)

    return result
- def getProxyPage(url, contextFactory=None, host=None, port=None, *args,
**kwargs):
+ def getProxyPage(url, contextFactory=None, host=None, port=None,
+ status=None, *args, **kwargs):
"""Download a web page as a string. (modified from
twisted.web.client.getPage)
-
+
Download a page. Return a deferred, which will callback with a
page (as a string) or errback with a description of the error.
-
+
See HTTPClientFactory to see what extra args can be passed.
"""
+ if status > 300 and status < 304:
+ factory = RedirHTTPClientFactory(url, *args, **kwargs)
+ else:
+ factory = client.HTTPClientFactory(url, *args, **kwargs)
- factory = client.HTTPClientFactory(url, *args, **kwargs)
-
host = host or factory.host
port = port or factory.port
--
To view, visit https://gerrit.wikimedia.org/r/243139
To unsubscribe, visit https://gerrit.wikimedia.org/r/settings
Gerrit-MessageType: newchange
Gerrit-Change-Id: If6a44a86741eeba13eee714f689d9b6b4f6d76c1
Gerrit-PatchSet: 1
Gerrit-Project: operations/debs/pybal
Gerrit-Branch: master
Gerrit-Owner: Giuseppe Lavagetto <[email protected]>
_______________________________________________
MediaWiki-commits mailing list
[email protected]
https://lists.wikimedia.org/mailman/listinfo/mediawiki-commits