Author: aum
Date: 2006-05-16 22:19:29 +0000 (Tue, 16 May 2006)
New Revision: 8724
Added:
trunk/apps/pyFreenet/fcp/
trunk/apps/pyFreenet/fcp/__init__.py
trunk/apps/pyFreenet/fcp/core.py
trunk/apps/pyFreenet/fcp/sitemgr.py
trunk/apps/pyFreenet/fcp/xmlrpc.py
Log:
refactored code
Added: trunk/apps/pyFreenet/fcp/__init__.py
===================================================================
--- trunk/apps/pyFreenet/fcp/__init__.py 2006-05-16 22:17:57 UTC (rev 8723)
+++ trunk/apps/pyFreenet/fcp/__init__.py 2006-05-16 22:19:29 UTC (rev 8724)
@@ -0,0 +1,14 @@
+from core import FCPNode, JobTicket
+from core import ConnectionRefused, FCPException, FCPGetFailed, \
+ FCPPutFailed, FCPProtocolError
+
+from core import SILENT, FATAL, CRITICAL, ERROR, INFO, DETAIL, DEBUG
+
+
+__all__ = ['core', 'sitemgr', 'xmlrpc',
+ 'FCPNode', 'JobTicket',
+           'ConnectionRefused', 'FCPException', 'FCPGetFailed',
+           'FCPPutFailed', 'FCPProtocolError',
+ ]
+
+
Added: trunk/apps/pyFreenet/fcp/core.py
===================================================================
--- trunk/apps/pyFreenet/fcp/core.py 2006-05-16 22:17:57 UTC (rev 8723)
+++ trunk/apps/pyFreenet/fcp/core.py 2006-05-16 22:19:29 UTC (rev 8724)
@@ -0,0 +1,1302 @@
+#!/usr/bin/env python
+"""
+An implementation of a freenet client library for
+FCP v2, offering considerable flexibility.
+
+Clients should instantiate FCPNode, then execute
+its methods to perform tasks with FCP.
+
+This module was written by aum, May 2006, released under the GNU Lesser General
+Public License.
+
+No warranty, yada yada
+
+"""
+
+import sys, os, socket, time, thread
+import threading, mimetypes, sha, Queue
+import select, traceback
+
+class ConnectionRefused(Exception):
+ """
+ cannot connect to given host/port
+ """
+
+class FCPException(Exception):
+
+ def __init__(self, info=None):
+ #print "Creating fcp exception"
+ if not info:
+ info = {}
+ self.info = info
+ #print "fcp exception created"
+ Exception.__init__(self, str(info))
+
+ def __str__(self):
+
+ parts = []
+ for k in ['header', 'ShortCodeDescription', 'CodeDescription']:
+ if self.info.has_key(k):
+ parts.append(str(self.info[k]))
+ return ";".join(parts) or "??"
+
+class FCPGetFailed(FCPException):
+ pass
+
+class FCPPutFailed(FCPException):
+ pass
+
+class FCPProtocolError(FCPException):
+ pass
+
+# where we can find the freenet node FCP port
+defaultFCPHost = "127.0.0.1"
+defaultFCPPort = 9481
+
+# poll timeout period for manager thread
+pollTimeout = 0.1
+#pollTimeout = 3
+
+# list of keywords sent from node to client, which have
+# int values
+intKeys = [
+ 'DataLength', 'Code',
+ ]
+
+# for the FCP 'ClientHello' handshake
+expectedVersion="2.0"
+
+# logger verbosity levels
+SILENT = 0
+FATAL = 1
+CRITICAL = 2
+ERROR = 3
+INFO = 4
+DETAIL = 5
+DEBUG = 6
+
+class FCPNode:
+ """
+ Represents an interface to a freenet node via its FCP port,
+ and exposes primitives for the basic genkey, get, put and putdir
+ operations.
+
+ Only one instance of FCPNode is needed across an entire
+ running client application, because its methods are quite thread-safe.
+ Creating 2 or more instances is a waste of resources.
+
+ Clients, when invoking methods, have several options regarding flow
+ control and event notification:
+
+    - synchronous call (the default). Here, no pending status messages
+      will ever be seen, and the call will only return when it has
+      completed (successfully, or otherwise)
+
+    - asynchronous call - this is invoked by passing the keyword argument
+      'async=True' to any of the main primitives. When a primitive is
+      invoked asynchronously, it will return a 'job ticket object'
+      immediately. This job ticket has methods for polling for job
+      completion, or blocking awaiting completion
+
+    - setting a callback. You can pass to any of the primitives a
+      'callback=somefunc' keyword arg, where 'somefunc' is a callable
+      object conforming to 'def somefunc(status, value)'
+
+      The callback function will be invoked when a primitive succeeds or
+      fails, as well as when a pending message is received from the node.
+
+      The 'status' argument passed will be one of:
+          - 'successful' - the primitive succeeded, and 'value' will
+            contain the result of the primitive
+ - 'pending' - the primitive is still executing, and 'value' will
+ contain the raw pending message sent back from the node, as a
+ dict
+ - 'failed' - the primitive failed, and as with 'pending', the
+ argument 'value' contains a dict containing the message fields
+ sent back from the node
+
+ Note that callbacks can be set in both synchronous and asynchronous
+ calling modes.
+
+ """
+ def __init__(self, **kw):
+ """
+ Create a connection object
+
+ Keyword Arguments:
+ - name - name of client to use with reqs, defaults to random. This
+ is crucial if you plan on making persistent requests
+ - host - hostname, defaults to defaultFCPHost
+ - port - port number, defaults to defaultFCPPort
+        - logfile - a pathname or writable file object, to which log
+          messages should be written, defaults to stdout
+        - verbosity - how detailed the log messages should be, defaults to 0
+          (silence)
+
+        Attributes of interest:
+        - jobs - a dict of currently running jobs (persistent and
+          non-persistent). Keys are job ids and values are JobTicket
+          objects
+
+        Notes:
+        - when the connection is created, a 'hello' handshake takes place.
+          After that handshake, the node sends back a list of outstanding
+          persistent requests left over from the last connection (based on
+          the value of the 'name' keyword passed into this constructor).
+
+          This object then wraps all this info into JobTicket instances
+          and stores them in the self.jobs dict
+
+ """
+ # grab and save parms
+        self.name = kw.get('name', kw.get('clientName', self._getUniqueId()))
+ self.host = kw.get('host', defaultFCPHost)
+ self.port = kw.get('port', defaultFCPPort)
+
+ # set up the logger
+ logfile = kw.get('logfile', None) or sys.stdout
+ if not hasattr(logfile, 'write'):
+ # might be a pathname
+ if not isinstance(logfile, str):
+                raise Exception("Bad logfile '%s', must be pathname or file object" % logfile)
+ logfile = file(logfile, "a")
+ self.logfile = logfile
+ self.verbosity = kw.get('verbosity', 0)
+
+ # try to connect to node
+ self.socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+ try:
+ self.socket.connect((self.host, self.port))
+ except Exception, e:
+            raise Exception("Failed to connect to %s:%s - %s" % (self.host, self.port, e))
+
+ # now do the hello
+ self._hello()
+
+ # the pending job tickets
+ self.jobs = {} # keyed by request ID
+
+ # queue for incoming client requests
+ self.clientReqQueue = Queue.Queue()
+
+ # launch receiver thread
+ self.running = True
+ thread.start_new_thread(self._mgrThread, ())
+
+ def __del__(self):
+ """
+ object is getting cleaned up, so disconnect
+ """
+ # terminate the node
+ try:
+ self.shutdown()
+ except:
+ traceback.print_exc()
+ pass
+
+ # basic FCP primitives
+
+ def genkey(self, **kw):
+ """
+ Generates and returns an SSK keypair
+
+ Keywords:
+ - async - whether to do this call asynchronously, and
+ return a JobTicket object
+ - callback - if given, this should be a callable which accepts 2
+ arguments:
+ - status - will be one of 'successful', 'failed' or 'pending'
+ - value - depends on status:
+ - if status is 'successful', this will contain the value
+ returned from the command
+ - if status is 'failed' or 'pending', this will contain
+ a dict containing the response from node
+ """
+ id = kw.pop("id", None)
+ if not id:
+ id = self._getUniqueId()
+
+ return self._submitCmd(id, "GenerateSSK", Identifier=id, **kw)
+
+ def get(self, uri, **kw):
+ """
+ Does a direct get of a key
+
+ Keywords:
+        - async - whether to return immediately with a job ticket object,
+          default False (wait for completion)
+        - persistence - default 'connection' - the kind of persistence for
+          this request. Valid values are 'connection', 'reboot' and
+          'forever', as per the FCP spec. If 'reboot' or 'forever', this
+          job can be recalled in subsequent FCP sessions
+        - Global - default false - if evaluates to true, puts this request
+          on the global queue. Note the capital G in Global. If you set
+          this, persistence must be 'reboot' or 'forever'
+        - verbosity - default 0 - sets the Verbosity mask passed in the
+          FCP message
+
+        - maxretries - maximum number of retries, default 3
+        - maxsize - maximum allowed size of the retrieved data, default
+          1000000000000
+        - priority - the PriorityClass for retrieval, default 1
+
+        - dsonly - whether to only check the local datastore
+        - ignoreds - don't check the local datastore
+        - file - if given, this is a pathname to which to store the
+          retrieved key
+        - nodata - if true, no data will be returned. This can be a useful
+          test of whether a key is retrievable, without having to consume
+          resources by retrieving it
+
+        Returns a 2-tuple, depending on keyword args:
+        - if 'file' is given, returns (mimetype, pathname) if key is
+          returned
+        - if 'file' is not given, returns (mimetype, data) if key is
+          returned
+        - if 'nodata' is true, returns (mimetype, 1) if key is returned
+        If the key is not found, raises an exception
+ """
+ self._log(INFO, "get: uri=%s" % uri)
+
+ # ---------------------------------
+ # format the request
+ opts = {}
+
+ id = kw.pop("id", None)
+ if not id:
+ id = self._getUniqueId()
+
+ opts['async'] = kw.pop('async', False)
+ if kw.has_key('callback'):
+ opts['callback'] = kw['callback']
+
+ opts['Persistence'] = kw.pop('persistence', 'connection')
+ if kw.get('Global', False):
+ print "global get"
+ opts['Global'] = "true"
+ else:
+ opts['Global'] = "false"
+
+ opts['Verbosity'] = kw.get('verbosity', 0)
+
+ if opts['Global'] == 'true' and opts['Persistence'] == 'connection':
+ raise Exception("Global requests must be persistent")
+
+ file = kw.pop("file", None)
+ if file:
+ opts['ReturnType'] = "disk"
+ #opts['File'] = file
+ opts['Filename'] = file
+
+        elif kw.get('nodata', False):
+ nodata = True
+ opts['ReturnType'] = "none"
+ else:
+ nodata = False
+ opts['ReturnType'] = "direct"
+
+ opts['Identifier'] = id
+
+ if kw.get("ignoreds", False):
+ opts["IgnoreDS"] = "true"
+ else:
+ opts["IgnoreDS"] = "false"
+
+ if kw.get("dsonly", False):
+ opts["DSOnly"] = "true"
+ else:
+ opts["DSOnly"] = "false"
+
+ opts['URI'] = uri
+
+ opts['MaxRetries'] = kw.get("maxretries", 3)
+ opts['MaxSize'] = kw.get("maxsize", "1000000000000")
+ opts['PriorityClass'] = int(kw.get("priority", 1))
+
+ # ---------------------------------
+ # now enqueue the request
+ return self._submitCmd(id, "ClientGet", **opts)
+
+ def put(self, uri="CHK@", **kw):
+ """
+ Inserts a key
+
+ Arguments:
+ - uri - uri under which to insert the key
+
+        Keywords - you must specify one of the following to choose an
+        insert mode:
+        - file - path of file from which to read the key data
+        - data - the raw data of the key as string
+        - dir - the directory to insert, for freesite insertion
+        - redirect - the target URI to redirect to
+
+        Keywords for 'dir' mode:
+        - name - name of the freesite, the 'sitename' in
+          'SSK@privkey/sitename'
+        - usk - whether to insert as a USK
+          (USK@privkey/sitename/version/), default False
+        - version - valid if usk is true, default 0
+
+        Keywords for 'file' and 'data' modes:
+        - chkonly - only generate CHK, don't insert - default false
+        - nocompress - do not compress on insert - default false
+
+        Keywords for 'file', 'data' and 'redirect' modes:
+        - mimetype - the mime type, default text/plain
+
+        Keywords valid for all modes:
+        - async - whether to do the job asynchronously, returning a job
+          ticket object (default False)
+        - persistence - default 'connection' - the kind of persistence for
+          this request. Valid values are 'connection', 'reboot' and
+          'forever', as per the FCP spec. If 'reboot' or 'forever', this
+          job can be recalled in subsequent FCP sessions
+        - Global - default false - if evaluates to true, puts this request
+          on the global queue. Note the capital G in Global. If you set
+          this, persistence must be 'reboot' or 'forever'
+        - verbosity - default 0 - sets the Verbosity mask passed in the
+          FCP message
+
+        - maxretries - maximum number of retries, default 3
+        - priority - default 1
+
+        Notes:
+        - exactly one of 'file', 'data', 'dir' or 'redirect' keyword
+          arguments must be present
+ """
+ self._log(INFO, "put: uri=%s" % uri)
+
+ # divert to putdir if dir keyword present
+ if kw.has_key('dir'):
+ return self.putdir(uri, **kw)
+
+ # ---------------------------------
+ # format the request
+ opts = {}
+
+ opts['async'] = kw.get('async', False)
+ if kw.has_key('callback'):
+ opts['callback'] = kw['callback']
+
+ opts['Persistence'] = kw.pop('persistence', 'connection')
+ if kw.get('Global', False):
+ opts['Global'] = "true"
+ else:
+ opts['Global'] = "false"
+
+ if opts['Global'] == 'true' and opts['Persistence'] == 'connection':
+ raise Exception("Global requests must be persistent")
+
+ opts['URI'] = uri
+ opts['Metadata.ContentType'] = kw.get("mimetype", "text/plain")
+
+ id = kw.pop("id", None)
+ if not id:
+ id = self._getUniqueId()
+ opts['Identifier'] = id
+
+ opts['Verbosity'] = kw.get('verbosity', 0)
+ opts['MaxRetries'] = kw.get("maxretries", 3)
+ opts['PriorityClass'] = kw.get("priority", 1)
+ opts['GetCHKOnly'] = toBool(kw.get("chkonly", "false"))
+ opts['DontCompress'] = toBool(kw.get("nocompress", "false"))
+
+ if kw.has_key("file"):
+ opts['UploadFrom'] = "disk"
+ opts['Filename'] = kw['file']
+ if not kw.has_key("mimetype"):
+                opts['Metadata.ContentType'] = mimetypes.guess_type(kw['file'])[0] or "text/plain"
+
+ elif kw.has_key("data"):
+ opts["UploadFrom"] = "direct"
+ opts["Data"] = kw['data']
+
+ elif kw.has_key("redirect"):
+ opts["UploadFrom"] = "redirect"
+ opts["TargetURI"] = kw['redirect']
+ else:
+ raise Exception("Must specify file, data or redirect keywords")
+
+ #print "sendEnd=%s" % sendEnd
+
+ # ---------------------------------
+ # now dispatch the job
+ return self._submitCmd(id, "ClientPut", **opts)
+
+ def putdir(self, uri, **kw):
+ """
+ Inserts a freesite
+
+ Arguments:
+ - uri - uri under which to insert the key
+
+ Keywords:
+ - dir - the directory to insert - mandatory, no default.
+ This directory must contain a toplevel index.html file
+ - name - the name of the freesite, defaults to 'freesite'
+ - usk - set to True to insert as USK (Default false)
+ - version - the USK version number, default 0
+
+ - maxretries - maximum number of retries, default 3
+ - priority - default 1
+
+        - async - default False - if True, return immediately with a job
+          ticket
+        - persistence - default 'connection' - the kind of persistence for
+          this request. Valid values are 'connection', 'reboot' and
+          'forever', as per the FCP spec. If 'reboot' or 'forever', this
+          job can be recalled in subsequent FCP sessions
+        - Global - default false - if evaluates to true, puts this request
+          on the global queue. Note the capital G in Global. If you set
+          this, persistence must be 'reboot' or 'forever'
+        - verbosity - default 0 - sets the Verbosity mask passed in the
+          FCP message
+
+ Returns:
+ - the URI under which the freesite can be retrieved
+ """
+ self._log(INFO, "putdir: uri=%s dir=%s" % (uri, kw['dir']))
+
+ # -------------------------------------
+ # format the command
+ #
+ # note that with this primitive, we have to format the command
+ # buffer ourselves, not just drop it through as a bunch of keywords,
+ # since we want to control the order of keyword lines
+
+ # get keyword args
+ dir = kw['dir']
+ sitename = kw.get('name', 'freesite')
+ usk = kw.get('usk', False)
+ version = kw.get('version', 0)
+ maxretries = kw.get('maxretries', 3)
+ priority = kw.get('priority', 1)
+
+ id = kw.pop("id", None)
+ if not id:
+ id = self._getUniqueId()
+
+ # derive final URI for insert
+ uriFull = uri + sitename + "/"
+ if kw.get('usk', False):
+ uriFull += "%d/" % int(version)
+ uriFull = uriFull.replace("SSK@", "USK@")
+
+ # build a big command buffer
+ msgLines = ["ClientPutComplexDir",
+ "Identifier=%s" % id,
+ "Verbosity=%s" % kw.get('verbosity', 0),
+ "MaxRetries=%s" % maxretries,
+ "PriorityClass=%s" % priority,
+ "URI=%s" % uriFull,
+ "Persistence=%s" % kw.get("persistence", "connection"),
+ ]
+
+ if kw.get('Global', False):
+ msgLines.append("Global=true")
+ else:
+ msgLines.append("Global=false")
+
+ # scan directory and add its files
+ n = 0
+ manifest = readdir(kw['dir'])
+ default = None
+ for file in manifest:
+ relpath = file['relpath']
+ fullpath = file['fullpath']
+ mimetype = file['mimetype']
+
+ if relpath == 'index.html':
+ default = file
+ self._log(DETAIL, "n=%s relpath=%s" % (repr(n), repr(relpath)))
+
+ msgLines.extend(["Files.%d.Name=%s" % (n, relpath),
+ "Files.%d.UploadFrom=disk" % n,
+ "Files.%d.Filename=%s" % (n, fullpath),
+ ])
+ n += 1
+
+ # now, add the default file
+ msgLines.extend(["Files.%d.Name=" % n,
+ "Files.%d.UploadFrom=disk" % n,
+ "Files.%d.Filename=%s" % (n, default['fullpath']),
+ ])
+
+ msgLines.append("EndMessage")
+
+ for line in msgLines:
+ self._log(DETAIL, line)
+ fullbuf = "\n".join(msgLines) + "\n"
+
+ # --------------------------------------
+ # now dispatch the job
+ return self._submitCmd(id, "ClientPutComplexDir",
+ rawcmd=fullbuf,
+                               async=kw.get('async', False),
+                               callback=kw.get('callback', None),
+                               Persistence=kw.get('persistence', 'connection'),
+ )
+
+
+
+ # high level client methods
+
+ def listenGlobal(self, **kw):
+ """
+ Enable listening on global queue
+ """
+ self._submitCmd(None, "WatchGlobal", Enabled="true", **kw)
+
+ def ignoreGlobal(self, **kw):
+ """
+ Stop listening on global queue
+ """
+ self._submitCmd(None, "WatchGlobal", Enabled="false", **kw)
+
+ def purgePersistentJobs(self):
+ """
+ Cancels all persistent jobs in one go
+ """
+ for job in self.getPersistentJobs():
+ job.cancel()
+
+ def getAllJobs(self):
+ """
+        Returns a list of all known jobs: transient, persistent and global
+ """
+ return self.jobs.values()
+
+ def getPersistentJobs(self):
+ """
+ Returns a list of persistent jobs, excluding global jobs
+ """
+        return [j for j in self.jobs.values() if j.isPersistent and not j.isGlobal]
+
+ def getGlobalJobs(self):
+ """
+ Returns a list of global jobs
+ """
+ return [j for j in self.jobs.values() if j.isGlobal]
+
+ def getTransientJobs(self):
+ """
+ Returns a list of non-persistent, non-global jobs
+ """
+ return [j for j in self.jobs.values() if not j.isPersistent]
+
+ def refreshPersistentRequests(self, **kw):
+ """
+ Sends a ListPersistentRequests to node, to ensure that
+ our records of persistent requests are up to date.
+
+ Since, upon connection, the node sends us a list of all
+ outstanding persistent requests anyway, I can't really
+ see much use for this method. I've only added the method
+ for FCP spec compliance
+ """
+ self._log(INFO, "listPersistentRequests")
+
+ if self.jobs.has_key('__global'):
+            raise Exception("An existing non-identifier job is currently pending")
+
+ # ---------------------------------
+ # format the request
+ opts = {}
+
+ id = '__global'
+ opts['Identifier'] = id
+
+ opts['async'] = kw.pop('async', False)
+ if kw.has_key('callback'):
+ opts['callback'] = kw['callback']
+
+ # ---------------------------------
+ # now enqueue the request
+ return self._submitCmd(id, "ListPersistentRequests", **opts)
+
+ def setVerbosity(self, verbosity):
+ """
+ Sets the verbosity for future logging calls
+ """
+ self.verbosity = verbosity
+
+ def shutdown(self):
+ """
+ Terminates the manager thread
+
+ You should explicitly shutdown any existing nodes
+ before exiting your client application
+ """
+ self.running = False
+
+ # give the manager thread a chance to bail out
+ time.sleep(pollTimeout * 3)
+
+ # shut down FCP connection
+ if hasattr(self, 'socket'):
+ self.socket.close()
+ del self.socket
+
+ # and close the logfile
+ if self.logfile not in [sys.stdout, sys.stderr]:
+ self.logfile.close()
+
+
+
+
+ # methods for manager thread
+
+ def _mgrThread(self):
+ """
+ This thread is the nucleus of pyfcp, and coordinates incoming
+ client commands and incoming node responses
+ """
+ log = self._log
+ try:
+ while self.running:
+
+ log(DEBUG, "Top of manager thread")
+
+ # try for incoming messages from node
+ log(DEBUG, "Testing for incoming message")
+ if self._msgIncoming():
+ log(DEBUG, "Retrieving incoming message")
+ msg = self._rxMsg()
+ log(DEBUG, "Got incoming message, dispatching")
+ self._on_rxMsg(msg)
+ log(DEBUG, "back from on_rxMsg")
+ else:
+ log(DEBUG, "No incoming message from node")
+
+ # try for incoming requests from clients
+ log(DEBUG, "Testing for client req")
+ try:
+ req = self.clientReqQueue.get(True, pollTimeout)
+ log(DEBUG, "Got client req, dispatching")
+ self._on_clientReq(req)
+ log(DEBUG, "Back from on_clientReq")
+ except Queue.Empty:
+ log(DEBUG, "No incoming client req")
+ pass
+
+ self._log(INFO, "Manager thread terminated normally")
+ return
+
+ except:
+ traceback.print_exc()
+ self._log(CRITICAL, "manager thread crashed")
+
+ def _msgIncoming(self):
+ """
+ Returns True if a message is coming in from the node
+ """
+ return len(select.select([self.socket], [], [], pollTimeout)[0]) > 0
+
+ def _submitCmd(self, id, cmd, **kw):
+ """
+ Submits a command for execution
+
+ Arguments:
+ - id - the command identifier
+ - cmd - the command name, such as 'ClientPut'
+
+ Keywords:
+ - async - whether to return a JobTicket object, rather than
+ the command result
+ - callback - a function taking 2 args 'status' and 'value'.
+ Status is one of 'successful', 'pending' or 'failed'.
+ value is the primitive return value if successful, or the raw
+ node message if pending or failed
+ - rawcmd - a raw command buffer to send directly
+ - options specific to command such as 'URI'
+
+ Returns:
+ - if command is sent in sync mode, returns the result
+ - if command is sent in async mode, returns a JobTicket
+ object which the client can poll or block on later
+ """
+ async = kw.pop('async', False)
+ job = JobTicket(self, id, cmd, kw)
+
+ self.clientReqQueue.put(job)
+
+        self._log(DEBUG, "_submitCmd: id=%s cmd=%s kw=%s" % (id, cmd, str(kw)[:256]))
+
+ if cmd == 'WatchGlobal':
+ return
+ elif async:
+ return job
+ else:
+ return job.wait()
+
+ def _on_rxMsg(self, msg):
+ """
+ Handles incoming messages from node
+
+ If an incoming message represents the termination of a command,
+ the job ticket object will be notified accordingly
+ """
+ log = self._log
+
+ # find the job this relates to
+ id = msg.get('Identifier', '__global')
+
+ hdr = msg['header']
+
+ job = self.jobs.get(id, None)
+ if not job:
+ # we have a global job and/or persistent job from last connection
+ log(INFO, "Got %s from prior session" % hdr)
+ job = JobTicket(self, id, hdr, msg)
+ self.jobs[id] = job
+
+ # action from here depends on what kind of message we got
+
+ # -----------------------------
+ # handle GenerateSSK responses
+
+ if hdr == 'SSKKeypair':
+ # got requested keys back
+ keys = (msg['RequestURI'], msg['InsertURI'])
+ job.callback('successful', keys)
+ job._putResult(keys)
+
+ # and remove job from queue
+ self.jobs.pop(id, None)
+ return
+
+ # -----------------------------
+ # handle ClientGet responses
+
+ if hdr == 'DataFound':
+ log(INFO, "Got DataFound for URI=%s" % job.kw['URI'])
+ mimetype = msg['Metadata.ContentType']
+ if job.kw.has_key('Filename'):
+ # already stored to disk, done
+ #resp['file'] = file
+ result = (mimetype, job.kw['Filename'])
+ job.callback('successful', result)
+ job._putResult(result)
+ return
+
+ elif job.kw['ReturnType'] == 'none':
+ result = (mimetype, 1)
+ job.callback('successful', result)
+ job._putResult(result)
+ return
+
+ # otherwise, we're expecting an AllData and will react to it then
+ else:
+ # is this a persistent get?
+ if job.kw['ReturnType'] == 'direct' \
+ and job.kw.get('Persistence', None) != 'connection':
+ # gotta poll for request status so we can get our data
+ # FIXME: this is a hack, clean it up
+ log(INFO, "Request was persistent")
+ if not hasattr(job, "gotPersistentDataFound"):
+ if job.isGlobal:
+ isGlobal = "true"
+ else:
+ isGlobal = "false"
+ job.gotPersistentDataFound = True
+ log(INFO, " --> sending GetRequestStatus")
+ self._txMsg("GetRequestStatus",
+ Identifier=job.kw['Identifier'],
+                                    Persistence=msg.get("Persistence", "connection"),
+ Global=isGlobal,
+ )
+
+ job.callback('pending', msg)
+ job.mimetype = mimetype
+ return
+
+ if hdr == 'AllData':
+ result = (job.mimetype, msg['Data'])
+ job.callback('successful', result)
+ job._putResult(result)
+ return
+
+ if hdr == 'GetFailed':
+ # return an exception
+ job.callback("failed", msg)
+ job._putResult(FCPGetFailed(msg))
+ return
+
+ # -----------------------------
+ # handle ClientPut responses
+
+        if hdr == 'URIGenerated':
+
+            job.uri = msg['URI']
+            newUri = msg['URI']
+            job.callback('pending', msg)
+
+            # bail here if no data is coming back - client only wanted a CHK
+            if job.kw.get('GetCHKOnly', False) == 'true':
+                job._putResult(newUri)
+
+            return
+
+ if hdr == 'PutSuccessful':
+ result = msg['URI']
+ job.callback('successful', result)
+ job._putResult(result)
+ return
+
+ if hdr == 'PutFailed':
+ job.callback('failed', msg)
+ job._putResult(FCPPutFailed(msg))
+ return
+
+ # -----------------------------
+ # handle progress messages
+
+ if hdr == 'StartedCompression':
+ job.callback('pending', msg)
+ return
+
+ if hdr == 'FinishedCompression':
+ job.callback('pending', msg)
+ return
+
+ if hdr == 'SimpleProgress':
+ job.callback('pending', msg)
+ return
+
+ # -----------------------------
+ # handle persistent job messages
+
+ if hdr == 'PersistentGet':
+ job.callback('pending', msg)
+ job._appendMsg(msg)
+ return
+
+ if hdr == 'PersistentPut':
+ job.callback('pending', msg)
+ job._appendMsg(msg)
+ return
+
+ if hdr == 'PersistentPutDir':
+ job.callback('pending', msg)
+ job._appendMsg(msg)
+ return
+
+ if hdr == 'EndListPersistentRequests':
+ job._appendMsg(msg)
+ job.callback('successful', job.msgs)
+ job._putResult(job.msgs)
+ return
+
+ # -----------------------------
+ # handle various errors
+
+ if hdr == 'ProtocolError':
+ job.callback('failed', msg)
+ job._putResult(FCPProtocolError(msg))
+ return
+
+ if hdr == 'IdentifierCollision':
+ log(ERROR, "IdentifierCollision on id %s ???" % id)
+ job.callback('failed', msg)
+ job._putResult(Exception("Duplicate job identifier %s" % id))
+ return
+
+ # -----------------------------
+ # wtf is happening here?!?
+
+ log(ERROR, "Unknown message type from node: %s" % hdr)
+ job.callback('failed', msg)
+ job._putResult(FCPException(msg))
+        return
+
+    def _on_clientReq(self, job):
+ """
+ takes an incoming request job from client and transmits it to
+ the fcp port, and also registers it so the manager thread
+ can action responses from the fcp port.
+ """
+ id = job.id
+ cmd = job.cmd
+ kw = job.kw
+
+ # register the req
+ if cmd != 'WatchGlobal':
+ self.jobs[id] = job
+
+ # now can send, since we're the only one who will
+ self._txMsg(cmd, **kw)
+
+
+    # low level node comms methods
+
+ def _hello(self):
+ """
+ perform the initial FCP protocol handshake
+ """
+ self._txMsg("ClientHello",
+ Name=self.name,
+ ExpectedVersion=expectedVersion)
+
+ resp = self._rxMsg()
+ return resp
+
+ def _getUniqueId(self):
+ """
+ Allocate a unique ID for a request
+ """
+ return "id" + str(int(time.time() * 1000000))
+
+ def _txMsg(self, msgType, **kw):
+ """
+ low level message send
+
+ Arguments:
+ - msgType - one of the FCP message headers, such as 'ClientHello'
+ - args - zero or more (keyword, value) tuples
+ Keywords:
+ - rawcmd - if given, this is the raw buffer to send
+ - other keywords depend on the value of msgType
+ """
+ log = self._log
+
+ # just send the raw command, if given
+ rawcmd = kw.get('rawcmd', None)
+ if rawcmd:
+ self.socket.send(rawcmd)
+ log(DETAIL, "CLIENT: %s" % rawcmd)
+ return
+
+ if kw.has_key("Data"):
+ data = kw.pop("Data")
+ sendEndMessage = False
+ else:
+ data = None
+ sendEndMessage = True
+
+ items = [msgType + "\n"]
+ log(DETAIL, "CLIENT: %s" % msgType)
+
+ #print "CLIENT: %s" % msgType
+ for k, v in kw.items():
+ #print "CLIENT: %s=%s" % (k,v)
+ line = k + "=" + str(v)
+ items.append(line + "\n")
+ log(DETAIL, "CLIENT: %s" % line)
+
+ if data != None:
+ items.append("DataLength=%d\n" % len(data))
+ log(DETAIL, "CLIENT: DataLength=%d" % len(data))
+ items.append("Data\n")
+ log(DETAIL, "CLIENT: ...data...")
+ items.append(data)
+
+ #print "sendEndMessage=%s" % sendEndMessage
+
+ if sendEndMessage:
+ items.append("EndMessage\n")
+ log(DETAIL, "CLIENT: EndMessage")
+ raw = "".join(items)
+
+ self.socket.send(raw)
+
+ def _rxMsg(self):
+ """
+ Receives and returns a message as a dict
+
+ The header keyword is included as key 'header'
+ """
+ log = self._log
+
+ log(DETAIL, "NODE: ----------------------------")
+
+ # shorthand, for reading n bytes
+ def read(n):
+ if n > 1:
+ log(DEBUG, "read: want %d bytes" % n)
+ chunks = []
+ remaining = n
+ while remaining > 0:
+ chunk = self.socket.recv(remaining)
+ chunklen = len(chunk)
+ if chunk:
+ chunks.append(chunk)
+ remaining -= chunklen
+ if remaining > 0:
+ if n > 1:
+ log(DEBUG,
+                            "wanted %s, got %s, still need %s bytes" % (n, chunklen, remaining)
+ )
+ pass
+ buf = "".join(chunks)
+ return buf
+
+ # read a line
+ def readln():
+ buf = []
+ while True:
+ c = read(1)
+ buf.append(c)
+ if c == '\n':
+ break
+ ln = "".join(buf)
+ log(DETAIL, "NODE: " + ln[:-1])
+ return ln
+
+ items = {}
+
+ # read the header line
+ while True:
+ line = readln().strip()
+ if line:
+ items['header'] = line
+ break
+
+ # read the body
+ while True:
+ line = readln().strip()
+ if line in ['End', 'EndMessage']:
+ break
+
+ if line == 'Data':
+ # read the following data
+ buf = read(items['DataLength'])
+ items['Data'] = buf
+ log(DETAIL, "NODE: ...<%d bytes of data>" % len(buf))
+ break
+ else:
+ # it's a normal 'key=val' pair
+ try:
+                    k, v = line.split("=", 1)
+ except:
+ #print "unexpected: %s"% line
+ raise
+
+ # attempt int conversion
+ try:
+ v = int(v)
+ except:
+ pass
+
+ items[k] = v
+
+ # all done
+ return items
+
+ def _log(self, level, msg):
+ """
+ Logs a message. If level > verbosity, don't output it
+ """
+ if level > self.verbosity:
+ return
+
+ if not msg.endswith("\n"): msg += "\n"
+
+ self.logfile.write(msg)
+ self.logfile.flush()
+
+
+class JobTicket:
+ """
+ A JobTicket is an object returned to clients making
+ asynchronous requests. It puts them in control of how
+ they manage n concurrent requests.
+
+ When you as a client receive a JobTicket, you can choose to:
+ - block, awaiting completion of the job
+ - poll the job for completion status
+ - receive a callback upon completion
+
+ Attributes of interest:
+ - isPersistent - True if job is persistent
+ - isGlobal - True if job is global
+ - value - value returned upon completion, or None if not complete
+ - node - the node this job belongs to
+ - id - the job Identifier
+ - cmd - the FCP message header word
+ - kw - the keywords in the FCP header
+ - msgs - any messages received from node in connection
+ to this job
+ """
+ def __init__(self, node, id, cmd, kw):
+ """
+ You should never instantiate a JobTicket object yourself
+ """
+ self.node = node
+ self.id = id
+ self.cmd = cmd
+
+ # find out if persistent
+ if kw.get("Persistent", "connection") != "connection" \
+ or kw.get("PersistenceType", "connection") != "connection":
+ self.isPersistent = True
+ else:
+ self.isPersistent = False
+
+ if kw.get('Global', 'false') == 'true':
+ self.isGlobal = True
+ else:
+ self.isGlobal = False
+
+ self.kw = kw
+
+ self.msgs = []
+
+ callback = kw.pop('callback', None)
+ if callback:
+ self.callback = callback
+
+
+ self.lock = threading.Lock()
+ self.lock.acquire()
+ self.result = None
+
+ def isComplete(self):
+ """
+ Returns True if the job has been completed
+ """
+ return self.result != None
+
+ def wait(self, timeout=None):
+ """
+        Blocks until the job completes. (The 'timeout' argument is accepted
+        for API compatibility, but is not currently honoured.)
+ """
+ self.lock.acquire()
+ self.lock.release()
+        return self.getResult()
+
+    def getResult(self):
+ """
+ Returns result of job, or None if job still not complete
+
+ If result is an exception object, then raises it
+ """
+ if isinstance(self.result, Exception):
+ raise self.result
+ else:
+ return self.result
+
+ def callback(self, status, value):
+ """
+ This will be replaced in job ticket instances wherever
+ user provides callback arguments
+ """
+ # no action needed
+
+ def cancel(self):
+ """
+ Cancels the job, if it is persistent
+
+ Does nothing if the job was not persistent
+ """
+ if not self.isPersistent:
+ return
+
+ # remove from node's jobs lists
+ try:
+ del self.node.jobs[self.id]
+ except:
+ pass
+
+ # send the cancel
+ if self.isGlobal:
+ isGlobal = "true"
+ else:
+            isGlobal = "false"
+
+ self.node._txMsg("RemovePersistentRequest",
+ Global=isGlobal,
+ Identifier=self.id)
+
+ def _appendMsg(self, msg):
+ self.msgs.append(msg)
+
+ def _putResult(self, result):
+ """
+ Called by manager thread to indicate job is complete,
+ and submit a result to be picked up by client
+ """
+ self.result = result
+
+ if not self.isPersistent:
+ try:
+ del self.node.jobs[self.id]
+ except:
+ pass
+
+ self.lock.release()
+
+ def __repr__(self):
+ if self.kw.has_key("URI"):
+ uri = " URI=%s" % self.kw['URI']
+ else:
+ uri = ""
+        return "<FCP job %s:%s%s>" % (self.id, self.cmd, uri)
+
+
+def toBool(arg):
+    """
+    Normalises an int, bool or string argument to the FCP-style
+    strings "true" or "false"
+    """
+    try:
+        if int(arg):
+            return "true"
+        else:
+            return "false"
+    except:
+        pass
+
+    if isinstance(arg, str):
+        if arg.strip().lower().startswith('t'):
+            return "true"
+        else:
+            return "false"
+
+    if arg:
+        return "true"
+    else:
+        return "false"
+
+def readdir(dirpath, prefix='', gethashes=False):
+ """
+ Reads a directory, returning a sequence of file dicts.
+
+ Arguments:
+      - dirpath - relative or absolute pathname of directory to scan
+      - prefix - path prefix used internally when recursing into
+        subdirectories; callers should normally omit it
+      - gethashes - also include a 'hash' key in each file dict, being
+        the SHA1 hash of the file's name and contents
+
+    Each returned dict in the sequence has the keys:
+      - fullpath - usable for opening/reading file
+      - relpath - relative path of file (the part after 'dirpath'),
+        for the 'SSK@blahblah/relpath' URI
+      - mimetype - guessed mimetype for file
+ """
+
+ #set_trace()
+ #print "dirpath=%s, prefix='%s'" % (dirpath, prefix)
+ entries = []
+ for f in os.listdir(dirpath):
+ relpath = prefix + f
+ fullpath = dirpath + "/" + f
+ if f == '.freesiterc':
+ continue
+ if os.path.isdir(fullpath):
+ entries.extend(readdir(dirpath+"/"+f, relpath + "/", gethashes))
+ else:
+            #entries[relpath] = {'mimetype':'blah/shit', 'fullpath':dirpath+"/"+relpath}
+ fullpath = dirpath + "/" + f
+ entry = {'relpath' :relpath,
+ 'fullpath':fullpath,
+ 'mimetype':guessMimetype(f)
+ }
+ if gethashes:
+ h = sha.new(relpath)
+ fobj = file(fullpath, "rb")
+ while True:
+ buf = fobj.read(262144)
+ if len(buf) == 0:
+ break
+ h.update(buf)
+ fobj.close()
+ entry['hash'] = h.hexdigest()
+ entries.append(entry)
+ entries.sort(lambda f1,f2: cmp(f1['relpath'], f2['relpath']))
+
+ return entries
+
+def guessMimetype(filename):
+ """
+ Returns a guess of a mimetype based on a filename's extension
+ """
+ m = mimetypes.guess_type(filename, False)[0]
+ if m == None:
+ m = "text/plain"
+ return m
+
+
+
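A minimal usage sketch of the FCPNode API added above, following the class
docstring's synchronous/asynchronous/callback modes. It assumes a node
listening on the default FCP host/port; the key URIs and 'myCallback' are
illustrative placeholders:

    import fcp

    node = fcp.FCPNode(verbosity=fcp.INFO)

    # synchronous call - blocks until the node answers
    pubkey, privkey = node.genkey()
    mimetype, data = node.get("KSK@gpl.txt")

    # asynchronous call - returns a JobTicket immediately
    job = node.put("CHK@", data="hello world", mimetype="text/plain",
                   async=True)
    uri = job.wait()                 # or poll job.isComplete()

    # callback mode - myCallback(status, value) sees 'pending',
    # 'successful' or 'failed'
    def myCallback(status, value):
        print status, value
    node.get("KSK@gpl.txt", callback=myCallback)

    node.shutdown()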
Added: trunk/apps/pyFreenet/fcp/sitemgr.py
===================================================================
--- trunk/apps/pyFreenet/fcp/sitemgr.py 2006-05-16 22:17:57 UTC (rev 8723)
+++ trunk/apps/pyFreenet/fcp/sitemgr.py 2006-05-16 22:19:29 UTC (rev 8724)
@@ -0,0 +1,416 @@
+#! /usr/bin/env python
+"""
+A small freesite insertion/management utility
+"""
+# standard lib imports
+import sys, os, sha, traceback, getopt
+from ConfigParser import SafeConfigParser
+
+# fcp imports
+import core
+from core import FCPNode
+from core import SILENT, FATAL, CRITICAL, ERROR, INFO, DETAIL, DEBUG
+
+fcpHost = core.defaultFCPHost
+fcpPort = core.defaultFCPPort
+#verbosity = DETAIL
+verbosity = None
+logfile = None
+
+class SiteMgr:
+ """
+ Manages insertion and updating of freesites
+ """
+ def __init__(self, **kw):
+ """
+ Creates a site manager object.
+
+ Arguments:
+
+ Keywords:
+ - configfile - pathname of where config file lives, defaults
+ to ~/.freesites (or ~/freesites.ini on doze)
+ - logfile - a pathname or open file object to which to write
+            log messages, defaults to sys.stderr
+ - verbosity - logging verbosity level, refer to fcp.core
+ - fcphost - hostname of fcp, default fcp.core.defaultFCPHost
+ - fcpport - port number of fcp, default fcp.core.defaultFCPPort
+ """
+ # set up the logger
+ logfile = kw.pop('logfile', sys.stderr)
+ if not hasattr(logfile, 'write'):
+ # might be a pathname
+ if not isinstance(logfile, str):
+ raise Exception("Bad logfile, must be pathname or file object")
+ logfile = file(logfile, "a")
+ self.logfile = logfile
+ self.verbosity = kw.get('verbosity', 0)
+ self.Verbosity = kw.get('Verbosity', 0)
+
+ self.fcpHost = fcpHost
+ self.fcpPort = fcpPort
+
+ self.kw = kw
+
+ self.node = None
+
+ # determine pathname for sites ini file
+ configFile = kw.get('configfile', None)
+ if configFile == None:
+ isDoze = sys.platform.lower().startswith("win")
+ homedir = os.path.expanduser("~")
+ if isDoze:
+ filename = "freesites.ini"
+ else:
+ filename = ".freesites"
+ configFile = os.path.join(homedir, filename)
+
+ self.configFile = configFile
+
+ if os.path.isfile(configFile):
+ self.loadConfig()
+ else:
+ self.config = SafeConfigParser()
+ self.config.set("DEFAULT", "fcphost", self.fcpHost)
+            self.config.set("DEFAULT", "fcpport", str(self.fcpPort))
+
+ def __del__(self):
+
+ try:
+ if hasattr(self, 'node'):
+ self.node.shutdown()
+ del self.node
+ self.node = None
+ except:
+ pass
+
+ def createConfig(self, **kw):
+ """
+ Creates a whole new config
+ """
+ #if not kw.has_key("fcpHost"):
+ # kw['fcpHost'] = core.defaultFCPHost
+ #if not kw.has_key("fcpPort"):
+ # kw['fcpPort'] = core.defaultFCPPort
+
+ #self.fcpHost = kw['fcpHost']
+ #self.fcpPort = kw['fcpPort']
+
+ file(self.configFile, "w").write("\n".join([
+ "# config file for freesites",
+ "# being inserted via pyfcp 'sitemgr' utility",
+ "#",
+ "# edit this file with care",
+ "",
+ # "# FCP access details",
+ # "[DEFAULT]",
+ # "fcpHost=%s" % self.fcpHost,
+ # "fcpPort=%s" % self.fcpPort,
+ "",
+ "# for each new site, just copy the following two lines",
+ "# to the end of this file, uncomment them, change as needed",
+ "",
+ "# [mysite]",
+ "# dir=/path/to/mysite/directory",
+ "",
+ "",
+ ]))
+
+ def loadConfig(self):
+ """
+ Loads the sites config file into self.config as a SafeConfigParser
+ object
+ """
+ conf = self.config = SafeConfigParser()
+ conf.read(self.configFile)
+
+ try:
+ self.fcpHost = conf.get("DEFAULT", "fcphost")
+ except:
+ conf.set("DEFAULT", "fcphost", self.fcpHost)
+ try:
+ self.fcpPort = conf.getint("DEFAULT", "fcpport")
+ except:
+            conf.set("DEFAULT", "fcpport", str(self.fcpPort))
+
+
+ for sitename in conf.sections():
+
+ if not conf.has_option(sitename, "dir"):
+                raise Exception("Config file error: No directory specified for site '%s'" \
+                                % sitename)
+
+ def saveConfig(self):
+ """
+ Saves the amended config file to disk
+ """
+ self.createConfig()
+
+ self.config.set("DEFAULT", "fcphost", self.fcpHost)
+        self.config.set("DEFAULT", "fcpport", str(self.fcpPort))
+
+ f = file(self.configFile, "a")
+
+ self.config.write(f)
+
+ f.close()
+
+ def createNode(self, **kw):
+ """
+ Creates and saves a node object, if one not already present
+ """
+ if isinstance(self.node, FCPNode):
+ return
+
+ opts = {}
+
+ if kw.has_key("fcpHost"):
+ opts['host'] = kw['fcpHost']
+ else:
+ opts['host'] = self.fcpHost
+
+ if kw.has_key("fcpPort"):
+            opts['port'] = kw['fcpPort']
+ else:
+ opts['port'] = self.fcpPort
+
+ if kw.has_key("verbosity"):
+ opts['verbosity'] = kw['verbosity']
+ else:
+ opts['verbosity'] = core.INFO
+
+ opts['Verbosity'] = self.Verbosity
+
+ if kw.has_key("logfile"):
+ opts['logfile'] = kw['logfile'] or sys.stdout
+ else:
+ opts['logfile'] = sys.stdout
+
+ opts['name'] = 'freesitemgr'
+
+ print "createNode:"
+ print " kw=%s"% kw
+ print " opts=%s" % opts
+ #sys.exit(0)
+
+ self.node = FCPNode(**opts)
+
+ def hasSite(self, sitename):
+ """
+ returns True if site is known in this config
+ """
+ return self.config.has_section(sitename)
+
+ def addSite(self, sitename, sitedir):
+
+ if self.hasSite(sitename):
+ raise Exception("Site %s already exists" % sitename)
+
+ conf = self.config
+ conf.add_section(sitename)
+ conf.set(sitename, "dir", sitedir)
+
+ self.saveConfig()
+
+ def removeSite(self, sitename):
+ """
+ Drops a freesite from the config
+ """
+ if not self.hasSite(sitename):
+ raise Exception("No such site '%s'" % sitename)
+
+ conf = self.config
+ conf.remove_section(sitename)
+
+ self.saveConfig()
+
+ def getSiteInfo(self, sitename):
+ """
+ returns a record of info about given site
+ """
+ if not self.hasSite(sitename):
+ raise Exception("No such freesite '%s'" % sitename)
+
+ conf = self.config
+
+ if conf.has_option(sitename, "hash"):
+ hash = conf.get(sitename, "hash")
+ else:
+ hash = None
+
+ if conf.has_option(sitename, "version"):
+ version = conf.getint(sitename, "version")
+ else:
+ version = None
+
+ if conf.has_option(sitename, "privatekey"):
+ privkey = conf.get(sitename, "privatekey")
+ else:
+ privkey = None
+
+ if conf.has_option(sitename, "uri"):
+ uri = conf.get(sitename, "uri")
+ else:
+ uri = None
+
+ return {'name' : sitename,
+ 'dir' : conf.get(sitename, 'dir'),
+ 'hash' : hash,
+ 'version' : version,
+ 'privatekey' : privkey,
+ 'uri' : uri,
+ }
+
+ def getSiteNames(self):
+ return self.config.sections()
+
+ def update(self):
+ """
+ Insert/update all registered freesites
+ """
+ noSites = True
+
+ log = self._log
+
+ kw = self.kw
+
+ # get a node handle
+ self.createNode(logfile=logfile, **kw)
+
+ conf = self.config
+ for sitename in conf.sections():
+
+ # fill in any incomplete details with site entries
+ needToSave = False
+ if not conf.has_option(sitename, "hash"):
+ needToSave = True
+ conf.set(sitename, "hash", "")
+
+ if not conf.has_option(sitename, "version"):
+ needToSave = True
+ conf.set(sitename, "version", "0")
+
+ if not conf.has_option(sitename, "privatekey"):
+ needToSave = True
+ pub, priv = self.node.genkey()
+ uri = pub.replace("SSK@", "USK@") + sitename + "/0"
+ conf.set(sitename, "uri", uri)
+ conf.set(sitename, "privatekey", priv)
+ if needToSave:
+ self.saveConfig()
+
+ uri = conf.get(sitename, "uri")
+ dir = conf.get(sitename, "dir")
+ hash = conf.get(sitename, "hash")
+ version = conf.get(sitename, "version")
+ privatekey = conf.get(sitename, "privatekey")
+
+ files = core.readdir(dir, gethashes=True)
+ h = sha.new()
+ for f in files:
+ h.update(f['hash'])
+ hashNew = h.hexdigest()
+ if hashNew != hash:
+ log(INFO, "Updating site %s" % sitename)
+ log(INFO, "privatekey=%s" % privatekey)
+ noSites = False
+ try:
+ res = self.node.put(privatekey,
+ dir=dir,
+ name=sitename,
+ version=version,
+ usk=True,
+ verbosity=self.Verbosity)
+ log(INFO, "site %s updated successfully" % sitename)
+ except:
+ traceback.print_exc()
+ log(ERROR, "site %s failed to update" % sitename)
+ conf.set(sitename, "hash", hashNew)
+
+ self.saveConfig()
+
+ if noSites:
+ log(INFO, "No sites needed updating")
+
+ def shutdown(self):
+ self.node.shutdown()
+
+ def _log(self, level, msg):
+ """
+ Logs a message. If level > verbosity, don't output it
+ """
+ if level > self.verbosity:
+ return
+
+ if not msg.endswith("\n"): msg += "\n"
+
+ self.logfile.write(msg)
+ self.logfile.flush()
+
+
+def help():
+
+ print "%s: A console-based, cron-able freesite inserter" % sys.argv[0]
+ print "Usage: %s" % sys.argv[0]
+
+ print "This utility inserts/updates freesites, and is"
+ print "driven by a simple config file."
+ print
+ print "The first time you run this utility, a config file"
+    print "will be created for you in your home directory."
+    print "You will be told where this file is (~/.freesites on *nix"
+    print "or ~/freesites.ini on doze)."
+    print "Then you can edit this file, add details of"
+ print "your freesites, and run it again."
+ print
+ print "Note - freesites are only updated if they have"
+ print "changed since the last update, because a hash"
+ print "of each site gets stored in the config"
+
+ sys.exit(0)
+
+def run():
+    """
+    Runs the sitemgr in a console environment
+    """
+    opts = {'verbosity': core.INFO,
+            'fcpHost':core.defaultFCPHost,
+            'fcpPort':core.defaultFCPPort,
+            }
+
+    try:
+        cmdopts, args = getopt.getopt(sys.argv[1:],
+                                      "?hv:",
+                                      ["help", "verbosity=",
+                                       "fcphost=", "fcpport="])
+    except getopt.GetoptError:
+        # print help information and exit:
+        help()
+        sys.exit(2)
+
+    #print cmdopts
+    for o, a in cmdopts:
+        if o in ("-h", "--help"):
+            help()
+        elif o in ("-v", "--verbosity"):
+            opts['verbosity'] = int(a)
+        elif o == "--fcphost":
+            opts['fcpHost'] = a
+        elif o == "--fcpport":
+            opts['fcpPort'] = int(a)
+
+    s = SiteMgr(verbosity=opts['verbosity'],
+                fcpHost=opts['fcpHost'],
+                fcpPort=opts['fcpPort'])
+    s.update()
+    s.shutdown()
+
+if __name__ == '__main__':
+
+ if '-h' in sys.argv:
+ help()
+
+    if '-v' in sys.argv:
+        verbosity = core.DETAIL
+    else:
+        verbosity = 0
+
+    s = SiteMgr(verbosity=verbosity)
+ s.update()
+ s.shutdown()
+
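For reference, a sketch of the ~/.freesites config file that SiteMgr works
from, assuming a single site called 'mysite'; only the [mysite] section and
its 'dir' line need writing by hand, since update() fills in the uri,
version, privatekey and hash options after the first insert (the key values
below are placeholders):

    # config file for freesites
    # being inserted via pyfcp 'sitemgr' utility

    [DEFAULT]
    fcphost = 127.0.0.1
    fcpport = 9481

    [mysite]
    dir = /path/to/mysite/directory
    # added by sitemgr after the first update:
    # uri = USK@<public key>/mysite/0
    # version = 0
    # privatekey = SSK@<private key>
    # hash = <sha1 digest of the site contents>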
Added: trunk/apps/pyFreenet/fcp/xmlrpc.py
===================================================================
--- trunk/apps/pyFreenet/fcp/xmlrpc.py 2006-05-16 22:17:57 UTC (rev 8723)
+++ trunk/apps/pyFreenet/fcp/xmlrpc.py 2006-05-16 22:19:29 UTC (rev 8724)
@@ -0,0 +1,233 @@
+#! /usr/bin/env python
+"""
+fcp/xmlrpc.py
+
+Exposes some pyfcp primitives over an XML-RPC service
+"""
+
+# standard library imports
+import sys
+from SimpleXMLRPCServer import SimpleXMLRPCServer
+from SocketServer import ThreadingMixIn
+
+# FCP imports
+import core
+
+# where to listen, for the xml-rpc server
+xmlrpcHost = "127.0.0.1"
+xmlrpcPort = 19481
+
+class FCPXMLRPCServer(ThreadingMixIn, SimpleXMLRPCServer):
+ """
+ Multi-threaded XML-RPC server for freenet FCP access
+ """
+ def __init__(self, **kw):
+ """
+ Creates the xml-rpc server
+
+ Keywords:
+        - host - hostname to listen on for xml-rpc requests, default
+          127.0.0.1
+ - port - port to listen on for xml-rpc requests, default 19481
+ - fcpHost - hostname where FCP port is
+ - fcpPort - port where FCP port is
+        - verbosity - verbosity of output messages, 0 (silent) through
+          6 (noisy), default 0 (SILENT). Refer to the verbosity constants
+          in the fcp.core module
+ """
+ # create the server
+ host = kw.get('host', xmlrpcHost)
+ port = kw.get('port', xmlrpcPort)
+
+ SimpleXMLRPCServer.__init__(self, (host, port))
+
+ # create the fcp node interface
+ fcpHost = kw.get('fcpHost', core.defaultFCPHost)
+ fcpPort = kw.get('fcpPort', core.defaultFCPPort)
+ verbosity = kw.get('verbosity', core.SILENT)
+
+ node = self.node = core.FCPNode(host=fcpHost,
+ port=fcpPort,
+ verbosity=verbosity,
+ )
+
+ # create the request handler
+ hdlr = FreenetXMLRPCRequestHandler(node)
+
+ # link in the request handler object
+ self.register_instance(hdlr)
+ self.register_introspection_functions()
+
+ def run(self):
+ """
+ Launch the server to run forever
+ """
+ try:
+ self.serve_forever()
+ except KeyboardInterrupt:
+ self.node.shutdown()
+ raise
+
+
+class FreenetXMLRPCRequestHandler:
+ """
+ Simple class which exposes basic primitives
+ for freenet xmlrpc server
+ """
+ def __init__(self, node):
+
+ self.node = node
+
+
+ def get(self, uri, options=None):
+ """
+ Performs a fetch of a key
+
+ Arguments:
+ - uri - the URI to retrieve
+ - options - a mapping (dict) object containing various
+ options - refer to FCPNode.get documentation
+ """
+ if options==None:
+ options = {}
+
+ if options.has_key('file'):
+ raise Exception("file option not available over XML-RPC")
+ if options.has_key('dir'):
+ raise Exception("dir option not available over XML-RPC")
+
+ return self.node.get(uri, **options)
+
+ def put(self, uri, options=None):
+ """
+ Inserts data to node
+
+ Arguments:
+ - uri - the URI to insert under
+ - options - a mapping (dict) object containing various options,
+ refer to FCPNode.get documentation
+ """
+ if options==None:
+ options = {}
+
+ if options.has_key('file'):
+ raise Exception("file option not available over XML-RPC")
+ if options.has_key('dir'):
+ raise Exception("dir option not available over XML-RPC")
+
+        return self.node.put(uri, **options)
+
+ def genkey(self):
+
+ return self.node.genkey()
+
+
+def usage(msg="", ret=1):
+
+ if msg:
+ sys.stderr.write(msg+"\n")
+
+ print "\n".join([
+ "Freenet XML-RPC Server",
+ "Usage: %s [options]" % sys.argv[0],
+ "Options:",
+ " -h, --help",
+ " show this usage message",
+ " -v, --verbosity=",
+ " set verbosity level, values are:",
+ " 0 (SILENT) show only 1 line for incoming hits",
+ " 1 (FATAL) show only fatal messages",
+ " 2 (CRITICAL) show only major failures",
+ " 3 (ERROR) show significant errors",
+ " 4 (INFO) show basic request details",
+ " 5 (DETAIL) show FCP dialogue",
+ " 6 (DEBUG) show ridiculous amounts of debug info",
+ " --host=",
+ " listen hostname for xml-rpc requests, default %s" % xmlrpcHost,
+ " --port=",
+    "        listen port number for xml-rpc requests, default %s" % xmlrpcPort,
+ " --fcphost=",
+ " set hostname of freenet FCP interface, default %s" \
+ % core.defaultFCPHost,
+ " --fcpport=",
+ " set port number of freenet FCP interface, default %s" \
+ % core.defaultFCPPort,
+ ])
+
+ sys.exit(ret)
+
+def testServer():
+
+    runServer(host="", fcpHost="10.0.0.1", verbosity=core.DETAIL)
+
+def runServer(**kw):
+ """
+ Creates and runs a basic XML-RPC server for FCP access
+
+ For keyword parameters, refer FCPXMLRPCServer constructor
+ """
+ FCPXMLRPCServer(**kw).run()
+
+def main():
+ """
+ When this script is executed, it runs the XML-RPC server
+ """
+ import getopt
+
+ opts = {'verbosity': core.INFO,
+ 'host':xmlrpcHost,
+ 'port':xmlrpcPort,
+ 'fcpHost':core.defaultFCPHost,
+ 'fcpPort':core.defaultFCPPort,
+ }
+
+ try:
+ cmdopts, args = getopt.getopt(sys.argv[1:],
+ "?hv:",
+ ["help", "verbosity=", "host=", "port=",
+ "fcphost=", "fcpport="])
+ except getopt.GetoptError:
+ # print help information and exit:
+ usage()
+ sys.exit(2)
+ output = None
+ verbose = False
+ #print cmdopts
+ for o, a in cmdopts:
+ if o in ("-h", "--help"):
+ usage(ret=0)
+ elif o == "--host":
+ opts['host'] = a
+ elif o == "--port":
+ try:
+ opts['port'] = int(a)
+ except:
+ usage("Invalid port number '%s'" % a)
+ elif o == "--fcphost":
+ opts['fcpHost'] = a
+        elif o == "--fcpport":
+            try:
+                opts['fcpPort'] = int(a)
+            except:
+                usage("Invalid FCP port number '%s'" % a)
+ elif o in ['-v', '--verbosity']:
+ print "setting verbosity"
+ try:
+ opts['verbosity'] = int(a)
+ #print "verbosity=%s" % opts['verbosity']
+ except:
+ usage("Invalid verbosity '%s'" % a)
+
+ #print "Verbosity=%s" % opts['verbosity']
+
+ if opts['verbosity'] >= core.INFO:
+ print "Launching Freenet XML-RPC server"
+ print "Listening on %s:%s" % (opts['host'], opts['port'])
+        print "Talking to Freenet FCP at %s:%s" % (opts['fcpHost'], opts['fcpPort'])
+
+ try:
+ runServer(**opts)
+ except KeyboardInterrupt:
+ print "Freenet XML-RPC server terminated by user"
+
+
+
+if __name__ == '__main__':
+
+ main()
+
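A minimal client-side sketch for the XML-RPC service above, assuming the
server is running on its default 127.0.0.1:19481; it uses the Python 2
standard library xmlrpclib, and the key URI shown is an illustrative
placeholder:

    import xmlrpclib

    proxy = xmlrpclib.ServerProxy("http://127.0.0.1:19481/")

    # the handler exposes genkey(), get(uri, options) and put(uri, options)
    pubkey, privkey = proxy.genkey()
    mimetype, data = proxy.get("KSK@gpl.txt", {})
    uri = proxy.put("CHK@", {'data': "hello world",
                             'mimetype': "text/plain"})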