Author: aum
Date: 2006-05-16 22:15:45 +0000 (Tue, 16 May 2006)
New Revision: 8722
Removed:
trunk/apps/pyFreenet/fcp.py
trunk/apps/pyFreenet/fcpsitemgr.py
trunk/apps/pyFreenet/fcpxmlrpc.py
trunk/apps/pyFreenet/start.sh
trunk/apps/pyFreenet/stop.sh
trunk/apps/pyFreenet/updatesites.py
Log:
Deprecated, due to refactoring
Deleted: trunk/apps/pyFreenet/fcp.py
===================================================================
--- trunk/apps/pyFreenet/fcp.py 2006-05-16 22:14:50 UTC (rev 8721)
+++ trunk/apps/pyFreenet/fcp.py 2006-05-16 22:15:45 UTC (rev 8722)
@@ -1,1286 +0,0 @@
-#!/usr/bin/env python
-"""
-An implementation of a freenet client library for
-FCP v2, offering considerable flexibility.
-
-Clients should instantiate FCPNodeConnection, then execute
-its methods to perform tasks with FCP.
-
-This module was written by aum, May 2006, released under the GNU Lesser General
-Public License.
-
-No warranty, yada yada
-
-"""
-
-import sys, os, socket, time, thread
-import threading, mimetypes, sha, Queue
-import select, traceback
-
-class ConnectionRefused(Exception):
- """
- cannot connect to given host/port
- """
-
-class FCPException(Exception):
-
- def __init__(self, info=None):
- print "Creating fcp exception"
- if not info:
- info = {}
- self.info = info
- print "fcp exception created"
-
- def __str__(self):
-
- parts = []
- for k in ['header', 'ShortCodeDescription', 'CodeDescription']:
- if self.info.has_key(k):
- parts.append(str(self.info[k]))
- return ";".join(parts) or "??"
-
-class FCPGetFailed(FCPException):
- pass
-
-class FCPPutFailed(FCPException):
- pass
-
-class FCPProtocolError(FCPException):
- pass
-
-# where we can find the freenet node FCP port
-defaultFCPHost = "127.0.0.1"
-defaultFCPPort = 9481
-
-# poll timeout period for manager thread
-pollTimeout = 0.1
-#pollTimeout = 3
-
-# list of keywords sent from node to client, which have
-# int values
-intKeys = [
- 'DataLength', 'Code',
- ]
-
-# for the FCP 'ClientHello' handshake
-expectedVersion="2.0"
-
-# logger verbosity levels
-SILENT = 0
-FATAL = 1
-CRITICAL = 2
-ERROR = 3
-INFO = 4
-DETAIL = 5
-DEBUG = 6
-
-class FCPNodeConnection:
- """
- Represents an interface to a freenet node via its FCP port,
- and exposes primitives for the basic genkey, get, put and putdir
- operations.
-
- Only one instance of FCPNodeConnection is needed across an entire
- running client application, because its methods are quite thread-safe.
- Creating 2 or more instances is a waste of resources.
-
- Clients, when invoking methods, have several options regarding flow
- control and event notification:
-
-  - synchronous call (the default). Here, no pending status messages
-    will ever be seen, and the call only returns control when it has
-    completed (successfully or otherwise)
-
- - asynchronous call - this is invoked by passing the keyword argument
-    'async=True' to any of the main primitives. When a primitive is invoked
-    asynchronously, it will return a 'job ticket object' immediately. This
-    job ticket has methods for polling for job completion, or blocking
-    awaiting completion
-
- - setting a callback. You can pass to any of the primitives a
-    'callback=somefunc' keyword arg, where 'somefunc' is a callable object
-    conforming to 'def somefunc(status, value)'
-
-    The callback function will be invoked when a primitive succeeds or fails,
-    as well as when a pending message is received from the node.
-
- The 'status' argument passed will be one of:
-      - 'successful' - the primitive succeeded, and 'value' will contain
-        the result of the primitive
- - 'pending' - the primitive is still executing, and 'value' will
- contain the raw pending message sent back from the node, as a
- dict
- - 'failed' - the primitive failed, and as with 'pending', the
- argument 'value' contains a dict containing the message fields
- sent back from the node
-
- Note that callbacks can be set in both synchronous and asynchronous
- calling modes.
-
- """
- def __init__(self, **kw):
- """
- Create a connection object
-
- Keyword Arguments:
-    - clientName - name of client to use with reqs, defaults to a generated
-      unique id. This is crucial if you plan on making persistent requests
- - host - hostname, defaults to defaultFCPHost
- - port - port number, defaults to defaultFCPPort
-    - logfile - a pathname or writable file object, to which log messages
-      should be written, defaults to stdout
- - verbosity - how detailed the log messages should be, defaults to 0
- (silence)
-
- Attributes of interest:
-    - jobs - a dict of currently running jobs (persistent and nonpersistent).
-      keys are job ids and values are JobTicket objects
-
- Notes:
-    - when the connection is created, a 'hello' handshake takes place.
-      After that handshake, the node sends back a list of outstanding persistent
-      requests left over from the last connection (based on the value of
-      the 'clientName' keyword passed into this constructor).
-
-      This object then wraps all this info into JobTicket instances and stores
-      them in the self.jobs dict
-
- """
- # grab and save parms
- self.name = kw.get('clientName', self._getUniqueId())
- self.host = kw.get('host', defaultFCPHost)
- self.port = kw.get('port', defaultFCPPort)
-
- # set up the logger
- logfile = kw.get('logfile', sys.stdout)
- if not hasattr(logfile, 'write'):
- # might be a pathname
- if not isinstance(logfile, str):
- raise Exception("Bad logfile, must be pathname or file object")
- logfile = file(logfile, "a")
- self.logfile = logfile
- self.verbosity = kw.get('verbosity', 0)
-
- # try to connect to node
- self.socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
- self.socket.connect((self.host, self.port))
-
- # now do the hello
- self._hello()
-
- # the pending job tickets
- self.jobs = {} # keyed by request ID
-
- # queue for incoming client requests
- self.clientReqQueue = Queue.Queue()
-
- # launch receiver thread
- self.running = True
- thread.start_new_thread(self._mgrThread, ())
-
- def __del__(self):
- """
- object is getting cleaned up, so disconnect
- """
- # terminate the node
- try:
- self.shutdown()
- except:
- traceback.print_exc()
- pass
-
- # basic FCP primitives
-
- def genkey(self, **kw):
- """
- Generates and returns an SSK keypair
-
- Keywords:
- - async - whether to do this call asynchronously, and
- return a JobTicket object
- - callback - if given, this should be a callable which accepts 2
- arguments:
- - status - will be one of 'successful', 'failed' or 'pending'
- - value - depends on status:
- - if status is 'successful', this will contain the value
- returned from the command
- - if status is 'failed' or 'pending', this will contain
- a dict containing the response from node
- """
- id = kw.pop("id", None)
- if not id:
- id = self._getUniqueId()
-
- return self._submitCmd(id, "GenerateSSK", Identifier=id, **kw)
-
- def get(self, uri, **kw):
- """
- Does a direct get of a key
-
- Keywords:
-    - async - whether to return immediately with a job ticket object, default
-      False (wait for completion)
-    - persistence - default 'connection' - the kind of persistence for
-      this request. The other valid values, as per the FCP spec, are
-      'reboot' and 'forever', which allow the job to be recalled in
-      subsequent FCP sessions
-    - Global - default false - if it evaluates to true, puts this request
-      on the global queue. Note the capital G in Global. If you set this,
-      persistence must be 'reboot' or 'forever'
-
-    - dsonly - whether to only check local datastore
- - ignoreds - don't check local datastore
-    - file - if given, this is a pathname to which to store the retrieved key
-    - nodata - if true, no data will be returned. This can be a useful
-      test of whether a key is retrievable, without having to consume resources
-      by retrieving it
-
- Returns a 2-tuple, depending on keyword args:
-    - if 'file' is given, returns (mimetype, pathname) if key is returned
-    - if 'file' is not given, returns (mimetype, data) if key is returned
-    - if 'nodata' is true, returns (mimetype, 1) if key is returned
- If key is not found, raises an exception
- """
- self._log(INFO, "get: uri=%s" % uri)
-
- # ---------------------------------
- # format the request
- opts = {}
-
- id = kw.pop("id", None)
- if not id:
- id = self._getUniqueId()
-
- opts['async'] = kw.pop('async', False)
- if kw.has_key('callback'):
- opts['callback'] = kw['callback']
-
- opts['Persistence'] = kw.pop('persistence', 'connection')
- if kw.get('Global', False):
- print "global get"
- opts['Global'] = "true"
- else:
- opts['Global'] = "false"
-
- if opts['Global'] == 'true' and opts['Persistence'] == 'connection':
- raise Exception("Global requests must be persistent")
-
- file = kw.pop("file", None)
- if file:
- opts['ReturnType'] = "disk"
- #opts['File'] = file
- opts['Filename'] = file
-
-        elif kw.get('nodata', False):
- nodata = True
- opts['ReturnType'] = "none"
- else:
- nodata = False
- opts['ReturnType'] = "direct"
-
- opts['Identifier'] = id
-
- if kw.get("ignoreds", False):
- opts["IgnoreDS"] = "true"
- else:
- opts["IgnoreDS"] = "false"
-
- if kw.get("dsonly", False):
- opts["DSOnly"] = "true"
- else:
- opts["DSOnly"] = "false"
-
- opts['URI'] = uri
- opts['Verbosity'] = "0"
-
- opts['MaxRetries'] = kw.get("maxretries", 3)
- opts['MaxSize'] = kw.get("maxsize", "1000000000000")
- opts['PriorityClass'] = int(kw.get("priority", 1))
-
- # ---------------------------------
- # now enqueue the request
- return self._submitCmd(id, "ClientGet", **opts)
-
- def put(self, uri="CHK@", **kw):
- """
- Inserts a key
-
- Arguments:
- - uri - uri under which to insert the key
-
-    Keywords - you must specify one of the following to choose an insert mode:
- - file - path of file from which to read the key data
- - data - the raw data of the key as string
- - dir - the directory to insert, for freesite insertion
- - redirect - the target URI to redirect to
-
- Keywords for 'dir' mode:
-    - name - name of the freesite, the 'sitename' in 'SSK@privkey/sitename'
-    - usk - whether to insert as a USK (USK@privkey/sitename/version/), default False
- - version - valid if usk is true, default 0
-
- Keywords for 'file' and 'data' modes:
- - chkonly - only generate CHK, don't insert - default false
-    - nocompress - do not compress on insert - default false
-
- Keywords for 'file', 'data' and 'redirect' modes:
- - mimetype - the mime type, default text/plain
-
- Keywords valid for all modes:
-    - async - whether to do the job asynchronously, returning a job ticket
-      object (default False)
-    - persistence - default 'connection' - the kind of persistence for
-      this request. The other valid values, as per the FCP spec, are
-      'reboot' and 'forever', which allow the job to be recalled in
-      subsequent FCP sessions
-    - Global - default false - if it evaluates to true, puts this request
-      on the global queue. Note the capital G in Global. If you set this,
-      persistence must be 'reboot' or 'forever'
-
- - maxretries - maximum number of retries, default 3
- - priority - default 1
-
- Notes:
-    - exactly one of the 'file', 'data', 'dir' or 'redirect' keyword
-      arguments must be present
- """
- self._log(INFO, "put: uri=%s" % uri)
-
- # divert to putdir if dir keyword present
- if kw.has_key('dir'):
- return self.putdir(uri, **kw)
-
- # ---------------------------------
- # format the request
- opts = {}
-
- opts['async'] = kw.get('async', False)
- if kw.has_key('callback'):
- opts['callback'] = kw['callback']
-
- opts['Persistence'] = kw.pop('persistence', 'connection')
- if kw.get('Global', False):
- opts['Global'] = "true"
- else:
- opts['Global'] = "false"
-
- if opts['Global'] == 'true' and opts['Persistence'] == 'connection':
- raise Exception("Global requests must be persistent")
-
- opts['URI'] = uri
- opts['Metadata.ContentType'] = kw.get("mimetype", "text/plain")
-
- id = kw.pop("id", None)
- if not id:
- id = self._getUniqueId()
- opts['Identifier'] = id
-
- opts['Verbosity'] = 0
- opts['MaxRetries'] = kw.get("maxretries", 3)
- opts['PriorityClass'] = kw.get("priority", 1)
- opts['GetCHKOnly'] = toBool(kw.get("chkonly", "false"))
- opts['DontCompress'] = toBool(kw.get("nocompress", "false"))
-
- if kw.has_key("file"):
- opts['UploadFrom'] = "disk"
- opts['Filename'] = kw['file']
- if not kw.has_key("mimetype"):
-                opts['Metadata.ContentType'] = mimetypes.guess_type(kw['file'])[0] or "text/plain"
-
- elif kw.has_key("data"):
- opts["UploadFrom"] = "direct"
- opts["Data"] = kw['data']
-
- elif kw.has_key("redirect"):
- opts["UploadFrom"] = "redirect"
- opts["TargetURI"] = kw['redirect']
- else:
- raise Exception("Must specify file, data or redirect keywords")
-
- #print "sendEnd=%s" % sendEnd
-
- # ---------------------------------
- # now dispatch the job
- return self._submitCmd(id, "ClientPut", **opts)
-
- def putdir(self, uri, **kw):
- """
- Inserts a freesite
-
- Arguments:
- - uri - uri under which to insert the key
-
- Keywords:
- - dir - the directory to insert - mandatory, no default.
- This directory must contain a toplevel index.html file
- - name - the name of the freesite, defaults to 'freesite'
- - usk - set to True to insert as USK (Default false)
- - version - the USK version number, default 0
-
- - maxretries - maximum number of retries, default 3
- - priority - default 1
-
-    - async - default False - if True, return immediately with a job ticket
-    - persistence - default 'connection' - the kind of persistence for
-      this request. The other valid values, as per the FCP spec, are
-      'reboot' and 'forever', which allow the job to be recalled in
-      subsequent FCP sessions
-    - Global - default false - if it evaluates to true, puts this request
-      on the global queue. Note the capital G in Global. If you set this,
-      persistence must be 'reboot' or 'forever'
-
- Returns:
- - the URI under which the freesite can be retrieved
- """
- self._log(INFO, "putdir: uri=%s dir=%s" % (uri, kw['dir']))
-
- # -------------------------------------
- # format the command
- #
- # note that with this primitive, we have to format the command
- # buffer ourselves, not just drop it through as a bunch of keywords,
- # since we want to control the order of keyword lines
-
- # get keyword args
- dir = kw['dir']
- sitename = kw.get('name', 'freesite')
- usk = kw.get('usk', False)
- version = kw.get('version', 0)
- maxretries = kw.get('maxretries', 3)
- priority = kw.get('priority', 1)
-
- id = kw.pop("id", None)
- if not id:
- id = self._getUniqueId()
-
- # derive final URI for insert
- uriFull = uri + sitename + "/"
- if kw.get('usk', False):
- uriFull += "%d/" % int(version)
- uriFull = uriFull.replace("SSK@", "USK@")
-
- # build a big command buffer
- msgLines = ["ClientPutComplexDir",
- "Identifier=%s" % id,
- "Verbosity=0",
- "MaxRetries=%s" % maxretries,
- "PriorityClass=%s" % priority,
- "URI=%s" % uriFull,
- "Persistence=%s" % kw.get("persistence", "connection"),
- ]
-
- if kw.get('Global', False):
- msgLines.append("Global=true")
- else:
- msgLines.append("Global=false")
-
- # scan directory and add its files
- n = 0
- manifest = readdir(kw['dir'])
- default = None
- for file in manifest:
- relpath = file['relpath']
- fullpath = file['fullpath']
- mimetype = file['mimetype']
-
- if relpath == 'index.html':
- default = file
- self._log(DETAIL, "n=%s relpath=%s" % (repr(n), repr(relpath)))
-
- msgLines.extend(["Files.%d.Name=%s" % (n, relpath),
- "Files.%d.UploadFrom=disk" % n,
- "Files.%d.Filename=%s" % (n, fullpath),
- ])
- n += 1
-
- # now, add the default file
- msgLines.extend(["Files.%d.Name=" % n,
- "Files.%d.UploadFrom=disk" % n,
- "Files.%d.Filename=%s" % (n, default['fullpath']),
- ])
-
- msgLines.append("EndMessage")
-
- for line in msgLines:
- self._log(DETAIL, line)
- fullbuf = "\n".join(msgLines) + "\n"
-
- # --------------------------------------
- # now dispatch the job
- return self._submitCmd(id, "ClientPutComplexDir",
- rawcmd=fullbuf,
- async=kw.get('async', False),
- callback=kw.get('callback', False),
- Persistence=kw.get('Persistence', 'connection'),
- )
-
-
-
- # high level client methods
-
- def listenGlobal(self, **kw):
- """
- Enable listening on global queue
- """
- self._submitCmd(None, "WatchGlobal", Enabled="true", **kw)
-
- def ignoreGlobal(self, **kw):
- """
- Stop listening on global queue
- """
- self._submitCmd(None, "WatchGlobal", Enabled="false", **kw)
-
- def purgePersistentJobs(self):
- """
- Cancels all persistent jobs in one go
- """
- for job in self.getPersistentJobs():
- job.cancel()
-
- def getAllJobs(self):
- """
-        Returns a list of all jobs, whether persistent, global or transient
- """
- return self.jobs.values()
-
- def getPersistentJobs(self):
- """
- Returns a list of persistent jobs, excluding global jobs
- """
-        return [j for j in self.jobs.values() if j.isPersistent and not j.isGlobal]
-
- def getGlobalJobs(self):
- """
- Returns a list of global jobs
- """
- return [j for j in self.jobs.values() if j.isGlobal]
-
- def getTransientJobs(self):
- """
- Returns a list of non-persistent, non-global jobs
- """
- return [j for j in self.jobs.values() if not j.isPersistent]
-
- def refreshPersistentRequests(self, **kw):
- """
- Sends a ListPersistentRequests to node, to ensure that
- our records of persistent requests are up to date.
-
- Since, upon connection, the node sends us a list of all
- outstanding persistent requests anyway, I can't really
- see much use for this method. I've only added the method
- for FCP spec compliance
- """
- self._log(INFO, "listPersistentRequests")
-
- if self.jobs.has_key('__global'):
-            raise Exception("An existing non-identifier job is currently pending")
-
- # ---------------------------------
- # format the request
- opts = {}
-
- id = '__global'
- opts['Identifier'] = id
-
- opts['async'] = kw.pop('async', False)
- if kw.has_key('callback'):
- opts['callback'] = kw['callback']
-
- # ---------------------------------
- # now enqueue the request
- return self._submitCmd(id, "ListPersistentRequests", **opts)
-
- def setVerbosity(self, verbosity):
- """
- Sets the verbosity for future logging calls
- """
- self.verbosity = verbosity
-
- def shutdown(self):
- """
- Terminates the manager thread
-
-        You should explicitly shut down any existing node connections
-        before exiting your client application
- """
- self.running = False
-
- # give the manager thread a chance to bail out
- time.sleep(pollTimeout * 3)
-
- # shut down FCP connection
- if hasattr(self, 'socket'):
- self.socket.close()
- del self.socket
-
- # and close the logfile
- if self.logfile not in [sys.stdout, sys.stderr]:
- self.logfile.close()
-
-
-
-
- # methods for manager thread
-
- def _mgrThread(self):
- """
- This thread is the nucleus of pyfcp, and coordinates incoming
- client commands and incoming node responses
- """
- log = self._log
- try:
- while self.running:
-
- log(DEBUG, "Top of manager thread")
-
- # try for incoming messages from node
- log(DEBUG, "Testing for incoming message")
- if self._msgIncoming():
- log(DEBUG, "Retrieving incoming message")
- msg = self._rxMsg()
- log(DEBUG, "Got incoming message, dispatching")
- self._on_rxMsg(msg)
- log(DEBUG, "back from on_rxMsg")
- else:
- log(DEBUG, "No incoming message from node")
-
- # try for incoming requests from clients
- log(DEBUG, "Testing for client req")
- try:
- req = self.clientReqQueue.get(True, pollTimeout)
- log(DEBUG, "Got client req, dispatching")
- self._on_clientReq(req)
- log(DEBUG, "Back from on_clientReq")
- except Queue.Empty:
- log(DEBUG, "No incoming client req")
- pass
-
- self._log(INFO, "Manager thread terminated normally")
- return
-
- except:
- traceback.print_exc()
- self._log(CRITICAL, "manager thread crashed")
-
- def _msgIncoming(self):
- """
- Returns True if a message is coming in from the node
- """
- return len(select.select([self.socket], [], [], pollTimeout)[0]) > 0
-
- def _submitCmd(self, id, cmd, **kw):
- """
- Submits a command for execution
-
- Arguments:
- - id - the command identifier
- - cmd - the command name, such as 'ClientPut'
-
- Keywords:
- - async - whether to return a JobTicket object, rather than
- the command result
- - callback - a function taking 2 args 'status' and 'value'.
- Status is one of 'successful', 'pending' or 'failed'.
- value is the primitive return value if successful, or the raw
- node message if pending or failed
- - rawcmd - a raw command buffer to send directly
- - options specific to command such as 'URI'
-
- Returns:
- - if command is sent in sync mode, returns the result
- - if command is sent in async mode, returns a JobTicket
- object which the client can poll or block on later
- """
- async = kw.pop('async', False)
- job = JobTicket(self, id, cmd, kw)
-
- self.clientReqQueue.put(job)
-
-        self._log(DEBUG, "_submitCmd: id=%s cmd=%s kw=%s" % (id, cmd, str(kw)[:256]))
-
- if cmd == 'WatchGlobal':
- return
- elif async:
- return job
- else:
- return job.wait()
-
- def _on_rxMsg(self, msg):
- """
- Handles incoming messages from node
-
- If an incoming message represents the termination of a command,
- the job ticket object will be notified accordingly
- """
- log = self._log
-
- # find the job this relates to
- id = msg.get('Identifier', '__global')
-
- hdr = msg['header']
-
- job = self.jobs.get(id, None)
- if not job:
- # we have a global job and/or persistent job from last connection
- log(INFO, "Got %s from prior session" % hdr)
- job = JobTicket(self, id, hdr, msg)
- self.jobs[id] = job
-
- # action from here depends on what kind of message we got
-
- # -----------------------------
- # handle GenerateSSK responses
-
- if hdr == 'SSKKeypair':
- # got requested keys back
- keys = (msg['RequestURI'], msg['InsertURI'])
- job.callback('successful', keys)
- job._putResult(keys)
-
- # and remove job from queue
- self.jobs.pop(id, None)
- return
-
- # -----------------------------
- # handle ClientGet responses
-
- if hdr == 'DataFound':
- log(INFO, "Got DataFound for URI=%s" % job.kw['URI'])
- mimetype = msg['Metadata.ContentType']
- if job.kw.has_key('Filename'):
- # already stored to disk, done
- #resp['file'] = file
- result = (mimetype, job.kw['Filename'])
- job.callback('successful', result)
- job._putResult(result)
- return
-
- elif job.kw['ReturnType'] == 'none':
- result = (mimetype, 1)
- job.callback('successful', result)
- job._putResult(result)
- return
-
- # otherwise, we're expecting an AllData and will react to it then
- else:
- # is this a persistent get?
- if job.kw['ReturnType'] == 'direct' \
- and job.kw.get('Persistence', None) != 'connection':
- # gotta poll for request status so we can get our data
- # FIXME: this is a hack, clean it up
- log(INFO, "Request was persistent")
- if not hasattr(job, "gotPersistentDataFound"):
- if job.isGlobal:
- isGlobal = "true"
- else:
- isGlobal = "false"
- job.gotPersistentDataFound = True
- log(INFO, " --> sending GetRequestStatus")
- self._txMsg("GetRequestStatus",
- Identifier=job.kw['Identifier'],
-                                    Persistence=msg.get("Persistence", "connection"),
- Global=isGlobal,
- )
-
- job.callback('pending', msg)
- job.mimetype = mimetype
- return
-
- if hdr == 'AllData':
- result = (job.mimetype, msg['Data'])
- job.callback('successful', result)
- job._putResult(result)
- return
-
- if hdr == 'GetFailed':
- # return an exception
- job.callback("failed", msg)
- job._putResult(FCPGetFailed(msg))
- return
-
- # -----------------------------
- # handle ClientPut responses
-
-        if hdr == 'URIGenerated':
-
-            job.uri = msg['URI']
-            newUri = msg['URI']
-            job.callback('pending', msg)
-
-            # bail here if no data coming back
-            if job.kw.get('GetCHKOnly', False) == 'true':
-                # done - only wanted a CHK
-                job._putResult(newUri)
-
-            return
-
- if hdr == 'PutSuccessful':
- result = msg['URI']
- job.callback('successful', result)
- job._putResult(result)
- return
-
- if hdr == 'PutFailed':
- job.callback('failed', msg)
- job._putResult(FCPPutFailed(msg))
- return
-
- # -----------------------------
- # handle progress messages
-
- if hdr == 'StartedCompression':
- job.callback('pending', msg)
- return
-
- if hdr == 'FinishedCompression':
- job.callback('pending', msg)
- return
-
- if hdr == 'SimpleProgress':
- job.callback('pending', msg)
- return
-
- # -----------------------------
- # handle persistent job messages
-
- if hdr == 'PersistentGet':
- job.callback('pending', msg)
- job._appendMsg(msg)
- return
-
- if hdr == 'PersistentPut':
- job.callback('pending', msg)
- job._appendMsg(msg)
- return
-
- if hdr == 'PersistentPutDir':
- job.callback('pending', msg)
- job._appendMsg(msg)
- return
-
- if hdr == 'EndListPersistentRequests':
- job._appendMsg(msg)
- job.callback('successful', job.msgs)
- job._putResult(job.msgs)
- return
-
- # -----------------------------
- # handle various errors
-
- if hdr == 'ProtocolError':
- job.callback('failed', msg)
- job._putResult(FCPProtocolError(msg))
- return
-
- if hdr == 'IdentifierCollision':
- log(ERROR, "IdentifierCollision on id %s ???" % id)
- job.callback('failed', msg)
- job._putResult(Exception("Duplicate job identifier %s" % id))
- return
-
- # -----------------------------
- # wtf is happening here?!?
-
- log(ERROR, "Unknown message type from node: %s" % hdr)
- job.callback('failed', msg)
- job._putResult(FCPException(msg))
-        return
-
-    def _on_clientReq(self, job):
- """
- takes an incoming request job from client and transmits it to
- the fcp port, and also registers it so the manager thread
- can action responses from the fcp port.
- """
- id = job.id
- cmd = job.cmd
- kw = job.kw
-
- # register the req
- if cmd != 'WatchGlobal':
- self.jobs[id] = job
-
- # now can send, since we're the only one who will
- self._txMsg(cmd, **kw)
-
-
-    # low level node comms methods
-
- def _hello(self):
- """
- perform the initial FCP protocol handshake
- """
- self._txMsg("ClientHello",
- Name=self.name,
- ExpectedVersion=expectedVersion)
-
- resp = self._rxMsg()
- return resp
-
- def _getUniqueId(self):
- """
- Allocate a unique ID for a request
- """
- return "id" + str(int(time.time() * 1000000))
-
- def _txMsg(self, msgType, **kw):
- """
- low level message send
-
- Arguments:
- - msgType - one of the FCP message headers, such as 'ClientHello'
- - args - zero or more (keyword, value) tuples
- Keywords:
- - rawcmd - if given, this is the raw buffer to send
- - other keywords depend on the value of msgType
- """
- log = self._log
-
- # just send the raw command, if given
- rawcmd = kw.get('rawcmd', None)
- if rawcmd:
- self.socket.send(rawcmd)
- log(DETAIL, "CLIENT: %s" % rawcmd)
- return
-
- if kw.has_key("Data"):
- data = kw.pop("Data")
- sendEndMessage = False
- else:
- data = None
- sendEndMessage = True
-
- items = [msgType + "\n"]
- log(DETAIL, "CLIENT: %s" % msgType)
-
- #print "CLIENT: %s" % msgType
- for k, v in kw.items():
- #print "CLIENT: %s=%s" % (k,v)
- line = k + "=" + str(v)
- items.append(line + "\n")
- log(DETAIL, "CLIENT: %s" % line)
-
- if data != None:
- items.append("DataLength=%d\n" % len(data))
- log(DETAIL, "CLIENT: DataLength=%d" % len(data))
- items.append("Data\n")
- log(DETAIL, "CLIENT: ...data...")
- items.append(data)
-
- #print "sendEndMessage=%s" % sendEndMessage
-
- if sendEndMessage:
- items.append("EndMessage\n")
- log(DETAIL, "CLIENT: EndMessage")
- raw = "".join(items)
-
- self.socket.send(raw)
-
- def _rxMsg(self):
- """
- Receives and returns a message as a dict
-
- The header keyword is included as key 'header'
- """
- log = self._log
-
- # shorthand, for reading n bytes
- def read(n):
- if n > 1:
- log(DEBUG, "read: want %d bytes" % n)
- chunks = []
- remaining = n
- while remaining > 0:
- chunk = self.socket.recv(remaining)
- chunklen = len(chunk)
- if chunk:
- chunks.append(chunk)
- remaining -= chunklen
- if remaining > 0:
- if n > 1:
- log(DEBUG,
-                            "wanted %s, got %s still need %s bytes" % (n, chunklen, remaining)
- )
- pass
- buf = "".join(chunks)
- return buf
-
- # read a line
- def readln():
- buf = []
- while True:
- c = read(1)
- buf.append(c)
- if c == '\n':
- break
- ln = "".join(buf)
- log(DETAIL, "NODE: " + ln[:-1])
- return ln
-
- items = {}
-
- # read the header line
- while True:
- line = readln().strip()
- if line:
- items['header'] = line
- break
-
- # read the body
- while True:
- line = readln().strip()
- if line in ['End', 'EndMessage']:
- break
-
- if line == 'Data':
- # read the following data
- buf = read(items['DataLength'])
- items['Data'] = buf
- log(DETAIL, "NODE: ...<%d bytes of data>" % len(buf))
- break
- else:
- # it's a normal 'key=val' pair
- try:
- k, v = line.split("=")
- except:
- #print "unexpected: %s"% line
- raise
-
- # attempt int conversion
- try:
- v = int(v)
- except:
- pass
-
- items[k] = v
-
- # all done
- return items
-
- def _log(self, level, msg):
- """
- Logs a message. If level > verbosity, don't output it
- """
- if level > self.verbosity:
- return
-
- if not msg.endswith("\n"): msg += "\n"
-
- self.logfile.write(msg)
- self.logfile.flush()
-
-
-class JobTicket:
- """
- A JobTicket is an object returned to clients making
- asynchronous requests. It puts them in control of how
- they manage n concurrent requests.
-
- When you as a client receive a JobTicket, you can choose to:
- - block, awaiting completion of the job
- - poll the job for completion status
- - receive a callback upon completion
-
- Attributes of interest:
- - isPersistent - True if job is persistent
- - isGlobal - True if job is global
- - value - value returned upon completion, or None if not complete
- - node - the node this job belongs to
- - id - the job Identifier
- - cmd - the FCP message header word
- - kw - the keywords in the FCP header
- - msgs - any messages received from node in connection
- to this job
- """
- def __init__(self, node, id, cmd, kw):
- """
- You should never instantiate a JobTicket object yourself
- """
- self.node = node
- self.id = id
- self.cmd = cmd
-
- # find out if persistent
-        if kw.get("Persistence", "connection") != "connection" \
- or kw.get("PersistenceType", "connection") != "connection":
- self.isPersistent = True
- else:
- self.isPersistent = False
-
- if kw.get('Global', 'false') == 'true':
- self.isGlobal = True
- else:
- self.isGlobal = False
-
- self.kw = kw
-
- self.msgs = []
-
- callback = kw.pop('callback', None)
- if callback:
- self.callback = callback
-
-
- self.lock = threading.Lock()
- self.lock.acquire()
- self.result = None
-
- def isComplete(self):
- """
- Returns True if the job has been completed
- """
- return self.result != None
-
- def wait(self, timeout=None):
- """
- Waits forever (or for a given timeout) for a job to complete
- """
- self.lock.acquire()
- self.lock.release()
-        return self.getResult()
-
-    def getResult(self):
- """
- Returns result of job, or None if job still not complete
-
- If result is an exception object, then raises it
- """
- if isinstance(self.result, Exception):
- raise self.result
- else:
- return self.result
-
- def callback(self, status, value):
- """
- This will be replaced in job ticket instances wherever
- user provides callback arguments
- """
- # no action needed
-
- def cancel(self):
- """
- Cancels the job, if it is persistent
-
- Does nothing if the job was not persistent
- """
- if not self.isPersistent:
- return
-
- # remove from node's jobs lists
- try:
- del self.node.jobs[self.id]
- except:
- pass
-
- # send the cancel
- if self.isGlobal:
- isGlobal = "true"
- else:
-            isGlobal = "false"
-
- self.node._txMsg("RemovePersistentRequest",
- Global=isGlobal,
- Identifier=self.id)
-
- def _appendMsg(self, msg):
- self.msgs.append(msg)
-
- def _putResult(self, result):
- """
- Called by manager thread to indicate job is complete,
- and submit a result to be picked up by client
- """
- self.result = result
-
- if not self.isPersistent:
- try:
- del self.node.jobs[self.id]
- except:
- pass
-
- self.lock.release()
-
- def __repr__(self):
- if self.kw.has_key("URI"):
- uri = " URI=%s" % self.kw['URI']
- else:
- uri = ""
-        return "<FCP job %s:%s%s>" % (self.id, self.cmd, uri)
-
-
-def toBool(arg):
- try:
- arg = int(arg)
- if arg:
- return "true"
- except:
- pass
-
- if isinstance(arg, str):
- if arg.strip().lower()[0] == 't':
- return "true"
- else:
- return "false"
-
-    if arg:
-        return "true"
-    else:
-        return "false"
-
-def readdir(dirpath, prefix='', gethashes=False):
- """
- Reads a directory, returning a sequence of file dicts.
-
- Arguments:
- - dirpath - relative or absolute pathname of directory to scan
- - gethashes - also include a 'hash' key in each file dict, being
- the SHA1 hash of the file's name and contents
-
- Each returned dict in the sequence has the keys:
- - fullpath - usable for opening/reading file
- - relpath - relative path of file (the part after 'dirpath'),
-      for the 'SSK@blahblah//relpath' URI
- - mimetype - guestimated mimetype for file
- """
-
- #set_trace()
- #print "dirpath=%s, prefix='%s'" % (dirpath, prefix)
- entries = []
- for f in os.listdir(dirpath):
- relpath = prefix + f
- fullpath = dirpath + "/" + f
- if f == '.freesiterc':
- continue
- if os.path.isdir(fullpath):
- entries.extend(readdir(dirpath+"/"+f, relpath + "/", gethashes))
- else:
-            #entries[relpath] = {'mimetype':'blah/shit', 'fullpath':dirpath+"/"+relpath}
- fullpath = dirpath + "/" + f
- entry = {'relpath' :relpath,
- 'fullpath':fullpath,
- 'mimetype':guessMimetype(f)
- }
- if gethashes:
- h = sha.new(relpath)
- fobj = file(fullpath, "rb")
- while True:
- buf = fobj.read(262144)
- if len(buf) == 0:
- break
- h.update(buf)
- fobj.close()
- entry['hash'] = h.hexdigest()
- entries.append(entry)
- entries.sort(lambda f1,f2: cmp(f1['relpath'], f2['relpath']))
-
- return entries
-
-def guessMimetype(filename):
- """
- Returns a guess of a mimetype based on a filename's extension
- """
- m = mimetypes.guess_type(filename, False)[0]
- if m == None:
- m = "text/plain"
- return m
-
-
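
For anyone still depending on the module removed above, typical usage of fcp.py
looked roughly like the following minimal sketch (Python 2). It assumes a Freenet
node listening for FCP on the default 127.0.0.1:9481; the inserted data and the
callback name are placeholders, not part of the original module.

    import fcp

    node = fcp.FCPNodeConnection(host="127.0.0.1", port=9481, verbosity=fcp.INFO)

    # generate an SSK keypair - returns (publicURI, privateURI)
    pub, priv = node.genkey()

    # synchronous insert of a small piece of data, then fetch it back
    uri = node.put("CHK@", data="hello freenet", mimetype="text/plain")
    mimetype, data = node.get(uri)
    print mimetype, len(data)

    # asynchronous fetch - returns a JobTicket immediately
    job = node.get(uri, async=True)
    # the client could poll job.isComplete() here, or simply block:
    mimetype, data = job.wait()

    # optional progress callback, called as callback(status, value) where
    # status is one of 'pending', 'successful' or 'failed'
    def onProgress(status, value):
        print "job status:", status

    node.get(uri, callback=onProgress)

    node.shutdown()
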
Deleted: trunk/apps/pyFreenet/fcpsitemgr.py
===================================================================
--- trunk/apps/pyFreenet/fcpsitemgr.py 2006-05-16 22:14:50 UTC (rev 8721)
+++ trunk/apps/pyFreenet/fcpsitemgr.py 2006-05-16 22:15:45 UTC (rev 8722)
@@ -1,249 +0,0 @@
-#! /usr/bin/env python
-"""
-A small freesite insertion/management utility
-"""
-import fcp, sys, os, sha, traceback
-
-# get log level constants
-from fcp import SILENT, FATAL, CRITICAL, ERROR, INFO, DETAIL, DEBUG
-
-from ConfigParser import SafeConfigParser
-
-fcpHost = "thoth"
-fcpPort = None
-#verbosity = fcp.DETAIL
-verbosity = None
-logfile = None
-
-class SiteMgr:
- """
- Manages insertion and updating of freesites
- """
- def __init__(self, configFile=None, **kw):
- """
- Creates a site manager object.
-
- Arguments:
- - configFile - ini-format file containing site specifications,
-              defaults to ~/.freesites on *nix or ~/freesites.ini on Windows
-
- Keywords:
- - logfile - a pathname or open file object to which to write
-              log messages, defaults to sys.stderr
- """
- # set up the logger
- logfile = kw.pop('logfile', sys.stderr)
- if not hasattr(logfile, 'write'):
- # might be a pathname
- if not isinstance(logfile, str):
- raise Exception("Bad logfile, must be pathname or file object")
- logfile = file(logfile, "a")
- self.logfile = logfile
- self.verbosity = kw.get('verbosity', 0)
-
- # get a node handle
- self.createNode(logfile=logfile, **kw)
-
- # determine pathname for sites ini file
- if configFile == None:
- isDoze = sys.platform.lower().startswith("win")
- homedir = os.path.expanduser("~")
- if isDoze:
- filename = "freesites.ini"
- else:
- filename = ".freesites"
- configFile = os.path.join(homedir, filename)
-
- self.configFile = configFile
-
- if not os.path.isfile(configFile):
- self.createConfig()
- self._log(CRITICAL, "New config file created at %s" % configFile)
- self._log(CRITICAL, "Please edit that file and add your freesites")
-
- self.loadConfig()
-
- def __del__(self):
-
- try:
- if hasattr(self, 'node'):
- self.node.shutdown()
- del self.node
- self.node = None
- except:
- pass
-
- def createConfig(self):
- """
- Creates a whole new config
- """
- file(self.configFile, "w").write("\n".join([
- "# config file for freesites",
- "# being inserted via pyfcp 'sitemgr' utility",
- "#",
- "# edit this file with care",
- "",
- "# ignore this, it's not used",
- "[DEFAULT]",
- "",
- "# for each new site, just take a copy of the following",
- "# 2 lines, uncomment them and change as needed",
- "",
- "# [mysite]",
- "# dir=/path/to/mysite/directory",
- "",
- "",
- ]))
-
- def createNode(self, **kw):
-
- #kw = {}
-
- if fcpHost and not kw.has_key("fcpHost"):
- kw['host'] = fcpHost
- if fcpPort and not kw.has_key("fcpPort"):
- kw['port'] = fcpPort
- if verbosity and not kw.has_key("verbosity"):
- kw['verbosity'] = verbosity
- if logfile and not kw.has_key("logfile"):
- kw['logfile'] = logfile
-
- #print kw
-
- self.node = fcp.FCPNodeConnection(**kw)
-
- def loadConfig(self):
- """
- Loads the sites config file into self.config as a SafeConfigParser
- object
- """
- conf = self.config = SafeConfigParser()
- conf.read(self.configFile)
-
- needToSave = False
-
- # fill in any incomplete details with site entries
- for sitename in conf.sections():
-
- if not conf.has_option(sitename, "dir"):
-                raise Exception("Config file error: No directory specified for site '%s'" \
- % sitename)
-
- if not conf.has_option(sitename, "hash"):
- needToSave = True
- conf.set(sitename, "hash", "")
-
- if not conf.has_option(sitename, "version"):
- needToSave = True
- conf.set(sitename, "version", "0")
-
- if not conf.has_option(sitename, "privatekey"):
- needToSave = True
- pub, priv = self.node.genkey()
- uri = pub.replace("SSK@", "USK@") + sitename + "/0"
- conf.set(sitename, "uri", uri)
- conf.set(sitename, "privatekey", priv)
-
- if needToSave:
- self.saveConfig()
-
- def saveConfig(self):
- """
- Saves the amended config file to disk
- """
- self.createConfig()
-
- f = file(self.configFile, "a")
-
- self.config.write(f)
-
- f.close()
-
- def update(self):
- """
- Insert/update all registered freesites
- """
- noSites = True
-
- log = self._log
-
- conf = self.config
- for sitename in conf.sections():
- uri = conf.get(sitename, "uri")
- dir = conf.get(sitename, "dir")
- hash = conf.get(sitename, "hash")
- version = conf.get(sitename, "version")
- privatekey = conf.get(sitename, "privatekey")
-
- files = fcp.readdir(dir, gethashes=True)
- h = sha.new()
- for f in files:
- h.update(f['hash'])
- hashNew = h.hexdigest()
- if hashNew != hash:
- log(INFO, "Updating site %s" % sitename)
- log(INFO, "privatekey=%s" % privatekey)
- noSites = False
- try:
- res = self.node.put(privatekey,
- dir=dir,
- name=sitename,
- version=version,
- usk=True)
- log(INFO, "site %s updated successfully" % sitename)
- except:
- traceback.print_exc()
- log(ERROR, "site %s failed to update" % sitename)
- conf.set(sitename, "hash", hashNew)
-
- self.saveConfig()
-
- if noSites:
- log(INFO, "No sites needed updating")
-
- def shutdown(self):
- self.node.shutdown()
-
- def _log(self, level, msg):
- """
- Logs a message. If level > verbosity, don't output it
- """
- if level > self.verbosity:
- return
-
- if not msg.endswith("\n"): msg += "\n"
-
- self.logfile.write(msg)
- self.logfile.flush()
-
-
-def help():
- print "%s: A console-based, cron-able freesite inserter" % sys.argv[0]
- print "This utility inserts/updates freesites, and is"
- print "driven by a simple config file."
- print
- print "The first time you run this utility, a config file"
-    print "will be created for you in your home directory."
- print "You will be told where this file is (~/.freesites on *nix"
- print "or ~/freesites.ini on doze)"
- print "then you can edit this file and add details of"
- print "your freesites, and run it again."
- print
- print "Note - freesites are only updated if they have"
- print "changed since the last update, because a hash"
- print "of each site gets stored in the config"
-
- sys.exit(0)
-
-if __name__ == '__main__':
-
- if '-h' in sys.argv:
- help()
-
- if '-v' in sys.argv:
- verbosity = fcp.DETAIL
-
- s = SiteMgr()
- s.update()
- s.shutdown()
-
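
For reference, the sitemgr utility removed above was driven by an ini file
(~/.freesites on *nix, ~/freesites.ini on Windows) containing one section per
freesite, e.g. (the site name and path are placeholders):

    [mysite]
    dir=/path/to/mysite/directory

and was used either from the command line or programmatically, along these
lines (a minimal sketch):

    import fcpsitemgr

    s = fcpsitemgr.SiteMgr()   # reads the config file, creating it if absent
    s.update()                 # re-inserts any site whose content hash changed
    s.shutdown()
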
Deleted: trunk/apps/pyFreenet/fcpxmlrpc.py
===================================================================
--- trunk/apps/pyFreenet/fcpxmlrpc.py 2006-05-16 22:14:50 UTC (rev 8721)
+++ trunk/apps/pyFreenet/fcpxmlrpc.py 2006-05-16 22:15:45 UTC (rev 8722)
@@ -1,231 +0,0 @@
-#! /usr/bin/env python
-"""
-fcpxmlrpc.py
-
-Exposes some pyfcp primitives over an XML-RPC service
-"""
-
-import sys
-from SimpleXMLRPCServer import SimpleXMLRPCServer
-from SocketServer import ThreadingMixIn
-
-import fcp
-
-# where to listen, for the xml-rpc server
-xmlrpcHost = "127.0.0.1"
-xmlrpcPort = 19481
-
-class FCPXMLRPCServer(ThreadingMixIn, SimpleXMLRPCServer):
- """
- Multi-threaded XML-RPC server for freenet FCP access
- """
- def __init__(self, **kw):
- """
- Creates the xml-rpc server
-
- Keywords:
-        - host - hostname to listen on for xml-rpc requests, default 127.0.0.1
- - port - port to listen on for xml-rpc requests, default 19481
- - fcpHost - hostname where FCP port is
- - fcpPort - port where FCP port is
-        - verbosity - verbosity of output messages, 0 (silent) through 6 (noisy),
-          default 0 (fcp.SILENT). Refer to the verbosity constants in the fcp module
- """
- # create the server
- host = kw.get('host', xmlrpcHost)
- port = kw.get('port', xmlrpcPort)
-
- SimpleXMLRPCServer.__init__(self, (host, port))
-
- # create the fcp node interface
- fcpHost = kw.get('fcpHost', fcp.defaultFCPHost)
- fcpPort = kw.get('fcpPort', fcp.defaultFCPPort)
- verbosity = kw.get('verbosity', fcp.SILENT)
-
- node = self.node = fcp.FCPNodeConnection(host=fcpHost,
- port=fcpPort,
- verbosity=verbosity,
- )
-
- # create the request handler
- hdlr = FreenetXMLRPCRequestHandler(node)
-
- # link in the request handler object
- self.register_instance(hdlr)
- self.register_introspection_functions()
-
- def run(self):
- """
- Launch the server to run forever
- """
- try:
- self.serve_forever()
- except KeyboardInterrupt:
- self.node.shutdown()
- raise
-
-
-class FreenetXMLRPCRequestHandler:
- """
- Simple class which exposes basic primitives
- for freenet xmlrpc server
- """
- def __init__(self, node):
-
- self.node = node
-
-
- def get(self, uri, options=None):
- """
- Performs a fetch of a key
-
- Arguments:
- - uri - the URI to retrieve
- - options - a mapping (dict) object containing various
- options - refer to FCPNodeConnection.get documentation
- """
- if options==None:
- options = {}
-
- if options.has_key('file'):
- raise Exception("file option not available over XML-RPC")
- if options.has_key('dir'):
- raise Exception("dir option not available over XML-RPC")
-
- return self.node.get(uri, **options)
-
- def put(self, uri, options=None):
- """
- Inserts data to node
-
- Arguments:
- - uri - the URI to insert under
- - options - a mapping (dict) object containing various options,
- refer to FCPNodeConnection.get documentation
- """
- if options==None:
- options = {}
-
- if options.has_key('file'):
- raise Exception("file option not available over XML-RPC")
- if options.has_key('dir'):
- raise Exception("dir option not available over XML-RPC")
-
-        return self.node.put(uri, **options)
-
- def genkey(self):
-
- return self.node.genkey()
-
-
-def usage(msg="", ret=1):
-
- if msg:
- sys.stderr.write(msg+"\n")
-
- print "\n".join([
- "Freenet XML-RPC Server",
- "Usage: %s [options]" % sys.argv[0],
- "Options:",
- " -h, --help",
- " show this usage message",
- " -v, --verbosity=",
- " set verbosity level, values are:",
- " 0 (SILENT) show only 1 line for incoming hits",
- " 1 (FATAL) show only fatal messages",
- " 2 (CRITICAL) show only major failures",
- " 3 (ERROR) show significant errors",
- " 4 (INFO) show basic request details",
- " 5 (DETAIL) show FCP dialogue",
- " 6 (DEBUG) show ridiculous amounts of debug info",
- " --host=",
- " listen hostname for xml-rpc requests, default %s" % xmlrpcHost,
- " --port=",
-    "    listen port number for xml-rpc requests, default %s" % xmlrpcPort,
- " --fcphost=",
- " set hostname of freenet FCP interface, default %s" \
- % fcp.defaultFCPHost,
- " --fcpport=",
- " set port number of freenet FCP interface, default %s" \
- % fcp.defaultFCPPort,
- ])
-
- sys.exit(ret)
-
-def testServer():
-
-    runServer(host="", fcpHost="10.0.0.1", verbosity=fcp.DETAIL)
-
-def runServer(**kw):
- """
- Creates and runs a basic XML-RPC server for FCP access
-
- For keyword parameters, refer FCPXMLRPCServer constructor
- """
- FCPXMLRPCServer(**kw).run()
-
-def main():
- """
- When this script is executed, it runs the XML-RPC server
- """
- import getopt
-
- opts = {'verbosity': fcp.INFO,
- 'host':xmlrpcHost,
- 'port':xmlrpcPort,
- 'fcpHost':fcp.defaultFCPHost,
- 'fcpPort':fcp.defaultFCPPort,
- }
-
- try:
- cmdopts, args = getopt.getopt(sys.argv[1:],
- "?hv:",
- ["help", "verbosity=", "host=", "port=",
- "fcphost=", "fcpport="])
- except getopt.GetoptError:
- # print help information and exit:
- usage()
- sys.exit(2)
- output = None
- verbose = False
- #print cmdopts
- for o, a in cmdopts:
- if o in ("-h", "--help"):
- usage(ret=0)
- elif o == "--host":
- opts['host'] = a
- elif o == "--port":
- try:
- opts['port'] = int(a)
- except:
- usage("Invalid port number '%s'" % a)
- elif o == "--fcphost":
- opts['fcpHost'] = a
- elif o == "--fcpport":
- opts['fcpPort'] = a
- elif o in ['-v', '--verbosity']:
- print "setting verbosity"
- try:
- opts['verbosity'] = int(a)
- #print "verbosity=%s" % opts['verbosity']
- except:
- usage("Invalid verbosity '%s'" % a)
-
- #print "Verbosity=%s" % opts['verbosity']
-
- if opts['verbosity'] >= fcp.INFO:
- print "Launching Freenet XML-RPC server"
- print "Listening on %s:%s" % (opts['host'], opts['port'])
-        print "Talking to Freenet FCP at %s:%s" % (opts['fcpHost'], opts['fcpPort'])
-
- try:
- runServer(**opts)
- except KeyboardInterrupt:
- print "Freenet XML-RPC server terminated by user"
-
-
-
-if __name__ == '__main__':
-
- main()
-
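
For reference, the XML-RPC service removed above exposed get, put and genkey.
A minimal client sketch (Python 2, stdlib xmlrpclib), assuming the server runs
with its defaults on 127.0.0.1:19481, that the put handler forwards its options
mapping to FCPNodeConnection.put, and that the inserted data is a placeholder:

    import xmlrpclib

    proxy = xmlrpclib.ServerProxy("http://127.0.0.1:19481")

    # generate a keypair - marshalled back as a 2-element list
    pub, priv = proxy.genkey()

    # insert, then fetch; options go in a mapping ('file'/'dir' are disallowed)
    uri = proxy.put("CHK@", {"data": "hello via xml-rpc", "mimetype": "text/plain"})
    mimetype, data = proxy.get(uri)
    print mimetype, len(data)
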
Deleted: trunk/apps/pyFreenet/start.sh
===================================================================
--- trunk/apps/pyFreenet/start.sh 2006-05-16 22:14:50 UTC (rev 8721)
+++ trunk/apps/pyFreenet/start.sh 2006-05-16 22:15:45 UTC (rev 8722)
@@ -1,15 +0,0 @@
-#!/bin/bash
-
-# a wrapper which starts the freenet node,
-# and is used with the cron'ed freesite insertion
-# scripts
-
-# change this to where your bash startup script lives
-source /home/david/.bashrc
-
-# change this to where your freenet is installed
-cd /home/david/freenet
-
-# now start the freenet node
-./run.sh start
-
Deleted: trunk/apps/pyFreenet/stop.sh
===================================================================
--- trunk/apps/pyFreenet/stop.sh 2006-05-16 22:14:50 UTC (rev 8721)
+++ trunk/apps/pyFreenet/stop.sh 2006-05-16 22:15:45 UTC (rev 8722)
@@ -1,15 +0,0 @@
-#!/bin/bash
-
-# a wrapper which stops the freenet node,
-# and is used with the cron'ed freesite insertion
-# scripts
-
-# change this to where your bash startup script lives
-source /home/david/.bashrc
-
-# change this to where your freenet is installed
-cd /home/david/freenet
-
-# now stop the freenet node
-./run.sh stop
-
Deleted: trunk/apps/pyFreenet/updatesites.py
===================================================================
--- trunk/apps/pyFreenet/updatesites.py 2006-05-16 22:14:50 UTC (rev 8721)
+++ trunk/apps/pyFreenet/updatesites.py 2006-05-16 22:15:45 UTC (rev 8722)
@@ -1,97 +0,0 @@
-#!/usr/bin/env python
-"""
-A utility to update freesites from within a cron environment
-"""
-import sys, os, time, commands, traceback
-import fcpsitemgr as sitemgr
-
-# time we wait after starting fred, to allow the node to 'warm up'
-# and make connections to its peers
-startupTime = 180
-
-# directory where we have freenet installed,
-# change it as needed
-freenetDir = "/home/david/freenet"
-
-# derive path of freenet pid file, the (non)existence
-# of which is the easiest test of whether the freenet
-# node is running
-freenetPidFile = os.path.join(freenetDir, "Freenet.pid")
-
-logFile = os.path.join(freenetDir, "updatesites.log")
-pidFile = os.path.join(freenetDir, "updatesites.pid")
-
-# small wrapper which, if freenet isn't already running,
-# starts it prior to inserting then stops it after
-# inserting
-def main(verbose=None):
-
- os.chdir(freenetDir)
-
- if verbose == None:
- verbose = ('-v' in sys.argv)
-
- if os.path.isfile(pidFile):
- print "updatesites.py already running: pid=%s" % file(pidFile).read()
- sys.exit(1)
- f = file(pidFile, "w")
- f.write(str(os.getpid()))
- f.close()
-
- logfile = file(logFile, "w")
- logfile.write("----------------------\n")
- logfile.write(time.asctime() + "\n")
-
- try:
- print "--------------------------------------------"
- print "Start of site updating run"
- print "Status being logged to file %s" % logFile
-
- # start freenet and let it warm up, if it's not already running
- if not os.path.isfile(freenetPidFile):
- startingFreenet = True
- os.chdir(freenetDir)
- print "Starting freenet..."
- print os.system("%s/start.sh &" % freenetDir)
- print "Letting node settle for %s seconds..." % startupTime
- time.sleep(startupTime)
- else:
- print "Freenet node is already running!"
- startingFreenet = False
-
- # add verbosity argument if needed
- if verbose:
- kw = {"verbosity" : sitemgr.fcp.DETAIL}
- else:
- kw = {"verbosity" : sitemgr.fcp.INFO}
-
- # get a site manager object, and perform the actual insertions
- print "Creating SiteMgr object"
- s = sitemgr.SiteMgr(logfile=logfile, **kw)
- print "Starting updates"
- try:
- s.update()
- except:
- traceback.print_exc()
- print "Updates done"
- del s
-
- # kill freenet if it was dynamically started
- if startingFreenet:
- print "Waiting %s for inserts to finish..." % startupTime
- time.sleep(startupTime)
- print "Stopping node..."
- os.system("./run.sh stop")
- print "Node stopped"
- else:
- print "Not killing freenet - it was already running"
- except:
- traceback.print_exc()
- pass
-
- # can now drop our pidfile
- os.unlink(pidFile)
-
-if __name__ == '__main__':
- main()
-
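
Since updatesites.py guards itself with a pidfile and starts/stops the node as
needed, it was meant to be driven from cron; a crontab entry along these lines
would have done the job (the schedule and interpreter invocation are examples
only, the freenet directory is the one hardcoded above):

    # run the freesite updater daily at 04:30
    30 4 * * *    python /home/david/freenet/updatesites.py
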