Update of /cvsroot/freevo/freevo/WIP/Aubin
In directory sc8-pr-cvs1.sourceforge.net:/tmp/cvs-serv27867
Added Files:
patch-download.diff
Log Message:
Patch for RSS downloader... WIP
--- NEW FILE: patch-download.diff ---
diff -X diffexclude -Naur freevo/src/plugins/download.py
freevo-new/src/plugins/download.py
--- freevo/src/plugins/download.py 1969-12-31 19:00:00.000000000 -0500
+++ freevo-new/src/plugins/download.py 2004-06-16 23:38:22.000000000 -0400
@@ -0,0 +1,416 @@
+#if 0 /*
+# -----------------------------------------------------------------------
+# download.py - A plugin to handle downloading of video files
+# -----------------------------------------------------------------------
+# akb at demandmedia.net 6/04
+#
+# The download.py plugin is a daemon that downloads files from the Internet
+# at the request of other parts of freevo such as the web frontend and rss
+# reader. It polls for new downloads or control commands and uses a threaded
+# downloader to retrieve them. Additionally, if configured, the module
+# will poll a list of video syndication feeds at a specified time and download
+# any new videos.
+#
+# To access this module from elsewhere in Freevo see util/downloadinterface.py
+#
+# To activate:
+#
+# plugin.activate('plugins.download')
+# DOWNLOAD_DIR = '/home/akb/freevo/downloads' # where to store the downloads
+#DOWNLOAD_HEADLINES = [ ('Punkcast','http://demandmedia.net/xml/punkcast-rss2.xml'),
+# ('Internet Archive',
'http://demandmedia.net/xml/archive-rss2.xml'),
+# ('demandmedia', 'http://demandmedia.net/xml/rss2.xml') ]
+# DOWNLOAD_HEADLINES_TIME = "0400" # time to poll the feeds for new
videos
+# DOWNLOAD_MAX_SIMULTANEOUS = 3 # maximum active automatic downloads
at once
+#
+# You may also want to add DOWNLOAD_DIR to VIDEO_ITEMS:
+#
+# VIDEO_ITEMS = [ ('downloads', '/home/akb/freevo/downloads') ]
+#
+# todo
+# - bug in urllib2 that hampers 302's that have a space in them
+# - http resume so that pause doesn't just start over
+# - get the size of the actual download
+# - OSD frontend
+# - communicate with external p2p downloaders (bt, mldonkey, etc)
+#endif
+
+import anydbm
+import md5
+import pickle
+import string
+import urlparse
+import time
+import threading
+import os.path
+import util.downloadmanager as downloadmanager
+import util.downloadinterface as downloadinterface
+import util.headlinesfetcher as headlinesfetcher
+import config
+import plugin
+import glob
+import re
+from util.fxdimdb import FxdImdb, makeVideo
+
+DEBUG = 0
class PluginInterface(plugin.DaemonPlugin):
    """
    A daemon plugin to allow Freevo to download media from the Internet.
    Accepts commands from other parts of Freevo and periodically polls
    syndication feeds for downloads.  Currently the management frontend
    is through the web plugin, so you'll have to turn that on.

    plugin.activate('plugins.download')
    DOWNLOAD_DIR = '/home/akb/freevo/downloads'
    DOWNLOAD_HEADLINES = [
        ('punkcast.com', 'http://demandmedia.net/xml/punkcast-rss2.xml'),
        ('Internet Archive Films',
         'http://demandmedia.net/xml/archive-feature_films-rss2.xml'),
        ('demandmedia', 'http://demandmedia.net/xml/rss2.xml') ]
    DOWNLOAD_HEADLINES_TIME = "0400"
    DOWNLOAD_MAX_SIMULTANEOUS = 3

    You may also want to add DOWNLOAD_DIR to VIDEO_ITEMS:

    VIDEO_ITEMS = [ ('downloads', '/home/akb/freevo/downloads') ]
    """

    def __init__(self):
        # Refuse to start when the target directory is missing; Freevo
        # reports self.reason when a plugin fails to activate.
        if not os.path.isdir(config.DOWNLOAD_DIR):
            self.reason = 'DOWNLOAD_DIR not found'
            return
        plugin.DaemonPlugin.__init__(self)
        # poll_interval is in 1/100ths of a second: 500 == 5 seconds
        self.poll_interval = 500

        self.downloads = []            # actively downloading
        self.queued_downloads = []     # waiting for a free slot
        self.bad_downloads = []        # ended with an error
        self.paused_downloads = []     # paused by the user

        # look for previous downloads to resume
        for d in glob.glob(config.DOWNLOAD_DIR + "/*.dwd"):
            self.do_download(os.path.basename(d))

    def shutdown(self):
        self.pause_all()

    def do_download(self, f):
        """Load the pickled DownloadInfo file *f* (relative to
        DOWNLOAD_DIR) and either start it or queue it, depending on how
        many downloads are already active."""
        fh = open(config.DOWNLOAD_DIR + "/" + f)
        try:
            di = pickle.load(fh)
        finally:
            # the original leaked this file handle
            fh.close()
        resumeddownload = Download(di.id, di.url, di.title, di.description,
                                   di.genre, di.image, di.date,
                                   di.destination, di.size, f)
        if resumeddownload.status == downloadinterface.D_ERROR:
            # bad file: nothing sensible to do with it here
            return
        if len(self.downloads) < config.DOWNLOAD_MAX_SIMULTANEOUS:
            if resumeddownload.download():
                self.downloads.append(resumeddownload)
            else:
                self.bad_downloads.append(resumeddownload)
                resumeddownload.status = downloadinterface.D_ERROR
        else:
            self.queued_downloads.append(resumeddownload)
            resumeddownload.status = downloadinterface.D_READY
            resumeddownload.update_status()

    def progress_update(self):
        """Reap finished/failed downloads and promote queued ones."""
        # Iterate over a copy: the original removed items from
        # self.downloads while iterating it with a manually-maintained
        # index that kept incrementing after a removal, so entries were
        # skipped and the wrong element could be deleted.
        for d in self.downloads[:]:
            d.update_status()
            if d.status == downloadinterface.D_ERROR:
                self.downloads.remove(d)
                self.bad_downloads.append(d)
            elif d.status == downloadinterface.D_DONE:
                self.downloads.remove(d)
                d.create_fxd()

        # fill any free slots from the queue, oldest first; the original
        # pop(x)-while-iterating could start a different item than the
        # one it appended
        while (self.queued_downloads and
               len(self.downloads) < config.DOWNLOAD_MAX_SIMULTANEOUS):
            t = self.queued_downloads.pop(0)
            t.download()
            self.downloads.append(t)

    def poll(self):
        self.progress_update()

        # now check for submissions, cancels or pauses from other modules
        for d in downloadinterface.pop_all_queued():
            self.enqueue(d.id, d.url, d.title, d.description, d.genre, d.image)
        for id in downloadinterface.pop_all_paused():
            self.pause(id)
        for id in downloadinterface.pop_all_cancelled():
            self.cancel(id)
        for id in downloadinterface.pop_all_resumed():
            self.resume(id)

        # download enclosures from rss feeds if it's that time
        if config.DOWNLOAD_HEADLINES:
            if (len(config.DOWNLOAD_HEADLINES_TIME) == 4 and
                    config.DOWNLOAD_HEADLINES_TIME.isdigit()):
                h = int(config.DOWNLOAD_HEADLINES_TIME[0:2])
                m = int(config.DOWNLOAD_HEADLINES_TIME[2:4])
                now = time.localtime()
                snow = time.mktime(now)
                dtime = time.mktime((now.tm_year, now.tm_mon, now.tm_mday,
                                     h, m, 0, now.tm_wday, now.tm_yday,
                                     now.tm_isdst))
                # fire once, inside the poll window just after the
                # configured time (poll_interval is in 1/100 s)
                if 0 <= snow - dtime <= self.poll_interval / 100:
                    self.do_headlinesfetcher()

    def do_headlinesfetcher(self):
        """Poll every configured feed and queue enclosures we have not
        downloaded before (tracked by md5 of the url in a dbm file)."""
        headlines = []

        f = "%s/downloaded_enclosures.db" % config.DOWNLOAD_DIR
        db = anydbm.open(f, 'c')

        for feed in config.DOWNLOAD_HEADLINES:
            headlines.append(headlinesfetcher.headlinesfetcher(feed))

        # queue the enclosures up round robin rather than all
        # from one site followed by all from another site
        while headlines:
            for site in headlines[:]:
                if not site:
                    # exhausted feed: drop it (index-based removal in the
                    # original drifted while iterating)
                    headlines.remove(site)
                    continue
                d = site.pop(0)

                (title, link, description, enclosure_url,
                 enclosure_mime, enclosure_size, site_name) = d
                if enclosure_size.isdigit():
                    enclosure_size = int(enclosure_size)
                else:
                    enclosure_size = 0
                if enclosure_url.find('http://') >= 0:
                    md5_url = md5.new(enclosure_url).hexdigest()
                    if not db.has_key(md5_url):
                        self.enqueue(md5_url,
                                     enclosure_url,
                                     title,
                                     description,
                                     '', '',
                                     mime=enclosure_mime,
                                     size=enclosure_size,
                                     site_name=site_name)
                        db[md5_url] = "1"
                        db.sync()
        db.close()

    def enqueue(self, id, url, title, description, genre, image,
                destination=None, mime=None, size=0, site_name=''):
        """Register a new download: pick a unique destination filename,
        write the .dwd info file and start/queue the download.

        Returns 0 without doing anything if the url is already known.
        """
        # see if we're already downloading it
        for d in (self.downloads + self.queued_downloads +
                  self.bad_downloads + self.paused_downloads):
            if d.url == url:
                return 0

        # if a destination is not explicitly specified figure one out but:
        # - don't overwrite anything
        # - figure out an extension if there isn't one
        # - iterate if there's a name conflict
        basename = re.sub(r'\W', '_', title)[0:25]
        if site_name:
            basename += "-" + re.sub(r'\W', '_', site_name)

        # `ext`/`base` were undefined in the original when a destination
        # was passed in explicitly (NameError in the conflict loop below)
        ext = None
        base = basename
        if not destination:
            destination = os.path.basename(urlparse.urlparse(url)[2])
            if destination.find('.') == -1:
                # no extension in the url path: guess one from the mime type
                ext = downloadinterface.mime2ext(mime)
                if ext:
                    destination = basename + '.' + ext
            else:
                match = re.match(r'(.+)\.(\w+)$', destination)
                if match:
                    ext = match.group(2)
                    destination = basename + '.' + ext
                else:
                    destination = basename
        else:
            match = re.match(r'(.+)\.(\w+)$', destination)
            if match:
                base = match.group(1)
                ext = match.group(2)
            else:
                base = destination

        destinations = self.download_destinations()
        while (os.path.exists(config.DOWNLOAD_DIR + "/" + destination) or
               config.DOWNLOAD_DIR + "/" + destination in destinations):
            regroup = re.search(r'-([0-9]+)$', base)
            if regroup:
                n = int(regroup.group(1)) + 1
                # the original substituted on `destination` (which still
                # carries the extension, so the $ anchor never matched)
                # and therefore looped forever on a second conflict
                base = re.sub("-" + regroup.group(1) + "$", "-%d" % n, base)
            else:
                base = base + "-%d" % 1
            if ext:
                destination = base + '.' + ext
            else:
                destination = base

        full_destination = config.DOWNLOAD_DIR + "/" + destination
        fname = config.DOWNLOAD_DIR + "/" + basename + '.dwd'
        fh = open(fname, 'w')
        try:
            # status must be passed by keyword: positionally the original
            # dropped D_READY into DownloadInfo's `date` slot
            pickle.dump(downloadinterface.DownloadInfo(
                id, url, title, description, genre, image,
                status=downloadinterface.D_READY,
                destination=full_destination, size=size), fh)
        finally:
            # flush() alone leaked the handle
            fh.close()
        self.do_download(basename + '.dwd')

    def cancel_all(self):
        """Cancel every download in every state and remove their files."""
        for d in (self.downloads + self.bad_downloads +
                  self.queued_downloads + self.paused_downloads):
            # Download.cancel() guards against downloads that never
            # started (queued entries have no .downloader attribute,
            # which made the original raise AttributeError here)
            d.cancel()
            try:
                os.unlink(d.destination + ".fxd")
            except OSError:
                pass  # fxd only exists for completed downloads
        self.downloads = []
        self.queued_downloads = []
        self.bad_downloads = []
        self.paused_downloads = []

    def pause_all(self):
        """Pause the active downloads (used at shutdown); the on-disk
        status files are left untouched so they resume on restart."""
        self.progress_update()
        for d in self.downloads:
            if hasattr(d, 'downloader'):
                d.downloader.pause()

    def cancel(self, id):
        """Cancel the download with *id*, whatever state it is in."""
        for lst in (self.downloads, self.queued_downloads,
                    self.bad_downloads, self.paused_downloads):
            for d in lst:
                if d.id == id:
                    d.cancel()
                    lst.remove(d)
                    return

    def pause(self, id):
        """Pause an active or queued download.  Returns 1 on success."""
        for lst in (self.downloads, self.queued_downloads):
            for d in lst:
                if d.id == id:
                    d.pause()
                    lst.remove(d)
                    self.paused_downloads.append(d)
                    return 1

    def resume(self, id):
        """Restart a queued or paused download.  Returns 1 on success."""
        for lst in (self.queued_downloads, self.paused_downloads):
            for d in lst:
                if d.id == id:
                    d.download()
                    lst.remove(d)
                    self.downloads.append(d)
                    return 1

    def download_destinations(self):
        """Return the destination paths of every known download."""
        return [d.destination for d in
                self.downloads + self.queued_downloads +
                self.bad_downloads + self.paused_downloads]
+
class Download:
    """
    Holds the information about one individual download and drives a
    downloadmanager.Downloader thread for it.  Progress is checkpointed
    to a pickled DownloadInfo (the .dwd "infofile" in DOWNLOAD_DIR) so
    the download can be picked up again after a restart.
    """

    def __init__(self, id, url, title, description, genre, image, date,
                 destination, size, infofile):
        self.id = id
        self.url = url
        self.title = title
        self.description = description
        self.genre = genre
        self.image = image
        # the original accepted `date` but silently dropped it
        self.date = date
        self.destination = destination
        self.infofile = infofile     # filename of the .dwd inside DOWNLOAD_DIR
        self.status = downloadinterface.D_READY
        self.error_msg = ""
        self.size = size             # expected size in bytes (0 = unknown)
        self.sofar = 0               # bytes received so far

    def download(self):
        """Start the background download thread.  Returns 1 on success,
        0 (with status set to D_ERROR) on failure."""
        try:
            self.downloader = downloadmanager.Downloader(self.url,
                                                         self.destination)
            self.downloader.start()
            self.status = downloadinterface.D_DOWNLOADING
            return 1
        except Exception:
            # record the failure instead of just reporting it; the
            # original left status untouched here
            self.status = downloadinterface.D_ERROR
            self.error_msg = "problem trying to download"
            return 0

    def update_status(self):
        """Pull state from the downloader thread and checkpoint it to
        the infofile (removed once the download completes)."""
        if hasattr(self, 'downloader'):
            if self.downloader.error == 1:
                self.status = downloadinterface.D_ERROR
                self.error_msg = self.downloader.error_msg

            if self.downloader.size:
                self.size = self.downloader.size

            if self.status == downloadinterface.D_DOWNLOADING:
                if self.downloader.done == 1:
                    self.status = downloadinterface.D_DONE
                    # finished: the resume info file is no longer needed
                    os.unlink(config.DOWNLOAD_DIR + "/" + self.infofile)
                    return
                else:
                    self.sofar = self.downloader.sofar

        d = downloadinterface.DownloadInfo(
            self.id, self.url, self.title, self.description, self.genre,
            self.image, date=self.date, status=self.status,
            destination=self.destination, size=self.size, sofar=self.sofar,
            error_msg=self.error_msg)
        fh = open(config.DOWNLOAD_DIR + "/" + self.infofile, 'w')
        try:
            pickle.dump(d, fh)
        finally:
            fh.close()

    def cancel(self):
        """Stop the download thread (if one was started) and remove the
        resume info file."""
        if hasattr(self, 'downloader'):
            self.downloader.cancel()
        try:
            os.unlink(config.DOWNLOAD_DIR + "/" + self.infofile)
        except OSError:
            pass  # already removed (e.g. the download had finished)

    def pause(self):
        """Pause the download thread and checkpoint the paused state."""
        if hasattr(self, 'downloader'):
            self.downloader.pause()
        self.status = downloadinterface.D_PAUSED
        self.update_status()

    def create_fxd(self):
        """Write <destination>.fxd so Freevo's video browser shows the
        feed title and description for the downloaded file."""
        fxd = FxdImdb()
        # strip html tags from the rss description
        new_description = re.sub('<[^<>]+>', '', self.description)
        fxd.setFxdFile(self.destination + '.fxd', overwrite=True)
        video = makeVideo('file', 'f1', os.path.basename(self.destination))
        fxd.setVideo(video)
        fxd.info['tagline'] = ''
        fxd.info['plot'] = fxd.str2XML(new_description)
        fxd.info['runtime'] = None
        fxd.title = self.title.replace('"', '')
        fxd.writeFxd()
diff -X diffexclude -Naur freevo/src/plugins/headlines.py
freevo-new/src/plugins/headlines.py
--- freevo/src/plugins/headlines.py 2003-12-06 08:47:28.000000000 -0500
+++ freevo-new/src/plugins/headlines.py 2004-06-16 23:19:57.000000000 -0400
@@ -96,7 +96,7 @@
import config, menu, rc, plugin, skin, osd, util
from gui.PopupBox import PopupBox
from item import Item
-
+from util.headlinesfetcher import headlinesfetcher
#get the singletons so we get skin info and access the osd
skin = skin.get_singleton()
@@ -189,56 +189,15 @@
if (os.path.isfile(pfile) == 0 or \
(abs(time.time() - os.path.getmtime(pfile)) > MAX_HEADLINE_AGE)):
#print 'Fresh Headlines'
- headlines = self.fetchheadlinesfromurl()
+ popup = PopupBox(text=_('Fetching headlines...'))
+ popup.show()
+ headlines = headlinesfetcher((self.name, self.url))
+ popup.destroy()
else:
#print 'Cache Headlines'
headlines = util.read_pickle(pfile)
return headlines
-
- def fetchheadlinesfromurl(self):
- headlines = []
- # create Reader object
- reader = Sax2.Reader()
-
- popup = PopupBox(text=_('Fetching headlines...'))
- popup.show()
-
- # parse the document
- try:
- myfile=urllib.urlopen(self.url)
- doc = reader.fromStream(myfile)
- items = doc.getElementsByTagName('item')
- for item in items:
- title = ''
- link = ''
- description = ''
-
- if item.hasChildNodes():
- for c in item.childNodes:
- if c.localName == 'title':
- title = c.firstChild.data
- if c.localName == 'link':
- link = c.firstChild.data
- if c.localName == 'description':
- description = c.firstChild.data
- if title:
- headlines.append((title, link, description))
-
- except:
- #unreachable or url error
- print 'HEADLINES ERROR: could not open %s' % self.url
- pass
-
- #write the file
- if len(headlines) > 0:
- pfile = os.path.join(self.cachedir, 'headlines-%i' % self.location_index)
- util.save_pickle(headlines, pfile)
-
- popup.destroy()
- return headlines
-
-
def show_details(self, arg=None, menuw=None):
ShowHeadlineDetails(arg)
@@ -247,7 +206,10 @@
headlines = []
rawheadlines = []
rawheadlines = self.getsiteheadlines()
- for title, link, description in rawheadlines:
+ for item in rawheadlines:
+ title = item[0]
+ link = item[1]
+ description = item[2]
mi = menu.MenuItem('%s' % title, self.show_details, 0)
mi.arg = (mi, menuw)
mi.link = link
diff -X diffexclude -Naur freevo/src/util/downloadinterface.py
freevo-new/src/util/downloadinterface.py
--- freevo/src/util/downloadinterface.py 1969-12-31 19:00:00.000000000 -0500
+++ freevo-new/src/util/downloadinterface.py 2004-06-16 23:02:17.000000000 -0400
@@ -0,0 +1,116 @@
+import config
+import os
+import glob
+import pickle
+import string
+import re
+#import plugins.download
+import md5
+
+D_ERROR = 0
+D_READY = 1
+D_DOWNLOADING = 2
+D_PAUSED = 3
+D_DONE = 4
+
def list_downloads():
    """Return a DownloadInfo for every pending .dwd file.

    There's no good mechanism for modules to communicate outside of
    sending events, so this hack shares the download listing with other
    modules through pickled files in DOWNLOAD_DIR.
    """
    ret = []
    for p in glob.glob(config.DOWNLOAD_DIR + "/*.dwd"):
        f = open(p)
        try:
            ret.append(pickle.load(f))
        finally:
            # the original leaked one file handle per download
            f.close()
    return ret
+
def queue_download(url, title="", description="", genre="", image=""):
    """Drop a pickled DownloadInfo request (.qd file) into DOWNLOAD_DIR
    where the download daemon will pick it up on its next poll."""
    info = DownloadInfo(md5.new(url).hexdigest(), url, title,
                        description, genre, image)
    safe_name = re.sub('\W', '_', title)[0:15] + '.qd'
    fh = open(config.DOWNLOAD_DIR + "/" + safe_name, 'w')
    pickle.dump(info, fh)
    fh.close()
+
def pop_all_queued():
    """Return all pending .qd download requests as DownloadInfo objects,
    removing the request files as they are consumed."""
    ret = []
    for p in glob.glob(config.DOWNLOAD_DIR + "/*.qd"):
        f = open(p)
        try:
            ret.append(pickle.load(f))
        finally:
            # the original leaked the handle before unlinking
            f.close()
        os.unlink(p)
    return ret
+
def cancel(id):
    """Ask the daemon to cancel download *id* by dropping an empty
    <id>.cancel marker file in DOWNLOAD_DIR."""
    marker = config.DOWNLOAD_DIR + "/" + id + ".cancel"
    open(marker, 'w').close()
+
def resume(id):
    """Ask the daemon to resume download *id* by dropping an empty
    <id>.resume marker file in DOWNLOAD_DIR."""
    marker = config.DOWNLOAD_DIR + "/" + id + ".resume"
    open(marker, 'w').close()
+
def pop_all_cancelled():
    """Collect and remove every *.cancel marker file, returning the
    download ids they name."""
    ids = []
    for path in glob.glob(config.DOWNLOAD_DIR + "/*.cancel"):
        ids.append(re.search(r'/(\w+)\.cancel$', path).group(1))
        os.unlink(path)
    return ids
+
def pop_all_resumed():
    """Collect and remove every *.resume marker file, returning the
    download ids they name."""
    ids = []
    for path in glob.glob(config.DOWNLOAD_DIR + "/*.resume"):
        ids.append(re.search(r'/(\w+)\.resume$', path).group(1))
        os.unlink(path)
    return ids
+
def pause(id):
    """Ask the daemon to pause download *id* by dropping an empty
    <id>.pause marker file in DOWNLOAD_DIR."""
    marker = config.DOWNLOAD_DIR + "/" + id + ".pause"
    open(marker, 'w').close()
+
def pop_all_paused():
    """Collect and remove every *.pause marker file, returning the
    download ids they name."""
    ids = []
    for path in glob.glob(config.DOWNLOAD_DIR + "/*.pause"):
        ids.append(re.search(r'/(\w+)\.pause$', path).group(1))
        os.unlink(path)
    return ids
+
class DownloadInfo:
    """Plain record describing one download.

    Instances are pickled into DOWNLOAD_DIR (.dwd / .qd files) so the
    daemon, the web frontend and the rss poller can share state.
    """

    def __init__(self, id, url, title="", description="", genre="",
                 image="", date="", status="", destination="", size=0,
                 sofar=0, error_msg='', site_name=''):
        # identity
        self.id = id
        self.url = url
        self.site_name = site_name
        # presentation metadata
        self.title = title
        self.description = description
        self.genre = genre
        self.image = image
        self.date = date
        # transfer state
        self.status = status
        self.destination = destination
        self.size = size
        self.sofar = sofar
        self.error_msg = error_msg
+
def mime2ext(mime):
    """Guess a filename extension from a mime type string.

    !! hack alert !!
    When downloading we might get a file with no extension, so try to
    guess one based on the mimetype we were sent so that freevo can
    figure out what to play it with.  Returns 0 when no guess is
    possible.

    Fixed: the original used ``mime.find(x)`` as a boolean -- but
    find() returns -1 (truthy!) when the substring is *absent* -- and
    even called the nonexistent ``mime.search``.  Substring membership
    tests are what was intended.
    """
    if mime == 'video/quicktime':
        return 'mov'
    elif mime == 'video/mp4':
        return 'mp4'
    elif 'real' in mime:
        return 'rm'
    elif 'ogg' in mime:
        return 'ogg'
    elif mime == 'audio/mpeg' or 'mp3' in mime:
        return 'mp3'
    elif 'mpg' in mime or 'mpeg' in mime:
        return 'mpg'
    elif mime == 'audio/wav':
        return 'wav'
    elif mime == 'audio/x-ms-wma':
        return 'wma'
    elif 'ms' in mime:
        return 'wmv'
    return 0
+
diff -X diffexclude -Naur freevo/src/util/downloadmanager.py
freevo-new/src/util/downloadmanager.py
--- freevo/src/util/downloadmanager.py 1969-12-31 19:00:00.000000000 -0500
+++ freevo-new/src/util/downloadmanager.py 2004-06-16 23:03:08.000000000 -0400
@@ -0,0 +1,86 @@
+# downloadmanager.py
+# akb 4/04
+'''
+Threaded downloader, based on
+
+http://starship.python.net/crew/aahz/OSCON2001/BruteThreadSpider.py
+
+todo:
+Subclassed urllib to allow resume's thanks to Chris Moffit
+http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/83208
+
+'''
+
+import os
+import sys
+import string
+import urllib2
+import urlparse
+import htmllib
+import formatter
+from cStringIO import StringIO
+import threading
+import time
+
+DEBUG = 0
+
class Downloader(threading.Thread):
    """Background thread that fetches one URL into a local file.

    The owner polls the attributes: ``sofar`` (bytes written), ``size``
    (Content-Length, 0 when unknown), ``done``, ``error`` and
    ``error_msg``.  ``pause()`` asks the copy loop to stop but keeps
    the partial file; ``cancel()`` stops it and deletes the file.
    """

    def __init__(self, URL, fname):
        # state: 0 ready/downloading, 1 done, 2 pause, 3 cancel
        self.state = 0
        self.sofar = 0
        self.URL = URL
        self.fname = fname
        self.done = 0
        self.error = 0
        self.error_msg = ''
        self.size = 0
        self.request = urllib2.Request(url=self.URL)
        # identify ourselves instead of the default python user agent
        v = string.split(sys.version)[0]
        self.request.add_header('User-Agent', 'freevo python ' + v)
        threading.Thread.__init__(self)

    def run(self):
        try:
            self.page = urllib2.urlopen(self.request)
        except Exception:
            self.error = 1
            self.error_msg = "downloadmanager.run: Couldn't open url " + self.URL
            return

        # Content-Length is optional; 0 means "unknown"
        self.size = self.page.info().getheader('Content-Length')
        if not self.size:
            self.size = 0
        else:
            self.size = int(self.size)

        try:
            # 'wb': the original's text mode would corrupt binary media
            # on platforms that translate line endings
            self.fhandle = open(self.fname, 'wb')
        except Exception:
            self.error = 1
            self.error_msg = ("downloadmanager.run: Couldn't open local file "
                              + self.fname)
            self.page.close()
            return

        try:
            # copy in 16k chunks until finished or pause/cancel requested
            while not self.state:
                buf = self.page.read(16384)
                if not buf:
                    break
                self.sofar += len(buf)
                self.fhandle.write(buf)
        except Exception:
            # str() the byte count: the original concatenated an int
            # and raised TypeError while building this message
            self.error_msg = ("downloadmanager.run: error while downloading "
                              + self.URL + " only got " + str(self.sofar))
            self.error = 1
            self.page.close()
            self.fhandle.close()
            return

        self.page.close()
        self.fhandle.close()
        if self.state == 3:
            os.unlink(self.fname)
        self.done = 1

    def pause(self):
        """Ask the copy loop to stop, keeping the partial file."""
        self.state = 2

    def cancel(self):
        """Ask the copy loop to stop and delete the partial file."""
        self.state = 3
+
+
+
diff -X diffexclude -Naur freevo/src/util/headlinesfetcher.py
freevo-new/src/util/headlinesfetcher.py
--- freevo/src/util/headlinesfetcher.py 1969-12-31 19:00:00.000000000 -0500
+++ freevo-new/src/util/headlinesfetcher.py 2004-06-16 23:17:21.000000000 -0400
@@ -0,0 +1,45 @@
+import urllib
+import util.misc
+from xml.dom.ext.reader import Sax2
+
def headlinesfetcher(feed):
    """Fetch an RSS feed and return its items as a list of tuples.

    *feed* is a ``(site_name, url)`` pair.  Each returned tuple is
    ``(title, link, description, enclosure_url, enclosure_mime,
    enclosure_length, site_name)``; the enclosure fields are '' when an
    item has no enclosure.  Returns [] on any fetch or parse problem.
    """
    headlines = []
    site_name = feed[0]
    url = feed[1]

    # create Reader object
    reader = Sax2.Reader()

    try:
        myfile = urllib.urlopen(url)
    except Exception:
        print("headlines_fetcher: couldn't get " + url)
        return []
    try:
        # parsing was outside the try in the original, so one malformed
        # feed crashed the caller (e.g. the download daemon's poll)
        doc = reader.fromStream(myfile)
    except Exception:
        print("headlines_fetcher: couldn't parse " + url)
        return []
    for item in doc.getElementsByTagName('item'):
        title = ''
        link = ''
        description = ''
        enclosure_url = ''
        enclosure_mime = ''
        enclosure_length = ''
        if item.hasChildNodes():
            for c in item.childNodes:
                if c.localName == 'enclosure':
                    # <enclosure url=.. type=.. length=../> is an EMPTY
                    # element carrying its data in attributes: the
                    # original's firstChild guard meant enclosures were
                    # never read at all
                    attrs = c.attributes
                    if attrs.getNamedItem('url'):
                        enclosure_url = attrs.getNamedItem('url').nodeValue
                    if attrs.getNamedItem('type'):
                        enclosure_mime = attrs.getNamedItem('type').nodeValue
                    if attrs.getNamedItem('length'):
                        enclosure_length = attrs.getNamedItem('length').nodeValue
                elif c.firstChild:
                    if c.localName == 'title':
                        title = c.firstChild.data
                    elif c.localName == 'link':
                        link = c.firstChild.data
                    elif c.localName == 'description':
                        description = c.firstChild.data
        if title:
            headlines.append((title, link, description, enclosure_url,
                              enclosure_mime, enclosure_length, site_name))
    return headlines
diff -X diffexclude -Naur freevo/src/util/misc.py freevo-new/src/util/misc.py
--- freevo/src/util/misc.py 2004-04-20 13:13:34.000000000 -0400
+++ freevo-new/src/util/misc.py 2004-06-14 15:44:26.000000000 -0400
@@ -75,7 +75,7 @@
import string, re
import copy
import htmlentitydefs
-
+import urllib
# Configuration file. Determines where to look for AVI/MP3 files, etc
import config
@@ -556,3 +556,13 @@
except KeyError:
return getattr(self.__obj, name)
+
+# subclass urllib in order to set the user-agent string
+
# Subclass FancyURLopener (and install it as urllib's global opener) so
# that plain urllib requests identify themselves as Freevo instead of
# the default Python user-agent string.
class AppURLopener(urllib.FancyURLopener):
    def __init__(self, *args):
        # self.version must be assigned before the base __init__ runs,
        # which is where the User-Agent header is composed from it
        self.version = "freevo python " + sys.version.split()[0]
        urllib.FancyURLopener.__init__(self, *args)

urllib._urlopener = AppURLopener()
diff -X diffexclude -Naur freevo/src/www/htdocs/download.rpy
freevo-new/src/www/htdocs/download.rpy
--- freevo/src/www/htdocs/download.rpy 1969-12-31 19:00:00.000000000 -0500
+++ freevo-new/src/www/htdocs/download.rpy 2004-06-16 23:01:23.000000000 -0400
@@ -0,0 +1,141 @@
+#!/usr/bin/python
+#if 0 /*
+#
+# Web interface for downloading media
+#
+# akb 2004/4/8
+#endif
+
+import re
+import sys
+import event
+import rc
+import plugin
+from www.web_types import HTMLResource, FreevoResource
+import util.downloadinterface as downloadinterface
+
class DownloadResource(FreevoResource):
    """Web frontend (download.rpy) for the download daemon plugin.

    Renders a form to submit new downloads plus one table per download
    state (downloading / queued / error / paused) with cancel, pause
    and resume checkboxes.  All actions go through util.downloadinterface
    marker/pickle files, so they only take effect on the daemon's next
    poll -- hence the warning at the top of the page.
    """

    def _render(self,request):
        fv = HTMLResource()
        form = request.args

        fv.printHeader(_('Download'), 'styles/main.css', selected=_('Download'))

        fv.res += "<b>Changes will not appear immediately, give Freevo at least 5 seconds</b>"
        # a submitted "new download" form queues a request for the daemon
        if fv.formValue(form, 'new_url'):
            downloadinterface.queue_download(fv.formValue(form,'new_url'),
                                             fv.formValue(form,'new_title'),
                                             fv.formValue(form,'new_description'),
                                             fv.formValue(form,'new_genre'),
                                             fv.formValue(form,'new_image'))
        fv.res += """
        <form method="post" action="download.rpy" id="DownloadForm">
        new download<p>
        video url: <input name="new_url"><br>
        title: <input name="new_title"><br>
        description: <input name="new_description"><br>
        genre: <input name="new_genre"><br>
        image url: <input name="new_image"><br>
        <input type="submit">
        <p>"""
        downloads = downloadinterface.list_downloads()
        if downloads:
            running = []
            paused = []
            bad = []
            queued = []
            # first apply any checkbox actions from the submitted form ...
            for d in downloads:
                if fv.formValue(form, "cancel_" + d.id) == 'on':
                    downloadinterface.cancel(d.id)
                elif fv.formValue(form, "pause_" + d.id) == 'on':
                    downloadinterface.pause(d.id)
                elif fv.formValue(form, "resume_" + d.id) == 'on':
                    downloadinterface.resume(d.id)

            # ... then bucket the downloads by state for display
            for d in downloads:
                if d.status == downloadinterface.D_DOWNLOADING:
                    running.append(d)
                elif d.status == downloadinterface.D_PAUSED:
                    paused.append(d)
                elif d.status == downloadinterface.D_ERROR:
                    bad.append(d)
                elif d.status == downloadinterface.D_READY:
                    queued.append(d)
            # NOTE(review): the four table sections below are
            # copy/paste-identical apart from the checkbox columns; a
            # shared helper would remove the duplication
            if running:
                fv.res += "DOWNLOADING"
                fv.tableOpen('border="0" cellpadding="4" cellspacing="1" width="100%"')
                fv.tableRowOpen()
                fv.tableCell(_('Cancel'))
                fv.tableCell(_('Pause'))
                fv.tableCell(_('Info'))
                fv.tableRowClose()
                for d in running:
                    fv.tableRowOpen()
                    fv.tableCell('<input type="checkbox" name="cancel_' + d.id + '">')
                    fv.tableCell('<input type="checkbox" name="pause_' + d.id + '">')
                    # strip html tags from the feed description, cap at 250 chars
                    desc = re.sub('<.*?>', '', d.description)[0:250]
                    fv.tableCell("<b>%(error_msg)s</b> <a href=\"%(url)s\">%(title)s</a>, %(sofar)d of %(size)d, %(genre)s <p>%(description)s" % {'error_msg': d.error_msg, 'url': d.url, 'title': d.title, 'sofar': d.sofar, 'size': d.size, 'genre': d.genre, 'description': desc})
                    fv.tableRowClose()
                fv.tableClose()

            if queued:
                fv.res += "QUEUED"
                fv.tableOpen('border="0" cellpadding="4" cellspacing="1" width="100%"')
                fv.tableRowOpen()
                fv.tableCell(_('Cancel'))
                fv.tableCell(_('Pause'))
                fv.tableCell(_('Download'))
                fv.tableCell(_('Info'))
                fv.tableRowClose()
                for d in queued:
                    fv.tableRowOpen()
                    fv.tableCell('<input type="checkbox" name="cancel_' + d.id + '">')
                    fv.tableCell('<input type="checkbox" name="pause_' + d.id + '">')
                    fv.tableCell('<input type="checkbox" name="resume_' + d.id + '">')
                    desc = re.sub('<.*?>', '', d.description)[0:250]
                    fv.tableCell("<b>%(error_msg)s</b> <a href=\"%(url)s\">%(title)s</a>, %(sofar)d of %(size)d, %(genre)s <p>%(description)s" % {'error_msg': d.error_msg, 'url': d.url, 'title': d.title, 'sofar': d.sofar, 'size': d.size, 'genre': d.genre, 'description': desc})
                    fv.tableRowClose()
                fv.tableClose()

            if bad:
                fv.res += "ERROR"
                fv.tableOpen('border="0" cellpadding="4" cellspacing="1" width="100%"')
                fv.tableRowOpen()
                fv.tableCell(_('Cancel'))
                fv.tableCell(_('Info'))
                fv.tableRowClose()
                for d in bad:
                    fv.tableRowOpen()
                    fv.tableCell('<input type="checkbox" name="cancel_' + d.id + '">')
                    desc = re.sub('<.*?>', '', d.description)[0:250]
                    fv.tableCell("<b>%(error_msg)s</b> <a href=\"%(url)s\">%(title)s</a>, %(sofar)d of %(size)d, %(genre)s <p>%(description)s" % {'error_msg': d.error_msg, 'url': d.url, 'title': d.title, 'sofar': d.sofar, 'size': d.size, 'genre': d.genre, 'description': desc})
                    fv.tableRowClose()
                fv.tableClose()

            if paused:
                fv.res += "PAUSED"
                fv.tableOpen('border="0" cellpadding="4" cellspacing="1" width="100%"')
                fv.tableRowOpen()
                fv.tableCell(_('Cancel'))
                fv.tableCell(_('Resume'))
                fv.tableCell(_('Info'))
                fv.tableRowClose()
                for d in paused:
                    fv.tableRowOpen()
                    fv.tableCell('<input type="checkbox" name="cancel_' + d.id + '">')
                    fv.tableCell('<input type="checkbox" name="resume_' + d.id + '">')
                    desc = re.sub('<.*?>', '', d.description)[0:250]
                    fv.tableCell("<b>%(error_msg)s</b> <a href=\"%(url)s\">%(title)s</a>, %(sofar)d of %(size)d, %(genre)s <p>%(description)s" % {'error_msg': d.error_msg, 'url': d.url, 'title': d.title, 'sofar': d.sofar, 'size': d.size, 'genre': d.genre, 'description': desc})
                    fv.tableRowClose()
                fv.tableClose()


            # NOTE(review): when there are no downloads the <form> opened
            # above is never closed -- TODO confirm/fix in a follow-up
            fv.res += """<input type="submit"></form>"""
        else:
            fv.res += "<p>No downloads"
        fv.printLinks()
        fv.printFooter()
        # NOTE(review): `String` is not imported in this file --
        # presumably injected by the rpy execution environment; verify
        return String( fv.res)

# module-level singleton picked up by the rpy machinery
resource = DownloadResource()
diff -X diffexclude -Naur freevo/src/www/web_types.py freevo-new/src/www/web_types.py
--- freevo/src/www/web_types.py 2004-03-08 19:14:35.000000000 -0500
+++ freevo-new/src/www/web_types.py 2004-06-14 17:21:51.000000000 -0400
@@ -181,6 +181,7 @@
(_('Favorites'),_('View Favorites'),'%sfavorites.rpy' %
str(strprefix)),
(_('Media Library'),_('View Media Library'),'%slibrary.rpy' %
str(strprefix)),
(_('Manual Recording'),_('Schedule a Manual
Recording'),'%smanualrecord.rpy' % str(strprefix)),
+ (_('Downloads'),_('Manage Downloads'), '%sdownload.rpy' %
str(strprefix)),
(_('Search'),_('Advanced Search Page'),'%ssearch.rpy' %
str(strprefix)),
(_('Help'),_('View Online Help and Documentation'),'%shelp/' %
str(strprefix))]
-------------------------------------------------------
This SF.Net email sponsored by Black Hat Briefings & Training.
Attend Black Hat Briefings & Training, Las Vegas July 24-29 -
digital self defense, top technical experts, no vendor pitches,
unmatched networking opportunities. Visit www.blackhat.com
_______________________________________________
Freevo-cvslog mailing list
[EMAIL PROTECTED]
https://lists.sourceforge.net/lists/listinfo/freevo-cvslog