Added full rotation support to QML interface
[feedingit] / src / rss.py
#!/usr/bin/env python2.5


# Copyright (c) 2007-2008 INdT.
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
#  This program is distributed in the hope that it will be useful,
#  but WITHOUT ANY WARRANTY; without even the implied warranty of
#  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
#  GNU Lesser General Public License for more details.
#
#  You should have received a copy of the GNU Lesser General Public License
#  along with this program.  If not, see <http://www.gnu.org/licenses/>.
#

# ============================================================================
# Name        : FeedingIt.py
# Author      : Yves Marcoz
# Version     : 0.5.4
# Description : Simple RSS Reader
# ============================================================================
from os.path import isfile, isdir
from shutil import rmtree
from os import mkdir, remove, utime
import pickle
import hashlib   # replaces the deprecated md5 module; digests are identical
import feedparser
import time
import urllib2
from BeautifulSoup import BeautifulSoup
from urlparse import urljoin

#CONFIGDIR="/home/user/.feedingit/"

def getId(string):
    return hashlib.md5(string).hexdigest()

#def getProxy():
#    import gconf
#    if gconf.client_get_default().get_bool('/system/http_proxy/use_http_proxy'):
#        port = gconf.client_get_default().get_int('/system/http_proxy/port')
#        http = gconf.client_get_default().get_string('/system/http_proxy/host')
#        proxy = urllib2.ProxyHandler( {"http":"http://%s:%s/"% (http,port)} )
#        return (True, proxy)
#    return (False, None)

# Enable proxy support for images and ArchivedArticles
#(proxy_support, proxy) = getProxy()
#if proxy_support:
#    opener = urllib2.build_opener(proxy)
#    urllib2.install_opener(opener)

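# A proxy handler can also be passed straight to updateFeed(); a minimal
# sketch (the host and port are illustrative, not shipped configuration):
#
#     proxy = urllib2.ProxyHandler({"http": "http://proxy.example.com:8080/"})
#     feed.updateFeed(configdir, expiryTime=24, proxy=proxy, imageCache=True)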

# Entry = {"title":XXX, "content":XXX, "date":XXX, "link":XXX, "images":[] }

class ImageHandler:
    # Empty stub: feeds pickled by older versions may still reference this
    # class, so it is kept around for unpickling (see loadFeed below).
    def __init__(self, configdir):
        pass
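
# On-disk layout (one directory per feed):
#   configdir + uniqueId + ".d/feed"         pickled Feed object
#   configdir + uniqueId + ".d/unread"       pickled readItems dictionary
#   configdir + uniqueId + ".d/favicon.ico"  feed icon, when it can be fetched
#   configdir + uniqueId + ".d/<id>.html"    rendered article content
#   configdir + uniqueId + ".d/<md5(url)>"   cached images (see addImage)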

class Feed:
    def __init__(self, uniqueId, name, url):
        self.titles = []
        self.entries = {}
        self.ids = []
        self.readItems = {}
        self.name = name
        self.url = url
        self.countUnread = 0
        self.updateTime = "Never"
        self.updateStamp = 0
        self.uniqueId = uniqueId
        self.etag = None
        self.modified = None

    def addImage(self, configdir, key, baseurl, url):
        filename = configdir+key+".d/"+getId(url)
        if not isfile(filename):
            try:
                f = urllib2.urlopen(urljoin(baseurl, url))
                outf = open(filename, "wb")   # images are binary data
                outf.write(f.read())
                f.close()
                outf.close()
            except:
                print "Could not download " + url
        else:
            # "Touch" the file so the expiry sweep in updateFeed keeps it
            utime(filename, None)
        return filename

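    # Image files are named after the md5 of their URL, so an image that
    # appears in several articles is downloaded and stored only once.
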
    def editFeed(self, url):
        self.url = url

    def saveFeed(self, configdir):
        if not isdir(configdir+self.uniqueId+".d"):
            mkdir(configdir+self.uniqueId+".d")
        file = open(configdir+self.uniqueId+".d/feed", "w")
        pickle.dump(self, file)
        file.close()
        self.saveUnread(configdir)

    def saveUnread(self, configdir):
        if not isdir(configdir+self.uniqueId+".d"):
            mkdir(configdir+self.uniqueId+".d")
        file = open(configdir+self.uniqueId+".d/unread", "w")
        pickle.dump(self.readItems, file)
        file.close()

    def reloadUnread(self, configdir):
        try:
            file = open(configdir+self.uniqueId+".d/unread", "r")
            self.readItems = pickle.load(file)
            file.close()
            self.countUnread = 0
            for id in self.getIds():
                # .get avoids a KeyError aborting the recount midway
                if self.readItems.get(id) == False:
                    self.countUnread = self.countUnread + 1
        except:
            pass
        return self.countUnread

    def updateFeed(self, configdir, expiryTime=24, proxy=None, imageCache=False):
        # Expiry time is in hours
        if proxy is None:
            tmp = feedparser.parse(self.url, etag=self.etag, modified=self.modified)
        else:
            tmp = feedparser.parse(self.url, etag=self.etag, modified=self.modified, handlers=[proxy])
        expiry = float(expiryTime) * 3600.

        # Check if the parse was successful; if there are no entries, do nothing
        if len(tmp["entries"]) > 0:
            # The etag and modified values should only be updated if the content was not null
            try:
                self.etag = tmp["etag"]
            except KeyError:
                self.etag = None
            try:
                self.modified = tmp["modified"]
            except KeyError:
                self.modified = None
            if not isdir(configdir+self.uniqueId+".d"):
                mkdir(configdir+self.uniqueId+".d")
            try:
                f = urllib2.urlopen(urljoin(tmp["feed"]["link"], "/favicon.ico"))
                data = f.read()
                f.close()
                outf = open(configdir+self.uniqueId+".d/favicon.ico", "wb")
                outf.write(data)
                outf.close()
                del data
            except:
                pass

            currentTime = time.time()
            tmpEntries = {}
            tmpIds = []
            for entry in tmp["entries"]:
                (dateTuple, date) = self.extractDate(entry)
                try:
                    entry["title"]
                except KeyError:
                    entry["title"] = "No Title"
                try:
                    entry["link"]
                except KeyError:
                    entry["link"] = ""
                tmpEntry = {"title":entry["title"], "content":self.extractContent(entry),
                            "date":date, "dateTuple":dateTuple, "link":entry["link"], "images":[] }
                id = self.generateUniqueId(tmpEntry)

                if not id in self.ids:
                    # New entry: render it to an HTML file, caching images if requested
                    soup = BeautifulSoup(self.getArticle(tmpEntry))
                    images = soup('img')
                    baseurl = tmpEntry["link"]
                    if imageCache:
                        for img in images:
                            try:
                                filename = self.addImage(configdir, self.uniqueId, baseurl, img['src'])
                                img['src'] = filename
                                tmpEntry["images"].append(filename)
                            except:
                                print "Error downloading image %s" % img
                    tmpEntry["contentLink"] = configdir+self.uniqueId+".d/"+id+".html"
                    file = open(tmpEntry["contentLink"], "w")
                    file.write(soup.prettify())
                    file.close()
                    tmpEntries[id] = tmpEntry
                    tmpIds.append(id)
                    if id not in self.readItems:
                        self.readItems[id] = False
                else:
                    # Known entry: "touch" its cached files so the expiry
                    # sweep below does not delete them
                    try:
                        filename = configdir+self.uniqueId+".d/"+id+".html"
                        file = open(filename, "a")
                        utime(filename, None)
                        file.close()
                        for image in self.entries[id]["images"]:
                            file = open(image, "a")
                            utime(image, None)
                            file.close()
                    except:
                        pass
                    tmpEntries[id] = self.entries[id]
                    tmpIds.append(id)

            # Purge entries that have disappeared from the feed upstream
            oldIds = self.ids[:]
            for entryId in oldIds:
                if not entryId in tmpIds:
                    try:
                        articleTime = time.mktime(self.entries[entryId]["dateTuple"])
                        if (currentTime - articleTime > 2*expiry):
                            self.removeEntry(entryId)
                            continue
                        if (currentTime - articleTime > expiry) and (self.isEntryRead(entryId)):
                            # Entry is older than the expiry time, and already read
                            self.removeEntry(entryId)
                            continue
                        tmpEntries[entryId] = self.entries[entryId]
                        tmpIds.append(entryId)
                    except:
                        print "Error purging old articles %s" % entryId
                        self.removeEntry(entryId)

            self.entries = tmpEntries
            self.ids = tmpIds
            tmpUnread = 0

            # Recount unread items and drop read-state for entries that no longer exist
            ids = self.ids[:]
            for id in ids:
                if id not in self.readItems:
                    self.readItems[id] = False
                if self.readItems[id] == False:
                    tmpUnread = tmpUnread + 1
            keys = self.readItems.keys()
            for id in keys:
                if not id in self.ids:
                    del self.readItems[id]
            del tmp
            self.countUnread = tmpUnread
            self.updateTime = time.asctime()
            self.updateStamp = currentTime
            self.saveFeed(configdir)
            # Sweep the cache directory: delete any file (article HTML, image)
            # whose mtime has not been refreshed for three expiry periods
            from glob import glob
            from os import stat
            for file in glob(configdir+self.uniqueId+".d/*"):
                stats = stat(file)
                # stats[8] is st_mtime, the last-modification time
                lastmodDate = stats[8]
                expDate = time.time() - expiry*3
                if expDate > lastmodDate:
                    try:
                        remove(file)
                    except OSError:
                        print 'Could not remove', file

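    # Net effect of updateFeed: entries still present upstream are always
    # kept; entries gone upstream survive one expiry period if read and two
    # if unread; cached files untouched for three expiry periods are deleted.
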
    def extractContent(self, entry):
        content = ""
        if entry.has_key('summary'):
            content = entry.get('summary', '')
        if entry.has_key('content'):
            if len(entry.content[0].value) > len(content):
                content = entry.content[0].value
        if content == "":
            content = entry.get('description', '')
        return content

    def extractDate(self, entry):
        if entry.has_key("updated_parsed"):
            date1 = entry["updated_parsed"]
            date = time.strftime("%a, %d %b %Y %H:%M:%S", entry["updated_parsed"])
        elif entry.has_key("published_parsed"):
            date1 = entry["published_parsed"]
            date = time.strftime("%a, %d %b %Y %H:%M:%S", entry["published_parsed"])
        else:
            date1 = ""
            date = ""
        return (date1, date)

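    # The display string produced above looks like "Sun, 06 Jun 2010 14:30:00".
    # generateUniqueId (below) hashes it together with the title, so an entry
    # keeps the same id across feed updates.
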
    def setEntryRead(self, id):
        if self.readItems[id] == False:
            self.countUnread = self.countUnread - 1
            self.readItems[id] = True

    def setEntryUnread(self, id):
        if self.readItems[id] == True:
            self.countUnread = self.countUnread + 1
            self.readItems[id] = False

    def isEntryRead(self, id):
        # Check if an entry is read; return False if the read
        # status of an entry is unknown (id not in readItems)
        return self.readItems.get(id, False)

    def getTitle(self, id):
        return self.entries[id]["title"]

    def getContentLink(self, id):
        if self.entries[id].has_key("contentLink"):
            return self.entries[id]["contentLink"]
        return self.entries[id]["link"]

    def getExternalLink(self, id):
        return self.entries[id]["link"]

    def getDate(self, id):
        return self.entries[id]["date"]

    def getDateTuple(self, id):
        return self.entries[id]["dateTuple"]

    def getUniqueId(self, index):
        return self.ids[index]

    def generateUniqueId(self, entry):
        # md5 of date + title; two entries sharing both would collide
        return getId(entry["date"] + entry["title"])

    def getUpdateTime(self):
        return self.updateTime

    def getUpdateStamp(self):
        try:
            return self.updateStamp
        except AttributeError:
            # Feeds pickled by older versions have no updateStamp
            self.updateStamp = 0
            return self.updateStamp

    def getEntries(self):
        return self.entries

    def getIds(self):
        return self.ids

    def getNextId(self, id):
        return self.ids[(self.ids.index(id)+1) % self.getNumberOfEntries()]

    def getPreviousId(self, id):
        return self.ids[(self.ids.index(id)-1) % self.getNumberOfEntries()]

    def getNumberOfUnreadItems(self):
        return self.countUnread

    def getNumberOfEntries(self):
        return len(self.ids)

    def getItem(self, id):
        try:
            return self.entries[id]
        except KeyError:
            return []

    def getImages(self, id):
        return self.entries[id]["images"]

    def getContent(self, id):
        if self.entries[id].has_key("contentLink"):
            file = open(self.entries[id]["contentLink"])
            content = file.read()
            file.close()
            return content
        return self.entries[id]["content"]

    def removeEntry(self, id):
        if self.entries.has_key(id):
            entry = self.entries[id]
            if entry.has_key("contentLink"):
                try:
                    remove(entry["contentLink"])
                except OSError:
                    print "File not found for deletion: %s" % entry["contentLink"]
            del self.entries[id]
        else:
            print "Entries has no %s key" % id
        if id in self.ids:
            self.ids.remove(id)
        else:
            print "Ids has no %s key" % id
        if self.readItems.has_key(id):
            if self.readItems[id] == False:
                self.countUnread = self.countUnread - 1
            del self.readItems[id]
        else:
            print "ReadItems has no %s key" % id

    def getArticle(self, entry):
        # Render the entry as a standalone XHTML page; updateFeed writes the
        # result to the entry's contentLink file for the UI to display
        title = entry['title']
        content = entry["content"]
        link = entry['link']
        date = entry["date"]

        text = '<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">'
        text += "<html><head><title>" + title + "</title>"
        text += '<meta http-equiv="Content-Type" content="text/html; charset=UTF-8"/>\n'
        text += '</head><body><div><a href="' + link + '">' + title + "</a>"
        text += "<br /><small><i>Date: " + date + "</i></small></div>"
        text += "<br /><br />"
        text += content
        text += "</body></html>"
        return text

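# ArchivedArticles stores articles the user explicitly saved. Unlike Feed,
# its updateFeed() does not parse an RSS document: it downloads the full page
# behind each saved link and caches it, with its images, for offline reading.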
class ArchivedArticles(Feed):
    def addArchivedArticle(self, title, link, updated_parsed, configdir):
        entry = {}
        entry["title"] = title
        entry["link"] = link
        entry["summary"] = '<a href="' + link + '">' + title + "</a>"
        entry["updated_parsed"] = updated_parsed
        entry["time"] = time.time()
        (dateTuple, date) = self.extractDate(entry)
        tmpEntry = {"title":entry["title"], "content":self.extractContent(entry),
                    "date":date, "dateTuple":dateTuple, "link":entry["link"], "images":[],
                    "downloaded":False, "time":entry["time"] }
        id = self.generateUniqueId(tmpEntry)
        self.entries[id] = tmpEntry
        self.ids.append(id)
        self.readItems[id] = False
        self.countUnread = self.countUnread + 1
        self.saveFeed(configdir)
        self.saveUnread(configdir)

    def updateFeed(self, configdir, expiryTime=24, proxy=None, imageCache=False):
        # Arguments are accepted for interface compatibility with Feed.updateFeed
        # but are not used here: every saved article is always fully downloaded
        for id in self.getIds():
            entry = self.entries[id]
            if not entry["downloaded"]:
                try:
                    f = urllib2.urlopen(entry["link"])
                    html = f.read()
                    f.close()
                    soup = BeautifulSoup(html)
                    images = soup('img')
                    baseurl = entry["link"]
                    for img in images:
                        filename = self.addImage(configdir, self.uniqueId, baseurl, img['src'])
                        img['src'] = filename
                    entry["contentLink"] = configdir+self.uniqueId+".d/"+id+".html"
                    file = open(entry["contentLink"], "w")
                    file.write(soup.prettify())
                    file.close()
                    if len(entry["content"]) > 0:
                        entry["downloaded"] = True
                        entry["time"] = time.time()
                        self.setEntryUnread(id)
                except:
                    pass
        self.updateTime = time.asctime()
        self.updateStamp = time.time()
        self.saveFeed(configdir)

    def purgeReadArticles(self):
        # Iterate over a copy: removeEntry mutates self.ids
        ids = self.getIds()[:]
        for id in ids:
            if self.isEntryRead(id):
                self.removeEntry(id)

    def removeArticle(self, id):
        self.removeEntry(id)

    def getArticle(self, index):
        self.setEntryRead(index)
        content = self.getContent(index)
        return content

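# A minimal usage sketch for the Listing API below (the path and feed URL
# are illustrative):
#
#     listing = Listing('/home/user/.feedingit/')
#     if listing.addFeed("Example Feed", "http://example.com/rss.xml"):
#         listing.updateFeeds(expiryTime=24, imageCache=False)
#     for key in listing.getListOfFeeds():
#         print listing.getFeedTitle(key), listing.getFeedNumberOfUnreadItems(key)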
class Listing:
    # Lists all the feeds in a dictionary, and exposes the data
    def __init__(self, configdir):
        self.configdir = configdir
        if isfile(self.configdir+"feeds.pickle"):
            file = open(self.configdir+"feeds.pickle")
            self.listOfFeeds = pickle.load(file)
            file.close()
        else:
            self.listOfFeeds = {getId("Maemo News"):{"title":"Maemo News", "url":"http://maemo.org/news/items.xml", "unread":0, "updateTime":"Never"}, }
        # "font" and "feedingit-order" are configuration entries stored in the
        # same pickle as the feeds; they must not be treated as feed keys
        if self.listOfFeeds.has_key("font"):
            del self.listOfFeeds["font"]
        if self.listOfFeeds.has_key("feedingit-order"):
            self.sortedKeys = self.listOfFeeds["feedingit-order"]
        else:
            self.sortedKeys = self.listOfFeeds.keys()
            if "font" in self.sortedKeys:
                self.sortedKeys.remove("font")
            self.sortedKeys.sort(key=lambda obj: self.getFeedTitle(obj))

    def addArchivedArticle(self, key, index):
        feed = self.getFeed(key)
        title = feed.getTitle(index)
        link = feed.getExternalLink(index)
        date = feed.getDateTuple(index)
        if not self.listOfFeeds.has_key("ArchivedArticles"):
            self.listOfFeeds["ArchivedArticles"] = {"title":"Archived Articles", "url":"", "unread":0, "updateTime":"Never"}
            self.sortedKeys.append("ArchivedArticles")
            self.saveConfig()
        archFeed = self.getFeed("ArchivedArticles")
        archFeed.addArchivedArticle(title, link, date, self.configdir)
        # The new unread count belongs to the archive feed, not the source feed
        self.listOfFeeds["ArchivedArticles"]["unread"] = archFeed.getNumberOfUnreadItems()

    def loadFeed(self, key):
        if isfile(self.configdir+key+".d/feed"):
            file = open(self.configdir+key+".d/feed")
            feed = pickle.load(file)
            file.close()
            # Migrate attributes that feeds pickled by older versions may lack
            try:
                feed.uniqueId
            except AttributeError:
                feed.uniqueId = getId(feed.name)
            try:
                del feed.imageHandler
            except AttributeError:
                pass
            try:
                feed.etag
            except AttributeError:
                feed.etag = None
            try:
                feed.modified
            except AttributeError:
                feed.modified = None
        else:
            title = self.listOfFeeds[key]["title"]
            url = self.listOfFeeds[key]["url"]
            if key == "ArchivedArticles":
                feed = ArchivedArticles("ArchivedArticles", title, url)
            else:
                feed = Feed(getId(title), title, url)
        return feed

    def updateFeeds(self, expiryTime=24, proxy=None, imageCache=False):
        for key in self.getListOfFeeds():
            feed = self.loadFeed(key)
            feed.updateFeed(self.configdir, expiryTime, proxy, imageCache)
            self.listOfFeeds[key]["unread"] = feed.getNumberOfUnreadItems()
            self.listOfFeeds[key]["updateTime"] = feed.getUpdateTime()
            self.listOfFeeds[key]["updateStamp"] = feed.getUpdateStamp()

    def updateFeed(self, key, expiryTime=24, proxy=None, imageCache=False):
        feed = self.getFeed(key)
        feed.updateFeed(self.configdir, expiryTime, proxy, imageCache)
        self.listOfFeeds[key]["unread"] = feed.getNumberOfUnreadItems()
        self.listOfFeeds[key]["updateTime"] = feed.getUpdateTime()
        self.listOfFeeds[key]["updateStamp"] = feed.getUpdateStamp()

    def editFeed(self, key, title, url):
        self.listOfFeeds[key]["title"] = title
        self.listOfFeeds[key]["url"] = url
        feed = self.loadFeed(key)
        feed.editFeed(url)
        feed.saveFeed(self.configdir)   # persist the new url into the feed pickle

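    # Note: editFeed changes listOfFeeds in memory only; saveConfig() persists
    # those values to feeds.pickle (addFeed and removeFeed call it themselves).
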
    def getFeed(self, key):
        try:
            feed = self.loadFeed(key)
            feed.reloadUnread(self.configdir)
        except:
            # If the feed file gets corrupted, we need to reset the feed.
            import traceback
            traceback.print_exc()
            import dbus
            bus = dbus.SessionBus()
            remote_object = bus.get_object("org.freedesktop.Notifications", # Connection name
                               "/org/freedesktop/Notifications" # Object's path
                              )
            iface = dbus.Interface(remote_object, 'org.freedesktop.Notifications')
            iface.SystemNoteInfoprint("Error opening feed %s, it has been reset." % self.getFeedTitle(key))
            if isdir(self.configdir+key+".d/"):
                rmtree(self.configdir+key+".d/")
            feed = self.loadFeed(key)
        return feed

    def getFeedUpdateTime(self, key):
        if not self.listOfFeeds[key].has_key("updateTime"):
            self.listOfFeeds[key]["updateTime"] = "Never"
        return self.listOfFeeds[key]["updateTime"]

    def getFeedUpdateStamp(self, key):
        if not self.listOfFeeds[key].has_key("updateStamp"):
            self.listOfFeeds[key]["updateStamp"] = 0
        return self.listOfFeeds[key]["updateStamp"]

    def getFeedNumberOfUnreadItems(self, key):
        if not self.listOfFeeds[key].has_key("unread"):
            self.listOfFeeds[key]["unread"] = 0
        return self.listOfFeeds[key]["unread"]

    def updateUnread(self, key, unreadItems):
        self.listOfFeeds[key]["unread"] = unreadItems

    def getFeedTitle(self, key):
        return self.listOfFeeds[key]["title"]

    def getFeedUrl(self, key):
        return self.listOfFeeds[key]["url"]

    def getListOfFeeds(self):
        return self.sortedKeys

    def getFavicon(self, key):
        filename = self.configdir+key+".d/favicon.ico"
        if isfile(filename):
            return filename
        else:
            return False

    def addFeed(self, title, url):
        if not self.listOfFeeds.has_key(getId(title)):
            self.listOfFeeds[getId(title)] = {"title":title, "url":url, "unread":0, "updateTime":"Never"}
            self.sortedKeys.append(getId(title))
            self.saveConfig()
            return True
        else:
            return False

    def removeFeed(self, key):
        del self.listOfFeeds[key]
        self.sortedKeys.remove(key)
        if isdir(self.configdir+key+".d/"):
            rmtree(self.configdir+key+".d/")
        self.saveConfig()

    def saveConfig(self):
        self.listOfFeeds["feedingit-order"] = self.sortedKeys
        file = open(self.configdir+"feeds.pickle", "w")
        pickle.dump(self.listOfFeeds, file)
        file.close()

    def moveUp(self, key):
        # Swap with the previous feed; at index 0 this wraps to the end,
        # since Python's index -1 is the last element
        index = self.sortedKeys.index(key)
        self.sortedKeys[index] = self.sortedKeys[index-1]
        self.sortedKeys[index-1] = key

    def moveDown(self, key):
        # Swap with the next feed, wrapping past the end via the modulo
        index = self.sortedKeys.index(key)
        index2 = (index+1) % len(self.sortedKeys)
        self.sortedKeys[index] = self.sortedKeys[index2]
        self.sortedKeys[index2] = key

if __name__ == "__main__":
    # Ad-hoc test: print the update time of feeds whose id starts with 'd8'
    listing = Listing('/home/user/.feedingit/')
    keys = listing.getListOfFeeds()[:]
    for key in keys:
        if key.startswith('d8'):
            print listing.getFeedUpdateTime(key)