Adding WIP version for MeeGo Harmattan
[feedingit] / src / rss.py
#!/usr/bin/env python2.5


# Copyright (c) 2007-2008 INdT.
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
#  This program is distributed in the hope that it will be useful,
#  but WITHOUT ANY WARRANTY; without even the implied warranty of
#  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
#  GNU Lesser General Public License for more details.
#
#  You should have received a copy of the GNU Lesser General Public License
#  along with this program.  If not, see <http://www.gnu.org/licenses/>.
#

# ============================================================================
# Name        : FeedingIt.py
# Author      : Yves Marcoz
# Version     : 0.5.4
# Description : Simple RSS Reader
# ============================================================================

from os.path import isfile, isdir
from shutil import rmtree
from os import mkdir, remove, utime
import pickle
import md5
import feedparser
import time
import urllib2
from BeautifulSoup import BeautifulSoup
from urlparse import urljoin
import logging
logger = logging.getLogger(__name__)

#CONFIGDIR="/home/user/.feedingit/"

def getId(string):
    if isinstance(string, unicode):
        string = string.encode("utf-8")  # md5 cannot digest non-ASCII unicode
    return md5.new(string).hexdigest()

#def getProxy():
#    import gconf
#    if gconf.client_get_default().get_bool('/system/http_proxy/use_http_proxy'):
#        port = gconf.client_get_default().get_int('/system/http_proxy/port')
#        http = gconf.client_get_default().get_string('/system/http_proxy/host')
#        proxy = urllib2.ProxyHandler( {"http":"http://%s:%s/"% (http,port)} )
#        return (True, proxy)
#    return (False, None)

# Enable proxy support for images and ArchivedArticles
#(proxy_support, proxy) = getProxy()
#if proxy_support:
#    opener = urllib2.build_opener(proxy)
#    urllib2.install_opener(opener)

# Entry = {"title":XXX, "content":XXX, "date":XXX, "link":XXX, "images":[] }
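# A filled-in entry might look like this (sample values are illustrative, not
# taken from a real feed):
# {"title": "FeedingIt 0.5.4 released", "content": "<p>...</p>",
#  "date": "Mon, 01 Jan 2007 12:00:00", "dateTuple": <a time.struct_time>,
#  "link": "http://example.com/post", "images": [],
#  "contentLink": "<configdir><feedId>.d/<entryId>.html"}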

class ImageHandler:
    # Empty stub, presumably kept so that previously pickled objects still load
    def __init__(self, configdir):
        pass

class Feed:
    def __init__(self, uniqueId, name, url):
        self.titles = []
        self.entries = {}
        self.ids = []
        self.readItems = {}
        self.name = name
        self.url = url
        self.countUnread = 0
        self.updateTime = "Never"
        self.updateStamp = 0
        self.uniqueId = uniqueId
        self.etag = None
        self.modified = None

    def addImage(self, configdir, key, baseurl, url):
        filename = configdir+key+".d/"+getId(url)
        if not isfile(filename):
            try:
                #if url.startswith("http"):
                #    f = urllib2.urlopen(url)
                #else:
                f = urllib2.urlopen(urljoin(baseurl,url))
                outf = open(filename, "w")
                outf.write(f.read())
                f.close()
                outf.close()
            except:
                logger.error("Could not download " + url)
        else:
            #open(filename,"a").close()  # "Touch" the file
            file = open(filename,"a")
            utime(filename, None)
            file.close()
        return filename
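
    # Typical call (mirrors its use in updateFeed() below):
    #     filename = feed.addImage(configdir, feed.uniqueId, entry["link"], img['src'])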

    def editFeed(self, url):
        self.url = url

    def saveFeed(self, configdir):
        if not isdir(configdir+self.uniqueId+".d"):
            mkdir(configdir+self.uniqueId+".d")
        file = open(configdir+self.uniqueId+".d/feed", "w")
        pickle.dump(self, file )
        file.close()
        self.saveUnread(configdir)

    def saveUnread(self, configdir):
        if not isdir(configdir+self.uniqueId+".d"):
            mkdir(configdir+self.uniqueId+".d")
        file = open(configdir+self.uniqueId+".d/unread", "w")
        pickle.dump(self.readItems, file )
        file.close()

    def reloadUnread(self, configdir):
        try:
            file = open(configdir+self.uniqueId+".d/unread", "r")
            self.readItems = pickle.load( file )
            file.close()
            self.countUnread = 0
            for id in self.getIds():
                if self.readItems[id]==False:
                    self.countUnread = self.countUnread + 1
        except:
            pass
        return self.countUnread

    def updateFeed(self, configdir, expiryTime=24, proxy=None, imageCache=False):
        # Expiry time is in hours
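        # e.g. with the default expiryTime=24, read articles that have dropped
        # out of the feed are purged after one day, unread ones after two days
        # (see the "2*expiry" check below)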
        if proxy is None:
            tmp=feedparser.parse(self.url, etag = self.etag, modified = self.modified)
        else:
            tmp=feedparser.parse(self.url, etag = self.etag, modified = self.modified, handlers = [proxy])
        expiry = float(expiryTime) * 3600.

        # Check if the parse was successful (number of entries > 0, else do nothing)
        if len(tmp["entries"])>0:
            # The etag and modified values should only be updated if the content was not null
            try:
                self.etag = tmp["etag"]
            except KeyError:
                self.etag = None
            try:
                self.modified = tmp["modified"]
            except KeyError:
                self.modified = None
            if not isdir(configdir+self.uniqueId+".d"):
                mkdir(configdir+self.uniqueId+".d")
            try:
                f = urllib2.urlopen(urljoin(tmp["feed"]["link"],"/favicon.ico"))
                data = f.read()
                f.close()
                outf = open(configdir+self.uniqueId+".d/favicon.ico", "w")
                outf.write(data)
                outf.close()
                del data
            except:
                #import traceback
                #traceback.print_exc()
                pass

            #reversedEntries = self.getEntries()
            #reversedEntries.reverse()

            currentTime = time.time()
            tmpEntries = {}
            tmpIds = []
            for entry in tmp["entries"]:
                (dateTuple, date) = self.extractDate(entry)
                try:
                    entry["title"]
                except:
                    entry["title"] = "No Title"
                try:
                    entry["link"]
                except:
                    entry["link"] = ""
                tmpEntry = {"title":entry["title"], "content":self.extractContent(entry),
                            "date":date, "dateTuple":dateTuple, "link":entry["link"], "images":[] }
                id = self.generateUniqueId(tmpEntry)

                #articleTime = time.mktime(self.entries[id]["dateTuple"])
                if not id in self.ids:
                    # New entry: render it to a local html file, caching images if requested
                    soup = BeautifulSoup(self.getArticle(tmpEntry)) #tmpEntry["content"])
                    images = soup('img')
                    baseurl = tmpEntry["link"]
                    if imageCache:
                        for img in images:
                            try:
                                filename = self.addImage(configdir, self.uniqueId, baseurl, img['src'])
                                img['src']=filename
                                tmpEntry["images"].append(filename)
                            except:
                                logger.error("Error downloading image %s" % img)
                    tmpEntry["contentLink"] = configdir+self.uniqueId+".d/"+id+".html"
                    file = open(tmpEntry["contentLink"], "w")
                    file.write(soup.prettify())
                    file.close()
                    tmpEntries[id] = tmpEntry
                    tmpIds.append(id)
                    if id not in self.readItems:
                        self.readItems[id] = False
                else:
                    # Known entry: "touch" its cached html and images so they
                    # survive the cache cleanup below
                    try:
                        filename = configdir+self.uniqueId+".d/"+id+".html"
                        file = open(filename,"a")
                        utime(filename, None)
                        file.close()
                        for image in self.entries[id]["images"]:
                            file = open(image,"a")
                            utime(image, None)
                            file.close()
                    except:
                        pass
                    tmpEntries[id] = self.entries[id]
                    tmpIds.append(id)

            oldIds = self.ids[:]
            for entryId in oldIds:
                if not entryId in tmpIds:
                    try:
                        articleTime = time.mktime(self.entries[entryId]["dateTuple"])
                        if (currentTime - articleTime > 2*expiry):
                            self.removeEntry(entryId)
                            continue
                        if (currentTime - articleTime > expiry) and (self.isEntryRead(entryId)):
                            # Entry is older than the expiry time and has already been read
                            self.removeEntry(entryId)
                            continue
                        tmpEntries[entryId] = self.entries[entryId]
                        tmpIds.append(entryId)
                    except:
                        logger.error("Error purging old articles %s" % entryId)
                        self.removeEntry(entryId)

            self.entries = tmpEntries
            self.ids = tmpIds
            tmpUnread = 0

            ids = self.ids[:]
            for id in ids:
                if not self.readItems.has_key(id):
                    self.readItems[id] = False
                if self.readItems[id]==False:
                    tmpUnread = tmpUnread + 1
            keys = self.readItems.keys()
            for id in keys:
                if not id in self.ids:
                    del self.readItems[id]
            del tmp
            self.countUnread = tmpUnread
            self.updateTime = time.asctime()
            self.updateStamp = currentTime
            self.saveFeed(configdir)
            # Remove any cached file (article html, image, favicon) whose
            # last-modified date shows it has not been touched in three expiry periods
            from glob import glob
            from os import stat
            for file in glob(configdir+self.uniqueId+".d/*"):
                lastmodDate = stat(file).st_mtime
                expDate = time.time()-expiry*3
                if expDate > lastmodDate:
                    try:
                        remove(file)
                    except OSError:
                        logger.error('Could not remove %s' % file)

    def extractContent(self, entry):
        content = ""
        if entry.has_key('summary'):
            content = entry.get('summary', '')
        if entry.has_key('content'):
            if len(entry.content[0].value) > len(content):
                content = entry.content[0].value
        if content == "":
            content = entry.get('description', '')
        return content

    def extractDate(self, entry):
        if entry.has_key("updated_parsed"):
            date1 = entry["updated_parsed"]
            date = time.strftime("%a, %d %b %Y %H:%M:%S",entry["updated_parsed"])
        elif entry.has_key("published_parsed"):
            date1 = entry["published_parsed"]
            date = time.strftime("%a, %d %b %Y %H:%M:%S", entry["published_parsed"])
        else:
            date1 = ""
            date = ""
        #print date1, date
        return (date1, date)
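    # e.g. extractDate() returns (time.struct_time(...), "Mon, 01 Jan 2007 12:00:00"),
    # or ("", "") when the feed provides no parseable date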

    def setEntryRead(self, id):
        if self.readItems[id]==False:
            self.countUnread = self.countUnread - 1
            self.readItems[id] = True

    def setEntryUnread(self, id):
        if self.readItems[id]==True:
            self.countUnread = self.countUnread + 1
            self.readItems[id] = False

    def isEntryRead(self, id):
        # Check if an entry is read; return False if the read
        # status of an entry is unknown (id not in readItems)
        return self.readItems.get(id, False)

    def getTitle(self, id):
        return self.entries[id]["title"]

    def getContentLink(self, id):
        if self.entries[id].has_key("contentLink"):
            return self.entries[id]["contentLink"]
        return self.entries[id]["link"]

    def getExternalLink(self, id):
        return self.entries[id]["link"]

    def getDate(self, id):
        return self.entries[id]["date"]

    def getDateTuple(self, id):
        return self.entries[id]["dateTuple"]

    def getUniqueId(self, index):
        return self.ids[index]

    def generateUniqueId(self, entry):
        # Ids are an md5 hash of date+title, so an entry whose title is edited
        # upstream is treated as a new entry
        return getId(entry["date"] + entry["title"])

    def getUpdateTime(self):
        return self.updateTime

    def getUpdateStamp(self):
        try:
            return self.updateStamp
        except AttributeError:
            # Feeds pickled by older versions have no updateStamp attribute
            self.updateStamp = 0
            return self.updateStamp

    def getEntries(self):
        return self.entries

    def getIds(self):
        return self.ids

    def getNextId(self, id):
        return self.ids[(self.ids.index(id)+1) % self.getNumberOfEntries()]

    def getPreviousId(self, id):
        return self.ids[(self.ids.index(id)-1) % self.getNumberOfEntries()]

    def getNumberOfUnreadItems(self):
        return self.countUnread

    def getNumberOfEntries(self):
        return len(self.ids)

    def getItem(self, id):
        try:
            return self.entries[id]
        except:
            return []

    def getImages(self, id):
        return self.entries[id]["images"]

    def getContent(self, id):
        if self.entries[id].has_key("contentLink"):
            file = open(self.entries[id]["contentLink"])
            content = file.read()
            file.close()
            return content
        return self.entries[id]["content"]

    def removeEntry(self, id):
        if self.entries.has_key(id):
            entry = self.entries[id]
            if entry.has_key("contentLink"):
                try:
                    remove(entry["contentLink"])  #os.remove
                except:
                    logger.error("File not found for deletion: %s"
                                 % entry["contentLink"])
            del self.entries[id]
        else:
            logger.error("Entries has no %s key" % id)
        if id in self.ids:
            self.ids.remove(id)
        else:
            logger.error("Ids has no %s key" % id)
        if self.readItems.has_key(id):
            if self.readItems[id]==False:
                self.countUnread = self.countUnread - 1
            del self.readItems[id]
        else:
            logger.error("ReadItems has no %s key" % id)

    def getArticle(self, entry):
        #self.setEntryRead(id)
        #entry = self.entries[id]
        title = entry['title']
        #content = entry.get('content', entry.get('summary_detail', {}))
        content = entry["content"]
        link = entry['link']
        date = entry["date"]

        #text = '''<div style="color: black; background-color: white;">'''
        text = '<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">'
        text += "<html><head><title>" + title + "</title>"
        text += '<meta http-equiv="Content-Type" content="text/html; charset=UTF-8"/>\n'
        #text += '<style> body {-webkit-user-select: none;} </style>'
        text += '</head><body><div><a href="' + link + '">' + title + "</a>"
        text += "<BR /><small><i>Date: " + date + "</i></small></div>"
        text += "<BR /><BR />"
        text += content
        text += "</body></html>"
        return text

class ArchivedArticles(Feed):
    def addArchivedArticle(self, title, link, updated_parsed, configdir):
        entry = {}
        entry["title"] = title
        entry["link"] = link
        entry["summary"] = '<a href="' + link + '">' + title + "</a>"
        entry["updated_parsed"] = updated_parsed
        entry["time"] = time.time()
        #print entry
        (dateTuple, date) = self.extractDate(entry)
        tmpEntry = {"title":entry["title"], "content":self.extractContent(entry),
                    "date":date, "dateTuple":dateTuple, "link":entry["link"], "images":[], "downloaded":False, "time":entry["time"] }
        id = self.generateUniqueId(tmpEntry)
        self.entries[id] = tmpEntry
        self.ids.append(id)
        self.readItems[id] = False
        self.countUnread = self.countUnread + 1
        self.saveFeed(configdir)
        self.saveUnread(configdir)

    def updateFeed(self, configdir, expiryTime=24, proxy=None, imageCache=False):
        for id in self.getIds():
            entry = self.entries[id]
            if not entry["downloaded"]:
                #try:
                    f = urllib2.urlopen(entry["link"])
                    #entry["content"] = f.read()
                    html = f.read()
                    f.close()
                    soup = BeautifulSoup(html)
                    images = soup('img')
                    baseurl = entry["link"]
                    for img in images:
                        filename = self.addImage(configdir, self.uniqueId, baseurl, img['src'])
                        img['src']=filename
                    entry["contentLink"] = configdir+self.uniqueId+".d/"+id+".html"
                    file = open(entry["contentLink"], "w")
                    file.write(soup.prettify())
                    file.close()
                    if len(entry["content"]) > 0:
                        entry["downloaded"] = True
                        entry["time"] = time.time()
                        self.setEntryUnread(id)
                #except:
                #    pass
            #currentTime = time.time()
            #expiry = float(expiryTime) * 3600
            #if currentTime - entry["time"] > expiry:
            #    if self.isEntryRead(id):
            #        self.removeEntry(id)
            #    else:
            #        if currentTime - entry["time"] > 2*expiry:
            #            self.removeEntry(id)
        self.updateTime = time.asctime()
        self.updateStamp = time.time()
        self.saveFeed(configdir)

    def purgeReadArticles(self):
        ids = self.getIds()[:]  # copy the list; removeEntry() mutates self.ids
        for id in ids:
            if self.isEntryRead(id):
                self.removeEntry(id)

    def removeArticle(self, id):
        self.removeEntry(id)

    def getArticle(self, index):
        self.setEntryRead(index)
        content = self.getContent(index)
        return content


class Listing:
    # Lists all the feeds in a dictionary, and exposes the data
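    # The backing store is a single pickled dict; a sketch based on the default
    # below ("feedingit-order" holds the display order and is added by saveConfig):
    #   { getId("Maemo News"): {"title": "Maemo News", "url": "...",
    #                           "unread": 0, "updateTime": "Never"},
    #     "feedingit-order": [list of feed ids] }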
    def __init__(self, configdir):
        self.configdir = configdir
        #self.feeds = {}
        if isfile(self.configdir+"feeds.pickle"):
            file = open(self.configdir+"feeds.pickle")
            self.listOfFeeds = pickle.load(file)
            file.close()
        else:
            self.listOfFeeds = {getId("Maemo News"):{"title":"Maemo News", "url":"http://maemo.org/news/items.xml", "unread":0, "updateTime":"Never"}, }
        if self.listOfFeeds.has_key("font"):
            del self.listOfFeeds["font"]
        if self.listOfFeeds.has_key("feedingit-order"):
            self.sortedKeys = self.listOfFeeds["feedingit-order"]
        else:
            self.sortedKeys = self.listOfFeeds.keys()
            if "font" in self.sortedKeys:
                self.sortedKeys.remove("font")
            self.sortedKeys.sort(key=lambda obj: self.getFeedTitle(obj))
        #self.closeCurrentlyDisplayedFeed()

    def addArchivedArticle(self, key, index):
        feed = self.getFeed(key)
        title = feed.getTitle(index)
        link = feed.getExternalLink(index)
        date = feed.getDateTuple(index)
        if not self.listOfFeeds.has_key("ArchivedArticles"):
            self.listOfFeeds["ArchivedArticles"] = {"title":"Archived Articles", "url":"", "unread":0, "updateTime":"Never"}
            self.sortedKeys.append("ArchivedArticles")
            #self.feeds["Archived Articles"] = ArchivedArticles("Archived Articles", "")
            self.saveConfig()
        archFeed = self.getFeed("ArchivedArticles")
        archFeed.addArchivedArticle(title, link, date, self.configdir)
        self.listOfFeeds["ArchivedArticles"]["unread"] = archFeed.getNumberOfUnreadItems()

    def loadFeed(self, key):
        if isfile(self.configdir+key+".d/feed"):
            file = open(self.configdir+key+".d/feed")
            feed = pickle.load(file)
            file.close()
            # Older pickled feeds may predate some attributes; fill in defaults
            try:
                feed.uniqueId
            except AttributeError:
                feed.uniqueId = getId(feed.name)
            try:
                del feed.imageHandler
            except:
                pass
            try:
                feed.etag
            except AttributeError:
                feed.etag = None
            try:
                feed.modified
            except AttributeError:
                feed.modified = None
            #feed.reloadUnread(self.configdir)
        else:
            #print key
            title = self.listOfFeeds[key]["title"]
            url = self.listOfFeeds[key]["url"]
            if key == "ArchivedArticles":
                feed = ArchivedArticles("ArchivedArticles", title, url)
            else:
                feed = Feed(getId(title), title, url)
        return feed

    def updateFeeds(self, expiryTime=24, proxy=None, imageCache=False):
        for key in self.getListOfFeeds():
            feed = self.loadFeed(key)
            feed.updateFeed(self.configdir, expiryTime, proxy, imageCache)
            self.listOfFeeds[key]["unread"] = feed.getNumberOfUnreadItems()
            self.listOfFeeds[key]["updateTime"] = feed.getUpdateTime()
            self.listOfFeeds[key]["updateStamp"] = feed.getUpdateStamp()

    def updateFeed(self, key, expiryTime=24, proxy=None, imageCache=False):
        feed = self.getFeed(key)
        feed.updateFeed(self.configdir, expiryTime, proxy, imageCache)
        self.listOfFeeds[key]["unread"] = feed.getNumberOfUnreadItems()
        self.listOfFeeds[key]["updateTime"] = feed.getUpdateTime()
        self.listOfFeeds[key]["updateStamp"] = feed.getUpdateStamp()

    def editFeed(self, key, title, url):
        self.listOfFeeds[key]["title"] = title
        self.listOfFeeds[key]["url"] = url
        feed = self.loadFeed(key)
        feed.editFeed(url)
        feed.saveFeed(self.configdir)  # persist the new url, otherwise the edit is lost

    def getFeed(self, key):
        try:
            feed = self.loadFeed(key)
            feed.reloadUnread(self.configdir)
        except:
            # If the feed file gets corrupted, we need to reset the feed.
            import traceback
            logger.error("getFeed: %s" % traceback.format_exc())
            import dbus
            bus = dbus.SessionBus()
            remote_object = bus.get_object("org.freedesktop.Notifications", # Connection name
                               "/org/freedesktop/Notifications" # Object's path
                              )
            iface = dbus.Interface(remote_object, 'org.freedesktop.Notifications')
            iface.SystemNoteInfoprint("Error opening feed %s, it has been reset." % self.getFeedTitle(key))
            if isdir(self.configdir+key+".d/"):
                rmtree(self.configdir+key+".d/")
            feed = self.loadFeed(key)
        return feed

    def getFeedUpdateTime(self, key):
        if not self.listOfFeeds[key].has_key("updateTime"):
            self.listOfFeeds[key]["updateTime"] = "Never"
        return self.listOfFeeds[key]["updateTime"]

    def getFeedUpdateStamp(self, key):
        if not self.listOfFeeds[key].has_key("updateStamp"):
            self.listOfFeeds[key]["updateStamp"] = 0
        return self.listOfFeeds[key]["updateStamp"]

    def getFeedNumberOfUnreadItems(self, key):
        if not self.listOfFeeds[key].has_key("unread"):
            self.listOfFeeds[key]["unread"] = 0
        return self.listOfFeeds[key]["unread"]

    def updateUnread(self, key, unreadItems):
        self.listOfFeeds[key]["unread"] = unreadItems

    def getFeedTitle(self, key):
        return self.listOfFeeds[key]["title"]

    def getFeedUrl(self, key):
        return self.listOfFeeds[key]["url"]

    def getListOfFeeds(self):
        return self.sortedKeys

    def getFavicon(self, key):
        filename = self.configdir+key+".d/favicon.ico"
        if isfile(filename):
            return filename
        else:
            return False

    def addFeed(self, title, url):
        if not self.listOfFeeds.has_key(getId(title)):
            self.listOfFeeds[getId(title)] = {"title":title, "url":url, "unread":0, "updateTime":"Never"}
            self.sortedKeys.append(getId(title))
            self.saveConfig()
            #self.feeds[getId(title)] = Feed(title, url)
            return True
        else:
            return False

    def removeFeed(self, key):
        del self.listOfFeeds[key]
        self.sortedKeys.remove(key)
        #del self.feeds[key]
        if isdir(self.configdir+key+".d/"):
            rmtree(self.configdir+key+".d/")
        self.saveConfig()

    def saveConfig(self):
        self.listOfFeeds["feedingit-order"] = self.sortedKeys
        file = open(self.configdir+"feeds.pickle", "w")
        pickle.dump(self.listOfFeeds, file)
        file.close()

    def moveUp(self, key):
        # Swap with the previous feed; index -1 wraps around to the end
        index = self.sortedKeys.index(key)
        self.sortedKeys[index] = self.sortedKeys[index-1]
        self.sortedKeys[index-1] = key

    def moveDown(self, key):
        # Swap with the next feed; the modulo wraps around to the start
        index = self.sortedKeys.index(key)
        index2 = (index+1)%len(self.sortedKeys)
        self.sortedKeys[index] = self.sortedKeys[index2]
        self.sortedKeys[index2] = key

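# Example usage (a minimal sketch; the feed title and url are only illustrative):
#     listing = Listing('/home/user/.feedingit/')
#     if listing.addFeed("Planet Maemo", "http://planet.maemo.org/rss20.xml"):
#         listing.updateFeed(getId("Planet Maemo"))
#     for key in listing.getListOfFeeds():
#         print listing.getFeedTitle(key), listing.getFeedNumberOfUnreadItems(key)
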
if __name__ == "__main__":
    listing = Listing('/home/user/.feedingit/')
    list = listing.getListOfFeeds()[:]
    #list.reverse()
    for key in list:
        if key.startswith('d8'):
            print listing.getFeedUpdateTime(key)