#!/usr/bin/env python2.5

# Copyright (c) 2007-2008 INdT.
# Copyright (c) 2011 Neal H. Walfield
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program.  If not, see <http://www.gnu.org/licenses/>.

# ============================================================================
# Author      : Yves Marcoz
# Description : Simple RSS Reader
# ============================================================================

from os.path import isfile, isdir
from shutil import rmtree
from os import mkdir, remove, utime
import os
import sys
import md5
import time
import urllib2
import threading
import traceback
import sqlite3
import feedparser
import woodchuck
import mainthread
from BeautifulSoup import BeautifulSoup
from urlparse import urljoin
from calendar import timegm
from updatedbus import get_lock, release_lock
from wc import wc, wc_init
from jobmanager import JobManager
from httpprogresshandler import HTTPProgressHandler

def getId(string):
    return md5.new(string).hexdigest()
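
# getId() is used throughout to derive stable, filesystem-safe names: the MD5
# hex digest is always 32 hexadecimal characters, so getId(url) names a cached
# image file and getId(str(entry["id"])) names an article.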

def download_callback(connection):
    if JobManager().do_quit:
        raise KeyboardInterrupt

def downloader(progress_handler=None, proxy=None):
    openers = []

    if progress_handler is not None:
        openers.append (progress_handler)
    else:
        openers.append (HTTPProgressHandler(download_callback))

    if proxy:
        openers.append (proxy)

    return urllib2.build_opener (*openers)
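
# A minimal usage sketch (an assumption for illustration, not part of the
# original module): build an opener and fetch a URL with it.  Any handler
# created with urllib2.ProxyHandler can be passed as `proxy`.
#
#     opener = downloader()
#     data = opener.open("http://example.org/feed.xml").read()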

class Feed:
    serial_execution_lock = threading.Lock()

    def _getdb(self):
        try:
            db = self.tls.db
        except AttributeError:
            db = sqlite3.connect("%s/%s.db" % (self.dir, self.key), timeout=120)
            self.tls.db = db
        return db
    db = property(_getdb)

    def __init__(self, configdir, key):
        self.key = key
        self.configdir = configdir
        self.dir = "%s/%s.d" %(self.configdir, self.key)
        self.tls = threading.local ()

        if not isdir(self.dir):
            mkdir(self.dir)
        if not isfile("%s/%s.db" %(self.dir, self.key)):
            self.db.execute("CREATE TABLE feed (id text, title text, contentLink text, date float, updated float, link text, read int);")
            self.db.execute("CREATE TABLE images (id text, imagePath text);")
            self.db.commit()

    def addImage(self, configdir, key, baseurl, url, proxy=None, opener=None):
        filename = configdir+key+".d/"+getId(url)
        if not isfile(filename):
            try:
                if not opener:
                    opener = downloader(proxy=proxy)

                abs_url = urljoin(baseurl,url)
                f = opener.open(abs_url)
                outf = open(filename, "w")
                outf.write(f.read())
                f.close()
                outf.close()
            except (urllib2.HTTPError, urllib2.URLError, IOError), exception:
                print ("Could not download image %s: %s"
                       % (abs_url, str (exception)))
                return None
            except:
                exception = sys.exc_info()[0]
                print "Downloading image: %s" % abs_url
                traceback.print_exc()
                raise exception
        else:
            #open(filename,"a").close() # "Touch" the file
            file = open(filename,"a")
            utime(filename, None)
            file.close()
        return filename

    def updateFeed(self, configdir, url, etag, modified, expiryTime=24, proxy=None, imageCache=False, priority=0, postFeedUpdateFunc=None, *postFeedUpdateFuncArgs):
        def doit():
            def it():
                self._updateFeed(configdir, url, etag, modified, expiryTime, proxy, imageCache, postFeedUpdateFunc, *postFeedUpdateFuncArgs)
            return it
        JobManager().execute(doit(), self.key, priority=priority)
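
    # Note: updateFeed() does not download anything itself.  doit() builds a
    # thunk around _updateFeed(), and the JobManager runs that thunk
    # asynchronously, keyed by this feed and ordered by the given priority.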

    def _updateFeed(self, configdir, url, etag, modified, expiryTime=24, proxy=None, imageCache=False, postFeedUpdateFunc=None, *postFeedUpdateFuncArgs):
        have_serial_execution_lock = False

        update_lock = get_lock("key")
        if update_lock is None:
            # Someone else is doing an update.
            return

        try:
            download_start = time.time ()

            progress_handler = HTTPProgressHandler(download_callback)

            openers = [progress_handler]
            if proxy:
                openers.append (proxy)
            kwargs = {'handlers':openers}

            tmp=feedparser.parse(url, etag=etag, modified=modified, **kwargs)
            download_duration = time.time () - download_start

            opener = downloader(progress_handler, proxy)

            if JobManager().do_quit:
                raise KeyboardInterrupt

            process_start = time.time()

            # Expiry time is in hours
            expiry = float(expiryTime) * 3600.
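            # For example, the default expiryTime of 24 hours becomes
            # 24 * 3600 = 86400 seconds.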

            have_woodchuck = mainthread.execute (wc().available)

            def wc_success():
                try:
                    wc().stream_register (self.key, "", 6 * 60 * 60)
                except woodchuck.ObjectExistsError:
                    pass
                try:
                    wc()[self.key].updated (
                        indicator=(woodchuck.Indicator.ApplicationVisual
                                   |woodchuck.Indicator.StreamWide),
                        transferred_down=progress_handler.stats['received'],
                        transferred_up=progress_handler.stats['sent'],
                        transfer_time=download_start,
                        transfer_duration=download_duration,
                        new_objects=len (tmp.entries),
                        objects_inline=len (tmp.entries))
                except:
                    print "Failed to register update with woodchuck!"

            http_status = tmp.get ('status', 200)

            # Check if the parse was successful.  If the http status code
            # is 304, then the download was successful, but there is
            # nothing new.  Indeed, no content is returned.  This makes a
            # 304 look like an error because there are no entries and the
            # parse fails.  But really, everything went great!  Check for
            # this case first.
            if http_status == 304:
                print "%s: No changes to feed." % (self.key,)
                mainthread.execute (wc_success, async=True)
            elif len(tmp["entries"])==0 and not tmp.version:
                # An error occurred fetching or parsing the feed.  (Version
                # will be either None if e.g. the connection timed out, or
                # '' if the data is not a proper feed.)
                print ("Error fetching %s: version is: %s: error: %s"
                       % (url, str (tmp.version),
                          str (tmp.get ('bozo_exception', 'Unknown error'))))

                if have_woodchuck:
                    def e():
                        print "%s: stream update failed!" % self.key

                        try:
                            # It's not easy to get the feed's title from here.
                            # At the latest, the next time the application is
                            # started, we'll fix up the human readable name.
                            wc().stream_register (self.key, "", 6 * 60 * 60)
                        except woodchuck.ObjectExistsError:
                            pass
                        ec = woodchuck.TransferStatus.TransientOther
                        if 300 <= http_status and http_status < 400:
                            ec = woodchuck.TransferStatus.TransientNetwork
                        if 400 <= http_status and http_status < 500:
                            ec = woodchuck.TransferStatus.FailureGone
                        if 500 <= http_status and http_status < 600:
                            ec = woodchuck.TransferStatus.TransientNetwork
                        wc()[self.key].update_failed(ec)
                    mainthread.execute (e, async=True)
            else:
                currentTime = time.time()
                # The etag and modified values should only be updated if the content was not null.
                if tmp.has_key("etag"):
                    etag = tmp["etag"]
                if tmp.has_key("modified"):
                    modified = tmp["modified"]
                try:
                    abs_url = urljoin(tmp["feed"]["link"],"/favicon.ico")
                    f = opener.open(abs_url)
                    data = f.read()
                    f.close()
                    outf = open(self.dir+"/favicon.ico", "w")
                    outf.write(data)
                    outf.close()
                except (urllib2.HTTPError, urllib2.URLError), exception:
                    print ("Could not download favicon %s: %s"
                           % (abs_url, str (exception)))

                self.serial_execution_lock.acquire ()
                have_serial_execution_lock = True

                #reversedEntries = self.getEntries()
                #reversedEntries.reverse()
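                # tmp["entries"] is in feed order (typically newest first);
                # reversing it below makes the loop walk the oldest entries
                # first.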
                tmp["entries"].reverse()
                for entry in tmp["entries"]:
                    # Yield so as to make the main thread a bit more
                    # responsive.
                    time.sleep(0)

                    if JobManager().do_quit:
                        raise KeyboardInterrupt

                    received_base = progress_handler.stats['received']
                    sent_base = progress_handler.stats['sent']

                    date = self.extractDate(entry)

                    if not entry.has_key("title"):
                        entry["title"] = "No Title"
                    if not entry.has_key("author"):
                        entry["author"] = None
                    if(not(entry.has_key("id"))):
                        entry["id"] = None

                    content = self.extractContent(entry)
                    object_size = len (content)
                    received_base -= len (content)
                    tmpEntry = {"title":entry["title"], "content":content,
                                "date":date, "link":entry["link"], "author":entry["author"], "id":entry["id"]}
                    id = self.generateUniqueId(tmpEntry)

                    #articleTime = time.mktime(self.entries[id]["dateTuple"])
                    soup = BeautifulSoup(self.getArticle(tmpEntry)) #tmpEntry["content"])
                    images = soup('img')
                    baseurl = tmpEntry["link"]

                    if imageCache and len(images) > 0:
                        self.serial_execution_lock.release ()
                        have_serial_execution_lock = False
                        for img in images:
                            filename = self.addImage(configdir, self.key, baseurl, img['src'], proxy=proxy)
                            if filename:
                                img['src']="file://%s" %filename
                                count = self.db.execute("SELECT count(1) FROM images where id=? and imagePath=?;", (id, filename )).fetchone()[0]
                                if count == 0:
                                    self.db.execute("INSERT INTO images (id, imagePath) VALUES (?, ?);", (id, filename) )

                                try:
                                    object_size += os.path.getsize (filename)
                                except os.error, exception:
                                    print ("Error getting size of %s: %s"
                                           % (filename, exception))

                        self.serial_execution_lock.acquire ()
                        have_serial_execution_lock = True

                    tmpEntry["contentLink"] = configdir+self.key+".d/"+id+".html"
                    file = open(tmpEntry["contentLink"], "w")
                    file.write(soup.prettify())
                    file.close()

                    count = self.db.execute("SELECT count(*) FROM feed WHERE id=?;", (id,) ).fetchone()[0]
                    if count > 0:
                        self.db.execute("UPDATE feed SET updated=? WHERE id=?;", (currentTime, id) )
                    else:
                        values = (id, tmpEntry["title"], tmpEntry["contentLink"], tmpEntry["date"], currentTime, tmpEntry["link"], 0)
                        self.db.execute("INSERT INTO feed (id, title, contentLink, date, updated, link, read) VALUES (?, ?, ?, ?, ?, ?, ?);", values)

                    # self.db.execute("UPDATE feed SET updated=? WHERE id=?;", (currentTime, id) )
                    # filename = configdir+self.key+".d/"+id+".html"
                    # file = open(filename,"a")
                    # utime(filename, None)
                    # images = self.db.execute("SELECT imagePath FROM images where id=?;", (id, )).fetchall()
                    # for image in images:
                    #     file = open(image[0],"a")
                    #     utime(image[0], None)

                    # Register the object with Woodchuck and mark it as
                    # downloaded.
                    if have_woodchuck:
                        def e():
                            try:
                                obj = wc()[self.key].object_register(
                                    object_identifier=id,
                                    human_readable_name=tmpEntry["title"])
                            except woodchuck.ObjectExistsError:
                                obj = wc()[self.key][id]

                            # If the entry does not contain a publication
                            # time, the attribute won't exist.
                            pubtime = entry.get ('date_parsed', None)
                            if pubtime:
                                obj.publication_time = time.mktime (pubtime)

                            received = (progress_handler.stats['received']
                                        - received_base)
                            sent = progress_handler.stats['sent'] - sent_base
                            obj.transferred (
                                indicator=(woodchuck.Indicator.ApplicationVisual
                                           |woodchuck.Indicator.StreamWide),
                                transferred_down=received,
                                transferred_up=sent,
                                object_size=object_size)
                        mainthread.execute(e, async=True)

                print ("%s: Update successful: transferred: %d/%d; objects: %d)"
                       % (self.key,
                          progress_handler.stats['sent'],
                          progress_handler.stats['received'],
                          len (tmp.entries)))
                mainthread.execute (wc_success, async=True)

                # Remove entries that have expired.
                rows = self.db.execute("SELECT id FROM feed WHERE (read=0 AND updated<?) OR (read=1 AND updated<?);", (currentTime-2*expiry, currentTime-expiry))
                for row in rows:
                    self.removeEntry(row[0])

                from glob import glob
                for file in glob(configdir+self.key+".d/*"):
                    # put the two dates into matching format
                    stats = os.stat(file)
                    lastmodDate = stats[8]

                    expDate = time.time()-expiry*3
                    # check if image-last-modified-date is outdated
                    if expDate > lastmodDate:
                        try:
                            #print 'Removing', file
                            # XXX: Tell woodchuck.
                            remove(file)
                        except OSError, exception:
                            print 'Could not remove %s: %s' % (file, str (exception))

                print ("updated %s: %fs in download, %fs in processing"
                       % (self.key, download_duration,
                          time.time () - process_start))
        except:
            print "Updating %s: %s" % (self.key, sys.exc_info()[0])
            traceback.print_exc()
        finally:
            if have_serial_execution_lock:
                self.serial_execution_lock.release ()

            if update_lock is not None:
                release_lock (update_lock)

            updateTime = 0
            try:
                rows = self.db.execute("SELECT MAX(date) FROM feed;")
                for row in rows:
                    updateTime = row[0]
            except:
                print "Fetching update time."
                traceback.print_exc()

            if postFeedUpdateFunc is not None:
                postFeedUpdateFunc (self.key, updateTime, etag, modified,
                                    *postFeedUpdateFuncArgs)

    def setEntryRead(self, id):
        self.db.execute("UPDATE feed SET read=1 WHERE id=?;", (id,) )
        self.db.commit()

        def e():
            if wc().available():
                wc()[self.key][id].used()
        mainthread.execute(e, async=True)

    def setEntryUnread(self, id):
        self.db.execute("UPDATE feed SET read=0 WHERE id=?;", (id,) )
        self.db.commit()

    def markAllAsRead(self):
        self.db.execute("UPDATE feed SET read=1 WHERE read=0;")
        self.db.commit()

    def isEntryRead(self, id):
        read_status = self.db.execute("SELECT read FROM feed WHERE id=?;", (id,) ).fetchone()[0]
        return read_status==1 # Returns True if read==1, and False if read==0

    def getTitle(self, id):
        return self.db.execute("SELECT title FROM feed WHERE id=?;", (id,) ).fetchone()[0]

    def getContentLink(self, id):
        return self.db.execute("SELECT contentLink FROM feed WHERE id=?;", (id,) ).fetchone()[0]

    def getExternalLink(self, id):
        return self.db.execute("SELECT link FROM feed WHERE id=?;", (id,) ).fetchone()[0]

    def getDate(self, id):
        dateStamp = self.db.execute("SELECT date FROM feed WHERE id=?;", (id,) ).fetchone()[0]
        return time.strftime("%a, %d %b %Y %H:%M:%S", time.localtime(dateStamp))

    def getDateTuple(self, id):
        dateStamp = self.db.execute("SELECT date FROM feed WHERE id=?;", (id,) ).fetchone()[0]
        return time.localtime(dateStamp)

    def getDateStamp(self, id):
        return self.db.execute("SELECT date FROM feed WHERE id=?;", (id,) ).fetchone()[0]

    def generateUniqueId(self, entry):
        if(entry["id"] != None):
            return getId(str(entry["id"]))
        else:
            try:
                return getId(str(entry["date"]) + str(entry["title"]))
            except:
                #print entry["title"]
                return getId(str(entry["date"]))
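
    # Illustrative sketch (assumed values, not original code): the unique id is
    # an MD5 digest, preferring the feed-supplied entry id and falling back to
    # date+title:
    #
    #     generateUniqueId({"id": None, "date": 1300000000.0, "title": u"Hi"})
    #     == getId(str(1300000000.0) + str(u"Hi"))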

    def getIds(self, onlyUnread=False):
        if onlyUnread:
            rows = self.db.execute("SELECT id FROM feed where read=0 ORDER BY date DESC;").fetchall()
        else:
            rows = self.db.execute("SELECT id FROM feed ORDER BY date DESC;").fetchall()
        return [row[0] for row in rows]

    def getNextId(self, id):
        ids = self.getIds()
        index = ids.index(id)
        return ids[(index+1)%len(ids)]

    def getPreviousId(self, id):
        ids = self.getIds()
        index = ids.index(id)
        return ids[(index-1)%len(ids)]
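
    # getNextId()/getPreviousId() wrap around: with three ids, the entry after
    # the last one (index 2) is ids[(2+1) % 3] == ids[0], and the entry before
    # the first one is ids[(0-1) % 3] == ids[2].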

    def getNumberOfUnreadItems(self):
        return self.db.execute("SELECT count(*) FROM feed WHERE read=0;").fetchone()[0]

    def getNumberOfEntries(self):
        return self.db.execute("SELECT count(*) FROM feed;").fetchone()[0]

    def getArticle(self, entry):
        #self.setEntryRead(id)
        #entry = self.entries[id]
        title = entry['title']
        #content = entry.get('content', entry.get('summary_detail', {}))
        content = entry["content"]

        link = entry['link']
        author = entry['author']
        date = time.strftime("%a, %d %b %Y %H:%M:%S", time.localtime(entry["date"]) )

        #text = '''<div style="color: black; background-color: white;">'''
        text = '<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">'
        text += "<html><head><title>" + title + "</title>"
        text += '<meta http-equiv="Content-Type" content="text/html; charset=UTF-8"/>\n'
        #text += '<style> body {-webkit-user-select: none;} </style>'
        text += '</head><body bgcolor=\"#ffffff\"><div><a href=\"' + link + '\">' + title + "</a>"
        if author != None:
            text += "<BR /><small><i>Author: " + author + "</i></small>"
        text += "<BR /><small><i>Date: " + date + "</i></small></div>"
        text += "<BR /><BR />"
        text += content
        text += "</body></html>"
        return text

    def getContent(self, id):
        contentLink = self.db.execute("SELECT contentLink FROM feed WHERE id=?;", (id,)).fetchone()[0]
        try:
            file = open(contentLink)
            content = file.read()
            file.close()
        except:
            content = "Content unavailable"
        return content

    def extractDate(self, entry):
        if entry.has_key("updated_parsed"):
            return timegm(entry["updated_parsed"])
        elif entry.has_key("published_parsed"):
            return timegm(entry["published_parsed"])
        else:
            return time.time()
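
    # Both *_parsed values are UTC struct_time tuples from feedparser, so
    # calendar.timegm() (rather than time.mktime(), which assumes local time)
    # is the correct conversion to a POSIX timestamp.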

    def extractContent(self, entry):
        content = ""
        if entry.has_key('summary'):
            content = entry.get('summary', '')
        if entry.has_key('content'):
            if len(entry.content[0].value) > len(content):
                content = entry.content[0].value
        if content == "":
            content = entry.get('description', '')
        return content

    def removeEntry(self, id):
        contentLink = self.db.execute("SELECT contentLink FROM feed WHERE id=?;", (id,)).fetchone()[0]
        if contentLink:
            try:
                remove(contentLink)
            except OSError, exception:
                print "Deleting %s: %s" % (contentLink, str (exception))
        self.db.execute("DELETE FROM feed WHERE id=?;", (id,) )
        self.db.execute("DELETE FROM images WHERE id=?;", (id,) )
        self.db.commit()

        def e():
            if wc().available():
                try:
                    wc()[self.key][id].files_deleted (
                        woodchuck.DeletionResponse.Deleted)
                    del wc()[self.key][id]
                except KeyError:
                    pass
        mainthread.execute (e, async=True)

class ArchivedArticles(Feed):
    def addArchivedArticle(self, title, link, date, configdir):
        id = self.generateUniqueId({"date":date, "title":title})
        values = (id, title, link, date, 0, link, 0)
        self.db.execute("INSERT INTO feed (id, title, contentLink, date, updated, link, read) VALUES (?, ?, ?, ?, ?, ?, ?);", values)
        self.db.commit()

    def updateFeed(self, configdir, url, etag, modified, expiryTime=24, proxy=None, imageCache=False):
        currentTime = 0
        rows = self.db.execute("SELECT id, link FROM feed WHERE updated=0;")
        for row in rows:
            currentTime = time.time()
            id = row[0]
            link = row[1]
            f = urllib2.urlopen(link)
            #entry["content"] = f.read()
            html = f.read()
            f.close()
            soup = BeautifulSoup(html)
            images = soup('img')
            baseurl = link
            for img in images:
                filename = self.addImage(configdir, self.key, baseurl, img['src'], proxy=proxy)
                self.db.execute("INSERT INTO images (id, imagePath) VALUES (?, ?);", (id, filename) )

            contentLink = configdir+self.key+".d/"+id+".html"
            file = open(contentLink, "w")
            file.write(soup.prettify())
            file.close()

            self.db.execute("UPDATE feed SET read=0, contentLink=?, updated=? WHERE id=?;", (contentLink, time.time(), id) )
        self.db.commit()
        return (currentTime, None, None)

    def purgeReadArticles(self):
        rows = self.db.execute("SELECT id FROM feed WHERE read=1;")
        for row in rows:
            self.removeArticle(row[0])

    def removeArticle(self, id):
        rows = self.db.execute("SELECT imagePath FROM images WHERE id=?;", (id,) )
        for row in rows:
            count = self.db.execute("SELECT count(*) FROM images WHERE id!=? and imagePath=?;", (id,row[0]) ).fetchone()[0]
            if count == 0:
                remove(row[0])
        self.removeEntry(id)

class Listing:
    def _getdb(self):
        try:
            db = self.tls.db
        except AttributeError:
            db = sqlite3.connect("%s/feeds.db" % self.configdir, timeout=120)
            self.tls.db = db
        return db
    db = property(_getdb)

    # Lists all the feeds in a dictionary, and exposes the data.
    def __init__(self, config, configdir):
        self.config = config
        self.configdir = configdir

        self.tls = threading.local ()

        table = self.db.execute("SELECT sql FROM sqlite_master").fetchone()
        if table == None:
            self.db.execute("CREATE TABLE feeds(id text, url text, title text, unread int, updateTime float, rank int, etag text, modified text, widget int, category int);")
            self.db.execute("CREATE TABLE categories(id text, title text, unread int, rank int);")
            self.addCategory("Default Category")
            if isfile(self.configdir+"feeds.pickle"):
                self.importOldFormatFeeds()
            else:
                self.addFeed("Maemo News", "http://maemo.org/news/items.xml")
        else:
            from string import find, upper
            if find(upper(table[0]), "WIDGET")<0:
                self.db.execute("ALTER TABLE feeds ADD COLUMN widget int;")
                self.db.execute("UPDATE feeds SET widget=1;")
            if find(upper(table[0]), "CATEGORY")<0:
                self.db.execute("CREATE TABLE categories(id text, title text, unread int, rank int);")
                self.addCategory("Default Category")
                self.db.execute("ALTER TABLE feeds ADD COLUMN category int;")
                self.db.execute("UPDATE feeds SET category=1;")
        self.db.commit()

        # Check that Woodchuck's state is up to date with respect to our
        # state.
        if wc().available():
            # The list of known streams.
            streams = wc().streams_list ()
            stream_ids = [s.identifier for s in streams]

            # Register any unknown streams.  Remove known streams from
            # stream_ids.
            for key in self.getListOfFeeds():
                title = self.getFeedTitle(key)
                # XXX: We should also check whether the list of
                # articles/objects in each feed/stream is up to date.
                if key not in stream_ids:
                    print ("Registering previously unknown channel: %s (%s)"
                           % (key, title))
                    # Use a default refresh interval of 6 hours.
                    wc().stream_register (key, title, 6 * 60 * 60)
                else:
                    # Make sure the human readable name is up to date.
                    if wc()[key].human_readable_name != title:
                        wc()[key].human_readable_name = title
                    stream_ids.remove (key)

            # Unregister any streams that are no longer subscribed to.
            for id in stream_ids:
                print ("Unregistering %s" % (id,))
                wc().stream_unregister (id)

    def importOldFormatFeeds(self):
        """This function loads feeds that are saved in an outdated format, and converts them to sqlite"""
        import rss
        listing = rss.Listing(self.configdir)
        rank = 0
        for id in listing.getListOfFeeds():
            try:
                rank += 1
                values = (id, listing.getFeedTitle(id) , listing.getFeedUrl(id), 0, time.time(), rank, None, "None", 1)
                self.db.execute("INSERT INTO feeds (id, title, url, unread, updateTime, rank, etag, modified, widget, category) VALUES (?, ?, ? ,? ,? ,?, ?, ?, ?, 1);", values)
                self.db.commit()

                feed = listing.getFeed(id)
                new_feed = self.getFeed(id)

                items = feed.getIds()[:]
                items.reverse()
                for item in items:
                    if feed.isEntryRead(item):
                        read_status = 1
                    else:
                        read_status = 0
                    date = timegm(feed.getDateTuple(item))
                    title = feed.getTitle(item)
                    newId = new_feed.generateUniqueId({"date":date, "title":title})
                    values = (newId, title , feed.getContentLink(item), date, time.time(), feed.getExternalLink(item), read_status)
                    new_feed.db.execute("INSERT INTO feed (id, title, contentLink, date, updated, link, read) VALUES (?, ?, ?, ?, ?, ?, ?);", values)
                    new_feed.db.commit()

                    images = feed.getImages(item)
                    for image in images:
                        new_feed.db.execute("INSERT INTO images (id, imagePath) VALUES (?, ?);", (item, image) )
                        new_feed.db.commit()

                self.updateUnread(id)
            except:
                traceback.print_exc()
        remove(self.configdir+"feeds.pickle")

    def addArchivedArticle(self, key, index):
        feed = self.getFeed(key)
        title = feed.getTitle(index)
        link = feed.getExternalLink(index)
        date = feed.getDate(index)
        count = self.db.execute("SELECT count(*) FROM feeds where id=?;", ("ArchivedArticles",) ).fetchone()[0]
        if count == 0:
            self.addFeed("Archived Articles", "", id="ArchivedArticles")

        archFeed = self.getFeed("ArchivedArticles")
        archFeed.addArchivedArticle(title, link, date, self.configdir)
        self.updateUnread("ArchivedArticles")

    def updateFeed(self, key, expiryTime=None, proxy=None, imageCache=None,
                   priority=0):
        if expiryTime is None:
            expiryTime = self.config.getExpiry()
        if expiryTime is None:
            # Default to 24 hours
            expiryTime = 24
        if proxy is None:
            (use_proxy, proxy) = self.config.getProxy()
            if not use_proxy:
                proxy = None
        if imageCache is None:
            imageCache = self.config.getImageCache()

        feed = self.getFeed(key)
        (url, etag, modified) = self.db.execute("SELECT url, etag, modified FROM feeds WHERE id=?;", (key,) ).fetchone()
        if modified is not None and modified != "None":
            modified = time.struct_time(eval(modified))
        else:
            modified = None
        feed.updateFeed(
            self.configdir, url, etag, modified, expiryTime, proxy, imageCache,
            priority, postFeedUpdateFunc=self._queuePostFeedUpdate)

    def _queuePostFeedUpdate(self, *args, **kwargs):
        mainthread.execute (self._postFeedUpdate, async=True, *args, **kwargs)

    def _postFeedUpdate(self, key, updateTime, etag, modified):
        if modified==None:
            modified="None"
        else:
            modified=str(tuple(modified))
        if updateTime > 0:
            self.db.execute("UPDATE feeds SET updateTime=?, etag=?, modified=? WHERE id=?;", (updateTime, etag, modified, key) )
        else:
            self.db.execute("UPDATE feeds SET etag=?, modified=? WHERE id=?;", (etag, modified, key) )
        self.db.commit()
        self.updateUnread(key)

    def getFeed(self, key):
        if key == "ArchivedArticles":
            return ArchivedArticles(self.configdir, key)
        return Feed(self.configdir, key)

    def editFeed(self, key, title, url, category=None):
        if category:
            self.db.execute("UPDATE feeds SET title=?, url=?, category=? WHERE id=?;", (title, url, category, key))
        else:
            self.db.execute("UPDATE feeds SET title=?, url=? WHERE id=?;", (title, url, key))
        self.db.commit()

        if wc().available():
            try:
                wc()[key].human_readable_name = title
            except KeyError:
                print "Feed %s (%s) unknown." % (key, title)

    def getFeedUpdateTime(self, key):
        return time.ctime(self.db.execute("SELECT updateTime FROM feeds WHERE id=?;", (key,)).fetchone()[0])

    def getFeedNumberOfUnreadItems(self, key):
        return self.db.execute("SELECT unread FROM feeds WHERE id=?;", (key,)).fetchone()[0]

    def getFeedTitle(self, key):
        return self.db.execute("SELECT title FROM feeds WHERE id=?;", (key,)).fetchone()[0]

    def getFeedUrl(self, key):
        return self.db.execute("SELECT url FROM feeds WHERE id=?;", (key,)).fetchone()[0]

    def getFeedCategory(self, key):
        return self.db.execute("SELECT category FROM feeds WHERE id=?;", (key,)).fetchone()[0]

    def getListOfFeeds(self, category=None):
        if category:
            rows = self.db.execute("SELECT id FROM feeds WHERE category=? ORDER BY rank;", (category, ) )
        else:
            rows = self.db.execute("SELECT id FROM feeds ORDER BY rank;" )
        return [row[0] for row in rows]

    def getListOfCategories(self):
        rows = self.db.execute("SELECT id FROM categories ORDER BY rank;" )
        return [row[0] for row in rows]

    def getCategoryTitle(self, id):
        row = self.db.execute("SELECT title FROM categories WHERE id=?;", (id, )).fetchone()
        return row[0]

    def getSortedListOfKeys(self, order, onlyUnread=False, category=1):
        if order == "Most unread":
            tmp = "ORDER BY unread DESC"
            #keyorder = sorted(feedInfo, key = lambda k: feedInfo[k][1], reverse=True)
        elif order == "Least unread":
            tmp = "ORDER BY unread"
            #keyorder = sorted(feedInfo, key = lambda k: feedInfo[k][1])
        elif order == "Most recent":
            tmp = "ORDER BY updateTime DESC"
            #keyorder = sorted(feedInfo, key = lambda k: feedInfo[k][2], reverse=True)
        elif order == "Least recent":
            tmp = "ORDER BY updateTime"
            #keyorder = sorted(feedInfo, key = lambda k: feedInfo[k][2])
        else: # order == "Manual" or invalid value...
            tmp = "ORDER BY rank"
            #keyorder = sorted(feedInfo, key = lambda k: feedInfo[k][0])
        if onlyUnread:
            sql = "SELECT id FROM feeds WHERE unread>0 AND category=%s " %category + tmp
        else:
            sql = "SELECT id FROM feeds WHERE category=%s " %category + tmp
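        # For example, with order="Most unread", onlyUnread=True and category=1
        # the statement built above is:
        #   SELECT id FROM feeds WHERE unread>0 AND category=1 ORDER BY unread DESC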
        rows = self.db.execute(sql)
        return [row[0] for row in rows]

    def getFavicon(self, key):
        filename = "%s%s.d/favicon.ico" % (self.configdir, key)
        if isfile(filename):
            return filename
        return None

    def updateUnread(self, key):
        feed = self.getFeed(key)
        self.db.execute("UPDATE feeds SET unread=? WHERE id=?;", (feed.getNumberOfUnreadItems(), key))
        self.db.commit()

    def addFeed(self, title, url, id=None, category=1):
        if not id:
            id = getId(url)
        count = self.db.execute("SELECT count(*) FROM feeds WHERE id=?;", (id,) ).fetchone()[0]
        if count == 0:
            max_rank = self.db.execute("SELECT MAX(rank) FROM feeds;").fetchone()[0]
            if max_rank == None:
                max_rank = 0
            values = (id, title, url, 0, 0, max_rank+1, None, "None", 1, category)
            self.db.execute("INSERT INTO feeds (id, title, url, unread, updateTime, rank, etag, modified, widget, category) VALUES (?, ?, ? ,? ,? ,?, ?, ?, ?,?);", values)
            self.db.commit()
            # Ask for the feed object, it will create the necessary tables.
            self.getFeed(id)

            if wc().available():
                # Register the stream with Woodchuck.  Update approximately
                # every 6 hours.
                wc().stream_register(stream_identifier=id,
                                     human_readable_name=title,
                                     freshness=6*60*60)

    def addCategory(self, title):
        rank = self.db.execute("SELECT MAX(rank)+1 FROM categories;").fetchone()[0]
        if rank == None:
            rank = 1
        id = self.db.execute("SELECT MAX(id)+1 FROM categories;").fetchone()[0]
        if id == None:
            id = 1
        self.db.execute("INSERT INTO categories (id, title, unread, rank) VALUES (?, ?, 0, ?)", (id, title, rank))
        self.db.commit()

    def removeFeed(self, key):
        if wc().available ():
            try:
                del wc()[key]
            except KeyError:
                print "Removing unregistered feed %s failed" % (key,)

        rank = self.db.execute("SELECT rank FROM feeds WHERE id=?;", (key,) ).fetchone()[0]
        self.db.execute("DELETE FROM feeds WHERE id=?;", (key, ))
        self.db.execute("UPDATE feeds SET rank=rank-1 WHERE rank>?;", (rank,) )
        self.db.commit()

        if isdir(self.configdir+key+".d/"):
            rmtree(self.configdir+key+".d/")

    def removeCategory(self, key):
        if self.db.execute("SELECT count(*) FROM categories;").fetchone()[0] > 1:
            rank = self.db.execute("SELECT rank FROM categories WHERE id=?;", (key,) ).fetchone()[0]
            self.db.execute("DELETE FROM categories WHERE id=?;", (key, ))
            self.db.execute("UPDATE categories SET rank=rank-1 WHERE rank>?;", (rank,) )
            self.db.execute("UPDATE feeds SET category=1 WHERE category=?;", (key,) )
            self.db.commit()

    #def saveConfig(self):
    #    self.listOfFeeds["feedingit-order"] = self.sortedKeys
    #    file = open(self.configdir+"feeds.pickle", "w")
    #    pickle.dump(self.listOfFeeds, file)

    def moveUp(self, key):
        rank = self.db.execute("SELECT rank FROM feeds WHERE id=?;", (key,)).fetchone()[0]
        if rank > 0:
            self.db.execute("UPDATE feeds SET rank=? WHERE rank=?;", (rank, rank-1) )
            self.db.execute("UPDATE feeds SET rank=? WHERE id=?;", (rank-1, key) )
            self.db.commit()

    def moveCategoryUp(self, key):
        rank = self.db.execute("SELECT rank FROM categories WHERE id=?;", (key,)).fetchone()[0]
        if rank > 0:
            self.db.execute("UPDATE categories SET rank=? WHERE rank=?;", (rank, rank-1) )
            self.db.execute("UPDATE categories SET rank=? WHERE id=?;", (rank-1, key) )
            self.db.commit()

    def moveDown(self, key):
        rank = self.db.execute("SELECT rank FROM feeds WHERE id=?;", (key,)).fetchone()[0]
        max_rank = self.db.execute("SELECT MAX(rank) FROM feeds;").fetchone()[0]
        if rank < max_rank:
            self.db.execute("UPDATE feeds SET rank=? WHERE rank=?;", (rank, rank+1) )
            self.db.execute("UPDATE feeds SET rank=? WHERE id=?;", (rank+1, key) )
            self.db.commit()

    def moveCategoryDown(self, key):
        rank = self.db.execute("SELECT rank FROM categories WHERE id=?;", (key,)).fetchone()[0]
        max_rank = self.db.execute("SELECT MAX(rank) FROM categories;").fetchone()[0]
        if rank < max_rank:
            self.db.execute("UPDATE categories SET rank=? WHERE rank=?;", (rank, rank+1) )
            self.db.execute("UPDATE categories SET rank=? WHERE id=?;", (rank+1, key) )
            self.db.commit()