1 #!/usr/bin/env python2.5
4 # Copyright (c) 2007-2008 INdT.
5 # Copyright (c) 2011 Neal H. Walfield
6 # This program is free software: you can redistribute it and/or modify
7 # it under the terms of the GNU Lesser General Public License as published by
8 # the Free Software Foundation, either version 3 of the License, or
9 # (at your option) any later version.
11 # This program is distributed in the hope that it will be useful,
12 # but WITHOUT ANY WARRANTY; without even the implied warranty of
13 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 # GNU Lesser General Public License for more details.
16 # You should have received a copy of the GNU Lesser General Public License
17 # along with this program. If not, see <http://www.gnu.org/licenses/>.
20 # ============================================================================
22 # Author : Yves Marcoz
24 # Description : Simple RSS Reader
25 # ============================================================================
28 from os.path import isfile, isdir
29 from shutil import rmtree
30 from os import mkdir, remove, utime
36 from BeautifulSoup import BeautifulSoup
37 from urlparse import urljoin
38 from calendar import timegm
39 from updatedbus import get_lock, release_lock
42 from jobmanager import JobManager
44 from httpprogresshandler import HTTPProgressHandler
47 return md5.new(string).hexdigest()
def download_callback(connection):
    """Per-chunk download hook: abort the transfer once the job manager
    has been asked to quit."""
    manager = JobManager()
    if manager.do_quit:
        raise KeyboardInterrupt
def downloader(progress_handler=None, proxy=None):
    # Build a urllib2 opener chain with optional progress reporting and proxy
    # support.
    # NOTE(review): this view of the source elides several lines (the
    # `openers` list initialization and the conditionals guarding each
    # append) -- confirm against the full file before editing.
    openers.append (progress_handler)
    # Fall back to a default progress handler wired to download_callback.
    openers.append(HTTPProgressHandler(download_callback))
    openers.append (proxy)
    return urllib2.build_opener (*openers)
67 serial_execution_lock = threading.Lock()
72 except AttributeError:
73 db = sqlite3.connect("%s/%s.db" % (self.dir, self.key), timeout=120)
def __init__(self, configdir, key):
    """Open (and on first use create) the per-feed sqlite database under
    <configdir>/<key>.d/<key>.db.

    NOTE(review): lines are elided from this view (likely `self.key = key`,
    the mkdir of self.dir, and the bodies of the `if` guards); indentation
    below is reconstructed -- confirm against the full source.
    """
    self.configdir = configdir
    # Per-feed cache directory: <configdir>/<key>.d
    self.dir = "%s/%s.d" %(self.configdir, self.key)
    # Thread-local storage; presumably holds the per-thread db handle.
    self.tls = threading.local ()
    if not isdir(self.dir):
    if not isfile("%s/%s.db" %(self.dir, self.key)):
        # First run for this feed: create the entry and image tables.
        self.db.execute("CREATE TABLE feed (id text, title text, contentLink text, date float, updated float, link text, read int);")
        self.db.execute("CREATE TABLE images (id text, imagePath text);")
def addImage(self, configdir, key, baseurl, url, proxy=None, opener=None):
    """Download image *url* (resolved against *baseurl*) into the feed's
    cache directory; the file name is the hash of the url.

    NOTE(review): try/except frames and an `else` branch are elided from
    this view; indentation is reconstructed -- confirm against the full
    source. Returns the local filename in the complete original.
    """
    filename = configdir+key+".d/"+getId(url)
    if not isfile(filename):
        # No caller-supplied opener: build a default one for this download.
        opener = downloader(proxy=proxy)
        abs_url = urljoin(baseurl,url)
        f = opener.open(abs_url)
        outf = open(filename, "w")
        except (urllib2.HTTPError, urllib2.URLError), exception:
            print ("Could not download image %s: %s"
                   % (abs_url, str (exception)))
            exception = sys.exc_info()[0]
            print "Downloading image: %s" % abs_url
            traceback.print_exc()
    #open(filename,"a").close() # "Touch" the file
    # Touch the cached file so the expiry sweep sees it as recently used.
    file = open(filename,"a")
    utime(filename, None)
def updateFeed(self, configdir, url, etag, modified, expiryTime=24, proxy=None, imageCache=False, priority=0, postFeedUpdateFunc=None, *postFeedUpdateFuncArgs):
    # Queue an asynchronous feed update on the job manager.
    # NOTE(review): elided lines here almost certainly define the closure
    # (e.g. `def doit():`) that wraps the _updateFeed call and is handed to
    # JobManager below -- confirm against the full source.
    self._updateFeed(configdir, url, etag, modified, expiryTime, proxy, imageCache, postFeedUpdateFunc, *postFeedUpdateFuncArgs)
    JobManager().execute(doit(), self.key, priority=priority)
def _updateFeed(self, configdir, url, etag, modified, expiryTime=24, proxy=None, imageCache=False, postFeedUpdateFunc=None, *postFeedUpdateFuncArgs):
    """Fetch and parse the feed, store new entries (optionally caching
    images), expire old entries/files, then invoke postFeedUpdateFunc.

    NOTE(review): this view of the source elides many lines (try/except
    frames, if/else branches, loop headers, `stats`/`images`/`updateTime`
    bindings); indentation below is reconstructed and must be confirmed
    against the full file before any behavioral change.
    """
    have_serial_execution_lock = False
    # NOTE(review): the literal string "key" is passed here rather than
    # self.key, so every feed contends for the same named lock -- looks
    # like a bug; confirm intent against the full source.
    update_lock = get_lock("key")
    # Someone else is doing an update.
    download_start = time.time ()
    progress_handler = HTTPProgressHandler(download_callback)
    openers = [progress_handler]
    openers.append (proxy)
    kwargs = {'handlers':openers}
    # Conditional GET: etag/modified let the server reply 304 Not Modified.
    tmp=feedparser.parse(url, etag=etag, modified=modified, **kwargs)
    download_duration = time.time () - download_start
    opener = downloader(progress_handler, proxy)
    if JobManager().do_quit:
        raise KeyboardInterrupt
    process_start = time.time()
    # Expiry time is in hours
    expiry = float(expiryTime) * 3600.
    http_status = tmp.get ('status', 200)
    # Check if the parse was succesful. If the http status code
    # is 304, then the download was successful, but there is
    # nothing new. Indeed, no content is returned. This make a
    # 304 look like an error because there are no entries and the
    # parse fails. But really, everything went great! Check for
    if http_status == 304:
    elif len(tmp["entries"])==0 and not tmp.version:
        # An error occured fetching or parsing the feed. (Version
        # will be either None if e.g. the connection timed our or
        # '' if the data is not a proper feed)
        print ("Error fetching %s: version is: %s: error: %s"
               % (url, str (tmp.version),
                  str (tmp.get ('bozo_exception', 'Unknown error'))))
    currentTime = time.time()
    # The etag and modified value should only be updated if the content was not null
    modified = tmp["modified"]
    # Best-effort download of the site's favicon into the feed directory.
    abs_url = urljoin(tmp["feed"]["link"],"/favicon.ico")
    f = opener.open(abs_url)
    outf = open(self.dir+"/favicon.ico", "w")
    except (urllib2.HTTPError, urllib2.URLError), exception:
        print ("Could not download favicon %s: %s"
               % (abs_url, str (exception)))
    self.serial_execution_lock.acquire ()
    have_serial_execution_lock = True
    #reversedEntries = self.getEntries()
    #reversedEntries.reverse()
    # Process oldest entries first.
    tmp["entries"].reverse()
    for entry in tmp["entries"]:
        # Yield so as to make the main thread a bit more
        if JobManager().do_quit:
            raise KeyboardInterrupt
        # Snapshot transfer counters so per-entry traffic can be measured.
        received_base = progress_handler.stats['received']
        sent_base = progress_handler.stats['sent']
        date = self.extractDate(entry)
        # Fallbacks for missing feed fields (guards elided in this view).
        entry["title"] = "No Title"
        entry["author"] = None
        if(not(entry.has_key("id"))):
        content = self.extractContent(entry)
        object_size = len (content)
        received_base -= len (content)
        tmpEntry = {"title":entry["title"], "content":content,
                    "date":date, "link":entry["link"], "author":entry["author"], "id":entry["id"]}
        id = self.generateUniqueId(tmpEntry)
        #articleTime = time.mktime(self.entries[id]["dateTuple"])
        soup = BeautifulSoup(self.getArticle(tmpEntry)) #tmpEntry["content"])
        baseurl = tmpEntry["link"]
        if imageCache and len(images) > 0:
            # Release the serial lock while doing slow image downloads.
            self.serial_execution_lock.release ()
            have_serial_execution_lock = False
            filename = self.addImage(configdir, self.key, baseurl, img['src'], proxy=proxy)
            # Rewrite the img tag to point at the local cached copy.
            img['src']="file://%s" %filename
            count = self.db.execute("SELECT count(1) FROM images where id=? and imagePath=?;", (id, filename )).fetchone()[0]
            self.db.execute("INSERT INTO images (id, imagePath) VALUES (?, ?);", (id, filename) )
            object_size += os.path.getsize (filename)
            except os.error, exception:
                print ("Error getting size of %s: %s"
                       % (filename, exception))
            self.serial_execution_lock.acquire ()
            have_serial_execution_lock = True
        # Write the rendered article to disk and record/refresh its row.
        tmpEntry["contentLink"] = configdir+self.key+".d/"+id+".html"
        file = open(tmpEntry["contentLink"], "w")
        file.write(soup.prettify())
        self.db.execute("UPDATE feed SET updated=? WHERE id=?;", (currentTime, id) )
        values = (id, tmpEntry["title"], tmpEntry["contentLink"], tmpEntry["date"], currentTime, tmpEntry["link"], 0)
        self.db.execute("INSERT INTO feed (id, title, contentLink, date, updated, link, read) VALUES (?, ?, ?, ?, ?, ?, ?);", values)
        # self.db.execute("UPDATE feed SET updated=? WHERE id=?;", (currentTime, id) )
        # filename = configdir+self.key+".d/"+id+".html"
        # file = open(filename,"a")
        # utime(filename, None)
        # images = self.db.execute("SELECT imagePath FROM images where id=?;", (id, )).fetchall()
        # for image in images:
        # file = open(image[0],"a")
        # utime(image[0], None)
    # Expire: unread entries older than 2*expiry, read ones older than expiry.
    rows = self.db.execute("SELECT id FROM feed WHERE (read=0 AND updated<?) OR (read=1 AND updated<?);", (currentTime-2*expiry, currentTime-expiry))
    self.removeEntry(row[0])
    from glob import glob
    # Sweep leftover cached files whose mtime is older than 3*expiry.
    for file in glob(configdir+self.key+".d/*"):
        # put the two dates into matching format
        lastmodDate = stats[8]
        expDate = time.time()-expiry*3
        # check if image-last-modified-date is outdated
        if expDate > lastmodDate:
            #print 'Removing', file
            remove(file) # commented out for testing
            except OSError, exception:
                print 'Could not remove %s: %s' % (file, str (exception))
    print ("updated %s: %fs in download, %fs in processing"
           % (self.key, download_duration,
              time.time () - process_start))
    # Release whatever we still hold, even on the error paths above.
    if have_serial_execution_lock:
        self.serial_execution_lock.release ()
    if update_lock is not None:
        release_lock (update_lock)
    rows = self.db.execute("SELECT MAX(date) FROM feed;")
    print "Fetching update time."
    traceback.print_exc()
    if postFeedUpdateFunc is not None:
        postFeedUpdateFunc (self.key, updateTime, etag, modified,
                            *postFeedUpdateFuncArgs)
def setEntryRead(self, id):
    """Mark entry *id* as read (read=1).

    NOTE(review): a line elided from this view likely commits the
    transaction -- confirm against the full source.
    """
    self.db.execute("UPDATE feed SET read=1 WHERE id=?;", (id,) )

def setEntryUnread(self, id):
    """Mark entry *id* as unread (read=0)."""
    self.db.execute("UPDATE feed SET read=0 WHERE id=?;", (id,) )

def markAllAsRead(self):
    """Mark every currently-unread entry in this feed as read."""
    self.db.execute("UPDATE feed SET read=1 WHERE read=0;")
def isEntryRead(self, id):
    """Return True when entry *id* is marked read (read column == 1),
    False otherwise."""
    row = self.db.execute("SELECT read FROM feed WHERE id=?;", (id,) ).fetchone()
    return row[0] == 1
def getTitle(self, id):
    """Return the stored title of entry *id*."""
    row = self.db.execute("SELECT title FROM feed WHERE id=?;", (id,) ).fetchone()
    return row[0]

def getContentLink(self, id):
    """Return the local cached-content path recorded for entry *id*."""
    row = self.db.execute("SELECT contentLink FROM feed WHERE id=?;", (id,) ).fetchone()
    return row[0]

def getExternalLink(self, id):
    """Return the original article URL for entry *id*."""
    row = self.db.execute("SELECT link FROM feed WHERE id=?;", (id,) ).fetchone()
    return row[0]
def getDate(self, id):
    """Return entry *id*'s timestamp formatted like
    'Mon, 01 Jan 2001 00:00:00' in local time."""
    stamp = self.db.execute("SELECT date FROM feed WHERE id=?;", (id,) ).fetchone()[0]
    local = time.localtime(stamp)
    return time.strftime("%a, %d %b %Y %H:%M:%S", local)

def getDateTuple(self, id):
    """Return entry *id*'s timestamp as a local-time struct_time."""
    stamp = self.db.execute("SELECT date FROM feed WHERE id=?;", (id,) ).fetchone()[0]
    return time.localtime(stamp)

def getDateStamp(self, id):
    """Return entry *id*'s raw timestamp (seconds since the epoch)."""
    row = self.db.execute("SELECT date FROM feed WHERE id=?;", (id,) ).fetchone()
    return row[0]
def generateUniqueId(self, entry):
    # Derive a stable id for *entry*: prefer the feed-supplied id, otherwise
    # hash the date plus title, otherwise the date alone.
    # NOTE(review): the elif/else lines between these returns are elided
    # from this view; indentation is reconstructed -- confirm against the
    # full source.
    if(entry["id"] != None):
        return getId(str(entry["id"]))
    return getId(str(entry["date"]) + str(entry["title"]))
    #print entry["title"]
    return getId(str(entry["date"]))

def getIds(self, onlyUnread=False):
    # Entry ids sorted newest-first; onlyUnread restricts to read=0.
    # NOTE(review): the if/else around these two queries and the final
    # return (building the id list) are elided from this view.
    rows = self.db.execute("SELECT id FROM feed where read=0 ORDER BY date DESC;").fetchall()
    rows = self.db.execute("SELECT id FROM feed ORDER BY date DESC;").fetchall()

def getNextId(self, id):
    # Id following *id* in getIds() order, wrapping past the end.
    # NOTE(review): the line binding `ids` (presumably self.getIds()) is
    # elided from this view.
    index = ids.index(id)
    return ids[(index+1)%len(ids)]

def getPreviousId(self, id):
    # Id preceding *id* in getIds() order, wrapping before the start.
    # NOTE(review): the line binding `ids` is elided from this view.
    index = ids.index(id)
    return ids[(index-1)%len(ids)]
def getNumberOfUnreadItems(self):
    """Count the entries in this feed still marked unread."""
    unread_row = self.db.execute("SELECT count(*) FROM feed WHERE read=0;").fetchone()
    return unread_row[0]

def getNumberOfEntries(self):
    """Count all entries stored for this feed."""
    total_row = self.db.execute("SELECT count(*) FROM feed;").fetchone()
    return total_row[0]
def getArticle(self, entry):
    """Render *entry* (dict with title/content/date/author/link keys) as a
    standalone XHTML page and return it as a string.

    NOTE(review): lines are elided from this view (the `link` binding, the
    guard around the author line, the `text += content` and final return);
    indentation is reconstructed -- confirm against the full source.
    """
    #self.setEntryRead(id)
    #entry = self.entries[id]
    title = entry['title']
    #content = entry.get('content', entry.get('summary_detail', {}))
    content = entry["content"]
    author = entry['author']
    date = time.strftime("%a, %d %b %Y %H:%M:%S", time.localtime(entry["date"]) )
    #text = '''<div style="color: black; background-color: white;">'''
    text = '<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">'
    text += "<html><head><title>" + title + "</title>"
    text += '<meta http-equiv="Content-Type" content="text/html; charset=UTF-8"/>\n'
    #text += '<style> body {-webkit-user-select: none;} </style>'
    text += '</head><body bgcolor=\"#ffffff\"><div><a href=\"' + link + '\">' + title + "</a>"
    text += "<BR /><small><i>Author: " + author + "</i></small>"
    text += "<BR /><small><i>Date: " + date + "</i></small></div>"
    text += "<BR /><BR />"
    text += "</body></html>"

def getContent(self, id):
    # Read entry *id*'s cached article HTML from disk.
    # NOTE(review): the try/except frame and return are elided; also the
    # fetched `contentLink` is not used by the visible open() call, which
    # reads self.entries instead -- possible leftover/bug, confirm against
    # the full source.
    contentLink = self.db.execute("SELECT contentLink FROM feed WHERE id=?;", (id,)).fetchone()[0]
    file = open(self.entries[id]["contentLink"])
    content = file.read()
    # Fallback value when the cached file cannot be read.
    content = "Content unavailable"
def extractDate(self, entry):
    """Best-effort timestamp (seconds, UTC via timegm) for a feedparser
    entry: prefer updated_parsed, then published_parsed.

    NOTE(review): the final fallback branch is elided from this view.
    """
    if entry.has_key("updated_parsed"):
        return timegm(entry["updated_parsed"])
    elif entry.has_key("published_parsed"):
        return timegm(entry["published_parsed"])

def extractContent(self, entry):
    """Pick the richest body text available for a feedparser entry:
    the longer of summary and the first content element.

    NOTE(review): the else branch around the description fallback and the
    final return are elided from this view; indentation is reconstructed.
    """
    if entry.has_key('summary'):
        content = entry.get('summary', '')
    if entry.has_key('content'):
        if len(entry.content[0].value) > len(content):
            content = entry.content[0].value
    content = entry.get('description', '')
def removeEntry(self, id):
    """Delete entry *id*: its cached HTML file and its feed/images rows.

    NOTE(review): the try block (presumably the remove() of the cached
    file and image files) is elided from this view; indentation is
    reconstructed -- confirm against the full source.
    """
    contentLink = self.db.execute("SELECT contentLink FROM feed WHERE id=?;", (id,)).fetchone()[0]
    except OSError, exception:
        print "Deleting %s: %s" % (contentLink, str (exception))
    self.db.execute("DELETE FROM feed WHERE id=?;", (id,) )
    self.db.execute("DELETE FROM images WHERE id=?;", (id,) )
class ArchivedArticles(Feed):
    """Feed subclass backing the locally-saved "Archived Articles"
    pseudo-feed: articles are fetched directly from their URLs rather
    than from an RSS document.

    NOTE(review): many lines (loop headers, try/except frames, the tail of
    removeArticle) are elided from this view; indentation is reconstructed.
    """

    def addArchivedArticle(self, title, link, date, configdir):
        # Insert a placeholder row (updated=0); updateFeed fills it in later.
        id = self.generateUniqueId({"date":date, "title":title})
        values = (id, title, link, date, 0, link, 0)
        self.db.execute("INSERT INTO feed (id, title, contentLink, date, updated, link, read) VALUES (?, ?, ?, ?, ?, ?, ?);", values)

    def updateFeed(self, configdir, url, etag, modified, expiryTime=24, proxy=None, imageCache=False):
        # Download each not-yet-fetched (updated=0) article and cache it.
        rows = self.db.execute("SELECT id, link FROM feed WHERE updated=0;")
        currentTime = time.time()
        f = urllib2.urlopen(link)
        #entry["content"] = f.read()
        soup = BeautifulSoup(html)
        # Cache every referenced image and record it against this entry.
        filename = self.addImage(configdir, self.key, baseurl, img['src'], proxy=proxy)
        self.db.execute("INSERT INTO images (id, imagePath) VALUES (?, ?);", (id, filename) )
        contentLink = configdir+self.key+".d/"+id+".html"
        file = open(contentLink, "w")
        file.write(soup.prettify())
        self.db.execute("UPDATE feed SET read=0, contentLink=?, updated=? WHERE id=?;", (contentLink, time.time(), id) )
        # No etag/modified tracking for archived articles.
        return (currentTime, None, None)

    def purgeReadArticles(self):
        # Remove every archived article the user has already read.
        rows = self.db.execute("SELECT id FROM feed WHERE read=1;")
        self.removeArticle(row[0])

    def removeArticle(self, id):
        # Delete the article plus any cached images not shared with another
        # entry (the count query checks for other referents).
        rows = self.db.execute("SELECT imagePath FROM images WHERE id=?;", (id,) )
        count = self.db.execute("SELECT count(*) FROM images WHERE id!=? and imagePath=?;", (id,row[0]) ).fetchone()[0]
562 except AttributeError:
563 db = sqlite3.connect("%s/feeds.db" % self.configdir, timeout=120)
566 db = property(_getdb)
# Lists all the feeds in a dictionary, and expose the data
def __init__(self, config, configdir):
    """Open the feeds.db listing database, creating/migrating its schema
    as needed.

    NOTE(review): lines are elided from this view (likely `self.config =
    config`, the guard checking whether `table` is None, commits, and an
    else branch); indentation is reconstructed -- confirm against the
    full source.
    """
    self.configdir = configdir
    # Thread-local storage; presumably holds the per-thread db handle.
    self.tls = threading.local ()
    # Inspect the existing schema to decide between create and migrate.
    table = self.db.execute("SELECT sql FROM sqlite_master").fetchone()
    self.db.execute("CREATE TABLE feeds(id text, url text, title text, unread int, updateTime float, rank int, etag text, modified text, widget int, category int);")
    self.db.execute("CREATE TABLE categories(id text, title text, unread int, rank int);")
    self.addCategory("Default Category")
    if isfile(self.configdir+"feeds.pickle"):
        # Old pickle-based install found: migrate it into sqlite.
        self.importOldFormatFeeds()
    self.addFeed("Maemo News", "http://maemo.org/news/items.xml")
    from string import find, upper
    # Schema migrations for databases created before these columns existed.
    if find(upper(table[0]), "WIDGET")<0:
        self.db.execute("ALTER TABLE feeds ADD COLUMN widget int;")
        self.db.execute("UPDATE feeds SET widget=1;")
    if find(upper(table[0]), "CATEGORY")<0:
        self.db.execute("CREATE TABLE categories(id text, title text, unread int, rank int);")
        self.addCategory("Default Category")
        self.db.execute("ALTER TABLE feeds ADD COLUMN category int;")
        self.db.execute("UPDATE feeds SET category=1;")
def importOldFormatFeeds(self):
    """This function loads feeds that are saved in an outdated format, and converts them to sqlite"""
    # NOTE(review): try/except frames, loop bodies and several bindings
    # (`rank`, `item`, `read_status`, `image`) are elided from this view;
    # indentation is reconstructed -- confirm against the full source.
    listing = rss.Listing(self.configdir)
    for id in listing.getListOfFeeds():
        values = (id, listing.getFeedTitle(id) , listing.getFeedUrl(id), 0, time.time(), rank, None, "None", 1)
        self.db.execute("INSERT INTO feeds (id, title, url, unread, updateTime, rank, etag, modified, widget, category) VALUES (?, ?, ? ,? ,? ,?, ?, ?, ?, 1);", values)
        feed = listing.getFeed(id)
        new_feed = self.getFeed(id)
        # Copy each entry across, preserving its read flag.
        items = feed.getIds()[:]
        if feed.isEntryRead(item):
        date = timegm(feed.getDateTuple(item))
        title = feed.getTitle(item)
        newId = new_feed.generateUniqueId({"date":date, "title":title})
        # NOTE(review): tuple(time.time()) would raise TypeError on a float
        # -- looks like a bug in the original; confirm against full source.
        values = (newId, title , feed.getContentLink(item), date, tuple(time.time()), feed.getExternalLink(item), read_status)
        new_feed.db.execute("INSERT INTO feed (id, title, contentLink, date, updated, link, read) VALUES (?, ?, ?, ?, ?, ?, ?);", values)
        images = feed.getImages(item)
        new_feed.db.execute("INSERT INTO images (id, imagePath) VALUES (?, ?);", (item, image) )
        self.updateUnread(id)
    traceback.print_exc()
    # Migration done (or failed): drop the old pickle file.
    remove(self.configdir+"feeds.pickle")
def addArchivedArticle(self, key, index):
    """Copy article *index* of feed *key* into the "Archived Articles"
    pseudo-feed, creating that feed on first use.

    NOTE(review): the guard around addFeed (count==0?) is elided from this
    view; indentation is reconstructed.
    """
    feed = self.getFeed(key)
    title = feed.getTitle(index)
    link = feed.getExternalLink(index)
    date = feed.getDate(index)
    count = self.db.execute("SELECT count(*) FROM feeds where id=?;", ("ArchivedArticles",) ).fetchone()[0]
    self.addFeed("Archived Articles", "", id="ArchivedArticles")
    archFeed = self.getFeed("ArchivedArticles")
    archFeed.addArchivedArticle(title, link, date, self.configdir)
    self.updateUnread("ArchivedArticles")
def updateFeed(self, key, expiryTime=None, proxy=None, imageCache=None,
    # NOTE(review): the signature continuation line is elided from this
    # view (likely `priority=0):`), as are several guards/else branches and
    # the `feed.updateFeed(` call line; indentation is reconstructed --
    # confirm against the full source.
    # Fill unspecified options from the application config.
    if expiryTime is None:
        expiryTime = self.config.getExpiry()
    # Default to 24 hours
    (use_proxy, proxy) = self.config.getProxy()
    if imageCache is None:
        imageCache = self.config.getImageCache()
    feed = self.getFeed(key)
    (url, etag, modified) = self.db.execute("SELECT url, etag, modified FROM feeds WHERE id=?;", (key,) ).fetchone()
    # `modified` is stored as the str() of a time tuple; eval it back.
    modified = time.struct_time(eval(modified))
    self.configdir, url, etag, modified, expiryTime, proxy, imageCache,
    priority, postFeedUpdateFunc=self._queuePostFeedUpdate)
def _queuePostFeedUpdate(self, *args, **kwargs):
    # Hop onto the main thread for the DB bookkeeping. Note: `async` is a
    # plain keyword argument here (Python 2 code; it is a reserved word in
    # Python 3.7+).
    mainthread.execute (self._postFeedUpdate, async=True, *args, **kwargs)

def _postFeedUpdate(self, key, updateTime, etag, modified):
    # Persist post-update state for feed *key*.
    # NOTE(review): the if/else selecting between the two UPDATE statements
    # (and the guard around the str(tuple(...)) conversion) are elided from
    # this view; indentation is reconstructed.
    modified=str(tuple(modified))
    self.db.execute("UPDATE feeds SET updateTime=?, etag=?, modified=? WHERE id=?;", (updateTime, etag, modified, key) )
    self.db.execute("UPDATE feeds SET etag=?, modified=? WHERE id=?;", (etag, modified, key) )
    self.updateUnread(key)
def getFeed(self, key):
    """Return the Feed object for *key*; the special "ArchivedArticles"
    key gets its dedicated subclass."""
    feed_cls = ArchivedArticles if key == "ArchivedArticles" else Feed
    return feed_cls(self.configdir, key)
def editFeed(self, key, title, url, category=None):
    """Update feed *key*'s title/url (and category when given).

    NOTE(review): the if category/else lines selecting between the two
    UPDATE statements are elided from this view; indentation is
    reconstructed.
    """
    self.db.execute("UPDATE feeds SET title=?, url=?, category=? WHERE id=?;", (title, url, category, key))
    self.db.execute("UPDATE feeds SET title=?, url=? WHERE id=?;", (title, url, key))
def getFeedUpdateTime(self, key):
    """Return feed *key*'s last update time as a human-readable ctime
    string."""
    stamp = self.db.execute("SELECT updateTime FROM feeds WHERE id=?;", (key,)).fetchone()[0]
    return time.ctime(stamp)
def getFeedNumberOfUnreadItems(self, key):
    """Return the cached unread-entry count for feed *key*."""
    row = self.db.execute("SELECT unread FROM feeds WHERE id=?;", (key,)).fetchone()
    return row[0]

def getFeedTitle(self, key):
    """Return the display title of feed *key*."""
    row = self.db.execute("SELECT title FROM feeds WHERE id=?;", (key,)).fetchone()
    return row[0]

def getFeedUrl(self, key):
    """Return the subscription URL of feed *key*."""
    row = self.db.execute("SELECT url FROM feeds WHERE id=?;", (key,)).fetchone()
    return row[0]

def getFeedCategory(self, key):
    """Return the category id feed *key* belongs to."""
    row = self.db.execute("SELECT category FROM feeds WHERE id=?;", (key,)).fetchone()
    return row[0]
def getListOfFeeds(self, category=None):
    """Feed ids in rank order, optionally restricted to one category.

    NOTE(review): the if/else around the two queries and the list-building
    return are elided from this view.
    """
    rows = self.db.execute("SELECT id FROM feeds WHERE category=? ORDER BY rank;", (category, ) )
    rows = self.db.execute("SELECT id FROM feeds ORDER BY rank;" )

def getListOfCategories(self):
    """Category ids in rank order.

    NOTE(review): the list-building return is elided from this view.
    """
    rows = self.db.execute("SELECT id FROM categories ORDER BY rank;" )

def getCategoryTitle(self, id):
    """Title of category *id*.

    NOTE(review): the return line is elided from this view.
    """
    row = self.db.execute("SELECT title FROM categories WHERE id=?;", (id, )).fetchone()
def getSortedListOfKeys(self, order, onlyUnread=False, category=1):
    """Feed ids of *category* sorted by the requested criterion
    ("Most unread", "Least unread", "Most recent", "Least recent",
    anything else = manual rank), optionally unread-only.

    NOTE(review): the if onlyUnread/else around the two SQL strings and
    the final row-collecting return are elided from this view.
    """
    if order == "Most unread":
        tmp = "ORDER BY unread DESC"
        #keyorder = sorted(feedInfo, key = lambda k: feedInfo[k][1], reverse=True)
    elif order == "Least unread":
        tmp = "ORDER BY unread"
        #keyorder = sorted(feedInfo, key = lambda k: feedInfo[k][1])
    elif order == "Most recent":
        tmp = "ORDER BY updateTime DESC"
        #keyorder = sorted(feedInfo, key = lambda k: feedInfo[k][2], reverse=True)
    elif order == "Least recent":
        tmp = "ORDER BY updateTime"
        #keyorder = sorted(feedInfo, key = lambda k: feedInfo[k][2])
    else: # order == "Manual" or invalid value...
        tmp = "ORDER BY rank"
        #keyorder = sorted(feedInfo, key = lambda k: feedInfo[k][0])
    # NOTE(review): category is interpolated with %s rather than a bound
    # parameter; it is an int here, but parameterizing would be safer.
    sql = "SELECT id FROM feeds WHERE unread>0 AND category=%s " %category + tmp
    sql = "SELECT id FROM feeds WHERE category=%s " %category + tmp
    rows = self.db.execute(sql)
def getFavicon(self, key):
    """Path of feed *key*'s cached favicon.

    NOTE(review): the existence check and return are elided from this view.
    """
    filename = "%s%s.d/favicon.ico" % (self.configdir, key)

def updateUnread(self, key):
    """Recompute and store the unread counter for feed *key* from its own
    database.

    NOTE(review): a commit line is likely elided from this view.
    """
    feed = self.getFeed(key)
    self.db.execute("UPDATE feeds SET unread=? WHERE id=?;", (feed.getNumberOfUnreadItems(), key))
def addFeed(self, title, url, id=None, category=1):
    """Register a new feed (id defaults to a hash of the url in the full
    source) at the bottom of the rank order; no-op if it already exists.

    NOTE(review): the id-defaulting lines, the count==0 guard, the
    MAX(rank) None fallback, commit and return are elided from this view;
    indentation is reconstructed.
    """
    count = self.db.execute("SELECT count(*) FROM feeds WHERE id=?;", (id,) ).fetchone()[0]
    max_rank = self.db.execute("SELECT MAX(rank) FROM feeds;").fetchone()[0]
    values = (id, title, url, 0, 0, max_rank+1, None, "None", 1, category)
    self.db.execute("INSERT INTO feeds (id, title, url, unread, updateTime, rank, etag, modified, widget, category) VALUES (?, ?, ? ,? ,? ,?, ?, ?, ?,?);", values)
    # Ask for the feed object, it will create the necessary tables

def addCategory(self, title):
    """Append a new category with the next free rank and id.

    NOTE(review): None-fallbacks for the MAX() aggregates and the commit
    are elided from this view.
    """
    rank = self.db.execute("SELECT MAX(rank)+1 FROM categories;").fetchone()[0]
    id = self.db.execute("SELECT MAX(id)+1 FROM categories;").fetchone()[0]
    self.db.execute("INSERT INTO categories (id, title, unread, rank) VALUES (?, ?, 0, ?)", (id, title, rank))
def removeFeed(self, key):
    """Delete feed *key*: close its rank gap, drop its row, and remove its
    on-disk cache directory.

    NOTE(review): a commit line is likely elided between the UPDATE and the
    directory removal.
    """
    rank = self.db.execute("SELECT rank FROM feeds WHERE id=?;", (key,) ).fetchone()[0]
    self.db.execute("DELETE FROM feeds WHERE id=?;", (key, ))
    self.db.execute("UPDATE feeds SET rank=rank-1 WHERE rank>?;", (rank,) )
    if isdir(self.configdir+key+".d/"):
        rmtree(self.configdir+key+".d/")

def removeCategory(self, key):
    """Delete category *key* (never the last remaining one), close its
    rank gap, and move its feeds into the default category (1).

    NOTE(review): a commit line is likely elided at the end.
    """
    if self.db.execute("SELECT count(*) FROM categories;").fetchone()[0] > 1:
        rank = self.db.execute("SELECT rank FROM categories WHERE id=?;", (key,) ).fetchone()[0]
        self.db.execute("DELETE FROM categories WHERE id=?;", (key, ))
        self.db.execute("UPDATE categories SET rank=rank-1 WHERE rank>?;", (rank,) )
        self.db.execute("UPDATE feeds SET category=1 WHERE category=?;", (key,) )
827 #def saveConfig(self):
828 # self.listOfFeeds["feedingit-order"] = self.sortedKeys
829 # file = open(self.configdir+"feeds.pickle", "w")
830 # pickle.dump(self.listOfFeeds, file)
def moveUp(self, key):
    """Swap feed *key* with the feed ranked immediately above it.

    NOTE(review): the guard around the swap (likely `if rank>0:`) and a
    commit are elided from this view; indentation is reconstructed.
    """
    rank = self.db.execute("SELECT rank FROM feeds WHERE id=?;", (key,)).fetchone()[0]
    self.db.execute("UPDATE feeds SET rank=? WHERE rank=?;", (rank, rank-1) )
    self.db.execute("UPDATE feeds SET rank=? WHERE id=?;", (rank-1, key) )

def moveCategoryUp(self, key):
    """Swap category *key* with the category ranked immediately above it.

    NOTE(review): the guard and commit are elided from this view.
    """
    rank = self.db.execute("SELECT rank FROM categories WHERE id=?;", (key,)).fetchone()[0]
    self.db.execute("UPDATE categories SET rank=? WHERE rank=?;", (rank, rank-1) )
    self.db.execute("UPDATE categories SET rank=? WHERE id=?;", (rank-1, key) )

def moveDown(self, key):
    """Swap feed *key* with the feed ranked immediately below it.

    NOTE(review): the guard (likely `if rank<max_rank:`) and commit are
    elided from this view.
    """
    rank = self.db.execute("SELECT rank FROM feeds WHERE id=?;", (key,)).fetchone()[0]
    max_rank = self.db.execute("SELECT MAX(rank) FROM feeds;").fetchone()[0]
    self.db.execute("UPDATE feeds SET rank=? WHERE rank=?;", (rank, rank+1) )
    self.db.execute("UPDATE feeds SET rank=? WHERE id=?;", (rank+1, key) )

def moveCategoryDown(self, key):
    """Swap category *key* with the category ranked immediately below it.

    NOTE(review): the guard and commit are elided from this view.
    """
    rank = self.db.execute("SELECT rank FROM categories WHERE id=?;", (key,)).fetchone()[0]
    max_rank = self.db.execute("SELECT MAX(rank) FROM categories;").fetchone()[0]
    self.db.execute("UPDATE categories SET rank=? WHERE rank=?;", (rank, rank+1) )
    self.db.execute("UPDATE categories SET rank=? WHERE id=?;", (rank+1, key) )