Fix ArchivedArticles' update function.
[feedingit] / src / rss_sqlite.py
1 #!/usr/bin/env python2.5
2
3
4 # Copyright (c) 2007-2008 INdT.
5 # Copyright (c) 2011 Neal H. Walfield
6 # This program is free software: you can redistribute it and/or modify
7 # it under the terms of the GNU Lesser General Public License as published by
8 # the Free Software Foundation, either version 3 of the License, or
9 # (at your option) any later version.
10 #
11 #  This program is distributed in the hope that it will be useful,
12 #  but WITHOUT ANY WARRANTY; without even the implied warranty of
13 #  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
14 #  GNU Lesser General Public License for more details.
15 #
16 #  You should have received a copy of the GNU Lesser General Public License
17 #  along with this program.  If not, see <http://www.gnu.org/licenses/>.
18 #
19
20 # ============================================================================
21 # Name        : FeedingIt.py
22 # Author      : Yves Marcoz
23 # Version     : 0.5.4
24 # Description : Simple RSS Reader
25 # ============================================================================
26
27 from __future__ import with_statement
28
29 import sqlite3
30 from os.path import isfile, isdir
31 from shutil import rmtree
32 from os import mkdir, remove, utime
33 import os
34 import md5
35 import feedparser
36 import time
37 import urllib2
38 from BeautifulSoup import BeautifulSoup
39 from urlparse import urljoin
40 from calendar import timegm
41 import threading
42 import traceback
43 from wc import wc, wc_init, woodchuck
44 import subprocess
45 import dbus
46 from updatedbus import update_server_object
47
48 from jobmanager import JobManager
49 import mainthread
50 from httpprogresshandler import HTTPProgressHandler
51 import random
52 import sys
53 import logging
54 logger = logging.getLogger(__name__)
55
56 def getId(string):
57     return md5.new(string).hexdigest()
58
59 def download_callback(connection):
60     if JobManager().do_quit:
61         raise KeyboardInterrupt
62
63 def downloader(progress_handler=None, proxy=None):
64     openers = []
65
66     if progress_handler is not None:
67         openers.append(progress_handler)
68     else:
69         openers.append(HTTPProgressHandler(download_callback))
70
71     if proxy:
72         openers.append(proxy)
73
74     return urllib2.build_opener(*openers)
75
76 def transfer_stats(sent, received, **kwargs):
77     """
78     This function takes two arguments: sent is the number of bytes
79     sent so far, received is the number of bytes received.  The
80     function returns a continuation that you can call later.
81
82     The continuation takes the same two arguments.  It returns a tuple
83     of the number of bytes sent and received since the original call,
84     and the time elapsed since the original function was invoked.
85     """
86     start_time = time.time()
87     start_sent = sent
88     start_received = received
89
90     def e(sent, received, **kwargs):
91         return (sent - start_sent,
92                 received - start_received,
93                 time.time() - start_time)
94
95     return e
96
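# A minimal usage sketch of transfer_stats (illustrative byte counts):
#
#     snapshot = transfer_stats(0, 0)
#     ... some data is sent and received ...
#     sent, received, elapsed = snapshot(sent=512, received=4096)
#     # -> (512, 4096, <seconds since the snapshot was taken>)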
97 # If not None, a subprocess.Popen object corresponding to an
98 # update_feeds.py process.
99 update_feed_process = None
100
101 update_feeds_iface = None
102
103 jobs_at_start = 0
104
105 class BaseObject(object):
106     # Columns to cache.  Classes that inherit from this and use the
107     # cache mechanism should set this to a list of tuples, each of
108     # which contains two entries: the table and the column.  Note that
109     # both are case sensitive.
110     cached_columns = ()
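    # For example, a subclass that caches the feed table's read and
    # title columns (as Feed below does) would declare:
    #
    #     cached_columns = (('feed', 'read'),
    #                       ('feed', 'title'))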
111
112     def cache_invalidate(self, table=None):
113         """
114         Invalidate the cache.
115
116         If table is not None, invalidate only the specified table.
117         Otherwise, drop the whole cache.
118         """
119         if not hasattr(self, 'cache'):
120             return
121
122         if table is None:
123             del self.cache
124         else:
125             if table in self.cache:
126                 del self.cache[table]
127
128     def lookup(self, table, column, id=None):
129         """
130         Look up a column or value.  Uses a cache for columns in
131         cached_columns.  Note: the column is returned unsorted.
132         """
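        # Illustrative calls: lookup('feed', 'read', id) returns a
        # single row's value; lookup('feed', 'read') returns the whole
        # (unsorted) column.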
133         if not hasattr(self, 'cache'):
134             self.cache = {}
135
136         # Cache data for at most 60 seconds.
137         now = time.time()
138         try:
139             cache = self.cache[table]
140
141             if time.time() - cache[None] > 60:
142                 # logger.debug("%s: Cache too old: clearing" % (table,))
143                 del self.cache[table]
144                 cache = None
145         except KeyError:
146             cache = None
147
148         if (cache is None
149             or (table, column) not in self.cached_columns):
150             # The cache is empty or the caller wants a column that we
151             # don't cache.
152             if (table, column) in self.cached_columns:
153                 # logger.debug("%s: Rebuilding cache" % (table,))
154
155                 do_cache = True
156
157                 self.cache[table] = cache = {}
158                 columns = []
159                 for t, c in self.cached_columns:
160                     if table == t:
161                         cache[c] = {}
162                         columns.append(c)
163
164                 columns.append('id')
165                 where = ""
166             else:
167                 do_cache = False
168
169                 columns = (column,)
170                 if id is not None:
171                     where = "where id = '%s'" % id
172                 else:
173                     where = ""
174
175             results = self.db.execute(
176                 "SELECT %s FROM %s %s" % (','.join(columns), table, where))
177
178             if do_cache:
179                 for r in results:
180                     values = list(r)
181                     i = values.pop()
182                     for index, value in enumerate(values):
183                         cache[columns[index]][i] = value
184
185                 cache[None] = now
186             else:
187                 results = []
188                 for r in results:
189                     if id is not None:
190                         return values[0]
191
192                     results.append(values[0])
193
194                 return results
195         else:
196             cache = self.cache[table]
197
198         try:
199             if id is not None:
200                 value = cache[column][id]
201                 # logger.debug("%s.%s:%s -> %s" % (table, column, id, value))
202                 return value
203             else:
204                 return cache[column].values()
205         except KeyError:
206             # logger.debug("%s.%s:%s -> Not found" % (table, column, id))
207             return None
208
209 class Feed(BaseObject):
210     # Columns to cache.
211     cached_columns = (('feed', 'read'),
212                       ('feed', 'title'))
213
214     serial_execution_lock = threading.Lock()
215
216     def _getdb(self):
217         try:
218             db = self.tls.db
219         except AttributeError:
220             db = sqlite3.connect("%s/%s.db" % (self.dir, self.key), timeout=120)
221             self.tls.db = db
222         return db
223     db = property(_getdb)
224
225     def __init__(self, configdir, key):
226         self.key = key
227         self.configdir = configdir
228         self.dir = "%s/%s.d" %(self.configdir, self.key)
229         self.tls = threading.local ()
230
231         if not isdir(self.dir):
232             mkdir(self.dir)
233         if not isfile("%s/%s.db" %(self.dir, self.key)):
234             self.db.execute("CREATE TABLE feed (id text, title text, contentLink text, date float, updated float, link text, read int);")
235             self.db.execute("CREATE TABLE images (id text, imagePath text);")
236             self.db.commit()
237
238     def addImage(self, configdir, key, baseurl, url, proxy=None, opener=None):
239         filename = configdir+key+".d/"+getId(url)
240         if not isfile(filename):
241             try:
242                 if not opener:
243                     opener = downloader(proxy=proxy)
244
245                 abs_url = urljoin(baseurl,url)
246                 f = opener.open(abs_url)
247                 try:
248                     with open(filename, "w") as outf:
249                         for data in f:
250                             outf.write(data)
251                 finally:
252                     f.close()
253             except (urllib2.HTTPError, urllib2.URLError, IOError), exception:
254                 logger.info("Could not download image %s: %s"
255                             % (abs_url, str (exception)))
256                 return None
257             except:
258                 exception = sys.exc_info()[0]
259
260                 logger.info("Downloading image %s: %s" %
261                             (abs_url, traceback.format_exc()))
262                 try:
263                     remove(filename)
264                 except OSError:
265                     pass
266
267                 return None
268         else:
269             #open(filename,"a").close()  # "Touch" the file
270             file = open(filename,"a")
271             utime(filename, None)
272             file.close()
273         return filename
274
275     def updateFeed(self, configdir, url, etag, modified, expiryTime=24, proxy=None, imageCache=False, priority=0, postFeedUpdateFunc=None, *postFeedUpdateFuncArgs):
276         if (os.path.basename(sys.argv[0]) == 'update_feeds.py'):
277             def doit():
278                 def it():
279                     self._updateFeed(configdir, url, etag, modified, expiryTime, proxy, imageCache, postFeedUpdateFunc, *postFeedUpdateFuncArgs)
280                 return it
281             JobManager().execute(doit(), self.key, priority=priority)
282         else:
283             def send_update_request():
284                 global update_feeds_iface
285                 if update_feeds_iface is None:
286                     bus=dbus.SessionBus()
287                     remote_object = bus.get_object(
288                         "org.marcoz.feedingit", # Connection name
289                         "/org/marcoz/feedingit/update" # Object's path
290                         )
291                     update_feeds_iface = dbus.Interface(
292                         remote_object, 'org.marcoz.feedingit')
293
294                 try:
295                     update_feeds_iface.Update(self.key)
296                 except Exception, e:
297                     logger.error("Invoking org.marcoz.feedingit.Update: %s"
298                                  % str(e))
299                     update_feeds_iface = None
300                 else:
301                     return True
302
303             if send_update_request():
304                 # Success!  It seems we were able to start the update
305                 # daemon via dbus (or, it was already running).
306                 return
307
308             global update_feed_process
309             if (update_feed_process is None
310                 or update_feed_process.poll() is not None):
311                 # The update_feeds process is not running.  Start it.
312                 update_feeds = os.path.join(os.path.dirname(__file__),
313                                             'update_feeds.py')
314                 argv = ['/usr/bin/env', 'python', update_feeds, '--daemon' ]
315                 logger.debug("Starting update_feeds: running %s"
316                              % (str(argv),))
317                 update_feed_process = subprocess.Popen(argv)
318                 # Make sure the dbus calls go to the right process:
319                 # rebind.
320                 update_feeds_iface = None
321
322             for _ in xrange(5):
323                 if send_update_request():
324                     break
325                 time.sleep(1)
326
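    # Feed.updateFeed (above) invokes _updateFeed positionally, e.g.:
    #
    #     self._updateFeed(configdir, url, etag, modified, expiryTime,
    #                      proxy, imageCache, postFeedUpdateFunc,
    #                      *postFeedUpdateFuncArgs)
    #
    # Subclasses that override it (see ArchivedArticles) must accept the
    # same positional arguments.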
327     def _updateFeed(self, configdir, url, etag, modified, expiryTime=24, proxy=None, imageCache=False, postFeedUpdateFunc=None, *postFeedUpdateFuncArgs):
328         logger.debug("Updating %s" % url)
329
330         success = False
331         have_serial_execution_lock = False
332         try:
333             update_start = time.time ()
334
335             progress_handler = HTTPProgressHandler(download_callback)
336
337             openers = [progress_handler]
338             if proxy:
339                 openers.append (proxy)
340             kwargs = {'handlers':openers}
341             
342             feed_transfer_stats = transfer_stats(0, 0)
343
344             tmp=feedparser.parse(url, etag=etag, modified=modified, **kwargs)
345             download_duration = time.time () - update_start
346
347             opener = downloader(progress_handler, proxy)
348
349             if JobManager().do_quit:
350                 raise KeyboardInterrupt
351
352             process_start = time.time()
353
354             # Expiry time is in hours
355             expiry = float(expiryTime) * 3600.
356     
357             currentTime = 0
358             
359             updated_objects = 0
360             new_objects = 0
361
362             def wc_success():
363                 try:
364                     wc().stream_register (self.key, "", 6 * 60 * 60)
365                 except woodchuck.ObjectExistsError:
366                     pass
367                 try:
368                     wc()[self.key].updated (
369                         indicator=(woodchuck.Indicator.ApplicationVisual
370                                    |woodchuck.Indicator.StreamWide),
371                         transferred_down=progress_handler.stats['received'],
372                         transferred_up=progress_handler.stats['sent'],
373                         transfer_time=update_start,
374                         transfer_duration=download_duration,
375                         new_objects=new_objects,
376                         updated_objects=updated_objects,
377                         objects_inline=new_objects + updated_objects)
378                 except KeyError:
379                     logger.warn(
380                         "Failed to register update of %s with woodchuck!"
381                         % (self.key))
382     
383             http_status = tmp.get ('status', 200)
384     
385             # Check if the parse was successful.  If the HTTP status code
386             # is 304, then the download was successful, but there is
387             # nothing new.  Indeed, no content is returned.  This makes a
388             # 304 look like an error because there are no entries and the
389             # parse fails.  But really, everything went great!  Check for
390             # this first.
391             if http_status == 304:
392                 logger.debug("%s: No changes to feed." % (self.key,))
393                 mainthread.execute(wc_success, async=True)
394                 success = True
395             elif len(tmp["entries"])==0 and not tmp.get('version', None):
396                 # An error occurred fetching or parsing the feed.  (Version
397                 # will be either None if e.g. the connection timed out, or
398                 # '' if the data is not a proper feed.)
399                 logger.error(
400                     "Error fetching %s: version is: %s: error: %s"
401                     % (url, str (tmp.get('version', 'unset')),
402                        str (tmp.get ('bozo_exception', 'Unknown error'))))
403                 logger.debug(tmp)
404                 def register_stream_update_failed(http_status):
405                     def doit():
406                         logger.debug("%s: stream update failed!" % self.key)
407     
408                         try:
409                             # It's not easy to get the feed's title from here.
410                             # At the latest, the next time the application is
411                             # started, we'll fix up the human readable name.
412                             wc().stream_register (self.key, "", 6 * 60 * 60)
413                         except woodchuck.ObjectExistsError:
414                             pass
415                         ec = woodchuck.TransferStatus.TransientOther
416                         if 300 <= http_status and http_status < 400:
417                             ec = woodchuck.TransferStatus.TransientNetwork
418                         if 400 <= http_status and http_status < 500:
419                             ec = woodchuck.TransferStatus.FailureGone
420                         if 500 <= http_status and http_status < 600:
421                             ec = woodchuck.TransferStatus.TransientNetwork
422                         wc()[self.key].update_failed(ec)
423                     return doit
424                 if wc().available():
425                     mainthread.execute(
426                         register_stream_update_failed(
427                             http_status=http_status),
428                         async=True)
429             else:
430                currentTime = time.time()
431                # The etag and modified values should only be updated if the content was not null
432                try:
433                    etag = tmp["etag"]
434                except KeyError:
435                    etag = None
436                try:
437                    modified = tmp["modified"]
438                except KeyError:
439                    modified = None
440                try:
441                    abs_url = urljoin(tmp["feed"]["link"],"/favicon.ico")
442                    f = opener.open(abs_url)
443                    data = f.read()
444                    f.close()
445                    outf = open(self.dir+"/favicon.ico", "w")
446                    outf.write(data)
447                    outf.close()
448                    del data
449                except (urllib2.HTTPError, urllib2.URLError), exception:
450                    logger.debug("Could not download favicon %s: %s"
451                                 % (abs_url, str (exception)))
452     
453                self.serial_execution_lock.acquire ()
454                have_serial_execution_lock = True
455
456                #reversedEntries = self.getEntries()
457                #reversedEntries.reverse()
458     
459                tmp["entries"].reverse()
460                for entry in tmp["entries"]:
461                    # Yield so as to make the main thread a bit more
462                    # responsive.
463                    time.sleep(0)
464     
465                    entry_transfer_stats = transfer_stats(
466                        *feed_transfer_stats(**progress_handler.stats)[0:2])
467
468                    if JobManager().do_quit:
469                        raise KeyboardInterrupt
470
471                    object_size = 0
472
473                    date = self.extractDate(entry)
474                    try:
475                        entry["title"]
476                    except KeyError:
477                        entry["title"] = "No Title"
478                    try:
479                        entry["link"]
480                    except KeyError:
481                        entry["link"] = ""
482                    try:
483                        entry["author"]
484                    except KeyError:
485                        entry["author"] = None
486                    if not entry.has_key("id"):
487                        entry["id"] = None
488                    content = self.extractContent(entry)
489                    object_size = len (content)
490                    tmpEntry = {"title":entry["title"], "content":content,
491                                 "date":date, "link":entry["link"], "author":entry["author"], "id":entry["id"]}
492                    id = self.generateUniqueId(tmpEntry)
493                    
494                    current_version = self.db.execute(
495                        'select date, ROWID from feed where id=?',
496                        (id,)).fetchone()
497                    if (current_version is not None
498                        and current_version[0] == date):
499                        logger.debug("ALREADY DOWNLOADED %s (%s)"
500                                     % (entry["title"], entry["link"]))
501                        ## This article is already present in the feed listing. Update the "updated" time, so it doesn't expire 
502                        self.db.execute("UPDATE feed SET updated=? WHERE id=?;",(currentTime,id))
503                        try: 
504                            logger.debug("Updating already downloaded files for %s" %(id))
505                            filename = configdir+self.key+".d/"+id+".html"
506                            file = open(filename,"a")
507                            utime(filename, None)
508                            file.close()
509                            images = self.db.execute("SELECT imagePath FROM images where id=?;", (id, )).fetchall()
510                            for image in images:
511                                 file = open(image[0],"a")
512                                 utime(image[0], None)
513                                 file.close()
514                        except:
515                            logger.debug("Error in refreshing images for %s" % (id))
516                        self.db.commit()
517                        continue                       
518
519                    if current_version is not None:
520                        # The version was updated.  Mark it as unread.
521                        logger.debug("UPDATED: %s (%s)"
522                                     % (entry["title"], entry["link"]))
523                        updated_objects += 1
524                    else:
525                        logger.debug("NEW: %s (%s)"
526                                     % (entry["title"], entry["link"]))
527                        new_objects += 1
528
529                    #articleTime = time.mktime(self.entries[id]["dateTuple"])
530                    soup = BeautifulSoup(self.getArticle(tmpEntry)) #tmpEntry["content"])
531                    images = soup('img')
532                    baseurl = tmpEntry["link"]
533                    if imageCache and len(images) > 0:
534                        self.serial_execution_lock.release ()
535                        have_serial_execution_lock = False
536                        for img in images:
537                            if not img.has_key('src'):
538                                continue
539
540                            filename = self.addImage(
541                                configdir, self.key, baseurl, img['src'],
542                                opener=opener)
543                            if filename:
544                                 img['src']="file://%s" %filename
545                                 count = self.db.execute("SELECT count(1) FROM images where id=? and imagePath=?;", (id, filename )).fetchone()[0]
546                                 if count == 0:
547                                     self.db.execute("INSERT INTO images (id, imagePath) VALUES (?, ?);", (id, filename) )
548                                     self.db.commit()
549     
550                                 try:
551                                     object_size += os.path.getsize (filename)
552                                 except os.error, exception:
553                                     logger.error ("Error getting size of %s: %s"
554                                                   % (filename, exception))
555                        self.serial_execution_lock.acquire ()
556                        have_serial_execution_lock = True
557     
558                    tmpEntry["contentLink"] = configdir+self.key+".d/"+id+".html"
559                    file = open(tmpEntry["contentLink"], "w")
560                    file.write(soup.prettify())
561                    file.close()
562
563                    values = {'id': id,
564                              'title': tmpEntry["title"],
565                              'contentLink': tmpEntry["contentLink"],
566                              'date': tmpEntry["date"],
567                              'updated': currentTime,
568                              'link': tmpEntry["link"],
569                              'read': 0}
570
571                    if current_version is not None:
572                        # This is an update.  Ensure that the existing
573                        # entry is replaced.
574                        values['ROWID'] = current_version[1]
575
576                    cols, values = zip(*values.items())
577                    self.db.execute(
578                        "INSERT OR REPLACE INTO feed (%s) VALUES (%s);"
579                        % (','.join(cols), ','.join(('?',) * len(values))),
580                        values)
581                    self.db.commit()
582
583                    # Register the object with Woodchuck and mark it as
584                    # downloaded.
585                    def register_object_transferred(
586                            id, title, publication_time,
587                            sent, received, object_size):
588                        def doit():
589                            logger.debug("Registering transfer of object %s"
590                                         % title)
591                            try:
592                                obj = wc()[self.key].object_register(
593                                    object_identifier=id,
594                                    human_readable_name=title)
595                            except woodchuck.ObjectExistsError:
596                                obj = wc()[self.key][id]
597                            else:
598                                obj.publication_time = publication_time
599                                obj.transferred(
600                                    indicator=(
601                                        woodchuck.Indicator.ApplicationVisual
602                                        |woodchuck.Indicator.StreamWide),
603                                    transferred_down=received,
604                                    transferred_up=sent,
605                                    object_size=object_size)
606                        return doit
607                    if wc().available():
608                        # If the entry does not contain a publication
609                        # time, the attribute won't exist.
610                        pubtime = entry.get('date_parsed', None)
611                        if pubtime:
612                            publication_time = time.mktime (pubtime)
613                        else:
614                            publication_time = None
615
616                        sent, received, _ \
617                            = entry_transfer_stats(**progress_handler.stats)
618                        # sent and received are for objects (in
619                        # particular, images) associated with this
620                        # item.  We also want to attribute the data
621                        # transferred for the item's content.  This is
622                        # a good first approximation.
623                        received += len(content)
624
625                        mainthread.execute(
626                            register_object_transferred(
627                                id=id,
628                                title=tmpEntry["title"],
629                                publication_time=publication_time,
630                                sent=sent, received=received,
631                                object_size=object_size),
632                            async=True)
633                self.db.commit()
634
635                sent, received, _ \
636                    = feed_transfer_stats(**progress_handler.stats)
637                logger.debug (
638                    "%s: Update successful: transferred: %d/%d; objects: %d)"
639                    % (url, sent, received, len (tmp.entries)))
640                mainthread.execute (wc_success, async=True)
641                success = True
642
643             rows = self.db.execute("SELECT id FROM feed WHERE (read=0 AND updated<?) OR (read=1 AND updated<?);", (currentTime-2*expiry, currentTime-expiry))
644             for row in rows:
645                self.removeEntry(row[0])
646             
647             from glob import glob
648             from os import stat
649             for file in glob(configdir+self.key+".d/*"):
650                 #
651                 stats = stat(file)
652                 #
653                 # put the two dates into matching format
654                 #
655                 lastmodDate = stats[8]
656                 #
657                 expDate = time.time()-expiry*3
658                 # check if image-last-modified-date is outdated
659                 #
660                 if expDate > lastmodDate:
661                     #
662                     try:
663                         #
664                         #print 'Removing', file
665                         #
666                         # XXX: Tell woodchuck.
667                         remove(file)
668                         #
669                     except OSError, exception:
670                         #
671                         logger.error('Could not remove %s: %s'
672                                      % (file, str (exception)))
673             logger.debug("updated %s: %fs in download, %fs in processing"
674                          % (self.key, download_duration,
675                             time.time () - process_start))
676         except:
677             logger.error("Updating %s: %s" % (self.key, traceback.format_exc()))
678         finally:
679             self.db.commit ()
680
681             if have_serial_execution_lock:
682                 self.serial_execution_lock.release ()
683
684             updateTime = 0
685             try:
686                 rows = self.db.execute("SELECT MAX(date) FROM feed;")
687                 for row in rows:
688                     updateTime=row[0]
689             except Exception, e:
690                 logger.error("Fetching update time: %s: %s"
691                              % (str(e), traceback.format_exc()))
692             finally:
693                 if not success:
694                     etag = None
695                     modified = None
696                 title = None
697                 try:
698                     title = tmp.feed.title
699                 except (AttributeError, UnboundLocalError), exception:
700                     pass
701                 if postFeedUpdateFunc is not None:
702                     postFeedUpdateFunc (self.key, updateTime, etag, modified,
703                                         title, *postFeedUpdateFuncArgs)
704
705         self.cache_invalidate()
706
707     def setEntryRead(self, id):
708         self.db.execute("UPDATE feed SET read=1 WHERE id=?;", (id,) )
709         self.db.commit()
710
711         def doit():
712             try:
713                 wc()[self.key][id].used()
714             except KeyError:
715                 pass
716         if wc().available():
717             mainthread.execute(doit, async=True)
718         self.cache_invalidate('feed')
719
720     def setEntryUnread(self, id):
721         self.db.execute("UPDATE feed SET read=0 WHERE id=?;", (id,) )
722         self.db.commit()     
723         self.cache_invalidate('feed')
724         
725     def markAllAsRead(self):
726         self.db.execute("UPDATE feed SET read=1 WHERE read=0;")
727         self.db.commit()
728         self.cache_invalidate('feed')
729
730     def isEntryRead(self, id):
731         return self.lookup('feed', 'read', id) == 1
732     
733     def getTitle(self, id):
734         return self.lookup('feed', 'title', id)
735     
736     def getContentLink(self, id):
737         return self.db.execute("SELECT contentLink FROM feed WHERE id=?;", (id,) ).fetchone()[0]
738     
739     def getExternalLink(self, id):
740         return self.db.execute("SELECT link FROM feed WHERE id=?;", (id,) ).fetchone()[0]
741     
742     def getDate(self, id):
743         dateStamp = self.db.execute("SELECT date FROM feed WHERE id=?;", (id,) ).fetchone()[0]
744         return time.strftime("%a, %d %b %Y %H:%M:%S", time.localtime(dateStamp))
745
746     def getDateTuple(self, id):
747         dateStamp = self.db.execute("SELECT date FROM feed WHERE id=?;", (id,) ).fetchone()[0]
748         return time.localtime(dateStamp)
749     
750     def getDateStamp(self, id):
751         return self.db.execute("SELECT date FROM feed WHERE id=?;", (id,) ).fetchone()[0]
752     
753     def generateUniqueId(self, entry):
754         """
755         Generate a stable identifier for the article.  For the same
756         entry, this should result in the same identifier.  If
757         possible, the identifier should remain the same even if the
758         article is updated.
759         """
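        # For example, most feeds provide a GUID in entry['id'], so
        # repeated downloads of the same article map to the same key.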
760         # Prefer the entry's id, which is supposed to be globally
761         # unique.
762         key = entry.get('id', None)
763         if not key:
764             # Next, try the link to the content.
765             key = entry.get('link', None)
766         if not key:
767             # Ok, the title and the date concatenated are likely to be
768             # relatively stable.
769             key = (entry.get('title') or '') + (entry.get('date') or '')
770         if not key:
771             # Hmm, the article's content will at least guarantee no
772             # false negatives (i.e., missing articles)
773             key = entry.get('content', None)
774         if not key:
775             # If all else fails, just use a random number.
776             key = str (random.random ())
777         return getId (key)
778     
779     def getIds(self, onlyUnread=False):
780         if onlyUnread:
781             rows = self.db.execute("SELECT id FROM feed where read=0 ORDER BY date DESC;").fetchall()
782         else:
783             rows = self.db.execute("SELECT id FROM feed ORDER BY date DESC;").fetchall()
784         ids = []
785         for row in rows:
786             ids.append(row[0])
787         #ids.reverse()
788         return ids
789     
790     def getNextId(self, id, forward=True):
791         if forward:
792             delta = 1
793         else:
794             delta = -1
795         ids = self.getIds()
796         index = ids.index(id)
797         return ids[(index + delta) % len(ids)]
798         
799     def getPreviousId(self, id):
800         return self.getNextId(id, forward=False)
801     
802     def getNumberOfUnreadItems(self):
803         return self.db.execute("SELECT count(*) FROM feed WHERE read=0;").fetchone()[0]
804     
805     def getNumberOfEntries(self):
806         return self.db.execute("SELECT count(*) FROM feed;").fetchone()[0]
807
808     def getArticle(self, entry):
809         #self.setEntryRead(id)
810         #entry = self.entries[id]
811         title = entry['title']
812         #content = entry.get('content', entry.get('summary_detail', {}))
813         content = entry["content"]
814
815         link = entry['link']
816         author = entry['author']
817         date = time.strftime("%a, %d %b %Y %H:%M:%S", time.localtime(entry["date"]) )
818
819         #text = '''<div style="color: black; background-color: white;">'''
820         text = '<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">'
821         text += "<html><head><title>" + title + "</title>"
822         text += '<meta http-equiv="Content-Type" content="text/html; charset=UTF-8"/>\n'
823         #text += '<style> body {-webkit-user-select: none;} </style>'
824         text += '</head><body bgcolor=\"#ffffff\"><div><a href=\"' + link + '\">' + title + "</a>"
825         if author != None:
826             text += "<BR /><small><i>Author: " + author + "</i></small>"
827         text += "<BR /><small><i>Date: " + date + "</i></small></div>"
828         text += "<BR /><BR />"
829         text += content
830         text += "</body></html>"
831         return text
832    
833     def getContent(self, id):
834         contentLink = self.db.execute("SELECT contentLink FROM feed WHERE id=?;", (id,)).fetchone()[0]
835         try:
836             file = open(contentLink)
837             content = file.read()
838             file.close()
839         except:
840             content = "Content unavailable"
841         return content
842     
843     def extractDate(self, entry):
844         if entry.has_key("updated_parsed"):
845             return timegm(entry["updated_parsed"])
846         elif entry.has_key("published_parsed"):
847             return timegm(entry["published_parsed"])
848         else:
849             return time.time()
850         
851     def extractContent(self, entry):
852         content = ""
853         if entry.has_key('summary'):
854             content = entry.get('summary', '')
855         if entry.has_key('content'):
856             if len(entry.content[0].value) > len(content):
857                 content = entry.content[0].value
858         if content == "":
859             content = entry.get('description', '')
860         return content
861     
862     def removeEntry(self, id):
863         contentLink = self.db.execute("SELECT contentLink FROM feed WHERE id=?;", (id,)).fetchone()[0]
864         if contentLink:
865             try:
866                 remove(contentLink)
867             except OSError, exception:
868                 logger.error("Deleting %s: %s" % (contentLink, str (exception)))
869         self.db.execute("DELETE FROM feed WHERE id=?;", (id,) )
870         self.db.execute("DELETE FROM images WHERE id=?;", (id,) )
871         self.db.commit()
872
873         def doit():
874             try:
875                 wc()[self.key][id].files_deleted (
876                     woodchuck.DeletionResponse.Deleted)
877                 del wc()[self.key][id]
878             except KeyError:
879                 pass
880         if wc().available():
881             mainthread.execute (doit, async=True)
882  
883 class ArchivedArticles(Feed):    
884     def addArchivedArticle(self, title, link, date, configdir):
885         id = self.generateUniqueId({"date":date, "title":title})
886         values = (id, title, link, date, 0, link, 0)
887         self.db.execute("INSERT INTO feed (id, title, contentLink, date, updated, link, read) VALUES (?, ?, ?, ?, ?, ?, ?);", values)
888         self.db.commit()
889
890     # Feed.updateFeed calls this function; its signature must match Feed._updateFeed (there is no priority parameter).
891     def _updateFeed(self, configdir, url, etag, modified, expiryTime=24, proxy=None, imageCache=False, postFeedUpdateFunc=None, *postFeedUpdateFuncArgs):
892         currentTime = 0
893         rows = self.db.execute("SELECT id, link FROM feed WHERE updated=0;")
894         for row in rows:
895             try:
896                 currentTime = time.time()
897                 id = row[0]
898                 link = row[1]
899                 f = urllib2.urlopen(link)
900                 #entry["content"] = f.read()
901                 html = f.read()
902                 f.close()
903                 soup = BeautifulSoup(html)
904                 images = soup('img')
905                 baseurl = link
906                 for img in images:
907                     filename = self.addImage(configdir, self.key, baseurl, img['src'], proxy=proxy)
908                     if filename:
909                         img['src']=filename
910                         self.db.execute("INSERT INTO images (id, imagePath) VALUES (?, ?);", (id, filename) )
911                 contentLink = configdir+self.key+".d/"+id+".html"
912                 file = open(contentLink, "w")
913                 file.write(soup.prettify())
914                 file.close()
915                 
916                 self.db.execute("UPDATE feed SET read=0, contentLink=?, updated=? WHERE id=?;", (contentLink, time.time(), id) )
917                 self.db.commit()
918             except:
919                 logger.error("Error updating Archived Article: %s %s"
920                              % (link,traceback.format_exc(),))
921
922         if postFeedUpdateFunc is not None:
923             postFeedUpdateFunc (self.key, currentTime, None, None, None,
924                                 *postFeedUpdateFuncArgs)
925     
926     def purgeReadArticles(self):
927         rows = self.db.execute("SELECT id FROM feed WHERE read=1;")
928         #ids = self.getIds()
929         for row in rows:
930             self.removeArticle(row[0])
931
932     def removeArticle(self, id):
933         rows = self.db.execute("SELECT imagePath FROM images WHERE id=?;", (id,) )
934         for row in rows:
935             try:
936                 count = self.db.execute("SELECT count(*) FROM images WHERE id!=? and imagePath=?;", (id,row[0]) ).fetchone()[0]
937                 if count == 0:
938                     os.remove(row[0])
939             except:
940                 pass
941         self.removeEntry(id)
942
943 class Listing(BaseObject):
944     # Columns to cache.
945     cached_columns = (('feeds', 'updateTime'),
946                       ('feeds', 'unread'),
947                       ('feeds', 'title'),
948                       ('categories', 'title'))
949
950     def _getdb(self):
951         try:
952             db = self.tls.db
953         except AttributeError:
954             db = sqlite3.connect("%s/feeds.db" % self.configdir, timeout=120)
955             self.tls.db = db
956         return db
957     db = property(_getdb)
958
959     # Lists all the feeds in a dictionary, and expose the data
960     def __init__(self, config, configdir):
961         self.config = config
962         self.configdir = configdir
963
964         self.tls = threading.local ()
965         
966         try:
967             table = self.db.execute("SELECT sql FROM sqlite_master").fetchone()
968             if table == None:
969                 self.db.execute("CREATE TABLE feeds(id text, url text, title text, unread int, updateTime float, rank int, etag text, modified text, widget int, category int);")
970                 self.db.execute("CREATE TABLE categories(id text, title text, unread int, rank int);")
971                 self.addCategory("Default Category")
972                 if isfile(self.configdir+"feeds.pickle"):
973                     self.importOldFormatFeeds()
974                 else:
975                     self.addFeed("Maemo News", "http://maemo.org/news/items.xml")    
976             else:
977                 from string import find, upper
978                 if find(upper(table[0]), "WIDGET")<0:
979                     self.db.execute("ALTER TABLE feeds ADD COLUMN widget int;")
980                     self.db.execute("UPDATE feeds SET widget=1;")
981                     self.db.commit()
982                 if find(upper(table[0]), "CATEGORY")<0:
983                     self.db.execute("CREATE TABLE categories(id text, title text, unread int, rank int);")
984                     self.addCategory("Default Category")
985                     self.db.execute("ALTER TABLE feeds ADD COLUMN category int;")
986                     self.db.execute("UPDATE feeds SET category=1;")
987             self.db.commit()
988         except:
989             logger.exception("Error initializing or upgrading the feeds database")
990
991         # Check that Woodchuck's state is up to date with respect to our
992         # state.
993         try:
994             updater = os.path.basename(sys.argv[0]) == 'update_feeds.py'
995             wc_init(config, self, True if updater else False)
996             if wc().available() and updater:
997                 # The list of known streams.
998                 streams = wc().streams_list ()
999                 stream_ids = [s.identifier for s in streams]
1000     
1001                 # Register any unknown streams.  Remove known streams from
1002                 # STREAM_IDS.
1003                 for key in self.getListOfFeeds():
1004                     title = self.getFeedTitle(key)
1005                     # XXX: We should also check whether the list of
1006                     # articles/objects in each feed/stream is up to date.
1007                     if key not in stream_ids:
1008                         logger.debug(
1009                             "Registering previously unknown channel: %s (%s)"
1010                             % (key, title,))
1011                         wc().stream_register(
1012                             key, title,
1013                             self.config.getUpdateInterval() * 60 * 60)
1014                     else:
1015                         # Make sure the human readable name is up to date.
1016                         if wc()[key].human_readable_name != title:
1017                             wc()[key].human_readable_name = title
1018                         stream_ids.remove (key)
1019                         wc()[key].freshness \
1020                             = self.config.getUpdateInterval() * 60 * 60
1021                         
1022     
1023                 # Unregister any streams that are no longer subscribed to.
1024                 for id in stream_ids:
1025                     logger.debug("Unregistering %s" % (id,))
1026                     wc().stream_unregister (id)
1027         except Exception:
1028             logger.exception("Registering streams with Woodchuck")
1029
1030     def importOldFormatFeeds(self):
1031         """This function loads feeds that are saved in an outdated format, and converts them to sqlite"""
1032         import rss
1033         listing = rss.Listing(self.configdir)
1034         rank = 0
1035         for id in listing.getListOfFeeds():
1036             try:
1037                 rank += 1
1038                 values = (id, listing.getFeedTitle(id) , listing.getFeedUrl(id), 0, time.time(), rank, None, "None", 1)
1039                 self.db.execute("INSERT INTO feeds (id, title, url, unread, updateTime, rank, etag, modified, widget, category) VALUES (?, ?, ? ,? ,? ,?, ?, ?, ?, 1);", values)
1040                 self.db.commit()
1041                 
1042                 feed = listing.getFeed(id)
1043                 new_feed = self.getFeed(id)
1044                 
1045                 items = feed.getIds()[:]
1046                 items.reverse()
1047                 for item in items:
1048                         if feed.isEntryRead(item):
1049                             read_status = 1
1050                         else:
1051                             read_status = 0 
1052                         date = timegm(feed.getDateTuple(item))
1053                         title = feed.getTitle(item)
1054                         newId = new_feed.generateUniqueId({"date":date, "title":title})
1055                         values = (newId, title, feed.getContentLink(item), date, time.time(), feed.getExternalLink(item), read_status)
1056                         new_feed.db.execute("INSERT INTO feed (id, title, contentLink, date, updated, link, read) VALUES (?, ?, ?, ?, ?, ?, ?);", values)
1057                         new_feed.db.commit()
1058                         try:
1059                             images = feed.getImages(item)
1060                             for image in images:
1061                                 new_feed.db.execute("INSERT INTO images (id, imagePath) VALUES (?, ?);", (newId, image) )
1062                                 new_feed.db.commit()
1063                         except:
1064                             pass
1065                 self.updateUnread(id)
1066             except:
1067                 logger.error("importOldFormatFeeds: %s"
1068                              % (traceback.format_exc(),))
1069         remove(self.configdir+"feeds.pickle")
1070                 
1071         
1072     def addArchivedArticle(self, key, index):
1073         feed = self.getFeed(key)
1074         title = feed.getTitle(index)
1075         link = feed.getExternalLink(index)
1076         date = feed.getDate(index)
1077         count = self.db.execute("SELECT count(*) FROM feeds where id=?;", ("ArchivedArticles",) ).fetchone()[0]
1078         if count == 0:
1079             self.addFeed("Archived Articles", "", id="ArchivedArticles")
1080
1081         archFeed = self.getFeed("ArchivedArticles")
1082         archFeed.addArchivedArticle(title, link, date, self.configdir)
1083         self.updateUnread("ArchivedArticles")
1084         
1085     def updateFeed(self, key, expiryTime=None, proxy=None, imageCache=None,
1086                    priority=0):
1087         if expiryTime is None:
1088             expiryTime = self.config.getExpiry()
1089         if not expiryTime:
1090             # Default to 24 hours
1091             expiryTime = 24
1092         if proxy is None:
1093             (use_proxy, proxy) = self.config.getProxy()
1094             if not use_proxy:
1095                 proxy = None
1096         if imageCache is None:
1097             imageCache = self.config.getImageCache()
1098
1099         feed = self.getFeed(key)
1100         (url, etag, modified) = self.db.execute("SELECT url, etag, modified FROM feeds WHERE id=?;", (key,) ).fetchone()
1101         try:
1102             modified = time.struct_time(eval(modified))
1103         except:
1104             modified = None
1105         feed.updateFeed(
1106             self.configdir, url, etag, modified, expiryTime, proxy, imageCache,
1107             priority, postFeedUpdateFunc=self._queuePostFeedUpdate)
1108
1109     def _queuePostFeedUpdate(self, *args, **kwargs):
1110         mainthread.execute (self._postFeedUpdate, async=True, *args, **kwargs)
1111
1112     def _postFeedUpdate(self, key, updateTime, etag, modified, title):
1113         if modified==None:
1114             modified="None"
1115         else:
1116             modified=str(tuple(modified))
1117         if updateTime > 0:
1118             self.db.execute("UPDATE feeds SET updateTime=?, etag=?, modified=? WHERE id=?;", (updateTime, etag, modified, key) )
1119         else:
1120             self.db.execute("UPDATE feeds SET etag=?, modified=? WHERE id=?;", (etag, modified, key) )
1121
1122         if title is not None:
1123             self.db.execute("UPDATE feeds SET title=(case WHEN title=='' THEN ? ELSE title END) where id=?;",
1124                             (title, key))
1125         self.db.commit()
1126         self.cache_invalidate('feeds')
1127         self.updateUnread(key)
1128
1129         update_server_object().ArticleCountUpdated()
1130
1131         stats = JobManager().stats()
1132         global jobs_at_start
1133         completed = stats['jobs-completed'] - jobs_at_start
1134         in_progress = stats['jobs-in-progress']
1135         queued = stats['jobs-queued']
1136
1137         try:
1138             percent = (100 * ((completed + in_progress / 2.))
1139                        / (completed + in_progress + queued))
1140         except ZeroDivisionError:
1141             percent = 100
1142
1143         update_server_object().UpdateProgress(
1144             percent, completed, in_progress, queued, 0, 0, 0, key)
1145
1146         if in_progress == 0 and queued == 0:
1147             jobs_at_start = stats['jobs-completed']
1148         
1149     def getFeed(self, key):
1150         if key == "ArchivedArticles":
1151             return ArchivedArticles(self.configdir, key)
1152         return Feed(self.configdir, key)
1153         
1154     def editFeed(self, key, title, url, category=None):
1155         if category:
1156             self.db.execute("UPDATE feeds SET title=?, url=?, category=? WHERE id=?;", (title, url, category, key))
1157         else:
1158             self.db.execute("UPDATE feeds SET title=?, url=? WHERE id=?;", (title, url, key))
1159         self.db.commit()
1160         self.cache_invalidate('feeds')
1161
1162         if wc().available():
1163             try:
1164                 wc()[key].human_readable_name = title
1165             except KeyError:
1166                 logger.debug("Feed %s (%s) unknown." % (key, title))
1167         
1168     def getFeedUpdateTime(self, key):
1169         update_time = self.lookup('feeds', 'updateTime', key)
1170
1171         if not update_time:
1172             return "Never"
1173
1174         delta = time.time() - update_time
1175
1176         delta_hours = delta / (60. * 60.)
1177         if delta_hours < .1:
1178             return "A few minutes ago"
1179         if delta_hours < .75:
1180             return "Less than an hour ago"
1181         if delta_hours < 1.5:
1182             return "About an hour ago"
1183         if delta_hours < 18:
1184             return "About %d hours ago" % (int(delta_hours + 0.5),)
1185
1186         delta_days = delta_hours / 24.
1187         if delta_days < 1.5:
1188             return "About a day ago"
1189         if delta_days < 18:
1190             return "%d days ago" % (int(delta_days + 0.5),)
1191
1192         delta_weeks = delta_days / 7.
1193         if delta_weeks <= 8:
1194             return "%d weeks ago" % int(delta_weeks + 0.5)
1195
1196         delta_months = delta_days / 30.
1197         if delta_months <= 30:
1198             return "%d months ago" % int(delta_months + 0.5)
1199
1200         return time.strftime("%x", time.gmtime(update_time))
1201         
1202     def getFeedNumberOfUnreadItems(self, key):
1203         return self.lookup('feeds', 'unread', key)
1204         
1205     def getFeedTitle(self, key):
1206         title = self.lookup('feeds', 'title', key)
1207         if title:
1208             return title
1209
1210         return self.getFeedUrl(key)
1211         
1212     def getFeedUrl(self, key):
1213         return self.db.execute("SELECT url FROM feeds WHERE id=?;", (key,)).fetchone()[0]
1214     
1215     def getFeedCategory(self, key):
1216         return self.db.execute("SELECT category FROM feeds WHERE id=?;", (key,)).fetchone()[0]
1217         
1218     def getListOfFeeds(self, category=None):
1219         if category:
1220             rows = self.db.execute("SELECT id FROM feeds WHERE category=? ORDER BY rank;", (category, ) )
1221         else:
1222             rows = self.db.execute("SELECT id FROM feeds ORDER BY rank;" )
1223         keys = []
1224         for row in rows:
1225             if row[0]:
1226                 keys.append(row[0])
1227         return keys
1228     
1229     def getListOfCategories(self):
1230         return list(row[0] for row in self.db.execute(
1231                 "SELECT id FROM categories ORDER BY rank;"))
1232     
1233     def getCategoryTitle(self, id):
1234         return self.lookup('categories', 'title', id)
1235     
1236     def getSortedListOfKeys(self, order, onlyUnread=False, category=1):
1237         if   order == "Most unread":
1238             tmp = "ORDER BY unread DESC"
1239             #keyorder = sorted(feedInfo, key = lambda k: feedInfo[k][1], reverse=True)
1240         elif order == "Least unread":
1241             tmp = "ORDER BY unread"
1242             #keyorder = sorted(feedInfo, key = lambda k: feedInfo[k][1])
1243         elif order == "Most recent":
1244             tmp = "ORDER BY updateTime DESC"
1245             #keyorder = sorted(feedInfo, key = lambda k: feedInfo[k][2], reverse=True)
1246         elif order == "Least recent":
1247             tmp = "ORDER BY updateTime"
1248             #keyorder = sorted(feedInfo, key = lambda k: feedInfo[k][2])
1249         else: # order == "Manual" or invalid value...
1250             tmp = "ORDER BY rank"
1251             #keyorder = sorted(feedInfo, key = lambda k: feedInfo[k][0])
1252         if onlyUnread:
1253             sql = "SELECT id FROM feeds WHERE unread>0 AND category=%s " %category + tmp 
1254         else:
1255             sql = "SELECT id FROM feeds WHERE category=%s " %category + tmp
1256         rows = self.db.execute(sql)
1257         keys = []
1258         for row in rows:
1259             if row[0]:
1260                 keys.append(row[0])
1261         return keys
1262     
1263     def getFavicon(self, key):
1264         filename = "%s%s.d/favicon.ico" % (self.configdir, key)
1265         if isfile(filename):
1266             return filename
1267         else:
1268             return False
1269         
1270     def updateUnread(self, key):
1271         feed = self.getFeed(key)
1272         self.db.execute("UPDATE feeds SET unread=? WHERE id=?;", (feed.getNumberOfUnreadItems(), key))
1273         self.db.commit()
1274         self.cache_invalidate('feeds')
1275
1276     def addFeed(self, title, url, id=None, category=1):
1277         if not id:
1278             id = getId(url)
1279         count = self.db.execute("SELECT count(*) FROM feeds WHERE id=?;", (id,) ).fetchone()[0]
1280         if count == 0:
1281             max_rank = self.db.execute("SELECT MAX(rank) FROM feeds;").fetchone()[0]
1282             if max_rank == None:
1283                 max_rank = 0
1284             values = (id, title, url, 0, 0, max_rank+1, None, "None", 1, category)
1285             self.db.execute("INSERT INTO feeds (id, title, url, unread, updateTime, rank, etag, modified, widget, category) VALUES (?, ?, ? ,? ,? ,?, ?, ?, ?,?);", values)
1286             self.db.commit()
1287             # Ask for the feed object, it will create the necessary tables
1288             self.getFeed(id)
1289
1290             if wc().available():
1291                 # Register the stream with Woodchuck.  Update approximately
1292                 # every 6 hours.
1293                 wc().stream_register(stream_identifier=id,
1294                                      human_readable_name=title,
1295                                      freshness=6*60*60)
1296
1297             return True
1298         else:
1299             return False
1300         
1301     def addCategory(self, title):
1302         rank = self.db.execute("SELECT MAX(rank)+1 FROM categories;").fetchone()[0]
1303         if rank==None:
1304             rank=1
1305         id = self.db.execute("SELECT MAX(id)+1 FROM categories;").fetchone()[0]
1306         if id==None:
1307             id=1
1308         self.db.execute("INSERT INTO categories (id, title, unread, rank) VALUES (?, ?, 0, ?)", (id, title, rank))
1309         self.db.commit()
1310     
1311     def removeFeed(self, key):
1312         if wc().available ():
1313             try:
1314                 del wc()[key]
1315             except KeyError:
1316                 logger.debug("Removing unregistered feed %s failed" % (key,))
1317
1318         rank = self.db.execute("SELECT rank FROM feeds WHERE id=?;", (key,) ).fetchone()[0]
1319         self.db.execute("DELETE FROM feeds WHERE id=?;", (key, ))
1320         self.db.execute("UPDATE feeds SET rank=rank-1 WHERE rank>?;", (rank,) )
1321         self.db.commit()
1322
1323         if isdir(self.configdir+key+".d/"):
1324            rmtree(self.configdir+key+".d/")
1325            
1326     def removeCategory(self, key):
1327         if self.db.execute("SELECT count(*) FROM categories;").fetchone()[0] > 1:
1328             rank = self.db.execute("SELECT rank FROM categories WHERE id=?;", (key,) ).fetchone()[0]
1329             self.db.execute("DELETE FROM categories WHERE id=?;", (key, ))
1330             self.db.execute("UPDATE categories SET rank=rank-1 WHERE rank>?;", (rank,) )
1331             self.db.execute("UPDATE feeds SET category=1 WHERE category=?;", (key,) )
1332             self.db.commit()
1333         
1334     #def saveConfig(self):
1335     #    self.listOfFeeds["feedingit-order"] = self.sortedKeys
1336     #    file = open(self.configdir+"feeds.pickle", "w")
1337     #    pickle.dump(self.listOfFeeds, file)
1338     #    file.close()
1339         
1340     def moveUp(self, key):
1341         rank = self.db.execute("SELECT rank FROM feeds WHERE id=?;", (key,)).fetchone()[0]
1342         if rank>0:
1343             self.db.execute("UPDATE feeds SET rank=? WHERE rank=?;", (rank, rank-1) )
1344             self.db.execute("UPDATE feeds SET rank=? WHERE id=?;", (rank-1, key) )
1345             self.db.commit()
1346             
1347     def moveCategoryUp(self, key):
1348         rank = self.db.execute("SELECT rank FROM categories WHERE id=?;", (key,)).fetchone()[0]
1349         if rank>0:
1350             self.db.execute("UPDATE categories SET rank=? WHERE rank=?;", (rank, rank-1) )
1351             self.db.execute("UPDATE categories SET rank=? WHERE id=?;", (rank-1, key) )
1352             self.db.commit()
1353         
1354     def moveDown(self, key):
1355         rank = self.db.execute("SELECT rank FROM feeds WHERE id=?;", (key,)).fetchone()[0]
1356         max_rank = self.db.execute("SELECT MAX(rank) FROM feeds;").fetchone()[0]
1357         if rank<max_rank:
1358             self.db.execute("UPDATE feeds SET rank=? WHERE rank=?;", (rank, rank+1) )
1359             self.db.execute("UPDATE feeds SET rank=? WHERE id=?;", (rank+1, key) )
1360             self.db.commit()
1361             
1362     def moveCategoryDown(self, key):
1363         rank = self.db.execute("SELECT rank FROM categories WHERE id=?;", (key,)).fetchone()[0]
1364         max_rank = self.db.execute("SELECT MAX(rank) FROM categories;").fetchone()[0]
1365         if rank<max_rank:
1366             self.db.execute("UPDATE categories SET rank=? WHERE rank=?;", (rank, rank+1) )
1367             self.db.execute("UPDATE categories SET rank=? WHERE id=?;", (rank+1, key) )
1368             self.db.commit()
1369             
1370