Change addImage to return None on failure instead of throwing an exception.
diff --git a/src/rss_sqlite.py b/src/rss_sqlite.py
index 0319999..253ed85 100644
--- a/src/rss_sqlite.py
+++ b/src/rss_sqlite.py
@@ -139,7 +139,9 @@ class BaseObject(object):
             cache = self.cache[table]
 
             if time.time() - cache[None] > 60:
-                self.cache[table].clear()
+                # logger.debug("%s: Cache too old: clearing" % (table,))
+                del self.cache[table]
+                cache = None
         except KeyError:
             cache = None
 
@@ -148,6 +150,8 @@ class BaseObject(object):
             # The cache is empty or the caller wants a column that we
             # don't cache.
             if (table, column) in self.cached_columns:
+                # logger.debug("%s: Rebuilding cache" % (table,))
+
                 do_cache = True
 
                 self.cache[table] = cache = {}
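The rebuild path, only partly visible in this hunk, repopulates the per-table dictionary; for the expiry check in the first hunk to work, the build time has to be stamped under the None key. A sketch of the resulting shape, with the column data invented purely for illustration:

    import time

    cache = {}
    cache[None] = time.time()      # build time, consulted by the expiry check
    cache['title'] = {             # one mapping per cached column, keyed by row id
        'id-1': 'First article',
        'id-2': 'Second article',
    }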
@@ -193,10 +197,13 @@ class BaseObject(object):
 
         try:
             if id is not None:
-                return cache[column][id]
+                value = cache[column][id]
+                # logger.debug("%s.%s:%s -> %s" % (table, column, id, value))
+                return value
             else:
                 return cache[column].values()
         except KeyError:
+            # logger.debug("%s.%s:%s -> Not found" % (table, column, id))
             return None
 
 class Feed(BaseObject):
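With the hunks above, the cached lookup returns the stored value for a given id, every cached value when no id is passed, and None on any miss. A hedged usage sketch; the method name lookup and the table and column names are assumptions, only the behaviour follows from the code:

    # Hypothetical call sites on a Feed/BaseObject instance.
    title = feed.lookup('feed', 'title', article_id)
    if title is None:
        title = '(unknown)'        # id not cached or column not cacheable

    all_titles = feed.lookup('feed', 'title')   # whole column, as .values()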
@@ -257,7 +264,7 @@ class Feed(BaseObject):
                 except OSError:
                     pass
 
-                raise exception
+                return None
         else:
             #open(filename,"a").close()  # "Touch" the file
             file = open(filename,"a")
@@ -348,7 +355,10 @@ class Feed(BaseObject):
             expiry = float(expiryTime) * 3600.
     
             currentTime = 0
-    
+            
+            updated_objects = 0
+            new_objects = 0
+
             def wc_success():
                 try:
                     wc().stream_register (self.key, "", 6 * 60 * 60)
@@ -362,8 +372,9 @@ class Feed(BaseObject):
                         transferred_up=progress_handler.stats['sent'],
                         transfer_time=update_start,
                         transfer_duration=download_duration,
-                        new_objects=len (tmp.entries),
-                        objects_inline=len (tmp.entries))
+                        new_objects=new_objects,
+                        updated_objects=updated_objects,
+                        objects_inline=new_objects + updated_objects)
                 except KeyError:
                     logger.warn(
                         "Failed to register update of %s with woodchuck!"
@@ -482,6 +493,26 @@ class Feed(BaseObject):
                                 "date":date, "link":entry["link"], "author":entry["author"], "id":entry["id"]}
                    id = self.generateUniqueId(tmpEntry)
                    
+                   current_version \
+                       = self.db.execute('select date from feed where id=?',
+                                         (id,)).fetchone()
+                   if (current_version is not None
+                       and current_version[0] == date):
+                       logger.debug("ALREADY DOWNLOADED %s (%s)"
+                                    % (entry["title"], entry["link"]))
+                       continue                       
+
+                   if current_version is not None:
+                       # The version was updated.  Mark it as unread.
+                       logger.debug("UPDATED: %s (%s)"
+                                    % (entry["title"], entry["link"]))
+                       self.setEntryUnread(id)
+                       updated_objects += 1
+                   else:
+                       logger.debug("NEW: %s (%s)"
+                                    % (entry["title"], entry["link"]))
+                       new_objects += 1
+
                    #articleTime = time.mktime(self.entries[id]["dateTuple"])
                    soup = BeautifulSoup(self.getArticle(tmpEntry)) #tmpEntry["content"])
                    images = soup('img')
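The last hunk stops re-storing articles the feed already has: if an entry's generated id is known and its date is unchanged it is skipped, if the date differs it is marked unread again and counted as updated, otherwise it is counted as new. A self-contained illustration of that id/date check using a bare sqlite3 connection; the single-column schema is simplified for the example and does not match the real feed table:

    import sqlite3

    db = sqlite3.connect(':memory:')
    db.execute('create table feed (id text primary key, date float, read int)')

    def record(entry_id, date):
        row = db.execute('select date from feed where id=?',
                         (entry_id,)).fetchone()
        if row is not None and row[0] == date:
            return 'already downloaded'      # same version, skip it
        if row is not None:
            # The entry changed since the last update: surface it again.
            db.execute('update feed set date=?, read=0 where id=?',
                       (date, entry_id))
            return 'updated'
        db.execute('insert into feed (id, date, read) values (?, ?, 0)',
                   (entry_id, date))
        return 'new'

    print(record('abc', 1.0))   # new
    print(record('abc', 1.0))   # already downloaded
    print(record('abc', 2.0))   # updated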