Replace use of print with Python's logging infrastructure.
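Each module now creates a per-module logger with logging.getLogger(__name__)
and reports through it instead of printing.  A minimal sketch of how an
application entry point could be configured so these records reach stderr
(the configuration below is illustrative only and not part of this patch):

    import logging

    # One-time root-logger setup; the module-level loggers created with
    # logging.getLogger(__name__) (e.g. in rss_sqlite.py) inherit it.
    logging.basicConfig(
        level=logging.DEBUG,  # include logger.debug() output while testing
        format="%(asctime)s %(levelname)s %(name)s: %(message)s")

    logger = logging.getLogger(__name__)
    logger.debug("logging configured")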
diff --git a/src/rss_sqlite.py b/src/rss_sqlite.py
index ecc2ed0..e989a1a 100644
--- a/src/rss_sqlite.py
+++ b/src/rss_sqlite.py
@@ -46,6 +46,8 @@ import mainthread
 from httpprogresshandler import HTTPProgressHandler
 import random
 import sys
+import logging
+logger = logging.getLogger(__name__)
 
 def getId(string):
     return md5.new(string).hexdigest()
@@ -106,15 +108,14 @@ class Feed:
                 f.close()
                 outf.close()
             except (urllib2.HTTPError, urllib2.URLError, IOError), exception:
-                print ("Could not download image %s: %s"
-                       % (abs_url, str (exception)))
+                logger.info("Could not download image %s: %s"
+                            % (abs_url, str (exception)))
                 return None
             except:
                 exception = sys.exc_info()[0]
 
-                print "Downloading image: %s" % abs_url
-                traceback.print_exc()
-
+                logger.info("Downloading image %s: %s" %
+                            (abs_url, traceback.format_exc()))
                 try:
                     remove(filename)
                 except OSError:
@@ -187,8 +188,9 @@ class Feed:
                         new_objects=len (tmp.entries),
                         objects_inline=len (tmp.entries))
                 except KeyError:
-                    print "Failed to register update with woodchuck!"
-                    pass
+                    logger.warning(
+                        "Failed to register update of %s with woodchuck!"
+                        % (self.key))
     
             http_status = tmp.get ('status', 200)
     
@@ -199,20 +201,21 @@ class Feed:
             # parse fails.  But really, everything went great!  Check for
             # this first.
             if http_status == 304:
-                print "%s: No changes to feed." % (self.key,)
+                logger.debug("%s: No changes to feed." % (self.key,))
                 mainthread.execute (wc_success, async=True)
                 success = True
             elif len(tmp["entries"])==0 and not tmp.version:
                 # An error occured fetching or parsing the feed.  (Version
                 # will be either None if e.g. the connection timed our or
                 # '' if the data is not a proper feed)
-                print ("Error fetching %s: version is: %s: error: %s"
-                       % (url, str (tmp.version),
-                          str (tmp.get ('bozo_exception', 'Unknown error'))))
-                print tmp
+                logger.error(
+                    "Error fetching %s: version is: %s: error: %s"
+                    % (url, str (tmp.version),
+                       str (tmp.get ('bozo_exception', 'Unknown error'))))
+                logger.debug(tmp)
                 if have_woodchuck:
                     def e():
-                        print "%s: stream update failed!" % self.key
+                        logger.debug("%s: stream update failed!" % self.key)
     
                         try:
                             # It's not easy to get the feed's title from here.
@@ -251,8 +254,8 @@ class Feed:
                    outf.close()
                    del data
                except (urllib2.HTTPError, urllib2.URLError), exception:
-                   print ("Could not download favicon %s: %s"
-                          % (abs_url, str (exception)))
+                   logger.debug("Could not download favicon %s: %s"
+                                % (abs_url, str (exception)))
     
                self.serial_execution_lock.acquire ()
                have_serial_execution_lock = True
@@ -317,9 +320,8 @@ class Feed:
                                 try:
                                     object_size += os.path.getsize (filename)
                                 except os.error, exception:
-                                    print ("Error getting size of %s: %s"
-                                           % (filename, exception))
-                                    pass
+                                    logger.error ("Error getting size of %s: %s"
+                                                  % (filename, exception))
                        self.serial_execution_lock.acquire ()
                        have_serial_execution_lock = True
     
@@ -379,11 +381,12 @@ class Feed:
                        mainthread.execute(e, async=True)
                self.db.commit()
 
-               print ("%s: Update successful: transferred: %d/%d; objects: %d)"
-                      % (self.key,
-                         progress_handler.stats['sent'],
-                         progress_handler.stats['received'],
-                         len (tmp.entries)))
+               logger.debug (
+                   "%s: Update successful: transferred: %d/%d; objects: %d)"
+                   % (self.key,
+                      progress_handler.stats['sent'],
+                      progress_handler.stats['received'],
+                      len (tmp.entries)))
                mainthread.execute (wc_success, async=True)
                success = True
 
@@ -415,13 +418,13 @@ class Feed:
                         #
                     except OSError, exception:
                         #
-                        print 'Could not remove %s: %s' % (file, str (exception))
-            print ("updated %s: %fs in download, %fs in processing"
-                   % (self.key, download_duration,
-                      time.time () - process_start))
+                        logger.error('Could not remove %s: %s'
+                                     % (file, str (exception)))
+            logger.debug("updated %s: %fs in download, %fs in processing"
+                         % (self.key, download_duration,
+                            time.time () - process_start))
         except:
-            print "Updating %s: %s" % (self.key, sys.exc_info()[0])
-            traceback.print_exc()
+            logger.error("Updating %s: %s" % (self.key, traceback.format_exc()))
         finally:
             self.db.commit ()
 
@@ -436,9 +439,9 @@ class Feed:
                 rows = self.db.execute("SELECT MAX(date) FROM feed;")
                 for row in rows:
                     updateTime=row[0]
-            except:
-                print "Fetching update time."
-                traceback.print_exc()
+            except Exception, e:
+                logger.error("Fetching update time: %s: %s"
+                             % (str(e), traceback.format_exc()))
             finally:
                 if not success:
                     etag = None
@@ -608,7 +611,7 @@ class Feed:
             try:
                 remove(contentLink)
             except OSError, exception:
-                print "Deleting %s: %s" % (contentLink, str (exception))
+                logger.error("Deleting %s: %s" % (contentLink, str (exception)))
         self.db.execute("DELETE FROM feed WHERE id=?;", (id,) )
         self.db.execute("DELETE FROM images WHERE id=?;", (id,) )
         self.db.commit()
@@ -732,8 +735,9 @@ class Listing:
                 # XXX: We should also check whether the list of
                 # articles/objects in each feed/stream is up to date.
                 if key not in stream_ids:
-                    print ("Registering previously unknown channel: %s (%s)"
-                           % (key, title,))
+                    logger.debug(
+                        "Registering previously unknown channel: %s (%s)"
+                        % (key, title,))
                     # Use a default refresh interval of 6 hours.
                     wc().stream_register (key, title, 6 * 60 * 60)
                 else:
@@ -745,7 +749,7 @@ class Listing:
 
             # Unregister any streams that are no longer subscribed to.
             for id in stream_ids:
-                print ("Unregistering %s" % (id,))
+                logger.debug("Unregistering %s" % (id,))
                 w.stream_unregister (id)
 
     def importOldFormatFeeds(self):
@@ -785,7 +789,8 @@ class Listing:
                             pass
                 self.updateUnread(id)
             except:
-                traceback.print_exc()
+                logger.error("importOldFormatFeeds: %s"
+                             % (traceback.format_exc(),))
         remove(self.configdir+"feeds.pickle")
                 
         
@@ -861,8 +866,7 @@ class Listing:
             try:
                 wc()[key].human_readable_name = title
             except KeyError:
-                print "Feed %s (%s) unknown." % (key, title)
-                pass
+                logger.debug("Feed %s (%s) unknown." % (key, title))
         
     def getFeedUpdateTime(self, key):
         return time.ctime(self.db.execute("SELECT updateTime FROM feeds WHERE id=?;", (key,)).fetchone()[0])
@@ -984,7 +988,7 @@ class Listing:
             try:
                 del wc()[key]
             except KeyError:
-                print "Removing unregistered feed %s failed" % (key,)
+                logger.debug("Removing unregistered feed %s failed" % (key,))
 
         rank = self.db.execute("SELECT rank FROM feeds WHERE id=?;", (key,) ).fetchone()[0]
         self.db.execute("DELETE FROM feeds WHERE id=?;", (key, ))