from httpprogresshandler import HTTPProgressHandler
import random
import sys
+import logging
+logger = logging.getLogger(__name__)
def getId(string):
    """Return the hex MD5 digest of *string*, for use as a stable identifier.

    Accepts either a byte string (hashed as-is, matching the historical
    ``md5.new`` behavior) or a text string (encoded as UTF-8 first).

    The original implementation called ``md5.new(string)``: the ``md5``
    module has been deprecated since Python 2.5 (removed in 3.x) and is
    not imported by this file, so that call would raise NameError.
    ``hashlib.md5`` is the drop-in replacement.
    """
    import hashlib
    if isinstance(string, bytes):
        data = string
    else:
        data = string.encode('utf-8')
    return hashlib.md5(data).hexdigest()
f.close()
outf.close()
except (urllib2.HTTPError, urllib2.URLError, IOError), exception:
- print ("Could not download image %s: %s"
- % (abs_url, str (exception)))
+ logger.info("Could not download image %s: %s"
+ % (abs_url, str (exception)))
return None
except:
exception = sys.exc_info()[0]
- print "Downloading image: %s" % abs_url
- traceback.print_exc()
-
+ logger.info("Downloading image %s: %s" %
+ (abs_url, traceback.format_exc()))
try:
remove(filename)
except OSError:
new_objects=len (tmp.entries),
objects_inline=len (tmp.entries))
except KeyError:
- print "Failed to register update with woodchuck!"
- pass
+ logger.warn(
+ "Failed to register update of %s with woodchuck!"
+ % (self.key))
http_status = tmp.get ('status', 200)
# parse fails. But really, everything went great! Check for
# this first.
if http_status == 304:
- print "%s: No changes to feed." % (self.key,)
+ logger.debug("%s: No changes to feed." % (self.key,))
mainthread.execute (wc_success, async=True)
success = True
elif len(tmp["entries"])==0 and not tmp.version:
# An error occured fetching or parsing the feed. (Version
# will be either None if e.g. the connection timed our or
# '' if the data is not a proper feed)
- print ("Error fetching %s: version is: %s: error: %s"
- % (url, str (tmp.version),
- str (tmp.get ('bozo_exception', 'Unknown error'))))
- print tmp
+ logger.error(
+ "Error fetching %s: version is: %s: error: %s"
+ % (url, str (tmp.version),
+ str (tmp.get ('bozo_exception', 'Unknown error'))))
+ logger.debug(tmp)
if have_woodchuck:
def e():
- print "%s: stream update failed!" % self.key
+ logger.debug("%s: stream update failed!" % self.key)
try:
# It's not easy to get the feed's title from here.
outf.close()
del data
except (urllib2.HTTPError, urllib2.URLError), exception:
- print ("Could not download favicon %s: %s"
- % (abs_url, str (exception)))
+ logger.debug("Could not download favicon %s: %s"
+ % (abs_url, str (exception)))
self.serial_execution_lock.acquire ()
have_serial_execution_lock = True
try:
object_size += os.path.getsize (filename)
except os.error, exception:
- print ("Error getting size of %s: %s"
- % (filename, exception))
- pass
+ logger.error ("Error getting size of %s: %s"
+ % (filename, exception))
self.serial_execution_lock.acquire ()
have_serial_execution_lock = True
mainthread.execute(e, async=True)
self.db.commit()
- print ("%s: Update successful: transferred: %d/%d; objects: %d)"
- % (self.key,
- progress_handler.stats['sent'],
- progress_handler.stats['received'],
- len (tmp.entries)))
+ logger.debug (
+ "%s: Update successful: transferred: %d/%d; objects: %d)"
+ % (self.key,
+ progress_handler.stats['sent'],
+ progress_handler.stats['received'],
+ len (tmp.entries)))
mainthread.execute (wc_success, async=True)
success = True
#
except OSError, exception:
#
- print 'Could not remove %s: %s' % (file, str (exception))
- print ("updated %s: %fs in download, %fs in processing"
- % (self.key, download_duration,
- time.time () - process_start))
+ logger.error('Could not remove %s: %s'
+ % (file, str (exception)))
+ logger.debug("updated %s: %fs in download, %fs in processing"
+ % (self.key, download_duration,
+ time.time () - process_start))
except:
- print "Updating %s: %s" % (self.key, sys.exc_info()[0])
- traceback.print_exc()
+ logger.error("Updating %s: %s" % (self.key, traceback.format_exc()))
finally:
self.db.commit ()
rows = self.db.execute("SELECT MAX(date) FROM feed;")
for row in rows:
updateTime=row[0]
- except:
- print "Fetching update time."
- traceback.print_exc()
+ except Exception, e:
+ logger.error("Fetching update time: %s: %s"
+ % (str(e), traceback.format_exc()))
finally:
if not success:
etag = None
try:
remove(contentLink)
except OSError, exception:
- print "Deleting %s: %s" % (contentLink, str (exception))
+ logger.error("Deleting %s: %s" % (contentLink, str (exception)))
self.db.execute("DELETE FROM feed WHERE id=?;", (id,) )
self.db.execute("DELETE FROM images WHERE id=?;", (id,) )
self.db.commit()
# XXX: We should also check whether the list of
# articles/objects in each feed/stream is up to date.
if key not in stream_ids:
- print ("Registering previously unknown channel: %s (%s)"
- % (key, title,))
+ logger.debug(
+ "Registering previously unknown channel: %s (%s)"
+ % (key, title,))
# Use a default refresh interval of 6 hours.
wc().stream_register (key, title, 6 * 60 * 60)
else:
# Unregister any streams that are no longer subscribed to.
for id in stream_ids:
- print ("Unregistering %s" % (id,))
+ logger.debug("Unregistering %s" % (id,))
w.stream_unregister (id)
def importOldFormatFeeds(self):
pass
self.updateUnread(id)
except:
- traceback.print_exc()
+ logger.error("importOldFormatFeeds: %s"
+ % (traceback.format_exc(),))
remove(self.configdir+"feeds.pickle")
try:
wc()[key].human_readable_name = title
except KeyError:
- print "Feed %s (%s) unknown." % (key, title)
- pass
+ logger.debug("Feed %s (%s) unknown." % (key, title))
def getFeedUpdateTime(self, key):
return time.ctime(self.db.execute("SELECT updateTime FROM feeds WHERE id=?;", (key,)).fetchone()[0])
try:
del wc()[key]
except KeyError:
- print "Removing unregistered feed %s failed" % (key,)
+ logger.debug("Removing unregistered feed %s failed" % (key,))
rank = self.db.execute("SELECT rank FROM feeds WHERE id=?;", (key,) ).fetchone()[0]
self.db.execute("DELETE FROM feeds WHERE id=?;", (key, ))