install src/jobmanager.py ${DESTDIR}/opt/FeedingIt
install src/httpprogresshandler.py ${DESTDIR}/opt/FeedingIt
install src/wc.py ${DESTDIR}/opt/FeedingIt
+ install src/debugging.py ${DESTDIR}/opt/FeedingIt
install -d ${DESTDIR}/usr/share/applications/hildon
install src/FeedingIt.desktop ${DESTDIR}/usr/share/applications/hildon
install -d ${DESTDIR}/usr/share/icons/hicolor/48x48/apps/
from os.path import isfile, isdir, exists
from os import mkdir, remove, stat, environ
+import logging
+logger = logging.getLogger(__name__)
+
import debugging
debugging.init(dot_directory=".feedingit", program_name="feedingit-web")
try:
mkdir(CONFIGDIR)
except:
- print "Error: Can't create configuration directory"
+ logger.error("Error: Can't create configuration directory")
from sys import exit
exit(1)
from cgi import escape
import weakref
import debugging
+import logging
+logger = logging.getLogger(__name__)
from rss_sqlite import Listing
from opml import GetOpmlData, ExportOpmlData
try:
self.orientation = FremantleRotation(__appname__, main_window=self.window, app=self)
self.orientation.set_mode(self.config.getOrientation())
- except:
- print "Could not start rotation manager"
+ except Exception, e:
+ logger.warn("Could not start rotation manager: %s" % str(e))
menu = hildon.AppMenu()
# Create a button and add it to the menu
self.updateDbusHandler.ArticleCountUpdated()
if not self.had_interaction and 'DBUS_STARTER_ADDRESS' in environ:
- print "Update complete. No interaction, started by dbus: quitting."
+ logger.info(
+ "Update complete. No interaction, started by dbus: quitting.")
self.quit()
def stop_running_update(self, button):
self.stopButton.set_sensitive(False)
try:
mkdir(CONFIGDIR)
except:
- print "Error: Can't create configuration directory"
+ logger.error("Error: Can't create configuration directory")
from sys import exit
exit(1)
app = FeedingIt()
from gconf import client_get_default
from urllib2 import ProxyHandler
from mainthread import mainthread
+import logging
+logger = logging.getLogger(__name__)
VERSION = "52"
# When the dialog is closed without hitting
# the "Save" button, restore the configuration
if self.do_restore_backup:
- print 'Restoring configuration'
+ logger.debug('Restoring configuration')
self.config = self.config_backup
self.saveConfig()
import urllib2
import httplib
import time
+import logging
+logger = logging.getLogger(__name__)
class ProgressSocket(object):
"""
self.connection = connection
def __getattribute__(self, attr):
- # print "%s.__getattribute__(%s)" % (self.__class__.__name__, attr)
+ # logger.debug("%s.__getattribute__(%s)"
+ # % (self.__class__.__name__, attr))
def send(data):
# 100k at a time.
cstats = connection.stats
ostats = connection.opener.stats
- print (("%s: connection: %d sent, %d received: %d kb/s; "
- + "opener: %d sent, %d received, %d kb/s")
- % (req,
- cstats['sent'], cstats['received'],
- ((cstats['sent'] + cstats['received'])
- / (time.time() - cstats['started']) / 1024),
- ostats['sent'], ostats['received'],
- ((ostats['sent'] + ostats['received'])
- / (time.time() - ostats['started']) / 1024)))
+            logger.debug(
+ ("%s: connection: %d sent, %d received: %d kb/s; "
+ + "opener: %d sent, %d received, %d kb/s")
+ % (req,
+ cstats['sent'], cstats['received'],
+ ((cstats['sent'] + cstats['received'])
+ / (time.time() - cstats['started']) / 1024),
+ ostats['sent'], ostats['received'],
+ ((ostats['sent'] + ostats['received'])
+ / (time.time() - ostats['started']) / 1024)))
opener = urllib2.build_opener(HTTPProgressHandler(callback))
import heapq
import sys
import mainthread
+import logging
+logger = logging.getLogger(__name__)
def debug(*args):
if False:
- sys.stdout.write(*args)
- sys.stdout.write("\n")
+        logger.debug(' '.join(str(a) for a in args))
# The default priority. Like nice(), a smaller numeric priority
# corresponds to a higher priority class.
import threading
import traceback
+import logging
+logger = logging.getLogger(__name__)
_run_in_main_thread = None
_main_thread = None
try:
func (*args, **kwargs)
except:
- print ("mainthread.execute: Executing %s: %s"
- % (func, traceback.format_exc ()))
+ logger.debug("mainthread.execute: Executing %s: %s"
+ % (func, traceback.format_exc ()))
return
else:
return func (*args, **kwargs)
try:
result['result'] = func (*args, **kwargs)
except:
- print ("mainthread.execute: Executing %s: %s"
- % (func, traceback.format_exc ()))
+ logger.debug("mainthread.execute: Executing %s: %s"
+ % (func, traceback.format_exc ()))
if not async:
cond.acquire ()
import time
from os.path import isfile, dirname
import gobject
+import logging
+logger = logging.getLogger(__name__)
class ExportOpmlData():
def __init__(self, parent, listing):
dialog.hide()
if response == gtk.RESPONSE_OK:
filename = dialog.get_filename()
- print filename
+ logger.debug("ExportOpmlData: %s" % filename)
#try:
cont = True
def showOpmlData(widget, parent, button):
dialog = GetOpmlData(parent)
- print dialog.getData()
+ logger.debug("showOpmlData: %s" % dialog.getData())
#dialog.destroy()
if __name__ == "__main__":
import urllib2
from BeautifulSoup import BeautifulSoup
from urlparse import urljoin
+import logging
+logger = logging.getLogger(__name__)
#CONFIGDIR="/home/user/.feedingit/"
f.close()
outf.close()
except:
- print "Could not download " + url
+ logger.error("Could not download " + url)
else:
#open(filename,"a").close() # "Touch" the file
file = open(filename,"a")
img['src']=filename
tmpEntry["images"].append(filename)
except:
- print "Error downloading image %s" % img
+ logger.error("Error downloading image %s" % img)
tmpEntry["contentLink"] = configdir+self.uniqueId+".d/"+id+".html"
file = open(tmpEntry["contentLink"], "w")
file.write(soup.prettify())
tmpEntries[entryId] = self.entries[entryId]
tmpIds.append(entryId)
except:
- print "Error purging old articles %s" % entryId
+ logger.error("Error purging old articles %s" % entryId)
self.removeEntry(entryId)
self.entries = tmpEntries
#
except OSError:
#
- print 'Could not remove', file
+ logger.error('Could not remove %s' % file)
def extractContent(self, entry):
try:
remove(entry["contentLink"]) #os.remove
except:
- print "File not found for deletion: %s" % entry["contentLink"]
+ logger.error("File not found for deletion: %s"
+ % entry["contentLink"])
del self.entries[id]
else:
- print "Entries has no %s key" % id
+ logger.error("Entries has no %s key" % id)
if id in self.ids:
self.ids.remove(id)
else:
- print "Ids has no %s key" % id
+ logger.error("Ids has no %s key" % id)
if self.readItems.has_key(id):
if self.readItems[id]==False:
self.countUnread = self.countUnread - 1
del self.readItems[id]
else:
- print "ReadItems has no %s key" % id
+ logger.error("ReadItems has no %s key" % id)
#except:
# print "Error removing entry %s" %id
except:
# If the feed file gets corrupted, we need to reset the feed.
import traceback
- traceback.print_exc()
+ logger.error("getFeed: %s" % traceback.format_exc())
import dbus
bus = dbus.SessionBus()
remote_object = bus.get_object("org.freedesktop.Notifications", # Connection name
from httpprogresshandler import HTTPProgressHandler
import random
import sys
+import logging
+logger = logging.getLogger(__name__)
def getId(string):
return md5.new(string).hexdigest()
f.close()
outf.close()
except (urllib2.HTTPError, urllib2.URLError, IOError), exception:
- print ("Could not download image %s: %s"
- % (abs_url, str (exception)))
+ logger.info("Could not download image %s: %s"
+ % (abs_url, str (exception)))
return None
except:
exception = sys.exc_info()[0]
- print "Downloading image: %s" % abs_url
- traceback.print_exc()
-
+            logger.info("Error downloading image %s: %s" %
+ (abs_url, traceback.format_exc()))
try:
remove(filename)
except OSError:
new_objects=len (tmp.entries),
objects_inline=len (tmp.entries))
except KeyError:
- print "Failed to register update with woodchuck!"
- pass
+ logger.warn(
+ "Failed to register update of %s with woodchuck!"
+ % (self.key))
http_status = tmp.get ('status', 200)
# parse fails. But really, everything went great! Check for
# this first.
if http_status == 304:
- print "%s: No changes to feed." % (self.key,)
+ logger.debug("%s: No changes to feed." % (self.key,))
mainthread.execute (wc_success, async=True)
success = True
elif len(tmp["entries"])==0 and not tmp.version:
# An error occured fetching or parsing the feed. (Version
# will be either None if e.g. the connection timed our or
# '' if the data is not a proper feed)
- print ("Error fetching %s: version is: %s: error: %s"
- % (url, str (tmp.version),
- str (tmp.get ('bozo_exception', 'Unknown error'))))
- print tmp
+ logger.error(
+ "Error fetching %s: version is: %s: error: %s"
+ % (url, str (tmp.version),
+ str (tmp.get ('bozo_exception', 'Unknown error'))))
+ logger.debug(tmp)
if have_woodchuck:
def e():
- print "%s: stream update failed!" % self.key
+ logger.debug("%s: stream update failed!" % self.key)
try:
# It's not easy to get the feed's title from here.
outf.close()
del data
except (urllib2.HTTPError, urllib2.URLError), exception:
- print ("Could not download favicon %s: %s"
- % (abs_url, str (exception)))
+ logger.debug("Could not download favicon %s: %s"
+ % (abs_url, str (exception)))
self.serial_execution_lock.acquire ()
have_serial_execution_lock = True
try:
object_size += os.path.getsize (filename)
except os.error, exception:
- print ("Error getting size of %s: %s"
- % (filename, exception))
- pass
+ logger.error ("Error getting size of %s: %s"
+ % (filename, exception))
self.serial_execution_lock.acquire ()
have_serial_execution_lock = True
mainthread.execute(e, async=True)
self.db.commit()
- print ("%s: Update successful: transferred: %d/%d; objects: %d)"
- % (self.key,
- progress_handler.stats['sent'],
- progress_handler.stats['received'],
- len (tmp.entries)))
+ logger.debug (
+ "%s: Update successful: transferred: %d/%d; objects: %d)"
+ % (self.key,
+ progress_handler.stats['sent'],
+ progress_handler.stats['received'],
+ len (tmp.entries)))
mainthread.execute (wc_success, async=True)
success = True
#
except OSError, exception:
#
- print 'Could not remove %s: %s' % (file, str (exception))
- print ("updated %s: %fs in download, %fs in processing"
- % (self.key, download_duration,
- time.time () - process_start))
+ logger.error('Could not remove %s: %s'
+ % (file, str (exception)))
+ logger.debug("updated %s: %fs in download, %fs in processing"
+ % (self.key, download_duration,
+ time.time () - process_start))
except:
- print "Updating %s: %s" % (self.key, sys.exc_info()[0])
- traceback.print_exc()
+ logger.error("Updating %s: %s" % (self.key, traceback.format_exc()))
finally:
self.db.commit ()
rows = self.db.execute("SELECT MAX(date) FROM feed;")
for row in rows:
updateTime=row[0]
- except:
- print "Fetching update time."
- traceback.print_exc()
+ except Exception, e:
+ logger.error("Fetching update time: %s: %s"
+ % (str(e), traceback.format_exc()))
finally:
if not success:
etag = None
try:
remove(contentLink)
except OSError, exception:
- print "Deleting %s: %s" % (contentLink, str (exception))
+ logger.error("Deleting %s: %s" % (contentLink, str (exception)))
self.db.execute("DELETE FROM feed WHERE id=?;", (id,) )
self.db.execute("DELETE FROM images WHERE id=?;", (id,) )
self.db.commit()
# XXX: We should also check whether the list of
# articles/objects in each feed/stream is up to date.
if key not in stream_ids:
- print ("Registering previously unknown channel: %s (%s)"
- % (key, title,))
+ logger.debug(
+ "Registering previously unknown channel: %s (%s)"
+ % (key, title,))
# Use a default refresh interval of 6 hours.
wc().stream_register (key, title, 6 * 60 * 60)
else:
# Unregister any streams that are no longer subscribed to.
for id in stream_ids:
- print ("Unregistering %s" % (id,))
+ logger.debug("Unregistering %s" % (id,))
w.stream_unregister (id)
def importOldFormatFeeds(self):
pass
self.updateUnread(id)
except:
- traceback.print_exc()
+ logger.error("importOldFormatFeeds: %s"
+ % (traceback.format_exc(),))
remove(self.configdir+"feeds.pickle")
try:
wc()[key].human_readable_name = title
except KeyError:
- print "Feed %s (%s) unknown." % (key, title)
- pass
+ logger.debug("Feed %s (%s) unknown." % (key, title))
def getFeedUpdateTime(self, key):
return time.ctime(self.db.execute("SELECT updateTime FROM feeds WHERE id=?;", (key,)).fetchone()[0])
try:
del wc()[key]
except KeyError:
- print "Removing unregistered feed %s failed" % (key,)
+ logger.debug("Removing unregistered feed %s failed" % (key,))
rank = self.db.execute("SELECT rank FROM feeds WHERE id=?;", (key,) ).fetchone()[0]
self.db.execute("DELETE FROM feeds WHERE id=?;", (key, ))
from jobmanager import JobManager
import mainthread
+import logging
+logger = logging.getLogger(__name__)
import debugging
debugging.init(dot_directory=".feedingit", program_name="update_feeds")
mainloop.quit()
def stopUpdate(self):
- print "Stop update called."
+ logger.info("Stop update called.")
JobManager().quit()
import dbus.mainloop.glib
try:
mainloop.run()
except KeyboardInterrupt:
- print "Interrupted. Quitting."
+ logger.error("Interrupted. Quitting.")
JobManager().quit()
del app_lock
else:
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
+import logging
+logger = logging.getLogger(__name__)
import traceback
# Don't fail if the Woodchuck modules are not available. Just disable
from pywoodchuck import PyWoodchuck
from pywoodchuck import woodchuck
except ImportError, exception:
- print ("Unable to load Woodchuck modules: disabling Woodchuck support: %s"
- % traceback.format_exc ())
+ logger.info(
+ "Unable to load Woodchuck modules: disabling Woodchuck support: %s"
+ % traceback.format_exc ())
woodchuck_imported = False
class PyWoodchuck (object):
def available(self):
# Woodchuck upcalls.
def stream_update_cb(self, stream):
- print ("stream update called on %s (%s)"
- % (stream.human_readable_name, stream.identifier,))
+ logger.debug("stream update called on %s (%s)"
+ % (stream.human_readable_name, stream.identifier,))
# Make sure no one else is concurrently updating this
# feed.
try:
self.listing.updateFeed(stream.identifier)
except:
- print ("Updating %s: %s"
- % (stream.identifier, traceback.format_exc ()))
+ logger.debug("Updating %s: %s"
+ % (stream.identifier, traceback.format_exc ()))
def object_transfer_cb(self, stream, object,
version, filename, quality):
_w = mywoodchuck (listing, "FeedingIt", "org.maemo.feedingit")
if not woodchuck_imported or not _w.available ():
- print "Unable to contact Woodchuck server."
+ logger.info("Unable to contact Woodchuck server.")
else:
- print "Woodchuck appears to be available."
+ logger.debug("Woodchuck appears to be available.")
def wc():
"""Connect to the woodchuck server and initialize any state."""