1 #!/usr/bin/env python2.5
4 # Copyright (c) 2007-2008 INdT.
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Lesser General Public License as published by
7 # the Free Software Foundation, either version 3 of the License, or
8 # (at your option) any later version.
10 # This program is distributed in the hope that it will be useful,
11 # but WITHOUT ANY WARRANTY; without even the implied warranty of
12 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
13 # GNU Lesser General Public License for more details.
15 # You should have received a copy of the GNU Lesser General Public License
16 # along with this program. If not, see <http://www.gnu.org/licenses/>.
19 # ============================================================================
21 # Author : Yves Marcoz
23 # Description : Simple RSS Reader
24 # ============================================================================
26 from os.path import isfile
27 from os.path import isdir
#CONFIGDIR="/home/user/.feedingit/"
# NOTE(review): the `def getId(string):` line is missing from this chunk —
# the return below is the body of the module-level getId() helper, which
# hashes a string into a stable hex key used for feed/article ids.
# md5 is the legacy Python 2 module (hashlib is the modern replacement).
    return md5.new(string).hexdigest()
    # Constructor for a single article object.
    # NOTE(review): the title/date/link assignment lines are missing from
    # this chunk; only the content assignment is visible.
    def __init__(self, title, content, date, link):
        # Raw article body (HTML string from the feed).
        self.content = content
    # Entry = {"title":XXX, "content":XXX, "date":XXX, "link":XXX, images = [] }
    # Feed constructor.
    # NOTE(review): most of the body is missing from this chunk — presumably
    # name/url/entries/readItems/countUnread initialisation; only the
    # last-update marker is visible.
    def __init__(self, name, url):
        # Human-readable time of the last successful refresh.
        self.updateTime = "Never"
    # NOTE(review): editFeed's body is missing from this chunk (presumably
    # `self.url = url`).
    def editFeed(self, url):
    # NOTE(review): saveFeed's body is missing from this chunk; the second
    # Feed definition below shows the full pickle-to-configdir implementation.
    def saveFeed(self, configdir):
    def updateFeed(self, configdir, expiryTime=24):
        # Refresh this feed from self.url, then keep still-fresh old entries.
        # NOTE(review): several lines are missing from this chunk — at least
        # the `tmpIds = []` initialisation and (likely) a guard that only
        # re-appends old entries whose id is not already in tmpIds.
        # Expiry time is in hours
        tmp=feedparser.parse(self.url)
        # Check if the parse was successful (number of entries > 0, else do nothing)
        if len(tmp["entries"])>0:
            #reversedEntries = self.getEntries()
            #reversedEntries.reverse()
            # First pass: collect unique ids of the freshly parsed entries.
            for entry in tmp["entries"]:
                # NOTE(review): `images = []` inside a dict literal is a
                # syntax error as shown; the original presumably read
                # `"images":[]`. tmpEntry is also never used afterwards here.
                tmpEntry = {"title":entry["title"], "content":self.extractContent(entry),
                            "date":self.extractDate(entry), "link":entry["link"], images = [] }
                tmpIds.append(self.getUniqueId(-1, entry))
            # Second pass: carry over old entries younger than the expiry window.
            for entry in self.getEntries():
                currentTime = time.time()
                expiry = float(expiryTime) * 3600.
                if entry.has_key("updated_parsed"):
                    articleTime = time.mktime(entry["updated_parsed"])
                    if currentTime - articleTime < expiry:
                        id = self.getUniqueId(-1, entry)
                        tmp["entries"].append(entry)
            self.entries = tmp["entries"]
            # Initialize the new articles to unread
            tmpReadItems = self.readItems
            # NOTE(review): a `self.readItems = {}` / `self.countUnread = 0`
            # reset and an `else:` between the two assignments below appear
            # to be missing; as shown the assignments conflict.
            for index in range(self.getNumberOfEntries()):
                if not tmpReadItems.has_key(self.getUniqueId(index)):
                    self.readItems[self.getUniqueId(index)] = False
                    self.readItems[self.getUniqueId(index)] = tmpReadItems[self.getUniqueId(index)]
                if self.readItems[self.getUniqueId(index)]==False:
                    self.countUnread = self.countUnread + 1
            self.updateTime = time.asctime()
        self.saveFeed(configdir)
    def extractContent(self, entry):
        # Pick the longest available body text for a parsed entry, preferring
        # 'content' over 'summary'.
        # NOTE(review): the `else:` before the description fallback and the
        # final `return content` are missing from this chunk — as shown the
        # last line would clobber the earlier choices.
        if entry.has_key('summary'):
            content = entry.get('summary', '')
        if entry.has_key('content'):
            if len(entry.content[0].value) > len(content):
                content = entry.content[0].value
        content = entry.get('description', '')
    def extractDate(self, entry):
        # Format the entry timestamp, preferring feedparser's
        # 'updated_parsed' over 'published_parsed'.
        # NOTE(review): the fallback else-branch and the `return date` are
        # missing from this chunk.
        if entry.has_key("updated_parsed"):
            date = time.strftime("%a, %d %b %Y %H:%M:%S",entry["updated_parsed"])
        elif entry.has_key("published_parsed"):
            date = time.strftime("%a, %d %b %Y %H:%M:%S", entry["published_parsed"])
121 def setEntryRead(self, index):
122 if self.readItems[self.getUniqueId(index)]==False:
123 self.countUnread = self.countUnread - 1
124 self.readItems[self.getUniqueId(index)] = True
126 def setEntryUnread(self, index):
127 if self.readItems[self.getUniqueId(index)]==True:
128 self.countUnread = self.countUnread + 1
129 self.readItems[self.getUniqueId(index)] = False
131 def isEntryRead(self, index):
132 return self.readItems[self.getUniqueId(index)]
134 def getTitle(self, index):
135 return self.entries[index]["title"]
137 def getLink(self, index):
138 return self.entries[index]["link"]
    # NOTE(review): getDate's body is missing from this chunk.
    def getDate(self, index):
    def getUniqueId(self, index, entry=None):
        # Build a stable id for an entry: hash of timestamp+title, else
        # link+title, else title alone (via module-level getId()).
        # NOTE(review): the guard that skips the lookup when *entry* is
        # already supplied (likely `if entry == None:`) and an `else:`
        # before the final return are missing from this chunk.
        entry = self.entries[index]
        if entry.has_key("updated_parsed"):
            return getId(time.strftime("%a, %d %b %Y %H:%M:%S",entry["updated_parsed"]) + entry["title"])
        elif entry.has_key("link"):
            return getId(entry["link"] + entry["title"])
        return getId(entry["title"])
152 def getUpdateTime(self):
153 return self.updateTime
    # NOTE(review): getEntries' body is missing from this chunk (presumably
    # `return self.entries`).
    def getEntries(self):
161 def getNumberOfUnreadItems(self):
162 return self.countUnread
164 def getNumberOfEntries(self):
165 return len(self.entries)
    def getItem(self, index):
        # Return the raw entry dict at *index*.
        # NOTE(review): one original line between the def and the return is
        # missing from this chunk.
        return self.entries[index]
    def getContent(self, index):
        # Longest available body text for the stored entry at *index*.
        # NOTE(review): lines are missing from this chunk — likely an `else:`
        # before the description fallback and the final `return content`.
        entry = self.entries[index]
        if entry.has_key('summary'):
            content = entry.get('summary', '')
        if entry.has_key('content'):
            if len(entry.content[0].value) > len(content):
                content = entry.content[0].value
        content = entry.get('description', '')
    # NOTE(review): getArticle's body is missing from this chunk; the second
    # Feed definition later in the file shows the full implementation.
    def getArticle(self, index):
# Contains all the info about a single feed (articles, ...), and expose the data
    def __init__(self, name, url):
        # NOTE(review): most of the constructor is missing from this chunk —
        # presumably name/url/entries/readItems/countUnread initialisation.
        self.updateTime = "Never"
    # NOTE(review): editFeed's body is missing from this chunk (presumably
    # `self.url = url`).
    def editFeed(self, url):
202 def saveFeed(self, configdir):
203 file = open(configdir+getId(self.name), "w")
204 pickle.dump(self, file )
    def updateFeed(self, configdir, expiryTime=24):
        # Refresh this feed from self.url, then keep still-fresh old entries.
        # NOTE(review): lines are missing from this chunk — at least the
        # `tmpIds = []` initialisation and (likely) a guard that only
        # re-appends old entries whose id is not already in tmpIds.
        # Expiry time is in hours
        tmp=feedparser.parse(self.url)
        # Check if the parse was successful (number of entries > 0, else do nothing)
        if len(tmp["entries"])>0:
            #reversedEntries = self.getEntries()
            #reversedEntries.reverse()
            # Collect the unique ids of the freshly parsed entries.
            for entry in tmp["entries"]:
                tmpIds.append(self.getUniqueId(-1, entry))
            # Carry over old entries younger than the expiry window.
            for entry in self.getEntries():
                currentTime = time.time()
                expiry = float(expiryTime) * 3600.
                if entry.has_key("updated_parsed"):
                    articleTime = time.mktime(entry["updated_parsed"])
                    if currentTime - articleTime < expiry:
                        id = self.getUniqueId(-1, entry)
                        tmp["entries"].append(entry)
            self.entries = tmp["entries"]
            # Initialize the new articles to unread
            tmpReadItems = self.readItems
            # NOTE(review): a `self.readItems = {}` / `self.countUnread = 0`
            # reset and an `else:` between the two assignments below appear
            # to be missing; as shown the assignments conflict.
            for index in range(self.getNumberOfEntries()):
                if not tmpReadItems.has_key(self.getUniqueId(index)):
                    self.readItems[self.getUniqueId(index)] = False
                    self.readItems[self.getUniqueId(index)] = tmpReadItems[self.getUniqueId(index)]
                if self.readItems[self.getUniqueId(index)]==False:
                    self.countUnread = self.countUnread + 1
            self.updateTime = time.asctime()
        self.saveFeed(configdir)
243 def setEntryRead(self, index):
244 if self.readItems[self.getUniqueId(index)]==False:
245 self.countUnread = self.countUnread - 1
246 self.readItems[self.getUniqueId(index)] = True
248 def setEntryUnread(self, index):
249 if self.readItems[self.getUniqueId(index)]==True:
250 self.countUnread = self.countUnread + 1
251 self.readItems[self.getUniqueId(index)] = False
253 def isEntryRead(self, index):
254 return self.readItems[self.getUniqueId(index)]
256 def getTitle(self, index):
257 return self.entries[index]["title"]
259 def getLink(self, index):
260 return self.entries[index]["link"]
    def getDate(self, index):
        # Return the entry's parsed timestamp, falling back to "now".
        # NOTE(review): the surrounding if/else lines (likely a
        # has_key("updated_parsed") check) are missing from this chunk —
        # as shown the second return is unreachable.
        return self.entries[index]["updated_parsed"]
        return time.localtime()
    def getUniqueId(self, index, entry=None):
        # Build a stable id for an entry: hash of timestamp+title, else
        # link+title, else title alone (via module-level getId()).
        # NOTE(review): the guard that skips the lookup when *entry* is
        # already supplied (likely `if entry == None:`) and an `else:`
        # before the final return are missing from this chunk.
        entry = self.entries[index]
        if entry.has_key("updated_parsed"):
            return getId(time.strftime("%a, %d %b %Y %H:%M:%S",entry["updated_parsed"]) + entry["title"])
        elif entry.has_key("link"):
            return getId(entry["link"] + entry["title"])
        return getId(entry["title"])
278 def getUpdateTime(self):
279 return self.updateTime
    # NOTE(review): getEntries' body is missing from this chunk (presumably
    # `return self.entries`).
    def getEntries(self):
287 def getNumberOfUnreadItems(self):
288 return self.countUnread
290 def getNumberOfEntries(self):
291 return len(self.entries)
    def getItem(self, index):
        # Return the raw entry dict at *index*.
        # NOTE(review): one original line between the def and the return is
        # missing from this chunk.
        return self.entries[index]
    def getContent(self, index):
        # Longest available body text for the stored entry at *index*.
        # NOTE(review): lines are missing from this chunk — likely an `else:`
        # before the description fallback and the final `return content`.
        entry = self.entries[index]
        if entry.has_key('summary'):
            content = entry.get('summary', '')
        if entry.has_key('content'):
            if len(entry.content[0].value) > len(content):
                content = entry.content[0].value
        content = entry.get('description', '')
    def getArticle(self, index):
        # Render the entry at *index* as a standalone XHTML page and mark it
        # read as a side effect.
        # NOTE(review): lines are missing from this chunk — at least the
        # else-branch giving `date` a default (as shown `date` may be unbound
        # below), the `text += content` line, and the final `return text`.
        self.setEntryRead(index)
        entry = self.entries[index]
        title = entry.get('title', 'No title')
        #content = entry.get('content', entry.get('summary_detail', {}))
        content = self.getContent(index)
        link = entry.get('link', 'NoLink')
        if entry.has_key("updated_parsed"):
            date = time.strftime("%a, %d %b %Y %H:%M:%S",entry["updated_parsed"])
        elif entry.has_key("published_parsed"):
            date = time.strftime("%a, %d %b %Y %H:%M:%S", entry["published_parsed"])
        #text = '''<div style="color: black; background-color: white;">'''
        text = '<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">'
        text += "<html><head><title>" + title + "</title>"
        text += '<meta http-equiv="Content-Type" content="text/html; charset=UTF-8"/>\n'
        text += '<style> body {-webkit-user-select: none;} </style></head>'
        text += '<body><div><a href=\"' + link + '\">' + title + "</a>"
        text += "<BR /><small><i>Date: " + date + "</i></small></div>"
        text += "<BR /><BR />"
        text += "</body></html>"
class ArchivedArticles(Feed):
    # Pseudo-feed whose entries are user-archived articles; updateFeed
    # downloads their full pages instead of parsing an RSS url.
    def addArchivedArticle(self, title, link, updated_parsed, configdir):
        # NOTE(review): the `entry = {}` initialisation and the
        # `entry["link"] = link` line are missing from this chunk.
        entry["title"] = title
        entry["downloaded"] = False
        entry["summary"] = '<a href=\"' + link + '\">' + title + "</a>"
        entry["updated_parsed"] = updated_parsed
        entry["time"] = time.time()
        self.entries.append(entry)
        self.readItems[self.getUniqueId(len(self.entries)-1)] = False
        self.countUnread = self.countUnread + 1
        self.saveFeed(configdir)

    def updateFeed(self, configdir, expiryTime=24):
        # Download the full page of each not-yet-downloaded article, and
        # drop articles older than the expiry window.
        # NOTE(review): lines are missing from this chunk — likely a
        # try/except around urlopen and an f.close(). Two visible problems
        # to confirm against the full source: `index` is not defined in this
        # loop (setEntryUnread(index)), and self.entries is mutated
        # (remove) while being iterated via getEntries().
        for entry in self.getEntries():
            if not entry["downloaded"]:
                f = urllib2.urlopen(entry["link"])
                entry["summary"] = f.read()
                if len(entry["summary"]) > 0:
                    entry["downloaded"] = True
                    entry["time"] = time.time()
                    self.setEntryUnread(index)
            currentTime = time.time()
            expiry = float(expiryTime) * 3600
            if currentTime - entry["time"] > expiry:
                self.entries.remove(entry)
        self.updateTime = time.asctime()
        self.saveFeed(configdir)

    def getArticle(self, index):
        # NOTE(review): truncated in this chunk — the `return content`
        # (and any page wrapping) is missing.
        self.setEntryRead(index)
        content = self.getContent(index)
# Lists all the feeds in a dictionary, and expose the data
    def __init__(self, configdir):
        # Load feeds.pickle (feed metadata + display order) from configdir,
        # falling back to a default Slashdot feed on first run.
        # NOTE(review): several lines are missing from this chunk — at least
        # a file.close()/else: around the pickle load, an else: before the
        # sortedKeys fallback, the `self.feeds = {}` initialisation, and the
        # for/try/except loop that the `remove(key)` below belongs to
        # (as shown, `key` is unbound).
        self.configdir = configdir
        if isfile(self.configdir+"feeds.pickle"):
            file = open(self.configdir+"feeds.pickle")
            self.listOfFeeds = pickle.load(file)
            self.listOfFeeds = {getId("Slashdot"):{"title":"Slashdot", "url":"http://rss.slashdot.org/Slashdot/slashdot"}, }
        # Strip legacy/reserved keys that are not real feeds.
        if self.listOfFeeds.has_key("font"):
            del self.listOfFeeds["font"]
        if self.listOfFeeds.has_key("feedingit-order"):
            self.sortedKeys = self.listOfFeeds["feedingit-order"]
        self.sortedKeys = self.listOfFeeds.keys()
        if "font" in self.sortedKeys:
            self.sortedKeys.remove("font")
        self.sortedKeys.sort(key=lambda obj: self.getFeedTitle(obj))
        list = self.sortedKeys[:]
        #if key.startswith('d8'):
        #traceback.print_exc()
        self.sortedKeys.remove(key)
        #print key in self.sortedKeys
        #print "d8eb3f07572892a7b5ed9c81c5bb21a2" in self.sortedKeys
        #print self.listOfFeeds["d8eb3f07572892a7b5ed9c81c5bb21a2"]
        self.closeCurrentlyDisplayedFeed()
    def addArchivedArticle(self, key, index):
        # Copy article *index* of feed *key* into the special "Archived
        # Articles" pseudo-feed, creating that feed on first use.
        # NOTE(review): one or two lines (likely a saveConfig() call) are
        # missing from this chunk before the final line.
        title = self.getFeed(key).getTitle(index)
        link = self.getFeed(key).getLink(index)
        date = self.getFeed(key).getDate(index)
        if not self.listOfFeeds.has_key(getId("Archived Articles")):
            self.listOfFeeds[getId("Archived Articles")] = {"title":"Archived Articles", "url":""}
            self.sortedKeys.append(getId("Archived Articles"))
            self.feeds[getId("Archived Articles")] = ArchivedArticles("Archived Articles", "")
        self.getFeed(getId("Archived Articles")).addArchivedArticle(title, link, date, self.configdir)
    def loadFeed(self, key):
        # Unpickle the cached Feed for *key* if present; otherwise build a
        # fresh Feed object from the stored metadata.
        # NOTE(review): the file.close() and the else: separating the two
        # branches are missing from this chunk.
        if isfile(self.configdir+key):
            file = open(self.configdir+key)
            self.feeds[key] = pickle.load(file)
            title = self.listOfFeeds[key]["title"]
            url = self.listOfFeeds[key]["url"]
            self.feeds[key] = Feed(title, url)
439 def updateFeeds(self, expiryTime=24):
440 for key in self.getListOfFeeds():
441 self.feeds[key].updateFeed(self.configdir, expiryTime)
443 def updateFeed(self, key, expiryTime=24):
444 self.feeds[key].updateFeed(self.configdir, expiryTime)
446 def editFeed(self, key, title, url):
447 self.listOfFeeds[key]["title"] = title
448 self.listOfFeeds[key]["url"] = url
449 self.feeds[key].editFeed(url)
451 def getFeed(self, key):
452 return self.feeds[key]
454 def getFeedUpdateTime(self, key):
455 #print self.listOfFeeds.has_key(key)
456 return self.feeds[key].getUpdateTime()
458 def getFeedNumberOfUnreadItems(self, key):
459 return self.feeds[key].getNumberOfUnreadItems()
461 def getFeedTitle(self, key):
462 return self.listOfFeeds[key]["title"]
464 def getFeedUrl(self, key):
465 return self.listOfFeeds[key]["url"]
467 def getListOfFeeds(self):
468 return self.sortedKeys
    def addFeed(self, title, url):
        # Register a new feed keyed by getId(title); skips the metadata when
        # a feed with the same hashed title already exists.
        # NOTE(review): lines are missing from this chunk — likely a
        # saveConfig() call and a return True / else: return False pair.
        if not self.listOfFeeds.has_key(getId(title)):
            self.listOfFeeds[getId(title)] = {"title":title, "url":url}
            self.sortedKeys.append(getId(title))
            self.feeds[getId(title)] = Feed(title, url)
    def removeFeed(self, key):
        # Drop feed *key* from the config dict, the ordering, and the
        # on-disk pickle cache.
        # NOTE(review): lines are missing from this chunk (likely deleting
        # self.feeds[key] and a saveConfig() call); `remove` here is
        # presumably os.remove — its import is not visible in this chunk,
        # confirm against the full file.
        del self.listOfFeeds[key]
        self.sortedKeys.remove(key)
        if isfile(self.configdir+key):
            remove(self.configdir+key)
488 def saveConfig(self):
489 self.listOfFeeds["feedingit-order"] = self.sortedKeys
490 file = open(self.configdir+"feeds.pickle", "w")
491 pickle.dump(self.listOfFeeds, file)
494 def moveUp(self, key):
495 index = self.sortedKeys.index(key)
496 self.sortedKeys[index] = self.sortedKeys[index-1]
497 self.sortedKeys[index-1] = key
499 def moveDown(self, key):
500 index = self.sortedKeys.index(key)
501 index2 = (index+1)%len(self.sortedKeys)
502 self.sortedKeys[index] = self.sortedKeys[index2]
503 self.sortedKeys[index2] = key
505 def setCurrentlyDisplayedFeed(self, key):
506 self.currentlyDisplayedFeed = key
507 def closeCurrentlyDisplayedFeed(self):
508 self.currentlyDisplayedFeed = False
509 def getCurrentlyDisplayedFeed(self):
510 return self.currentlyDisplayedFeed
# Manual smoke test: print the update time of feeds whose key starts with
# 'd8' (a specific md5-hashed feed id being debugged).
# NOTE(review): the `for key in list:` line is missing from this chunk, so
# `key` is unbound as shown. Python 2 print statement.
if __name__ == "__main__":
    listing = Listing('/home/user/.feedingit/')
    list = listing.getListOfFeeds()[:]
    if key.startswith('d8'):
        print listing.getFeedUpdateTime(key)