refactoring, improvement of debug output
Changed file: atomstrom.py (36 lines changed)
@@ -124,7 +124,17 @@ session = Session()
 #session.add(Feed('http://www.heise.de/newsticker/heise-atom.xml', 1, 0, 0, 1))
 #session.add(Feed('http://blog.schatenseite.de/feed/', 1, 0, 0, 1))
 
-for feed in session.query(Feed).filter_by(enabled=1).order_by(Feed.id):
+def process_feed_entry(feed, entry):
+    query = session.query(Entry).filter_by(feed_id=feed.id, title=entry.title.encode('latin-1', 'replace'))
+    try:
+        thisentry = query.one()
+        thisentry.update(entry)
+        return "-"
+    except Exception, e:
+        feed.entry.append(Entry(entry))
+        return "+"
+
+def fetch_single_feed(feed):
     print "fetching %s" % feed.url
     parser = feedparser.parse(feed.url)
     print "processing feed info..."
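The first hunk splits the old top-level feed loop into two functions: process_feed_entry handles the per-entry duplicate check and returns "+" for a newly stored entry or "-" for an update of a known one, while fetch_single_feed keeps the per-feed fetching and parsing. The new/known decision relies on Query.one() raising when no Entry with the same feed_id and title exists yet. A minimal sketch of that pattern, assuming the session and Entry objects defined earlier in atomstrom.py, with the exception narrowed to sqlalchemy.orm.exc.NoResultFound purely for illustration (the commit itself keeps the broad except Exception, e):

# Sketch only: session and Entry come from atomstrom.py; narrowing the exception is an assumption.
from sqlalchemy.orm.exc import NoResultFound

def process_feed_entry(feed, entry):
    # look for an entry of this feed that was already stored under the same title
    query = session.query(Entry).filter_by(feed_id=feed.id,
                                           title=entry.title.encode('latin-1', 'replace'))
    try:
        thisentry = query.one()              # raises NoResultFound if the entry is unknown
        thisentry.update(entry)              # known entry: refresh its stored fields
        return "-"                           # "-" becomes the progress marker for an update
    except NoResultFound:
        feed.entry.append(Entry(entry))      # unknown entry: attach a new Entry to the feed
        return "+"                           # "+" becomes the progress marker for a new entry

The single-character return value is what the caller writes to stdout, so the debug output stays a compact stream of "+" and "-" per feed.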
@@ -136,16 +146,24 @@ for feed in session.query(Feed).filter_by(enabled=1).order_by(Feed.id):
         feed.feedinfo = Feedinfo(parser)
 
     print "processing feed entries: ",
+    entries_new = 0
+    entries_total = 0
     for entry in parser.entries:
-        query = session.query(Entry).filter_by(feed_id=feed.id, title=entry.title.encode('latin-1', 'replace'))
-        try:
-            thisentry = query.one()
-            sys.stdout.write("-")
-            thisentry.update(entry)
-        except Exception, e:
-            sys.stdout.write("+")
-            feed.entry.append(Entry(entry))
+        entries_total = entries_total + 1
+        ret = process_feed_entry(feed, entry)
+        if ret == "+":
+            entries_new = entries_new + 1
+        sys.stdout.write(ret)
+    print " (%d/%d new)" % (entries_new, entries_total)
 
+def fetch_all_feeds():
+    print "fetching all feeds..."
+    for feed in session.query(Feed).filter_by(enabled=1).order_by(Feed.id):
+        fetch_single_feed(feed)
         print
 
+if __name__ == "__main__":
+    print "main"
+    fetch_all_feeds()
+
 session.commit()
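The second hunk builds on the extracted helper: fetch_single_feed now counts processed and newly added entries and closes each feed with a " (%d/%d new)" summary, fetch_all_feeds wraps the loop over all enabled feeds, and the fetching only runs under the if __name__ == "__main__" guard. A hypothetical interactive use of the refactored module, assuming atomstrom.py is importable, that session, Feed and fetch_single_feed are module-level names as in the diff, and with the feed id made up for illustration:

# Sketch only: refresh one feed by hand instead of running the whole script.
import atomstrom

feed = atomstrom.session.query(atomstrom.Feed).get(1)     # hypothetical feed id
if feed is not None:
    atomstrom.fetch_single_feed(feed)     # prints the +/- markers and the "(x/y new)" summary
    atomstrom.session.commit()            # persist entries appended by process_feed_entry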