@@ -3,10 +3,12 @@ import random
 from feedparser import parse
 
 
-def parse_single_rss_feed(url, entries):
+def parse_single_rss_feed(url, entries, tag_filter):
     feed = parse(url)
     entrylength = len(entries)
     for entrynumber, entry in enumerate(feed.entries):
+        if tag_filter and not contains_category(entry, tag_filter):
+            continue
         if entry.has_key("title"):
             entries[entry.title] = []
             entrylist = entries[entry.title]
@@ -18,9 +20,16 @@ def parse_single_rss_feed(url, entries):
         entrylist.append(entry.published_parsed)
     return entries
 
+def contains_category(entry, tag_filter):
+    if not entry.has_key("tags"):
+        return False
+    for tag in entry.tags:
+        if tag.term.lower() == tag_filter.lower():
+            return True
+    return False
 
-def parse_rss_feeds(urls):
+def parse_rss_feeds(urls, tag_filter):
     entries = {}
     for url in urls:
-        entries = parse_single_rss_feed(url, entries)
+        entries = parse_single_rss_feed(url, entries, tag_filter)
     return entries
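
For reference, a minimal usage sketch of the updated entry point, assuming the functions above are in scope; the feed URLs and the "python" tag value below are placeholders, not part of this change:

# Placeholder URLs and tag value; passing tag_filter=None keeps the old
# unfiltered behaviour, since the new check is skipped when tag_filter is falsy.
urls = ["https://example.com/feed.xml", "https://example.org/rss"]
filtered = parse_rss_feeds(urls, tag_filter="python")
unfiltered = parse_rss_feeds(urls, tag_filter=None)
for title, timestamps in filtered.items():
    print(title, timestamps)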