This article collects and summarizes typical usage examples of the Python class apps.rss_feeds.models.MFetchHistory. If you are wondering what MFetchHistory does and how it is used in practice, the curated examples below should help.
The following shows 11 code examples of the MFetchHistory class, sorted by popularity by default.
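The examples share two MFetchHistory entry points: the class methods add() and feed(). As a quick orientation, here is a minimal sketch of how they are used below. The record_and_read_push helper is hypothetical, and the shape of feed()'s return value is inferred from how the callers index into it rather than from documented API, so treat those details as assumptions.

from apps.rss_feeds.models import MFetchHistory

def record_and_read_push(feed_id, timezone=None):
    # Hypothetical helper for illustration only.
    # Record one fetch event; the examples below always log handled
    # pushes with fetch_type='push'.
    MFetchHistory.add(feed_id=feed_id, fetch_type='push')
    # Read back recent history, optionally localized to a timezone
    # (the views pass user.profile.timezone). Callers index the result
    # with exactly these three keys; push entries carry a 'push_date'
    # string formatted '%Y-%m-%d %H:%M:%S' (see Example 2).
    history = MFetchHistory.feed(feed_id, timezone=timezone)
    return (history['feed_fetch_history'],
            history['page_fetch_history'],
            history['push_history'])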
Example 1: receive_newsletter
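Receiving an inbound newsletter email: the handler resolves the recipient to a user, creates or reuses a newsletter Feed and UserSubscription, saves the message as an MStory, and records the delivery with MFetchHistory.add(fetch_type='push').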
def receive_newsletter(self, params):
    user = self.user_from_email(params['recipient'])
    if not user:
        return

    sender_name, sender_username, sender_domain = self.split_sender(params['from'])
    feed_address = self.feed_address(user, "%s@%s" % (sender_username, sender_domain))

    usf = UserSubscriptionFolders.objects.get(user=user)
    usf.add_folder('', 'Newsletters')

    try:
        feed = Feed.objects.get(feed_address=feed_address)
    except Feed.DoesNotExist:
        feed = Feed.objects.create(feed_address=feed_address,
                                   feed_link='http://' + sender_domain,
                                   feed_title=sender_name,
                                   fetched_once=True,
                                   known_good=True)
        feed.update()
        logging.user(user, "~FCCreating newsletter feed: ~SB%s" % (feed))
        r = redis.Redis(connection_pool=settings.REDIS_PUBSUB_POOL)
        r.publish(user.username, 'reload:%s' % feed.pk)

    try:
        usersub = UserSubscription.objects.get(user=user, feed=feed)
    except UserSubscription.DoesNotExist:
        _, _, usersub = UserSubscription.add_subscription(
            user=user,
            feed_address=feed_address,
            folder='Newsletters'
        )

    story_hash = MStory.ensure_story_hash(params['signature'], feed.pk)
    story_params = {
        "story_feed_id": feed.pk,
        "story_date": datetime.datetime.fromtimestamp(int(params['timestamp'])),
        "story_title": params['subject'],
        "story_content": self.get_content(params),
        "story_author_name": escape(params['from']),
        "story_permalink": reverse('newsletter-story',
                                   kwargs={'story_hash': story_hash}),
        "story_guid": params['signature'],
    }
    try:
        story = MStory.objects.get(story_hash=story_hash)
    except MStory.DoesNotExist:
        story = MStory(**story_params)
        story.save()

    usersub.needs_unread_recalc = True
    usersub.save()

    self.publish_to_subscribers(feed)

    MFetchHistory.add(feed_id=feed.pk, fetch_type='push')

    logging.user(user, "~FCNewsletter feed story: ~SB%s~SN / ~SB%s" % (story.story_title, feed))

    return story
Example 2: push_callback
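A PubSubHubbub (PuSH) callback. GET verifies the subscription and echoes the hub challenge; POST consults the push_history returned by MFetchHistory.feed() to throttle feeds that pushed less than a minute ago, then queues a fetch and records the push.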
def push_callback(request, push_id):
    if request.method == 'GET':
        mode = request.GET['hub.mode']
        topic = request.GET['hub.topic']
        challenge = request.GET['hub.challenge']
        lease_seconds = request.GET.get('hub.lease_seconds')
        verify_token = request.GET.get('hub.verify_token', '')
        if mode == 'subscribe':
            if not verify_token.startswith('subscribe'):
                raise Http404
            subscription = get_object_or_404(PushSubscription,
                                             pk=push_id,
                                             topic=topic,
                                             verify_token=verify_token)
            subscription.verified = True
            subscription.set_expiration(int(lease_seconds))
            subscription.save()
            subscription.feed.setup_push()
            logging.debug(' ---> [%-30s] [%s] ~BBVerified PuSH' % (unicode(subscription.feed)[:30], subscription.feed_id))
            verified.send(sender=subscription)
        return HttpResponse(challenge, content_type='text/plain')
    elif request.method == 'POST':
        subscription = get_object_or_404(PushSubscription, pk=push_id)
        fetch_history = MFetchHistory.feed(subscription.feed_id)
        latest_push_date_delta = None
        if fetch_history and fetch_history.get('push_history'):
            latest_push = fetch_history['push_history'][0]['push_date']
            latest_push_date = datetime.datetime.strptime(latest_push, '%Y-%m-%d %H:%M:%S')
            latest_push_date_delta = datetime.datetime.now() - latest_push_date
            if latest_push_date > datetime.datetime.now() - datetime.timedelta(minutes=1):
                logging.debug(' ---> [%-30s] ~SN~FBSkipping feed fetch, pushed %s seconds ago' % (unicode(subscription.feed)[:30], latest_push_date_delta.seconds))
                return HttpResponse('Slow down, you just pushed %s seconds ago...' % latest_push_date_delta.seconds, status=429)
        # XXX TODO: Optimize this by removing feedparser. It just needs to find out
        # the hub_url or topic has changed. ElementTree could do it.
        if random.random() < 0.1:
            parsed = feedparser.parse(request.raw_post_data)
            subscription.check_urls_against_pushed_data(parsed)
        # Don't give fat ping, just fetch.
        # subscription.feed.queue_pushed_feed_xml(request.raw_post_data)
        if subscription.feed.active_premium_subscribers >= 1:
            subscription.feed.queue_pushed_feed_xml("Fetch me", latest_push_date_delta=latest_push_date_delta)
            MFetchHistory.add(feed_id=subscription.feed_id,
                              fetch_type='push')
        else:
            logging.debug(' ---> [%-30s] ~FBSkipping feed fetch, no actives: %s' % (unicode(subscription.feed)[:30], subscription.feed))
        return HttpResponse('OK')
    raise Http404
Example 3: push_callback
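An earlier revision of the same callback without the push-frequency throttle; each accepted push is still recorded via MFetchHistory.add().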
def push_callback(request, push_id):
    if request.method == 'GET':
        mode = request.GET['hub.mode']
        topic = request.GET['hub.topic']
        challenge = request.GET['hub.challenge']
        lease_seconds = request.GET.get('hub.lease_seconds')
        verify_token = request.GET.get('hub.verify_token', '')
        if mode == 'subscribe':
            if not verify_token.startswith('subscribe'):
                raise Http404
            subscription = get_object_or_404(PushSubscription,
                                             pk=push_id,
                                             topic=topic,
                                             verify_token=verify_token)
            subscription.verified = True
            subscription.set_expiration(int(lease_seconds))
            subscription.save()
            subscription.feed.setup_push()
            logging.debug(' ---> [%-30s] [%s] ~BBVerified PuSH' % (unicode(subscription.feed)[:30], subscription.feed_id))
            verified.send(sender=subscription)
        return HttpResponse(challenge, content_type='text/plain')
    elif request.method == 'POST':
        subscription = get_object_or_404(PushSubscription, pk=push_id)
        # XXX TODO: Optimize this by removing feedparser. It just needs to find out
        # the hub_url or topic has changed. ElementTree could do it.
        if random.random() < 0.1:
            parsed = feedparser.parse(request.raw_post_data)
            subscription.check_urls_against_pushed_data(parsed)
        # Don't give fat ping, just fetch.
        # subscription.feed.queue_pushed_feed_xml(request.raw_post_data)
        if subscription.feed.active_premium_subscribers >= 1:
            subscription.feed.queue_pushed_feed_xml("Fetch me")
            MFetchHistory.add(feed_id=subscription.feed_id,
                              fetch_type='push')
        else:
            logging.debug(' ---> [%-30s] ~FBSkipping feed fetch, no actives: %s' % (unicode(subscription.feed)[:30], subscription.feed))
        return HttpResponse('')
    raise Http404
Example 4: load_feed_statistics
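Assembles a per-feed statistics payload; MFetchHistory.feed() supplies the feed, page, and push fetch histories, localized to the requesting user's timezone.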
def load_feed_statistics(request, feed_id):
    user = get_user(request)
    stats = dict()
    feed = get_object_or_404(Feed, pk=feed_id)
    feed.update_all_statistics()
    feed.set_next_scheduled_update(verbose=True, skip_scheduling=True)
    feed.save_feed_story_history_statistics()
    feed.save_classifier_counts()

    # Dates of last and next update
    stats['active'] = feed.active
    stats['last_update'] = relative_timesince(feed.last_update)
    stats['next_update'] = relative_timeuntil(feed.next_scheduled_update)
    stats['push'] = feed.is_push

    # Minutes between updates
    update_interval_minutes = feed.get_next_scheduled_update(force=True, verbose=False)
    stats['update_interval_minutes'] = update_interval_minutes
    original_active_premium_subscribers = feed.active_premium_subscribers
    original_premium_subscribers = feed.premium_subscribers
    feed.active_premium_subscribers = max(feed.active_premium_subscribers+1, 1)
    feed.premium_subscribers += 1
    premium_update_interval_minutes = feed.get_next_scheduled_update(force=True, verbose=False)
    feed.active_premium_subscribers = original_active_premium_subscribers
    feed.premium_subscribers = original_premium_subscribers
    stats['premium_update_interval_minutes'] = premium_update_interval_minutes
    stats['errors_since_good'] = feed.errors_since_good

    # Stories per month - average and month-by-month breakout
    average_stories_per_month, story_count_history = feed.average_stories_per_month, feed.data.story_count_history
    stats['average_stories_per_month'] = average_stories_per_month
    stats['story_count_history'] = story_count_history and json.decode(story_count_history)

    # Subscribers
    stats['subscriber_count'] = feed.num_subscribers
    stats['num_subscribers'] = feed.num_subscribers
    stats['stories_last_month'] = feed.stories_last_month
    stats['last_load_time'] = feed.last_load_time
    stats['premium_subscribers'] = feed.premium_subscribers
    stats['active_subscribers'] = feed.active_subscribers
    stats['active_premium_subscribers'] = feed.active_premium_subscribers

    # Classifier counts
    stats['classifier_counts'] = json.decode(feed.data.feed_classifier_counts)

    # Fetch histories
    timezone = user.profile.timezone
    fetch_history = MFetchHistory.feed(feed_id, timezone=timezone)
    stats['feed_fetch_history'] = fetch_history['feed_fetch_history']
    stats['page_fetch_history'] = fetch_history['page_fetch_history']
    stats['feed_push_history'] = fetch_history['push_history']

    logging.user(request, "~FBStatistics: ~SB%s" % (feed))

    return stats
Example 5: load_feed_settings
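Returns feed settings for the client: the three fetch histories plus the feed's known duplicate addresses.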
def load_feed_settings(request, feed_id):
    stats = dict()
    feed = get_object_or_404(Feed, pk=feed_id)
    user = get_user(request)
    timezone = user.profile.timezone

    fetch_history = MFetchHistory.feed(feed_id, timezone=timezone)
    stats['feed_fetch_history'] = fetch_history['feed_fetch_history']
    stats['page_fetch_history'] = fetch_history['page_fetch_history']
    stats['feed_push_history'] = fetch_history['push_history']
    stats['duplicate_addresses'] = feed.duplicate_addresses.all()

    return stats
Example 6: exception_change_feed_link
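Fixes or branches a feed when the user supplies a new feed link. Both the failure path (no subscription found) and the success path return the fetch histories so the client can show recent fetch attempts.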
def exception_change_feed_link(request):
    feed_id = request.POST['feed_id']
    feed = get_object_or_404(Feed, pk=feed_id)
    original_feed = feed
    feed_link = request.POST['feed_link']
    timezone = request.user.profile.timezone
    code = -1

    if feed.has_page_exception or feed.has_feed_exception:
        # Fix broken feed
        logging.user(request, "~FRFixing feed exception by link: ~SB%s~SN to ~SB%s" % (feed.feed_link, feed_link))
        feed_address = feedfinder.feed(feed_link)
        if feed_address:
            code = 1
            feed.has_page_exception = False
            feed.active = True
            feed.fetched_once = False
            feed.feed_link = feed_link
            feed.feed_address = feed_address
            duplicate_feed = feed.schedule_feed_fetch_immediately()
            if duplicate_feed:
                new_feed = Feed.objects.get(pk=duplicate_feed.pk)
                feed = new_feed
                new_feed.schedule_feed_fetch_immediately()
                new_feed.has_page_exception = False
                new_feed.active = True
                new_feed.save()
    else:
        # Branch good feed
        logging.user(request, "~FRBranching feed by link: ~SB%s~SN to ~SB%s" % (feed.feed_link, feed_link))
        feed, _ = Feed.objects.get_or_create(feed_address=feed.feed_address, feed_link=feed_link)
        code = 1
        if feed.pk != original_feed.pk:
            try:
                feed.branch_from_feed = original_feed.branch_from_feed or original_feed
            except Feed.DoesNotExist:
                feed.branch_from_feed = original_feed

    feed.feed_link_locked = True
    feed.save()

    feed = feed.update()
    feed = Feed.get_by_id(feed.pk)
    try:
        usersub = UserSubscription.objects.get(user=request.user, feed=feed)
    except UserSubscription.DoesNotExist:
        usersubs = UserSubscription.objects.filter(user=request.user, feed=original_feed)
        if usersubs:
            usersub = usersubs[0]
            usersub.switch_feed(feed, original_feed)
        else:
            fetch_history = MFetchHistory.feed(feed_id, timezone=timezone)
            return {
                'code': -1,
                'feed_fetch_history': fetch_history['feed_fetch_history'],
                'page_fetch_history': fetch_history['page_fetch_history'],
                'push_history': fetch_history['push_history'],
            }

    usersub.calculate_feed_scores(silent=False)
    feed.update_all_statistics()
    classifiers = get_classifiers_for_user(usersub.user, feed_id=usersub.feed_id)

    if feed and feed.has_feed_exception:
        code = -1

    feeds = {
        original_feed.pk: usersub.canonical(full=True, classifiers=classifiers),
    }
    fetch_history = MFetchHistory.feed(feed_id, timezone=timezone)
    return {
        'code': code,
        'feeds': feeds,
        'new_feed_id': usersub.feed_id,
        'feed_fetch_history': fetch_history['feed_fetch_history'],
        'page_fetch_history': fetch_history['page_fetch_history'],
        'push_history': fetch_history['push_history'],
    }
Example 7: load_feed_statistics
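A fuller variant of load_feed_statistics that also reports the PuSH lease expiration and rotates the hour-of-day story histogram into the user's timezone.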
def load_feed_statistics(request, feed_id):
    user = get_user(request)
    timezone = user.profile.timezone
    stats = dict()
    feed = get_object_or_404(Feed, pk=feed_id)
    feed.update_all_statistics()
    feed.set_next_scheduled_update(verbose=True, skip_scheduling=True)
    feed.save_feed_story_history_statistics()
    feed.save_classifier_counts()

    # Dates of last and next update
    stats['active'] = feed.active
    stats['last_update'] = relative_timesince(feed.last_update)
    stats['next_update'] = relative_timeuntil(feed.next_scheduled_update)
    stats['push'] = feed.is_push
    if feed.is_push:
        try:
            stats['push_expires'] = localtime_for_timezone(feed.push.lease_expires,
                                                           timezone).strftime("%Y-%m-%d %H:%M:%S")
        except PushSubscription.DoesNotExist:
            stats['push_expires'] = 'Missing push'
            feed.is_push = False
            feed.save()

    # Minutes between updates
    update_interval_minutes = feed.get_next_scheduled_update(force=True, verbose=False)
    stats['update_interval_minutes'] = update_interval_minutes
    original_active_premium_subscribers = feed.active_premium_subscribers
    original_premium_subscribers = feed.premium_subscribers
    feed.active_premium_subscribers = max(feed.active_premium_subscribers+1, 1)
    feed.premium_subscribers += 1
    premium_update_interval_minutes = feed.get_next_scheduled_update(force=True, verbose=False,
                                                                     premium_speed=True)
    feed.active_premium_subscribers = original_active_premium_subscribers
    feed.premium_subscribers = original_premium_subscribers
    stats['premium_update_interval_minutes'] = premium_update_interval_minutes
    stats['errors_since_good'] = feed.errors_since_good

    # Stories per month - average and month-by-month breakout
    average_stories_per_month, story_count_history = feed.average_stories_per_month, feed.data.story_count_history
    stats['average_stories_per_month'] = average_stories_per_month
    story_count_history = story_count_history and json.decode(story_count_history)
    if story_count_history and isinstance(story_count_history, dict):
        stats['story_count_history'] = story_count_history['months']
        stats['story_days_history'] = story_count_history['days']
        stats['story_hours_history'] = story_count_history['hours']

        # Rotate hours to match user's timezone offset
        localoffset = timezone.utcoffset(datetime.datetime.utcnow())
        hours_offset = int(localoffset.total_seconds() / 3600)
        rotated_hours = {}
        for hour, value in stats['story_hours_history'].items():
            rotated_hours[str(int(hour)+hours_offset)] = value
        stats['story_hours_history'] = rotated_hours
    else:
        stats['story_count_history'] = story_count_history

    # Subscribers
    stats['subscriber_count'] = feed.num_subscribers
    stats['num_subscribers'] = feed.num_subscribers
    stats['stories_last_month'] = feed.stories_last_month
    stats['last_load_time'] = feed.last_load_time
    stats['premium_subscribers'] = feed.premium_subscribers
    stats['active_subscribers'] = feed.active_subscribers
    stats['active_premium_subscribers'] = feed.active_premium_subscribers

    # Classifier counts
    stats['classifier_counts'] = json.decode(feed.data.feed_classifier_counts)

    # Fetch histories
    fetch_history = MFetchHistory.feed(feed_id, timezone=timezone)
    stats['feed_fetch_history'] = fetch_history['feed_fetch_history']
    stats['page_fetch_history'] = fetch_history['page_fetch_history']
    stats['feed_push_history'] = fetch_history['push_history']

    logging.user(request, "~FBStatistics: ~SB%s" % (feed))

    return stats
Example 8: exception_change_feed_address
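The feed-address counterpart to Example 6: it repairs or branches a feed by address, merges duplicates when the fetch lands on an existing feed, and returns the fetch histories in both response paths.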
def exception_change_feed_address(request):
    feed_id = request.POST["feed_id"]
    feed = get_object_or_404(Feed, pk=feed_id)
    original_feed = feed
    feed_address = request.POST["feed_address"]
    timezone = request.user.profile.timezone
    code = -1

    if not feed.known_good and (feed.has_page_exception or feed.has_feed_exception):
        # Fix broken feed
        logging.user(
            request, "~FRFixing feed exception by address: ~SB%s~SN to ~SB%s" % (feed.feed_address, feed_address)
        )
        feed.has_feed_exception = False
        feed.active = True
        feed.fetched_once = False
        feed.feed_address = feed_address
        duplicate_feed = feed.schedule_feed_fetch_immediately()
        code = 1
        if duplicate_feed:
            new_feed = Feed.objects.get(pk=duplicate_feed.pk)
            feed = new_feed
            new_feed.schedule_feed_fetch_immediately()
            new_feed.has_feed_exception = False
            new_feed.active = True
            new_feed = new_feed.save()
            if new_feed.pk != feed.pk:
                merge_feeds(new_feed.pk, feed.pk)
    else:
        # Branch good feed
        logging.user(request, "~FRBranching feed by address: ~SB%s~SN to ~SB%s" % (feed.feed_address, feed_address))
        try:
            feed = Feed.objects.get(
                hash_address_and_link=Feed.generate_hash_address_and_link(feed_address, feed.feed_link)
            )
        except Feed.DoesNotExist:
            feed = Feed.objects.create(feed_address=feed_address, feed_link=feed.feed_link)
        code = 1
        if feed.pk != original_feed.pk:
            try:
                feed.branch_from_feed = original_feed.branch_from_feed or original_feed
            except Feed.DoesNotExist:
                feed.branch_from_feed = original_feed

    feed.feed_address_locked = True
    feed = feed.save()

    feed = feed.update()
    feed = Feed.get_by_id(feed.pk)
    try:
        usersub = UserSubscription.objects.get(user=request.user, feed=feed)
    except UserSubscription.DoesNotExist:
        usersubs = UserSubscription.objects.filter(user=request.user, feed=original_feed)
        if usersubs:
            usersub = usersubs[0]
            usersub.switch_feed(feed, original_feed)
        else:
            fetch_history = MFetchHistory.feed(feed_id, timezone=timezone)
            return {
                "code": -1,
                "feed_fetch_history": fetch_history["feed_fetch_history"],
                "page_fetch_history": fetch_history["page_fetch_history"],
                "push_history": fetch_history["push_history"],
            }

    usersub.calculate_feed_scores(silent=False)
    feed.update_all_statistics()
    classifiers = get_classifiers_for_user(usersub.user, feed_id=usersub.feed_id)

    feeds = {original_feed.pk: usersub and usersub.canonical(full=True, classifiers=classifiers)}

    if feed and feed.has_feed_exception:
        code = -1

    fetch_history = MFetchHistory.feed(feed_id, timezone=timezone)
    return {
        "code": code,
        "feeds": feeds,
        "new_feed_id": usersub.feed_id,
        "feed_fetch_history": fetch_history["feed_fetch_history"],
        "page_fetch_history": fetch_history["page_fetch_history"],
        "push_history": fetch_history["push_history"],
    }
Example 9: load_feed_statistics
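Another revision of load_feed_statistics, including the PuSH lease expiration but not the per-hour histogram; the MFetchHistory.feed() call at the end is unchanged.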
def load_feed_statistics(request, feed_id):
    user = get_user(request)
    timezone = user.profile.timezone
    stats = dict()
    feed = get_object_or_404(Feed, pk=feed_id)
    feed.update_all_statistics()
    feed.set_next_scheduled_update(verbose=True, skip_scheduling=True)
    feed.save_feed_story_history_statistics()
    feed.save_classifier_counts()

    # Dates of last and next update
    stats["active"] = feed.active
    stats["last_update"] = relative_timesince(feed.last_update)
    stats["next_update"] = relative_timeuntil(feed.next_scheduled_update)
    stats["push"] = feed.is_push
    if feed.is_push:
        try:
            stats["push_expires"] = localtime_for_timezone(feed.push.lease_expires, timezone).strftime(
                "%Y-%m-%d %H:%M:%S"
            )
        except PushSubscription.DoesNotExist:
            stats["push_expires"] = "Missing push"
            feed.is_push = False
            feed.save()

    # Minutes between updates
    update_interval_minutes = feed.get_next_scheduled_update(force=True, verbose=False)
    stats["update_interval_minutes"] = update_interval_minutes
    original_active_premium_subscribers = feed.active_premium_subscribers
    original_premium_subscribers = feed.premium_subscribers
    feed.active_premium_subscribers = max(feed.active_premium_subscribers + 1, 1)
    feed.premium_subscribers += 1
    premium_update_interval_minutes = feed.get_next_scheduled_update(force=True, verbose=False, premium_speed=True)
    feed.active_premium_subscribers = original_active_premium_subscribers
    feed.premium_subscribers = original_premium_subscribers
    stats["premium_update_interval_minutes"] = premium_update_interval_minutes
    stats["errors_since_good"] = feed.errors_since_good

    # Stories per month - average and month-by-month breakout
    average_stories_per_month, story_count_history = feed.average_stories_per_month, feed.data.story_count_history
    stats["average_stories_per_month"] = average_stories_per_month
    stats["story_count_history"] = story_count_history and json.decode(story_count_history)

    # Subscribers
    stats["subscriber_count"] = feed.num_subscribers
    stats["num_subscribers"] = feed.num_subscribers
    stats["stories_last_month"] = feed.stories_last_month
    stats["last_load_time"] = feed.last_load_time
    stats["premium_subscribers"] = feed.premium_subscribers
    stats["active_subscribers"] = feed.active_subscribers
    stats["active_premium_subscribers"] = feed.active_premium_subscribers

    # Classifier counts
    stats["classifier_counts"] = json.decode(feed.data.feed_classifier_counts)

    # Fetch histories
    fetch_history = MFetchHistory.feed(feed_id, timezone=timezone)
    stats["feed_fetch_history"] = fetch_history["feed_fetch_history"]
    stats["page_fetch_history"] = fetch_history["page_fetch_history"]
    stats["feed_push_history"] = fetch_history["push_history"]

    logging.user(request, "~FBStatistics: ~SB%s" % (feed))

    return stats
Example 10: receive_newsletter
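A later revision of Example 1: it tolerates a missing UserSubscriptionFolders, picks the longer of the HTML and plain-text bodies, builds an absolute permalink from the current Site, and publishes the story hash to subscribers, still ending with MFetchHistory.add().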
def receive_newsletter(self, params):
    user = self._user_from_email(params['recipient'])
    if not user:
        return

    sender_name, sender_username, sender_domain = self._split_sender(params['from'])
    feed_address = self._feed_address(user, "%s@%s" % (sender_username, sender_domain))

    try:
        usf = UserSubscriptionFolders.objects.get(user=user)
    except UserSubscriptionFolders.DoesNotExist:
        logging.user(user, "~FRUser does not have a USF, ignoring newsletter.")
        return
    usf.add_folder('', 'Newsletters')

    try:
        feed = Feed.objects.get(feed_address=feed_address)
    except Feed.DoesNotExist:
        feed = Feed.objects.create(feed_address=feed_address,
                                   feed_link='http://' + sender_domain,
                                   feed_title=sender_name,
                                   fetched_once=True,
                                   known_good=True)
        feed.update()
        logging.user(user, "~FCCreating newsletter feed: ~SB%s" % (feed))
        r = redis.Redis(connection_pool=settings.REDIS_PUBSUB_POOL)
        r.publish(user.username, 'reload:%s' % feed.pk)
        self._check_if_first_newsletter(user)

    feed.last_update = datetime.datetime.now()
    feed.last_story_date = datetime.datetime.now()
    feed.save()

    if feed.feed_title != sender_name:
        feed.feed_title = sender_name
        feed.save()

    try:
        usersub = UserSubscription.objects.get(user=user, feed=feed)
    except UserSubscription.DoesNotExist:
        _, _, usersub = UserSubscription.add_subscription(
            user=user,
            feed_address=feed_address,
            folder='Newsletters'
        )
        r = redis.Redis(connection_pool=settings.REDIS_PUBSUB_POOL)
        r.publish(user.username, 'reload:feeds')

    story_hash = MStory.ensure_story_hash(params['signature'], feed.pk)
    story_content = self._get_content(params)
    plain_story_content = self._get_content(params, force_plain=True)
    if len(plain_story_content) > len(story_content):
        story_content = plain_story_content
    story_content = self._clean_content(story_content)

    story_params = {
        "story_feed_id": feed.pk,
        "story_date": datetime.datetime.fromtimestamp(int(params['timestamp'])),
        "story_title": params['subject'],
        "story_content": story_content,
        "story_author_name": params['from'],
        "story_permalink": "https://%s%s" % (
            Site.objects.get_current().domain,
            reverse('newsletter-story',
                    kwargs={'story_hash': story_hash})),
        "story_guid": params['signature'],
    }
    try:
        story = MStory.objects.get(story_hash=story_hash)
    except MStory.DoesNotExist:
        story = MStory(**story_params)
        story.save()

    usersub.needs_unread_recalc = True
    usersub.save()

    self._publish_to_subscribers(feed, story.story_hash)

    MFetchHistory.add(feed_id=feed.pk, fetch_type='push')

    logging.user(user, "~FCNewsletter feed story: ~SB%s~SN / ~SB%s" % (story.story_title, feed))

    return story
Example 11: receive_newsletter
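The newsletter receiver again, reformatted with double-quoted strings and without the plain-text body comparison; the MFetchHistory bookkeeping is identical.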
def receive_newsletter(self, params):
    user = self.user_from_email(params["recipient"])
    if not user:
        return

    sender_name, sender_username, sender_domain = self.split_sender(params["from"])
    feed_address = self.feed_address(user, "%s@%s" % (sender_username, sender_domain))

    usf = UserSubscriptionFolders.objects.get(user=user)
    usf.add_folder("", "Newsletters")

    try:
        feed = Feed.objects.get(feed_address=feed_address)
    except Feed.DoesNotExist:
        feed = Feed.objects.create(
            feed_address=feed_address,
            feed_link="http://" + sender_domain,
            feed_title=sender_name,
            fetched_once=True,
            known_good=True,
        )
        feed.update()
        logging.user(user, "~FCCreating newsletter feed: ~SB%s" % (feed))
        r = redis.Redis(connection_pool=settings.REDIS_PUBSUB_POOL)
        r.publish(user.username, "reload:%s" % feed.pk)

    if feed.feed_title != sender_name:
        feed.feed_title = sender_name
        feed.save()

    try:
        usersub = UserSubscription.objects.get(user=user, feed=feed)
    except UserSubscription.DoesNotExist:
        _, _, usersub = UserSubscription.add_subscription(
            user=user, feed_address=feed_address, folder="Newsletters"
        )

    story_hash = MStory.ensure_story_hash(params["signature"], feed.pk)
    story_content = self.get_content(params)
    story_content = self.clean_content(story_content)
    story_params = {
        "story_feed_id": feed.pk,
        "story_date": datetime.datetime.fromtimestamp(int(params["timestamp"])),
        "story_title": params["subject"],
        "story_content": story_content,
        "story_author_name": params["from"],
        "story_permalink": "https://%s%s"
        % (Site.objects.get_current().domain, reverse("newsletter-story", kwargs={"story_hash": story_hash})),
        "story_guid": params["signature"],
    }
    try:
        story = MStory.objects.get(story_hash=story_hash)
    except MStory.DoesNotExist:
        story = MStory(**story_params)
        story.save()

    usersub.needs_unread_recalc = True
    usersub.save()

    self.publish_to_subscribers(feed)

    MFetchHistory.add(feed_id=feed.pk, fetch_type="push")

    logging.user(user, "~FCNewsletter feed story: ~SB%s~SN / ~SB%s" % (story.story_title, feed))

    return story