This article collects typical usage examples of the encode function from the Python module utils.json_functions. If you are wondering what encode does, how to call it, or where it is used, the examples here may help.
The 15 code examples below show how encode is called in practice, ordered roughly by popularity.
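The module itself is not reproduced on this page, so for context here is a minimal sketch of what a json_functions-style encode/decode pair typically looks like. This is an assumption based on how the examples below use it (serializing datetimes and aggregate values, decoding stored JSON strings), not the actual source of utils.json_functions; the examples appear to be drawn from the NewsBlur codebase.

    # Minimal sketch of a json_functions-style encode/decode pair.
    # Assumed implementation for illustration, not the module's actual source.
    import datetime
    import decimal
    import json as python_json

    def _default(obj):
        # Handle the non-JSON-native types the examples pass in:
        # datetimes from the statistics code and Decimals from SQL aggregates.
        if isinstance(obj, (datetime.datetime, datetime.date)):
            return obj.isoformat()
        if isinstance(obj, decimal.Decimal):
            return float(obj)
        raise TypeError("Object of type %s is not JSON serializable" % type(obj).__name__)

    def encode(data):
        # Dump any Python structure to a JSON string.
        return python_json.dumps(data, default=_default)

    def decode(data):
        # Parse a JSON string back into Python objects; pass None through untouched.
        if data is None:
            return None
        return python_json.loads(data)

With a wrapper like this, json.encode({'code': 1}) returns the string '{"code": 1}', which the views below hand directly to HttpResponse, while json.decode reverses it for stored settings and request bodies.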
Example 1: process_request
def process_request(self, request):
    user_agent = request.environ.get('HTTP_USER_AGENT', 'missing').lower()
    if 'profile' in request.path: return
    if 'haproxy' in request.path: return
    if 'dbcheck' in request.path: return
    if 'account' in request.path: return
    if 'push' in request.path: return
    if getattr(settings, 'TEST_DEBUG'): return
    if any(ua in user_agent for ua in BANNED_USER_AGENTS):
        data = {
            'error': 'User agent banned: %s' % user_agent,
            'code': -1
        }
        logging.user(request, "~FB~SN~BBBanned UA: ~SB%s / %s (%s)" % (user_agent, request.path, request.META))
        return HttpResponse(json.encode(data), status=403, mimetype='text/json')
    if request.user.is_authenticated() and any(username == request.user.username for username in BANNED_USERNAMES):
        data = {
            'error': 'User banned: %s' % request.user.username,
            'code': -1
        }
        logging.user(request, "~FB~SN~BBBanned Username: ~SB%s / %s (%s)" % (request.user, request.path, request.META))
        return HttpResponse(json.encode(data), status=403, mimetype='text/json')
Example 2: collect_statistics_sites_loaded
def collect_statistics_sites_loaded(cls, last_day=None):
    if not last_day:
        last_day = datetime.datetime.now() - datetime.timedelta(hours=24)
    now = datetime.datetime.now()
    sites_loaded = []
    avg_time_taken = []
    for hour in range(24):
        start_hours_ago = now - datetime.timedelta(hours=hour)
        end_hours_ago = now - datetime.timedelta(hours=hour+1)
        aggregates = dict(count=Count('loadtime'), avg=Avg('loadtime'))
        load_times = FeedLoadtime.objects.filter(
            date_accessed__lte=start_hours_ago,
            date_accessed__gte=end_hours_ago
        ).aggregate(**aggregates)
        sites_loaded.append(load_times['count'] or 0)
        avg_time_taken.append(load_times['avg'] or 0)
    sites_loaded.reverse()
    avg_time_taken.reverse()
    values = (
        ('sites_loaded', json.encode(sites_loaded)),
        ('avg_time_taken', json.encode(avg_time_taken)),
        ('latest_sites_loaded', sites_loaded[-1]),
        ('latest_avg_time_taken', avg_time_taken[-1]),
        ('max_sites_loaded', max(sites_loaded)),
        ('max_avg_time_taken', max(1, max(avg_time_taken))),
    )
    for key, value in values:
        cls.objects(key=key).update_one(upsert=True, set__key=key, set__value=value)
Example 3: _parse_user_info
def _parse_user_info(user):
    return {
        "user_info": {
            "is_anonymous": json.encode(user.is_anonymous()),
            "is_authenticated": json.encode(user.is_authenticated()),
            "username": json.encode(user.username if user.is_authenticated() else "Anonymous"),
        }
    }
Example 4: _parse_user_info
def _parse_user_info(user):
    return {
        'user_info': {
            'is_anonymous': json.encode(user.is_anonymous()),
            'is_authenticated': json.encode(user.is_authenticated()),
            'username': json.encode(user.username if user.is_authenticated() else 'Anonymous')
        }
    }
Example 5: collect_statistics_sites_loaded
def collect_statistics_sites_loaded(cls):
    now = datetime.datetime.now()
    sites_loaded = []
    avg_time_taken = []
    for hour in range(24):
        start_hours_ago = now - datetime.timedelta(hours=hour)
        end_hours_ago = now - datetime.timedelta(hours=hour+1)
        load_times = settings.MONGOANALYTICSDB.nbanalytics.page_loads.aggregate([{
            "$match": {
                "date": {
                    "$gte": end_hours_ago,
                    "$lte": start_hours_ago,
                },
                "path": {
                    "$in": [
                        "/reader/feed/",
                        "/social/stories/",
                        "/reader/river_stories/",
                        "/social/river_stories/",
                    ]
                }
            },
        }, {
            "$group": {
                "_id": 1,
                "count": {"$sum": 1},
                "avg": {"$avg": "$duration"},
            },
        }])
        count = 0
        avg = 0
        if load_times['result']:
            count = load_times['result'][0]['count']
            avg = load_times['result'][0]['avg']
        sites_loaded.append(count)
        avg_time_taken.append(avg)
    sites_loaded.reverse()
    avg_time_taken.reverse()
    values = (
        ('sites_loaded', json.encode(sites_loaded)),
        ('avg_time_taken', json.encode(avg_time_taken)),
        ('latest_sites_loaded', sites_loaded[-1]),
        ('latest_avg_time_taken', avg_time_taken[-1]),
        ('max_sites_loaded', max(sites_loaded)),
        ('max_avg_time_taken', max(1, max(avg_time_taken))),
    )
    for key, value in values:
        cls.objects(key=key).update_one(upsert=True, set__key=key, set__value=value)
Example 6: collect_statistics_for_db
def collect_statistics_for_db(cls):
    lag = db_functions.mongo_max_replication_lag(settings.MONGODB)
    cls.set('mongodb_replication_lag', lag)
    now = round_time(datetime.datetime.now(), round_to=60)
    r = redis.Redis(connection_pool=settings.REDIS_STATISTICS_POOL)
    db_times = {}
    latest_db_times = {}
    for db in ['sql', 'mongo', 'redis', 'task_sql', 'task_mongo', 'task_redis']:
        db_times[db] = []
        for hour in range(24):
            start_hours_ago = now - datetime.timedelta(hours=hour+1)
            pipe = r.pipeline()
            for m in range(60):
                minute = start_hours_ago + datetime.timedelta(minutes=m)
                key = "DB:%s:%s" % (db, minute.strftime('%s'))
                pipe.get("%s:c" % key)
                pipe.get("%s:t" % key)
            times = pipe.execute()
            counts = [int(c or 0) for c in times[::2]]
            avgs = [float(a or 0) for a in times[1::2]]
            if counts and avgs:
                count = sum(counts)
                avg = round(sum(avgs) / count, 3) if count else 0
            else:
                count = 0
                avg = 0
            if hour == 0:
                latest_count = float(counts[-1]) if len(counts) else 0
                latest_avg = float(avgs[-1]) if len(avgs) else 0
                latest_db_times[db] = latest_avg / latest_count if latest_count else 0
            db_times[db].append(avg)
        db_times[db].reverse()
    values = (
        ('avg_sql_times', json.encode(db_times['sql'])),
        ('avg_mongo_times', json.encode(db_times['mongo'])),
        ('avg_redis_times', json.encode(db_times['redis'])),
        ('latest_sql_avg', latest_db_times['sql']),
        ('latest_mongo_avg', latest_db_times['mongo']),
        ('latest_redis_avg', latest_db_times['redis']),
        ('latest_task_sql_avg', latest_db_times['task_sql']),
        ('latest_task_mongo_avg', latest_db_times['task_mongo']),
        ('latest_task_redis_avg', latest_db_times['task_redis']),
    )
    for key, value in values:
        cls.objects(key=key).update_one(upsert=True, set__key=key, set__value=value)
Example 7: _view
def _view(request, *args, **kwargs):
    if request.user.is_anonymous():
        return HttpResponse(content=json.encode({
            "message": "You must have a valid OAuth token.",
        }), status=401)
    else:
        try:
            setattr(request, 'body_json', json.decode(request.body))
        except JSONDecodeError:
            return HttpResponse(content=json.encode({
                "message": "Your JSON body is malformed.",
            }), status=400)
        return view_func(request, *args, **kwargs)
Example 8: collect_statistics_sites_loaded
def collect_statistics_sites_loaded(cls):
    now = round_time(datetime.datetime.now(), round_to=60)
    sites_loaded = []
    avg_time_taken = []
    last_5_min_time_taken = 0
    r = redis.Redis(connection_pool=settings.REDIS_STATISTICS_POOL)
    for hour in range(24):
        start_hours_ago = now - datetime.timedelta(hours=hour+1)
        pipe = r.pipeline()
        for m in range(60):
            minute = start_hours_ago + datetime.timedelta(minutes=m)
            key = "%s:%s" % (RStats.stats_type('page_load'), minute.strftime('%s'))
            pipe.get("%s:s" % key)
            pipe.get("%s:a" % key)
        times = pipe.execute()
        counts = [int(c) for c in times[::2] if c]
        avgs = [float(a) for a in times[1::2] if a]
        if hour == 0:
            last_5_min_time_taken = round(sum(avgs[:1]) / max(1, sum(counts[:1])), 2)
        if counts and avgs:
            count = max(1, sum(counts))
            avg = round(sum(avgs) / count, 3)
        else:
            count = 0
            avg = 0
        sites_loaded.append(count)
        avg_time_taken.append(avg)
    sites_loaded.reverse()
    avg_time_taken.reverse()
    values = (
        ('sites_loaded', json.encode(sites_loaded)),
        ('avg_time_taken', json.encode(avg_time_taken)),
        ('latest_sites_loaded', sites_loaded[-1]),
        ('latest_avg_time_taken', avg_time_taken[-1]),
        ('max_sites_loaded', max(sites_loaded)),
        ('max_avg_time_taken', max(1, max(avg_time_taken))),
        ('last_5_min_time_taken', last_5_min_time_taken),
    )
    for key, value in values:
        cls.objects(key=key).update_one(upsert=True, set__key=key, set__value=value)
Example 9: process
def process(self):
    outline = opml.from_string(self.opml_xml)
    self.clear_feeds()
    folders = self.process_outline(outline)
    UserSubscriptionFolders.objects.create(user=self.user, folders=json.encode(folders))
    self.queue_new_feeds()
    return folders
Example 10: set_view_setting
def set_view_setting(request):
    print 'KHong profile_views set_view_setting()'
    code = 1
    feed_id = request.POST['feed_id']
    feed_view_setting = request.POST.get('feed_view_setting')
    feed_order_setting = request.POST.get('feed_order_setting')
    feed_read_filter_setting = request.POST.get('feed_read_filter_setting')
    feed_layout_setting = request.POST.get('feed_layout_setting')
    view_settings = json.decode(request.user.profile.view_settings)
    setting = view_settings.get(feed_id, {})
    if isinstance(setting, basestring): setting = {'v': setting}
    if feed_view_setting: setting['v'] = feed_view_setting
    if feed_order_setting: setting['o'] = feed_order_setting
    if feed_read_filter_setting: setting['r'] = feed_read_filter_setting
    if feed_layout_setting: setting['l'] = feed_layout_setting
    view_settings[feed_id] = setting
    request.user.profile.view_settings = json.encode(view_settings)
    request.user.profile.save()
    logging.user(request, "~FMView settings: %s/%s/%s/%s" % (feed_view_setting,
                 feed_order_setting, feed_read_filter_setting, feed_layout_setting))
    response = dict(code=code)
    return response
Example 11: add_site
def add_site(request, token):
    code = 0
    url = request.GET['url']
    folder = request.GET['folder']
    callback = request.GET['callback']
    if not url:
        code = -1
    else:
        try:
            profile = Profile.objects.get(secret_token=token)
            code, message, us = UserSubscription.add_subscription(
                user=profile.user,
                feed_address=url,
                folder=folder,
                bookmarklet=True
            )
        except Profile.DoesNotExist:
            code = -1
    if code > 0:
        message = 'OK'
    return HttpResponse(callback + '(' + json.encode({
        'code': code,
        'message': message,
        'usersub': us and us.feed.pk,
    }) + ')', mimetype='text/plain')
Example 12: clear_view_setting
def clear_view_setting(request):
    code = 1
    view_setting_type = request.POST.get('view_setting_type')
    view_settings = json.decode(request.user.profile.view_settings)
    new_view_settings = {}
    removed = 0
    for feed_id, view_setting in view_settings.items():
        if view_setting_type == 'layout' and 'l' in view_setting:
            del view_setting['l']
            removed += 1
        if view_setting_type == 'view' and 'v' in view_setting:
            del view_setting['v']
            removed += 1
        if view_setting_type == 'order' and 'o' in view_setting:
            del view_setting['o']
            removed += 1
        if view_setting_type == 'order' and 'r' in view_setting:
            del view_setting['r']
            removed += 1
        new_view_settings[feed_id] = view_setting
    request.user.profile.view_settings = json.encode(new_view_settings)
    request.user.profile.save()
    logging.user(request, "~FMClearing view settings: %s (found %s)" % (view_setting_type, removed))
    response = dict(code=code, view_settings=view_settings, removed=removed)
    return response
Example 13: opml_upload
def opml_upload(request):
    xml_opml = None
    message = "OK"
    code = 1
    payload = {}
    if request.method == 'POST':
        if 'file' in request.FILES:
            logging.user(request, "~FR~SBOPML upload starting...")
            file = request.FILES['file']
            xml_opml = file.read()
            opml_importer = OPMLImporter(xml_opml, request.user)
            folders = opml_importer.process()
            feeds = UserSubscription.objects.filter(user=request.user).values()
            payload = dict(folders=folders, feeds=feeds)
            logging.user(request, "~FR~SBOPML Upload: ~SK%s~SN~SB~FR feeds" % (len(feeds)))
            request.session['import_from_google_reader'] = False
        else:
            message = "Attach an .opml file."
            code = -1
    data = json.encode(dict(message=message, code=code, payload=payload))
    return HttpResponse(data, mimetype='text/plain')
Example 14: delete_feed
def delete_feed(self, feed_id, in_folder):
    def _find_feed_in_folders(old_folders, folder_name='', multiples_found=False, deleted=False):
        new_folders = []
        for k, folder in enumerate(old_folders):
            if isinstance(folder, int):
                if (folder == feed_id and (
                    (folder_name != in_folder) or
                    (folder_name == in_folder and deleted))):
                    multiples_found = True
                    logging.info(" ---> [%s] Deleting feed, and a multiple has been found in '%s'" % (self.user, folder_name))
                if folder == feed_id and folder_name == in_folder and not deleted:
                    logging.info(" ---> [%s] Delete feed: %s'th item: %s folders/feeds" % (
                        self.user, k, len(old_folders)
                    ))
                    deleted = True
                else:
                    new_folders.append(folder)
            elif isinstance(folder, dict):
                for f_k, f_v in folder.items():
                    nf, multiples_found, deleted = _find_feed_in_folders(f_v, f_k, multiples_found, deleted)
                    new_folders.append({f_k: nf})
        return new_folders, multiples_found, deleted

    user_sub_folders = json.decode(self.folders)
    user_sub_folders, multiples_found, deleted = _find_feed_in_folders(user_sub_folders)
    self.folders = json.encode(user_sub_folders)
    self.save()
    if not multiples_found and deleted:
        user_sub = UserSubscription.objects.get(user=self.user, feed=feed_id)
        user_sub.delete()
        MUserStory.objects(user_id=self.user.pk, feed_id=feed_id).delete()
Example 15: add_site
def add_site(request, token):
    code = 0
    url = request.GET['url']
    folder = request.GET['folder']
    new_folder = request.GET.get('new_folder')
    callback = request.GET['callback']
    if not url:
        code = -1
    else:
        try:
            profile = Profile.objects.get(secret_token=token)
            if new_folder:
                usf, _ = UserSubscriptionFolders.objects.get_or_create(user=profile.user)
                usf.add_folder(folder, new_folder)
                folder = new_folder
            code, message, us = UserSubscription.add_subscription(
                user=profile.user,
                feed_address=url,
                folder=folder,
                bookmarklet=True
            )
        except Profile.DoesNotExist:
            code = -1
    if code > 0:
        message = 'OK'
    logging.user(profile.user, "~FRAdding URL from site: ~SB%s (in %s)" % (url, folder))
    return HttpResponse(callback + '(' + json.encode({
        'code': code,
        'message': message,
        'usersub': us and us.feed.pk,
    }) + ')', mimetype='text/plain')