This article collects representative usage examples of the Python method kalite.settings.LOG.debug. If you have been wondering what LOG.debug does, how to call it, or what real usage looks like, the curated examples here may help; they also show how kalite.settings.LOG is used in context.
Below are 15 code examples of the LOG.debug method, sorted by popularity by default.
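As a quick orientation before the examples: these snippets obtain the logger by importing kalite.settings.LOG under a local alias (typically logging) and then calling its debug method. The minimal sketch below illustrates that pattern; the helper function and message are hypothetical, and it assumes an environment where kalite.settings is importable.

# Minimal usage sketch (assumes kalite.settings is importable; the alias `logging`
# mirrors the convention used throughout the examples below).
from kalite.settings import LOG as logging

def describe_download(video_id):  # hypothetical helper, for illustration only
    # Emit a debug-level message through KA Lite's configured logger.
    logging.debug("Preparing download for video %s" % video_id)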
Example 1: process_request

# Module to import: from kalite.settings import LOG [as alias]
# Or: from kalite.settings.LOG import debug [as alias]
def process_request(self, request):
    next = request.GET.get("next", "")
    if next.startswith("/"):
        logging.debug("next='%s'" % next)
        request.next = next
    else:
        request.next = ""
Example 2: update_json

# Module to import: from kalite.settings import LOG [as alias]
# Or: from kalite.settings.LOG import debug [as alias]
def update_json(youtube_id, lang_code, downloaded, api_response, time_of_attempt):
    """Update language_srt_map to reflect download status

    lang_code in IETF format
    """
    # Open JSON file
    filepath = get_lang_map_filepath(lang_code)
    language_srt_map = softload_json(filepath, logger=logging.error)
    if not language_srt_map:
        return False

    # create updated entry
    entry = language_srt_map[youtube_id]
    entry["downloaded"] = downloaded
    entry["api_response"] = api_response
    entry["last_attempt"] = time_of_attempt
    if api_response == "success":
        entry["last_success"] = time_of_attempt

    # update full-size JSON with new information
    language_srt_map[youtube_id].update(entry)

    # write it to file
    json_file = open(filepath, "wb")
    json_file.write(json.dumps(language_srt_map))
    json_file.close()

    logging.debug("File updated.")
    return True
Example 3: add_to_summary

# Module to import: from kalite.settings import LOG [as alias]
# Or: from kalite.settings.LOG import debug [as alias]
def add_to_summary(sender, **kwargs):
    assert UserLog.is_enabled(), "We shouldn't be saving unless UserLog is enabled."

    instance = kwargs["instance"]
    if not instance.start_datetime:
        raise ValidationError("start_datetime cannot be None")
    if instance.last_active_datetime and instance.start_datetime > instance.last_active_datetime:
        raise ValidationError("UserLog date consistency check for start_datetime and last_active_datetime")

    if instance.end_datetime and not instance.total_seconds:
        # Compute total_seconds, save to summary
        #   Note: only supports setting end_datetime once!
        instance.full_clean()

        # The top computation is more lenient: user activity is just time logged in, literally.
        # The bottom computation is more strict: user activity is from start until the last "action"
        #   recorded--in the current case, that means from login until the last moment an exercise or
        #   video log was updated.
        #instance.total_seconds = datediff(instance.end_datetime, instance.start_datetime, units="seconds")
        instance.total_seconds = 0 if not instance.last_active_datetime else datediff(instance.last_active_datetime, instance.start_datetime, units="seconds")

        # Confirm the result (output info first for easier debugging)
        if instance.total_seconds < 0:
            raise ValidationError("Total learning time should always be non-negative.")
        logging.debug("%s: total time (%d): %d seconds" % (instance.user.username, instance.activity_type, instance.total_seconds))

        # Save only completed log items to the UserLogSummary
        UserLogSummary.add_log_to_summary(instance)
Example 4: recurse_nodes_to_extract_knowledge_map

# Module to import: from kalite.settings import LOG [as alias]
# Or: from kalite.settings.LOG import debug [as alias]
def recurse_nodes_to_extract_knowledge_map(node, node_cache):
    """
    Internal function for recursing the topic tree and building the knowledge map.
    Requires rebranding of metadata done by recurse_nodes function.
    """
    assert node["kind"] == "Topic"

    if node.get("in_knowledge_map", None):
        if node["slug"] not in knowledge_map["topics"]:
            logging.debug("Not in knowledge map: %s" % node["slug"])
            node["in_knowledge_map"] = False
            for node in node_cache["Topic"][node["slug"]]:
                node["in_knowledge_map"] = False

        knowledge_topics[node["slug"]] = topic_tools.get_all_leaves(node, leaf_type="Exercise")

        if not knowledge_topics[node["slug"]]:
            sys.stderr.write("Removing topic from topic tree: no exercises. %s" % node["slug"])
            del knowledge_topics[node["slug"]]
            del knowledge_map["topics"][node["slug"]]
            node["in_knowledge_map"] = False
            for node in node_cache["Topic"][node["slug"]]:
                node["in_knowledge_map"] = False
    else:
        if node["slug"] in knowledge_map["topics"]:
            sys.stderr.write("Removing topic from topic tree; does not belong. '%s'" % node["slug"])
            logging.warn("Removing from knowledge map: %s" % node["slug"])
            del knowledge_map["topics"][node["slug"]]

    for child in [n for n in node.get("children", []) if n["kind"] == "Topic"]:
        recurse_nodes_to_extract_knowledge_map(child, node_cache)
Example 5: update_user_activity

# Module to import: from kalite.settings import LOG [as alias]
# Or: from kalite.settings.LOG import debug [as alias]
def update_user_activity(cls, user, activity_type="login", update_datetime=None, language=None, suppress_save=False):
    """Helper function to update an existing user activity log entry."""

    # Do nothing if the max # of records is zero
    # (i.e. this functionality is disabled)
    if not cls.is_enabled():
        return

    if not user:
        raise ValidationError("A valid user must always be specified.")
    if not update_datetime:  # must be done outside the function header (else becomes static)
        update_datetime = datetime.now()
    activity_type = cls.get_activity_int(activity_type)

    cur_log = cls.get_latest_open_log_or_None(user=user, activity_type=activity_type)
    if cur_log:
        # How could you start after you updated??
        if cur_log.start_datetime > update_datetime:
            raise ValidationError("Update time must always be later than the login time.")
    else:
        # No unstopped starts.  Start should have been called first!
        logging.warn("%s: Had to create a user log entry on an UPDATE(%d)! @ %s" % (user.username, activity_type, update_datetime))
        cur_log = cls.begin_user_activity(user=user, activity_type=activity_type, start_datetime=update_datetime, suppress_save=True)

    logging.debug("%s: UPDATE activity (%d) @ %s" % (user.username, activity_type, update_datetime))
    cur_log.last_active_datetime = update_datetime
    cur_log.language = language or cur_log.language  # set the language to the current language, if there is one.
    if not suppress_save:
        cur_log.save()
    return cur_log
Example 6: end_user_activity

# Module to import: from kalite.settings import LOG [as alias]
# Or: from kalite.settings.LOG import debug [as alias]
def end_user_activity(cls, user, activity_type="login", end_datetime=None, suppress_save=False):  # don't accept language--we're just closing previous activity.
    """Helper function to complete an existing user activity log entry."""

    # Do nothing if the max # of records is zero
    # (i.e. this functionality is disabled)
    if not cls.is_enabled():
        return

    if not user:
        raise ValidationError("A valid user must always be specified.")
    if not end_datetime:  # must be done outside the function header (else becomes static)
        end_datetime = datetime.now()
    activity_type = cls.get_activity_int(activity_type)

    cur_log = cls.get_latest_open_log_or_None(user=user, activity_type=activity_type)
    if cur_log:
        # How could you start after you ended??
        if cur_log.start_datetime > end_datetime:
            raise ValidationError("Update time must always be later than the login time.")
    else:
        # No unstopped starts.  Start should have been called first!
        logging.warn("%s: Had to BEGIN a user log entry, but ENDING(%d)! @ %s" % (user.username, activity_type, end_datetime))
        cur_log = cls.begin_user_activity(user=user, activity_type=activity_type, start_datetime=end_datetime, suppress_save=True)

    logging.debug("%s: Logging LOGOUT activity @ %s" % (user.username, end_datetime))
    cur_log.end_datetime = end_datetime
    if not suppress_save:
        cur_log.save()  # total-seconds will be computed here.
    return cur_log
Example 7: __init__

# Module to import: from kalite.settings import LOG [as alias]
# Or: from kalite.settings.LOG import debug [as alias]
def __init__(self, comment=None, fixture=None, **kwargs):
    self.return_dict = {}
    self.return_dict['comment'] = comment
    self.return_dict['class'] = type(self).__name__
    self.return_dict['uname'] = platform.uname()
    self.return_dict['fixture'] = fixture
    try:
        self.verbosity = int(kwargs.get("verbosity"))
    except:
        self.verbosity = 1

    try:
        branch = subprocess.Popen(["git", "describe", "--contains", "--all", "HEAD"], stdout=subprocess.PIPE).communicate()[0]
        self.return_dict['branch'] = branch[:-1]
        head = subprocess.Popen(["git", "log", "--pretty=oneline", "--abbrev-commit", "--max-count=1"], stdout=subprocess.PIPE).communicate()[0]
        self.return_dict['head'] = head[:-1]
    except:
        self.return_dict['branch'] = None
        self.return_dict['head'] = None

    # If setup fails, what could we do?
    #   Letting the exception bubble up is the best option.
    try:
        self._setup(**kwargs)
    except Exception as e:
        logging.debug("Failed setup (%s); trying to tear down" % e)
        try:
            self._teardown()
        except:
            pass
        raise e
Example 8: begin_user_activity

# Module to import: from kalite.settings import LOG [as alias]
# Or: from kalite.settings.LOG import debug [as alias]
def begin_user_activity(cls, user, activity_type="login", start_datetime=None, language=None, suppress_save=False):
    """Helper function to create a user activity log entry."""

    # Do nothing if the max # of records is zero
    # (i.e. this functionality is disabled)
    if not cls.is_enabled():
        return

    if not user:
        raise ValidationError("A valid user must always be specified.")
    if not start_datetime:  # must be done outside the function header (else becomes static)
        start_datetime = datetime.now()
    activity_type = cls.get_activity_int(activity_type)

    cur_log = cls.get_latest_open_log_or_None(user=user, activity_type=activity_type)
    if cur_log:
        # Seems we're logging in without logging out of the previous.
        #   Best thing to do is simulate a login
        #   at the previous last update time.
        #
        # Note: this can be a recursive call
        logging.warn("%s: had to END activity on a begin(%d) @ %s" % (user.username, activity_type, start_datetime))
        # Don't mark current language when closing an old one
        cls.end_user_activity(user=user, activity_type=activity_type, end_datetime=cur_log.last_active_datetime)  # can't suppress save
        cur_log = None

    # Create a new entry
    logging.debug("%s: BEGIN activity(%d) @ %s" % (user.username, activity_type, start_datetime))
    cur_log = cls(user=user, activity_type=activity_type, start_datetime=start_datetime, last_active_datetime=start_datetime, language=language)
    if not suppress_save:
        cur_log.save()
    return cur_log
Example 9: validate_times

# Module to import: from kalite.settings import LOG [as alias]
# Or: from kalite.settings.LOG import debug [as alias]
def validate_times(srt_content, srt_issues):
    times = re.findall("([0-9:,]+) --> ([0-9:,]+)\r\n", srt_content, re.S | re.M)

    parse_time = lambda str: datetime.datetime.strptime(str, "%H:%M:%S,%f")
    for i in range(len(times)):
        try:
            between_subtitle_time = datediff(
                parse_time(times[i][0]), parse_time(times[i - 1][1] if i > 0 else "00:00:00,000")
            )
            within_subtitle_time = datediff(parse_time(times[i][1]), parse_time(times[i][0]))

            if between_subtitle_time > 60.0:
                srt_issues.append("Between-subtitle gap of %5.2f seconds" % between_subtitle_time)

            if within_subtitle_time > 60.0:
                srt_issues.append("Within-subtitle duration of %5.2f seconds" % within_subtitle_time)
            elif within_subtitle_time == 0.0:
                logging.debug("Subtitle flies by too fast (%s --> %s)." % times[i])

            # print "Start: %s\tB: %5.2f\tW: %5.2f" % (parse_time(times[i][0]), between_subtitle_time, within_subtitle_time)
        except Exception as e:
            if not times[i][1].startswith("99:59:59"):
                srt_issues.append("Error checking times: %s" % e)
            else:
                if len(times) - i > 1 and len(times) - i - 1 > len(times) / 10.0:
                    if i == 0:
                        srt_issues.append("No subtitles have a valid starting point.")
                    else:
                        logging.debug(
                            "Hit end of movie, but %d (of %d) subtitle(s) remain in the queue."
                            % (len(times) - i - 1, len(times))
                        )
                break
Example 10: select_best_available_language

# Module to import: from kalite.settings import LOG [as alias]
# Or: from kalite.settings.LOG import debug [as alias]
def select_best_available_language(target_code, available_codes=None):
    """
    Critical function for choosing the best available language for a resource,
    given a target language code.

    This is used by video and exercise pages, for example,
    to determine what file to serve, based on available resources
    and the current requested language.
    """
    # Scrub the input
    target_code = lcode_to_django_lang(target_code)
    if available_codes is None:
        available_codes = get_installed_language_packs().keys()
    available_codes = [lcode_to_django_lang(lc) for lc in available_codes]

    # Hierarchy of language selection
    if target_code in available_codes:
        actual_code = target_code
    elif target_code.split("-", 1)[0] in available_codes:
        actual_code = target_code.split("-", 1)[0]
    elif settings.LANGUAGE_CODE in available_codes:
        actual_code = settings.LANGUAGE_CODE
    elif "en" in available_codes:
        actual_code = "en"
    elif available_codes:
        actual_code = available_codes[0]
    else:
        actual_code = None

    if actual_code != target_code:
        logging.debug("Requested code %s, got code %s" % (target_code, actual_code))

    return actual_code
Example 11: generate_test_files

# Module to import: from kalite.settings import LOG [as alias]
# Or: from kalite.settings.LOG import debug [as alias]
def generate_test_files():
    """Insert asterisks as translations in po files"""

    # Open them up and insert asterisks for all empty msgstrs
    logging.info("Generating test po files")
    en_po_dir = os.path.join(settings.LOCALE_PATHS[0], "en/LC_MESSAGES/")
    for po_file in glob.glob(os.path.join(en_po_dir, "*.po")):
        msgid_pattern = re.compile(r'msgid \"(.*)\"\nmsgstr', re.S | re.M)
        content = open(os.path.join(en_po_dir, po_file), 'r').read()
        results = content.split("\n\n")
        with open(os.path.join(en_po_dir, "tmp.po"), 'w') as temp_file:
            # We know the first block is static, so just dump that.
            temp_file.write(results[0])

            # Now work through actual translations
            for result in results[1:]:
                try:
                    msgid = re.findall(msgid_pattern, result)[0]
                    temp_file.write("\n\n")
                    temp_file.write(result.replace("msgstr \"\"", "msgstr \"***%s***\"" % msgid))
                except Exception as e:
                    logging.error("Failed to insert test string: %s\n\n%s\n\n" % (e, result))

        # Once done replacing, rename temp file to overwrite original
        os.rename(os.path.join(en_po_dir, "tmp.po"), os.path.join(en_po_dir, po_file))

    (out, err, rc) = compile_po_files("en")
    if err:
        logging.debug("Error executing compilemessages: %s" % err)
Example 12: handle

# Module to import: from kalite.settings import LOG [as alias]
# Or: from kalite.settings.LOG import debug [as alias]
def handle(self, *args, **options):
    if len(args) == 1 and args[0] == "test":
        # Callback for "weak" test--checks at least that the django project compiles (local_settings is OK)
        sys.stdout.write("Success!\n")
        exit(0)

    try:
        if options.get("branch", None):
            # Specified a repo
            self.update_via_git(**options)

        elif options.get("zip_file", None):
            # Specified a file
            if not os.path.exists(options.get("zip_file")):
                raise CommandError("Specified zip file does not exist: %s" % options.get("zip_file"))
            self.update_via_zip(**options)

        elif options.get("url", None):
            self.update_via_zip(**options)

        elif os.path.exists(settings.PROJECT_PATH + "/../.git"):
            # If we detect a git repo, try git
            if len(args) == 1 and not options["branch"]:
                options["branch"] = args[0]
            elif len(args) != 0:
                raise CommandError("Specified too many command-line arguments")
            self.update_via_git(**options)

        elif len(args) > 1:
            raise CommandError("Too many command-line arguments.")

        elif len(args) == 1:
            # Specify zip via first command-line arg
            if options['zip_file'] is not None:
                raise CommandError("Cannot specify a zipfile as unnamed and named command-line arguments at the same time.")
            options['zip_file'] = args[0]
            self.update_via_zip(**options)

        else:
            # No params, no git repo: try to get a file online.
            zip_file = tempfile.mkstemp()[1]
            for url in ["http://%s/api/download/kalite/latest/%s/%s/" % (settings.CENTRAL_SERVER_HOST, platform.system().lower(), "en")]:
                logging.info("Downloading repo snapshot from %s to %s" % (url, zip_file))
                try:
                    urllib.urlretrieve(url, zip_file)
                    sys.stdout.write("success @ %s\n" % url)
                    break
                except Exception as e:
                    logging.debug("Failed to get zipfile from %s: %s" % (url, e))
                    continue
            options["zip_file"] = zip_file
            self.update_via_zip(**options)

    except Exception as e:
        if self.started() and not self.ended():
            self.cancel(stage_status="error", notes=unicode(e))
        raise

    assert self.ended(), "Subroutines should complete() if they start()!"
Example 13: invalidate_on_video_delete

# Module to import: from kalite.settings import LOG [as alias]
# Or: from kalite.settings.LOG import debug [as alias]
def invalidate_on_video_delete(sender, **kwargs):
    """
    Listen in to see when available videos become unavailable.
    """
    was_available = kwargs["instance"] and kwargs["instance"].percent_complete == 100
    if was_available:
        logging.debug("Invalidating cache on delete for %s" % kwargs["instance"])
        invalidate_all_caches()
Example 14: stamp_availability_on_topic

# Module to import: from kalite.settings import LOG [as alias]
# Or: from kalite.settings.LOG import debug [as alias]
def stamp_availability_on_topic(topic, videos_path=settings.CONTENT_ROOT, force=True, stamp_urls=True, update_counts_question_mark=None):
    """Uses the (json) topic tree to query the django database for which video files exist.

    Returns the original topic dictionary, with two properties added to each NON-LEAF node:
      * nvideos_known: the # of videos in and under that node that are known (i.e. in the Khan Academy library)
      * nvideos_local: the # of videos in and under that node that were actually downloaded and are available locally
    And the following property for leaf nodes:
      * on_disk

    Input parameters:
      * videos_path: the path to video files
    """
    if update_counts_question_mark is None:
        update_counts_question_mark = do_video_counts_need_update_question_mark()

    if not force and "nvideos_local" in topic:
        return (topic, topic["nvideos_local"], topic["nvideos_known"], False)

    nvideos_local = 0
    nvideos_known = 0

    # Can't deal with leaves
    assert topic["kind"] == "Topic", "Should not be calling this function on leaves; it's inefficient!"

    # Only look for videos if there are more branches
    if len(topic["children"]) == 0:
        logging.debug("no children: %s" % topic["path"])

    for child in topic["children"]:
        # RECURSIVE CALL:
        #   The children have children, let them figure things out themselves
        if "children" in child:
            if not force and "nvideos_local" in child:
                continue
            stamp_availability_on_topic(topic=child, videos_path=videos_path, force=force, stamp_urls=stamp_urls, update_counts_question_mark=update_counts_question_mark)
            nvideos_local += child["nvideos_local"]
            nvideos_known += child["nvideos_known"]

    # BASE CASE:
    # All my children are leaves, so we'll query here (a bit more efficient than 1 query per leaf)
    videos = get_videos(topic)
    for video in videos:
        if force or update_counts_question_mark or "availability" not in video:
            stamp_availability_on_video(video, force=force, stamp_urls=stamp_urls, videos_path=videos_path)
        nvideos_local += int(video["on_disk"])

    nvideos_known += len(videos)
    nvideos_available = nvideos_local if not settings.BACKUP_VIDEO_SOURCE else nvideos_known

    changed = "nvideos_local" in topic and topic["nvideos_local"] != nvideos_local
    changed = changed or ("nvideos_known" in topic and topic["nvideos_known"] != nvideos_known)
    topic["nvideos_local"] = nvideos_local
    topic["nvideos_known"] = nvideos_known
    topic["nvideos_available"] = nvideos_available
    topic["available"] = bool(nvideos_local) or bool(settings.BACKUP_VIDEO_SOURCE)

    return (topic, nvideos_local, nvideos_known, nvideos_available, changed)
Example 15: update_all_distributed_callback

# Module to import: from kalite.settings import LOG [as alias]
# Or: from kalite.settings.LOG import debug [as alias]
def update_all_distributed_callback(request):
    """
    """
    if request.method != "POST":
        raise PermissionDenied("Only POST allowed to this URL endpoint.")

    videos = json.loads(request.POST["video_logs"])
    exercises = json.loads(request.POST["exercise_logs"])
    user = FacilityUser.objects.get(id=request.POST["user_id"])
    node_cache = get_node_cache()

    # Save videos
    n_videos_uploaded = 0
    for video in videos:
        video_id = video['video_id']
        youtube_id = video['youtube_id']

        # Only save video logs for videos that we recognize.
        if video_id not in node_cache["Video"]:
            logging.warn("Skipping unknown video %s" % video_id)
            continue

        try:
            (vl, _) = VideoLog.get_or_initialize(user=user, video_id=video_id, youtube_id=youtube_id)
            for key, val in video.iteritems():
                setattr(vl, key, val)
            logging.debug("Saving video log for %s: %s" % (video_id, vl))
            vl.save()
            n_videos_uploaded += 1
        except KeyError:
            logging.error("Could not save video log for data with missing values: %s" % video)
        except Exception as e:
            error_message = "Unexpected error importing videos: %s" % e
            return JsonResponseMessageError(error_message)

    # Save exercises
    n_exercises_uploaded = 0
    for exercise in exercises:
        # Only save exercise logs for exercises that we recognize.
        if exercise['exercise_id'] not in node_cache['Exercise']:
            logging.warn("Skipping unknown exercise %s" % exercise['exercise_id'])
            continue

        try:
            (el, _) = ExerciseLog.get_or_initialize(user=user, exercise_id=exercise["exercise_id"])
            for key, val in exercise.iteritems():
                setattr(el, key, val)
            logging.debug("Saving exercise log for %s: %s" % (exercise['exercise_id'], el))
            el.save()
            n_exercises_uploaded += 1
        except KeyError:
            logging.error("Could not save exercise log for data with missing values: %s" % exercise)
        except Exception as e:
            error_message = "Unexpected error importing exercises: %s" % e
            return JsonResponseMessageError(error_message)

    return JsonResponse({"success": "Uploaded %d exercises and %d videos" % (n_exercises_uploaded, n_videos_uploaded)})