This article collects typical usage examples of tokenize.Name in Python. If you are wondering what tokenize.Name is, how to use it, or what it looks like in real code, the hand-picked examples below may help; you can also read further into the tokenize module it comes from. Despite being listed here as a "method", tokenize.Name is actually an undocumented regular-expression pattern string that the tokenizer uses for name tokens, so it is normally passed to functions from the re module.
Five code examples using tokenize.Name are shown below, ordered by popularity by default.
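A minimal sketch (not taken from any of the examples below) of what the pattern looks like and what it matches:

import re
import tokenize

print(tokenize.Name)                                 # the raw pattern string, e.g. r'\w+' on Python 3
print(re.findall(tokenize.Name, "foo:vbox-dialog"))  # ['foo', 'vbox', 'dialog']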
Example 1: normalize_names
# Required module: import tokenize [as alias]
# Or: from tokenize import Name [as alias]
def normalize_names(self):
    """
    Internally used to normalize widget names: a widget named
    foo:vbox-dialog in the Glade file is referred to as
    self.vbox_dialog in the code. It also stores the list of
    prefixes of each widget under its "prefixes" data key.
    """
    for widget in self.get_widgets():
        widget_name = gtk.Widget.get_name(widget)
        prefixes_name_l = widget_name.split(":")
        prefixes = prefixes_name_l[:-1]
        widget_api_name = prefixes_name_l[-1]
        # Keep only the pieces matched by tokenize.Name and join them
        # with underscores to form a valid attribute name.
        widget_api_name = "_".join(re.findall(tokenize.Name, widget_api_name))
        gtk.Widget.set_name(widget, widget_api_name)
        if hasattr(self, widget_api_name):
            raise AttributeError("instance %s already has an attribute %s"
                                 % (self, widget_api_name))
        else:
            setattr(self, widget_api_name, widget)
        if prefixes:
            gtk.Widget.set_data(widget, "prefixes", prefixes)
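The gtk calls aside, the heart of the normalization is the split on ":" plus the tokenize.Name regex. A gtk-free sketch of that step, using a hypothetical widget name:

import re
import tokenize

widget_name = "foo:vbox-dialog"
parts = widget_name.split(":")
prefixes, api_name = parts[:-1], parts[-1]
api_name = "_".join(re.findall(tokenize.Name, api_name))
print(prefixes, api_name)   # ['foo'] vbox_dialog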
Example 2: _validate_string
# Required module: import tokenize [as alias]
# Or: from tokenize import Name [as alias]
def _validate_string(self, text):
    """Validates a string is composed of valid characters.

    Args:
        text: any str to validate.

    Raises:
        ValueError: when text contains illegal characters.
    """
    if not re.match(tokenize.Name, text):
        raise ValueError('%s should only contain ascii letters or digits.' %
                         text)
Example 3: isidentifier
# Required module: import tokenize [as alias]
# Or: from tokenize import Name [as alias]
def isidentifier(string):
    """Check if string can be a variable name."""
    return re.match(tokenize.Name + r'\Z', string) is not None
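A quick usage sketch, assuming the isidentifier helper above is in scope together with the re and tokenize imports noted in its header comments:

print(isidentifier("vbox_dialog"))  # True
print(isidentifier("foo-bar"))      # False: '-' never matches tokenize.Name
print(isidentifier("foo bar"))      # False: the space stops the match before \Z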
Example 4: valid_identifier
# Required module: import tokenize [as alias]
# Or: from tokenize import Name [as alias]
def valid_identifier(name):
    return re.match(tokenize.Name + '$', name) and not keyword.iskeyword(name)
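Hypothetical usage, assuming valid_identifier above is in scope and re, tokenize and keyword are imported; unlike Example 3 it also rejects reserved words:

print(bool(valid_identifier("vbox_dialog")))  # True
print(bool(valid_identifier("class")))        # False: a Python keyword
print(bool(valid_identifier("foo-bar")))      # False: '-' breaks the match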
Example 5: register_last_expr
# Required module: import tokenize [as alias]
# Or: from tokenize import Name [as alias]
def register_last_expr(tree, register):
    """
    Registers the last expression, by wrapping it in a call to the
    function named by `register`, in the context of an AST.
    tree may either be a list of nodes, or an ast node with a body.
    Returns the newly modified structure AND mutates the original.
    """
    if isinstance(tree, list):
        if not tree:
            # Empty body.
            return tree
        # Allows us to use cases like directly passing orelse bodies.
        last_node = tree[-1]
    else:
        last_node = tree.body[-1]
    if type(last_node) in NO_REGISTER_STATEMENTS:
        return tree

    def mk_register_node(final_node):
        return ast.Expr(
            value=ast.Call(
                func=ast.Name(
                    id=register,
                    ctx=ast.Load(),
                ),
                args=[
                    final_node.value,
                ],
                keywords=[],
                # starargs/kwargs only exist on Python < 3.5 ASTs.
                starargs=None,
                kwargs=None,
            )
        )

    if hasattr(last_node, 'body'):
        # Deep inspect the body of the nodes.
        register_last_expr(last_node, register)
        # Try to register in all the body types.
        try:
            register_last_expr(last_node.orelse, register)
        except AttributeError:
            pass
        try:
            for handler in last_node.handlers:
                register_last_expr(handler, register)
        except AttributeError:
            pass
        try:
            register_last_expr(last_node.finalbody, register)
        except AttributeError:
            pass
    else:
        # Nodes with no body require no recursive inspection.
        tree.body[-1] = mk_register_node(last_node)
    return ast.fix_missing_locations(tree)
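Note that this last snippet builds ast.Name nodes rather than using the tokenize.Name pattern, and it depends on a project-specific NO_REGISTER_STATEMENTS constant, so it is not runnable on its own. The following self-contained sketch (all names hypothetical) shows the same idea in miniature: wrap the last expression of a parsed module in a call to a registration function, then execute the result.

import ast

source = "x = 2\nx * 21"
tree = ast.parse(source)
last = tree.body[-1]
if isinstance(last, ast.Expr):
    # Replace `x * 21` with `_register(x * 21)`.
    tree.body[-1] = ast.Expr(
        value=ast.Call(
            func=ast.Name(id="_register", ctx=ast.Load()),
            args=[last.value],
            keywords=[],
        )
    )
ast.fix_missing_locations(tree)

results = []
exec(compile(tree, "<demo>", "exec"), {"_register": results.append})
print(results)   # [42]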