

Python token.DOT Attribute Code Examples

This article collects typical usage examples of the token.DOT attribute in Python. If you have been wondering what token.DOT does, how to use it, or what real code that uses it looks like, the hand-picked examples below should help. You can also explore further usage examples from the token module, where this attribute lives.


The sections below show 9 code examples of the token.DOT attribute, sorted by popularity by default.
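
Before the examples, a minimal sketch of what token.DOT is: an integer constant in the standard-library token module that names the "." token type. The mapping token.tok_name turns it back into a readable name (the exact integer value can differ between Python versions).

import token

print(token.DOT)                    # an integer constant, e.g. 23
print(token.tok_name[token.DOT])    # 'DOT'
print(token.ISTERMINAL(token.DOT))  # True: DOT is a terminal (leaf) token type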

Example 1: Annotate

# Required import: import token [as alias]
# Or: from token import DOT [as alias]
def Annotate(cls, nodes):
    if not nodes:
      return None
    if nodes[0].type != symbol.atom:
      return None
    if not nodes[0].children or nodes[0].children[0].type != token.NAME:
      return None

    for i in xrange(1, len(nodes)):
      if not nodes:
        break
      if nodes[i].type != symbol.trailer:
        break
      if len(nodes[i].children) != 2:
        break
      if (nodes[i].children[0].type != token.DOT or
          nodes[i].children[1].type != token.NAME):
        break
    else:
      i = len(nodes)

    return [cls(nodes[:i])] + nodes[i:] 
Developer ID: FSecureLABS, Project: Jandroid, Lines: 24, Source: reference.py
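
A rough sketch of the tree shape Annotate() walks: a dotted reference parses to a NAME atom followed by trailer nodes, each holding one DOT and one NAME. The snippet below uses only the old stdlib parser module (deprecated in Python 3.9, removed in 3.10) to make the DOT tokens visible, so run it on an older interpreter; the project's own node classes are not used here.

import parser
import token

def terminals(node, acc):
    # Collect (token name, text) pairs for the terminal (leaf) nodes.
    if token.ISTERMINAL(node[0]):
        acc.append((token.tok_name[node[0]], node[1]))
    else:
        for child in node[1:]:
            terminals(child, acc)
    return acc

print(terminals(parser.st2list(parser.expr('a.b.c')), []))
# Roughly: [('NAME', 'a'), ('DOT', '.'), ('NAME', 'b'), ('DOT', '.'),
#           ('NAME', 'c'), ('NEWLINE', ''), ('ENDMARKER', '')]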

Example 2: value

# Required import: import token [as alias]
# Or: from token import DOT [as alias]
def value(self, value):
    value_parts = value.split('.')

    # If we have too many children, cut the list down to size.
    # pylint: disable=attribute-defined-outside-init
    self._children = self._children[:len(value_parts)]

    # Update child nodes.
    for child, value_part in itertools.izip_longest(
        self._children, value_parts):
      if child:
        # Modify existing children. This helps preserve comments and spaces.
        child.children[-1].value = value_part
      else:
        # Add children as needed.
        token_snippets = [
            snippet.TokenSnippet.Create(token.DOT, '.'),
            snippet.TokenSnippet.Create(token.NAME, value_part),
        ]
        self._children.append(snippet.Symbol(symbol.trailer, token_snippets)) 
Developer ID: FSecureLABS, Project: Jandroid, Lines: 22, Source: reference.py

Example 3: value

# Required import: import token [as alias]
# Or: from token import DOT [as alias]
def value(self, value):
    value_parts = value.split('.')
    for value_part in value_parts:
      if keyword.iskeyword(value_part):
        raise ValueError('%s is a reserved keyword.' % value_part)

    # If we have too many children, cut the list down to size.
    # pylint: disable=attribute-defined-outside-init
    self._children = self._children[:len(value_parts)*2-1]

    # Update child nodes.
    for child, value_part in itertools.izip_longest(
        self._children[::2], value_parts):
      if child:
        # Modify existing children. This helps preserve comments and spaces.
        child.value = value_part
      else:
        # Add children as needed.
        self._children.append(snippet.TokenSnippet.Create(token.DOT, '.'))
        self._children.append(
            snippet.TokenSnippet.Create(token.NAME, value_part)) 
Developer ID: FSecureLABS, Project: Jandroid, Lines: 23, Source: import_statement.py
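
The arithmetic in the setter above comes from the flat, alternating child layout it maintains: a dotted name of N parts occupies N*2-1 children, children[::2] are the NAME snippets, and the odd positions are DOT snippets. A plain-Python illustration, with hypothetical (type, text) tuples standing in for the project's snippet objects and a sample dotted name:

import token

def build_children(dotted_name):
    children = []
    for i, part in enumerate(dotted_name.split('.')):
        if i:
            children.append((token.DOT, '.'))
        children.append((token.NAME, part))
    return children

children = build_children('telemetry.core.util')
assert len(children) == 3 * 2 - 1
assert [text for _, text in children[::2]] == ['telemetry', 'core', 'util']
assert all(tok == token.DOT for tok, _ in children[1::2])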

Example 4: clean

# Required import: import token [as alias]
# Or: from token import DOT [as alias]
def clean(ast_tuple):
    """
    reverse ast tuple to a list of tokens
    merge sequences (token.NAME, token.DOT, token.NAME)
    """
    result = []
    last = None
    for couple in _clean(ast_tuple):
        if couple[0] == token.NAME and last == token.DOT:
            result[-1][1] += couple[1]
        elif couple[0] == token.DOT and last == token.NAME:
            result[-1][1] += couple[1]
        else:
            result.append(couple)
        last = couple[0]
    return result 
Developer ID: jlachowski, Project: clonedigger, Lines: 18, Source: astutils.py
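
Since _clean() is not part of the excerpt, here is a standalone sketch that inlines only the merging step of clean(), fed with hand-written (type, text) couples, to show how adjacent NAME/DOT/NAME tokens collapse into one dotted name:

import token

def merge_dotted(couples):
    result = []
    last = None
    for couple in couples:
        if (couple[0] == token.NAME and last == token.DOT) or \
           (couple[0] == token.DOT and last == token.NAME):
            result[-1][1] += couple[1]  # extend the text of the previous entry
        else:
            result.append(couple)
        last = couple[0]
    return result

print(merge_dotted([[token.NAME, 'os'], [token.DOT, '.'], [token.NAME, 'path'],
                    [token.OP, '('], [token.OP, ')']]))
# The first three couples collapse into [token.NAME, 'os.path'];
# the two OP couples pass through unchanged.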

Example 5: decorator_name

# Required import: import token [as alias]
# Or: from token import DOT [as alias]
def decorator_name(self, nodelist):
        listlen = len(nodelist)
        assert listlen >= 1 and listlen % 2 == 1

        item = self.atom_name(nodelist)
        i = 1
        while i < listlen:
            assert nodelist[i][0] == token.DOT
            assert nodelist[i + 1][0] == token.NAME
            item = Getattr(item, nodelist[i + 1][1])
            i += 2

        return item 
Developer ID: IronLanguages, Project: ironpython2, Lines: 15, Source: transformer.py

Example 6: com_assign_trailer

# Required import: import token [as alias]
# Or: from token import DOT [as alias]
def com_assign_trailer(self, primary, node, assigning):
        t = node[1][0]
        if t == token.DOT:
            return self.com_assign_attr(primary, node[2], assigning)
        if t == token.LSQB:
            return self.com_subscriptlist(primary, node[2], assigning)
        if t == token.LPAR:
            raise SyntaxError, "can't assign to function call"
        raise SyntaxError, "unknown trailer type: %s" % t 
Developer ID: IronLanguages, Project: ironpython2, Lines: 11, Source: transformer.py

Example 7: com_apply_trailer

# Required import: import token [as alias]
# Or: from token import DOT [as alias]
def com_apply_trailer(self, primaryNode, nodelist):
        t = nodelist[1][0]
        if t == token.LPAR:
            return self.com_call_function(primaryNode, nodelist[2])
        if t == token.DOT:
            return self.com_select_member(primaryNode, nodelist[2])
        if t == token.LSQB:
            return self.com_subscriptlist(primaryNode, nodelist[2], OP_APPLY)

        raise SyntaxError, 'unknown node type: %s' % t 
Developer ID: IronLanguages, Project: ironpython2, Lines: 12, Source: transformer.py

Example 8: com_subscript

# Required import: import token [as alias]
# Or: from token import DOT [as alias]
def com_subscript(self, node):
        # slice_item: expression | proper_slice | ellipsis
        # In the Python 2 grammar an ellipsis subscript is spelled as three
        # consecutive '.' tokens, so two leading token.DOT children identify it.
        ch = node[1]
        t = ch[0]
        if t == token.DOT and node[2][0] == token.DOT:
            return Ellipsis()
        if t == token.COLON or len(node) > 2:
            return self.com_sliceobj(node)
        return self.com_node(ch) 
Developer ID: IronLanguages, Project: ironpython2, Lines: 11, Source: transformer.py

Example 9: __init__

# Required import: import token [as alias]
# Or: from token import DOT [as alias]
def __init__(self, token_type, tokens):
    # For operators and delimiters, the TokenSnippet's type may be more specific
    # than the type of the constituent token. E.g. the TokenSnippet type is
    # token.DOT, but the token type is token.OP. This is because the parser
    # has more context than the tokenizer.
    self._type = token_type
    self._tokens = tokens
    self._modified = False 
Developer ID: FSecureLABS, Project: Jandroid, Lines: 10, Source: snippet.py
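
The OP-versus-DOT distinction described in that comment is easy to observe with the standard tokenize module (a Python 3 sketch; TokenInfo.exact_type was added in 3.3): the tokenizer reports "." with the generic type token.OP, and only exact_type narrows it to token.DOT.

import io
import token
import tokenize

for tok in tokenize.tokenize(io.BytesIO(b'a.b\n').readline):
    if tok.string == '.':
        print(token.tok_name[tok.type])        # 'OP'  - what the tokenizer reports
        print(token.tok_name[tok.exact_type])  # 'DOT' - the more specific type
        print(tok.exact_type == token.DOT)     # True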


Note: The token.DOT examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The snippets are selected from open-source projects contributed by their respective developers; copyright of the source code belongs to the original authors, and any use or distribution should follow the corresponding project's license. Please do not reproduce without permission.