当前位置: 首页>>代码示例>>Python>>正文


Python lex.LexToken类代码示例

本文整理汇总了Python中ply.lex.LexToken的典型用法代码示例。如果您正苦于以下问题:Python LexToken类的具体用法?Python LexToken怎么用?Python LexToken使用的例子?那么恭喜您, 这里精选的类代码示例或许可以为您提供帮助。


在下文中一共展示了LexToken类的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。

示例1: _lextoken

 def _lextoken( self, type_, value ) :
     """Build a LexToken of kind *type_* carrying *value*, stamped with the
     wrapped lexer's current line number and position."""
     token = LexToken()
     token.value = value
     token.type = type_
     # Position info comes from the underlying ply lexer at call time.
     token.lexpos = self.lexer.lexpos
     token.lineno = self.lexer.lineno
     return token
开发者ID:prataprc,项目名称:eazytext,代码行数:7,代码来源:lexer.py

示例2: token

  def token(self):
    """Return the next one-character LexToken from self.str, or None at EOF.

    Each call consumes exactly one character (self.cur advances by one).
    Punctuation maps to a dedicated token type, identifier characters to
    ID_PART, line terminators to LT, and anything else to UCHAR.
    """
    pos = self.cur
    if pos >= len(self.str):
      return None

    ch = self.str[pos]

    # Single-character punctuation and line-terminator token types.
    simple = {
      "\\": "BACKSLASH",
      "/": "DIVIDE",
      "[": "LSBRACKET",
      "]": "RSBRACKET",
      "*": "STAR",
      "\n": "LT",
      "\r": "LT",
    }

    t = LexToken()
    if ch in simple:
      t.type = simple[ch]
    elif re.match(r"[a-zA-Z0-9_$]+", ch) is not None:
      t.type = "ID_PART"
    else:
      t.type = "UCHAR"

    t.value = ch
    t.lineno = 0  # line tracking not implemented for this mini-lexer
    t.lexpos = pos

    self.cur += 1
    # Removed leftover debug print(t) that spammed stdout on every token.
    return t
开发者ID:joeedh,项目名称:FunLittleProject,代码行数:26,代码来源:js_regexpr_parse.py

示例3: clone_token

 def clone_token(old_token, new_type):
   """Return a copy of *old_token* whose type is replaced by *new_type*;
   value and position info are carried over unchanged."""
   dup = LexToken()
   dup.type = new_type
   for attr in ("value", "lineno", "lexpos"):
     setattr(dup, attr, getattr(old_token, attr))
   return dup
开发者ID:MHordecki,项目名称:easyply,代码行数:7,代码来源:parser.py

示例4: _create_token

 def _create_token(self, type):
     """Return a placeholder LexToken of the given *type* with an empty
     value and zeroed position info."""
     tok = LexToken()
     tok.type = type
     tok.value = ''
     # Position is irrelevant for synthesized tokens; zero both fields.
     tok.lineno = tok.lexpos = 0
     return tok
开发者ID:alangpierce,项目名称:APPy,代码行数:7,代码来源:file_lexer.py

示例5: _new_token

def _new_token(type, token):
    """Return a fresh LexToken of kind *type* that inherits *token*'s
    value and source position."""
    fresh = LexToken()
    fresh.type = type
    for name in ("value", "lineno", "lexpos"):
        setattr(fresh, name, getattr(token, name))
    return fresh
开发者ID:abadger,项目名称:Bento,代码行数:7,代码来源:lexer.py

示例6: new_dedent

def new_dedent(amount, token):
    """Synthesize a DEDENT token carrying *amount*, located at *token*'s
    source position."""
    dedent = LexToken()
    dedent.type = "DEDENT"
    dedent.value = amount
    # Anchor the synthetic token where the triggering token was seen.
    dedent.lexpos = token.lexpos
    dedent.lineno = token.lineno
    return dedent
开发者ID:davedoesdev,项目名称:Bento,代码行数:7,代码来源:lexer.py

示例7: token

 def token(self, value, ty=None):
     """Build a throwaway LexToken for test comparisons.

     :param value: the token's value; also used as its type when *ty* is None.
     :param ty: explicit token type, or None to reuse *value*.
     :return: a LexToken with sentinel position info (lineno/lexpos = -1).
     """
     t = LexToken()
     # PEP 8: compare against None with `is not`, not `!=`.
     t.type = ty if ty is not None else value
     t.value = value
     t.lineno = -1
     t.lexpos = -1
     return t
开发者ID:ShishKabab,项目名称:python-jsonpath-rw,代码行数:7,代码来源:test_lexer.py

示例8: _to_yacc

 def _to_yacc(self, token_type, token_data):
     """Wrap (token_type, token_data) in a LexToken and hand it to the
     parser callback."""
     tok = LexToken()
     tok.type = token_type
     # The value keeps the type alongside the payload for the grammar actions.
     tok.value = (token_type, token_data)
     tok.lineno = 0  # TODO: file offset
     tok.lexpos = 0
     self.__to_yacc(tok)
开发者ID:vpelletier,项目名称:ITI1480A-linux,代码行数:7,代码来源:parser.py

示例9: p_error

def p_error(p):
  """
  print(p.lexer.prev.lineno, p.lineno)
  if p.lexer.prev.lineno < p.lineno or p.type == "RBRACKET":
    yacc.errok()
    return
  """
  # NOTE(review): the docstring above is disabled recovery code, kept as-is.
  # Automatic-semicolon-insertion style error recovery for the JS grammar.

  if p == None:
    # EOF: try inserting one synthetic SEMI token before declaring failure.
    # restricted()/glob.* are module-level helpers/state defined elsewhere.
    if not restricted() and glob.g_tried_semi == False:
      t = LexToken()
      t.type = "SEMI"
      t.value = ";"
      t.lexpos = -1
      t.lineno = -1
      glob.g_lexer.push(t)
      glob.g_tried_semi = True
      yacc.errok()
    else:
      sys.stderr.write(glob.g_file + ": error: unexpected end of file\n")
    return
  else:
    glob.g_error_pre = p
    if handle_semi_error(p):
      # Recoverable: synthesize a SEMI at the offending token's position.
      t = LexToken()
      t.type = "SEMI"
      t.value = ";"
      t.lexpos = p.lexpos
      t.lineno = p.lineno
      #glob.g_lexer.push(t)
      #glob.g_tried_semi = True

      yacc.errok()
      glob.g_error = False
      return
    else:
      glob.g_error = True
      print_err(p)
      return

  # NOTE(review): everything below is unreachable — both branches above
  # return unconditionally. Kept verbatim; candidate for deletion.
  if glob.g_error:
    print_err(glob.g_error_pre)

  glob.g_error_pre = p
  glob.g_error = True

  try:
    line = int(p.lineno)
  except:
    line = p.lineno(1)

  try:
    lexdata = p.lexer.lexer.lexdata
    sline = p.lexer.lexer.lexpos
  except:
    lexdata = p.lexer.lexdata
    sline = p.lexer.lexpos

  sline = lexdata[sline-40:sline+1]
开发者ID:joeedh,项目名称:es5parse,代码行数:59,代码来源:js_parse.py

示例10: gen

def gen(code):
	"""Yield a LexToken for every (value, type) pair in the nested *code*
	structure; a trailing None marks the end of the stream."""
	for row in code:
		for pair in row:
			tok = LexToken()
			tok.type = pair[1]
			tok.value = pair[0]
			yield tok
	# Sentinel consumed by the parser loop to detect exhaustion.
	yield None
开发者ID:koo5,项目名称:secret-banana,代码行数:8,代码来源:parser.py

示例11: createFunctionDefinition

 def createFunctionDefinition(self, def_token, var_token, params, val_node):
    """Build a LetNode binding *var_token* to a LambdaNode over *params*
    and *val_node* — i.e. a function definition desugared to a let."""
    # Synthetic LAMBDA token; no source position is attached.
    lam_tok = LexToken()
    lam_tok.type = 'LAMBDA'
    lam_tok.value = 'lambda'
    body = LambdaNode(lam_tok, [Node(None, None, nodes(params)), val_node])
    return LetNode(def_token, [VariableNode(var_token), body])
开发者ID:z-lang,项目名称:z-lang,代码行数:8,代码来源:SyntaxTreeFactory.py

示例12: newtok

 def newtok(tok, ttype=None):
     """Flush *tok* to push() (from the enclosing scope) when it holds a
     type, then start a fresh empty token of *ttype*; return *tok*
     unchanged when no new token is needed."""
     # Guard (De Morgan of the original condition): keep the current token
     # when it already has the requested type, or when no type was requested
     # and it is still empty.
     if tok.type == ttype or (ttype is None and tok.value == ""):
         return tok
     if tok.type is not None:
         push(tok)
     fresh = LexToken()
     fresh.type = ttype
     fresh.value = ""
     return fresh
开发者ID:joeedh,项目名称:webblender,代码行数:8,代码来源:js_ast_match_lex.py

示例13: _new_token

 def _new_token(self, type=None, value=None, lexpos=None, lineno=None) -> LexToken:
     """
     Creates a new lexer token with the given properties.
     :return: a new lexer token with the given properties.
     """
     token = LexToken()
     token.type = type
     token.value = value
     token.lexpos = lexpos
     token.lineno = lineno
     # Bug fix: the return annotation and docstring promise a LexToken, but
     # the original fell off the end and returned None.
     return token
开发者ID:outofbits,项目名称:linkedPy,代码行数:10,代码来源:lexer_wrapper.py

示例14: _lextoken_from_html

	def _lextoken_from_html(self, html_token):
		"""Convert an html5lib-style token dict into a ply LexToken.

		The numeric html_token['type'] is mapped to an HTML_* token type;
		the token's value is a dict with self_closing/name/attrs/data keys.
		Raises SyntaxError for HTML parse-error tokens (see note below).
		"""
		token = LexToken()
		token.type = {
			0 : 'HTML_DOCTYPE',
			1 : 'HTML_CHARS',
			2 : 'HTML_WS',
			3 : 'HTML_STARTTAG',
			4 : 'HTML_ENDTAG',
			5 : 'HTML_EMPTYTAG',
			6 : 'HTML_COMMENT',
			7 : 'HTML_PARSEERROR',
		}[html_token['type']]
		# TODO: fix lineno/lexpos
		token.lineno = self.lineno
		token.lexpos = self.lexpos
		
		token.value = {
			'self_closing' : html_token.get('selfClosing', False),
			'name' : html_token.get('name', None),
		}
		
		# A list/tuple 'data' means tag attributes; otherwise it is text data.
		if isinstance(html_token['data'], (list, tuple)):
			token.value['attrs'] = html_token['data']
			token.value['data'] = ''
			# assumes 'name' is always present on tag tokens — TODO confirm,
			# since .get() above defaults it to None and .lower() would fail.
			if token.value['name'].lower() in voidElements:
				token.type = 'HTML_VOID_TAG'
		else:
			token.value['data'] = html_token['data']
		
		# NOTE(review): token.type is one of the 'HTML_*' strings assigned
		# above, while tokenTypes['ParseError'] presumably holds html5lib's
		# native code — confirm this comparison can ever be true (parse
		# errors were mapped to 'HTML_PARSEERROR' in the dict lookup).
		if token.type == tokenTypes['ParseError']:
			raise SyntaxError("Got HTML Parse Error for token {}".format(html_token))
		
		return token
开发者ID:Naddiseo,项目名称:jsdompl,代码行数:33,代码来源:htmllexer.py

示例15: p_error

 def p_error(self, p):
     """Record the offending token on parse error; at EOF, synthesize an
     'error' token covering the remaining input and resume the parser."""
     if p is None:
         # hack: handle EOF — ply calls p_error with None at end of input.
         from ply.lex import LexToken
         eof_tok = LexToken()
         eof_tok.type = 'error'
         eof_tok.value = self.lexer.lexdata[self.lexer.lexpos:]
         eof_tok.lineno = self.lexer.lineno
         eof_tok.lexpos = self.lexer.lexpos
         self._parser.errok()
         return eof_tok
     # TODO: real recovery; for now just remember the bad token.
     self._errors.append(p)
开发者ID:pietype,项目名称:whisper-python,代码行数:15,代码来源:parser.py


注:本文中的ply.lex.LexToken类示例由纯净天空整理自Github/MSDocs等开源代码及文档管理平台,相关代码片段筛选自各路编程大神贡献的开源项目,源码版权归原作者所有,传播和使用请参考对应项目的License;未经允许,请勿转载。