This article collects typical usage examples of the Python class mypy.errors.Errors. If you have been wondering what exactly the Errors class does, how to use it, or what real-world code using Errors looks like, the curated class examples below may help.
A total of 12 code examples of the Errors class are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Python code examples.
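Before looking at the individual examples, the following minimal sketch shows the recurring pattern they all share: create an Errors instance, attach it to a file, report problems, then inspect or raise the collected errors. It is only a sketch based on the calls that appear in the examples below (set_file, report, is_errors, raise_error); the exact signatures of these methods differ between mypy versions, so treat the argument lists as illustrative.

from mypy.errors import Errors

errors = Errors()
errors.set_file('example.py')        # attribute subsequent reports to this file
errors.report(3, 'invalid syntax')   # some versions take (line, message), others (line, column, message)

if errors.is_errors():               # were any errors collected?
    errors.raise_error()             # abort with the collected errors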
Example 1: parse
def parse(source: Union[str, bytes], fnam: str = None, errors: Errors = None,
          pyversion: Tuple[int, int] = defaults.PYTHON3_VERSION,
          custom_typing_module: str = None) -> MypyFile:
    """Parse a source file, without doing any semantic analysis.

    Return the parse tree. If errors is not provided, raise ParseError
    on failure. Otherwise, use the errors object to report parse errors.

    The pyversion (major, minor) argument determines the Python syntax variant.
    """
    is_stub_file = bool(fnam) and fnam.endswith('.pyi')
    try:
        assert pyversion[0] < 3 and not is_stub_file
        ast = ast27.parse(source, fnam, 'exec')
        tree = ASTConverter(pyversion=pyversion,
                            is_stub=is_stub_file,
                            custom_typing_module=custom_typing_module,
                            ).visit(ast)
        assert isinstance(tree, MypyFile)
        tree.path = fnam
        tree.is_stub = is_stub_file
        return tree
    except (SyntaxError, TypeCommentParseError) as e:
        if errors:
            errors.set_file('<input>' if fnam is None else fnam)
            errors.report(e.lineno, e.offset, e.msg)
        else:
            raise

    return MypyFile([], [], False, set())
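A hypothetical call to this Python 2 variant might look as follows; the file name and source text are made up for illustration. Because an Errors object is passed in, a syntax error is reported on it instead of propagating as an exception.

errors = Errors()
tree = parse(b'print "hi"\n', 'legacy.py', errors, pyversion=(2, 7))
if errors.is_errors():    # True only if the source failed to parse
    errors.raise_error()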
Example 2: parse
def parse(source: Union[str, bytes], fnam: str = None, errors: Errors = None,
          pyversion: Tuple[int, int] = defaults.PYTHON3_VERSION,
          custom_typing_module: str = None, implicit_any: bool = False) -> MypyFile:
    """Parse a source file, without doing any semantic analysis.

    Return the parse tree. If errors is not provided, raise ParseError
    on failure. Otherwise, use the errors object to report parse errors.

    The pyversion (major, minor) argument determines the Python syntax variant.
    """
    is_stub_file = bool(fnam) and fnam.endswith('.pyi')
    try:
        ast = typed_ast.parse(source, fnam, 'exec')
    except SyntaxError as e:
        if errors:
            errors.set_file('<input>' if fnam is None else fnam)
            errors.report(e.lineno, e.msg)  # type: ignore
        else:
            raise
    else:
        tree = ASTConverter().visit(ast)
        tree.path = fnam
        tree.is_stub = is_stub_file
        return tree

    return MypyFile([],
                    [],
                    False,
                    set(),
                    weak_opts=set())
Example 3: parse_type_comment
def parse_type_comment(type_comment: str, line: int, errors: Errors) -> Optional[Type]:
    try:
        typ = ast35.parse(type_comment, '<type_comment>', 'eval')
    except SyntaxError as e:
        errors.report(line, e.offset, TYPE_COMMENT_SYNTAX_ERROR)
        return None
    else:
        assert isinstance(typ, ast35.Expression)
        return TypeConverter(errors, line=line).visit(typ.body)
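A small, hypothetical usage sketch: a well-formed PEP 484 type comment yields a mypy Type, while a malformed one is reported through the Errors object and None is returned. The file name passed to set_file is illustrative.

errors = Errors()
errors.set_file('example.py')
ok = parse_type_comment('List[int]', line=12, errors=errors)   # returns a mypy Type
bad = parse_type_comment('List[int', line=13, errors=errors)   # reports TYPE_COMMENT_SYNTAX_ERROR, returns None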
Example 4: __init__
def __init__(self, data_dir: str,
             lib_path: List[str],
             target: int,
             pyversion: Tuple[int, int],
             flags: List[str],
             ignore_prefix: str,
             custom_typing_module: str,
             reports: Reports) -> None:
    self.data_dir = data_dir
    self.errors = Errors()
    self.errors.set_ignore_prefix(ignore_prefix)
    self.lib_path = lib_path
    self.target = target
    self.pyversion = pyversion
    self.flags = flags
    self.custom_typing_module = custom_typing_module
    self.reports = reports
    self.semantic_analyzer = SemanticAnalyzer(lib_path, self.errors,
                                              pyversion=pyversion)
    modules = self.semantic_analyzer.modules
    self.semantic_analyzer_pass3 = ThirdPass(modules, self.errors)
    self.type_checker = TypeChecker(self.errors, modules, self.pyversion)
    self.states = []  # type: List[State]
    self.module_files = {}  # type: Dict[str, str]
    self.module_deps = {}  # type: Dict[Tuple[str, str], bool]
    self.missing_modules = set()  # type: Set[str]
Example 5: __init__
def __init__(self, data_dir: str,
             lib_path: List[str],
             target: int,
             output_dir: str,
             pyversion: int,
             flags: List[str],
             ignore_prefix: str) -> None:
    self.data_dir = data_dir
    self.errors = Errors()
    self.errors.set_ignore_prefix(ignore_prefix)
    self.lib_path = lib_path
    self.target = target
    self.output_dir = output_dir
    self.pyversion = pyversion
    self.flags = flags
    self.semantic_analyzer = SemanticAnalyzer(lib_path, self.errors)
    self.semantic_analyzer_pass3 = ThirdPass(self.errors)
    self.type_checker = TypeChecker(self.errors,
                                    self.semantic_analyzer.modules,
                                    self.pyversion)
    self.states = List[State]()
    self.module_files = Dict[str, str]()
    self.icode = Dict[str, FuncIcode]()
    self.binary_path = None  # type: str
    self.module_deps = Dict[Tuple[str, str], bool]()
Example 6: __init__
def __init__(self, data_dir: str,
             lib_path: List[str],
             target: int,
             output_dir: str,
             pyversion: int,
             flags: List[str],
             ignore_prefix: str,
             custom_typing_module: str,
             html_report_dir: str) -> None:
    self.data_dir = data_dir
    self.errors = Errors()
    self.errors.set_ignore_prefix(ignore_prefix)
    self.lib_path = lib_path
    self.target = target
    self.output_dir = output_dir
    self.pyversion = pyversion
    self.flags = flags
    self.custom_typing_module = custom_typing_module
    self.html_report_dir = html_report_dir
    self.semantic_analyzer = SemanticAnalyzer(lib_path, self.errors,
                                              pyversion=pyversion)
    self.semantic_analyzer_pass3 = ThirdPass(self.errors)
    self.type_checker = TypeChecker(self.errors,
                                    self.semantic_analyzer.modules,
                                    self.pyversion)
    self.states = List[State]()
    self.module_files = Dict[str, str]()
    self.module_deps = Dict[Tuple[str, str], bool]()
    self.missing_modules = Set[str]()
Example 7: parse
def parse(source: Union[str, bytes], fnam: str = None, errors: Errors = None,
          pyversion: Tuple[int, int] = defaults.PYTHON3_VERSION,
          custom_typing_module: str = None) -> MypyFile:
    """Parse a source file, without doing any semantic analysis.

    Return the parse tree. If errors is not provided, raise ParseError
    on failure. Otherwise, use the errors object to report parse errors.

    The pyversion (major, minor) argument determines the Python syntax variant.
    """
    raise_on_error = False
    if errors is None:
        errors = Errors()
        raise_on_error = True
    errors.set_file('<input>' if fnam is None else fnam)
    is_stub_file = bool(fnam) and fnam.endswith('.pyi')
    try:
        assert pyversion[0] >= 3 or is_stub_file
        ast = ast35.parse(source, fnam, 'exec')
        tree = ASTConverter(pyversion=pyversion,
                            is_stub=is_stub_file,
                            errors=errors,
                            custom_typing_module=custom_typing_module,
                            ).visit(ast)
        tree.path = fnam
        tree.is_stub = is_stub_file
    except SyntaxError as e:
        errors.report(e.lineno, e.offset, e.msg)
        tree = MypyFile([], [], False, set())

    if raise_on_error and errors.is_errors():
        errors.raise_error()

    return tree
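Because this version builds its own Errors object when none is supplied, the two calling conventions behave differently on bad input: with an explicit Errors the problems are only recorded, without one the function raises via errors.raise_error(). A hedged sketch, with made-up file names and source strings:

# Collect errors on a caller-provided object and decide later what to do:
errors = Errors()
tree = parse('x =', 'broken.py', errors)
print(errors.is_errors())   # True; the returned tree is an empty MypyFile

# Without an Errors argument, the same failure would raise instead of being collected:
tree = parse('x = 1', 'ok.py')   # parses cleanly and returns the MypyFile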
Example 8: check_type_arguments
def check_type_arguments(graph: 'Graph', scc: List[str], errors: Errors) -> None:
    for module in scc:
        state = graph[module]
        assert state.tree
        analyzer = TypeArgumentAnalyzer(errors,
                                        state.options,
                                        errors.is_typeshed_file(state.path or ''))
        with state.wrap_context():
            with strict_optional_set(state.options.strict_optional):
                state.tree.accept(analyzer)
Example 9: check_type_arguments_in_targets
def check_type_arguments_in_targets(targets: List[FineGrainedDeferredNode], state: 'State',
                                    errors: Errors) -> None:
    """Check type arguments against type variable bounds and restrictions.

    This mirrors the logic in check_type_arguments() except that we process only
    some targets. This is used in fine grained incremental mode.
    """
    analyzer = TypeArgumentAnalyzer(errors,
                                    state.options,
                                    errors.is_typeshed_file(state.path or ''))
    with state.wrap_context():
        with strict_optional_set(state.options.strict_optional):
            for target in targets:
                analyzer.recurse_into_functions = not isinstance(target.node, MypyFile)
                target.node.accept(analyzer)
Example 10: __init__
def __init__(self, mypy_base_dir, lib_path, target, output_dir, flags):
    self.mypy_base_dir = mypy_base_dir
    self.errors = Errors()
    self.lib_path = lib_path
    self.target = target
    self.output_dir = output_dir
    self.flags = flags
    self.semantic_analyzer = SemanticAnalyzer(lib_path, self.errors)
    self.type_checker = TypeChecker(self.errors,
                                    self.semantic_analyzer.modules)
    self.states = []
    self.module_files = {}
    self.icode = None
    self.binary_path = None
    self.module_deps = {}
Example 11: directory
class BuildManager:
    """This is the central class for building a mypy program.

    It coordinates parsing, import processing, semantic analysis and
    type checking. It manages state objects that actually perform the
    build steps.

    Attributes:
      data_dir:        Mypy data directory (contains stubs)
      target:          Build target; selects which passes to perform
      lib_path:        Library path for looking up modules
      semantic_analyzer:
                       Semantic analyzer, pass 2
      semantic_analyzer_pass3:
                       Semantic analyzer, pass 3
      type_checker:    Type checker
      errors:          Used for reporting all errors
      pyversion:       Python version (major, minor)
      flags:           Build options
      states:          States of all individual files that are being
                       processed. Each file in a build is always represented
                       by a single state object (after it has been encountered
                       for the first time). This is the only place where
                       states are stored.
      module_files:    Map from module name to source file path. There is a
                       1:1 mapping between modules and source files.
      module_deps:     Cache for module dependencies (direct or indirect).
                       Item (m, n) indicates whether m depends on n (directly
                       or indirectly).
      missing_modules: Set of modules that could not be imported, encountered so far
    """
    def __init__(self, data_dir: str,
                 lib_path: List[str],
                 target: int,
                 pyversion: Tuple[int, int],
                 flags: List[str],
                 ignore_prefix: str,
                 custom_typing_module: str,
                 reports: Reports) -> None:
        self.data_dir = data_dir
        self.errors = Errors()
        self.errors.set_ignore_prefix(ignore_prefix)
        self.lib_path = lib_path
        self.target = target
        self.pyversion = pyversion
        self.flags = flags
        self.custom_typing_module = custom_typing_module
        self.reports = reports
        self.semantic_analyzer = SemanticAnalyzer(lib_path, self.errors,
                                                  pyversion=pyversion)
        modules = self.semantic_analyzer.modules
        self.semantic_analyzer_pass3 = ThirdPass(modules, self.errors)
        self.type_checker = TypeChecker(self.errors, modules, self.pyversion)
        self.states = []  # type: List[State]
        self.module_files = {}  # type: Dict[str, str]
        self.module_deps = {}  # type: Dict[Tuple[str, str], bool]
        self.missing_modules = set()  # type: Set[str]
    def process(self, initial_states: List['UnprocessedFile']) -> BuildResult:
        """Perform a build.

        The argument is a state that represents the main program
        file. This method should only be called once per build
        manager object. The return values are identical to the return
        values of the build function.
        """
        self.states += initial_states

        for initial_state in initial_states:
            self.module_files[initial_state.id] = initial_state.path
        for initial_state in initial_states:
            initial_state.load_dependencies()

        # Process states in a loop until all files (states) have been
        # semantically analyzed or type checked (depending on target).
        #
        # We type check all files before the rest of the passes so that we can
        # report errors and fail as quickly as possible.
        while True:
            # Find the next state that has all its dependencies met.
            next = self.next_available_state()
            if not next:
                trace('done')
                break

            # Potentially output some debug information.
            trace('next {} ({})'.format(next.path, next.state()))

            # Set the import context for reporting error messages correctly.
            self.errors.set_import_context(next.import_context)

            # Process the state. The process method is responsible for adding a
            # new state object representing the new state of the file.
            next.process()

            # Raise exception if the build failed. The build can fail for
            # various reasons, such as parse error, semantic analysis error,
            # etc.
            if self.errors.is_blockers():
                self.errors.raise_error()
# ... (rest of the code omitted) ...
Example 12: directory
class BuildManager:
    """This is the central class for building a mypy program.

    It coordinates parsing, import processing, semantic analysis and
    type checking. It manages state objects that actually perform the
    build steps.

    Attributes:
      data_dir:      Mypy data directory (contains stubs)
      target:        Build target; selects which passes to perform
      lib_path:      Library path for looking up modules
      semantic_analyzer:
                     Semantic analyzer, pass 2
      semantic_analyzer_pass3:
                     Semantic analyzer, pass 3
      type_checker:  Type checker
      errors:        Used for reporting all errors
      output_dir:    Store output files here (Python)
      pyversion:     Python version (2 or 3)
      flags:         Build options
      states:        States of all individual files that are being
                     processed. Each file in a build is always represented
                     by a single state object (after it has been encountered
                     for the first time). This is the only place where
                     states are stored.
      module_files:  Map from module name to source file path. There is a
                     1:1 mapping between modules and source files.
      icode:         Generated icode (when compiling via C)
      binary_path:   Path of the generated binary (or None)
      module_deps:   Cache for module dependencies (direct or indirect).
                     Item (m, n) indicates whether m depends on n (directly
                     or indirectly).

    TODO Refactor code related to transformation, icode generation etc. to
         external objects. This module should not directly depend on them.
    """
    def __init__(self, data_dir: str,
                 lib_path: List[str],
                 target: int,
                 output_dir: str,
                 pyversion: int,
                 flags: List[str],
                 ignore_prefix: str) -> None:
        self.data_dir = data_dir
        self.errors = Errors()
        self.errors.set_ignore_prefix(ignore_prefix)
        self.lib_path = lib_path
        self.target = target
        self.output_dir = output_dir
        self.pyversion = pyversion
        self.flags = flags
        self.semantic_analyzer = SemanticAnalyzer(lib_path, self.errors)
        self.semantic_analyzer_pass3 = ThirdPass(self.errors)
        self.type_checker = TypeChecker(self.errors,
                                        self.semantic_analyzer.modules,
                                        self.pyversion)
        self.states = List[State]()
        self.module_files = Dict[str, str]()
        self.icode = Dict[str, FuncIcode]()
        self.binary_path = None  # type: str
        self.module_deps = Dict[Tuple[str, str], bool]()
    def process(self, initial_state: 'UnprocessedFile') -> BuildResult:
        """Perform a build.

        The argument is a state that represents the main program
        file. This method should only be called once per build
        manager object. The return values are identical to the return
        values of the build function.
        """
        self.states.append(initial_state)

        # Process states in a loop until all files (states) have been
        # semantically analyzed or type checked (depending on target).
        #
        # We type check all files before the rest of the passes so that we can
        # report errors and fail as quickly as possible.
        while True:
            # Find the next state that has all its dependencies met.
            next = self.next_available_state()
            if not next:
                trace('done')
                break

            # Potentially output some debug information.
            trace('next {} ({})'.format(next.path, next.state()))

            # Set the import context for reporting error messages correctly.
            self.errors.set_import_context(next.import_context)

            # Process the state. The process method is responsible for adding a
            # new state object representing the new state of the file.
            next.process()

            # Raise exception if the build failed. The build can fail for
            # various reasons, such as parse error, semantic analysis error,
            # etc.
            if self.errors.is_errors():
                self.errors.raise_error()
# ... (rest of the code omitted) ...