This article collects typical usage examples of the parse_mr_job_stderr function from Python's mrjob.parse module. If you are wondering what parse_mr_job_stderr does or how to use it, the curated examples below should help.
Fifteen code examples of parse_mr_job_stderr follow, sorted by popularity by default. You can upvote the examples you like or find useful; your votes help the system recommend better Python code examples.
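Before diving into the examples, here is a minimal sketch of what parse_mr_job_stderr does, based on the behavior the examples below exercise (the exact signature may differ between mrjob versions): it scans a job's stderr for Hadoop Streaming "reporter:" lines and returns a dict with 'counters', 'statuses', and 'other' keys; an optional counters= argument merges new counts into an existing dict in place.

from io import BytesIO

from mrjob.parse import parse_mr_job_stderr

stderr = BytesIO(
    b'reporter:counter:Foo,Bar,2\n'
    b'reporter:status:sorting\n'
    b'not a reporter line\n')

result = parse_mr_job_stderr(stderr)
# result == {'counters': {'Foo': {'Bar': 2}},
#            'statuses': ['sorting'],
#            'other': ['not a reporter line\n']}

# pass counters= to accumulate into an existing dict in place
counters = {'Foo': {'Bar': 1}}
parse_mr_job_stderr(BytesIO(b'reporter:counter:Foo,Bar,2\n'),
                    counters=counters)
# counters == {'Foo': {'Bar': 3}}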
Example 1: test_update_counters
def test_update_counters(self):
    counters = {'Foo': {'Bar': 3, 'Baz': 1}}

    parse_mr_job_stderr(
        StringIO('reporter:counter:Foo,Baz,1\n'), counters=counters)

    assert_equal(counters, {'Foo': {'Bar': 3, 'Baz': 2}})
Example 2: test_update_counters
def test_update_counters(self):
    counters = {'Foo': {'Bar': 3, 'Baz': 1}}

    parse_mr_job_stderr(
        BytesIO(b'reporter:counter:Foo,Baz,1\n'), counters=counters)

    self.assertEqual(counters, {'Foo': {'Bar': 3, 'Baz': 2}})
Example 3: _run_step
def _run_step(self, step_num, step_type, input_path, output_path,
              working_dir, env, child_stdin=None):
    step = self._get_step(step_num)

    # if no mapper, just pass the data through (see #1141)
    if step_type == 'mapper' and not step.get('mapper'):
        copyfile(input_path, output_path)
        return

    # Passing local=False ensures the job uses proper names for file
    # options (see issue #851 on github)
    common_args = (['--step-num=%d' % step_num] +
                   self._mr_job_extra_args(local=False))

    if step_type == 'mapper':
        child_args = (
            ['--mapper'] + [input_path] + common_args)
    elif step_type == 'reducer':
        child_args = (
            ['--reducer'] + [input_path] + common_args)
    elif step_type == 'combiner':
        child_args = ['--combiner'] + common_args + ['-']

    has_combiner = (step_type == 'mapper' and 'combiner' in step)

    try:
        # Use custom stdout
        if has_combiner:
            child_stdout = BytesIO()
        else:
            child_stdout = open(output_path, 'wb')

        with save_current_environment():
            with save_cwd():
                os.environ.update(env)
                os.chdir(working_dir)

                child_instance = self._mrjob_cls(args=child_args)
                child_instance.sandbox(stdin=child_stdin,
                                       stdout=child_stdout)
                child_instance.execute()

        if has_combiner:
            sorted_lines = sorted(child_stdout.getvalue().splitlines())
            combiner_stdin = BytesIO(b'\n'.join(sorted_lines))
        else:
            child_stdout.flush()
    finally:
        child_stdout.close()

    while len(self._counters) <= step_num:
        self._counters.append({})
    parse_mr_job_stderr(child_instance.stderr.getvalue(),
                        counters=self._counters[step_num])

    if has_combiner:
        self._run_step(step_num, 'combiner', None, output_path,
                       working_dir, env, child_stdin=combiner_stdin)

        combiner_stdin.close()
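The counter bookkeeping around the parse_mr_job_stderr call above is a recurring pattern in these runners (see also Examples 5 and 8): the runner keeps one counter dict per step, pads the list until the current step has a slot, then lets parse_mr_job_stderr merge the child's counters into that slot in place. A minimal standalone sketch, assuming parse_mr_job_stderr accepts raw bytes the way it is fed child_instance.stderr.getvalue() above:

from mrjob.parse import parse_mr_job_stderr

_counters = []  # one counter dict per step, indexed by step_num
step_num = 2

# pad the list so _counters[step_num] exists
while len(_counters) <= step_num:
    _counters.append({})

# merge this step's counters into its slot in place
parse_mr_job_stderr(b'reporter:counter:Foo,Bar,1\n',
                    counters=_counters[step_num])

# _counters is now [{}, {}, {'Foo': {'Bar': 1}}]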
Example 4: _parse_task_counters
def _parse_task_counters(self, task_type, step_num):
    """Parse all stderr files from the given task (if any)."""
    # don't disable if read_logs=False; parsing counters is
    # internal to Hadoop, not something that happens in log files
    stderr_paths = self.fs.ls(self._task_stderr_paths_glob(
        task_type, step_num))

    for stderr_path in stderr_paths:
        with open(stderr_path, 'rb') as stderr:
            parse_mr_job_stderr(stderr, counters=self._counters[step_num])
Example 5: _run_step
def _run_step(self, step_num, step_type, input_path, output_path,
              working_dir, env, child_stdin=None):
    step = self._get_step(step_num)

    common_args = (['--step-num=%d' % step_num] +
                   self._mr_job_extra_args(local=True))

    if step_type == 'mapper':
        child_args = (
            ['--mapper'] + [input_path] + common_args)
    elif step_type == 'reducer':
        child_args = (
            ['--reducer'] + [input_path] + common_args)
    elif step_type == 'combiner':
        child_args = ['--combiner'] + common_args + ['-']

    child_instance = self._mrjob_cls(args=child_args)

    has_combiner = (step_type == 'mapper' and 'combiner' in step)

    # Use custom stdout
    if has_combiner:
        child_stdout = StringIO()
    else:
        child_stdout = open(output_path, 'w')

    with save_current_environment():
        with save_cwd():
            os.environ.update(env)
            os.chdir(working_dir)

            child_instance.sandbox(stdin=child_stdin, stdout=child_stdout)
            child_instance.execute()

    if has_combiner:
        sorted_lines = sorted(child_stdout.getvalue().splitlines())
        combiner_stdin = StringIO('\n'.join(sorted_lines))
    else:
        child_stdout.flush()

    child_stdout.close()

    while len(self._counters) <= step_num:
        self._counters.append({})
    parse_mr_job_stderr(child_instance.stderr.getvalue(),
                        counters=self._counters[step_num])

    if has_combiner:
        self._run_step(step_num, 'combiner', None, output_path,
                       working_dir, env, child_stdin=combiner_stdin)

        combiner_stdin.close()
Example 6: test_negative_counters
def test_negative_counters(self):
    # kind of poor practice to use negative counters, but Hadoop
    # Streaming supports it (negative numbers are integers too!)
    self.assertEqual(
        parse_mr_job_stderr([b'reporter:counter:Foo,Bar,-2\n']),
        {'counters': {'Foo': {'Bar': -2}},
         'statuses': [], 'other': []})
Example 7: test_counters_and_status
def test_counters_and_status(self):
    mr_job = MRJob().sandbox()

    mr_job.increment_counter('Foo', 'Bar')
    mr_job.set_status('Initializing qux gradients...')
    mr_job.increment_counter('Foo', 'Bar')
    mr_job.increment_counter('Foo', 'Baz', 20)
    mr_job.set_status('Sorting metasyntactic variables...')

    parsed_stderr = parse_mr_job_stderr(mr_job.stderr.getvalue())

    self.assertEqual(
        parsed_stderr, {
            'counters': {
                'Foo': {
                    'Bar': 2,
                    'Baz': 20
                }
            },
            'statuses': [
                'Initializing qux gradients...',
                'Sorting metasyntactic variables...'
            ],
            'other': []
        })

    # make sure parse_counters() works
    self.assertEqual(mr_job.parse_counters(), parsed_stderr['counters'])
Example 8: _run_step
def _run_step(self, step_num, step_type, input_path, output_path, working_dir, env, child_stdin=None):
    step = self._get_step(step_num)

    # Passing local=False ensures the job uses proper names for file
    # options (see issue #851 on github)
    common_args = ["--step-num=%d" % step_num] + self._mr_job_extra_args(local=False)

    if step_type == "mapper":
        child_args = ["--mapper"] + [input_path] + common_args
    elif step_type == "reducer":
        child_args = ["--reducer"] + [input_path] + common_args
    elif step_type == "combiner":
        child_args = ["--combiner"] + common_args + ["-"]

    has_combiner = step_type == "mapper" and "combiner" in step

    # Use custom stdout
    if has_combiner:
        child_stdout = BytesIO()
    else:
        child_stdout = open(output_path, "wb")

    with save_current_environment():
        with save_cwd():
            os.environ.update(env)
            os.chdir(working_dir)

            child_instance = self._mrjob_cls(args=child_args)
            child_instance.sandbox(stdin=child_stdin, stdout=child_stdout)
            child_instance.execute()

    if has_combiner:
        sorted_lines = sorted(child_stdout.getvalue().splitlines())
        combiner_stdin = BytesIO(b"\n".join(sorted_lines))
    else:
        child_stdout.flush()

    child_stdout.close()

    while len(self._counters) <= step_num:
        self._counters.append({})
    parse_mr_job_stderr(child_instance.stderr.getvalue(), counters=self._counters[step_num])

    if has_combiner:
        self._run_step(step_num, "combiner", None, output_path, working_dir, env, child_stdin=combiner_stdin)

        combiner_stdin.close()
Example 9: test_commas_in_counters
def test_commas_in_counters(self):
    # commas should be replaced with semicolons
    mr_job = MRJob().sandbox()

    mr_job.increment_counter("Bad items", "a, b, c")
    mr_job.increment_counter("girl, interrupted", "movie")

    parsed_stderr = parse_mr_job_stderr(mr_job.stderr.getvalue())

    self.assertEqual(parsed_stderr["counters"], {"Bad items": {"a; b; c": 1}, "girl; interrupted": {"movie": 1}})
Example 10: test_negative_and_zero_counters
def test_negative_and_zero_counters(self):
    mr_job = MRJob().sandbox()

    mr_job.increment_counter("Foo", "Bar", -1)
    mr_job.increment_counter("Foo", "Baz")
    mr_job.increment_counter("Foo", "Baz", -1)
    mr_job.increment_counter("Qux", "Quux", 0)

    parsed_stderr = parse_mr_job_stderr(mr_job.stderr.getvalue())
    self.assertEqual(parsed_stderr["counters"], {"Foo": {"Bar": -1, "Baz": 0}, "Qux": {"Quux": 0}})
Example 11: test_negative_and_zero_counters
def test_negative_and_zero_counters(self):
    mr_job = MRJob().sandbox()

    mr_job.increment_counter('Foo', 'Bar', -1)
    mr_job.increment_counter('Foo', 'Baz')
    mr_job.increment_counter('Foo', 'Baz', -1)
    mr_job.increment_counter('Qux', 'Quux', 0)

    parsed_stderr = parse_mr_job_stderr(mr_job.stderr.getvalue())
    self.assertEqual(parsed_stderr['counters'],
                     {'Foo': {'Bar': -1, 'Baz': 0}, 'Qux': {'Quux': 0}})
Example 12: test_commas_in_counters
def test_commas_in_counters(self):
    # commas should be replaced with semicolons
    mr_job = MRJob().sandbox()

    mr_job.increment_counter('Bad items', 'a, b, c')
    mr_job.increment_counter('girl, interrupted', 'movie')

    parsed_stderr = parse_mr_job_stderr(mr_job.stderr.getvalue())

    self.assertEqual(parsed_stderr['counters'],
                     {'Bad items': {'a; b; c': 1},
                      'girl; interrupted': {'movie': 1}})
Example 13: test_garbled_counters
def test_garbled_counters(self):
    # we should be able to do something graceful with
    # garbled counters and status messages
    BAD_LINES = [
        'reporter:counter:Foo,Bar,Baz,1\n',  # too many items
        'reporter:counter:Foo,1\n',  # too few items
        'reporter:counter:Foo,Bar,a million\n',  # not a number
        'reporter:counter:Foo,Bar,1.0\n',  # not an int
        'reporter:crounter:Foo,Bar,1\n',  # not a valid reporter
        'reporter,counter:Foo,Bar,1\n',  # wrong format!
    ]

    self.assertEqual(parse_mr_job_stderr(BAD_LINES),
                     {'counters': {}, 'statuses': [], 'other': BAD_LINES})
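Note that nothing is raised or silently dropped here: lines that do not parse as reporter:counter or reporter:status records come back verbatim in the 'other' list, so callers can log or surface them.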
Example 14: test_parsing
def test_parsing(self):
    INPUT = BytesIO(
        b'reporter:counter:Foo,Bar,2\n' +
        b'reporter:status:Baz\n' +
        b'reporter:status:Baz\n' +
        b'reporter:counter:Foo,Bar,1\n' +
        b'reporter:counter:Foo,Baz,1\n' +
        b'reporter:counter:Quux Subsystem,Baz,42\n' +
        b'Warning: deprecated metasyntactic variable: garply\n')

    self.assertEqual(
        parse_mr_job_stderr(INPUT),
        {'counters': {'Foo': {'Bar': 3, 'Baz': 1},
                      'Quux Subsystem': {'Baz': 42}},
         'statuses': ['Baz', 'Baz'],
         'other': ['Warning: deprecated metasyntactic variable: garply\n']
         })
Example 15: test_counters_and_status
def test_counters_and_status(self):
    mr_job = MRJob().sandbox()

    mr_job.increment_counter("Foo", "Bar")
    mr_job.set_status("Initializing qux gradients...")
    mr_job.increment_counter("Foo", "Bar")
    mr_job.increment_counter("Foo", "Baz", 20)
    mr_job.set_status("Sorting metasyntactic variables...")

    parsed_stderr = parse_mr_job_stderr(mr_job.stderr.getvalue())

    self.assertEqual(
        parsed_stderr,
        {
            "counters": {"Foo": {"Bar": 2, "Baz": 20}},
            "statuses": ["Initializing qux gradients...", "Sorting metasyntactic variables..."],
            "other": [],
        },
    )