Skip to content

Commit

Permalink
[code coverage] Remove results from corrupted shards.
Browse files Browse the repository at this point in the history
This change makes the merge script remove the results file for bad
shards. This should make the rest of the pipeline properly treat those
shards as missing and retry them if appropriate.

R=liaoyuke,martiniss

Bug: 955012
Change-Id: I4bddc82fe1219b68adeb11d76a299962dddf65c3
Reviewed-on: https://chromium-review.googlesource.com/c/chromium/src/+/1719265
Commit-Queue: Roberto Carrillo <robertocn@chromium.org>
Reviewed-by: Stephen Martinis <martiniss@chromium.org>
Reviewed-by: Yuke Liao <liaoyuke@chromium.org>
Cr-Commit-Position: refs/heads/master@{#682934}
  • Loading branch information
Roberto Carrillo authored and Commit Bot committed Jul 31, 2019
1 parent 0a13421 commit 884e41f
Show file tree
Hide file tree
Showing 2 changed files with 32 additions and 82 deletions.
53 changes: 22 additions & 31 deletions testing/merge_scripts/code_coverage/merge_results.py
Original file line number Diff line number Diff line change
Expand Up @@ -85,6 +85,9 @@ def main():
'w') as f:
json.dump(invalid_profiles, f)

mark_invalid_shards(
coverage_merger.get_shards_to_retry(invalid_profiles),
params.jsons_to_merge)
logging.info('Merging %d test results', len(params.jsons_to_merge))
failed = False

Expand Down Expand Up @@ -114,51 +117,39 @@ def main():
failed = True
logging.warning('Additional merge script %s exited with %s' %
(params.additional_merge_script, rc))
mark_invalid_shards(coverage_merger.get_shards_to_retry(invalid_profiles),
params.summary_json, params.output_json)
elif len(params.jsons_to_merge) == 1:
logging.info("Only one output needs to be merged; directly copying it.")
with open(params.jsons_to_merge[0]) as f_read:
with open(params.output_json, 'w') as f_write:
f_write.write(f_read.read())
mark_invalid_shards(coverage_merger.get_shards_to_retry(invalid_profiles),
params.summary_json, params.output_json)
else:
logging.warning(
"This script was told to merge %d test results, but no additional "
"merge script was given.")

return 1 if (failed or bool(invalid_profiles)) else 0

def mark_invalid_shards(bad_shards, jsons_to_merge):
  """Removes results json files produced by bad (corrupted) shards.

  This is needed so that the caller (e.g. recipe) knows to retry, or otherwise
  treat the tests in that shard as not having valid results. Note that this
  only removes the results from the local machine, as these are expected to
  remain in the shard's isolated output.

  Args:
    bad_shards: list of task_ids of the shards that are bad or corrupted.
    jsons_to_merge: list of paths to the jsons with the results of the tests.
  """
  if not bad_shards:
    # Nothing to clean up.
    return
  for result_path in jsons_to_merge:
    # The swarming task id is embedded in each shard's output path, so a
    # substring match identifies which shard produced this file.
    # NOTE(review): substring matching is coarse; assumes a task_id never
    # appears incidentally in an unrelated path — confirm against callers.
    if any(task_id in result_path for task_id in bad_shards):
      # Remove the results json so the shard is treated as missing.
      os.remove(result_path)


if __name__ == '__main__':
Expand Down
61 changes: 10 additions & 51 deletions testing/merge_scripts/code_coverage/merge_results_test.py
Original file line number Diff line number Diff line change
Expand Up @@ -206,57 +206,16 @@ def test_retry_profdata_merge_failures(self):
stderr=-2,
), mock_exec_cmd.call_args)

@mock.patch('__builtin__.open',
new_callable=mock.mock_open,
read_data=json.dumps(
{'shards': [{'task_id': '1234567890abcdeff'}]}))
def test_mark_invalid_shards(self, mo):
mock_result = mock.mock_open()
mock_write = mock.MagicMock()
mock_result.return_value.write = mock_write
mo.side_effect = (
mo.return_value,
mock.mock_open(read_data='{}').return_value,
mock_result.return_value,
)
merge_results.mark_invalid_shards(['1234567890abcdeff'], 'dummy.json',
'o.json')
written = json.loads(''.join(c[0][0] for c in mock_write.call_args_list))
self.assertIn('missing_shards', written)
self.assertEqual(written['missing_shards'], [0])

@mock.patch('__builtin__.open',
new_callable=mock.mock_open,
read_data='invalid data')
def test_mark_invalid_shards_bad_summary(self, _mo):
# Should not raise an exception.
raised_exception = False
try:
merge_results.mark_invalid_shards(['1234567890abcdeff'], 'dummy.json',
'o.json')
except: # pylint: disable=bare-except
raised_exception = True
self.assertFalse(raised_exception, 'unexpected exception')

@mock.patch('__builtin__.open',
new_callable=mock.mock_open,
read_data=json.dumps(
{'shards': [{'task_id': '1234567890abcdeff'}]}))
def test_mark_invalid_shards_bad_output(self, mo):
mock_result = mock.mock_open()
mock_write = mock.MagicMock()
mock_result.return_value.write = mock_write
mo.side_effect = (
mo.return_value,
mock.mock_open(read_data='invalid_data').return_value,
mock_result.return_value,
)
# Should succeed anyway.
merge_results.mark_invalid_shards(['1234567890abcdeff'], 'dummy.json',
'o.json')
written = json.loads(''.join(c[0][0] for c in mock_write.call_args_list))
self.assertIn('missing_shards', written)
self.assertEqual(written['missing_shards'], [0])
@mock.patch('os.remove')
def test_mark_invalid_shards(self, mock_rm):
  # Verify that only the result files whose path contains a bad shard's
  # task_id are deleted: '123abc' is a substring of the first two paths,
  # but NOT of '/tmp/1234abc/dummy.json' ('1234abc' does not contain the
  # contiguous string '123abc'), so that file must be left alone.
  merge_results.mark_invalid_shards(['123abc'], [
      '/tmp/123abc/dummy.json', '/tmp/123abc/dummy2.json',
      '/tmp/1234abc/dummy.json'
  ])
  # os.remove is mocked, so assert on the exact calls it received.
  self.assertEqual([
      mock.call('/tmp/123abc/dummy.json'),
      mock.call('/tmp/123abc/dummy2.json')
  ], mock_rm.call_args_list)

def test_get_shards_to_retry(self):
bad_profiles = [
Expand Down

0 comments on commit 884e41f

Please sign in to comment.