Skip to content

Commit

Permalink
Merge pull request #8 from eleanorjboyd/new-main
Browse files Browse the repository at this point in the history
New-main
  • Loading branch information
eleanorjboyd committed Apr 19, 2023
2 parents 7dbe187 + 8147718 commit d5637d5
Show file tree
Hide file tree
Showing 5 changed files with 128 additions and 5 deletions.
18 changes: 18 additions & 0 deletions pythonFiles/tests/unittestadapter/.data/test_subtest.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,18 @@
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.

import unittest

# Test class for the test_subtest_run test.
# The test_subtest_run function should receive a dictionary that has a "success" status
# and whose "result" value is a dict with 6 entries, one for each subtest.


class NumbersTest(unittest.TestCase):
def test_even(self):
"""
Test that numbers between 0 and 5 are all even.
"""
for i in range(0, 6):
with self.subTest(i=i):
self.assertEqual(i % 2, 0)
29 changes: 29 additions & 0 deletions pythonFiles/tests/unittestadapter/test_execution.py
Original file line number Diff line number Diff line change
Expand Up @@ -94,6 +94,35 @@ def test_single_ids_run() -> None:
assert id_result["outcome"] == "success"


def test_subtest_run() -> None:
    """Run the test_subtest file, whose single method test_even uses subtests.

    The payload returned by run_tests should have a "success" status and a
    "result" dict with 6 entries, one for each subtest iteration.
    """
    test_id = "test_subtest.NumbersTest.test_even"
    actual = run_tests(
        os.fspath(TEST_DATA_PATH), [test_id], "test_subtest.py", None, "fake-uuid"
    )
    # Subtest IDs embed the parent test ID plus the subTest parameters.
    expected_subtest_ids = [
        f"test_subtest.NumbersTest.test_even (i={n})" for n in range(6)
    ]
    assert actual
    # Top-level payload shape.
    assert all(item in actual for item in ("cwd", "status"))
    assert actual["status"] == "success"
    assert actual["cwd"] == os.fspath(TEST_DATA_PATH)
    assert "result" in actual
    result = actual["result"]
    # Exactly one result entry per subtest iteration.
    assert len(result) == 6
    for subtest_id in expected_subtest_ids:
        assert subtest_id in result


@pytest.mark.parametrize(
"test_ids, pattern, cwd, expected_outcome",
[
Expand Down
5 changes: 4 additions & 1 deletion pythonFiles/unittestadapter/execution.py
Original file line number Diff line number Diff line change
Expand Up @@ -130,7 +130,10 @@ def formatResult(
formatted = formatted[1:]
tb = "".join(formatted)

test_id = test.id()
if subtest:
test_id = subtest.id()
else:
test_id = test.id()

result = {
"test": test.id(),
Expand Down
4 changes: 4 additions & 0 deletions src/client/testing/testController/pytest/runner.ts
Original file line number Diff line number Diff line change
Expand Up @@ -95,6 +95,10 @@ export class PytestRunner implements ITestsRunner {
testArgs.push('--capture', 'no');
}

if (options.debug && !testArgs.some((a) => a.startsWith('--no-cov'))) {
testArgs.push('--no-cov');
}

// Positional arguments control the tests to be run.
const rawData = idToRawData.get(testNode.id);
if (!rawData) {
Expand Down
77 changes: 73 additions & 4 deletions src/client/testing/testController/workspaceTestAdapter.ts
Original file line number Diff line number Diff line change
Expand Up @@ -22,6 +22,7 @@ import { sendTelemetryEvent } from '../../telemetry';
import { EventName } from '../../telemetry/constants';
import { TestProvider } from '../types';
import {
clearAllChildren,
createErrorTestItem,
DebugTestTag,
ErrorTestItemOptions,
Expand Down Expand Up @@ -135,8 +136,11 @@ export class WorkspaceTestAdapter {
}

if (rawTestExecData !== undefined && rawTestExecData.result !== undefined) {
// Map which holds the subtest information for each test item.
const subTestStats: Map<string, { passed: number; failed: number }> = new Map();

// iterate through payload and update the UI accordingly.
for (const keyTemp of Object.keys(rawTestExecData.result)) {
// check for result and update the UI accordingly.
const testCases: TestItem[] = [];

// grab leaf level test items
Expand All @@ -147,7 +151,6 @@ export class WorkspaceTestAdapter {

if (
rawTestExecData.result[keyTemp].outcome === 'failure' ||
rawTestExecData.result[keyTemp].outcome === 'subtest-failure' ||
rawTestExecData.result[keyTemp].outcome === 'passed-unexpected'
) {
const rawTraceback = rawTestExecData.result[keyTemp].traceback ?? '';
Expand Down Expand Up @@ -175,8 +178,7 @@ export class WorkspaceTestAdapter {
});
} else if (
rawTestExecData.result[keyTemp].outcome === 'success' ||
rawTestExecData.result[keyTemp].outcome === 'expected-failure' ||
rawTestExecData.result[keyTemp].outcome === 'subtest-passed'
rawTestExecData.result[keyTemp].outcome === 'expected-failure'
) {
const grabTestItem = this.runIdToTestItem.get(keyTemp);
const grabVSid = this.runIdToVSid.get(keyTemp);
Expand All @@ -203,6 +205,73 @@ export class WorkspaceTestAdapter {
}
});
}
} else if (rawTestExecData.result[keyTemp].outcome === 'subtest-failure') {
// split on " " since the subtest ID has the parent test ID in the first part of the ID.
const parentTestCaseId = keyTemp.split(' ')[0];
const parentTestItem = this.runIdToTestItem.get(parentTestCaseId);
const data = rawTestExecData.result[keyTemp];
// find the subtest's parent test item
if (parentTestItem) {
const subtestStats = subTestStats.get(parentTestCaseId);
if (subtestStats) {
subtestStats.failed += 1;
} else {
subTestStats.set(parentTestCaseId, { failed: 1, passed: 0 });
runInstance.appendOutput(fixLogLines(`${parentTestCaseId} [subtests]:\r\n`));
// clear since subtest items don't persist between runs
clearAllChildren(parentTestItem);
}
const subtestId = keyTemp;
const subTestItem = testController?.createTestItem(subtestId, subtestId);
runInstance.appendOutput(fixLogLines(`${subtestId} Failed\r\n`));
// create a new test item for the subtest
if (subTestItem) {
const traceback = data.traceback ?? '';
const text = `${data.subtest} Failed: ${data.message ?? data.outcome}\r\n${traceback}\r\n`;
runInstance.appendOutput(fixLogLines(text));
parentTestItem.children.add(subTestItem);
runInstance.started(subTestItem);
const message = new TestMessage(rawTestExecData?.result[keyTemp].message ?? '');
if (parentTestItem.uri && parentTestItem.range) {
message.location = new Location(parentTestItem.uri, parentTestItem.range);
}
runInstance.failed(subTestItem, message);
} else {
throw new Error('Unable to create new child node for subtest');
}
} else {
throw new Error('Parent test item not found');
}
} else if (rawTestExecData.result[keyTemp].outcome === 'subtest-success') {
// split on " " since the subtest ID has the parent test ID in the first part of the ID.
const parentTestCaseId = keyTemp.split(' ')[0];
const parentTestItem = this.runIdToTestItem.get(parentTestCaseId);

// find the subtest's parent test item
if (parentTestItem) {
const subtestStats = subTestStats.get(parentTestCaseId);
if (subtestStats) {
subtestStats.passed += 1;
} else {
subTestStats.set(parentTestCaseId, { failed: 0, passed: 1 });
runInstance.appendOutput(fixLogLines(`${parentTestCaseId} [subtests]:\r\n`));
// clear since subtest items don't persist between runs
clearAllChildren(parentTestItem);
}
const subtestId = keyTemp;
const subTestItem = testController?.createTestItem(subtestId, subtestId);
// create a new test item for the subtest
if (subTestItem) {
parentTestItem.children.add(subTestItem);
runInstance.started(subTestItem);
runInstance.passed(subTestItem);
runInstance.appendOutput(fixLogLines(`${subtestId} Passed\r\n`));
} else {
throw new Error('Unable to create new child node for subtest');
}
} else {
throw new Error('Parent test item not found');
}
}
}
}
Expand Down

0 comments on commit d5637d5

Please sign in to comment.