Commit 90f7f68

change order
1 parent b06006b · commit 90f7f68

File tree: 1 file changed, +48 -48 lines


src/test/testing/common/testingAdapter.test.ts

Lines changed: 48 additions & 48 deletions
@@ -1062,62 +1062,48 @@ suite('End to End Tests: test adapters', () => {
                 assert.strictEqual(failureOccurred, false, failureMsg);
             });
     });
-    test('unittest execution adapter seg fault error handling', async () => {
-        resultResolver = new PythonResultResolver(testController, unittestProvider, workspaceUri);
+    test('pytest execution adapter seg fault error handling', async () => {
+        resultResolver = new PythonResultResolver(testController, pytestProvider, workspaceUri);
         let callCount = 0;
         let failureOccurred = false;
         let failureMsg = '';
         resultResolver._resolveExecution = async (data, _token?) => {
             // do the following asserts for each time resolveExecution is called, should be called once per test.
+            console.log(`pytest execution adapter seg fault error handling \n ${JSON.stringify(data)}`);
             callCount = callCount + 1;
-            traceLog(`unittest execution adapter seg fault error handling \n ${JSON.stringify(data)}`);
             try {
                 if (data.status === 'error') {
-                    if (data.error === undefined) {
-                        // Dereference a NULL pointer
-                        const indexOfTest = JSON.stringify(data).search('Dereference a NULL pointer');
-                        if (indexOfTest === -1) {
-                            failureOccurred = true;
-                            failureMsg = 'Expected test to have a null pointer';
-                        }
-                    } else if (data.error.length === 0) {
-                        failureOccurred = true;
-                        failureMsg = "Expected errors in 'error' field";
-                    }
+                    assert.ok(data.error, "Expected errors in 'error' field");
                 } else {
                     const indexOfTest = JSON.stringify(data.result).search('error');
-                    if (indexOfTest === -1) {
-                        failureOccurred = true;
-                        failureMsg =
-                            'If payload status is not error then the individual tests should be marked as errors. This should occur on windows machines.';
-                    }
-                }
-                if (data.result === undefined) {
-                    failureOccurred = true;
-                    failureMsg = 'Expected results to be present';
+                    assert.notDeepEqual(
+                        indexOfTest,
+                        -1,
+                        'If payload status is not error then the individual tests should be marked as errors. This should occur on windows machines.',
+                    );
                 }
+                assert.ok(data.result, 'Expected results to be present');
                 // make sure the testID is found in the results
-                const indexOfTest = JSON.stringify(data).search('test_seg_fault.TestSegmentationFault.test_segfault');
-                if (indexOfTest === -1) {
-                    failureOccurred = true;
-                    failureMsg = 'Expected testId to be present';
-                }
+                const indexOfTest = JSON.stringify(data).search(
+                    'test_seg_fault.py::TestSegmentationFault::test_segfault',
+                );
+                assert.notDeepEqual(indexOfTest, -1, 'Expected testId to be present');
             } catch (err) {
                 failureMsg = err ? (err as Error).toString() : '';
                 failureOccurred = true;
             }
             return Promise.resolve();
         };
 
-        const testId = `test_seg_fault.TestSegmentationFault.test_segfault`;
+        const testId = `${rootPathErrorWorkspace}/test_seg_fault.py::TestSegmentationFault::test_segfault`;
         const testIds: string[] = [testId];
 
         // set workspace to test workspace folder
         workspaceUri = Uri.parse(rootPathErrorWorkspace);
-        configService.getSettings(workspaceUri).testing.unittestArgs = ['-s', '.', '-p', '*test*.py'];
+        configService.getSettings(workspaceUri).testing.pytestArgs = [];
 
         // run pytest execution
-        const executionAdapter = new UnittestTestExecutionAdapter(
+        const executionAdapter = new PytestTestExecutionAdapter(
            configService,
            testOutputChannel.object,
            resultResolver,
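
Aside from the adapter class and the settings it configures, the key difference between the two swapped tests is the test ID format each framework uses. A rough illustration of the two shapes, assuming a hypothetical workspace path (the real value comes from the suite's rootPathErrorWorkspace):

    // pytest node ID: <file path>::<class>::<test function>, rooted at the error workspace.
    const rootPathErrorWorkspace = '/workspaces/errorWorkspace'; // hypothetical value for illustration
    const pytestTestId = `${rootPathErrorWorkspace}/test_seg_fault.py::TestSegmentationFault::test_segfault`;

    // unittest test ID: dotted <module>.<class>.<method>, with no workspace or file-path prefix.
    const unittestTestId = 'test_seg_fault.TestSegmentationFault.test_segfault';
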
@@ -1139,48 +1125,62 @@ suite('End to End Tests: test adapters', () => {
                 assert.strictEqual(failureOccurred, false, failureMsg);
             });
     });
-    test('pytest execution adapter seg fault error handling', async () => {
-        resultResolver = new PythonResultResolver(testController, pytestProvider, workspaceUri);
+    test('unittest execution adapter seg fault error handling', async () => {
+        resultResolver = new PythonResultResolver(testController, unittestProvider, workspaceUri);
         let callCount = 0;
         let failureOccurred = false;
         let failureMsg = '';
         resultResolver._resolveExecution = async (data, _token?) => {
             // do the following asserts for each time resolveExecution is called, should be called once per test.
-            console.log(`pytest execution adapter seg fault error handling \n ${JSON.stringify(data)}`);
             callCount = callCount + 1;
+            traceLog(`unittest execution adapter seg fault error handling \n ${JSON.stringify(data)}`);
             try {
                 if (data.status === 'error') {
-                    assert.ok(data.error, "Expected errors in 'error' field");
+                    if (data.error === undefined) {
+                        // Dereference a NULL pointer
+                        const indexOfTest = JSON.stringify(data).search('Dereference a NULL pointer');
+                        if (indexOfTest === -1) {
+                            failureOccurred = true;
+                            failureMsg = 'Expected test to have a null pointer';
+                        }
+                    } else if (data.error.length === 0) {
+                        failureOccurred = true;
+                        failureMsg = "Expected errors in 'error' field";
+                    }
                 } else {
                     const indexOfTest = JSON.stringify(data.result).search('error');
-                    assert.notDeepEqual(
-                        indexOfTest,
-                        -1,
-                        'If payload status is not error then the individual tests should be marked as errors. This should occur on windows machines.',
-                    );
+                    if (indexOfTest === -1) {
+                        failureOccurred = true;
+                        failureMsg =
+                            'If payload status is not error then the individual tests should be marked as errors. This should occur on windows machines.';
+                    }
+                }
+                if (data.result === undefined) {
+                    failureOccurred = true;
+                    failureMsg = 'Expected results to be present';
                 }
-                assert.ok(data.result, 'Expected results to be present');
                 // make sure the testID is found in the results
-                const indexOfTest = JSON.stringify(data).search(
-                    'test_seg_fault.py::TestSegmentationFault::test_segfault',
-                );
-                assert.notDeepEqual(indexOfTest, -1, 'Expected testId to be present');
+                const indexOfTest = JSON.stringify(data).search('test_seg_fault.TestSegmentationFault.test_segfault');
+                if (indexOfTest === -1) {
+                    failureOccurred = true;
+                    failureMsg = 'Expected testId to be present';
+                }
             } catch (err) {
                 failureMsg = err ? (err as Error).toString() : '';
                 failureOccurred = true;
             }
             return Promise.resolve();
         };
 
-        const testId = `${rootPathErrorWorkspace}/test_seg_fault.py::TestSegmentationFault::test_segfault`;
+        const testId = `test_seg_fault.TestSegmentationFault.test_segfault`;
         const testIds: string[] = [testId];
 
         // set workspace to test workspace folder
         workspaceUri = Uri.parse(rootPathErrorWorkspace);
-        configService.getSettings(workspaceUri).testing.pytestArgs = [];
+        configService.getSettings(workspaceUri).testing.unittestArgs = ['-s', '.', '-p', '*test*.py'];
 
         // run pytest execution
-        const executionAdapter = new PytestTestExecutionAdapter(
+        const executionAdapter = new UnittestTestExecutionAdapter(
            configService,
            testOutputChannel.object,
            resultResolver,
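
In both orderings, each test verifies results the same way: _resolveExecution is overridden so assertions run once per result payload, any assertion failure is recorded in failureMsg/failureOccurred rather than thrown out of the async callback, and the flag is asserted after the adapter finishes. Below is a minimal, self-contained sketch of that pattern; the ExecutionPayload shape and the runAdapter stand-in are illustrative assumptions, not the extension's actual adapter API.

    import * as assert from 'assert';

    // Assumed payload shape, mirroring only the fields the test inspects.
    interface ExecutionPayload {
        status: 'success' | 'error';
        error?: string[];
        result?: Record<string, string>;
    }

    // Hypothetical stand-in for the execution adapter's runTests() call.
    async function runAdapter(onPayload: (data: ExecutionPayload) => Promise<void>): Promise<void> {
        await onPayload({
            status: 'error',
            error: ['process exited with a segmentation fault'],
            result: { 'test_seg_fault.py::TestSegmentationFault::test_segfault': 'error' },
        });
    }

    async function segFaultHandlingSketch(): Promise<void> {
        let callCount = 0;
        let failureOccurred = false;
        let failureMsg = '';

        // Assertions run once per payload; failures are recorded rather than thrown,
        // so the callback never rejects inside the adapter.
        const resolveExecution = async (data: ExecutionPayload): Promise<void> => {
            callCount += 1;
            try {
                if (data.status === 'error') {
                    assert.ok(data.error, "Expected errors in 'error' field");
                } else {
                    const indexOfTest = JSON.stringify(data.result).search('error');
                    assert.notDeepEqual(indexOfTest, -1, 'Expected individual tests to be marked as errors');
                }
                assert.ok(data.result, 'Expected results to be present');
            } catch (err) {
                failureMsg = err ? (err as Error).toString() : '';
                failureOccurred = true;
            }
        };

        await runAdapter(resolveExecution);

        // Surface the first recorded failure only after the run completes.
        assert.strictEqual(callCount, 1, 'Expected the resolver to be called once per test run');
        assert.strictEqual(failureOccurred, false, failureMsg);
    }

    segFaultHandlingSketch().catch((err) => {
        console.error(err);
        process.exitCode = 1;
    });

Recording failures in a flag instead of throwing keeps the callback from rejecting inside the adapter, which is why the surrounding tests check failureOccurred only after runTests resolves.
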
