diff --git a/.vscode/settings.json b/.vscode/settings.json
index cb341bc32..a2529cda3 100644
--- a/.vscode/settings.json
+++ b/.vscode/settings.json
@@ -10,8 +10,7 @@
},
"typescript.tsdk": "./node_modules/typescript/lib",
// jest
- "jest.runMode":"on-demand",
- // "testing.openTesting": "neverOpen",
+
// eslint
"tslint.enable": false,
"eslint.enable": true,
diff --git a/README.md b/README.md
index ec74f00ed..be676ba79 100644
--- a/README.md
+++ b/README.md
@@ -387,14 +387,15 @@ This setting can be one of the predefined types or a custom object.
4. "none": Do not clear any panel. (default)
(_**Note**: As of the current version, the testing framework does not support the clearing of the "TEST RESULTS" panel without side effects. The closest available command also clears all test item statuses, which may not be desirable. We are aware of this limitation and will raise the issue with the VS Code team._)
+
-**Handling Conflicts with "TEST RESULTS" panel**
+**Handling Conflicts with "TEST RESULTS" panel setting**
_The Problem_
The behavior of the "TEST RESULTS" panel is influenced by VSCode's native `"testing.openTesting"` setting. This can cause inconsistencies with your `"jest.outputConfig"` settings.
-For instance, if you set `"jest.outputConfig": {"revealWithFocus": "none"}` to prevent automatic focus changes, but leave `"testing.openTesting"` at its default value of `"openOnTestStart"`, the "TEST RESULTS" panel will still automatically switch focus whenever tests run.
+For instance, if you set `"jest.outputConfig": {"revealWithFocus": "none"}` to prevent automatic focus changes, but leave `"testing.openTesting"` at its default value of `"openOnTestStart"`, the "TEST RESULTS" panel will still automatically switch focus when tests are run via the UI.
_The Universal Solution_
@@ -410,18 +411,18 @@ _Validation and Diagnosis_
The extension features output config diagnosis information in the jest terminal, as well as the built-in conflict detection and quick fixes to assist with the transition.
-
-**Common Issues**
+
+**Default Output Focus Behavior by RunMode**
+When neither output setting (`"testing.openTesting"` nor `"jest.outputConfig"`) is present, the default output behavior is determined by [runMode](#runmode):
-Upon upgrading to v6.2, some users, frequently with auto run modes (e.g., 'watch', 'on-save'), might experience frequent "TEST RESULTS" panel automatically grabbing focus whenever files are saved or tests are run.
+| runMode| auto reveal "TEST RESULTS" | auto reveal "TERMINAL" |
+|:--:|:--:|:--:|
+| "watch" | :heavy_multiplication_x: | :heavy_multiplication_x:|
+| "on-save" | :heavy_multiplication_x: | :heavy_multiplication_x: |
+| "on-demand" | :heavy_check_mark: | :heavy_multiplication_x:|
-This is due to the extension generates a default `jest.outputConfig`, if none is existing in your settings, to match the existing `testing.openTesting` setting, which defaults to `"openOnTestStart"`. If this is not your desired output experience, you can easily disable `testing.openTesting` in your settings.json:
-```json
-"testing.openTesting": "neverOpen"
-```
-Then use the `jest.outputConfig` to find-tune the output experience you prefer.
-**Examples**
+**Configuration Examples**
- Choose a passive output experience that is identical to the previous version: no automatic focus switch, no automatic clear.
```json
"testing.openTesting": "neverOpen",
@@ -432,12 +433,7 @@ Then use the `jest.outputConfig` to find-tune the output experience you prefer.
"testing.openTesting": "neverOpen",
"jest.outputConfig": "terminal-based"
```
-- Choose a test-results-based experience and switch focus to it when test run starts.
- ```json
- "testing.openTesting": "neverOpen",
- "jest.outputConfig": "test-results-based"
- ```
-- Choose a test-results-based experience and switch focus to it when test fails.
+- Choose a test-results-based experience and switch focus to it only when a test fails.
```json
"testing.openTesting": "neverOpen",
"jest.outputConfig": {
@@ -457,16 +453,17 @@ Then use the `jest.outputConfig` to find-tune the output experience you prefer.
>
> **Migration Guide**
>
-> Migrating to the new `"jest.outputConfig"` can require some manual adjustments, especially if you're working in a multi-root workspace. Here are some guidelines to help with the transition:
+> Migrating to the new `"jest.outputConfig"` might require some manual adjustments, especially if you're working in a multi-root workspace. Here are some guidelines to help with the transition:
>
> 1. **Workspace Level vs Workspace-Folder Level**: The new `"jest.outputConfig"` is a workspace-level setting, unlike legacy settings like `"jest.autoClearTerminal"` and `"jest.autoRevealOutput"`, which are workspace-folder level settings.
>
> 2. **Backward Compatibility**: If no `"jest.outputConfig"` is defined in your settings.json, the extension will attempt to generate a backward-compatible outputConfig in memory. This uses the `"testing.openTesting"` setting and any legacy settings (`"jest.autoClearTerminal"`, `"jest.autoRevealOutput"`) you might have. Note that this might only work for single-root workspaces.
>
-> 3. **Migration Steps**:
-> - Use the `"Jest: Save Current Output Config"` command from the command palette to update your settings.json.
-> - (optional) Fix warning: The save does not include `"testing.openTesting"`, so you might see the conflict warning message. You can either use the "Quick Fix" action or adjust the `settings.json` manually (see [handling conflict](#outputconfig-conflict)).
-> - Finally, remove any deprecated settings.
+> 3. **Customization Steps**:
+> In general, it should work out of the box, but if you encounter any issues, here are some steps to help adjust the output behavior:
+> - Use the `"Jest: Save Current Output Config"` command from the command palette to update your settings.json. Then adjust it to fit your needs.
+> - Fix any warnings: The save does not include `"testing.openTesting"`, so you might still see the conflict warning message. You can either use the "Quick Fix" action or adjust the `settings.json` manually (see [handling conflict](#outputconfig-conflict)).
+> - Finally, remove any deprecated settings.
>
> By following these guidelines, you should be able to smoothly transition to using `"jest.outputConfig"`.
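
For illustration, given the runMode-aware defaults documented above: an auto-run ("watch" / "on-save") user who *does* want the "TEST RESULTS" panel to take focus can opt in explicitly. A minimal `settings.json` sketch using the keys from the README section above:

```json
"jest.runMode": "watch",
"jest.outputConfig": {
    "revealOn": "run",
    "revealWithFocus": "test-results"
}
```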
diff --git a/package.json b/package.json
index 2d5e92175..b27b4c897 100644
--- a/package.json
+++ b/package.json
@@ -2,7 +2,7 @@
"name": "vscode-jest",
"displayName": "Jest",
"description": "Use Facebook's Jest With Pleasure.",
- "version": "6.2.2",
+ "version": "6.2.3",
"publisher": "Orta",
"engines": {
"vscode": "^1.68.1"
diff --git a/release-notes/release-note-v6.md b/release-notes/release-note-v6.md
index 2f36fb902..6737a384d 100644
--- a/release-notes/release-note-v6.md
+++ b/release-notes/release-note-v6.md
@@ -3,6 +3,7 @@
Release Notes
---
+- [v6.2.3](#v623)
- [v6.2.2](#v622)
- [CHANGELOG](#changelog)
- [v6.2.1](#v621)
@@ -35,6 +36,14 @@ Release Notes
---
+## v6.2.3
+This release is a patch release with the following changes:
+
+**Enhancement**
+
+- Improve the default output-focus behavior for auto runs (e.g., "watch", "on-save"). This eliminates the issue of the focus automatically switching to the "TEST RESULTS" panel whenever files are saved in auto-run modes. The default behavior is now runMode-aware and will not auto-switch focus for auto runs unless explicitly configured to do so. See [default output focus behavior](https://github.com/jest-community/vscode-jest#default-output-focus). ([#1128](https://github.com/jest-community/vscode-jest/pull/1128) - @connectdotz)
+- docs: update README to fix jest run mode type. ([#1126](https://github.com/jest-community/vscode-jest/pull/1126) - @kota-kamikawa)
+
## v6.2.2
This release is a patch release with the following changes:
diff --git a/src/JestExt/core.ts b/src/JestExt/core.ts
index 381db50be..c42a89b89 100644
--- a/src/JestExt/core.ts
+++ b/src/JestExt/core.ts
@@ -19,7 +19,7 @@ import { CoverageMapData } from 'istanbul-lib-coverage';
import { Logging } from '../logging';
import { createProcessSession, ProcessSession } from './process-session';
import { JestExtContext, JestSessionEvents, JestExtSessionContext, JestRunEvent } from './types';
-import { extensionName, OUTPUT_CONFIG_HELP_URL, SupportedLanguageIds } from '../appGlobals';
+import { extensionName, SupportedLanguageIds } from '../appGlobals';
import { createJestExtContext, getExtensionResourceSettings, prefixWorkspace } from './helper';
import { PluginResourceSettings } from '../Settings';
import { WizardTaskId } from '../setup-wizard';
@@ -38,8 +38,6 @@ interface JestCommandSettings {
jestCommandLine: string;
}
-const AUTO_FOCUS_WARNING = `The TEST RESULTS panel has auto-focus enabled, which may cause frequent focus shifts during the current run mode. If this becomes a problem, you can disable the auto-focus using the command "Jest: Disable Auto Focus Test Output". Alternatively, click on the action link below. For more details, see ${OUTPUT_CONFIG_HELP_URL}`;
-
/** extract lines starts and end with [] */
export class JestExt {
coverageMapProvider: CoverageMapProvider;
@@ -145,8 +143,8 @@ export class JestExt {
this.output.write(
'Critical Settings:\r\n' +
`jest.runMode: ${JSON.stringify(pluginSettings.runMode.config, undefined, 4)}\r\n` +
- `jest.outputConfig: ${JSON.stringify(outputConfig, undefined, 4)}\r\n` +
- `testing.openTesting: ${JSON.stringify(openTesting, undefined, 4)}\r\n`,
+ `jest.outputConfig: ${JSON.stringify(outputConfig.value, undefined, 4)}\r\n` +
+ `testing.openTesting: ${JSON.stringify(openTesting.value, undefined, 4)}\r\n`,
'info'
);
return pluginSettings;
@@ -186,7 +184,7 @@ export class JestExt {
}
private enableOutputOnRun(): void {
- outputManager.showOutputOn('run', this.output);
+ outputManager.showOutputOn('run', this.output, this.extContext.settings.runMode);
}
private setupRunEvents(events: JestSessionEvents): void {
events.onRunEvent.event((event: JestRunEvent) => {
@@ -236,7 +234,7 @@ export class JestExt {
}
case 'test-error': {
if (!event.process.userData?.testError) {
- outputManager.showOutputOn('test-error', this.output);
+ outputManager.showOutputOn('test-error', this.output, this.extContext.settings.runMode);
event.process.userData = { ...(event.process.userData ?? {}), testError: true };
}
break;
@@ -321,7 +319,6 @@ export class JestExt {
// update visible editors that belong to this folder
this.updateVisibleTextEditors();
- this.warnAutoRunAutoFocus();
} catch (e) {
this.outputActionMessages(
`Failed to start jest session: ${e}`,
@@ -354,24 +351,6 @@ export class JestExt {
}
}
- // check if output config has conflict, especially for auto-run modes
- private async warnAutoRunAutoFocus(): Promise<void> {
- if (
- this.extContext.settings.runMode.config.deferred ||
- this.extContext.settings.runMode.config.type === 'on-demand' ||
- outputManager.isAutoFocus() === false
- ) {
- return;
- }
- const cmdLink = executableTerminalLinkProvider.executableLink(
- this.extContext.workspace.name,
- `${extensionName}.disable-auto-focus`
- );
-
- this.output.write(AUTO_FOCUS_WARNING, 'warn');
- this.output.write(`Disable Auto Focus: \u2192 ${cmdLink}\r\n`, 'info');
- }
-
private updateTestFileEditor(editor: vscode.TextEditor): void {
if (!this.isTestFileEditor(editor)) {
return;
diff --git a/src/Settings/helper.ts b/src/Settings/helper.ts
index b1200cce4..2edaf2674 100644
--- a/src/Settings/helper.ts
+++ b/src/Settings/helper.ts
@@ -1,5 +1,10 @@
import * as vscode from 'vscode';
-import { GetConfigFunction, VirtualFolderSettings, VirtualFolderSettingKey } from './types';
+import {
+ GetConfigFunction,
+ VirtualFolderSettings,
+ VirtualFolderSettingKey,
+ SettingDetail,
+} from './types';
import { isVirtualWorkspaceFolder } from '../virtual-workspace-folder';
/**
@@ -55,3 +60,23 @@ export const updateSetting = async (
(vFolder as any)[key] = value;
await config.update('virtualFolders', virtualFolders);
};
+
+export const getSettingDetail = <T>(configName: string, section: string): SettingDetail<T> => {
+ // Use the `inspect` method to get detailed information about the setting
+ const config = vscode.workspace.getConfiguration(configName);
+  const value = config.get<T>(section);
+ const settingInspection = config.inspect(section);
+
+ if (settingInspection) {
+ const isExplicitlySet =
+ settingInspection.globalValue !== undefined ||
+ settingInspection.workspaceValue !== undefined ||
+ settingInspection.workspaceFolderValue !== undefined ||
+ settingInspection.globalLanguageValue !== undefined ||
+ settingInspection.workspaceLanguageValue !== undefined ||
+ settingInspection.workspaceFolderLanguageValue !== undefined;
+
+ return { value, isExplicitlySet };
+ }
+ return { value: undefined, isExplicitlySet: false };
+};
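
For context, a hedged usage sketch of the new `getSettingDetail` helper (illustrative only; the import path assumes the barrel export used elsewhere in this diff):

```ts
import { getSettingDetail } from './Settings';

// `value` is always the resolved setting (default or user-set), while
// `isExplicitlySet` is true only when some settings file actually defines the key.
const openTesting = getSettingDetail<string>('testing', 'openTesting');
if (!openTesting.isExplicitlySet) {
  // the value came from VS Code's built-in default ('openOnTestStart'),
  // not from any user or workspace settings file
}
```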
diff --git a/src/Settings/types.ts b/src/Settings/types.ts
index 9f37d1d21..18753d6cc 100644
--- a/src/Settings/types.ts
+++ b/src/Settings/types.ts
@@ -97,3 +97,9 @@ export interface VirtualFolderSettings extends AllPluginResourceSettings {
}
export type GetConfigFunction<T> = (key: VirtualFolderSettingKey) => T | undefined;
+
+export interface SettingDetail<T> {
+ value: T | undefined;
+ /** true if the setting is explicitly defined in a settings file, i.e., not from default value */
+ isExplicitlySet: boolean;
+}
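
In other words, a `SettingDetail` pairs a resolved value with its provenance; e.g., what the helper above would return for a setting the user never touched (values assumed for illustration):

```ts
const detail: SettingDetail<string> = {
  value: 'openOnTestStart', // resolved from VS Code's built-in default
  isExplicitlySet: false, // no settings file defines it
};
```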
diff --git a/src/extension-manager.ts b/src/extension-manager.ts
index 54a642b0a..3e10dec6c 100644
--- a/src/extension-manager.ts
+++ b/src/extension-manager.ts
@@ -531,6 +531,7 @@ export class ExtensionManager {
const ReleaseNoteBase = 'https://github.com/jest-community/vscode-jest/blob/master/release-notes';
const ReleaseNotes: Record<string, string> = {
+ '6.2.3': `${ReleaseNoteBase}/release-note-v6.md#v623`,
'6.2.2': `${ReleaseNoteBase}/release-note-v6.md#v622`,
'6.2.0': `${ReleaseNoteBase}/release-note-v6.md#v620`,
'6.1.0': `${ReleaseNoteBase}/release-note-v6.md#v610-pre-release`,
diff --git a/src/output-manager.ts b/src/output-manager.ts
index 25c3afb29..70368ffbb 100644
--- a/src/output-manager.ts
+++ b/src/output-manager.ts
@@ -1,7 +1,14 @@
import * as vscode from 'vscode';
-import { AutoRevealOutputType, JestOutputSetting, JestRawOutputSetting } from './Settings/types';
+import {
+ AutoRevealOutputType,
+ JestOutputSetting,
+ JestRawOutputSetting,
+ SettingDetail,
+ getSettingDetail,
+} from './Settings';
import { ExtOutputTerminal } from './JestExt/output-terminal';
import { OUTPUT_CONFIG_HELP_URL, extensionName } from './appGlobals';
+import { RunMode } from './JestExt/run-mode';
export type OutputConfig = Required<JestRawOutputSetting>;
export const DefaultJestOutputSetting: OutputConfig = {
@@ -9,14 +16,17 @@ export const DefaultJestOutputSetting: OutputConfig = {
revealWithFocus: 'none',
clearOnRun: 'none',
};
+export interface OutputSettingDetail extends SettingDetail<OutputConfig> {
+ value: OutputConfig; // Override the value property to make it non-undefined
+}
export interface OutputConfigs {
- outputConfig: OutputConfig;
- openTesting: string | undefined;
+ outputConfig: OutputSettingDetail;
+  openTesting: SettingDetail<string>;
}
export class OutputManager {
- private config!: OutputConfig;
- private openTesting: string | undefined;
+ private config!: OutputSettingDetail;
+  private openTesting!: SettingDetail<string>;
private skipValidation = false;
constructor() {
@@ -24,11 +34,12 @@ export class OutputManager {
}
private initConfigs(): void {
- this.openTesting = vscode.workspace.getConfiguration('testing').get('openTesting');
- const config = vscode.workspace.getConfiguration('jest').get('outputConfig');
- this.config = config
- ? this.resolveSetting(config)
+    this.openTesting = getSettingDetail<string>('testing', 'openTesting');
+    const config = getSettingDetail<JestOutputSetting>('jest', 'outputConfig');
+ const value: OutputConfig = config.value
+ ? this.resolveSetting(config.value)
: { ...DefaultJestOutputSetting, ...this.fromLegacySettings() };
+ this.config = { ...config, value };
}
private resolveSetting(setting: JestOutputSetting): OutputConfig {
@@ -67,7 +78,7 @@ export class OutputManager {
const config = {} as JestRawOutputSetting;
- switch (this.openTesting) {
+ switch (this.openTesting.value) {
case 'neverOpen':
case 'openExplorerOnTestStart':
// no-op
@@ -86,7 +97,7 @@ export class OutputManager {
if (autoRevealOutput === 'off') {
config.revealOn = 'demand';
config.revealWithFocus = 'none';
- if (this.openTesting !== 'neverOpen') {
+ if (this.openTesting.value !== 'neverOpen') {
console.warn(
'The "autoRevealOutput" setting is set to "off", but "testing.openTesting" is not set to "neverOpen".'
);
@@ -99,22 +110,23 @@ export class OutputManager {
public showOutputOn(
type: 'run' | 'test-error' | 'exec-error',
- terminalOutput: ExtOutputTerminal
+ terminalOutput: ExtOutputTerminal,
+ runMode?: RunMode
): void {
// will not reveal output for the following cases:
switch (type) {
case 'run':
- if (this.config.revealOn !== 'run') {
+ if (this.config.value.revealOn !== 'run') {
return;
}
break;
case 'test-error':
- if (this.config.revealOn !== 'error') {
+ if (this.config.value.revealOn !== 'error') {
return;
}
break;
case 'exec-error':
- if (this.config.revealOn === 'demand') {
+ if (this.config.value.revealOn === 'demand') {
return;
}
break;
@@ -122,22 +134,25 @@ export class OutputManager {
terminalOutput.enable();
// check to see if we need to show with the focus
- if (this.config.revealWithFocus === 'terminal') {
+ if (this.config.value.revealWithFocus === 'terminal') {
return terminalOutput.show();
- } else if (type !== 'exec-error' && this.config.revealWithFocus === 'test-results') {
+ } else if (type !== 'exec-error' && this.config.value.revealWithFocus === 'test-results') {
// exec-error will only show in terminal
- return this.showTestResultsOutput();
+ return this.showTestResultsOutput(runMode);
}
}
public clearOutputOnRun(terminalOutput: ExtOutputTerminal): void {
- if (this.config.clearOnRun === 'none') {
+ if (this.config.value.clearOnRun === 'none') {
return;
}
- if (this.config.clearOnRun === 'terminal' || this.config.clearOnRun === 'both') {
+ if (this.config.value.clearOnRun === 'terminal' || this.config.value.clearOnRun === 'both') {
terminalOutput.clear();
}
- if (this.config.clearOnRun === 'test-results' || this.config.clearOnRun === 'both') {
+ if (
+ this.config.value.clearOnRun === 'test-results' ||
+ this.config.value.clearOnRun === 'both'
+ ) {
this.clearTestResultsOutput();
}
}
@@ -147,8 +162,27 @@ export class OutputManager {
// should file a feature request for testing framework to provide a command to clear the output history only.
vscode.commands.executeCommand('testing.clearTestResults');
}
- private showTestResultsOutput(): void {
- vscode.commands.executeCommand('workbench.panel.testResults.view.focus');
+ private showTestResultsOutput(runMode?: RunMode): void {
+ switch (runMode?.config.type) {
+ case 'on-demand':
+        // only need to force the reveal if the user has turned off openTesting ('neverOpen'); otherwise,
+        // vscode's native openTesting setting handles the reveal itself, so return without revealing
+ if (this.openTesting.value !== 'neverOpen') {
+ return;
+ }
+ break;
+ case 'watch':
+ case 'on-save':
+        // for auto runs, by default we will not auto-reveal the test-results panel unless
+        // the user has explicitly configured it, i.e., either openTesting or outputConfig is set explicitly.
+ if (!this.config.isExplicitlySet && !this.openTesting.isExplicitlySet) {
+ return;
+ }
+ break;
+ }
+ vscode.commands.executeCommand('workbench.panel.testResults.view.focus', {
+ preserveFocus: true,
+ });
}
  private async updateTestResultsSettings(): Promise<void> {
@@ -172,13 +206,10 @@ export class OutputManager {
];
}
- public isAutoFocus(): boolean {
- return this.config.revealWithFocus !== 'none' || this.openTesting !== 'neverOpen';
- }
  public async disableAutoFocus(): Promise<void> {
this.skipValidation = true;
await this.updateTestResultsSettings();
- this.config.revealWithFocus = 'none';
+ this.config.value.revealWithFocus = 'none';
await this.save();
this.skipValidation = false;
}
@@ -189,11 +220,17 @@ export class OutputManager {
}
public isTestResultsConfigsValid(): boolean {
- switch (this.openTesting) {
+ switch (this.openTesting.value) {
case 'openOnTestStart':
- return this.config.revealWithFocus === 'test-results' && this.config.revealOn === 'run';
+ return (
+ this.config.value.revealWithFocus === 'test-results' &&
+ this.config.value.revealOn === 'run'
+ );
case 'openOnTestFailure':
- return this.config.revealWithFocus === 'test-results' && this.config.revealOn === 'error';
+ return (
+ this.config.value.revealWithFocus === 'test-results' &&
+ this.config.value.revealOn === 'error'
+ );
default:
return true;
}
@@ -216,8 +253,8 @@ export class OutputManager {
}
const detail =
- `Output Config Conflict Detected: test-results panel setting "testing.openTesting: ${this.openTesting}" ` +
- `conflicts with jest.outputConfig:\r\n ${JSON.stringify(this.config, undefined, 4)}.`;
+ `Output Config Conflict Detected: test-results panel setting "testing.openTesting: ${this.openTesting.value}" ` +
+ `conflicts with jest.outputConfig:\r\n ${JSON.stringify(this.config.value, undefined, 4)}.`;
console.warn(detail);
const actions = {
@@ -273,8 +310,9 @@ export class OutputManager {
await this.updateTestResultsSettings();
return true;
case items.fixOutputConfig:
- this.config.revealWithFocus = 'test-results';
- this.config.revealOn = this.openTesting === 'openOnTestFailure' ? 'error' : 'run';
+ this.config.value.revealWithFocus = 'test-results';
+ this.config.value.revealOn =
+ this.openTesting.value === 'openOnTestFailure' ? 'error' : 'run';
await this.save();
return true;
case items.editSettings:
@@ -298,7 +336,7 @@ export class OutputManager {
}
  public async save(): Promise<void> {
- await vscode.workspace.getConfiguration('jest').update('outputConfig', this.config);
+ await vscode.workspace.getConfiguration('jest').update('outputConfig', this.config.value);
}
}
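
To summarize the change above, a simplified sketch (the helper name `shouldRevealTestResults` is illustrative, not part of the extension) of the runMode-aware decision now made in `showTestResultsOutput`:

```ts
type RunModeType = 'watch' | 'on-save' | 'on-demand';

function shouldRevealTestResults(
  runModeType: RunModeType | undefined,
  openTestingValue: string | undefined,
  openTestingExplicit: boolean,
  outputConfigExplicit: boolean
): boolean {
  switch (runModeType) {
    case 'on-demand':
      // vscode's "testing.openTesting" already reveals on UI-triggered runs;
      // only force a reveal when the user has turned that off.
      return openTestingValue === 'neverOpen';
    case 'watch':
    case 'on-save':
      // auto runs stay quiet unless the user explicitly configured output behavior.
      return openTestingExplicit || outputConfigExplicit;
    default:
      // no runMode available: keep the previous behavior and reveal.
      return true;
  }
}
```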
diff --git a/tests/JestExt/core.test.ts b/tests/JestExt/core.test.ts
index a9b56b71c..edd8ebfce 100644
--- a/tests/JestExt/core.test.ts
+++ b/tests/JestExt/core.test.ts
@@ -29,7 +29,6 @@ jest.mock('../../src/workspace-manager', () => ({
}));
const mockOutputManager = {
showOutputOn: jest.fn(),
- isAutoFocus: jest.fn(),
outputConfigs: jest.fn(),
};
jest.mock('../../src/output-manager', () => ({
@@ -151,7 +150,10 @@ describe('JestExt', () => {
jestCommandLine: 'jest',
};
getConfiguration.mockReturnValue({});
- mockOutputManager.outputConfigs.mockReturnValue({});
+ mockOutputManager.outputConfigs.mockReturnValue({
+ outputConfig: { value: {}, isExplicitlySet: false },
+ openTesting: { value: {}, isExplicitlySet: false },
+ });
vscode.window.visibleTextEditors = [];
    (createProcessSession as jest.Mocked<any>).mockReturnValue(mockProcessSession);
@@ -813,39 +815,6 @@ describe('JestExt', () => {
expect(update.state).toEqual('initial');
expect(update.mode.config.coverage).toEqual(true);
});
- describe('emit auto-focus warnings for auto-run modes', () => {
- it.each`
- case | runMode | isAutoFocus | showWarning
- ${1} | ${'watch'} | ${true} | ${true}
- ${2} | ${'watch'} | ${false} | ${false}
- ${3} | ${'on-save'} | ${true} | ${true}
- ${4} | ${'on-save'} | ${false} | ${false}
- ${5} | ${'on-demand'} | ${true} | ${false}
- ${6} | ${{ type: 'watch', deferred: true }} | ${true} | ${false}
- ${7} | ${{ type: 'watch', deferred: false }} | ${true} | ${true}
- `(
- 'case $case: showWarning: $showWarning',
- async ({ runMode, isAutoFocus, showWarning }) => {
- expect.hasAssertions();
-
- const sut = newJestExt({ settings: { runMode: new RunMode(runMode) } });
- mockOutputManager.isAutoFocus.mockReturnValueOnce(isAutoFocus);
-
- await sut.startSession();
- if (showWarning) {
- expect(mockOutputTerminal.write).toHaveBeenCalledWith(
- expect.stringContaining('auto-focus'),
- 'warn'
- );
- } else {
- expect(mockOutputTerminal.write).not.toHaveBeenCalledWith(
- expect.stringContaining('auto focus'),
- 'warn'
- );
- }
- }
- );
- });
});
describe('stopSession', () => {
it('will fire event', async () => {
@@ -1355,41 +1324,55 @@ describe('JestExt', () => {
});
});
describe('output handling', () => {
+ let runMode;
+ let sut: JestExt;
+ beforeEach(() => {
+ runMode = new RunMode('on-demand');
+ sut = newJestExt({ settings: { runMode } });
+ });
it('delegate output handling to outputManager during runEvent', () => {
- const sut = newJestExt();
      const onRunEvent = (sut.events.onRunEvent.event as jest.Mocked<any>).mock.calls[0][0];
const process = { id: 'a process id', request: { type: 'watch' } };
onRunEvent({ type: 'start', process });
- expect(mockOutputManager.showOutputOn).toHaveBeenCalledWith('run', expect.anything());
+ expect(mockOutputManager.showOutputOn).toHaveBeenCalledWith(
+ 'run',
+ expect.anything(),
+ runMode
+ );
});
describe('when test errors occurred', () => {
it('will notify outputManager', () => {
- const sut = newJestExt();
        const onRunEvent = (sut.events.onRunEvent.event as jest.Mocked<any>).mock.calls[0][0];
const process = { id: 'a process id', request: { type: 'watch' } };
onRunEvent({ type: 'test-error', process });
- expect(mockOutputManager.showOutputOn).toHaveBeenCalledWith('run', expect.anything());
+ expect(mockOutputManager.showOutputOn).toHaveBeenCalledWith(
+ 'run',
+ expect.anything(),
+ runMode
+ );
expect(mockOutputManager.showOutputOn).toHaveBeenCalledWith(
'test-error',
- expect.anything()
+ expect.anything(),
+ runMode
);
});
it('will only notify outputManager once per run cycle', () => {
- const sut = newJestExt();
        const onRunEvent = (sut.events.onRunEvent.event as jest.Mocked<any>).mock.calls[0][0];
const process = { id: 'a process id', request: { type: 'watch' } };
onRunEvent({ type: 'test-error', process, userData: {} });
expect(mockOutputManager.showOutputOn).toHaveBeenCalledWith(
'test-error',
- expect.anything()
+ expect.anything(),
+ runMode
);
mockOutputManager.showOutputOn.mockClear();
onRunEvent({ type: 'test-error', process });
expect(mockOutputManager.showOutputOn).not.toHaveBeenCalledWith(
'test-error',
- expect.anything()
+ expect.anything(),
+ runMode
);
});
it('will reset testError state when test run ended', () => {
@@ -1535,7 +1518,11 @@ describe('JestExt', () => {
await jestExt.runAllTests();
expect(runMode.config.deferred).toBe(false);
- expect(mockOutputManager.showOutputOn).toHaveBeenCalledWith('run', expect.anything());
+ expect(mockOutputManager.showOutputOn).toHaveBeenCalledWith(
+ 'run',
+ expect.anything(),
+ runMode
+ );
expect(mockOutputTerminal.revealOnError).toEqual(true);
expect(mockProcessSession.scheduleProcess).toHaveBeenCalledWith(
expect.objectContaining({ type: 'all-tests' })
@@ -1557,7 +1544,11 @@ describe('JestExt', () => {
await jestExt.runItemCommand(testItem, itemCommand);
expect(runMode.config.deferred).toBe(false);
- expect(mockOutputManager.showOutputOn).toHaveBeenCalledWith('run', expect.anything());
+ expect(mockOutputManager.showOutputOn).toHaveBeenCalledWith(
+ 'run',
+ expect.anything(),
+ runMode
+ );
expect(mockTestProvider.runItemCommand).toHaveBeenCalled();
});
describe('when triggered explicitly (by UI)', () => {
diff --git a/tests/Settings/helper.test.ts b/tests/Settings/helper.test.ts
index 82185de1e..30bb1ff1f 100644
--- a/tests/Settings/helper.test.ts
+++ b/tests/Settings/helper.test.ts
@@ -139,3 +139,78 @@ describe('updateSetting', () => {
await expect(updateSetting(v2, key, value)).rejects.toThrow();
});
});
+import { getSettingDetail } from '../../src/Settings/helper';
+
+describe('getSettingDetail', () => {
+ it('should return the value and isExplicitlySet true when the setting is explicitly set', () => {
+ const configName = 'testing';
+ const section = 'openTesting';
+ const explicitValue = 'openOnTestFailure';
+ const settingInspection = {
+ globalValue: undefined,
+ workspaceValue: explicitValue,
+ workspaceFolderValue: undefined,
+ globalLanguageValue: undefined,
+ workspaceLanguageValue: undefined,
+ workspaceFolderLanguageValue: undefined,
+ };
+
+ const mockConfig = {
+ get: jest.fn().mockReturnValue(explicitValue),
+ inspect: jest.fn().mockReturnValue(settingInspection),
+ };
+ vscode.workspace.getConfiguration = jest.fn().mockReturnValue(mockConfig);
+
+ const result = getSettingDetail(configName, section);
+
+ expect(vscode.workspace.getConfiguration).toHaveBeenCalledWith(configName);
+ expect(mockConfig.get).toHaveBeenCalledWith(section);
+ expect(mockConfig.inspect).toHaveBeenCalledWith(section);
+ expect(result).toEqual({ value: explicitValue, isExplicitlySet: true });
+ });
+ it('should return the default value and isExplicitlySet false when the setting is not explicitly set', () => {
+ const configName = 'testing';
+ const section = 'openTesting';
+ const defaultValue = 'openOnTestStart';
+ const settingInspection = {
+ globalValue: undefined,
+ workspaceValue: undefined,
+ workspaceFolderValue: undefined,
+ globalLanguageValue: undefined,
+ workspaceLanguageValue: undefined,
+ workspaceFolderLanguageValue: undefined,
+ };
+
+ const mockConfig = {
+ get: jest.fn().mockReturnValue(defaultValue),
+ inspect: jest.fn().mockReturnValue(settingInspection),
+ };
+ vscode.workspace.getConfiguration = jest.fn().mockReturnValue(mockConfig);
+
+ const result = getSettingDetail(configName, section);
+
+ expect(vscode.workspace.getConfiguration).toHaveBeenCalledWith(configName);
+ expect(mockConfig.get).toHaveBeenCalledWith(section);
+ expect(mockConfig.inspect).toHaveBeenCalledWith(section);
+ expect(result).toEqual({ value: defaultValue, isExplicitlySet: false });
+ });
+
+ it('should return undefined value and isExplicitlySet flag as false when inspection failed', () => {
+ const configName = 'jest';
+ const section = 'wrongSetting';
+ const settingInspection = null;
+
+ const mockConfig = {
+ get: jest.fn(),
+ inspect: jest.fn().mockReturnValue(settingInspection),
+ };
+ vscode.workspace.getConfiguration = jest.fn().mockReturnValue(mockConfig);
+
+ const result = getSettingDetail(configName, section);
+
+ expect(vscode.workspace.getConfiguration).toHaveBeenCalledWith(configName);
+ expect(mockConfig.get).toHaveBeenCalledWith(section);
+ expect(mockConfig.inspect).toHaveBeenCalledWith(section);
+ expect(result).toEqual({ value: undefined, isExplicitlySet: false });
+ });
+});
diff --git a/tests/output-manager.test.ts b/tests/output-manager.test.ts
index 5ee0efa8a..a64197f1c 100644
--- a/tests/output-manager.test.ts
+++ b/tests/output-manager.test.ts
@@ -9,31 +9,38 @@ const mockWorkspace = {
};
(vscode.workspace as jest.Mocked) = mockWorkspace;
-// jest.dontMock('../src/output-manager');
+import { getSettingDetail } from '../src/Settings';
+
+const mockSettings = (outputConfig?: any, openTesting?: string) => {
+  (getSettingDetail as jest.Mocked<any>).mockImplementation((_name: string, key: string) => {
+ if (key === 'outputConfig') {
+ return { value: outputConfig, isExplicitlySet: outputConfig !== undefined };
+ }
+ if (key === 'openTesting') {
+ return {
+ value: openTesting ?? 'openOnTestStart',
+ isExplicitlySet: openTesting !== undefined,
+ };
+ }
+ return undefined;
+ });
+};
+
+mockSettings();
+
jest.unmock('../src/output-manager');
import { OutputManager, DefaultJestOutputSetting } from '../src/output-manager';
describe('OutputManager', () => {
- const mockWorkspaceConfig = (outputConfig?: any, openTesting = 'openOnTestStart') => {
- mockConfig.get.mockImplementation((key: string) => {
- if (key === 'outputConfig') {
- return outputConfig;
- }
- if (key === 'openTesting') {
- return openTesting;
- }
- return undefined;
- });
- };
-
let showWarningMessageSpy: any;
beforeEach(() => {
jest.clearAllMocks();
    showWarningMessageSpy = vscode.window.showWarningMessage as jest.Mocked<any>;
// returns default config
- mockWorkspaceConfig();
+ mockSettings();
});
describe('constructor', () => {
@@ -50,25 +57,18 @@ describe('OutputManager', () => {
${7} | ${{ revealWithFocus: 'terminal', clearOnRun: 'terminal' }} | ${{ revealOn: 'run', revealWithFocus: 'terminal', clearOnRun: 'terminal' }}
${8} | ${'wrong-type'} | ${DefaultJestOutputSetting}
`('case $case', ({ outputConfig, expected }) => {
- mockWorkspaceConfig(outputConfig);
+ mockSettings(outputConfig);
const om = new OutputManager();
const { outputConfig: config } = om.outputConfigs();
- expect(config).toEqual(expected);
+ expect(config.value).toEqual(expected);
});
});
it('will ignore legacy settings', () => {
- mockConfig.get.mockImplementation((key: string) => {
- if (key === 'outputConfig') {
- return 'terminal-based';
- }
- if (key === 'openTesting') {
- return 'openOnTestStart';
- }
- return undefined;
- });
+ mockSettings('terminal-based', 'openOnTestStart');
+
const om = new OutputManager();
const { outputConfig: config } = om.outputConfigs();
- expect(config).toEqual({
+ expect(config.value).toEqual({
revealOn: 'run',
revealWithFocus: 'terminal',
clearOnRun: 'none',
@@ -91,12 +91,9 @@ describe('OutputManager', () => {
${10} | ${'whatever'} | ${undefined} | ${undefined} | ${DefaultJestOutputSetting}
${11} | ${'openOnTestStart'} | ${undefined} | ${'whatever'} | ${{ revealOn: 'run', revealWithFocus: 'test-results', clearOnRun: 'none' }}
`('case $case', ({ openTesting, autoClearTerminal, autoRevealOutput, expected }) => {
+ mockSettings(undefined, openTesting);
mockConfig.get.mockImplementation((key: string) => {
switch (key) {
- case 'outputConfig':
- return undefined;
- case 'openTesting':
- return openTesting;
case 'autoClearTerminal':
return autoClearTerminal;
case 'autoRevealOutput':
@@ -107,7 +104,7 @@ describe('OutputManager', () => {
});
const om = new OutputManager();
const { outputConfig: config } = om.outputConfigs();
- expect(config).toEqual(expected);
+ expect(config.value).toEqual(expected);
});
});
});
@@ -116,61 +113,190 @@ describe('OutputManager', () => {
describe('showOutputOn', () => {
let mockTerminalOutput: any;
const showTestResultsCommand = 'workbench.panel.testResults.view.focus';
-
beforeEach(() => {
mockTerminalOutput = {
enable: jest.fn(),
show: jest.fn(),
};
});
- it.each`
- case | outputConfig | type | enableTerminal | showOutput
- ${1} | ${undefined} | ${'run'} | ${true} | ${undefined}
- ${2} | ${undefined} | ${'test-error'} | ${undefined} | ${undefined}
- ${3} | ${undefined} | ${'exec-error'} | ${true} | ${undefined}
- ${4} | ${{ revealOn: 'error' }} | ${'run'} | ${undefined} | ${undefined}
- ${5} | ${{ revealOn: 'error' }} | ${'test-error'} | ${true} | ${undefined}
- ${6} | ${{ revealOn: 'error' }} | ${'exec-error'} | ${true} | ${undefined}
- ${7} | ${{ revealWithFocus: 'terminal' }} | ${'run'} | ${true} | ${'terminal'}
- ${8} | ${{ revealWithFocus: 'terminal' }} | ${'test-error'} | ${undefined} | ${undefined}
- ${9} | ${{ revealWithFocus: 'terminal' }} | ${'exec-error'} | ${true} | ${'terminal'}
- ${10} | ${{ revealWithFocus: 'test-results' }} | ${'run'} | ${true} | ${'test-results'}
- ${11} | ${{ revealWithFocus: 'test-results' }} | ${'test-error'} | ${undefined} | ${undefined}
- ${12} | ${{ revealWithFocus: 'test-results' }} | ${'exec-error'} | ${true} | ${undefined}
- ${13} | ${{ revealOn: 'error', revealWithFocus: 'terminal' }} | ${'run'} | ${undefined} | ${undefined}
- ${14} | ${{ revealOn: 'error', revealWithFocus: 'terminal' }} | ${'test-error'} | ${true} | ${'terminal'}
- ${15} | ${{ revealOn: 'error', revealWithFocus: 'test-results' }} | ${'test-error'} | ${true} | ${'test-results'}
- ${16} | ${{ revealOn: 'demand', revealWithFocus: 'test-results' }} | ${'run'} | ${undefined} | ${undefined}
- ${17} | ${{ revealOn: 'demand', revealWithFocus: 'test-results' }} | ${'test-error'} | ${undefined} | ${undefined}
- ${18} | ${{ revealOn: 'demand', revealWithFocus: 'test-results' }} | ${'exec-error'} | ${undefined} | ${undefined}
- `('case $case', ({ outputConfig, type, enableTerminal, showOutput }) => {
- mockConfig.get.mockImplementation((key) => {
- switch (key) {
- case 'outputConfig':
- return outputConfig;
- case 'openTesting':
- return 'neverOpen';
- }
+ describe('without runMode', () => {
+ describe('when no outputConfig is defined', () => {
+ it.each`
+ case | openTesting | type | enableTerminal | showTestResults
+ ${1} | ${'neverOpen'} | ${'run'} | ${true} | ${false}
+ ${2} | ${'neverOpen'} | ${'test-error'} | ${undefined} | ${false}
+ ${3} | ${'neverOpen'} | ${'exec-error'} | ${true} | ${false}
+ ${4} | ${'openOnTestStart'} | ${'run'} | ${true} | ${true}
+ ${5} | ${'openOnTestStart'} | ${'test-error'} | ${undefined} | ${false}
+ ${6} | ${'openOnTestStart'} | ${'exec-error'} | ${true} | ${false}
+ ${7} | ${'openOnTestFailure'} | ${'run'} | ${false} | ${false}
+ ${8} | ${'openOnTestFailure'} | ${'test-error'} | ${true} | ${true}
+ ${9} | ${'openOnTestFailure'} | ${'exec-error'} | ${true} | ${false}
+ ${10} | ${'openExplorerOnTestStart'} | ${'run'} | ${true} | ${false}
+ ${11} | ${'openExplorerOnTestStart'} | ${'test-error'} | ${undefined} | ${false}
+ ${12} | ${'openExplorerOnTestStart'} | ${'exec-error'} | ${true} | ${false}
+ ${13} | ${undefined} | ${'run'} | ${true} | ${true}
+ ${14} | ${undefined} | ${'test-error'} | ${undefined} | ${false}
+ ${15} | ${undefined} | ${'exec-error'} | ${true} | ${false}
+ `(
+ 'case $case openTesting=$openTesting, type=$type',
+ ({ openTesting, type, enableTerminal, showTestResults }) => {
+ mockSettings(undefined, openTesting);
+ const om = new OutputManager();
+ om.showOutputOn(type, mockTerminalOutput);
+
+ if (enableTerminal) {
+ expect(mockTerminalOutput.enable).toHaveBeenCalled();
+ } else {
+ expect(mockTerminalOutput.enable).not.toHaveBeenCalled();
+ }
+
+ expect(mockTerminalOutput.show).not.toHaveBeenCalled();
+
+ if (showTestResults) {
+ expect(vscode.commands.executeCommand).toHaveBeenCalledWith(showTestResultsCommand, {
+ preserveFocus: true,
+ });
+ } else {
+ expect(vscode.commands.executeCommand).not.toHaveBeenCalled();
+ }
+ }
+ );
});
- const om = new OutputManager();
- om.showOutputOn(type, mockTerminalOutput);
- if (enableTerminal) {
- expect(mockTerminalOutput.enable).toHaveBeenCalled();
- } else {
- expect(mockTerminalOutput.enable).not.toHaveBeenCalled();
- }
- if (showOutput) {
- if (showOutput === 'terminal') {
- expect(mockTerminalOutput.show).toHaveBeenCalled();
- expect(vscode.commands.executeCommand).not.toHaveBeenCalledWith(showTestResultsCommand);
- } else {
- expect(mockTerminalOutput.show).not.toHaveBeenCalled();
- expect(vscode.commands.executeCommand).toHaveBeenCalledWith(showTestResultsCommand);
+ describe.each([
+ ['neverOpen'],
+ ['openOnTestStart'],
+ ['openOnTestFailure'],
+ ['openExplorerOnTestStart'],
+ ])(`when openTesting is "%s"`, (openTesting) => {
+ it.each`
+ case | outputConfig | type | enableTerminal | showOutput
+ ${4} | ${{ revealOn: 'error' }} | ${'run'} | ${undefined} | ${undefined}
+ ${5} | ${{ revealOn: 'error' }} | ${'test-error'} | ${true} | ${undefined}
+ ${6} | ${{ revealOn: 'error' }} | ${'exec-error'} | ${true} | ${undefined}
+ ${7} | ${{ revealWithFocus: 'terminal' }} | ${'run'} | ${true} | ${'terminal'}
+ ${8} | ${{ revealWithFocus: 'terminal' }} | ${'test-error'} | ${undefined} | ${undefined}
+ ${9} | ${{ revealWithFocus: 'terminal' }} | ${'exec-error'} | ${true} | ${'terminal'}
+ ${10} | ${{ revealWithFocus: 'test-results' }} | ${'run'} | ${true} | ${'test-results'}
+ ${11} | ${{ revealWithFocus: 'test-results' }} | ${'test-error'} | ${undefined} | ${undefined}
+ ${12} | ${{ revealWithFocus: 'test-results' }} | ${'exec-error'} | ${true} | ${undefined}
+ ${13} | ${{ revealOn: 'error', revealWithFocus: 'terminal' }} | ${'run'} | ${undefined} | ${undefined}
+ ${14} | ${{ revealOn: 'error', revealWithFocus: 'terminal' }} | ${'test-error'} | ${true} | ${'terminal'}
+ ${15} | ${{ revealOn: 'error', revealWithFocus: 'test-results' }} | ${'test-error'} | ${true} | ${'test-results'}
+ ${16} | ${{ revealOn: 'demand', revealWithFocus: 'test-results' }} | ${'run'} | ${undefined} | ${undefined}
+ ${17} | ${{ revealOn: 'demand', revealWithFocus: 'test-results' }} | ${'test-error'} | ${undefined} | ${undefined}
+ ${18} | ${{ revealOn: 'demand', revealWithFocus: 'test-results' }} | ${'exec-error'} | ${undefined} | ${undefined}
+ `(
+ 'case $case when outputConfig is defined',
+ ({ outputConfig, type, enableTerminal, showOutput }) => {
+ mockSettings(outputConfig, openTesting);
+ const om = new OutputManager();
+ om.showOutputOn(type, mockTerminalOutput);
+ if (enableTerminal) {
+ expect(mockTerminalOutput.enable).toHaveBeenCalled();
+ } else {
+ expect(mockTerminalOutput.enable).not.toHaveBeenCalled();
+ }
+ if (showOutput) {
+ if (showOutput === 'terminal') {
+ expect(mockTerminalOutput.show).toHaveBeenCalled();
+ expect(vscode.commands.executeCommand).not.toHaveBeenCalled();
+ } else {
+ expect(mockTerminalOutput.show).not.toHaveBeenCalled();
+ expect(vscode.commands.executeCommand).toHaveBeenCalledWith(
+ showTestResultsCommand,
+ { preserveFocus: true }
+ );
+ }
+ } else {
+ expect(mockTerminalOutput.show).not.toHaveBeenCalled();
+ expect(vscode.commands.executeCommand).not.toHaveBeenCalled();
+ }
+ }
+ );
+ });
+ });
+ describe('with auto runMode', () => {
+ describe.each([['watch'], ['on-save']])('runMode=%s', (runMode) => {
+ it.each`
+ case | openTesting | outputConfig | type | execShowTestResults
+ ${1} | ${'neverOpen'} | ${undefined} | ${'run'} | ${false}
+ ${2} | ${'neverOpen'} | ${undefined} | ${'test-error'} | ${false}
+ ${3} | ${'neverOpen'} | ${undefined} | ${'exec-error'} | ${false}
+ ${4} | ${'neverOpen'} | ${{ revealOn: 'run', revealWithFocus: 'test-results' }} | ${'run'} | ${true}
+ ${5} | ${'neverOpen'} | ${{ revealOn: 'run', revealWithFocus: 'test-results' }} | ${'test-error'} | ${false}
+ ${6} | ${'neverOpen'} | ${{ revealOn: 'error', revealWithFocus: 'test-results' }} | ${'test-error'} | ${true}
+ ${7} | ${'openOnTestStart'} | ${undefined} | ${'run'} | ${true}
+ ${8} | ${'openOnTestStart'} | ${undefined} | ${'test-error'} | ${false}
+ ${9} | ${'openOnTestStart'} | ${undefined} | ${'exec-error'} | ${false}
+ ${10} | ${'openOnTestStart'} | ${{ revealOn: 'error', revealWithFocus: 'test-results' }} | ${'run'} | ${false}
+ ${11} | ${'openOnTestStart'} | ${{ revealOn: 'error', revealWithFocus: 'test-results' }} | ${'test-error'} | ${true}
+ ${12} | ${'openOnTestFailure'} | ${undefined} | ${'run'} | ${false}
+ ${13} | ${'openOnTestFailure'} | ${undefined} | ${'test-error'} | ${true}
+ ${14} | ${'openOnTestFailure'} | ${undefined} | ${'exec-error'} | ${false}
+ ${15} | ${'openOnTestFailure'} | ${{ revealOn: 'error', revealWithFocus: 'test-results' }} | ${'test-error'} | ${true}
+ ${16} | ${'openOnTestFailure'} | ${{ revealOn: 'run', revealWithFocus: 'test-results' }} | ${'test-error'} | ${false}
+ ${17} | ${'openExplorerOnTestStart'} | ${undefined} | ${'run'} | ${false}
+ ${18} | ${'openExplorerOnTestStart'} | ${undefined} | ${'test-error'} | ${false}
+ ${19} | ${'openExplorerOnTestStart'} | ${undefined} | ${'exec-error'} | ${false}
+ ${20} | ${undefined} | ${undefined} | ${'run'} | ${false}
+ ${21} | ${undefined} | ${undefined} | ${'test-error'} | ${false}
+ ${22} | ${undefined} | ${undefined} | ${'exec-error'} | ${false}
+ `('case $case', ({ openTesting, outputConfig, type, execShowTestResults }) => {
+ mockSettings(outputConfig, openTesting);
+ const om = new OutputManager();
+ const mockRunMode: any = { config: { type: runMode } };
+ om.showOutputOn(type, mockTerminalOutput, mockRunMode);
+
+ if (execShowTestResults) {
+ expect(vscode.commands.executeCommand).toHaveBeenCalledWith(showTestResultsCommand, {
+ preserveFocus: true,
+ });
+ } else {
+ expect(vscode.commands.executeCommand).not.toHaveBeenCalled();
+ }
+ });
+ });
+ });
+ describe('with on-demand runMode', () => {
+ it.each`
+ case | openTesting | outputConfig | type | execShowTestResults
+ ${1} | ${'neverOpen'} | ${undefined} | ${'run'} | ${false}
+ ${2} | ${'neverOpen'} | ${undefined} | ${'test-error'} | ${false}
+ ${3} | ${'neverOpen'} | ${undefined} | ${'exec-error'} | ${false}
+ ${4} | ${'neverOpen'} | ${{ revealOn: 'run', revealWithFocus: 'test-results' }} | ${'run'} | ${true}
+ ${5} | ${'neverOpen'} | ${{ revealOn: 'run', revealWithFocus: 'test-results' }} | ${'test-error'} | ${false}
+ ${6} | ${'neverOpen'} | ${{ revealOn: 'error', revealWithFocus: 'test-results' }} | ${'test-error'} | ${true}
+ ${7} | ${'openOnTestStart'} | ${undefined} | ${'run'} | ${false}
+ ${8} | ${'openOnTestStart'} | ${undefined} | ${'test-error'} | ${false}
+ ${9} | ${'openOnTestStart'} | ${undefined} | ${'exec-error'} | ${false}
+ ${10} | ${'openOnTestStart'} | ${{ revealOn: 'error', revealWithFocus: 'test-results' }} | ${'run'} | ${false}
+ ${11} | ${'openOnTestStart'} | ${{ revealOn: 'error', revealWithFocus: 'test-results' }} | ${'test-error'} | ${false}
+ ${12} | ${'openOnTestFailure'} | ${undefined} | ${'run'} | ${false}
+ ${13} | ${'openOnTestFailure'} | ${undefined} | ${'test-error'} | ${false}
+ ${14} | ${'openOnTestFailure'} | ${undefined} | ${'exec-error'} | ${false}
+ ${15} | ${'openOnTestFailure'} | ${{ revealOn: 'error', revealWithFocus: 'test-results' }} | ${'test-error'} | ${false}
+ ${16} | ${'openOnTestFailure'} | ${{ revealOn: 'run', revealWithFocus: 'test-results' }} | ${'test-error'} | ${false}
+ ${17} | ${'openExplorerOnTestStart'} | ${undefined} | ${'run'} | ${false}
+ ${18} | ${'openExplorerOnTestStart'} | ${undefined} | ${'test-error'} | ${false}
+ ${19} | ${'openExplorerOnTestStart'} | ${undefined} | ${'exec-error'} | ${false}
+ `(
      'case $case should not eagerly invoke the reveal command',
+ ({ openTesting, outputConfig, type, execShowTestResults }) => {
+ mockSettings(outputConfig, openTesting);
+ const om = new OutputManager();
+ const mockRunMode: any = { config: { type: 'on-demand' } };
+ om.showOutputOn(type, mockTerminalOutput, mockRunMode);
+
+ if (execShowTestResults) {
+ expect(vscode.commands.executeCommand).toHaveBeenCalledWith(showTestResultsCommand, {
+ preserveFocus: true,
+ });
+ } else {
+ expect(vscode.commands.executeCommand).not.toHaveBeenCalled();
+ }
}
- } else {
- expect(mockTerminalOutput.show).not.toHaveBeenCalled();
- expect(vscode.commands.executeCommand).not.toHaveBeenCalledWith(showTestResultsCommand);
- }
+ );
});
});
@@ -190,7 +316,7 @@ describe('OutputManager', () => {
${3} | ${'test-results'} | ${false} | ${true}
${4} | ${'both'} | ${true} | ${true}
`('case $case', ({ clearOnRun, clearTerminal, clearTestResults }) => {
- mockWorkspaceConfig({ ...DefaultJestOutputSetting, clearOnRun });
+ mockSettings({ ...DefaultJestOutputSetting, clearOnRun });
const om = new OutputManager();
om.clearOutputOnRun(mockTerminalOutput);
if (clearTerminal) {
@@ -206,57 +332,34 @@ describe('OutputManager', () => {
});
});
- describe('autoFocus', () => {
- it.each`
- case | outputConfig | openTesting | expected
- ${1} | ${undefined} | ${'openOnTestStart'} | ${true}
- ${2} | ${undefined} | ${'neverOpen'} | ${false}
- ${3} | ${{ revealWithFocus: 'none' }} | ${'openOnTestStart'} | ${true}
- ${4} | ${{ revealWithFocus: 'none' }} | ${'neverOpen'} | ${false}
- ${5} | ${{ revealWithFocus: 'test-results' }} | ${'neverOpen'} | ${true}
- ${6} | ${{ revealWithFocus: 'terminal' }} | ${'neverOpen'} | ${true}
- `('case $case: isAutoFocus = $expected', ({ outputConfig, openTesting, expected }) => {
- mockConfig.get.mockImplementation((key: string) => {
- switch (key) {
- case 'outputConfig':
- return outputConfig;
- case 'openTesting':
- return openTesting;
- }
- });
+ describe('disableAutoFocus', () => {
+ it('disableAutoFocus() will update both openTesting and outputConfig settings', async () => {
const om = new OutputManager();
- const result = om.isAutoFocus();
- expect(result).toEqual(expected);
+ await om.disableAutoFocus();
+ expect(mockConfig.update).toHaveBeenCalledWith(
+ 'openTesting',
+ 'neverOpen',
+ vscode.ConfigurationTarget.Workspace
+ );
+ expect(mockConfig.update).toHaveBeenCalledWith(
+ 'outputConfig',
+ expect.objectContaining({ revealWithFocus: 'none' })
+ );
});
- describe('disableAutoFocus', () => {
- it('disableAutoFocus() will update both openTesting and outputConfig settings', async () => {
- const om = new OutputManager();
- await om.disableAutoFocus();
- expect(mockConfig.update).toHaveBeenCalledWith(
- 'openTesting',
- 'neverOpen',
- vscode.ConfigurationTarget.Workspace
- );
- expect(mockConfig.update).toHaveBeenCalledWith(
- 'outputConfig',
- expect.objectContaining({ revealWithFocus: 'none' })
- );
- });
- it('during the update, validation will be skipped', async () => {
- const om = new OutputManager();
+ it('during the update, validation will be skipped', async () => {
+ const om = new OutputManager();
- let validateCount = 0;
- mockConfig.update.mockImplementation(async () => {
- // check if validation is skipped
- await expect(om.validate()).resolves.toBeUndefined();
- validateCount++;
- });
+ let validateCount = 0;
+ mockConfig.update.mockImplementation(async () => {
+ // check if validation is skipped
+ await expect(om.validate()).resolves.toBeUndefined();
+ validateCount++;
+ });
- await om.disableAutoFocus();
- expect(validateCount).toEqual(2);
+ await om.disableAutoFocus();
+ expect(validateCount).toEqual(2);
- mockConfig.update.mockReset();
- });
+ mockConfig.update.mockReset();
});
});
@@ -309,31 +412,24 @@ describe('OutputManager', () => {
mockChangeEvent = { affectsConfiguration: jest.fn() };
});
it('no-op if no outputConfig related changes detected', () => {
- mockWorkspaceConfig({ revealOn: 'error' });
+ mockSettings({ revealOn: 'error' });
mockChangeEvent.affectsConfiguration.mockReturnValue(false);
onDidChangeConfiguration.call(om, mockChangeEvent);
const { outputConfig: config } = om.outputConfigs();
- expect(config.revealOn).not.toBe('error');
+ expect(config.value.revealOn).not.toBe('error');
});
it('if outputConfig related changes detected, will load new config', () => {
- mockWorkspaceConfig({ revealOn: 'error' }, 'neverOpen');
+ mockSettings({ revealOn: 'error' }, 'neverOpen');
mockChangeEvent.affectsConfiguration.mockReturnValue(true);
onDidChangeConfiguration.call(om, mockChangeEvent);
const { outputConfig: config } = om.outputConfigs();
- expect(config.revealOn).toBe('error');
+ expect(config.value.revealOn).toBe('error');
expect(showWarningMessageSpy).not.toHaveBeenCalled();
});
it('will show warning message if outputConfig related changes detected and config is not valid', () => {
- mockConfig.get.mockImplementation((key: string) => {
- if (key === 'openTesting') {
- return 'openOnTestStart';
- }
- if (key === 'outputConfig') {
- return { revealOn: 'error' };
- }
- });
+ mockSettings({ revealOn: 'error' }, 'openOnTestStart');
mockChangeEvent.affectsConfiguration.mockReturnValue(true);
onDidChangeConfiguration.call(om, mockChangeEvent);
@@ -364,14 +460,7 @@ describe('OutputManager', () => {
${16} | ${{ revealWithFocus: 'terminal' }} | ${'openOnTestFailure'} | ${false}
${17} | ${{ revealWithFocus: 'terminal' }} | ${'openExplorerOnTestStart'} | ${true}
`('case $case: isAutoFocus = $expected', ({ outputConfig, openTesting, expected }) => {
- mockConfig.get.mockImplementation((key: string) => {
- switch (key) {
- case 'outputConfig':
- return outputConfig;
- case 'openTesting':
- return openTesting;
- }
- });
+ mockSettings(outputConfig, openTesting);
const om = new OutputManager();
expect(om.isTestResultsConfigsValid()).toEqual(expected);
});
@@ -391,7 +480,7 @@ describe('OutputManager', () => {
});
describe('when conflict detected', () => {
beforeEach(() => {
- mockWorkspaceConfig({ revealOn: 'error' });
+ mockSettings({ revealOn: 'error' });
});
it('will show warning message', async () => {
showWarningMessageSpy.mockResolvedValue(undefined);