refactor: move spec helpers to spec/lib (#37010)

Co-authored-by: Milan Burda <miburda@microsoft.com>
Milan Burda authored on 2023-01-25 22:01:25 +01:00, committed by GitHub
parent 355f322dbd
commit 4bc6b15f53
63 changed files with 125 additions and 125 deletions


@@ -0,0 +1,55 @@
/**
* @fileoverview A set of helper functions to make it easier to work
 * with events in an async/await manner.
*/
/**
* @param {!EventTarget} target
* @param {string} eventName
* @return {!Promise<!Event>}
*/
export const waitForEvent = (target: EventTarget, eventName: string) => {
return new Promise(resolve => {
target.addEventListener(eventName, resolve, { once: true });
});
};
/**
 * @param {!NodeJS.EventEmitter} emitter
 * @param {string} eventName
 * @param {function()=} trigger
 * @return {!Promise<!Array>} Resolves with the emitted arguments, Event first.
 */
export const emittedOnce = (emitter: NodeJS.EventEmitter, eventName: string, trigger?: () => void) => {
return emittedNTimes(emitter, eventName, 1, trigger).then(([result]) => result);
};
export const emittedNTimes = async (emitter: NodeJS.EventEmitter, eventName: string, times: number, trigger?: () => void) => {
const events: any[][] = [];
const p = new Promise<any[][]>(resolve => {
const handler = (...args: any[]) => {
events.push(args);
if (events.length === times) {
emitter.removeListener(eventName, handler);
resolve(events);
}
};
emitter.on(eventName, handler);
});
if (trigger) {
await Promise.resolve(trigger());
}
return p;
};
export const emittedUntil = async (emitter: NodeJS.EventEmitter, eventName: string, untilFn: Function) => {
const p = new Promise<any[]>(resolve => {
const handler = (...args: any[]) => {
if (untilFn(...args)) {
emitter.removeListener(eventName, handler);
resolve(args);
}
};
emitter.on(eventName, handler);
});
return p;
};
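For context, a spec consuming these helpers might look like the sketch below; it assumes this module is the events-helpers file imported elsewhere in this commit, and the window, event name, and trigger are illustrative rather than part of the change.

import { BrowserWindow } from 'electron/main';
import { emittedOnce } from './events-helpers';

it('resolves once the window has closed', async () => {
  const w = new BrowserWindow({ show: false });
  // The listener is attached before the trigger runs, so the event cannot be missed.
  const [closeEvent] = await emittedOnce(w, 'closed', () => w.close());
  // `closeEvent` is the first argument passed to the 'closed' handler.
});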


@@ -0,0 +1,92 @@
import * as path from 'path';
import * as fs from 'fs';
import { screen, desktopCapturer, NativeImage } from 'electron';
const fixtures = path.resolve(__dirname, '..', 'fixtures');
export enum HexColors {
GREEN = '#00b140',
PURPLE = '#6a0dad',
RED = '#ff0000',
BLUE = '#0000ff'
};
/**
* Capture the screen at the given point.
*
* NOTE: Not yet supported on Linux in CI due to empty sources list.
*/
export const captureScreen = async (point: Electron.Point = { x: 0, y: 0 }): Promise<NativeImage> => {
const display = screen.getDisplayNearestPoint(point);
const sources = await desktopCapturer.getSources({ types: ['screen'], thumbnailSize: display.size });
// Toggle to save screen captures for debugging.
const DEBUG_CAPTURE = process.env.DEBUG_CAPTURE || false;
if (DEBUG_CAPTURE) {
for (const source of sources) {
await fs.promises.writeFile(path.join(fixtures, `screenshot_${source.display_id}_${Date.now()}.png`), source.thumbnail.toPNG());
}
}
const screenCapture = sources.find(source => source.display_id === `${display.id}`);
// Fails when HDR is enabled on Windows.
// https://bugs.chromium.org/p/chromium/issues/detail?id=1247730
if (!screenCapture) {
const displayIds = sources.map(source => source.display_id);
throw new Error(`Unable to find screen capture for display '${display.id}'\n\tAvailable displays: ${displayIds.join(', ')}`);
}
return screenCapture.thumbnail;
};
const formatHexByte = (val: number): string => {
const str = val.toString(16);
return str.length === 2 ? str : `0${str}`;
};
/**
* Get the hex color at the given pixel coordinate in an image.
*/
export const getPixelColor = (image: Electron.NativeImage, point: Electron.Point): string => {
// image.crop crashes if point is fractional, so round to prevent that crash
const pixel = image.crop({ x: Math.round(point.x), y: Math.round(point.y), width: 1, height: 1 });
// TODO(samuelmaddock): NativeImage.toBitmap() should return the raw pixel
// color, but it sometimes differs. Why is that?
const [b, g, r] = pixel.toBitmap();
return `#${formatHexByte(r)}${formatHexByte(g)}${formatHexByte(b)}`;
};
const hexToRgba = (hexColor: string) => {
const match = hexColor.match(/^#([0-9a-fA-F]{6,8})$/);
if (!match) return;
const colorStr = match[1];
return [
parseInt(colorStr.substring(0, 2), 16),
parseInt(colorStr.substring(2, 4), 16),
parseInt(colorStr.substring(4, 6), 16),
parseInt(colorStr.substring(6, 8), 16) || 0xFF
];
};
/** Calculate Euclidean distance between colors. */
const colorDistance = (hexColorA: string, hexColorB: string) => {
const colorA = hexToRgba(hexColorA);
const colorB = hexToRgba(hexColorB);
if (!colorA || !colorB) return -1;
return Math.sqrt(
Math.pow(colorB[0] - colorA[0], 2) +
Math.pow(colorB[1] - colorA[1], 2) +
Math.pow(colorB[2] - colorA[2], 2)
);
};
/**
* Determine if colors are similar based on distance. This can be useful when
* comparing colors which may differ based on lossy compression.
*/
export const areColorsSimilar = (
hexColorA: string,
hexColorB: string,
distanceThreshold = 90
): boolean => {
const distance = colorDistance(hexColorA, hexColorB);
return distance <= distanceThreshold;
};
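A hedged usage sketch follows; the import path, coordinates, and expected color are assumptions for illustration only.

import { expect } from 'chai';
import { captureScreen, getPixelColor, areColorsSimilar, HexColors } from './screen-helpers';

it('renders solid green near the origin', async () => {
  const capture = await captureScreen({ x: 0, y: 0 });
  const color = getPixelColor(capture, { x: 10, y: 10 });
  // Thumbnails go through lossy encoding, so compare by distance instead of equality.
  expect(areColorsSimilar(color, HexColors.GREEN)).to.equal(true);
});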

spec/lib/spec-helpers.ts (new file, 193 lines)

@@ -0,0 +1,193 @@
import * as childProcess from 'child_process';
import * as path from 'path';
import * as http from 'http';
import * as v8 from 'v8';
import { SuiteFunction, TestFunction } from 'mocha';
import { BrowserWindow } from 'electron/main';
import { AssertionError } from 'chai';
const addOnly = <T>(fn: Function): T => {
const wrapped = (...args: any[]) => {
return fn(...args);
};
(wrapped as any).only = wrapped;
(wrapped as any).skip = wrapped;
return wrapped as any;
};
export const ifit = (condition: boolean) => (condition ? it : addOnly<TestFunction>(it.skip));
export const ifdescribe = (condition: boolean) => (condition ? describe : addOnly<SuiteFunction>(describe.skip));
export const delay = (time: number = 0) => new Promise(resolve => setTimeout(resolve, time));
type CleanupFunction = (() => void) | (() => Promise<void>)
const cleanupFunctions: CleanupFunction[] = [];
export async function runCleanupFunctions () {
for (const cleanup of cleanupFunctions) {
const r = cleanup();
if (r instanceof Promise) { await r; }
}
cleanupFunctions.length = 0;
}
export function defer (f: CleanupFunction) {
cleanupFunctions.unshift(f);
}
class RemoteControlApp {
process: childProcess.ChildProcess;
port: number;
constructor (proc: childProcess.ChildProcess, port: number) {
this.process = proc;
this.port = port;
}
remoteEval = (js: string): Promise<any> => {
return new Promise((resolve, reject) => {
const req = http.request({
host: '127.0.0.1',
port: this.port,
method: 'POST'
}, res => {
const chunks = [] as Buffer[];
res.on('data', chunk => { chunks.push(chunk); });
res.on('end', () => {
const ret = v8.deserialize(Buffer.concat(chunks));
if (Object.prototype.hasOwnProperty.call(ret, 'error')) {
reject(new Error(`remote error: ${ret.error}\n\nTriggered at:`));
} else {
resolve(ret.result);
}
});
});
req.write(js);
req.end();
});
}
remotely = (script: Function, ...args: any[]): Promise<any> => {
return this.remoteEval(`(${script})(...${JSON.stringify(args)})`);
}
}
export async function startRemoteControlApp (extraArgs: string[] = [], options?: childProcess.SpawnOptionsWithoutStdio) {
const appPath = path.join(__dirname, '..', 'fixtures', 'apps', 'remote-control');
const appProcess = childProcess.spawn(process.execPath, [appPath, ...extraArgs], options);
appProcess.stderr.on('data', d => {
process.stderr.write(d);
});
const port = await new Promise<number>(resolve => {
appProcess.stdout.on('data', d => {
const m = /Listening: (\d+)/.exec(d.toString());
if (m && m[1] != null) {
resolve(Number(m[1]));
}
});
});
defer(() => { appProcess.kill('SIGINT'); });
return new RemoteControlApp(appProcess, port);
}
export function waitUntil (
callback: () => boolean,
opts: { rate?: number, timeout?: number } = {}
) {
const { rate = 10, timeout = 10000 } = opts;
return new Promise<void>((resolve, reject) => {
let intervalId: NodeJS.Timeout | undefined; // eslint-disable-line prefer-const
let timeoutId: NodeJS.Timeout | undefined;
const cleanup = () => {
if (intervalId) clearInterval(intervalId);
if (timeoutId) clearTimeout(timeoutId);
};
const check = () => {
let result;
try {
result = callback();
} catch (e) {
cleanup();
reject(e);
return;
}
if (result === true) {
cleanup();
resolve();
return true;
}
};
if (check()) {
return;
}
intervalId = setInterval(check, rate);
timeoutId = setTimeout(() => {
timeoutId = undefined;
cleanup();
reject(new Error(`waitUntil timed out after ${timeout}ms`));
}, timeout);
});
}
export async function repeatedly<T> (
fn: () => Promise<T>,
opts?: { until?: (x: T) => boolean, timeLimit?: number }
) {
const { until = (x: T) => !!x, timeLimit = 10000 } = opts ?? {};
const begin = +new Date();
while (true) {
const ret = await fn();
if (until(ret)) { return ret; }
if (+new Date() - begin > timeLimit) { throw new Error(`repeatedly timed out (limit=${timeLimit})`); }
}
}
async function makeRemoteContext (opts?: any) {
const { webPreferences, setup, url = 'about:blank', ...rest } = opts ?? {};
const w = new BrowserWindow({ show: false, webPreferences: { nodeIntegration: true, contextIsolation: false, ...webPreferences }, ...rest });
await w.loadURL(url.toString());
if (setup) await w.webContents.executeJavaScript(setup);
return w;
}
const remoteContext: BrowserWindow[] = [];
export async function getRemoteContext () {
if (remoteContext.length) { return remoteContext[0]; }
const w = await makeRemoteContext();
defer(() => w.close());
return w;
}
export function useRemoteContext (opts?: any) {
before(async () => {
remoteContext.unshift(await makeRemoteContext(opts));
});
after(() => {
const w = remoteContext.shift();
w!.close();
});
}
export async function itremote (name: string, fn: Function, args?: any[]) {
it(name, async () => {
const w = await getRemoteContext();
const { ok, message } = await w.webContents.executeJavaScript(`(async () => {
try {
const chai_1 = require('chai')
chai_1.use(require('chai-as-promised'))
chai_1.use(require('dirty-chai'))
await (${fn})(...${JSON.stringify(args ?? [])})
return {ok: true};
} catch (e) {
return {ok: false, message: e.message}
}
})()`);
if (!ok) { throw new AssertionError(message); }
});
}
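A minimal sketch of a spec built on these helpers; the platform condition, timer, and timings are illustrative assumptions.

import { ifdescribe, defer, waitUntil } from './spec-helpers';

ifdescribe(process.platform === 'darwin')('macOS-only suite', () => {
  it('eventually observes a condition', async () => {
    let ready = false;
    const timer = setTimeout(() => { ready = true; }, 100);
    // Runs when the harness later calls runCleanupFunctions().
    defer(() => clearTimeout(timer));
    // Polls every 10 ms by default and rejects after 10 s if the callback never returns true.
    await waitUntil(() => ready);
  });
});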

spec/lib/video-helpers.js (new file, 498 lines)

@@ -0,0 +1,498 @@
/*
https://github.com/antimatter15/whammy
The MIT License (MIT)
Copyright (c) 2015 Kevin Kwok
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
*/
function atob (str) {
return Buffer.from(str, 'base64').toString('binary');
}
// in this case, frames has a very specific meaning, which will be
// detailed once i finish writing the code
function ToWebM (frames) {
const info = checkFrames(frames);
// max duration by cluster in milliseconds
const CLUSTER_MAX_DURATION = 30000;
const EBML = [
{
id: 0x1a45dfa3, // EBML
data: [
{
data: 1,
id: 0x4286 // EBMLVersion
},
{
data: 1,
id: 0x42f7 // EBMLReadVersion
},
{
data: 4,
id: 0x42f2 // EBMLMaxIDLength
},
{
data: 8,
id: 0x42f3 // EBMLMaxSizeLength
},
{
data: 'webm',
id: 0x4282 // DocType
},
{
data: 2,
id: 0x4287 // DocTypeVersion
},
{
data: 2,
id: 0x4285 // DocTypeReadVersion
}
]
},
{
id: 0x18538067, // Segment
data: [
{
id: 0x1549a966, // Info
data: [
{
data: 1e6, // do things in millisecs (num of nanosecs for duration scale)
id: 0x2ad7b1 // TimecodeScale
},
{
data: 'whammy',
id: 0x4d80 // MuxingApp
},
{
data: 'whammy',
id: 0x5741 // WritingApp
},
{
data: doubleToString(info.duration),
id: 0x4489 // Duration
}
]
},
{
id: 0x1654ae6b, // Tracks
data: [
{
id: 0xae, // TrackEntry
data: [
{
data: 1,
id: 0xd7 // TrackNumber
},
{
data: 1,
id: 0x73c5 // TrackUID
},
{
data: 0,
id: 0x9c // FlagLacing
},
{
data: 'und',
id: 0x22b59c // Language
},
{
data: 'V_VP8',
id: 0x86 // CodecID
},
{
data: 'VP8',
id: 0x258688 // CodecName
},
{
data: 1,
id: 0x83 // TrackType
},
{
id: 0xe0, // Video
data: [
{
data: info.width,
id: 0xb0 // PixelWidth
},
{
data: info.height,
id: 0xba // PixelHeight
}
]
}
]
}
]
},
{
id: 0x1c53bb6b, // Cues
data: [
// cue insertion point
]
}
// cluster insertion point
]
}
];
const segment = EBML[1];
const cues = segment.data[2];
// Generate clusters (max duration)
let frameNumber = 0;
let clusterTimecode = 0;
while (frameNumber < frames.length) {
const cuePoint = {
id: 0xbb, // CuePoint
data: [
{
data: Math.round(clusterTimecode),
id: 0xb3 // CueTime
},
{
id: 0xb7, // CueTrackPositions
data: [
{
data: 1,
id: 0xf7 // CueTrack
},
{
data: 0, // to be filled in when we know it
size: 8,
id: 0xf1 // CueClusterPosition
}
]
}
]
};
cues.data.push(cuePoint);
const clusterFrames = [];
let clusterDuration = 0;
do {
clusterFrames.push(frames[frameNumber]);
clusterDuration += frames[frameNumber].duration;
frameNumber++;
} while (frameNumber < frames.length && clusterDuration < CLUSTER_MAX_DURATION);
let clusterCounter = 0;
const cluster = {
id: 0x1f43b675, // Cluster
data: [
{
data: Math.round(clusterTimecode),
id: 0xe7 // Timecode
}
].concat(clusterFrames.map(function (webp) {
const block = makeSimpleBlock({
discardable: 0,
frame: webp.data.slice(4),
invisible: 0,
keyframe: 1,
lacing: 0,
trackNum: 1,
timecode: Math.round(clusterCounter)
});
clusterCounter += webp.duration;
return {
data: block,
id: 0xa3
};
}))
};
// Add cluster to segment
segment.data.push(cluster);
clusterTimecode += clusterDuration;
}
// First pass to compute cluster positions
let position = 0;
for (let i = 0; i < segment.data.length; i++) {
if (i >= 3) {
cues.data[i - 3].data[1].data[1].data = position;
}
const data = generateEBML([segment.data[i]]);
position += data.size || data.byteLength || data.length;
if (i !== 2) { // not cues
// Save results to avoid having to encode everything twice
segment.data[i] = data;
}
}
return generateEBML(EBML);
}
// sums the lengths of all the frames and gets the duration, woo
function checkFrames (frames) {
const width = frames[0].width;
const height = frames[0].height;
let duration = frames[0].duration;
for (let i = 1; i < frames.length; i++) {
if (frames[i].width !== width) throw new Error('Frame ' + (i + 1) + ' has a different width');
if (frames[i].height !== height) throw new Error('Frame ' + (i + 1) + ' has a different height');
if (frames[i].duration < 0 || frames[i].duration > 0x7fff) throw new Error('Frame ' + (i + 1) + ' has a weird duration (must be between 0 and 32767)');
duration += frames[i].duration;
}
return {
duration: duration,
width: width,
height: height
};
}
function numToBuffer (num) {
const parts = [];
while (num > 0) {
parts.push(num & 0xff);
num = num >> 8;
}
return new Uint8Array(parts.reverse());
}
function numToFixedBuffer (num, size) {
const parts = new Uint8Array(size);
for (let i = size - 1; i >= 0; i--) {
parts[i] = num & 0xff;
num = num >> 8;
}
return parts;
}
function strToBuffer (str) {
// return new Blob([str]);
const arr = new Uint8Array(str.length);
for (let i = 0; i < str.length; i++) {
arr[i] = str.charCodeAt(i);
}
return arr;
// this is slower
// return new Uint8Array(str.split('').map(function(e){
// return e.charCodeAt(0)
// }))
}
// sorry this is ugly, and sort of hard to understand exactly why this was done
// at all really, but the reason is that there's some code below that i dont really
// feel like understanding, and this is easier than using my brain.
function bitsToBuffer (bits) {
const data = [];
const pad = (bits.length % 8) ? (new Array(1 + 8 - (bits.length % 8))).join('0') : '';
bits = pad + bits;
for (let i = 0; i < bits.length; i += 8) {
data.push(parseInt(bits.substr(i, 8), 2));
}
return new Uint8Array(data);
}
function generateEBML (json) {
const ebml = [];
for (let i = 0; i < json.length; i++) {
if (!('id' in json[i])) {
// already encoded blob or byteArray
ebml.push(json[i]);
continue;
}
let data = json[i].data;
if (typeof data === 'object') data = generateEBML(data);
if (typeof data === 'number') data = ('size' in json[i]) ? numToFixedBuffer(data, json[i].size) : bitsToBuffer(data.toString(2));
if (typeof data === 'string') data = strToBuffer(data);
const len = data.size || data.byteLength || data.length;
const zeroes = Math.ceil(Math.ceil(Math.log(len) / Math.log(2)) / 8);
const sizeStr = len.toString(2);
const padded = (new Array((zeroes * 7 + 7 + 1) - sizeStr.length)).join('0') + sizeStr;
const size = (new Array(zeroes)).join('0') + '1' + padded;
// i actually dont quite understand what went on up there, so I'm not really
// going to fix this, i'm probably just going to write some hacky thing which
// converts that string into a buffer-esque thing
ebml.push(numToBuffer(json[i].id));
ebml.push(bitsToBuffer(size));
ebml.push(data);
}
// convert ebml to an array
const buffer = toFlatArray(ebml);
return new Uint8Array(buffer);
}
function toFlatArray (arr, outBuffer) {
if (outBuffer == null) {
outBuffer = [];
}
for (let i = 0; i < arr.length; i++) {
if (typeof arr[i] === 'object') {
// an array
toFlatArray(arr[i], outBuffer);
} else {
// a simple element
outBuffer.push(arr[i]);
}
}
return outBuffer;
}
function makeSimpleBlock (data) {
let flags = 0;
if (data.keyframe) flags |= 128;
if (data.invisible) flags |= 8;
if (data.lacing) flags |= (data.lacing << 1);
if (data.discardable) flags |= 1;
if (data.trackNum > 127) {
throw new Error('TrackNumber > 127 not supported');
}
const out = [data.trackNum | 0x80, data.timecode >> 8, data.timecode & 0xff, flags].map(function (e) {
return String.fromCharCode(e);
}).join('') + data.frame;
return out;
}
// here's something else taken verbatim from weppy, awesome rite?
function parseWebP (riff) {
const VP8 = riff.RIFF[0].WEBP[0];
const frameStart = VP8.indexOf('\x9d\x01\x2a'); // A VP8 keyframe starts with the 0x9d012a header
const c = [];
for (let i = 0; i < 4; i++) c[i] = VP8.charCodeAt(frameStart + 3 + i);
// the code below is literally copied verbatim from the bitstream spec
let tmp = (c[1] << 8) | c[0];
const width = tmp & 0x3FFF;
const horizontalScale = tmp >> 14;
tmp = (c[3] << 8) | c[2];
const height = tmp & 0x3FFF;
const verticalScale = tmp >> 14;
return {
width: width,
height: height,
data: VP8,
riff: riff
};
}
// i think i'm going off on a riff by pretending this is some known
// idiom which i'm making a casual and brilliant pun about, but since
// i can't find anything on google which conforms to this idiomatic
// usage, I'm assuming this is just a consequence of some psychotic
// break which makes me make up puns. well, enough riff-raff (aha a
// rescue of sorts), this function was ripped wholesale from weppy
function parseRIFF (string) {
let offset = 0;
const chunks = {};
while (offset < string.length) {
const id = string.substr(offset, 4);
chunks[id] = chunks[id] || [];
if (id === 'RIFF' || id === 'LIST') {
const len = parseInt(string.substr(offset + 4, 4).split('').map(function (i) {
const unpadded = i.charCodeAt(0).toString(2);
return (new Array(8 - unpadded.length + 1)).join('0') + unpadded;
}).join(''), 2);
const data = string.substr(offset + 4 + 4, len);
offset += 4 + 4 + len;
chunks[id].push(parseRIFF(data));
} else if (id === 'WEBP') {
// Use (offset + 8) to skip past "VP8 "/"VP8L"/"VP8X" field after "WEBP"
chunks[id].push(string.substr(offset + 8));
offset = string.length;
} else {
// Unknown chunk type; push entire payload
chunks[id].push(string.substr(offset + 4));
offset = string.length;
}
}
return chunks;
}
// here's a little utility function that acts as a utility for other functions
// basically, the only purpose is for encoding "Duration", which is encoded as
// a double (considerably more difficult to encode than an integer)
function doubleToString (num) {
return [].slice.call(
new Uint8Array(
(
new Float64Array([num]) // create a float64 array
).buffer) // extract the array buffer
, 0) // convert the Uint8Array into a regular array
.map(function (e) { // since it's a regular array, we can now use map
return String.fromCharCode(e); // encode all the bytes individually
})
.reverse() // correct the byte endianness (assume it's little endian for now)
.join(''); // join the bytes in holy matrimony as a string
}
function WhammyVideo (speed, quality = 0.8) { // a more abstract-ish API
this.frames = [];
this.duration = 1000 / speed;
this.quality = quality;
}
/**
 * @param {string|HTMLCanvasElement|CanvasRenderingContext2D} frame
 * @param {number} [duration]
 */
WhammyVideo.prototype.add = function (frame, duration) {
if (typeof duration !== 'undefined' && this.duration) throw new Error("you can't pass a duration if the fps is set");
if (typeof duration === 'undefined' && !this.duration) throw new Error("if you don't have the fps set, you need to have durations here.");
if (frame.canvas) { // CanvasRenderingContext2D
frame = frame.canvas;
}
if (frame.toDataURL) {
// frame = frame.toDataURL('image/webp', this.quality);
// quickly store image data so we don't block cpu. encode in compile method.
frame = frame.getContext('2d').getImageData(0, 0, frame.width, frame.height);
} else if (typeof frame !== 'string') {
throw new Error('frame must be an HTMLCanvasElement, a CanvasRenderingContext2D or a DataURI formatted string');
}
if (typeof frame === 'string' && !(/^data:image\/webp;base64,/ig).test(frame)) {
throw new Error('Input must be formatted properly as a base64 encoded DataURI of type image/webp');
}
this.frames.push({
image: frame,
duration: duration || this.duration
});
};
WhammyVideo.prototype.compile = function (callback) {
const webm = new ToWebM(this.frames.map(function (frame) {
const webp = parseWebP(parseRIFF(atob(frame.image.slice(23))));
webp.duration = frame.duration;
return webp;
}));
callback(webm);
};
export const WebmGenerator = WhammyVideo;
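Assuming callers feed it base64 image/webp data URLs (for example from canvas.toDataURL('image/webp')), a usage sketch might look like this; the canvas and frame rate are illustrative.

import { WebmGenerator } from './video-helpers';

declare const canvas: HTMLCanvasElement; // illustrative: a canvas rendered earlier in the spec

const video = new WebmGenerator(30); // 30 fps; the per-frame duration is derived from the rate
video.add(canvas.toDataURL('image/webp')); // frames must be image/webp data URLs
video.compile((webm: Uint8Array) => {
  // `webm` holds the muxed WebM bytes, ready to write to disk or wrap in a Blob.
});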


@@ -0,0 +1,47 @@
import { expect } from 'chai';
import { BrowserWindow } from 'electron/main';
import { emittedOnce } from './events-helpers';
async function ensureWindowIsClosed (window: BrowserWindow | null) {
if (window && !window.isDestroyed()) {
if (window.webContents && !window.webContents.isDestroyed()) {
// If a window isn't destroyed already, and it has non-destroyed WebContents,
// then calling destroy() won't immediately destroy it, as it may have
// <webview> children which need to be destroyed first. In that case, we
// await the 'closed' event which signals the complete shutdown of the
// window.
const isClosed = emittedOnce(window, 'closed');
window.destroy();
await isClosed;
} else {
// If there's no WebContents or if the WebContents is already destroyed,
// then the 'closed' event has already been emitted so there's nothing to
// wait for.
window.destroy();
}
}
}
export const closeWindow = async (
window: BrowserWindow | null = null,
{ assertNotWindows } = { assertNotWindows: true }
) => {
await ensureWindowIsClosed(window);
if (assertNotWindows) {
const windows = BrowserWindow.getAllWindows();
try {
expect(windows).to.have.lengthOf(0);
} finally {
for (const win of windows) {
await ensureWindowIsClosed(win);
}
}
}
};
export async function closeAllWindows () {
for (const w of BrowserWindow.getAllWindows()) {
await closeWindow(w, { assertNotWindows: false });
}
}
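A typical pattern in specs, sketched under the assumption that the module is imported as './window-helpers':

import { BrowserWindow } from 'electron/main';
import { closeWindow, closeAllWindows } from './window-helpers';

describe('window behaviour', () => {
  // Destroy any windows the tests created so state does not leak between specs.
  afterEach(closeAllWindows);

  it('can also close a tracked window explicitly', async () => {
    const w = new BrowserWindow({ show: false });
    // Waits for 'closed' if needed and, by default, asserts that no windows remain.
    await closeWindow(w);
  });
});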