build: use github actions for windows (#45012)

* build: use github actions for windows (#44136)

* build: test windows runner

* build: try build windows on windows?

* build: take win/cross changes

* build: use bash as default shell always

* build: configure git for windows build tools

* build: bash as default

* build: configure windows correctly

* build: use sha1sum

* build: force windows cipd init and python3 existence

* just pain

* build: restore cache on windows

* build: use build-tools gclient

* build: sync gclient vars to build windows job

* build: output depshash for debugging

* build: past sam was a silly goose

* build: depshash logging

* build: force lf endings for lock and DEPS

* build: platform strings are hard

* build: checkout on windows host

* sup

* no check

* idk

* sigh

* ...

* no double checkout

* build: yolo some stuff

* build: run gn-check for windows on linux hosts for speed

* use container...

* cry ?

* build: e d

* e d

* no log

* fix toolchain on windows cross check

* build: use powershell to add mksnapshot_args

* build: enable x86 and arm64 windows builds too

* clean up

* maybe not needed

* build: keep action around for post step

* build: configure git global on win

* build: ia32 zip manifest

* build: no patch depot_tools for tests

* build: get arm64 windows closer to working

* build: windows tar is ass

* 32 bit on 32 bit

* maybe bash

* build: set up nodejs

* correct windows sharding

* fix some spec runner stuff

* fix windows tests

* overwrite -Force

* sigh

* screen res

* wat

* logs

* ... more logs

* line endings will be the death of me

* remove 1080p force thing

* vsctools + logging

* disable some fullscreen tests on GHA

* no progress

* run all CI

* install visual studio on arm64

* windows hax for non windows

* maybe arm sdk

* clean up depshash logic

* build: use single check per platform

* ensure clean args

* fix loop

* remove debug

* update default build image sha for dispatch

* plzzzz

* one more try

* arm64 vctools

* sad

* build: fix non-dispatch windows gn check

* chore: debug datadog-ci location

* chore: update build-tools for newer toolchain

* chore: set path for datadog-ci

* try this

* chore: fixup gn check

* fixup gn-check some more

* fixup windows gn check

* chore: fixup windows gn check

* test: use cmd for Windows testing

* fixup use cmd for testing on Windows

* fixup windows GN check

* fixup npm config arch for x86

* Can we set test files via powershell

* fixup to set test files via powershell

* fixup set test files via powershell

* Don't check cross instance cache disk space on Windows

* Use separate step to set env variables for testing

* fixup Use separate step to set env variables for testing

* fixup Use separate step to set env variables for testing

* fixup Use separate step to set env variables for testing (AGAIN)

* use powershell if in powershell

* fixup use powershell if in powershell

* chore: remove no longer needed changes to depot_tools

xref: https://chromium-review.googlesource.com/c/chromium/tools/depot_tools/+/5669094
and https://chromium-review.googlesource.com/c/chromium/src/+/5844046

* chore: try using 7zip on Windows to extract tarball

* Revert "chore: try using 7zip on Windows to extract tarball"

This reverts commit c7432b6a37857fd0746b8f1776fbd1103dba0b85.

* test: debug failing tests on GHA windows

* fix: ftbfs when including simdjson in Node.js

(cherry picked from commit 48e44c40d61b7aa843a990d4e0c8dec676b4ce8f)

* chore: try to track down Windows testing hang

* use correct timeout

* try this

* see if this helps

* try to figure out why node is running

* shard tests to try to narrow down WOA lockup

* try to narrow down problem test

* Narrow down blocking test more

* do we need a combo to repro

* see if this cleans up the tests

* fixup navigator.usb test

* remove logging from problematic tests

* Revert "shard tests to try to narrow down WOA lockup"

This reverts commit a1806583769678491814cb8b008131c32be4e8fb.

* remove logging

* debug keyboard test

* add timeout for Windows since arm64 sometimes hangs

* see if this helps

* put back original timeout

* try to use screenCapture to get screenshots of what is going on on WOA

* try using electron screencapture to debug WOA hang

* chore: turn off privacy experience

* run screenshot on both shards

* fixup screencap

* try to narrow down hanging spec

* chore: cleanup servers left open

* cleanup tests

* Revert "try to narrow down hanging spec"

This reverts commit a0f959f5382f4012a9919ac535d42c5333eb7d5f.

* cleanup test debugging

* fixup extensions spec

* cleanup unneeded items

* run wtf with 2 shards instead of 6

* Revert "run wtf with 2 shards instead of 6"

This reverts commit ca2d282129ee42c535d80f9876d6fa0dc6c08344.

* debug windows version on woa

* dump more info

* Get detailed CPU info

* revert debugging

* use same args as AppVeyor WOA for GHA WOA

* fixup use same args as AppVeyor WOA for GHA WOA

* fixup use same args as AppVeyor WOA for GHA WOA

* try to track down which tests trigger hang

* one or more of these combinations should hang

* break up web contents spec to find hang

* further break down api-web-contents to find hang

* test: ensure all webContents are closed

* test: fix require is not defined error

* see if api-web-contents spec is now good

* test: ensure all webContents are closed

* Revert "try to track down which tests trigger hang"

This reverts commit 07298d6ffeb4873ef7615a8ec3d1a6696e354ff4.

* chore: use alternate location for windows toolchain

* Reapply "try to track down which tests trigger hang"

This reverts commit 0321f76d01069ef325339b6fe6ed39700eae2b6b.

* try to narrow down problem test

* fix TEST_SHARD env var

* no, really fix TEST_SHARD env var

* see if this fixes it

* test: cleanup any remaining windows and webcontents

* see if new cleanup helps

* dont destroy webcontents for now

* fixup dont destroy webcontents for now

* Only cleanup right before process.exit

* see if this fixes the hang

* actually destroy webcontents

* Revert "Reapply "try to track down which tests trigger hang""

This reverts commit cdee7de049ce6bb5f67bbcc64882c56aa2c73027.

* see if this helps

* Revert "see if this helps"

This reverts commit 9a15a69cf7dbc456db7a61efa5b6870535bae993.

* Is it all about the web contents?

* it is all about the webcontents

but which one?

* Narrow down problem webcontents test

* try to speed up git install on WOA

* disable problematic test on WOA

* remove debugging

* remove debugging from choco installs

* Revert "disable problematic test on WOA"

This reverts commit e060fb0839b73d53cfde1f8acdca634f8e267937.

* Revert "remove debugging"

This reverts commit f18dd8b1a555f56bb06d0ea996a6eff31b424bf1.

* run against all the tests in the failing shard

* don't run visibility tests first

* remove debugging

* 3 is a magic number

* Revert "3 is a magic number"

This reverts commit 36b91ccf9f03a4b34230cd69ceca482f7d8428c1.

* match what Appveyor runs exactly

* Revert "match what Appveyor runs exactly"

This reverts commit 7260dd432216c62696e4bc864930f17c857eabbe.

* chore: sort files alphabetically

* find out what spec is leaving stuff open

* chore: Checkout PR HEAD commit instead of merge commit

* try using app.exit instead of process.exit

* test: cleanup BrowserWindows and webContents

* Revert "chore: sort files alphabetically"

This reverts commit d9e217ffb1522076e150fce9e43a31bf56716acb.

* chore: use win32 to match process.platform

Needed for build-tools to download from PRs

* chore: cache yarn dir

* fixup cache yarn

* fixup use win32 to match process.platform

* fixup use win32 to match process.platform

* fixup cache yarn

* Add debugging for WOA hang

* Add debugging for failing keyboard lock test

* Revert "Add debugging for WOA hang"

This reverts commit 8df03d568d15a269e4026140d1158e8cdf551dec.

* try using process.kill

* add more debugging to keyboard.lock test

* Revert "Add debugging for failing keyboard lock test"

* remove debugging

* test: disable keyboard.lock on Windows

* test: disable fullscreen tests on Windows

* test: only force test suite exit on WOA

* fixup test: only force test suite exit on WOA

* cleanup tests

* extract yarn caching/install to action

* try using bash to run windows tests

* remove left over debugging

* standardize on 'win' for Windows builds

* use 'x86' for arch for manifest files

* fixup try using bash to run windows tests

* fixup use 'x86' for arch for manifest files

* standardize on 'win' for Windows builds

* fixup use 'x86' for arch for manifest files

* fixup try using bash to run windows tests

---------

Co-authored-by: John Kleinschmidt <jkleinsc@electronjs.org>
Co-authored-by: Charles Kerr <charles@charleskerr.com>
(cherry picked from commit be1a3dce83)

* chore: update build tools to correct sha

---------

Co-authored-by: Samuel Attard <sam@electronjs.org>
John Kleinschmidt 2024-12-16 14:51:21 +00:00 committed by GitHub
parent e672dd4628
commit 7cd00f143b
42 changed files with 662 additions and 329 deletions

.gitattributes
View file

@@ -1,6 +1,9 @@
 # `git apply` and friends don't understand CRLF, even on windows. Force those
 # files to be checked out with LF endings even if core.autocrlf is true.
 *.patch text eol=lf
+DEPS text eol=lf
+yarn.lock text eol=lf
+script/zip_manifests/*.manifest text eol=lf
 patches/**/.patches merge=union
 # Source code and markdown files should always use LF as line ending.

View file

@@ -5,10 +5,10 @@ inputs:
 description: 'Target arch'
 required: true
 target-platform:
-description: 'Target platform'
+description: 'Target platform, should be linux, win, macos'
 required: true
 artifact-platform:
-description: 'Artifact platform, should be linux, darwin or mas'
+description: 'Artifact platform, should be linux, win, darwin or mas'
 required: true
 step-suffix:
 description: 'Suffix for build steps'
@@ -71,7 +71,7 @@
 cd src
 e build --target electron:electron_dist_zip -j $NUMBER_OF_NINJA_PROCESSES
 if [ "${{ inputs.is-asan }}" != "true" ]; then
-target_os=${{ inputs.target-platform == 'linux' && 'linux' || 'mac'}}
+target_os=${{ inputs.target-platform == 'macos' && 'mac' || inputs.target-platform }}
 if [ "${{ inputs.artifact-platform }}" = "mas" ]; then
 target_os="${target_os}_mas"
 fi
@@ -82,7 +82,7 @@
 run: |
 cd src
 e build --target electron:electron_mksnapshot -j $NUMBER_OF_NINJA_PROCESSES
-gn desc out/Default v8:run_mksnapshot_default args > out/Default/mksnapshot_args
+ELECTRON_DEPOT_TOOLS_DISABLE_LOG=1 e d gn desc out/Default v8:run_mksnapshot_default args > out/Default/mksnapshot_args
 # Remove unused args from mksnapshot_args
 SEDOPTION="-i"
 if [ "`uname`" = "Darwin" ]; then
@@ -91,7 +91,7 @@
 sed $SEDOPTION '/.*builtins-pgo/d' out/Default/mksnapshot_args
 sed $SEDOPTION '/--turbo-profiling-input/d' out/Default/mksnapshot_args
-if [ "`uname`" = "Linux" ]; then
+if [ "${{ inputs.target-platform }}" = "linux" ]; then
 if [ "${{ inputs.target-arch }}" = "arm" ]; then
 electron/script/strip-binaries.py --file $PWD/out/Default/clang_x86_v8_arm/mksnapshot
 electron/script/strip-binaries.py --file $PWD/out/Default/clang_x86_v8_arm/v8_context_snapshot_generator
@@ -105,7 +105,13 @@
 fi
 e build --target electron:electron_mksnapshot_zip -j $NUMBER_OF_NINJA_PROCESSES
+if [ "${{ inputs.target-platform }}" = "win" ]; then
+cd out/Default
+powershell Compress-Archive -update mksnapshot_args mksnapshot.zip
+powershell Compress-Archive -update gen/v8/embedded.S mksnapshot.zip
+else
 (cd out/Default; zip mksnapshot.zip mksnapshot_args gen/v8/embedded.S)
+fi
 - name: Generate Cross-Arch Snapshot (arm/arm64) ${{ inputs.step-suffix }}
 shell: bash
 if: ${{ (inputs.target-arch == 'arm' || inputs.target-arch == 'arm64') && inputs.target-platform == 'linux' }}

View file

@@ -5,6 +5,10 @@ inputs:
 description: 'Whether to generate and persist a SAS token for the item in the cache'
 required: false
 default: 'false'
+use-cache:
+description: 'Whether to persist the cache to the shared drive'
+required: false
+default: 'true'
 runs:
 using: "composite"
 steps:
@@ -13,31 +17,27 @@ runs:
 run: |
 echo "GIT_CACHE_PATH=$(pwd)/git-cache" >> $GITHUB_ENV
 - name: Install Dependencies
-shell: bash
-run: |
-cd src/electron
-node script/yarn install --frozen-lockfile
+uses: ./src/electron/.github/actions/install-dependencies
+- name: Install Build Tools
+uses: ./src/electron/.github/actions/install-build-tools
 - name: Get Depot Tools
 shell: bash
 run: |
+if [[ ! -d depot_tools ]]; then
 git clone --depth=1 https://chromium.googlesource.com/chromium/tools/depot_tools.git
-sed -i '/ninjalog_uploader_wrapper.py/d' ./depot_tools/autoninja
-# Remove swift-format dep from cipd on macOS until we send a patch upstream.
-cd depot_tools
-git apply --3way ../src/electron/.github/workflows/config/gclient.diff
 # Ensure depot_tools does not update.
 test -d depot_tools && cd depot_tools
 touch .disable_auto_update
+fi
 - name: Add Depot Tools to PATH
 shell: bash
 run: echo "$(pwd)/depot_tools" >> $GITHUB_PATH
 - name: Generate DEPS Hash
 shell: bash
 run: |
-node src/electron/script/generate-deps-hash.js && cat src/electron/.depshash-target
-echo "DEPSHASH=v1-src-cache-$(shasum src/electron/.depshash | cut -f1 -d' ')" >> $GITHUB_ENV
+node src/electron/script/generate-deps-hash.js
+echo "DEPSHASH=v1-src-cache-$(cat src/electron/.depshash)" >> $GITHUB_ENV
 - name: Generate SAS Key
 if: ${{ inputs.generate-sas-token == 'true' }}
 shell: bash
@@ -54,6 +54,10 @@ runs:
 id: check-cache
 shell: bash
 run: |
+if [[ "${{ inputs.use-cache }}" == "false" ]]; then
+echo "Not using cache this time..."
+echo "cache_exists=false" >> $GITHUB_OUTPUT
+else
 cache_path=/mnt/cross-instance-cache/$DEPSHASH.tar
 echo "Using cache key: $DEPSHASH"
 echo "Checking for cache in: $cache_path"
@@ -64,8 +68,9 @@
 echo "cache_exists=true" >> $GITHUB_OUTPUT
 echo "Cache Already Exists for $DEPSHASH, Skipping.."
 fi
+fi
 - name: Check cross instance cache disk space
-if: steps.check-cache.outputs.cache_exists == 'false'
+if: steps.check-cache.outputs.cache_exists == 'false' && inputs.use-cache == 'true'
 shell: bash
 run: |
 # if there is less than 20 GB free space then creating the cache might fail so exit early
@@ -81,13 +86,17 @@
 if: steps.check-cache.outputs.cache_exists == 'false'
 shell: bash
 run: |
-gclient config \
+e d gclient config \
 --name "src/electron" \
 --unmanaged \
 ${GCLIENT_EXTRA_ARGS} \
 "$GITHUB_SERVER_URL/$GITHUB_REPOSITORY"
-ELECTRON_USE_THREE_WAY_MERGE_FOR_PATCHES=1 gclient sync --with_branch_heads --with_tags -vvvvv
+if [ "$TARGET_OS" != "" ]; then
+echo "target_os=['$TARGET_OS']" >> ./.gclient
+fi
+ELECTRON_USE_THREE_WAY_MERGE_FOR_PATCHES=1 e d gclient sync --with_branch_heads --with_tags -vv
 if [ "${{ inputs.is-release }}" != "true" && -n "${{ env.PATCH_UP_APP_CREDS }}" ]; then
 # Re-export all the patches to check if there were changes.
 python3 src/electron/script/export_all_patches.py src/electron/patches/config.json
@@ -128,13 +137,13 @@
 # https://dawn-review.googlesource.com/c/dawn/+/83901
 # TODO: maybe better to always leave out */.git/HEAD file for all targets ?
 - name: Delete .git directories under src to free space
-if: steps.check-cache.outputs.cache_exists == 'false'
+if: ${{ steps.check-cache.outputs.cache_exists == 'false' && inputs.use-cache == 'true' }}
 shell: bash
 run: |
 cd src
 ( find . -type d -name ".git" -not -path "./third_party/angle/*" -not -path "./third_party/dawn/*" -not -path "./electron/*" ) | xargs rm -rf
 - name: Minimize Cache Size for Upload
-if: steps.check-cache.outputs.cache_exists == 'false'
+if: ${{ steps.check-cache.outputs.cache_exists == 'false' && inputs.use-cache == 'true' }}
 shell: bash
 run: |
 rm -rf src/android_webview
@@ -145,9 +154,12 @@
 rm -rf src/third_party/angle/third_party/VK-GL-CTS/src
 rm -rf src/third_party/swift-toolchain
 rm -rf src/third_party/swiftshader/tests/regres/testlists
+cp src/electron/.github/actions/checkout/action.yml ./
 rm -rf src/electron
+mkdir -p src/electron/.github/actions/checkout
+mv action.yml src/electron/.github/actions/checkout
 - name: Compress Src Directory
-if: steps.check-cache.outputs.cache_exists == 'false'
+if: ${{ steps.check-cache.outputs.cache_exists == 'false' && inputs.use-cache == 'true' }}
 shell: bash
 run: |
 echo "Uncompressed src size: $(du -sh src | cut -f1 -d' ')"
@@ -155,7 +167,7 @@
 echo "Compressed src to $(du -sh $DEPSHASH.tar | cut -f1 -d' ')"
 cp ./$DEPSHASH.tar /mnt/cross-instance-cache/
 - name: Persist Src Cache
-if: steps.check-cache.outputs.cache_exists == 'false'
+if: ${{ steps.check-cache.outputs.cache_exists == 'false' && inputs.use-cache == 'true' }}
 shell: bash
 run: |
 final_cache_path=/mnt/cross-instance-cache/$DEPSHASH.tar

View file

@@ -6,6 +6,15 @@ runs:
 - name: Install Build Tools
 shell: bash
 run: |
-export BUILD_TOOLS_SHA=eeb1a11392e4cec08fd926c93b31ab556dc0c23b
+if [ "$(expr substr $(uname -s) 1 10)" == "MSYS_NT-10" ]; then
+git config --global core.filemode false
+git config --global core.autocrlf false
+git config --global branch.autosetuprebase always
+fi
+export BUILD_TOOLS_SHA=8246e57791b0af4ae5975eb96f09855f9269b1cd
 npm i -g @electron/build-tools
 e auto-update disable
+if [ "$(expr substr $(uname -s) 1 10)" == "MSYS_NT-10" ]; then
+e d cipd.bat --version
+cp "C:\Python37\python.exe" "C:\Python37\python3.exe"
+fi

View file

@@ -0,0 +1,21 @@
+name: 'Install Dependencies'
+description: 'Installs yarn depdencies using cache when available'
+runs:
+using: "composite"
+steps:
+- name: Get yarn cache directory path
+shell: bash
+id: yarn-cache-dir-path
+run: echo "dir=$(node src/electron/script/yarn cache dir)" >> $GITHUB_OUTPUT
+- uses: actions/cache@0c45773b623bea8c8e75f6c82b208c3cf94ea4f9
+id: yarn-cache
+with:
+path: ${{ steps.yarn-cache-dir-path.outputs.dir }}
+key: ${{ runner.os }}-yarn-${{ hashFiles('src/electron/yarn.lock') }}
+restore-keys: |
+${{ runner.os }}-yarn-
+- name: Install Dependencies
+shell: bash
+run: |
+cd src/electron
+node script/yarn install --frozen-lockfile --prefer-offline

View file

@@ -18,6 +18,11 @@ on:
 description: 'Skip Linux builds'
 default: false
 required: false
+skip-windows:
+type: boolean
+description: 'Skip Windows builds'
+default: false
+required: false
 skip-lint:
 type: boolean
 description: 'Skip lint check'
@@ -29,6 +34,10 @@ on:
 - '[1-9][0-9]-x-y'
 pull_request:
+defaults:
+run:
+shell: bash
 jobs:
 setup:
 runs-on: ubuntu-latest
@@ -40,7 +49,9 @@ jobs:
 build-image-sha: ${{ steps.set-output.outputs.build-image-sha }}
 docs-only: ${{ steps.set-output.outputs.docs-only }}
 steps:
-- uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 #v4.0.2
+- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.0.2
+with:
+ref: ${{ github.event.pull_request.head.sha }}
 - uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 # v3.0.2
 id: filter
 with:
@@ -98,6 +109,7 @@ jobs:
 with:
 path: src/electron
 fetch-depth: 0
+ref: ${{ github.event.pull_request.head.sha }}
 - name: Checkout & Sync & Save
 uses: ./src/electron/.github/actions/checkout
 with:
@@ -124,9 +136,68 @@
 with:
 path: src/electron
 fetch-depth: 0
+ref: ${{ github.event.pull_request.head.sha }}
 - name: Checkout & Sync & Save
 uses: ./src/electron/.github/actions/checkout
+checkout-windows:
+needs: setup
+if: ${{ needs.setup.outputs.src == 'true' && !inputs.skip-windows }}
+runs-on: electron-arc-linux-amd64-32core
+container:
+image: ghcr.io/electron/build:${{ needs.setup.outputs.build-image-sha }}
+options: --user root --device /dev/fuse --cap-add SYS_ADMIN
+volumes:
+- /mnt/cross-instance-cache:/mnt/cross-instance-cache
+env:
+GCLIENT_EXTRA_ARGS: '--custom-var=checkout_win=True'
+TARGET_OS: 'win'
+ELECTRON_DEPOT_TOOLS_WIN_TOOLCHAIN: '1'
+outputs:
+build-image-sha: ${{ needs.setup.outputs.build-image-sha}}
+steps:
+- name: Checkout Electron
+uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
+with:
+path: src/electron
+fetch-depth: 0
+ref: ${{ github.event.pull_request.head.sha }}
+- name: Checkout & Sync & Save
+uses: ./src/electron/.github/actions/checkout
+# GN Check Jobs
+macos-gn-check:
+uses: ./.github/workflows/pipeline-segment-electron-gn-check.yml
+needs: checkout-macos
+with:
+target-platform: macos
+target-archs: x64 arm64
+check-runs-on: macos-14
+gn-build-type: testing
+secrets: inherit
+linux-gn-check:
+uses: ./.github/workflows/pipeline-segment-electron-gn-check.yml
+needs: checkout-linux
+with:
+target-platform: linux
+target-archs: x64 arm arm64
+check-runs-on: electron-arc-linux-amd64-8core
+check-container: '{"image":"ghcr.io/electron/build:${{ needs.checkout-linux.outputs.build-image-sha }}","options":"--user root","volumes":["/mnt/cross-instance-cache:/mnt/cross-instance-cache"]}'
+gn-build-type: testing
+secrets: inherit
+windows-gn-check:
+uses: ./.github/workflows/pipeline-segment-electron-gn-check.yml
+needs: checkout-windows
+with:
+target-platform: win
+target-archs: x64 x86 arm64
+check-runs-on: electron-arc-linux-amd64-8core
+check-container: '{"image":"ghcr.io/electron/build:${{ needs.checkout-windows.outputs.build-image-sha }}","options":"--user root --device /dev/fuse --cap-add SYS_ADMIN","volumes":["/mnt/cross-instance-cache:/mnt/cross-instance-cache"]}'
+gn-build-type: testing
+secrets: inherit
 # Build Jobs - These cascade into testing jobs
 macos-x64:
 permissions:
@@ -137,7 +208,6 @@ jobs:
 needs: checkout-macos
 with:
 build-runs-on: macos-14-xlarge
-check-runs-on: macos-14
 test-runs-on: macos-13
 target-platform: macos
 target-arch: x64
@@ -156,7 +226,6 @@
 needs: checkout-macos
 with:
 build-runs-on: macos-14-xlarge
-check-runs-on: macos-14
 test-runs-on: macos-14
 target-platform: macos
 target-arch: arm64
@@ -175,7 +244,6 @@
 needs: checkout-linux
 with:
 build-runs-on: electron-arc-linux-amd64-32core
-check-runs-on: electron-arc-linux-amd64-8core
 test-runs-on: electron-arc-linux-amd64-4core
 build-container: '{"image":"ghcr.io/electron/build:${{ needs.checkout-linux.outputs.build-image-sha }}","options":"--user root","volumes":["/mnt/cross-instance-cache:/mnt/cross-instance-cache"]}'
 test-container: '{"image":"ghcr.io/electron/build:${{ needs.checkout-linux.outputs.build-image-sha }}","options":"--user root --privileged --init"}'
@@ -196,7 +264,6 @@
 needs: checkout-linux
 with:
 build-runs-on: electron-arc-linux-amd64-32core
-check-runs-on: electron-arc-linux-amd64-8core
 test-runs-on: electron-arc-linux-amd64-4core
 build-container: '{"image":"ghcr.io/electron/build:${{ needs.checkout-linux.outputs.build-image-sha }}","options":"--user root","volumes":["/mnt/cross-instance-cache:/mnt/cross-instance-cache"]}'
 test-container: '{"image":"ghcr.io/electron/build:${{ needs.checkout-linux.outputs.build-image-sha }}","options":"--user root --privileged --init"}'
@@ -218,7 +285,6 @@
 needs: checkout-linux
 with:
 build-runs-on: electron-arc-linux-amd64-32core
-check-runs-on: electron-arc-linux-amd64-8core
 test-runs-on: electron-arc-linux-arm64-4core
 build-container: '{"image":"ghcr.io/electron/build:${{ needs.checkout-linux.outputs.build-image-sha }}","options":"--user root","volumes":["/mnt/cross-instance-cache:/mnt/cross-instance-cache"]}'
 test-container: '{"image":"ghcr.io/electron/test:arm32v7-${{ needs.checkout-linux.outputs.build-image-sha }}","options":"--user root --privileged --init","volumes":["/home/runner/externals:/mnt/runner-externals"]}'
@@ -239,7 +305,6 @@
 needs: checkout-linux
 with:
 build-runs-on: electron-arc-linux-amd64-32core
-check-runs-on: electron-arc-linux-amd64-8core
 test-runs-on: electron-arc-linux-arm64-4core
 build-container: '{"image":"ghcr.io/electron/build:${{ needs.checkout-linux.outputs.build-image-sha }}","options":"--user root","volumes":["/mnt/cross-instance-cache:/mnt/cross-instance-cache"]}'
 test-container: '{"image":"ghcr.io/electron/test:arm64v8-${{ needs.checkout-linux.outputs.build-image-sha }}","options":"--user root --privileged --init"}'
@@ -251,10 +316,67 @@
 upload-to-storage: '0'
 secrets: inherit
+windows-x64:
+permissions:
+contents: read
+issues: read
+pull-requests: read
+uses: ./.github/workflows/pipeline-electron-build-and-test.yml
+needs: setup
+if: ${{ needs.setup.outputs.src == 'true' && !inputs.skip-windows }}
+with:
+build-runs-on: electron-arc-windows-amd64-16core
+test-runs-on: windows-latest
+target-platform: win
+target-arch: x64
+is-release: false
+gn-build-type: testing
+generate-symbols: false
+upload-to-storage: '0'
+secrets: inherit
+windows-x86:
+permissions:
+contents: read
+issues: read
+pull-requests: read
+uses: ./.github/workflows/pipeline-electron-build-and-test.yml
+needs: setup
+if: ${{ needs.setup.outputs.src == 'true' && !inputs.skip-windows }}
+with:
+build-runs-on: electron-arc-windows-amd64-16core
+test-runs-on: windows-latest
+target-platform: win
+target-arch: x86
+is-release: false
+gn-build-type: testing
+generate-symbols: false
+upload-to-storage: '0'
+secrets: inherit
+windows-arm64:
+permissions:
+contents: read
+issues: read
+pull-requests: read
+uses: ./.github/workflows/pipeline-electron-build-and-test.yml
+needs: setup
+if: ${{ needs.setup.outputs.src == 'true' && !inputs.skip-windows }}
+with:
+build-runs-on: electron-arc-windows-amd64-16core
+test-runs-on: electron-hosted-windows-arm64-4core
+target-platform: win
+target-arch: arm64
+is-release: false
+gn-build-type: testing
+generate-symbols: false
+upload-to-storage: '0'
+secrets: inherit
 gha-done:
 name: GitHub Actions Completed
 runs-on: ubuntu-latest
-needs: [docs-only, macos-x64, macos-arm64, linux-x64, linux-x64-asan, linux-arm, linux-arm64]
+needs: [docs-only, macos-x64, macos-arm64, linux-x64, linux-x64-asan, linux-arm, linux-arm64, windows-x64, windows-x86, windows-arm64]
 if: always() && !contains(needs.*.result, 'failure')
 steps:
 - name: GitHub Actions Jobs Done

View file

@@ -1,14 +0,0 @@
-diff --git a/gclient.py b/gclient.py
-index 59e2b4c5197928bdba1ef69bdbe637d7dfe471c1..b4bae5e48c83c84bd867187afaf40eed16e69851 100755
---- a/gclient.py
-+++ b/gclient.py
-@@ -783,7 +783,8 @@ class Dependency(gclient_utils.WorkItem, DependencySettings):
-not condition or "non_git_source" not in condition):
-continue
-cipd_root = self.GetCipdRoot()
-- for package in dep_value.get('packages', []):
-+ packages = dep_value.get('packages', [])
-+ for package in (x for x in packages if "infra/3pp/tools/swift-format" not in x.get('package')):
-deps_to_add.append(
-CipdDependency(parent=self,
-name=name,

View file

@@ -5,7 +5,7 @@ on:
 inputs:
 target-platform:
 type: string
-description: 'Platform to run on, can be macos or linux'
+description: 'Platform to run on, can be macos, win or linux.'
 required: true
 target-arch:
 type: string
@@ -15,10 +15,6 @@
 type: string
 description: 'What host to run the build'
 required: true
-check-runs-on:
-type: string
-description: 'What host to run the gn-check'
-required: true
 test-runs-on:
 type: string
 description: 'What host to run the tests on'
@@ -76,16 +72,6 @@
 generate-symbols: ${{ inputs.generate-symbols }}
 upload-to-storage: ${{ inputs.upload-to-storage }}
 secrets: inherit
-gn-check:
-uses: ./.github/workflows/pipeline-segment-electron-gn-check.yml
-with:
-target-platform: ${{ inputs.target-platform }}
-target-arch: ${{ inputs.target-arch }}
-check-runs-on: ${{ inputs.check-runs-on }}
-check-container: ${{ inputs.build-container }}
-gn-build-type: ${{ inputs.gn-build-type }}
-is-asan: ${{ inputs.is-asan }}
-secrets: inherit
 test:
 uses: ./.github/workflows/pipeline-segment-electron-test.yml
 needs: build

View file

@@ -5,7 +5,7 @@ on:
 inputs:
 target-platform:
 type: string
-description: 'Platform to run on, can be macos or linux'
+description: 'Platform to run on, can be macos, win or linux'
 required: true
 target-arch:
 type: string
@@ -15,10 +15,6 @@
 type: string
 description: 'What host to run the build'
 required: true
-check-runs-on:
-type: string
-description: 'What host to run the gn-check'
-required: true
 test-runs-on:
 type: string
 description: 'What host to run the tests on'
@@ -82,16 +78,6 @@
 upload-to-storage: ${{ inputs.upload-to-storage }}
 is-asan: ${{ inputs.is-asan}}
 secrets: inherit
-gn-check:
-uses: ./.github/workflows/pipeline-segment-electron-gn-check.yml
-with:
-target-platform: ${{ inputs.target-platform }}
-target-arch: ${{ inputs.target-arch }}
-check-runs-on: ${{ inputs.check-runs-on }}
-check-container: ${{ inputs.build-container }}
-gn-build-type: ${{ inputs.gn-build-type }}
-is-asan: ${{ inputs.is-asan }}
-secrets: inherit
 test:
 uses: ./.github/workflows/pipeline-segment-electron-test.yml
 needs: build

View file

@@ -24,10 +24,9 @@ jobs:
 with:
 path: src/electron
 fetch-depth: 0
+ref: ${{ github.event.pull_request.head.sha }}
 - name: Install Dependencies
-run: |
-cd src/electron
-node script/yarn install --frozen-lockfile
+uses: ./src/electron/.github/actions/install-dependencies
 - name: Run TS/JS compile
 shell: bash
 run: |

View file

@@ -24,10 +24,9 @@ jobs:
 with:
 path: src/electron
 fetch-depth: 0
+ref: ${{ github.event.pull_request.head.sha }}
 - name: Install Dependencies
-run: |
-cd src/electron
-node script/yarn install --frozen-lockfile
+uses: ./src/electron/.github/actions/install-dependencies
 - name: Setup third_party Depot Tools
 shell: bash
 run: |

View file

@@ -9,7 +9,7 @@ on:
 type: string
 target-platform:
 type: string
-description: 'Platform to run on, can be macos or linux'
+description: 'Platform to run on, can be macos, win or linux'
 required: true
 target-arch:
 type: string
@@ -69,11 +69,14 @@ env:
 ELECTRON_RBE_JWT: ${{ secrets.ELECTRON_RBE_JWT }}
 SUDOWOODO_EXCHANGE_URL: ${{ secrets.SUDOWOODO_EXCHANGE_URL }}
 SUDOWOODO_EXCHANGE_TOKEN: ${{ secrets.SUDOWOODO_EXCHANGE_TOKEN }}
-GCLIENT_EXTRA_ARGS: ${{ inputs.target-platform == 'macos' && '--custom-var=checkout_mac=True --custom-var=host_os=mac' || '--custom-var=checkout_arm=True --custom-var=checkout_arm64=True' }}
+GCLIENT_EXTRA_ARGS: ${{ inputs.target-platform == 'macos' && '--custom-var=checkout_mac=True --custom-var=host_os=mac' || inputs.target-platform == 'win' && '--custom-var=checkout_win=True' || '--custom-var=checkout_arm=True --custom-var=checkout_arm64=True' }}
 ELECTRON_OUT_DIR: Default
 jobs:
 build:
+defaults:
+run:
+shell: bash
 runs-on: ${{ inputs.build-runs-on }}
 container: ${{ fromJSON(inputs.build-container) }}
 environment: ${{ inputs.environment }}
@@ -81,12 +84,14 @@
 TARGET_ARCH: ${{ inputs.target-arch }}
 steps:
 - name: Create src dir
-run: mkdir src
+run: |
+mkdir src
 - name: Checkout Electron
 uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938
 with:
 path: src/electron
 fetch-depth: 0
+ref: ${{ github.event.pull_request.head.sha }}
 - name: Free up space (macOS)
 if: ${{ inputs.target-platform == 'macos' }}
 uses: ./src/electron/.github/actions/free-space-macos
@@ -101,9 +106,7 @@
 cache: yarn
 cache-dependency-path: src/electron/yarn.lock
 - name: Install Dependencies
-run: |
-cd src/electron
-node script/yarn install --frozen-lockfile
+uses: ./src/electron/.github/actions/install-dependencies
 - name: Install AZCopy
 if: ${{ inputs.target-platform == 'macos' }}
 run: brew install azcopy
@@ -137,16 +140,13 @@
 # Ensure depot_tools does not update.
 test -d depot_tools && cd depot_tools
-if [ "`uname`" = "Linux" ]; then
-git apply --3way ../src/electron/.github/workflows/config/gclient.diff
-fi
 touch .disable_auto_update
 - name: Add Depot Tools to PATH
 run: echo "$(pwd)/depot_tools" >> $GITHUB_PATH
 - name: Generate DEPS Hash
 run: |
-node src/electron/script/generate-deps-hash.js && cat src/electron/.depshash-target
-DEPSHASH=v1-src-cache-$(shasum src/electron/.depshash | cut -f1 -d' ')
+node src/electron/script/generate-deps-hash.js
+DEPSHASH=v1-src-cache-$(cat src/electron/.depshash)
 echo "DEPSHASH=$DEPSHASH" >> $GITHUB_ENV
 echo "CACHE_PATH=$DEPSHASH.tar" >> $GITHUB_ENV
 - name: Restore src cache via AZCopy
@@ -155,11 +155,17 @@
 - name: Restore src cache via AKS
 if: ${{ inputs.target-platform == 'linux' }}
 uses: ./src/electron/.github/actions/restore-cache-aks
+- name: Checkout src via gclient sync
+if: ${{ inputs.target-platform == 'win' }}
+uses: ./src/electron/.github/actions/checkout
+with:
+use-cache: 'false'
 - name: Checkout Electron
 uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938
 with:
 path: src/electron
 fetch-depth: 0
+ref: ${{ github.event.pull_request.head.sha }}
 - name: Install Build Tools
 uses: ./src/electron/.github/actions/install-build-tools
 - name: Init Build Tools
@@ -167,11 +173,11 @@
 e init -f --root=$(pwd) --out=Default ${{ inputs.gn-build-type }} --import ${{ inputs.gn-build-type }} --target-cpu ${{ inputs.target-arch }}
 - name: Run Electron Only Hooks
 run: |
-gclient runhooks --spec="solutions=[{'name':'src/electron','url':None,'deps_file':'DEPS','custom_vars':{'process_deps':False},'managed':False}]"
+e d gclient runhooks --spec="solutions=[{'name':'src/electron','url':None,'deps_file':'DEPS','custom_vars':{'process_deps':False},'managed':False}]"
 - name: Regenerate DEPS Hash
 run: |
-(cd src/electron && git checkout .) && node src/electron/script/generate-deps-hash.js && cat src/electron/.depshash-target
-echo "DEPSHASH=$(shasum src/electron/.depshash | cut -f1 -d' ')" >> $GITHUB_ENV
+(cd src/electron && git checkout .) && node src/electron/script/generate-deps-hash.js
+echo "DEPSHASH=$(cat src/electron/.depshash)" >> $GITHUB_ENV
 - name: Add CHROMIUM_BUILDTOOLS_PATH to env
 run: echo "CHROMIUM_BUILDTOOLS_PATH=$(pwd)/src/buildtools" >> $GITHUB_ENV
 - name: Fix Sync (macOS)
@@ -179,7 +185,7 @@
 uses: ./src/electron/.github/actions/fix-sync-macos
 - name: Setup Number of Ninja Processes
 run: |
-echo "NUMBER_OF_NINJA_PROCESSES=${{ inputs.target-platform == 'linux' && '300' || '200' }}" >> $GITHUB_ENV
+echo "NUMBER_OF_NINJA_PROCESSES=${{ inputs.target-platform != 'macos' && '300' || '200' }}" >> $GITHUB_ENV
 - name: Free up space (macOS)
 if: ${{ inputs.target-platform == 'macos' }}
 uses: ./src/electron/.github/actions/free-space-macos
@@ -189,7 +195,7 @@
 with:
 target-arch: ${{ inputs.target-arch }}
 target-platform: ${{ inputs.target-platform }}
-artifact-platform: ${{ inputs.target-platform == 'linux' && 'linux' || 'darwin' }}
+artifact-platform: ${{ inputs.target-platform == 'macos' && 'darwin' || inputs.target-platform }}
 is-release: '${{ inputs.is-release }}'
 generate-symbols: '${{ inputs.generate-symbols }}'
 strip-binaries: '${{ inputs.strip-binaries }}'

View file

@@ -5,11 +5,11 @@ on:
 inputs:
 target-platform:
 type: string
-description: 'Platform to run on, can be macos or linux'
+description: 'Platform to run on, can be macos, win or linux'
 required: true
-target-arch:
+target-archs:
 type: string
-description: 'Arch to build for, can be x64, arm64 or arm'
+description: 'Archs to check for, can be x64, x86, arm64 or arm space separated'
 required: true
 check-runs-on:
 type: string
@@ -25,35 +25,30 @@
 required: true
 type: string
 default: testing
-is-asan:
-description: 'Building the Address Sanitizer (ASan) Linux build'
-required: false
-type: boolean
-default: false
 concurrency:
-group: electron-gn-check-${{ inputs.target-platform }}-${{ inputs.target-arch }}-${{ inputs.is-asan }}-${{ github.ref }}
+group: electron-gn-check-${{ inputs.target-platform }}-${{ github.ref }}
 cancel-in-progress: true
 env:
 ELECTRON_RBE_JWT: ${{ secrets.ELECTRON_RBE_JWT }}
-GCLIENT_EXTRA_ARGS: ${{ inputs.target-platform == 'macos' && '--custom-var=checkout_mac=True --custom-var=host_os=mac' || '--custom-var=checkout_arm=True --custom-var=checkout_arm64=True' }}
+GCLIENT_EXTRA_ARGS: ${{ inputs.target-platform == 'macos' && '--custom-var=checkout_mac=True --custom-var=host_os=mac' || (inputs.target-platform == 'linux' && '--custom-var=checkout_arm=True --custom-var=checkout_arm64=True' || '--custom-var=checkout_win=True') }}
 ELECTRON_OUT_DIR: Default
-TARGET_ARCH: ${{ inputs.target-arch }}
 jobs:
 gn-check:
-# TODO(codebytere): Change this to medium VM
+defaults:
+run:
+shell: bash
 runs-on: ${{ inputs.check-runs-on }}
 container: ${{ fromJSON(inputs.check-container) }}
-env:
-TARGET_ARCH: ${{ inputs.target-arch }}
 steps:
 - name: Checkout Electron
 uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938
 with:
 path: src/electron
 fetch-depth: 0
+ref: ${{ github.event.pull_request.head.sha }}
 - name: Cleanup disk space on macOS
 if: ${{ inputs.target-platform == 'macos' }}
 shell: bash
@@ -73,58 +68,40 @@
 run: df -h
 - name: Install Build Tools
 uses: ./src/electron/.github/actions/install-build-tools
-- name: Init Build Tools
+- name: Enable windows toolchain
+if: ${{ inputs.target-platform == 'win' }}
 run: |
-e init -f --root=$(pwd) --out=Default ${{ inputs.gn-build-type }} --import ${{ inputs.gn-build-type }} --target-cpu ${{ inputs.target-arch }}
+echo "ELECTRON_DEPOT_TOOLS_WIN_TOOLCHAIN=1" >> $GITHUB_ENV
-- name: Get Depot Tools
-timeout-minutes: 5
-run: |
-git clone --filter=tree:0 https://chromium.googlesource.com/chromium/tools/depot_tools.git
-SEDOPTION="-i"
-if [ "`uname`" = "Darwin" ]; then
-SEDOPTION="-i ''"
-fi
-# remove ninjalog_uploader_wrapper.py from autoninja since we don't use it and it causes problems
-sed $SEDOPTION '/ninjalog_uploader_wrapper.py/d' ./depot_tools/autoninja
-# Ensure depot_tools does not update.
-test -d depot_tools && cd depot_tools
-if [ "`uname`" = "Linux" ]; then
-git apply --3way ../src/electron/.github/workflows/config/gclient.diff
-fi
-touch .disable_auto_update
-- name: Add Depot Tools to PATH
-run: echo "$(pwd)/depot_tools" >> $GITHUB_PATH
-- name: Set GN_EXTRA_ARGS for Linux
-if: ${{ inputs.target-platform == 'linux' }}
-run: |
-if [ "${{ inputs.target-arch }}" = "arm" ]; then
-GN_EXTRA_ARGS='build_tflite_with_xnnpack=false'
-elif [ "${{ inputs.target-arch }}" = "arm64" ]; then
-GN_EXTRA_ARGS='fatal_linker_warnings=false enable_linux_installer=false'
-fi
-echo "GN_EXTRA_ARGS=$GN_EXTRA_ARGS" >> $GITHUB_ENV
 - name: Generate DEPS Hash
 run: |
-node src/electron/script/generate-deps-hash.js && cat src/electron/.depshash-target
-DEPSHASH=v1-src-cache-$(shasum src/electron/.depshash | cut -f1 -d' ')
+node src/electron/script/generate-deps-hash.js
+DEPSHASH=v1-src-cache-$(cat src/electron/.depshash)
 echo "DEPSHASH=$DEPSHASH" >> $GITHUB_ENV
 echo "CACHE_PATH=$DEPSHASH.tar" >> $GITHUB_ENV
 - name: Restore src cache via AZCopy
 if: ${{ inputs.target-platform == 'macos' }}
 uses: ./src/electron/.github/actions/restore-cache-azcopy
 - name: Restore src cache via AKS
-if: ${{ inputs.target-platform == 'linux' }}
+if: ${{ inputs.target-platform == 'linux' || inputs.target-platform == 'win' }}
 uses: ./src/electron/.github/actions/restore-cache-aks
 - name: Run Electron Only Hooks
 run: |
-gclient runhooks --spec="solutions=[{'name':'src/electron','url':None,'deps_file':'DEPS','custom_vars':{'process_deps':False},'managed':False}]"
+echo "solutions=[{'name':'src/electron','url':None,'deps_file':'DEPS','custom_vars':{'process_deps':False},'managed':False}]" > tmpgclient
+if [ "${{ inputs.target-platform }}" = "win" ]; then
+echo "solutions=[{'name':'src/electron','url':None,'deps_file':'DEPS','custom_vars':{'process_deps':False,'install_sysroot':False,'checkout_win':True},'managed':False}]" > tmpgclient
+echo "target_os=['win']" >> tmpgclient
+fi
+e d gclient runhooks --gclientfile=tmpgclient
+# Fix VS Toolchain
+if [ "${{ inputs.target-platform }}" = "win" ]; then
+rm -rf src/third_party/depot_tools/win_toolchain/vs_files
+e d python3 src/build/vs_toolchain.py update --force
+fi
 - name: Regenerate DEPS Hash
 run: |
-(cd src/electron && git checkout .) && node src/electron/script/generate-deps-hash.js && cat src/electron/.depshash-target
-echo "DEPSHASH=$(shasum src/electron/.depshash | cut -f1 -d' ')" >> $GITHUB_ENV
+(cd src/electron && git checkout .) && node src/electron/script/generate-deps-hash.js
+echo "DEPSHASH=$(cat src/electron/.depshash)" >> $GITHUB_ENV
 - name: Add CHROMIUM_BUILDTOOLS_PATH to env
 run: echo "CHROMIUM_BUILDTOOLS_PATH=$(pwd)/src/buildtools" >> $GITHUB_ENV
 - name: Checkout Electron
@@ -132,30 +109,46 @@
 with:
 path: src/electron
 fetch-depth: 0
+ref: ${{ github.event.pull_request.head.sha }}
 - name: Install Dependencies
-run: |
-cd src/electron
-node script/yarn install --frozen-lockfile
+uses: ./src/electron/.github/actions/install-dependencies
 - name: Default GN gen
 run: |
 cd src/electron
 git pack-refs
-cd ..
+- name: Run GN Check for ${{ inputs.target-archs }}
+run: |
+for target_cpu in ${{ inputs.target-archs }}
+do
+e init -f --root=$(pwd) --out=Default ${{ inputs.gn-build-type }} --import ${{ inputs.gn-build-type }} --target-cpu $target_cpu
+cd src
+export GN_EXTRA_ARGS="target_cpu=\"$target_cpu\""
+if [ "${{ inputs.target-platform }}" = "linux" ]; then
+if [ "$target_cpu" = "arm" ]; then
+export GN_EXTRA_ARGS="$GN_EXTRA_ARGS build_tflite_with_xnnpack=false"
+elif [ "$target_cpu" = "arm64" ]; then
+export GN_EXTRA_ARGS="$GN_EXTRA_ARGS fatal_linker_warnings=false enable_linux_installer=false"
+fi
+fi
+if [ "${{ inputs.target-platform }}" = "win" ]; then
+export GN_EXTRA_ARGS="$GN_EXTRA_ARGS use_v8_context_snapshot=true target_os=\"win\""
+fi
 e build --only-gen
-- name: Run GN Check
-run: |
-cd src
-gn check out/Default //electron:electron_lib
-gn check out/Default //electron:electron_app
-gn check out/Default //electron/shell/common:mojo
-gn check out/Default //electron/shell/common:plugin
+e d gn check out/Default //electron:electron_lib
+e d gn check out/Default //electron:electron_app
+e d gn check out/Default //electron/shell/common:mojo
+e d gn check out/Default //electron/shell/common:plugin
 # Check the hunspell filenames
 node electron/script/gen-hunspell-filenames.js --check
 node electron/script/gen-libc++-filenames.js --check
+cd ..
+done
 - name: Wait for active SSH sessions
 if: always() && !cancelled()
+shell: bash
 run: |
 while [ -f /var/.ssh-lock ]
 do

View file

@@ -5,7 +5,7 @@ on:
 inputs:
 target-platform:
 type: string
-description: 'Platform to run on, can be macos or linux'
+description: 'Platform to run on, can be macos, win or linux'
 required: true
 target-arch:
 type: string
@@ -41,22 +41,44 @@
 jobs:
 test:
+defaults:
+run:
+shell: bash
 runs-on: ${{ inputs.test-runs-on }}
 container: ${{ fromJSON(inputs.test-container) }}
 strategy:
 fail-fast: false
 matrix:
-build-type: ${{ inputs.target-platform == 'macos' && fromJSON('["darwin","mas"]') || fromJSON('["linux"]') }}
-shard: ${{ inputs.target-platform == 'macos' && fromJSON('[1, 2]') || fromJSON('[1, 2, 3]') }}
+build-type: ${{ inputs.target-platform == 'macos' && fromJSON('["darwin","mas"]') || (inputs.target-platform == 'win' && fromJSON('["win"]') || fromJSON('["linux"]')) }}
+shard: ${{ inputs.target-platform == 'linux' && fromJSON('[1, 2, 3]') || fromJSON('[1, 2]') }}
 env:
 BUILD_TYPE: ${{ matrix.build-type }}
 TARGET_ARCH: ${{ inputs.target-arch }}
 ARTIFACT_KEY: ${{ matrix.build-type }}_${{ inputs.target-arch }}
 steps:
 - name: Fix node20 on arm32 runners
-if: ${{ inputs.target-arch == 'arm' }}
+if: ${{ inputs.target-arch == 'arm' && inputs.target-platform == 'linux' }}
 run: |
 cp $(which node) /mnt/runner-externals/node20/bin/
+- name: Install Git on Windows arm64 runners
+if: ${{ inputs.target-arch == 'arm64' && inputs.target-platform == 'win' }}
+shell: powershell
+run: |
+Set-ExecutionPolicy Bypass -Scope Process -Force
+[System.Net.ServicePointManager]::SecurityProtocol = [System.Net.ServicePointManager]::SecurityProtocol -bor 3072
+iex ((New-Object System.Net.WebClient).DownloadString('https://community.chocolatey.org/install.ps1'))
+choco install -y --no-progress git.install --params "'/GitAndUnixToolsOnPath'"
+choco install -y --no-progress git
+choco install -y --no-progress python --version 3.11.9
+choco install -y --no-progress visualstudio2022-workload-vctools --package-parameters "--add Microsoft.VisualStudio.Component.VC.Tools.ARM64"
+echo "C:\Program Files\Git\cmd" | Out-File -FilePath $env:GITHUB_PATH -Encoding utf8 -Append
+echo "C:\Program Files\Git\bin" | Out-File -FilePath $env:GITHUB_PATH -Encoding utf8 -Append
+echo "C:\Python311" | Out-File -FilePath $env:GITHUB_PATH -Encoding utf8 -Append
+- name: Setup Node.js/npm
+if: ${{ inputs.target-platform == 'win' }}
+uses: actions/setup-node@0a44ba7841725637a19e28fa30b79a866c81b0a6
+with:
+node-version: 20.11.x
 - name: Add TCC permissions on macOS
 if: ${{ inputs.target-platform == 'macos' }}
 run: |
@@ -95,24 +117,18 @@
 with:
 path: src/electron
 fetch-depth: 0
+ref: ${{ github.event.pull_request.head.sha }}
 - name: Install Dependencies
-run: |
-cd src/electron
-node script/yarn install --frozen-lockfile
+uses: ./src/electron/.github/actions/install-dependencies
 - name: Get Depot Tools
 timeout-minutes: 5
 run: |
+git config --global core.filemode false
+git config --global core.autocrlf false
+git config --global branch.autosetuprebase always
 git clone --filter=tree:0 https://chromium.googlesource.com/chromium/tools/depot_tools.git
 # Ensure depot_tools does not update.
 test -d depot_tools && cd depot_tools
-if [ "`uname`" = "Darwin" ]; then
-# remove ninjalog_uploader_wrapper.py from autoninja since we don't use it and it causes problems
-sed -i '' '/ninjalog_uploader_wrapper.py/d' ./autoninja
-else
-sed -i '/ninjalog_uploader_wrapper.py/d' ./autoninja
-# Remove swift-format dep from cipd on macOS until we send a patch upstream.
-git apply --3way ../src/electron/.github/workflows/config/gclient.diff
-fi
 touch .disable_auto_update
 - name: Add Depot Tools to PATH
 run: echo "$(pwd)/depot_tools" >> $GITHUB_PATH
@@ -134,7 +150,17 @@
 path: ./src_artifacts_${{ matrix.build-type }}_${{ inputs.target-arch }}
 - name: Restore Generated Artifacts
 run: ./src/electron/script/actions/restore-artifacts.sh
-- name: Unzip Dist, Mksnapshot & Chromedriver
+- name: Unzip Dist, Mksnapshot & Chromedriver (win)
+if: ${{ inputs.target-platform == 'win' }}
+shell: powershell
+run: |
+Set-ExecutionPolicy Bypass -Scope Process -Force
+cd src/out/Default
+Expand-Archive -Force dist.zip -DestinationPath ./
+Expand-Archive -Force chromedriver.zip -DestinationPath ./
+Expand-Archive -Force mksnapshot.zip -DestinationPath ./
+- name: Unzip Dist, Mksnapshot & Chromedriver (unix)
+if: ${{ inputs.target-platform != 'win' }}
 run: |
 cd src/out/Default
 unzip -:o dist.zip
@ -158,15 +184,24 @@ jobs:
ELECTRON_DISABLE_SECURITY_WARNINGS: 1 ELECTRON_DISABLE_SECURITY_WARNINGS: 1
ELECTRON_SKIP_NATIVE_MODULE_TESTS: true ELECTRON_SKIP_NATIVE_MODULE_TESTS: true
DISPLAY: ':99.0' DISPLAY: ':99.0'
NPM_CONFIG_MSVS_VERSION: '2022'
run: | run: |
cd src/electron cd src/electron
export ELECTRON_TEST_RESULTS_DIR=`pwd`/junit export ELECTRON_TEST_RESULTS_DIR=`pwd`/junit
# Get which tests are on this shard # Get which tests are on this shard
tests_files=$(node script/split-tests ${{ matrix.shard }} ${{ inputs.target-platform == 'macos' && 2 || 3 }}) tests_files=$(node script/split-tests ${{ matrix.shard }} ${{ inputs.target-platform == 'linux' && 3 || 2 }})
# Run tests # Run tests
if [ "`uname`" = "Darwin" ]; then if [ "${{ inputs.target-platform }}" != "linux" ]; then
echo "About to start tests" echo "About to start tests"
if [ "${{ inputs.target-platform }}" = "win" ]; then
if [ "${{ inputs.target-arch }}" = "x86" ]; then
export npm_config_arch="ia32"
fi
if [ "${{ inputs.target-arch }}" = "arm64" ]; then
export ELECTRON_FORCE_TEST_SUITE_EXIT="true"
fi
fi
node script/yarn test --runners=main --trace-uncaught --enable-logging --files $tests_files node script/yarn test --runners=main --trace-uncaught --enable-logging --files $tests_files
else else
chown :builduser .. && chmod g+w .. chown :builduser .. && chmod g+w ..
@ -197,19 +232,21 @@ jobs:
DD_CIVISIBILITY_LOGS_ENABLED: true DD_CIVISIBILITY_LOGS_ENABLED: true
DD_TAGS: "os.architecture:${{ inputs.target-arch }},os.family:${{ inputs.target-platform }},os.platform:${{ inputs.target-platform }},asan:${{ inputs.is-asan }}" DD_TAGS: "os.architecture:${{ inputs.target-arch }},os.family:${{ inputs.target-platform }},os.platform:${{ inputs.target-platform }},asan:${{ inputs.is-asan }}"
run: | run: |
if ! [ -z $DD_API_KEY ]; then if ! [ -z $DD_API_KEY ] && [ -f src/electron/junit/test-results-main.xml ]; then
datadog-ci junit upload src/electron/junit/test-results-main.xml export DATADOG_PATH=`node src/electron/script/yarn global bin`
$DATADOG_PATH/datadog-ci junit upload src/electron/junit/test-results-main.xml
fi fi
if: always() && !cancelled() if: always() && !cancelled()
- name: Upload Test Artifacts - name: Upload Test Artifacts
if: always() && !cancelled() if: always() && !cancelled()
uses: actions/upload-artifact@50769540e7f4bd5e21e526ee35c689e35e0d6874 uses: actions/upload-artifact@50769540e7f4bd5e21e526ee35c689e35e0d6874
with: with:
name: test_artifacts_${{ env.ARTIFACT_KEY }} name: test_artifacts_${{ env.ARTIFACT_KEY }}_${{ matrix.shard }}
path: src/electron/spec/artifacts path: src/electron/spec/artifacts
if-no-files-found: ignore if-no-files-found: ignore
- name: Wait for active SSH sessions - name: Wait for active SSH sessions
if: always() && !cancelled() if: always() && !cancelled()
shell: bash
run: | run: |
while [ -f /var/.ssh-lock ] while [ -f /var/.ssh-lock ]
do do


@ -5,7 +5,7 @@ on:
inputs: inputs:
target-platform: target-platform:
type: string type: string
description: 'Platform to run on, can be macos or linux' description: 'Platform to run on, can be macos, win or linux'
required: true required: true
target-arch: target-arch:
type: string type: string
@ -49,23 +49,20 @@ jobs:
with: with:
path: src/electron path: src/electron
fetch-depth: 0 fetch-depth: 0
ref: ${{ github.event.pull_request.head.sha }}
- name: Install Build Tools - name: Install Build Tools
uses: ./src/electron/.github/actions/install-build-tools uses: ./src/electron/.github/actions/install-build-tools
- name: Init Build Tools - name: Init Build Tools
run: | run: |
e init -f --root=$(pwd) --out=Default ${{ inputs.gn-build-type }} --import ${{ inputs.gn-build-type }} --target-cpu ${{ inputs.target-arch }} e init -f --root=$(pwd) --out=Default ${{ inputs.gn-build-type }} --import ${{ inputs.gn-build-type }} --target-cpu ${{ inputs.target-arch }}
- name: Install Dependencies - name: Install Dependencies
run: | uses: ./src/electron/.github/actions/install-dependencies
cd src/electron
node script/yarn install --frozen-lockfile
- name: Get Depot Tools - name: Get Depot Tools
timeout-minutes: 5 timeout-minutes: 5
run: | run: |
git clone --filter=tree:0 https://chromium.googlesource.com/chromium/tools/depot_tools.git git clone --filter=tree:0 https://chromium.googlesource.com/chromium/tools/depot_tools.git
sed -i '/ninjalog_uploader_wrapper.py/d' ./depot_tools/autoninja
# Ensure depot_tools does not update. # Ensure depot_tools does not update.
test -d depot_tools && cd depot_tools test -d depot_tools && cd depot_tools
git apply --3way ../src/electron/.github/workflows/config/gclient.diff
touch .disable_auto_update touch .disable_auto_update
- name: Add Depot Tools to PATH - name: Add Depot Tools to PATH
run: echo "$(pwd)/depot_tools" >> $GITHUB_PATH run: echo "$(pwd)/depot_tools" >> $GITHUB_PATH
@ -93,6 +90,7 @@ jobs:
node electron/script/node-spec-runner.js --default --jUnitDir=junit node electron/script/node-spec-runner.js --default --jUnitDir=junit
- name: Wait for active SSH sessions - name: Wait for active SSH sessions
if: always() && !cancelled() if: always() && !cancelled()
shell: bash
run: | run: |
while [ -f /var/.ssh-lock ] while [ -f /var/.ssh-lock ]
do do
@ -112,23 +110,20 @@ jobs:
with: with:
path: src/electron path: src/electron
fetch-depth: 0 fetch-depth: 0
ref: ${{ github.event.pull_request.head.sha }}
- name: Install Build Tools - name: Install Build Tools
uses: ./src/electron/.github/actions/install-build-tools uses: ./src/electron/.github/actions/install-build-tools
- name: Init Build Tools - name: Init Build Tools
run: | run: |
e init -f --root=$(pwd) --out=Default ${{ inputs.gn-build-type }} e init -f --root=$(pwd) --out=Default ${{ inputs.gn-build-type }}
- name: Install Dependencies - name: Install Dependencies
run: | uses: ./src/electron/.github/actions/install-dependencies
cd src/electron
node script/yarn install --frozen-lockfile
- name: Get Depot Tools - name: Get Depot Tools
timeout-minutes: 5 timeout-minutes: 5
run: | run: |
git clone --filter=tree:0 https://chromium.googlesource.com/chromium/tools/depot_tools.git git clone --filter=tree:0 https://chromium.googlesource.com/chromium/tools/depot_tools.git
sed -i '/ninjalog_uploader_wrapper.py/d' ./depot_tools/autoninja
# Ensure depot_tools does not update. # Ensure depot_tools does not update.
test -d depot_tools && cd depot_tools test -d depot_tools && cd depot_tools
git apply --3way ../src/electron/.github/workflows/config/gclient.diff
touch .disable_auto_update touch .disable_auto_update
- name: Add Depot Tools to PATH - name: Add Depot Tools to PATH
run: echo "$(pwd)/depot_tools" >> $GITHUB_PATH run: echo "$(pwd)/depot_tools" >> $GITHUB_PATH
@ -155,6 +150,7 @@ jobs:
cd src cd src
node electron/script/nan-spec-runner.js node electron/script/nan-spec-runner.js
- name: Wait for active SSH sessions - name: Wait for active SSH sessions
shell: bash
if: always() && !cancelled() if: always() && !cancelled()
run: | run: |
while [ -f /var/.ssh-lock ] while [ -f /var/.ssh-lock ]


@ -23,6 +23,11 @@ jobs:
with: with:
fetch-depth: 0 fetch-depth: 0
token: ${{ steps.generate-token.outputs.token }} token: ${{ steps.generate-token.outputs.token }}
ref: ${{ github.event.pull_request.head.sha }}
- name: Setup Node.js
uses: actions/setup-node@39370e3970a6d050c480ffad4ff0ed4d3fdee5af # v4.1.0
with:
node-version: 20.11.x
- name: Yarn install - name: Yarn install
run: | run: |
node script/yarn.js install --frozen-lockfile node script/yarn.js install --frozen-lockfile

.gitignore

@ -48,7 +48,6 @@ ts-gen
# Used to accelerate CI builds # Used to accelerate CI builds
.depshash .depshash
.depshash-target
# Used to accelerate builds after sync # Used to accelerate builds after sync
patches/mtime-cache.json patches/mtime-cache.json


@ -187,7 +187,12 @@ for:
7z a pdb.zip out\Default\*.pdb 7z a pdb.zip out\Default\*.pdb
} }
- ps: | - ps: |
$manifest_file = "electron/script/zip_manifests/dist_zip.win.$env:TARGET_ARCH.manifest" if ($env:TARGET_ARCH -eq 'ia32') {
$env:MANIFEST_ARCH = "x86"
} else {
$env:MANIFEST_ARCH = $env:TARGET_ARCH
}
$manifest_file = "electron/script/zip_manifests/dist_zip.win.$env:MANIFEST_ARCH.manifest"
python3 electron/script/zip_manifests/check-zip-manifest.py out/Default/dist.zip $manifest_file python3 electron/script/zip_manifests/check-zip-manifest.py out/Default/dist.zip $manifest_file
if ($LASTEXITCODE -ne 0) { if ($LASTEXITCODE -ne 0) {
throw "Zip contains files not listed in the manifest $manifest_file" throw "Zip contains files not listed in the manifest $manifest_file"


@ -59,7 +59,7 @@ def skip_path(dep, dist_zip, target_cpu):
and dep == "snapshot_blob.bin" and dep == "snapshot_blob.bin"
) )
) )
if should_skip: if should_skip and os.environ.get('ELECTRON_DEBUG_ZIP_SKIP') == '1':
print("Skipping {}".format(dep)) print("Skipping {}".format(dep))
return should_skip return should_skip


@ -1,6 +1,8 @@
#!/bin/bash #!/bin/bash
if [ "`uname`" == "Darwin" ]; then if [ "$(expr substr $(uname -s) 1 10)" == "MSYS_NT-10" ]; then
BUILD_TYPE="win"
elif [ "`uname`" == "Darwin" ]; then
if [ -z "$MAS_BUILD" ]; then if [ -z "$MAS_BUILD" ]; then
BUILD_TYPE="darwin" BUILD_TYPE="darwin"
else else
@ -46,23 +48,47 @@ cp_if_exist() {
move_src_dirs_if_exist() { move_src_dirs_if_exist() {
mkdir src_artifacts mkdir src_artifacts
for dir in \ dirs=("src/out/Default/gen/node_headers" \
src/out/Default/gen/node_headers \ "src/out/Default/overlapped-checker" \
src/out/Default/overlapped-checker \ "src/out/Default/ffmpeg" \
src/out/Default/ffmpeg \ "src/out/Default/hunspell_dictionaries" \
src/out/Default/hunspell_dictionaries \ "src/third_party/electron_node" \
src/third_party/electron_node \ "src/third_party/nan" \
src/third_party/nan \ "src/cross-arch-snapshots" \
src/cross-arch-snapshots \ "src/buildtools/mac" \
src/third_party/llvm-build \ "src/buildtools/third_party/libc++" \
src/build/linux \ "src/buildtools/third_party/libc++abi" \
src/buildtools/mac \ "src/third_party/libc++" \
src/buildtools/third_party/libc++ \ "src/third_party/libc++abi" \
src/buildtools/third_party/libc++abi \ "src/out/Default/obj/buildtools/third_party" \
src/third_party/libc++ \ "src/v8/tools/builtins-pgo")
src/third_party/libc++abi \
src/out/Default/obj/buildtools/third_party \ # Only do this for linux build type, this folder
src/v8/tools/builtins-pgo # exists for windows builds on linux hosts but we do
# not need it
if [ "$BUILD_TYPE" == "linux" ]; then
dirs+=('src/build/linux')
fi
# llvm-build is the host toolchain, for windows we need
# a different toolchain so no point copying this one
if [ "$BUILD_TYPE" != "win" ]; then
dirs+=('src/third_party/llvm-build')
fi
# On windows we should clean up two symlinks that aren't
# compatible with the windows test runner
if [ "$BUILD_TYPE" == "win" ]; then
rm -f src/third_party/electron_node/tools/node_modules/eslint/node_modules/eslint
rm -f src/third_party/electron_node/tools/node_modules/eslint/node_modules/.bin/eslint
rm -f src/third_party/electron_node/out/tools/bin/python
# Also need to copy electron.lib to node.lib for native module testing purposes
mkdir -p src/out/Default/gen/node_headers/Release
cp src/out/Default/electron.lib src/out/Default/gen/node_headers/Release/node.lib
fi
for dir in "${dirs[@]}"
do do
if [ -d "$dir" ]; then if [ -d "$dir" ]; then
mkdir -p src_artifacts/$(dirname $dir) mkdir -p src_artifacts/$(dirname $dir)
@ -70,7 +96,7 @@ move_src_dirs_if_exist() {
fi fi
done done
tar -C src_artifacts -cf src_artifacts.tar ./ tar -C src_artifacts -cf src_artifacts.tar .
echo Storing src_artifacts.tar echo Storing src_artifacts.tar
mv src_artifacts.tar $SRC_ARTIFACTS mv src_artifacts.tar $SRC_ARTIFACTS


@ -2,21 +2,19 @@ const crypto = require('node:crypto');
const fs = require('node:fs'); const fs = require('node:fs');
const path = require('node:path'); const path = require('node:path');
// Fallback to blow away old cache keys
const FALLBACK_HASH_VERSION = 3;
// Per platform hash versions to bust the cache on different platforms // Per platform hash versions to bust the cache on different platforms
const HASH_VERSIONS = { const HASH_VERSIONS = {
darwin: 3, darwin: 4,
win32: 4, win32: 4,
linux: 3 linux: 4
}; };
// Base files to hash // Base files to hash
const filesToHash = [ const filesToHash = [
path.resolve(__dirname, '../DEPS'), path.resolve(__dirname, '../DEPS'),
path.resolve(__dirname, '../yarn.lock'), path.resolve(__dirname, '../yarn.lock'),
path.resolve(__dirname, '../script/sysroots.json') path.resolve(__dirname, '../script/sysroots.json'),
path.resolve(__dirname, '../.github/actions/checkout/action.yml')
]; ];
const addAllFiles = (dir) => { const addAllFiles = (dir) => {
@ -38,7 +36,7 @@ const hasher = crypto.createHash('SHA256');
const addToHashAndLog = (s) => { const addToHashAndLog = (s) => {
return hasher.update(s); return hasher.update(s);
}; };
addToHashAndLog(`HASH_VERSION:${HASH_VERSIONS[process.platform] || FALLBACK_HASH_VERSION}`); addToHashAndLog(`HASH_VERSION:${HASH_VERSIONS[process.platform]}`);
for (const file of filesToHash) { for (const file of filesToHash) {
hasher.update(fs.readFileSync(file)); hasher.update(fs.readFileSync(file));
} }
@ -47,15 +45,5 @@ for (const file of filesToHash) {
const extraArgs = process.env.GCLIENT_EXTRA_ARGS || 'no_extra_args'; const extraArgs = process.env.GCLIENT_EXTRA_ARGS || 'no_extra_args';
addToHashAndLog(extraArgs); addToHashAndLog(extraArgs);
const effectivePlatform = extraArgs.includes('host_os=mac') ? 'darwin' : process.platform;
// Write the hash to disk // Write the hash to disk
fs.writeFileSync(path.resolve(__dirname, '../.depshash'), hasher.digest('hex')); fs.writeFileSync(path.resolve(__dirname, '../.depshash'), hasher.digest('hex'));
let targetContent = `${effectivePlatform}\n${process.env.TARGET_ARCH}\n${process.env.GN_CONFIG}\n${undefined}\n${process.env.GN_EXTRA_ARGS}\n${process.env.GN_BUILDFLAG_ARGS}`;
const argsDir = path.resolve(__dirname, '../build/args');
for (const argFile of fs.readdirSync(argsDir).sort()) {
targetContent += `\n${argFile}--${crypto.createHash('SHA1').update(fs.readFileSync(path.resolve(argsDir, argFile))).digest('hex')}`;
}
fs.writeFileSync(path.resolve(__dirname, '../.depshash-target'), targetContent);
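The hunk above collapses the cross-instance cache key to a per-platform HASH_VERSIONS entry plus the hashed input files, and drops the separate .depshash-target output. A minimal sketch of that keying scheme, assuming only Node's built-in crypto and fs modules (the function name and argument shape are illustrative, not the script's actual interface):

// Sketch of the cache-key scheme: a SHA-256 over a per-platform version
// marker, the dependency-describing files, and GCLIENT_EXTRA_ARGS.
const crypto = require('node:crypto');
const fs = require('node:fs');

function depsHash (platform, files, extraArgs = 'no_extra_args') {
  const versions = { darwin: 4, win32: 4, linux: 4 }; // mirrors HASH_VERSIONS
  const hasher = crypto.createHash('SHA256');
  hasher.update(`HASH_VERSION:${versions[platform]}`);
  for (const file of files) {
    hasher.update(fs.readFileSync(file)); // DEPS, yarn.lock, sysroots.json, ...
  }
  hasher.update(extraArgs); // extra gclient args also bust the key
  return hasher.digest('hex');
}

Bumping one platform's HASH_VERSIONS entry therefore invalidates only that platform's caches.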


@ -31,9 +31,9 @@ async function main () {
const outDir = utils.getOutDir({ shouldLog: true }); const outDir = utils.getOutDir({ shouldLog: true });
const nodeDir = path.resolve(BASE, 'out', outDir, 'gen', 'node_headers'); const nodeDir = path.resolve(BASE, 'out', outDir, 'gen', 'node_headers');
const env = { const env = {
npm_config_msvs_version: '2022',
...process.env, ...process.env,
npm_config_nodedir: nodeDir, npm_config_nodedir: nodeDir,
npm_config_msvs_version: '2019',
npm_config_arch: process.env.NPM_CONFIG_ARCH, npm_config_arch: process.env.NPM_CONFIG_ARCH,
npm_config_yes: 'true' npm_config_yes: 'true'
}; };
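Two things change in this env block: msvs_version moves from 2019 to 2022, and the default now sits before the ...process.env spread. Because later spread properties win, an npm_config_msvs_version coming in through the environment (such as the NPM_CONFIG_MSVS_VERSION exported by the test workflow above) takes precedence over the default instead of being clobbered by it. A tiny ordering sketch, purely illustrative:

// Illustrative only: object spread is last-wins, so environment values
// placed after the default take precedence over it.
const defaults = { npm_config_msvs_version: '2022' };
const env = { ...defaults, ...process.env };
// env.npm_config_msvs_version is '2022' unless the same key already exists
// in process.env, in which case the environment value wins.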


@ -17,6 +17,8 @@ const unknownFlags = [];
const pass = chalk.green('✓'); const pass = chalk.green('✓');
const fail = chalk.red('✗'); const fail = chalk.red('✗');
const FAILURE_STATUS_KEY = 'Electron_Spec_Runner_Failures';
const args = minimist(process.argv, { const args = minimist(process.argv, {
string: ['runners', 'target', 'electronVersion'], string: ['runners', 'target', 'electronVersion'],
unknown: arg => unknownFlags.push(arg) unknown: arg => unknownFlags.push(arg)
@ -156,6 +158,36 @@ async function runElectronTests () {
} }
} }
async function asyncSpawn (exe, runnerArgs) {
return new Promise((resolve, reject) => {
let forceExitResult = 0;
const child = childProcess.spawn(exe, runnerArgs, {
cwd: path.resolve(__dirname, '../..')
});
child.stdout.pipe(process.stdout);
child.stderr.pipe(process.stderr);
if (process.env.ELECTRON_FORCE_TEST_SUITE_EXIT) {
child.stdout.on('data', data => {
const failureRE = RegExp(`${FAILURE_STATUS_KEY}: (\\d.*)`);
const failures = data.toString().match(failureRE);
if (failures) {
forceExitResult = parseInt(failures[1], 10);
}
});
}
child.on('error', error => reject(error));
child.on('close', (status, signal) => {
let returnStatus = 0;
if (process.env.ELECTRON_FORCE_TEST_SUITE_EXIT) {
returnStatus = forceExitResult;
} else {
returnStatus = status;
}
resolve({ status: returnStatus, signal });
});
});
}
async function runTestUsingElectron (specDir, testName) { async function runTestUsingElectron (specDir, testName) {
let exe; let exe;
if (args.electronVersion) { if (args.electronVersion) {
@ -169,10 +201,7 @@ async function runTestUsingElectron (specDir, testName) {
runnerArgs.unshift(path.resolve(__dirname, 'dbus_mock.py'), exe); runnerArgs.unshift(path.resolve(__dirname, 'dbus_mock.py'), exe);
exe = 'python3'; exe = 'python3';
} }
const { status, signal } = childProcess.spawnSync(exe, runnerArgs, { const { status, signal } = await asyncSpawn(exe, runnerArgs);
cwd: path.resolve(__dirname, '../..'),
stdio: 'inherit'
});
if (status !== 0) { if (status !== 0) {
if (status) { if (status) {
const textStatus = process.platform === 'win32' ? `0x${status.toString(16)}` : status.toString(); const textStatus = process.platform === 'win32' ? `0x${status.toString(16)}` : status.toString();
@ -191,9 +220,9 @@ async function runMainProcessElectronTests () {
async function installSpecModules (dir) { async function installSpecModules (dir) {
const env = { const env = {
npm_config_msvs_version: '2022',
...process.env, ...process.env,
CXXFLAGS: process.env.CXXFLAGS, CXXFLAGS: process.env.CXXFLAGS,
npm_config_msvs_version: '2019',
npm_config_yes: 'true' npm_config_yes: 'true'
}; };
if (args.electronVersion) { if (args.electronVersion) {

script/zip_manifests/dist_zip.win.arm64.manifest: Executable file → Normal file

@ -2159,6 +2159,10 @@ describe('default behavior', () => {
serverUrl = (await listen(server)).url; serverUrl = (await listen(server)).url;
}); });
after(() => {
server.close();
});
it('should emit a login event on app when a WebContents hits a 401', async () => { it('should emit a login event on app when a WebContents hits a 401', async () => {
const w = new BrowserWindow({ show: false }); const w = new BrowserWindow({ show: false });
w.loadURL(serverUrl); w.loadURL(serverUrl);


@ -934,6 +934,9 @@ describe('BrowserWindow module', () => {
}); });
url = (await listen(server)).url; url = (await listen(server)).url;
}); });
after(() => {
server.close();
});
it('for initial navigation, event order is consistent', async () => { it('for initial navigation, event order is consistent', async () => {
const firedEvents: string[] = []; const firedEvents: string[] = [];
const expectedEventOrder = [ const expectedEventOrder = [


@ -858,6 +858,9 @@ describe('session module', () => {
res.end('authenticated'); res.end('authenticated');
} }
}); });
defer(() => {
server.close();
});
const { port } = await listen(server); const { port } = await listen(server);
const fetch = (url: string) => new Promise((resolve, reject) => { const fetch = (url: string) => new Promise((resolve, reject) => {
const request = net.request({ url, session: ses }); const request = net.request({ url, session: ses });
@ -941,6 +944,13 @@ describe('session module', () => {
}; };
describe('session.downloadURL', () => { describe('session.downloadURL', () => {
let server: http.Server;
afterEach(() => {
if (server) {
server.close();
server = null as unknown as http.Server;
}
});
it('can perform a download', async () => { it('can perform a download', async () => {
const willDownload = once(session.defaultSession, 'will-download'); const willDownload = once(session.defaultSession, 'will-download');
session.defaultSession.downloadURL(`${url}:${port}`); session.defaultSession.downloadURL(`${url}:${port}`);
@ -951,7 +961,7 @@ describe('session module', () => {
}); });
it('can perform a download with a valid auth header', async () => { it('can perform a download with a valid auth header', async () => {
const server = http.createServer((req, res) => { server = http.createServer((req, res) => {
const { authorization } = req.headers; const { authorization } = req.headers;
if (!authorization || authorization !== 'Basic i-am-an-auth-header') { if (!authorization || authorization !== 'Basic i-am-an-auth-header') {
res.statusCode = 401; res.statusCode = 401;
@ -1013,7 +1023,7 @@ describe('session module', () => {
}); });
it('correctly handles a download with an invalid auth header', async () => { it('correctly handles a download with an invalid auth header', async () => {
const server = http.createServer((req, res) => { server = http.createServer((req, res) => {
const { authorization } = req.headers; const { authorization } = req.headers;
if (!authorization || authorization !== 'Basic i-am-an-auth-header') { if (!authorization || authorization !== 'Basic i-am-an-auth-header') {
res.statusCode = 401; res.statusCode = 401;
@ -1057,6 +1067,13 @@ describe('session module', () => {
}); });
describe('webContents.downloadURL', () => { describe('webContents.downloadURL', () => {
let server: http.Server;
afterEach(() => {
if (server) {
server.close();
server = null as unknown as http.Server;
}
});
it('can perform a download', async () => { it('can perform a download', async () => {
const w = new BrowserWindow({ show: false }); const w = new BrowserWindow({ show: false });
const willDownload = once(w.webContents.session, 'will-download'); const willDownload = once(w.webContents.session, 'will-download');
@ -1068,7 +1085,7 @@ describe('session module', () => {
}); });
it('can perform a download with a valid auth header', async () => { it('can perform a download with a valid auth header', async () => {
const server = http.createServer((req, res) => { server = http.createServer((req, res) => {
const { authorization } = req.headers; const { authorization } = req.headers;
if (!authorization || authorization !== 'Basic i-am-an-auth-header') { if (!authorization || authorization !== 'Basic i-am-an-auth-header') {
res.statusCode = 401; res.statusCode = 401;
@ -1124,7 +1141,7 @@ describe('session module', () => {
}); });
it('correctly handles a download and an invalid auth header', async () => { it('correctly handles a download and an invalid auth header', async () => {
const server = http.createServer((req, res) => { server = http.createServer((req, res) => {
const { authorization } = req.headers; const { authorization } = req.headers;
if (!authorization || authorization !== 'Basic i-am-an-auth-header') { if (!authorization || authorization !== 'Basic i-am-an-auth-header') {
res.statusCode = 401; res.statusCode = 401;
@ -1315,6 +1332,9 @@ describe('session module', () => {
send(req, req.url!, options) send(req, req.url!, options)
.on('error', (error: any) => { throw error; }).pipe(res); .on('error', (error: any) => { throw error; }).pipe(res);
}); });
defer(() => {
rangeServer.close();
});
try { try {
const { url } = await listen(rangeServer); const { url } = await listen(rangeServer);
const w = new BrowserWindow({ show: false }); const w = new BrowserWindow({ show: false });


@ -15,6 +15,7 @@ import { closeAllWindows } from './lib/window-helpers';
describe('shell module', () => { describe('shell module', () => {
describe('shell.openExternal()', () => { describe('shell.openExternal()', () => {
let envVars: Record<string, string | undefined> = {}; let envVars: Record<string, string | undefined> = {};
let server: http.Server;
beforeEach(function () { beforeEach(function () {
envVars = { envVars = {
@ -31,8 +32,12 @@ describe('shell module', () => {
process.env.BROWSER = envVars.browser; process.env.BROWSER = envVars.browser;
process.env.DISPLAY = envVars.display; process.env.DISPLAY = envVars.display;
} }
await closeAllWindows();
if (server) {
server.close();
server = null as unknown as http.Server;
}
}); });
afterEach(closeAllWindows);
async function urlOpened () { async function urlOpened () {
let url = 'http://127.0.0.1'; let url = 'http://127.0.0.1';
@ -50,7 +55,7 @@ describe('shell module', () => {
const w = new BrowserWindow({ show: true }); const w = new BrowserWindow({ show: true });
requestReceived = once(w, 'blur'); requestReceived = once(w, 'blur');
} else { } else {
const server = http.createServer((req, res) => { server = http.createServer((req, res) => {
res.end(); res.end();
}); });
url = (await listen(server)).url; url = (await listen(server)).url;


@ -11,7 +11,7 @@ import { setTimeout } from 'node:timers/promises';
import * as url from 'node:url'; import * as url from 'node:url';
import { ifdescribe, defer, waitUntil, listen, ifit } from './lib/spec-helpers'; import { ifdescribe, defer, waitUntil, listen, ifit } from './lib/spec-helpers';
import { closeAllWindows } from './lib/window-helpers'; import { cleanupWebContents, closeAllWindows } from './lib/window-helpers';
const pdfjs = require('pdfjs-dist'); const pdfjs = require('pdfjs-dist');
@ -63,6 +63,7 @@ describe('webContents module', () => {
}); });
describe('fromFrame()', () => { describe('fromFrame()', () => {
afterEach(cleanupWebContents);
it('returns WebContents for mainFrame', () => { it('returns WebContents for mainFrame', () => {
const contents = (webContents as typeof ElectronInternal.WebContents).create(); const contents = (webContents as typeof ElectronInternal.WebContents).create();
expect(webContents.fromFrame(contents.mainFrame)).to.equal(contents); expect(webContents.fromFrame(contents.mainFrame)).to.equal(contents);
@ -85,6 +86,7 @@ describe('webContents module', () => {
}); });
describe('fromDevToolsTargetId()', () => { describe('fromDevToolsTargetId()', () => {
afterEach(closeAllWindows);
it('returns WebContents for attached DevTools target', async () => { it('returns WebContents for attached DevTools target', async () => {
const w = new BrowserWindow({ show: false }); const w = new BrowserWindow({ show: false });
await w.loadURL('about:blank'); await w.loadURL('about:blank');
@ -103,7 +105,10 @@ describe('webContents module', () => {
}); });
describe('will-prevent-unload event', function () { describe('will-prevent-unload event', function () {
afterEach(closeAllWindows); afterEach(async () => {
await closeAllWindows();
await cleanupWebContents();
});
it('does not emit if beforeunload returns undefined in a BrowserWindow', async () => { it('does not emit if beforeunload returns undefined in a BrowserWindow', async () => {
const w = new BrowserWindow({ show: false }); const w = new BrowserWindow({ show: false });
w.webContents.once('will-prevent-unload', () => { w.webContents.once('will-prevent-unload', () => {
@ -305,11 +310,13 @@ describe('webContents module', () => {
]); ]);
let w: BrowserWindow; let w: BrowserWindow;
before(async () => { beforeEach(async () => {
w = new BrowserWindow({ show: false, webPreferences: { contextIsolation: false } }); w = new BrowserWindow({ show: false, webPreferences: { contextIsolation: false } });
await w.loadURL('about:blank'); await w.loadURL('about:blank');
}); });
after(closeAllWindows); afterEach(async () => {
await closeAllWindows();
});
it('resolves the returned promise with the result', async () => { it('resolves the returned promise with the result', async () => {
const result = await w.webContents.executeJavaScript(code); const result = await w.webContents.executeJavaScript(code);
@ -380,6 +387,8 @@ describe('webContents module', () => {
await w.loadURL('about:blank'); await w.loadURL('about:blank');
}); });
after(() => w.close());
it('resolves the returned promise with the result', async () => { it('resolves the returned promise with the result', async () => {
await w.webContents.executeJavaScriptInIsolatedWorld(999, [{ code: 'window.X = 123' }]); await w.webContents.executeJavaScriptInIsolatedWorld(999, [{ code: 'window.X = 123' }]);
const isolatedResult = await w.webContents.executeJavaScriptInIsolatedWorld(999, [{ code: 'window.X' }]); const isolatedResult = await w.webContents.executeJavaScriptInIsolatedWorld(999, [{ code: 'window.X' }]);
@ -391,6 +400,14 @@ describe('webContents module', () => {
describe('loadURL() promise API', () => { describe('loadURL() promise API', () => {
let w: BrowserWindow; let w: BrowserWindow;
let s: http.Server;
afterEach(() => {
if (s) {
s.close();
s = null as unknown as http.Server;
}
});
beforeEach(async () => { beforeEach(async () => {
w = new BrowserWindow({ show: false }); w = new BrowserWindow({ show: false });
@ -494,19 +511,18 @@ describe('webContents module', () => {
}); });
it('rejects if the load is aborted', async () => { it('rejects if the load is aborted', async () => {
const s = http.createServer(() => { /* never complete the request */ }); s = http.createServer(() => { /* never complete the request */ });
const { port } = await listen(s); const { port } = await listen(s);
const p = expect(w.loadURL(`http://127.0.0.1:${port}`)).to.eventually.be.rejectedWith(Error, /ERR_ABORTED/); const p = expect(w.loadURL(`http://127.0.0.1:${port}`)).to.eventually.be.rejectedWith(Error, /ERR_ABORTED/);
// load a different file before the first load completes, causing the // load a different file before the first load completes, causing the
// first load to be aborted. // first load to be aborted.
await w.loadFile(path.join(fixturesPath, 'pages', 'base-page.html')); await w.loadFile(path.join(fixturesPath, 'pages', 'base-page.html'));
await p; await p;
s.close();
}); });
it("doesn't reject when a subframe fails to load", async () => { it("doesn't reject when a subframe fails to load", async () => {
let resp = null as unknown as http.ServerResponse; let resp = null as unknown as http.ServerResponse;
const s = http.createServer((req, res) => { s = http.createServer((req, res) => {
res.writeHead(200, { 'Content-Type': 'text/html' }); res.writeHead(200, { 'Content-Type': 'text/html' });
res.write('<iframe src="http://err.name.not.resolved"></iframe>'); res.write('<iframe src="http://err.name.not.resolved"></iframe>');
resp = res; resp = res;
@ -524,12 +540,11 @@ describe('webContents module', () => {
await p; await p;
resp.end(); resp.end();
await main; await main;
s.close();
}); });
it("doesn't resolve when a subframe loads", async () => { it("doesn't resolve when a subframe loads", async () => {
let resp = null as unknown as http.ServerResponse; let resp = null as unknown as http.ServerResponse;
const s = http.createServer((req, res) => { s = http.createServer((req, res) => {
res.writeHead(200, { 'Content-Type': 'text/html' }); res.writeHead(200, { 'Content-Type': 'text/html' });
res.write('<iframe src="about:blank"></iframe>'); res.write('<iframe src="about:blank"></iframe>');
resp = res; resp = res;
@ -548,7 +563,6 @@ describe('webContents module', () => {
resp.destroy(); // cause the main request to fail resp.destroy(); // cause the main request to fail
await expect(main).to.eventually.be.rejected() await expect(main).to.eventually.be.rejected()
.and.have.property('errno', -355); // ERR_INCOMPLETE_CHUNKED_ENCODING .and.have.property('errno', -355); // ERR_INCOMPLETE_CHUNKED_ENCODING
s.close();
}); });
it('subsequent load failures reject each time', async () => { it('subsequent load failures reject each time', async () => {
@ -820,6 +834,7 @@ describe('webContents module', () => {
}); });
describe('isFocused() API', () => { describe('isFocused() API', () => {
afterEach(closeAllWindows);
it('returns false when the window is hidden', async () => { it('returns false when the window is hidden', async () => {
const w = new BrowserWindow({ show: false }); const w = new BrowserWindow({ show: false });
await w.loadURL('about:blank'); await w.loadURL('about:blank');
@ -1177,6 +1192,7 @@ describe('webContents module', () => {
}); });
describe('startDrag({file, icon})', () => { describe('startDrag({file, icon})', () => {
afterEach(closeAllWindows);
it('throws errors for a missing file or a missing/empty icon', () => { it('throws errors for a missing file or a missing/empty icon', () => {
const w = new BrowserWindow({ show: false }); const w = new BrowserWindow({ show: false });
expect(() => { expect(() => {
@ -1281,6 +1297,7 @@ describe('webContents module', () => {
}); });
describe('userAgent APIs', () => { describe('userAgent APIs', () => {
afterEach(closeAllWindows);
it('is not empty by default', () => { it('is not empty by default', () => {
const w = new BrowserWindow({ show: false }); const w = new BrowserWindow({ show: false });
const userAgent = w.webContents.getUserAgent(); const userAgent = w.webContents.getUserAgent();
@ -1311,6 +1328,7 @@ describe('webContents module', () => {
}); });
describe('audioMuted APIs', () => { describe('audioMuted APIs', () => {
afterEach(closeAllWindows);
it('can set the audio mute level (functions)', () => { it('can set the audio mute level (functions)', () => {
const w = new BrowserWindow({ show: false }); const w = new BrowserWindow({ show: false });
@ -1526,6 +1544,9 @@ describe('webContents module', () => {
res.end(); res.end();
}); });
}); });
defer(() => {
server.close();
});
listen(server).then(({ url }) => { listen(server).then(({ url }) => {
const content = `<iframe src=${url}></iframe>`; const content = `<iframe src=${url}></iframe>`;
w.webContents.on('did-frame-finish-load', (e, isMainFrame) => { w.webContents.on('did-frame-finish-load', (e, isMainFrame) => {
@ -1538,8 +1559,6 @@ describe('webContents module', () => {
done(); done();
} catch (e) { } catch (e) {
done(e); done(e);
} finally {
server.close();
} }
} }
}); });
@ -2023,12 +2042,13 @@ describe('webContents module', () => {
return done(); return done();
} catch (e) { } catch (e) {
return done(e); return done(e);
} finally {
server.close();
} }
} }
res.end('<a id="a" href="/should_have_referrer" target="_blank">link</a>'); res.end('<a id="a" href="/should_have_referrer" target="_blank">link</a>');
}); });
defer(() => {
server.close();
});
listen(server).then(({ url }) => { listen(server).then(({ url }) => {
w.webContents.once('did-finish-load', () => { w.webContents.once('did-finish-load', () => {
w.webContents.setWindowOpenHandler(details => { w.webContents.setWindowOpenHandler(details => {
@ -2055,6 +2075,9 @@ describe('webContents module', () => {
} }
res.end(''); res.end('');
}); });
defer(() => {
server.close();
});
listen(server).then(({ url }) => { listen(server).then(({ url }) => {
w.webContents.once('did-finish-load', () => { w.webContents.once('did-finish-load', () => {
w.webContents.setWindowOpenHandler(details => { w.webContents.setWindowOpenHandler(details => {
@ -2788,7 +2811,10 @@ describe('webContents module', () => {
}); });
describe('close() method', () => { describe('close() method', () => {
afterEach(closeAllWindows); afterEach(async () => {
await closeAllWindows();
await cleanupWebContents();
});
it('closes when close() is called', async () => { it('closes when close() is called', async () => {
const w = (webContents as typeof ElectronInternal.WebContents).create(); const w = (webContents as typeof ElectronInternal.WebContents).create();


@ -9,7 +9,11 @@ import { defer, ifdescribe, waitUntil } from './lib/spec-helpers';
import { closeAllWindows } from './lib/window-helpers'; import { closeAllWindows } from './lib/window-helpers';
describe('WebContentsView', () => { describe('WebContentsView', () => {
afterEach(closeAllWindows); afterEach(async () => {
await closeAllWindows();
const existingWCS = webContents.getAllWebContents();
existingWCS.forEach((contents) => contents.close());
});
it('can be instantiated with no arguments', () => { it('can be instantiated with no arguments', () => {
// eslint-disable-next-line no-new // eslint-disable-next-line no-new


@ -495,6 +495,9 @@ describe('webFrameMain module', () => {
it('is not emitted upon cross-origin navigation', async () => { it('is not emitted upon cross-origin navigation', async () => {
const server = await createServer(); const server = await createServer();
defer(() => {
server.server.close();
});
const w = new BrowserWindow({ show: false }); const w = new BrowserWindow({ show: false });
await w.webContents.loadURL(server.url); await w.webContents.loadURL(server.url);


@ -77,8 +77,9 @@ describe('webFrame module', () => {
describe('api', () => { describe('api', () => {
let w: WebContents; let w: WebContents;
let win: BrowserWindow;
before(async () => { before(async () => {
const win = new BrowserWindow({ show: false, webPreferences: { contextIsolation: false, nodeIntegration: true } }); win = new BrowserWindow({ show: false, webPreferences: { contextIsolation: false, nodeIntegration: true } });
await win.loadURL('data:text/html,<iframe name="test"></iframe>'); await win.loadURL('data:text/html,<iframe name="test"></iframe>');
w = win.webContents; w = win.webContents;
await w.executeJavaScript(` await w.executeJavaScript(`
@ -89,6 +90,11 @@ describe('webFrame module', () => {
`); `);
}); });
after(() => {
win.close();
win = null as unknown as BrowserWindow;
});
describe('top', () => { describe('top', () => {
it('is self for top frame', async () => { it('is self for top frame', async () => {
const equal = await w.executeJavaScript('isSameWebFrame(webFrame.top, webFrame)'); const equal = await w.executeJavaScript('isSameWebFrame(webFrame.top, webFrame)');


@ -589,12 +589,12 @@ describe('webRequest module', () => {
it('can be proxyed', async () => { it('can be proxyed', async () => {
// Setup server. // Setup server.
const reqHeaders : { [key: string] : any } = {}; const reqHeaders : { [key: string] : any } = {};
const server = http.createServer((req, res) => { let server = http.createServer((req, res) => {
reqHeaders[req.url!] = req.headers; reqHeaders[req.url!] = req.headers;
res.setHeader('foo1', 'bar1'); res.setHeader('foo1', 'bar1');
res.end('ok'); res.end('ok');
}); });
const wss = new WebSocket.Server({ noServer: true }); let wss = new WebSocket.Server({ noServer: true });
wss.on('connection', function connection (ws) { wss.on('connection', function connection (ws) {
ws.on('message', function incoming (message) { ws.on('message', function incoming (message) {
if (message === 'foo') { if (message === 'foo') {
@ -660,9 +660,12 @@ describe('webRequest module', () => {
}); });
// Cleanup. // Cleanup.
after(() => { defer(() => {
contents.destroy(); contents.destroy();
server.close(); server.close();
server = null as unknown as http.Server;
wss.close();
wss = null as unknown as WebSocket.Server;
ses.webRequest.onBeforeRequest(null); ses.webRequest.onBeforeRequest(null);
ses.webRequest.onBeforeSendHeaders(null); ses.webRequest.onBeforeSendHeaders(null);
ses.webRequest.onHeadersReceived(null); ses.webRequest.onHeadersReceived(null);


@ -655,7 +655,9 @@ describe('chromium features', () => {
expect(size).to.be.a('number'); expect(size).to.be.a('number');
}); });
it('should lock the keyboard', async () => { // TODO: Re-enable for windows on GitHub Actions,
// fullscreen tests seem to hang on GHA specifically
ifit(process.platform !== 'win32' || process.arch === 'arm64')('should lock the keyboard', async () => {
const w = new BrowserWindow({ show: true }); const w = new BrowserWindow({ show: true });
await w.loadFile(path.join(fixturesPath, 'pages', 'modal.html')); await w.loadFile(path.join(fixturesPath, 'pages', 'modal.html'));
@ -672,8 +674,11 @@ describe('chromium features', () => {
w.webContents.sendInputEvent({ type: 'keyDown', keyCode: 'Escape' }); w.webContents.sendInputEvent({ type: 'keyDown', keyCode: 'Escape' });
await setTimeout(1000); await setTimeout(1000);
const openAfter1 = await w.webContents.executeJavaScript('document.getElementById(\'favDialog\').open'); await expect(waitUntil(async () => {
expect(openAfter1).to.be.true(); return await w.webContents.executeJavaScript(
'document.getElementById(\'favDialog\').open'
);
})).to.eventually.be.fulfilled();
expect(w.isFullScreen()).to.be.false(); expect(w.isFullScreen()).to.be.false();
// Test that with lock, with ESC: // Test that with lock, with ESC:
@ -683,7 +688,6 @@ describe('chromium features', () => {
await w.webContents.executeJavaScript(` await w.webContents.executeJavaScript(`
document.body.requestFullscreen(); document.body.requestFullscreen();
`, true); `, true);
await enterFS2; await enterFS2;
// Request keyboard lock after window has gone fullscreen // Request keyboard lock after window has gone fullscreen
@ -698,8 +702,12 @@ describe('chromium features', () => {
w.webContents.sendInputEvent({ type: 'keyDown', keyCode: 'Escape' }); w.webContents.sendInputEvent({ type: 'keyDown', keyCode: 'Escape' });
await setTimeout(1000); await setTimeout(1000);
const openAfter2 = await w.webContents.executeJavaScript('document.getElementById(\'favDialog\').open'); await expect(waitUntil(async () => {
expect(openAfter2).to.be.false(); const openAfter2 = await w.webContents.executeJavaScript(
'document.getElementById(\'favDialog\').open'
);
return (openAfter2 === false);
})).to.eventually.be.fulfilled();
expect(w.isFullScreen()).to.be.true(); expect(w.isFullScreen()).to.be.true();
}); });
}); });
@ -2530,6 +2538,7 @@ describe('chromium features', () => {
describe('websockets', () => { describe('websockets', () => {
it('has user agent', async () => { it('has user agent', async () => {
const server = http.createServer(); const server = http.createServer();
defer(() => server.close());
const { port } = await listen(server); const { port } = await listen(server);
const wss = new ws.Server({ server }); const wss = new ws.Server({ server });
const finished = new Promise<string | undefined>((resolve, reject) => { const finished = new Promise<string | undefined>((resolve, reject) => {
@ -2973,7 +2982,9 @@ describe('iframe using HTML fullscreen API while window is OS-fullscreened', ()
server.close(); server.close();
}); });
ifit(process.platform !== 'darwin')('can fullscreen from out-of-process iframes (non-macOS)', async () => { // TODO: Re-enable for windows on GitHub Actions,
// fullscreen tests seem to hang on GHA specifically
ifit(process.platform !== 'darwin' && (process.platform !== 'win32' || process.arch === 'arm64'))('can fullscreen from out-of-process iframes (non-macOS)', async () => {
const fullscreenChange = once(ipcMain, 'fullscreenChange'); const fullscreenChange = once(ipcMain, 'fullscreenChange');
const html = const html =
`<iframe style="width: 0" frameborder=0 src="${crossSiteUrl}" allowfullscreen></iframe>`; `<iframe style="width: 0" frameborder=0 src="${crossSiteUrl}" allowfullscreen></iframe>`;
@ -2989,12 +3000,12 @@ describe('iframe using HTML fullscreen API while window is OS-fullscreened', ()
"document.querySelector('iframe').contentWindow.postMessage('exitFullscreen', '*')" "document.querySelector('iframe').contentWindow.postMessage('exitFullscreen', '*')"
); );
await setTimeout(500); await expect(waitUntil(async () => {
const width = await w.webContents.executeJavaScript( const width = await w.webContents.executeJavaScript(
"document.querySelector('iframe').offsetWidth" "document.querySelector('iframe').offsetWidth"
); );
expect(width).to.equal(0); return width === 0;
})).to.eventually.be.fulfilled();
}); });
ifit(process.platform === 'darwin')('can fullscreen from out-of-process iframes (macOS)', async () => { ifit(process.platform === 'darwin')('can fullscreen from out-of-process iframes (macOS)', async () => {
@ -3026,8 +3037,9 @@ describe('iframe using HTML fullscreen API while window is OS-fullscreened', ()
await once(w, 'leave-full-screen'); await once(w, 'leave-full-screen');
}); });
// TODO(jkleinsc) fix this flaky test on WOA // TODO: Re-enable for windows on GitHub Actions,
ifit(process.platform !== 'win32' || process.arch !== 'arm64')('can fullscreen from in-process iframes', async () => { // fullscreen tests seem to hang on GHA specifically
ifit(process.platform !== 'win32' || process.arch === 'arm64')('can fullscreen from in-process iframes', async () => {
if (process.platform === 'darwin') await once(w, 'enter-full-screen'); if (process.platform === 'darwin') await once(w, 'enter-full-screen');
const fullscreenChange = once(ipcMain, 'fullscreenChange'); const fullscreenChange = once(ipcMain, 'fullscreenChange');
@ -3702,13 +3714,6 @@ describe('navigator.usb', () => {
}); });
await sesWin.loadFile(path.join(fixturesPath, 'pages', 'blank.html')); await sesWin.loadFile(path.join(fixturesPath, 'pages', 'blank.html'));
server = http.createServer((req, res) => {
res.setHeader('Content-Type', 'text/html');
res.end('<body>');
});
serverUrl = (await listen(server)).url;
const devices = await getDevices(); const devices = await getDevices();
expect(devices).to.be.an('array').that.is.empty(); expect(devices).to.be.an('array').that.is.empty();
}); });


@ -10,7 +10,7 @@ import * as path from 'node:path';
import { emittedNTimes, emittedUntil } from './lib/events-helpers'; import { emittedNTimes, emittedUntil } from './lib/events-helpers';
import { ifit, listen, waitUntil } from './lib/spec-helpers'; import { ifit, listen, waitUntil } from './lib/spec-helpers';
import { closeAllWindows, closeWindow } from './lib/window-helpers'; import { closeAllWindows, closeWindow, cleanupWebContents } from './lib/window-helpers';
const uuid = require('uuid'); const uuid = require('uuid');
@ -23,6 +23,7 @@ describe('chrome extensions', () => {
let server: http.Server; let server: http.Server;
let url: string; let url: string;
let port: number; let port: number;
let wss: WebSocket.Server;
before(async () => { before(async () => {
server = http.createServer((req, res) => { server = http.createServer((req, res) => {
if (req.url === '/cors') { if (req.url === '/cors') {
@ -31,7 +32,7 @@ describe('chrome extensions', () => {
res.end(emptyPage); res.end(emptyPage);
}); });
const wss = new WebSocket.Server({ noServer: true }); wss = new WebSocket.Server({ noServer: true });
wss.on('connection', function connection (ws) { wss.on('connection', function connection (ws) {
ws.on('message', function incoming (message) { ws.on('message', function incoming (message) {
if (message === 'foo') { if (message === 'foo') {
@ -42,8 +43,10 @@ describe('chrome extensions', () => {
({ port, url } = await listen(server)); ({ port, url } = await listen(server));
}); });
after(() => { after(async () => {
server.close(); server.close();
wss.close();
await cleanupWebContents();
}); });
afterEach(closeAllWindows); afterEach(closeAllWindows);
afterEach(() => { afterEach(() => {
@ -283,6 +286,10 @@ describe('chrome extensions', () => {
w = new BrowserWindow({ show: false, webPreferences: { session: customSession, nodeIntegration: true, contextIsolation: false } }); w = new BrowserWindow({ show: false, webPreferences: { session: customSession, nodeIntegration: true, contextIsolation: false } });
await w.loadURL(url); await w.loadURL(url);
}); });
afterEach(() => {
w.close();
w = null as unknown as BrowserWindow;
});
it('getAcceptLanguages()', async () => { it('getAcceptLanguages()', async () => {
const result = await exec('getAcceptLanguages'); const result = await exec('getAcceptLanguages');
expect(result).to.be.an('array').and.deep.equal(['en-US', 'en']); expect(result).to.be.an('array').and.deep.equal(['en-US', 'en']);
@ -308,6 +315,10 @@ describe('chrome extensions', () => {
w = new BrowserWindow({ show: false, webPreferences: { session: customSession, nodeIntegration: true, contextIsolation: false } }); w = new BrowserWindow({ show: false, webPreferences: { session: customSession, nodeIntegration: true, contextIsolation: false } });
await w.loadURL(url); await w.loadURL(url);
}); });
afterEach(async () => {
w.close();
w = null as unknown as BrowserWindow;
});
it('getManifest()', async () => { it('getManifest()', async () => {
const result = await exec('getManifest'); const result = await exec('getManifest');
expect(result).to.be.an('object').with.property('name', 'chrome-runtime'); expect(result).to.be.an('object').with.property('name', 'chrome-runtime');
@ -358,6 +369,11 @@ describe('chrome extensions', () => {
w = new BrowserWindow({ show: false, webPreferences: { session: customSession, sandbox: true, contextIsolation: true } }); w = new BrowserWindow({ show: false, webPreferences: { session: customSession, sandbox: true, contextIsolation: true } });
}); });
afterEach(() => {
w.close();
w = null as unknown as BrowserWindow;
});
describe('onBeforeRequest', () => { describe('onBeforeRequest', () => {
async function haveRejectedFetch () { async function haveRejectedFetch () {
try { try {
@ -426,6 +442,7 @@ describe('chrome extensions', () => {
customSession = session.fromPartition(`persist:${uuid.v4()}`); customSession = session.fromPartition(`persist:${uuid.v4()}`);
await customSession.loadExtension(path.join(fixtures, 'extensions', 'chrome-api')); await customSession.loadExtension(path.join(fixtures, 'extensions', 'chrome-api'));
}); });
afterEach(closeAllWindows);
it('executeScript', async () => { it('executeScript', async () => {
const w = new BrowserWindow({ show: false, webPreferences: { session: customSession, nodeIntegration: true } }); const w = new BrowserWindow({ show: false, webPreferences: { session: customSession, nodeIntegration: true } });
@ -492,6 +509,7 @@ describe('chrome extensions', () => {
}); });
describe('background pages', () => { describe('background pages', () => {
afterEach(closeAllWindows);
it('loads a lazy background page when sending a message', async () => { it('loads a lazy background page when sending a message', async () => {
const customSession = session.fromPartition(`persist:${uuid.v4()}`); const customSession = session.fromPartition(`persist:${uuid.v4()}`);
await customSession.loadExtension(path.join(fixtures, 'extensions', 'lazy-background-page')); await customSession.loadExtension(path.join(fixtures, 'extensions', 'lazy-background-page'));
@ -559,8 +577,9 @@ describe('chrome extensions', () => {
describe('devtools extensions', () => { describe('devtools extensions', () => {
let showPanelTimeoutId: any = null; let showPanelTimeoutId: any = null;
afterEach(() => { afterEach(async () => {
if (showPanelTimeoutId) clearTimeout(showPanelTimeoutId); if (showPanelTimeoutId) clearTimeout(showPanelTimeoutId);
await closeAllWindows();
}); });
const showLastDevToolsPanel = (w: BrowserWindow) => { const showLastDevToolsPanel = (w: BrowserWindow) => {
w.webContents.once('devtools-opened', () => { w.webContents.once('devtools-opened', () => {
@ -695,13 +714,14 @@ describe('chrome extensions', () => {
} }
}); });
({ port, url } = await listen(server)); ({ port } = await listen(server));
session.defaultSession.loadExtension(contentScript); session.defaultSession.loadExtension(contentScript);
}); });
after(() => { after(() => {
session.defaultSession.removeExtension('content-script-test'); session.defaultSession.removeExtension('content-script-test');
server.close();
}); });
beforeEach(() => { beforeEach(() => {
@ -758,10 +778,11 @@ describe('chrome extensions', () => {
}); });
describe('extension ui pages', () => { describe('extension ui pages', () => {
afterEach(() => { afterEach(async () => {
for (const e of session.defaultSession.getAllExtensions()) { for (const e of session.defaultSession.getAllExtensions()) {
session.defaultSession.removeExtension(e.id); session.defaultSession.removeExtension(e.id);
} }
await closeAllWindows();
}); });
it('loads a ui page of an extension', async () => { it('loads a ui page of an extension', async () => {
@ -782,6 +803,7 @@ describe('chrome extensions', () => {
}); });
describe('manifest v3', () => { describe('manifest v3', () => {
afterEach(closeAllWindows);
it('registers background service worker', async () => { it('registers background service worker', async () => {
const customSession = session.fromPartition(`persist:${uuid.v4()}`); const customSession = session.fromPartition(`persist:${uuid.v4()}`);
const registrationPromise = new Promise<string>(resolve => { const registrationPromise = new Promise<string>(resolve => {
@ -1033,6 +1055,7 @@ describe('chrome extensions', () => {
}); });
describe('get', () => { describe('get', () => {
afterEach(closeAllWindows);
it('returns tab properties', async () => { it('returns tab properties', async () => {
await w.loadURL(url); await w.loadURL(url);
@ -1173,6 +1196,7 @@ describe('chrome extensions', () => {
}); });
describe('query', () => { describe('query', () => {
afterEach(closeAllWindows);
it('can query for a tab with specific properties', async () => { it('can query for a tab with specific properties', async () => {
await w.loadURL(url); await w.loadURL(url);


@ -1,8 +1,10 @@
<html> <html>
<body> <body>
<script type="text/javascript" charset="utf-8"> <script type="text/javascript" charset="utf-8">
if (typeof require !== 'undefined') {
const {ipcRenderer, webFrame} = require('electron') const {ipcRenderer, webFrame} = require('electron')
ipcRenderer.send('webview-parent-zoom-level', webFrame.getZoomFactor(), webFrame.getZoomLevel()) ipcRenderer.send('webview-parent-zoom-level', webFrame.getZoomFactor(), webFrame.getZoomLevel())
}
</script> </script>
</body> </body>
</html> </html>


@ -4,6 +4,8 @@ const fs = require('node:fs');
const path = require('node:path'); const path = require('node:path');
const v8 = require('node:v8'); const v8 = require('node:v8');
const FAILURE_STATUS_KEY = 'Electron_Spec_Runner_Failures';
// We want to terminate on errors, not throw up a dialog // We want to terminate on errors, not throw up a dialog
process.on('uncaughtException', (err) => { process.on('uncaughtException', (err) => {
console.error('Unhandled exception in main spec runner:', err); console.error('Unhandled exception in main spec runner:', err);
@ -131,7 +133,7 @@ app.whenReady().then(async () => {
const validTestPaths = argv.files && argv.files.map(file => const validTestPaths = argv.files && argv.files.map(file =>
path.isAbsolute(file) path.isAbsolute(file)
? path.relative(baseElectronDir, file) ? path.relative(baseElectronDir, file)
: file); : path.normalize(file));
const filter = (file) => { const filter = (file) => {
if (!/-spec\.[tj]s$/.test(file)) { if (!/-spec\.[tj]s$/.test(file)) {
return false; return false;
@@ -155,17 +157,7 @@ app.whenReady().then(async () => {
   const { getFiles } = require('./get-files');
   const testFiles = await getFiles(__dirname, filter);
-  const VISIBILITY_SPEC = ('visibility-state-spec.ts');
-  const sortedFiles = testFiles.sort((a, b) => {
-    // If visibility-state-spec is in the list, move it to the first position
-    // so that it gets executed first to avoid other specs interferring with it.
-    if (a.indexOf(VISIBILITY_SPEC) > -1) {
-      return -1;
-    } else {
-      return a.localeCompare(b);
-    }
-  });
-  for (const file of sortedFiles) {
+  for (const file of testFiles.sort()) {
     mocha.addFile(file);
   }
@@ -178,7 +170,12 @@ app.whenReady().then(async () => {
   const cb = () => {
     // Ensure the callback is called after runner is defined
     process.nextTick(() => {
+      if (process.env.ELECTRON_FORCE_TEST_SUITE_EXIT === 'true') {
+        console.log(`${FAILURE_STATUS_KEY}: ${runner.failures}`);
+        process.kill(process.pid);
+      } else {
       process.exit(runner.failures);
+      }
     });
   };
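When `ELECTRON_FORCE_TEST_SUITE_EXIT` is `true`, the runner prints the failure count under the `Electron_Spec_Runner_Failures` key and then kills its own process rather than calling `process.exit`, so whatever launches it must recover the result from stdout. A minimal sketch of such a parser; only the key format comes from the change above, the wrapper itself is hypothetical:

    // Sketch of how a CI wrapper could recover the failure count when
    // ELECTRON_FORCE_TEST_SUITE_EXIT=true makes the runner print it and then
    // kill itself instead of exiting with a status code. Only the
    // 'Electron_Spec_Runner_Failures' key comes from the change above.
    const FAILURE_STATUS_KEY = 'Electron_Spec_Runner_Failures';

    export function failuresFromOutput (stdout: string): number {
      const match = stdout.match(new RegExp(`${FAILURE_STATUS_KEY}: (\\d+)`));
      // If the key never shows up, assume the run died before reporting.
      return match === null ? 1 : parseInt(match[1], 10);
    }

    // e.g. failuresFromOutput('...\nElectron_Spec_Runner_Failures: 0\n') === 0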


@@ -16,7 +16,7 @@ export async function copyApp (targetDir: string): Promise<string> {
   // On windows and linux we should read the zip manifest files and then copy each of those files
   // one by one
   const baseDir = path.dirname(process.execPath);
-  const zipManifestPath = path.resolve(__dirname, '..', '..', 'script', 'zip_manifests', `dist_zip.${process.platform === 'win32' ? 'win' : 'linux'}.${process.arch}.manifest`);
+  const zipManifestPath = path.resolve(__dirname, '..', '..', 'script', 'zip_manifests', `dist_zip.${process.platform === 'win32' ? 'win' : 'linux'}.${process.arch === 'ia32' ? 'x86' : process.arch}.manifest`);
   const filesToCopy = (fs.readFileSync(zipManifestPath, 'utf-8')).split('\n').filter(f => f !== 'LICENSE' && f !== 'LICENSES.chromium.html' && f !== 'version' && f.trim());
   await Promise.all(
     filesToCopy.map(async rel => {
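The only change here maps Node's `ia32` arch string onto the `x86` suffix the zip manifests actually use, so 32-bit Windows test runs resolve `dist_zip.win.x86.manifest` instead of a non-existent `ia32` variant. The same mapping pulled out into a standalone helper (the helper name is made up; the naming scheme is taken from the template literal above):

    // Hypothetical helper mirroring the mapping above: Node reports 32-bit
    // Windows as 'ia32', but the zip manifests are named with an 'x86' suffix.
    function zipManifestName (platform: string, arch: string): string {
      const platformName = platform === 'win32' ? 'win' : 'linux';
      const archName = arch === 'ia32' ? 'x86' : arch;
      return `dist_zip.${platformName}.${archName}.manifest`;
    }

    // zipManifestName('win32', 'ia32')  -> 'dist_zip.win.x86.manifest'
    // zipManifestName('win32', 'arm64') -> 'dist_zip.win.arm64.manifest'
    // zipManifestName('linux', 'x64')   -> 'dist_zip.linux.x64.manifest'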


@@ -1,4 +1,4 @@
-import { BaseWindow, BrowserWindow } from 'electron/main';
+import { BaseWindow, BrowserWindow, webContents } from 'electron/main';
 import { expect } from 'chai';
@@ -48,8 +48,23 @@ export const closeWindow = async (
   }
 };

-export async function closeAllWindows () {
+export async function closeAllWindows (assertNotWindows = false) {
+  let windowsClosed = 0;
   for (const w of BaseWindow.getAllWindows()) {
-    await closeWindow(w, { assertNotWindows: false });
+    await closeWindow(w, { assertNotWindows });
+    windowsClosed++;
   }
+  return windowsClosed;
+}
+
+export async function cleanupWebContents () {
+  let webContentsDestroyed = 0;
+  const existingWCS = webContents.getAllWebContents();
+  for (const contents of existingWCS) {
+    const isDestroyed = once(contents, 'destroyed');
+    contents.destroy();
+    await isDestroyed;
+    webContentsDestroyed++;
+  }
+  return webContentsDestroyed;
 }
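`closeAllWindows` now returns how many windows it closed and takes an `assertNotWindows` flag, and the new `cleanupWebContents` destroys any WebContents that outlived its test, awaiting each `destroyed` event. A hypothetical hook showing how the two might be combined, assuming both are exported from the spec's window-helpers module as above:

    import { closeAllWindows, cleanupWebContents } from './lib/window-helpers';

    // Sketch of a cleanup hook built on the helpers above: close any leaked
    // windows first, then destroy orphaned WebContents, and log the counts so
    // a leaky spec is easy to spot in CI output. The hook itself is
    // hypothetical; the helper signatures are the ones added above.
    afterEach(async () => {
      const windowsClosed = await closeAllWindows();
      const webContentsDestroyed = await cleanupWebContents();
      if (windowsClosed > 0 || webContentsDestroyed > 0) {
        console.warn(`cleaned up ${windowsClosed} window(s) and ${webContentsDestroyed} WebContents`);
      }
    });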


@@ -1,4 +1,4 @@
-import { BaseWindow, BrowserWindow, BrowserWindowConstructorOptions, WebContents, WebContentsView } from 'electron/main';
+import { BaseWindow, BrowserWindow, BrowserWindowConstructorOptions, webContents, WebContents, WebContentsView } from 'electron/main';
 import { expect } from 'chai';
@@ -23,6 +23,8 @@ ifdescribe(process.platform !== 'linux')('document.visibilityState', () => {
   afterEach(async () => {
     await closeAllWindows();
     w = null as unknown as BrowserWindow;
+    const existingWCS = webContents.getAllWebContents();
+    existingWCS.forEach((contents) => contents.close());
   });

   const load = () => w.webContents.loadFile(path.resolve(__dirname, 'fixtures', 'chromium', 'visibilitystate.html'));
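Unlike the destroy-based `cleanupWebContents` helper, this spec tears down leftover WebContents with `close()`, which gives them a chance to run unload handlers. A sketch of a variant that also waits for each one to actually be destroyed; illustrative only, not what the spec above does:

    import { webContents } from 'electron/main';
    import { once } from 'node:events';

    // Sketch: close every remaining WebContents and wait for its 'destroyed'
    // event before moving on, instead of firing close() and not waiting.
    export async function closeRemainingWebContents (): Promise<void> {
      await Promise.all(webContents.getAllWebContents().map(async (contents) => {
        const destroyed = once(contents, 'destroyed');
        contents.close();
        await destroyed;
      }));
    }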


@@ -1700,6 +1700,7 @@ describe('<webview> tag', function () {
     await loadWebViewAndWaitForEvent(w, {
       src: `file://${fixtures}/pages/dom-ready.html?port=${port}`
     }, 'dom-ready');
+    defer(() => { server.close(); });
   });

   itremote('throws a custom error when an API method is called before the event is emitted', () => {
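The added `defer(() => { server.close(); })` hands the HTTP server teardown to the spec's deferral helper, so the server started for the dom-ready fixture is closed even if a later assertion throws. A minimal sketch of such a defer mechanism, under the assumption that callbacks are flushed in an afterEach-style hook; the real helper lives in the spec support code and may differ:

    // Sketch of a defer()-style teardown register: collect callbacks during a
    // test and flush them afterwards, so resources like HTTP servers are
    // closed even when the test body throws. Hypothetical implementation.
    type Teardown = () => void | Promise<void>;
    const deferred: Teardown[] = [];

    export function defer (fn: Teardown): void {
      deferred.push(fn);
    }

    afterEach(async () => {
      // Run the callbacks registered during the test, newest first.
      while (deferred.length > 0) {
        await deferred.pop()!();
      }
    });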