build: clean up scripts folder, move release scripts, move zip manifest logic (#18945)

* build: move zip manifest logic into zip_manifests dir

* build: remove unused get-version.py script

* chore: move all release/sudowoodo-related scripts into script/release

* chore: update paths to zip manifests in CI configs

* build: fix path to ci release build script for arm tests
Samuel Attard 2019-06-24 10:18:04 -07:00 committed by GitHub
parent 5686a0713e
commit fb01c94511
36 changed files with 122 additions and 147 deletions


@@ -0,0 +1,237 @@
if (!process.env.CI) require('dotenv-safe').load()
const assert = require('assert')
const request = require('request')
const buildAppVeyorURL = 'https://ci.appveyor.com/api/builds'
const vstsURL = 'https://github.visualstudio.com/electron/_apis/build'
const appVeyorJobs = {
'electron-x64': 'electron-x64-release',
'electron-ia32': 'electron-ia32-release'
}
const circleCIJobs = [
'linux-arm-publish',
'linux-arm64-publish',
'linux-ia32-publish',
'linux-x64-publish',
'mas-publish',
'osx-publish'
]
const vstsArmJobs = [
'electron-arm-testing',
'electron-arm64-testing'
]
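// wrap the callback-style 'request' call in a promise, optionally JSON-parsing the response body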
async function makeRequest (requestOptions, parseResponse) {
return new Promise((resolve, reject) => {
request(requestOptions, (err, res, body) => {
if (!err && res.statusCode >= 200 && res.statusCode < 300) {
if (parseResponse) {
const build = JSON.parse(body)
resolve(build)
} else {
resolve(body)
}
} else {
console.error('Error occurred while requesting:', requestOptions.url)
if (parseResponse) {
try {
console.log('Error: ', `(status ${res.statusCode})`, err || JSON.parse(res.body), requestOptions)
} catch (err) {
console.log('Error: ', `(status ${res.statusCode})`, err || res.body, requestOptions)
}
} else {
console.log('Error: ', `(status ${res.statusCode})`, err || res.body, requestOptions)
}
reject(err)
}
})
})
}
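// trigger a single CircleCI release job on the target branch via the CircleCI v1.1 API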
async function circleCIcall (buildUrl, targetBranch, job, options) {
console.log(`Triggering CircleCI to run build job: ${job} on branch: ${targetBranch} with release flag.`)
const buildRequest = {
'build_parameters': {
'CIRCLE_JOB': job
}
}
if (!options.ghRelease) {
buildRequest.build_parameters.UPLOAD_TO_S3 = 1
}
const circleResponse = await makeRequest({
method: 'POST',
url: buildUrl,
headers: {
'Content-Type': 'application/json',
'Accept': 'application/json'
},
body: JSON.stringify(buildRequest)
}, true).catch(err => {
console.log('Error calling CircleCI:', err)
})
console.log(`CircleCI release build request for ${job} successful. Check ${circleResponse.build_url} for status.`)
}
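// validate the requested AppVeyor job (or fan out to every known job) and trigger the release build(s)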
function buildAppVeyor (targetBranch, options) {
const validJobs = Object.keys(appVeyorJobs)
if (options.job) {
assert(validJobs.includes(options.job), `Unknown AppVeyor CI job name: ${options.job}. Valid values are: ${validJobs}.`)
callAppVeyor(targetBranch, options.job, options)
} else {
validJobs.forEach((job) => callAppVeyor(targetBranch, job, options))
}
}
async function callAppVeyor (targetBranch, job, options) {
console.log(`Triggering AppVeyor to run build job: ${job} on branch: ${targetBranch} with release flag.`)
const environmentVariables = {
ELECTRON_RELEASE: 1
}
if (!options.ghRelease) {
environmentVariables.UPLOAD_TO_S3 = 1
}
const requestOpts = {
url: buildAppVeyorURL,
auth: {
bearer: process.env.APPVEYOR_CLOUD_TOKEN
},
headers: {
'Content-Type': 'application/json'
},
body: JSON.stringify({
accountName: 'electron-bot',
projectSlug: appVeyorJobs[job],
branch: targetBranch,
environmentVariables
}),
method: 'POST'
}
const appVeyorResponse = await makeRequest(requestOpts, true).catch(err => {
console.log('Error calling AppVeyor:', err)
})
const buildUrl = `https://ci.appveyor.com/project/electron-bot/${appVeyorJobs[job]}/build/${appVeyorResponse.version}`
console.log(`AppVeyor release build request for ${job} successful. Check build status at ${buildUrl}`)
}
function buildCircleCI (targetBranch, options) {
const circleBuildUrl = `https://circleci.com/api/v1.1/project/github/electron/electron/tree/${targetBranch}?circle-token=${process.env.CIRCLE_TOKEN}`
if (options.job) {
assert(circleCIJobs.includes(options.job), `Unknown CircleCI job name: ${options.job}. Valid values are: ${circleCIJobs}.`)
circleCIcall(circleBuildUrl, targetBranch, options.job, options)
} else {
circleCIJobs.forEach((job) => circleCIcall(circleBuildUrl, targetBranch, job, options))
}
}
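// look up the VSTS build definition(s) matching options.job and queue a build for each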
async function buildVSTS (targetBranch, options) {
if (options.armTest) {
assert(vstsArmJobs.includes(options.job), `Unknown VSTS CI arm test job name: ${options.job}. Valid values are: ${vstsArmJobs}.`)
}
console.log(`Triggering VSTS to run build on branch: ${targetBranch} with release flag.`)
const environmentVariables = {
ELECTRON_RELEASE: 1
}
if (options.armTest) {
environmentVariables.CIRCLE_BUILD_NUM = options.circleBuildNum
} else {
if (!options.ghRelease) {
environmentVariables.UPLOAD_TO_S3 = 1
}
}
const requestOpts = {
url: `${vstsURL}/definitions?api-version=4.1`,
auth: {
user: '',
password: process.env.VSTS_TOKEN
},
headers: {
'Content-Type': 'application/json'
}
}
const vstsResponse = await makeRequest(requestOpts, true).catch(err => {
console.log('Error calling VSTS to get build definitions:', err)
})
const buildsToRun = vstsResponse.value.filter(build => build.name === options.job)
buildsToRun.forEach((build) => callVSTSBuild(build, targetBranch, environmentVariables))
}
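// queue one VSTS build, passing any environment variables through as build parameters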
async function callVSTSBuild (build, targetBranch, environmentVariables) {
const buildBody = {
definition: build,
sourceBranch: targetBranch,
priority: 'high'
}
if (Object.keys(environmentVariables).length !== 0) {
buildBody.parameters = JSON.stringify(environmentVariables)
}
const requestOpts = {
url: `${vstsURL}/builds?api-version=4.1`,
auth: {
user: '',
password: process.env.VSTS_TOKEN
},
headers: {
'Content-Type': 'application/json'
},
body: JSON.stringify(buildBody),
method: 'POST'
}
const vstsResponse = await makeRequest(requestOpts, true).catch(err => {
console.log(`Error calling VSTS for job ${build.name}`, err)
})
console.log(`VSTS release build request for ${build.name} successful. Check ${vstsResponse._links.web.href} for status.`)
}
function runRelease (targetBranch, options) {
if (options.ci) {
switch (options.ci) {
case 'CircleCI': {
buildCircleCI(targetBranch, options)
break
}
case 'AppVeyor': {
buildAppVeyor(targetBranch, options)
break
}
case 'VSTS': {
buildVSTS(targetBranch, options)
break
}
default: {
console.log(`Error! Unknown CI: ${options.ci}.`)
process.exit(1)
}
}
} else {
buildCircleCI(targetBranch, options)
buildAppVeyor(targetBranch, options)
buildVSTS(targetBranch, options)
}
}
module.exports = runRelease
if (require.main === module) {
const args = require('minimist')(process.argv.slice(2), {
boolean: ['ghRelease', 'armTest']
})
const targetBranch = args._[0]
if (args._.length < 1) {
console.log(`Trigger CI to build release builds of electron.
Usage: ci-release-build.js [--job=CI_JOB_NAME] [--ci=CircleCI|AppVeyor|VSTS]
[--ghRelease] [--armTest] [--circleBuildNum=xxx] TARGET_BRANCH
`)
process.exit(0)
}
runRelease(targetBranch, args)
}


@@ -0,0 +1,38 @@
if (!process.env.CI) require('dotenv-safe').load()
const octokit = require('@octokit/rest')({
auth: process.env.ELECTRON_GITHUB_TOKEN
})
if (process.argv.length < 3) {
console.log('Usage: find-release version')
process.exit(1)
}
const version = process.argv[2]
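// look up the GitHub release whose tag matches the requested version and print its id and draft status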
async function findRelease () {
const releases = await octokit.repos.listReleases({
owner: 'electron',
repo: version.indexOf('nightly') > 0 ? 'nightlies' : 'electron'
})
const targetRelease = releases.data.find(release => release.tag_name === version)
let returnObject = {}
if (targetRelease) {
returnObject = {
id: targetRelease.id,
draft: targetRelease.draft,
exists: true
}
} else {
returnObject = {
exists: false,
draft: false
}
}
console.log(JSON.stringify(returnObject))
}
findRelease()


@@ -0,0 +1,47 @@
#!/usr/bin/env python
# Download individual checksum files for Electron zip files from S3,
# concatenate them, and upload to GitHub.
from __future__ import print_function
import argparse
import os
import sys
sys.path.append(os.path.dirname(os.path.abspath(__file__)) + "/..")
from lib.config import s3_config
from lib.util import boto_path_dirs
sys.path.extend(boto_path_dirs())
from boto.s3.connection import S3Connection
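# List the .sha256sum objects under this version's tmp prefix and print their concatenated contents.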
def main():
args = parse_args()
bucket_name, access_key, secret_key = s3_config()
s3 = S3Connection(access_key, secret_key)
bucket = s3.get_bucket(bucket_name)
if bucket is None:
print('S3 bucket "{}" does not exist!'.format(bucket_name), file=sys.stderr)
return 1
prefix = 'atom-shell/tmp/{0}/'.format(args.version)
shasums = [s3_object.get_contents_as_string().strip()
for s3_object in bucket.list(prefix, delimiter='/')
if s3_object.key.endswith('.sha256sum')]
print('\n'.join(shasums))
return 0
def parse_args():
parser = argparse.ArgumentParser(description='Upload SHASUMS files to GitHub')
parser.add_argument('-v', '--version', help='Specify the version',
required=True)
return parser.parse_args()
if __name__ == '__main__':
sys.exit(main())

script/release/notes/.gitignore vendored Normal file

@@ -0,0 +1 @@
.cache

script/release/notes/index.js Executable file

@@ -0,0 +1,186 @@
#!/usr/bin/env node
const { GitProcess } = require('dugite')
const minimist = require('minimist')
const path = require('path')
const semver = require('semver')
const { ELECTRON_DIR } = require('../../lib/utils')
const notesGenerator = require('./notes.js')
const semverify = version => version.replace(/^origin\//, '').replace('x', '0').replace(/-/g, '.')
const runGit = async (args) => {
const response = await GitProcess.exec(args, ELECTRON_DIR)
if (response.exitCode !== 0) {
throw new Error(response.stderr.trim())
}
return response.stdout.trim()
}
const tagIsSupported = tag => tag && !tag.includes('nightly') && !tag.includes('unsupported')
const tagIsBeta = tag => tag.includes('beta')
const tagIsStable = tag => tagIsSupported(tag) && !tagIsBeta(tag)
const getTagsOf = async (point) => {
return (await runGit(['tag', '--merged', point]))
.split('\n')
.map(tag => tag.trim())
.filter(tag => semver.valid(tag))
.sort(semver.compare)
}
const getTagsOnBranch = async (point) => {
const masterTags = await getTagsOf('master')
if (point === 'master') {
return masterTags
}
const masterTagsSet = new Set(masterTags)
return (await getTagsOf(point)).filter(tag => !masterTagsSet.has(tag))
}
const getBranchOf = async (point) => {
const branches = (await runGit(['branch', '-a', '--contains', point]))
.split('\n')
.map(branch => branch.trim())
.filter(branch => !!branch)
const current = branches.find(branch => branch.startsWith('* '))
return current ? current.slice(2) : branches.shift()
}
const getAllBranches = async () => {
return (await runGit(['branch', '--remote']))
.split('\n')
.map(branch => branch.trim())
.filter(branch => !!branch)
.filter(branch => branch !== 'origin/HEAD -> origin/master')
.sort()
}
const getStabilizationBranches = async () => {
return (await getAllBranches())
.filter(branch => /^origin\/\d+-\d+-x$/.test(branch))
}
const getPreviousStabilizationBranch = async (current) => {
const stabilizationBranches = (await getStabilizationBranches())
.filter(branch => branch !== current && branch !== `origin/${current}`)
if (!semver.valid(current)) {
// since we don't seem to be on a stabilization branch right now,
// pick a placeholder name that will yield the newest branch
// as a comparison point.
current = 'v999.999.999'
}
let newestMatch = null
for (const branch of stabilizationBranches) {
if (semver.gte(semverify(branch), semverify(current))) {
continue
}
if (newestMatch && semver.lte(semverify(branch), semverify(newestMatch))) {
continue
}
newestMatch = branch
}
return newestMatch
}
const getPreviousPoint = async (point) => {
const currentBranch = await getBranchOf(point)
const currentTag = (await getTagsOf(point)).filter(tag => tagIsSupported(tag)).pop()
const currentIsStable = tagIsStable(currentTag)
try {
// First see if there's an earlier tag on the same branch
// that can serve as a reference point.
let tags = (await getTagsOnBranch(`${point}^`)).filter(tag => tagIsSupported(tag))
if (currentIsStable) {
tags = tags.filter(tag => tagIsStable(tag))
}
if (tags.length) {
return tags.pop()
}
} catch (error) {
console.log('error', error)
}
// Otherwise, use the newest stable release that precedes this branch.
// To reach that you may have to walk past >1 branch, e.g. to get past
// 2-1-x which never had a stable release.
let branch = currentBranch
while (branch) {
const prevBranch = await getPreviousStabilizationBranch(branch)
const tags = (await getTagsOnBranch(prevBranch)).filter(tag => tagIsStable(tag))
if (tags.length) {
return tags.pop()
}
branch = prevBranch
}
}
async function getReleaseNotes (range, newVersion, explicitLinks) {
const rangeList = range.split('..') || ['HEAD']
const to = rangeList.pop()
const from = rangeList.pop() || (await getPreviousPoint(to))
if (!newVersion) {
newVersion = to
}
console.log(`Generating release notes between ${from} and ${to} for version ${newVersion}`)
const notes = await notesGenerator.get(from, to, newVersion)
const ret = {
text: notesGenerator.render(notes, explicitLinks)
}
if (notes.unknown.length) {
ret.warning = `You have ${notes.unknown.length} unknown release notes. Please fix them before releasing.`
}
return ret
}
async function main () {
const opts = minimist(process.argv.slice(2), {
boolean: [ 'explicit-links', 'help' ],
string: [ 'version' ]
})
opts.range = opts._.shift()
if (opts.help || !opts.range) {
const name = path.basename(process.argv[1])
console.log(`
easy usage: ${name} version
full usage: ${name} [begin..]end [--version version] [--explicit-links]
* 'begin' and 'end' are two git references -- tags, branches, etc --
from which the release notes are generated.
* if omitted, 'begin' defaults to the previous tag in end's branch.
* if omitted, 'version' defaults to 'end'. Specifying a version is
useful if you're making notes on a new version that isn't tagged yet.
* 'explicit-links' makes every note's issue, commit, or pull an MD link
For example, these invocations are equivalent:
${process.argv[1]} v4.0.1
${process.argv[1]} v4.0.0..v4.0.1 --version v4.0.1
`)
return 0
}
const notes = await getReleaseNotes(opts.range, opts.version, opts['explicit-links'])
console.log(notes.text)
if (notes.warning) {
throw new Error(notes.warning)
}
}
if (process.mainModule === module) {
main().catch((err) => {
console.error('Error Occurred:', err)
process.exit(1)
})
}
module.exports = getReleaseNotes


@@ -0,0 +1,738 @@
#!/usr/bin/env node
const childProcess = require('child_process')
const fs = require('fs')
const os = require('os')
const path = require('path')
const { GitProcess } = require('dugite')
const octokit = require('@octokit/rest')({
auth: process.env.ELECTRON_GITHUB_TOKEN
})
const semver = require('semver')
const { ELECTRON_VERSION, SRC_DIR } = require('../../lib/utils')
const MAX_FAIL_COUNT = 3
const CHECK_INTERVAL = 5000
const CACHE_DIR = path.resolve(__dirname, '.cache')
const NO_NOTES = 'No notes'
const FOLLOW_REPOS = [ 'electron/electron', 'electron/libchromiumcontent', 'electron/node' ]
const breakTypes = new Set(['breaking-change'])
const docTypes = new Set(['doc', 'docs'])
const featTypes = new Set(['feat', 'feature'])
const fixTypes = new Set(['fix'])
const otherTypes = new Set(['spec', 'build', 'test', 'chore', 'deps', 'refactor', 'tools', 'vendor', 'perf', 'style', 'ci'])
const knownTypes = new Set([...breakTypes.keys(), ...docTypes.keys(), ...featTypes.keys(), ...fixTypes.keys(), ...otherTypes.keys()])
const runGit = async (dir, args) => {
const response = await GitProcess.exec(args, dir)
if (response.exitCode !== 0) {
throw new Error(response.stderr.trim())
}
return response.stdout.trim()
}
const getCommonAncestor = async (dir, point1, point2) => {
return runGit(dir, ['merge-base', point1, point2])
}
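// record a commit's pull request, preserving the first PR seen as 'originalPr'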
const setPullRequest = (commit, owner, repo, number) => {
if (!owner || !repo || !number) {
throw new Error(JSON.stringify({ owner, repo, number }, null, 2))
}
if (!commit.originalPr) {
commit.originalPr = commit.pr
}
commit.pr = { owner, repo, number }
if (!commit.originalPr) {
commit.originalPr = commit.pr
}
}
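// scan a PR's comments for release notes persisted by the release-clerk bot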
const getNoteFromClerk = async (number, owner, repo) => {
const comments = await getComments(number, owner, repo)
if (!comments || !comments.data) return
const CLERK_LOGIN = 'release-clerk[bot]'
const CLERK_NO_NOTES = '**No Release Notes**'
const PERSIST_LEAD = '**Release Notes Persisted**\n\n'
const QUOTE_LEAD = '> '
for (const comment of comments.data.reverse()) {
if (comment.user.login !== CLERK_LOGIN) {
continue
}
if (comment.body === CLERK_NO_NOTES) {
return NO_NOTES
}
if (comment.body.startsWith(PERSIST_LEAD)) {
return comment.body
.slice(PERSIST_LEAD.length).trim() // remove PERSIST_LEAD
.split(/\r?\n/) // break into lines
.map(line => line.trim())
.filter(line => line.startsWith(QUOTE_LEAD)) // notes are quoted
.map(line => line.slice(QUOTE_LEAD.length)) // unquote the lines
.join(' ') // join the note lines
.trim()
}
}
}
// copied from https://github.com/electron/clerk/blob/master/src/index.ts#L4-L13
const OMIT_FROM_RELEASE_NOTES_KEYS = [
'no-notes',
'no notes',
'no_notes',
'none',
'no',
'nothing',
'empty',
'blank'
]
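// extract a 'Notes:' line (or a '#### Release Notes' section) from a commit or PR body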
const getNoteFromBody = body => {
if (!body) {
return null
}
const NOTE_PREFIX = 'Notes: '
const NOTE_HEADER = '#### Release Notes'
let note = body
.split(/\r?\n\r?\n/) // split into paragraphs
.map(paragraph => paragraph.trim())
.map(paragraph => paragraph.startsWith(NOTE_HEADER) ? paragraph.slice(NOTE_HEADER.length).trim() : paragraph)
.find(paragraph => paragraph.startsWith(NOTE_PREFIX))
if (note) {
note = note
.slice(NOTE_PREFIX.length)
.replace(/<!--.*-->/, '') // '<!-- change summary here-->'
.replace(/\r?\n/g, ' ') // remove newlines
.trim()
}
if (note && OMIT_FROM_RELEASE_NOTES_KEYS.includes(note.toLowerCase())) {
return NO_NOTES
}
return note
}
/**
* Looks for our project's conventions in the commit message:
*
* 'semantic: some description' -- sets type, subject
* 'some description (#99999)' -- sets subject, pr
* 'Fixes #3333' -- sets issueNumber
* 'Merge pull request #99999 from ${branchname}' -- sets pr
* 'This reverts commit ${sha}' -- sets revertHash
* line starting with 'BREAKING CHANGE' in body -- sets breakingChange
* 'Backport of #99999' -- sets pr
*/
const parseCommitMessage = (commitMessage, owner, repo, commit = {}) => {
// split commitMessage into subject & body
let subject = commitMessage
let body = ''
const pos = subject.indexOf('\n')
if (pos !== -1) {
body = subject.slice(pos).trim()
subject = subject.slice(0, pos).trim()
}
if (!commit.originalSubject) {
commit.originalSubject = subject
}
if (body) {
commit.body = body
const note = getNoteFromBody(body)
if (note) { commit.note = note }
}
// if the subject ends in ' (#dddd)', treat it as a pull request id
let match
if ((match = subject.match(/^(.*)\s\(#(\d+)\)$/))) {
setPullRequest(commit, owner, repo, parseInt(match[2]))
subject = match[1]
}
// if the subject begins with 'word:', treat it as a semantic commit
if ((match = subject.match(/^(\w+):\s(.*)$/))) {
const type = match[1].toLocaleLowerCase()
if (knownTypes.has(type)) {
commit.type = type
subject = match[2]
}
}
// Check for GitHub commit message that indicates a PR
if ((match = subject.match(/^Merge pull request #(\d+) from (.*)$/))) {
setPullRequest(commit, owner, repo, parseInt(match[1]))
commit.pr.branch = match[2].trim()
}
// Check for a trop comment that indicates a PR
if ((match = commitMessage.match(/\bBackport of #(\d+)\b/))) {
setPullRequest(commit, owner, repo, parseInt(match[1]))
}
// https://help.github.com/articles/closing-issues-using-keywords/
if ((match = subject.match(/\b(?:close|closes|closed|fix|fixes|fixed|resolve|resolves|resolved|for)\s#(\d+)\b/))) {
commit.issueNumber = parseInt(match[1])
if (!commit.type) {
commit.type = 'fix'
}
}
// look for 'fixes' in markdown; e.g. 'Fixes [#8952](https://github.com/electron/electron/issues/8952)'
if (!commit.issueNumber && ((match = commitMessage.match(/Fixes \[#(\d+)\]\(https:\/\/github.com\/(\w+)\/(\w+)\/issues\/(\d+)\)/)))) {
commit.issueNumber = parseInt(match[1])
if (commit.pr && commit.pr.number === commit.issueNumber) {
commit.pr = null
}
if (commit.originalPr && commit.originalPr.number === commit.issueNumber) {
commit.originalPr = null
}
if (!commit.type) {
commit.type = 'fix'
}
}
// https://www.conventionalcommits.org/en
if (commitMessage
.split(/\r?\n/) // split into lines
.map(line => line.trim())
.some(line => line.startsWith('BREAKING CHANGE'))) {
commit.type = 'breaking-change'
}
// Check for a reversion commit
if ((match = body.match(/This reverts commit ([a-f0-9]{40})\./))) {
commit.revertHash = match[1]
}
// Edge case: manual backport where commit has `owner/repo#pull` notation
if (commitMessage.toLowerCase().includes('backport') &&
((match = commitMessage.match(/\b(\w+)\/(\w+)#(\d+)\b/)))) {
const [ , owner, repo, number ] = match
if (FOLLOW_REPOS.includes(`${owner}/${repo}`)) {
setPullRequest(commit, owner, repo, number)
}
}
// Edge case: manual backport where commit has a link to the backport PR
if (commitMessage.includes('ackport') &&
((match = commitMessage.match(/https:\/\/github\.com\/(\w+)\/(\w+)\/pull\/(\d+)/)))) {
const [ , owner, repo, number ] = match
if (FOLLOW_REPOS.includes(`${owner}/${repo}`)) {
setPullRequest(commit, owner, repo, number)
}
}
// Legacy commits: pre-semantic commits
if (!commit.type || commit.type === 'chore') {
const commitMessageLC = commitMessage.toLocaleLowerCase()
if ((match = commitMessageLC.match(/\bchore\((\w+)\):/))) {
// example: 'Chore(docs): description'
commit.type = knownTypes.has(match[1]) ? match[1] : 'chore'
} else if (commitMessageLC.match(/\b(?:fix|fixes|fixed)/)) {
// example: 'fix a bug'
commit.type = 'fix'
} else if (commitMessageLC.match(/\[(?:docs|doc)\]/)) {
// example: '[docs]'
commit.type = 'doc'
}
}
commit.subject = subject.trim()
return commit
}
const getLocalCommitHashes = async (dir, ref) => {
const args = ['log', '-z', `--format=%H`, ref]
return (await runGit(dir, args)).split(`\0`).map(hash => hash.trim())
}
/*
* possible properties:
* breakingChange, email, hash, issueNumber, originalSubject, parentHashes,
* pr { owner, repo, number, branch }, revertHash, subject, type
*/
const getLocalCommitDetails = async (module, point1, point2) => {
const { owner, repo, dir } = module
const fieldSep = '||'
const format = ['%H', '%P', '%aE', '%B'].join(fieldSep)
const args = ['log', '-z', '--cherry-pick', '--right-only', '--first-parent', `--format=${format}`, `${point1}..${point2}`]
const commits = (await runGit(dir, args)).split(`\0`).map(field => field.trim())
const details = []
for (const commit of commits) {
if (!commit) {
continue
}
const [ hash, parentHashes, email, commitMessage ] = commit.split(fieldSep, 4).map(field => field.trim())
details.push(parseCommitMessage(commitMessage, owner, repo, {
email,
hash,
owner,
repo,
parentHashes: parentHashes.split()
}))
}
return details
}
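// cache the result of an expensive lookup (e.g. a GitHub API call) as JSON in CACHE_DIR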
const checkCache = async (name, operation) => {
const filename = path.resolve(CACHE_DIR, name)
if (fs.existsSync(filename)) {
return JSON.parse(fs.readFileSync(filename, 'utf8'))
}
const response = await operation()
if (response) {
fs.writeFileSync(filename, JSON.stringify(response))
}
return response
}
// helper function to add some resiliency to volatile GH api endpoints
async function runRetryable (fn, maxRetries) {
let lastError
for (let i = 0; i < maxRetries; i++) {
try {
return await fn()
} catch (error) {
await new Promise((resolve, reject) => setTimeout(resolve, CHECK_INTERVAL))
lastError = error
}
}
// Silently eat 404s.
if (lastError.status !== 404) throw lastError
}
const getPullRequest = async (number, owner, repo) => {
const name = `${owner}-${repo}-pull-${number}`
return checkCache(name, async () => {
return runRetryable(() => octokit.pulls.get({
number,
owner,
repo
}), MAX_FAIL_COUNT)
})
}
const getComments = async (number, owner, repo) => {
const name = `${owner}-${repo}-pull-${number}-comments`
return checkCache(name, async () => {
return runRetryable(() => octokit.issues.listComments({
number,
owner,
repo,
per_page: 100
}), MAX_FAIL_COUNT)
})
}
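// add a repo's commits between the common ancestor and 'to' to the pool, marking everything already in 'from' as processed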
const addRepoToPool = async (pool, repo, from, to) => {
const commonAncestor = await getCommonAncestor(repo.dir, from, to)
const oldHashes = await getLocalCommitHashes(repo.dir, from)
oldHashes.forEach(hash => { pool.processedHashes.add(hash) })
const commits = await getLocalCommitDetails(repo, commonAncestor, to)
pool.commits.push(...commits)
}
/***
**** Other Repos
***/
// other repos - gyp
const getGypSubmoduleRef = async (dir, point) => {
// example: '160000 commit 028b0af83076cec898f4ebce208b7fadb715656e libchromiumcontent'
const response = await runGit(
path.dirname(dir),
['ls-tree', '-t', point, path.basename(dir)]
)
const line = response.split('\n').filter(line => line.startsWith('160000')).shift()
const tokens = line ? line.split(/\s/).map(token => token.trim()) : null
const ref = tokens && tokens.length >= 3 ? tokens[2] : null
return ref
}
const getDependencyCommitsGyp = async (pool, fromRef, toRef) => {
const commits = []
const repos = [{
owner: 'electron',
repo: 'libchromiumcontent',
dir: path.resolve(ELECTRON_VERSION, 'vendor', 'libchromiumcontent')
}, {
owner: 'electron',
repo: 'node',
dir: path.resolve(ELECTRON_VERSION, 'vendor', 'node')
}]
for (const repo of repos) {
const from = await getGypSubmoduleRef(repo.dir, fromRef)
const to = await getGypSubmoduleRef(repo.dir, toRef)
await addRepoToPool(pool, repo, from, to)
}
return commits
}
// other repos - gn
const getDepsVariable = async (ref, key) => {
// get a copy of that reference point's DEPS file
const deps = await runGit(ELECTRON_VERSION, ['show', `${ref}:DEPS`])
const filename = path.resolve(os.tmpdir(), 'DEPS')
fs.writeFileSync(filename, deps)
// query the DEPS file
const response = childProcess.spawnSync(
'gclient',
['getdep', '--deps-file', filename, '--var', key],
{ encoding: 'utf8' }
)
// cleanup
fs.unlinkSync(filename)
return response.stdout.trim()
}
const getDependencyCommitsGN = async (pool, fromRef, toRef) => {
const repos = [{ // just node
owner: 'electron',
repo: 'node',
dir: path.resolve(SRC_DIR, 'third_party', 'electron_node'),
deps_variable_name: 'node_version'
}]
for (const repo of repos) {
// the 'DEPS' file holds the dependency reference point
const key = repo.deps_variable_name
const from = await getDepsVariable(fromRef, key)
const to = await getDepsVariable(toRef, key)
await addRepoToPool(pool, repo, from, to)
}
}
// other repos - controller
const getDependencyCommits = async (pool, from, to) => {
const filename = path.resolve(ELECTRON_VERSION, 'vendor', 'libchromiumcontent')
const useGyp = fs.existsSync(filename)
return useGyp
? getDependencyCommitsGyp(pool, from, to)
: getDependencyCommitsGN(pool, from, to)
}
// Changes are interesting if they make a change relative to a previous
// release in the same series. For example if you fix a Y.0.0 bug, that
// should be included in the Y.0.1 notes even if it's also tropped back
// to X.0.1.
//
// The phrase 'previous release' is important: if this is the first
// prerelease or first stable release in a series, we omit previous
// branches' changes. Otherwise we will have an overwhelmingly long
// list of mostly-irrelevant changes.
const shouldIncludeMultibranchChanges = (version) => {
let show = true
if (semver.valid(version)) {
const prerelease = semver.prerelease(version)
show = prerelease
? parseInt(prerelease.pop()) > 1
: semver.patch(version) > 0
}
return show
}
/***
**** Main
***/
const getNotes = async (fromRef, toRef, newVersion) => {
if (!fs.existsSync(CACHE_DIR)) {
fs.mkdirSync(CACHE_DIR)
}
const pool = {
processedHashes: new Set(),
commits: []
}
// get the electron/electron commits
const electron = { owner: 'electron', repo: 'electron', dir: ELECTRON_VERSION }
await addRepoToPool(pool, electron, fromRef, toRef)
// Don't include submodules if comparing across major versions;
// there's just too much churn otherwise.
const includeDeps = semver.valid(fromRef) &&
semver.valid(toRef) &&
semver.major(fromRef) === semver.major(toRef)
if (includeDeps) {
await getDependencyCommits(pool, fromRef, toRef)
}
// remove any old commits
pool.commits = pool.commits.filter(commit => !pool.processedHashes.has(commit.hash))
// if a commit _and_ revert occurred in the unprocessed set, skip them both
for (const commit of pool.commits) {
const revertHash = commit.revertHash
if (!revertHash) {
continue
}
const revert = pool.commits.find(commit => commit.hash === revertHash)
if (!revert) {
continue
}
commit.note = NO_NOTES
revert.note = NO_NOTES
pool.processedHashes.add(commit.hash)
pool.processedHashes.add(revertHash)
}
// scrape PRs for release note 'Notes:' comments
for (const commit of pool.commits) {
let pr = commit.pr
let prSubject
while (pr && !commit.note) {
const note = await getNoteFromClerk(pr.number, pr.owner, pr.repo)
if (note) {
commit.note = note
}
// if we already have all the data we need, stop scraping the PRs
if (commit.note && commit.type && prSubject) {
break
}
const prData = await getPullRequest(pr.number, pr.owner, pr.repo)
if (!prData || !prData.data) {
break
}
// try to pull a release note from the pull comment
const prParsed = parseCommitMessage(`${prData.data.title}\n\n${prData.data.body}`, pr.owner, pr.repo)
if (!commit.note) {
commit.note = prParsed.note
}
if (!commit.type || prParsed.type === 'breaking-change') {
commit.type = prParsed.type
}
prSubject = prSubject || prParsed.subject
pr = prParsed.pr && (prParsed.pr.number !== pr.number) ? prParsed.pr : null
}
// if we still don't have a note, it's because someone missed a 'Notes:'
// comment in a PR somewhere... use the PR subject as a fallback.
commit.note = commit.note || prSubject
}
// remove non-user-facing commits
pool.commits = pool.commits
.filter(commit => commit.note !== NO_NOTES)
.filter(commit => !((commit.note || commit.subject).match(/^[Bb]ump v\d+\.\d+\.\d+/)))
if (!shouldIncludeMultibranchChanges(newVersion)) {
// load all the prDatas
await Promise.all(
pool.commits.map(commit => new Promise(async (resolve) => {
const { pr } = commit
if (typeof pr === 'object') {
const prData = await getPullRequest(pr.number, pr.owner, pr.repo)
if (prData) {
commit.prData = prData
}
}
resolve()
}))
)
// remove items that already landed in a previous major/minor series
pool.commits = pool.commits
.filter(commit => {
if (!commit.prData) {
return true
}
const reducer = (accumulator, current) => {
if (!semver.valid(accumulator)) { return current }
if (!semver.valid(current)) { return accumulator }
return semver.lt(accumulator, current) ? accumulator : current
}
const earliestRelease = commit.prData.data.labels
.map(label => label.name.match(/merged\/(\d+)-(\d+)-x/))
.filter(label => !!label)
.map(label => `${label[1]}.${label[2]}.0`)
.reduce(reducer, null)
if (!semver.valid(earliestRelease)) {
return true
}
return semver.diff(earliestRelease, newVersion).includes('patch')
})
}
const notes = {
breaking: [],
docs: [],
feat: [],
fix: [],
other: [],
unknown: [],
name: newVersion
}
pool.commits.forEach(commit => {
const str = commit.type
if (!str) {
notes.unknown.push(commit)
} else if (breakTypes.has(str)) {
notes.breaking.push(commit)
} else if (docTypes.has(str)) {
notes.docs.push(commit)
} else if (featTypes.has(str)) {
notes.feat.push(commit)
} else if (fixTypes.has(str)) {
notes.fix.push(commit)
} else if (otherTypes.has(str)) {
notes.other.push(commit)
} else {
notes.unknown.push(commit)
}
})
return notes
}
/***
**** Render
***/
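// link to the commit's original PR if it has one, otherwise to the commit itself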
const renderLink = (commit, explicitLinks) => {
let link
const pr = commit.originalPr
if (pr) {
const { owner, repo, number } = pr
const url = `https://github.com/${owner}/${repo}/pull/${number}`
const text = owner === 'electron' && repo === 'electron'
? `#${number}`
: `${owner}/${repo}#${number}`
link = explicitLinks ? `[${text}](${url})` : text
} else {
const { owner, repo, hash } = commit
const url = `https://github.com/${owner}/${repo}/commit/${hash}`
const text = owner === 'electron' && repo === 'electron'
? `${hash.slice(0, 8)}`
: `${owner}/${repo}@${hash.slice(0, 8)}`
link = explicitLinks ? `[${text}](${url})` : text
}
return link
}
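// tidy up a note: capitalize it, end it with a period, and normalize leading verbs to past tense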
const renderCommit = (commit, explicitLinks) => {
// clean up the note
let note = commit.note || commit.subject
note = note.trim()
if (note.length !== 0) {
note = note[0].toUpperCase() + note.substr(1)
if (!note.endsWith('.')) {
note = note + '.'
}
const commonVerbs = {
'Added': [ 'Add' ],
'Backported': [ 'Backport' ],
'Cleaned': [ 'Clean' ],
'Disabled': [ 'Disable' ],
'Ensured': [ 'Ensure' ],
'Exported': [ 'Export' ],
'Fixed': [ 'Fix', 'Fixes' ],
'Handled': [ 'Handle' ],
'Improved': [ 'Improve' ],
'Made': [ 'Make' ],
'Removed': [ 'Remove' ],
'Repaired': [ 'Repair' ],
'Reverted': [ 'Revert' ],
'Stopped': [ 'Stop' ],
'Updated': [ 'Update' ],
'Upgraded': [ 'Upgrade' ]
}
for (const [key, values] of Object.entries(commonVerbs)) {
for (const value of values) {
const start = `${value} `
if (note.startsWith(start)) {
note = `${key} ${note.slice(start.length)}`
}
}
}
}
const link = renderLink(commit, explicitLinks)
return { note, link }
}
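// group the pooled notes by type and render them as a markdown document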
const renderNotes = (notes, explicitLinks) => {
const rendered = [ `# Release Notes for ${notes.name}\n\n` ]
const renderSection = (title, commits) => {
if (commits.length === 0) {
return
}
const notes = new Map()
for (const note of commits.map(commit => renderCommit(commit, explicitLinks))) {
if (!notes.has(note.note)) {
notes.set(note.note, [note.link])
} else {
notes.get(note.note).push(note.link)
}
}
rendered.push(`## ${title}\n\n`)
const lines = []
notes.forEach((links, key) => lines.push(` * ${key} ${links.map(link => link.toString()).sort().join(', ')}\n`))
rendered.push(...lines.sort(), '\n')
}
renderSection('Breaking Changes', notes.breaking)
renderSection('Features', notes.feat)
renderSection('Fixes', notes.fix)
renderSection('Other Changes', notes.other)
if (notes.docs.length) {
const docs = notes.docs.map(commit => renderLink(commit, explicitLinks)).sort()
rendered.push('## Documentation\n\n', ` * Documentation changes: ${docs.join(', ')}\n`, '\n')
}
renderSection('Unknown', notes.unknown)
return rendered.join('')
}
/***
**** Module
***/
module.exports = {
get: getNotes,
render: renderNotes
}

script/release/prepare-release.js Executable file

@@ -0,0 +1,214 @@
#!/usr/bin/env node
if (!process.env.CI) require('dotenv-safe').load()
const args = require('minimist')(process.argv.slice(2), {
boolean: ['automaticRelease', 'notesOnly', 'stable']
})
const ciReleaseBuild = require('./ci-release-build')
const octokit = require('@octokit/rest')({
auth: process.env.ELECTRON_GITHUB_TOKEN
})
const { execSync } = require('child_process')
const { GitProcess } = require('dugite')
const path = require('path')
const readline = require('readline')
const releaseNotesGenerator = require('./notes/index.js')
const { getCurrentBranch, ELECTRON_DIR } = require('../lib/utils.js')
const bumpType = args._[0]
const targetRepo = bumpType === 'nightly' ? 'nightlies' : 'electron'
require('colors')
const pass = '\u2713'.green
const fail = '\u2717'.red
if (!bumpType && !args.notesOnly) {
console.log(`Usage: prepare-release [stable | beta | nightly]` +
` (--stable) (--notesOnly) (--automaticRelease) (--branch)`)
process.exit(1)
}
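// run version-bumper.js to compute (and, unless dryRun, apply) the new version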
async function getNewVersion (dryRun) {
if (!dryRun) {
console.log(`Bumping for new "${bumpType}" version.`)
}
const bumpScript = path.join(__dirname, 'version-bumper.js')
const scriptArgs = ['node', bumpScript, `--bump=${bumpType}`]
if (dryRun) scriptArgs.push('--dryRun')
try {
let bumpVersion = execSync(scriptArgs.join(' '), { encoding: 'UTF-8' })
bumpVersion = bumpVersion.substr(bumpVersion.indexOf(':') + 1).trim()
const newVersion = `v${bumpVersion}`
if (!dryRun) {
console.log(`${pass} Successfully bumped version to ${newVersion}`)
}
return newVersion
} catch (err) {
console.log(`${fail} Could not bump version, error was:`, err)
throw err
}
}
async function getReleaseNotes (currentBranch, newVersion) {
if (bumpType === 'nightly') {
return { text: 'Nightlies do not get release notes, please compare tags for info.' }
}
console.log(`Generating release notes for ${currentBranch}.`)
const releaseNotes = await releaseNotesGenerator(currentBranch, newVersion)
if (releaseNotes.warning) {
console.warn(releaseNotes.warning)
}
return releaseNotes
}
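// tag the new version and create a draft GitHub release with the generated notes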
async function createRelease (branchToTarget, isBeta) {
const newVersion = await getNewVersion()
const releaseNotes = await getReleaseNotes(branchToTarget, newVersion)
await tagRelease(newVersion)
console.log(`Checking for existing draft release.`)
const releases = await octokit.repos.listReleases({
owner: 'electron',
repo: targetRepo
}).catch(err => {
console.log(`${fail} Could not get releases. Error was: `, err)
})
const drafts = releases.data.filter(release => release.draft &&
release.tag_name === newVersion)
if (drafts.length > 0) {
console.log(`${fail} Aborting because draft release for
${drafts[0].tag_name} already exists.`)
process.exit(1)
}
console.log(`${pass} A draft release does not exist; creating one.`)
let releaseBody
let releaseIsPrelease = false
if (isBeta) {
if (newVersion.indexOf('nightly') > 0) {
releaseBody = `Note: This is a nightly release. Please file new issues ` +
`for any bugs you find in it.\n \n This release is published to npm ` +
`under the nightly tag and can be installed via npm install electron@nightly, ` +
`or npm i electron@${newVersion.substr(1)}.\n \n ${releaseNotes.text}`
} else {
releaseBody = `Note: This is a beta release. Please file new issues ` +
`for any bugs you find in it.\n \n This release is published to npm ` +
`under the beta tag and can be installed via npm install electron@beta, ` +
`or npm i electron@${newVersion.substr(1)}.\n \n ${releaseNotes.text}`
}
releaseIsPrelease = true
} else {
releaseBody = releaseNotes.text
}
const release = await octokit.repos.createRelease({
owner: 'electron',
repo: targetRepo,
tag_name: newVersion,
draft: true,
name: `electron ${newVersion}`,
body: releaseBody,
prerelease: releaseIsPrelease,
target_commitish: newVersion.indexOf('nightly') !== -1 ? 'master' : branchToTarget
}).catch(err => {
console.log(`${fail} Error creating new release: `, err)
process.exit(1)
})
console.log(`Release has been created with id: ${release.data.id}.`)
console.log(`${pass} Draft release for ${newVersion} successful.`)
}
async function pushRelease (branch) {
const pushDetails = await GitProcess.exec(['push', 'origin', `HEAD:${branch}`, '--follow-tags'], ELECTRON_DIR)
if (pushDetails.exitCode === 0) {
console.log(`${pass} Successfully pushed the release. Wait for ` +
`release builds to finish before running "npm run release".`)
} else {
console.log(`${fail} Error pushing the release: ${pushDetails.stderr}`)
process.exit(1)
}
}
async function runReleaseBuilds (branch) {
await ciReleaseBuild(branch, {
ghRelease: true,
automaticRelease: args.automaticRelease
})
}
async function tagRelease (version) {
console.log(`Tagging release ${version}.`)
const checkoutDetails = await GitProcess.exec([ 'tag', '-a', '-m', version, version ], ELECTRON_DIR)
if (checkoutDetails.exitCode === 0) {
console.log(`${pass} Successfully tagged ${version}.`)
} else {
console.log(`${fail} Error tagging ${version}: ` +
`${checkoutDetails.stderr}`)
process.exit(1)
}
}
async function verifyNewVersion () {
const newVersion = await getNewVersion(true)
let response
if (args.automaticRelease) {
response = 'y'
} else {
response = await promptForVersion(newVersion)
}
if (response.match(/^y/i)) {
console.log(`${pass} Starting release of ${newVersion}`)
} else {
console.log(`${fail} Aborting release of ${newVersion}`)
process.exit()
}
}
async function promptForVersion (version) {
return new Promise((resolve, reject) => {
const rl = readline.createInterface({
input: process.stdin,
output: process.stdout
})
rl.question(`Do you want to create the release ${version.green} (y/N)? `, (answer) => {
rl.close()
resolve(answer)
})
})
}
// function to determine if there have been commits to master since the last release
async function changesToRelease () {
const lastCommitWasRelease = new RegExp(`^Bump v[0-9.]*(-beta[0-9.]*)?(-nightly[0-9.]*)?$`, 'g')
const lastCommit = await GitProcess.exec(['log', '-n', '1', `--pretty=format:'%s'`], ELECTRON_DIR)
return !lastCommitWasRelease.test(lastCommit.stdout)
}
async function prepareRelease (isBeta, notesOnly) {
if (args.dryRun) {
const newVersion = await getNewVersion(true)
console.log(newVersion)
} else {
const currentBranch = (args.branch) ? args.branch : await getCurrentBranch(ELECTRON_DIR)
if (notesOnly) {
const newVersion = await getNewVersion(true)
const releaseNotes = await getReleaseNotes(currentBranch, newVersion)
console.log(`Draft release notes are: \n${releaseNotes.text}`)
} else {
const changes = await changesToRelease(currentBranch)
if (changes) {
await verifyNewVersion()
await createRelease(currentBranch, isBeta)
await pushRelease(currentBranch)
await runReleaseBuilds(currentBranch)
} else {
console.log(`There are no new changes to this branch since the last release, aborting release.`)
process.exit(1)
}
}
}
}
prepareRelease(!args.stable, args.notesOnly)


@@ -0,0 +1,174 @@
const temp = require('temp')
const fs = require('fs')
const path = require('path')
const childProcess = require('child_process')
const { getCurrentBranch, ELECTRON_DIR } = require('../lib/utils')
const request = require('request')
const semver = require('semver')
const rootPackageJson = require('../../package.json')
const octokit = require('@octokit/rest')({
headers: { 'User-Agent': 'electron-npm-publisher' }
})
if (!process.env.ELECTRON_NPM_OTP) {
console.error('Please set ELECTRON_NPM_OTP')
process.exit(1)
}
let tempDir
temp.track() // track and cleanup files at exit
const files = [
'cli.js',
'index.js',
'install.js',
'package.json',
'README.md',
'LICENSE'
]
const jsonFields = [
'name',
'version',
'repository',
'description',
'license',
'author',
'keywords'
]
let npmTag = ''
new Promise((resolve, reject) => {
temp.mkdir('electron-npm', (err, dirPath) => {
if (err) {
reject(err)
} else {
resolve(dirPath)
}
})
})
.then((dirPath) => {
tempDir = dirPath
// copy files from `/npm` to temp directory
files.forEach((name) => {
const noThirdSegment = name === 'README.md' || name === 'LICENSE'
fs.writeFileSync(
path.join(tempDir, name),
fs.readFileSync(path.join(ELECTRON_DIR, noThirdSegment ? '' : 'npm', name))
)
})
// copy from root package.json to temp/package.json
const packageJson = require(path.join(tempDir, 'package.json'))
jsonFields.forEach((fieldName) => {
packageJson[fieldName] = rootPackageJson[fieldName]
})
fs.writeFileSync(
path.join(tempDir, 'package.json'),
JSON.stringify(packageJson, null, 2)
)
return octokit.repos.listReleases({
owner: 'electron',
repo: rootPackageJson.version.indexOf('nightly') > 0 ? 'nightlies' : 'electron'
})
})
.then((releases) => {
// download electron.d.ts from release
const release = releases.data.find(
(release) => release.tag_name === `v${rootPackageJson.version}`
)
if (!release) {
throw new Error(`cannot find release with tag v${rootPackageJson.version}`)
}
return release
})
.then((release) => {
const tsdAsset = release.assets.find((asset) => asset.name === 'electron.d.ts')
if (!tsdAsset) {
throw new Error(`cannot find electron.d.ts from v${rootPackageJson.version} release assets`)
}
return new Promise((resolve, reject) => {
request.get({
url: tsdAsset.url,
headers: {
'accept': 'application/octet-stream',
'user-agent': 'electron-npm-publisher'
}
}, (err, response, body) => {
if (err || response.statusCode !== 200) {
reject(err || new Error('Cannot download electron.d.ts'))
} else {
fs.writeFileSync(path.join(tempDir, 'electron.d.ts'), body)
resolve(release)
}
})
})
})
.then(async (release) => {
const currentBranch = await getCurrentBranch()
if (release.tag_name.indexOf('nightly') > 0) {
if (currentBranch === 'master') {
// Nightlies get published to their own module, so master nightlies should be tagged as latest
npmTag = 'latest'
} else {
npmTag = `nightly-${currentBranch}`
}
const currentJson = JSON.parse(fs.readFileSync(path.join(tempDir, 'package.json'), 'utf8'))
currentJson.name = 'electron-nightly'
rootPackageJson.name = 'electron-nightly'
fs.writeFileSync(
path.join(tempDir, 'package.json'),
JSON.stringify(currentJson, null, 2)
)
} else {
if (currentBranch === 'master') {
// This should never happen, master releases should be nightly releases
// this is here just-in-case
npmTag = 'master'
} else if (!release.prerelease) {
// Tag the release with a `2-0-x` style tag
npmTag = currentBranch
} else {
// Tag the release with a `beta-3-0-x` style tag
npmTag = `beta-${currentBranch}`
}
}
})
.then(() => childProcess.execSync('npm pack', { cwd: tempDir }))
.then(() => {
// test that the package can install electron prebuilt from github release
const tarballPath = path.join(tempDir, `${rootPackageJson.name}-${rootPackageJson.version}.tgz`)
return new Promise((resolve, reject) => {
childProcess.execSync(`npm install ${tarballPath} --force --silent`, {
env: Object.assign({}, process.env, { electron_config_cache: tempDir }),
cwd: tempDir
})
resolve(tarballPath)
})
})
.then((tarballPath) => childProcess.execSync(`npm publish ${tarballPath} --tag ${npmTag} --otp=${process.env.ELECTRON_NPM_OTP}`))
.then(() => {
const currentTags = JSON.parse(childProcess.execSync('npm show electron dist-tags --json').toString())
const localVersion = rootPackageJson.version
const parsedLocalVersion = semver.parse(localVersion)
if (rootPackageJson.name === 'electron') {
// We should only manually add dist tags for non-nightly releases where the package name is still
// "electron"
if (parsedLocalVersion.prerelease.length === 0 &&
semver.gt(localVersion, currentTags.latest)) {
childProcess.execSync(`npm dist-tag add electron@${localVersion} latest --otp=${process.env.ELECTRON_NPM_OTP}`)
}
if (parsedLocalVersion.prerelease[0] === 'beta' &&
semver.gt(localVersion, currentTags.beta)) {
childProcess.execSync(`npm dist-tag add electron@${localVersion} beta --otp=${process.env.ELECTRON_NPM_OTP}`)
}
}
})
.catch((err) => {
console.error(`Error: ${err}`)
process.exit(1)
})


@@ -0,0 +1,114 @@
#!/usr/bin/env node
if (!process.env.CI) require('dotenv-safe').load()
require('colors')
const pass = '\u2713'.green
const fail = '\u2717'.red
const args = require('minimist')(process.argv.slice(2), {
string: ['tag', 'releaseID'],
default: { releaseID: '' }
})
const { execSync } = require('child_process')
const { GitProcess } = require('dugite')
const { getCurrentBranch, ELECTRON_DIR } = require('../lib/utils.js')
const octokit = require('@octokit/rest')({
auth: process.env.ELECTRON_GITHUB_TOKEN
})
const path = require('path')
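// find the most recent "Bump <tag>" commit so it can be reverted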
function getLastBumpCommit (tag) {
const data = execSync(`git log -n1 --grep "Bump ${tag}" --format='format:{"hash": "%H", "message": "%s"}'`).toString()
return JSON.parse(data)
}
async function revertBumpCommit (tag) {
const branch = await getCurrentBranch()
const commitToRevert = getLastBumpCommit(tag).hash
await GitProcess.exec(['revert', commitToRevert], ELECTRON_DIR)
const pushDetails = await GitProcess.exec(['push', 'origin', `HEAD:${branch}`, '--follow-tags'], ELECTRON_DIR)
if (pushDetails.exitCode === 0) {
console.log(`${pass} successfully reverted release commit.`)
} else {
const error = GitProcess.parseError(pushDetails.stderr)
console.error(`${fail} could not push release commit: `, error)
process.exit(1)
}
}
async function deleteDraft (releaseId, targetRepo) {
try {
const result = await octokit.repos.getRelease({
owner: 'electron',
repo: targetRepo,
release_id: parseInt(releaseId, 10)
})
console.log(result)
if (!result.data.draft) {
console.log(`${fail} published releases cannot be deleted.`)
return false
} else {
await octokit.repos.deleteRelease({
owner: 'electron',
repo: targetRepo,
release_id: result.data.id
})
}
console.log(`${pass} successfully deleted draft with id ${releaseId} from ${targetRepo}`)
return true
} catch (err) {
console.error(`${fail} couldn't delete draft with id ${releaseId} from ${targetRepo}: `, err)
return false
}
}
async function deleteTag (tag, targetRepo) {
try {
await octokit.git.deleteRef({
owner: 'electron',
repo: targetRepo,
ref: `tags/${tag}`
})
console.log(`${pass} successfully deleted tag ${tag} from ${targetRepo}`)
} catch (err) {
console.log(`${fail} couldn't delete tag ${tag} from ${targetRepo}: `, err)
}
}
async function cleanReleaseArtifacts () {
const releaseId = args.releaseID.length > 0 ? args.releaseID : null
const isNightly = args.tag.includes('nightly')
// try to revert commit regardless of tag and draft deletion status
await revertBumpCommit(args.tag)
if (releaseId) {
if (isNightly) {
const deletedNightlyDraft = await deleteDraft(releaseId, 'nightlies')
// don't delete tag unless draft deleted successfully
if (deletedNightlyDraft) {
await Promise.all([
deleteTag(args.tag, 'electron'),
deleteTag(args.tag, 'nightlies')
])
}
} else {
const deletedElectronDraft = await deleteDraft(releaseId, 'electron')
// don't delete tag unless draft deleted successfully
if (deletedElectronDraft) {
await deleteTag(args.tag, 'electron')
}
}
} else {
await Promise.all([
deleteTag(args.tag, 'electron'),
deleteTag(args.tag, 'nightlies')
])
}
console.log(`${pass} failed release artifact cleanup complete`)
}
cleanReleaseArtifacts()

script/release/release.js Executable file

@@ -0,0 +1,450 @@
#!/usr/bin/env node
if (!process.env.CI) require('dotenv-safe').load()
require('colors')
const args = require('minimist')(process.argv.slice(2), {
boolean: [
'validateRelease',
'skipVersionCheck',
'automaticRelease',
'verboseNugget'
],
default: { 'verboseNugget': false }
})
const fs = require('fs')
const { execSync } = require('child_process')
const nugget = require('nugget')
const got = require('got')
const pkg = require('../../package.json')
const pkgVersion = `v${pkg.version}`
const pass = '\u2713'.green
const path = require('path')
const fail = '\u2717'.red
const sumchecker = require('sumchecker')
const temp = require('temp').track()
const { URL } = require('url')
const { ELECTRON_DIR } = require('../lib/utils')
const octokit = require('@octokit/rest')({
auth: process.env.ELECTRON_GITHUB_TOKEN
})
const targetRepo = pkgVersion.indexOf('nightly') > 0 ? 'nightlies' : 'electron'
let failureCount = 0
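// fetch the draft release matching this version and, unless skipped, sanity-check it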
async function getDraftRelease (version, skipValidation) {
const releaseInfo = await octokit.repos.listReleases({
owner: 'electron',
repo: targetRepo
})
const versionToCheck = version || pkgVersion
const drafts = releaseInfo.data.filter(release => {
return release.tag_name === versionToCheck && release.draft === true
})
const draft = drafts[0]
if (!skipValidation) {
failureCount = 0
check(drafts.length === 1, 'one draft exists', true)
if (versionToCheck.indexOf('beta') > -1) {
check(draft.prerelease, 'draft is a prerelease')
}
check(draft.body.length > 50 && !draft.body.includes('(placeholder)'), 'draft has release notes')
check((failureCount === 0), `Draft release looks good to go.`, true)
}
return draft
}
async function validateReleaseAssets (release, validatingRelease) {
const requiredAssets = assetsForVersion(release.tag_name, validatingRelease).sort()
const extantAssets = release.assets.map(asset => asset.name).sort()
const downloadUrls = release.assets.map(asset => asset.browser_download_url).sort()
failureCount = 0
requiredAssets.forEach(asset => {
check(extantAssets.includes(asset), asset)
})
check((failureCount === 0), `All required GitHub assets exist for release`, true)
if (!validatingRelease || !release.draft) {
if (release.draft) {
await verifyAssets(release)
} else {
await verifyShasums(downloadUrls)
.catch(err => {
console.log(`${fail} error verifyingShasums`, err)
})
}
const s3Urls = s3UrlsForVersion(release.tag_name)
await verifyShasums(s3Urls, true)
}
}
function check (condition, statement, exitIfFail = false) {
if (condition) {
console.log(`${pass} ${statement}`)
} else {
failureCount++
console.log(`${fail} ${statement}`)
if (exitIfFail) process.exit(1)
}
}
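// every asset that must be attached to the GitHub release for this version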
function assetsForVersion (version, validatingRelease) {
const patterns = [
`electron-${version}-darwin-x64-dsym.zip`,
`electron-${version}-darwin-x64-symbols.zip`,
`electron-${version}-darwin-x64.zip`,
`electron-${version}-linux-arm64-symbols.zip`,
`electron-${version}-linux-arm64.zip`,
`electron-${version}-linux-armv7l-symbols.zip`,
`electron-${version}-linux-armv7l.zip`,
`electron-${version}-linux-ia32-symbols.zip`,
`electron-${version}-linux-ia32.zip`,
`electron-${version}-linux-x64-symbols.zip`,
`electron-${version}-linux-x64.zip`,
`electron-${version}-mas-x64-dsym.zip`,
`electron-${version}-mas-x64-symbols.zip`,
`electron-${version}-mas-x64.zip`,
`electron-${version}-win32-ia32-pdb.zip`,
`electron-${version}-win32-ia32-symbols.zip`,
`electron-${version}-win32-ia32.zip`,
`electron-${version}-win32-x64-pdb.zip`,
`electron-${version}-win32-x64-symbols.zip`,
`electron-${version}-win32-x64.zip`,
`electron-api.json`,
`electron.d.ts`,
`ffmpeg-${version}-darwin-x64.zip`,
`ffmpeg-${version}-linux-arm64.zip`,
`ffmpeg-${version}-linux-armv7l.zip`,
`ffmpeg-${version}-linux-ia32.zip`,
`ffmpeg-${version}-linux-x64.zip`,
`ffmpeg-${version}-mas-x64.zip`,
`ffmpeg-${version}-win32-ia32.zip`,
`ffmpeg-${version}-win32-x64.zip`
]
if (!validatingRelease) {
patterns.push('SHASUMS256.txt')
}
return patterns
}
function s3UrlsForVersion (version) {
const bucket = `https://gh-contractor-zcbenz.s3.amazonaws.com/`
const patterns = [
`${bucket}atom-shell/dist/${version}/iojs-${version}-headers.tar.gz`,
`${bucket}atom-shell/dist/${version}/iojs-${version}.tar.gz`,
`${bucket}atom-shell/dist/${version}/node-${version}.tar.gz`,
`${bucket}atom-shell/dist/${version}/node.lib`,
`${bucket}atom-shell/dist/${version}/win-x64/iojs.lib`,
`${bucket}atom-shell/dist/${version}/win-x86/iojs.lib`,
`${bucket}atom-shell/dist/${version}/x64/node.lib`,
`${bucket}atom-shell/dist/${version}/SHASUMS.txt`,
`${bucket}atom-shell/dist/${version}/SHASUMS256.txt`,
`${bucket}atom-shell/dist/index.json`
]
return patterns
}
function runScript (scriptName, scriptArgs, cwd) {
const scriptCommand = `${scriptName} ${scriptArgs.join(' ')}`
const scriptOptions = {
encoding: 'UTF-8'
}
if (cwd) scriptOptions.cwd = cwd
try {
return execSync(scriptCommand, scriptOptions)
} catch (err) {
console.log(`${fail} Error running ${scriptName}`, err)
process.exit(1)
}
}
function uploadNodeShasums () {
console.log('Uploading Node SHASUMS file to S3.')
const scriptPath = path.join(ELECTRON_DIR, 'script', 'release', 'uploaders', 'upload-node-checksums.py')
runScript(scriptPath, ['-v', pkgVersion])
console.log(`${pass} Done uploading Node SHASUMS file to S3.`)
}
function uploadIndexJson () {
console.log('Uploading index.json to S3.')
const scriptPath = path.join(ELECTRON_DIR, 'script', 'release', 'uploaders', 'upload-index-json.py')
runScript(scriptPath, [pkgVersion])
console.log(`${pass} Done uploading index.json to S3.`)
}
async function createReleaseShasums (release) {
const fileName = 'SHASUMS256.txt'
const existingAssets = release.assets.filter(asset => asset.name === fileName)
if (existingAssets.length > 0) {
console.log(`${fileName} already exists on GitHub; deleting before creating new file.`)
await octokit.repos.deleteReleaseAsset({
owner: 'electron',
repo: targetRepo,
asset_id: existingAssets[0].id
}).catch(err => {
console.log(`${fail} Error deleting ${fileName} on GitHub:`, err)
})
}
console.log(`Creating and uploading the release ${fileName}.`)
const scriptPath = path.join(ELECTRON_DIR, 'script', 'release', 'merge-electron-checksums.py')
const checksums = runScript(scriptPath, ['-v', pkgVersion])
console.log(`${pass} Generated release SHASUMS.`)
const filePath = await saveShaSumFile(checksums, fileName)
console.log(`${pass} Created ${fileName} file.`)
await uploadShasumFile(filePath, fileName, release.id)
console.log(`${pass} Successfully uploaded ${fileName} to GitHub.`)
}
async function uploadShasumFile (filePath, fileName, releaseId) {
const uploadUrl = `https://uploads.github.com/repos/electron/${targetRepo}/releases/${releaseId}/assets{?name,label}`
return octokit.repos.uploadReleaseAsset({
url: uploadUrl,
headers: {
'content-type': 'text/plain',
'content-length': fs.statSync(filePath).size
},
file: fs.createReadStream(filePath),
name: fileName
}).catch(err => {
console.log(`${fail} Error uploading ${filePath} to GitHub:`, err)
process.exit(1)
})
}
function saveShaSumFile (checksums, fileName) {
return new Promise((resolve, reject) => {
temp.open(fileName, (err, info) => {
if (err) {
console.log(`${fail} Could not create ${fileName} file`)
process.exit(1)
} else {
fs.writeFileSync(info.fd, checksums)
fs.close(info.fd, (err) => {
if (err) {
console.log(`${fail} Could not close ${fileName} file`)
process.exit(1)
}
resolve(info.path)
})
}
})
})
}
async function publishRelease (release) {
return octokit.repos.updateRelease({
owner: 'electron',
repo: targetRepo,
release_id: release.id,
tag_name: release.tag_name,
draft: false
}).catch(err => {
console.log(`${fail} Error publishing release:`, err)
process.exit(1)
})
}
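// Entry point: either validate the assets of an existing release, or upload metadata, create checksums, validate and publish the draft release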
async function makeRelease (releaseToValidate) {
if (releaseToValidate) {
if (releaseToValidate === true) {
releaseToValidate = pkgVersion
} else {
console.log('Using the provided release version to validate.')
}
console.log(`Validating release ${releaseToValidate}`)
const release = await getDraftRelease(releaseToValidate)
await validateReleaseAssets(release, true)
} else {
let draftRelease = await getDraftRelease()
uploadNodeShasums()
uploadIndexJson()
await createReleaseShasums(draftRelease)
// Fetch latest version of release before verifying
draftRelease = await getDraftRelease(pkgVersion, true)
await validateReleaseAssets(draftRelease)
await publishRelease(draftRelease)
console.log(`${pass} SUCCESS!!! Release has been published. Please run ` +
`"npm run publish-to-npm" to publish release to npm.`)
}
}
async function makeTempDir () {
return new Promise((resolve, reject) => {
temp.mkdir('electron-publish', (err, dirPath) => {
if (err) {
reject(err)
} else {
resolve(dirPath)
}
})
})
}
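// Download every asset attached to the GitHub release and validate it against SHASUMS256.txt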
async function verifyAssets (release) {
const downloadDir = await makeTempDir()
console.log(`Downloading files from GitHub to verify shasums`)
const shaSumFile = 'SHASUMS256.txt'
let filesToCheck = await Promise.all(release.assets.map(async asset => {
const requestOptions = await octokit.repos.getReleaseAsset.endpoint({
owner: 'electron',
repo: targetRepo,
asset_id: asset.id,
headers: {
Accept: 'application/octet-stream'
}
})
const { url, headers } = requestOptions
headers.authorization = `token ${process.env.ELECTRON_GITHUB_TOKEN}`
const response = await got(url, {
followRedirect: false,
method: 'HEAD',
headers
})
await downloadFiles(response.headers.location, downloadDir, asset.name)
return asset.name
})).catch(err => {
console.log(`${fail} Error downloading files from GitHub`, err)
process.exit(1)
})
filesToCheck = filesToCheck.filter(fileName => fileName !== shaSumFile)
let checkerOpts
await validateChecksums({
algorithm: 'sha256',
filesToCheck,
fileDirectory: downloadDir,
shaSumFile,
checkerOpts,
fileSource: 'GitHub'
})
}
function downloadFiles (urls, directory, targetName) {
return new Promise((resolve, reject) => {
const nuggetOpts = { dir: directory }
nuggetOpts.quiet = !args.verboseNugget
if (targetName) nuggetOpts.target = targetName
nugget(urls, nuggetOpts, (err) => {
if (err) {
reject(err)
} else {
console.log(`${pass} all files downloaded successfully!`)
resolve()
}
})
})
}
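// Download the given URLs (from GitHub or S3) and validate them against the published SHASUMS files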
async function verifyShasums (urls, isS3) {
const fileSource = isS3 ? 'S3' : 'GitHub'
console.log(`Downloading files from ${fileSource} to verify shasums`)
const downloadDir = await makeTempDir()
let filesToCheck = []
try {
if (!isS3) {
await downloadFiles(urls, downloadDir)
filesToCheck = urls.map(url => {
const currentUrl = new URL(url)
return path.basename(currentUrl.pathname)
}).filter(file => file.indexOf('SHASUMS') === -1)
} else {
const s3VersionPath = `/atom-shell/dist/${pkgVersion}/`
await Promise.all(urls.map(async (url) => {
const currentUrl = new URL(url)
const dirname = path.dirname(currentUrl.pathname)
const filename = path.basename(currentUrl.pathname)
const s3VersionPathIdx = dirname.indexOf(s3VersionPath)
if (s3VersionPathIdx === -1 || dirname === s3VersionPath) {
if (s3VersionPathIdx !== -1 && filename.indexOf('SHASUMS') === -1) {
filesToCheck.push(filename)
}
await downloadFiles(url, downloadDir)
} else {
const subDirectory = dirname.substr(s3VersionPathIdx + s3VersionPath.length)
const fileDirectory = path.join(downloadDir, subDirectory)
try {
fs.statSync(fileDirectory)
} catch (err) {
fs.mkdirSync(fileDirectory)
}
filesToCheck.push(path.join(subDirectory, filename))
await downloadFiles(url, fileDirectory)
}
}))
}
} catch (err) {
console.log(`${fail} Error downloading files from ${fileSource}`, err)
process.exit(1)
}
console.log(`${pass} Successfully downloaded the files from ${fileSource}.`)
let checkerOpts
if (isS3) {
checkerOpts = { defaultTextEncoding: 'binary' }
}
await validateChecksums({
algorithm: 'sha256',
filesToCheck,
fileDirectory: downloadDir,
shaSumFile: 'SHASUMS256.txt',
checkerOpts,
fileSource
})
if (isS3) {
await validateChecksums({
algorithm: 'sha1',
filesToCheck,
fileDirectory: downloadDir,
shaSumFile: 'SHASUMS.txt',
checkerOpts,
fileSource
})
}
}
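// Validate downloaded files against a shasum file using sumchecker; exit on any mismatch or parse error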
async function validateChecksums (validationArgs) {
console.log(`Validating checksums for files from ${validationArgs.fileSource} ` +
`against ${validationArgs.shaSumFile}.`)
const shaSumFilePath = path.join(validationArgs.fileDirectory, validationArgs.shaSumFile)
const checker = new sumchecker.ChecksumValidator(validationArgs.algorithm,
shaSumFilePath, validationArgs.checkerOpts)
await checker.validate(validationArgs.fileDirectory, validationArgs.filesToCheck)
.catch(err => {
if (err instanceof sumchecker.ChecksumMismatchError) {
console.error(`${fail} The checksum of ${err.filename} from ` +
`${validationArgs.fileSource} did not match the shasum in ` +
`${validationArgs.shaSumFile}`)
} else if (err instanceof sumchecker.ChecksumParseError) {
console.error(`${fail} The checksum file ${validationArgs.shaSumFile} ` +
`from ${validationArgs.fileSource} could not be parsed.`, err)
} else if (err instanceof sumchecker.NoChecksumFoundError) {
console.error(`${fail} The file ${err.filename} from ` +
`${validationArgs.fileSource} was not in the shasum file ` +
`${validationArgs.shaSumFile}.`)
} else {
console.error(`${fail} Error matching files from ` +
`${validationArgs.fileSource} shasums in ${validationArgs.shaSumFile}.`, err)
}
process.exit(1)
})
console.log(`${pass} All files from ${validationArgs.fileSource} match ` +
`shasums defined in ${validationArgs.shaSumFile}.`)
}
makeRelease(args.validateRelease)


@@ -0,0 +1,67 @@
#!/usr/bin/env python
from __future__ import print_function
import json
import os
import sys
import urllib2
sys.path.append(os.path.dirname(os.path.abspath(__file__)) + "/../..")
from lib.config import s3_config
from lib.util import s3put, scoped_cwd, safe_mkdir, get_out_dir, ELECTRON_DIR
OUT_DIR = get_out_dir()
BASE_URL = 'https://electron-metadumper.herokuapp.com/?version='
version = sys.argv[1]
authToken = os.getenv('META_DUMPER_AUTH_HEADER')
def is_json(myjson):
try:
json.loads(myjson)
except ValueError:
return False
return True
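# Fetch release metadata from the metadumper service, retrying until valid JSON is returned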
def get_content(retry_count = 5):
try:
request = urllib2.Request(
BASE_URL + version,
headers={"Authorization" : authToken}
)
proposed_content = urllib2.urlopen(
request
).read()
if is_json(proposed_content):
return proposed_content
print("bad attempt")
raise Exception("Failed to fetch valid JSON from the metadumper service")
except Exception as e:
if retry_count == 0:
raise e
return get_content(retry_count - 1)
def main():
if not authToken or authToken == "":
raise Exception("Please set META_DUMPER_AUTH_HEADER")
# Upload the index.json.
with scoped_cwd(ELECTRON_DIR):
safe_mkdir(OUT_DIR)
index_json = os.path.relpath(os.path.join(OUT_DIR, 'index.json'))
new_content = get_content()
with open(index_json, "w") as f:
f.write(new_content)
bucket, access_key, secret_key = s3_config()
s3put(bucket, access_key, secret_key, OUT_DIR, 'atom-shell/dist',
[index_json])
if __name__ == '__main__':
sys.exit(main())


@@ -0,0 +1,103 @@
#!/usr/bin/env python
import argparse
import hashlib
import os
import shutil
import sys
import tempfile
sys.path.append(os.path.dirname(os.path.abspath(__file__)) + "/../..")
from lib.config import s3_config
from lib.util import download, rm_rf, s3put, safe_mkdir
DIST_URL = 'https://electronjs.org/headers/'
def main():
args = parse_args()
dist_url = args.dist_url
if dist_url[-1] != "/":
dist_url += "/"
url = dist_url + args.version + '/'
directory, files = download_files(url, get_files_list(args.version))
checksums = [
create_checksum('sha1', directory, 'SHASUMS.txt', files),
create_checksum('sha256', directory, 'SHASUMS256.txt', files)
]
if args.target_dir is None:
bucket, access_key, secret_key = s3_config()
s3put(bucket, access_key, secret_key, directory,
'atom-shell/dist/{0}'.format(args.version), checksums)
else:
copy_files(checksums, args.target_dir)
rm_rf(directory)
def parse_args():
parser = argparse.ArgumentParser(description='upload sumsha file')
parser.add_argument('-v', '--version', help='Specify the version',
required=True)
parser.add_argument('-u', '--dist-url',
help='Specify the dist url for downloading',
required=False, default=DIST_URL)
parser.add_argument('-t', '--target-dir',
help='Specify target dir of checksums',
required=False)
return parser.parse_args()
def get_files_list(version):
return [
{ "filename": 'node-{0}.tar.gz'.format(version), "required": True },
{ "filename": 'node-{0}-headers.tar.gz'.format(version), "required": True },
{ "filename": 'iojs-{0}.tar.gz'.format(version), "required": True },
{ "filename": 'iojs-{0}-headers.tar.gz'.format(version), "required": True },
{ "filename": 'node.lib', "required": False },
{ "filename": 'x64/node.lib', "required": False },
{ "filename": 'win-x86/iojs.lib', "required": False },
{ "filename": 'win-x64/iojs.lib', "required": False },
{ "filename": 'win-x86/node.lib', "required": False },
{ "filename": 'win-x64/node.lib', "required": False }
]
def download_files(url, files):
directory = tempfile.mkdtemp(prefix='electron-tmp')
result = []
for optional_f in files:
required = optional_f['required']
f = optional_f['filename']
try:
result.append(download(f, url + f, os.path.join(directory, f)))
except Exception:
if required:
raise
return directory, result
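# Hash each downloaded file and write a SHASUMS-style manifest into the temp directory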
def create_checksum(algorithm, directory, filename, files):
lines = []
for path in files:
h = hashlib.new(algorithm)
with open(path, 'rb') as f:
h.update(f.read())
lines.append(h.hexdigest() + ' ' + os.path.relpath(path, directory))
checksum_file = os.path.join(directory, filename)
with open(checksum_file, 'w') as f:
f.write('\n'.join(lines) + '\n')
return checksum_file
def copy_files(source_files, output_dir):
for source_file in source_files:
output_path = os.path.join(output_dir, os.path.basename(source_file))
safe_mkdir(os.path.dirname(output_path))
shutil.copy2(source_file, output_path)
if __name__ == '__main__':
sys.exit(main())


@@ -0,0 +1,84 @@
#!/usr/bin/env python
import argparse
import glob
import os
import shutil
import sys
sys.path.append(os.path.dirname(os.path.abspath(__file__)) + "/../..")
from lib.config import PLATFORM, get_target_arch, s3_config
from lib.util import safe_mkdir, scoped_cwd, s3put, get_out_dir, get_dist_dir
DIST_DIR = get_dist_dir()
OUT_DIR = get_out_dir()
GEN_DIR = os.path.join(OUT_DIR, 'gen')
HEADER_TAR_NAMES = [
'node-{0}.tar.gz',
'node-{0}-headers.tar.gz',
'iojs-{0}.tar.gz',
'iojs-{0}-headers.tar.gz'
]
def main():
args = parse_args()
# Upload node's headers to S3.
bucket, access_key, secret_key = s3_config()
upload_node(bucket, access_key, secret_key, args.version)
def parse_args():
parser = argparse.ArgumentParser(description='upload sumsha file')
parser.add_argument('-v', '--version', help='Specify the version',
required=True)
return parser.parse_args()
def upload_node(bucket, access_key, secret_key, version):
with scoped_cwd(GEN_DIR):
generated_tar = os.path.join(GEN_DIR, 'node_headers.tar.gz')
for header_tar in HEADER_TAR_NAMES:
versioned_header_tar = header_tar.format(version)
shutil.copy2(generated_tar, os.path.join(GEN_DIR, versioned_header_tar))
s3put(bucket, access_key, secret_key, GEN_DIR,
'atom-shell/dist/{0}'.format(version), glob.glob('node-*.tar.gz'))
s3put(bucket, access_key, secret_key, GEN_DIR,
'atom-shell/dist/{0}'.format(version), glob.glob('iojs-*.tar.gz'))
if PLATFORM == 'win32':
if get_target_arch() == 'ia32':
node_lib = os.path.join(DIST_DIR, 'node.lib')
iojs_lib = os.path.join(DIST_DIR, 'win-x86', 'iojs.lib')
v4_node_lib = os.path.join(DIST_DIR, 'win-x86', 'node.lib')
else:
node_lib = os.path.join(DIST_DIR, 'x64', 'node.lib')
iojs_lib = os.path.join(DIST_DIR, 'win-x64', 'iojs.lib')
v4_node_lib = os.path.join(DIST_DIR, 'win-x64', 'node.lib')
safe_mkdir(os.path.dirname(node_lib))
safe_mkdir(os.path.dirname(iojs_lib))
# Copy electron.lib to node.lib and iojs.lib.
electron_lib = os.path.join(OUT_DIR, 'electron.lib')
shutil.copy2(electron_lib, node_lib)
shutil.copy2(electron_lib, iojs_lib)
shutil.copy2(electron_lib, v4_node_lib)
# Upload the node.lib.
s3put(bucket, access_key, secret_key, DIST_DIR,
'atom-shell/dist/{0}'.format(version), [node_lib])
# Upload the iojs.lib.
s3put(bucket, access_key, secret_key, DIST_DIR,
'atom-shell/dist/{0}'.format(version), [iojs_lib])
# Upload the v4 node.lib.
s3put(bucket, access_key, secret_key, DIST_DIR,
'atom-shell/dist/{0}'.format(version), [v4_node_lib])
if __name__ == '__main__':
sys.exit(main())


@@ -0,0 +1,56 @@
#!/usr/bin/env python
import os
import glob
import sys
sys.path.append(os.path.dirname(os.path.abspath(__file__)) + "/../..")
from lib.config import PLATFORM, s3_config, enable_verbose_mode
from lib.util import get_electron_branding, execute, rm_rf, safe_mkdir, s3put, \
get_out_dir, ELECTRON_DIR
RELEASE_DIR = get_out_dir()
PROJECT_NAME = get_electron_branding()['project_name']
PRODUCT_NAME = get_electron_branding()['product_name']
SYMBOLS_DIR = os.path.join(RELEASE_DIR, 'breakpad_symbols')
PDB_LIST = [
os.path.join(RELEASE_DIR, '{0}.exe.pdb'.format(PROJECT_NAME))
]
def main():
os.chdir(ELECTRON_DIR)
if PLATFORM == 'win32':
for pdb in PDB_LIST:
run_symstore(pdb, SYMBOLS_DIR, PRODUCT_NAME)
files = glob.glob(SYMBOLS_DIR + '/*.pdb/*/*.pdb')
else:
files = glob.glob(SYMBOLS_DIR + '/*/*/*.sym')
# The file upload needs to be atom-shell/symbols/:symbol_name/:hash/:symbol
os.chdir(SYMBOLS_DIR)
files = [os.path.relpath(f, os.getcwd()) for f in files]
# The symbol server needs lowercase paths, it will fail otherwise
# So lowercase all the file paths here
files = [f.lower() for f in files]
bucket, access_key, secret_key = s3_config()
upload_symbols(bucket, access_key, secret_key, files)
def run_symstore(pdb, dest, product):
execute(['symstore', 'add', '/r', '/f', pdb, '/s', dest, '/t', product])
def upload_symbols(bucket, access_key, secret_key, files):
s3put(bucket, access_key, secret_key, SYMBOLS_DIR, 'atom-shell/symbols',
files)
if __name__ == '__main__':
sys.exit(main())


@@ -0,0 +1,85 @@
if (!process.env.CI) require('dotenv-safe').load()
const fs = require('fs')
const octokit = require('@octokit/rest')({
auth: process.env.ELECTRON_GITHUB_TOKEN
})
if (process.argv.length < 6) {
console.log('Usage: upload-to-github filePath fileName releaseId releaseVersion')
process.exit(1)
}
const filePath = process.argv[2]
const fileName = process.argv[3]
const releaseId = process.argv[4]
const releaseVersion = process.argv[5]
const getHeaders = (filePath, fileName) => {
const extension = fileName.split('.').pop()
const size = fs.statSync(filePath).size
const options = {
'json': 'text/json',
'zip': 'application/zip',
'txt': 'text/plain',
'ts': 'application/typescript'
}
return {
'content-type': options[extension],
'content-length': size
}
}
const targetRepo = releaseVersion.indexOf('nightly') > 0 ? 'nightlies' : 'electron'
const uploadUrl = `https://uploads.github.com/repos/electron/${targetRepo}/releases/${releaseId}/assets{?name,label}`
let retry = 0
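// Upload the asset, retrying up to four times; an existing asset with the same name is deleted before retrying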
function uploadToGitHub () {
octokit.repos.uploadReleaseAsset({
url: uploadUrl,
headers: getHeaders(filePath, fileName),
file: fs.createReadStream(filePath),
name: fileName
}).then(() => {
console.log(`Successfully uploaded ${fileName} to GitHub.`)
process.exit()
}).catch((err) => {
if (retry < 4) {
console.log(`Error uploading ${fileName} to GitHub, will retry. Error was:`, err)
retry++
octokit.repos.listAssetsForRelease({
owner: 'electron',
repo: targetRepo,
release_id: releaseId
}).then(assets => {
console.log('Got list of assets for existing release:')
console.log(JSON.stringify(assets.data, null, ' '))
const existingAssets = assets.data.filter(asset => asset.name === fileName)
if (existingAssets.length > 0) {
console.log(`${fileName} already exists; will delete before retrying upload.`)
octokit.repos.deleteReleaseAsset({
owner: 'electron',
repo: targetRepo,
asset_id: existingAssets[0].id
}).catch((deleteErr) => {
console.log(`Failed to delete existing asset ${fileName}. Error was:`, deleteErr)
}).then(uploadToGitHub)
} else {
console.log(`Current asset ${fileName} not found in existing assets; retrying upload.`)
uploadToGitHub()
}
}).catch((getReleaseErr) => {
console.log(`Fatal: Unable to get current release assets via getRelease! Error was:`, getReleaseErr)
})
} else {
console.log(`Error uploading ${fileName} to GitHub after retries:`, err)
process.exitCode = 1
}
})
}
uploadToGitHub()


@@ -0,0 +1,211 @@
#!/usr/bin/env python
from __future__ import print_function
import argparse
import datetime
import errno
import hashlib
import json
import os
import shutil
import subprocess
import sys
import tempfile
sys.path.append(os.path.dirname(os.path.abspath(__file__)) + "/../..")
from io import StringIO
from lib.config import PLATFORM, get_target_arch, get_env_var, s3_config, \
get_zip_name
from lib.util import get_electron_branding, execute, get_electron_version, \
scoped_cwd, s3put, get_electron_exec, \
get_out_dir, SRC_DIR, ELECTRON_DIR
ELECTRON_REPO = 'electron/electron'
ELECTRON_VERSION = get_electron_version()
PROJECT_NAME = get_electron_branding()['project_name']
PRODUCT_NAME = get_electron_branding()['product_name']
OUT_DIR = get_out_dir()
DIST_NAME = get_zip_name(PROJECT_NAME, ELECTRON_VERSION)
SYMBOLS_NAME = get_zip_name(PROJECT_NAME, ELECTRON_VERSION, 'symbols')
DSYM_NAME = get_zip_name(PROJECT_NAME, ELECTRON_VERSION, 'dsym')
PDB_NAME = get_zip_name(PROJECT_NAME, ELECTRON_VERSION, 'pdb')
def main():
args = parse_args()
if args.upload_to_s3:
utcnow = datetime.datetime.utcnow()
args.upload_timestamp = utcnow.strftime('%Y%m%d')
build_version = get_electron_build_version()
if not ELECTRON_VERSION.startswith(build_version):
error = 'Tag name ({0}) should match build version ({1})\n'.format(
ELECTRON_VERSION, build_version)
sys.stderr.write(error)
sys.stderr.flush()
return 1
tag_exists = False
release = get_release(args.version)
if not release['draft']:
tag_exists = True
if not args.upload_to_s3:
assert release['exists'], 'Release does not exist; cannot upload to GitHub!'
assert tag_exists == args.overwrite, \
'You have to pass --overwrite to overwrite a published release'
# Upload Electron files.
# Rename dist.zip to get_zip_name('electron', version, suffix='')
electron_zip = os.path.join(OUT_DIR, DIST_NAME)
shutil.copy2(os.path.join(OUT_DIR, 'dist.zip'), electron_zip)
upload_electron(release, electron_zip, args)
if get_target_arch() != 'mips64el':
symbols_zip = os.path.join(OUT_DIR, SYMBOLS_NAME)
shutil.copy2(os.path.join(OUT_DIR, 'symbols.zip'), symbols_zip)
upload_electron(release, symbols_zip, args)
if PLATFORM == 'darwin':
api_path = os.path.join(ELECTRON_DIR, 'electron-api.json')
upload_electron(release, api_path, args)
ts_defs_path = os.path.join(ELECTRON_DIR, 'electron.d.ts')
upload_electron(release, ts_defs_path, args)
dsym_zip = os.path.join(OUT_DIR, DSYM_NAME)
shutil.copy2(os.path.join(OUT_DIR, 'dsym.zip'), dsym_zip)
upload_electron(release, dsym_zip, args)
elif PLATFORM == 'win32':
pdb_zip = os.path.join(OUT_DIR, PDB_NAME)
shutil.copy2(os.path.join(OUT_DIR, 'pdb.zip'), pdb_zip)
upload_electron(release, pdb_zip, args)
# Upload free version of ffmpeg.
ffmpeg = get_zip_name('ffmpeg', ELECTRON_VERSION)
ffmpeg_zip = os.path.join(OUT_DIR, ffmpeg)
ffmpeg_build_path = os.path.join(SRC_DIR, 'out', 'ffmpeg', 'ffmpeg.zip')
shutil.copy2(ffmpeg_build_path, ffmpeg_zip)
upload_electron(release, ffmpeg_zip, args)
chromedriver = get_zip_name('chromedriver', ELECTRON_VERSION)
chromedriver_zip = os.path.join(OUT_DIR, chromedriver)
shutil.copy2(os.path.join(OUT_DIR, 'chromedriver.zip'), chromedriver_zip)
upload_electron(release, chromedriver_zip, args)
mksnapshot = get_zip_name('mksnapshot', ELECTRON_VERSION)
mksnapshot_zip = os.path.join(OUT_DIR, mksnapshot)
if get_target_arch().startswith('arm'):
# Upload the x64 binary for arm/arm64 mksnapshot
mksnapshot = get_zip_name('mksnapshot', ELECTRON_VERSION, 'x64')
mksnapshot_zip = os.path.join(OUT_DIR, mksnapshot)
shutil.copy2(os.path.join(OUT_DIR, 'mksnapshot.zip'), mksnapshot_zip)
upload_electron(release, mksnapshot_zip, args)
if not tag_exists and not args.upload_to_s3:
# Upload symbols to symbol server.
run_python_upload_script('upload-symbols.py')
if PLATFORM == 'win32':
run_python_upload_script('upload-node-headers.py', '-v', args.version)
def parse_args():
parser = argparse.ArgumentParser(description='upload distribution file')
parser.add_argument('-v', '--version', help='Specify the version',
default=ELECTRON_VERSION)
parser.add_argument('-o', '--overwrite',
help='Overwrite a published release',
action='store_true')
parser.add_argument('-p', '--publish-release',
help='Publish the release',
action='store_true')
parser.add_argument('-s', '--upload_to_s3',
help='Upload assets to s3 bucket',
dest='upload_to_s3',
action='store_true',
default=False,
required=False)
return parser.parse_args()
def run_python_upload_script(script, *args):
script_path = os.path.join(
ELECTRON_DIR, 'script', 'release', 'uploaders', script)
return execute([sys.executable, script_path] + list(args))
def get_electron_build_version():
if get_target_arch().startswith('arm') or 'CI' in os.environ:
# In CI we just build as told.
return ELECTRON_VERSION
electron = get_electron_exec()
return subprocess.check_output([electron, '--version']).strip()
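# Upload a single artifact either to S3 (with --upload_to_s3) or to the GitHub release, followed by its sha256 checksum file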
def upload_electron(release, file_path, args):
filename = os.path.basename(file_path)
# if upload_to_s3 is set, skip github upload.
if args.upload_to_s3:
bucket, access_key, secret_key = s3_config()
key_prefix = 'electron-artifacts/{0}_{1}'.format(args.version,
args.upload_timestamp)
s3put(bucket, access_key, secret_key, os.path.dirname(file_path),
key_prefix, [file_path])
upload_sha256_checksum(args.version, file_path, key_prefix)
s3url = 'https://gh-contractor-zcbenz.s3.amazonaws.com'
print('{0} uploaded to {1}/{2}/{0}'.format(filename, s3url, key_prefix))
return
# Upload the file.
upload_io_to_github(release, filename, file_path, args.version)
# Upload the checksum file.
upload_sha256_checksum(args.version, file_path)
def upload_io_to_github(release, filename, filepath, version):
print('Uploading %s to GitHub' % filename)
script_path = os.path.join(
ELECTRON_DIR, 'script', 'release', 'uploaders', 'upload-to-github.js')
execute(['node', script_path, filepath, filename, str(release['id']),
version])
def upload_sha256_checksum(version, file_path, key_prefix=None):
bucket, access_key, secret_key = s3_config()
checksum_path = '{}.sha256sum'.format(file_path)
if key_prefix is None:
key_prefix = 'atom-shell/tmp/{0}'.format(version)
sha256 = hashlib.sha256()
with open(file_path, 'rb') as f:
sha256.update(f.read())
filename = os.path.basename(file_path)
with open(checksum_path, 'w') as checksum:
checksum.write('{} *{}'.format(sha256.hexdigest(), filename))
s3put(bucket, access_key, secret_key, os.path.dirname(checksum_path),
key_prefix, [checksum_path])
def auth_token():
token = get_env_var('GITHUB_TOKEN')
message = ('Error: Please set the $ELECTRON_GITHUB_TOKEN '
'environment variable, which is your personal token')
assert token, message
return token
def get_release(version):
script_path = os.path.join(
ELECTRON_DIR, 'script', 'release', 'find-github-release.js')
release_info = execute(['node', script_path, version])
release = json.loads(release_info)
return release
if __name__ == '__main__':
sys.exit(main())


@@ -0,0 +1,149 @@
#!/usr/bin/env node
const { GitProcess } = require('dugite')
const fs = require('fs')
const semver = require('semver')
const path = require('path')
const { promisify } = require('util')
const minimist = require('minimist')
const { ELECTRON_DIR } = require('../lib/utils')
const versionUtils = require('./version-utils')
const writeFile = promisify(fs.writeFile)
const readFile = promisify(fs.readFile)
function parseCommandLine () {
let help
const opts = minimist(process.argv.slice(2), {
string: [ 'bump', 'version' ],
boolean: [ 'dryRun', 'help' ],
alias: { 'version': ['v'] },
unknown: arg => { help = true }
})
if (help || opts.help || !opts.bump) {
console.log(`
Bump release version number. Possible arguments:\n
--bump=patch to increment patch version\n
--version={version} to set version number directly\n
--dryRun to print the next version without updating files
Note that you can pass --dryRun together with --bump to preview the result.
`)
process.exit(0)
}
return opts
}
// run the script
async function main () {
const opts = parseCommandLine()
const currentVersion = await versionUtils.getElectronVersion()
const version = await nextVersion(opts.bump, currentVersion)
const parsed = semver.parse(version)
const components = {
major: parsed.major,
minor: parsed.minor,
patch: parsed.patch,
pre: parsed.prerelease
}
// print would-be new version and exit early
if (opts.dryRun) {
console.log(`new version number would be: ${version}\n`)
return 0
}
// update all version-related files
await Promise.all([
updateVersion(version),
updatePackageJSON(version),
updateWinRC(components)
])
// commit all updated version-related files
await commitVersionBump(version)
console.log(`Bumped to version: ${version}`)
}
// get next version for release based on [nightly, beta, stable]
async function nextVersion (bumpType, version) {
if (versionUtils.isNightly(version) || versionUtils.isBeta(version)) {
switch (bumpType) {
case 'nightly':
version = await versionUtils.nextNightly(version)
break
case 'beta':
version = await versionUtils.nextBeta(version)
break
case 'stable':
version = semver.valid(semver.coerce(version))
break
default:
throw new Error('Invalid bump type.')
}
} else if (versionUtils.isStable(version)) {
switch (bumpType) {
case 'nightly':
version = versionUtils.nextNightly(version)
break
case 'beta':
throw new Error('Cannot bump to beta from stable.')
case 'stable':
version = semver.inc(version, 'patch')
break
default:
throw new Error('Invalid bump type.')
}
} else {
throw new Error(`Invalid current version: ${version}`)
}
return version
}
// update VERSION file with latest release info
async function updateVersion (version) {
const versionPath = path.resolve(ELECTRON_DIR, 'ELECTRON_VERSION')
await writeFile(versionPath, version, 'utf8')
}
// update package metadata files with new version
async function updatePackageJSON (version) {
const filePath = path.resolve(ELECTRON_DIR, 'package.json')
const file = require(filePath)
file.version = version
await writeFile(filePath, JSON.stringify(file, null, 2))
}
// push bump commit to release branch
async function commitVersionBump (version) {
const gitArgs = ['commit', '-a', '-m', `Bump v${version}`, '-n']
await GitProcess.exec(gitArgs, ELECTRON_DIR)
}
// updates atom.rc file with new semver values
async function updateWinRC (components) {
const filePath = path.resolve(ELECTRON_DIR, 'shell', 'browser', 'resources', 'win', 'atom.rc')
const data = await readFile(filePath, 'utf8')
const arr = data.split('\n')
arr.forEach((line, idx) => {
if (line.includes('FILEVERSION')) {
arr[idx] = ` FILEVERSION ${versionUtils.makeVersion(components, ',', versionUtils.preType.PARTIAL)}`
arr[idx + 1] = ` PRODUCTVERSION ${versionUtils.makeVersion(components, ',', versionUtils.preType.PARTIAL)}`
} else if (line.includes('FileVersion')) {
arr[idx] = ` VALUE "FileVersion", "${versionUtils.makeVersion(components, '.')}"`
arr[idx + 5] = ` VALUE "ProductVersion", "${versionUtils.makeVersion(components, '.')}"`
}
})
await writeFile(filePath, arr.join('\n'))
}
if (process.mainModule === module) {
main().catch((error) => {
console.error(error)
process.exit(1)
})
}
module.exports = { nextVersion }


@@ -0,0 +1,99 @@
const path = require('path')
const fs = require('fs')
const semver = require('semver')
const { GitProcess } = require('dugite')
const { promisify } = require('util')
const { ELECTRON_DIR } = require('../lib/utils')
const readFile = promisify(fs.readFile)
const preType = {
NONE: 'none',
PARTIAL: 'partial',
FULL: 'full'
}
const getCurrentDate = () => {
const d = new Date()
const dd = `${d.getDate()}`.padStart(2, '0')
const mm = `${d.getMonth() + 1}`.padStart(2, '0')
const yyyy = d.getFullYear()
return `${yyyy}${mm}${dd}`
}
const isNightly = v => v.includes('nightly')
const isBeta = v => v.includes('beta')
const isStable = v => {
const parsed = semver.parse(v)
return !!(parsed && parsed.prerelease.length === 0)
}
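// Join the version components with the given delimiter; PARTIAL appends the prerelease number, FULL the full prerelease tag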
const makeVersion = (components, delim, pre = preType.NONE) => {
let version = [components.major, components.minor, components.patch].join(delim)
if (pre === preType.PARTIAL) {
version += `${delim}${components.pre[1] || 0}`
} else if (pre === preType.FULL) {
version += `-${components.pre[0]}${delim}${components.pre[1]}`
}
return version
}
async function nextBeta (v) {
const next = semver.coerce(semver.clean(v))
const tagBlob = await GitProcess.exec(['tag', '--list', '-l', `v${next}-beta.*`], ELECTRON_DIR)
const tags = tagBlob.stdout.split('\n').filter(e => e !== '')
tags.sort((t1, t2) => semver.compare(t1, t2))
// increment the latest existing beta tag or start at beta.1 if it's a new beta line
return tags.length === 0 ? semver.inc(next, 'beta', 'prerelease') : semver.inc(tags.pop(), 'prerelease')
}
async function getElectronVersion () {
const versionPath = path.resolve(ELECTRON_DIR, 'ELECTRON_VERSION')
const version = await readFile(versionPath, 'utf8')
return version.trim()
}
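// Compute the next nightly version: bump the major on master, bump the patch from a stable version, then append the nightly.<date> prerelease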
async function nextNightly (v) {
let next = semver.valid(semver.coerce(v))
const pre = `nightly.${getCurrentDate()}`
const branch = (await GitProcess.exec(['rev-parse', '--abbrev-ref', 'HEAD'], ELECTRON_DIR)).stdout.trim()
if (branch === 'master') {
next = semver.inc(await getLastMajorForMaster(), 'major')
} else if (isStable(v)) {
next = semver.inc(next, 'patch')
}
return `${next}-${pre}`
}
async function getLastMajorForMaster () {
let branchNames
const result = await GitProcess.exec(['branch', '-a', '--remote', '--list', 'origin/[0-9]-[0-9]-x'], ELECTRON_DIR)
if (result.exitCode === 0) {
branchNames = result.stdout.trim().split('\n')
const filtered = branchNames.map(b => b.replace('origin/', ''))
return getNextReleaseBranch(filtered)
} else {
throw new Error('Release branches could not be fetched.')
}
}
function getNextReleaseBranch (branches) {
const converted = branches.map(b => b.replace(/-/g, '.').replace('x', '0'))
return converted.reduce((v1, v2) => semver.gt(v1, v2) ? v1 : v2)
}
module.exports = {
isStable,
isBeta,
isNightly,
nextBeta,
makeVersion,
getElectronVersion,
nextNightly,
preType
}