Convert all source files to JavaScript
parent 403870a27e · commit 1f9691ae13
144 changed files with 11211 additions and 7301 deletions
atom/common/lib/asar.coffee · deleted
@@ -1,394 +0,0 @@
asar = process.binding 'atom_common_asar'
|
||||
child_process = require 'child_process'
|
||||
path = require 'path'
|
||||
util = require 'util'
|
||||
|
||||
### Cache asar archive objects. ###
|
||||
cachedArchives = {}
|
||||
getOrCreateArchive = (p) ->
|
||||
archive = cachedArchives[p]
|
||||
return archive if archive?
|
||||
archive = asar.createArchive p
|
||||
return false unless archive
|
||||
cachedArchives[p] = archive
|
||||
|
||||
### Clean cache on quit. ###
|
||||
process.on 'exit', ->
|
||||
archive.destroy() for own p, archive of cachedArchives
|
||||
|
||||
### Separate asar package's path from full path. ###
|
||||
splitPath = (p) ->
|
||||
### shortcut to disable asar. ###
|
||||
return [false] if process.noAsar
|
||||
|
||||
return [false] if typeof p isnt 'string'
|
||||
return [true, p, ''] if p.substr(-5) is '.asar'
|
||||
p = path.normalize p
|
||||
index = p.lastIndexOf ".asar#{path.sep}"
|
||||
return [false] if index is -1
|
||||
[true, p.substr(0, index + 5), p.substr(index + 6)]
|
||||
|
||||
### Convert asar archive's Stats object to fs's Stats object. ###
|
||||
nextInode = 0
|
||||
uid = if process.getuid? then process.getuid() else 0
|
||||
gid = if process.getgid? then process.getgid() else 0
|
||||
fakeTime = new Date()
|
||||
asarStatsToFsStats = (stats) ->
|
||||
{
|
||||
dev: 1,
|
||||
ino: ++nextInode,
|
||||
mode: 33188,
|
||||
nlink: 1,
|
||||
uid: uid,
|
||||
gid: gid,
|
||||
rdev: 0,
|
||||
atime: stats.atime || fakeTime,
|
||||
birthtime: stats.birthtime || fakeTime,
|
||||
mtime: stats.mtime || fakeTime,
|
||||
ctime: stats.ctime || fakeTime,
|
||||
size: stats.size,
|
||||
isFile: -> stats.isFile
|
||||
isDirectory: -> stats.isDirectory
|
||||
isSymbolicLink: -> stats.isLink
|
||||
isBlockDevice: -> false
|
||||
isCharacterDevice: -> false
|
||||
isFIFO: -> false
|
||||
isSocket: -> false
|
||||
}
|
||||
|
||||
### Create an ENOENT error. ###
|
||||
notFoundError = (asarPath, filePath, callback) ->
|
||||
error = new Error("ENOENT, #{filePath} not found in #{asarPath}")
|
||||
error.code = "ENOENT"
|
||||
error.errno = -2
|
||||
unless typeof callback is 'function'
|
||||
throw error
|
||||
process.nextTick -> callback error
|
||||
|
||||
### Create an ENOTDIR error. ###
|
||||
notDirError = (callback) ->
|
||||
error = new Error('ENOTDIR, not a directory')
|
||||
error.code = 'ENOTDIR'
|
||||
error.errno = -20
|
||||
unless typeof callback is 'function'
|
||||
throw error
|
||||
process.nextTick -> callback error
|
||||
|
||||
### Create invalid archive error. ###
|
||||
invalidArchiveError = (asarPath, callback) ->
|
||||
error = new Error("Invalid package #{asarPath}")
|
||||
unless typeof callback is 'function'
|
||||
throw error
|
||||
process.nextTick -> callback error
|
||||
|
||||
### Override APIs that rely on passing file path instead of content to C++. ###
|
||||
overrideAPISync = (module, name, arg = 0) ->
|
||||
old = module[name]
|
||||
module[name] = ->
|
||||
p = arguments[arg]
|
||||
[isAsar, asarPath, filePath] = splitPath p
|
||||
return old.apply this, arguments unless isAsar
|
||||
|
||||
archive = getOrCreateArchive asarPath
|
||||
invalidArchiveError asarPath unless archive
|
||||
|
||||
newPath = archive.copyFileOut filePath
|
||||
notFoundError asarPath, filePath unless newPath
|
||||
|
||||
arguments[arg] = newPath
|
||||
old.apply this, arguments
|
||||
|
||||
overrideAPI = (module, name, arg = 0) ->
|
||||
old = module[name]
|
||||
module[name] = ->
|
||||
p = arguments[arg]
|
||||
[isAsar, asarPath, filePath] = splitPath p
|
||||
return old.apply this, arguments unless isAsar
|
||||
|
||||
callback = arguments[arguments.length - 1]
|
||||
return overrideAPISync module, name, arg unless typeof callback is 'function'
|
||||
|
||||
archive = getOrCreateArchive asarPath
|
||||
return invalidArchiveError asarPath, callback unless archive
|
||||
|
||||
newPath = archive.copyFileOut filePath
|
||||
return notFoundError asarPath, filePath, callback unless newPath
|
||||
|
||||
arguments[arg] = newPath
|
||||
old.apply this, arguments
|
||||
|
||||
### Override fs APIs. ###
|
||||
exports.wrapFsWithAsar = (fs) ->
|
||||
lstatSync = fs.lstatSync
|
||||
fs.lstatSync = (p) ->
|
||||
[isAsar, asarPath, filePath] = splitPath p
|
||||
return lstatSync p unless isAsar
|
||||
|
||||
archive = getOrCreateArchive asarPath
|
||||
invalidArchiveError asarPath unless archive
|
||||
|
||||
stats = archive.stat filePath
|
||||
notFoundError asarPath, filePath unless stats
|
||||
|
||||
asarStatsToFsStats stats
|
||||
|
||||
lstat = fs.lstat
|
||||
fs.lstat = (p, callback) ->
|
||||
[isAsar, asarPath, filePath] = splitPath p
|
||||
return lstat p, callback unless isAsar
|
||||
|
||||
archive = getOrCreateArchive asarPath
|
||||
return invalidArchiveError asarPath, callback unless archive
|
||||
|
||||
stats = getOrCreateArchive(asarPath).stat filePath
|
||||
return notFoundError asarPath, filePath, callback unless stats
|
||||
|
||||
process.nextTick -> callback null, asarStatsToFsStats stats
|
||||
|
||||
statSync = fs.statSync
|
||||
fs.statSync = (p) ->
|
||||
[isAsar, asarPath, filePath] = splitPath p
|
||||
return statSync p unless isAsar
|
||||
|
||||
### Do not distinguish links for now. ###
|
||||
fs.lstatSync p
|
||||
|
||||
stat = fs.stat
|
||||
fs.stat = (p, callback) ->
|
||||
[isAsar, asarPath, filePath] = splitPath p
|
||||
return stat p, callback unless isAsar
|
||||
|
||||
### Do not distinguish links for now. ###
|
||||
process.nextTick -> fs.lstat p, callback
|
||||
|
||||
statSyncNoException = fs.statSyncNoException
|
||||
fs.statSyncNoException = (p) ->
|
||||
[isAsar, asarPath, filePath] = splitPath p
|
||||
return statSyncNoException p unless isAsar
|
||||
|
||||
archive = getOrCreateArchive asarPath
|
||||
return false unless archive
|
||||
stats = archive.stat filePath
|
||||
return false unless stats
|
||||
asarStatsToFsStats stats
|
||||
|
||||
realpathSync = fs.realpathSync
|
||||
fs.realpathSync = (p) ->
|
||||
[isAsar, asarPath, filePath] = splitPath p
|
||||
return realpathSync.apply this, arguments unless isAsar
|
||||
|
||||
archive = getOrCreateArchive asarPath
|
||||
invalidArchiveError asarPath unless archive
|
||||
|
||||
real = archive.realpath filePath
|
||||
notFoundError asarPath, filePath if real is false
|
||||
|
||||
path.join realpathSync(asarPath), real
|
||||
|
||||
realpath = fs.realpath
|
||||
fs.realpath = (p, cache, callback) ->
|
||||
[isAsar, asarPath, filePath] = splitPath p
|
||||
return realpath.apply this, arguments unless isAsar
|
||||
|
||||
if typeof cache is 'function'
|
||||
callback = cache
|
||||
cache = undefined
|
||||
|
||||
archive = getOrCreateArchive asarPath
|
||||
return invalidArchiveError asarPath, callback unless archive
|
||||
|
||||
real = archive.realpath filePath
|
||||
if real is false
|
||||
return notFoundError asarPath, filePath, callback
|
||||
|
||||
realpath asarPath, (err, p) ->
|
||||
return callback err if err
|
||||
callback null, path.join(p, real)
|
||||
|
||||
exists = fs.exists
|
||||
fs.exists = (p, callback) ->
|
||||
[isAsar, asarPath, filePath] = splitPath p
|
||||
return exists p, callback unless isAsar
|
||||
|
||||
archive = getOrCreateArchive asarPath
|
||||
return invalidArchiveError asarPath, callback unless archive
|
||||
|
||||
process.nextTick -> callback archive.stat(filePath) isnt false
|
||||
|
||||
existsSync = fs.existsSync
|
||||
fs.existsSync = (p) ->
|
||||
[isAsar, asarPath, filePath] = splitPath p
|
||||
return existsSync p unless isAsar
|
||||
|
||||
archive = getOrCreateArchive asarPath
|
||||
return false unless archive
|
||||
|
||||
archive.stat(filePath) isnt false
|
||||
|
||||
open = fs.open
|
||||
readFile = fs.readFile
|
||||
fs.readFile = (p, options, callback) ->
|
||||
[isAsar, asarPath, filePath] = splitPath p
|
||||
return readFile.apply this, arguments unless isAsar
|
||||
|
||||
if typeof options is 'function'
|
||||
callback = options
|
||||
options = undefined
|
||||
|
||||
archive = getOrCreateArchive asarPath
|
||||
return invalidArchiveError asarPath, callback unless archive
|
||||
|
||||
info = archive.getFileInfo filePath
|
||||
return notFoundError asarPath, filePath, callback unless info
|
||||
|
||||
if info.size is 0
|
||||
return process.nextTick -> callback null, new Buffer(0)
|
||||
|
||||
if info.unpacked
|
||||
realPath = archive.copyFileOut filePath
|
||||
return fs.readFile realPath, options, callback
|
||||
|
||||
if not options
|
||||
options = encoding: null
|
||||
else if util.isString options
|
||||
options = encoding: options
|
||||
else if not util.isObject options
|
||||
throw new TypeError('Bad arguments')
|
||||
|
||||
encoding = options.encoding
|
||||
|
||||
buffer = new Buffer(info.size)
|
||||
fd = archive.getFd()
|
||||
return notFoundError asarPath, filePath, callback unless fd >= 0
|
||||
|
||||
fs.read fd, buffer, 0, info.size, info.offset, (error) ->
|
||||
callback error, if encoding then buffer.toString encoding else buffer
|
||||
|
||||
openSync = fs.openSync
|
||||
readFileSync = fs.readFileSync
|
||||
fs.readFileSync = (p, opts) ->
|
||||
### This allows V8 to optimize this function. ###
|
||||
options = opts
|
||||
|
||||
[isAsar, asarPath, filePath] = splitPath p
|
||||
return readFileSync.apply this, arguments unless isAsar
|
||||
|
||||
archive = getOrCreateArchive asarPath
|
||||
invalidArchiveError asarPath unless archive
|
||||
|
||||
info = archive.getFileInfo filePath
|
||||
notFoundError asarPath, filePath unless info
|
||||
|
||||
if info.size is 0
|
||||
return if options then '' else new Buffer(0)
|
||||
|
||||
if info.unpacked
|
||||
realPath = archive.copyFileOut filePath
|
||||
return fs.readFileSync realPath, options
|
||||
|
||||
if not options
|
||||
options = encoding: null
|
||||
else if util.isString options
|
||||
options = encoding: options
|
||||
else if not util.isObject options
|
||||
throw new TypeError('Bad arguments')
|
||||
|
||||
encoding = options.encoding
|
||||
|
||||
buffer = new Buffer(info.size)
|
||||
fd = archive.getFd()
|
||||
notFoundError asarPath, filePath unless fd >= 0
|
||||
|
||||
fs.readSync fd, buffer, 0, info.size, info.offset
|
||||
if encoding then buffer.toString encoding else buffer
|
||||
|
||||
readdir = fs.readdir
|
||||
fs.readdir = (p, callback) ->
|
||||
[isAsar, asarPath, filePath] = splitPath p
|
||||
return readdir.apply this, arguments unless isAsar
|
||||
|
||||
archive = getOrCreateArchive asarPath
|
||||
return invalidArchiveError asarPath, callback unless archive
|
||||
|
||||
files = archive.readdir filePath
|
||||
return notFoundError asarPath, filePath, callback unless files
|
||||
|
||||
process.nextTick -> callback null, files
|
||||
|
||||
readdirSync = fs.readdirSync
|
||||
fs.readdirSync = (p) ->
|
||||
[isAsar, asarPath, filePath] = splitPath p
|
||||
return readdirSync.apply this, arguments unless isAsar
|
||||
|
||||
archive = getOrCreateArchive asarPath
|
||||
invalidArchiveError asarPath unless archive
|
||||
|
||||
files = archive.readdir filePath
|
||||
notFoundError asarPath, filePath unless files
|
||||
|
||||
files
|
||||
|
||||
internalModuleReadFile = process.binding('fs').internalModuleReadFile
|
||||
process.binding('fs').internalModuleReadFile = (p) ->
|
||||
[isAsar, asarPath, filePath] = splitPath p
|
||||
return internalModuleReadFile p unless isAsar
|
||||
|
||||
archive = getOrCreateArchive asarPath
|
||||
return undefined unless archive
|
||||
|
||||
info = archive.getFileInfo filePath
|
||||
return undefined unless info
|
||||
return '' if info.size is 0
|
||||
|
||||
if info.unpacked
|
||||
realPath = archive.copyFileOut filePath
|
||||
return fs.readFileSync realPath, encoding: 'utf8'
|
||||
|
||||
buffer = new Buffer(info.size)
|
||||
fd = archive.getFd()
|
||||
return undefined unless fd >= 0
|
||||
|
||||
fs.readSync fd, buffer, 0, info.size, info.offset
|
||||
buffer.toString 'utf8'
|
||||
|
||||
internalModuleStat = process.binding('fs').internalModuleStat
|
||||
process.binding('fs').internalModuleStat = (p) ->
|
||||
[isAsar, asarPath, filePath] = splitPath p
|
||||
return internalModuleStat p unless isAsar
|
||||
|
||||
archive = getOrCreateArchive asarPath
|
||||
### -ENOENT ###
|
||||
return -34 unless archive
|
||||
|
||||
stats = archive.stat filePath
|
||||
### -ENOENT ###
|
||||
return -34 unless stats
|
||||
|
||||
if stats.isDirectory then return 1 else return 0
|
||||
|
||||
###
|
||||
Calling mkdir for a directory inside an asar archive should throw an ENOTDIR
error, but on Windows it throws ENOENT instead. The override below returns
ENOTDIR to work around a recursive-looping bug in mkdirp, since mkdirp is
widely used.
|
||||
###
|
||||
if process.platform is 'win32'
|
||||
mkdir = fs.mkdir
|
||||
fs.mkdir = (p, mode, callback) ->
|
||||
callback = mode if typeof mode is 'function'
|
||||
[isAsar, asarPath, filePath] = splitPath p
|
||||
return notDirError callback if isAsar and filePath.length
|
||||
mkdir p, mode, callback
|
||||
|
||||
mkdirSync = fs.mkdirSync
|
||||
fs.mkdirSync = (p, mode) ->
|
||||
[isAsar, asarPath, filePath] = splitPath p
|
||||
notDirError() if isAsar and filePath.length
|
||||
mkdirSync p, mode
|
||||
|
||||
overrideAPI fs, 'open'
|
||||
overrideAPI child_process, 'execFile'
|
||||
overrideAPISync process, 'dlopen', 1
|
||||
overrideAPISync require('module')._extensions, '.node', 1
|
||||
overrideAPISync fs, 'openSync'
|
||||
overrideAPISync child_process, 'execFileSync'
atom/common/lib/asar.js · 608 lines · new file
@@ -0,0 +1,608 @@
var asar, asarStatsToFsStats, cachedArchives, child_process, fakeTime, getOrCreateArchive, gid, invalidArchiveError, nextInode, notDirError, notFoundError, overrideAPI, overrideAPISync, path, splitPath, uid, util,
|
||||
hasProp = {}.hasOwnProperty;
|
||||
|
||||
asar = process.binding('atom_common_asar');
|
||||
|
||||
child_process = require('child_process');
|
||||
|
||||
path = require('path');
|
||||
|
||||
util = require('util');
|
||||
|
||||
|
||||
/* Cache asar archive objects. */
|
||||
|
||||
cachedArchives = {};
|
||||
|
||||
getOrCreateArchive = function(p) {
|
||||
var archive;
|
||||
archive = cachedArchives[p];
|
||||
if (archive != null) {
|
||||
return archive;
|
||||
}
|
||||
archive = asar.createArchive(p);
|
||||
if (!archive) {
|
||||
return false;
|
||||
}
|
||||
return cachedArchives[p] = archive;
|
||||
};
|
||||
|
||||
|
||||
/* Clean cache on quit. */
|
||||
|
||||
process.on('exit', function() {
|
||||
var archive, p, results;
|
||||
results = [];
|
||||
for (p in cachedArchives) {
|
||||
if (!hasProp.call(cachedArchives, p)) continue;
|
||||
archive = cachedArchives[p];
|
||||
results.push(archive.destroy());
|
||||
}
|
||||
return results;
|
||||
});
|
||||
|
||||
|
||||
/* Separate asar package's path from full path. */
|
||||
|
||||
splitPath = function(p) {
|
||||
|
||||
/* shortcut to disable asar. */
|
||||
var index;
|
||||
if (process.noAsar) {
|
||||
return [false];
|
||||
}
|
||||
if (typeof p !== 'string') {
|
||||
return [false];
|
||||
}
|
||||
if (p.substr(-5) === '.asar') {
|
||||
return [true, p, ''];
|
||||
}
|
||||
p = path.normalize(p);
|
||||
index = p.lastIndexOf(".asar" + path.sep);
|
||||
if (index === -1) {
|
||||
return [false];
|
||||
}
|
||||
return [true, p.substr(0, index + 5), p.substr(index + 6)];
|
||||
};
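/*
 Illustrative example (not part of the original commit; paths are made up):

   splitPath('/usr/lib/app.asar/renderer/index.js')
   // => [true, '/usr/lib/app.asar', 'renderer/index.js']
   splitPath('/usr/lib/app.asar')
   // => [true, '/usr/lib/app.asar', '']
   splitPath('/usr/lib/plain/index.js')
   // => [false]   (no ".asar" component in the path)
*/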
|
||||
|
||||
|
||||
/* Convert asar archive's Stats object to fs's Stats object. */
|
||||
|
||||
nextInode = 0;
|
||||
|
||||
uid = process.getuid != null ? process.getuid() : 0;
|
||||
|
||||
gid = process.getgid != null ? process.getgid() : 0;
|
||||
|
||||
fakeTime = new Date();
|
||||
|
||||
asarStatsToFsStats = function(stats) {
|
||||
return {
|
||||
dev: 1,
|
||||
ino: ++nextInode,
|
||||
mode: 33188,
|
||||
nlink: 1,
|
||||
uid: uid,
|
||||
gid: gid,
|
||||
rdev: 0,
|
||||
atime: stats.atime || fakeTime,
|
||||
birthtime: stats.birthtime || fakeTime,
|
||||
mtime: stats.mtime || fakeTime,
|
||||
ctime: stats.ctime || fakeTime,
|
||||
size: stats.size,
|
||||
isFile: function() {
|
||||
return stats.isFile;
|
||||
},
|
||||
isDirectory: function() {
|
||||
return stats.isDirectory;
|
||||
},
|
||||
isSymbolicLink: function() {
|
||||
return stats.isLink;
|
||||
},
|
||||
isBlockDevice: function() {
|
||||
return false;
|
||||
},
|
||||
isCharacterDevice: function() {
|
||||
return false;
|
||||
},
|
||||
isFIFO: function() {
|
||||
return false;
|
||||
},
|
||||
isSocket: function() {
|
||||
return false;
|
||||
}
|
||||
};
|
||||
};
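/*
 Illustrative example (path is made up): once fs is wrapped below, stat
 calls on paths inside an archive return one of these synthetic Stats
 objects, so existing callers keep working unchanged. mode 33188 is octal
 0100644, i.e. a regular file with rw-r--r-- permissions.

   fs.statSync('/usr/lib/app.asar/package.json').isFile();    // true
   fs.statSync('/usr/lib/app.asar/renderer').isDirectory();   // true
*/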
|
||||
|
||||
|
||||
/* Create an ENOENT error. */
|
||||
|
||||
notFoundError = function(asarPath, filePath, callback) {
|
||||
var error;
|
||||
error = new Error("ENOENT, " + filePath + " not found in " + asarPath);
|
||||
error.code = "ENOENT";
|
||||
error.errno = -2;
|
||||
if (typeof callback !== 'function') {
|
||||
throw error;
|
||||
}
|
||||
return process.nextTick(function() {
|
||||
return callback(error);
|
||||
});
|
||||
};
|
||||
|
||||
|
||||
/* Create an ENOTDIR error. */
|
||||
|
||||
notDirError = function(callback) {
|
||||
var error;
|
||||
error = new Error('ENOTDIR, not a directory');
|
||||
error.code = 'ENOTDIR';
|
||||
error.errno = -20;
|
||||
if (typeof callback !== 'function') {
|
||||
throw error;
|
||||
}
|
||||
return process.nextTick(function() {
|
||||
return callback(error);
|
||||
});
|
||||
};
|
||||
|
||||
|
||||
/* Create invalid archive error. */
|
||||
|
||||
invalidArchiveError = function(asarPath, callback) {
|
||||
var error;
|
||||
error = new Error("Invalid package " + asarPath);
|
||||
if (typeof callback !== 'function') {
|
||||
throw error;
|
||||
}
|
||||
return process.nextTick(function() {
|
||||
return callback(error);
|
||||
});
|
||||
};
|
||||
|
||||
|
||||
/* Override APIs that rely on passing file path instead of content to C++. */
|
||||
|
||||
overrideAPISync = function(module, name, arg) {
|
||||
var old;
|
||||
if (arg == null) {
|
||||
arg = 0;
|
||||
}
|
||||
old = module[name];
|
||||
return module[name] = function() {
|
||||
var archive, asarPath, filePath, isAsar, newPath, p, ref;
|
||||
p = arguments[arg];
|
||||
ref = splitPath(p), isAsar = ref[0], asarPath = ref[1], filePath = ref[2];
|
||||
if (!isAsar) {
|
||||
return old.apply(this, arguments);
|
||||
}
|
||||
archive = getOrCreateArchive(asarPath);
|
||||
if (!archive) {
|
||||
invalidArchiveError(asarPath);
|
||||
}
|
||||
newPath = archive.copyFileOut(filePath);
|
||||
if (!newPath) {
|
||||
notFoundError(asarPath, filePath);
|
||||
}
|
||||
arguments[arg] = newPath;
|
||||
return old.apply(this, arguments);
|
||||
};
|
||||
};
|
||||
|
||||
overrideAPI = function(module, name, arg) {
|
||||
var old;
|
||||
if (arg == null) {
|
||||
arg = 0;
|
||||
}
|
||||
old = module[name];
|
||||
return module[name] = function() {
|
||||
var archive, asarPath, callback, filePath, isAsar, newPath, p, ref;
|
||||
p = arguments[arg];
|
||||
ref = splitPath(p), isAsar = ref[0], asarPath = ref[1], filePath = ref[2];
|
||||
if (!isAsar) {
|
||||
return old.apply(this, arguments);
|
||||
}
|
||||
callback = arguments[arguments.length - 1];
|
||||
if (typeof callback !== 'function') {
|
||||
return overrideAPISync(module, name, arg);
|
||||
}
|
||||
archive = getOrCreateArchive(asarPath);
|
||||
if (!archive) {
|
||||
return invalidArchiveError(asarPath, callback);
|
||||
}
|
||||
newPath = archive.copyFileOut(filePath);
|
||||
if (!newPath) {
|
||||
return notFoundError(asarPath, filePath, callback);
|
||||
}
|
||||
arguments[arg] = newPath;
|
||||
return old.apply(this, arguments);
|
||||
};
|
||||
};
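/*
 Illustrative example (not part of the original commit): both helpers swap
 the asar path argument for a real file extracted via archive.copyFileOut()
 before calling through, so APIs that pass the path straight to C++ still
 work. For instance, after wrapFsWithAsar() applies overrideAPI below:

   child_process.execFile('/usr/lib/app.asar/bin/helper', [], function(err, stdout) {
     // the binary was copied out of the archive and the extracted
     // temporary path was executed in its place
   });
*/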
|
||||
|
||||
|
||||
/* Override fs APIs. */
|
||||
|
||||
exports.wrapFsWithAsar = function(fs) {
|
||||
var exists, existsSync, internalModuleReadFile, internalModuleStat, lstat, lstatSync, mkdir, mkdirSync, open, openSync, readFile, readFileSync, readdir, readdirSync, realpath, realpathSync, stat, statSync, statSyncNoException;
|
||||
lstatSync = fs.lstatSync;
|
||||
fs.lstatSync = function(p) {
|
||||
var archive, asarPath, filePath, isAsar, ref, stats;
|
||||
ref = splitPath(p), isAsar = ref[0], asarPath = ref[1], filePath = ref[2];
|
||||
if (!isAsar) {
|
||||
return lstatSync(p);
|
||||
}
|
||||
archive = getOrCreateArchive(asarPath);
|
||||
if (!archive) {
|
||||
invalidArchiveError(asarPath);
|
||||
}
|
||||
stats = archive.stat(filePath);
|
||||
if (!stats) {
|
||||
notFoundError(asarPath, filePath);
|
||||
}
|
||||
return asarStatsToFsStats(stats);
|
||||
};
|
||||
lstat = fs.lstat;
|
||||
fs.lstat = function(p, callback) {
|
||||
var archive, asarPath, filePath, isAsar, ref, stats;
|
||||
ref = splitPath(p), isAsar = ref[0], asarPath = ref[1], filePath = ref[2];
|
||||
if (!isAsar) {
|
||||
return lstat(p, callback);
|
||||
}
|
||||
archive = getOrCreateArchive(asarPath);
|
||||
if (!archive) {
|
||||
return invalidArchiveError(asarPath, callback);
|
||||
}
|
||||
stats = getOrCreateArchive(asarPath).stat(filePath);
|
||||
if (!stats) {
|
||||
return notFoundError(asarPath, filePath, callback);
|
||||
}
|
||||
return process.nextTick(function() {
|
||||
return callback(null, asarStatsToFsStats(stats));
|
||||
});
|
||||
};
|
||||
statSync = fs.statSync;
|
||||
fs.statSync = function(p) {
|
||||
var asarPath, filePath, isAsar, ref;
|
||||
ref = splitPath(p), isAsar = ref[0], asarPath = ref[1], filePath = ref[2];
|
||||
if (!isAsar) {
|
||||
return statSync(p);
|
||||
}
|
||||
|
||||
/* Do not distinguish links for now. */
|
||||
return fs.lstatSync(p);
|
||||
};
|
||||
stat = fs.stat;
|
||||
fs.stat = function(p, callback) {
|
||||
var asarPath, filePath, isAsar, ref;
|
||||
ref = splitPath(p), isAsar = ref[0], asarPath = ref[1], filePath = ref[2];
|
||||
if (!isAsar) {
|
||||
return stat(p, callback);
|
||||
}
|
||||
|
||||
/* Do not distinguish links for now. */
|
||||
return process.nextTick(function() {
|
||||
return fs.lstat(p, callback);
|
||||
});
|
||||
};
|
||||
statSyncNoException = fs.statSyncNoException;
|
||||
fs.statSyncNoException = function(p) {
|
||||
var archive, asarPath, filePath, isAsar, ref, stats;
|
||||
ref = splitPath(p), isAsar = ref[0], asarPath = ref[1], filePath = ref[2];
|
||||
if (!isAsar) {
|
||||
return statSyncNoException(p);
|
||||
}
|
||||
archive = getOrCreateArchive(asarPath);
|
||||
if (!archive) {
|
||||
return false;
|
||||
}
|
||||
stats = archive.stat(filePath);
|
||||
if (!stats) {
|
||||
return false;
|
||||
}
|
||||
return asarStatsToFsStats(stats);
|
||||
};
|
||||
realpathSync = fs.realpathSync;
|
||||
fs.realpathSync = function(p) {
|
||||
var archive, asarPath, filePath, isAsar, real, ref;
|
||||
ref = splitPath(p), isAsar = ref[0], asarPath = ref[1], filePath = ref[2];
|
||||
if (!isAsar) {
|
||||
return realpathSync.apply(this, arguments);
|
||||
}
|
||||
archive = getOrCreateArchive(asarPath);
|
||||
if (!archive) {
|
||||
invalidArchiveError(asarPath);
|
||||
}
|
||||
real = archive.realpath(filePath);
|
||||
if (real === false) {
|
||||
notFoundError(asarPath, filePath);
|
||||
}
|
||||
return path.join(realpathSync(asarPath), real);
|
||||
};
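/*
 Illustrative example (path is made up): the archive file itself is
 resolved through the real filesystem, the part inside the archive through
 the archive's own metadata, and the two halves are joined:

   fs.realpathSync('/usr/lib/app.asar/lib/foo.js');
   // => '/usr/lib/app.asar/lib/foo.js' when neither half is a symlink
*/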
|
||||
realpath = fs.realpath;
|
||||
fs.realpath = function(p, cache, callback) {
|
||||
var archive, asarPath, filePath, isAsar, real, ref;
|
||||
ref = splitPath(p), isAsar = ref[0], asarPath = ref[1], filePath = ref[2];
|
||||
if (!isAsar) {
|
||||
return realpath.apply(this, arguments);
|
||||
}
|
||||
if (typeof cache === 'function') {
|
||||
callback = cache;
|
||||
cache = void 0;
|
||||
}
|
||||
archive = getOrCreateArchive(asarPath);
|
||||
if (!archive) {
|
||||
return invalidArchiveError(asarPath, callback);
|
||||
}
|
||||
real = archive.realpath(filePath);
|
||||
if (real === false) {
|
||||
return notFoundError(asarPath, filePath, callback);
|
||||
}
|
||||
return realpath(asarPath, function(err, p) {
|
||||
if (err) {
|
||||
return callback(err);
|
||||
}
|
||||
return callback(null, path.join(p, real));
|
||||
});
|
||||
};
|
||||
exists = fs.exists;
|
||||
fs.exists = function(p, callback) {
|
||||
var archive, asarPath, filePath, isAsar, ref;
|
||||
ref = splitPath(p), isAsar = ref[0], asarPath = ref[1], filePath = ref[2];
|
||||
if (!isAsar) {
|
||||
return exists(p, callback);
|
||||
}
|
||||
archive = getOrCreateArchive(asarPath);
|
||||
if (!archive) {
|
||||
return invalidArchiveError(asarPath, callback);
|
||||
}
|
||||
return process.nextTick(function() {
|
||||
return callback(archive.stat(filePath) !== false);
|
||||
});
|
||||
};
|
||||
existsSync = fs.existsSync;
|
||||
fs.existsSync = function(p) {
|
||||
var archive, asarPath, filePath, isAsar, ref;
|
||||
ref = splitPath(p), isAsar = ref[0], asarPath = ref[1], filePath = ref[2];
|
||||
if (!isAsar) {
|
||||
return existsSync(p);
|
||||
}
|
||||
archive = getOrCreateArchive(asarPath);
|
||||
if (!archive) {
|
||||
return false;
|
||||
}
|
||||
return archive.stat(filePath) !== false;
|
||||
};
|
||||
open = fs.open;
|
||||
readFile = fs.readFile;
|
||||
fs.readFile = function(p, options, callback) {
|
||||
var archive, asarPath, buffer, encoding, fd, filePath, info, isAsar, realPath, ref;
|
||||
ref = splitPath(p), isAsar = ref[0], asarPath = ref[1], filePath = ref[2];
|
||||
if (!isAsar) {
|
||||
return readFile.apply(this, arguments);
|
||||
}
|
||||
if (typeof options === 'function') {
|
||||
callback = options;
|
||||
options = void 0;
|
||||
}
|
||||
archive = getOrCreateArchive(asarPath);
|
||||
if (!archive) {
|
||||
return invalidArchiveError(asarPath, callback);
|
||||
}
|
||||
info = archive.getFileInfo(filePath);
|
||||
if (!info) {
|
||||
return notFoundError(asarPath, filePath, callback);
|
||||
}
|
||||
if (info.size === 0) {
|
||||
return process.nextTick(function() {
|
||||
return callback(null, new Buffer(0));
|
||||
});
|
||||
}
|
||||
if (info.unpacked) {
|
||||
realPath = archive.copyFileOut(filePath);
|
||||
return fs.readFile(realPath, options, callback);
|
||||
}
|
||||
if (!options) {
|
||||
options = {
|
||||
encoding: null
|
||||
};
|
||||
} else if (util.isString(options)) {
|
||||
options = {
|
||||
encoding: options
|
||||
};
|
||||
} else if (!util.isObject(options)) {
|
||||
throw new TypeError('Bad arguments');
|
||||
}
|
||||
encoding = options.encoding;
|
||||
buffer = new Buffer(info.size);
|
||||
fd = archive.getFd();
|
||||
if (!(fd >= 0)) {
|
||||
return notFoundError(asarPath, filePath, callback);
|
||||
}
|
||||
return fs.read(fd, buffer, 0, info.size, info.offset, function(error) {
|
||||
return callback(error, encoding ? buffer.toString(encoding) : buffer);
|
||||
});
|
||||
};
|
||||
openSync = fs.openSync;
|
||||
readFileSync = fs.readFileSync;
|
||||
fs.readFileSync = function(p, opts) {
|
||||
|
||||
/* This allows V8 to optimize this function. */
|
||||
var archive, asarPath, buffer, encoding, fd, filePath, info, isAsar, options, realPath, ref;
|
||||
options = opts;
|
||||
ref = splitPath(p), isAsar = ref[0], asarPath = ref[1], filePath = ref[2];
|
||||
if (!isAsar) {
|
||||
return readFileSync.apply(this, arguments);
|
||||
}
|
||||
archive = getOrCreateArchive(asarPath);
|
||||
if (!archive) {
|
||||
invalidArchiveError(asarPath);
|
||||
}
|
||||
info = archive.getFileInfo(filePath);
|
||||
if (!info) {
|
||||
notFoundError(asarPath, filePath);
|
||||
}
|
||||
if (info.size === 0) {
|
||||
if (options) {
|
||||
return '';
|
||||
} else {
|
||||
return new Buffer(0);
|
||||
}
|
||||
}
|
||||
if (info.unpacked) {
|
||||
realPath = archive.copyFileOut(filePath);
|
||||
return fs.readFileSync(realPath, options);
|
||||
}
|
||||
if (!options) {
|
||||
options = {
|
||||
encoding: null
|
||||
};
|
||||
} else if (util.isString(options)) {
|
||||
options = {
|
||||
encoding: options
|
||||
};
|
||||
} else if (!util.isObject(options)) {
|
||||
throw new TypeError('Bad arguments');
|
||||
}
|
||||
encoding = options.encoding;
|
||||
buffer = new Buffer(info.size);
|
||||
fd = archive.getFd();
|
||||
if (!(fd >= 0)) {
|
||||
notFoundError(asarPath, filePath);
|
||||
}
|
||||
fs.readSync(fd, buffer, 0, info.size, info.offset);
|
||||
if (encoding) {
|
||||
return buffer.toString(encoding);
|
||||
} else {
|
||||
return buffer;
|
||||
}
|
||||
};
|
||||
readdir = fs.readdir;
|
||||
fs.readdir = function(p, callback) {
|
||||
var archive, asarPath, filePath, files, isAsar, ref;
|
||||
ref = splitPath(p), isAsar = ref[0], asarPath = ref[1], filePath = ref[2];
|
||||
if (!isAsar) {
|
||||
return readdir.apply(this, arguments);
|
||||
}
|
||||
archive = getOrCreateArchive(asarPath);
|
||||
if (!archive) {
|
||||
return invalidArchiveError(asarPath, callback);
|
||||
}
|
||||
files = archive.readdir(filePath);
|
||||
if (!files) {
|
||||
return notFoundError(asarPath, filePath, callback);
|
||||
}
|
||||
return process.nextTick(function() {
|
||||
return callback(null, files);
|
||||
});
|
||||
};
|
||||
readdirSync = fs.readdirSync;
|
||||
fs.readdirSync = function(p) {
|
||||
var archive, asarPath, filePath, files, isAsar, ref;
|
||||
ref = splitPath(p), isAsar = ref[0], asarPath = ref[1], filePath = ref[2];
|
||||
if (!isAsar) {
|
||||
return readdirSync.apply(this, arguments);
|
||||
}
|
||||
archive = getOrCreateArchive(asarPath);
|
||||
if (!archive) {
|
||||
invalidArchiveError(asarPath);
|
||||
}
|
||||
files = archive.readdir(filePath);
|
||||
if (!files) {
|
||||
notFoundError(asarPath, filePath);
|
||||
}
|
||||
return files;
|
||||
};
|
||||
internalModuleReadFile = process.binding('fs').internalModuleReadFile;
|
||||
process.binding('fs').internalModuleReadFile = function(p) {
|
||||
var archive, asarPath, buffer, fd, filePath, info, isAsar, realPath, ref;
|
||||
ref = splitPath(p), isAsar = ref[0], asarPath = ref[1], filePath = ref[2];
|
||||
if (!isAsar) {
|
||||
return internalModuleReadFile(p);
|
||||
}
|
||||
archive = getOrCreateArchive(asarPath);
|
||||
if (!archive) {
|
||||
return void 0;
|
||||
}
|
||||
info = archive.getFileInfo(filePath);
|
||||
if (!info) {
|
||||
return void 0;
|
||||
}
|
||||
if (info.size === 0) {
|
||||
return '';
|
||||
}
|
||||
if (info.unpacked) {
|
||||
realPath = archive.copyFileOut(filePath);
|
||||
return fs.readFileSync(realPath, {
|
||||
encoding: 'utf8'
|
||||
});
|
||||
}
|
||||
buffer = new Buffer(info.size);
|
||||
fd = archive.getFd();
|
||||
if (!(fd >= 0)) {
|
||||
return void 0;
|
||||
}
|
||||
fs.readSync(fd, buffer, 0, info.size, info.offset);
|
||||
return buffer.toString('utf8');
|
||||
};
|
||||
internalModuleStat = process.binding('fs').internalModuleStat;
|
||||
process.binding('fs').internalModuleStat = function(p) {
|
||||
var archive, asarPath, filePath, isAsar, ref, stats;
|
||||
ref = splitPath(p), isAsar = ref[0], asarPath = ref[1], filePath = ref[2];
|
||||
if (!isAsar) {
|
||||
return internalModuleStat(p);
|
||||
}
|
||||
archive = getOrCreateArchive(asarPath);
|
||||
|
||||
/* -ENOENT */
|
||||
if (!archive) {
|
||||
return -34;
|
||||
}
|
||||
stats = archive.stat(filePath);
|
||||
|
||||
/* -ENOENT */
|
||||
if (!stats) {
|
||||
return -34;
|
||||
}
|
||||
if (stats.isDirectory) {
|
||||
return 1;
|
||||
} else {
|
||||
return 0;
|
||||
}
|
||||
};
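/*
 Illustrative example: the return value follows the convention of Node's
 internal module loader helper -- 0 for a file, 1 for a directory, and the
 negative code above (-34, labelled -ENOENT in the comments) when the path
 is missing from the archive:

   process.binding('fs').internalModuleStat('/usr/lib/app.asar/renderer');    // 1
   process.binding('fs').internalModuleStat('/usr/lib/app.asar/missing.js');  // -34
*/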
|
||||
|
||||
/*
|
||||
Calling mkdir for a directory inside an asar archive should throw an ENOTDIR
error, but on Windows it throws ENOENT instead. The override below returns
ENOTDIR to work around a recursive-looping bug in mkdirp, since mkdirp is
widely used.
|
||||
*/
|
||||
if (process.platform === 'win32') {
|
||||
mkdir = fs.mkdir;
|
||||
fs.mkdir = function(p, mode, callback) {
|
||||
var asarPath, filePath, isAsar, ref;
|
||||
if (typeof mode === 'function') {
|
||||
callback = mode;
|
||||
}
|
||||
ref = splitPath(p), isAsar = ref[0], asarPath = ref[1], filePath = ref[2];
|
||||
if (isAsar && filePath.length) {
|
||||
return notDirError(callback);
|
||||
}
|
||||
return mkdir(p, mode, callback);
|
||||
};
|
||||
mkdirSync = fs.mkdirSync;
|
||||
fs.mkdirSync = function(p, mode) {
|
||||
var asarPath, filePath, isAsar, ref;
|
||||
ref = splitPath(p), isAsar = ref[0], asarPath = ref[1], filePath = ref[2];
|
||||
if (isAsar && filePath.length) {
|
||||
notDirError();
|
||||
}
|
||||
return mkdirSync(p, mode);
|
||||
};
|
||||
}
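/*
 Illustrative example (Windows path is made up): with the patch above,
 creating a directory inside an archive reports ENOTDIR, which keeps
 mkdirp from looping:

   fs.mkdir('C:\\app\\resources\\app.asar\\foo', function(err) {
     // err.code === 'ENOTDIR' rather than the ENOENT Windows would report
   });
*/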
|
||||
overrideAPI(fs, 'open');
|
||||
overrideAPI(child_process, 'execFile');
|
||||
overrideAPISync(process, 'dlopen', 1);
|
||||
overrideAPISync(require('module')._extensions, '.node', 1);
|
||||
overrideAPISync(fs, 'openSync');
|
||||
return overrideAPISync(child_process, 'execFileSync');
|
||||
};
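A minimal usage sketch (an illustration, not part of this commit; the relative require and the archive path are assumptions — in the real build the module is injected through the ATOM_SHELL_ASAR native source, as asar_init.js below shows):

var fs = require('fs');
require('./asar').wrapFsWithAsar(fs);

fs.readFile('/usr/lib/app.asar/package.json', 'utf8', function(err, data) {
  if (err) throw err;
  console.log(JSON.parse(data).name);   // read transparently from inside the archive
});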
atom/common/lib/asar_init.coffee · deleted
@@ -1,22 +0,0 @@
return (process, require, asarSource) ->
|
||||
{createArchive} = process.binding 'atom_common_asar'
|
||||
|
||||
### Make asar.coffee accessible via "require". ###
|
||||
process.binding('natives').ATOM_SHELL_ASAR = asarSource
|
||||
|
||||
### Monkey-patch the fs module. ###
|
||||
require('ATOM_SHELL_ASAR').wrapFsWithAsar require('fs')
|
||||
|
||||
### Make graceful-fs work with asar. ###
|
||||
source = process.binding 'natives'
|
||||
source['original-fs'] = source.fs
|
||||
source['fs'] = """
|
||||
var src = '(function (exports, require, module, __filename, __dirname) { ' +
|
||||
process.binding('natives')['original-fs'] +
|
||||
' });';
|
||||
var vm = require('vm');
|
||||
var fn = vm.runInThisContext(src, { filename: 'fs.js' });
|
||||
fn(exports, require, module);
|
||||
var asar = require('ATOM_SHELL_ASAR');
|
||||
asar.wrapFsWithAsar(exports);
|
||||
"""
|
atom/common/lib/asar_init.js · 15 lines · new file
@@ -0,0 +1,15 @@
return function(process, require, asarSource) {
|
||||
var createArchive, source;
|
||||
createArchive = process.binding('atom_common_asar').createArchive;
|
||||
|
||||
/* Make the asar module accessible via "require". */
|
||||
process.binding('natives').ATOM_SHELL_ASAR = asarSource;
|
||||
|
||||
/* Monkey-patch the fs module. */
|
||||
require('ATOM_SHELL_ASAR').wrapFsWithAsar(require('fs'));
|
||||
|
||||
/* Make graceful-fs work with asar. */
|
||||
source = process.binding('natives');
|
||||
source['original-fs'] = source.fs;
|
||||
return source['fs'] = "var src = '(function (exports, require, module, __filename, __dirname) { ' +\n process.binding('natives')['original-fs'] +\n ' });';\nvar vm = require('vm');\nvar fn = vm.runInThisContext(src, { filename: 'fs.js' });\nfn(exports, require, module);\nvar asar = require('ATOM_SHELL_ASAR');\nasar.wrapFsWithAsar(exports);";
|
||||
};
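A sketch of the effect (illustrative paths, not part of this commit): once this init code has run, requiring 'fs' compiles the replacement source above — the original fs implementation followed by wrapFsWithAsar(exports) — while the untouched module stays reachable as 'original-fs':

var fs = require('fs');                   // asar-aware
var originalFs = require('original-fs');  // pristine Node fs

fs.existsSync('/usr/lib/app.asar/package.json');          // true if packed in the archive
originalFs.existsSync('/usr/lib/app.asar/package.json');  // false, only the .asar file itself exists on disk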
atom/common/lib/init.coffee · deleted
@@ -1,40 +0,0 @@
fs = require 'fs'
|
||||
path = require 'path'
|
||||
timers = require 'timers'
|
||||
Module = require 'module'
|
||||
|
||||
process.atomBinding = (name) ->
|
||||
try
|
||||
process.binding "atom_#{process.type}_#{name}"
|
||||
catch e
|
||||
process.binding "atom_common_#{name}" if /No such module/.test e.message
|
||||
|
||||
unless process.env.ELECTRON_HIDE_INTERNAL_MODULES
|
||||
### Add common/api/lib to module search paths. ###
|
||||
Module.globalPaths.push path.resolve(__dirname, '..', 'api', 'lib')
|
||||
|
||||
###
|
||||
setImmediate and process.nextTick make use of uv_check and uv_prepare to
run their callbacks. However, since we only run the uv loop on requests,
the callbacks wouldn't be called until something else activated the uv
loop, which could delay them for an arbitrarily long time. So we
proactively activate the uv loop whenever setImmediate or
process.nextTick is called.
|
||||
###
|
||||
wrapWithActivateUvLoop = (func) ->
|
||||
->
|
||||
process.activateUvLoop()
|
||||
func.apply this, arguments
|
||||
process.nextTick = wrapWithActivateUvLoop process.nextTick
|
||||
global.setImmediate = wrapWithActivateUvLoop timers.setImmediate
|
||||
global.clearImmediate = timers.clearImmediate
|
||||
|
||||
if process.type is 'browser'
|
||||
###
|
||||
setTimeout needs to update the polling timeout of the event loop. When
called under Chromium's event loop, Node's event loop won't get a chance
to update the timeout, so we have to force Node's event loop to
recalculate the timeout in the browser process.
|
||||
###
|
||||
global.setTimeout = wrapWithActivateUvLoop timers.setTimeout
|
||||
global.setInterval = wrapWithActivateUvLoop timers.setInterval
|
atom/common/lib/init.js · 62 lines · new file
@@ -0,0 +1,62 @@
var Module, fs, path, timers, wrapWithActivateUvLoop;
|
||||
|
||||
fs = require('fs');
|
||||
|
||||
path = require('path');
|
||||
|
||||
timers = require('timers');
|
||||
|
||||
Module = require('module');
|
||||
|
||||
process.atomBinding = function(name) {
|
||||
var e, error;
|
||||
try {
|
||||
return process.binding("atom_" + process.type + "_" + name);
|
||||
} catch (error) {
|
||||
e = error;
|
||||
if (/No such module/.test(e.message)) {
|
||||
return process.binding("atom_common_" + name);
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
if (!process.env.ELECTRON_HIDE_INTERNAL_MODULES) {
|
||||
|
||||
/* Add common/api/lib to module search paths. */
|
||||
Module.globalPaths.push(path.resolve(__dirname, '..', 'api', 'lib'));
|
||||
}
|
||||
|
||||
|
||||
/*
|
||||
setImmediate and process.nextTick make use of uv_check and uv_prepare to
run their callbacks. However, since we only run the uv loop on requests,
the callbacks wouldn't be called until something else activated the uv
loop, which could delay them for an arbitrarily long time. So we
proactively activate the uv loop whenever setImmediate or
process.nextTick is called.
|
||||
*/
|
||||
|
||||
wrapWithActivateUvLoop = function(func) {
|
||||
return function() {
|
||||
process.activateUvLoop();
|
||||
return func.apply(this, arguments);
|
||||
};
|
||||
};
|
||||
|
||||
process.nextTick = wrapWithActivateUvLoop(process.nextTick);
|
||||
|
||||
global.setImmediate = wrapWithActivateUvLoop(timers.setImmediate);
|
||||
|
||||
global.clearImmediate = timers.clearImmediate;
|
||||
|
||||
if (process.type === 'browser') {
|
||||
|
||||
/*
|
||||
setTimeout needs to update the polling timeout of the event loop. When
called under Chromium's event loop, Node's event loop won't get a chance
to update the timeout, so we have to force Node's event loop to
recalculate the timeout in the browser process.
|
||||
*/
|
||||
global.setTimeout = wrapWithActivateUvLoop(timers.setTimeout);
|
||||
global.setInterval = wrapWithActivateUvLoop(timers.setInterval);
|
||||
}
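For illustration (the binding name is an arbitrary example), process.atomBinding resolves a native module by process type and falls back to the common variant:

var binding = process.atomBinding('shell');
// in the browser process this tries process.binding('atom_browser_shell')
// and falls back to process.binding('atom_common_shell') when the
// process-specific module does not exist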
atom/common/lib/reset-search-paths.coffee · deleted
@@ -1,29 +0,0 @@
path = require 'path'
|
||||
Module = require 'module'
|
||||
|
||||
### Clear Node's global search paths. ###
|
||||
Module.globalPaths.length = 0
|
||||
|
||||
### Clear the current module's and its parent's (init.coffee) search paths. ###
|
||||
module.paths = []
|
||||
module.parent.paths = []
|
||||
|
||||
### Prevent Node from adding paths outside this app to search paths. ###
|
||||
Module._nodeModulePaths = (from) ->
|
||||
from = path.resolve from
|
||||
|
||||
### If "from" is outside the app then we do nothing. ###
|
||||
skipOutsidePaths = from.startsWith process.resourcesPath
|
||||
|
||||
### The following logic is copied from module.js. ###
|
||||
splitRe = if process.platform is 'win32' then /[\/\\]/ else /\//
|
||||
paths = []
|
||||
|
||||
parts = from.split splitRe
|
||||
for part, tip in parts by -1
|
||||
continue if part is 'node_modules'
|
||||
dir = parts.slice(0, tip + 1).join path.sep
|
||||
break if skipOutsidePaths and not dir.startsWith process.resourcesPath
|
||||
paths.push path.join(dir, 'node_modules')
|
||||
|
||||
paths
|
atom/common/lib/reset-search-paths.js · 45 lines · new file
@@ -0,0 +1,45 @@
var Module, path;
|
||||
|
||||
path = require('path');
|
||||
|
||||
Module = require('module');
|
||||
|
||||
|
||||
/* Clear Node's global search paths. */
|
||||
|
||||
Module.globalPaths.length = 0;
|
||||
|
||||
|
||||
/* Clear the current module's and its parent's (init.coffee) search paths. */
|
||||
|
||||
module.paths = [];
|
||||
|
||||
module.parent.paths = [];
|
||||
|
||||
|
||||
/* Prevent Node from adding paths outside this app to search paths. */
|
||||
|
||||
Module._nodeModulePaths = function(from) {
|
||||
var dir, i, part, parts, paths, skipOutsidePaths, splitRe, tip;
|
||||
from = path.resolve(from);
|
||||
|
||||
/* If "from" is outside the app then we do nothing. */
|
||||
skipOutsidePaths = from.startsWith(process.resourcesPath);
|
||||
|
||||
/* The following logic is copied from module.js. */
|
||||
splitRe = process.platform === 'win32' ? /[\/\\]/ : /\//;
|
||||
paths = [];
|
||||
parts = from.split(splitRe);
|
||||
for (tip = i = parts.length - 1; i >= 0; tip = i += -1) {
|
||||
part = parts[tip];
|
||||
if (part === 'node_modules') {
|
||||
continue;
|
||||
}
|
||||
dir = parts.slice(0, tip + 1).join(path.sep);
|
||||
if (skipOutsidePaths && !dir.startsWith(process.resourcesPath)) {
|
||||
break;
|
||||
}
|
||||
paths.push(path.join(dir, 'node_modules'));
|
||||
}
|
||||
return paths;
|
||||
};
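An illustrative result (resourcesPath and module path are made up): for a module inside the app, the node_modules search stops at the resources directory instead of walking up to the filesystem root:

// assuming process.resourcesPath === '/opt/app/resources'
Module._nodeModulePaths('/opt/app/resources/app/lib');
// => ['/opt/app/resources/app/lib/node_modules',
//     '/opt/app/resources/app/node_modules',
//     '/opt/app/resources/node_modules']
// a path outside process.resourcesPath still gets the usual full chain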