Update node_modules

crazy-max 2020-03-23 09:00:06 +00:00
parent e59cd9b39f
commit dbe4e9f697
36 changed files with 726 additions and 634 deletions

6
node_modules/at-least-node/LICENSE generated vendored Normal file

@ -0,0 +1,6 @@
The ISC License
Copyright (c) 2020 Ryan Zimmerman <opensrc@ryanzim.com>
Permission to use, copy, modify, and/or distribute this software for any purpose with or without fee is hereby granted, provided that the above copyright notice and this permission notice appear in all copies.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.

25
node_modules/at-least-node/README.md generated vendored Normal file

@ -0,0 +1,25 @@
# at-least-node
![npm](https://img.shields.io/npm/v/at-least-node)
![node](https://img.shields.io/node/v/at-least-node)
![NPM](https://img.shields.io/npm/l/at-least-node)
Sometimes you need to check if you're on _at least_ a given Node.js version, but you don't want to pull in the whole [`semver`](https://www.npmjs.com/package/semver) kitchen sink. That's what `at-least-node` is for.
| Package | Size |
| --------------- | ------- |
| `at-least-node` | 2.6 kB |
| `semver` | 75.5 kB |
```js
const atLeastNode = require('at-least-node')
atLeastNode('10.12.0')
// -> true on Node 10.12.0+, false on anything below that
```
When passing in a version string:
- You cannot include a leading `v` (e.g. `v10.12.0`)
- You cannot omit sections (e.g. `10.12`)
- You cannot use pre-releases (e.g. `1.0.0-beta`)
- There is no input validation; if you make a mistake, the resulting behavior is undefined (see the sketch below)
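Since the package performs no validation of its own, a caller can guard the argument before handing it over. A minimal sketch, where the `requireNodeVersion` helper and its regex are illustrative and not part of `at-least-node`:

```js
const atLeastNode = require('at-least-node')

// Hypothetical guard: reject anything that is not plain `major.minor.patch`
// before delegating to atLeastNode, whose behavior is undefined for bad input.
function requireNodeVersion (version) {
  if (!/^\d+\.\d+\.\d+$/.test(version)) {
    throw new TypeError(`Expected "major.minor.patch", got "${version}"`)
  }
  if (!atLeastNode(version)) {
    throw new Error(`Node.js >= ${version} required, running ${process.versions.node}`)
  }
}

requireNodeVersion('10.12.0') // throws on older runtimes, returns undefined otherwise
```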

5
node_modules/at-least-node/index.js generated vendored Normal file

@ -0,0 +1,5 @@
module.exports = r => {
const n = process.versions.node.split('.').map(x => parseInt(x, 10))
r = r.split('.').map(x => parseInt(x, 10))
return n[0] > r[0] || (n[0] === r[0] && (n[1] > r[1] || (n[1] === r[1] && n[2] >= r[2])))
}

63
node_modules/at-least-node/package.json generated vendored Normal file

@ -0,0 +1,63 @@
{
"_args": [
[
"at-least-node@1.0.0",
"/home/runner/work/ghaction-github-pages/ghaction-github-pages"
]
],
"_from": "at-least-node@1.0.0",
"_id": "at-least-node@1.0.0",
"_inBundle": false,
"_integrity": "sha512-+q/t7Ekv1EDY2l6Gda6LLiX14rU9TV20Wa3ofeQmwPFZbOMo9DXrLbOjFaaclkXKWidIaopwAObQDqwWtGUjqg==",
"_location": "/at-least-node",
"_phantomChildren": {},
"_requested": {
"type": "version",
"registry": true,
"raw": "at-least-node@1.0.0",
"name": "at-least-node",
"escapedName": "at-least-node",
"rawSpec": "1.0.0",
"saveSpec": null,
"fetchSpec": "1.0.0"
},
"_requiredBy": [
"/fs-extra"
],
"_resolved": "https://registry.npmjs.org/at-least-node/-/at-least-node-1.0.0.tgz",
"_spec": "1.0.0",
"_where": "/home/runner/work/ghaction-github-pages/ghaction-github-pages",
"author": {
"name": "Ryan Zimmerman",
"email": "opensrc@ryanzim.com"
},
"bugs": {
"url": "https://github.com/RyanZim/at-least-node/issues"
},
"description": "Lightweight Node.js version sniffing/comparison",
"devDependencies": {
"ava": "^3.1.0",
"rewire": "^4.0.1",
"semver": "^7.1.2"
},
"engines": {
"node": ">= 4.0.0"
},
"files": [],
"homepage": "https://github.com/RyanZim/at-least-node#readme",
"keywords": [
"semver",
"feature"
],
"license": "ISC",
"main": "index.js",
"name": "at-least-node",
"repository": {
"type": "git",
"url": "git+https://github.com/RyanZim/at-least-node.git"
},
"scripts": {
"test": "ava"
},
"version": "1.0.0"
}

25
node_modules/fs-extra/CHANGELOG.md generated vendored

@ -1,3 +1,28 @@
9.0.0 / 2020-03-19
------------------
### Breaking changes
- Requires Node.js version 10 or greater ([#725](https://github.com/jprichardson/node-fs-extra/issues/725), [#751](https://github.com/jprichardson/node-fs-extra/pull/751))
- Switched `ensureDir*` to use a fork of https://github.com/sindresorhus/make-dir to make use of native recursive `fs.mkdir` where possible ([#619](https://github.com/jprichardson/node-fs-extra/issues/619), [#756](https://github.com/jprichardson/node-fs-extra/pull/756))
- Properly preserve `atime` for `copy*` with `preserveTimestamps` option ([#633](https://github.com/jprichardson/node-fs-extra/pull/633))
**The following changes, although technically breaking, will not affect the vast majority of users:**
- `outputJson` now outputs objects as they were when the function was called, even if they are mutated later ([#702](https://github.com/jprichardson/node-fs-extra/issues/702), [#768](https://github.com/jprichardson/node-fs-extra/pull/768))
- Cannot pass `null` as an options parameter to `*Json*` methods ([#745](https://github.com/jprichardson/node-fs-extra/issues/745), [#768](https://github.com/jprichardson/node-fs-extra/pull/768))
### Improvements
- Add promise shims for `fs.writev` & `fs.opendir` ([#747](https://github.com/jprichardson/node-fs-extra/pull/747))
- Better errors for `ensureFile` ([#696](https://github.com/jprichardson/node-fs-extra/issues/696), [#744](https://github.com/jprichardson/node-fs-extra/pull/744))
- Better file comparison for older Node versions ([#694](https://github.com/jprichardson/node-fs-extra/pull/694))
### Miscellaneous changes
- Performance optimizations ([#762](https://github.com/jprichardson/node-fs-extra/issues/762), [#764](https://github.com/jprichardson/node-fs-extra/pull/764))
- Add missing documentation for aliases ([#758](https://github.com/jprichardson/node-fs-extra/issues/758), [#766](https://github.com/jprichardson/node-fs-extra/pull/766))
- Update `universalify` dependency ([#767](https://github.com/jprichardson/node-fs-extra/pull/767))
8.1.0 / 2019-06-28
------------------

2
node_modules/fs-extra/README.md generated vendored

@ -143,7 +143,7 @@ Methods
- [writeJsonSync](docs/writeJson-sync.md)
-**NOTE:** You can still use the native Node.js methods. They are promisified and copied over to `fs-extra`. See [notes on `fs.read()` & `fs.write()`](docs/fs-read-write.md)
+**NOTE:** You can still use the native Node.js methods. They are promisified and copied over to `fs-extra`. See [notes on `fs.read()`, `fs.write()`, & `fs.writev()`](docs/fs-read-write-writev.md)
### What happened to `walk()` and `walkSync()`?


@ -2,8 +2,8 @@
const fs = require('graceful-fs') const fs = require('graceful-fs')
const path = require('path') const path = require('path')
const mkdirpSync = require('../mkdirs').mkdirsSync const mkdirsSync = require('../mkdirs').mkdirsSync
const utimesSync = require('../util/utimes.js').utimesMillisSync const utimesMillisSync = require('../util/utimes').utimesMillisSync
const stat = require('../util/stat') const stat = require('../util/stat')
function copySync (src, dest, opts) { function copySync (src, dest, opts) {
@ -29,7 +29,7 @@ function copySync (src, dest, opts) {
function handleFilterAndCopy (destStat, src, dest, opts) { function handleFilterAndCopy (destStat, src, dest, opts) {
if (opts.filter && !opts.filter(src, dest)) return if (opts.filter && !opts.filter(src, dest)) return
const destParent = path.dirname(dest) const destParent = path.dirname(dest)
if (!fs.existsSync(destParent)) mkdirpSync(destParent) if (!fs.existsSync(destParent)) mkdirsSync(destParent)
return startCopy(destStat, src, dest, opts) return startCopy(destStat, src, dest, opts)
} }
@ -64,49 +64,51 @@ function mayCopyFile (srcStat, src, dest, opts) {
} }
function copyFile (srcStat, src, dest, opts) { function copyFile (srcStat, src, dest, opts) {
if (typeof fs.copyFileSync === 'function') {
fs.copyFileSync(src, dest) fs.copyFileSync(src, dest)
fs.chmodSync(dest, srcStat.mode) if (opts.preserveTimestamps) handleTimestamps(srcStat.mode, src, dest)
if (opts.preserveTimestamps) { return setDestMode(dest, srcStat.mode)
return utimesSync(dest, srcStat.atime, srcStat.mtime)
}
return
}
return copyFileFallback(srcStat, src, dest, opts)
} }
function copyFileFallback (srcStat, src, dest, opts) { function handleTimestamps (srcMode, src, dest) {
const BUF_LENGTH = 64 * 1024 // Make sure the file is writable before setting the timestamp
const _buff = require('../util/buffer')(BUF_LENGTH) // otherwise open fails with EPERM when invoked with 'r+'
// (through utimes call)
if (fileIsNotWritable(srcMode)) makeFileWritable(dest, srcMode)
return setDestTimestamps(src, dest)
}
const fdr = fs.openSync(src, 'r') function fileIsNotWritable (srcMode) {
const fdw = fs.openSync(dest, 'w', srcStat.mode) return (srcMode & 0o200) === 0
let pos = 0 }
while (pos < srcStat.size) { function makeFileWritable (dest, srcMode) {
const bytesRead = fs.readSync(fdr, _buff, 0, BUF_LENGTH, pos) return setDestMode(dest, srcMode | 0o200)
fs.writeSync(fdw, _buff, 0, bytesRead) }
pos += bytesRead
}
if (opts.preserveTimestamps) fs.futimesSync(fdw, srcStat.atime, srcStat.mtime) function setDestMode (dest, srcMode) {
return fs.chmodSync(dest, srcMode)
}
fs.closeSync(fdr) function setDestTimestamps (src, dest) {
fs.closeSync(fdw) // The initial srcStat.atime cannot be trusted
// because it is modified by the read(2) system call
// (See https://nodejs.org/api/fs.html#fs_stat_time_values)
const updatedSrcStat = fs.statSync(src)
return utimesMillisSync(dest, updatedSrcStat.atime, updatedSrcStat.mtime)
} }
function onDir (srcStat, destStat, src, dest, opts) { function onDir (srcStat, destStat, src, dest, opts) {
if (!destStat) return mkDirAndCopy(srcStat, src, dest, opts) if (!destStat) return mkDirAndCopy(srcStat.mode, src, dest, opts)
if (destStat && !destStat.isDirectory()) { if (destStat && !destStat.isDirectory()) {
throw new Error(`Cannot overwrite non-directory '${dest}' with directory '${src}'.`) throw new Error(`Cannot overwrite non-directory '${dest}' with directory '${src}'.`)
} }
return copyDir(src, dest, opts) return copyDir(src, dest, opts)
} }
function mkDirAndCopy (srcStat, src, dest, opts) { function mkDirAndCopy (srcMode, src, dest, opts) {
fs.mkdirSync(dest) fs.mkdirSync(dest)
copyDir(src, dest, opts) copyDir(src, dest, opts)
return fs.chmodSync(dest, srcStat.mode) return setDestMode(dest, srcMode)
} }
function copyDir (src, dest, opts) { function copyDir (src, dest, opts) {


@ -2,9 +2,9 @@
const fs = require('graceful-fs') const fs = require('graceful-fs')
const path = require('path') const path = require('path')
const mkdirp = require('../mkdirs').mkdirs const mkdirs = require('../mkdirs').mkdirs
const pathExists = require('../path-exists').pathExists const pathExists = require('../path-exists').pathExists
const utimes = require('../util/utimes').utimesMillis const utimesMillis = require('../util/utimes').utimesMillis
const stat = require('../util/stat') const stat = require('../util/stat')
function copy (src, dest, opts, cb) { function copy (src, dest, opts, cb) {
@ -43,7 +43,7 @@ function checkParentDir (destStat, src, dest, opts, cb) {
pathExists(destParent, (err, dirExists) => { pathExists(destParent, (err, dirExists) => {
if (err) return cb(err) if (err) return cb(err)
if (dirExists) return startCopy(destStat, src, dest, opts, cb) if (dirExists) return startCopy(destStat, src, dest, opts, cb)
mkdirp(destParent, err => { mkdirs(destParent, err => {
if (err) return cb(err) if (err) return cb(err)
return startCopy(destStat, src, dest, opts, cb) return startCopy(destStat, src, dest, opts, cb)
}) })
@ -92,49 +92,69 @@ function mayCopyFile (srcStat, src, dest, opts, cb) {
} }
function copyFile (srcStat, src, dest, opts, cb) { function copyFile (srcStat, src, dest, opts, cb) {
if (typeof fs.copyFile === 'function') { fs.copyFile(src, dest, err => {
return fs.copyFile(src, dest, err => {
if (err) return cb(err) if (err) return cb(err)
return setDestModeAndTimestamps(srcStat, dest, opts, cb) if (opts.preserveTimestamps) return handleTimestampsAndMode(srcStat.mode, src, dest, cb)
}) return setDestMode(dest, srcStat.mode, cb)
}
return copyFileFallback(srcStat, src, dest, opts, cb)
}
function copyFileFallback (srcStat, src, dest, opts, cb) {
const rs = fs.createReadStream(src)
rs.on('error', err => cb(err)).once('open', () => {
const ws = fs.createWriteStream(dest, { mode: srcStat.mode })
ws.on('error', err => cb(err))
.on('open', () => rs.pipe(ws))
.once('close', () => setDestModeAndTimestamps(srcStat, dest, opts, cb))
}) })
} }
function setDestModeAndTimestamps (srcStat, dest, opts, cb) { function handleTimestampsAndMode (srcMode, src, dest, cb) {
fs.chmod(dest, srcStat.mode, err => { // Make sure the file is writable before setting the timestamp
// otherwise open fails with EPERM when invoked with 'r+'
// (through utimes call)
if (fileIsNotWritable(srcMode)) {
return makeFileWritable(dest, srcMode, err => {
if (err) return cb(err) if (err) return cb(err)
if (opts.preserveTimestamps) { return setDestTimestampsAndMode(srcMode, src, dest, cb)
return utimes(dest, srcStat.atime, srcStat.mtime, cb) })
} }
return cb() return setDestTimestampsAndMode(srcMode, src, dest, cb)
}
function fileIsNotWritable (srcMode) {
return (srcMode & 0o200) === 0
}
function makeFileWritable (dest, srcMode, cb) {
return setDestMode(dest, srcMode | 0o200, cb)
}
function setDestTimestampsAndMode (srcMode, src, dest, cb) {
setDestTimestamps(src, dest, err => {
if (err) return cb(err)
return setDestMode(dest, srcMode, cb)
})
}
function setDestMode (dest, srcMode, cb) {
return fs.chmod(dest, srcMode, cb)
}
function setDestTimestamps (src, dest, cb) {
// The initial srcStat.atime cannot be trusted
// because it is modified by the read(2) system call
// (See https://nodejs.org/api/fs.html#fs_stat_time_values)
fs.stat(src, (err, updatedSrcStat) => {
if (err) return cb(err)
return utimesMillis(dest, updatedSrcStat.atime, updatedSrcStat.mtime, cb)
}) })
} }
function onDir (srcStat, destStat, src, dest, opts, cb) { function onDir (srcStat, destStat, src, dest, opts, cb) {
if (!destStat) return mkDirAndCopy(srcStat, src, dest, opts, cb) if (!destStat) return mkDirAndCopy(srcStat.mode, src, dest, opts, cb)
if (destStat && !destStat.isDirectory()) { if (destStat && !destStat.isDirectory()) {
return cb(new Error(`Cannot overwrite non-directory '${dest}' with directory '${src}'.`)) return cb(new Error(`Cannot overwrite non-directory '${dest}' with directory '${src}'.`))
} }
return copyDir(src, dest, opts, cb) return copyDir(src, dest, opts, cb)
} }
function mkDirAndCopy (srcStat, src, dest, opts, cb) { function mkDirAndCopy (srcMode, src, dest, opts, cb) {
fs.mkdir(dest, err => { fs.mkdir(dest, err => {
if (err) return cb(err) if (err) return cb(err)
copyDir(src, dest, opts, err => { copyDir(src, dest, opts, err => {
if (err) return cb(err) if (err) return cb(err)
return fs.chmod(dest, srcStat.mode, cb) return setDestMode(dest, srcMode, cb)
}) })
}) })
} }
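For context, the code paths above back the public `copy` API; a short usage sketch (the paths are illustrative), relying on the promise that `copy` returns when no callback is passed:

```js
const fse = require('fs-extra')

// Copy a tree and keep the source files' mode, atime and mtime;
// fs-extra 9 re-stats the source after copying so atime survives the read.
fse.copy('/tmp/src-tree', '/tmp/dest-tree', { preserveTimestamps: true })
  .then(() => console.log('copy finished'))
  .catch(err => console.error(err))
```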


@ -30,7 +30,7 @@ function emptyDirSync (dir) {
  let items
  try {
    items = fs.readdirSync(dir)
-  } catch (err) {
+  } catch {
    return mkdir.mkdirsSync(dir)
  }


@ -4,7 +4,6 @@ const u = require('universalify').fromCallback
const path = require('path') const path = require('path')
const fs = require('graceful-fs') const fs = require('graceful-fs')
const mkdir = require('../mkdirs') const mkdir = require('../mkdirs')
const pathExists = require('../path-exists').pathExists
function createFile (file, callback) { function createFile (file, callback) {
function makeFile () { function makeFile () {
@ -17,13 +16,26 @@ function createFile (file, callback) {
fs.stat(file, (err, stats) => { // eslint-disable-line handle-callback-err fs.stat(file, (err, stats) => { // eslint-disable-line handle-callback-err
if (!err && stats.isFile()) return callback() if (!err && stats.isFile()) return callback()
const dir = path.dirname(file) const dir = path.dirname(file)
pathExists(dir, (err, dirExists) => { fs.stat(dir, (err, stats) => {
if (err) return callback(err) if (err) {
if (dirExists) return makeFile() // if the directory doesn't exist, make it
mkdir.mkdirs(dir, err => { if (err.code === 'ENOENT') {
return mkdir.mkdirs(dir, err => {
if (err) return callback(err) if (err) return callback(err)
makeFile() makeFile()
}) })
}
return callback(err)
}
if (stats.isDirectory()) makeFile()
else {
// parent is not a directory
// This is just to cause an internal ENOTDIR error to be thrown
fs.readdir(dir, err => {
if (err) return callback(err)
})
}
}) })
}) })
} }
@ -32,12 +44,20 @@ function createFileSync (file) {
let stats let stats
try { try {
stats = fs.statSync(file) stats = fs.statSync(file)
} catch (e) {} } catch {}
if (stats && stats.isFile()) return if (stats && stats.isFile()) return
const dir = path.dirname(file) const dir = path.dirname(file)
if (!fs.existsSync(dir)) { try {
mkdir.mkdirsSync(dir) if (!fs.statSync(dir).isDirectory()) {
// parent is not a directory
// This is just to cause an internal ENOTDIR error to be thrown
fs.readdirSync(dir)
}
} catch (err) {
// If the stat call above failed because the directory doesn't exist, create it
if (err && err.code === 'ENOENT') mkdir.mkdirsSync(dir)
else throw err
} }
fs.writeFileSync(file, '') fs.writeFileSync(file, '')
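The branches above back the public `ensureFile`/`createFile` API; a short usage sketch (the path is illustrative):

```js
const fse = require('fs-extra')

// Creates the file and any missing parent directories if it does not exist;
// errors such as ENOTDIR (a parent segment is a regular file) reject the promise.
fse.ensureFile('/tmp/demo/logs/app.log')
  .then(() => console.log('file is in place'))
  .catch(err => console.error(err))
```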


@ -34,8 +34,8 @@ function symlinkPaths (srcpath, dstpath, callback) {
return callback(err) return callback(err)
} }
return callback(null, { return callback(null, {
'toCwd': srcpath, toCwd: srcpath,
'toDst': srcpath toDst: srcpath
}) })
}) })
} else { } else {
@ -45,8 +45,8 @@ function symlinkPaths (srcpath, dstpath, callback) {
if (err) return callback(err) if (err) return callback(err)
if (exists) { if (exists) {
return callback(null, { return callback(null, {
'toCwd': relativeToDst, toCwd: relativeToDst,
'toDst': srcpath toDst: srcpath
}) })
} else { } else {
return fs.lstat(srcpath, (err) => { return fs.lstat(srcpath, (err) => {
@ -55,8 +55,8 @@ function symlinkPaths (srcpath, dstpath, callback) {
return callback(err) return callback(err)
} }
return callback(null, { return callback(null, {
'toCwd': srcpath, toCwd: srcpath,
'toDst': path.relative(dstdir, srcpath) toDst: path.relative(dstdir, srcpath)
}) })
}) })
} }
@ -70,8 +70,8 @@ function symlinkPathsSync (srcpath, dstpath) {
exists = fs.existsSync(srcpath) exists = fs.existsSync(srcpath)
if (!exists) throw new Error('absolute srcpath does not exist') if (!exists) throw new Error('absolute srcpath does not exist')
return { return {
'toCwd': srcpath, toCwd: srcpath,
'toDst': srcpath toDst: srcpath
} }
} else { } else {
const dstdir = path.dirname(dstpath) const dstdir = path.dirname(dstpath)
@ -79,15 +79,15 @@ function symlinkPathsSync (srcpath, dstpath) {
exists = fs.existsSync(relativeToDst) exists = fs.existsSync(relativeToDst)
if (exists) { if (exists) {
return { return {
'toCwd': relativeToDst, toCwd: relativeToDst,
'toDst': srcpath toDst: srcpath
} }
} else { } else {
exists = fs.existsSync(srcpath) exists = fs.existsSync(srcpath)
if (!exists) throw new Error('relative srcpath does not exist') if (!exists) throw new Error('relative srcpath does not exist')
return { return {
'toCwd': srcpath, toCwd: srcpath,
'toDst': path.relative(dstdir, srcpath) toDst: path.relative(dstdir, srcpath)
} }
} }
} }


@ -19,7 +19,7 @@ function symlinkTypeSync (srcpath, type) {
  if (type) return type
  try {
    stats = fs.lstatSync(srcpath)
-  } catch (e) {
+  } catch {
    return 'file'
  }
  return (stats && stats.isDirectory()) ? 'dir' : 'file'


@ -18,15 +18,16 @@ const api = [
  'fsync',
  'ftruncate',
  'futimes',
-  'lchown',
  'lchmod',
+  'lchown',
  'link',
  'lstat',
  'mkdir',
  'mkdtemp',
  'open',
-  'readFile',
+  'opendir',
  'readdir',
+  'readFile',
  'readlink',
  'realpath',
  'rename',
@ -39,8 +40,7 @@ const api = [
  'writeFile'
].filter(key => {
  // Some commands are not available on some systems. Ex:
-  // fs.copyFile was added in Node.js v8.5.0
-  // fs.mkdtemp was added in Node.js v5.10.0
+  // fs.opendir was added in Node.js v12.12.0
  // fs.lchown is not available on at least some Linux
  return typeof fs[key] === 'function'
})
@ -71,7 +71,7 @@ exports.exists = function (filename, callback) {
  })
}
-// fs.read() & fs.write need special treatment due to multiple callback args
+// fs.read(), fs.write(), & fs.writev() need special treatment due to multiple callback args
exports.read = function (fd, buffer, offset, length, position, callback) {
  if (typeof callback === 'function') {
@ -103,6 +103,25 @@ exports.write = function (fd, buffer, ...args) {
  })
}
// fs.writev only available in Node v12.9.0+
if (typeof fs.writev === 'function') {
// Function signature is
// fs.writev(fd, buffers[, position], callback)
// We need to handle the optional arg, so we use ...args
exports.writev = function (fd, buffers, ...args) {
if (typeof args[args.length - 1] === 'function') {
return fs.writev(fd, buffers, ...args)
}
return new Promise((resolve, reject) => {
fs.writev(fd, buffers, ...args, (err, bytesWritten, buffers) => {
if (err) return reject(err)
resolve({ bytesWritten, buffers })
})
})
}
}
// fs.realpath.native only available in Node v9.2+
if (typeof fs.realpath.native === 'function') {
  exports.realpath.native = u(fs.realpath.native)
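For reference, a usage sketch of the `writev` wrapper added above (path and buffer contents are illustrative; on Node older than v12.9.0 the wrapper is simply absent, per the guard):

```js
const fse = require('fs-extra')

async function writeChunks () {
  // fs-extra promisifies open/close along with the rest of the fs API it re-exports
  const fd = await fse.open('/tmp/writev-demo.txt', 'w')
  try {
    const buffers = [Buffer.from('hello '), Buffer.from('world\n')]
    // no callback passed, so the shim resolves with { bytesWritten, buffers }
    const { bytesWritten } = await fse.writev(fd, buffers)
    return bytesWritten
  } finally {
    await fse.close(fd)
  }
}

writeChunks().then(n => console.log(`wrote ${n} bytes`)).catch(console.error)
```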

29
node_modules/fs-extra/lib/index.js generated vendored

@ -1,22 +1,21 @@
'use strict'
-module.exports = Object.assign(
-  {},
+module.exports = {
  // Export promiseified graceful-fs:
-  require('./fs'),
+  ...require('./fs'),
  // Export extra methods:
-  require('./copy-sync'),
-  require('./copy'),
-  require('./empty'),
-  require('./ensure'),
-  require('./json'),
-  require('./mkdirs'),
-  require('./move-sync'),
-  require('./move'),
-  require('./output'),
-  require('./path-exists'),
-  require('./remove')
-)
+  ...require('./copy-sync'),
+  ...require('./copy'),
+  ...require('./empty'),
+  ...require('./ensure'),
+  ...require('./json'),
+  ...require('./mkdirs'),
+  ...require('./move-sync'),
+  ...require('./move'),
+  ...require('./output'),
+  ...require('./path-exists'),
+  ...require('./remove')
+}
// Export fs.promises as a getter property so that we don't trigger
// ExperimentalWarning before fs.promises is actually accessed.


@ -1,6 +1,6 @@
'use strict'
-const u = require('universalify').fromCallback
+const u = require('universalify').fromPromise
const jsonFile = require('./jsonfile')
jsonFile.outputJson = u(require('./output-json'))


@ -1,12 +1,11 @@
'use strict'
-const u = require('universalify').fromCallback
const jsonFile = require('jsonfile')
module.exports = {
  // jsonfile exports
-  readJson: u(jsonFile.readFile),
+  readJson: jsonFile.readFile,
  readJsonSync: jsonFile.readFileSync,
-  writeJson: u(jsonFile.writeFile),
+  writeJson: jsonFile.writeFile,
  writeJsonSync: jsonFile.writeFileSync
}


@ -1,18 +1,12 @@
'use strict'
-const fs = require('graceful-fs')
-const path = require('path')
-const mkdir = require('../mkdirs')
-const jsonFile = require('./jsonfile')
+const { stringify } = require('jsonfile/utils')
+const { outputFileSync } = require('../output')
function outputJsonSync (file, data, options) {
-  const dir = path.dirname(file)
-  if (!fs.existsSync(dir)) {
-    mkdir.mkdirsSync(dir)
-  }
-  jsonFile.writeJsonSync(file, data, options)
+  const str = stringify(data, options)
+  outputFileSync(file, str, options)
}
module.exports = outputJsonSync


@ -1,27 +1,12 @@
'use strict'
-const path = require('path')
-const mkdir = require('../mkdirs')
-const pathExists = require('../path-exists').pathExists
-const jsonFile = require('./jsonfile')
+const { stringify } = require('jsonfile/utils')
+const { outputFile } = require('../output')
-function outputJson (file, data, options, callback) {
-  if (typeof options === 'function') {
-    callback = options
-    options = {}
-  }
-  const dir = path.dirname(file)
-  pathExists(dir, (err, itDoes) => {
-    if (err) return callback(err)
-    if (itDoes) return jsonFile.writeJson(file, data, options, callback)
-    mkdir.mkdirs(dir, err => {
-      if (err) return callback(err)
-      jsonFile.writeJson(file, data, options, callback)
-    })
-  })
+async function outputJson (file, data, options = {}) {
+  const str = stringify(data, options)
+  await outputFile(file, str, options)
}
module.exports = outputJson
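The reworked `outputJson` is promise-first, with the callback form restored by the `universalify` wrapper in `lib/json/index.js` earlier in this diff; a short usage sketch (path and payload are illustrative):

```js
const fse = require('fs-extra')

// Creates /tmp/demo if needed, then writes the serialized JSON with 2-space indentation
fse.outputJson('/tmp/demo/config.json', { retries: 3 }, { spaces: 2 })
  .then(() => fse.readJson('/tmp/demo/config.json'))
  .then(obj => console.log(obj.retries)) // -> 3
  .catch(err => console.error(err))
```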


@ -1,14 +1,14 @@
'use strict'
-const u = require('universalify').fromCallback
-const mkdirs = u(require('./mkdirs'))
-const mkdirsSync = require('./mkdirs-sync')
+const u = require('universalify').fromPromise
+const { makeDir: _makeDir, makeDirSync } = require('./make-dir')
+const makeDir = u(_makeDir)
module.exports = {
-  mkdirs,
-  mkdirsSync,
+  mkdirs: makeDir,
+  mkdirsSync: makeDirSync,
  // alias
-  mkdirp: mkdirs,
-  mkdirpSync: mkdirsSync,
-  ensureDir: mkdirs,
-  ensureDirSync: mkdirsSync
+  mkdirp: makeDir,
+  mkdirpSync: makeDirSync,
+  ensureDir: makeDir,
+  ensureDirSync: makeDirSync
}

142
node_modules/fs-extra/lib/mkdirs/make-dir.js generated vendored Normal file

@ -0,0 +1,142 @@
// Adapted from https://github.com/sindresorhus/make-dir
// Copyright (c) Sindre Sorhus <sindresorhus@gmail.com> (sindresorhus.com)
// Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
// The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
'use strict'
const fs = require('../fs')
const path = require('path')
const atLeastNode = require('at-least-node')
const useNativeRecursiveOption = atLeastNode('10.12.0')
// https://github.com/nodejs/node/issues/8987
// https://github.com/libuv/libuv/pull/1088
const checkPath = pth => {
if (process.platform === 'win32') {
const pathHasInvalidWinCharacters = /[<>:"|?*]/.test(pth.replace(path.parse(pth).root, ''))
if (pathHasInvalidWinCharacters) {
const error = new Error(`Path contains invalid characters: ${pth}`)
error.code = 'EINVAL'
throw error
}
}
}
const processOptions = options => {
// Must be defined here so we get fresh process.umask()
const defaults = { mode: 0o777 & (~process.umask()) }
if (typeof options === 'number') options = { mode: options }
return { ...defaults, ...options }
}
const permissionError = pth => {
// This replicates the exception of `fs.mkdir` with the native
// `recursive` option when run on an invalid drive under Windows.
const error = new Error(`operation not permitted, mkdir '${pth}'`)
error.code = 'EPERM'
error.errno = -4048
error.path = pth
error.syscall = 'mkdir'
return error
}
module.exports.makeDir = async (input, options) => {
checkPath(input)
options = processOptions(options)
if (useNativeRecursiveOption) {
const pth = path.resolve(input)
return fs.mkdir(pth, {
mode: options.mode,
recursive: true
})
}
const make = async pth => {
try {
await fs.mkdir(pth, options.mode)
} catch (error) {
if (error.code === 'EPERM') {
throw error
}
if (error.code === 'ENOENT') {
if (path.dirname(pth) === pth) {
throw permissionError(pth)
}
if (error.message.includes('null bytes')) {
throw error
}
await make(path.dirname(pth))
return make(pth)
}
try {
const stats = await fs.stat(pth)
if (!stats.isDirectory()) {
// This error is never exposed to the user
// it is caught below, and the original error is thrown
throw new Error('The path is not a directory')
}
} catch {
throw error
}
}
}
return make(path.resolve(input))
}
module.exports.makeDirSync = (input, options) => {
checkPath(input)
options = processOptions(options)
if (useNativeRecursiveOption) {
const pth = path.resolve(input)
return fs.mkdirSync(pth, {
mode: options.mode,
recursive: true
})
}
const make = pth => {
try {
fs.mkdirSync(pth, options.mode)
} catch (error) {
if (error.code === 'EPERM') {
throw error
}
if (error.code === 'ENOENT') {
if (path.dirname(pth) === pth) {
throw permissionError(pth)
}
if (error.message.includes('null bytes')) {
throw error
}
make(path.dirname(pth))
return make(pth)
}
try {
if (!fs.statSync(pth).isDirectory()) {
// This error is never exposed to the user
// it is caught below, and the original error is thrown
throw new Error('The path is not a directory')
}
} catch {
throw error
}
}
}
return make(path.resolve(input))
}
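This helper is what `mkdirs`/`mkdirp`/`ensureDir` now delegate to (see `lib/mkdirs/index.js` earlier in this diff); a short usage sketch (paths and mode are illustrative):

```js
const fse = require('fs-extra')

async function prepareDirs () {
  // On Node >= 10.12.0 this uses fs.mkdir's native `recursive` option;
  // older runtimes fall back to the manual parent-by-parent loop above.
  await fse.ensureDir('/tmp/demo/nested/output', { mode: 0o775 })
  fse.ensureDirSync('/tmp/demo/nested/logs')
}

prepareDirs().catch(err => console.error(err))
```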


@ -1,54 +0,0 @@
'use strict'
const fs = require('graceful-fs')
const path = require('path')
const invalidWin32Path = require('./win32').invalidWin32Path
const o777 = parseInt('0777', 8)
function mkdirsSync (p, opts, made) {
if (!opts || typeof opts !== 'object') {
opts = { mode: opts }
}
let mode = opts.mode
const xfs = opts.fs || fs
if (process.platform === 'win32' && invalidWin32Path(p)) {
const errInval = new Error(p + ' contains invalid WIN32 path characters.')
errInval.code = 'EINVAL'
throw errInval
}
if (mode === undefined) {
mode = o777 & (~process.umask())
}
if (!made) made = null
p = path.resolve(p)
try {
xfs.mkdirSync(p, mode)
made = made || p
} catch (err0) {
if (err0.code === 'ENOENT') {
if (path.dirname(p) === p) throw err0
made = mkdirsSync(path.dirname(p), opts, made)
mkdirsSync(p, opts, made)
} else {
// In the case of any other error, just see if there's a dir there
// already. If so, then hooray! If not, then something is borked.
let stat
try {
stat = xfs.statSync(p)
} catch (err1) {
throw err0
}
if (!stat.isDirectory()) throw err0
}
}
return made
}
module.exports = mkdirsSync


@ -1,63 +0,0 @@
'use strict'
const fs = require('graceful-fs')
const path = require('path')
const invalidWin32Path = require('./win32').invalidWin32Path
const o777 = parseInt('0777', 8)
function mkdirs (p, opts, callback, made) {
if (typeof opts === 'function') {
callback = opts
opts = {}
} else if (!opts || typeof opts !== 'object') {
opts = { mode: opts }
}
if (process.platform === 'win32' && invalidWin32Path(p)) {
const errInval = new Error(p + ' contains invalid WIN32 path characters.')
errInval.code = 'EINVAL'
return callback(errInval)
}
let mode = opts.mode
const xfs = opts.fs || fs
if (mode === undefined) {
mode = o777 & (~process.umask())
}
if (!made) made = null
callback = callback || function () {}
p = path.resolve(p)
xfs.mkdir(p, mode, er => {
if (!er) {
made = made || p
return callback(null, made)
}
switch (er.code) {
case 'ENOENT':
if (path.dirname(p) === p) return callback(er)
mkdirs(path.dirname(p), opts, (er, made) => {
if (er) callback(er, made)
else mkdirs(p, opts, callback, made)
})
break
// In the case of any other error, just see if there's a dir
// there already. If so, then hooray! If not, then something
// is borked.
default:
xfs.stat(p, (er2, stat) => {
// if the stat fails, then that's super weird.
// let the original error be the failure reason.
if (er2 || !stat.isDirectory()) callback(er, made)
else callback(null, made)
})
break
}
})
}
module.exports = mkdirs


@ -1,25 +0,0 @@
'use strict'
const path = require('path')
// get drive on windows
function getRootPath (p) {
p = path.normalize(path.resolve(p)).split(path.sep)
if (p.length > 0) return p[0]
return null
}
// http://stackoverflow.com/a/62888/10333 contains more accurate
// TODO: expand to include the rest
const INVALID_PATH_CHARS = /[<>:"|?*]/
function invalidWin32Path (p) {
const rp = getRootPath(p)
p = p.replace(rp, '')
return INVALID_PATH_CHARS.test(p)
}
module.exports = {
getRootPath,
invalidWin32Path
}


@ -302,7 +302,7 @@ function rmkidsSync (p, options) {
      try {
        const ret = options.rmdirSync(p, options)
        return ret
-      } catch (er) { }
+      } catch {}
    } while (Date.now() - startTime < 500) // give up after 500ms
  } else {
    const ret = options.rmdirSync(p, options)


@ -1,12 +0,0 @@
'use strict'
/* eslint-disable node/no-deprecated-api */
module.exports = function (size) {
if (typeof Buffer.allocUnsafe === 'function') {
try {
return Buffer.allocUnsafe(size)
} catch (e) {
return new Buffer(size)
}
}
return new Buffer(size)
}


@ -1,70 +1,29 @@
'use strict' 'use strict'
const fs = require('graceful-fs') const fs = require('../fs')
const path = require('path') const path = require('path')
const util = require('util')
const atLeastNode = require('at-least-node')
const NODE_VERSION_MAJOR_WITH_BIGINT = 10 const nodeSupportsBigInt = atLeastNode('10.5.0')
const NODE_VERSION_MINOR_WITH_BIGINT = 5 const stat = (file) => nodeSupportsBigInt ? fs.stat(file, { bigint: true }) : fs.stat(file)
const NODE_VERSION_PATCH_WITH_BIGINT = 0 const statSync = (file) => nodeSupportsBigInt ? fs.statSync(file, { bigint: true }) : fs.statSync(file)
const nodeVersion = process.versions.node.split('.')
const nodeVersionMajor = Number.parseInt(nodeVersion[0], 10)
const nodeVersionMinor = Number.parseInt(nodeVersion[1], 10)
const nodeVersionPatch = Number.parseInt(nodeVersion[2], 10)
function nodeSupportsBigInt () { function getStats (src, dest) {
if (nodeVersionMajor > NODE_VERSION_MAJOR_WITH_BIGINT) { return Promise.all([
return true stat(src),
} else if (nodeVersionMajor === NODE_VERSION_MAJOR_WITH_BIGINT) { stat(dest).catch(err => {
if (nodeVersionMinor > NODE_VERSION_MINOR_WITH_BIGINT) { if (err.code === 'ENOENT') return null
return true throw err
} else if (nodeVersionMinor === NODE_VERSION_MINOR_WITH_BIGINT) {
if (nodeVersionPatch >= NODE_VERSION_PATCH_WITH_BIGINT) {
return true
}
}
}
return false
}
function getStats (src, dest, cb) {
if (nodeSupportsBigInt()) {
fs.stat(src, { bigint: true }, (err, srcStat) => {
if (err) return cb(err)
fs.stat(dest, { bigint: true }, (err, destStat) => {
if (err) {
if (err.code === 'ENOENT') return cb(null, { srcStat, destStat: null })
return cb(err)
}
return cb(null, { srcStat, destStat })
}) })
}) ]).then(([srcStat, destStat]) => ({ srcStat, destStat }))
} else {
fs.stat(src, (err, srcStat) => {
if (err) return cb(err)
fs.stat(dest, (err, destStat) => {
if (err) {
if (err.code === 'ENOENT') return cb(null, { srcStat, destStat: null })
return cb(err)
}
return cb(null, { srcStat, destStat })
})
})
}
} }
function getStatsSync (src, dest) { function getStatsSync (src, dest) {
let srcStat, destStat let destStat
if (nodeSupportsBigInt()) { const srcStat = statSync(src)
srcStat = fs.statSync(src, { bigint: true })
} else {
srcStat = fs.statSync(src)
}
try { try {
if (nodeSupportsBigInt()) { destStat = statSync(dest)
destStat = fs.statSync(dest, { bigint: true })
} else {
destStat = fs.statSync(dest)
}
} catch (err) { } catch (err) {
if (err.code === 'ENOENT') return { srcStat, destStat: null } if (err.code === 'ENOENT') return { srcStat, destStat: null }
throw err throw err
@ -73,10 +32,10 @@ function getStatsSync (src, dest) {
} }
function checkPaths (src, dest, funcName, cb) { function checkPaths (src, dest, funcName, cb) {
getStats(src, dest, (err, stats) => { util.callbackify(getStats)(src, dest, (err, stats) => {
if (err) return cb(err) if (err) return cb(err)
const { srcStat, destStat } = stats const { srcStat, destStat } = stats
if (destStat && destStat.ino && destStat.dev && destStat.ino === srcStat.ino && destStat.dev === srcStat.dev) { if (destStat && areIdentical(srcStat, destStat)) {
return cb(new Error('Source and destination must not be the same.')) return cb(new Error('Source and destination must not be the same.'))
} }
if (srcStat.isDirectory() && isSrcSubdir(src, dest)) { if (srcStat.isDirectory() && isSrcSubdir(src, dest)) {
@ -88,7 +47,7 @@ function checkPaths (src, dest, funcName, cb) {
function checkPathsSync (src, dest, funcName) { function checkPathsSync (src, dest, funcName) {
const { srcStat, destStat } = getStatsSync(src, dest) const { srcStat, destStat } = getStatsSync(src, dest)
if (destStat && destStat.ino && destStat.dev && destStat.ino === srcStat.ino && destStat.dev === srcStat.dev) { if (destStat && areIdentical(srcStat, destStat)) {
throw new Error('Source and destination must not be the same.') throw new Error('Source and destination must not be the same.')
} }
if (srcStat.isDirectory() && isSrcSubdir(src, dest)) { if (srcStat.isDirectory() && isSrcSubdir(src, dest)) {
@ -105,29 +64,18 @@ function checkParentPaths (src, srcStat, dest, funcName, cb) {
const srcParent = path.resolve(path.dirname(src)) const srcParent = path.resolve(path.dirname(src))
const destParent = path.resolve(path.dirname(dest)) const destParent = path.resolve(path.dirname(dest))
if (destParent === srcParent || destParent === path.parse(destParent).root) return cb() if (destParent === srcParent || destParent === path.parse(destParent).root) return cb()
if (nodeSupportsBigInt()) { const callback = (err, destStat) => {
fs.stat(destParent, { bigint: true }, (err, destStat) => {
if (err) { if (err) {
if (err.code === 'ENOENT') return cb() if (err.code === 'ENOENT') return cb()
return cb(err) return cb(err)
} }
if (destStat.ino && destStat.dev && destStat.ino === srcStat.ino && destStat.dev === srcStat.dev) { if (areIdentical(srcStat, destStat)) {
return cb(new Error(errMsg(src, dest, funcName))) return cb(new Error(errMsg(src, dest, funcName)))
} }
return checkParentPaths(src, srcStat, destParent, funcName, cb) return checkParentPaths(src, srcStat, destParent, funcName, cb)
})
} else {
fs.stat(destParent, (err, destStat) => {
if (err) {
if (err.code === 'ENOENT') return cb()
return cb(err)
}
if (destStat.ino && destStat.dev && destStat.ino === srcStat.ino && destStat.dev === srcStat.dev) {
return cb(new Error(errMsg(src, dest, funcName)))
}
return checkParentPaths(src, srcStat, destParent, funcName, cb)
})
} }
if (nodeSupportsBigInt) fs.stat(destParent, { bigint: true }, callback)
else fs.stat(destParent, callback)
} }
function checkParentPathsSync (src, srcStat, dest, funcName) { function checkParentPathsSync (src, srcStat, dest, funcName) {
@ -136,21 +84,40 @@ function checkParentPathsSync (src, srcStat, dest, funcName) {
if (destParent === srcParent || destParent === path.parse(destParent).root) return if (destParent === srcParent || destParent === path.parse(destParent).root) return
let destStat let destStat
try { try {
if (nodeSupportsBigInt()) { destStat = statSync(destParent)
destStat = fs.statSync(destParent, { bigint: true })
} else {
destStat = fs.statSync(destParent)
}
} catch (err) { } catch (err) {
if (err.code === 'ENOENT') return if (err.code === 'ENOENT') return
throw err throw err
} }
if (destStat.ino && destStat.dev && destStat.ino === srcStat.ino && destStat.dev === srcStat.dev) { if (areIdentical(srcStat, destStat)) {
throw new Error(errMsg(src, dest, funcName)) throw new Error(errMsg(src, dest, funcName))
} }
return checkParentPathsSync(src, srcStat, destParent, funcName) return checkParentPathsSync(src, srcStat, destParent, funcName)
} }
function areIdentical (srcStat, destStat) {
if (destStat.ino && destStat.dev && destStat.ino === srcStat.ino && destStat.dev === srcStat.dev) {
if (nodeSupportsBigInt || destStat.ino < Number.MAX_SAFE_INTEGER) {
// definitive answer
return true
}
// Use additional heuristics if we can't use 'bigint'.
// Different 'ino' could be represented the same if they are >= Number.MAX_SAFE_INTEGER
// See issue 657
if (destStat.size === srcStat.size &&
destStat.mode === srcStat.mode &&
destStat.nlink === srcStat.nlink &&
destStat.atimeMs === srcStat.atimeMs &&
destStat.mtimeMs === srcStat.mtimeMs &&
destStat.ctimeMs === srcStat.ctimeMs &&
destStat.birthtimeMs === srcStat.birthtimeMs) {
// heuristic answer
return true
}
}
return false
}
// return true if dest is a subdir of src, otherwise false. // return true if dest is a subdir of src, otherwise false.
// It only checks the path strings. // It only checks the path strings.
function isSrcSubdir (src, dest) { function isSrcSubdir (src, dest) {


@ -1,56 +1,6 @@
'use strict'
const fs = require('graceful-fs')
const os = require('os')
const path = require('path')
// HFS, ext{2,3}, FAT do not, Node.js v0.10 does not
function hasMillisResSync () {
let tmpfile = path.join('millis-test-sync' + Date.now().toString() + Math.random().toString().slice(2))
tmpfile = path.join(os.tmpdir(), tmpfile)
// 550 millis past UNIX epoch
const d = new Date(1435410243862)
fs.writeFileSync(tmpfile, 'https://github.com/jprichardson/node-fs-extra/pull/141')
const fd = fs.openSync(tmpfile, 'r+')
fs.futimesSync(fd, d, d)
fs.closeSync(fd)
return fs.statSync(tmpfile).mtime > 1435410243000
}
function hasMillisRes (callback) {
let tmpfile = path.join('millis-test' + Date.now().toString() + Math.random().toString().slice(2))
tmpfile = path.join(os.tmpdir(), tmpfile)
// 550 millis past UNIX epoch
const d = new Date(1435410243862)
fs.writeFile(tmpfile, 'https://github.com/jprichardson/node-fs-extra/pull/141', err => {
if (err) return callback(err)
fs.open(tmpfile, 'r+', (err, fd) => {
if (err) return callback(err)
fs.futimes(fd, d, d, err => {
if (err) return callback(err)
fs.close(fd, err => {
if (err) return callback(err)
fs.stat(tmpfile, (err, stats) => {
if (err) return callback(err)
callback(null, stats.mtime > 1435410243000)
})
})
})
})
})
}
function timeRemoveMillis (timestamp) {
if (typeof timestamp === 'number') {
return Math.floor(timestamp / 1000) * 1000
} else if (timestamp instanceof Date) {
return new Date(Math.floor(timestamp.getTime() / 1000) * 1000)
} else {
throw new Error('fs-extra: timeRemoveMillis() unknown parameter type')
}
}
function utimesMillis (path, atime, mtime, callback) {
  // if (!HAS_MILLIS_RES) return fs.utimes(path, atime, mtime, callback)
@ -71,9 +21,6 @@ function utimesMillisSync (path, atime, mtime) {
}
module.exports = {
-  hasMillisRes,
-  hasMillisResSync,
-  timeRemoveMillis,
  utimesMillis,
  utimesMillisSync
}

37
node_modules/fs-extra/package.json generated vendored

@ -1,31 +1,31 @@
{ {
"_args": [ "_args": [
[ [
"fs-extra@8.1.0", "fs-extra@9.0.0",
"/home/runner/work/ghaction-github-pages/ghaction-github-pages" "/home/runner/work/ghaction-github-pages/ghaction-github-pages"
] ]
], ],
"_from": "fs-extra@8.1.0", "_from": "fs-extra@9.0.0",
"_id": "fs-extra@8.1.0", "_id": "fs-extra@9.0.0",
"_inBundle": false, "_inBundle": false,
"_integrity": "sha512-yhlQgA6mnOJUKOsRUFsgJdQCvkKhcz8tlZG5HBQfReYZy46OwLcY+Zia0mtdHsOo9y/hP+CxMN0TU9QxoOtG4g==", "_integrity": "sha512-pmEYSk3vYsG/bF651KPUXZ+hvjpgWYw/Gc7W9NFUe3ZVLczKKWIij3IKpOrQcdw4TILtibFslZ0UmR8Vvzig4g==",
"_location": "/fs-extra", "_location": "/fs-extra",
"_phantomChildren": {}, "_phantomChildren": {},
"_requested": { "_requested": {
"type": "version", "type": "version",
"registry": true, "registry": true,
"raw": "fs-extra@8.1.0", "raw": "fs-extra@9.0.0",
"name": "fs-extra", "name": "fs-extra",
"escapedName": "fs-extra", "escapedName": "fs-extra",
"rawSpec": "8.1.0", "rawSpec": "9.0.0",
"saveSpec": null, "saveSpec": null,
"fetchSpec": "8.1.0" "fetchSpec": "9.0.0"
}, },
"_requiredBy": [ "_requiredBy": [
"/" "/"
], ],
"_resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-8.1.0.tgz", "_resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-9.0.0.tgz",
"_spec": "8.1.0", "_spec": "9.0.0",
"_where": "/home/runner/work/ghaction-github-pages/ghaction-github-pages", "_where": "/home/runner/work/ghaction-github-pages/ghaction-github-pages",
"author": { "author": {
"name": "JP Richardson", "name": "JP Richardson",
@ -35,25 +35,25 @@
"url": "https://github.com/jprichardson/node-fs-extra/issues" "url": "https://github.com/jprichardson/node-fs-extra/issues"
}, },
"dependencies": { "dependencies": {
"at-least-node": "^1.0.0",
"graceful-fs": "^4.2.0", "graceful-fs": "^4.2.0",
"jsonfile": "^4.0.0", "jsonfile": "^6.0.1",
"universalify": "^0.1.0" "universalify": "^1.0.0"
}, },
"description": "fs-extra contains methods that aren't included in the vanilla Node.js fs package. Such as mkdir -p, cp -r, and rm -rf.", "description": "fs-extra contains methods that aren't included in the vanilla Node.js fs package. Such as mkdir -p, cp -r, and rm -rf.",
"devDependencies": { "devDependencies": {
"coveralls": "^3.0.0", "coveralls": "^3.0.0",
"istanbul": "^0.4.5",
"klaw": "^2.1.1", "klaw": "^2.1.1",
"klaw-sync": "^3.0.2", "klaw-sync": "^3.0.2",
"minimist": "^1.1.1", "minimist": "^1.1.1",
"mocha": "^5.0.5", "mocha": "^5.0.5",
"nyc": "^15.0.0",
"proxyquire": "^2.0.1", "proxyquire": "^2.0.1",
"read-dir-files": "^0.1.1", "read-dir-files": "^0.1.1",
"semver": "^5.3.0", "standard": "^14.1.0"
"standard": "^12.0.1"
}, },
"engines": { "engines": {
"node": ">=6 <7 || >=8" "node": ">=10"
}, },
"files": [ "files": [
"lib/", "lib/",
@ -81,7 +81,8 @@
"create", "create",
"text", "text",
"output", "output",
"move" "move",
"promise"
], ],
"license": "MIT", "license": "MIT",
"main": "./lib/index.js", "main": "./lib/index.js",
@ -91,7 +92,7 @@
"url": "git+https://github.com/jprichardson/node-fs-extra.git" "url": "git+https://github.com/jprichardson/node-fs-extra.git"
}, },
"scripts": { "scripts": {
"coverage": "istanbul cover -i 'lib/**' -x '**/__tests__/**' test.js", "coverage": "nyc -r lcovonly npm run unit",
"coveralls": "coveralls < coverage/lcov.info", "coveralls": "coveralls < coverage/lcov.info",
"full-ci": "npm run lint && npm run coverage", "full-ci": "npm run lint && npm run coverage",
"lint": "standard", "lint": "standard",
@ -99,5 +100,5 @@
"test-find": "find ./lib/**/__tests__ -name *.test.js | xargs mocha", "test-find": "find ./lib/**/__tests__ -name *.test.js | xargs mocha",
"unit": "node test.js" "unit": "node test.js"
}, },
"version": "8.1.0" "version": "9.0.0"
} }

20
node_modules/jsonfile/CHANGELOG.md generated vendored

@ -1,3 +1,23 @@
6.0.1 / 2020-03-07
------------------
- Update dependency ([#130](https://github.com/jprichardson/node-jsonfile/pull/130))
- Fix code style ([#129](https://github.com/jprichardson/node-jsonfile/pull/129))
6.0.0 / 2020-02-24
------------------
- **BREAKING:** Drop support for Node 6 & 8 ([#128](https://github.com/jprichardson/node-jsonfile/pull/128))
- **BREAKING:** Do not allow passing `null` as options to `readFile()` or `writeFile()` ([#128](https://github.com/jprichardson/node-jsonfile/pull/128))
- Refactor internals ([#128](https://github.com/jprichardson/node-jsonfile/pull/128))
5.0.0 / 2018-09-08
------------------
- **BREAKING:** Drop Node 4 support
- **BREAKING:** If no callback is passed to an asynchronous method, a promise is now returned ([#109](https://github.com/jprichardson/node-jsonfile/pull/109))
- Cleanup docs
4.0.0 / 2017-07-12
------------------

129
node_modules/jsonfile/README.md generated vendored

@ -1,7 +1,7 @@
Node.js - jsonfile
================
-Easily read/write JSON files.
+Easily read/write JSON files in Node.js. _Note: this module cannot be used in the browser._
[![npm Package](https://img.shields.io/npm/v/jsonfile.svg?style=flat-square)](https://www.npmjs.org/package/jsonfile)
[![build status](https://secure.travis-ci.org/jprichardson/node-jsonfile.svg)](http://travis-ci.org/jprichardson/node-jsonfile)
@ -26,101 +26,138 @@ Installation
API API
--- ---
* [`readFile(filename, [options], callback)`](#readfilefilename-options-callback)
* [`readFileSync(filename, [options])`](#readfilesyncfilename-options)
* [`writeFile(filename, obj, [options], callback)`](#writefilefilename-obj-options-callback)
* [`writeFileSync(filename, obj, [options])`](#writefilesyncfilename-obj-options)
----
### readFile(filename, [options], callback) ### readFile(filename, [options], callback)
`options` (`object`, default `undefined`): Pass in any `fs.readFile` options or set `reviver` for a [JSON reviver](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/JSON/parse). `options` (`object`, default `undefined`): Pass in any [`fs.readFile`](https://nodejs.org/api/fs.html#fs_fs_readfile_path_options_callback) options or set `reviver` for a [JSON reviver](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/JSON/parse).
- `throws` (`boolean`, default: `true`). If `JSON.parse` throws an error, pass this error to the callback. - `throws` (`boolean`, default: `true`). If `JSON.parse` throws an error, pass this error to the callback.
If `false`, returns `null` for the object. If `false`, returns `null` for the object.
```js ```js
var jsonfile = require('jsonfile') const jsonfile = require('jsonfile')
var file = '/tmp/data.json' const file = '/tmp/data.json'
jsonfile.readFile(file, function(err, obj) { jsonfile.readFile(file, function (err, obj) {
if (err) console.error(err)
console.dir(obj) console.dir(obj)
}) })
``` ```
You can also use this method with promises. The `readFile` method will return a promise if you do not pass a callback function.
```js
const jsonfile = require('jsonfile')
const file = '/tmp/data.json'
jsonfile.readFile(file)
.then(obj => console.dir(obj))
.catch(error => console.error(error))
```
----
### readFileSync(filename, [options]) ### readFileSync(filename, [options])
`options` (`object`, default `undefined`): Pass in any `fs.readFileSync` options or set `reviver` for a [JSON reviver](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/JSON/parse). `options` (`object`, default `undefined`): Pass in any [`fs.readFileSync`](https://nodejs.org/api/fs.html#fs_fs_readfilesync_path_options) options or set `reviver` for a [JSON reviver](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/JSON/parse).
- `throws` (`boolean`, default: `true`). If an error is encountered reading or parsing the file, throw the error. If `false`, returns `null` for the object. - `throws` (`boolean`, default: `true`). If an error is encountered reading or parsing the file, throw the error. If `false`, returns `null` for the object.
```js ```js
var jsonfile = require('jsonfile') const jsonfile = require('jsonfile')
var file = '/tmp/data.json' const file = '/tmp/data.json'
console.dir(jsonfile.readFileSync(file)) console.dir(jsonfile.readFileSync(file))
``` ```
----
### writeFile(filename, obj, [options], callback) ### writeFile(filename, obj, [options], callback)
`options`: Pass in any `fs.writeFile` options or set `replacer` for a [JSON replacer](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/JSON/stringify). Can also pass in `spaces` and override `EOL` string. `options`: Pass in any [`fs.writeFile`](https://nodejs.org/api/fs.html#fs_fs_writefile_file_data_options_callback) options or set `replacer` for a [JSON replacer](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/JSON/stringify). Can also pass in `spaces` and override `EOL` string.
```js ```js
var jsonfile = require('jsonfile') const jsonfile = require('jsonfile')
var file = '/tmp/data.json' const file = '/tmp/data.json'
var obj = {name: 'JP'} const obj = { name: 'JP' }
jsonfile.writeFile(file, obj, function (err) { jsonfile.writeFile(file, obj, function (err) {
console.error(err) if (err) console.error(err)
}) })
``` ```
Or use with promises as follows:
```js
const jsonfile = require('jsonfile')
const file = '/tmp/data.json'
const obj = { name: 'JP' }
jsonfile.writeFile(file, obj)
.then(res => {
console.log('Write complete')
})
.catch(error => console.error(error))
```
**formatting with spaces:** **formatting with spaces:**
```js ```js
var jsonfile = require('jsonfile') const jsonfile = require('jsonfile')
var file = '/tmp/data.json' const file = '/tmp/data.json'
var obj = {name: 'JP'} const obj = { name: 'JP' }
jsonfile.writeFile(file, obj, {spaces: 2}, function(err) { jsonfile.writeFile(file, obj, { spaces: 2 }, function (err) {
console.error(err) if (err) console.error(err)
}) })
``` ```
**overriding EOL:** **overriding EOL:**
```js ```js
var jsonfile = require('jsonfile') const jsonfile = require('jsonfile')
var file = '/tmp/data.json' const file = '/tmp/data.json'
var obj = {name: 'JP'} const obj = { name: 'JP' }
jsonfile.writeFile(file, obj, {spaces: 2, EOL: '\r\n'}, function(err) { jsonfile.writeFile(file, obj, { spaces: 2, EOL: '\r\n' }, function (err) {
console.error(err) if (err) console.error(err)
}) })
``` ```
**appending to an existing JSON file:** **appending to an existing JSON file:**
You can use `fs.writeFile` option `{flag: 'a'}` to achieve this. You can use `fs.writeFile` option `{ flag: 'a' }` to achieve this.
```js ```js
var jsonfile = require('jsonfile') const jsonfile = require('jsonfile')
var file = '/tmp/mayAlreadyExistedData.json' const file = '/tmp/mayAlreadyExistedData.json'
var obj = {name: 'JP'} const obj = { name: 'JP' }
jsonfile.writeFile(file, obj, {flag: 'a'}, function (err) { jsonfile.writeFile(file, obj, { flag: 'a' }, function (err) {
console.error(err) if (err) console.error(err)
}) })
``` ```
----
### writeFileSync(filename, obj, [options]) ### writeFileSync(filename, obj, [options])
`options`: Pass in any `fs.writeFileSync` options or set `replacer` for a [JSON replacer](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/JSON/stringify). Can also pass in `spaces` and override `EOL` string. `options`: Pass in any [`fs.writeFileSync`](https://nodejs.org/api/fs.html#fs_fs_writefilesync_file_data_options) options or set `replacer` for a [JSON replacer](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/JSON/stringify). Can also pass in `spaces` and override `EOL` string.
```js ```js
var jsonfile = require('jsonfile') const jsonfile = require('jsonfile')
var file = '/tmp/data.json' const file = '/tmp/data.json'
var obj = {name: 'JP'} const obj = { name: 'JP' }
jsonfile.writeFileSync(file, obj) jsonfile.writeFileSync(file, obj)
``` ```
@ -128,36 +165,36 @@ jsonfile.writeFileSync(file, obj)
**formatting with spaces:**
```js
-var jsonfile = require('jsonfile')
-var file = '/tmp/data.json'
-var obj = {name: 'JP'}
-jsonfile.writeFileSync(file, obj, {spaces: 2})
+const jsonfile = require('jsonfile')
+const file = '/tmp/data.json'
+const obj = { name: 'JP' }
+jsonfile.writeFileSync(file, obj, { spaces: 2 })
```
**overriding EOL:**
```js
-var jsonfile = require('jsonfile')
-var file = '/tmp/data.json'
-var obj = {name: 'JP'}
-jsonfile.writeFileSync(file, obj, {spaces: 2, EOL: '\r\n'})
+const jsonfile = require('jsonfile')
+const file = '/tmp/data.json'
+const obj = { name: 'JP' }
+jsonfile.writeFileSync(file, obj, { spaces: 2, EOL: '\r\n' })
```
**appending to an existing JSON file:**
-You can use `fs.writeFileSync` option `{flag: 'a'}` to achieve this.
+You can use `fs.writeFileSync` option `{ flag: 'a' }` to achieve this.
```js
-var jsonfile = require('jsonfile')
-var file = '/tmp/mayAlreadyExistedData.json'
-var obj = {name: 'JP'}
-jsonfile.writeFileSync(file, obj, {flag: 'a'})
+const jsonfile = require('jsonfile')
+const file = '/tmp/mayAlreadyExistedData.json'
+const obj = { name: 'JP' }
+jsonfile.writeFileSync(file, obj, { flag: 'a' })
```
License
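For reference, the promise-returning API documented in the README above also composes with `async`/`await`. A minimal sketch, assuming jsonfile 6.x; the path and object are illustrative and not part of the vendored file:

```js
const jsonfile = require('jsonfile')

// Illustrative round trip: writeFile/readFile return promises when no callback is given.
async function roundTrip () {
  const file = '/tmp/data.json' // placeholder path
  await jsonfile.writeFile(file, { name: 'JP' }, { spaces: 2 })
  const obj = await jsonfile.readFile(file)
  console.log(obj.name) // -> 'JP'
}

roundTrip().catch(console.error)
```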
116
node_modules/jsonfile/index.js generated vendored
View File
@ -1,69 +1,58 @@
-var _fs
+let _fs
try {
  _fs = require('graceful-fs')
} catch (_) {
  _fs = require('fs')
}
-function readFile (file, options, callback) {
-  if (callback == null) {
-    callback = options
-    options = {}
-  }
+const universalify = require('universalify')
+const { stringify, stripBom } = require('./utils')
+async function _readFile (file, options = {}) {
  if (typeof options === 'string') {
-    options = {encoding: options}
+    options = { encoding: options }
  }
-  options = options || {}
-  var fs = options.fs || _fs
-  var shouldThrow = true
-  if ('throws' in options) {
-    shouldThrow = options.throws
-  }
-  fs.readFile(file, options, function (err, data) {
-    if (err) return callback(err)
+  const fs = options.fs || _fs
+  const shouldThrow = 'throws' in options ? options.throws : true
+  let data = await universalify.fromCallback(fs.readFile)(file, options)
  data = stripBom(data)
-  var obj
+  let obj
  try {
    obj = JSON.parse(data, options ? options.reviver : null)
-  } catch (err2) {
+  } catch (err) {
    if (shouldThrow) {
-      err2.message = file + ': ' + err2.message
-      return callback(err2)
+      err.message = `${file}: ${err.message}`
+      throw err
    } else {
-      return callback(null, null)
+      return null
    }
  }
-  callback(null, obj)
-  })
+  return obj
}
-function readFileSync (file, options) {
-  options = options || {}
+const readFile = universalify.fromPromise(_readFile)
+function readFileSync (file, options = {}) {
  if (typeof options === 'string') {
-    options = {encoding: options}
+    options = { encoding: options }
  }
-  var fs = options.fs || _fs
-  var shouldThrow = true
-  if ('throws' in options) {
-    shouldThrow = options.throws
-  }
+  const fs = options.fs || _fs
+  const shouldThrow = 'throws' in options ? options.throws : true
  try {
-    var content = fs.readFileSync(file, options)
+    let content = fs.readFileSync(file, options)
    content = stripBom(content)
    return JSON.parse(content, options.reviver)
  } catch (err) {
    if (shouldThrow) {
-      err.message = file + ': ' + err.message
+      err.message = `${file}: ${err.message}`
      throw err
    } else {
      return null
@ -71,64 +60,29 @@ function readFileSync (file, options) {
  }
}
-function stringify (obj, options) {
-  var spaces
-  var EOL = '\n'
-  if (typeof options === 'object' && options !== null) {
-    if (options.spaces) {
-      spaces = options.spaces
-    }
-    if (options.EOL) {
-      EOL = options.EOL
-    }
-  }
-  var str = JSON.stringify(obj, options ? options.replacer : null, spaces)
-  return str.replace(/\n/g, EOL) + EOL
+async function _writeFile (file, obj, options = {}) {
+  const fs = options.fs || _fs
+  const str = stringify(obj, options)
+  await universalify.fromCallback(fs.writeFile)(file, str, options)
}
-function writeFile (file, obj, options, callback) {
-  if (callback == null) {
-    callback = options
-    options = {}
-  }
-  options = options || {}
-  var fs = options.fs || _fs
-  var str = ''
-  try {
-    str = stringify(obj, options)
-  } catch (err) {
-    // Need to return whether a callback was passed or not
-    if (callback) callback(err, null)
-    return
-  }
-  fs.writeFile(file, str, options, callback)
-}
-function writeFileSync (file, obj, options) {
-  options = options || {}
-  var fs = options.fs || _fs
-  var str = stringify(obj, options)
+const writeFile = universalify.fromPromise(_writeFile)
+function writeFileSync (file, obj, options = {}) {
+  const fs = options.fs || _fs
+  const str = stringify(obj, options)
  // not sure if fs.writeFileSync returns anything, but just in case
  return fs.writeFileSync(file, str, options)
}
-function stripBom (content) {
-  // we do this because JSON.parse would convert it to a utf8 string if encoding wasn't specified
-  if (Buffer.isBuffer(content)) content = content.toString('utf8')
-  content = content.replace(/^\uFEFF/, '')
-  return content
-}
-var jsonfile = {
-  readFile: readFile,
-  readFileSync: readFileSync,
-  writeFile: writeFile,
-  writeFileSync: writeFileSync
+const jsonfile = {
+  readFile,
+  readFileSync,
+  writeFile,
+  writeFileSync
}
module.exports = jsonfile
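The rewrite above implements `readFile`/`writeFile` as `async` functions and wraps them with `universalify.fromPromise`, so each exported function either takes a trailing callback or returns a promise. A minimal sketch of both call styles and the `throws` option; the file path is illustrative:

```js
const jsonfile = require('jsonfile')
const file = '/tmp/data.json' // placeholder path

// Callback style: the trailing function is detected and called with (err, obj).
jsonfile.readFile(file, function (err, obj) {
  if (err) return console.error(err)
  console.log(obj)
})

// Promise style: omit the callback. With { throws: false }, a JSON parse
// error resolves to null instead of rejecting (I/O errors still reject).
jsonfile.readFile(file, { throws: false })
  .then(obj => console.log(obj))
  .catch(err => console.error(err))
```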
30
node_modules/jsonfile/package.json generated vendored
View File
@ -1,31 +1,31 @@
{
  "_args": [
    [
-      "jsonfile@4.0.0",
+      "jsonfile@6.0.1",
      "/home/runner/work/ghaction-github-pages/ghaction-github-pages"
    ]
  ],
-  "_from": "jsonfile@4.0.0",
-  "_id": "jsonfile@4.0.0",
+  "_from": "jsonfile@6.0.1",
+  "_id": "jsonfile@6.0.1",
  "_inBundle": false,
-  "_integrity": "sha1-h3Gq4HmbZAdrdmQPygWPnBDjPss=",
+  "_integrity": "sha512-jR2b5v7d2vIOust+w3wtFKZIfpC2pnRmFAhAC/BuweZFQR8qZzxH1OyrQ10HmdVYiXWkYUqPVsz91cG7EL2FBg==",
  "_location": "/jsonfile",
  "_phantomChildren": {},
  "_requested": {
    "type": "version",
    "registry": true,
-    "raw": "jsonfile@4.0.0",
+    "raw": "jsonfile@6.0.1",
    "name": "jsonfile",
    "escapedName": "jsonfile",
-    "rawSpec": "4.0.0",
+    "rawSpec": "6.0.1",
    "saveSpec": null,
-    "fetchSpec": "4.0.0"
+    "fetchSpec": "6.0.1"
  },
  "_requiredBy": [
    "/fs-extra"
  ],
-  "_resolved": "https://registry.npmjs.org/jsonfile/-/jsonfile-4.0.0.tgz",
-  "_spec": "4.0.0",
+  "_resolved": "https://registry.npmjs.org/jsonfile/-/jsonfile-6.0.1.tgz",
+  "_spec": "6.0.1",
  "_where": "/home/runner/work/ghaction-github-pages/ghaction-github-pages",
  "author": {
    "name": "JP Richardson",
@ -35,16 +35,18 @@
    "url": "https://github.com/jprichardson/node-jsonfile/issues"
  },
  "dependencies": {
-    "graceful-fs": "^4.1.6"
+    "graceful-fs": "^4.1.6",
+    "universalify": "^1.0.0"
  },
  "description": "Easily read/write JSON files.",
  "devDependencies": {
-    "mocha": "2.x",
+    "mocha": "^5.2.0",
    "rimraf": "^2.4.0",
-    "standard": "^10.0.3"
+    "standard": "^12.0.1"
  },
  "files": [
-    "index.js"
+    "index.js",
+    "utils.js"
  ],
  "homepage": "https://github.com/jprichardson/node-jsonfile#readme",
  "keywords": [
@ -70,5 +72,5 @@
    "test": "npm run lint && npm run unit",
    "unit": "mocha"
  },
-  "version": "4.0.0"
+  "version": "6.0.1"
}
15
node_modules/jsonfile/utils.js generated vendored Normal file
View File
@ -0,0 +1,15 @@
function stringify (obj, options = {}) {
  const EOL = options.EOL || '\n'
  const str = JSON.stringify(obj, options ? options.replacer : null, options.spaces)
  return str.replace(/\n/g, EOL) + EOL
}

function stripBom (content) {
  // we do this because JSON.parse would convert it to a utf8 string if encoding wasn't specified
  if (Buffer.isBuffer(content)) content = content.toString('utf8')
  return content.replace(/^\uFEFF/, '')
}

module.exports = { stringify, stripBom }
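The new `utils.js` above pulls serialization and BOM handling into one place. A short sketch of the expected behaviour (values are illustrative; these helpers are internal to jsonfile, not part of its public API):

```js
const { stringify, stripBom } = require('./utils') // internal module, shown for illustration only

// stringify applies spaces/EOL/replacer and appends a trailing EOL.
stringify({ name: 'JP' }, { spaces: 2, EOL: '\r\n' })
// -> '{\r\n  "name": "JP"\r\n}\r\n'

// stripBom converts Buffers to UTF-8 strings and drops a leading byte-order mark,
// so JSON.parse never sees the BOM character.
stripBom(Buffer.from('\uFEFF{"ok":true}'))
// -> '{"ok":true}'
```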
node_modules/universalify/README.md generated vendored
View File
@ -21,7 +21,7 @@ npm install universalify
Takes a callback-based function to universalify, and returns the universalified function.
-Function must take a callback as the last parameter that will be called with the signature `(error, result)`. `universalify` does not support calling the callback with more than three arguments, and does not ensure that the callback is only called once.
+Function must take a callback as the last parameter that will be called with the signature `(error, result)`. `universalify` does not support calling the callback with three or more arguments, and does not ensure that the callback is only called once.
```js
function callbackFn (n, cb) {
22
node_modules/universalify/index.js generated vendored
View File
@ -1,25 +1,23 @@
'use strict'
exports.fromCallback = function (fn) {
-  return Object.defineProperty(function () {
-    if (typeof arguments[arguments.length - 1] === 'function') fn.apply(this, arguments)
+  return Object.defineProperty(function (...args) {
+    if (typeof args[args.length - 1] === 'function') fn.apply(this, args)
    else {
      return new Promise((resolve, reject) => {
-        arguments[arguments.length] = (err, res) => {
-          if (err) return reject(err)
-          resolve(res)
-        }
-        arguments.length++
-        fn.apply(this, arguments)
+        fn.apply(
+          this,
+          args.concat([(err, res) => err ? reject(err) : resolve(res)])
+        )
      })
    }
  }, 'name', { value: fn.name })
}
exports.fromPromise = function (fn) {
-  return Object.defineProperty(function () {
-    const cb = arguments[arguments.length - 1]
-    if (typeof cb !== 'function') return fn.apply(this, arguments)
-    else fn.apply(this, arguments).then(r => cb(null, r), cb)
+  return Object.defineProperty(function (...args) {
+    const cb = args[args.length - 1]
+    if (typeof cb !== 'function') return fn.apply(this, args)
+    else fn.apply(this, args.slice(0, -1)).then(r => cb(null, r), cb)
  }, 'name', { value: fn.name })
}
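To show what the rewritten helpers above do: `fromCallback` wraps a callback-style function so it also returns a promise when no callback is passed, and `fromPromise` wraps a promise-returning function so it also accepts a trailing callback. A minimal sketch; the wrapped `wait` helper is hypothetical:

```js
const universalify = require('universalify')
const fs = require('fs')

// fromCallback: fs.readFile becomes usable in either style.
const readFile = universalify.fromCallback(fs.readFile)
readFile('package.json', 'utf8').then(data => console.log(data.length))           // promise style
readFile('package.json', 'utf8', (err, data) => err || console.log(data.length))  // callback style

// fromPromise: a hypothetical promise-based delay, callable either way.
const wait = universalify.fromPromise(ms => new Promise(resolve => setTimeout(resolve, ms)))
wait(50).then(() => console.log('done (promise)'))
wait(50, err => err || console.log('done (callback)'))
```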
node_modules/universalify/package.json generated vendored
View File
@ -1,31 +1,32 @@
{
  "_args": [
    [
-      "universalify@0.1.2",
+      "universalify@1.0.0",
      "/home/runner/work/ghaction-github-pages/ghaction-github-pages"
    ]
  ],
-  "_from": "universalify@0.1.2",
-  "_id": "universalify@0.1.2",
+  "_from": "universalify@1.0.0",
+  "_id": "universalify@1.0.0",
  "_inBundle": false,
-  "_integrity": "sha512-rBJeI5CXAlmy1pV+617WB9J63U6XcazHHF2f2dbJix4XzpUF0RS3Zbj0FGIOCAva5P/d/GBOYaACQ1w+0azUkg==",
+  "_integrity": "sha512-rb6X1W158d7pRQBg5gkR8uPaSfiids68LTJQYOtEUhoJUWBdaQHsuT/EUduxXYxcrt4r5PJ4fuHW1MHT6p0qug==",
  "_location": "/universalify",
  "_phantomChildren": {},
  "_requested": {
    "type": "version",
    "registry": true,
-    "raw": "universalify@0.1.2",
+    "raw": "universalify@1.0.0",
    "name": "universalify",
    "escapedName": "universalify",
-    "rawSpec": "0.1.2",
+    "rawSpec": "1.0.0",
    "saveSpec": null,
-    "fetchSpec": "0.1.2"
+    "fetchSpec": "1.0.0"
  },
  "_requiredBy": [
-    "/fs-extra"
+    "/fs-extra",
+    "/jsonfile"
  ],
-  "_resolved": "https://registry.npmjs.org/universalify/-/universalify-0.1.2.tgz",
-  "_spec": "0.1.2",
+  "_resolved": "https://registry.npmjs.org/universalify/-/universalify-1.0.0.tgz",
+  "_spec": "1.0.0",
  "_where": "/home/runner/work/ghaction-github-pages/ghaction-github-pages",
  "author": {
    "name": "Ryan Zimmerman",
@ -38,12 +39,12 @@
  "devDependencies": {
    "colortape": "^0.1.2",
    "coveralls": "^3.0.1",
-    "nyc": "^10.2.0",
-    "standard": "^10.0.1",
+    "nyc": "^15.0.0",
+    "standard": "^14.3.1",
    "tape": "^4.6.3"
  },
  "engines": {
-    "node": ">= 4.0.0"
+    "node": ">= 10.0.0"
  },
  "files": [
    "index.js"
@ -63,5 +64,5 @@
  "scripts": {
    "test": "standard && nyc tape test/*.js | colortape"
  },
-  "version": "0.1.2"
+  "version": "1.0.0"
}