Update node_modules
This commit is contained in:
parent e59cd9b39f
commit dbe4e9f697
6  node_modules/at-least-node/LICENSE  generated  vendored  Normal file

@@ -0,0 +1,6 @@
The ISC License
Copyright (c) 2020 Ryan Zimmerman <opensrc@ryanzim.com>

Permission to use, copy, modify, and/or distribute this software for any purpose with or without fee is hereby granted, provided that the above copyright notice and this permission notice appear in all copies.

THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
25  node_modules/at-least-node/README.md  generated  vendored  Normal file

@@ -0,0 +1,25 @@
# at-least-node

![npm](https://img.shields.io/npm/v/at-least-node)
![node](https://img.shields.io/node/v/at-least-node)
![NPM](https://img.shields.io/npm/l/at-least-node)

Sometimes you need to check if you're on _at least_ a given Node.js version, but you don't want to pull in the whole [`semver`](https://www.npmjs.com/package/semver) kitchen sink. That's what `at-least-node` is for.

| Package         | Size    |
| --------------- | ------- |
| `at-least-node` | 2.6 kB  |
| `semver`        | 75.5 kB |

```js
const atLeastNode = require('at-least-node')
atLeastNode('10.12.0')
// -> true on Node 10.12.0+, false on anything below that
```

When passing in a version string:

- You cannot include a leading `v` (i.e. `v10.12.0`)
- You cannot omit sections (i.e. `10.12`)
- You cannot use pre-releases (i.e. `1.0.0-beta`)
- There is no input validation, if you make a mistake, the resulting behavior is undefined
5  node_modules/at-least-node/index.js  generated  vendored  Normal file

@@ -0,0 +1,5 @@
module.exports = r => {
  const n = process.versions.node.split('.').map(x => parseInt(x, 10))
  r = r.split('.').map(x => parseInt(x, 10))
  return n[0] > r[0] || (n[0] === r[0] && (n[1] > r[1] || (n[1] === r[1] && n[2] >= r[2])))
}
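For context on the one-liner above: it simply compares the `[major, minor, patch]` triples numerically, which is why the README forbids prefixes, missing sections, and pre-release tags. A rough standalone sketch of the same check (the `atLeast` name is hypothetical, used only for illustration):

```js
// Minimal sketch of the comparison done by at-least-node (illustrative only).
const atLeast = wanted => {
  const cur = process.versions.node.split('.').map(Number) // e.g. [12, 16, 1]
  const req = wanted.split('.').map(Number)                 // e.g. [10, 12, 0]
  for (let i = 0; i < 3; i++) {
    if (cur[i] !== req[i]) return cur[i] > req[i]
  }
  return true // identical versions count as "at least"
}

console.log(atLeast('10.12.0')) // true on Node 10.12.0 and newer
```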
63  node_modules/at-least-node/package.json  generated  vendored  Normal file
@ -0,0 +1,63 @@
|
|||
{
|
||||
"_args": [
|
||||
[
|
||||
"at-least-node@1.0.0",
|
||||
"/home/runner/work/ghaction-github-pages/ghaction-github-pages"
|
||||
]
|
||||
],
|
||||
"_from": "at-least-node@1.0.0",
|
||||
"_id": "at-least-node@1.0.0",
|
||||
"_inBundle": false,
|
||||
"_integrity": "sha512-+q/t7Ekv1EDY2l6Gda6LLiX14rU9TV20Wa3ofeQmwPFZbOMo9DXrLbOjFaaclkXKWidIaopwAObQDqwWtGUjqg==",
|
||||
"_location": "/at-least-node",
|
||||
"_phantomChildren": {},
|
||||
"_requested": {
|
||||
"type": "version",
|
||||
"registry": true,
|
||||
"raw": "at-least-node@1.0.0",
|
||||
"name": "at-least-node",
|
||||
"escapedName": "at-least-node",
|
||||
"rawSpec": "1.0.0",
|
||||
"saveSpec": null,
|
||||
"fetchSpec": "1.0.0"
|
||||
},
|
||||
"_requiredBy": [
|
||||
"/fs-extra"
|
||||
],
|
||||
"_resolved": "https://registry.npmjs.org/at-least-node/-/at-least-node-1.0.0.tgz",
|
||||
"_spec": "1.0.0",
|
||||
"_where": "/home/runner/work/ghaction-github-pages/ghaction-github-pages",
|
||||
"author": {
|
||||
"name": "Ryan Zimmerman",
|
||||
"email": "opensrc@ryanzim.com"
|
||||
},
|
||||
"bugs": {
|
||||
"url": "https://github.com/RyanZim/at-least-node/issues"
|
||||
},
|
||||
"description": "Lightweight Node.js version sniffing/comparison",
|
||||
"devDependencies": {
|
||||
"ava": "^3.1.0",
|
||||
"rewire": "^4.0.1",
|
||||
"semver": "^7.1.2"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">= 4.0.0"
|
||||
},
|
||||
"files": [],
|
||||
"homepage": "https://github.com/RyanZim/at-least-node#readme",
|
||||
"keywords": [
|
||||
"semver",
|
||||
"feature"
|
||||
],
|
||||
"license": "ISC",
|
||||
"main": "index.js",
|
||||
"name": "at-least-node",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "git+https://github.com/RyanZim/at-least-node.git"
|
||||
},
|
||||
"scripts": {
|
||||
"test": "ava"
|
||||
},
|
||||
"version": "1.0.0"
|
||||
}
|
25  node_modules/fs-extra/CHANGELOG.md  generated  vendored

@@ -1,3 +1,28 @@
9.0.0 / 2020-03-19
------------------

### Breaking changes

- Requires Node.js version 10 or greater ([#725](https://github.com/jprichardson/node-fs-extra/issues/725), [#751](https://github.com/jprichardson/node-fs-extra/pull/751))
- Switched `ensureDir*` to use a fork of https://github.com/sindresorhus/make-dir to make use of native recursive `fs.mkdir` where possible ([#619](https://github.com/jprichardson/node-fs-extra/issues/619), [#756](https://github.com/jprichardson/node-fs-extra/pull/756))
- Properly preserve `atime` for `copy*` with `preserveTimestamps` option ([#633](https://github.com/jprichardson/node-fs-extra/pull/633))

**The following changes, although technically breaking, will not affect the vast majority of users:**

- `outputJson` now outputs objects as they were when the function was called, even if they are mutated later ([#702](https://github.com/jprichardson/node-fs-extra/issues/702), [#768](https://github.com/jprichardson/node-fs-extra/pull/768))
- Cannot pass `null` as an options parameter to `*Json*` methods ([#745](https://github.com/jprichardson/node-fs-extra/issues/745), [#768](https://github.com/jprichardson/node-fs-extra/pull/768))

### Improvements

- Add promise shims for `fs.writev` & `fs.opendir` ([#747](https://github.com/jprichardson/node-fs-extra/pull/747))
- Better errors for `ensureFile` ([#696](https://github.com/jprichardson/node-fs-extra/issues/696), [#744](https://github.com/jprichardson/node-fs-extra/pull/744))
- Better file comparison for older Node versions ([#694](https://github.com/jprichardson/node-fs-extra/pull/694))

### Miscellaneous changes

- Performance optimizations ([#762](https://github.com/jprichardson/node-fs-extra/issues/762), [#764](https://github.com/jprichardson/node-fs-extra/pull/764))
- Add missing documentation for aliases ([#758](https://github.com/jprichardson/node-fs-extra/issues/758), [#766](https://github.com/jprichardson/node-fs-extra/pull/766))
- Update `universalify` dependency ([#767](https://github.com/jprichardson/node-fs-extra/pull/767))

8.1.0 / 2019-06-28
------------------
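To illustrate the first "technically breaking" item above (assuming the 9.0.0 behavior described in the changelog): `outputJson` now serializes the object as it was at call time, so later mutations no longer leak into the file.

```js
const fse = require('fs-extra')

const data = { status: 'pending' }
const write = fse.outputJson('/tmp/example.json', data) // snapshot taken here (9.x behavior)
data.status = 'done'                                    // mutation after the call is ignored

write
  .then(() => fse.readJson('/tmp/example.json'))
  .then(obj => console.log(obj.status)) // 'pending'
  .catch(console.error)
```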
2  node_modules/fs-extra/README.md  generated  vendored

@@ -143,7 +143,7 @@ Methods
- [writeJsonSync](docs/writeJson-sync.md)

**NOTE:** You can still use the native Node.js methods. They are promisified and copied over to `fs-extra`. See [notes on `fs.read()` & `fs.write()`](docs/fs-read-write.md)
**NOTE:** You can still use the native Node.js methods. They are promisified and copied over to `fs-extra`. See [notes on `fs.read()`, `fs.write()`, & `fs.writev()`](docs/fs-read-write-writev.md)

### What happened to `walk()` and `walkSync()`?
60  node_modules/fs-extra/lib/copy-sync/copy-sync.js  generated  vendored
@ -2,8 +2,8 @@
|
|||
|
||||
const fs = require('graceful-fs')
|
||||
const path = require('path')
|
||||
const mkdirpSync = require('../mkdirs').mkdirsSync
|
||||
const utimesSync = require('../util/utimes.js').utimesMillisSync
|
||||
const mkdirsSync = require('../mkdirs').mkdirsSync
|
||||
const utimesMillisSync = require('../util/utimes').utimesMillisSync
|
||||
const stat = require('../util/stat')
|
||||
|
||||
function copySync (src, dest, opts) {
|
||||
|
@ -29,7 +29,7 @@ function copySync (src, dest, opts) {
|
|||
function handleFilterAndCopy (destStat, src, dest, opts) {
|
||||
if (opts.filter && !opts.filter(src, dest)) return
|
||||
const destParent = path.dirname(dest)
|
||||
if (!fs.existsSync(destParent)) mkdirpSync(destParent)
|
||||
if (!fs.existsSync(destParent)) mkdirsSync(destParent)
|
||||
return startCopy(destStat, src, dest, opts)
|
||||
}
|
||||
|
||||
|
@ -64,49 +64,51 @@ function mayCopyFile (srcStat, src, dest, opts) {
|
|||
}
|
||||
|
||||
function copyFile (srcStat, src, dest, opts) {
|
||||
if (typeof fs.copyFileSync === 'function') {
|
||||
fs.copyFileSync(src, dest)
|
||||
fs.chmodSync(dest, srcStat.mode)
|
||||
if (opts.preserveTimestamps) {
|
||||
return utimesSync(dest, srcStat.atime, srcStat.mtime)
|
||||
}
|
||||
return
|
||||
}
|
||||
return copyFileFallback(srcStat, src, dest, opts)
|
||||
fs.copyFileSync(src, dest)
|
||||
if (opts.preserveTimestamps) handleTimestamps(srcStat.mode, src, dest)
|
||||
return setDestMode(dest, srcStat.mode)
|
||||
}
|
||||
|
||||
function copyFileFallback (srcStat, src, dest, opts) {
|
||||
const BUF_LENGTH = 64 * 1024
|
||||
const _buff = require('../util/buffer')(BUF_LENGTH)
|
||||
function handleTimestamps (srcMode, src, dest) {
|
||||
// Make sure the file is writable before setting the timestamp
|
||||
// otherwise open fails with EPERM when invoked with 'r+'
|
||||
// (through utimes call)
|
||||
if (fileIsNotWritable(srcMode)) makeFileWritable(dest, srcMode)
|
||||
return setDestTimestamps(src, dest)
|
||||
}
|
||||
|
||||
const fdr = fs.openSync(src, 'r')
|
||||
const fdw = fs.openSync(dest, 'w', srcStat.mode)
|
||||
let pos = 0
|
||||
function fileIsNotWritable (srcMode) {
|
||||
return (srcMode & 0o200) === 0
|
||||
}
|
||||
|
||||
while (pos < srcStat.size) {
|
||||
const bytesRead = fs.readSync(fdr, _buff, 0, BUF_LENGTH, pos)
|
||||
fs.writeSync(fdw, _buff, 0, bytesRead)
|
||||
pos += bytesRead
|
||||
}
|
||||
function makeFileWritable (dest, srcMode) {
|
||||
return setDestMode(dest, srcMode | 0o200)
|
||||
}
|
||||
|
||||
if (opts.preserveTimestamps) fs.futimesSync(fdw, srcStat.atime, srcStat.mtime)
|
||||
function setDestMode (dest, srcMode) {
|
||||
return fs.chmodSync(dest, srcMode)
|
||||
}
|
||||
|
||||
fs.closeSync(fdr)
|
||||
fs.closeSync(fdw)
|
||||
function setDestTimestamps (src, dest) {
|
||||
// The initial srcStat.atime cannot be trusted
|
||||
// because it is modified by the read(2) system call
|
||||
// (See https://nodejs.org/api/fs.html#fs_stat_time_values)
|
||||
const updatedSrcStat = fs.statSync(src)
|
||||
return utimesMillisSync(dest, updatedSrcStat.atime, updatedSrcStat.mtime)
|
||||
}
|
||||
|
||||
function onDir (srcStat, destStat, src, dest, opts) {
|
||||
if (!destStat) return mkDirAndCopy(srcStat, src, dest, opts)
|
||||
if (!destStat) return mkDirAndCopy(srcStat.mode, src, dest, opts)
|
||||
if (destStat && !destStat.isDirectory()) {
|
||||
throw new Error(`Cannot overwrite non-directory '${dest}' with directory '${src}'.`)
|
||||
}
|
||||
return copyDir(src, dest, opts)
|
||||
}
|
||||
|
||||
function mkDirAndCopy (srcStat, src, dest, opts) {
|
||||
function mkDirAndCopy (srcMode, src, dest, opts) {
|
||||
fs.mkdirSync(dest)
|
||||
copyDir(src, dest, opts)
|
||||
return fs.chmodSync(dest, srcStat.mode)
|
||||
return setDestMode(dest, srcMode)
|
||||
}
|
||||
|
||||
function copyDir (src, dest, opts) {
|
||||
|
|
76  node_modules/fs-extra/lib/copy/copy.js  generated  vendored
@ -2,9 +2,9 @@
|
|||
|
||||
const fs = require('graceful-fs')
|
||||
const path = require('path')
|
||||
const mkdirp = require('../mkdirs').mkdirs
|
||||
const mkdirs = require('../mkdirs').mkdirs
|
||||
const pathExists = require('../path-exists').pathExists
|
||||
const utimes = require('../util/utimes').utimesMillis
|
||||
const utimesMillis = require('../util/utimes').utimesMillis
|
||||
const stat = require('../util/stat')
|
||||
|
||||
function copy (src, dest, opts, cb) {
|
||||
|
@ -43,7 +43,7 @@ function checkParentDir (destStat, src, dest, opts, cb) {
|
|||
pathExists(destParent, (err, dirExists) => {
|
||||
if (err) return cb(err)
|
||||
if (dirExists) return startCopy(destStat, src, dest, opts, cb)
|
||||
mkdirp(destParent, err => {
|
||||
mkdirs(destParent, err => {
|
||||
if (err) return cb(err)
|
||||
return startCopy(destStat, src, dest, opts, cb)
|
||||
})
|
||||
|
@ -92,49 +92,69 @@ function mayCopyFile (srcStat, src, dest, opts, cb) {
|
|||
}
|
||||
|
||||
function copyFile (srcStat, src, dest, opts, cb) {
|
||||
if (typeof fs.copyFile === 'function') {
|
||||
return fs.copyFile(src, dest, err => {
|
||||
if (err) return cb(err)
|
||||
return setDestModeAndTimestamps(srcStat, dest, opts, cb)
|
||||
})
|
||||
}
|
||||
return copyFileFallback(srcStat, src, dest, opts, cb)
|
||||
}
|
||||
|
||||
function copyFileFallback (srcStat, src, dest, opts, cb) {
|
||||
const rs = fs.createReadStream(src)
|
||||
rs.on('error', err => cb(err)).once('open', () => {
|
||||
const ws = fs.createWriteStream(dest, { mode: srcStat.mode })
|
||||
ws.on('error', err => cb(err))
|
||||
.on('open', () => rs.pipe(ws))
|
||||
.once('close', () => setDestModeAndTimestamps(srcStat, dest, opts, cb))
|
||||
fs.copyFile(src, dest, err => {
|
||||
if (err) return cb(err)
|
||||
if (opts.preserveTimestamps) return handleTimestampsAndMode(srcStat.mode, src, dest, cb)
|
||||
return setDestMode(dest, srcStat.mode, cb)
|
||||
})
|
||||
}
|
||||
|
||||
function setDestModeAndTimestamps (srcStat, dest, opts, cb) {
|
||||
fs.chmod(dest, srcStat.mode, err => {
|
||||
function handleTimestampsAndMode (srcMode, src, dest, cb) {
|
||||
// Make sure the file is writable before setting the timestamp
|
||||
// otherwise open fails with EPERM when invoked with 'r+'
|
||||
// (through utimes call)
|
||||
if (fileIsNotWritable(srcMode)) {
|
||||
return makeFileWritable(dest, srcMode, err => {
|
||||
if (err) return cb(err)
|
||||
return setDestTimestampsAndMode(srcMode, src, dest, cb)
|
||||
})
|
||||
}
|
||||
return setDestTimestampsAndMode(srcMode, src, dest, cb)
|
||||
}
|
||||
|
||||
function fileIsNotWritable (srcMode) {
|
||||
return (srcMode & 0o200) === 0
|
||||
}
|
||||
|
||||
function makeFileWritable (dest, srcMode, cb) {
|
||||
return setDestMode(dest, srcMode | 0o200, cb)
|
||||
}
|
||||
|
||||
function setDestTimestampsAndMode (srcMode, src, dest, cb) {
|
||||
setDestTimestamps(src, dest, err => {
|
||||
if (err) return cb(err)
|
||||
if (opts.preserveTimestamps) {
|
||||
return utimes(dest, srcStat.atime, srcStat.mtime, cb)
|
||||
}
|
||||
return cb()
|
||||
return setDestMode(dest, srcMode, cb)
|
||||
})
|
||||
}
|
||||
|
||||
function setDestMode (dest, srcMode, cb) {
|
||||
return fs.chmod(dest, srcMode, cb)
|
||||
}
|
||||
|
||||
function setDestTimestamps (src, dest, cb) {
|
||||
// The initial srcStat.atime cannot be trusted
|
||||
// because it is modified by the read(2) system call
|
||||
// (See https://nodejs.org/api/fs.html#fs_stat_time_values)
|
||||
fs.stat(src, (err, updatedSrcStat) => {
|
||||
if (err) return cb(err)
|
||||
return utimesMillis(dest, updatedSrcStat.atime, updatedSrcStat.mtime, cb)
|
||||
})
|
||||
}
|
||||
|
||||
function onDir (srcStat, destStat, src, dest, opts, cb) {
|
||||
if (!destStat) return mkDirAndCopy(srcStat, src, dest, opts, cb)
|
||||
if (!destStat) return mkDirAndCopy(srcStat.mode, src, dest, opts, cb)
|
||||
if (destStat && !destStat.isDirectory()) {
|
||||
return cb(new Error(`Cannot overwrite non-directory '${dest}' with directory '${src}'.`))
|
||||
}
|
||||
return copyDir(src, dest, opts, cb)
|
||||
}
|
||||
|
||||
function mkDirAndCopy (srcStat, src, dest, opts, cb) {
|
||||
function mkDirAndCopy (srcMode, src, dest, opts, cb) {
|
||||
fs.mkdir(dest, err => {
|
||||
if (err) return cb(err)
|
||||
copyDir(src, dest, opts, err => {
|
||||
if (err) return cb(err)
|
||||
return fs.chmod(dest, srcStat.mode, cb)
|
||||
return setDestMode(dest, srcMode, cb)
|
||||
})
|
||||
})
|
||||
}
|
||||
|
|
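The copy rewrite above mainly changes how `preserveTimestamps` is applied (making read-only destinations temporarily writable before calling `utimes`). Usage is unchanged; a small sketch with fs-extra's documented `copy` API:

```js
const fse = require('fs-extra')

// Copy a tree and keep the source files' atime/mtime on the copies.
// Internally, read-only files are chmod'ed writable (mode | 0o200) so the
// timestamps can be set, then the source mode is restored on the destination.
fse.copy('./src-dir', './dest-dir', { preserveTimestamps: true }, err => {
  if (err) return console.error(err)
  console.log('copy finished')
})
```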
2  node_modules/fs-extra/lib/empty/index.js  generated  vendored

@@ -30,7 +30,7 @@ function emptyDirSync (dir) {
  let items
  try {
    items = fs.readdirSync(dir)
  } catch (err) {
  } catch {
    return mkdir.mkdirsSync(dir)
  }
42  node_modules/fs-extra/lib/ensure/file.js  generated  vendored
@ -4,7 +4,6 @@ const u = require('universalify').fromCallback
|
|||
const path = require('path')
|
||||
const fs = require('graceful-fs')
|
||||
const mkdir = require('../mkdirs')
|
||||
const pathExists = require('../path-exists').pathExists
|
||||
|
||||
function createFile (file, callback) {
|
||||
function makeFile () {
|
||||
|
@ -17,13 +16,26 @@ function createFile (file, callback) {
|
|||
fs.stat(file, (err, stats) => { // eslint-disable-line handle-callback-err
|
||||
if (!err && stats.isFile()) return callback()
|
||||
const dir = path.dirname(file)
|
||||
pathExists(dir, (err, dirExists) => {
|
||||
if (err) return callback(err)
|
||||
if (dirExists) return makeFile()
|
||||
mkdir.mkdirs(dir, err => {
|
||||
if (err) return callback(err)
|
||||
makeFile()
|
||||
})
|
||||
fs.stat(dir, (err, stats) => {
|
||||
if (err) {
|
||||
// if the directory doesn't exist, make it
|
||||
if (err.code === 'ENOENT') {
|
||||
return mkdir.mkdirs(dir, err => {
|
||||
if (err) return callback(err)
|
||||
makeFile()
|
||||
})
|
||||
}
|
||||
return callback(err)
|
||||
}
|
||||
|
||||
if (stats.isDirectory()) makeFile()
|
||||
else {
|
||||
// parent is not a directory
|
||||
// This is just to cause an internal ENOTDIR error to be thrown
|
||||
fs.readdir(dir, err => {
|
||||
if (err) return callback(err)
|
||||
})
|
||||
}
|
||||
})
|
||||
})
|
||||
}
|
||||
|
@ -32,12 +44,20 @@ function createFileSync (file) {
|
|||
let stats
|
||||
try {
|
||||
stats = fs.statSync(file)
|
||||
} catch (e) {}
|
||||
} catch {}
|
||||
if (stats && stats.isFile()) return
|
||||
|
||||
const dir = path.dirname(file)
|
||||
if (!fs.existsSync(dir)) {
|
||||
mkdir.mkdirsSync(dir)
|
||||
try {
|
||||
if (!fs.statSync(dir).isDirectory()) {
|
||||
// parent is not a directory
|
||||
// This is just to cause an internal ENOTDIR error to be thrown
|
||||
fs.readdirSync(dir)
|
||||
}
|
||||
} catch (err) {
|
||||
// If the stat call above failed because the directory doesn't exist, create it
|
||||
if (err && err.code === 'ENOENT') mkdir.mkdirsSync(dir)
|
||||
else throw err
|
||||
}
|
||||
|
||||
fs.writeFileSync(file, '')
|
||||
|
|
24  node_modules/fs-extra/lib/ensure/symlink-paths.js  generated  vendored
@ -34,8 +34,8 @@ function symlinkPaths (srcpath, dstpath, callback) {
|
|||
return callback(err)
|
||||
}
|
||||
return callback(null, {
|
||||
'toCwd': srcpath,
|
||||
'toDst': srcpath
|
||||
toCwd: srcpath,
|
||||
toDst: srcpath
|
||||
})
|
||||
})
|
||||
} else {
|
||||
|
@ -45,8 +45,8 @@ function symlinkPaths (srcpath, dstpath, callback) {
|
|||
if (err) return callback(err)
|
||||
if (exists) {
|
||||
return callback(null, {
|
||||
'toCwd': relativeToDst,
|
||||
'toDst': srcpath
|
||||
toCwd: relativeToDst,
|
||||
toDst: srcpath
|
||||
})
|
||||
} else {
|
||||
return fs.lstat(srcpath, (err) => {
|
||||
|
@ -55,8 +55,8 @@ function symlinkPaths (srcpath, dstpath, callback) {
|
|||
return callback(err)
|
||||
}
|
||||
return callback(null, {
|
||||
'toCwd': srcpath,
|
||||
'toDst': path.relative(dstdir, srcpath)
|
||||
toCwd: srcpath,
|
||||
toDst: path.relative(dstdir, srcpath)
|
||||
})
|
||||
})
|
||||
}
|
||||
|
@ -70,8 +70,8 @@ function symlinkPathsSync (srcpath, dstpath) {
|
|||
exists = fs.existsSync(srcpath)
|
||||
if (!exists) throw new Error('absolute srcpath does not exist')
|
||||
return {
|
||||
'toCwd': srcpath,
|
||||
'toDst': srcpath
|
||||
toCwd: srcpath,
|
||||
toDst: srcpath
|
||||
}
|
||||
} else {
|
||||
const dstdir = path.dirname(dstpath)
|
||||
|
@ -79,15 +79,15 @@ function symlinkPathsSync (srcpath, dstpath) {
|
|||
exists = fs.existsSync(relativeToDst)
|
||||
if (exists) {
|
||||
return {
|
||||
'toCwd': relativeToDst,
|
||||
'toDst': srcpath
|
||||
toCwd: relativeToDst,
|
||||
toDst: srcpath
|
||||
}
|
||||
} else {
|
||||
exists = fs.existsSync(srcpath)
|
||||
if (!exists) throw new Error('relative srcpath does not exist')
|
||||
return {
|
||||
'toCwd': srcpath,
|
||||
'toDst': path.relative(dstdir, srcpath)
|
||||
toCwd: srcpath,
|
||||
toDst: path.relative(dstdir, srcpath)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
2  node_modules/fs-extra/lib/ensure/symlink-type.js  generated  vendored

@@ -19,7 +19,7 @@ function symlinkTypeSync (srcpath, type) {
  if (type) return type
  try {
    stats = fs.lstatSync(srcpath)
  } catch (e) {
  } catch {
    return 'file'
  }
  return (stats && stats.isDirectory()) ? 'dir' : 'file'
29  node_modules/fs-extra/lib/fs/index.js  generated  vendored
@ -18,15 +18,16 @@ const api = [
|
|||
'fsync',
|
||||
'ftruncate',
|
||||
'futimes',
|
||||
'lchown',
|
||||
'lchmod',
|
||||
'lchown',
|
||||
'link',
|
||||
'lstat',
|
||||
'mkdir',
|
||||
'mkdtemp',
|
||||
'open',
|
||||
'readFile',
|
||||
'opendir',
|
||||
'readdir',
|
||||
'readFile',
|
||||
'readlink',
|
||||
'realpath',
|
||||
'rename',
|
||||
|
@ -39,8 +40,7 @@ const api = [
|
|||
'writeFile'
|
||||
].filter(key => {
|
||||
// Some commands are not available on some systems. Ex:
|
||||
// fs.copyFile was added in Node.js v8.5.0
|
||||
// fs.mkdtemp was added in Node.js v5.10.0
|
||||
// fs.opendir was added in Node.js v12.12.0
|
||||
// fs.lchown is not available on at least some Linux
|
||||
return typeof fs[key] === 'function'
|
||||
})
|
||||
|
@ -71,7 +71,7 @@ exports.exists = function (filename, callback) {
|
|||
})
|
||||
}
|
||||
|
||||
// fs.read() & fs.write need special treatment due to multiple callback args
|
||||
// fs.read(), fs.write(), & fs.writev() need special treatment due to multiple callback args
|
||||
|
||||
exports.read = function (fd, buffer, offset, length, position, callback) {
|
||||
if (typeof callback === 'function') {
|
||||
|
@ -103,6 +103,25 @@ exports.write = function (fd, buffer, ...args) {
|
|||
})
|
||||
}
|
||||
|
||||
// fs.writev only available in Node v12.9.0+
|
||||
if (typeof fs.writev === 'function') {
|
||||
// Function signature is
|
||||
// fs.writev(fd, buffers[, position], callback)
|
||||
// We need to handle the optional arg, so we use ...args
|
||||
exports.writev = function (fd, buffers, ...args) {
|
||||
if (typeof args[args.length - 1] === 'function') {
|
||||
return fs.writev(fd, buffers, ...args)
|
||||
}
|
||||
|
||||
return new Promise((resolve, reject) => {
|
||||
fs.writev(fd, buffers, ...args, (err, bytesWritten, buffers) => {
|
||||
if (err) return reject(err)
|
||||
resolve({ bytesWritten, buffers })
|
||||
})
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
// fs.realpath.native only available in Node v9.2+
|
||||
if (typeof fs.realpath.native === 'function') {
|
||||
exports.realpath.native = u(fs.realpath.native)
|
||||
|
|
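The new `writev` shim follows the same dual callback/promise convention as the other wrapped methods. A small usage sketch, assuming Node 12.9+ (where `fs.writev` exists, so the export is defined):

```js
const fse = require('fs-extra')

async function appendChunks (file, chunks) {
  const fd = await fse.open(file, 'a')
  try {
    // No callback passed, so the shim returns a promise
    // resolving to { bytesWritten, buffers }.
    const { bytesWritten } = await fse.writev(fd, chunks.map(c => Buffer.from(c)))
    return bytesWritten
  } finally {
    await fse.close(fd)
  }
}

appendChunks('/tmp/writev-demo.txt', ['hello ', 'world\n']).then(console.log)
```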
29  node_modules/fs-extra/lib/index.js  generated  vendored

@@ -1,22 +1,21 @@
'use strict'

module.exports = Object.assign(
  {},
module.exports = {
  // Export promiseified graceful-fs:
  require('./fs'),
  ...require('./fs'),
  // Export extra methods:
  require('./copy-sync'),
  require('./copy'),
  require('./empty'),
  require('./ensure'),
  require('./json'),
  require('./mkdirs'),
  require('./move-sync'),
  require('./move'),
  require('./output'),
  require('./path-exists'),
  require('./remove')
)
  ...require('./copy-sync'),
  ...require('./copy'),
  ...require('./empty'),
  ...require('./ensure'),
  ...require('./json'),
  ...require('./mkdirs'),
  ...require('./move-sync'),
  ...require('./move'),
  ...require('./output'),
  ...require('./path-exists'),
  ...require('./remove')
}

// Export fs.promises as a getter property so that we don't trigger
// ExperimentalWarning before fs.promises is actually accessed.
2  node_modules/fs-extra/lib/json/index.js  generated  vendored

@@ -1,6 +1,6 @@
'use strict'

const u = require('universalify').fromCallback
const u = require('universalify').fromPromise
const jsonFile = require('./jsonfile')

jsonFile.outputJson = u(require('./output-json'))
5  node_modules/fs-extra/lib/json/jsonfile.js  generated  vendored

@@ -1,12 +1,11 @@
'use strict'

const u = require('universalify').fromCallback
const jsonFile = require('jsonfile')

module.exports = {
  // jsonfile exports
  readJson: u(jsonFile.readFile),
  readJson: jsonFile.readFile,
  readJsonSync: jsonFile.readFileSync,
  writeJson: u(jsonFile.writeFile),
  writeJson: jsonFile.writeFile,
  writeJsonSync: jsonFile.writeFileSync
}
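Dropping the `universalify` wrappers here works because jsonfile 6.x methods already return a promise when no callback is passed (see the jsonfile 5.0.0 changelog entry further down), so fs-extra can re-export them directly. Both call styles keep working:

```js
const { readJson } = require('fs-extra')

// Callback style
readJson('/tmp/data.json', (err, obj) => {
  if (err) return console.error(err)
  console.log(obj)
})

// Promise style — same function, no extra wrapping needed
readJson('/tmp/data.json').then(console.log).catch(console.error)
```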
14  node_modules/fs-extra/lib/json/output-json-sync.js  generated  vendored

@@ -1,18 +1,12 @@
'use strict'

const fs = require('graceful-fs')
const path = require('path')
const mkdir = require('../mkdirs')
const jsonFile = require('./jsonfile')
const { stringify } = require('jsonfile/utils')
const { outputFileSync } = require('../output')

function outputJsonSync (file, data, options) {
  const dir = path.dirname(file)
  const str = stringify(data, options)

  if (!fs.existsSync(dir)) {
    mkdir.mkdirsSync(dir)
  }

  jsonFile.writeJsonSync(file, data, options)
  outputFileSync(file, str, options)
}

module.exports = outputJsonSync
25  node_modules/fs-extra/lib/json/output-json.js  generated  vendored

@@ -1,27 +1,12 @@
'use strict'

const path = require('path')
const mkdir = require('../mkdirs')
const pathExists = require('../path-exists').pathExists
const jsonFile = require('./jsonfile')
const { stringify } = require('jsonfile/utils')
const { outputFile } = require('../output')

function outputJson (file, data, options, callback) {
  if (typeof options === 'function') {
    callback = options
    options = {}
  }
async function outputJson (file, data, options = {}) {
  const str = stringify(data, options)

  const dir = path.dirname(file)

  pathExists(dir, (err, itDoes) => {
    if (err) return callback(err)
    if (itDoes) return jsonFile.writeJson(file, data, options, callback)

    mkdir.mkdirs(dir, err => {
      if (err) return callback(err)
      jsonFile.writeJson(file, data, options, callback)
    })
  })
  await outputFile(file, str, options)
}

module.exports = outputJson
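After this change `outputJson` is just `stringify` plus `outputFile`; callers are unaffected and parent directories are still created on demand. A quick usage sketch (the file path is illustrative):

```js
const fse = require('fs-extra')

async function saveReport () {
  // ./reports/2020/ does not need to exist beforehand
  await fse.outputJson('./reports/2020/summary.json', { ok: true }, { spaces: 2 })
  console.log(await fse.readJson('./reports/2020/summary.json')) // { ok: true }
}

saveReport().catch(console.error)
```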
18  node_modules/fs-extra/lib/mkdirs/index.js  generated  vendored

@@ -1,14 +1,14 @@
'use strict'
const u = require('universalify').fromCallback
const mkdirs = u(require('./mkdirs'))
const mkdirsSync = require('./mkdirs-sync')
const u = require('universalify').fromPromise
const { makeDir: _makeDir, makeDirSync } = require('./make-dir')
const makeDir = u(_makeDir)

module.exports = {
  mkdirs,
  mkdirsSync,
  mkdirs: makeDir,
  mkdirsSync: makeDirSync,
  // alias
  mkdirp: mkdirs,
  mkdirpSync: mkdirsSync,
  ensureDir: mkdirs,
  ensureDirSync: mkdirsSync
  mkdirp: makeDir,
  mkdirpSync: makeDirSync,
  ensureDir: makeDir,
  ensureDirSync: makeDirSync
}
142  node_modules/fs-extra/lib/mkdirs/make-dir.js  generated  vendored  Normal file
@ -0,0 +1,142 @@
|
|||
// Adapted from https://github.com/sindresorhus/make-dir
|
||||
// Copyright (c) Sindre Sorhus <sindresorhus@gmail.com> (sindresorhus.com)
|
||||
// Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
|
||||
// The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
|
||||
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||||
'use strict'
|
||||
const fs = require('../fs')
|
||||
const path = require('path')
|
||||
const atLeastNode = require('at-least-node')
|
||||
|
||||
const useNativeRecursiveOption = atLeastNode('10.12.0')
|
||||
|
||||
// https://github.com/nodejs/node/issues/8987
|
||||
// https://github.com/libuv/libuv/pull/1088
|
||||
const checkPath = pth => {
|
||||
if (process.platform === 'win32') {
|
||||
const pathHasInvalidWinCharacters = /[<>:"|?*]/.test(pth.replace(path.parse(pth).root, ''))
|
||||
|
||||
if (pathHasInvalidWinCharacters) {
|
||||
const error = new Error(`Path contains invalid characters: ${pth}`)
|
||||
error.code = 'EINVAL'
|
||||
throw error
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const processOptions = options => {
|
||||
// Must be defined here so we get fresh process.umask()
|
||||
const defaults = { mode: 0o777 & (~process.umask()) }
|
||||
if (typeof options === 'number') options = { mode: options }
|
||||
return { ...defaults, ...options }
|
||||
}
|
||||
|
||||
const permissionError = pth => {
|
||||
// This replicates the exception of `fs.mkdir` with the native
// `recursive` option when run on an invalid drive under Windows.
|
||||
const error = new Error(`operation not permitted, mkdir '${pth}'`)
|
||||
error.code = 'EPERM'
|
||||
error.errno = -4048
|
||||
error.path = pth
|
||||
error.syscall = 'mkdir'
|
||||
return error
|
||||
}
|
||||
|
||||
module.exports.makeDir = async (input, options) => {
|
||||
checkPath(input)
|
||||
options = processOptions(options)
|
||||
|
||||
if (useNativeRecursiveOption) {
|
||||
const pth = path.resolve(input)
|
||||
|
||||
return fs.mkdir(pth, {
|
||||
mode: options.mode,
|
||||
recursive: true
|
||||
})
|
||||
}
|
||||
|
||||
const make = async pth => {
|
||||
try {
|
||||
await fs.mkdir(pth, options.mode)
|
||||
} catch (error) {
|
||||
if (error.code === 'EPERM') {
|
||||
throw error
|
||||
}
|
||||
|
||||
if (error.code === 'ENOENT') {
|
||||
if (path.dirname(pth) === pth) {
|
||||
throw permissionError(pth)
|
||||
}
|
||||
|
||||
if (error.message.includes('null bytes')) {
|
||||
throw error
|
||||
}
|
||||
|
||||
await make(path.dirname(pth))
|
||||
return make(pth)
|
||||
}
|
||||
|
||||
try {
|
||||
const stats = await fs.stat(pth)
|
||||
if (!stats.isDirectory()) {
|
||||
// This error is never exposed to the user
|
||||
// it is caught below, and the original error is thrown
|
||||
throw new Error('The path is not a directory')
|
||||
}
|
||||
} catch {
|
||||
throw error
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return make(path.resolve(input))
|
||||
}
|
||||
|
||||
module.exports.makeDirSync = (input, options) => {
|
||||
checkPath(input)
|
||||
options = processOptions(options)
|
||||
|
||||
if (useNativeRecursiveOption) {
|
||||
const pth = path.resolve(input)
|
||||
|
||||
return fs.mkdirSync(pth, {
|
||||
mode: options.mode,
|
||||
recursive: true
|
||||
})
|
||||
}
|
||||
|
||||
const make = pth => {
|
||||
try {
|
||||
fs.mkdirSync(pth, options.mode)
|
||||
} catch (error) {
|
||||
if (error.code === 'EPERM') {
|
||||
throw error
|
||||
}
|
||||
|
||||
if (error.code === 'ENOENT') {
|
||||
if (path.dirname(pth) === pth) {
|
||||
throw permissionError(pth)
|
||||
}
|
||||
|
||||
if (error.message.includes('null bytes')) {
|
||||
throw error
|
||||
}
|
||||
|
||||
make(path.dirname(pth))
|
||||
return make(pth)
|
||||
}
|
||||
|
||||
try {
|
||||
if (!fs.statSync(pth).isDirectory()) {
|
||||
// This error is never exposed to the user
|
||||
// it is caught below, and the original error is thrown
|
||||
throw new Error('The path is not a directory')
|
||||
}
|
||||
} catch {
|
||||
throw error
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return make(path.resolve(input))
|
||||
}
|
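`ensureDir`, `mkdirs` and `mkdirp` all resolve to this `makeDir`; on Node 10.12+ it is a single native `fs.mkdir(..., { recursive: true })`, otherwise it walks up and retries as in the fallback above. A minimal usage sketch (paths and mode are illustrative):

```js
const fse = require('fs-extra')

async function main () {
  // Creates /tmp/a, /tmp/a/b and /tmp/a/b/c as needed; no error if they exist.
  await fse.ensureDir('/tmp/a/b/c', { mode: 0o775 })
  // mkdirp / mkdirs are aliases for the same function.
  await fse.mkdirp('/tmp/x/y/z')
}

main().catch(console.error)
```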
54  node_modules/fs-extra/lib/mkdirs/mkdirs-sync.js  generated  vendored
@ -1,54 +0,0 @@
|
|||
'use strict'
|
||||
|
||||
const fs = require('graceful-fs')
|
||||
const path = require('path')
|
||||
const invalidWin32Path = require('./win32').invalidWin32Path
|
||||
|
||||
const o777 = parseInt('0777', 8)
|
||||
|
||||
function mkdirsSync (p, opts, made) {
|
||||
if (!opts || typeof opts !== 'object') {
|
||||
opts = { mode: opts }
|
||||
}
|
||||
|
||||
let mode = opts.mode
|
||||
const xfs = opts.fs || fs
|
||||
|
||||
if (process.platform === 'win32' && invalidWin32Path(p)) {
|
||||
const errInval = new Error(p + ' contains invalid WIN32 path characters.')
|
||||
errInval.code = 'EINVAL'
|
||||
throw errInval
|
||||
}
|
||||
|
||||
if (mode === undefined) {
|
||||
mode = o777 & (~process.umask())
|
||||
}
|
||||
if (!made) made = null
|
||||
|
||||
p = path.resolve(p)
|
||||
|
||||
try {
|
||||
xfs.mkdirSync(p, mode)
|
||||
made = made || p
|
||||
} catch (err0) {
|
||||
if (err0.code === 'ENOENT') {
|
||||
if (path.dirname(p) === p) throw err0
|
||||
made = mkdirsSync(path.dirname(p), opts, made)
|
||||
mkdirsSync(p, opts, made)
|
||||
} else {
|
||||
// In the case of any other error, just see if there's a dir there
|
||||
// already. If so, then hooray! If not, then something is borked.
|
||||
let stat
|
||||
try {
|
||||
stat = xfs.statSync(p)
|
||||
} catch (err1) {
|
||||
throw err0
|
||||
}
|
||||
if (!stat.isDirectory()) throw err0
|
||||
}
|
||||
}
|
||||
|
||||
return made
|
||||
}
|
||||
|
||||
module.exports = mkdirsSync
|
63  node_modules/fs-extra/lib/mkdirs/mkdirs.js  generated  vendored
@ -1,63 +0,0 @@
|
|||
'use strict'
|
||||
|
||||
const fs = require('graceful-fs')
|
||||
const path = require('path')
|
||||
const invalidWin32Path = require('./win32').invalidWin32Path
|
||||
|
||||
const o777 = parseInt('0777', 8)
|
||||
|
||||
function mkdirs (p, opts, callback, made) {
|
||||
if (typeof opts === 'function') {
|
||||
callback = opts
|
||||
opts = {}
|
||||
} else if (!opts || typeof opts !== 'object') {
|
||||
opts = { mode: opts }
|
||||
}
|
||||
|
||||
if (process.platform === 'win32' && invalidWin32Path(p)) {
|
||||
const errInval = new Error(p + ' contains invalid WIN32 path characters.')
|
||||
errInval.code = 'EINVAL'
|
||||
return callback(errInval)
|
||||
}
|
||||
|
||||
let mode = opts.mode
|
||||
const xfs = opts.fs || fs
|
||||
|
||||
if (mode === undefined) {
|
||||
mode = o777 & (~process.umask())
|
||||
}
|
||||
if (!made) made = null
|
||||
|
||||
callback = callback || function () {}
|
||||
p = path.resolve(p)
|
||||
|
||||
xfs.mkdir(p, mode, er => {
|
||||
if (!er) {
|
||||
made = made || p
|
||||
return callback(null, made)
|
||||
}
|
||||
switch (er.code) {
|
||||
case 'ENOENT':
|
||||
if (path.dirname(p) === p) return callback(er)
|
||||
mkdirs(path.dirname(p), opts, (er, made) => {
|
||||
if (er) callback(er, made)
|
||||
else mkdirs(p, opts, callback, made)
|
||||
})
|
||||
break
|
||||
|
||||
// In the case of any other error, just see if there's a dir
|
||||
// there already. If so, then hooray! If not, then something
|
||||
// is borked.
|
||||
default:
|
||||
xfs.stat(p, (er2, stat) => {
|
||||
// if the stat fails, then that's super weird.
|
||||
// let the original error be the failure reason.
|
||||
if (er2 || !stat.isDirectory()) callback(er, made)
|
||||
else callback(null, made)
|
||||
})
|
||||
break
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
module.exports = mkdirs
|
25  node_modules/fs-extra/lib/mkdirs/win32.js  generated  vendored
@ -1,25 +0,0 @@
|
|||
'use strict'
|
||||
|
||||
const path = require('path')
|
||||
|
||||
// get drive on windows
|
||||
function getRootPath (p) {
|
||||
p = path.normalize(path.resolve(p)).split(path.sep)
|
||||
if (p.length > 0) return p[0]
|
||||
return null
|
||||
}
|
||||
|
||||
// http://stackoverflow.com/a/62888/10333 contains more accurate
|
||||
// TODO: expand to include the rest
|
||||
const INVALID_PATH_CHARS = /[<>:"|?*]/
|
||||
|
||||
function invalidWin32Path (p) {
|
||||
const rp = getRootPath(p)
|
||||
p = p.replace(rp, '')
|
||||
return INVALID_PATH_CHARS.test(p)
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
getRootPath,
|
||||
invalidWin32Path
|
||||
}
|
2  node_modules/fs-extra/lib/remove/rimraf.js  generated  vendored

@@ -302,7 +302,7 @@ function rmkidsSync (p, options) {
      try {
        const ret = options.rmdirSync(p, options)
        return ret
      } catch (er) { }
      } catch {}
    } while (Date.now() - startTime < 500) // give up after 500ms
  } else {
    const ret = options.rmdirSync(p, options)
12  node_modules/fs-extra/lib/util/buffer.js  generated  vendored

@@ -1,12 +0,0 @@
'use strict'
/* eslint-disable node/no-deprecated-api */
module.exports = function (size) {
  if (typeof Buffer.allocUnsafe === 'function') {
    try {
      return Buffer.allocUnsafe(size)
    } catch (e) {
      return new Buffer(size)
    }
  }
  return new Buffer(size)
}
143  node_modules/fs-extra/lib/util/stat.js  generated  vendored
@ -1,70 +1,29 @@
|
|||
'use strict'
|
||||
|
||||
const fs = require('graceful-fs')
|
||||
const fs = require('../fs')
|
||||
const path = require('path')
|
||||
const util = require('util')
|
||||
const atLeastNode = require('at-least-node')
|
||||
|
||||
const NODE_VERSION_MAJOR_WITH_BIGINT = 10
|
||||
const NODE_VERSION_MINOR_WITH_BIGINT = 5
|
||||
const NODE_VERSION_PATCH_WITH_BIGINT = 0
|
||||
const nodeVersion = process.versions.node.split('.')
|
||||
const nodeVersionMajor = Number.parseInt(nodeVersion[0], 10)
|
||||
const nodeVersionMinor = Number.parseInt(nodeVersion[1], 10)
|
||||
const nodeVersionPatch = Number.parseInt(nodeVersion[2], 10)
|
||||
const nodeSupportsBigInt = atLeastNode('10.5.0')
|
||||
const stat = (file) => nodeSupportsBigInt ? fs.stat(file, { bigint: true }) : fs.stat(file)
|
||||
const statSync = (file) => nodeSupportsBigInt ? fs.statSync(file, { bigint: true }) : fs.statSync(file)
|
||||
|
||||
function nodeSupportsBigInt () {
|
||||
if (nodeVersionMajor > NODE_VERSION_MAJOR_WITH_BIGINT) {
|
||||
return true
|
||||
} else if (nodeVersionMajor === NODE_VERSION_MAJOR_WITH_BIGINT) {
|
||||
if (nodeVersionMinor > NODE_VERSION_MINOR_WITH_BIGINT) {
|
||||
return true
|
||||
} else if (nodeVersionMinor === NODE_VERSION_MINOR_WITH_BIGINT) {
|
||||
if (nodeVersionPatch >= NODE_VERSION_PATCH_WITH_BIGINT) {
|
||||
return true
|
||||
}
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
function getStats (src, dest, cb) {
|
||||
if (nodeSupportsBigInt()) {
|
||||
fs.stat(src, { bigint: true }, (err, srcStat) => {
|
||||
if (err) return cb(err)
|
||||
fs.stat(dest, { bigint: true }, (err, destStat) => {
|
||||
if (err) {
|
||||
if (err.code === 'ENOENT') return cb(null, { srcStat, destStat: null })
|
||||
return cb(err)
|
||||
}
|
||||
return cb(null, { srcStat, destStat })
|
||||
})
|
||||
function getStats (src, dest) {
|
||||
return Promise.all([
|
||||
stat(src),
|
||||
stat(dest).catch(err => {
|
||||
if (err.code === 'ENOENT') return null
|
||||
throw err
|
||||
})
|
||||
} else {
|
||||
fs.stat(src, (err, srcStat) => {
|
||||
if (err) return cb(err)
|
||||
fs.stat(dest, (err, destStat) => {
|
||||
if (err) {
|
||||
if (err.code === 'ENOENT') return cb(null, { srcStat, destStat: null })
|
||||
return cb(err)
|
||||
}
|
||||
return cb(null, { srcStat, destStat })
|
||||
})
|
||||
})
|
||||
}
|
||||
]).then(([srcStat, destStat]) => ({ srcStat, destStat }))
|
||||
}
|
||||
|
||||
function getStatsSync (src, dest) {
|
||||
let srcStat, destStat
|
||||
if (nodeSupportsBigInt()) {
|
||||
srcStat = fs.statSync(src, { bigint: true })
|
||||
} else {
|
||||
srcStat = fs.statSync(src)
|
||||
}
|
||||
let destStat
|
||||
const srcStat = statSync(src)
|
||||
try {
|
||||
if (nodeSupportsBigInt()) {
|
||||
destStat = fs.statSync(dest, { bigint: true })
|
||||
} else {
|
||||
destStat = fs.statSync(dest)
|
||||
}
|
||||
destStat = statSync(dest)
|
||||
} catch (err) {
|
||||
if (err.code === 'ENOENT') return { srcStat, destStat: null }
|
||||
throw err
|
||||
|
@ -73,10 +32,10 @@ function getStatsSync (src, dest) {
|
|||
}
|
||||
|
||||
function checkPaths (src, dest, funcName, cb) {
|
||||
getStats(src, dest, (err, stats) => {
|
||||
util.callbackify(getStats)(src, dest, (err, stats) => {
|
||||
if (err) return cb(err)
|
||||
const { srcStat, destStat } = stats
|
||||
if (destStat && destStat.ino && destStat.dev && destStat.ino === srcStat.ino && destStat.dev === srcStat.dev) {
|
||||
if (destStat && areIdentical(srcStat, destStat)) {
|
||||
return cb(new Error('Source and destination must not be the same.'))
|
||||
}
|
||||
if (srcStat.isDirectory() && isSrcSubdir(src, dest)) {
|
||||
|
@ -88,7 +47,7 @@ function checkPaths (src, dest, funcName, cb) {
|
|||
|
||||
function checkPathsSync (src, dest, funcName) {
|
||||
const { srcStat, destStat } = getStatsSync(src, dest)
|
||||
if (destStat && destStat.ino && destStat.dev && destStat.ino === srcStat.ino && destStat.dev === srcStat.dev) {
|
||||
if (destStat && areIdentical(srcStat, destStat)) {
|
||||
throw new Error('Source and destination must not be the same.')
|
||||
}
|
||||
if (srcStat.isDirectory() && isSrcSubdir(src, dest)) {
|
||||
|
@ -105,29 +64,18 @@ function checkParentPaths (src, srcStat, dest, funcName, cb) {
|
|||
const srcParent = path.resolve(path.dirname(src))
|
||||
const destParent = path.resolve(path.dirname(dest))
|
||||
if (destParent === srcParent || destParent === path.parse(destParent).root) return cb()
|
||||
if (nodeSupportsBigInt()) {
|
||||
fs.stat(destParent, { bigint: true }, (err, destStat) => {
|
||||
if (err) {
|
||||
if (err.code === 'ENOENT') return cb()
|
||||
return cb(err)
|
||||
}
|
||||
if (destStat.ino && destStat.dev && destStat.ino === srcStat.ino && destStat.dev === srcStat.dev) {
|
||||
return cb(new Error(errMsg(src, dest, funcName)))
|
||||
}
|
||||
return checkParentPaths(src, srcStat, destParent, funcName, cb)
|
||||
})
|
||||
} else {
|
||||
fs.stat(destParent, (err, destStat) => {
|
||||
if (err) {
|
||||
if (err.code === 'ENOENT') return cb()
|
||||
return cb(err)
|
||||
}
|
||||
if (destStat.ino && destStat.dev && destStat.ino === srcStat.ino && destStat.dev === srcStat.dev) {
|
||||
return cb(new Error(errMsg(src, dest, funcName)))
|
||||
}
|
||||
return checkParentPaths(src, srcStat, destParent, funcName, cb)
|
||||
})
|
||||
const callback = (err, destStat) => {
|
||||
if (err) {
|
||||
if (err.code === 'ENOENT') return cb()
|
||||
return cb(err)
|
||||
}
|
||||
if (areIdentical(srcStat, destStat)) {
|
||||
return cb(new Error(errMsg(src, dest, funcName)))
|
||||
}
|
||||
return checkParentPaths(src, srcStat, destParent, funcName, cb)
|
||||
}
|
||||
if (nodeSupportsBigInt) fs.stat(destParent, { bigint: true }, callback)
|
||||
else fs.stat(destParent, callback)
|
||||
}
|
||||
|
||||
function checkParentPathsSync (src, srcStat, dest, funcName) {
|
||||
|
@ -136,21 +84,40 @@ function checkParentPathsSync (src, srcStat, dest, funcName) {
|
|||
if (destParent === srcParent || destParent === path.parse(destParent).root) return
|
||||
let destStat
|
||||
try {
|
||||
if (nodeSupportsBigInt()) {
|
||||
destStat = fs.statSync(destParent, { bigint: true })
|
||||
} else {
|
||||
destStat = fs.statSync(destParent)
|
||||
}
|
||||
destStat = statSync(destParent)
|
||||
} catch (err) {
|
||||
if (err.code === 'ENOENT') return
|
||||
throw err
|
||||
}
|
||||
if (destStat.ino && destStat.dev && destStat.ino === srcStat.ino && destStat.dev === srcStat.dev) {
|
||||
if (areIdentical(srcStat, destStat)) {
|
||||
throw new Error(errMsg(src, dest, funcName))
|
||||
}
|
||||
return checkParentPathsSync(src, srcStat, destParent, funcName)
|
||||
}
|
||||
|
||||
function areIdentical (srcStat, destStat) {
|
||||
if (destStat.ino && destStat.dev && destStat.ino === srcStat.ino && destStat.dev === srcStat.dev) {
|
||||
if (nodeSupportsBigInt || destStat.ino < Number.MAX_SAFE_INTEGER) {
|
||||
// definitive answer
|
||||
return true
|
||||
}
|
||||
// Use additional heuristics if we can't use 'bigint'.
|
||||
// Different 'ino' could be represented the same if they are >= Number.MAX_SAFE_INTEGER
|
||||
// See issue 657
|
||||
if (destStat.size === srcStat.size &&
|
||||
destStat.mode === srcStat.mode &&
|
||||
destStat.nlink === srcStat.nlink &&
|
||||
destStat.atimeMs === srcStat.atimeMs &&
|
||||
destStat.mtimeMs === srcStat.mtimeMs &&
|
||||
destStat.ctimeMs === srcStat.ctimeMs &&
|
||||
destStat.birthtimeMs === srcStat.birthtimeMs) {
|
||||
// heuristic answer
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
// return true if dest is a subdir of src, otherwise false.
|
||||
// It only checks the path strings.
|
||||
function isSrcSubdir (src, dest) {
|
||||
|
|
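The `areIdentical` heuristic above exists because, without `{ bigint: true }` stats, two distinct inodes can collapse to the same JavaScript number. A tiny illustration of the precision loss that motivates it:

```js
// Two different inodes, represented exactly as BigInt:
const inoA = 2n ** 63n - 1n
const inoB = 2n ** 63n - 2n

// Converted to plain numbers they become indistinguishable,
// which is why ino equality alone cannot be trusted above 2^53.
console.log(Number(inoA) === Number(inoB)) // true — both round to 9223372036854775808

// Hence: bigint stats on Node >= 10.5, otherwise fall back to comparing
// size/mode/nlink/timestamps when ino >= Number.MAX_SAFE_INTEGER.
```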
53  node_modules/fs-extra/lib/util/utimes.js  generated  vendored
@ -1,56 +1,6 @@
|
|||
'use strict'
|
||||
|
||||
const fs = require('graceful-fs')
|
||||
const os = require('os')
|
||||
const path = require('path')
|
||||
|
||||
// HFS, ext{2,3}, FAT do not, Node.js v0.10 does not
|
||||
function hasMillisResSync () {
|
||||
let tmpfile = path.join('millis-test-sync' + Date.now().toString() + Math.random().toString().slice(2))
|
||||
tmpfile = path.join(os.tmpdir(), tmpfile)
|
||||
|
||||
// 550 millis past UNIX epoch
|
||||
const d = new Date(1435410243862)
|
||||
fs.writeFileSync(tmpfile, 'https://github.com/jprichardson/node-fs-extra/pull/141')
|
||||
const fd = fs.openSync(tmpfile, 'r+')
|
||||
fs.futimesSync(fd, d, d)
|
||||
fs.closeSync(fd)
|
||||
return fs.statSync(tmpfile).mtime > 1435410243000
|
||||
}
|
||||
|
||||
function hasMillisRes (callback) {
|
||||
let tmpfile = path.join('millis-test' + Date.now().toString() + Math.random().toString().slice(2))
|
||||
tmpfile = path.join(os.tmpdir(), tmpfile)
|
||||
|
||||
// 550 millis past UNIX epoch
|
||||
const d = new Date(1435410243862)
|
||||
fs.writeFile(tmpfile, 'https://github.com/jprichardson/node-fs-extra/pull/141', err => {
|
||||
if (err) return callback(err)
|
||||
fs.open(tmpfile, 'r+', (err, fd) => {
|
||||
if (err) return callback(err)
|
||||
fs.futimes(fd, d, d, err => {
|
||||
if (err) return callback(err)
|
||||
fs.close(fd, err => {
|
||||
if (err) return callback(err)
|
||||
fs.stat(tmpfile, (err, stats) => {
|
||||
if (err) return callback(err)
|
||||
callback(null, stats.mtime > 1435410243000)
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
function timeRemoveMillis (timestamp) {
|
||||
if (typeof timestamp === 'number') {
|
||||
return Math.floor(timestamp / 1000) * 1000
|
||||
} else if (timestamp instanceof Date) {
|
||||
return new Date(Math.floor(timestamp.getTime() / 1000) * 1000)
|
||||
} else {
|
||||
throw new Error('fs-extra: timeRemoveMillis() unknown parameter type')
|
||||
}
|
||||
}
|
||||
|
||||
function utimesMillis (path, atime, mtime, callback) {
|
||||
// if (!HAS_MILLIS_RES) return fs.utimes(path, atime, mtime, callback)
|
||||
|
@ -71,9 +21,6 @@ function utimesMillisSync (path, atime, mtime) {
|
|||
}
|
||||
|
||||
module.exports = {
|
||||
hasMillisRes,
|
||||
hasMillisResSync,
|
||||
timeRemoveMillis,
|
||||
utimesMillis,
|
||||
utimesMillisSync
|
||||
}
|
||||
|
|
37  node_modules/fs-extra/package.json  generated  vendored
@ -1,31 +1,31 @@
|
|||
{
|
||||
"_args": [
|
||||
[
|
||||
"fs-extra@8.1.0",
|
||||
"fs-extra@9.0.0",
|
||||
"/home/runner/work/ghaction-github-pages/ghaction-github-pages"
|
||||
]
|
||||
],
|
||||
"_from": "fs-extra@8.1.0",
|
||||
"_id": "fs-extra@8.1.0",
|
||||
"_from": "fs-extra@9.0.0",
|
||||
"_id": "fs-extra@9.0.0",
|
||||
"_inBundle": false,
|
||||
"_integrity": "sha512-yhlQgA6mnOJUKOsRUFsgJdQCvkKhcz8tlZG5HBQfReYZy46OwLcY+Zia0mtdHsOo9y/hP+CxMN0TU9QxoOtG4g==",
|
||||
"_integrity": "sha512-pmEYSk3vYsG/bF651KPUXZ+hvjpgWYw/Gc7W9NFUe3ZVLczKKWIij3IKpOrQcdw4TILtibFslZ0UmR8Vvzig4g==",
|
||||
"_location": "/fs-extra",
|
||||
"_phantomChildren": {},
|
||||
"_requested": {
|
||||
"type": "version",
|
||||
"registry": true,
|
||||
"raw": "fs-extra@8.1.0",
|
||||
"raw": "fs-extra@9.0.0",
|
||||
"name": "fs-extra",
|
||||
"escapedName": "fs-extra",
|
||||
"rawSpec": "8.1.0",
|
||||
"rawSpec": "9.0.0",
|
||||
"saveSpec": null,
|
||||
"fetchSpec": "8.1.0"
|
||||
"fetchSpec": "9.0.0"
|
||||
},
|
||||
"_requiredBy": [
|
||||
"/"
|
||||
],
|
||||
"_resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-8.1.0.tgz",
|
||||
"_spec": "8.1.0",
|
||||
"_resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-9.0.0.tgz",
|
||||
"_spec": "9.0.0",
|
||||
"_where": "/home/runner/work/ghaction-github-pages/ghaction-github-pages",
|
||||
"author": {
|
||||
"name": "JP Richardson",
|
||||
|
@ -35,25 +35,25 @@
|
|||
"url": "https://github.com/jprichardson/node-fs-extra/issues"
|
||||
},
|
||||
"dependencies": {
|
||||
"at-least-node": "^1.0.0",
|
||||
"graceful-fs": "^4.2.0",
|
||||
"jsonfile": "^4.0.0",
|
||||
"universalify": "^0.1.0"
|
||||
"jsonfile": "^6.0.1",
|
||||
"universalify": "^1.0.0"
|
||||
},
|
||||
"description": "fs-extra contains methods that aren't included in the vanilla Node.js fs package. Such as mkdir -p, cp -r, and rm -rf.",
|
||||
"devDependencies": {
|
||||
"coveralls": "^3.0.0",
|
||||
"istanbul": "^0.4.5",
|
||||
"klaw": "^2.1.1",
|
||||
"klaw-sync": "^3.0.2",
|
||||
"minimist": "^1.1.1",
|
||||
"mocha": "^5.0.5",
|
||||
"nyc": "^15.0.0",
|
||||
"proxyquire": "^2.0.1",
|
||||
"read-dir-files": "^0.1.1",
|
||||
"semver": "^5.3.0",
|
||||
"standard": "^12.0.1"
|
||||
"standard": "^14.1.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=6 <7 || >=8"
|
||||
"node": ">=10"
|
||||
},
|
||||
"files": [
|
||||
"lib/",
|
||||
|
@ -81,7 +81,8 @@
|
|||
"create",
|
||||
"text",
|
||||
"output",
|
||||
"move"
|
||||
"move",
|
||||
"promise"
|
||||
],
|
||||
"license": "MIT",
|
||||
"main": "./lib/index.js",
|
||||
|
@ -91,7 +92,7 @@
|
|||
"url": "git+https://github.com/jprichardson/node-fs-extra.git"
|
||||
},
|
||||
"scripts": {
|
||||
"coverage": "istanbul cover -i 'lib/**' -x '**/__tests__/**' test.js",
|
||||
"coverage": "nyc -r lcovonly npm run unit",
|
||||
"coveralls": "coveralls < coverage/lcov.info",
|
||||
"full-ci": "npm run lint && npm run coverage",
|
||||
"lint": "standard",
|
||||
|
@ -99,5 +100,5 @@
|
|||
"test-find": "find ./lib/**/__tests__ -name *.test.js | xargs mocha",
|
||||
"unit": "node test.js"
|
||||
},
|
||||
"version": "8.1.0"
|
||||
"version": "9.0.0"
|
||||
}
|
||||
|
|
20  node_modules/jsonfile/CHANGELOG.md  generated  vendored

@@ -1,3 +1,23 @@
6.0.1 / 2020-03-07
------------------

- Update dependency ([#130](https://github.com/jprichardson/node-jsonfile/pull/130))
- Fix code style ([#129](https://github.com/jprichardson/node-jsonfile/pull/129))

6.0.0 / 2020-02-24
------------------

- **BREAKING:** Drop support for Node 6 & 8 ([#128](https://github.com/jprichardson/node-jsonfile/pull/128))
- **BREAKING:** Do not allow passing `null` as options to `readFile()` or `writeFile()` ([#128](https://github.com/jprichardson/node-jsonfile/pull/128))
- Refactor internals ([#128](https://github.com/jprichardson/node-jsonfile/pull/128))

5.0.0 / 2018-09-08
------------------

- **BREAKING:** Drop Node 4 support
- **BREAKING:** If no callback is passed to an asynchronous method, a promise is now returned ([#109](https://github.com/jprichardson/node-jsonfile/pull/109))
- Cleanup docs

4.0.0 / 2017-07-12
------------------
129  node_modules/jsonfile/README.md  generated  vendored
@ -1,7 +1,7 @@
|
|||
Node.js - jsonfile
|
||||
================
|
||||
|
||||
Easily read/write JSON files.
|
||||
Easily read/write JSON files in Node.js. _Note: this module cannot be used in the browser._
|
||||
|
||||
[![npm Package](https://img.shields.io/npm/v/jsonfile.svg?style=flat-square)](https://www.npmjs.org/package/jsonfile)
|
||||
[![build status](https://secure.travis-ci.org/jprichardson/node-jsonfile.svg)](http://travis-ci.org/jprichardson/node-jsonfile)
|
||||
|
@ -26,101 +26,138 @@ Installation
|
|||
API
|
||||
---
|
||||
|
||||
* [`readFile(filename, [options], callback)`](#readfilefilename-options-callback)
|
||||
* [`readFileSync(filename, [options])`](#readfilesyncfilename-options)
|
||||
* [`writeFile(filename, obj, [options], callback)`](#writefilefilename-obj-options-callback)
|
||||
* [`writeFileSync(filename, obj, [options])`](#writefilesyncfilename-obj-options)
|
||||
|
||||
----
|
||||
|
||||
### readFile(filename, [options], callback)
|
||||
|
||||
`options` (`object`, default `undefined`): Pass in any `fs.readFile` options or set `reviver` for a [JSON reviver](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/JSON/parse).
|
||||
`options` (`object`, default `undefined`): Pass in any [`fs.readFile`](https://nodejs.org/api/fs.html#fs_fs_readfile_path_options_callback) options or set `reviver` for a [JSON reviver](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/JSON/parse).
|
||||
- `throws` (`boolean`, default: `true`). If `JSON.parse` throws an error, pass this error to the callback.
|
||||
If `false`, returns `null` for the object.
|
||||
|
||||
|
||||
```js
|
||||
var jsonfile = require('jsonfile')
|
||||
var file = '/tmp/data.json'
|
||||
jsonfile.readFile(file, function(err, obj) {
|
||||
const jsonfile = require('jsonfile')
|
||||
const file = '/tmp/data.json'
|
||||
jsonfile.readFile(file, function (err, obj) {
|
||||
if (err) console.error(err)
|
||||
console.dir(obj)
|
||||
})
|
||||
```
|
||||
|
||||
You can also use this method with promises. The `readFile` method will return a promise if you do not pass a callback function.
|
||||
|
||||
```js
|
||||
const jsonfile = require('jsonfile')
|
||||
const file = '/tmp/data.json'
|
||||
jsonfile.readFile(file)
|
||||
.then(obj => console.dir(obj))
|
||||
.catch(error => console.error(error))
|
||||
```
|
||||
|
||||
----
|
||||
|
||||
### readFileSync(filename, [options])
|
||||
|
||||
`options` (`object`, default `undefined`): Pass in any `fs.readFileSync` options or set `reviver` for a [JSON reviver](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/JSON/parse).
|
||||
`options` (`object`, default `undefined`): Pass in any [`fs.readFileSync`](https://nodejs.org/api/fs.html#fs_fs_readfilesync_path_options) options or set `reviver` for a [JSON reviver](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/JSON/parse).
|
||||
- `throws` (`boolean`, default: `true`). If an error is encountered reading or parsing the file, throw the error. If `false`, returns `null` for the object.
|
||||
|
||||
```js
|
||||
var jsonfile = require('jsonfile')
|
||||
var file = '/tmp/data.json'
|
||||
const jsonfile = require('jsonfile')
|
||||
const file = '/tmp/data.json'
|
||||
|
||||
console.dir(jsonfile.readFileSync(file))
|
||||
```
|
||||
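A minimal sketch of `throws: false` with the synchronous variant:

```js
const jsonfile = require('jsonfile')
const file = '/tmp/data.json'

// Returns the parsed object, or `null` if the file can't be read or parsed.
const obj = jsonfile.readFileSync(file, { throws: false })
if (obj === null) console.warn('could not read ' + file)
```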
|
||||
----
|
||||
|
||||
### writeFile(filename, obj, [options], callback)
|
||||
|
||||
`options`: Pass in any `fs.writeFile` options or set `replacer` for a [JSON replacer](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/JSON/stringify). Can also pass in `spaces` and override `EOL` string.
|
||||
`options`: Pass in any [`fs.writeFile`](https://nodejs.org/api/fs.html#fs_fs_writefile_file_data_options_callback) options or set `replacer` for a [JSON replacer](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/JSON/stringify). Can also pass in `spaces` and override `EOL` string.
|
||||
|
||||
|
||||
```js
|
||||
var jsonfile = require('jsonfile')
|
||||
const jsonfile = require('jsonfile')
|
||||
|
||||
var file = '/tmp/data.json'
|
||||
var obj = {name: 'JP'}
|
||||
const file = '/tmp/data.json'
|
||||
const obj = { name: 'JP' }
|
||||
|
||||
jsonfile.writeFile(file, obj, function (err) {
|
||||
console.error(err)
|
||||
if (err) console.error(err)
|
||||
})
|
||||
```
|
||||
Or use with promises as follows:
|
||||
|
||||
```js
|
||||
const jsonfile = require('jsonfile')
|
||||
|
||||
const file = '/tmp/data.json'
|
||||
const obj = { name: 'JP' }
|
||||
|
||||
jsonfile.writeFile(file, obj)
|
||||
.then(res => {
|
||||
console.log('Write complete')
|
||||
})
|
||||
.catch(error => console.error(error))
|
||||
```
|
||||
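**using a `replacer`:**

A minimal sketch of the `replacer` option (the `password` field is just an illustrative name):

```js
const jsonfile = require('jsonfile')

const file = '/tmp/data.json'
const obj = { name: 'JP', password: 'hunter2' }

// The replacer is handed straight to JSON.stringify, so it can drop or rewrite fields.
jsonfile.writeFile(file, obj, {
  replacer: (key, value) => key === 'password' ? undefined : value
}, function (err) {
  if (err) console.error(err)
})
```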
|
||||
|
||||
**formatting with spaces:**
|
||||
|
||||
```js
|
||||
var jsonfile = require('jsonfile')
|
||||
const jsonfile = require('jsonfile')
|
||||
|
||||
var file = '/tmp/data.json'
|
||||
var obj = {name: 'JP'}
|
||||
const file = '/tmp/data.json'
|
||||
const obj = { name: 'JP' }
|
||||
|
||||
jsonfile.writeFile(file, obj, {spaces: 2}, function(err) {
|
||||
console.error(err)
|
||||
jsonfile.writeFile(file, obj, { spaces: 2 }, function (err) {
|
||||
if (err) console.error(err)
|
||||
})
|
||||
```
|
||||
|
||||
**overriding EOL:**
|
||||
|
||||
```js
|
||||
var jsonfile = require('jsonfile')
|
||||
const jsonfile = require('jsonfile')
|
||||
|
||||
var file = '/tmp/data.json'
|
||||
var obj = {name: 'JP'}
|
||||
const file = '/tmp/data.json'
|
||||
const obj = { name: 'JP' }
|
||||
|
||||
jsonfile.writeFile(file, obj, {spaces: 2, EOL: '\r\n'}, function(err) {
|
||||
console.error(err)
|
||||
jsonfile.writeFile(file, obj, { spaces: 2, EOL: '\r\n' }, function (err) {
|
||||
if (err) console.error(err)
|
||||
})
|
||||
```
|
||||
|
||||
**appending to an existing JSON file:**
|
||||
|
||||
You can use `fs.writeFile` option `{flag: 'a'}` to achieve this.
|
||||
You can use `fs.writeFile` option `{ flag: 'a' }` to achieve this.
|
||||
|
||||
```js
|
||||
var jsonfile = require('jsonfile')
|
||||
const jsonfile = require('jsonfile')
|
||||
|
||||
var file = '/tmp/mayAlreadyExistedData.json'
|
||||
var obj = {name: 'JP'}
|
||||
const file = '/tmp/mayAlreadyExistedData.json'
|
||||
const obj = { name: 'JP' }
|
||||
|
||||
jsonfile.writeFile(file, obj, {flag: 'a'}, function (err) {
|
||||
console.error(err)
|
||||
jsonfile.writeFile(file, obj, { flag: 'a' }, function (err) {
|
||||
if (err) console.error(err)
|
||||
})
|
||||
```
|
||||
|
||||
----
|
||||
|
||||
### writeFileSync(filename, obj, [options])
|
||||
|
||||
`options`: Pass in any `fs.writeFileSync` options or set `replacer` for a [JSON replacer](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/JSON/stringify). Can also pass in `spaces` and override `EOL` string.
|
||||
`options`: Pass in any [`fs.writeFileSync`](https://nodejs.org/api/fs.html#fs_fs_writefilesync_file_data_options) options or set `replacer` for a [JSON replacer](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/JSON/stringify). Can also pass in `spaces` and override `EOL` string.
|
||||
|
||||
```js
|
||||
var jsonfile = require('jsonfile')
|
||||
const jsonfile = require('jsonfile')
|
||||
|
||||
var file = '/tmp/data.json'
|
||||
var obj = {name: 'JP'}
|
||||
const file = '/tmp/data.json'
|
||||
const obj = { name: 'JP' }
|
||||
|
||||
jsonfile.writeFileSync(file, obj)
|
||||
```
|
||||
|
@@ -128,36 +165,36 @@ jsonfile.writeFileSync(file, obj)
|
|||
**formatting with spaces:**
|
||||
|
||||
```js
|
||||
var jsonfile = require('jsonfile')
|
||||
const jsonfile = require('jsonfile')
|
||||
|
||||
var file = '/tmp/data.json'
|
||||
var obj = {name: 'JP'}
|
||||
const file = '/tmp/data.json'
|
||||
const obj = { name: 'JP' }
|
||||
|
||||
jsonfile.writeFileSync(file, obj, {spaces: 2})
|
||||
jsonfile.writeFileSync(file, obj, { spaces: 2 })
|
||||
```
|
||||
|
||||
**overriding EOL:**
|
||||
|
||||
```js
|
||||
var jsonfile = require('jsonfile')
|
||||
const jsonfile = require('jsonfile')
|
||||
|
||||
var file = '/tmp/data.json'
|
||||
var obj = {name: 'JP'}
|
||||
const file = '/tmp/data.json'
|
||||
const obj = { name: 'JP' }
|
||||
|
||||
jsonfile.writeFileSync(file, obj, {spaces: 2, EOL: '\r\n'})
|
||||
jsonfile.writeFileSync(file, obj, { spaces: 2, EOL: '\r\n' })
|
||||
```
|
||||
|
||||
**appending to an existing JSON file:**
|
||||
|
||||
You can use `fs.writeFileSync` option `{flag: 'a'}` to achieve this.
|
||||
You can use `fs.writeFileSync` option `{ flag: 'a' }` to achieve this.
|
||||
|
||||
```js
|
||||
var jsonfile = require('jsonfile')
|
||||
const jsonfile = require('jsonfile')
|
||||
|
||||
var file = '/tmp/mayAlreadyExistedData.json'
|
||||
var obj = {name: 'JP'}
|
||||
const file = '/tmp/mayAlreadyExistedData.json'
|
||||
const obj = { name: 'JP' }
|
||||
|
||||
jsonfile.writeFileSync(file, obj, {flag: 'a'})
|
||||
jsonfile.writeFileSync(file, obj, { flag: 'a' })
|
||||
```
|
||||
|
||||
License
|
||||
|
|
128
node_modules/jsonfile/index.js
generated
vendored
|
@@ -1,69 +1,58 @@
|
|||
var _fs
|
||||
let _fs
|
||||
try {
|
||||
_fs = require('graceful-fs')
|
||||
} catch (_) {
|
||||
_fs = require('fs')
|
||||
}
|
||||
const universalify = require('universalify')
|
||||
const { stringify, stripBom } = require('./utils')
|
||||
|
||||
function readFile (file, options, callback) {
|
||||
if (callback == null) {
|
||||
callback = options
|
||||
options = {}
|
||||
}
|
||||
|
||||
async function _readFile (file, options = {}) {
|
||||
if (typeof options === 'string') {
|
||||
options = {encoding: options}
|
||||
options = { encoding: options }
|
||||
}
|
||||
|
||||
options = options || {}
|
||||
var fs = options.fs || _fs
|
||||
const fs = options.fs || _fs
|
||||
|
||||
var shouldThrow = true
|
||||
if ('throws' in options) {
|
||||
shouldThrow = options.throws
|
||||
}
|
||||
const shouldThrow = 'throws' in options ? options.throws : true
|
||||
|
||||
fs.readFile(file, options, function (err, data) {
|
||||
if (err) return callback(err)
|
||||
let data = await universalify.fromCallback(fs.readFile)(file, options)
|
||||
|
||||
data = stripBom(data)
|
||||
data = stripBom(data)
|
||||
|
||||
var obj
|
||||
try {
|
||||
obj = JSON.parse(data, options ? options.reviver : null)
|
||||
} catch (err2) {
|
||||
if (shouldThrow) {
|
||||
err2.message = file + ': ' + err2.message
|
||||
return callback(err2)
|
||||
} else {
|
||||
return callback(null, null)
|
||||
}
|
||||
let obj
|
||||
try {
|
||||
obj = JSON.parse(data, options ? options.reviver : null)
|
||||
} catch (err) {
|
||||
if (shouldThrow) {
|
||||
err.message = `${file}: ${err.message}`
|
||||
throw err
|
||||
} else {
|
||||
return null
|
||||
}
|
||||
}
|
||||
|
||||
callback(null, obj)
|
||||
})
|
||||
return obj
|
||||
}
|
||||
|
||||
function readFileSync (file, options) {
|
||||
options = options || {}
|
||||
const readFile = universalify.fromPromise(_readFile)
|
||||
|
||||
function readFileSync (file, options = {}) {
|
||||
if (typeof options === 'string') {
|
||||
options = {encoding: options}
|
||||
options = { encoding: options }
|
||||
}
|
||||
|
||||
var fs = options.fs || _fs
|
||||
const fs = options.fs || _fs
|
||||
|
||||
var shouldThrow = true
|
||||
if ('throws' in options) {
|
||||
shouldThrow = options.throws
|
||||
}
|
||||
const shouldThrow = 'throws' in options ? options.throws : true
|
||||
|
||||
try {
|
||||
var content = fs.readFileSync(file, options)
|
||||
let content = fs.readFileSync(file, options)
|
||||
content = stripBom(content)
|
||||
return JSON.parse(content, options.reviver)
|
||||
} catch (err) {
|
||||
if (shouldThrow) {
|
||||
err.message = file + ': ' + err.message
|
||||
err.message = `${file}: ${err.message}`
|
||||
throw err
|
||||
} else {
|
||||
return null
|
||||
|
@@ -71,64 +60,29 @@ function readFileSync (file, options) {
|
|||
}
|
||||
}
|
||||
|
||||
function stringify (obj, options) {
|
||||
var spaces
|
||||
var EOL = '\n'
|
||||
if (typeof options === 'object' && options !== null) {
|
||||
if (options.spaces) {
|
||||
spaces = options.spaces
|
||||
}
|
||||
if (options.EOL) {
|
||||
EOL = options.EOL
|
||||
}
|
||||
}
|
||||
async function _writeFile (file, obj, options = {}) {
|
||||
const fs = options.fs || _fs
|
||||
|
||||
var str = JSON.stringify(obj, options ? options.replacer : null, spaces)
|
||||
const str = stringify(obj, options)
|
||||
|
||||
return str.replace(/\n/g, EOL) + EOL
|
||||
await universalify.fromCallback(fs.writeFile)(file, str, options)
|
||||
}
|
||||
|
||||
function writeFile (file, obj, options, callback) {
|
||||
if (callback == null) {
|
||||
callback = options
|
||||
options = {}
|
||||
}
|
||||
options = options || {}
|
||||
var fs = options.fs || _fs
|
||||
const writeFile = universalify.fromPromise(_writeFile)
|
||||
|
||||
var str = ''
|
||||
try {
|
||||
str = stringify(obj, options)
|
||||
} catch (err) {
|
||||
// Need to return whether a callback was passed or not
|
||||
if (callback) callback(err, null)
|
||||
return
|
||||
}
|
||||
function writeFileSync (file, obj, options = {}) {
|
||||
const fs = options.fs || _fs
|
||||
|
||||
fs.writeFile(file, str, options, callback)
|
||||
}
|
||||
|
||||
function writeFileSync (file, obj, options) {
|
||||
options = options || {}
|
||||
var fs = options.fs || _fs
|
||||
|
||||
var str = stringify(obj, options)
|
||||
const str = stringify(obj, options)
|
||||
// not sure if fs.writeFileSync returns anything, but just in case
|
||||
return fs.writeFileSync(file, str, options)
|
||||
}
|
||||
|
||||
function stripBom (content) {
|
||||
// we do this because JSON.parse would convert it to a utf8 string if encoding wasn't specified
|
||||
if (Buffer.isBuffer(content)) content = content.toString('utf8')
|
||||
content = content.replace(/^\uFEFF/, '')
|
||||
return content
|
||||
}
|
||||
|
||||
var jsonfile = {
|
||||
readFile: readFile,
|
||||
readFileSync: readFileSync,
|
||||
writeFile: writeFile,
|
||||
writeFileSync: writeFileSync
|
||||
const jsonfile = {
|
||||
readFile,
|
||||
readFileSync,
|
||||
writeFile,
|
||||
writeFileSync
|
||||
}
|
||||
|
||||
module.exports = jsonfile
|
||||
|
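After this rewrite, callback and promise support both come from wrapping the async internals with `universalify.fromPromise`. A small sketch of the behaviour this buys, assuming the published `jsonfile` API:

```js
const jsonfile = require('jsonfile')
const file = '/tmp/data.json'

// Callback style still works...
jsonfile.readFile(file, function (err, obj) {
  if (err) return console.error(err)
  console.dir(obj)
})

// ...and omitting the callback returns a promise from the very same function.
jsonfile.readFile(file)
  .then(obj => console.dir(obj))
  .catch(error => console.error(error))
```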
|
30
node_modules/jsonfile/package.json
generated
vendored
|
@@ -1,31 +1,31 @@
|
|||
{
|
||||
"_args": [
|
||||
[
|
||||
"jsonfile@4.0.0",
|
||||
"jsonfile@6.0.1",
|
||||
"/home/runner/work/ghaction-github-pages/ghaction-github-pages"
|
||||
]
|
||||
],
|
||||
"_from": "jsonfile@4.0.0",
|
||||
"_id": "jsonfile@4.0.0",
|
||||
"_from": "jsonfile@6.0.1",
|
||||
"_id": "jsonfile@6.0.1",
|
||||
"_inBundle": false,
|
||||
"_integrity": "sha1-h3Gq4HmbZAdrdmQPygWPnBDjPss=",
|
||||
"_integrity": "sha512-jR2b5v7d2vIOust+w3wtFKZIfpC2pnRmFAhAC/BuweZFQR8qZzxH1OyrQ10HmdVYiXWkYUqPVsz91cG7EL2FBg==",
|
||||
"_location": "/jsonfile",
|
||||
"_phantomChildren": {},
|
||||
"_requested": {
|
||||
"type": "version",
|
||||
"registry": true,
|
||||
"raw": "jsonfile@4.0.0",
|
||||
"raw": "jsonfile@6.0.1",
|
||||
"name": "jsonfile",
|
||||
"escapedName": "jsonfile",
|
||||
"rawSpec": "4.0.0",
|
||||
"rawSpec": "6.0.1",
|
||||
"saveSpec": null,
|
||||
"fetchSpec": "4.0.0"
|
||||
"fetchSpec": "6.0.1"
|
||||
},
|
||||
"_requiredBy": [
|
||||
"/fs-extra"
|
||||
],
|
||||
"_resolved": "https://registry.npmjs.org/jsonfile/-/jsonfile-4.0.0.tgz",
|
||||
"_spec": "4.0.0",
|
||||
"_resolved": "https://registry.npmjs.org/jsonfile/-/jsonfile-6.0.1.tgz",
|
||||
"_spec": "6.0.1",
|
||||
"_where": "/home/runner/work/ghaction-github-pages/ghaction-github-pages",
|
||||
"author": {
|
||||
"name": "JP Richardson",
|
||||
|
@@ -35,16 +35,18 @@
|
|||
"url": "https://github.com/jprichardson/node-jsonfile/issues"
|
||||
},
|
||||
"dependencies": {
|
||||
"graceful-fs": "^4.1.6"
|
||||
"graceful-fs": "^4.1.6",
|
||||
"universalify": "^1.0.0"
|
||||
},
|
||||
"description": "Easily read/write JSON files.",
|
||||
"devDependencies": {
|
||||
"mocha": "2.x",
|
||||
"mocha": "^5.2.0",
|
||||
"rimraf": "^2.4.0",
|
||||
"standard": "^10.0.3"
|
||||
"standard": "^12.0.1"
|
||||
},
|
||||
"files": [
|
||||
"index.js"
|
||||
"index.js",
|
||||
"utils.js"
|
||||
],
|
||||
"homepage": "https://github.com/jprichardson/node-jsonfile#readme",
|
||||
"keywords": [
|
||||
|
@@ -70,5 +72,5 @@
|
|||
"test": "npm run lint && npm run unit",
|
||||
"unit": "mocha"
|
||||
},
|
||||
"version": "4.0.0"
|
||||
"version": "6.0.1"
|
||||
}
|
||||
|
|
15
node_modules/jsonfile/utils.js
generated
vendored
Normal file
|
@@ -0,0 +1,15 @@
|
|||
function stringify (obj, options = {}) {
|
||||
const EOL = options.EOL || '\n'
|
||||
|
||||
const str = JSON.stringify(obj, options ? options.replacer : null, options.spaces)
|
||||
|
||||
return str.replace(/\n/g, EOL) + EOL
|
||||
}
|
||||
|
||||
function stripBom (content) {
|
||||
// we do this because JSON.parse would convert it to a utf8 string if encoding wasn't specified
|
||||
if (Buffer.isBuffer(content)) content = content.toString('utf8')
|
||||
return content.replace(/^\uFEFF/, '')
|
||||
}
|
||||
|
||||
module.exports = { stringify, stripBom }
|
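A rough sketch of what the new `stringify` helper returns for common options (the require path assumes you are inside the package):

```js
const { stringify } = require('./utils')

// Two-space indentation, default '\n' line endings, plus a trailing newline.
stringify({ name: 'JP' }, { spaces: 2 })
// -> '{\n  "name": "JP"\n}\n'

// Every '\n' is replaced by the EOL string before the trailing EOL is appended.
stringify({ name: 'JP' }, { spaces: 2, EOL: '\r\n' })
// -> '{\r\n  "name": "JP"\r\n}\r\n'
```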
2
node_modules/universalify/README.md
generated
vendored
|
@@ -21,7 +21,7 @@ npm install universalify
|
|||
|
||||
Takes a callback-based function to universalify, and returns the universalified function.
|
||||
|
||||
Function must take a callback as the last parameter that will be called with the signature `(error, result)`. `universalify` does not support calling the callback with more than three arguments, and does not ensure that the callback is only called once.
|
||||
Function must take a callback as the last parameter that will be called with the signature `(error, result)`. `universalify` does not support calling the callback with three or more arguments, and does not ensure that the callback is only called once.
|
||||
|
||||
```js
|
||||
function callbackFn (n, cb) {
|
||||
|
|
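A minimal, self-contained sketch of the `fromCallback` pattern described above (`callbackFn` follows the README's naming; the doubling body is just an assumption):

```js
const universalify = require('universalify')

// A callback-last function with the (error, result) signature universalify expects.
function callbackFn (n, cb) {
  setTimeout(() => cb(null, n * 2), 10)
}

const fn = universalify.fromCallback(callbackFn)

fn(21, (err, result) => console.log(err, result)) // callback style -> null 42
fn(21).then(result => console.log(result))        // promise style  -> 42
```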
22
node_modules/universalify/index.js
generated
vendored
|
@@ -1,25 +1,23 @@
|
|||
'use strict'
|
||||
|
||||
exports.fromCallback = function (fn) {
|
||||
return Object.defineProperty(function () {
|
||||
if (typeof arguments[arguments.length - 1] === 'function') fn.apply(this, arguments)
|
||||
return Object.defineProperty(function (...args) {
|
||||
if (typeof args[args.length - 1] === 'function') fn.apply(this, args)
|
||||
else {
|
||||
return new Promise((resolve, reject) => {
|
||||
arguments[arguments.length] = (err, res) => {
|
||||
if (err) return reject(err)
|
||||
resolve(res)
|
||||
}
|
||||
arguments.length++
|
||||
fn.apply(this, arguments)
|
||||
fn.apply(
|
||||
this,
|
||||
args.concat([(err, res) => err ? reject(err) : resolve(res)])
|
||||
)
|
||||
})
|
||||
}
|
||||
}, 'name', { value: fn.name })
|
||||
}
|
||||
|
||||
exports.fromPromise = function (fn) {
|
||||
return Object.defineProperty(function () {
|
||||
const cb = arguments[arguments.length - 1]
|
||||
if (typeof cb !== 'function') return fn.apply(this, arguments)
|
||||
else fn.apply(this, arguments).then(r => cb(null, r), cb)
|
||||
return Object.defineProperty(function (...args) {
|
||||
const cb = args[args.length - 1]
|
||||
if (typeof cb !== 'function') return fn.apply(this, args)
|
||||
else fn.apply(this, args.slice(0, -1)).then(r => cb(null, r), cb)
|
||||
}, 'name', { value: fn.name })
|
||||
}
|
||||
|
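The rewritten `fromPromise` strips the trailing callback with `args.slice(0, -1)` before calling the wrapped function, instead of forwarding it as the old code did. A minimal sketch of the resulting behaviour (the `double` function is a made-up example):

```js
const universalify = require('universalify')

// A promise-returning function; universalify adds an optional callback-style API.
async function double (n) {
  return n * 2
}

const fn = universalify.fromPromise(double)

fn(21).then(console.log)                    // promise style: logs 42
fn(21, (err, res) => console.log(err, res)) // callback style: logs null 42;
                                            // the callback is sliced off before double(21) runs
```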
|
29
node_modules/universalify/package.json
generated
vendored
|
@@ -1,31 +1,32 @@
|
|||
{
|
||||
"_args": [
|
||||
[
|
||||
"universalify@0.1.2",
|
||||
"universalify@1.0.0",
|
||||
"/home/runner/work/ghaction-github-pages/ghaction-github-pages"
|
||||
]
|
||||
],
|
||||
"_from": "universalify@0.1.2",
|
||||
"_id": "universalify@0.1.2",
|
||||
"_from": "universalify@1.0.0",
|
||||
"_id": "universalify@1.0.0",
|
||||
"_inBundle": false,
|
||||
"_integrity": "sha512-rBJeI5CXAlmy1pV+617WB9J63U6XcazHHF2f2dbJix4XzpUF0RS3Zbj0FGIOCAva5P/d/GBOYaACQ1w+0azUkg==",
|
||||
"_integrity": "sha512-rb6X1W158d7pRQBg5gkR8uPaSfiids68LTJQYOtEUhoJUWBdaQHsuT/EUduxXYxcrt4r5PJ4fuHW1MHT6p0qug==",
|
||||
"_location": "/universalify",
|
||||
"_phantomChildren": {},
|
||||
"_requested": {
|
||||
"type": "version",
|
||||
"registry": true,
|
||||
"raw": "universalify@0.1.2",
|
||||
"raw": "universalify@1.0.0",
|
||||
"name": "universalify",
|
||||
"escapedName": "universalify",
|
||||
"rawSpec": "0.1.2",
|
||||
"rawSpec": "1.0.0",
|
||||
"saveSpec": null,
|
||||
"fetchSpec": "0.1.2"
|
||||
"fetchSpec": "1.0.0"
|
||||
},
|
||||
"_requiredBy": [
|
||||
"/fs-extra"
|
||||
"/fs-extra",
|
||||
"/jsonfile"
|
||||
],
|
||||
"_resolved": "https://registry.npmjs.org/universalify/-/universalify-0.1.2.tgz",
|
||||
"_spec": "0.1.2",
|
||||
"_resolved": "https://registry.npmjs.org/universalify/-/universalify-1.0.0.tgz",
|
||||
"_spec": "1.0.0",
|
||||
"_where": "/home/runner/work/ghaction-github-pages/ghaction-github-pages",
|
||||
"author": {
|
||||
"name": "Ryan Zimmerman",
|
||||
|
@@ -38,12 +39,12 @@
|
|||
"devDependencies": {
|
||||
"colortape": "^0.1.2",
|
||||
"coveralls": "^3.0.1",
|
||||
"nyc": "^10.2.0",
|
||||
"standard": "^10.0.1",
|
||||
"nyc": "^15.0.0",
|
||||
"standard": "^14.3.1",
|
||||
"tape": "^4.6.3"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">= 4.0.0"
|
||||
"node": ">= 10.0.0"
|
||||
},
|
||||
"files": [
|
||||
"index.js"
|
||||
|
@@ -63,5 +64,5 @@
|
|||
"scripts": {
|
||||
"test": "standard && nyc tape test/*.js | colortape"
|
||||
},
|
||||
"version": "0.1.2"
|
||||
"version": "1.0.0"
|
||||
}
|
||||
|
|