chore: bump version to 0.7.1

ethan.chen
2025-05-27 14:16:48 +08:00
commit 63eda91fd6
103 changed files with 15868 additions and 0 deletions

53
node_modules/.package-lock.json generated vendored Normal file

@@ -0,0 +1,53 @@
{
"name": "lyroc",
"version": "0.7.0",
"lockfileVersion": 3,
"requires": true,
"packages": {
"node_modules/fs-extra": {
"version": "11.3.0",
"resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-11.3.0.tgz",
"integrity": "sha512-Z4XaCL6dUDHfP/jT25jJKMmtxvuwbkrD1vNSMFlo9lNLY2c5FHYSQgHPRZUjAB26TpDEoW9HCOgplrdbaPV/ew==",
"dev": true,
"license": "MIT",
"dependencies": {
"graceful-fs": "^4.2.0",
"jsonfile": "^6.0.1",
"universalify": "^2.0.0"
},
"engines": {
"node": ">=14.14"
}
},
"node_modules/graceful-fs": {
"version": "4.2.11",
"resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.11.tgz",
"integrity": "sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==",
"dev": true,
"license": "ISC"
},
"node_modules/jsonfile": {
"version": "6.1.0",
"resolved": "https://registry.npmjs.org/jsonfile/-/jsonfile-6.1.0.tgz",
"integrity": "sha512-5dgndWOriYSm5cnYaJNhalLNDKOqFwyDB/rr1E9ZsGciGvKPs8R2xYGCacuf3z6K1YKDz182fd+fY3cn3pMqXQ==",
"dev": true,
"license": "MIT",
"dependencies": {
"universalify": "^2.0.0"
},
"optionalDependencies": {
"graceful-fs": "^4.1.6"
}
},
"node_modules/universalify": {
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/universalify/-/universalify-2.0.1.tgz",
"integrity": "sha512-gptHNQghINnc/vTGIk0SOFGFNXw7JVrlRUtConJRlvaw6DuX0wO5Jeko9sWrMBhh+PsYAZ7oXAiOnf/UKogyiw==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">= 10.0.0"
}
}
}
}

15
node_modules/fs-extra/LICENSE generated vendored Normal file

@@ -0,0 +1,15 @@
(The MIT License)
Copyright (c) 2011-2024 JP Richardson
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files
(the 'Software'), to deal in the Software without restriction, including without limitation the rights to use, copy, modify,
merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS
OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

292
node_modules/fs-extra/README.md generated vendored Normal file

@@ -0,0 +1,292 @@
Node.js: fs-extra
=================
`fs-extra` adds file system methods that aren't included in the native `fs` module and adds promise support to the `fs` methods. It also uses [`graceful-fs`](https://github.com/isaacs/node-graceful-fs) to prevent `EMFILE` errors. It should be a drop-in replacement for `fs`.
[![npm Package](https://img.shields.io/npm/v/fs-extra.svg)](https://www.npmjs.org/package/fs-extra)
[![License](https://img.shields.io/npm/l/fs-extra.svg)](https://github.com/jprichardson/node-fs-extra/blob/master/LICENSE)
[![build status](https://img.shields.io/github/actions/workflow/status/jprichardson/node-fs-extra/ci.yml?branch=master)](https://github.com/jprichardson/node-fs-extra/actions/workflows/ci.yml?query=branch%3Amaster)
[![downloads per month](http://img.shields.io/npm/dm/fs-extra.svg)](https://www.npmjs.org/package/fs-extra)
[![JavaScript Style Guide](https://img.shields.io/badge/code_style-standard-brightgreen.svg)](https://standardjs.com)
Why?
----
I got tired of including `mkdirp`, `rimraf`, and `ncp` in most of my projects.
Installation
------------
npm install fs-extra
Usage
-----
### CommonJS
`fs-extra` is a drop-in replacement for native `fs`. All methods in `fs` are attached to `fs-extra`. All `fs` methods return promises if the callback isn't passed.
You don't ever need to include the original `fs` module again:
```js
const fs = require('fs') // this is no longer necessary
```
you can now do this:
```js
const fs = require('fs-extra')
```
or if you prefer to make it clear that you're using `fs-extra` and not `fs`, you may want
to name your `fs` variable `fse` like so:
```js
const fse = require('fs-extra')
```
you can also keep both, but it's redundant:
```js
const fs = require('fs')
const fse = require('fs-extra')
```
### ESM
There is also an `fs-extra/esm` import that supports both default and named exports. However, note that `fs` methods are not included in `fs-extra/esm`; you still need to import `fs` and/or `fs/promises` separately:
```js
import { readFileSync } from 'fs'
import { readFile } from 'fs/promises'
import { outputFile, outputFileSync } from 'fs-extra/esm'
```
Default exports are supported:
```js
import fs from 'fs'
import fse from 'fs-extra/esm'
// fse.readFileSync is not a function; must use fs.readFileSync
```
but you probably want to just use regular `fs-extra` instead of `fs-extra/esm` for default exports:
```js
import fs from 'fs-extra'
// both fs and fs-extra methods are defined
```
Sync vs Async vs Async/Await
-------------
Most methods are async by default. All async methods will return a promise if the callback isn't passed.
Sync methods, on the other hand, will throw if an error occurs.
Async/await calls will also throw an error if one occurs.
Example:
```js
const fs = require('fs-extra')
// Async with promises:
fs.copy('/tmp/myfile', '/tmp/mynewfile')
.then(() => console.log('success!'))
.catch(err => console.error(err))
// Async with callbacks:
fs.copy('/tmp/myfile', '/tmp/mynewfile', err => {
if (err) return console.error(err)
console.log('success!')
})
// Sync:
try {
fs.copySync('/tmp/myfile', '/tmp/mynewfile')
console.log('success!')
} catch (err) {
console.error(err)
}
// Async/Await:
async function copyFiles () {
try {
await fs.copy('/tmp/myfile', '/tmp/mynewfile')
console.log('success!')
} catch (err) {
console.error(err)
}
}
copyFiles()
```
Methods
-------
### Async
- [copy](docs/copy.md)
- [emptyDir](docs/emptyDir.md)
- [ensureFile](docs/ensureFile.md)
- [ensureDir](docs/ensureDir.md)
- [ensureLink](docs/ensureLink.md)
- [ensureSymlink](docs/ensureSymlink.md)
- [mkdirp](docs/ensureDir.md)
- [mkdirs](docs/ensureDir.md)
- [move](docs/move.md)
- [outputFile](docs/outputFile.md)
- [outputJson](docs/outputJson.md)
- [pathExists](docs/pathExists.md)
- [readJson](docs/readJson.md)
- [remove](docs/remove.md)
- [writeJson](docs/writeJson.md)
### Sync
- [copySync](docs/copy-sync.md)
- [emptyDirSync](docs/emptyDir-sync.md)
- [ensureFileSync](docs/ensureFile-sync.md)
- [ensureDirSync](docs/ensureDir-sync.md)
- [ensureLinkSync](docs/ensureLink-sync.md)
- [ensureSymlinkSync](docs/ensureSymlink-sync.md)
- [mkdirpSync](docs/ensureDir-sync.md)
- [mkdirsSync](docs/ensureDir-sync.md)
- [moveSync](docs/move-sync.md)
- [outputFileSync](docs/outputFile-sync.md)
- [outputJsonSync](docs/outputJson-sync.md)
- [pathExistsSync](docs/pathExists-sync.md)
- [readJsonSync](docs/readJson-sync.md)
- [removeSync](docs/remove-sync.md)
- [writeJsonSync](docs/writeJson-sync.md)
**NOTE:** You can still use the native Node.js methods. They are promisified and copied over to `fs-extra`. See [notes on `fs.read()`, `fs.write()`, & `fs.writev()`](docs/fs-read-write-writev.md)
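As a minimal sketch of that note (the paths below are placeholders): native methods such as `fs.stat()`, `fs.open()`, and `fs.read()` are available straight from `fs-extra` and return promises when no callback is passed, with `fs.read()` resolving to an object because its callback has multiple result arguments (see `lib/fs/index.js` in this change set).
```js
const fs = require('fs-extra')
async function readFirstBytes () {
  // Promisified native method: no callback, so it returns a promise
  const stats = await fs.stat('/tmp/myfile')
  // fs.read() has multiple callback results, so the promise resolves to an object
  const fd = await fs.open('/tmp/myfile', 'r')
  const { bytesRead, buffer } = await fs.read(fd, Buffer.alloc(16), 0, 16, 0)
  await fs.close(fd)
  console.log(stats.size, bytesRead, buffer)
}
readFirstBytes().catch(console.error)
```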
### What happened to `walk()` and `walkSync()`?
They were removed from `fs-extra` in v2.0.0. If you need the functionality, `walk` and `walkSync` are available as separate packages, [`klaw`](https://github.com/jprichardson/node-klaw) and [`klaw-sync`](https://github.com/manidlou/node-klaw-sync).
Third Party
-----------
### CLI
[fse-cli](https://www.npmjs.com/package/@atao60/fse-cli) allows you to run `fs-extra` from a console or from [npm](https://www.npmjs.com) scripts.
### TypeScript
If you like TypeScript, you can use `fs-extra` with it: https://github.com/DefinitelyTyped/DefinitelyTyped/tree/master/types/fs-extra
### File / Directory Watching
If you want to watch for changes to files or directories, then you should use [chokidar](https://github.com/paulmillr/chokidar).
### Obtain Filesystem (Devices, Partitions) Information
[fs-filesystem](https://github.com/arthurintelligence/node-fs-filesystem) allows you to read the state of the filesystem of the host on which it is run. It returns information about both the devices and the partitions (volumes) of the system.
### Misc.
- [fs-extra-debug](https://github.com/jdxcode/fs-extra-debug) - Send your fs-extra calls to [debug](https://npmjs.org/package/debug).
- [mfs](https://github.com/cadorn/mfs) - Monitor your fs-extra calls.
Hacking on fs-extra
-------------------
Wanna hack on `fs-extra`? Great! Your help is needed! [fs-extra is one of the most depended upon Node.js packages](http://nodei.co/npm/fs-extra.png?downloads=true&downloadRank=true&stars=true). This project
uses [JavaScript Standard Style](https://github.com/feross/standard) - if the name or style choices bother you,
you're gonna have to get over it :) If `standard` is good enough for `npm`, it's good enough for `fs-extra`.
[![js-standard-style](https://cdn.rawgit.com/feross/standard/master/badge.svg)](https://github.com/feross/standard)
What's needed?
- First, take a look at existing issues. Those are probably going to be where the priority lies.
- More tests for edge cases. Specifically on different platforms. There can never be enough tests.
- Improve test coverage.
Note: If you make any big changes, **you should definitely file an issue for discussion first.**
### Running the Test Suite
fs-extra contains hundreds of tests.
- `npm run lint`: runs the linter ([standard](http://standardjs.com/))
- `npm run unit`: runs the unit tests
- `npm run unit-esm`: runs tests for `fs-extra/esm` exports
- `npm test`: runs the linter and all tests
When running unit tests, set the environment variable `CROSS_DEVICE_PATH` to the absolute path of an empty directory on another device (like a thumb drive) to enable cross-device move tests.
### Windows
If you run the tests on Windows and receive a lot of symbolic link `EPERM` permission errors, it's
because on Windows you need elevated privileges to create symbolic links. You can add this permission to your Windows
account by following the instructions here: http://superuser.com/questions/104845/permission-to-make-symbolic-links-in-windows-7
However, I didn't have much luck doing this.
Since I develop on Mac OS X, I use VMWare Fusion for Windows testing. I create a shared folder that I map to a drive on Windows.
I open the `Node.js command prompt` and run as `Administrator`. I then map the network drive by running the following command:
net use z: "\\vmware-host\Shared Folders"
I can then navigate to my `fs-extra` directory and run the tests.
Naming
------
I put a lot of thought into the naming of these functions, inspired by @coolaj86's request, so he deserves much of the credit for raising the issue. See discussion(s) here:
* https://github.com/jprichardson/node-fs-extra/issues/2
* https://github.com/flatiron/utile/issues/11
* https://github.com/ryanmcgrath/wrench-js/issues/29
* https://github.com/substack/node-mkdirp/issues/17
First, I believe that in as many cases as possible, the [Node.js naming schemes](http://nodejs.org/api/fs.html) should be chosen. However, there are problems with Node.js's own naming schemes.
For example, `fs.readFile()` and `fs.readdir()`: the **F** is capitalized in *File* and the **d** is not capitalized in *dir*. Perhaps a bit pedantic, but they should still be consistent. Also, Node.js has chosen a lot of POSIX naming schemes, which I believe is great. See: `fs.mkdir()`, `fs.rmdir()`, `fs.chown()`, etc.
We have a dilemma though. How do you consistently name methods that perform the following POSIX commands: `cp`, `cp -r`, `mkdir -p`, and `rm -rf`?
My perspective: when in doubt, err on the side of simplicity. A directory is just a hierarchical grouping of directories and files. Consider that for a moment. So when you want to copy it or remove it, in most cases you'll want to copy or remove all of its contents. When you want to create a directory, if the directory that it's supposed to be contained in does not exist, then in most cases you'll want to create that too.
So, if you want to remove a file or a directory regardless of whether it has contents, just call `fs.remove(path)`. If you want to copy a file or a directory regardless of whether it has contents, just call `fs.copy(source, destination)`. If you want to create a directory regardless of whether its parent directories exist, just call `fs.mkdirs(path)` or `fs.mkdirp(path)`.
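A short sketch of those semantics (the paths are placeholders):
```js
const fs = require('fs-extra')
async function example () {
  // Creates /tmp/a/b/c, including any missing parent directories
  await fs.mkdirs('/tmp/a/b/c')
  // Copies a file or an entire directory tree, contents and all
  await fs.copy('/tmp/a', '/tmp/a-backup')
  // Removes a file or a directory regardless of whether it has contents
  await fs.remove('/tmp/a-backup')
}
example().catch(console.error)
```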
Credit
------
`fs-extra` wouldn't be possible without using the modules from the following authors:
- [Isaac Schlueter](https://github.com/isaacs)
- [Charlie McConnel](https://github.com/avianflu)
- [James Halliday](https://github.com/substack)
- [Andrew Kelley](https://github.com/andrewrk)
License
-------
Licensed under MIT
Copyright (c) 2011-2024 [JP Richardson](https://github.com/jprichardson)
[1]: http://nodejs.org/docs/latest/api/fs.html
[jsonfile]: https://github.com/jprichardson/node-jsonfile

171
node_modules/fs-extra/lib/copy/copy-sync.js generated vendored Normal file

@@ -0,0 +1,171 @@
'use strict'
const fs = require('graceful-fs')
const path = require('path')
const mkdirsSync = require('../mkdirs').mkdirsSync
const utimesMillisSync = require('../util/utimes').utimesMillisSync
const stat = require('../util/stat')
function copySync (src, dest, opts) {
if (typeof opts === 'function') {
opts = { filter: opts }
}
opts = opts || {}
opts.clobber = 'clobber' in opts ? !!opts.clobber : true // default to true for now
opts.overwrite = 'overwrite' in opts ? !!opts.overwrite : opts.clobber // overwrite falls back to clobber
// Warn about using preserveTimestamps on 32-bit node
if (opts.preserveTimestamps && process.arch === 'ia32') {
process.emitWarning(
'Using the preserveTimestamps option in 32-bit node is not recommended;\n\n' +
'\tsee https://github.com/jprichardson/node-fs-extra/issues/269',
'Warning', 'fs-extra-WARN0002'
)
}
const { srcStat, destStat } = stat.checkPathsSync(src, dest, 'copy', opts)
stat.checkParentPathsSync(src, srcStat, dest, 'copy')
if (opts.filter && !opts.filter(src, dest)) return
const destParent = path.dirname(dest)
if (!fs.existsSync(destParent)) mkdirsSync(destParent)
return getStats(destStat, src, dest, opts)
}
function getStats (destStat, src, dest, opts) {
const statSync = opts.dereference ? fs.statSync : fs.lstatSync
const srcStat = statSync(src)
if (srcStat.isDirectory()) return onDir(srcStat, destStat, src, dest, opts)
else if (srcStat.isFile() ||
srcStat.isCharacterDevice() ||
srcStat.isBlockDevice()) return onFile(srcStat, destStat, src, dest, opts)
else if (srcStat.isSymbolicLink()) return onLink(destStat, src, dest, opts)
else if (srcStat.isSocket()) throw new Error(`Cannot copy a socket file: ${src}`)
else if (srcStat.isFIFO()) throw new Error(`Cannot copy a FIFO pipe: ${src}`)
throw new Error(`Unknown file: ${src}`)
}
function onFile (srcStat, destStat, src, dest, opts) {
if (!destStat) return copyFile(srcStat, src, dest, opts)
return mayCopyFile(srcStat, src, dest, opts)
}
function mayCopyFile (srcStat, src, dest, opts) {
if (opts.overwrite) {
fs.unlinkSync(dest)
return copyFile(srcStat, src, dest, opts)
} else if (opts.errorOnExist) {
throw new Error(`'${dest}' already exists`)
}
}
function copyFile (srcStat, src, dest, opts) {
fs.copyFileSync(src, dest)
if (opts.preserveTimestamps) handleTimestamps(srcStat.mode, src, dest)
return setDestMode(dest, srcStat.mode)
}
function handleTimestamps (srcMode, src, dest) {
// Make sure the file is writable before setting the timestamp
// otherwise open fails with EPERM when invoked with 'r+'
// (through utimes call)
if (fileIsNotWritable(srcMode)) makeFileWritable(dest, srcMode)
return setDestTimestamps(src, dest)
}
function fileIsNotWritable (srcMode) {
return (srcMode & 0o200) === 0
}
function makeFileWritable (dest, srcMode) {
return setDestMode(dest, srcMode | 0o200)
}
function setDestMode (dest, srcMode) {
return fs.chmodSync(dest, srcMode)
}
function setDestTimestamps (src, dest) {
// The initial srcStat.atime cannot be trusted
// because it is modified by the read(2) system call
// (See https://nodejs.org/api/fs.html#fs_stat_time_values)
const updatedSrcStat = fs.statSync(src)
return utimesMillisSync(dest, updatedSrcStat.atime, updatedSrcStat.mtime)
}
function onDir (srcStat, destStat, src, dest, opts) {
if (!destStat) return mkDirAndCopy(srcStat.mode, src, dest, opts)
return copyDir(src, dest, opts)
}
function mkDirAndCopy (srcMode, src, dest, opts) {
fs.mkdirSync(dest)
copyDir(src, dest, opts)
return setDestMode(dest, srcMode)
}
function copyDir (src, dest, opts) {
const dir = fs.opendirSync(src)
try {
let dirent
while ((dirent = dir.readSync()) !== null) {
copyDirItem(dirent.name, src, dest, opts)
}
} finally {
dir.closeSync()
}
}
function copyDirItem (item, src, dest, opts) {
const srcItem = path.join(src, item)
const destItem = path.join(dest, item)
if (opts.filter && !opts.filter(srcItem, destItem)) return
const { destStat } = stat.checkPathsSync(srcItem, destItem, 'copy', opts)
return getStats(destStat, srcItem, destItem, opts)
}
function onLink (destStat, src, dest, opts) {
let resolvedSrc = fs.readlinkSync(src)
if (opts.dereference) {
resolvedSrc = path.resolve(process.cwd(), resolvedSrc)
}
if (!destStat) {
return fs.symlinkSync(resolvedSrc, dest)
} else {
let resolvedDest
try {
resolvedDest = fs.readlinkSync(dest)
} catch (err) {
// dest exists and is a regular file or directory,
// Windows may throw UNKNOWN error. If dest already exists,
// fs throws error anyway, so no need to guard against it here.
if (err.code === 'EINVAL' || err.code === 'UNKNOWN') return fs.symlinkSync(resolvedSrc, dest)
throw err
}
if (opts.dereference) {
resolvedDest = path.resolve(process.cwd(), resolvedDest)
}
if (stat.isSrcSubdir(resolvedSrc, resolvedDest)) {
throw new Error(`Cannot copy '${resolvedSrc}' to a subdirectory of itself, '${resolvedDest}'.`)
}
// prevent copy if src is a subdir of dest since unlinking
// dest in this case would result in removing src contents
// and therefore a broken symlink would be created.
if (stat.isSrcSubdir(resolvedDest, resolvedSrc)) {
throw new Error(`Cannot overwrite '${resolvedDest}' with '${resolvedSrc}'.`)
}
return copyLink(resolvedSrc, dest)
}
}
function copyLink (resolvedSrc, dest) {
fs.unlinkSync(dest)
return fs.symlinkSync(resolvedSrc, dest)
}
module.exports = copySync

182
node_modules/fs-extra/lib/copy/copy.js generated vendored Normal file

@@ -0,0 +1,182 @@
'use strict'
const fs = require('../fs')
const path = require('path')
const { mkdirs } = require('../mkdirs')
const { pathExists } = require('../path-exists')
const { utimesMillis } = require('../util/utimes')
const stat = require('../util/stat')
async function copy (src, dest, opts = {}) {
if (typeof opts === 'function') {
opts = { filter: opts }
}
opts.clobber = 'clobber' in opts ? !!opts.clobber : true // default to true for now
opts.overwrite = 'overwrite' in opts ? !!opts.overwrite : opts.clobber // overwrite falls back to clobber
// Warn about using preserveTimestamps on 32-bit node
if (opts.preserveTimestamps && process.arch === 'ia32') {
process.emitWarning(
'Using the preserveTimestamps option in 32-bit node is not recommended;\n\n' +
'\tsee https://github.com/jprichardson/node-fs-extra/issues/269',
'Warning', 'fs-extra-WARN0001'
)
}
const { srcStat, destStat } = await stat.checkPaths(src, dest, 'copy', opts)
await stat.checkParentPaths(src, srcStat, dest, 'copy')
const include = await runFilter(src, dest, opts)
if (!include) return
// check if the parent of dest exists, and create it if it doesn't exist
const destParent = path.dirname(dest)
const dirExists = await pathExists(destParent)
if (!dirExists) {
await mkdirs(destParent)
}
await getStatsAndPerformCopy(destStat, src, dest, opts)
}
async function runFilter (src, dest, opts) {
if (!opts.filter) return true
return opts.filter(src, dest)
}
async function getStatsAndPerformCopy (destStat, src, dest, opts) {
const statFn = opts.dereference ? fs.stat : fs.lstat
const srcStat = await statFn(src)
if (srcStat.isDirectory()) return onDir(srcStat, destStat, src, dest, opts)
if (
srcStat.isFile() ||
srcStat.isCharacterDevice() ||
srcStat.isBlockDevice()
) return onFile(srcStat, destStat, src, dest, opts)
if (srcStat.isSymbolicLink()) return onLink(destStat, src, dest, opts)
if (srcStat.isSocket()) throw new Error(`Cannot copy a socket file: ${src}`)
if (srcStat.isFIFO()) throw new Error(`Cannot copy a FIFO pipe: ${src}`)
throw new Error(`Unknown file: ${src}`)
}
async function onFile (srcStat, destStat, src, dest, opts) {
if (!destStat) return copyFile(srcStat, src, dest, opts)
if (opts.overwrite) {
await fs.unlink(dest)
return copyFile(srcStat, src, dest, opts)
}
if (opts.errorOnExist) {
throw new Error(`'${dest}' already exists`)
}
}
async function copyFile (srcStat, src, dest, opts) {
await fs.copyFile(src, dest)
if (opts.preserveTimestamps) {
// Make sure the file is writable before setting the timestamp
// otherwise open fails with EPERM when invoked with 'r+'
// (through utimes call)
if (fileIsNotWritable(srcStat.mode)) {
await makeFileWritable(dest, srcStat.mode)
}
// Set timestamps and mode correspondingly
// Note that the initial srcStat.atime cannot be trusted
// because it is modified by the read(2) system call
// (See https://nodejs.org/api/fs.html#fs_stat_time_values)
const updatedSrcStat = await fs.stat(src)
await utimesMillis(dest, updatedSrcStat.atime, updatedSrcStat.mtime)
}
return fs.chmod(dest, srcStat.mode)
}
function fileIsNotWritable (srcMode) {
return (srcMode & 0o200) === 0
}
function makeFileWritable (dest, srcMode) {
return fs.chmod(dest, srcMode | 0o200)
}
async function onDir (srcStat, destStat, src, dest, opts) {
// the dest directory might not exist, create it
if (!destStat) {
await fs.mkdir(dest)
}
const promises = []
// loop through the files in the current directory to copy everything
for await (const item of await fs.opendir(src)) {
const srcItem = path.join(src, item.name)
const destItem = path.join(dest, item.name)
promises.push(
runFilter(srcItem, destItem, opts).then(include => {
if (include) {
// only copy the item if it matches the filter function
return stat.checkPaths(srcItem, destItem, 'copy', opts).then(({ destStat }) => {
// If the item is a copyable file, `getStatsAndPerformCopy` will copy it
// If the item is a directory, `getStatsAndPerformCopy` will call `onDir` recursively
return getStatsAndPerformCopy(destStat, srcItem, destItem, opts)
})
}
})
)
}
await Promise.all(promises)
if (!destStat) {
await fs.chmod(dest, srcStat.mode)
}
}
async function onLink (destStat, src, dest, opts) {
let resolvedSrc = await fs.readlink(src)
if (opts.dereference) {
resolvedSrc = path.resolve(process.cwd(), resolvedSrc)
}
if (!destStat) {
return fs.symlink(resolvedSrc, dest)
}
let resolvedDest = null
try {
resolvedDest = await fs.readlink(dest)
} catch (e) {
// dest exists and is a regular file or directory,
// Windows may throw UNKNOWN error. If dest already exists,
// fs throws error anyway, so no need to guard against it here.
if (e.code === 'EINVAL' || e.code === 'UNKNOWN') return fs.symlink(resolvedSrc, dest)
throw e
}
if (opts.dereference) {
resolvedDest = path.resolve(process.cwd(), resolvedDest)
}
if (stat.isSrcSubdir(resolvedSrc, resolvedDest)) {
throw new Error(`Cannot copy '${resolvedSrc}' to a subdirectory of itself, '${resolvedDest}'.`)
}
// do not copy if src is a subdir of dest since unlinking
// dest in this case would result in removing src contents
// and therefore a broken symlink would be created.
if (stat.isSrcSubdir(resolvedDest, resolvedSrc)) {
throw new Error(`Cannot overwrite '${resolvedDest}' with '${resolvedSrc}'.`)
}
// copy the link
await fs.unlink(dest)
return fs.symlink(resolvedSrc, dest)
}
module.exports = copy

7
node_modules/fs-extra/lib/copy/index.js generated vendored Normal file

@@ -0,0 +1,7 @@
'use strict'
const u = require('universalify').fromPromise
module.exports = {
copy: u(require('./copy')),
copySync: require('./copy-sync')
}

39
node_modules/fs-extra/lib/empty/index.js generated vendored Normal file

@@ -0,0 +1,39 @@
'use strict'
const u = require('universalify').fromPromise
const fs = require('../fs')
const path = require('path')
const mkdir = require('../mkdirs')
const remove = require('../remove')
const emptyDir = u(async function emptyDir (dir) {
let items
try {
items = await fs.readdir(dir)
} catch {
return mkdir.mkdirs(dir)
}
return Promise.all(items.map(item => remove.remove(path.join(dir, item))))
})
function emptyDirSync (dir) {
let items
try {
items = fs.readdirSync(dir)
} catch {
return mkdir.mkdirsSync(dir)
}
items.forEach(item => {
item = path.join(dir, item)
remove.removeSync(item)
})
}
module.exports = {
emptyDirSync,
emptydirSync: emptyDirSync,
emptyDir,
emptydir: emptyDir
}

66
node_modules/fs-extra/lib/ensure/file.js generated vendored Normal file

@@ -0,0 +1,66 @@
'use strict'
const u = require('universalify').fromPromise
const path = require('path')
const fs = require('../fs')
const mkdir = require('../mkdirs')
async function createFile (file) {
let stats
try {
stats = await fs.stat(file)
} catch { }
if (stats && stats.isFile()) return
const dir = path.dirname(file)
let dirStats = null
try {
dirStats = await fs.stat(dir)
} catch (err) {
// if the directory doesn't exist, make it
if (err.code === 'ENOENT') {
await mkdir.mkdirs(dir)
await fs.writeFile(file, '')
return
} else {
throw err
}
}
if (dirStats.isDirectory()) {
await fs.writeFile(file, '')
} else {
// parent is not a directory
// This is just to cause an internal ENOTDIR error to be thrown
await fs.readdir(dir)
}
}
function createFileSync (file) {
let stats
try {
stats = fs.statSync(file)
} catch { }
if (stats && stats.isFile()) return
const dir = path.dirname(file)
try {
if (!fs.statSync(dir).isDirectory()) {
// parent is not a directory
// This is just to cause an internal ENOTDIR error to be thrown
fs.readdirSync(dir)
}
} catch (err) {
// If the stat call above failed because the directory doesn't exist, create it
if (err && err.code === 'ENOENT') mkdir.mkdirsSync(dir)
else throw err
}
fs.writeFileSync(file, '')
}
module.exports = {
createFile: u(createFile),
createFileSync
}

23
node_modules/fs-extra/lib/ensure/index.js generated vendored Normal file

@@ -0,0 +1,23 @@
'use strict'
const { createFile, createFileSync } = require('./file')
const { createLink, createLinkSync } = require('./link')
const { createSymlink, createSymlinkSync } = require('./symlink')
module.exports = {
// file
createFile,
createFileSync,
ensureFile: createFile,
ensureFileSync: createFileSync,
// link
createLink,
createLinkSync,
ensureLink: createLink,
ensureLinkSync: createLinkSync,
// symlink
createSymlink,
createSymlinkSync,
ensureSymlink: createSymlink,
ensureSymlinkSync: createSymlinkSync
}

64
node_modules/fs-extra/lib/ensure/link.js generated vendored Normal file

@@ -0,0 +1,64 @@
'use strict'
const u = require('universalify').fromPromise
const path = require('path')
const fs = require('../fs')
const mkdir = require('../mkdirs')
const { pathExists } = require('../path-exists')
const { areIdentical } = require('../util/stat')
async function createLink (srcpath, dstpath) {
let dstStat
try {
dstStat = await fs.lstat(dstpath)
} catch {
// ignore error
}
let srcStat
try {
srcStat = await fs.lstat(srcpath)
} catch (err) {
err.message = err.message.replace('lstat', 'ensureLink')
throw err
}
if (dstStat && areIdentical(srcStat, dstStat)) return
const dir = path.dirname(dstpath)
const dirExists = await pathExists(dir)
if (!dirExists) {
await mkdir.mkdirs(dir)
}
await fs.link(srcpath, dstpath)
}
function createLinkSync (srcpath, dstpath) {
let dstStat
try {
dstStat = fs.lstatSync(dstpath)
} catch {}
try {
const srcStat = fs.lstatSync(srcpath)
if (dstStat && areIdentical(srcStat, dstStat)) return
} catch (err) {
err.message = err.message.replace('lstat', 'ensureLink')
throw err
}
const dir = path.dirname(dstpath)
const dirExists = fs.existsSync(dir)
if (dirExists) return fs.linkSync(srcpath, dstpath)
mkdir.mkdirsSync(dir)
return fs.linkSync(srcpath, dstpath)
}
module.exports = {
createLink: u(createLink),
createLinkSync
}

101
node_modules/fs-extra/lib/ensure/symlink-paths.js generated vendored Normal file

@@ -0,0 +1,101 @@
'use strict'
const path = require('path')
const fs = require('../fs')
const { pathExists } = require('../path-exists')
const u = require('universalify').fromPromise
/**
* Function that returns two types of paths, one relative to symlink, and one
* relative to the current working directory. Checks if path is absolute or
* relative. If the path is relative, this function checks if the path is
* relative to symlink or relative to current working directory. This is an
* initiative to find a smarter `srcpath` to supply when building symlinks.
* This allows you to determine which path to use out of one of three possible
* types of source paths. The first is an absolute path. This is detected by
* `path.isAbsolute()`. When an absolute path is provided, it is checked to
* see if it exists. If it does, it's used; if not, an error is returned
* (callback) / thrown (sync). The other two options for `srcpath` are
* relative paths. By default Node's `fs.symlink` works by creating a symlink
* using `dstpath` and expects the `srcpath` to be relative to the newly
* created symlink. If you provide a `srcpath` that does not exist on the file
* system it results in a broken symlink. To minimize this, the function
* checks to see if the 'relative to symlink' source file exists, and if it
* does, it will use it. If it does not, it checks if there's a file that
* exists relative to the current working directory; if it does, that's used.
* This preserves the expectations of the original fs.symlink spec and adds
* the ability to pass in `relative to current working directory` paths.
*/
async function symlinkPaths (srcpath, dstpath) {
if (path.isAbsolute(srcpath)) {
try {
await fs.lstat(srcpath)
} catch (err) {
err.message = err.message.replace('lstat', 'ensureSymlink')
throw err
}
return {
toCwd: srcpath,
toDst: srcpath
}
}
const dstdir = path.dirname(dstpath)
const relativeToDst = path.join(dstdir, srcpath)
const exists = await pathExists(relativeToDst)
if (exists) {
return {
toCwd: relativeToDst,
toDst: srcpath
}
}
try {
await fs.lstat(srcpath)
} catch (err) {
err.message = err.message.replace('lstat', 'ensureSymlink')
throw err
}
return {
toCwd: srcpath,
toDst: path.relative(dstdir, srcpath)
}
}
function symlinkPathsSync (srcpath, dstpath) {
if (path.isAbsolute(srcpath)) {
const exists = fs.existsSync(srcpath)
if (!exists) throw new Error('absolute srcpath does not exist')
return {
toCwd: srcpath,
toDst: srcpath
}
}
const dstdir = path.dirname(dstpath)
const relativeToDst = path.join(dstdir, srcpath)
const exists = fs.existsSync(relativeToDst)
if (exists) {
return {
toCwd: relativeToDst,
toDst: srcpath
}
}
const srcExists = fs.existsSync(srcpath)
if (!srcExists) throw new Error('relative srcpath does not exist')
return {
toCwd: srcpath,
toDst: path.relative(dstdir, srcpath)
}
}
module.exports = {
symlinkPaths: u(symlinkPaths),
symlinkPathsSync
}

34
node_modules/fs-extra/lib/ensure/symlink-type.js generated vendored Normal file

@@ -0,0 +1,34 @@
'use strict'
const fs = require('../fs')
const u = require('universalify').fromPromise
async function symlinkType (srcpath, type) {
if (type) return type
let stats
try {
stats = await fs.lstat(srcpath)
} catch {
return 'file'
}
return (stats && stats.isDirectory()) ? 'dir' : 'file'
}
function symlinkTypeSync (srcpath, type) {
if (type) return type
let stats
try {
stats = fs.lstatSync(srcpath)
} catch {
return 'file'
}
return (stats && stats.isDirectory()) ? 'dir' : 'file'
}
module.exports = {
symlinkType: u(symlinkType),
symlinkTypeSync
}

67
node_modules/fs-extra/lib/ensure/symlink.js generated vendored Normal file

@@ -0,0 +1,67 @@
'use strict'
const u = require('universalify').fromPromise
const path = require('path')
const fs = require('../fs')
const { mkdirs, mkdirsSync } = require('../mkdirs')
const { symlinkPaths, symlinkPathsSync } = require('./symlink-paths')
const { symlinkType, symlinkTypeSync } = require('./symlink-type')
const { pathExists } = require('../path-exists')
const { areIdentical } = require('../util/stat')
async function createSymlink (srcpath, dstpath, type) {
let stats
try {
stats = await fs.lstat(dstpath)
} catch { }
if (stats && stats.isSymbolicLink()) {
const [srcStat, dstStat] = await Promise.all([
fs.stat(srcpath),
fs.stat(dstpath)
])
if (areIdentical(srcStat, dstStat)) return
}
const relative = await symlinkPaths(srcpath, dstpath)
srcpath = relative.toDst
const toType = await symlinkType(relative.toCwd, type)
const dir = path.dirname(dstpath)
if (!(await pathExists(dir))) {
await mkdirs(dir)
}
return fs.symlink(srcpath, dstpath, toType)
}
function createSymlinkSync (srcpath, dstpath, type) {
let stats
try {
stats = fs.lstatSync(dstpath)
} catch { }
if (stats && stats.isSymbolicLink()) {
const srcStat = fs.statSync(srcpath)
const dstStat = fs.statSync(dstpath)
if (areIdentical(srcStat, dstStat)) return
}
const relative = symlinkPathsSync(srcpath, dstpath)
srcpath = relative.toDst
type = symlinkTypeSync(relative.toCwd, type)
const dir = path.dirname(dstpath)
const exists = fs.existsSync(dir)
if (exists) return fs.symlinkSync(srcpath, dstpath, type)
mkdirsSync(dir)
return fs.symlinkSync(srcpath, dstpath, type)
}
module.exports = {
createSymlink: u(createSymlink),
createSymlinkSync
}

68
node_modules/fs-extra/lib/esm.mjs generated vendored Normal file

@@ -0,0 +1,68 @@
import _copy from './copy/index.js'
import _empty from './empty/index.js'
import _ensure from './ensure/index.js'
import _json from './json/index.js'
import _mkdirs from './mkdirs/index.js'
import _move from './move/index.js'
import _outputFile from './output-file/index.js'
import _pathExists from './path-exists/index.js'
import _remove from './remove/index.js'
// NOTE: Only exports fs-extra's functions; fs functions must be imported from "node:fs" or "node:fs/promises"
export const copy = _copy.copy
export const copySync = _copy.copySync
export const emptyDirSync = _empty.emptyDirSync
export const emptydirSync = _empty.emptydirSync
export const emptyDir = _empty.emptyDir
export const emptydir = _empty.emptydir
export const createFile = _ensure.createFile
export const createFileSync = _ensure.createFileSync
export const ensureFile = _ensure.ensureFile
export const ensureFileSync = _ensure.ensureFileSync
export const createLink = _ensure.createLink
export const createLinkSync = _ensure.createLinkSync
export const ensureLink = _ensure.ensureLink
export const ensureLinkSync = _ensure.ensureLinkSync
export const createSymlink = _ensure.createSymlink
export const createSymlinkSync = _ensure.createSymlinkSync
export const ensureSymlink = _ensure.ensureSymlink
export const ensureSymlinkSync = _ensure.ensureSymlinkSync
export const readJson = _json.readJson
export const readJSON = _json.readJSON
export const readJsonSync = _json.readJsonSync
export const readJSONSync = _json.readJSONSync
export const writeJson = _json.writeJson
export const writeJSON = _json.writeJSON
export const writeJsonSync = _json.writeJsonSync
export const writeJSONSync = _json.writeJSONSync
export const outputJson = _json.outputJson
export const outputJSON = _json.outputJSON
export const outputJsonSync = _json.outputJsonSync
export const outputJSONSync = _json.outputJSONSync
export const mkdirs = _mkdirs.mkdirs
export const mkdirsSync = _mkdirs.mkdirsSync
export const mkdirp = _mkdirs.mkdirp
export const mkdirpSync = _mkdirs.mkdirpSync
export const ensureDir = _mkdirs.ensureDir
export const ensureDirSync = _mkdirs.ensureDirSync
export const move = _move.move
export const moveSync = _move.moveSync
export const outputFile = _outputFile.outputFile
export const outputFileSync = _outputFile.outputFileSync
export const pathExists = _pathExists.pathExists
export const pathExistsSync = _pathExists.pathExistsSync
export const remove = _remove.remove
export const removeSync = _remove.removeSync
export default {
..._copy,
..._empty,
..._ensure,
..._json,
..._mkdirs,
..._move,
..._outputFile,
..._pathExists,
..._remove
}

146
node_modules/fs-extra/lib/fs/index.js generated vendored Normal file

@@ -0,0 +1,146 @@
'use strict'
// This is adapted from https://github.com/normalize/mz
// Copyright (c) 2014-2016 Jonathan Ong me@jongleberry.com and Contributors
const u = require('universalify').fromCallback
const fs = require('graceful-fs')
const api = [
'access',
'appendFile',
'chmod',
'chown',
'close',
'copyFile',
'cp',
'fchmod',
'fchown',
'fdatasync',
'fstat',
'fsync',
'ftruncate',
'futimes',
'glob',
'lchmod',
'lchown',
'lutimes',
'link',
'lstat',
'mkdir',
'mkdtemp',
'open',
'opendir',
'readdir',
'readFile',
'readlink',
'realpath',
'rename',
'rm',
'rmdir',
'stat',
'statfs',
'symlink',
'truncate',
'unlink',
'utimes',
'writeFile'
].filter(key => {
// Some commands are not available on some systems. Ex:
// fs.cp was added in Node.js v16.7.0
// fs.statfs was added in Node v19.6.0, v18.15.0
// fs.glob was added in Node.js v22.0.0
// fs.lchown is not available on at least some Linux
return typeof fs[key] === 'function'
})
// Export cloned fs:
Object.assign(exports, fs)
// Universalify async methods:
api.forEach(method => {
exports[method] = u(fs[method])
})
// We differ from mz/fs in that we still ship the old, broken, fs.exists()
// since we are a drop-in replacement for the native module
exports.exists = function (filename, callback) {
if (typeof callback === 'function') {
return fs.exists(filename, callback)
}
return new Promise(resolve => {
return fs.exists(filename, resolve)
})
}
// fs.read(), fs.write(), fs.readv(), & fs.writev() need special treatment due to multiple callback args
exports.read = function (fd, buffer, offset, length, position, callback) {
if (typeof callback === 'function') {
return fs.read(fd, buffer, offset, length, position, callback)
}
return new Promise((resolve, reject) => {
fs.read(fd, buffer, offset, length, position, (err, bytesRead, buffer) => {
if (err) return reject(err)
resolve({ bytesRead, buffer })
})
})
}
// Function signature can be
// fs.write(fd, buffer[, offset[, length[, position]]], callback)
// OR
// fs.write(fd, string[, position[, encoding]], callback)
// We need to handle both cases, so we use ...args
exports.write = function (fd, buffer, ...args) {
if (typeof args[args.length - 1] === 'function') {
return fs.write(fd, buffer, ...args)
}
return new Promise((resolve, reject) => {
fs.write(fd, buffer, ...args, (err, bytesWritten, buffer) => {
if (err) return reject(err)
resolve({ bytesWritten, buffer })
})
})
}
// Function signature is
// fs.readv(fd, buffers[, position], callback)
// We need to handle the optional arg, so we use ...args
exports.readv = function (fd, buffers, ...args) {
if (typeof args[args.length - 1] === 'function') {
return fs.readv(fd, buffers, ...args)
}
return new Promise((resolve, reject) => {
fs.readv(fd, buffers, ...args, (err, bytesRead, buffers) => {
if (err) return reject(err)
resolve({ bytesRead, buffers })
})
})
}
// Function signature is
// fs.writev(fd, buffers[, position], callback)
// We need to handle the optional arg, so we use ...args
exports.writev = function (fd, buffers, ...args) {
if (typeof args[args.length - 1] === 'function') {
return fs.writev(fd, buffers, ...args)
}
return new Promise((resolve, reject) => {
fs.writev(fd, buffers, ...args, (err, bytesWritten, buffers) => {
if (err) return reject(err)
resolve({ bytesWritten, buffers })
})
})
}
// fs.realpath.native sometimes not available if fs is monkey-patched
if (typeof fs.realpath.native === 'function') {
exports.realpath.native = u(fs.realpath.native)
} else {
process.emitWarning(
'fs.realpath.native is not a function. Is fs being monkey-patched?',
'Warning', 'fs-extra-WARN0003'
)
}

16
node_modules/fs-extra/lib/index.js generated vendored Normal file

@@ -0,0 +1,16 @@
'use strict'
module.exports = {
// Export promiseified graceful-fs:
...require('./fs'),
// Export extra methods:
...require('./copy'),
...require('./empty'),
...require('./ensure'),
...require('./json'),
...require('./mkdirs'),
...require('./move'),
...require('./output-file'),
...require('./path-exists'),
...require('./remove')
}

16
node_modules/fs-extra/lib/json/index.js generated vendored Normal file

@@ -0,0 +1,16 @@
'use strict'
const u = require('universalify').fromPromise
const jsonFile = require('./jsonfile')
jsonFile.outputJson = u(require('./output-json'))
jsonFile.outputJsonSync = require('./output-json-sync')
// aliases
jsonFile.outputJSON = jsonFile.outputJson
jsonFile.outputJSONSync = jsonFile.outputJsonSync
jsonFile.writeJSON = jsonFile.writeJson
jsonFile.writeJSONSync = jsonFile.writeJsonSync
jsonFile.readJSON = jsonFile.readJson
jsonFile.readJSONSync = jsonFile.readJsonSync
module.exports = jsonFile

11
node_modules/fs-extra/lib/json/jsonfile.js generated vendored Normal file

@@ -0,0 +1,11 @@
'use strict'
const jsonFile = require('jsonfile')
module.exports = {
// jsonfile exports
readJson: jsonFile.readFile,
readJsonSync: jsonFile.readFileSync,
writeJson: jsonFile.writeFile,
writeJsonSync: jsonFile.writeFileSync
}

12
node_modules/fs-extra/lib/json/output-json-sync.js generated vendored Normal file

@@ -0,0 +1,12 @@
'use strict'
const { stringify } = require('jsonfile/utils')
const { outputFileSync } = require('../output-file')
function outputJsonSync (file, data, options) {
const str = stringify(data, options)
outputFileSync(file, str, options)
}
module.exports = outputJsonSync

12
node_modules/fs-extra/lib/json/output-json.js generated vendored Normal file

@@ -0,0 +1,12 @@
'use strict'
const { stringify } = require('jsonfile/utils')
const { outputFile } = require('../output-file')
async function outputJson (file, data, options = {}) {
const str = stringify(data, options)
await outputFile(file, str, options)
}
module.exports = outputJson

14
node_modules/fs-extra/lib/mkdirs/index.js generated vendored Normal file

@@ -0,0 +1,14 @@
'use strict'
const u = require('universalify').fromPromise
const { makeDir: _makeDir, makeDirSync } = require('./make-dir')
const makeDir = u(_makeDir)
module.exports = {
mkdirs: makeDir,
mkdirsSync: makeDirSync,
// alias
mkdirp: makeDir,
mkdirpSync: makeDirSync,
ensureDir: makeDir,
ensureDirSync: makeDirSync
}

27
node_modules/fs-extra/lib/mkdirs/make-dir.js generated vendored Normal file

@@ -0,0 +1,27 @@
'use strict'
const fs = require('../fs')
const { checkPath } = require('./utils')
const getMode = options => {
const defaults = { mode: 0o777 }
if (typeof options === 'number') return options
return ({ ...defaults, ...options }).mode
}
module.exports.makeDir = async (dir, options) => {
checkPath(dir)
return fs.mkdir(dir, {
mode: getMode(options),
recursive: true
})
}
module.exports.makeDirSync = (dir, options) => {
checkPath(dir)
return fs.mkdirSync(dir, {
mode: getMode(options),
recursive: true
})
}

21
node_modules/fs-extra/lib/mkdirs/utils.js generated vendored Normal file

@@ -0,0 +1,21 @@
// Adapted from https://github.com/sindresorhus/make-dir
// Copyright (c) Sindre Sorhus <sindresorhus@gmail.com> (sindresorhus.com)
// Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
// The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
'use strict'
const path = require('path')
// https://github.com/nodejs/node/issues/8987
// https://github.com/libuv/libuv/pull/1088
module.exports.checkPath = function checkPath (pth) {
if (process.platform === 'win32') {
const pathHasInvalidWinCharacters = /[<>:"|?*]/.test(pth.replace(path.parse(pth).root, ''))
if (pathHasInvalidWinCharacters) {
const error = new Error(`Path contains invalid characters: ${pth}`)
error.code = 'EINVAL'
throw error
}
}
}

7
node_modules/fs-extra/lib/move/index.js generated vendored Normal file

@@ -0,0 +1,7 @@
'use strict'
const u = require('universalify').fromPromise
module.exports = {
move: u(require('./move')),
moveSync: require('./move-sync')
}

55
node_modules/fs-extra/lib/move/move-sync.js generated vendored Normal file

@@ -0,0 +1,55 @@
'use strict'
const fs = require('graceful-fs')
const path = require('path')
const copySync = require('../copy').copySync
const removeSync = require('../remove').removeSync
const mkdirpSync = require('../mkdirs').mkdirpSync
const stat = require('../util/stat')
function moveSync (src, dest, opts) {
opts = opts || {}
const overwrite = opts.overwrite || opts.clobber || false
const { srcStat, isChangingCase = false } = stat.checkPathsSync(src, dest, 'move', opts)
stat.checkParentPathsSync(src, srcStat, dest, 'move')
if (!isParentRoot(dest)) mkdirpSync(path.dirname(dest))
return doRename(src, dest, overwrite, isChangingCase)
}
function isParentRoot (dest) {
const parent = path.dirname(dest)
const parsedPath = path.parse(parent)
return parsedPath.root === parent
}
function doRename (src, dest, overwrite, isChangingCase) {
if (isChangingCase) return rename(src, dest, overwrite)
if (overwrite) {
removeSync(dest)
return rename(src, dest, overwrite)
}
if (fs.existsSync(dest)) throw new Error('dest already exists.')
return rename(src, dest, overwrite)
}
function rename (src, dest, overwrite) {
try {
fs.renameSync(src, dest)
} catch (err) {
if (err.code !== 'EXDEV') throw err
return moveAcrossDevice(src, dest, overwrite)
}
}
function moveAcrossDevice (src, dest, overwrite) {
const opts = {
overwrite,
errorOnExist: true,
preserveTimestamps: true
}
copySync(src, dest, opts)
return removeSync(src)
}
module.exports = moveSync

59
node_modules/fs-extra/lib/move/move.js generated vendored Normal file

@@ -0,0 +1,59 @@
'use strict'
const fs = require('../fs')
const path = require('path')
const { copy } = require('../copy')
const { remove } = require('../remove')
const { mkdirp } = require('../mkdirs')
const { pathExists } = require('../path-exists')
const stat = require('../util/stat')
async function move (src, dest, opts = {}) {
const overwrite = opts.overwrite || opts.clobber || false
const { srcStat, isChangingCase = false } = await stat.checkPaths(src, dest, 'move', opts)
await stat.checkParentPaths(src, srcStat, dest, 'move')
// If the parent of dest is not root, make sure it exists before proceeding
const destParent = path.dirname(dest)
const parsedParentPath = path.parse(destParent)
if (parsedParentPath.root !== destParent) {
await mkdirp(destParent)
}
return doRename(src, dest, overwrite, isChangingCase)
}
async function doRename (src, dest, overwrite, isChangingCase) {
if (!isChangingCase) {
if (overwrite) {
await remove(dest)
} else if (await pathExists(dest)) {
throw new Error('dest already exists.')
}
}
try {
// Try w/ rename first, and try copy + remove if EXDEV
await fs.rename(src, dest)
} catch (err) {
if (err.code !== 'EXDEV') {
throw err
}
await moveAcrossDevice(src, dest, overwrite)
}
}
async function moveAcrossDevice (src, dest, overwrite) {
const opts = {
overwrite,
errorOnExist: true,
preserveTimestamps: true
}
await copy(src, dest, opts)
return remove(src)
}
module.exports = move

31
node_modules/fs-extra/lib/output-file/index.js generated vendored Normal file

@@ -0,0 +1,31 @@
'use strict'
const u = require('universalify').fromPromise
const fs = require('../fs')
const path = require('path')
const mkdir = require('../mkdirs')
const pathExists = require('../path-exists').pathExists
async function outputFile (file, data, encoding = 'utf-8') {
const dir = path.dirname(file)
if (!(await pathExists(dir))) {
await mkdir.mkdirs(dir)
}
return fs.writeFile(file, data, encoding)
}
function outputFileSync (file, ...args) {
const dir = path.dirname(file)
if (!fs.existsSync(dir)) {
mkdir.mkdirsSync(dir)
}
fs.writeFileSync(file, ...args)
}
module.exports = {
outputFile: u(outputFile),
outputFileSync
}

12
node_modules/fs-extra/lib/path-exists/index.js generated vendored Normal file

@@ -0,0 +1,12 @@
'use strict'
const u = require('universalify').fromPromise
const fs = require('../fs')
function pathExists (path) {
return fs.access(path).then(() => true).catch(() => false)
}
module.exports = {
pathExists: u(pathExists),
pathExistsSync: fs.existsSync
}

17
node_modules/fs-extra/lib/remove/index.js generated vendored Normal file

@@ -0,0 +1,17 @@
'use strict'
const fs = require('graceful-fs')
const u = require('universalify').fromCallback
function remove (path, callback) {
fs.rm(path, { recursive: true, force: true }, callback)
}
function removeSync (path) {
fs.rmSync(path, { recursive: true, force: true })
}
module.exports = {
remove: u(remove),
removeSync
}

158
node_modules/fs-extra/lib/util/stat.js generated vendored Normal file

@@ -0,0 +1,158 @@
'use strict'
const fs = require('../fs')
const path = require('path')
const u = require('universalify').fromPromise
function getStats (src, dest, opts) {
const statFunc = opts.dereference
? (file) => fs.stat(file, { bigint: true })
: (file) => fs.lstat(file, { bigint: true })
return Promise.all([
statFunc(src),
statFunc(dest).catch(err => {
if (err.code === 'ENOENT') return null
throw err
})
]).then(([srcStat, destStat]) => ({ srcStat, destStat }))
}
function getStatsSync (src, dest, opts) {
let destStat
const statFunc = opts.dereference
? (file) => fs.statSync(file, { bigint: true })
: (file) => fs.lstatSync(file, { bigint: true })
const srcStat = statFunc(src)
try {
destStat = statFunc(dest)
} catch (err) {
if (err.code === 'ENOENT') return { srcStat, destStat: null }
throw err
}
return { srcStat, destStat }
}
async function checkPaths (src, dest, funcName, opts) {
const { srcStat, destStat } = await getStats(src, dest, opts)
if (destStat) {
if (areIdentical(srcStat, destStat)) {
const srcBaseName = path.basename(src)
const destBaseName = path.basename(dest)
if (funcName === 'move' &&
srcBaseName !== destBaseName &&
srcBaseName.toLowerCase() === destBaseName.toLowerCase()) {
return { srcStat, destStat, isChangingCase: true }
}
throw new Error('Source and destination must not be the same.')
}
if (srcStat.isDirectory() && !destStat.isDirectory()) {
throw new Error(`Cannot overwrite non-directory '${dest}' with directory '${src}'.`)
}
if (!srcStat.isDirectory() && destStat.isDirectory()) {
throw new Error(`Cannot overwrite directory '${dest}' with non-directory '${src}'.`)
}
}
if (srcStat.isDirectory() && isSrcSubdir(src, dest)) {
throw new Error(errMsg(src, dest, funcName))
}
return { srcStat, destStat }
}
function checkPathsSync (src, dest, funcName, opts) {
const { srcStat, destStat } = getStatsSync(src, dest, opts)
if (destStat) {
if (areIdentical(srcStat, destStat)) {
const srcBaseName = path.basename(src)
const destBaseName = path.basename(dest)
if (funcName === 'move' &&
srcBaseName !== destBaseName &&
srcBaseName.toLowerCase() === destBaseName.toLowerCase()) {
return { srcStat, destStat, isChangingCase: true }
}
throw new Error('Source and destination must not be the same.')
}
if (srcStat.isDirectory() && !destStat.isDirectory()) {
throw new Error(`Cannot overwrite non-directory '${dest}' with directory '${src}'.`)
}
if (!srcStat.isDirectory() && destStat.isDirectory()) {
throw new Error(`Cannot overwrite directory '${dest}' with non-directory '${src}'.`)
}
}
if (srcStat.isDirectory() && isSrcSubdir(src, dest)) {
throw new Error(errMsg(src, dest, funcName))
}
return { srcStat, destStat }
}
// recursively check if dest parent is a subdirectory of src.
// It works for all file types including symlinks since it
// checks the src and dest inodes. It starts from the deepest
// parent and stops once it reaches the src parent or the root path.
async function checkParentPaths (src, srcStat, dest, funcName) {
const srcParent = path.resolve(path.dirname(src))
const destParent = path.resolve(path.dirname(dest))
if (destParent === srcParent || destParent === path.parse(destParent).root) return
let destStat
try {
destStat = await fs.stat(destParent, { bigint: true })
} catch (err) {
if (err.code === 'ENOENT') return
throw err
}
if (areIdentical(srcStat, destStat)) {
throw new Error(errMsg(src, dest, funcName))
}
return checkParentPaths(src, srcStat, destParent, funcName)
}
function checkParentPathsSync (src, srcStat, dest, funcName) {
const srcParent = path.resolve(path.dirname(src))
const destParent = path.resolve(path.dirname(dest))
if (destParent === srcParent || destParent === path.parse(destParent).root) return
let destStat
try {
destStat = fs.statSync(destParent, { bigint: true })
} catch (err) {
if (err.code === 'ENOENT') return
throw err
}
if (areIdentical(srcStat, destStat)) {
throw new Error(errMsg(src, dest, funcName))
}
return checkParentPathsSync(src, srcStat, destParent, funcName)
}
function areIdentical (srcStat, destStat) {
return destStat.ino && destStat.dev && destStat.ino === srcStat.ino && destStat.dev === srcStat.dev
}
// return true if dest is a subdir of src, otherwise false.
// It only checks the path strings.
function isSrcSubdir (src, dest) {
const srcArr = path.resolve(src).split(path.sep).filter(i => i)
const destArr = path.resolve(dest).split(path.sep).filter(i => i)
return srcArr.every((cur, i) => destArr[i] === cur)
}
function errMsg (src, dest, funcName) {
return `Cannot ${funcName} '${src}' to a subdirectory of itself, '${dest}'.`
}
module.exports = {
// checkPaths
checkPaths: u(checkPaths),
checkPathsSync,
// checkParent
checkParentPaths: u(checkParentPaths),
checkParentPathsSync,
// Misc
isSrcSubdir,
areIdentical
}

36
node_modules/fs-extra/lib/util/utimes.js generated vendored Normal file
View File

@@ -0,0 +1,36 @@
'use strict'
const fs = require('../fs')
const u = require('universalify').fromPromise
async function utimesMillis (path, atime, mtime) {
// if (!HAS_MILLIS_RES) return fs.utimes(path, atime, mtime, callback)
const fd = await fs.open(path, 'r+')
let closeErr = null
try {
await fs.futimes(fd, atime, mtime)
} finally {
try {
await fs.close(fd)
} catch (e) {
closeErr = e
}
}
if (closeErr) {
throw closeErr
}
}
function utimesMillisSync (path, atime, mtime) {
const fd = fs.openSync(path, 'r+')
fs.futimesSync(fd, atime, mtime)
return fs.closeSync(fd)
}
module.exports = {
utimesMillis: u(utimesMillis),
utimesMillisSync
}

71
node_modules/fs-extra/package.json generated vendored Normal file

@@ -0,0 +1,71 @@
{
"name": "fs-extra",
"version": "11.3.0",
"description": "fs-extra contains methods that aren't included in the vanilla Node.js fs package. Such as recursive mkdir, copy, and remove.",
"engines": {
"node": ">=14.14"
},
"homepage": "https://github.com/jprichardson/node-fs-extra",
"repository": {
"type": "git",
"url": "https://github.com/jprichardson/node-fs-extra"
},
"keywords": [
"fs",
"file",
"file system",
"copy",
"directory",
"extra",
"mkdirp",
"mkdir",
"mkdirs",
"recursive",
"json",
"read",
"write",
"extra",
"delete",
"remove",
"touch",
"create",
"text",
"output",
"move",
"promise"
],
"author": "JP Richardson <jprichardson@gmail.com>",
"license": "MIT",
"dependencies": {
"graceful-fs": "^4.2.0",
"jsonfile": "^6.0.1",
"universalify": "^2.0.0"
},
"devDependencies": {
"klaw": "^2.1.1",
"klaw-sync": "^3.0.2",
"minimist": "^1.1.1",
"mocha": "^10.1.0",
"nyc": "^15.0.0",
"proxyquire": "^2.0.1",
"read-dir-files": "^0.1.1",
"standard": "^17.0.0"
},
"main": "./lib/index.js",
"exports": {
".": "./lib/index.js",
"./esm": "./lib/esm.mjs"
},
"files": [
"lib/",
"!lib/**/__tests__/"
],
"scripts": {
"lint": "standard",
"test-find": "find ./lib/**/__tests__ -name *.test.js | xargs mocha",
"test": "npm run lint && npm run unit && npm run unit-esm",
"unit": "nyc node test.js",
"unit-esm": "node test.mjs"
},
"sideEffects": false
}

15
node_modules/graceful-fs/LICENSE generated vendored Normal file
View File

@@ -0,0 +1,15 @@
The ISC License
Copyright (c) 2011-2022 Isaac Z. Schlueter, Ben Noordhuis, and Contributors
Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.

143
node_modules/graceful-fs/README.md generated vendored Normal file
View File

@@ -0,0 +1,143 @@
# graceful-fs
graceful-fs functions as a drop-in replacement for the fs module,
making various improvements.
The improvements are meant to normalize behavior across different
platforms and environments, and to make filesystem access more
resilient to errors.
## Improvements over [fs module](https://nodejs.org/api/fs.html)
* Queues up `open` and `readdir` calls, and retries them once
something closes if there is an EMFILE error from too many file
descriptors.
* fixes `lchmod` for Node versions prior to 0.6.2.
* implements `fs.lutimes` if possible. Otherwise it becomes a noop.
* ignores `EINVAL` and `EPERM` errors in `chown`, `fchown` or
`lchown` if the user isn't root.
* makes `lchmod` and `lchown` become noops, if not available.
* retries reading a file if `read` results in EAGAIN error.
* On Windows, it retries renaming a file for up to one second if an `EACCES`
or `EPERM` error occurs, likely because antivirus software has locked
the directory.
## USAGE
```javascript
// use just like fs
var fs = require('graceful-fs')
// now go and do stuff with it...
fs.readFile('some-file-or-whatever', (err, data) => {
// Do stuff here.
})
```
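For instance (an illustrative sketch, not part of the module's documentation), opening far more file descriptors than the process limit allows simply queues the excess opens instead of failing with `EMFILE`, provided descriptors are eventually closed:

```javascript
var fs = require('graceful-fs')

// Open many more descriptors than a typical ulimit allows. With plain `fs`
// some of these opens would fail with EMFILE; graceful-fs queues them and
// retries once earlier descriptors are closed.
for (var i = 0; i < 10000; i++) {
  fs.open(__filename, 'r', function (err, fd) {
    if (err) throw err
    // pretend to do some work, then release the descriptor so queued opens proceed
    setTimeout(function () { fs.close(fd, function () {}) }, 100)
  })
}
```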
## Sync methods
This module cannot intercept or handle `EMFILE` or `ENFILE` errors from sync
methods. If you use sync methods which open file descriptors then you are
responsible for dealing with any errors.
This is a known limitation, not a bug.
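A minimal sketch of how a caller might handle this on their own (nothing below is provided by graceful-fs; the retry strategy and the `openSyncWithRetry` helper are purely illustrative):

```javascript
var fs = require('graceful-fs')

// Naive caller-side handling: retry a sync open a few times if the process
// has temporarily run out of file descriptors.
function openSyncWithRetry (path, flags, attempts) {
  attempts = attempts || 5
  for (var i = 0; i < attempts; i++) {
    try {
      return fs.openSync(path, flags)
    } catch (er) {
      if ((er.code !== 'EMFILE' && er.code !== 'ENFILE') || i === attempts - 1) throw er
      // busy-wait briefly; sync code cannot yield to the event loop
      var end = Date.now() + 10
      while (Date.now() < end) {}
    }
  }
}
```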
## Global Patching
If you want to patch the global fs module (or any other fs-like
module) you can do this:
```javascript
// Make sure to read the caveat below.
var realFs = require('fs')
var gracefulFs = require('graceful-fs')
gracefulFs.gracefulify(realFs)
```
This should only ever be done at the top-level application layer, in
order to delay on EMFILE errors from any fs-using dependencies. You
should **not** do this in a library, because it can cause unexpected
delays in other parts of the program.
## Changes
This module is fairly stable at this point, and used by a lot of
things. That being said, because it implements a subtle behavior
change in a core part of the node API, even modest changes can be
extremely breaking, and the versioning is thus biased towards
bumping the major when in doubt.
The main change between major versions has been switching between
providing a fully-patched `fs` module vs monkey-patching the node core
builtin, and the approach by which a non-monkey-patched `fs` was
created.
The goal is to trade `EMFILE` errors for slower fs operations. So, if
you try to open a zillion files, rather than crashing, `open`
operations will be queued up and wait for something else to `close`.
There are advantages to each approach. Monkey-patching the fs means
that no `EMFILE` errors can possibly occur anywhere in your
application, because everything is using the same core `fs` module,
which is patched. However, it can also obviously cause undesirable
side-effects, especially if the module is loaded multiple times.
Implementing a separate-but-identical patched `fs` module is more
surgical (and doesn't run the risk of patching multiple times), but
also imposes the challenge of keeping in sync with the core module.
The current approach loads the `fs` module, and then creates a
lookalike object that has all the same methods, except a few that are
patched. It is safe to use in all versions of Node from 0.8 through
7.0.
### v4
* Do not monkey-patch the fs module. This module may now be used as a
drop-in dep, and users can opt into monkey-patching the fs builtin
if their app requires it.
### v3
* Monkey-patch fs, because the eval approach no longer works on recent
node.
* fixed possible type-error throw if rename fails on windows
* verify that we *never* get EMFILE errors
* Ignore ENOSYS from chmod/chown
* clarify that graceful-fs must be used as a drop-in
### v2.1.0
* Use eval rather than monkey-patching fs.
* readdir: Always sort the results
* win32: requeue a file if error has an OK status
### v2.0
* A return to monkey patching
* wrap process.cwd
### v1.1
* wrap readFile
* Wrap fs.writeFile.
* readdir protection
* Don't clobber the fs builtin
* Handle fs.read EAGAIN errors by trying again
* Expose the curOpen counter
* No-op lchown/lchmod if not implemented
* fs.rename patch only for win32
* Patch fs.rename to handle AV software on Windows
* Close #4 Chown should not fail on einval or eperm if non-root
* Fix isaacs/fstream#1 Only wrap fs one time
* Fix #3 Start at 1024 max files, then back off on EMFILE
* lutimes that doesn't blow up on Linux
* A full on-rewrite using a queue instead of just swallowing the EMFILE error
* Wrap Read/Write streams as well
### 1.0
* Update engines for node 0.6
* Be lstat-graceful on Windows
* first

23
node_modules/graceful-fs/clone.js generated vendored Normal file
View File

@@ -0,0 +1,23 @@
'use strict'
module.exports = clone
var getPrototypeOf = Object.getPrototypeOf || function (obj) {
return obj.__proto__
}
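// Shallow-clone obj: preserve its prototype and copy every own property
// descriptor (including non-enumerable ones) onto the copy.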
function clone (obj) {
if (obj === null || typeof obj !== 'object')
return obj
if (obj instanceof Object)
var copy = { __proto__: getPrototypeOf(obj) }
else
var copy = Object.create(null)
Object.getOwnPropertyNames(obj).forEach(function (key) {
Object.defineProperty(copy, key, Object.getOwnPropertyDescriptor(obj, key))
})
return copy
}

448
node_modules/graceful-fs/graceful-fs.js generated vendored Normal file
View File

@@ -0,0 +1,448 @@
var fs = require('fs')
var polyfills = require('./polyfills.js')
var legacy = require('./legacy-streams.js')
var clone = require('./clone.js')
var util = require('util')
/* istanbul ignore next - node 0.x polyfill */
var gracefulQueue
var previousSymbol
/* istanbul ignore else - node 0.x polyfill */
if (typeof Symbol === 'function' && typeof Symbol.for === 'function') {
gracefulQueue = Symbol.for('graceful-fs.queue')
// This is used in testing by future versions
previousSymbol = Symbol.for('graceful-fs.previous')
} else {
gracefulQueue = '___graceful-fs.queue'
previousSymbol = '___graceful-fs.previous'
}
function noop () {}
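// Expose the shared retry queue on the given object (fs or global) behind a
// getter, so every loaded copy of graceful-fs sees the same queue.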
function publishQueue(context, queue) {
Object.defineProperty(context, gracefulQueue, {
get: function() {
return queue
}
})
}
var debug = noop
if (util.debuglog)
debug = util.debuglog('gfs4')
else if (/\bgfs4\b/i.test(process.env.NODE_DEBUG || ''))
debug = function() {
var m = util.format.apply(util, arguments)
m = 'GFS4: ' + m.split(/\n/).join('\nGFS4: ')
console.error(m)
}
// Once time initialization
if (!fs[gracefulQueue]) {
// This queue can be shared by multiple loaded instances
var queue = global[gracefulQueue] || []
publishQueue(fs, queue)
// Patch fs.close/closeSync to shared queue version, because we need
// to retry() whenever a close happens *anywhere* in the program.
// This is essential when multiple graceful-fs instances are
// in play at the same time.
fs.close = (function (fs$close) {
function close (fd, cb) {
return fs$close.call(fs, fd, function (err) {
// This function uses the graceful-fs shared queue
if (!err) {
resetQueue()
}
if (typeof cb === 'function')
cb.apply(this, arguments)
})
}
Object.defineProperty(close, previousSymbol, {
value: fs$close
})
return close
})(fs.close)
fs.closeSync = (function (fs$closeSync) {
function closeSync (fd) {
// This function uses the graceful-fs shared queue
fs$closeSync.apply(fs, arguments)
resetQueue()
}
Object.defineProperty(closeSync, previousSymbol, {
value: fs$closeSync
})
return closeSync
})(fs.closeSync)
if (/\bgfs4\b/i.test(process.env.NODE_DEBUG || '')) {
process.on('exit', function() {
debug(fs[gracefulQueue])
require('assert').equal(fs[gracefulQueue].length, 0)
})
}
}
if (!global[gracefulQueue]) {
publishQueue(global, fs[gracefulQueue]);
}
module.exports = patch(clone(fs))
if (process.env.TEST_GRACEFUL_FS_GLOBAL_PATCH && !fs.__patched) {
module.exports = patch(fs)
fs.__patched = true;
}
function patch (fs) {
// Everything that references the open() function needs to be in here
polyfills(fs)
fs.gracefulify = patch
fs.createReadStream = createReadStream
fs.createWriteStream = createWriteStream
var fs$readFile = fs.readFile
fs.readFile = readFile
function readFile (path, options, cb) {
if (typeof options === 'function')
cb = options, options = null
return go$readFile(path, options, cb)
function go$readFile (path, options, cb, startTime) {
return fs$readFile(path, options, function (err) {
if (err && (err.code === 'EMFILE' || err.code === 'ENFILE'))
enqueue([go$readFile, [path, options, cb], err, startTime || Date.now(), Date.now()])
else {
if (typeof cb === 'function')
cb.apply(this, arguments)
}
})
}
}
var fs$writeFile = fs.writeFile
fs.writeFile = writeFile
function writeFile (path, data, options, cb) {
if (typeof options === 'function')
cb = options, options = null
return go$writeFile(path, data, options, cb)
function go$writeFile (path, data, options, cb, startTime) {
return fs$writeFile(path, data, options, function (err) {
if (err && (err.code === 'EMFILE' || err.code === 'ENFILE'))
enqueue([go$writeFile, [path, data, options, cb], err, startTime || Date.now(), Date.now()])
else {
if (typeof cb === 'function')
cb.apply(this, arguments)
}
})
}
}
var fs$appendFile = fs.appendFile
if (fs$appendFile)
fs.appendFile = appendFile
function appendFile (path, data, options, cb) {
if (typeof options === 'function')
cb = options, options = null
return go$appendFile(path, data, options, cb)
function go$appendFile (path, data, options, cb, startTime) {
return fs$appendFile(path, data, options, function (err) {
if (err && (err.code === 'EMFILE' || err.code === 'ENFILE'))
enqueue([go$appendFile, [path, data, options, cb], err, startTime || Date.now(), Date.now()])
else {
if (typeof cb === 'function')
cb.apply(this, arguments)
}
})
}
}
var fs$copyFile = fs.copyFile
if (fs$copyFile)
fs.copyFile = copyFile
function copyFile (src, dest, flags, cb) {
if (typeof flags === 'function') {
cb = flags
flags = 0
}
return go$copyFile(src, dest, flags, cb)
function go$copyFile (src, dest, flags, cb, startTime) {
return fs$copyFile(src, dest, flags, function (err) {
if (err && (err.code === 'EMFILE' || err.code === 'ENFILE'))
enqueue([go$copyFile, [src, dest, flags, cb], err, startTime || Date.now(), Date.now()])
else {
if (typeof cb === 'function')
cb.apply(this, arguments)
}
})
}
}
var fs$readdir = fs.readdir
fs.readdir = readdir
var noReaddirOptionVersions = /^v[0-5]\./
function readdir (path, options, cb) {
if (typeof options === 'function')
cb = options, options = null
var go$readdir = noReaddirOptionVersions.test(process.version)
? function go$readdir (path, options, cb, startTime) {
return fs$readdir(path, fs$readdirCallback(
path, options, cb, startTime
))
}
: function go$readdir (path, options, cb, startTime) {
return fs$readdir(path, options, fs$readdirCallback(
path, options, cb, startTime
))
}
return go$readdir(path, options, cb)
function fs$readdirCallback (path, options, cb, startTime) {
return function (err, files) {
if (err && (err.code === 'EMFILE' || err.code === 'ENFILE'))
enqueue([
go$readdir,
[path, options, cb],
err,
startTime || Date.now(),
Date.now()
])
else {
if (files && files.sort)
files.sort()
if (typeof cb === 'function')
cb.call(this, err, files)
}
}
}
}
if (process.version.substr(0, 4) === 'v0.8') {
var legStreams = legacy(fs)
ReadStream = legStreams.ReadStream
WriteStream = legStreams.WriteStream
}
var fs$ReadStream = fs.ReadStream
if (fs$ReadStream) {
ReadStream.prototype = Object.create(fs$ReadStream.prototype)
ReadStream.prototype.open = ReadStream$open
}
var fs$WriteStream = fs.WriteStream
if (fs$WriteStream) {
WriteStream.prototype = Object.create(fs$WriteStream.prototype)
WriteStream.prototype.open = WriteStream$open
}
Object.defineProperty(fs, 'ReadStream', {
get: function () {
return ReadStream
},
set: function (val) {
ReadStream = val
},
enumerable: true,
configurable: true
})
Object.defineProperty(fs, 'WriteStream', {
get: function () {
return WriteStream
},
set: function (val) {
WriteStream = val
},
enumerable: true,
configurable: true
})
// legacy names
var FileReadStream = ReadStream
Object.defineProperty(fs, 'FileReadStream', {
get: function () {
return FileReadStream
},
set: function (val) {
FileReadStream = val
},
enumerable: true,
configurable: true
})
var FileWriteStream = WriteStream
Object.defineProperty(fs, 'FileWriteStream', {
get: function () {
return FileWriteStream
},
set: function (val) {
FileWriteStream = val
},
enumerable: true,
configurable: true
})
function ReadStream (path, options) {
if (this instanceof ReadStream)
return fs$ReadStream.apply(this, arguments), this
else
return ReadStream.apply(Object.create(ReadStream.prototype), arguments)
}
function ReadStream$open () {
var that = this
open(that.path, that.flags, that.mode, function (err, fd) {
if (err) {
if (that.autoClose)
that.destroy()
that.emit('error', err)
} else {
that.fd = fd
that.emit('open', fd)
that.read()
}
})
}
function WriteStream (path, options) {
if (this instanceof WriteStream)
return fs$WriteStream.apply(this, arguments), this
else
return WriteStream.apply(Object.create(WriteStream.prototype), arguments)
}
function WriteStream$open () {
var that = this
open(that.path, that.flags, that.mode, function (err, fd) {
if (err) {
that.destroy()
that.emit('error', err)
} else {
that.fd = fd
that.emit('open', fd)
}
})
}
function createReadStream (path, options) {
return new fs.ReadStream(path, options)
}
function createWriteStream (path, options) {
return new fs.WriteStream(path, options)
}
var fs$open = fs.open
fs.open = open
function open (path, flags, mode, cb) {
if (typeof mode === 'function')
cb = mode, mode = null
return go$open(path, flags, mode, cb)
function go$open (path, flags, mode, cb, startTime) {
return fs$open(path, flags, mode, function (err, fd) {
if (err && (err.code === 'EMFILE' || err.code === 'ENFILE'))
enqueue([go$open, [path, flags, mode, cb], err, startTime || Date.now(), Date.now()])
else {
if (typeof cb === 'function')
cb.apply(this, arguments)
}
})
}
}
return fs
}
function enqueue (elem) {
debug('ENQUEUE', elem[0].name, elem[1])
fs[gracefulQueue].push(elem)
retry()
}
// keep track of the timeout between retry() calls
var retryTimer
// reset the startTime and lastTime to now
// this resets the start of the 60 second overall timeout as well as the
// delay between attempts so that we'll retry these jobs sooner
function resetQueue () {
var now = Date.now()
for (var i = 0; i < fs[gracefulQueue].length; ++i) {
// entries that are only a length of 2 are from an older version, don't
// bother modifying those since they'll be retried anyway.
if (fs[gracefulQueue][i].length > 2) {
fs[gracefulQueue][i][3] = now // startTime
fs[gracefulQueue][i][4] = now // lastTime
}
}
// call retry to make sure we're actively processing the queue
retry()
}
function retry () {
// clear the timer and remove it to help prevent unintended concurrency
clearTimeout(retryTimer)
retryTimer = undefined
if (fs[gracefulQueue].length === 0)
return
var elem = fs[gracefulQueue].shift()
var fn = elem[0]
var args = elem[1]
// these items may be unset if they were added by an older graceful-fs
var err = elem[2]
var startTime = elem[3]
var lastTime = elem[4]
// if we don't have a startTime we have no way of knowing if we've waited
// long enough, so go ahead and retry this item now
if (startTime === undefined) {
debug('RETRY', fn.name, args)
fn.apply(null, args)
} else if (Date.now() - startTime >= 60000) {
// it's been more than 60 seconds total, bail now
debug('TIMEOUT', fn.name, args)
var cb = args.pop()
if (typeof cb === 'function')
cb.call(null, err)
} else {
// the amount of time between the last attempt and right now
var sinceAttempt = Date.now() - lastTime
// the amount of time between when we first tried, and when we last tried
// rounded up to at least 1
var sinceStart = Math.max(lastTime - startTime, 1)
// backoff. wait longer than the total time we've been retrying, but only
// up to a maximum of 100ms
var desiredDelay = Math.min(sinceStart * 1.2, 100)
// it's been long enough since the last retry, do it again
if (sinceAttempt >= desiredDelay) {
debug('RETRY', fn.name, args)
fn.apply(null, args.concat([startTime]))
} else {
// if we can't do this job yet, push it to the end of the queue
// and let the next iteration check again
fs[gracefulQueue].push(elem)
}
}
// schedule our next run if one isn't already scheduled
if (retryTimer === undefined) {
retryTimer = setTimeout(retry, 0)
}
}

118
node_modules/graceful-fs/legacy-streams.js generated vendored Normal file
View File

@@ -0,0 +1,118 @@
var Stream = require('stream').Stream
module.exports = legacy
function legacy (fs) {
return {
ReadStream: ReadStream,
WriteStream: WriteStream
}
function ReadStream (path, options) {
if (!(this instanceof ReadStream)) return new ReadStream(path, options);
Stream.call(this);
var self = this;
this.path = path;
this.fd = null;
this.readable = true;
this.paused = false;
this.flags = 'r';
this.mode = 438; /*=0666*/
this.bufferSize = 64 * 1024;
options = options || {};
// Mixin options into this
var keys = Object.keys(options);
for (var index = 0, length = keys.length; index < length; index++) {
var key = keys[index];
this[key] = options[key];
}
if (this.encoding) this.setEncoding(this.encoding);
if (this.start !== undefined) {
if ('number' !== typeof this.start) {
throw TypeError('start must be a Number');
}
if (this.end === undefined) {
this.end = Infinity;
} else if ('number' !== typeof this.end) {
throw TypeError('end must be a Number');
}
if (this.start > this.end) {
throw new Error('start must be <= end');
}
this.pos = this.start;
}
if (this.fd !== null) {
process.nextTick(function() {
self._read();
});
return;
}
fs.open(this.path, this.flags, this.mode, function (err, fd) {
if (err) {
self.emit('error', err);
self.readable = false;
return;
}
self.fd = fd;
self.emit('open', fd);
self._read();
})
}
function WriteStream (path, options) {
if (!(this instanceof WriteStream)) return new WriteStream(path, options);
Stream.call(this);
this.path = path;
this.fd = null;
this.writable = true;
this.flags = 'w';
this.encoding = 'binary';
this.mode = 438; /*=0666*/
this.bytesWritten = 0;
options = options || {};
// Mixin options into this
var keys = Object.keys(options);
for (var index = 0, length = keys.length; index < length; index++) {
var key = keys[index];
this[key] = options[key];
}
if (this.start !== undefined) {
if ('number' !== typeof this.start) {
throw TypeError('start must be a Number');
}
if (this.start < 0) {
throw new Error('start must be >= zero');
}
this.pos = this.start;
}
this.busy = false;
this._queue = [];
if (this.fd === null) {
this._open = fs.open;
this._queue.push([this._open, this.path, this.flags, this.mode, undefined]);
this.flush();
}
}
}

53
node_modules/graceful-fs/package.json generated vendored Normal file
View File

@@ -0,0 +1,53 @@
{
"name": "graceful-fs",
"description": "A drop-in replacement for fs, making various improvements.",
"version": "4.2.11",
"repository": {
"type": "git",
"url": "https://github.com/isaacs/node-graceful-fs"
},
"main": "graceful-fs.js",
"directories": {
"test": "test"
},
"scripts": {
"preversion": "npm test",
"postversion": "npm publish",
"postpublish": "git push origin --follow-tags",
"test": "nyc --silent node test.js | tap -c -",
"posttest": "nyc report"
},
"keywords": [
"fs",
"module",
"reading",
"retry",
"retries",
"queue",
"error",
"errors",
"handling",
"EMFILE",
"EAGAIN",
"EINVAL",
"EPERM",
"EACCESS"
],
"license": "ISC",
"devDependencies": {
"import-fresh": "^2.0.0",
"mkdirp": "^0.5.0",
"rimraf": "^2.2.8",
"tap": "^16.3.4"
},
"files": [
"fs.js",
"graceful-fs.js",
"legacy-streams.js",
"polyfills.js",
"clone.js"
],
"tap": {
"reporter": "classic"
}
}

355
node_modules/graceful-fs/polyfills.js generated vendored Normal file
View File

@@ -0,0 +1,355 @@
var constants = require('constants')
var origCwd = process.cwd
var cwd = null
var platform = process.env.GRACEFUL_FS_PLATFORM || process.platform
process.cwd = function() {
if (!cwd)
cwd = origCwd.call(process)
return cwd
}
try {
process.cwd()
} catch (er) {}
// This check is needed until node.js 12 is required
if (typeof process.chdir === 'function') {
var chdir = process.chdir
process.chdir = function (d) {
cwd = null
chdir.call(process, d)
}
if (Object.setPrototypeOf) Object.setPrototypeOf(process.chdir, chdir)
}
module.exports = patch
function patch (fs) {
// (re-)implement some things that are known busted or missing.
// lchmod, broken prior to 0.6.2
// back-port the fix here.
if (constants.hasOwnProperty('O_SYMLINK') &&
process.version.match(/^v0\.6\.[0-2]|^v0\.5\./)) {
patchLchmod(fs)
}
// lutimes implementation, or no-op
if (!fs.lutimes) {
patchLutimes(fs)
}
// https://github.com/isaacs/node-graceful-fs/issues/4
// Chown should not fail on einval or eperm if non-root.
// It should not fail on enosys ever, as this just indicates
// that a fs doesn't support the intended operation.
fs.chown = chownFix(fs.chown)
fs.fchown = chownFix(fs.fchown)
fs.lchown = chownFix(fs.lchown)
fs.chmod = chmodFix(fs.chmod)
fs.fchmod = chmodFix(fs.fchmod)
fs.lchmod = chmodFix(fs.lchmod)
fs.chownSync = chownFixSync(fs.chownSync)
fs.fchownSync = chownFixSync(fs.fchownSync)
fs.lchownSync = chownFixSync(fs.lchownSync)
fs.chmodSync = chmodFixSync(fs.chmodSync)
fs.fchmodSync = chmodFixSync(fs.fchmodSync)
fs.lchmodSync = chmodFixSync(fs.lchmodSync)
fs.stat = statFix(fs.stat)
fs.fstat = statFix(fs.fstat)
fs.lstat = statFix(fs.lstat)
fs.statSync = statFixSync(fs.statSync)
fs.fstatSync = statFixSync(fs.fstatSync)
fs.lstatSync = statFixSync(fs.lstatSync)
// if lchmod/lchown do not exist, then make them no-ops
if (fs.chmod && !fs.lchmod) {
fs.lchmod = function (path, mode, cb) {
if (cb) process.nextTick(cb)
}
fs.lchmodSync = function () {}
}
if (fs.chown && !fs.lchown) {
fs.lchown = function (path, uid, gid, cb) {
if (cb) process.nextTick(cb)
}
fs.lchownSync = function () {}
}
// on Windows, A/V software can lock the directory, causing this
// to fail with an EACCES or EPERM if the directory contains newly
// created files. Try again on failure, for up to 60 seconds.
// Set the timeout this long because some Windows Anti-Virus, such as Parity
// bit9, may lock files for up to a minute, causing npm package install
// failures. Also, take care to yield the scheduler. Windows scheduling gives
// CPU to a busy looping process, which can cause the program causing the lock
// contention to be starved of CPU by node, so the contention doesn't resolve.
if (platform === "win32") {
fs.rename = typeof fs.rename !== 'function' ? fs.rename
: (function (fs$rename) {
function rename (from, to, cb) {
var start = Date.now()
var backoff = 0;
fs$rename(from, to, function CB (er) {
if (er
&& (er.code === "EACCES" || er.code === "EPERM" || er.code === "EBUSY")
&& Date.now() - start < 60000) {
setTimeout(function() {
fs.stat(to, function (stater, st) {
if (stater && stater.code === "ENOENT")
fs$rename(from, to, CB);
else
cb(er)
})
}, backoff)
if (backoff < 100)
backoff += 10;
return;
}
if (cb) cb(er)
})
}
if (Object.setPrototypeOf) Object.setPrototypeOf(rename, fs$rename)
return rename
})(fs.rename)
}
// if read() returns EAGAIN, then just try it again.
fs.read = typeof fs.read !== 'function' ? fs.read
: (function (fs$read) {
function read (fd, buffer, offset, length, position, callback_) {
var callback
if (callback_ && typeof callback_ === 'function') {
var eagCounter = 0
callback = function (er, _, __) {
if (er && er.code === 'EAGAIN' && eagCounter < 10) {
eagCounter ++
return fs$read.call(fs, fd, buffer, offset, length, position, callback)
}
callback_.apply(this, arguments)
}
}
return fs$read.call(fs, fd, buffer, offset, length, position, callback)
}
// This ensures `util.promisify` works as it does for native `fs.read`.
if (Object.setPrototypeOf) Object.setPrototypeOf(read, fs$read)
return read
})(fs.read)
fs.readSync = typeof fs.readSync !== 'function' ? fs.readSync
: (function (fs$readSync) { return function (fd, buffer, offset, length, position) {
var eagCounter = 0
while (true) {
try {
return fs$readSync.call(fs, fd, buffer, offset, length, position)
} catch (er) {
if (er.code === 'EAGAIN' && eagCounter < 10) {
eagCounter ++
continue
}
throw er
}
}
}})(fs.readSync)
function patchLchmod (fs) {
fs.lchmod = function (path, mode, callback) {
fs.open( path
, constants.O_WRONLY | constants.O_SYMLINK
, mode
, function (err, fd) {
if (err) {
if (callback) callback(err)
return
}
// prefer to return the chmod error, if one occurs,
// but still try to close, and report closing errors if they occur.
fs.fchmod(fd, mode, function (err) {
fs.close(fd, function(err2) {
if (callback) callback(err || err2)
})
})
})
}
fs.lchmodSync = function (path, mode) {
var fd = fs.openSync(path, constants.O_WRONLY | constants.O_SYMLINK, mode)
// prefer to return the chmod error, if one occurs,
// but still try to close, and report closing errors if they occur.
var threw = true
var ret
try {
ret = fs.fchmodSync(fd, mode)
threw = false
} finally {
if (threw) {
try {
fs.closeSync(fd)
} catch (er) {}
} else {
fs.closeSync(fd)
}
}
return ret
}
}
function patchLutimes (fs) {
if (constants.hasOwnProperty("O_SYMLINK") && fs.futimes) {
fs.lutimes = function (path, at, mt, cb) {
fs.open(path, constants.O_SYMLINK, function (er, fd) {
if (er) {
if (cb) cb(er)
return
}
fs.futimes(fd, at, mt, function (er) {
fs.close(fd, function (er2) {
if (cb) cb(er || er2)
})
})
})
}
fs.lutimesSync = function (path, at, mt) {
var fd = fs.openSync(path, constants.O_SYMLINK)
var ret
var threw = true
try {
ret = fs.futimesSync(fd, at, mt)
threw = false
} finally {
if (threw) {
try {
fs.closeSync(fd)
} catch (er) {}
} else {
fs.closeSync(fd)
}
}
return ret
}
} else if (fs.futimes) {
fs.lutimes = function (_a, _b, _c, cb) { if (cb) process.nextTick(cb) }
fs.lutimesSync = function () {}
}
}
function chmodFix (orig) {
if (!orig) return orig
return function (target, mode, cb) {
return orig.call(fs, target, mode, function (er) {
if (chownErOk(er)) er = null
if (cb) cb.apply(this, arguments)
})
}
}
function chmodFixSync (orig) {
if (!orig) return orig
return function (target, mode) {
try {
return orig.call(fs, target, mode)
} catch (er) {
if (!chownErOk(er)) throw er
}
}
}
function chownFix (orig) {
if (!orig) return orig
return function (target, uid, gid, cb) {
return orig.call(fs, target, uid, gid, function (er) {
if (chownErOk(er)) er = null
if (cb) cb.apply(this, arguments)
})
}
}
function chownFixSync (orig) {
if (!orig) return orig
return function (target, uid, gid) {
try {
return orig.call(fs, target, uid, gid)
} catch (er) {
if (!chownErOk(er)) throw er
}
}
}
function statFix (orig) {
if (!orig) return orig
// Older versions of Node erroneously returned signed integers for
// uid + gid.
return function (target, options, cb) {
if (typeof options === 'function') {
cb = options
options = null
}
function callback (er, stats) {
if (stats) {
if (stats.uid < 0) stats.uid += 0x100000000
if (stats.gid < 0) stats.gid += 0x100000000
}
if (cb) cb.apply(this, arguments)
}
return options ? orig.call(fs, target, options, callback)
: orig.call(fs, target, callback)
}
}
function statFixSync (orig) {
if (!orig) return orig
// Older versions of Node erroneously returned signed integers for
// uid + gid.
return function (target, options) {
var stats = options ? orig.call(fs, target, options)
: orig.call(fs, target)
if (stats) {
if (stats.uid < 0) stats.uid += 0x100000000
if (stats.gid < 0) stats.gid += 0x100000000
}
return stats;
}
}
// ENOSYS means that the fs doesn't support the op. Just ignore
// that, because it doesn't matter.
//
// if there's no getuid, or if getuid() is something other
// than 0, and the error is EINVAL or EPERM, then just ignore
// it.
//
// This specific case is a silent failure in cp, install, tar,
// and most other unix tools that manage permissions.
//
// When running as root, or if other types of errors are
// encountered, then it's strict.
function chownErOk (er) {
if (!er)
return true
if (er.code === "ENOSYS")
return true
var nonroot = !process.getuid || process.getuid() !== 0
if (nonroot) {
if (er.code === "EINVAL" || er.code === "EPERM")
return true
}
return false
}
}

171
node_modules/jsonfile/CHANGELOG.md generated vendored Normal file
View File

@@ -0,0 +1,171 @@
6.1.0 / 2020-10-31
------------------
- Add `finalEOL` option to disable writing final EOL ([#115](https://github.com/jprichardson/node-jsonfile/issues/115), [#137](https://github.com/jprichardson/node-jsonfile/pull/137))
- Update dependency ([#138](https://github.com/jprichardson/node-jsonfile/pull/138))
6.0.1 / 2020-03-07
------------------
- Update dependency ([#130](https://github.com/jprichardson/node-jsonfile/pull/130))
- Fix code style ([#129](https://github.com/jprichardson/node-jsonfile/pull/129))
6.0.0 / 2020-02-24
------------------
- **BREAKING:** Drop support for Node 6 & 8 ([#128](https://github.com/jprichardson/node-jsonfile/pull/128))
- **BREAKING:** Do not allow passing `null` as options to `readFile()` or `writeFile()` ([#128](https://github.com/jprichardson/node-jsonfile/pull/128))
- Refactor internals ([#128](https://github.com/jprichardson/node-jsonfile/pull/128))
5.0.0 / 2018-09-08
------------------
- **BREAKING:** Drop Node 4 support
- **BREAKING:** If no callback is passed to an asynchronous method, a promise is now returned ([#109](https://github.com/jprichardson/node-jsonfile/pull/109))
- Cleanup docs
4.0.0 / 2017-07-12
------------------
- **BREAKING:** Remove global `spaces` option.
- **BREAKING:** Drop support for Node 0.10, 0.12, and io.js.
- Remove undocumented `passParsingErrors` option.
- Added `EOL` override option to `writeFile` when using `spaces`. [#89]
3.0.1 / 2017-07-05
------------------
- Fixed bug in `writeFile` when there was a serialization error & no callback was passed. In previous versions, an empty file would be written; now no file is written.
3.0.0 / 2017-04-25
------------------
- Changed behavior of `throws` option for `readFileSync`; now does not throw filesystem errors when `throws` is `false`
2.4.0 / 2016-09-15
------------------
### Changed
- added optional support for `graceful-fs` [#62]
2.3.1 / 2016-05-13
------------------
- fix to support BOM. [#45][#45]
2.3.0 / 2016-04-16
------------------
- add `throws` to `readFile()`. See [#39][#39]
- add support for any arbitrary `fs` module. Useful with [mock-fs](https://www.npmjs.com/package/mock-fs)
2.2.3 / 2015-10-14
------------------
- include file name in parse error. See: https://github.com/jprichardson/node-jsonfile/pull/34
2.2.2 / 2015-09-16
------------------
- split out tests into separate files
- fixed `throws` when set to `true` in `readFileSync()`. See: https://github.com/jprichardson/node-jsonfile/pull/33
2.2.1 / 2015-06-25
------------------
- fixed regression when passing in string as encoding for options in `writeFile()` and `writeFileSync()`. See: https://github.com/jprichardson/node-jsonfile/issues/28
2.2.0 / 2015-06-25
------------------
- added `options.spaces` to `writeFile()` and `writeFileSync()`
2.1.2 / 2015-06-22
------------------
- fixed if passed `readFileSync(file, 'utf8')`. See: https://github.com/jprichardson/node-jsonfile/issues/25
2.1.1 / 2015-06-19
------------------
- fixed regressions if `null` is passed for options. See: https://github.com/jprichardson/node-jsonfile/issues/24
2.1.0 / 2015-06-19
------------------
- cleanup: JavaScript Standard Style, rename files, dropped terst for assert
- methods now support JSON revivers/replacers
2.0.1 / 2015-05-24
------------------
- update license attribute https://github.com/jprichardson/node-jsonfile/pull/21
2.0.0 / 2014-07-28
------------------
* added `\n` to end of file on write. [#14](https://github.com/jprichardson/node-jsonfile/pull/14)
* added `options.throws` to `readFileSync()`
* dropped support for Node v0.8
1.2.0 / 2014-06-29
------------------
* removed semicolons
* bugfix: passed `options` to `fs.readFile` and `fs.readFileSync`. This technically changes behavior, but
changes it according to docs. [#12][#12]
1.1.1 / 2013-11-11
------------------
* fixed catching of callback bug (ffissore / #5)
1.1.0 / 2013-10-11
------------------
* added `options` param to methods, (seanodell / #4)
1.0.1 / 2013-09-05
------------------
* removed `homepage` field from package.json to remove NPM warning
1.0.0 / 2013-06-28
------------------
* added `.npmignore`, #1
* changed spacing default from `4` to `2` to follow Node conventions
0.0.1 / 2012-09-10
------------------
* Initial release.
[#89]: https://github.com/jprichardson/node-jsonfile/pull/89
[#45]: https://github.com/jprichardson/node-jsonfile/issues/45 "Reading of UTF8-encoded (w/ BOM) files fails"
[#44]: https://github.com/jprichardson/node-jsonfile/issues/44 "Extra characters in written file"
[#43]: https://github.com/jprichardson/node-jsonfile/issues/43 "Prettyfy json when written to file"
[#42]: https://github.com/jprichardson/node-jsonfile/pull/42 "Moved fs.readFileSync within the try/catch"
[#41]: https://github.com/jprichardson/node-jsonfile/issues/41 "Linux: Hidden file not working"
[#40]: https://github.com/jprichardson/node-jsonfile/issues/40 "autocreate folder doesn't work from Path-value"
[#39]: https://github.com/jprichardson/node-jsonfile/pull/39 "Add `throws` option for readFile (async)"
[#38]: https://github.com/jprichardson/node-jsonfile/pull/38 "Update README.md writeFile[Sync] signature"
[#37]: https://github.com/jprichardson/node-jsonfile/pull/37 "support append file"
[#36]: https://github.com/jprichardson/node-jsonfile/pull/36 "Add typescript definition file."
[#35]: https://github.com/jprichardson/node-jsonfile/pull/35 "Add typescript definition file."
[#34]: https://github.com/jprichardson/node-jsonfile/pull/34 "readFile JSON parse error includes filename"
[#33]: https://github.com/jprichardson/node-jsonfile/pull/33 "fix throw->throws typo in readFileSync()"
[#32]: https://github.com/jprichardson/node-jsonfile/issues/32 "readFile & readFileSync can possible have strip-comments as an option?"
[#31]: https://github.com/jprichardson/node-jsonfile/pull/31 "[Modify] Support string include is unicode escape string"
[#30]: https://github.com/jprichardson/node-jsonfile/issues/30 "How to use Jsonfile package in Meteor.js App?"
[#29]: https://github.com/jprichardson/node-jsonfile/issues/29 "writefile callback if no error?"
[#28]: https://github.com/jprichardson/node-jsonfile/issues/28 "writeFile options argument broken "
[#27]: https://github.com/jprichardson/node-jsonfile/pull/27 "Use svg instead of png to get better image quality"
[#26]: https://github.com/jprichardson/node-jsonfile/issues/26 "Breaking change to fs-extra"
[#25]: https://github.com/jprichardson/node-jsonfile/issues/25 "support string encoding param for read methods"
[#24]: https://github.com/jprichardson/node-jsonfile/issues/24 "readFile: Passing in null options with a callback throws an error"
[#23]: https://github.com/jprichardson/node-jsonfile/pull/23 "Add appendFile and appendFileSync"
[#22]: https://github.com/jprichardson/node-jsonfile/issues/22 "Default value for spaces in readme.md is outdated"
[#21]: https://github.com/jprichardson/node-jsonfile/pull/21 "Update license attribute"
[#20]: https://github.com/jprichardson/node-jsonfile/issues/20 "Add simple caching functionallity"
[#19]: https://github.com/jprichardson/node-jsonfile/pull/19 "Add appendFileSync method"
[#18]: https://github.com/jprichardson/node-jsonfile/issues/18 "Add updateFile and updateFileSync methods"
[#17]: https://github.com/jprichardson/node-jsonfile/issues/17 "seem read & write sync has sequentially problem"
[#16]: https://github.com/jprichardson/node-jsonfile/pull/16 "export spaces defaulted to null"
[#15]: https://github.com/jprichardson/node-jsonfile/issues/15 "`jsonfile.spaces` should default to `null`"
[#14]: https://github.com/jprichardson/node-jsonfile/pull/14 "Add EOL at EOF"
[#13]: https://github.com/jprichardson/node-jsonfile/issues/13 "Add a final newline"
[#12]: https://github.com/jprichardson/node-jsonfile/issues/12 "readFile doesn't accept options"
[#11]: https://github.com/jprichardson/node-jsonfile/pull/11 "Added try,catch to readFileSync"
[#10]: https://github.com/jprichardson/node-jsonfile/issues/10 "No output or error from writeFile"
[#9]: https://github.com/jprichardson/node-jsonfile/pull/9 "Change 'js' to 'jf' in example."
[#8]: https://github.com/jprichardson/node-jsonfile/pull/8 "Updated forgotten module.exports to me."
[#7]: https://github.com/jprichardson/node-jsonfile/pull/7 "Add file name in error message"
[#6]: https://github.com/jprichardson/node-jsonfile/pull/6 "Use graceful-fs when possible"
[#5]: https://github.com/jprichardson/node-jsonfile/pull/5 "Jsonfile doesn't behave nicely when used inside a test suite."
[#4]: https://github.com/jprichardson/node-jsonfile/pull/4 "Added options parameter to writeFile and writeFileSync"
[#3]: https://github.com/jprichardson/node-jsonfile/issues/3 "test2"
[#2]: https://github.com/jprichardson/node-jsonfile/issues/2 "homepage field must be a string url. Deleted."
[#1]: https://github.com/jprichardson/node-jsonfile/pull/1 "adding an `.npmignore` file"

15
node_modules/jsonfile/LICENSE generated vendored Normal file
View File

@@ -0,0 +1,15 @@
(The MIT License)
Copyright (c) 2012-2015, JP Richardson <jprichardson@gmail.com>
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files
(the 'Software'), to deal in the Software without restriction, including without limitation the rights to use, copy, modify,
merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS
OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

230
node_modules/jsonfile/README.md generated vendored Normal file
View File

@@ -0,0 +1,230 @@
Node.js - jsonfile
================
Easily read/write JSON files in Node.js. _Note: this module cannot be used in the browser._
[![npm Package](https://img.shields.io/npm/v/jsonfile.svg?style=flat-square)](https://www.npmjs.org/package/jsonfile)
[![build status](https://secure.travis-ci.org/jprichardson/node-jsonfile.svg)](http://travis-ci.org/jprichardson/node-jsonfile)
[![windows Build status](https://img.shields.io/appveyor/ci/jprichardson/node-jsonfile/master.svg?label=windows%20build)](https://ci.appveyor.com/project/jprichardson/node-jsonfile/branch/master)
<a href="https://github.com/feross/standard"><img src="https://cdn.rawgit.com/feross/standard/master/sticker.svg" alt="Standard JavaScript" width="100"></a>
Why?
----
Writing `JSON.stringify()` and then `fs.writeFile()` and `JSON.parse()` with `fs.readFile()` enclosed in `try/catch` blocks became annoying.
Installation
------------
npm install --save jsonfile
API
---
* [`readFile(filename, [options], callback)`](#readfilefilename-options-callback)
* [`readFileSync(filename, [options])`](#readfilesyncfilename-options)
* [`writeFile(filename, obj, [options], callback)`](#writefilefilename-obj-options-callback)
* [`writeFileSync(filename, obj, [options])`](#writefilesyncfilename-obj-options)
----
### readFile(filename, [options], callback)
`options` (`object`, default `undefined`): Pass in any [`fs.readFile`](https://nodejs.org/api/fs.html#fs_fs_readfile_path_options_callback) options or set `reviver` for a [JSON reviver](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/JSON/parse).
- `throws` (`boolean`, default: `true`). If `JSON.parse` throws an error, pass this error to the callback.
If `false`, returns `null` for the object.
```js
const jsonfile = require('jsonfile')
const file = '/tmp/data.json'
jsonfile.readFile(file, function (err, obj) {
if (err) console.error(err)
console.dir(obj)
})
```
You can also use this method with promises. The `readFile` method will return a promise if you do not pass a callback function.
```js
const jsonfile = require('jsonfile')
const file = '/tmp/data.json'
jsonfile.readFile(file)
.then(obj => console.dir(obj))
.catch(error => console.error(error))
```
----
### readFileSync(filename, [options])
`options` (`object`, default `undefined`): Pass in any [`fs.readFileSync`](https://nodejs.org/api/fs.html#fs_fs_readfilesync_path_options) options or set `reviver` for a [JSON reviver](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/JSON/parse).
- `throws` (`boolean`, default: `true`). If an error is encountered reading or parsing the file, throw the error. If `false`, returns `null` for the object.
```js
const jsonfile = require('jsonfile')
const file = '/tmp/data.json'
console.dir(jsonfile.readFileSync(file))
```
----
### writeFile(filename, obj, [options], callback)
`options`: Pass in any [`fs.writeFile`](https://nodejs.org/api/fs.html#fs_fs_writefile_file_data_options_callback) options or set `replacer` for a [JSON replacer](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/JSON/stringify). Can also pass in `spaces`, or override `EOL` string or set `finalEOL` flag as `false` to not save the file with `EOL` at the end.
```js
const jsonfile = require('jsonfile')
const file = '/tmp/data.json'
const obj = { name: 'JP' }
jsonfile.writeFile(file, obj, function (err) {
if (err) console.error(err)
})
```
Or use with promises as follows:
```js
const jsonfile = require('jsonfile')
const file = '/tmp/data.json'
const obj = { name: 'JP' }
jsonfile.writeFile(file, obj)
.then(res => {
console.log('Write complete')
})
.catch(error => console.error(error))
```
**formatting with spaces:**
```js
const jsonfile = require('jsonfile')
const file = '/tmp/data.json'
const obj = { name: 'JP' }
jsonfile.writeFile(file, obj, { spaces: 2 }, function (err) {
if (err) console.error(err)
})
```
**overriding EOL:**
```js
const jsonfile = require('jsonfile')
const file = '/tmp/data.json'
const obj = { name: 'JP' }
jsonfile.writeFile(file, obj, { spaces: 2, EOL: '\r\n' }, function (err) {
if (err) console.error(err)
})
```
**disabling the EOL at the end of file:**
```js
const jsonfile = require('jsonfile')
const file = '/tmp/data.json'
const obj = { name: 'JP' }
jsonfile.writeFile(file, obj, { spaces: 2, finalEOL: false }, function (err) {
if (err) console.log(err)
})
```
**appending to an existing JSON file:**
You can use `fs.writeFile` option `{ flag: 'a' }` to achieve this.
```js
const jsonfile = require('jsonfile')
const file = '/tmp/mayAlreadyExistedData.json'
const obj = { name: 'JP' }
jsonfile.writeFile(file, obj, { flag: 'a' }, function (err) {
if (err) console.error(err)
})
```
----
### writeFileSync(filename, obj, [options])
`options`: Pass in any [`fs.writeFileSync`](https://nodejs.org/api/fs.html#fs_fs_writefilesync_file_data_options) options or set `replacer` for a [JSON replacer](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/JSON/stringify). Can also pass in `spaces`, or override `EOL` string or set `finalEOL` flag as `false` to not save the file with `EOL` at the end.
```js
const jsonfile = require('jsonfile')
const file = '/tmp/data.json'
const obj = { name: 'JP' }
jsonfile.writeFileSync(file, obj)
```
**formatting with spaces:**
```js
const jsonfile = require('jsonfile')
const file = '/tmp/data.json'
const obj = { name: 'JP' }
jsonfile.writeFileSync(file, obj, { spaces: 2 })
```
**overriding EOL:**
```js
const jsonfile = require('jsonfile')
const file = '/tmp/data.json'
const obj = { name: 'JP' }
jsonfile.writeFileSync(file, obj, { spaces: 2, EOL: '\r\n' })
```
**disabling the EOL at the end of file:**
```js
const jsonfile = require('jsonfile')
const file = '/tmp/data.json'
const obj = { name: 'JP' }
jsonfile.writeFileSync(file, obj, { spaces: 2, finalEOL: false })
```
**appending to an existing JSON file:**
You can use `fs.writeFileSync` option `{ flag: 'a' }` to achieve this.
```js
const jsonfile = require('jsonfile')
const file = '/tmp/mayAlreadyExistedData.json'
const obj = { name: 'JP' }
jsonfile.writeFileSync(file, obj, { flag: 'a' })
```
License
-------
(MIT License)
Copyright 2012-2016, JP Richardson <jprichardson@gmail.com>

88
node_modules/jsonfile/index.js generated vendored Normal file
View File

@@ -0,0 +1,88 @@
let _fs
try {
_fs = require('graceful-fs')
} catch (_) {
_fs = require('fs')
}
const universalify = require('universalify')
const { stringify, stripBom } = require('./utils')
async function _readFile (file, options = {}) {
if (typeof options === 'string') {
options = { encoding: options }
}
const fs = options.fs || _fs
const shouldThrow = 'throws' in options ? options.throws : true
let data = await universalify.fromCallback(fs.readFile)(file, options)
data = stripBom(data)
let obj
try {
obj = JSON.parse(data, options ? options.reviver : null)
} catch (err) {
if (shouldThrow) {
err.message = `${file}: ${err.message}`
throw err
} else {
return null
}
}
return obj
}
const readFile = universalify.fromPromise(_readFile)
function readFileSync (file, options = {}) {
if (typeof options === 'string') {
options = { encoding: options }
}
const fs = options.fs || _fs
const shouldThrow = 'throws' in options ? options.throws : true
try {
let content = fs.readFileSync(file, options)
content = stripBom(content)
return JSON.parse(content, options.reviver)
} catch (err) {
if (shouldThrow) {
err.message = `${file}: ${err.message}`
throw err
} else {
return null
}
}
}
async function _writeFile (file, obj, options = {}) {
const fs = options.fs || _fs
const str = stringify(obj, options)
await universalify.fromCallback(fs.writeFile)(file, str, options)
}
const writeFile = universalify.fromPromise(_writeFile)
function writeFileSync (file, obj, options = {}) {
const fs = options.fs || _fs
const str = stringify(obj, options)
// not sure if fs.writeFileSync returns anything, but just in case
return fs.writeFileSync(file, str, options)
}
const jsonfile = {
readFile,
readFileSync,
writeFile,
writeFileSync
}
module.exports = jsonfile

40
node_modules/jsonfile/package.json generated vendored Normal file
View File

@@ -0,0 +1,40 @@
{
"name": "jsonfile",
"version": "6.1.0",
"description": "Easily read/write JSON files.",
"repository": {
"type": "git",
"url": "git@github.com:jprichardson/node-jsonfile.git"
},
"keywords": [
"read",
"write",
"file",
"json",
"fs",
"fs-extra"
],
"author": "JP Richardson <jprichardson@gmail.com>",
"license": "MIT",
"dependencies": {
"universalify": "^2.0.0"
},
"optionalDependencies": {
"graceful-fs": "^4.1.6"
},
"devDependencies": {
"mocha": "^8.2.0",
"rimraf": "^2.4.0",
"standard": "^16.0.1"
},
"main": "index.js",
"files": [
"index.js",
"utils.js"
],
"scripts": {
"lint": "standard",
"test": "npm run lint && npm run unit",
"unit": "mocha"
}
}

14
node_modules/jsonfile/utils.js generated vendored Normal file
View File

@@ -0,0 +1,14 @@
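// Serialize obj with the optional replacer/spaces, normalize newlines to EOL,
// and append a trailing EOL unless finalEOL is false.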
function stringify (obj, { EOL = '\n', finalEOL = true, replacer = null, spaces } = {}) {
const EOF = finalEOL ? EOL : ''
const str = JSON.stringify(obj, replacer, spaces)
return str.replace(/\n/g, EOL) + EOF
}
function stripBom (content) {
// we do this because JSON.parse would convert it to a utf8 string if encoding wasn't specified
if (Buffer.isBuffer(content)) content = content.toString('utf8')
return content.replace(/^\uFEFF/, '')
}
module.exports = { stringify, stripBom }

20
node_modules/universalify/LICENSE generated vendored Normal file
View File

@@ -0,0 +1,20 @@
(The MIT License)
Copyright (c) 2017, Ryan Zimmerman <opensrc@ryanzim.com>
Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the 'Software'), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
the Software, and to permit persons to whom the Software is furnished to do so,
subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

76
node_modules/universalify/README.md generated vendored Normal file
View File

@@ -0,0 +1,76 @@
# universalify
![GitHub Workflow Status (branch)](https://img.shields.io/github/actions/workflow/status/RyanZim/universalify/ci.yml?branch=master)
![Coveralls github branch](https://img.shields.io/coveralls/github/RyanZim/universalify/master.svg)
![npm](https://img.shields.io/npm/dm/universalify.svg)
![npm](https://img.shields.io/npm/l/universalify.svg)
Make a callback- or promise-based function support both promises and callbacks.
Uses the native promise implementation.
## Installation
```bash
npm install universalify
```
## API
### `universalify.fromCallback(fn)`
Takes a callback-based function to universalify, and returns the universalified function.
Function must take a callback as the last parameter that will be called with the signature `(error, result)`. `universalify` does not support calling the callback with three or more arguments, and does not ensure that the callback is only called once.
```js
function callbackFn (n, cb) {
setTimeout(() => cb(null, n), 15)
}
const fn = universalify.fromCallback(callbackFn)
// Works with Promises:
fn('Hello World!')
.then(result => console.log(result)) // -> Hello World!
.catch(error => console.error(error))
// Works with Callbacks:
fn('Hi!', (error, result) => {
if (error) return console.error(error)
console.log(result)
// -> Hi!
})
```
### `universalify.fromPromise(fn)`
Takes a promise-based function to universalify, and returns the universalified function.
Function must return a valid JS promise. `universalify` does not ensure that a valid promise is returned.
```js
function promiseFn (n) {
return new Promise(resolve => {
setTimeout(() => resolve(n), 15)
})
}
const fn = universalify.fromPromise(promiseFn)
// Works with Promises:
fn('Hello World!')
.then(result => console.log(result)) // -> Hello World!
.catch(error => console.error(error))
// Works with Callbacks:
fn('Hi!', (error, result) => {
if (error) return console.error(error)
console.log(result)
// -> Hi!
})
```
## License
MIT

24
node_modules/universalify/index.js generated vendored Normal file
View File

@@ -0,0 +1,24 @@
'use strict'
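// Wrap a callback-style fn: if the caller passes a callback as the last
// argument, call fn directly; otherwise return a Promise that supplies a
// node-style callback. The wrapper keeps fn's original name.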
exports.fromCallback = function (fn) {
return Object.defineProperty(function (...args) {
if (typeof args[args.length - 1] === 'function') fn.apply(this, args)
else {
return new Promise((resolve, reject) => {
args.push((err, res) => (err != null) ? reject(err) : resolve(res))
fn.apply(this, args)
})
}
}, 'name', { value: fn.name })
}
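// Wrap a promise-returning fn: if the caller passes a callback as the last
// argument, pop it and bridge the promise's result or error to it; otherwise
// just return the promise. The wrapper keeps fn's original name.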
exports.fromPromise = function (fn) {
return Object.defineProperty(function (...args) {
const cb = args[args.length - 1]
if (typeof cb !== 'function') return fn.apply(this, args)
else {
args.pop()
fn.apply(this, args).then(r => cb(null, r), cb)
}
}, 'name', { value: fn.name })
}

34
node_modules/universalify/package.json generated vendored Normal file
View File

@@ -0,0 +1,34 @@
{
"name": "universalify",
"version": "2.0.1",
"description": "Make a callback- or promise-based function support both promises and callbacks.",
"keywords": [
"callback",
"native",
"promise"
],
"homepage": "https://github.com/RyanZim/universalify#readme",
"bugs": "https://github.com/RyanZim/universalify/issues",
"license": "MIT",
"author": "Ryan Zimmerman <opensrc@ryanzim.com>",
"files": [
"index.js"
],
"repository": {
"type": "git",
"url": "git+https://github.com/RyanZim/universalify.git"
},
"scripts": {
"test": "standard && nyc --reporter text --reporter lcovonly tape test/*.js | colortape"
},
"devDependencies": {
"colortape": "^0.1.2",
"coveralls": "^3.0.1",
"nyc": "^15.0.0",
"standard": "^14.3.1",
"tape": "^5.0.1"
},
"engines": {
"node": ">= 10.0.0"
}
}