mirror of
https://github.com/titanscouting/tra-analysis.git
synced 2025-09-09 16:17:22 +00:00
push all website files
This commit is contained in:
1177
website/node_modules/npm/node_modules/pacote/CHANGELOG.md
generated
vendored
Normal file
1177
website/node_modules/npm/node_modules/pacote/CHANGELOG.md
generated
vendored
Normal file
File diff suppressed because it is too large
Load Diff
21
website/node_modules/npm/node_modules/pacote/LICENSE
generated
vendored
Normal file
21
website/node_modules/npm/node_modules/pacote/LICENSE
generated
vendored
Normal file
@@ -0,0 +1,21 @@
|
||||
The MIT License (MIT)
|
||||
Copyright (c) 2017 Kat Marchán
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
||||
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
||||
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
|
||||
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
|
||||
DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
|
||||
OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE
|
||||
OR OTHER DEALINGS IN THE SOFTWARE.
|
||||
|
201
website/node_modules/npm/node_modules/pacote/README.md
generated
vendored
Normal file
201
website/node_modules/npm/node_modules/pacote/README.md
generated
vendored
Normal file
@@ -0,0 +1,201 @@
|
||||
# pacote [](https://npm.im/pacote) [](https://npm.im/pacote) [](https://travis-ci.org/zkat/pacote) [](https://ci.appveyor.com/project/zkat/pacote) [](https://coveralls.io/github/zkat/pacote?branch=latest)
|
||||
|
||||
[`pacote`](https://github.com/zkat/pacote) is a Node.js library for downloading
|
||||
[npm](https://npmjs.org)-compatible packages. It supports all package specifier
|
||||
syntax that `npm install` and its ilk support. It transparently caches anything
|
||||
needed to reduce excess operations, using [`cacache`](https://npm.im/cacache).
|
||||
|
||||
## Install
|
||||
|
||||
`$ npm install --save pacote`
|
||||
|
||||
## Table of Contents
|
||||
|
||||
* [Example](#example)
|
||||
* [Features](#features)
|
||||
* [Contributing](#contributing)
|
||||
* [API](#api)
|
||||
* [`manifest`](#manifest)
|
||||
* [`extract`](#extract)
|
||||
* [`tarball`](#tarball)
|
||||
* [`tarball.stream`](#tarball-stream)
|
||||
* [`tarball.toFile`](#tarball-to-file)
|
||||
* [`prefetch`](#prefetch)
|
||||
* [`options`](#options)
|
||||
* [`clearMemoized`](#clearMemoized)
|
||||
|
||||
### Example
|
||||
|
||||
```javascript
|
||||
const pacote = require('pacote')
|
||||
|
||||
pacote.manifest('pacote@^1').then(pkg => {
|
||||
console.log('package manifest for registry pkg:', pkg)
|
||||
// { "name": "pacote", "version": "1.0.0", ... }
|
||||
})
|
||||
|
||||
pacote.extract('http://hi.com/pkg.tgz', './here').then(() => {
|
||||
console.log('remote tarball contents extracted to ./here')
|
||||
})
|
||||
```
|
||||
|
||||
### Features
|
||||
|
||||
* Handles all package types [npm](https://npm.im/npm) does
|
||||
* [high-performance, reliable, verified local cache](https://npm.im/cacache)
|
||||
* offline mode
|
||||
* authentication support (private git, private npm registries, etc)
|
||||
* github, gitlab, and bitbucket-aware
|
||||
* semver range support for git dependencies
|
||||
|
||||
### Contributing
|
||||
|
||||
The pacote team enthusiastically welcomes contributions and project participation! There's a bunch of things you can do if you want to contribute! The [Contributor Guide](CONTRIBUTING.md) has all the information you need for everything from reporting bugs to contributing entire new features. Please don't hesitate to jump in if you'd like to, or even ask us questions if something isn't clear.
|
||||
|
||||
### API
|
||||
|
||||
#### <a name="manifest"></a> `> pacote.manifest(spec, [opts])`
|
||||
|
||||
Fetches the *manifest* for a package. Manifest objects are similar and based
|
||||
on the `package.json` for that package, but with pre-processed and limited
|
||||
fields. The object has the following shape:
|
||||
|
||||
```javascript
|
||||
{
|
||||
"name": PkgName,
|
||||
"version": SemverString,
|
||||
"dependencies": { PkgName: SemverString },
|
||||
"optionalDependencies": { PkgName: SemverString },
|
||||
"devDependencies": { PkgName: SemverString },
|
||||
"peerDependencies": { PkgName: SemverString },
|
||||
"bundleDependencies": false || [PkgName],
|
||||
"bin": { BinName: Path },
|
||||
"_resolved": TarballSource, // different for each package type
|
||||
"_integrity": SubresourceIntegrityHash,
|
||||
"_shrinkwrap": null || ShrinkwrapJsonObj
|
||||
}
|
||||
```
|
||||
|
||||
Note that depending on the spec type, some additional fields might be present.
|
||||
For example, packages from `registry.npmjs.org` have additional metadata
|
||||
appended by the registry.
|
||||
|
||||
##### Example
|
||||
|
||||
```javascript
|
||||
pacote.manifest('pacote@1.0.0').then(pkgJson => {
|
||||
// fetched `package.json` data from the registry
|
||||
})
|
||||
```
|
||||
|
||||
#### <a name="extract"></a> `> pacote.extract(spec, destination, [opts])`
|
||||
|
||||
Extracts package data identified by `<spec>` into a directory named
|
||||
`<destination>`, which will be created if it does not already exist.
|
||||
|
||||
If `opts.digest` is provided and the data it identifies is present in the cache,
|
||||
`extract` will bypass most of its operations and go straight to extracting the
|
||||
tarball.
|
||||
|
||||
##### Example
|
||||
|
||||
```javascript
|
||||
pacote.extract('pacote@1.0.0', './woot', {
|
||||
digest: 'deadbeef'
|
||||
}).then(() => {
|
||||
// Succeeds as long as `pacote@1.0.0` still exists somewhere. Network and
|
||||
// other operations are bypassed entirely if `digest` is present in the cache.
|
||||
})
|
||||
```
|
||||
|
||||
#### <a name="tarball"></a> `> pacote.tarball(spec, [opts])`
|
||||
|
||||
Fetches package data identified by `<spec>` and returns the data as a buffer.
|
||||
|
||||
This API has two variants:
|
||||
|
||||
* `pacote.tarball.stream(spec, [opts])` - Same as `pacote.tarball`, except it returns a stream instead of a Promise.
|
||||
* `pacote.tarball.toFile(spec, dest, [opts])` - Instead of returning data directly, data will be written directly to `dest`, and create any required directories along the way.
|
||||
|
||||
##### Example
|
||||
|
||||
```javascript
|
||||
pacote.tarball('pacote@1.0.0', { cache: './my-cache' }).then(data => {
|
||||
// data is the tarball data for pacote@1.0.0
|
||||
})
|
||||
```
|
||||
|
||||
#### <a name="tarball-stream"></a> `> pacote.tarball.stream(spec, [opts])`
|
||||
|
||||
Same as `pacote.tarball`, except it returns a stream instead of a Promise.
|
||||
|
||||
##### Example
|
||||
|
||||
```javascript
|
||||
pacote.tarball.stream('pacote@1.0.0')
|
||||
.pipe(fs.createWriteStream('./pacote-1.0.0.tgz'))
|
||||
```
|
||||
|
||||
#### <a name="tarball-to-file"></a> `> pacote.tarball.toFile(spec, dest, [opts])`
|
||||
|
||||
Like `pacote.tarball`, but instead of returning data directly, data will be
|
||||
written directly to `dest`, and create any required directories along the way.
|
||||
|
||||
##### Example
|
||||
|
||||
```javascript
|
||||
pacote.tarball.toFile('pacote@1.0.0', './pacote-1.0.0.tgz')
|
||||
.then(() => /* pacote tarball written directly to ./pacote-1.0.0.tgz */)
|
||||
```
|
||||
|
||||
#### <a name="prefetch"></a> `> pacote.prefetch(spec, [opts])`
|
||||
|
||||
##### THIS API IS DEPRECATED. USE `pacote.tarball()` INSTEAD
|
||||
|
||||
Fetches package data identified by `<spec>`, usually for the purpose of warming
|
||||
up the local package cache (with `opts.cache`). It does not return anything.
|
||||
|
||||
##### Example
|
||||
|
||||
```javascript
|
||||
pacote.prefetch('pacote@1.0.0', { cache: './my-cache' }).then(() => {
|
||||
// ./my-cache now has both the manifest and tarball for `pacote@1.0.0`.
|
||||
})
|
||||
```
|
||||
|
||||
#### <a name="clearMemoized"></a> `> pacote.clearMemoized()`
|
||||
|
||||
This utility function can be used to force pacote to release its references
|
||||
to any memoized data in its various internal caches. It might help free
|
||||
some memory.
|
||||
|
||||
```javascript
|
||||
pacote.manifest(...).then(() => pacote.clearMemoized())
|
||||
|
||||
```
|
||||
|
||||
#### <a name="options"></a> `> options`
|
||||
|
||||
##### `opts.integrity`
|
||||
|
||||
If provided, pacote will confirm that the relevant integrity hash for each
|
||||
operation's results matches the given digest. The call will return `EINTEGRITY`
|
||||
if the check fails.
|
||||
|
||||
Additionally, `pacote.extract` will use this integrity string to check the cache
|
||||
directly for matching contents before performing any other operations.
|
||||
|
||||
##### `opts.cache`
|
||||
##### `opts.cacheUid`/`opts.cacheGid`
|
||||
##### `opts.uid`/`opts.gid`
|
||||
##### `opts.scope`
|
||||
##### `opts.registry`
|
||||
##### `opts.@somescope:registry`
|
||||
##### `opts.auth`
|
||||
##### `opts.log`
|
||||
##### `opts.maxSockets`
|
||||
|
||||
Default: `silentNpmLog`
|
||||
|
||||
An [`npmlog`](https://npm.im/npmlog)-compatible logger. Will be used to log
|
||||
various events at the levels specified by `npmlog`.
|
70
website/node_modules/npm/node_modules/pacote/extract.js
generated
vendored
Normal file
70
website/node_modules/npm/node_modules/pacote/extract.js
generated
vendored
Normal file
@@ -0,0 +1,70 @@
|
||||
'use strict'
|
||||
|
||||
const BB = require('bluebird')
|
||||
|
||||
const extractStream = require('./lib/extract-stream.js')
|
||||
const fs = require('fs')
|
||||
const mkdirp = BB.promisify(require('mkdirp'))
|
||||
const npa = require('npm-package-arg')
|
||||
const optCheck = require('./lib/util/opt-check.js')
|
||||
const path = require('path')
|
||||
const rimraf = BB.promisify(require('rimraf'))
|
||||
const withTarballStream = require('./lib/with-tarball-stream.js')
|
||||
|
||||
const truncateAsync = BB.promisify(fs.truncate)
|
||||
const readFileAsync = BB.promisify(fs.readFile)
|
||||
const appendFileAsync = BB.promisify(fs.appendFile)
|
||||
|
||||
module.exports = extract
// Extract the package identified by `spec` into the directory `dest`.
// Resolves once the tarball has been unpacked and, when needed, the
// unpacked package.json has been annotated with registry-style metadata.
function extract (spec, dest, opts) {
  opts = optCheck(opts)
  spec = npa(spec, opts.where)
  const startTime = Date.now()

  return withTarballStream(spec, opts, stream => {
    return tryExtract(spec, stream, dest, opts)
  })
  .then(() => {
    if (!opts.resolved) {
      // No pre-resolved metadata was supplied, so append `_resolved`,
      // `_integrity`, and `_from` to the extracted package.json by
      // truncating the file and rewriting it with the fields spliced
      // in before the final closing brace.
      // NOTE(review): inside this branch `opts.resolved` is always falsy,
      // so `_resolved` is written as "" -- presumably a deliberate
      // placeholder; confirm against callers before changing.
      const pjson = path.join(dest, 'package.json')
      return readFileAsync(pjson, 'utf8')
      .then(str => truncateAsync(pjson)
      .then(() => appendFileAsync(pjson, str.replace(
        /}\s*$/,
        `\n,"_resolved": ${
          JSON.stringify(opts.resolved || '')
        }\n,"_integrity": ${
          JSON.stringify(opts.integrity || '')
        }\n,"_from": ${
          JSON.stringify(spec.toString())
        }\n}`
      ))))
    }
  })
  .then(() => opts.log.silly(
    'extract',
    `${spec} extracted to ${dest} (${Date.now() - startTime}ms)`
  ))
}
|
||||
|
||||
// Wipe `dest`, recreate it, and pipe `tarStream` through the tar
// extractor. Rejects early if the tarball stream errors before extraction
// starts, and annotates integrity failures with the offending spec.
function tryExtract (spec, tarStream, dest, opts) {
  const earlyErrorGuard = new BB((resolve, reject) => {
    tarStream.on('error', reject)
    setImmediate(resolve)
  })
  return earlyErrorGuard
  .then(() => rimraf(dest))
  .then(() => mkdirp(dest))
  .then(() => new BB((resolve, reject) => {
    const extractor = extractStream(spec, dest, opts)
    tarStream.on('error', reject)
    extractor.on('error', reject)
    extractor.on('close', resolve)
    tarStream.pipe(extractor)
  }))
  .catch(err => {
    if (err.code === 'EINTEGRITY') {
      err.message = `Verification failed while extracting ${spec}:\n${err.message}`
    }
    throw err
  })
}
|
9
website/node_modules/npm/node_modules/pacote/index.js
generated
vendored
Normal file
9
website/node_modules/npm/node_modules/pacote/index.js
generated
vendored
Normal file
@@ -0,0 +1,9 @@
|
||||
'use strict'

// Public pacote API surface. Each top-level operation lives in its own
// module; `clearMemoized` drops any in-memory caches held by the fetchers.
module.exports = {
  extract: require('./extract'),
  manifest: require('./manifest'),
  prefetch: require('./prefetch'),
  tarball: require('./tarball'),
  clearMemoized: require('./lib/fetch').clearMemoized
}
|
88
website/node_modules/npm/node_modules/pacote/lib/extract-stream.js
generated
vendored
Normal file
88
website/node_modules/npm/node_modules/pacote/lib/extract-stream.js
generated
vendored
Normal file
@@ -0,0 +1,88 @@
|
||||
'use strict'
|
||||
|
||||
const Minipass = require('minipass')
|
||||
const path = require('path')
|
||||
const tar = require('tar')
|
||||
|
||||
module.exports = extractStream
module.exports._computeMode = computeMode

// Minipass stream that buffers an entire package.json document, then
// injects `_resolved`, `_integrity`, and `_from` fields just before the
// final closing brace once the stream ends.
class Transformer extends Minipass {
  constructor (spec, opts) {
    super()
    this.spec = spec
    this.opts = opts
    this.str = ''
  }

  // Accumulate chunks; nothing is emitted until `end`.
  write (data) {
    this.str += data
    return true
  }

  // Splice in the metadata fields and flush the rewritten document.
  end () {
    const injected = [
      `\n,"_resolved": ${JSON.stringify(this.opts.resolved || '')}`,
      `\n,"_integrity": ${JSON.stringify(this.opts.integrity || '')}`,
      `\n,"_from": ${JSON.stringify(this.spec.toString())}`,
      '\n}'
    ].join('')
    super.write(this.str.replace(/}\s*$/, injected))
    return super.end()
  }
}
|
||||
|
||||
// Combine the tar entry's mode with the configured override mode, then
// clear any bits present in the umask. A nullish/zero umask clears nothing.
function computeMode (fileMode, optMode, umask) {
  const combined = fileMode | optMode
  const mask = ~(umask || 0)
  return combined & mask
}
|
||||
|
||||
// Build a tar `transform` hook: package.json entries are routed through a
// metadata-injecting Transformer; every other entry yields `undefined`,
// which tells tar to pass the entry through untouched.
function pkgJsonTransform (spec, opts) {
  return entry => (
    entry.path === 'package.json'
      ? new Transformer(spec, opts)
      : undefined
  )
}
|
||||
|
||||
// Build a tar extraction stream that unpacks a package tarball into
// `dest`. Link entries are dropped, one leading path segment is stripped,
// and file/directory modes are normalized via computeMode against
// opts.fmode/opts.dmode/opts.umask.
function extractStream (spec, dest, opts) {
  opts = opts || {}
  const sawIgnores = new Set()
  return tar.x({
    cwd: dest,
    // Drop any entry whose header type ends in "link".
    filter: (name, entry) => !entry.header.type.match(/^.*link$/i),
    // Strip the single top-level directory the tarball wraps its
    // contents in.
    strip: 1,
    onwarn: msg => opts.log && opts.log.warn('tar', msg),
    uid: opts.uid,
    gid: opts.gid,
    // Only rewrite package.json when resolved metadata is available.
    transform: opts.resolved && pkgJsonTransform(spec, opts),
    onentry (entry) {
      if (entry.type.toLowerCase() === 'file') {
        entry.mode = computeMode(entry.mode, opts.fmode, opts.umask)
      } else if (entry.type.toLowerCase() === 'directory') {
        entry.mode = computeMode(entry.mode, opts.dmode, opts.umask)
      } else {
        entry.mode = computeMode(entry.mode, 0, opts.umask)
      }

      // Note: This mirrors logic in the fs read operations that are
      // employed during tarball creation, in the fstream-npm module.
      // It is duplicated here to handle tarballs that are created
      // using other means, such as system tar or git archive.
      if (entry.type.toLowerCase() === 'file') {
        const base = path.basename(entry.path)
        if (base === '.npmignore') {
          sawIgnores.add(entry.path)
        } else if (base === '.gitignore') {
          // A .gitignore stands in for .npmignore unless a real
          // .npmignore was already seen at the same path.
          const npmignore = entry.path.replace(/\.gitignore$/, '.npmignore')
          if (!sawIgnores.has(npmignore)) {
            // Rename, may be clobbered later.
            entry.path = npmignore
          }
        }
      }
    }
  })
}
|
72
website/node_modules/npm/node_modules/pacote/lib/fetch.js
generated
vendored
Normal file
72
website/node_modules/npm/node_modules/pacote/lib/fetch.js
generated
vendored
Normal file
@@ -0,0 +1,72 @@
|
||||
'use strict'
|
||||
|
||||
const duck = require('protoduck')
|
||||
|
||||
// Protoduck protocol describing the interface every fetcher backend
// (directory, file, git, registry flavors, ...) must implement.
const Fetcher = duck.define(['spec', 'opts', 'manifest'], {
  manifest: ['spec', 'opts'],
  tarball: ['spec', 'opts'],
  fromManifest: ['manifest', 'spec', 'opts'],
  clearMemoized () {}
}, {name: 'Fetcher'})
module.exports = Fetcher
|
||||
|
||||
module.exports.manifest = manifest
// Fetch the manifest for `spec` via the fetcher registered for its type.
function manifest (spec, opts) {
  return getFetcher(spec.type).manifest(spec, opts)
}
|
||||
|
||||
module.exports.tarball = tarball
// Fetch tarball data for `spec` via the fetcher registered for its type.
function tarball (spec, opts) {
  const fetcher = getFetcher(spec.type)
  return fetcher.tarball(spec, opts)
}
|
||||
|
||||
module.exports.fromManifest = fromManifest
// Produce a tarball stream from an already-fetched manifest.
function fromManifest (manifest, spec, opts) {
  const fetcher = getFetcher(spec.type)
  return fetcher.fromManifest(manifest, spec, opts)
}
|
||||
|
||||
// Lazily-populated cache of loaded fetcher backends, keyed by spec type.
const fetchers = {}

module.exports.clearMemoized = clearMemoized
// Ask every loaded fetcher backend to drop its memoized data.
function clearMemoized () {
  for (const type of Object.keys(fetchers)) {
    fetchers[type].clearMemoized()
  }
}
|
||||
|
||||
// Lazily load and memoize the fetcher backend for a given spec type.
// Throws for unknown types.
function getFetcher (type) {
  if (!fetchers[type]) {
    // This is spelled out both to prevent sketchy stuff and to make life
    // easier for bundlers/preprocessors.
    switch (type) {
      case 'directory':
        fetchers[type] = require('./fetchers/directory')
        break
      case 'file':
        fetchers[type] = require('./fetchers/file')
        break
      case 'git':
        fetchers[type] = require('./fetchers/git')
        break
      case 'hosted':
        fetchers[type] = require('./fetchers/hosted')
        break
      case 'range':
        fetchers[type] = require('./fetchers/range')
        break
      case 'remote':
        fetchers[type] = require('./fetchers/remote')
        break
      case 'tag':
        fetchers[type] = require('./fetchers/tag')
        break
      case 'version':
        fetchers[type] = require('./fetchers/version')
        break
      default:
        throw new Error(`Invalid dependency type requested: ${type}`)
    }
  }
  return fetchers[type]
}
|
72
website/node_modules/npm/node_modules/pacote/lib/fetchers/directory.js
generated
vendored
Normal file
72
website/node_modules/npm/node_modules/pacote/lib/fetchers/directory.js
generated
vendored
Normal file
@@ -0,0 +1,72 @@
|
||||
'use strict'
|
||||
|
||||
const BB = require('bluebird')
|
||||
|
||||
const Fetcher = require('../fetch')
|
||||
const glob = BB.promisify(require('glob'))
|
||||
const packDir = require('../util/pack-dir')
|
||||
const path = require('path')
|
||||
const pipe = BB.promisify(require('mississippi').pipe)
|
||||
const through = require('mississippi').through
|
||||
|
||||
const readFileAsync = BB.promisify(require('fs').readFile)
|
||||
|
||||
const fetchDirectory = module.exports = Object.create(null)

Fetcher.impl(fetchDirectory, {
  // `directory` manifests come from the actual manifest/lockfile data.
  // Reads package.json and npm-shrinkwrap.json in parallel, decorates the
  // manifest with `_`-prefixed metadata, and infers `bin` entries from
  // `directories.bin` when `bin` is absent.
  manifest (spec, opts) {
    const pkgPath = path.join(spec.fetchSpec, 'package.json')
    const srPath = path.join(spec.fetchSpec, 'npm-shrinkwrap.json')
    return BB.join(
      // Bluebird predicate catch: only ENOENT is intercepted and
      // re-labeled; other read/parse errors propagate untouched.
      readFileAsync(pkgPath).then(JSON.parse).catch({code: 'ENOENT'}, err => {
        err.code = 'ENOPACKAGEJSON'
        throw err
      }),
      // A missing shrinkwrap is fine -- treat it as null.
      readFileAsync(srPath).then(JSON.parse).catch({code: 'ENOENT'}, () => null),
      (pkg, sr) => {
        pkg._shrinkwrap = sr
        pkg._hasShrinkwrap = !!sr
        pkg._resolved = spec.fetchSpec
        pkg._integrity = false // Don't auto-calculate integrity
        pkg._shasum = false // Don't auto-calculate shasum either
        return pkg
      }
    ).then(pkg => {
      if (!pkg.bin && pkg.directories && pkg.directories.bin) {
        const dirBin = pkg.directories.bin
        return glob(path.join(spec.fetchSpec, dirBin, '/**'), {nodir: true}).then(matches => {
          matches.forEach(filePath => {
            const relative = path.relative(spec.fetchSpec, filePath)
            // Skip dotfiles and anything resolving outside the package.
            if (relative && relative[0] !== '.') {
              if (!pkg.bin) { pkg.bin = {} }
              pkg.bin[path.basename(relative)] = relative
            }
          })
        }).then(() => pkg)
      } else {
        return pkg
      }
    })
  },

  // As of npm@5, the npm installer doesn't pack + install directories: it just
  // creates symlinks. This code is here because `npm pack` still needs the
  // ability to create a tarball from a local directory.
  tarball (spec, opts) {
    const stream = through()
    this.manifest(spec, opts).then(mani => {
      return pipe(this.fromManifest(mani, spec, opts), stream)
    }).catch(err => stream.emit('error', err))
    return stream
  },

  // `directory` tarballs are generated in a very similar way to git tarballs.
  fromManifest (manifest, spec, opts) {
    const stream = through()
    packDir(manifest, manifest._resolved, manifest._resolved, stream, opts).catch(err => {
      stream.emit('error', err)
    })
    return stream
  }
})
|
74
website/node_modules/npm/node_modules/pacote/lib/fetchers/file.js
generated
vendored
Normal file
74
website/node_modules/npm/node_modules/pacote/lib/fetchers/file.js
generated
vendored
Normal file
@@ -0,0 +1,74 @@
|
||||
'use strict'
|
||||
|
||||
const BB = require('bluebird')
|
||||
|
||||
const cacache = require('cacache')
|
||||
const Fetcher = require('../fetch')
|
||||
const fs = require('fs')
|
||||
const pipe = BB.promisify(require('mississippi').pipe)
|
||||
const through = require('mississippi').through
|
||||
|
||||
const readFileAsync = BB.promisify(fs.readFile)
|
||||
const statAsync = BB.promisify(fs.stat)
|
||||
|
||||
// Files at or below this size are read fully into memory; larger ones
// are streamed.
const MAX_BULK_SIZE = 2 * 1024 * 1024 // 2MB

// `file` packages refer to local tarball files.
const fetchFile = module.exports = Object.create(null)

Fetcher.impl(fetchFile, {
  manifest (spec, opts) {
    // We can't do much here. `finalizeManifest` will take care of
    // calling `tarball` to fill out all the necessary details.
    return BB.resolve(null)
  },

  // All the heavy lifting for `file` packages is done here.
  // They're never cached. We just read straight out of the file.
  // TODO - maybe they *should* be cached?
  tarball (spec, opts) {
    const src = spec._resolved || spec.fetchSpec
    const stream = through()
    statAsync(src).then(stat => {
      if (spec._resolved) { stream.emit('manifest', spec) }
      if (stat.size <= MAX_BULK_SIZE) {
        // YAY LET'S DO THING IN BULK
        // Small file: read it all at once, optionally store it in the
        // cache, then emit integrity + data on the output stream.
        return readFileAsync(src).then(data => {
          if (opts.cache) {
            return cacache.put(
              opts.cache, `pacote:tarball:file:${src}`, data, {
                integrity: opts.integrity
              }
            ).then(integrity => ({data, integrity}))
          } else {
            return {data}
          }
        }).then(info => {
          if (info.integrity) { stream.emit('integrity', info.integrity) }
          stream.write(info.data, () => {
            stream.end()
          })
        })
      } else {
        // Large file: stream into the cache first (if configured), then
        // re-read from disk into the output stream.
        // NOTE(review): this path uses cache key `pacote:tarball:${src}`
        // while the bulk path above uses `pacote:tarball:file:${src}` --
        // the keys look inconsistent; confirm against upstream before
        // relying on cache hits across the two paths.
        let integrity
        const cacheWriter = !opts.cache
          ? BB.resolve(null)
          : (pipe(
            fs.createReadStream(src),
            cacache.put.stream(opts.cache, `pacote:tarball:${src}`, {
              integrity: opts.integrity
            }).on('integrity', d => { integrity = d })
          ))
        return cacheWriter.then(() => {
          if (integrity) { stream.emit('integrity', integrity) }
          return pipe(fs.createReadStream(src), stream)
        })
      }
    }, err => stream.emit('error', err))
    return stream
  },

  fromManifest (manifest, spec, opts) {
    return this.tarball(manifest || spec, opts)
  }
})
|
174
website/node_modules/npm/node_modules/pacote/lib/fetchers/git.js
generated
vendored
Normal file
174
website/node_modules/npm/node_modules/pacote/lib/fetchers/git.js
generated
vendored
Normal file
@@ -0,0 +1,174 @@
|
||||
'use strict'
|
||||
|
||||
const BB = require('bluebird')
|
||||
|
||||
const cacache = require('cacache')
|
||||
const cacheKey = require('../util/cache-key')
|
||||
const Fetcher = require('../fetch')
|
||||
const git = require('../util/git')
|
||||
const mkdirp = BB.promisify(require('mkdirp'))
|
||||
const pickManifest = require('npm-pick-manifest')
|
||||
const optCheck = require('../util/opt-check')
|
||||
const osenv = require('osenv')
|
||||
const packDir = require('../util/pack-dir')
|
||||
const PassThrough = require('stream').PassThrough
|
||||
const path = require('path')
|
||||
const pipe = BB.promisify(require('mississippi').pipe)
|
||||
const rimraf = BB.promisify(require('rimraf'))
|
||||
const uniqueFilename = require('unique-filename')
|
||||
|
||||
// `git` dependencies are fetched from git repositories and packed up.
const fetchGit = module.exports = Object.create(null)

Fetcher.impl(fetchGit, {
  manifest (spec, opts) {
    opts = optCheck(opts)
    if (spec.hosted && spec.hosted.getDefaultRepresentation() === 'shortcut') {
      // Shortcut specs (e.g. `user/repo`) try git -> https -> ssh in turn.
      return hostedManifest(spec, opts)
    } else {
      // If it's not a shortcut, don't do fallbacks.
      return plainManifest(spec.fetchSpec, spec, opts)
    }
  },

  // Stream the packed tarball, forwarding `manifest` and `integrity`
  // events from the underlying fromManifest stream.
  tarball (spec, opts) {
    opts = optCheck(opts)
    const stream = new PassThrough()
    this.manifest(spec, opts).then(manifest => {
      stream.emit('manifest', manifest)
      return pipe(
        this.fromManifest(
          manifest, spec, opts
        ).on('integrity', i => stream.emit('integrity', i)), stream
      )
    }, err => stream.emit('error', err))
    return stream
  },

  // Serve the packed directory from cache when possible; on cache miss,
  // clone the repo into a temp dir and pack it fresh.
  fromManifest (manifest, spec, opts) {
    opts = optCheck(opts)
    let streamError
    const stream = new PassThrough().on('error', e => { streamError = e })
    const cacheName = manifest._uniqueResolved || manifest._resolved || ''
    const cacheStream = (
      opts.cache &&
      cacache.get.stream(
        opts.cache, cacheKey('packed-dir', cacheName), opts
      ).on('integrity', i => stream.emit('integrity', i))
    )
    // NOTE(review): when `opts.cache` is falsy, `cacheStream` is falsy and
    // the `.pipe` below would throw -- presumably optCheck always supplies
    // a cache; confirm.
    cacheStream.pipe(stream)
    cacheStream.on('error', err => {
      if (err.code !== 'ENOENT') {
        return stream.emit('error', err)
      } else {
        // Cache miss: tell consumers to restart, then clone + pack.
        stream.emit('reset')
        return withTmp(opts, tmp => {
          if (streamError) { throw streamError }
          return cloneRepo(
            spec, manifest._repo, manifest._ref, manifest._rawRef, tmp, opts
          ).then(HEAD => {
            if (streamError) { throw streamError }
            manifest._resolved = spec.saveSpec.replace(/(:?#.*)?$/, `#${HEAD}`)
            manifest._uniqueResolved = manifest._resolved
            return packDir(manifest, manifest._uniqueResolved, tmp, stream, opts)
          })
        }).catch(err => stream.emit('error', err))
      }
    })
    return stream
  }
})
|
||||
|
||||
// Resolve a hosted shortcut spec by attempting each representation in
// order: git first, then https, then ssh. Each fallback only fires when
// the previous attempt failed AND the next representation exists.
function hostedManifest (spec, opts) {
  const attempt = url => plainManifest(url, spec, opts)
  return BB.resolve(null)
    .then(() => {
      const gitUrl = spec.hosted.git()
      if (!gitUrl) {
        throw new Error(`No git url for ${spec}`)
      }
      return attempt(gitUrl)
    })
    .catch(err => {
      const httpsUrl = spec.hosted.https()
      if (!httpsUrl) {
        throw err
      }
      return attempt(httpsUrl)
    })
    .catch(err => {
      const sshUrl = spec.hosted.sshurl()
      if (!sshUrl) {
        throw err
      }
      return attempt(sshUrl)
    })
}
|
||||
|
||||
// Resolve `spec` against a concrete git `repo` URL and build a minimal
// manifest describing the pinned commit. No package.json fields are
// included here -- downstream finalization fills those in.
function plainManifest (repo, spec, opts) {
  const rawRef = spec.gitCommittish || spec.gitRange
  return resolve(
    repo, spec, spec.name, opts
  ).then(ref => {
    if (ref) {
      // Remote lookup succeeded: pin the resolved sha into the saved spec.
      const resolved = spec.saveSpec.replace(/(?:#.*)?$/, `#${ref.sha}`)
      return {
        _repo: repo,
        _resolved: resolved,
        _spec: spec,
        _ref: ref,
        _rawRef: spec.gitCommittish || spec.gitRange,
        _uniqueResolved: resolved,
        _integrity: false,
        _shasum: false
      }
    } else {
      // We're SOL and need a full clone :(
      //
      // If we're confident enough that `rawRef` is a commit SHA,
      // then we can at least get `finalize-manifest` to cache its result.
      const resolved = spec.saveSpec.replace(/(?:#.*)?$/, rawRef ? `#${rawRef}` : '')
      return {
        _repo: repo,
        _rawRef: rawRef,
        // Only treat rawRef as resolvable when it looks like a full
        // 40-char hex sha; otherwise these stay falsy.
        _resolved: rawRef && rawRef.match(/^[a-f0-9]{40}$/) && resolved,
        _uniqueResolved: rawRef && rawRef.match(/^[a-f0-9]{40}$/) && resolved,
        _integrity: false,
        _shasum: false
      }
    }
  })
}
|
||||
|
||||
// Look up the remote refs for `url` and pick the ref matching `spec`:
// semver ranges go through npm-pick-manifest; plain committishes are
// matched against ref names or shas.
function resolve (url, spec, name, opts) {
  const isSemver = !!spec.gitRange
  return git.revs(url, opts).then(remoteRefs => {
    if (isSemver) {
      return pickManifest({
        versions: remoteRefs.versions,
        'dist-tags': remoteRefs['dist-tags'],
        name: name
      }, spec.gitRange, opts)
    }
    if (!remoteRefs) {
      return null
    }
    return BB.resolve(
      remoteRefs.refs[spec.gitCommittish] || remoteRefs.refs[remoteRefs.shas[spec.gitCommittish]]
    )
  })
}
|
||||
|
||||
// Run `cb` with a temporary directory, guaranteeing cleanup afterwards.
function withTmp (opts, cb) {
  if (opts.cache) {
    // cacache has a special facility for working in a tmp dir
    return cacache.tmp.withTmp(opts.cache, {tmpPrefix: 'git-clone'}, cb)
  } else {
    const tmpDir = path.join(osenv.tmpdir(), 'pacote-git-tmp')
    const tmpName = uniqueFilename(tmpDir, 'git-clone')
    // Bluebird disposer pattern: rimraf runs once `cb`'s promise settles,
    // whether it resolved or rejected.
    const tmp = mkdirp(tmpName).then(() => tmpName).disposer(rimraf)
    return BB.using(tmp, cb)
  }
}
|
||||
|
||||
// Only certain whitelisted hosted gits support shallow cloning
const SHALLOW_HOSTS = new Set(['github', 'gist', 'gitlab', 'bitbucket'])
// Clone `repo` at the wanted ref into `tmp`, preferring a cheap shallow
// clone when the ref was fully resolved and the host supports it.
function cloneRepo (spec, repo, resolvedRef, rawRef, tmp, opts) {
  const ref = resolvedRef ? resolvedRef.ref : rawRef
  const shallowOk = Boolean(
    resolvedRef && spec.hosted && SHALLOW_HOSTS.has(spec.hosted.type)
  )
  return shallowOk
    ? git.shallow(repo, ref, tmp, opts)
    : git.clone(repo, ref, tmp, opts)
}
|
3
website/node_modules/npm/node_modules/pacote/lib/fetchers/hosted.js
generated
vendored
Normal file
3
website/node_modules/npm/node_modules/pacote/lib/fetchers/hosted.js
generated
vendored
Normal file
@@ -0,0 +1,3 @@
|
||||
'use strict'

// Hosted specs (e.g. `github:user/repo`) are handled by the git fetcher.
module.exports = require('./git')
|
3
website/node_modules/npm/node_modules/pacote/lib/fetchers/range.js
generated
vendored
Normal file
3
website/node_modules/npm/node_modules/pacote/lib/fetchers/range.js
generated
vendored
Normal file
@@ -0,0 +1,3 @@
|
||||
'use strict'

// Semver-range specs share the generic registry fetcher.
module.exports = require('./registry')
|
39
website/node_modules/npm/node_modules/pacote/lib/fetchers/registry/check-warning-header.js
generated
vendored
Normal file
39
website/node_modules/npm/node_modules/pacote/lib/fetchers/registry/check-warning-header.js
generated
vendored
Normal file
@@ -0,0 +1,39 @@
|
||||
'use strict'
|
||||
|
||||
const LRU = require('lru-cache')
|
||||
|
||||
// Matches the HTTP Warning header shape: <code> <agent> "<message>" "<date>"
const WARNING_REGEXP = /^\s*(\d{3})\s+(\S+)\s+"(.*)"\s+"([^"]+)"/
// Registries already warned about -- each host warns at most once
// (LRU-bounded so the memo can't grow without limit).
const BAD_HOSTS = new LRU({ max: 50 })

module.exports = checkWarnings
// Inspect a registry response's Warning headers and surface stale-data
// conditions (code 199: host trouble; code 111: revalidation failed)
// through opts.log. No-op once a registry has already been flagged.
function checkWarnings (res, registry, opts) {
  if (res.headers.has('warning') && !BAD_HOSTS.has(registry)) {
    const warnings = {}
    res.headers.raw()['warning'].forEach(w => {
      const match = w.match(WARNING_REGEXP)
      if (match) {
        warnings[match[1]] = {
          code: match[1],
          host: match[2],
          message: match[3],
          date: new Date(match[4])
        }
      }
    })
    // Remember this registry so repeated responses stay quiet.
    BAD_HOSTS.set(registry, true)
    if (warnings['199']) {
      if (warnings['199'].message.match(/ENOTFOUND/)) {
        opts.log.warn('registry', `Using stale data from ${registry} because the host is inaccessible -- are you offline?`)
      } else {
        opts.log.warn('registry', `Unexpected warning for ${registry}: ${warnings['199'].message}`)
      }
    }
    if (warnings['111']) {
      // 111 Revalidation failed -- we're using stale data
      opts.log.warn(
        'registry',
        `Using stale package data from ${registry} due to a request error during revalidation.`
      )
    }
  }
}
|
109
website/node_modules/npm/node_modules/pacote/lib/fetchers/registry/fetch.js
generated
vendored
Normal file
109
website/node_modules/npm/node_modules/pacote/lib/fetchers/registry/fetch.js
generated
vendored
Normal file
@@ -0,0 +1,109 @@
|
||||
'use strict'
|
||||
|
||||
const BB = require('bluebird')
|
||||
const Buffer = require('safe-buffer').Buffer
|
||||
|
||||
const checkWarnings = require('./check-warning-header')
|
||||
const fetch = require('make-fetch-happen')
|
||||
const registryKey = require('./registry-key')
|
||||
const url = require('url')
|
||||
|
||||
module.exports = regFetch
|
||||
// Performs a single registry HTTP request through make-fetch-happen,
// translating pacote options into fetch options, surfacing registry
// notices/warnings, logging timing, and turning HTTP >= 400 into errors.
function regFetch (uri, registry, opts) {
  const started = Date.now()
  const fetchConfig = {
    agent: opts.agent,
    algorithms: opts.algorithms,
    cache: getCacheMode(opts),
    cacheManager: opts.cache,
    ca: opts.ca,
    cert: opts.cert,
    headers: getHeaders(uri, registry, opts),
    integrity: opts.integrity,
    key: opts.key,
    localAddress: opts.localAddress,
    maxSockets: opts.maxSockets,
    memoize: opts.memoize,
    noProxy: opts.noProxy,
    Promise: BB,
    proxy: opts.proxy,
    referer: opts.refer,
    retry: opts.retry,
    strictSSL: !!opts.strictSSL,
    timeout: opts.timeout,
    uid: opts.uid,
    gid: opts.gid
  }
  return fetch(uri, fetchConfig).then(res => {
    // Surface one-time registry notices, but not for cached replays.
    if (res.headers.has('npm-notice') && !res.headers.has('x-local-cache')) {
      opts.log.warn('notice', res.headers.get('npm-notice'))
    }
    checkWarnings(res, registry, opts)
    if (res.status < 400) {
      // Log timing once the response body is fully consumed.
      res.body.on('end', () => logRequest(uri, res, started, opts))
      return res
    }
    // HTTP error: build a descriptive error carrying the response details.
    const err = new Error(`${res.status} ${res.statusText}: ${
      opts.spec ? opts.spec : uri
    }`)
    err.code = `E${res.status}`
    err.uri = uri
    err.response = res
    err.spec = opts.spec
    logRequest(uri, res, started, opts)
    throw err
  })
}
|
||||
|
||||
// Emits one `http` log line for a completed fetch, noting elapsed time,
// retry attempt count (when > 1), and whether the response came from cache.
function logRequest (uri, res, startTime, opts) {
  const elapsed = Date.now() - startTime
  const attempts = res.headers.get('x-fetch-attempts')
  const retryNote = attempts && attempts > 1 ? ` attempt #${attempts}` : ''
  const cacheNote = res.headers.get('x-local-cache') ? ' (from cache)' : ''
  opts.log.http(
    'fetch',
    `GET ${res.status} ${uri} ${elapsed}ms${retryNote}${cacheNote}`
  )
}
|
||||
|
||||
// Maps pacote's offline/prefer-* flags onto make-fetch-happen cache modes.
// Precedence: offline > preferOffline > preferOnline > default.
function getCacheMode (opts) {
  if (opts.offline) {
    return 'only-if-cached'
  }
  if (opts.preferOffline) {
    return 'force-cache'
  }
  if (opts.preferOnline) {
    return 'no-cache'
  }
  return 'default'
}
|
||||
|
||||
// Builds the outgoing request headers for a registry fetch: npm telemetry
// headers, caller-supplied headers, and (when appropriate) an Authorization
// header derived from per-registry or global auth config.
function getHeaders (uri, registry, opts) {
  const reqHeaders = Object.assign({
    'npm-in-ci': opts.isFromCI,
    'npm-scope': opts.projectScope,
    'npm-session': opts.npmSession,
    'user-agent': opts.userAgent,
    'referer': opts.refer
  }, opts.headers)

  // check for auth settings specific to this registry
  const scopedAuth = opts.auth && opts.auth[registryKey(registry)]
  const auth = scopedAuth || opts.auth

  // If a tarball is hosted on a different place than the manifest, only send
  // credentials on `alwaysAuth`
  const shouldAuth = auth && (
    auth.alwaysAuth ||
    url.parse(uri).host === url.parse(registry).host
  )
  if (shouldAuth) {
    if (auth.token) {
      reqHeaders.authorization = `Bearer ${auth.token}`
    } else if (auth.username && auth.password) {
      const credentials = Buffer.from(
        `${auth.username}:${auth.password}`, 'utf8'
      ).toString('base64')
      reqHeaders.authorization = `Basic ${credentials}`
    } else if (auth._auth) {
      reqHeaders.authorization = `Basic ${auth._auth}`
    }
  }
  return reqHeaders
}
|
27
website/node_modules/npm/node_modules/pacote/lib/fetchers/registry/index.js
generated
vendored
Normal file
27
website/node_modules/npm/node_modules/pacote/lib/fetchers/registry/index.js
generated
vendored
Normal file
@@ -0,0 +1,27 @@
|
||||
'use strict'
|
||||
|
||||
const cacache = require('cacache')
|
||||
const Fetcher = require('../../fetch')
|
||||
const regManifest = require('./manifest')
|
||||
const regTarball = require('./tarball')
|
||||
|
||||
const fetchRegistry = module.exports = Object.create(null)

// Registry-backed implementation of the generic Fetcher interface.
Fetcher.impl(fetchRegistry, {
  // Resolve a manifest for `spec` from the registry packument.
  manifest (spec, opts) {
    return regManifest(spec, opts)
  },

  // Stream the package tarball for `spec`.
  tarball (spec, opts) {
    return regTarball(spec, opts)
  },

  // Stream a tarball directly from an already-resolved manifest.
  fromManifest (manifest, spec, opts) {
    return regTarball.fromManifest(manifest, spec, opts)
  },

  // Flush both cacache's and the manifest fetcher's in-memory caches.
  clearMemoized () {
    cacache.clearMemoized()
    regManifest.clearMemoized()
  }
})
|
148
website/node_modules/npm/node_modules/pacote/lib/fetchers/registry/manifest.js
generated
vendored
Normal file
148
website/node_modules/npm/node_modules/pacote/lib/fetchers/registry/manifest.js
generated
vendored
Normal file
@@ -0,0 +1,148 @@
|
||||
'use strict'
|
||||
|
||||
const BB = require('bluebird')
|
||||
|
||||
const fetch = require('./fetch')
|
||||
const LRU = require('lru-cache')
|
||||
const optCheck = require('../../util/opt-check')
|
||||
const pickManifest = require('npm-pick-manifest')
|
||||
const pickRegistry = require('./pick-registry')
|
||||
const ssri = require('ssri')
|
||||
const url = require('url')
|
||||
|
||||
// Corgis are cute. 🐕🐶
|
||||
const CORGI_DOC = 'application/vnd.npm.install-v1+json; q=1.0, application/json; q=0.8, */*'
|
||||
const JSON_DOC = 'application/json'
|
||||
|
||||
module.exports = manifest
|
||||
// Fetches and returns an annotated manifest for `spec` from the registry
// chosen by the current options.
function manifest (spec, opts) {
  opts = optCheck(opts)

  const registry = pickRegistry(spec, opts)
  const uri = metadataUrl(registry, spec.escapedName)

  return getManifest(uri, registry, spec, opts).then(
    rawManifest => annotateManifest(uri, registry, rawManifest)
  )
}
|
||||
|
||||
// Joins a registry base URL and an (escaped) package name into the
// packument metadata URL.
function metadataUrl (registry, name) {
  // Ensure a trailing slash so url.resolve appends rather than replaces.
  const base = registry.endsWith('/') ? registry : registry + '/'
  return url.resolve(base, name)
}
|
||||
|
||||
// Fetches the packument and picks the manifest matching `spec.fetchSpec`.
// If the pick fails against stale cached data (ETARGET) and we're allowed
// online, flips to online-preferred and retries once against the registry.
function getManifest (uri, registry, spec, opts) {
  return fetchPackument(uri, spec, registry, opts).then(packument => {
    try {
      return pickManifest(packument, spec.fetchSpec, {
        defaultTag: opts.defaultTag,
        includeDeprecated: opts.includeDeprecated
      })
    } catch (err) {
      if (err.code !== 'ETARGET' || !packument._cached || opts.offline) {
        throw err
      }
      opts.log.silly(
        'registry:manifest',
        `no matching version for ${spec.name}@${spec.fetchSpec} in the cache. Forcing revalidation`
      )
      // NOTE: mutates opts so the refetch bypasses the stale cache entry.
      opts.preferOffline = false
      opts.preferOnline = true
      return fetchPackument(uri, spec, registry, opts).then(fresh => {
        return pickManifest(fresh, spec.fetchSpec, {
          defaultTag: opts.defaultTag
        })
      })
    }
  })
}
|
||||
|
||||
// TODO - make this an opt
|
||||
const MEMO = new LRU({
|
||||
length: m => m._contentLength,
|
||||
max: 200 * 1024 * 1024, // 200MB
|
||||
maxAge: 30 * 1000 // 30s
|
||||
})
|
||||
|
||||
module.exports.clearMemoized = clearMemoized
|
||||
// Drops every memoized packument from the module-global cache.
function clearMemoized () {
  MEMO.reset()
}
|
||||
|
||||
// Fetches the packument (registry metadata document) for `spec`, consulting
// the memoization cache first (unless the caller prefers online data) and
// memoizing the parsed result afterwards.
function fetchPackument (uri, spec, registry, opts) {
  const mem = pickMem(opts)
  if (mem && !opts.preferOnline && mem.has(uri)) {
    return BB.resolve(mem.get(uri))
  }

  return fetch(uri, registry, Object.assign({
    headers: {
      'pacote-req-type': 'packument',
      // BUG FIX: this previously interpolated `manifest.name`, but `manifest`
      // here is this module's exported function, so the id was always
      // "registry:manifest". Use the requested package's name instead.
      'pacote-pkg-id': `registry:${spec.name}`,
      accept: opts.fullMetadata ? JSON_DOC : CORGI_DOC
    },
    spec
  }, opts, {
    // Force integrity to null: we never check integrity hashes for manifests
    integrity: null
  })).then(res => res.json().then(packument => {
    // BUG FIX: previously `decodeURIComponent(res.headers.has(...))`, which
    // stringified the boolean ("true"/"false") -- both truthy strings, so
    // `_cached` could never be falsy. Store the actual boolean.
    packument._cached = res.headers.has('x-local-cache')
    packument._contentLength = +res.headers.get('content-length')
    // NOTE - we need to call pickMem again because proxy
    // objects get reused!
    const mem = pickMem(opts)
    if (mem) {
      mem.set(uri, packument)
    }
    return packument
  }))
}
|
||||
|
||||
// Tiny adapter giving Map-like get/set semantics over a plain object.
class PlainObjectCache {
  get (key) { return this.obj[key] }
  set (key, val) { this.obj[key] = val }
}

// This object is used synchronously and immediately, so
// we can safely reuse it instead of consing up new ones
const SHARED_PROXY = new PlainObjectCache()

// Selects the memoization cache for this request:
// - no explicit preference -> the module-global MEMO cache
// - a cache-like object (has get/set) -> used directly
// - a plain object -> wrapped in the shared adapter
// - anything else truthy -> no memoization
function pickMem (opts) {
  if (!opts || !opts.memoize) {
    return MEMO
  } else if (opts.memoize.get && opts.memoize.set) {
    return opts.memoize
  } else if (typeof opts.memoize === 'object') {
    SHARED_PROXY.obj = opts.memoize
    return SHARED_PROXY
  } else {
    return null
  }
}
|
||||
|
||||
// Copies integrity/shasum/tarball info from `manifest.dist` onto the private
// underscore fields the installer consumes, guessing a default tarball URL
// (and recording a warning) when dist.tarball is missing.
function annotateManifest (uri, registry, manifest) {
  const dist = manifest.dist
  const shasum = dist && dist.shasum
  manifest._integrity = dist && dist.integrity
  manifest._shasum = shasum
  if (!manifest._integrity && shasum) {
    // Use legacy dist.shasum field if available.
    manifest._integrity = ssri.fromHex(shasum, 'sha1').toString()
  }
  manifest._resolved = dist && dist.tarball
  if (!manifest._resolved) {
    const err = new Error(
      `Manifest for ${manifest.name}@${manifest.version} from ${uri} is missing a tarball url (pkg.dist.tarball). Guessing a default.`
    )
    err.code = 'ENOTARBALL'
    err.manifest = manifest
    if (!manifest._warnings) { manifest._warnings = [] }
    manifest._warnings.push(err.message)
    // Fall back to the conventional registry tarball location.
    manifest._resolved =
      `${registry}/${manifest.name}/-/${manifest.name}-${manifest.version}.tgz`
  }
  return manifest
}
|
17
website/node_modules/npm/node_modules/pacote/lib/fetchers/registry/pick-registry.js
generated
vendored
Normal file
17
website/node_modules/npm/node_modules/pacote/lib/fetchers/registry/pick-registry.js
generated
vendored
Normal file
@@ -0,0 +1,17 @@
|
||||
'use strict'
|
||||
|
||||
module.exports = pickRegistry
|
||||
// Chooses the registry URL for a spec. Precedence: a registry mapped to the
// spec's own scope, then one mapped to the configured scope (normalized to a
// leading '@'), then the default registry.
function pickRegistry (spec, opts) {
  let registry = spec.scope && opts.scopeTargets[spec.scope]

  if (!registry && opts.scope) {
    const scopeName = opts.scope[0] === '@' ? opts.scope : '@' + opts.scope
    registry = opts.scopeTargets[scopeName]
  }

  return registry || opts.registry
}
|
16
website/node_modules/npm/node_modules/pacote/lib/fetchers/registry/registry-key.js
generated
vendored
Normal file
16
website/node_modules/npm/node_modules/pacote/lib/fetchers/registry/registry-key.js
generated
vendored
Normal file
@@ -0,0 +1,16 @@
|
||||
'use strict'
|
||||
|
||||
const url = require('url')
|
||||
|
||||
// Called a nerf dart in the main codebase. Used as a "safe"
|
||||
// key when fetching registry info from config.
|
||||
module.exports = registryKey
|
||||
// Converts a registry URL into its "nerf dart" config key: protocol and
// auth are stripped, host and path are kept so distinct registries on one
// host stay distinct.
function registryKey (registry) {
  const parsed = url.parse(registry)
  const stripped = url.format({
    host: parsed.host,
    pathname: parsed.pathname,
    slashes: parsed.slashes
  })
  return url.resolve(stripped, '.')
}
|
103
website/node_modules/npm/node_modules/pacote/lib/fetchers/registry/tarball.js
generated
vendored
Normal file
103
website/node_modules/npm/node_modules/pacote/lib/fetchers/registry/tarball.js
generated
vendored
Normal file
@@ -0,0 +1,103 @@
|
||||
'use strict'
|
||||
|
||||
const BB = require('bluebird')
|
||||
|
||||
const fetch = require('./fetch')
|
||||
const manifest = require('./manifest')
|
||||
const optCheck = require('../../util/opt-check')
|
||||
const PassThrough = require('stream').PassThrough
|
||||
const pickRegistry = require('./pick-registry')
|
||||
const ssri = require('ssri')
|
||||
const url = require('url')
|
||||
|
||||
module.exports = tarball
|
||||
// Returns a stream of the package tarball for `spec`. When the caller
// already knows the resolved registry URL, a minimal fake manifest skips the
// metadata lookup entirely; otherwise the real manifest is fetched first.
function tarball (spec, opts) {
  opts = optCheck(opts)
  const registry = pickRegistry(spec, opts)
  const stream = new PassThrough()
  const canShortcut = (
    opts.resolved &&
    // spec.type === 'version' &&
    opts.resolved.indexOf(registry) === 0
  )
  let mani
  if (canShortcut) {
    // fakeChild is a shortcut to avoid looking up a manifest!
    mani = BB.resolve({
      name: spec.name,
      version: spec.fetchSpec,
      _integrity: opts.integrity,
      _resolved: opts.resolved,
      _fakeChild: true
    })
  } else {
    // We can't trust opts.resolved if it's going to a separate host.
    mani = manifest(spec, opts)
  }

  mani.then(mani => {
    if (!mani._fakeChild) {
      stream.emit('manifest', mani)
    }
    const fetchStream = fromManifest(mani, spec, opts)
      .on('integrity', i => stream.emit('integrity', i))
    fetchStream.on('error', err => stream.emit('error', err))
    fetchStream.pipe(stream)
    return null
  }).catch(err => stream.emit('error', err))
  return stream
}
|
||||
|
||||
module.exports.fromManifest = fromManifest
|
||||
// Streams the tarball named by an already-resolved manifest, verifying
// integrity with the manifest's own algorithm (or sha1 when none is known)
// and re-emitting cache integrity/errors on the returned stream.
function fromManifest (manifest, spec, opts) {
  opts = optCheck(opts)
  opts.scope = spec.scope || opts.scope
  const stream = new PassThrough()
  const registry = pickRegistry(spec, opts)
  const uri = getTarballUrl(spec, registry, manifest, opts)
  const algorithm = manifest._integrity
    ? ssri.parse(manifest._integrity).pickAlgorithm()
    : 'sha1'
  fetch(uri, registry, Object.assign({
    headers: {
      'pacote-req-type': 'tarball',
      'pacote-pkg-id': `registry:${manifest.name}@${uri}`
    },
    integrity: manifest._integrity,
    algorithms: [algorithm],
    spec
  }, opts))
    .then(res => {
      const hash = res.headers.get('x-local-cache-hash')
      if (hash) {
        stream.emit('integrity', decodeURIComponent(hash))
      }
      res.body.on('error', err => stream.emit('error', err))
      res.body.pipe(stream)
      return null
    })
    .catch(err => stream.emit('error', err))
  return stream
}
|
||||
|
||||
// Computes the tarball URL for a manifest, rewriting its protocol (and port)
// to match the registry's when both live on the same hostname.
function getTarballUrl (spec, registry, mani, opts) {
  const regParts = url.parse(registry)
  const tarParts = url.parse(mani._resolved)
  // https://github.com/npm/npm/pull/9471
  //
  // TL;DR: Some alternative registries host tarballs on http and packuments
  // on https, and vice-versa. There's also a case where people who can't use
  // SSL to access the npm registry, for example, might use
  // `--registry=http://registry.npmjs.org/`. In this case, we need to
  // rewrite `tarball` to match the protocol.
  //
  if (regParts.hostname === tarParts.hostname && regParts.protocol !== tarParts.protocol) {
    tarParts.protocol = regParts.protocol
    // Ports might be same host different protocol!
    if (regParts.port !== tarParts.port) {
      delete tarParts.host
      tarParts.port = regParts.port
    }
    delete tarParts.href
  }
  return url.format(tarParts)
}
|
30
website/node_modules/npm/node_modules/pacote/lib/fetchers/remote.js
generated
vendored
Normal file
30
website/node_modules/npm/node_modules/pacote/lib/fetchers/remote.js
generated
vendored
Normal file
@@ -0,0 +1,30 @@
|
||||
'use strict'
|
||||
|
||||
const BB = require('bluebird')
|
||||
|
||||
const Fetcher = require('../fetch')
|
||||
const fetchRegistry = require('./registry')
|
||||
|
||||
const fetchRemote = module.exports = Object.create(null)

// Fetcher for plain remote tarball URLs (not registry-hosted packages).
Fetcher.impl(fetchRemote, {
  manifest (spec, opts) {
    // We can't get the manifest for a remote tarball until
    // we extract the tarball itself.
    // `finalize-manifest` takes care of this process of extracting
    // a manifest based on ./tarball.js
    return BB.resolve(null)
  },

  // Stream the tarball by synthesizing a minimal manifest for the URL.
  tarball (spec, opts) {
    const uri = spec._resolved || spec.fetchSpec
    return fetchRegistry.fromManifest({
      _resolved: uri,
      _integrity: opts.integrity
    }, spec, opts)
  },

  fromManifest (manifest, spec, opts) {
    return this.tarball(manifest || spec, opts)
  }
})
|
3
website/node_modules/npm/node_modules/pacote/lib/fetchers/tag.js
generated
vendored
Normal file
3
website/node_modules/npm/node_modules/pacote/lib/fetchers/tag.js
generated
vendored
Normal file
@@ -0,0 +1,3 @@
|
||||
'use strict'
|
||||
|
||||
module.exports = require('./registry')
|
3
website/node_modules/npm/node_modules/pacote/lib/fetchers/version.js
generated
vendored
Normal file
3
website/node_modules/npm/node_modules/pacote/lib/fetchers/version.js
generated
vendored
Normal file
@@ -0,0 +1,3 @@
|
||||
'use strict'
|
||||
|
||||
module.exports = require('./registry')
|
253
website/node_modules/npm/node_modules/pacote/lib/finalize-manifest.js
generated
vendored
Normal file
253
website/node_modules/npm/node_modules/pacote/lib/finalize-manifest.js
generated
vendored
Normal file
@@ -0,0 +1,253 @@
|
||||
'use strict'
|
||||
|
||||
const BB = require('bluebird')
|
||||
|
||||
const cacache = require('cacache')
|
||||
const cacheKey = require('./util/cache-key')
|
||||
const fetchFromManifest = require('./fetch').fromManifest
|
||||
const finished = require('./util/finished')
|
||||
const minimatch = require('minimatch')
|
||||
const normalize = require('normalize-package-data')
|
||||
const optCheck = require('./util/opt-check')
|
||||
const path = require('path')
|
||||
const pipe = BB.promisify(require('mississippi').pipe)
|
||||
const ssri = require('ssri')
|
||||
const tar = require('tar')
|
||||
|
||||
// `finalizeManifest` takes as input the various kinds of manifests that
|
||||
// manifest handlers ('lib/fetchers/*.js#manifest()') return, and makes sure
|
||||
// they are:
|
||||
//
|
||||
// * filled out with any required data that the handler couldn't fill in
|
||||
// * formatted consistently
|
||||
// * cached so we don't have to repeat this work more than necessary
|
||||
//
|
||||
// The biggest thing this package might do is do a full tarball extraction in
|
||||
// order to find missing bits of metadata required by the npm installer. For
|
||||
// example, it will fill in `_shrinkwrap`, `_integrity`, and other details that
|
||||
// the plain manifest handlers would require a tarball to fill out. If a
|
||||
// handler returns everything necessary, this process is skipped.
|
||||
//
|
||||
// If we get to the tarball phase, the corresponding tarball handler for the
|
||||
// requested type will be invoked and the entire tarball will be read from the
|
||||
// stream.
|
||||
//
|
||||
module.exports = finalizeManifest
|
||||
// Produces a fully-populated Manifest for `pkg`/`spec`, consulting the
// cacache-backed finalized-manifest cache first, falling back to extracting
// missing details from the tarball, and writing the result back to cache.
function finalizeManifest (pkg, spec, opts) {
  const key = finalKey(pkg, spec)
  opts = optCheck(opts)

  const cachedManifest = (opts.cache && key && !opts.preferOnline && !opts.fullMetadata)
    ? cacache.get.info(opts.cache, key, opts)
    : BB.resolve(null)

  return cachedManifest.then(cached => {
    if (cached && cached.metadata.manifest) {
      return new Manifest(cached.metadata.manifest)
    }
    return tarballedProps(pkg, spec, opts).then(props => {
      return pkg && pkg.name
        ? new Manifest(pkg, props, opts.fullMetadata)
        : new Manifest(props, null, opts.fullMetadata)
    }).then(manifest => {
      // A git manifest may only become keyable after tarball extraction.
      const storeKey = key || finalKey(manifest, spec)
      if (!opts.cache || !storeKey) {
        return manifest
      }
      opts.metadata = {
        id: manifest._id,
        manifest,
        type: 'finalized-manifest'
      }
      return cacache.put(
        opts.cache, storeKey, '.', opts
      ).then(() => manifest)
    })
  })
}
|
||||
|
||||
module.exports.Manifest = Manifest
|
||||
// Normalized manifest record consumed by the npm installer. Fields missing
// from `pkg` are backfilled from `fromTarball` (data recovered by extracting
// the package tarball). With `fullMetadata`, all of `pkg` is copied through.
function Manifest (pkg, fromTarball, fullMetadata) {
  fromTarball = fromTarball || {}
  if (fullMetadata) {
    Object.assign(this, pkg)
  }
  this.name = pkg.name
  this.version = pkg.version
  this.engines = pkg.engines || fromTarball.engines
  this.cpu = pkg.cpu || fromTarball.cpu
  this.os = pkg.os || fromTarball.os
  this.dependencies = pkg.dependencies || {}
  this.optionalDependencies = pkg.optionalDependencies || {}
  this.devDependencies = pkg.devDependencies || {}
  // Accept both historical spellings of the bundled-deps field.
  const bundled = (
    pkg.bundledDependencies ||
    pkg.bundleDependencies ||
    false
  )
  this.bundleDependencies = bundled
  this.peerDependencies = pkg.peerDependencies || {}
  this.deprecated = pkg.deprecated || false

  // These depend entirely on each handler
  this._resolved = pkg._resolved

  // Not all handlers (or registries) provide these out of the box,
  // and if they don't, we need to extract and read the tarball ourselves.
  // These are details required by the installer.
  this._integrity = pkg._integrity || fromTarball._integrity || null
  this._shasum = pkg._shasum || fromTarball._shasum || null
  this._shrinkwrap = pkg._shrinkwrap || fromTarball._shrinkwrap || null
  this.bin = pkg.bin || fromTarball.bin || null

  if (this.bin && Array.isArray(this.bin)) {
    // Code yanked from read-package-json: turn an array of files into a
    // {basename: path} map rooted at directories.bin.
    const binDir = (pkg.directories && pkg.directories.bin) || '.'
    this.bin = this.bin.reduce((mapped, file) => {
      if (file && file.charAt(0) !== '.') {
        mapped[path.basename(file)] = path.join(binDir, file)
      }
      return mapped
    }, {})
  }

  this._id = null

  // TODO - freezing and inextensibility pending npm changes. See test suite.
  // Object.preventExtensions(this)
  normalize(this)

  // normalize-package-data attaches a readme we never asked for; drop it.
  delete this.readme

  // Object.freeze(this)
}
|
||||
|
||||
// Some things aren't filled in by standard manifest fetching.
|
||||
// If this function needs to do its work, it will grab the
|
||||
// package tarball, extract it, and take whatever it needs
|
||||
// from the stream.
|
||||
// Some things aren't filled in by standard manifest fetching. If this
// function needs to do its work, it grabs the package tarball, extracts it,
// and pulls whatever is missing (shrinkwrap, bin mapping, hashes, or the
// whole package.json) from the stream. Returns a props object to merge.
function tarballedProps (pkg, spec, opts) {
  const needsShrinkwrap = (!pkg || (
    pkg._hasShrinkwrap !== false &&
    !pkg._shrinkwrap
  ))
  const needsBin = !!(!pkg || (
    !pkg.bin &&
    pkg.directories &&
    pkg.directories.bin
  ))
  const needsIntegrity = !pkg || (!pkg._integrity && pkg._integrity !== false)
  const needsShasum = !pkg || (!pkg._shasum && pkg._shasum !== false)
  const needsHash = needsIntegrity || needsShasum
  const needsManifest = !pkg || !pkg.name
  const needsExtract = needsShrinkwrap || needsBin || needsManifest
  if (!needsShrinkwrap && !needsBin && !needsHash && !needsManifest) {
    // Everything the installer needs is already present.
    return BB.resolve({})
  }

  opts = optCheck(opts)
  const tarStream = fetchFromManifest(pkg, spec, opts)
  const extracted = needsExtract && new tar.Parse()
  // Run every needed extraction over the same tarball stream in parallel.
  return BB.join(
    needsShrinkwrap && jsonFromStream('npm-shrinkwrap.json', extracted),
    needsManifest && jsonFromStream('package.json', extracted),
    needsBin && getPaths(extracted),
    needsHash && ssri.fromStream(tarStream, {algorithms: ['sha1', 'sha512']}),
    needsExtract && pipe(tarStream, extracted),
    (sr, mani, paths, hash) => {
      if (needsManifest && !mani) {
        const err = new Error(`Non-registry package missing package.json: ${spec}.`)
        err.code = 'ENOPACKAGEJSON'
        throw err
      }
      const extraProps = mani || {}
      delete extraProps._resolved
      // drain out the rest of the tarball
      tarStream.resume()
      // if we have directories.bin, we need to collect any matching files
      // to add to bin
      if (paths && paths.length) {
        const dirBin = mani
          ? (mani && mani.directories && mani.directories.bin)
          : (pkg && pkg.directories && pkg.directories.bin)
        if (dirBin) {
          extraProps.bin = {}
          paths.forEach(filePath => {
            if (minimatch(filePath, dirBin + '/**')) {
              const relative = path.relative(dirBin, filePath)
              if (relative && relative[0] !== '.') {
                extraProps.bin[path.basename(relative)] = path.join(dirBin, relative)
              }
            }
          })
        }
      }
      return Object.assign(extraProps, {
        _shrinkwrap: sr,
        _resolved: (mani && mani._resolved) ||
          (pkg && pkg._resolved) ||
          spec.fetchSpec,
        _integrity: needsIntegrity && hash && hash.sha512 && hash.sha512[0].toString(),
        _shasum: needsShasum && hash && hash.sha1 && hash.sha1[0].hexDigest()
      })
    }
  )
}
|
||||
|
||||
// Watches a tar.Parse stream for an entry named `filename` (ignoring the
// leading package directory) and resolves with its JSON-parsed contents.
// Resolves undefined if the stream closes without a matching entry.
function jsonFromStream (filename, dataStream) {
  return BB.fromNode(cb => {
    dataStream.on('error', cb)
    dataStream.on('close', cb)
    dataStream.on('entry', entry => {
      // Strip the top-level "package/" (or equivalent) directory.
      const filePath = entry.header.path.replace(/[^/]+\//, '')
      if (filePath !== filename) {
        entry.resume()
        return
      }
      let buf = ''
      entry.on('error', cb)
      finished(entry).then(
        () => {
          try {
            cb(null, JSON.parse(buf))
          } catch (parseErr) {
            cb(parseErr)
          }
        },
        streamErr => cb(streamErr)
      )
      entry.on('data', chunk => { buf += chunk })
    })
  })
}
|
||||
|
||||
// Collects every entry path in a tar.Parse stream (with the top-level
// package directory stripped), resolving with the list on stream close.
function getPaths (dataStream) {
  return BB.fromNode(cb => {
    const collected = []
    dataStream.on('error', cb)
    dataStream.on('close', () => cb(null, collected))
    dataStream.on('entry', entry => {
      const name = entry.header.path.replace(/[^/]+\//, '')
      entry.resume()
      collected.push(name)
    })
  })
}
|
||||
|
||||
// Derives the cacache key for a finalized manifest, or a falsy value when
// the package can't be uniquely identified yet.
function finalKey (pkg, spec) {
  // git packages have a unique, identifiable id, but no tar sha
  if (pkg && pkg._uniqueResolved) {
    return cacheKey(`${spec.type}-manifest`, pkg._uniqueResolved)
  }
  return (
    pkg && pkg._integrity &&
    cacheKey(
      `${spec.type}-manifest`,
      `${pkg._resolved}:${ssri.stringify(pkg._integrity)}`
    )
  )
}
|
6
website/node_modules/npm/node_modules/pacote/lib/util/cache-key.js
generated
vendored
Normal file
6
website/node_modules/npm/node_modules/pacote/lib/util/cache-key.js
generated
vendored
Normal file
@@ -0,0 +1,6 @@
|
||||
'use strict'
|
||||
|
||||
module.exports = cacheKey
|
||||
// Builds the namespaced, colon-delimited cacache key:
// "pacote:<type>:<identifier>".
function cacheKey (type, identifier) {
  return `pacote:${type}:${identifier}`
}
|
17
website/node_modules/npm/node_modules/pacote/lib/util/finished.js
generated
vendored
Normal file
17
website/node_modules/npm/node_modules/pacote/lib/util/finished.js
generated
vendored
Normal file
@@ -0,0 +1,17 @@
|
||||
'use strict'
|
||||
|
||||
const BB = require('bluebird')
|
||||
|
||||
module.exports = function (child, hasExitCode = false) {
|
||||
return BB.fromNode(function (cb) {
|
||||
child.on('error', cb)
|
||||
child.on(hasExitCode ? 'close' : 'end', function (exitCode) {
|
||||
if (exitCode === undefined || exitCode === 0) {
|
||||
cb()
|
||||
} else {
|
||||
let err = new Error('exited with error code: ' + exitCode)
|
||||
cb(err)
|
||||
}
|
||||
})
|
||||
})
|
||||
}
|
265
website/node_modules/npm/node_modules/pacote/lib/util/git.js
generated
vendored
Normal file
265
website/node_modules/npm/node_modules/pacote/lib/util/git.js
generated
vendored
Normal file
@@ -0,0 +1,265 @@
|
||||
'use strict'
|
||||
|
||||
const BB = require('bluebird')
|
||||
|
||||
const cp = require('child_process')
|
||||
const execFileAsync = BB.promisify(cp.execFile, {
|
||||
multiArgs: true
|
||||
})
|
||||
const finished = require('./finished')
|
||||
const LRU = require('lru-cache')
|
||||
const optCheck = require('./opt-check')
|
||||
const osenv = require('osenv')
|
||||
const path = require('path')
|
||||
const pinflight = require('promise-inflight')
|
||||
const promiseRetry = require('promise-retry')
|
||||
const uniqueFilename = require('unique-filename')
|
||||
const which = BB.promisify(require('which'))
|
||||
const semver = require('semver')
|
||||
|
||||
const GOOD_ENV_VARS = new Set([
|
||||
'GIT_ASKPASS',
|
||||
'GIT_EXEC_PATH',
|
||||
'GIT_PROXY_COMMAND',
|
||||
'GIT_SSH',
|
||||
'GIT_SSH_COMMAND',
|
||||
'GIT_SSL_CAINFO',
|
||||
'GIT_SSL_NO_VERIFY'
|
||||
])
|
||||
|
||||
// Error-output fragments that indicate a transient network/server failure
// worth retrying (joined into one alternation regex below).
const GIT_TRANSIENT_ERRORS = [
  'remote error: Internal Server Error',
  'The remote end hung up unexpectedly',
  'Connection timed out',
  'Operation timed out',
  'Failed to connect to .* Timed out',
  'Connection reset by peer',
  'SSL_ERROR_SYSCALL',
  'The requested URL returned error: 503'
].join('|')

const GIT_TRANSIENT_ERROR_RE = new RegExp(GIT_TRANSIENT_ERRORS)

// True when git's error output looks transient and the command should be
// retried.
function shouldRetry (error) {
  return GIT_TRANSIENT_ERROR_RE.test(error)
}
|
||||
|
||||
const GIT_ = 'GIT_'
let GITENV
// Builds (once, then memoizes) the environment for spawned git processes:
// non-interactive askpass, a throwaway template dir, the caller's non-GIT_*
// variables, and only the explicitly allow-listed GIT_* variables.
function gitEnv () {
  if (GITENV) { return GITENV }
  const tmpDir = path.join(osenv.tmpdir(), 'pacote-git-template-tmp')
  const tmpName = uniqueFilename(tmpDir, 'git-clone')
  GITENV = {
    GIT_ASKPASS: 'echo',
    GIT_TEMPLATE_DIR: tmpName
  }
  for (const key of Object.keys(process.env)) {
    if (GOOD_ENV_VARS.has(key) || !key.startsWith(GIT_)) {
      GITENV[key] = process.env[key]
    }
  }
  return GITENV
}

// Resolve the git binary once at load time; stays undefined when git is
// not installed (callers check and error out later).
let GITPATH
try {
  GITPATH = which.sync('git')
} catch (e) {}
|
||||
|
||||
module.exports.clone = fullClone
|
||||
// Mirror-clones `repo` into `target/.git`, re-initializes a work tree,
// checks out `committish` (default HEAD), updates submodules, and resolves
// with the checked-out HEAD sha.
function fullClone (repo, committish, target, opts) {
  opts = optCheck(opts)
  const args = ['clone', '--mirror', '-q', repo, path.join(target, '.git')]
  if (process.platform === 'win32') {
    args.push('--config', 'core.longpaths=true')
  }
  return execGit(args, {cwd: target})
    .then(() => execGit(['init'], {cwd: target}))
    .then(() => execGit(['checkout', committish || 'HEAD'], {cwd: target}))
    .then(() => updateSubmodules(target, opts))
    .then(() => headSha(target, opts))
}
|
||||
|
||||
module.exports.shallow = shallowClone
|
||||
// Depth-1 clones `repo` (optionally a specific branch) into `target`,
// updates submodules, and resolves with the checked-out HEAD sha.
function shallowClone (repo, branch, target, opts) {
  opts = optCheck(opts)
  const args = ['clone', '--depth=1', '-q']
  if (branch) {
    args.push('-b', branch)
  }
  args.push(repo, target)
  if (process.platform === 'win32') {
    args.push('--config', 'core.longpaths=true')
  }
  return execGit(args, {
    cwd: target
  }, opts)
    .then(() => updateSubmodules(target, opts))
    .then(() => headSha(target, opts))
}
|
||||
|
||||
// Recursively initializes and updates every submodule of a local clone.
function updateSubmodules (localRepo, opts) {
  return execGit(
    ['submodule', 'update', '-q', '--init', '--recursive'],
    {cwd: localRepo},
    opts
  )
}
|
||||
|
||||
// Resolves with the trimmed sha of HEAD in a local repository.
function headSha (repo, opts) {
  opts = optCheck(opts)
  return execGit(['rev-parse', '--revs-only', 'HEAD'], {cwd: repo}, opts)
    .spread(stdout => stdout.trim())
}
|
||||
|
||||
const CARET_BRACES = '^{}'
|
||||
const REVS = new LRU({
|
||||
max: 100,
|
||||
maxAge: 5 * 60 * 1000
|
||||
})
|
||||
module.exports.revs = revs
|
||||
// Lists a remote repo's heads and tags via `git ls-remote`, returning
// {versions, 'dist-tags', refs, shas}. Results are cached in REVS and
// concurrent lookups for the same repo are coalesced via promise-inflight.
function revs (repo, opts) {
  opts = optCheck(opts)
  const cached = REVS.get(repo)
  if (cached) {
    return BB.resolve(cached)
  }
  return pinflight(`ls-remote:${repo}`, () => {
    return spawnGit(['ls-remote', '-h', '-t', repo], {
      env: gitEnv()
    }, opts).then((stdout) => {
      return stdout.split('\n').reduce((revs, line) => {
        const split = line.split(/\s+/, 2)
        if (split.length < 2) { return revs }
        const sha = split[0].trim()
        // Strip the refs/{heads,tags}/ prefix, keeping the bare ref name.
        const ref = split[1].trim().match(/(?:refs\/[^/]+\/)?(.*)/)[1]
        if (!ref) { return revs } // ???
        if (ref.endsWith(CARET_BRACES)) { return revs } // refs/tags/x^{} crap
        const type = refType(line)
        const doc = {sha, ref, type}

        revs.refs[ref] = doc
        // We can check out shallow clones on specific SHAs if we have a ref
        if (revs.shas[sha]) {
          revs.shas[sha].push(ref)
        } else {
          revs.shas[sha] = [ref]
        }

        // Semver-looking tags also become installable "versions".
        if (type === 'tag') {
          const match = ref.match(/v?(\d+\.\d+\.\d+(?:[-+].+)?)$/)
          if (match && semver.valid(match[1], true)) {
            revs.versions[semver.clean(match[1], true)] = doc
          }
        }

        return revs
      }, {versions: {}, 'dist-tags': {}, refs: {}, shas: {}})
    }, err => {
      err.message = `Error while executing:\n${GITPATH} ls-remote -h -t ${repo}\n\n${err.stderr}\n${err.message}`
      throw err
    }).then(revs => {
      if (revs.refs.HEAD) {
        const HEAD = revs.refs.HEAD
        Object.keys(revs.versions).forEach(version => {
          // BUG FIX: this previously tested `version.sha` where `version` is
          // a plain string key (so `.sha` was always undefined) and assigned
          // the string itself as the dist-tag. Look up the version doc and
          // compare/store that instead, so dist-tags actually get populated.
          const doc = revs.versions[version]
          if (doc.sha === HEAD.sha) {
            revs['dist-tags'].HEAD = doc
            if (!revs.refs.latest) {
              revs['dist-tags'].latest = revs.refs.HEAD
            }
          }
        })
      }
      REVS.set(repo, revs)
      return revs
    })
  })
}
|
||||
|
||||
module.exports._exec = execGit
// Run git with `gitArgs` via execFile, retrying transient failures
// according to opts.retry. Resolves with execFile's [stdout, stderr] pair.
function execGit (gitArgs, gitOpts, opts) {
  opts = optCheck(opts)
  return checkGit().then(gitPath => {
    return promiseRetry((retry, attempt) => {
      if (attempt !== 1) {
        opts.log.silly('pacote', 'Retrying git command: ' + gitArgs.join(' ') + ' attempt # ' + attempt)
      }
      return execFileAsync(gitPath, gitArgs, mkOpts(gitOpts, opts)).catch(err => {
        if (!shouldRetry(err)) {
          throw err
        }
        retry(err)
      })
    }, opts.retry)
  })
}
|
||||
|
||||
module.exports._spawn = spawnGit
// Like execGit, but uses cp.spawn and accumulates stdout/stderr in memory,
// resolving with the collected stdout string. Transient failures are
// retried per opts.retry; note that retry eligibility here is judged from
// the stderr *text* (not the error object, unlike execGit above).
function spawnGit (gitArgs, gitOpts, opts) {
  opts = optCheck(opts)
  return checkGit().then(gitPath => {
    return promiseRetry((retry, number) => {
      if (number !== 1) {
        opts.log.silly('pacote', 'Retrying git command: ' + gitArgs.join(' ') + ' attempt # ' + number)
      }
      const child = cp.spawn(gitPath, gitArgs, mkOpts(gitOpts, opts))

      let stdout = ''
      let stderr = ''
      child.stdout.on('data', d => { stdout += d })
      child.stderr.on('data', d => { stderr += d })

      // NOTE(review): `finished` presumably rejects when the child exits
      // nonzero -- it is defined earlier in this file; confirm before
      // changing the error path.
      return finished(child, true).catch(err => {
        if (shouldRetry(stderr)) {
          retry(err)
        } else {
          // Attach captured stderr so callers (e.g. revs) can surface it.
          err.stderr = stderr
          throw err
        }
      }).then(() => {
        return stdout
      })
    }, opts.retry)
  })
}
|
||||
|
||||
// Build child_process options for a git invocation: a scrubbed git
// environment, optional numeric uid/gid from opts, then any caller
// overrides from `_gitOpts` (which win on conflict).
function mkOpts (_gitOpts, opts) {
  const result = {env: gitEnv()}
  const uid = +opts.uid
  if (uid && !isNaN(uid)) {
    result.uid = uid
  }
  const gid = +opts.gid
  if (gid && !isNaN(gid)) {
    result.gid = gid
  }
  return Object.assign(result, _gitOpts)
}
|
||||
|
||||
// Resolve to the git binary path found at module load, or reject with an
// ENOGIT error when git was not found on $PATH.
function checkGit () {
  if (GITPATH) {
    return BB.resolve(GITPATH)
  }
  const err = new Error('No git binary found in $PATH')
  err.code = 'ENOGIT'
  return BB.reject(err)
}
|
||||
|
||||
const REFS_TAGS = 'refs/tags/'
const REFS_HEADS = 'refs/heads/'
const HEAD = 'HEAD'
// Classify an ls-remote line by the kind of ref it mentions:
// 'tag', 'branch', 'head' (the symbolic HEAD), or 'other'.
function refType (ref) {
  if (ref.indexOf(REFS_TAGS) !== -1) {
    return 'tag'
  }
  if (ref.indexOf(REFS_HEADS) !== -1) {
    return 'branch'
  }
  return ref.endsWith(HEAD) ? 'head' : 'other'
}
|
64
website/node_modules/npm/node_modules/pacote/lib/util/opt-check.js
generated
vendored
Normal file
64
website/node_modules/npm/node_modules/pacote/lib/util/opt-check.js
generated
vendored
Normal file
@@ -0,0 +1,64 @@
|
||||
'use strict'
|
||||
|
||||
const pkg = require('../../package.json')
|
||||
const silentlog = require('./silentlog')
|
||||
|
||||
// Normalized container for every option pacote recognizes. Constructing
// one from raw user `opts` applies defaults; the `_isPacoteOptions` marker
// flags the object as already normalized.
function PacoteOptions (opts) {
  opts = opts || {}
  this._isPacoteOptions = true
  this.agent = opts.agent
  this.annotate = opts.annotate
  this.auth = opts.auth
  this.scopeTargets = opts.scopeTargets || {}
  this.defaultTag = opts.defaultTag || 'latest'
  this.cache = opts.cache
  this.ca = opts.ca
  this.cert = opts.cert
  this.integrity = opts.integrity
  this.key = opts.key
  this.localAddress = opts.localAddress
  // Default to a logger whose methods are all no-ops.
  this.log = opts.log || silentlog
  this.memoize = opts.memoize
  this.maxSockets = opts.maxSockets || 10
  this.offline = opts.offline
  this.preferOffline = opts.preferOffline
  this.proxy = opts.proxy
  this.noProxy = opts.noProxy
  this.registry = opts.registry || 'https://registry.npmjs.org'
  this.resolved = opts.resolved
  this.retry = opts.retry // for npm-registry-client
  this.scope = opts.scope
  this.userAgent = opts.userAgent || `${pkg.name}@${pkg.version}/node@${process.version}+${process.arch} (${process.platform})`
  this.where = opts.where
  this.preferOnline = opts.preferOnline
  this.strictSSL = !!opts.strictSSL
  // Heuristic CI detection: explicit flag or well-known CI env vars.
  this.isFromCI = !!(
    opts.isFromCI ||
    process.env['CI'] === 'true' ||
    process.env['TDDIUM'] ||
    process.env['JENKINS_URL'] ||
    process.env['bamboo.buildKey']
  )
  this.npmSession = opts.npmSession
  this.refer = opts.referer || opts.refer
  this.projectScope = opts.projectScope
  this.fullMetadata = opts.fullMetadata
  this.alwaysAuth = opts.alwaysAuth
  // Deprecated packages are included unless explicitly disabled.
  this.includeDeprecated = opts.includeDeprecated == null
    ? true
    : opts.includeDeprecated

  // Optional custom tarball producer for directory specs (see pack-dir.js).
  this.dirPacker = opts.dirPacker || null

  this.uid = opts.uid
  this.gid = opts.gid

  this.dmode = opts.dmode
  this.fmode = opts.fmode
  this.umask = opts.umask
}
|
||||
|
||||
module.exports = optCheck
/**
 * Normalize a raw options object into a fresh PacoteOptions instance.
 * @param {Object} [opts] raw caller-supplied options
 * @returns {PacoteOptions}
 */
function optCheck (opts) {
  const normalized = new PacoteOptions(opts)
  return normalized
}
|
44
website/node_modules/npm/node_modules/pacote/lib/util/pack-dir.js
generated
vendored
Normal file
44
website/node_modules/npm/node_modules/pacote/lib/util/pack-dir.js
generated
vendored
Normal file
@@ -0,0 +1,44 @@
|
||||
'use strict'
|
||||
|
||||
const BB = require('bluebird')
|
||||
|
||||
const cacache = require('cacache')
|
||||
const cacheKey = require('./cache-key')
|
||||
const optCheck = require('./opt-check')
|
||||
const packlist = require('npm-packlist')
|
||||
const pipe = BB.promisify(require('mississippi').pipe)
|
||||
const tar = require('tar')
|
||||
|
||||
module.exports = packDir
// Pack directory `dir` into a tarball stream piped into `target`. When a
// cache is configured, the same tarball is simultaneously written into the
// cache under a 'packed-dir' key, and the computed integrity is re-emitted
// on `target` as an 'integrity' event.
function packDir (manifest, label, dir, target, opts) {
  opts = optCheck(opts)

  // opts.dirPacker lets callers substitute their own tarball producer;
  // otherwise one is built from npm-packlist + tar (mkPacker below).
  const packer = opts.dirPacker
    ? BB.resolve(opts.dirPacker(manifest, dir))
    : mkPacker(dir)

  if (!opts.cache) {
    return packer.then(packer => pipe(packer, target))
  } else {
    const cacher = cacache.put.stream(
      opts.cache, cacheKey('packed-dir', label), opts
    ).on('integrity', i => {
      target.emit('integrity', i)
    })
    // Pipe the single packer stream into both the cache and the target.
    return packer.then(packer => BB.all([
      pipe(packer, cacher),
      pipe(packer, target)
    ]))
  }
}
|
||||
|
||||
// Default directory packer: compute the publishable file list with
// npm-packlist, then produce a gzipped, portable tarball stream with all
// entries rooted under 'package/'.
function mkPacker (dir) {
  return packlist({path: dir}).then(files => tar.c({
    cwd: dir,
    gzip: true,
    portable: true,
    prefix: 'package/'
  }, files))
}
|
13
website/node_modules/npm/node_modules/pacote/lib/util/silentlog.js
generated
vendored
Normal file
13
website/node_modules/npm/node_modules/pacote/lib/util/silentlog.js
generated
vendored
Normal file
@@ -0,0 +1,13 @@
|
||||
'use strict'

// A log object with the usual npmlog-style method names where every method
// is a no-op; used as the default when callers supply no opts.log.
const noop = Function.prototype
const silent = {}
for (const level of ['error', 'warn', 'info', 'verbose', 'silly', 'http', 'pause', 'resume']) {
  silent[level] = noop
}
module.exports = silent
|
135
website/node_modules/npm/node_modules/pacote/lib/with-tarball-stream.js
generated
vendored
Normal file
135
website/node_modules/npm/node_modules/pacote/lib/with-tarball-stream.js
generated
vendored
Normal file
@@ -0,0 +1,135 @@
|
||||
'use strict'
|
||||
|
||||
const BB = require('bluebird')
|
||||
|
||||
const cacache = require('cacache')
|
||||
const fetch = require('./fetch.js')
|
||||
const fs = require('fs')
|
||||
const npa = require('npm-package-arg')
|
||||
const optCheck = require('./util/opt-check.js')
|
||||
const path = require('path')
|
||||
const ssri = require('ssri')
|
||||
const retry = require('promise-retry')
|
||||
|
||||
const statAsync = BB.promisify(fs.stat)
|
||||
|
||||
const RETRIABLE_ERRORS = new Set(['ENOENT', 'EINTEGRITY', 'Z_DATA_ERROR'])
|
||||
|
||||
module.exports = withTarballStream
// Obtain a tarball stream for `spec` and hand it to `streamHandler`,
// trying progressively more expensive sources:
//   1. a `file:` resolved shortcut (local tarball on disk),
//   2. cache content addressed by opts.integrity,
//   3. a full fetch by manifest (retried once when a cache is present).
// Resolves with streamHandler's result; EINTEGRITY failures get a
// clarifying message appended on the way out.
function withTarballStream (spec, opts, streamHandler) {
  opts = optCheck(opts)
  spec = npa(spec, opts.where)

  // First, we check for a file: resolved shortcut
  const tryFile = (
    !opts.preferOnline &&
    opts.integrity &&
    opts.resolved &&
    opts.resolved.startsWith('file:')
  )
    ? BB.try(() => {
      // NOTE - this is a special shortcut! Packages installed as files do not
      // have a `resolved` field -- this specific case only occurs when you have,
      // say, a git dependency or a registry dependency that you've packaged into
      // a local file, and put that file: spec in the `resolved` field.
      opts.log.silly('pacote', `trying ${spec} by local file: ${opts.resolved}`)
      const file = path.resolve(opts.where || '.', opts.resolved.substr(5))
      return statAsync(file)
        .then(() => {
          // Verify the on-disk tarball against opts.integrity as it streams.
          const verifier = ssri.integrityStream({integrity: opts.integrity})
          const stream = fs.createReadStream(file)
            .on('error', err => verifier.emit('error', err))
            .pipe(verifier)
          return streamHandler(stream)
        })
        .catch(err => {
          if (err.code === 'EINTEGRITY') {
            opts.log.warn('pacote', `EINTEGRITY while extracting ${spec} from ${file}.You will have to recreate the file.`)
            opts.log.verbose('pacote', `EINTEGRITY for ${spec}: ${err.message}`)
          }
          throw err
        })
    })
    : BB.reject(Object.assign(new Error('no file!'), {code: 'ENOENT'}))

  const tryDigest = tryFile
    .catch(err => {
      if (
        opts.preferOnline ||
        !opts.cache ||
        !opts.integrity ||
        !RETRIABLE_ERRORS.has(err.code)
      ) {
        throw err
      } else {
        opts.log.silly('tarball', `trying ${spec} by hash: ${opts.integrity}`)
        const stream = cacache.get.stream.byDigest(
          opts.cache, opts.integrity, opts
        )
        // Replay an early 'error' to 'error' listeners attached later, so
        // the handler can't miss it.
        stream.once('error', err => stream.on('newListener', (ev, l) => {
          if (ev === 'error') { l(err) }
        }))
        return streamHandler(stream)
          .catch(err => {
            if (err.code === 'EINTEGRITY' || err.code === 'Z_DATA_ERROR') {
              opts.log.warn('tarball', `cached data for ${spec} (${opts.integrity}) seems to be corrupted. Refreshing cache.`)
              // Evict the corrupt entry, then rethrow so the next stage runs.
              return cleanUpCached(opts.cache, opts.integrity, opts)
                .then(() => { throw err })
            } else {
              throw err
            }
          })
      }
    })

  const trySpec = tryDigest
    .catch(err => {
      if (!RETRIABLE_ERRORS.has(err.code)) {
        // If it's not one of our retriable errors, bail out and give up.
        throw err
      } else {
        opts.log.silly(
          'tarball',
          `no local data for ${spec}. Extracting by manifest.`
        )
        return BB.resolve(retry((tryAgain, attemptNum) => {
          const tardata = fetch.tarball(spec, opts)
          if (!opts.resolved) {
            // Capture resolution info as it arrives, mutating the shared
            // opts so a retry (and the caller) can reuse it.
            tardata.on('manifest', m => {
              opts.resolved = m._resolved
            })
            tardata.on('integrity', i => {
              opts.integrity = i
            })
          }
          return BB.try(() => streamHandler(tardata))
            .catch(err => {
              // Retry once if we have a cache, to clear up any weird conditions.
              // Don't retry network errors, though -- make-fetch-happen has already
              // taken care of making sure we're all set on that front.
              if (opts.cache && err.code && !err.code.match(/^E\d{3}$/)) {
                if (err.code === 'EINTEGRITY' || err.code === 'Z_DATA_ERROR') {
                  opts.log.warn('tarball', `tarball data for ${spec} (${opts.integrity}) seems to be corrupted. Trying one more time.`)
                }
                return cleanUpCached(opts.cache, err.sri, opts)
                  .then(() => tryAgain(err))
              } else {
                throw err
              }
            })
        }, {retries: 1}))
      }
    })

  return trySpec
    .catch(err => {
      if (err.code === 'EINTEGRITY') {
        err.message = `Verification failed while extracting ${spec}:\n${err.message}`
      }
      throw err
    })
}
|
||||
|
||||
// Drop the cached content for the given integrity from the cache so a
// corrupt tarball can be re-fetched.
function cleanUpCached (cache, sri, opts) {
  return cacache.rm.content(cache, sri, opts)
}
|
38
website/node_modules/npm/node_modules/pacote/manifest.js
generated
vendored
Normal file
38
website/node_modules/npm/node_modules/pacote/manifest.js
generated
vendored
Normal file
@@ -0,0 +1,38 @@
|
||||
'use strict'
|
||||
|
||||
const fetchManifest = require('./lib/fetch').manifest
|
||||
const finalizeManifest = require('./lib/finalize-manifest')
|
||||
const optCheck = require('./lib/util/opt-check')
|
||||
const pinflight = require('promise-inflight')
|
||||
const npa = require('npm-package-arg')
|
||||
|
||||
module.exports = manifest
// Fetch and finalize the manifest for `spec`, coalescing concurrent
// requests for the same spec/config via promise-inflight.
function manifest (spec, opts) {
  opts = optCheck(opts)
  spec = npa(spec, opts.where)

  // Key inflight deduplication on everything that affects the result.
  const label = [
    spec.name,
    spec.saveSpec || spec.fetchSpec,
    spec.type,
    opts.cache,
    opts.registry,
    opts.scope
  ].join(':')
  return pinflight(label, () => {
    const startTime = Date.now()
    return fetchManifest(spec, opts)
      .then(rawManifest => finalizeManifest(rawManifest, spec, opts))
      .then(manifest => {
        if (opts.annotate) {
          // Record npm-style provenance fields on the manifest.
          manifest._from = spec.saveSpec || spec.raw
          manifest._requested = spec
          manifest._spec = spec.raw
          manifest._where = opts.where
        }
        const elapsedTime = Date.now() - startTime
        opts.log.silly('pacote', `${spec.type} manifest for ${spec.name}@${spec.saveSpec || spec.fetchSpec} fetched in ${elapsedTime}ms`)
        return manifest
      })
  })
}
|
117
website/node_modules/npm/node_modules/pacote/package.json
generated
vendored
Normal file
117
website/node_modules/npm/node_modules/pacote/package.json
generated
vendored
Normal file
@@ -0,0 +1,117 @@
|
||||
{
|
||||
"_args": [
|
||||
[
|
||||
"pacote@8.1.6",
|
||||
"/Users/rebecca/code/npm"
|
||||
]
|
||||
],
|
||||
"_from": "pacote@8.1.6",
|
||||
"_id": "pacote@8.1.6",
|
||||
"_inBundle": true,
|
||||
"_integrity": "sha512-wTOOfpaAQNEQNtPEx92x9Y9kRWVu45v583XT8x2oEV2xRB74+xdqMZIeGW4uFvAyZdmSBtye+wKdyyLaT8pcmw==",
|
||||
"_location": "/npm/pacote",
|
||||
"_phantomChildren": {},
|
||||
"_requested": {
|
||||
"type": "version",
|
||||
"registry": true,
|
||||
"raw": "pacote@8.1.6",
|
||||
"name": "pacote",
|
||||
"escapedName": "pacote",
|
||||
"rawSpec": "8.1.6",
|
||||
"saveSpec": null,
|
||||
"fetchSpec": "8.1.6"
|
||||
},
|
||||
"_requiredBy": [
|
||||
"/npm",
|
||||
"/npm/libcipm"
|
||||
],
|
||||
"_resolved": "https://registry.npmjs.org/pacote/-/pacote-8.1.6.tgz",
|
||||
"_spec": "8.1.6",
|
||||
"_where": "/Users/rebecca/code/npm",
|
||||
"author": {
|
||||
"name": "Kat Marchán",
|
||||
"email": "kzm@sykosomatic.org"
|
||||
},
|
||||
"bugs": {
|
||||
"url": "https://github.com/zkat/pacote/issues"
|
||||
},
|
||||
"contributors": [
|
||||
{
|
||||
"name": "Charlotte Spencer",
|
||||
"email": "charlottelaspencer@gmail.com"
|
||||
},
|
||||
{
|
||||
"name": "Rebecca Turner",
|
||||
"email": "me@re-becca.org"
|
||||
}
|
||||
],
|
||||
"dependencies": {
|
||||
"bluebird": "^3.5.1",
|
||||
"cacache": "^11.0.2",
|
||||
"get-stream": "^3.0.0",
|
||||
"glob": "^7.1.2",
|
||||
"lru-cache": "^4.1.3",
|
||||
"make-fetch-happen": "^4.0.1",
|
||||
"minimatch": "^3.0.4",
|
||||
"minipass": "^2.3.3",
|
||||
"mississippi": "^3.0.0",
|
||||
"mkdirp": "^0.5.1",
|
||||
"normalize-package-data": "^2.4.0",
|
||||
"npm-package-arg": "^6.1.0",
|
||||
"npm-packlist": "^1.1.10",
|
||||
"npm-pick-manifest": "^2.1.0",
|
||||
"osenv": "^0.1.5",
|
||||
"promise-inflight": "^1.0.1",
|
||||
"promise-retry": "^1.1.1",
|
||||
"protoduck": "^5.0.0",
|
||||
"rimraf": "^2.6.2",
|
||||
"safe-buffer": "^5.1.2",
|
||||
"semver": "^5.5.0",
|
||||
"ssri": "^6.0.0",
|
||||
"tar": "^4.4.3",
|
||||
"unique-filename": "^1.1.0",
|
||||
"which": "^1.3.0"
|
||||
},
|
||||
"description": "JavaScript package downloader",
|
||||
"devDependencies": {
|
||||
"nock": "^9.2.6",
|
||||
"npmlog": "^4.1.2",
|
||||
"nyc": "^11.8.0",
|
||||
"require-inject": "^1.4.3",
|
||||
"standard": "^11.0.1",
|
||||
"standard-version": "^4.4.0",
|
||||
"tacks": "^1.2.6",
|
||||
"tap": "^12.0.1",
|
||||
"tar-stream": "^1.6.1",
|
||||
"weallbehave": "^1.2.0",
|
||||
"weallcontribute": "^1.0.7"
|
||||
},
|
||||
"files": [
|
||||
"*.js",
|
||||
"lib"
|
||||
],
|
||||
"homepage": "https://github.com/zkat/pacote#readme",
|
||||
"keywords": [
|
||||
"packages",
|
||||
"npm",
|
||||
"git"
|
||||
],
|
||||
"license": "MIT",
|
||||
"main": "index.js",
|
||||
"name": "pacote",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "git+https://github.com/zkat/pacote.git"
|
||||
},
|
||||
"scripts": {
|
||||
"postrelease": "npm publish && git push --follow-tags",
|
||||
"prerelease": "npm t",
|
||||
"pretest": "standard",
|
||||
"release": "standard-version -s",
|
||||
"test": "nyc --all -- tap -J test/*.js",
|
||||
"test-docker": "docker run -it --rm --name pacotest -v \"$PWD\":/tmp -w /tmp node:latest npm test",
|
||||
"update-coc": "weallbehave -o . && git add CODE_OF_CONDUCT.md && git commit -m 'docs(coc): updated CODE_OF_CONDUCT.md'",
|
||||
"update-contrib": "weallcontribute -o . && git add CONTRIBUTING.md && git commit -m 'docs(contributing): updated CONTRIBUTING.md'"
|
||||
},
|
||||
"version": "8.1.6"
|
||||
}
|
64
website/node_modules/npm/node_modules/pacote/prefetch.js
generated
vendored
Normal file
64
website/node_modules/npm/node_modules/pacote/prefetch.js
generated
vendored
Normal file
@@ -0,0 +1,64 @@
|
||||
'use strict'
|
||||
|
||||
const BB = require('bluebird')
|
||||
|
||||
const cacache = require('cacache')
|
||||
const finished = BB.promisify(require('mississippi').finished)
|
||||
const optCheck = require('./lib/util/opt-check')
|
||||
const npa = require('npm-package-arg')
|
||||
|
||||
module.exports = prefetch
// Deprecated cache-warming entry point; pacote.tarball() supersedes it.
// Resolves with a small summary object describing what (if anything) was
// fetched.
function prefetch (spec, opts) {
  opts = optCheck(opts)
  spec = npa(spec, opts.where)
  opts.log.warn('prefetch', 'pacote.prefetch() is deprecated. Please use pacote.tarball() instead.')
  const startTime = Date.now()
  if (!opts.cache) {
    opts.log.info('prefetch', 'skipping prefetch: no cache provided')
    return BB.resolve({spec})
  }
  if (!opts.integrity || opts.preferOnline) {
    opts.log.silly('prefetch', `no integrity hash provided for ${spec} - fetching by manifest`)
    return prefetchByManifest(startTime, spec, opts)
  }
  opts.log.silly('prefetch', 'checking if', opts.integrity, 'is already cached')
  return cacache.get.hasContent(opts.cache, opts.integrity).then(info => {
    if (!info) {
      return prefetchByManifest(startTime, spec, opts)
    }
    opts.log.silly('prefetch', `content already exists for ${spec} (${Date.now() - startTime}ms)`)
    return {
      spec,
      integrity: info.integrity,
      size: info.size,
      byDigest: true
    }
  })
}
|
||||
|
||||
// `./lib/fetch` is required lazily on first use rather than at module load.
// NOTE(review): presumably to defer its load cost (or avoid a require
// cycle) -- confirm before changing.
let fetch
// Warm the cache by streaming the tarball for `spec` to nowhere, capturing
// the 'manifest' and 'integrity' events emitted along the way.
function prefetchByManifest (start, spec, opts) {
  let manifest
  let integrity
  return BB.resolve().then(() => {
    if (!fetch) {
      fetch = require('./lib/fetch')
    }
    const stream = fetch.tarball(spec, opts)
    if (!stream) { return }
    // Drain the stream; only the caching side effect matters here.
    stream.on('data', function () {})
    stream.on('manifest', m => { manifest = m })
    stream.on('integrity', i => { integrity = i })
    return finished(stream)
  }).then(() => {
    opts.log.silly('prefetch', `${spec} done in ${Date.now() - start}ms`)
    return {
      manifest,
      spec,
      integrity: integrity || (manifest && manifest._integrity),
      byDigest: false
    }
  })
}
|
67
website/node_modules/npm/node_modules/pacote/tarball.js
generated
vendored
Normal file
67
website/node_modules/npm/node_modules/pacote/tarball.js
generated
vendored
Normal file
@@ -0,0 +1,67 @@
|
||||
'use strict'
|
||||
|
||||
const BB = require('bluebird')
|
||||
|
||||
const fs = require('fs')
|
||||
const getStream = require('get-stream')
|
||||
const mkdirp = BB.promisify(require('mkdirp'))
|
||||
const npa = require('npm-package-arg')
|
||||
const optCheck = require('./lib/util/opt-check.js')
|
||||
const PassThrough = require('stream').PassThrough
|
||||
const path = require('path')
|
||||
const rimraf = BB.promisify(require('rimraf'))
|
||||
const withTarballStream = require('./lib/with-tarball-stream.js')
|
||||
|
||||
module.exports = tarball
// Fetch the tarball for `spec` and resolve with its entire contents as a
// single Buffer.
function tarball (spec, opts) {
  opts = optCheck(opts)
  spec = npa(spec, opts.where)
  const buffered = stream => getStream.buffer(stream)
  return withTarballStream(spec, opts, buffered)
}
|
||||
|
||||
module.exports.stream = tarballStream
// Return a PassThrough stream of the tarball for `spec`. Because
// withTarballStream may retry the underlying fetch with a fresh source
// stream, this tracks whether any bytes (or an error) have already reached
// `output`; once the consumer has seen data, a retry cannot safely restart
// the stream, so the handler aborts instead of piping again.
function tarballStream (spec, opts) {
  opts = optCheck(opts)
  spec = npa(spec, opts.where)
  const output = new PassThrough()
  let hasTouchedOutput = false
  let lastError = null
  withTarballStream(spec, opts, stream => {
    if (hasTouchedOutput && lastError) {
      // A retry after a failure that already surfaced on `output`.
      throw lastError
    } else if (hasTouchedOutput) {
      // Retried after data was already emitted -- too late to restart.
      throw new Error('abort, abort!')
    } else {
      return new BB((resolve, reject) => {
        stream.on('error', reject)
        output.on('error', reject)
        output.on('error', () => { hasTouchedOutput = true })
        output.on('finish', resolve)
        stream.pipe(output)
        stream.once('data', () => { hasTouchedOutput = true })
      }).catch(err => {
        lastError = err
        throw err
      })
    }
  })
  .catch(err => output.emit('error', err))
  return output
}
|
||||
|
||||
module.exports.toFile = tarballToFile
// Fetch the tarball for `spec` and write it to `dest`, creating parent
// directories and replacing any existing file. Resolves when the write
// stream closes.
function tarballToFile (spec, dest, opts) {
  opts = optCheck(opts)
  spec = npa(spec, opts.where)
  return mkdirp(path.dirname(dest)).then(() => {
    return withTarballStream(spec, opts, stream => {
      return rimraf(dest).then(() => {
        return new BB((resolve, reject) => {
          const writer = fs.createWriteStream(dest)
          stream.on('error', reject)
          writer.on('error', reject)
          writer.on('close', resolve)
          stream.pipe(writer)
        })
      })
    })
  })
}
|
Reference in New Issue
Block a user