Mirror of https://github.com/titanscouting/tra-analysis.git, synced 2025-09-06 15:07:21 +00:00

Commit: push all website files

website/node_modules/npm/lib/search/all-package-metadata.js (new file, 313 lines, generated, vendored)

@@ -0,0 +1,313 @@
'use strict'

var fs = require('graceful-fs')
var path = require('path')
var mkdir = require('mkdirp')
var chownr = require('chownr')
var npm = require('../npm.js')
var log = require('npmlog')
var cacheFile = require('npm-cache-filename')
var correctMkdir = require('../utils/correct-mkdir.js')
var mapToRegistry = require('../utils/map-to-registry.js')
var jsonstream = require('JSONStream')
var writeStreamAtomic = require('fs-write-stream-atomic')
var ms = require('mississippi')
var sortedUnionStream = require('sorted-union-stream')
var once = require('once')
var gunzip = require('../utils/gunzip-maybe')

// Returns a sorted stream of all package metadata. Internally, takes care of
// maintaining its metadata cache and making partial or full remote requests,
// according to staleness, validity, etc.
//
// The local cache must hold certain invariants:
// 1. It must be a proper JSON object
// 2. It must have its keys lexically sorted
// 3. The first entry must be `_updated` with a millisecond timestamp as a val.
// 4. It must include all entries that exist in the metadata endpoint as of
//    the value in `_updated`
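//
// For reference, a cache file satisfying these invariants would look like
// this (names and timestamp hypothetical, entries abbreviated). Note that
// `_updated` sorts lexically before lowercase package names, which is how
// invariants 2 and 3 can hold at the same time:
//
//   {
//     "_updated": 1504140932101,
//     "aardvark-framework": { "name": "aardvark-framework", "versions": { "1.0.0": {} } },
//     "zebra-utils": { "name": "zebra-utils", "versions": { "2.1.0": {} } }
//   }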
module.exports = allPackageMetadata
function allPackageMetadata (staleness) {
  var stream = ms.through.obj()

  mapToRegistry('-/all', npm.config, function (er, uri, auth) {
    if (er) return stream.emit('error', er)

    var cacheBase = cacheFile(npm.config.get('cache'))(uri)
    var cachePath = path.join(cacheBase, '.cache.json')

    createEntryStream(cachePath, uri, auth, staleness, function (err, entryStream, latest, newEntries) {
      if (err) return stream.emit('error', err)
      log.silly('all-package-metadata', 'entry stream created')
      if (entryStream && newEntries) {
        createCacheWriteStream(cachePath, latest, function (err, writeStream) {
          if (err) return stream.emit('error', err)
          log.silly('all-package-metadata', 'output stream created')
          ms.pipeline.obj(entryStream, writeStream, stream)
        })
      } else if (entryStream) {
        ms.pipeline.obj(entryStream, stream)
      } else {
        stream.emit('error', new Error('No search sources available'))
      }
    })
  })
  return stream
}
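
// A minimal consumption sketch (the 600-second staleness below is an
// arbitrary example value):
//
//   allPackageMetadata(600)
//     .on('data', function (pkg) { console.log(pkg.name) })
//     .on('error', function (err) { console.error('metadata stream failed:', err) })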

// Creates a stream of the latest available package metadata.
// Metadata will come from a combination of the local cache and remote data.
module.exports._createEntryStream = createEntryStream
function createEntryStream (cachePath, uri, auth, staleness, cb) {
  createCacheEntryStream(cachePath, function (err, cacheStream, cacheLatest) {
    cacheLatest = cacheLatest || 0
    if (err) {
      log.warn('', 'Failed to read search cache. Rebuilding')
      log.silly('all-package-metadata', 'cache read error: ', err)
    }
    createEntryUpdateStream(uri, auth, staleness, cacheLatest, function (err, updateStream, updatedLatest) {
      updatedLatest = updatedLatest || 0
      var latest = updatedLatest || cacheLatest
      if (!cacheStream && !updateStream) {
        return cb(new Error('No search sources available'))
      }
      if (err) {
        log.warn('', 'Search data request failed, search might be stale')
        log.silly('all-package-metadata', 'update request error: ', err)
      }
      if (cacheStream && updateStream) {
        // Deduped, unioned, sorted stream from the combination of both.
        cb(null,
          createMergedStream(cacheStream, updateStream),
          latest,
          !!updatedLatest)
      } else {
        // Either one works if one or the other failed
        cb(null, cacheStream || updateStream, latest, !!updatedLatest)
      }
    })
  })
}

// Merges `a` and `b` into one stream, dropping duplicates in favor of entries
// in `b`. Both input streams should already be individually sorted, and the
// returned output stream will have semantics resembling the merge step of a
// plain old merge sort.
module.exports._createMergedStream = createMergedStream
function createMergedStream (a, b) {
  linkStreams(a, b)
  return sortedUnionStream(b, a, function (pkg) { return pkg.name })
}
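
// To illustrate the merge semantics with hypothetical entries:
//
//   cache (a):  [{name: 'a', version: '1.0.0'}, {name: 'b', version: '1.0.0'}]
//   update (b): [{name: 'b', version: '2.0.0'}, {name: 'c', version: '1.0.0'}]
//   merged:     a@1.0.0, b@2.0.0 (the update entry wins the duplicate), c@1.0.0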

// Reads the local index and returns a stream that spits out package data.
module.exports._createCacheEntryStream = createCacheEntryStream
function createCacheEntryStream (cacheFile, cb) {
  log.verbose('all-package-metadata', 'creating entry stream from local cache')
  log.verbose('all-package-metadata', cacheFile)
  fs.stat(cacheFile, function (err, stat) {
    if (err) return cb(err)
    // TODO - This isn't very helpful if `cacheFile` is empty or just `{}`
    var entryStream = ms.pipeline.obj(
      fs.createReadStream(cacheFile),
      jsonstream.parse('*'),
      // I believe this passthrough is necessary cause `jsonstream` returns
      // weird custom streams that behave funny sometimes.
      ms.through.obj()
    )
    extractUpdated(entryStream, 'cached-entry-stream', cb)
  })
}

// Stream of entry updates from the server. If `latest` is `0`, streams the
// entire metadata object from the registry.
module.exports._createEntryUpdateStream = createEntryUpdateStream
function createEntryUpdateStream (all, auth, staleness, latest, cb) {
  log.verbose('all-package-metadata', 'creating remote entry stream')
  var params = {
    timeout: 600,
    follow: true,
    staleOk: true,
    auth: auth,
    streaming: true
  }
  var partialUpdate = false
  if (latest && (Date.now() - latest < (staleness * 1000))) {
    // Skip the request altogether if our `latest` isn't stale.
    log.verbose('all-package-metadata', 'Local data up to date, skipping update')
    return cb(null)
  } else if (latest === 0) {
    log.warn('', 'Building the local index for the first time, please be patient')
    log.verbose('all-package-metadata', 'No cached data: requesting full metadata db')
  } else {
    log.verbose('all-package-metadata', 'Cached data present with timestamp:', latest, 'requesting partial index update')
    all += '/since?stale=update_after&startkey=' + latest
    partialUpdate = true
  }
  npm.registry.request(all, params, function (er, res) {
    if (er) return cb(er)
    log.silly('all-package-metadata', 'request stream opened, code:', res.statusCode)
    // NOTE - The stream returned by `request` seems to be very persnickety
    // and this is almost a magic incantation to get it to work.
    // Modify how `res` is used here at your own risk.
    var entryStream = ms.pipeline.obj(
      res,
      ms.through(function (chunk, enc, cb) {
        cb(null, chunk)
      }),
      gunzip(),
      jsonstream.parse('*', function (pkg, key) {
        if (key[0] === '_updated' || key[0][0] !== '_') {
          return pkg
        }
      })
    )
    if (partialUpdate) {
      // The `/all/since` endpoint doesn't return `_updated`, so we
      // just use the request's own timestamp.
      cb(null, entryStream, Date.parse(res.headers.date))
    } else {
      extractUpdated(entryStream, 'entry-update-stream', cb)
    }
  })
}
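
// Concretely, the two request shapes differ only in the URL (registry host
// and timestamp below are hypothetical):
//
//   full build:     GET https://registry.npmjs.org/-/all
//   partial update: GET https://registry.npmjs.org/-/all/since?stale=update_after&startkey=1504140932101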

// Both the (full) remote requests and the local index have `_updated` as their
// first returned entries. This is the "latest" unix timestamp for the metadata
// in question. This code does a bit of juggling with the data streams
// so that we can pretend that field doesn't exist, but still extract `latest`
function extractUpdated (entryStream, label, cb) {
  cb = once(cb)
  log.silly('all-package-metadata', 'extracting latest')
  function nope (msg) {
    return function () {
      log.warn('all-package-metadata', label, msg)
      entryStream.removeAllListeners()
      entryStream.destroy()
      cb(new Error(msg))
    }
  }
  var onErr = nope('Failed to read stream')
  var onEnd = nope('Empty or invalid stream')
  entryStream.on('error', onErr)
  entryStream.on('end', onEnd)
  entryStream.once('data', function (latest) {
    log.silly('all-package-metadata', 'got first stream entry for', label, latest)
    entryStream.removeListener('error', onErr)
    entryStream.removeListener('end', onEnd)
    // Because `.once()` unpauses the stream, we re-pause it after the first
    // entry so we don't vomit entries into the void.
    entryStream.pause()
    if (typeof latest === 'number') {
      // The extra pipeline is to return a stream that will implicitly unpause
      // after having an `.on('data')` listener attached, since using this
      // `data` event broke its initial state.
      cb(null, ms.pipeline.obj(entryStream, ms.through.obj()), latest)
    } else {
      cb(new Error('expected first entry to be _updated'))
    }
  })
}

// Creates a stream that writes input metadata to the current cache.
// Cache updates are atomic, and the stream closes when *everything* is done.
// The stream is also passthrough, so entries going through it will also
// be output from it.
module.exports._createCacheWriteStream = createCacheWriteStream
function createCacheWriteStream (cacheFile, latest, cb) {
  _ensureCacheDirExists(cacheFile, function (err) {
    if (err) return cb(err)
    log.silly('all-package-metadata', 'creating output stream')
    var outStream = _createCacheOutStream()
    var cacheFileStream = writeStreamAtomic(cacheFile)
    var inputStream = _createCacheInStream(cacheFileStream, outStream, latest)

    // Glue together the various streams so they fail together.
    // `cacheFileStream` errors are already handled by the `inputStream`
    // pipeline
    var errEmitted = false
    linkStreams(inputStream, outStream, function () { errEmitted = true })

    cacheFileStream.on('close', function () { !errEmitted && outStream.end() })

    cb(null, ms.duplex.obj(inputStream, outStream))
  })
}

function _ensureCacheDirExists (cacheFile, cb) {
  var cacheBase = path.dirname(cacheFile)
  log.silly('all-package-metadata', 'making sure cache dir exists at', cacheBase)
  correctMkdir(npm.cache, function (er, st) {
    if (er) return cb(er)
    mkdir(cacheBase, function (er, made) {
      if (er) return cb(er)
      chownr(made || cacheBase, st.uid, st.gid, cb)
    })
  })
}

function _createCacheOutStream () {
  return ms.pipeline.obj(
    // These two passthrough `through` streams compensate for some
    // odd behavior with `jsonstream`.
    ms.through(),
    jsonstream.parse('*', function (obj, key) {
      // This stream happens to get `_updated` passed through it, for
      // implementation reasons. We make sure to filter it out, because the
      // fact that it comes through here is just an implementation detail.
      if (typeof obj === 'object') {
        return obj
      }
    }),
    ms.through.obj()
  )
}

function _createCacheInStream (writer, outStream, latest) {
  var updatedWritten = false
  var inStream = ms.pipeline.obj(
    ms.through.obj(function (pkg, enc, cb) {
      if (!updatedWritten && typeof pkg === 'number') {
        // This is the `_updated` value getting sent through.
        updatedWritten = true
        return cb(null, ['_updated', pkg])
      } else if (typeof pkg !== 'object') {
        this.emit('error', new Error('invalid value written to input stream'))
      } else {
        // The [key, val] format is expected by `jsonstream` for object writing
        cb(null, [pkg.name, pkg])
      }
    }),
    jsonstream.stringifyObject('{', ',', '}'),
    ms.through(function (chunk, enc, cb) {
      // This tees off the buffer data to `outStream`, and then continues
      // the pipeline as usual
      outStream.write(chunk, enc, function () {
        cb(null, chunk)
      })
    }),
    // And finally, we write to the cache file.
    writer
  )
  inStream.write(latest)
  return inStream
}
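
// A hypothetical trace of the [key, val] plumbing above, showing how the
// pairs reassemble into the single JSON object the cache invariants require:
//
//   inStream.write(1504140932101)           -> ['_updated', 1504140932101]
//   inStream.write({ name: 'a-pkg', ... })  -> ['a-pkg', { name: 'a-pkg', ... }]
//   bytes written to the cache file:
//     {"_updated":1504140932101,"a-pkg":{"name":"a-pkg",...}}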

// Links errors between `a` and `b`, preventing cycles, and calls `cb` if
// an error happens, once per error. (`cb` is optional: createMergedStream
// links streams without one.)
function linkStreams (a, b, cb) {
  var lastError = null
  a.on('error', function (err) {
    if (err !== lastError) {
      lastError = err
      b.emit('error', err)
      cb && cb(err)
    }
  })
  b.on('error', function (err) {
    if (err !== lastError) {
      lastError = err
      a.emit('error', err)
      cb && cb(err)
    }
  })
}

website/node_modules/npm/lib/search/all-package-search.js (new file, 50 lines, generated, vendored)

@@ -0,0 +1,50 @@
var ms = require('mississippi')
var allPackageMetadata = require('./all-package-metadata')
var packageFilter = require('./package-filter.js')

module.exports = allPackageSearch
function allPackageSearch (opts) {
  var searchSection = (opts.unicode ? '🤔 ' : '') + 'search'

  // Get a stream with *all* the packages. This takes care of dealing
  // with the local cache as well, but that's an internal detail.
  var allEntriesStream = allPackageMetadata(opts.staleness)

  // Grab a stream that filters those packages according to given params.
  var filterStream = streamFilter(function (pkg) {
    opts.log.gauge.pulse('search')
    opts.log.gauge.show({section: searchSection, logline: 'scanning ' + pkg.name})
    // Simply 'true' if the package matches search parameters.
    var match = packageFilter(pkg, opts.include, opts.exclude, {
      description: opts.description
    })
    return match
  })
  return ms.pipeline.obj(allEntriesStream, filterStream)
}

function streamFilter (filter) {
  return ms.through.obj(function (data, enc, cb) {
    if (filter(data)) {
      this.push(standardizePkg(data))
    }
    cb()
  })
}

function standardizePkg (data) {
  return {
    name: data.name,
    description: data.description,
    maintainers: (data.maintainers || []).map(function (m) {
      return { username: m.name, email: m.email }
    }),
    keywords: data.keywords || [],
    version: Object.keys(data.versions || {})[0] || [],
    date: (
      data.time &&
      data.time.modified &&
      new Date(data.time.modified)
    ) || null
  }
}
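
A caller (npm's search command in this tree) would drive this module roughly as follows. This is a sketch; the option values are illustrative, and `log` stands in for an npmlog-style logger with a gauge:

var log = require('npmlog')
var allPackageSearch = require('./all-package-search')

allPackageSearch({
  include: ['http', 'client'],  // every term must match
  exclude: ['deprecated'],      // no term may match
  description: true,            // match against descriptions too
  staleness: 900,               // accept a cache up to 15 minutes old
  unicode: false,
  log: log
}).on('data', function (pkg) {
  console.log(pkg.name, pkg.version)
})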

website/node_modules/npm/lib/search/esearch.js (new file, 64 lines, generated, vendored)

@@ -0,0 +1,64 @@
'use strict'

var npm = require('../npm.js')
var log = require('npmlog')
var mapToRegistry = require('../utils/map-to-registry.js')
var jsonstream = require('JSONStream')
var ms = require('mississippi')
var gunzip = require('../utils/gunzip-maybe')

module.exports = esearch

function esearch (opts) {
  var stream = ms.through.obj()

  mapToRegistry('-/v1/search', npm.config, function (er, uri, auth) {
    if (er) return stream.emit('error', er)
    createResultStream(uri, auth, opts, function (err, resultStream) {
      if (err) return stream.emit('error', err)
      ms.pipeline.obj(resultStream, stream)
    })
  })
  return stream
}

function createResultStream (uri, auth, opts, cb) {
  log.verbose('esearch', 'creating remote entry stream')
  var params = {
    timeout: 600,
    follow: true,
    staleOk: true,
    auth: auth,
    streaming: true
  }
  var q = buildQuery(opts)
  npm.registry.request(uri + '?text=' + encodeURIComponent(q) + '&size=' + opts.limit, params, function (err, res) {
    if (err) return cb(err)
    log.silly('esearch', 'request stream opened, code:', res.statusCode)
    // NOTE - The stream returned by `request` seems to be very persnickety
    // and this is almost a magic incantation to get it to work.
    // Modify how `res` is used here at your own risk.
    var entryStream = ms.pipeline.obj(
      res,
      ms.through(function (chunk, enc, cb) {
        cb(null, chunk)
      }),
      gunzip(),
      jsonstream.parse('objects.*.package', function (data) {
        return {
          name: data.name,
          description: data.description,
          maintainers: data.maintainers,
          keywords: data.keywords,
          version: data.version,
          date: data.date ? new Date(data.date) : null
        }
      })
    )
    return cb(null, entryStream)
  })
}

function buildQuery (opts) {
  return opts.include.join(' ')
}
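
The 'objects.*.package' selector matches the registry's /-/v1/search response shape, which looks roughly like this (abbreviated):

// GET https://registry.npmjs.org/-/v1/search?text=http%20client&size=20
// {
//   "objects": [
//     {
//       "package": { "name": "...", "version": "...", "description": "...",
//                    "keywords": [], "date": "...", "maintainers": [] },
//       "score": { "final": 0.9, "detail": {} }
//     }
//   ],
//   "total": 12345
// }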

website/node_modules/npm/lib/search/format-package-stream.js (new file, 172 lines, generated, vendored)

@@ -0,0 +1,172 @@
'use strict'

var ms = require('mississippi')
var jsonstream = require('JSONStream')
var columnify = require('columnify')

// This module consumes package data in the following format:
//
// {
//   name: String,
//   description: String,
//   maintainers: [{ username: String, email: String }],
//   keywords: String | [String],
//   version: String,
//   date: Date // can be null,
// }
//
// The returned stream will format this package data
// into a byte stream of formatted, displayable output.

module.exports = formatPackageStream
function formatPackageStream (opts) {
  opts = opts || {}
  if (opts.json) {
    return jsonOutputStream()
  } else {
    return textOutputStream(opts)
  }
}

function jsonOutputStream () {
  return ms.pipeline.obj(
    ms.through.obj(),
    jsonstream.stringify('[', ',', ']'),
    ms.through()
  )
}

function textOutputStream (opts) {
  var line = 0
  return ms.through.obj(function (pkg, enc, cb) {
    cb(null, prettify(pkg, ++line, opts))
  })
}

function prettify (data, num, opts) {
  opts = opts || {}
  var truncate = !opts.long

  var pkg = normalizePackage(data, opts)

  var columns = opts.description
    ? ['name', 'description', 'author', 'date', 'version', 'keywords']
    : ['name', 'author', 'date', 'version', 'keywords']

  if (opts.parseable) {
    return columns.map(function (col) {
      return pkg[col] && ('' + pkg[col]).replace(/\t/g, ' ')
    }).join('\t')
  }

  var output = columnify(
    [pkg],
    {
      include: columns,
      showHeaders: num <= 1,
      columnSplitter: ' | ',
      truncate: truncate,
      config: {
        name: { minWidth: 25, maxWidth: 25, truncate: false, truncateMarker: '' },
        description: { minWidth: 20, maxWidth: 20 },
        author: { minWidth: 15, maxWidth: 15 },
        date: { maxWidth: 11 },
        version: { minWidth: 8, maxWidth: 8 },
        keywords: { maxWidth: Infinity }
      }
    }
  )
  output = trimToMaxWidth(output)
  if (opts.color) {
    output = highlightSearchTerms(output, opts.args)
  }
  return output
}

var colors = [31, 33, 32, 36, 34, 35]
var cl = colors.length

function addColorMarker (str, arg, i) {
  var m = i % cl + 1
  var markStart = String.fromCharCode(m)
  var markEnd = String.fromCharCode(0)

  if (arg.charAt(0) === '/') {
    return str.replace(
      new RegExp(arg.substr(1, arg.length - 2), 'gi'),
      function (bit) { return markStart + bit + markEnd }
    )
  }

  // just a normal string, do the split/map thing
  var pieces = str.toLowerCase().split(arg.toLowerCase())
  var p = 0

  return pieces.map(function (piece) {
    piece = str.substr(p, piece.length)
    var mark = markStart +
      str.substr(p + piece.length, arg.length) +
      markEnd
    p += piece.length + arg.length
    return piece + mark
  }).join('')
}

function colorize (line) {
  for (var i = 0; i < cl; i++) {
    var m = i + 1
    var color = '\u001B[' + colors[i] + 'm'
    line = line.split(String.fromCharCode(m)).join(color)
  }
  var uncolor = '\u001B[0m'
  return line.split('\u0000').join(uncolor)
}
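
// The two functions above cooperate through in-band markers: addColorMarker
// brackets each match with the control characters \u0001..\u0006 (one per
// search term, cycling through the six colors) and \u0000, and colorize then
// rewrites those markers into ANSI escapes. A hypothetical trace:
//
//   addColorMarker('needle in haystack', 'needle', 0)
//     -> '\u0001needle\u0000 in haystack\u0001\u0000'
//        (the trailing empty marker pair is harmless)
//   colorize(...) then maps '\u0001' -> '\u001B[31m' (red)
//   and '\u0000' -> '\u001B[0m' (reset)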

function getMaxWidth () {
  var cols
  try {
    var tty = require('tty')
    var stdout = process.stdout
    cols = !tty.isatty(stdout.fd) ? Infinity : process.stdout.getWindowSize()[0]
    cols = (cols === 0) ? Infinity : cols
  } catch (ex) { cols = Infinity }
  return cols
}

function trimToMaxWidth (str) {
  var maxWidth = getMaxWidth()
  return str.split('\n').map(function (line) {
    return line.slice(0, maxWidth)
  }).join('\n')
}

function highlightSearchTerms (str, terms) {
  terms.forEach(function (arg, i) {
    str = addColorMarker(str, arg, i)
  })

  return colorize(str).trim()
}

function normalizePackage (data, opts) {
  opts = opts || {}
  return {
    name: data.name,
    description: opts.description ? data.description : '',
    author: (data.maintainers || []).map(function (m) {
      return '=' + m.username
    }).join(' '),
    keywords: Array.isArray(data.keywords)
      ? data.keywords.join(' ')
      : typeof data.keywords === 'string'
        ? data.keywords.replace(/[,\s]+/, ' ')
        : '',
    version: data.version,
    date: (data.date &&
      (data.date.toISOString() // remove time
        .split('T').join(' ')
        .replace(/:[0-9]{2}\.[0-9]{3}Z$/, ''))
        .slice(0, -5)) ||
      'prehistoric'
  }
}
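
For a sense of the default text output: each package becomes one columnified row, with headers only on the first row and widths per the config above. The sample data below is hypothetical and columnify's default uppercase headers are assumed:

// NAME                      | AUTHOR          | DATE        | VERSION  | KEYWORDS
// example-http-client       | =alice =bob     | 2017-08-31  | 2.1.0    | http client request

With opts.parseable the same columns come out tab-separated instead, and with opts.json the stream emits a single JSON array of the raw package objects.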

website/node_modules/npm/lib/search/package-filter.js (new file, 41 lines, generated, vendored)

@@ -0,0 +1,41 @@
'use strict'

module.exports = filter
function filter (data, include, exclude, opts) {
  return typeof data === 'object' &&
    filterWords(data, include, exclude, opts)
}

function getWords (data, opts) {
  return [ data.name ]
    .concat((opts && opts.description) ? data.description : [])
    .concat((data.maintainers || []).map(function (m) {
      return '=' + m.name
    }))
    .concat(data.versions && data.versions.length && data.url && ('<' + data.url + '>'))
    .concat(data.keywords || [])
    .map(function (f) { return f && f.trim && f.trim() })
    .filter(function (f) { return f })
    .join(' ')
    .toLowerCase()
}

function filterWords (data, include, exclude, opts) {
  var words = getWords(data, opts)
  for (var i = 0, l = include.length; i < l; i++) {
    if (!match(words, include[i])) return false
  }
  for (i = 0, l = exclude.length; i < l; i++) {
    if (match(words, exclude[i])) return false
  }
  return true
}

function match (words, pattern) {
  if (pattern.charAt(0) === '/') {
    pattern = pattern.replace(/\/$/, '')
    pattern = new RegExp(pattern.substr(1, pattern.length - 1))
    return words.match(pattern)
  }
  return words.indexOf(pattern) !== -1
}
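
A quick illustration of the three matching forms (plain substring, =maintainer handle, and /regex/ pattern), with hypothetical package data:

var filter = require('./package-filter.js')

var pkg = {
  name: 'example-http-client',
  description: 'A tiny HTTP client',
  maintainers: [{ name: 'alice' }],
  keywords: ['http', 'client']
}

filter(pkg, ['http', 'client'], [], { description: true })  // => true
filter(pkg, ['http'], ['tiny'], { description: true })      // => false: excluded word found
filter(pkg, ['/^example-/'], [], {})                        // => true: regex match on name
filter(pkg, ['=alice'], [], {})                             // => true: maintainer handle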