test
Some checks failed
continuous-integration/drone/push Build is failing

This commit is contained in:
mol
2024-07-06 22:23:31 +08:00
parent 08173d8497
commit 263cb5ef03
1663 changed files with 526884 additions and 0 deletions

View File

@ -0,0 +1,13 @@
vscode-ripgrep
Copyright (c) Microsoft Corporation
All rights reserved.
MIT License
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

View File

@ -0,0 +1,41 @@
<!-- BEGIN MICROSOFT SECURITY.MD V0.0.5 BLOCK -->
## Security
Microsoft takes the security of our software products and services seriously, which includes all source code repositories managed through our GitHub organizations, which include [Microsoft](https://github.com/Microsoft), [Azure](https://github.com/Azure), [DotNet](https://github.com/dotnet), [AspNet](https://github.com/aspnet), [Xamarin](https://github.com/xamarin), and [our GitHub organizations](https://opensource.microsoft.com/).
If you believe you have found a security vulnerability in any Microsoft-owned repository that meets [Microsoft's definition of a security vulnerability](https://docs.microsoft.com/en-us/previous-versions/tn-archive/cc751383(v=technet.10)), please report it to us as described below.
## Reporting Security Issues
**Please do not report security vulnerabilities through public GitHub issues.**
Instead, please report them to the Microsoft Security Response Center (MSRC) at [https://msrc.microsoft.com/create-report](https://msrc.microsoft.com/create-report).
If you prefer to submit without logging in, send email to [secure@microsoft.com](mailto:secure@microsoft.com). If possible, encrypt your message with our PGP key; please download it from the [Microsoft Security Response Center PGP Key page](https://www.microsoft.com/en-us/msrc/pgp-key-msrc).
You should receive a response within 24 hours. If for some reason you do not, please follow up via email to ensure we received your original message. Additional information can be found at [microsoft.com/msrc](https://www.microsoft.com/msrc).
Please include the requested information listed below (as much as you can provide) to help us better understand the nature and scope of the possible issue:
* Type of issue (e.g. buffer overflow, SQL injection, cross-site scripting, etc.)
* Full paths of source file(s) related to the manifestation of the issue
* The location of the affected source code (tag/branch/commit or direct URL)
* Any special configuration required to reproduce the issue
* Step-by-step instructions to reproduce the issue
* Proof-of-concept or exploit code (if possible)
* Impact of the issue, including how an attacker might exploit the issue
This information will help us triage your report more quickly.
If you are reporting for a bug bounty, more complete reports can contribute to a higher bounty award. Please visit our [Microsoft Bug Bounty Program](https://microsoft.com/msrc/bounty) page for more details about our active programs.
## Preferred Languages
We prefer all communications to be in English.
## Policy
Microsoft follows the principle of [Coordinated Vulnerability Disclosure](https://www.microsoft.com/en-us/msrc/cvd).
<!-- END MICROSOFT SECURITY.MD BLOCK -->

Binary file not shown.

View File

@ -0,0 +1,46 @@
name: $(Date:yyyyMMdd)$(Rev:.r)
trigger:
branches:
include:
- main
pr: none
resources:
repositories:
- repository: templates
type: github
name: microsoft/vscode-engineering
ref: main
endpoint: Monaco
parameters:
- name: publishPackage
displayName: 🚀 Publish @vscode/ripgrep
type: boolean
default: false
extends:
template: azure-pipelines/npm-package/pipeline.yml@templates
parameters:
npmPackages:
- name: ripgrep
buildPlatforms:
- name: Linux
nodeVersions:
- 18.x
- name: MacOS
nodeVersions:
- 18.x
- name: Windows
nodeVersions:
- 18.x
buildSteps:
- script: npm i
displayName: Install dependencies
env:
GITHUB_TOKEN: $(github-token-public-repos)
publishPackage: ${{ parameters.publishPackage }}

View File

@ -0,0 +1,8 @@
{
"compilerOptions": {
"resolveJsonModule": true,
"lib": [
"esnext"
]
}
}

View File

@ -0,0 +1,356 @@
// @ts-check
'use strict';
const path = require('path');
const fs = require('fs');
const os = require('os');
const https = require('https');
const util = require('util');
const url = require('url');
const stream = require('stream');
const child_process = require('child_process');
const proxy_from_env = require('proxy-from-env');
const yauzl = require('yauzl'); // use yauzl ^2.9.2 because vscode already ships with it.
const packageVersion = require('../package.json').version;
const tmpDir = path.join(os.tmpdir(), `vscode-ripgrep-cache-${packageVersion}`);
const fsUnlink = util.promisify(fs.unlink);
const fsExists = util.promisify(fs.exists);
const fsMkdir = util.promisify(fs.mkdir);
const isWindows = os.platform() === 'win32';
const REPO = 'microsoft/ripgrep-prebuilt';
const pipelineAsync = util.promisify(stream.pipeline);
/**
 * Whether the given URL points at the GitHub REST API host.
 * Used to decide whether the authorization header may be sent along.
 * @param {string} _url
 */
function isGithubUrl(_url) {
    const { hostname } = url.parse(_url);
    return hostname === 'api.github.com';
}
/**
 * Download `_url` into the file at `dest`, following redirects.
 * Honors HTTPS proxies from the environment, and strips the authorization
 * header when the request leaves api.github.com so the token is not leaked
 * to the CDN the download is redirected to.
 * @param {string} _url
 * @param {fs.PathLike} dest
 * @param {any} opts request options; `opts.headers.authorization` may hold a token
 * @returns {Promise<void>}
 */
function download(_url, dest, opts) {
    const proxy = proxy_from_env.getProxyForUrl(url.parse(_url));
    if (proxy !== '') {
        var HttpsProxyAgent = require('https-proxy-agent');
        opts = {
            ...opts,
            "agent": new HttpsProxyAgent.HttpsProxyAgent(proxy),
            proxy
        };
    }
    if (opts.headers && opts.headers.authorization && !isGithubUrl(_url)) {
        // Only api.github.com may see the token; drop it for other hosts.
        delete opts.headers.authorization;
    }
    return new Promise((resolve, reject) => {
        // Fix: redact the authorization header before logging -- the previous
        // code printed the raw GitHub token into build logs.
        const loggedOpts = opts.headers && opts.headers.authorization
            ? { ...opts, headers: { ...opts.headers, authorization: '<redacted>' } }
            : opts;
        console.log(`Download options: ${JSON.stringify(loggedOpts)}`);
        const outFile = fs.createWriteStream(dest);
        // Fix: surface write-stream failures (e.g. disk full) instead of hanging.
        outFile.on('error', async err => {
            try { await fsUnlink(dest); } catch (e) { }
            reject(err);
        });
        const mergedOpts = {
            ...url.parse(_url),
            ...opts
        };
        https.get(mergedOpts, response => {
            console.log('statusCode: ' + response.statusCode);
            // Fix: follow any redirect carrying a Location header, not just 302
            // (GitHub answers 302 today, but 301/303/307/308 are equally valid).
            if (response.statusCode >= 300 && response.statusCode < 400 && response.headers.location) {
                console.log('Following redirect to: ' + response.headers.location);
                return download(response.headers.location, dest, opts)
                    .then(resolve, reject);
            } else if (response.statusCode !== 200) {
                reject(new Error('Download failed with ' + response.statusCode));
                return;
            }
            response.pipe(outFile);
            outFile.on('finish', () => {
                resolve();
            });
        }).on('error', async err => {
            // Connection-level failure: remove the partial file before rejecting.
            await fsUnlink(dest);
            reject(err);
        });
    });
}
/**
 * GET `_url` over HTTPS and resolve with the whole response body as a string.
 * Honors HTTPS proxies from the environment.
 * @param {string} _url
 * @param {any} opts extra request options (headers etc.)
 * @returns {Promise<string>}
 */
function get(_url, opts) {
    console.log(`GET ${_url}`);
    const proxy = proxy_from_env.getProxyForUrl(url.parse(_url));
    if (proxy !== '') {
        var HttpsProxyAgent = require('https-proxy-agent');
        opts = {
            ...opts,
            "agent": new HttpsProxyAgent.HttpsProxyAgent(proxy)
        };
    }
    return new Promise((resolve, reject) => {
        let result = '';
        opts = {
            ...url.parse(_url),
            ...opts
        };
        https.get(opts, response => {
            if (response.statusCode !== 200) {
                reject(new Error('Request failed: ' + response.statusCode));
                // Fix: stop processing and drain the response so the socket is
                // released; previously the data handlers were still attached.
                response.resume();
                return;
            }
            response.on('data', d => {
                result += d.toString();
            });
            response.on('end', () => {
                resolve(result);
            });
            response.on('error', e => {
                reject(e);
            });
        }).on('error', e => {
            // Fix: connection-level failures (DNS, TLS, ECONNREFUSED) are emitted
            // on the ClientRequest; without this handler they crashed the process.
            reject(e);
        });
    });
}
/**
 * Build the GitHub API URL for the release tagged `tag` in `repo`.
 * @param {string} repo e.g. "microsoft/ripgrep-prebuilt"
 * @param {string} tag release tag, e.g. "v13.0.0-10"
 */
function getApiUrl(repo, tag) {
    const base = 'https://api.github.com';
    return [base, 'repos', repo, 'releases', 'tags', tag].join('/');
}
/**
 * Locate the release asset named `assetName` for tag `opts.version` via the
 * GitHub API and download it into `downloadFolder`. A previously downloaded
 * file is reused unless `opts.force` is set.
 * @param {{ force: boolean; token: string; version: string; }} opts
 * @param {string} assetName
 * @param {string} downloadFolder
 * @returns {Promise<string>} path of the downloaded (or cached) asset
 */
async function getAssetFromGithubApi(opts, assetName, downloadFolder) {
    const assetDownloadPath = path.join(downloadFolder, assetName);
    // We can just use the cached binary
    if (!opts.force && await fsExists(assetDownloadPath)) {
        console.log('Using cached download: ' + assetDownloadPath);
        return assetDownloadPath;
    }
    const downloadOpts = {
        headers: {
            'user-agent': 'vscode-ripgrep'
        }
    };
    if (opts.token) {
        downloadOpts.headers.authorization = `token ${opts.token}`;
    }
    console.log(`Finding release for ${opts.version}`);
    const release = await get(getApiUrl(REPO, opts.version), downloadOpts);
    let jsonRelease;
    try {
        jsonRelease = JSON.parse(release);
    } catch (e) {
        throw new Error('Malformed API response: ' + e.stack);
    }
    if (!jsonRelease.assets) {
        throw new Error('Bad API response: ' + JSON.stringify(release));
    }
    const asset = jsonRelease.assets.find(a => a.name === assetName);
    if (!asset) {
        throw new Error('Asset not found with name: ' + assetName);
    }
    console.log(`Downloading from ${asset.url}`);
    console.log(`Downloading to ${assetDownloadPath}`);
    // Ask the API for the raw binary instead of the JSON asset description.
    downloadOpts.headers.accept = 'application/octet-stream';
    await download(asset.url, assetDownloadPath, downloadOpts);
    // Fix: the cached branch returned the path but the fresh-download branch
    // returned undefined; make both branches return the same value.
    return assetDownloadPath;
}
/**
* @param {string} zipPath
* @param {string} destinationDir
*/
/**
 * Extract a zip archive into `destinationDir` using yauzl (Windows path,
 * where a system `tar` capable of zip is not assumed).
 * @param {string} zipPath
 * @param {string} destinationDir
 * @returns {Promise<void>}
 */
function unzipWindows(zipPath, destinationDir) {
    // code from https://stackoverflow.com/questions/63932027/how-to-unzip-to-a-folder-using-yauzl
    return new Promise((resolve, reject) => {
        // Fix: the mkdir promise was fired and forgotten, racing the writes
        // below and leaving rejections unhandled. Chain it explicitly.
        fs.promises.mkdir(path.dirname(destinationDir), { recursive: true })
            .then(() => {
                yauzl.open(zipPath, { lazyEntries: true }, (err, zipFile) => {
                    if (err) {
                        // Fix: zipFile is undefined when open() fails; calling
                        // zipFile.close() here threw a TypeError that masked `err`.
                        reject(err);
                        return;
                    }
                    // This is the key. We start by reading the first entry.
                    zipFile.readEntry();
                    // For every entry we write a file or dir to disk, then call
                    // zipFile.readEntry() again to trigger the next cycle.
                    zipFile.on('entry', (entry) => {
                        try {
                            if (/\/$/.test(entry.fileName)) {
                                // Directory entry: create it, then continue.
                                fs.promises.mkdir(path.join(destinationDir, entry.fileName), { recursive: true })
                                    .then(() => zipFile.readEntry(), (mkdirErr) => {
                                        zipFile.close();
                                        reject(mkdirErr);
                                    });
                            } else {
                                // File entry: stream its contents to disk.
                                zipFile.openReadStream(entry, (readErr, readStream) => {
                                    if (readErr) {
                                        zipFile.close();
                                        reject(readErr);
                                        return;
                                    }
                                    const file = fs.createWriteStream(path.join(destinationDir, entry.fileName));
                                    // Fix: register the error handler up front; it was
                                    // previously attached inside 'finish', i.e. only
                                    // after the write had already succeeded.
                                    file.on('error', (writeErr) => {
                                        zipFile.close();
                                        reject(writeErr);
                                    });
                                    file.on('finish', () => {
                                        // Wait until the file is fully flushed, then read the next entry.
                                        // @ts-ignore: Typing for close() is wrong.
                                        file.close(() => {
                                            zipFile.readEntry();
                                        });
                                    });
                                    readStream.pipe(file);
                                });
                            }
                        } catch (e) {
                            zipFile.close();
                            reject(e);
                        }
                    });
                    zipFile.on('end', () => {
                        resolve();
                    });
                    zipFile.on('error', (zipErr) => {
                        zipFile.close();
                        reject(zipErr);
                    });
                });
            })
            .catch(reject);
    });
}
/**
 * Escape spaces in a file path for PowerShell, which splits unquoted
 * arguments on whitespace. Each space becomes "` " (backtick-space),
 * per https://stackoverflow.com/a/18537344/7374562
 * @param {string} path
 */
function sanitizePathForPowershell(path) {
    return path.split(' ').join('` ');
}
/**
 * Extract a .tar.gz archive into `destinationDir` using the system `tar`.
 * @param {string} zipPath
 * @param {string} destinationDir
 * @returns {Promise<void>} resolves when tar exits 0, rejects otherwise
 */
function untar(zipPath, destinationDir) {
    return new Promise((resolve, reject) => {
        const args = ['xvf', zipPath, '-C', destinationDir];
        // Inherit stdio so tar's file listing shows up in the install log.
        const tarProc = child_process.spawn('tar', args, { stdio: 'inherit' });
        tarProc.on('error', reject);
        tarProc.on('close', code => {
            console.log(`tar xvf exited with ${code}`);
            if (code === 0) {
                resolve();
            } else {
                reject(new Error(`tar xvf exited with ${code}`));
            }
        });
    });
}
/**
 * Unpack the downloaded archive and locate the ripgrep binary inside it.
 * @param {string} zipPath
 * @param {string} destinationDir
 * @returns {Promise<string>} path of the extracted rg / rg.exe binary
 * @throws when neither rg nor rg.exe is found after extraction
 */
async function unzipRipgrep(zipPath, destinationDir) {
    if (isWindows) {
        await unzipWindows(zipPath, destinationDir);
    } else {
        await untar(zipPath, destinationDir);
    }
    const expectedName = path.join(destinationDir, 'rg');
    // The archive contains either "rg" (posix) or "rg.exe" (windows).
    for (const candidate of [expectedName, expectedName + '.exe']) {
        if (await fsExists(candidate)) {
            return candidate;
        }
    }
    throw new Error(`Expecting rg or rg.exe unzipped into ${destinationDir}, didn't find one.`);
}
/**
 * Download and unpack the ripgrep release described by `opts`.
 * Required: opts.version, opts.target, opts.destDir.
 * Optional: opts.force (skip cache), opts.token (GitHub token).
 */
module.exports = async opts => {
    if (!opts.version) {
        return Promise.reject(new Error('Missing version'));
    }
    if (!opts.target) {
        return Promise.reject(new Error('Missing target'));
    }
    const extension = isWindows ? '.zip' : '.tar.gz';
    const assetName = ['ripgrep', opts.version, opts.target].join('-') + extension;
    if (!await fsExists(tmpDir)) {
        await fsMkdir(tmpDir);
    }
    const assetDownloadPath = path.join(tmpDir, assetName);
    // Best-effort removal of a (possibly partial) cached download.
    const removeCached = async () => {
        try {
            await fsUnlink(assetDownloadPath);
        } catch (e) { }
    };
    try {
        await getAssetFromGithubApi(opts, assetName, tmpDir);
    } catch (e) {
        console.log('Deleting invalid download cache');
        await removeCached();
        throw e;
    }
    console.log(`Unzipping to ${opts.destDir}`);
    try {
        const destinationPath = await unzipRipgrep(assetDownloadPath, opts.destDir);
        if (!isWindows) {
            // Archives don't always preserve the executable bit.
            await util.promisify(fs.chmod)(destinationPath, '755');
        }
    } catch (e) {
        console.log('Deleting invalid download');
        await removeCached();
        throw e;
    }
};

View File

@ -0,0 +1,5 @@
'use strict';
const path = require('path');

// Absolute path of the bundled ripgrep binary: "rg.exe" on Windows, "rg" elsewhere.
const binaryName = process.platform === 'win32' ? 'rg.exe' : 'rg';
module.exports.rgPath = path.join(__dirname, '..', 'bin', binaryName);

View File

@ -0,0 +1,79 @@
// @ts-check
'use strict';
const os = require('os');
const fs = require('fs');
const path = require('path');
const util = require('util');
const child_process = require('child_process');
const download = require('./download');
const fsExists = util.promisify(fs.exists);
const mkdir = util.promisify(fs.mkdir);
const exec = util.promisify(child_process.exec);
// Allow `node postinstall.js --force` to bypass the bin/ cache check below.
const forceInstall = process.argv.includes('--force');
if (forceInstall) {
    console.log('--force, ignoring caches');
}
// Release tag of microsoft/ripgrep-prebuilt to download.
const VERSION = 'v13.0.0-10';
const MULTI_ARCH_LINUX_VERSION = 'v13.0.0-4';// use this for arm-unknown-linux-gnueabihf and powerpc64le-unknown-linux-gnu until we can fix https://github.com/microsoft/ripgrep-prebuilt/issues/24 and https://github.com/microsoft/ripgrep-prebuilt/issues/32 respectively.
// Destination folder for the extracted binary, next to this script.
const BIN_PATH = path.join(__dirname, '../bin');
// Log unhandled promise rejections instead of letting them pass silently.
process.on('unhandledRejection', (reason, promise) => {
    console.log('Unhandled rejection: ', promise, 'reason:', reason);
});
/**
 * Map the current platform and architecture (overridable via the
 * npm_config_arch environment variable) to a ripgrep-prebuilt target triple.
 * @returns {Promise<string>} e.g. "x86_64-unknown-linux-musl"
 * @throws on platforms other than darwin/win32/linux
 */
async function getTarget() {
    const arch = process.env.npm_config_arch || os.arch();
    const platform = os.platform();
    if (platform === 'darwin') {
        return arch === 'arm64' ? 'aarch64-apple-darwin' : 'x86_64-apple-darwin';
    }
    if (platform === 'win32') {
        if (arch === 'x64') return 'x86_64-pc-windows-msvc';
        if (arch === 'arm') return 'aarch64-pc-windows-msvc';
        return 'i686-pc-windows-msvc';
    }
    if (platform === 'linux') {
        const linuxTargets = new Map([
            ['x64', 'x86_64-unknown-linux-musl'],
            ['arm', 'arm-unknown-linux-gnueabihf'],
            ['armv7l', 'arm-unknown-linux-gnueabihf'],
            ['arm64', 'aarch64-unknown-linux-musl'],
            ['ppc64', 'powerpc64le-unknown-linux-gnu'],
            ['s390x', 's390x-unknown-linux-gnu'],
        ]);
        return linuxTargets.get(arch) ?? 'i686-unknown-linux-musl';
    }
    throw new Error('Unknown platform: ' + platform);
}
/**
 * Entry point: ensure bin/ exists, pick the right prebuilt target, and
 * download/extract the ripgrep binary into it. Exits 0 early when bin/
 * already exists (unless --force); exits 1 when the download fails.
 */
async function main() {
    const binExists = await fsExists(BIN_PATH);
    if (!forceInstall && binExists) {
        console.log('bin/ folder already exists, exiting');
        process.exit(0);
    }
    if (!binExists) {
        await mkdir(BIN_PATH);
    }
    const target = await getTarget();
    // Some targets are only published from an older multi-arch release; see
    // the MULTI_ARCH_LINUX_VERSION comment above.
    const useMultiArch = target === "arm-unknown-linux-gnueabihf"
        || target === "powerpc64le-unknown-linux-gnu"
        || target === "s390x-unknown-linux-gnu";
    const opts = {
        version: useMultiArch ? MULTI_ARCH_LINUX_VERSION : VERSION,
        token: process.env['GITHUB_TOKEN'],
        // Fix: reuse the target computed above instead of awaiting
        // getTarget() a second time for the same value.
        target,
        destDir: BIN_PATH,
        force: forceInstall
    };
    try {
        await download(opts);
    } catch (err) {
        console.error(`Downloading ripgrep failed: ${err.stack}`);
        process.exit(1);
    }
}
main();

View File

@ -0,0 +1,21 @@
The MIT License (MIT)
Copyright (c) 2014 Josh Wolfe
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

View File

@ -0,0 +1,796 @@
var fs = require("fs");
var zlib = require("zlib");
var fd_slicer = require("fd-slicer");
var crc32 = require("buffer-crc32");
var util = require("util");
var EventEmitter = require("events").EventEmitter;
var Transform = require("stream").Transform;
var PassThrough = require("stream").PassThrough;
var Writable = require("stream").Writable;
exports.open = open;
exports.fromFd = fromFd;
exports.fromBuffer = fromBuffer;
exports.fromRandomAccessReader = fromRandomAccessReader;
exports.dosDateTimeToDate = dosDateTimeToDate;
exports.validateFileName = validateFileName;
exports.ZipFile = ZipFile;
exports.Entry = Entry;
exports.RandomAccessReader = RandomAccessReader;
// Open the zip file at `path` and hand a ZipFile to `callback(err, zipfile)`.
// `options` may be omitted; unspecified options take the defaults below
// (note autoClose defaults to true here, unlike fromFd()).
function open(path, options, callback) {
  if (typeof options === "function") {
    callback = options;
    options = null;
  }
  if (options == null) options = {};
  var defaults = {
    autoClose: true,
    lazyEntries: false,
    decodeStrings: true,
    validateEntrySizes: true,
    strictFileNames: false,
  };
  for (var key in defaults) {
    if (options[key] == null) options[key] = defaults[key];
  }
  if (callback == null) callback = defaultCallback;
  fs.open(path, "r", function(err, fd) {
    if (err) return callback(err);
    fromFd(fd, options, function(err, zipfile) {
      // If initializing from the fd failed, make sure the fd itself is closed.
      if (err) fs.close(fd, defaultCallback);
      callback(err, zipfile);
    });
  });
}
// Wrap an already-open file descriptor in a ZipFile. Unlike open(), the fd
// is NOT closed automatically by default (autoClose defaults to false),
// since the caller owns it.
function fromFd(fd, options, callback) {
  if (typeof options === "function") {
    callback = options;
    options = null;
  }
  if (options == null) options = {};
  var defaults = {
    autoClose: false,
    lazyEntries: false,
    decodeStrings: true,
    validateEntrySizes: true,
    strictFileNames: false,
  };
  for (var key in defaults) {
    if (options[key] == null) options[key] = defaults[key];
  }
  if (callback == null) callback = defaultCallback;
  fs.fstat(fd, function(err, stats) {
    if (err) return callback(err);
    var reader = fd_slicer.createFromFd(fd, {autoClose: true});
    fromRandomAccessReader(reader, stats.size, options, callback);
  });
}
// Read a zip file held entirely in memory. autoClose is meaningless here
// (there is no fd to close), so it is forced off regardless of the caller.
function fromBuffer(buffer, options, callback) {
  if (typeof options === "function") {
    callback = options;
    options = null;
  }
  if (options == null) options = {};
  options.autoClose = false;
  var defaults = {
    lazyEntries: false,
    decodeStrings: true,
    validateEntrySizes: true,
    strictFileNames: false,
  };
  for (var key in defaults) {
    if (options[key] == null) options[key] = defaults[key];
  }
  // limit the max chunk size. see https://github.com/thejoshwolfe/yauzl/issues/87
  var reader = fd_slicer.createFromBuffer(buffer, {maxChunkSize: 0x10000});
  fromRandomAccessReader(reader, buffer.length, options, callback);
}
// Core constructor path: locate the End Of Central Directory Record (EOCDR)
// by scanning backwards from the end of the archive, handle the ZIP64
// extension when the 16-bit/32-bit EOCDR fields are saturated, and hand a
// ZipFile to `callback(err, zipfile)`.
function fromRandomAccessReader(reader, totalSize, options, callback) {
  if (typeof options === "function") {
    callback = options;
    options = null;
  }
  if (options == null) options = {};
  if (options.autoClose == null) options.autoClose = true;
  if (options.lazyEntries == null) options.lazyEntries = false;
  if (options.decodeStrings == null) options.decodeStrings = true;
  var decodeStrings = !!options.decodeStrings;
  if (options.validateEntrySizes == null) options.validateEntrySizes = true;
  if (options.strictFileNames == null) options.strictFileNames = false;
  if (callback == null) callback = defaultCallback;
  if (typeof totalSize !== "number") throw new Error("expected totalSize parameter to be a number");
  if (totalSize > Number.MAX_SAFE_INTEGER) {
    throw new Error("zip file too large. only file sizes up to 2^52 are supported due to JavaScript's Number type being an IEEE 754 double.");
  }
  // the matching unref() call is in zipfile.close()
  reader.ref();
  // eocdr means End of Central Directory Record.
  // search backwards for the eocdr signature.
  // the last field of the eocdr is a variable-length comment.
  // the comment size is encoded in a 2-byte field in the eocdr, which we can't find without trudging backwards through the comment to find it.
  // as a consequence of this design decision, it's possible to have ambiguous zip file metadata if a coherent eocdr was in the comment.
  // we search backwards for a eocdr signature, and hope that whoever made the zip file was smart enough to forbid the eocdr signature in the comment.
  var eocdrWithoutCommentSize = 22;
  var maxCommentSize = 0xffff; // 2-byte size
  // Read just enough of the tail to cover the largest possible EOCDR+comment.
  var bufferSize = Math.min(eocdrWithoutCommentSize + maxCommentSize, totalSize);
  var buffer = newBuffer(bufferSize);
  var bufferReadStart = totalSize - buffer.length;
  readAndAssertNoEof(reader, buffer, 0, bufferSize, bufferReadStart, function(err) {
    if (err) return callback(err);
    // Scan backwards so we find the LAST signature (closest to end of file).
    for (var i = bufferSize - eocdrWithoutCommentSize; i >= 0; i -= 1) {
      if (buffer.readUInt32LE(i) !== 0x06054b50) continue;
      // found eocdr
      var eocdrBuffer = buffer.slice(i);
      // 0 - End of central directory signature = 0x06054b50
      // 4 - Number of this disk
      var diskNumber = eocdrBuffer.readUInt16LE(4);
      if (diskNumber !== 0) {
        return callback(new Error("multi-disk zip files are not supported: found disk number: " + diskNumber));
      }
      // 6 - Disk where central directory starts
      // 8 - Number of central directory records on this disk
      // 10 - Total number of central directory records
      var entryCount = eocdrBuffer.readUInt16LE(10);
      // 12 - Size of central directory (bytes)
      // 16 - Offset of start of central directory, relative to start of archive
      var centralDirectoryOffset = eocdrBuffer.readUInt32LE(16);
      // 20 - Comment length
      var commentLength = eocdrBuffer.readUInt16LE(20);
      // A signature found inside the comment would have a wrong length; reject it.
      var expectedCommentLength = eocdrBuffer.length - eocdrWithoutCommentSize;
      if (commentLength !== expectedCommentLength) {
        return callback(new Error("invalid comment length. expected: " + expectedCommentLength + ". found: " + commentLength));
      }
      // 22 - Comment
      // the encoding is always cp437.
      var comment = decodeStrings ? decodeBuffer(eocdrBuffer, 22, eocdrBuffer.length, false)
                                  : eocdrBuffer.slice(22);
      // Saturated fields (0xffff / 0xffffffff) signal a ZIP64 archive.
      if (!(entryCount === 0xffff || centralDirectoryOffset === 0xffffffff)) {
        return callback(null, new ZipFile(reader, centralDirectoryOffset, totalSize, entryCount, comment, options.autoClose, options.lazyEntries, decodeStrings, options.validateEntrySizes, options.strictFileNames));
      }
      // ZIP64 format
      // ZIP64 Zip64 end of central directory locator
      var zip64EocdlBuffer = newBuffer(20);
      // The locator sits immediately before the EOCDR.
      var zip64EocdlOffset = bufferReadStart + i - zip64EocdlBuffer.length;
      readAndAssertNoEof(reader, zip64EocdlBuffer, 0, zip64EocdlBuffer.length, zip64EocdlOffset, function(err) {
        if (err) return callback(err);
        // 0 - zip64 end of central dir locator signature = 0x07064b50
        if (zip64EocdlBuffer.readUInt32LE(0) !== 0x07064b50) {
          return callback(new Error("invalid zip64 end of central directory locator signature"));
        }
        // 4 - number of the disk with the start of the zip64 end of central directory
        // 8 - relative offset of the zip64 end of central directory record
        var zip64EocdrOffset = readUInt64LE(zip64EocdlBuffer, 8);
        // 16 - total number of disks
        // ZIP64 end of central directory record
        var zip64EocdrBuffer = newBuffer(56);
        readAndAssertNoEof(reader, zip64EocdrBuffer, 0, zip64EocdrBuffer.length, zip64EocdrOffset, function(err) {
          if (err) return callback(err);
          // 0 - zip64 end of central dir signature 4 bytes (0x06064b50)
          if (zip64EocdrBuffer.readUInt32LE(0) !== 0x06064b50) {
            return callback(new Error("invalid zip64 end of central directory record signature"));
          }
          // 4 - size of zip64 end of central directory record 8 bytes
          // 12 - version made by 2 bytes
          // 14 - version needed to extract 2 bytes
          // 16 - number of this disk 4 bytes
          // 20 - number of the disk with the start of the central directory 4 bytes
          // 24 - total number of entries in the central directory on this disk 8 bytes
          // 32 - total number of entries in the central directory 8 bytes
          entryCount = readUInt64LE(zip64EocdrBuffer, 32);
          // 40 - size of the central directory 8 bytes
          // 48 - offset of start of central directory with respect to the starting disk number 8 bytes
          centralDirectoryOffset = readUInt64LE(zip64EocdrBuffer, 48);
          // 56 - zip64 extensible data sector (variable size)
          return callback(null, new ZipFile(reader, centralDirectoryOffset, totalSize, entryCount, comment, options.autoClose, options.lazyEntries, decodeStrings, options.validateEntrySizes, options.strictFileNames));
        });
      });
      return;
    }
    callback(new Error("end of central directory record signature not found"));
  });
}
util.inherits(ZipFile, EventEmitter);
// Streaming reader over a zip file's central directory. Emits "entry" for
// each central directory record, "end" after the last one, "close" when the
// underlying reader closes, and "error" at most once on failure. Construct
// via open()/fromFd()/fromBuffer()/fromRandomAccessReader(), not directly.
function ZipFile(reader, centralDirectoryOffset, fileSize, entryCount, comment, autoClose, lazyEntries, decodeStrings, validateEntrySizes, strictFileNames) {
  var self = this;
  EventEmitter.call(self);
  self.reader = reader;
  // forward close events
  self.reader.on("error", function(err) {
    // error closing the fd
    emitError(self, err);
  });
  self.reader.once("close", function() {
    self.emit("close");
  });
  // Byte offset of the next central directory record to parse.
  self.readEntryCursor = centralDirectoryOffset;
  self.fileSize = fileSize;
  self.entryCount = entryCount;
  self.comment = comment;
  self.entriesRead = 0;
  self.autoClose = !!autoClose;
  self.lazyEntries = !!lazyEntries;
  self.decodeStrings = !!decodeStrings;
  self.validateEntrySizes = !!validateEntrySizes;
  self.strictFileNames = !!strictFileNames;
  self.isOpen = true;
  self.emittedError = false;
  // Eager mode starts pumping "entry" events immediately; lazy mode waits
  // for the consumer to call readEntry().
  if (!self.lazyEntries) self._readEntry();
}
// Release the underlying reader (balances the ref() taken at construction).
// Safe to call repeatedly; only the first call has any effect.
ZipFile.prototype.close = function() {
  if (this.isOpen) {
    this.isOpen = false;
    this.reader.unref();
  }
};
// Report `err` on `self`, first closing the zipfile when autoClose is on.
function emitErrorAndAutoClose(self, err) {
  if (self.autoClose) {
    self.close();
  }
  emitError(self, err);
}
// Emit "error" on `self` at most once; any later errors are dropped so
// consumers only ever observe a single "error" event per zipfile.
function emitError(self, err) {
  if (!self.emittedError) {
    self.emittedError = true;
    self.emit("error", err);
  }
}
// Public API for lazyEntries mode: request that the next "entry" (or "end")
// event be emitted. Throws when the zipfile was not opened lazily, since
// eager mode drives _readEntry() by itself.
ZipFile.prototype.readEntry = function() {
  if (this.lazyEntries) {
    this._readEntry();
  } else {
    throw new Error("readEntry() called without lazyEntries:true");
  }
};
// Parse the next central directory record at readEntryCursor, populate an
// Entry (including ZIP64 and Info-ZIP Unicode Path extensions), and emit it
// as an "entry" event. Emits "end" once entryCount records have been read.
ZipFile.prototype._readEntry = function() {
  var self = this;
  if (self.entryCount === self.entriesRead) {
    // done with metadata
    setImmediate(function() {
      if (self.autoClose) self.close();
      if (self.emittedError) return;
      self.emit("end");
    });
    return;
  }
  if (self.emittedError) return;
  // 46 bytes = fixed-size portion of a central directory file header.
  var buffer = newBuffer(46);
  readAndAssertNoEof(self.reader, buffer, 0, buffer.length, self.readEntryCursor, function(err) {
    if (err) return emitErrorAndAutoClose(self, err);
    if (self.emittedError) return;
    var entry = new Entry();
    // 0 - Central directory file header signature
    var signature = buffer.readUInt32LE(0);
    if (signature !== 0x02014b50) return emitErrorAndAutoClose(self, new Error("invalid central directory file header signature: 0x" + signature.toString(16)));
    // 4 - Version made by
    entry.versionMadeBy = buffer.readUInt16LE(4);
    // 6 - Version needed to extract (minimum)
    entry.versionNeededToExtract = buffer.readUInt16LE(6);
    // 8 - General purpose bit flag
    entry.generalPurposeBitFlag = buffer.readUInt16LE(8);
    // 10 - Compression method
    entry.compressionMethod = buffer.readUInt16LE(10);
    // 12 - File last modification time
    entry.lastModFileTime = buffer.readUInt16LE(12);
    // 14 - File last modification date
    entry.lastModFileDate = buffer.readUInt16LE(14);
    // 16 - CRC-32
    entry.crc32 = buffer.readUInt32LE(16);
    // 20 - Compressed size
    entry.compressedSize = buffer.readUInt32LE(20);
    // 24 - Uncompressed size
    entry.uncompressedSize = buffer.readUInt32LE(24);
    // 28 - File name length (n)
    entry.fileNameLength = buffer.readUInt16LE(28);
    // 30 - Extra field length (m)
    entry.extraFieldLength = buffer.readUInt16LE(30);
    // 32 - File comment length (k)
    entry.fileCommentLength = buffer.readUInt16LE(32);
    // 34 - Disk number where file starts
    // 36 - Internal file attributes
    entry.internalFileAttributes = buffer.readUInt16LE(36);
    // 38 - External file attributes
    entry.externalFileAttributes = buffer.readUInt32LE(38);
    // 42 - Relative offset of local file header
    entry.relativeOffsetOfLocalHeader = buffer.readUInt32LE(42);
    // Bit 6 of the general purpose flag marks strong encryption.
    if (entry.generalPurposeBitFlag & 0x40) return emitErrorAndAutoClose(self, new Error("strong encryption is not supported"));
    self.readEntryCursor += 46;
    // Now read the three variable-length fields that follow the fixed header.
    buffer = newBuffer(entry.fileNameLength + entry.extraFieldLength + entry.fileCommentLength);
    readAndAssertNoEof(self.reader, buffer, 0, buffer.length, self.readEntryCursor, function(err) {
      if (err) return emitErrorAndAutoClose(self, err);
      if (self.emittedError) return;
      // 46 - File name
      // Bit 11 of the general purpose flag means the name/comment are UTF-8.
      var isUtf8 = (entry.generalPurposeBitFlag & 0x800) !== 0;
      entry.fileName = self.decodeStrings ? decodeBuffer(buffer, 0, entry.fileNameLength, isUtf8)
                                          : buffer.slice(0, entry.fileNameLength);
      // 46+n - Extra field
      var fileCommentStart = entry.fileNameLength + entry.extraFieldLength;
      var extraFieldBuffer = buffer.slice(entry.fileNameLength, fileCommentStart);
      entry.extraFields = [];
      var i = 0;
      // Each extra field is: 2-byte id, 2-byte size, then `size` bytes of data.
      while (i < extraFieldBuffer.length - 3) {
        var headerId = extraFieldBuffer.readUInt16LE(i + 0);
        var dataSize = extraFieldBuffer.readUInt16LE(i + 2);
        var dataStart = i + 4;
        var dataEnd = dataStart + dataSize;
        if (dataEnd > extraFieldBuffer.length) return emitErrorAndAutoClose(self, new Error("extra field length exceeds extra field buffer size"));
        var dataBuffer = newBuffer(dataSize);
        extraFieldBuffer.copy(dataBuffer, 0, dataStart, dataEnd);
        entry.extraFields.push({
          id: headerId,
          data: dataBuffer,
        });
        i = dataEnd;
      }
      // 46+n+m - File comment
      entry.fileComment = self.decodeStrings ? decodeBuffer(buffer, fileCommentStart, fileCommentStart + entry.fileCommentLength, isUtf8)
                                             : buffer.slice(fileCommentStart, fileCommentStart + entry.fileCommentLength);
      // compatibility hack for https://github.com/thejoshwolfe/yauzl/issues/47
      entry.comment = entry.fileComment;
      self.readEntryCursor += buffer.length;
      self.entriesRead += 1;
      // Saturated 32-bit fields mean the real values live in the ZIP64
      // Extended Information Extra Field (id 0x0001).
      if (entry.uncompressedSize === 0xffffffff ||
          entry.compressedSize === 0xffffffff ||
          entry.relativeOffsetOfLocalHeader === 0xffffffff) {
        // ZIP64 format
        // find the Zip64 Extended Information Extra Field
        var zip64EiefBuffer = null;
        for (var i = 0; i < entry.extraFields.length; i++) {
          var extraField = entry.extraFields[i];
          if (extraField.id === 0x0001) {
            zip64EiefBuffer = extraField.data;
            break;
          }
        }
        if (zip64EiefBuffer == null) {
          return emitErrorAndAutoClose(self, new Error("expected zip64 extended information extra field"));
        }
        // Only the saturated fields are present, in this fixed order.
        var index = 0;
        // 0 - Original Size          8 bytes
        if (entry.uncompressedSize === 0xffffffff) {
          if (index + 8 > zip64EiefBuffer.length) {
            return emitErrorAndAutoClose(self, new Error("zip64 extended information extra field does not include uncompressed size"));
          }
          entry.uncompressedSize = readUInt64LE(zip64EiefBuffer, index);
          index += 8;
        }
        // 8 - Compressed Size        8 bytes
        if (entry.compressedSize === 0xffffffff) {
          if (index + 8 > zip64EiefBuffer.length) {
            return emitErrorAndAutoClose(self, new Error("zip64 extended information extra field does not include compressed size"));
          }
          entry.compressedSize = readUInt64LE(zip64EiefBuffer, index);
          index += 8;
        }
        // 16 - Relative Header Offset 8 bytes
        if (entry.relativeOffsetOfLocalHeader === 0xffffffff) {
          if (index + 8 > zip64EiefBuffer.length) {
            return emitErrorAndAutoClose(self, new Error("zip64 extended information extra field does not include relative header offset"));
          }
          entry.relativeOffsetOfLocalHeader = readUInt64LE(zip64EiefBuffer, index);
          index += 8;
        }
        // 24 - Disk Start Number      4 bytes
      }
      // check for Info-ZIP Unicode Path Extra Field (0x7075)
      // see https://github.com/thejoshwolfe/yauzl/issues/33
      if (self.decodeStrings) {
        for (var i = 0; i < entry.extraFields.length; i++) {
          var extraField = entry.extraFields[i];
          if (extraField.id === 0x7075) {
            if (extraField.data.length < 6) {
              // too short to be meaningful
              continue;
            }
            // Version       1 byte   version of this extra field, currently 1
            if (extraField.data.readUInt8(0) !== 1) {
              // > Changes may not be backward compatible so this extra
              // > field should not be used if the version is not recognized.
              continue;
            }
            // NameCRC32     4 bytes  File Name Field CRC32 Checksum
            var oldNameCrc32 = extraField.data.readUInt32LE(1);
            if (crc32.unsigned(buffer.slice(0, entry.fileNameLength)) !== oldNameCrc32) {
              // > If the CRC check fails, this UTF-8 Path Extra Field should be
              // > ignored and the File Name field in the header should be used instead.
              continue;
            }
            // UnicodeName   Variable UTF-8 version of the entry File Name
            entry.fileName = decodeBuffer(extraField.data, 5, extraField.data.length, true);
            break;
          }
        }
      }
      // validate file size
      if (self.validateEntrySizes && entry.compressionMethod === 0) {
        // Stored (uncompressed) entries must have matching sizes.
        var expectedCompressedSize = entry.uncompressedSize;
        if (entry.isEncrypted()) {
          // traditional encryption prefixes the file data with a header
          expectedCompressedSize += 12;
        }
        if (entry.compressedSize !== expectedCompressedSize) {
          var msg = "compressed/uncompressed size mismatch for stored file: " + entry.compressedSize + " != " + entry.uncompressedSize;
          return emitErrorAndAutoClose(self, new Error(msg));
        }
      }
      if (self.decodeStrings) {
        if (!self.strictFileNames) {
          // allow backslash
          entry.fileName = entry.fileName.replace(/\\/g, "/");
        }
        var errorMessage = validateFileName(entry.fileName, self.validateFileNameOptions);
        if (errorMessage != null) return emitErrorAndAutoClose(self, new Error(errorMessage));
      }
      self.emit("entry", entry);
      if (!self.lazyEntries) self._readEntry();
    });
  });
};
// Open a readable stream over one entry's file data.
//
// openReadStream(entry, [options], callback(err, readStream))
//
// options.decrypt === false: stream the raw (still-encrypted) bytes; only
//   allowed for encrypted entries, and only if the entry is not also being
//   decompressed.
// options.decompress === false: stream the raw DEFLATE bytes of a compressed
//   entry instead of inflating them.
// options.start / options.end: byte range relative to the start of the file
//   data; only allowed when bytes pass through verbatim (stored entries, or
//   decompress/decrypt explicitly disabled).
ZipFile.prototype.openReadStream = function(entry, options, callback) {
  var self = this;
  // parameter validation
  var relativeStart = 0;
  var relativeEnd = entry.compressedSize;
  if (callback == null) {
    // two-argument form: openReadStream(entry, callback)
    callback = options;
    options = {};
  } else {
    // validate options that the caller has no excuse to get wrong
    if (options.decrypt != null) {
      if (!entry.isEncrypted()) {
        throw new Error("options.decrypt can only be specified for encrypted entries");
      }
      // true is not a meaningful value: decryption is never performed here
      if (options.decrypt !== false) throw new Error("invalid options.decrypt value: " + options.decrypt);
      if (entry.isCompressed()) {
        if (options.decompress !== false) throw new Error("entry is encrypted and compressed, and options.decompress !== false");
      }
    }
    if (options.decompress != null) {
      if (!entry.isCompressed()) {
        throw new Error("options.decompress can only be specified for compressed entries");
      }
      if (!(options.decompress === false || options.decompress === true)) {
        throw new Error("invalid options.decompress value: " + options.decompress);
      }
    }
    if (options.start != null || options.end != null) {
      // ranges only make sense over the raw stored bytes
      if (entry.isCompressed() && options.decompress !== false) {
        throw new Error("start/end range not allowed for compressed entry without options.decompress === false");
      }
      if (entry.isEncrypted() && options.decrypt !== false) {
        throw new Error("start/end range not allowed for encrypted entry without options.decrypt === false");
      }
    }
    if (options.start != null) {
      relativeStart = options.start;
      if (relativeStart < 0) throw new Error("options.start < 0");
      if (relativeStart > entry.compressedSize) throw new Error("options.start > entry.compressedSize");
    }
    if (options.end != null) {
      relativeEnd = options.end;
      if (relativeEnd < 0) throw new Error("options.end < 0");
      if (relativeEnd > entry.compressedSize) throw new Error("options.end > entry.compressedSize");
      if (relativeEnd < relativeStart) throw new Error("options.end < options.start");
    }
  }
  // any further errors can either be caused by the zipfile,
  // or were introduced in a minor version of yauzl,
  // so should be passed to the client rather than thrown.
  if (!self.isOpen) return callback(new Error("closed"));
  if (entry.isEncrypted()) {
    if (options.decrypt !== false) return callback(new Error("entry is encrypted, and options.decrypt !== false"));
  }
  // make sure we don't lose the fd before we open the actual read stream
  self.reader.ref();
  // read and sanity-check the 30-byte local file header before streaming data
  var buffer = newBuffer(30);
  readAndAssertNoEof(self.reader, buffer, 0, buffer.length, entry.relativeOffsetOfLocalHeader, function(err) {
    try {
      if (err) return callback(err);
      // 0 - Local file header signature = 0x04034b50
      var signature = buffer.readUInt32LE(0);
      if (signature !== 0x04034b50) {
        return callback(new Error("invalid local file header signature: 0x" + signature.toString(16)));
      }
      // all this should be redundant
      // 4 - Version needed to extract (minimum)
      // 6 - General purpose bit flag
      // 8 - Compression method
      // 10 - File last modification time
      // 12 - File last modification date
      // 14 - CRC-32
      // 18 - Compressed size
      // 22 - Uncompressed size
      // 26 - File name length (n)
      var fileNameLength = buffer.readUInt16LE(26);
      // 28 - Extra field length (m)
      var extraFieldLength = buffer.readUInt16LE(28);
      // 30 - File name
      // 30+n - Extra field
      // the local header's own name/extra lengths (not the central directory's)
      // determine where the file data starts
      var localFileHeaderEnd = entry.relativeOffsetOfLocalHeader + buffer.length + fileNameLength + extraFieldLength;
      var decompress;
      if (entry.compressionMethod === 0) {
        // 0 - The file is stored (no compression)
        decompress = false;
      } else if (entry.compressionMethod === 8) {
        // 8 - The file is Deflated
        decompress = options.decompress != null ? options.decompress : true;
      } else {
        return callback(new Error("unsupported compression method: " + entry.compressionMethod));
      }
      var fileDataStart = localFileHeaderEnd;
      var fileDataEnd = fileDataStart + entry.compressedSize;
      if (entry.compressedSize !== 0) {
        // bounds check now, because the read streams will probably not complain loud enough.
        // since we're dealing with an unsigned offset plus an unsigned size,
        // we only have 1 thing to check for.
        if (fileDataEnd > self.fileSize) {
          return callback(new Error("file data overflows file bounds: " +
              fileDataStart + " + " + entry.compressedSize + " > " + self.fileSize));
        }
      }
      var readStream = self.reader.createReadStream({
        start: fileDataStart + relativeStart,
        end: fileDataStart + relativeEnd,
      });
      var endpointStream = readStream;
      if (decompress) {
        var destroyed = false;
        var inflateFilter = zlib.createInflateRaw();
        readStream.on("error", function(err) {
          // setImmediate here because errors can be emitted during the first call to pipe()
          setImmediate(function() {
            if (!destroyed) inflateFilter.emit("error", err);
          });
        });
        readStream.pipe(inflateFilter);
        if (self.validateEntrySizes) {
          // count inflated bytes so size mismatches surface as stream errors
          endpointStream = new AssertByteCountStream(entry.uncompressedSize);
          inflateFilter.on("error", function(err) {
            // forward zlib errors to the client-visible stream
            setImmediate(function() {
              if (!destroyed) endpointStream.emit("error", err);
            });
          });
          inflateFilter.pipe(endpointStream);
        } else {
          // the zlib filter is the client-visible stream
          endpointStream = inflateFilter;
        }
        // this is part of yauzl's API, so implement this function on the client-visible stream
        endpointStream.destroy = function() {
          destroyed = true;
          if (inflateFilter !== endpointStream) inflateFilter.unpipe(endpointStream);
          readStream.unpipe(inflateFilter);
          // TODO: the inflateFilter may cause a memory leak. see Issue #27.
          readStream.destroy();
        };
      }
      callback(null, endpointStream);
    } finally {
      // balance the ref() taken above, whether we succeeded or not
      self.reader.unref();
    }
  });
};
// One central directory record. Fields are populated by ZipFile._readEntry
// rather than by this constructor.
function Entry() {
}
// Convert the entry's MS-DOS date/time fields into a JavaScript Date.
Entry.prototype.getLastModDate = function() {
  return dosDateTimeToDate(this.lastModFileDate, this.lastModFileTime);
};
// General purpose bit 0 marks traditional (ZipCrypto) encryption.
Entry.prototype.isEncrypted = function() {
  return (this.generalPurposeBitFlag & 0x1) === 0x1;
};
// Compression method 8 is DEFLATE; 0 (stored) is the only other supported value.
Entry.prototype.isCompressed = function() {
  return this.compressionMethod === 8;
};
// Decode a 16-bit MS-DOS date and time pair into a local-timezone Date.
// date bits: 0-4 day (1-31), 5-8 month (1-12), 9-15 years since 1980.
// time bits: 0-4 seconds/2 (0-29), 5-10 minute (0-59), 11-15 hour (0-23).
function dosDateTimeToDate(date, time) {
  var day = date & 0x1f;
  var month = ((date >> 5) & 0xf) - 1; // Date months are 0-based
  var year = ((date >> 9) & 0x7f) + 1980;
  var second = (time & 0x1f) * 2; // DOS stores even seconds only
  var minute = (time >> 5) & 0x3f;
  var hour = (time >> 11) & 0x1f;
  return new Date(year, month, day, hour, minute, second, 0);
}
// Reject entry names that could escape an extraction directory or that use
// Windows-style separators. Returns an error message string, or null when the
// name is acceptable.
function validateFileName(fileName) {
  if (fileName.indexOf("\\") !== -1) {
    // backslashes are either invalid separators or an escaping hazard
    return "invalid characters in fileName: " + fileName;
  }
  var isDriveLetterPath = /^[a-zA-Z]:/.test(fileName);
  var isRootedPath = /^\//.test(fileName);
  if (isDriveLetterPath || isRootedPath) return "absolute path: " + fileName;
  var segments = fileName.split("/");
  if (segments.indexOf("..") !== -1) return "invalid relative path: " + fileName;
  // all good
  return null;
}
// Read exactly `length` bytes at `position` into `buffer` at `offset`,
// treating a short read as an error.
function readAndAssertNoEof(reader, buffer, offset, length, position, callback) {
  if (length === 0) {
    // fs.read will throw an out-of-bounds error if you try to read 0 bytes from a 0 byte file
    return setImmediate(function() { callback(null, newBuffer(0)); });
  }
  reader.read(buffer, offset, length, position, function(err, bytesRead) {
    if (err) return callback(err);
    if (bytesRead >= length) return callback();
    callback(new Error("unexpected EOF"));
  });
}
util.inherits(AssertByteCountStream, Transform);
// A pass-through Transform that counts the bytes flowing through it and
// raises a stream error if the total ever exceeds, or finishes below, the
// expected byte count.
function AssertByteCountStream(byteCount) {
  Transform.call(this);
  this.actualByteCount = 0;
  this.expectedByteCount = byteCount;
}
AssertByteCountStream.prototype._transform = function(chunk, encoding, cb) {
  this.actualByteCount += chunk.length;
  if (this.actualByteCount <= this.expectedByteCount) return cb(null, chunk);
  cb(new Error("too many bytes in the stream. expected " + this.expectedByteCount + ". got at least " + this.actualByteCount));
};
AssertByteCountStream.prototype._flush = function(cb) {
  if (this.actualByteCount >= this.expectedByteCount) return cb();
  cb(new Error("not enough bytes in the stream. expected " + this.expectedByteCount + ". got only " + this.actualByteCount));
};
util.inherits(RandomAccessReader, EventEmitter);
// Abstract base class for random-access byte sources. Subclasses must
// implement _readStreamForRange(start, end); ref()/unref() reference counting
// delays close() until every pending read and open read stream has finished.
function RandomAccessReader() {
  EventEmitter.call(this);
  this.refCount = 0;
}
// Take a reference; close() is deferred while any references are outstanding.
RandomAccessReader.prototype.ref = function() {
  this.refCount += 1;
};
// Drop a reference; when the count reaches zero, close the reader and emit
// 'close' (or 'error' if closing fails). Unbalanced unref() throws.
RandomAccessReader.prototype.unref = function() {
  var self = this;
  self.refCount -= 1;
  if (self.refCount > 0) return;
  if (self.refCount < 0) throw new Error("invalid unref");
  self.close(onCloseDone);
  function onCloseDone(err) {
    if (err) return self.emit('error', err);
    self.emit('close');
  }
};
// Create a stream for bytes [start, end). The returned stream holds a ref on
// this reader until it flushes or is destroyed, and asserts that exactly
// end - start bytes flow through it.
RandomAccessReader.prototype.createReadStream = function(options) {
  var start = options.start;
  var end = options.end;
  if (start === end) {
    // empty range: end immediately without touching the underlying source
    var emptyStream = new PassThrough();
    setImmediate(function() {
      emptyStream.end();
    });
    return emptyStream;
  }
  var stream = this._readStreamForRange(start, end);
  var destroyed = false;
  var refUnrefFilter = new RefUnrefFilter(this);
  stream.on("error", function(err) {
    // setImmediate because errors can be emitted during the first call to pipe()
    setImmediate(function() {
      if (!destroyed) refUnrefFilter.emit("error", err);
    });
  });
  refUnrefFilter.destroy = function() {
    // NOTE(review): this does not set `destroyed`; only byteCounter.destroy
    // (on the client-visible stream) does, so error forwarding is silenced
    // only once the outermost stream is destroyed.
    stream.unpipe(refUnrefFilter);
    refUnrefFilter.unref();
    stream.destroy();
  };
  var byteCounter = new AssertByteCountStream(end - start);
  refUnrefFilter.on("error", function(err) {
    setImmediate(function() {
      if (!destroyed) byteCounter.emit("error", err);
    });
  });
  byteCounter.destroy = function() {
    destroyed = true;
    refUnrefFilter.unpipe(byteCounter);
    refUnrefFilter.destroy();
  };
  return stream.pipe(refUnrefFilter).pipe(byteCounter);
};
// Subclass responsibility: return a readable stream over bytes [start, end).
RandomAccessReader.prototype._readStreamForRange = function(start, end) {
  throw new Error("not implemented");
};
// Fill `buffer` at `offset` with `length` bytes starting at `position`,
// implemented on top of createReadStream via a collecting write stream.
RandomAccessReader.prototype.read = function(buffer, offset, length, position, callback) {
  var readStream = this.createReadStream({start: position, end: position + length});
  var writeStream = new Writable();
  var written = 0;
  writeStream._write = function(chunk, encoding, cb) {
    chunk.copy(buffer, offset + written, 0, chunk.length);
    written += chunk.length;
    cb();
  };
  writeStream.on("finish", callback);
  readStream.on("error", function(error) {
    callback(error);
  });
  readStream.pipe(writeStream);
};
// Default close is an asynchronous no-op; subclasses override to release
// their underlying resource.
RandomAccessReader.prototype.close = function(callback) {
  setImmediate(callback);
};
util.inherits(RefUnrefFilter, PassThrough);
// PassThrough that holds a reference on `context` (a RandomAccessReader) for
// the lifetime of the stream, releasing it exactly once — on flush or on an
// explicit unref().
function RefUnrefFilter(context) {
  PassThrough.call(this);
  this.context = context;
  this.unreffedYet = false;
  this.context.ref();
}
RefUnrefFilter.prototype._flush = function(cb) {
  this.unref();
  cb();
};
RefUnrefFilter.prototype.unref = function(cb) {
  if (this.unreffedYet) return; // release the reference only once
  this.unreffedYet = true;
  this.context.unref();
};
// Code page 437 (original IBM PC) lookup table: byte value -> character.
var cp437 = '\u0000☺☻♥♦♣♠•◘○◙♂♀♪♫☼►◄↕‼¶§▬↨↑↓→←∟↔▲▼ !"#$%&\'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~⌂ÇüéâäàåçêëèïîìÄÅÉæÆôöòûùÿÖÜ¢£¥₧ƒáíóúñѪº¿⌐¬½¼¡«»░▒▓│┤╡╢╖╕╣║╗╝╜╛┐└┴┬├─┼╞╟╚╔╩╦╠═╬╧╨╤╥╙╘╒╓╫╪┘┌█▄▌▐▀αßΓπΣσµτΦΘΩδ∞φε∩≡±≥≤⌠⌡÷≈°∙·√ⁿ²■ ';
// Decode buffer[start:end] as UTF-8 when isUtf8 is set, otherwise as CP437
// (the ZIP spec's default character encoding).
function decodeBuffer(buffer, start, end, isUtf8) {
  if (isUtf8) return buffer.toString("utf8", start, end);
  var chars = [];
  for (var i = start; i < end; i++) {
    chars.push(cp437[buffer[i]]);
  }
  return chars.join("");
}
// Read a little-endian 64-bit unsigned integer as a JavaScript Number.
// there is no native function for this, because we can't actually store 64-bit integers precisely.
// after 53 bits, JavaScript's Number type (IEEE 754 double) can't store individual integers anymore.
// but since 53 bits is a whole lot more than 32 bits, we do our best anyway.
function readUInt64LE(buffer, offset) {
  var low = buffer.readUInt32LE(offset);
  var high = buffer.readUInt32LE(offset + 4);
  // we can't use bitshifting here, because JavaScript bitshifting only works on 32-bit integers.
  return high * 0x100000000 + low;
  // as long as we're bounds checking the result of this function against the total file size,
  // we'll catch any overflow errors, because we already made sure the total file size was within reason.
}
// Node 10 deprecated new Buffer(); prefer Buffer.allocUnsafe where available.
// The buffer contents are uninitialized in either case — callers fill them.
var newBuffer = typeof Buffer.allocUnsafe === "function"
  ? function(len) { return Buffer.allocUnsafe(len); }
  : function(len) { return new Buffer(len); }; // legacy fallback for old Node
// Fallback callback for API entry points invoked without one: surface any
// error by throwing it, otherwise do nothing.
function defaultCallback(err) {
  if (!err) return;
  throw err;
}

View File

@ -0,0 +1,40 @@
{
"name": "yauzl",
"version": "2.10.0",
"description": "yet another unzip library for node",
"main": "index.js",
"scripts": {
"test": "node test/test.js",
"test-cov": "istanbul cover test/test.js",
"test-travis": "istanbul cover --report lcovonly test/test.js"
},
"repository": {
"type": "git",
"url": "https://github.com/thejoshwolfe/yauzl.git"
},
"keywords": [
"unzip",
"zip",
"stream",
"archive",
"file"
],
"author": "Josh Wolfe <thejoshwolfe@gmail.com>",
"license": "MIT",
"bugs": {
"url": "https://github.com/thejoshwolfe/yauzl/issues"
},
"homepage": "https://github.com/thejoshwolfe/yauzl",
"dependencies": {
"fd-slicer": "~1.1.0",
"buffer-crc32": "~0.2.3"
},
"devDependencies": {
"bl": "~1.0.0",
"istanbul": "~0.3.4",
"pend": "~1.2.0"
},
"files": [
"index.js"
]
}

View File

@ -0,0 +1,24 @@
{
"name": "@vscode/ripgrep",
"version": "1.15.9",
"description": "A module for using ripgrep in a Node project",
"main": "lib/index.js",
"typings": "lib/index.d.ts",
"repository": {
"type": "git",
"url": "https://github.com/microsoft/vscode-ripgrep"
},
"scripts": {
"postinstall": "node ./lib/postinstall.js"
},
"author": "Rob Lourens",
"license": "MIT",
"dependencies": {
"https-proxy-agent": "^7.0.2",
"yauzl": "^2.9.2",
"proxy-from-env": "^1.1.0"
},
"devDependencies": {
"@types/node": "^20.8.4"
}
}