啊啊
This commit is contained in:
21
node_modules/.store/extract-zip@2.0.1/node_modules/@types/yauzl/LICENSE
generated
vendored
Normal file
21
node_modules/.store/extract-zip@2.0.1/node_modules/@types/yauzl/LICENSE
generated
vendored
Normal file
@@ -0,0 +1,21 @@
|
||||
MIT License
|
||||
|
||||
Copyright (c) Microsoft Corporation.
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE
|
||||
15
node_modules/.store/extract-zip@2.0.1/node_modules/@types/yauzl/README.md
generated
vendored
Normal file
15
node_modules/.store/extract-zip@2.0.1/node_modules/@types/yauzl/README.md
generated
vendored
Normal file
@@ -0,0 +1,15 @@
|
||||
# Installation
|
||||
> `npm install --save @types/yauzl`
|
||||
|
||||
# Summary
|
||||
This package contains type definitions for yauzl (https://github.com/thejoshwolfe/yauzl).
|
||||
|
||||
# Details
|
||||
Files were exported from https://github.com/DefinitelyTyped/DefinitelyTyped/tree/master/types/yauzl.
|
||||
|
||||
### Additional Details
|
||||
* Last updated: Tue, 07 Nov 2023 15:11:36 GMT
|
||||
* Dependencies: [@types/node](https://npmjs.com/package/@types/node)
|
||||
|
||||
# Credits
|
||||
These definitions were written by [Florian Keller](https://github.com/ffflorian).
|
||||
110
node_modules/.store/extract-zip@2.0.1/node_modules/@types/yauzl/index.d.ts
generated
vendored
Normal file
110
node_modules/.store/extract-zip@2.0.1/node_modules/@types/yauzl/index.d.ts
generated
vendored
Normal file
@@ -0,0 +1,110 @@
|
||||
/// <reference types="node" />
|
||||
|
||||
import { EventEmitter } from "events";
|
||||
import { Readable } from "stream";
|
||||
|
||||
export abstract class RandomAccessReader extends EventEmitter {
|
||||
_readStreamForRange(start: number, end: number): void;
|
||||
createReadStream(options: { start: number; end: number }): void;
|
||||
read(buffer: Buffer, offset: number, length: number, position: number, callback: (err: Error | null) => void): void;
|
||||
close(callback: (err: Error | null) => void): void;
|
||||
}
|
||||
|
||||
export class Entry {
|
||||
comment: string;
|
||||
compressedSize: number;
|
||||
compressionMethod: number;
|
||||
crc32: number;
|
||||
externalFileAttributes: number;
|
||||
extraFieldLength: number;
|
||||
extraFields: Array<{ id: number; data: Buffer }>;
|
||||
fileCommentLength: number;
|
||||
fileName: string;
|
||||
fileNameLength: number;
|
||||
generalPurposeBitFlag: number;
|
||||
internalFileAttributes: number;
|
||||
lastModFileDate: number;
|
||||
lastModFileTime: number;
|
||||
relativeOffsetOfLocalHeader: number;
|
||||
uncompressedSize: number;
|
||||
versionMadeBy: number;
|
||||
versionNeededToExtract: number;
|
||||
|
||||
getLastModDate(): Date;
|
||||
isEncrypted(): boolean;
|
||||
isCompressed(): boolean;
|
||||
}
|
||||
|
||||
export interface ZipFileOptions {
|
||||
decompress: boolean | null;
|
||||
decrypt: boolean | null;
|
||||
start: number | null;
|
||||
end: number | null;
|
||||
}
|
||||
|
||||
export class ZipFile extends EventEmitter {
|
||||
autoClose: boolean;
|
||||
comment: string;
|
||||
decodeStrings: boolean;
|
||||
emittedError: boolean;
|
||||
entriesRead: number;
|
||||
entryCount: number;
|
||||
fileSize: number;
|
||||
isOpen: boolean;
|
||||
lazyEntries: boolean;
|
||||
readEntryCursor: boolean;
|
||||
validateEntrySizes: boolean;
|
||||
|
||||
constructor(
|
||||
reader: RandomAccessReader,
|
||||
centralDirectoryOffset: number,
|
||||
fileSize: number,
|
||||
entryCount: number,
|
||||
comment: string,
|
||||
autoClose: boolean,
|
||||
lazyEntries: boolean,
|
||||
decodeStrings: boolean,
|
||||
validateEntrySizes: boolean,
|
||||
);
|
||||
|
||||
openReadStream(
|
||||
entry: Entry,
|
||||
options: ZipFileOptions,
|
||||
callback: (err: Error | null, stream: Readable) => void,
|
||||
): void;
|
||||
openReadStream(entry: Entry, callback: (err: Error | null, stream: Readable) => void): void;
|
||||
close(): void;
|
||||
readEntry(): void;
|
||||
}
|
||||
|
||||
export interface Options {
|
||||
autoClose?: boolean | undefined;
|
||||
lazyEntries?: boolean | undefined;
|
||||
decodeStrings?: boolean | undefined;
|
||||
validateEntrySizes?: boolean | undefined;
|
||||
strictFileNames?: boolean | undefined;
|
||||
}
|
||||
|
||||
export function open(path: string, options: Options, callback?: (err: Error | null, zipfile: ZipFile) => void): void;
|
||||
export function open(path: string, callback?: (err: Error | null, zipfile: ZipFile) => void): void;
|
||||
export function fromFd(fd: number, options: Options, callback?: (err: Error | null, zipfile: ZipFile) => void): void;
|
||||
export function fromFd(fd: number, callback?: (err: Error | null, zipfile: ZipFile) => void): void;
|
||||
export function fromBuffer(
|
||||
buffer: Buffer,
|
||||
options: Options,
|
||||
callback?: (err: Error | null, zipfile: ZipFile) => void,
|
||||
): void;
|
||||
export function fromBuffer(buffer: Buffer, callback?: (err: Error | null, zipfile: ZipFile) => void): void;
|
||||
export function fromRandomAccessReader(
|
||||
reader: RandomAccessReader,
|
||||
totalSize: number,
|
||||
options: Options,
|
||||
callback: (err: Error | null, zipfile: ZipFile) => void,
|
||||
): void;
|
||||
export function fromRandomAccessReader(
|
||||
reader: RandomAccessReader,
|
||||
totalSize: number,
|
||||
callback: (err: Error | null, zipfile: ZipFile) => void,
|
||||
): void;
|
||||
export function dosDateTimeToDate(date: number, time: number): Date;
|
||||
export function validateFileName(fileName: string): string | null;
|
||||
30
node_modules/.store/extract-zip@2.0.1/node_modules/@types/yauzl/package.json
generated
vendored
Normal file
30
node_modules/.store/extract-zip@2.0.1/node_modules/@types/yauzl/package.json
generated
vendored
Normal file
@@ -0,0 +1,30 @@
|
||||
{
|
||||
"name": "@types/yauzl",
|
||||
"version": "2.10.3",
|
||||
"description": "TypeScript definitions for yauzl",
|
||||
"homepage": "https://github.com/DefinitelyTyped/DefinitelyTyped/tree/master/types/yauzl",
|
||||
"license": "MIT",
|
||||
"contributors": [
|
||||
{
|
||||
"name": "Florian Keller",
|
||||
"githubUsername": "ffflorian",
|
||||
"url": "https://github.com/ffflorian"
|
||||
}
|
||||
],
|
||||
"main": "",
|
||||
"types": "index.d.ts",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "https://github.com/DefinitelyTyped/DefinitelyTyped.git",
|
||||
"directory": "types/yauzl"
|
||||
},
|
||||
"scripts": {},
|
||||
"dependencies": {
|
||||
"@types/node": "*"
|
||||
},
|
||||
"typesPublisherContentHash": "4fb24c28ac8c0fdb7539555e955c273a2a4a433e99938ed73d9e7df3a9e1e2a7",
|
||||
"typeScriptVersion": "4.5",
|
||||
"__npminstall_done": true,
|
||||
"_from": "@types/yauzl@2.10.3",
|
||||
"_resolved": "https://registry.npmmirror.com/@types/yauzl/-/yauzl-2.10.3.tgz"
|
||||
}
|
||||
20
node_modules/.store/extract-zip@2.0.1/node_modules/debug/LICENSE
generated
vendored
Normal file
20
node_modules/.store/extract-zip@2.0.1/node_modules/debug/LICENSE
generated
vendored
Normal file
@@ -0,0 +1,20 @@
|
||||
(The MIT License)
|
||||
|
||||
Copyright (c) 2014-2017 TJ Holowaychuk <tj@vision-media.ca>
|
||||
Copyright (c) 2018-2021 Josh Junon
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy of this software
|
||||
and associated documentation files (the 'Software'), to deal in the Software without restriction,
|
||||
including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense,
|
||||
and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so,
|
||||
subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all copies or substantial
|
||||
portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT
|
||||
LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
|
||||
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
|
||||
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
|
||||
SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||||
|
||||
481
node_modules/.store/extract-zip@2.0.1/node_modules/debug/README.md
generated
vendored
Normal file
481
node_modules/.store/extract-zip@2.0.1/node_modules/debug/README.md
generated
vendored
Normal file
@@ -0,0 +1,481 @@
|
||||
# debug
|
||||
[](https://travis-ci.org/debug-js/debug) [](https://coveralls.io/github/debug-js/debug?branch=master) [](https://visionmedia-community-slackin.now.sh/) [](#backers)
|
||||
[](#sponsors)
|
||||
|
||||
<img width="647" src="https://user-images.githubusercontent.com/71256/29091486-fa38524c-7c37-11e7-895f-e7ec8e1039b6.png">
|
||||
|
||||
A tiny JavaScript debugging utility modelled after Node.js core's debugging
|
||||
technique. Works in Node.js and web browsers.
|
||||
|
||||
## Installation
|
||||
|
||||
```bash
|
||||
$ npm install debug
|
||||
```
|
||||
|
||||
## Usage
|
||||
|
||||
`debug` exposes a function; simply pass this function the name of your module, and it will return a decorated version of `console.error` for you to pass debug statements to. This will allow you to toggle the debug output for different parts of your module as well as the module as a whole.
|
||||
|
||||
Example [_app.js_](./examples/node/app.js):
|
||||
|
||||
```js
|
||||
var debug = require('debug')('http')
|
||||
, http = require('http')
|
||||
, name = 'My App';
|
||||
|
||||
// fake app
|
||||
|
||||
debug('booting %o', name);
|
||||
|
||||
http.createServer(function(req, res){
|
||||
debug(req.method + ' ' + req.url);
|
||||
res.end('hello\n');
|
||||
}).listen(3000, function(){
|
||||
debug('listening');
|
||||
});
|
||||
|
||||
// fake worker of some kind
|
||||
|
||||
require('./worker');
|
||||
```
|
||||
|
||||
Example [_worker.js_](./examples/node/worker.js):
|
||||
|
||||
```js
|
||||
var a = require('debug')('worker:a')
|
||||
, b = require('debug')('worker:b');
|
||||
|
||||
function work() {
|
||||
a('doing lots of uninteresting work');
|
||||
setTimeout(work, Math.random() * 1000);
|
||||
}
|
||||
|
||||
work();
|
||||
|
||||
function workb() {
|
||||
b('doing some work');
|
||||
setTimeout(workb, Math.random() * 2000);
|
||||
}
|
||||
|
||||
workb();
|
||||
```
|
||||
|
||||
The `DEBUG` environment variable is then used to enable these based on space or
|
||||
comma-delimited names.
|
||||
|
||||
Here are some examples:
|
||||
|
||||
<img width="647" alt="screen shot 2017-08-08 at 12 53 04 pm" src="https://user-images.githubusercontent.com/71256/29091703-a6302cdc-7c38-11e7-8304-7c0b3bc600cd.png">
|
||||
<img width="647" alt="screen shot 2017-08-08 at 12 53 38 pm" src="https://user-images.githubusercontent.com/71256/29091700-a62a6888-7c38-11e7-800b-db911291ca2b.png">
|
||||
<img width="647" alt="screen shot 2017-08-08 at 12 53 25 pm" src="https://user-images.githubusercontent.com/71256/29091701-a62ea114-7c38-11e7-826a-2692bedca740.png">
|
||||
|
||||
#### Windows command prompt notes
|
||||
|
||||
##### CMD
|
||||
|
||||
On Windows the environment variable is set using the `set` command.
|
||||
|
||||
```cmd
|
||||
set DEBUG=*,-not_this
|
||||
```
|
||||
|
||||
Example:
|
||||
|
||||
```cmd
|
||||
set DEBUG=* & node app.js
|
||||
```
|
||||
|
||||
##### PowerShell (VS Code default)
|
||||
|
||||
PowerShell uses different syntax to set environment variables.
|
||||
|
||||
```cmd
|
||||
$env:DEBUG = "*,-not_this"
|
||||
```
|
||||
|
||||
Example:
|
||||
|
||||
```cmd
|
||||
$env:DEBUG='app';node app.js
|
||||
```
|
||||
|
||||
Then, run the program to be debugged as usual.
|
||||
|
||||
npm script example:
|
||||
```js
|
||||
"windowsDebug": "@powershell -Command $env:DEBUG='*';node app.js",
|
||||
```
|
||||
|
||||
## Namespace Colors
|
||||
|
||||
Every debug instance has a color generated for it based on its namespace name.
|
||||
This helps when visually parsing the debug output to identify which debug instance
|
||||
a debug line belongs to.
|
||||
|
||||
#### Node.js
|
||||
|
||||
In Node.js, colors are enabled when stderr is a TTY. You also _should_ install
|
||||
the [`supports-color`](https://npmjs.org/supports-color) module alongside debug,
|
||||
otherwise debug will only use a small handful of basic colors.
|
||||
|
||||
<img width="521" src="https://user-images.githubusercontent.com/71256/29092181-47f6a9e6-7c3a-11e7-9a14-1928d8a711cd.png">
|
||||
|
||||
#### Web Browser
|
||||
|
||||
Colors are also enabled on "Web Inspectors" that understand the `%c` formatting
|
||||
option. These are WebKit web inspectors, Firefox ([since version
|
||||
31](https://hacks.mozilla.org/2014/05/editable-box-model-multiple-selection-sublime-text-keys-much-more-firefox-developer-tools-episode-31/))
|
||||
and the Firebug plugin for Firefox (any version).
|
||||
|
||||
<img width="524" src="https://user-images.githubusercontent.com/71256/29092033-b65f9f2e-7c39-11e7-8e32-f6f0d8e865c1.png">
|
||||
|
||||
|
||||
## Millisecond diff
|
||||
|
||||
When actively developing an application it can be useful to see when the time spent between one `debug()` call and the next. Suppose for example you invoke `debug()` before requesting a resource, and after as well, the "+NNNms" will show you how much time was spent between calls.
|
||||
|
||||
<img width="647" src="https://user-images.githubusercontent.com/71256/29091486-fa38524c-7c37-11e7-895f-e7ec8e1039b6.png">
|
||||
|
||||
When stdout is not a TTY, `Date#toISOString()` is used, making it more useful for logging the debug information as shown below:
|
||||
|
||||
<img width="647" src="https://user-images.githubusercontent.com/71256/29091956-6bd78372-7c39-11e7-8c55-c948396d6edd.png">
|
||||
|
||||
|
||||
## Conventions
|
||||
|
||||
If you're using this in one or more of your libraries, you _should_ use the name of your library so that developers may toggle debugging as desired without guessing names. If you have more than one debuggers you _should_ prefix them with your library name and use ":" to separate features. For example "bodyParser" from Connect would then be "connect:bodyParser". If you append a "*" to the end of your name, it will always be enabled regardless of the setting of the DEBUG environment variable. You can then use it for normal output as well as debug output.
|
||||
|
||||
## Wildcards
|
||||
|
||||
The `*` character may be used as a wildcard. Suppose for example your library has
|
||||
debuggers named "connect:bodyParser", "connect:compress", "connect:session",
|
||||
instead of listing all three with
|
||||
`DEBUG=connect:bodyParser,connect:compress,connect:session`, you may simply do
|
||||
`DEBUG=connect:*`, or to run everything using this module simply use `DEBUG=*`.
|
||||
|
||||
You can also exclude specific debuggers by prefixing them with a "-" character.
|
||||
For example, `DEBUG=*,-connect:*` would include all debuggers except those
|
||||
starting with "connect:".
|
||||
|
||||
## Environment Variables
|
||||
|
||||
When running through Node.js, you can set a few environment variables that will
|
||||
change the behavior of the debug logging:
|
||||
|
||||
| Name | Purpose |
|
||||
|-----------|-------------------------------------------------|
|
||||
| `DEBUG` | Enables/disables specific debugging namespaces. |
|
||||
| `DEBUG_HIDE_DATE` | Hide date from debug output (non-TTY). |
|
||||
| `DEBUG_COLORS`| Whether or not to use colors in the debug output. |
|
||||
| `DEBUG_DEPTH` | Object inspection depth. |
|
||||
| `DEBUG_SHOW_HIDDEN` | Shows hidden properties on inspected objects. |
|
||||
|
||||
|
||||
__Note:__ The environment variables beginning with `DEBUG_` end up being
|
||||
converted into an Options object that gets used with `%o`/`%O` formatters.
|
||||
See the Node.js documentation for
|
||||
[`util.inspect()`](https://nodejs.org/api/util.html#util_util_inspect_object_options)
|
||||
for the complete list.
|
||||
|
||||
## Formatters
|
||||
|
||||
Debug uses [printf-style](https://wikipedia.org/wiki/Printf_format_string) formatting.
|
||||
Below are the officially supported formatters:
|
||||
|
||||
| Formatter | Representation |
|
||||
|-----------|----------------|
|
||||
| `%O` | Pretty-print an Object on multiple lines. |
|
||||
| `%o` | Pretty-print an Object all on a single line. |
|
||||
| `%s` | String. |
|
||||
| `%d` | Number (both integer and float). |
|
||||
| `%j` | JSON. Replaced with the string '[Circular]' if the argument contains circular references. |
|
||||
| `%%` | Single percent sign ('%'). This does not consume an argument. |
|
||||
|
||||
|
||||
### Custom formatters
|
||||
|
||||
You can add custom formatters by extending the `debug.formatters` object.
|
||||
For example, if you wanted to add support for rendering a Buffer as hex with
|
||||
`%h`, you could do something like:
|
||||
|
||||
```js
|
||||
const createDebug = require('debug')
|
||||
createDebug.formatters.h = (v) => {
|
||||
return v.toString('hex')
|
||||
}
|
||||
|
||||
// …elsewhere
|
||||
const debug = createDebug('foo')
|
||||
debug('this is hex: %h', new Buffer('hello world'))
|
||||
// foo this is hex: 68656c6c6f20776f726c6421 +0ms
|
||||
```
|
||||
|
||||
|
||||
## Browser Support
|
||||
|
||||
You can build a browser-ready script using [browserify](https://github.com/substack/node-browserify),
|
||||
or just use the [browserify-as-a-service](https://wzrd.in/) [build](https://wzrd.in/standalone/debug@latest),
|
||||
if you don't want to build it yourself.
|
||||
|
||||
Debug's enable state is currently persisted by `localStorage`.
|
||||
Consider the situation shown below where you have `worker:a` and `worker:b`,
|
||||
and wish to debug both. You can enable this using `localStorage.debug`:
|
||||
|
||||
```js
|
||||
localStorage.debug = 'worker:*'
|
||||
```
|
||||
|
||||
And then refresh the page.
|
||||
|
||||
```js
|
||||
a = debug('worker:a');
|
||||
b = debug('worker:b');
|
||||
|
||||
setInterval(function(){
|
||||
a('doing some work');
|
||||
}, 1000);
|
||||
|
||||
setInterval(function(){
|
||||
b('doing some work');
|
||||
}, 1200);
|
||||
```
|
||||
|
||||
In Chromium-based web browsers (e.g. Brave, Chrome, and Electron), the JavaScript console will—by default—only show messages logged by `debug` if the "Verbose" log level is _enabled_.
|
||||
|
||||
<img width="647" src="https://user-images.githubusercontent.com/7143133/152083257-29034707-c42c-4959-8add-3cee850e6fcf.png">
|
||||
|
||||
## Output streams
|
||||
|
||||
By default `debug` will log to stderr, however this can be configured per-namespace by overriding the `log` method:
|
||||
|
||||
Example [_stdout.js_](./examples/node/stdout.js):
|
||||
|
||||
```js
|
||||
var debug = require('debug');
|
||||
var error = debug('app:error');
|
||||
|
||||
// by default stderr is used
|
||||
error('goes to stderr!');
|
||||
|
||||
var log = debug('app:log');
|
||||
// set this namespace to log via console.log
|
||||
log.log = console.log.bind(console); // don't forget to bind to console!
|
||||
log('goes to stdout');
|
||||
error('still goes to stderr!');
|
||||
|
||||
// set all output to go via console.info
|
||||
// overrides all per-namespace log settings
|
||||
debug.log = console.info.bind(console);
|
||||
error('now goes to stdout via console.info');
|
||||
log('still goes to stdout, but via console.info now');
|
||||
```
|
||||
|
||||
## Extend
|
||||
You can simply extend debugger
|
||||
```js
|
||||
const log = require('debug')('auth');
|
||||
|
||||
//creates new debug instance with extended namespace
|
||||
const logSign = log.extend('sign');
|
||||
const logLogin = log.extend('login');
|
||||
|
||||
log('hello'); // auth hello
|
||||
logSign('hello'); //auth:sign hello
|
||||
logLogin('hello'); //auth:login hello
|
||||
```
|
||||
|
||||
## Set dynamically
|
||||
|
||||
You can also enable debug dynamically by calling the `enable()` method :
|
||||
|
||||
```js
|
||||
let debug = require('debug');
|
||||
|
||||
console.log(1, debug.enabled('test'));
|
||||
|
||||
debug.enable('test');
|
||||
console.log(2, debug.enabled('test'));
|
||||
|
||||
debug.disable();
|
||||
console.log(3, debug.enabled('test'));
|
||||
|
||||
```
|
||||
|
||||
print :
|
||||
```
|
||||
1 false
|
||||
2 true
|
||||
3 false
|
||||
```
|
||||
|
||||
Usage :
|
||||
`enable(namespaces)`
|
||||
`namespaces` can include modes separated by a colon and wildcards.
|
||||
|
||||
Note that calling `enable()` completely overrides previously set DEBUG variable :
|
||||
|
||||
```
|
||||
$ DEBUG=foo node -e 'var dbg = require("debug"); dbg.enable("bar"); console.log(dbg.enabled("foo"))'
|
||||
=> false
|
||||
```
|
||||
|
||||
`disable()`
|
||||
|
||||
Will disable all namespaces. The functions returns the namespaces currently
|
||||
enabled (and skipped). This can be useful if you want to disable debugging
|
||||
temporarily without knowing what was enabled to begin with.
|
||||
|
||||
For example:
|
||||
|
||||
```js
|
||||
let debug = require('debug');
|
||||
debug.enable('foo:*,-foo:bar');
|
||||
let namespaces = debug.disable();
|
||||
debug.enable(namespaces);
|
||||
```
|
||||
|
||||
Note: There is no guarantee that the string will be identical to the initial
|
||||
enable string, but semantically they will be identical.
|
||||
|
||||
## Checking whether a debug target is enabled
|
||||
|
||||
After you've created a debug instance, you can determine whether or not it is
|
||||
enabled by checking the `enabled` property:
|
||||
|
||||
```javascript
|
||||
const debug = require('debug')('http');
|
||||
|
||||
if (debug.enabled) {
|
||||
// do stuff...
|
||||
}
|
||||
```
|
||||
|
||||
You can also manually toggle this property to force the debug instance to be
|
||||
enabled or disabled.
|
||||
|
||||
## Usage in child processes
|
||||
|
||||
Due to the way `debug` detects if the output is a TTY or not, colors are not shown in child processes when `stderr` is piped. A solution is to pass the `DEBUG_COLORS=1` environment variable to the child process.
|
||||
For example:
|
||||
|
||||
```javascript
|
||||
worker = fork(WORKER_WRAP_PATH, [workerPath], {
|
||||
stdio: [
|
||||
/* stdin: */ 0,
|
||||
/* stdout: */ 'pipe',
|
||||
/* stderr: */ 'pipe',
|
||||
'ipc',
|
||||
],
|
||||
env: Object.assign({}, process.env, {
|
||||
DEBUG_COLORS: 1 // without this settings, colors won't be shown
|
||||
}),
|
||||
});
|
||||
|
||||
worker.stderr.pipe(process.stderr, { end: false });
|
||||
```
|
||||
|
||||
|
||||
## Authors
|
||||
|
||||
- TJ Holowaychuk
|
||||
- Nathan Rajlich
|
||||
- Andrew Rhyne
|
||||
- Josh Junon
|
||||
|
||||
## Backers
|
||||
|
||||
Support us with a monthly donation and help us continue our activities. [[Become a backer](https://opencollective.com/debug#backer)]
|
||||
|
||||
<a href="https://opencollective.com/debug/backer/0/website" target="_blank"><img src="https://opencollective.com/debug/backer/0/avatar.svg"></a>
|
||||
<a href="https://opencollective.com/debug/backer/1/website" target="_blank"><img src="https://opencollective.com/debug/backer/1/avatar.svg"></a>
|
||||
<a href="https://opencollective.com/debug/backer/2/website" target="_blank"><img src="https://opencollective.com/debug/backer/2/avatar.svg"></a>
|
||||
<a href="https://opencollective.com/debug/backer/3/website" target="_blank"><img src="https://opencollective.com/debug/backer/3/avatar.svg"></a>
|
||||
<a href="https://opencollective.com/debug/backer/4/website" target="_blank"><img src="https://opencollective.com/debug/backer/4/avatar.svg"></a>
|
||||
<a href="https://opencollective.com/debug/backer/5/website" target="_blank"><img src="https://opencollective.com/debug/backer/5/avatar.svg"></a>
|
||||
<a href="https://opencollective.com/debug/backer/6/website" target="_blank"><img src="https://opencollective.com/debug/backer/6/avatar.svg"></a>
|
||||
<a href="https://opencollective.com/debug/backer/7/website" target="_blank"><img src="https://opencollective.com/debug/backer/7/avatar.svg"></a>
|
||||
<a href="https://opencollective.com/debug/backer/8/website" target="_blank"><img src="https://opencollective.com/debug/backer/8/avatar.svg"></a>
|
||||
<a href="https://opencollective.com/debug/backer/9/website" target="_blank"><img src="https://opencollective.com/debug/backer/9/avatar.svg"></a>
|
||||
<a href="https://opencollective.com/debug/backer/10/website" target="_blank"><img src="https://opencollective.com/debug/backer/10/avatar.svg"></a>
|
||||
<a href="https://opencollective.com/debug/backer/11/website" target="_blank"><img src="https://opencollective.com/debug/backer/11/avatar.svg"></a>
|
||||
<a href="https://opencollective.com/debug/backer/12/website" target="_blank"><img src="https://opencollective.com/debug/backer/12/avatar.svg"></a>
|
||||
<a href="https://opencollective.com/debug/backer/13/website" target="_blank"><img src="https://opencollective.com/debug/backer/13/avatar.svg"></a>
|
||||
<a href="https://opencollective.com/debug/backer/14/website" target="_blank"><img src="https://opencollective.com/debug/backer/14/avatar.svg"></a>
|
||||
<a href="https://opencollective.com/debug/backer/15/website" target="_blank"><img src="https://opencollective.com/debug/backer/15/avatar.svg"></a>
|
||||
<a href="https://opencollective.com/debug/backer/16/website" target="_blank"><img src="https://opencollective.com/debug/backer/16/avatar.svg"></a>
|
||||
<a href="https://opencollective.com/debug/backer/17/website" target="_blank"><img src="https://opencollective.com/debug/backer/17/avatar.svg"></a>
|
||||
<a href="https://opencollective.com/debug/backer/18/website" target="_blank"><img src="https://opencollective.com/debug/backer/18/avatar.svg"></a>
|
||||
<a href="https://opencollective.com/debug/backer/19/website" target="_blank"><img src="https://opencollective.com/debug/backer/19/avatar.svg"></a>
|
||||
<a href="https://opencollective.com/debug/backer/20/website" target="_blank"><img src="https://opencollective.com/debug/backer/20/avatar.svg"></a>
|
||||
<a href="https://opencollective.com/debug/backer/21/website" target="_blank"><img src="https://opencollective.com/debug/backer/21/avatar.svg"></a>
|
||||
<a href="https://opencollective.com/debug/backer/22/website" target="_blank"><img src="https://opencollective.com/debug/backer/22/avatar.svg"></a>
|
||||
<a href="https://opencollective.com/debug/backer/23/website" target="_blank"><img src="https://opencollective.com/debug/backer/23/avatar.svg"></a>
|
||||
<a href="https://opencollective.com/debug/backer/24/website" target="_blank"><img src="https://opencollective.com/debug/backer/24/avatar.svg"></a>
|
||||
<a href="https://opencollective.com/debug/backer/25/website" target="_blank"><img src="https://opencollective.com/debug/backer/25/avatar.svg"></a>
|
||||
<a href="https://opencollective.com/debug/backer/26/website" target="_blank"><img src="https://opencollective.com/debug/backer/26/avatar.svg"></a>
|
||||
<a href="https://opencollective.com/debug/backer/27/website" target="_blank"><img src="https://opencollective.com/debug/backer/27/avatar.svg"></a>
|
||||
<a href="https://opencollective.com/debug/backer/28/website" target="_blank"><img src="https://opencollective.com/debug/backer/28/avatar.svg"></a>
|
||||
<a href="https://opencollective.com/debug/backer/29/website" target="_blank"><img src="https://opencollective.com/debug/backer/29/avatar.svg"></a>
|
||||
|
||||
|
||||
## Sponsors
|
||||
|
||||
Become a sponsor and get your logo on our README on Github with a link to your site. [[Become a sponsor](https://opencollective.com/debug#sponsor)]
|
||||
|
||||
<a href="https://opencollective.com/debug/sponsor/0/website" target="_blank"><img src="https://opencollective.com/debug/sponsor/0/avatar.svg"></a>
|
||||
<a href="https://opencollective.com/debug/sponsor/1/website" target="_blank"><img src="https://opencollective.com/debug/sponsor/1/avatar.svg"></a>
|
||||
<a href="https://opencollective.com/debug/sponsor/2/website" target="_blank"><img src="https://opencollective.com/debug/sponsor/2/avatar.svg"></a>
|
||||
<a href="https://opencollective.com/debug/sponsor/3/website" target="_blank"><img src="https://opencollective.com/debug/sponsor/3/avatar.svg"></a>
|
||||
<a href="https://opencollective.com/debug/sponsor/4/website" target="_blank"><img src="https://opencollective.com/debug/sponsor/4/avatar.svg"></a>
|
||||
<a href="https://opencollective.com/debug/sponsor/5/website" target="_blank"><img src="https://opencollective.com/debug/sponsor/5/avatar.svg"></a>
|
||||
<a href="https://opencollective.com/debug/sponsor/6/website" target="_blank"><img src="https://opencollective.com/debug/sponsor/6/avatar.svg"></a>
|
||||
<a href="https://opencollective.com/debug/sponsor/7/website" target="_blank"><img src="https://opencollective.com/debug/sponsor/7/avatar.svg"></a>
|
||||
<a href="https://opencollective.com/debug/sponsor/8/website" target="_blank"><img src="https://opencollective.com/debug/sponsor/8/avatar.svg"></a>
|
||||
<a href="https://opencollective.com/debug/sponsor/9/website" target="_blank"><img src="https://opencollective.com/debug/sponsor/9/avatar.svg"></a>
|
||||
<a href="https://opencollective.com/debug/sponsor/10/website" target="_blank"><img src="https://opencollective.com/debug/sponsor/10/avatar.svg"></a>
|
||||
<a href="https://opencollective.com/debug/sponsor/11/website" target="_blank"><img src="https://opencollective.com/debug/sponsor/11/avatar.svg"></a>
|
||||
<a href="https://opencollective.com/debug/sponsor/12/website" target="_blank"><img src="https://opencollective.com/debug/sponsor/12/avatar.svg"></a>
|
||||
<a href="https://opencollective.com/debug/sponsor/13/website" target="_blank"><img src="https://opencollective.com/debug/sponsor/13/avatar.svg"></a>
|
||||
<a href="https://opencollective.com/debug/sponsor/14/website" target="_blank"><img src="https://opencollective.com/debug/sponsor/14/avatar.svg"></a>
|
||||
<a href="https://opencollective.com/debug/sponsor/15/website" target="_blank"><img src="https://opencollective.com/debug/sponsor/15/avatar.svg"></a>
|
||||
<a href="https://opencollective.com/debug/sponsor/16/website" target="_blank"><img src="https://opencollective.com/debug/sponsor/16/avatar.svg"></a>
|
||||
<a href="https://opencollective.com/debug/sponsor/17/website" target="_blank"><img src="https://opencollective.com/debug/sponsor/17/avatar.svg"></a>
|
||||
<a href="https://opencollective.com/debug/sponsor/18/website" target="_blank"><img src="https://opencollective.com/debug/sponsor/18/avatar.svg"></a>
|
||||
<a href="https://opencollective.com/debug/sponsor/19/website" target="_blank"><img src="https://opencollective.com/debug/sponsor/19/avatar.svg"></a>
|
||||
<a href="https://opencollective.com/debug/sponsor/20/website" target="_blank"><img src="https://opencollective.com/debug/sponsor/20/avatar.svg"></a>
|
||||
<a href="https://opencollective.com/debug/sponsor/21/website" target="_blank"><img src="https://opencollective.com/debug/sponsor/21/avatar.svg"></a>
|
||||
<a href="https://opencollective.com/debug/sponsor/22/website" target="_blank"><img src="https://opencollective.com/debug/sponsor/22/avatar.svg"></a>
|
||||
<a href="https://opencollective.com/debug/sponsor/23/website" target="_blank"><img src="https://opencollective.com/debug/sponsor/23/avatar.svg"></a>
|
||||
<a href="https://opencollective.com/debug/sponsor/24/website" target="_blank"><img src="https://opencollective.com/debug/sponsor/24/avatar.svg"></a>
|
||||
<a href="https://opencollective.com/debug/sponsor/25/website" target="_blank"><img src="https://opencollective.com/debug/sponsor/25/avatar.svg"></a>
|
||||
<a href="https://opencollective.com/debug/sponsor/26/website" target="_blank"><img src="https://opencollective.com/debug/sponsor/26/avatar.svg"></a>
|
||||
<a href="https://opencollective.com/debug/sponsor/27/website" target="_blank"><img src="https://opencollective.com/debug/sponsor/27/avatar.svg"></a>
|
||||
<a href="https://opencollective.com/debug/sponsor/28/website" target="_blank"><img src="https://opencollective.com/debug/sponsor/28/avatar.svg"></a>
|
||||
<a href="https://opencollective.com/debug/sponsor/29/website" target="_blank"><img src="https://opencollective.com/debug/sponsor/29/avatar.svg"></a>
|
||||
|
||||
## License
|
||||
|
||||
(The MIT License)
|
||||
|
||||
Copyright (c) 2014-2017 TJ Holowaychuk <tj@vision-media.ca>
|
||||
Copyright (c) 2018-2021 Josh Junon
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining
|
||||
a copy of this software and associated documentation files (the
|
||||
'Software'), to deal in the Software without restriction, including
|
||||
without limitation the rights to use, copy, modify, merge, publish,
|
||||
distribute, sublicense, and/or sell copies of the Software, and to
|
||||
permit persons to whom the Software is furnished to do so, subject to
|
||||
the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be
|
||||
included in all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND,
|
||||
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
||||
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
|
||||
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
|
||||
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
|
||||
TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
|
||||
SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||||
62
node_modules/.store/extract-zip@2.0.1/node_modules/debug/package.json
generated
vendored
Normal file
62
node_modules/.store/extract-zip@2.0.1/node_modules/debug/package.json
generated
vendored
Normal file
@@ -0,0 +1,62 @@
|
||||
{
|
||||
"name": "debug",
|
||||
"version": "4.3.4",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "git://github.com/debug-js/debug.git"
|
||||
},
|
||||
"description": "Lightweight debugging utility for Node.js and the browser",
|
||||
"keywords": [
|
||||
"debug",
|
||||
"log",
|
||||
"debugger"
|
||||
],
|
||||
"files": [
|
||||
"src",
|
||||
"LICENSE",
|
||||
"README.md"
|
||||
],
|
||||
"author": "Josh Junon <josh.junon@protonmail.com>",
|
||||
"contributors": [
|
||||
"TJ Holowaychuk <tj@vision-media.ca>",
|
||||
"Nathan Rajlich <nathan@tootallnate.net> (http://n8.io)",
|
||||
"Andrew Rhyne <rhyneandrew@gmail.com>"
|
||||
],
|
||||
"license": "MIT",
|
||||
"scripts": {
|
||||
"lint": "xo",
|
||||
"test": "npm run test:node && npm run test:browser && npm run lint",
|
||||
"test:node": "istanbul cover _mocha -- test.js",
|
||||
"test:browser": "karma start --single-run",
|
||||
"test:coverage": "cat ./coverage/lcov.info | coveralls"
|
||||
},
|
||||
"dependencies": {
|
||||
"ms": "2.1.2"
|
||||
},
|
||||
"devDependencies": {
|
||||
"brfs": "^2.0.1",
|
||||
"browserify": "^16.2.3",
|
||||
"coveralls": "^3.0.2",
|
||||
"istanbul": "^0.4.5",
|
||||
"karma": "^3.1.4",
|
||||
"karma-browserify": "^6.0.0",
|
||||
"karma-chrome-launcher": "^2.2.0",
|
||||
"karma-mocha": "^1.3.0",
|
||||
"mocha": "^5.2.0",
|
||||
"mocha-lcov-reporter": "^1.2.0",
|
||||
"xo": "^0.23.0"
|
||||
},
|
||||
"peerDependenciesMeta": {
|
||||
"supports-color": {
|
||||
"optional": true
|
||||
}
|
||||
},
|
||||
"main": "./src/index.js",
|
||||
"browser": "./src/browser.js",
|
||||
"engines": {
|
||||
"node": ">=6.0"
|
||||
},
|
||||
"__npminstall_done": true,
|
||||
"_from": "debug@4.3.4",
|
||||
"_resolved": "https://registry.npmmirror.com/debug/-/debug-4.3.4.tgz"
|
||||
}
|
||||
269
node_modules/.store/extract-zip@2.0.1/node_modules/debug/src/browser.js
generated
vendored
Normal file
269
node_modules/.store/extract-zip@2.0.1/node_modules/debug/src/browser.js
generated
vendored
Normal file
@@ -0,0 +1,269 @@
|
||||
/* eslint-env browser */
|
||||
|
||||
/**
|
||||
* This is the web browser implementation of `debug()`.
|
||||
*/
|
||||
|
||||
exports.formatArgs = formatArgs;
|
||||
exports.save = save;
|
||||
exports.load = load;
|
||||
exports.useColors = useColors;
|
||||
exports.storage = localstorage();
|
||||
exports.destroy = (() => {
|
||||
let warned = false;
|
||||
|
||||
return () => {
|
||||
if (!warned) {
|
||||
warned = true;
|
||||
console.warn('Instance method `debug.destroy()` is deprecated and no longer does anything. It will be removed in the next major version of `debug`.');
|
||||
}
|
||||
};
|
||||
})();
|
||||
|
||||
/**
|
||||
* Colors.
|
||||
*/
|
||||
|
||||
exports.colors = [
|
||||
'#0000CC',
|
||||
'#0000FF',
|
||||
'#0033CC',
|
||||
'#0033FF',
|
||||
'#0066CC',
|
||||
'#0066FF',
|
||||
'#0099CC',
|
||||
'#0099FF',
|
||||
'#00CC00',
|
||||
'#00CC33',
|
||||
'#00CC66',
|
||||
'#00CC99',
|
||||
'#00CCCC',
|
||||
'#00CCFF',
|
||||
'#3300CC',
|
||||
'#3300FF',
|
||||
'#3333CC',
|
||||
'#3333FF',
|
||||
'#3366CC',
|
||||
'#3366FF',
|
||||
'#3399CC',
|
||||
'#3399FF',
|
||||
'#33CC00',
|
||||
'#33CC33',
|
||||
'#33CC66',
|
||||
'#33CC99',
|
||||
'#33CCCC',
|
||||
'#33CCFF',
|
||||
'#6600CC',
|
||||
'#6600FF',
|
||||
'#6633CC',
|
||||
'#6633FF',
|
||||
'#66CC00',
|
||||
'#66CC33',
|
||||
'#9900CC',
|
||||
'#9900FF',
|
||||
'#9933CC',
|
||||
'#9933FF',
|
||||
'#99CC00',
|
||||
'#99CC33',
|
||||
'#CC0000',
|
||||
'#CC0033',
|
||||
'#CC0066',
|
||||
'#CC0099',
|
||||
'#CC00CC',
|
||||
'#CC00FF',
|
||||
'#CC3300',
|
||||
'#CC3333',
|
||||
'#CC3366',
|
||||
'#CC3399',
|
||||
'#CC33CC',
|
||||
'#CC33FF',
|
||||
'#CC6600',
|
||||
'#CC6633',
|
||||
'#CC9900',
|
||||
'#CC9933',
|
||||
'#CCCC00',
|
||||
'#CCCC33',
|
||||
'#FF0000',
|
||||
'#FF0033',
|
||||
'#FF0066',
|
||||
'#FF0099',
|
||||
'#FF00CC',
|
||||
'#FF00FF',
|
||||
'#FF3300',
|
||||
'#FF3333',
|
||||
'#FF3366',
|
||||
'#FF3399',
|
||||
'#FF33CC',
|
||||
'#FF33FF',
|
||||
'#FF6600',
|
||||
'#FF6633',
|
||||
'#FF9900',
|
||||
'#FF9933',
|
||||
'#FFCC00',
|
||||
'#FFCC33'
|
||||
];
|
||||
|
||||
/**
|
||||
* Currently only WebKit-based Web Inspectors, Firefox >= v31,
|
||||
* and the Firebug extension (any Firefox version) are known
|
||||
* to support "%c" CSS customizations.
|
||||
*
|
||||
* TODO: add a `localStorage` variable to explicitly enable/disable colors
|
||||
*/
|
||||
|
||||
// eslint-disable-next-line complexity
|
||||
function useColors() {
|
||||
// NB: In an Electron preload script, document will be defined but not fully
|
||||
// initialized. Since we know we're in Chrome, we'll just detect this case
|
||||
// explicitly
|
||||
if (typeof window !== 'undefined' && window.process && (window.process.type === 'renderer' || window.process.__nwjs)) {
|
||||
return true;
|
||||
}
|
||||
|
||||
// Internet Explorer and Edge do not support colors.
|
||||
if (typeof navigator !== 'undefined' && navigator.userAgent && navigator.userAgent.toLowerCase().match(/(edge|trident)\/(\d+)/)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
// Is webkit? http://stackoverflow.com/a/16459606/376773
|
||||
// document is undefined in react-native: https://github.com/facebook/react-native/pull/1632
|
||||
return (typeof document !== 'undefined' && document.documentElement && document.documentElement.style && document.documentElement.style.WebkitAppearance) ||
|
||||
// Is firebug? http://stackoverflow.com/a/398120/376773
|
||||
(typeof window !== 'undefined' && window.console && (window.console.firebug || (window.console.exception && window.console.table))) ||
|
||||
// Is firefox >= v31?
|
||||
// https://developer.mozilla.org/en-US/docs/Tools/Web_Console#Styling_messages
|
||||
(typeof navigator !== 'undefined' && navigator.userAgent && navigator.userAgent.toLowerCase().match(/firefox\/(\d+)/) && parseInt(RegExp.$1, 10) >= 31) ||
|
||||
// Double check webkit in userAgent just in case we are in a worker
|
||||
(typeof navigator !== 'undefined' && navigator.userAgent && navigator.userAgent.toLowerCase().match(/applewebkit\/(\d+)/));
|
||||
}
|
||||
|
||||
/**
|
||||
* Colorize log arguments if enabled.
|
||||
*
|
||||
* @api public
|
||||
*/
|
||||
|
||||
function formatArgs(args) {
|
||||
args[0] = (this.useColors ? '%c' : '') +
|
||||
this.namespace +
|
||||
(this.useColors ? ' %c' : ' ') +
|
||||
args[0] +
|
||||
(this.useColors ? '%c ' : ' ') +
|
||||
'+' + module.exports.humanize(this.diff);
|
||||
|
||||
if (!this.useColors) {
|
||||
return;
|
||||
}
|
||||
|
||||
const c = 'color: ' + this.color;
|
||||
args.splice(1, 0, c, 'color: inherit');
|
||||
|
||||
// The final "%c" is somewhat tricky, because there could be other
|
||||
// arguments passed either before or after the %c, so we need to
|
||||
// figure out the correct index to insert the CSS into
|
||||
let index = 0;
|
||||
let lastC = 0;
|
||||
args[0].replace(/%[a-zA-Z%]/g, match => {
|
||||
if (match === '%%') {
|
||||
return;
|
||||
}
|
||||
index++;
|
||||
if (match === '%c') {
|
||||
// We only are interested in the *last* %c
|
||||
// (the user may have provided their own)
|
||||
lastC = index;
|
||||
}
|
||||
});
|
||||
|
||||
args.splice(lastC, 0, c);
|
||||
}
|
||||
|
||||
/**
|
||||
* Invokes `console.debug()` when available.
|
||||
* No-op when `console.debug` is not a "function".
|
||||
* If `console.debug` is not available, falls back
|
||||
* to `console.log`.
|
||||
*
|
||||
* @api public
|
||||
*/
|
||||
exports.log = console.debug || console.log || (() => {});
|
||||
|
||||
/**
|
||||
* Save `namespaces`.
|
||||
*
|
||||
* @param {String} namespaces
|
||||
* @api private
|
||||
*/
|
||||
function save(namespaces) {
|
||||
try {
|
||||
if (namespaces) {
|
||||
exports.storage.setItem('debug', namespaces);
|
||||
} else {
|
||||
exports.storage.removeItem('debug');
|
||||
}
|
||||
} catch (error) {
|
||||
// Swallow
|
||||
// XXX (@Qix-) should we be logging these?
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Load `namespaces`.
|
||||
*
|
||||
* @return {String} returns the previously persisted debug modes
|
||||
* @api private
|
||||
*/
|
||||
function load() {
|
||||
let r;
|
||||
try {
|
||||
r = exports.storage.getItem('debug');
|
||||
} catch (error) {
|
||||
// Swallow
|
||||
// XXX (@Qix-) should we be logging these?
|
||||
}
|
||||
|
||||
// If debug isn't set in LS, and we're in Electron, try to load $DEBUG
|
||||
if (!r && typeof process !== 'undefined' && 'env' in process) {
|
||||
r = process.env.DEBUG;
|
||||
}
|
||||
|
||||
return r;
|
||||
}
|
||||
|
||||
/**
|
||||
* Localstorage attempts to return the localstorage.
|
||||
*
|
||||
* This is necessary because safari throws
|
||||
* when a user disables cookies/localstorage
|
||||
* and you attempt to access it.
|
||||
*
|
||||
* @return {LocalStorage}
|
||||
* @api private
|
||||
*/
|
||||
|
||||
function localstorage() {
|
||||
try {
|
||||
// TVMLKit (Apple TV JS Runtime) does not have a window object, just localStorage in the global context
|
||||
// The Browser also has localStorage in the global context.
|
||||
return localStorage;
|
||||
} catch (error) {
|
||||
// Swallow
|
||||
// XXX (@Qix-) should we be logging these?
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = require('./common')(exports);
|
||||
|
||||
const {formatters} = module.exports;
|
||||
|
||||
/**
|
||||
* Map %j to `JSON.stringify()`, since no Web Inspectors do that by default.
|
||||
*/
|
||||
|
||||
formatters.j = function (v) {
|
||||
try {
|
||||
return JSON.stringify(v);
|
||||
} catch (error) {
|
||||
return '[UnexpectedJSONParseError]: ' + error.message;
|
||||
}
|
||||
};
|
||||
274
node_modules/.store/extract-zip@2.0.1/node_modules/debug/src/common.js
generated
vendored
Normal file
274
node_modules/.store/extract-zip@2.0.1/node_modules/debug/src/common.js
generated
vendored
Normal file
@@ -0,0 +1,274 @@
|
||||
|
||||
/**
|
||||
* This is the common logic for both the Node.js and web browser
|
||||
* implementations of `debug()`.
|
||||
*/
|
||||
|
||||
function setup(env) {
|
||||
createDebug.debug = createDebug;
|
||||
createDebug.default = createDebug;
|
||||
createDebug.coerce = coerce;
|
||||
createDebug.disable = disable;
|
||||
createDebug.enable = enable;
|
||||
createDebug.enabled = enabled;
|
||||
createDebug.humanize = require('ms');
|
||||
createDebug.destroy = destroy;
|
||||
|
||||
Object.keys(env).forEach(key => {
|
||||
createDebug[key] = env[key];
|
||||
});
|
||||
|
||||
/**
|
||||
* The currently active debug mode names, and names to skip.
|
||||
*/
|
||||
|
||||
createDebug.names = [];
|
||||
createDebug.skips = [];
|
||||
|
||||
/**
|
||||
* Map of special "%n" handling functions, for the debug "format" argument.
|
||||
*
|
||||
* Valid key names are a single, lower or upper-case letter, i.e. "n" and "N".
|
||||
*/
|
||||
createDebug.formatters = {};
|
||||
|
||||
/**
|
||||
* Selects a color for a debug namespace
|
||||
* @param {String} namespace The namespace string for the debug instance to be colored
|
||||
* @return {Number|String} An ANSI color code for the given namespace
|
||||
* @api private
|
||||
*/
|
||||
function selectColor(namespace) {
|
||||
let hash = 0;
|
||||
|
||||
for (let i = 0; i < namespace.length; i++) {
|
||||
hash = ((hash << 5) - hash) + namespace.charCodeAt(i);
|
||||
hash |= 0; // Convert to 32bit integer
|
||||
}
|
||||
|
||||
return createDebug.colors[Math.abs(hash) % createDebug.colors.length];
|
||||
}
|
||||
createDebug.selectColor = selectColor;
|
||||
|
||||
/**
|
||||
* Create a debugger with the given `namespace`.
|
||||
*
|
||||
* @param {String} namespace
|
||||
* @return {Function}
|
||||
* @api public
|
||||
*/
|
||||
function createDebug(namespace) {
|
||||
let prevTime;
|
||||
let enableOverride = null;
|
||||
let namespacesCache;
|
||||
let enabledCache;
|
||||
|
||||
function debug(...args) {
|
||||
// Disabled?
|
||||
if (!debug.enabled) {
|
||||
return;
|
||||
}
|
||||
|
||||
const self = debug;
|
||||
|
||||
// Set `diff` timestamp
|
||||
const curr = Number(new Date());
|
||||
const ms = curr - (prevTime || curr);
|
||||
self.diff = ms;
|
||||
self.prev = prevTime;
|
||||
self.curr = curr;
|
||||
prevTime = curr;
|
||||
|
||||
args[0] = createDebug.coerce(args[0]);
|
||||
|
||||
if (typeof args[0] !== 'string') {
|
||||
// Anything else let's inspect with %O
|
||||
args.unshift('%O');
|
||||
}
|
||||
|
||||
// Apply any `formatters` transformations
|
||||
let index = 0;
|
||||
args[0] = args[0].replace(/%([a-zA-Z%])/g, (match, format) => {
|
||||
// If we encounter an escaped % then don't increase the array index
|
||||
if (match === '%%') {
|
||||
return '%';
|
||||
}
|
||||
index++;
|
||||
const formatter = createDebug.formatters[format];
|
||||
if (typeof formatter === 'function') {
|
||||
const val = args[index];
|
||||
match = formatter.call(self, val);
|
||||
|
||||
// Now we need to remove `args[index]` since it's inlined in the `format`
|
||||
args.splice(index, 1);
|
||||
index--;
|
||||
}
|
||||
return match;
|
||||
});
|
||||
|
||||
// Apply env-specific formatting (colors, etc.)
|
||||
createDebug.formatArgs.call(self, args);
|
||||
|
||||
const logFn = self.log || createDebug.log;
|
||||
logFn.apply(self, args);
|
||||
}
|
||||
|
||||
debug.namespace = namespace;
|
||||
debug.useColors = createDebug.useColors();
|
||||
debug.color = createDebug.selectColor(namespace);
|
||||
debug.extend = extend;
|
||||
debug.destroy = createDebug.destroy; // XXX Temporary. Will be removed in the next major release.
|
||||
|
||||
Object.defineProperty(debug, 'enabled', {
|
||||
enumerable: true,
|
||||
configurable: false,
|
||||
get: () => {
|
||||
if (enableOverride !== null) {
|
||||
return enableOverride;
|
||||
}
|
||||
if (namespacesCache !== createDebug.namespaces) {
|
||||
namespacesCache = createDebug.namespaces;
|
||||
enabledCache = createDebug.enabled(namespace);
|
||||
}
|
||||
|
||||
return enabledCache;
|
||||
},
|
||||
set: v => {
|
||||
enableOverride = v;
|
||||
}
|
||||
});
|
||||
|
||||
// Env-specific initialization logic for debug instances
|
||||
if (typeof createDebug.init === 'function') {
|
||||
createDebug.init(debug);
|
||||
}
|
||||
|
||||
return debug;
|
||||
}
|
||||
|
||||
function extend(namespace, delimiter) {
|
||||
const newDebug = createDebug(this.namespace + (typeof delimiter === 'undefined' ? ':' : delimiter) + namespace);
|
||||
newDebug.log = this.log;
|
||||
return newDebug;
|
||||
}
|
||||
|
||||
/**
|
||||
* Enables a debug mode by namespaces. This can include modes
|
||||
* separated by a colon and wildcards.
|
||||
*
|
||||
* @param {String} namespaces
|
||||
* @api public
|
||||
*/
|
||||
function enable(namespaces) {
|
||||
createDebug.save(namespaces);
|
||||
createDebug.namespaces = namespaces;
|
||||
|
||||
createDebug.names = [];
|
||||
createDebug.skips = [];
|
||||
|
||||
let i;
|
||||
const split = (typeof namespaces === 'string' ? namespaces : '').split(/[\s,]+/);
|
||||
const len = split.length;
|
||||
|
||||
for (i = 0; i < len; i++) {
|
||||
if (!split[i]) {
|
||||
// ignore empty strings
|
||||
continue;
|
||||
}
|
||||
|
||||
namespaces = split[i].replace(/\*/g, '.*?');
|
||||
|
||||
if (namespaces[0] === '-') {
|
||||
createDebug.skips.push(new RegExp('^' + namespaces.slice(1) + '$'));
|
||||
} else {
|
||||
createDebug.names.push(new RegExp('^' + namespaces + '$'));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Disable debug output.
|
||||
*
|
||||
* @return {String} namespaces
|
||||
* @api public
|
||||
*/
|
||||
function disable() {
|
||||
const namespaces = [
|
||||
...createDebug.names.map(toNamespace),
|
||||
...createDebug.skips.map(toNamespace).map(namespace => '-' + namespace)
|
||||
].join(',');
|
||||
createDebug.enable('');
|
||||
return namespaces;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns true if the given mode name is enabled, false otherwise.
|
||||
*
|
||||
* @param {String} name
|
||||
* @return {Boolean}
|
||||
* @api public
|
||||
*/
|
||||
function enabled(name) {
|
||||
if (name[name.length - 1] === '*') {
|
||||
return true;
|
||||
}
|
||||
|
||||
let i;
|
||||
let len;
|
||||
|
||||
for (i = 0, len = createDebug.skips.length; i < len; i++) {
|
||||
if (createDebug.skips[i].test(name)) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
for (i = 0, len = createDebug.names.length; i < len; i++) {
|
||||
if (createDebug.names[i].test(name)) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
/**
|
||||
* Convert regexp to namespace
|
||||
*
|
||||
* @param {RegExp} regxep
|
||||
* @return {String} namespace
|
||||
* @api private
|
||||
*/
|
||||
function toNamespace(regexp) {
|
||||
return regexp.toString()
|
||||
.substring(2, regexp.toString().length - 2)
|
||||
.replace(/\.\*\?$/, '*');
|
||||
}
|
||||
|
||||
/**
|
||||
* Coerce `val`.
|
||||
*
|
||||
* @param {Mixed} val
|
||||
* @return {Mixed}
|
||||
* @api private
|
||||
*/
|
||||
function coerce(val) {
|
||||
if (val instanceof Error) {
|
||||
return val.stack || val.message;
|
||||
}
|
||||
return val;
|
||||
}
|
||||
|
||||
/**
|
||||
* XXX DO NOT USE. This is a temporary stub function.
|
||||
* XXX It WILL be removed in the next major release.
|
||||
*/
|
||||
function destroy() {
|
||||
console.warn('Instance method `debug.destroy()` is deprecated and no longer does anything. It will be removed in the next major version of `debug`.');
|
||||
}
|
||||
|
||||
createDebug.enable(createDebug.load());
|
||||
|
||||
return createDebug;
|
||||
}
|
||||
|
||||
module.exports = setup;
|
||||
10
node_modules/.store/extract-zip@2.0.1/node_modules/debug/src/index.js
generated
vendored
Normal file
10
node_modules/.store/extract-zip@2.0.1/node_modules/debug/src/index.js
generated
vendored
Normal file
@@ -0,0 +1,10 @@
|
||||
/**
|
||||
* Detect Electron renderer / nwjs process, which is node, but we should
|
||||
* treat as a browser.
|
||||
*/
|
||||
|
||||
if (typeof process === 'undefined' || process.type === 'renderer' || process.browser === true || process.__nwjs) {
|
||||
module.exports = require('./browser.js');
|
||||
} else {
|
||||
module.exports = require('./node.js');
|
||||
}
|
||||
263
node_modules/.store/extract-zip@2.0.1/node_modules/debug/src/node.js
generated
vendored
Normal file
263
node_modules/.store/extract-zip@2.0.1/node_modules/debug/src/node.js
generated
vendored
Normal file
@@ -0,0 +1,263 @@
|
||||
/**
|
||||
* Module dependencies.
|
||||
*/
|
||||
|
||||
const tty = require('tty');
|
||||
const util = require('util');
|
||||
|
||||
/**
|
||||
* This is the Node.js implementation of `debug()`.
|
||||
*/
|
||||
|
||||
exports.init = init;
|
||||
exports.log = log;
|
||||
exports.formatArgs = formatArgs;
|
||||
exports.save = save;
|
||||
exports.load = load;
|
||||
exports.useColors = useColors;
|
||||
exports.destroy = util.deprecate(
|
||||
() => {},
|
||||
'Instance method `debug.destroy()` is deprecated and no longer does anything. It will be removed in the next major version of `debug`.'
|
||||
);
|
||||
|
||||
/**
|
||||
* Colors.
|
||||
*/
|
||||
|
||||
exports.colors = [6, 2, 3, 4, 5, 1];
|
||||
|
||||
try {
|
||||
// Optional dependency (as in, doesn't need to be installed, NOT like optionalDependencies in package.json)
|
||||
// eslint-disable-next-line import/no-extraneous-dependencies
|
||||
const supportsColor = require('supports-color');
|
||||
|
||||
if (supportsColor && (supportsColor.stderr || supportsColor).level >= 2) {
|
||||
exports.colors = [
|
||||
20,
|
||||
21,
|
||||
26,
|
||||
27,
|
||||
32,
|
||||
33,
|
||||
38,
|
||||
39,
|
||||
40,
|
||||
41,
|
||||
42,
|
||||
43,
|
||||
44,
|
||||
45,
|
||||
56,
|
||||
57,
|
||||
62,
|
||||
63,
|
||||
68,
|
||||
69,
|
||||
74,
|
||||
75,
|
||||
76,
|
||||
77,
|
||||
78,
|
||||
79,
|
||||
80,
|
||||
81,
|
||||
92,
|
||||
93,
|
||||
98,
|
||||
99,
|
||||
112,
|
||||
113,
|
||||
128,
|
||||
129,
|
||||
134,
|
||||
135,
|
||||
148,
|
||||
149,
|
||||
160,
|
||||
161,
|
||||
162,
|
||||
163,
|
||||
164,
|
||||
165,
|
||||
166,
|
||||
167,
|
||||
168,
|
||||
169,
|
||||
170,
|
||||
171,
|
||||
172,
|
||||
173,
|
||||
178,
|
||||
179,
|
||||
184,
|
||||
185,
|
||||
196,
|
||||
197,
|
||||
198,
|
||||
199,
|
||||
200,
|
||||
201,
|
||||
202,
|
||||
203,
|
||||
204,
|
||||
205,
|
||||
206,
|
||||
207,
|
||||
208,
|
||||
209,
|
||||
214,
|
||||
215,
|
||||
220,
|
||||
221
|
||||
];
|
||||
}
|
||||
} catch (error) {
|
||||
// Swallow - we only care if `supports-color` is available; it doesn't have to be.
|
||||
}
|
||||
|
||||
/**
|
||||
* Build up the default `inspectOpts` object from the environment variables.
|
||||
*
|
||||
* $ DEBUG_COLORS=no DEBUG_DEPTH=10 DEBUG_SHOW_HIDDEN=enabled node script.js
|
||||
*/
|
||||
|
||||
exports.inspectOpts = Object.keys(process.env).filter(key => {
|
||||
return /^debug_/i.test(key);
|
||||
}).reduce((obj, key) => {
|
||||
// Camel-case
|
||||
const prop = key
|
||||
.substring(6)
|
||||
.toLowerCase()
|
||||
.replace(/_([a-z])/g, (_, k) => {
|
||||
return k.toUpperCase();
|
||||
});
|
||||
|
||||
// Coerce string value into JS value
|
||||
let val = process.env[key];
|
||||
if (/^(yes|on|true|enabled)$/i.test(val)) {
|
||||
val = true;
|
||||
} else if (/^(no|off|false|disabled)$/i.test(val)) {
|
||||
val = false;
|
||||
} else if (val === 'null') {
|
||||
val = null;
|
||||
} else {
|
||||
val = Number(val);
|
||||
}
|
||||
|
||||
obj[prop] = val;
|
||||
return obj;
|
||||
}, {});
|
||||
|
||||
/**
|
||||
* Is stdout a TTY? Colored output is enabled when `true`.
|
||||
*/
|
||||
|
||||
function useColors() {
|
||||
return 'colors' in exports.inspectOpts ?
|
||||
Boolean(exports.inspectOpts.colors) :
|
||||
tty.isatty(process.stderr.fd);
|
||||
}
|
||||
|
||||
/**
|
||||
* Adds ANSI color escape codes if enabled.
|
||||
*
|
||||
* @api public
|
||||
*/
|
||||
|
||||
function formatArgs(args) {
|
||||
const {namespace: name, useColors} = this;
|
||||
|
||||
if (useColors) {
|
||||
const c = this.color;
|
||||
const colorCode = '\u001B[3' + (c < 8 ? c : '8;5;' + c);
|
||||
const prefix = ` ${colorCode};1m${name} \u001B[0m`;
|
||||
|
||||
args[0] = prefix + args[0].split('\n').join('\n' + prefix);
|
||||
args.push(colorCode + 'm+' + module.exports.humanize(this.diff) + '\u001B[0m');
|
||||
} else {
|
||||
args[0] = getDate() + name + ' ' + args[0];
|
||||
}
|
||||
}
|
||||
|
||||
function getDate() {
|
||||
if (exports.inspectOpts.hideDate) {
|
||||
return '';
|
||||
}
|
||||
return new Date().toISOString() + ' ';
|
||||
}
|
||||
|
||||
/**
|
||||
* Invokes `util.format()` with the specified arguments and writes to stderr.
|
||||
*/
|
||||
|
||||
function log(...args) {
|
||||
return process.stderr.write(util.format(...args) + '\n');
|
||||
}
|
||||
|
||||
/**
|
||||
* Save `namespaces`.
|
||||
*
|
||||
* @param {String} namespaces
|
||||
* @api private
|
||||
*/
|
||||
function save(namespaces) {
|
||||
if (namespaces) {
|
||||
process.env.DEBUG = namespaces;
|
||||
} else {
|
||||
// If you set a process.env field to null or undefined, it gets cast to the
|
||||
// string 'null' or 'undefined'. Just delete instead.
|
||||
delete process.env.DEBUG;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Load `namespaces`.
|
||||
*
|
||||
* @return {String} returns the previously persisted debug modes
|
||||
* @api private
|
||||
*/
|
||||
|
||||
function load() {
|
||||
return process.env.DEBUG;
|
||||
}
|
||||
|
||||
/**
|
||||
* Init logic for `debug` instances.
|
||||
*
|
||||
* Create a new `inspectOpts` object in case `useColors` is set
|
||||
* differently for a particular `debug` instance.
|
||||
*/
|
||||
|
||||
function init(debug) {
|
||||
debug.inspectOpts = {};
|
||||
|
||||
const keys = Object.keys(exports.inspectOpts);
|
||||
for (let i = 0; i < keys.length; i++) {
|
||||
debug.inspectOpts[keys[i]] = exports.inspectOpts[keys[i]];
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = require('./common')(exports);
|
||||
|
||||
const {formatters} = module.exports;
|
||||
|
||||
/**
|
||||
* Map %o to `util.inspect()`, all on a single line.
|
||||
*/
|
||||
|
||||
formatters.o = function (v) {
|
||||
this.inspectOpts.colors = this.useColors;
|
||||
return util.inspect(v, this.inspectOpts)
|
||||
.split('\n')
|
||||
.map(str => str.trim())
|
||||
.join(' ');
|
||||
};
|
||||
|
||||
/**
|
||||
* Map %O to `util.inspect()`, allowing multiple lines if needed.
|
||||
*/
|
||||
|
||||
formatters.O = function (v) {
|
||||
this.inspectOpts.colors = this.useColors;
|
||||
return util.inspect(v, this.inspectOpts);
|
||||
};
|
||||
23
node_modules/.store/extract-zip@2.0.1/node_modules/extract-zip/LICENSE
generated
vendored
Normal file
23
node_modules/.store/extract-zip@2.0.1/node_modules/extract-zip/LICENSE
generated
vendored
Normal file
@@ -0,0 +1,23 @@
|
||||
Copyright (c) 2014 Max Ogden and other contributors
|
||||
All rights reserved.
|
||||
|
||||
Redistribution and use in source and binary forms, with or without
|
||||
modification, are permitted provided that the following conditions are met:
|
||||
|
||||
* Redistributions of source code must retain the above copyright notice, this
|
||||
list of conditions and the following disclaimer.
|
||||
|
||||
* Redistributions in binary form must reproduce the above copyright notice,
|
||||
this list of conditions and the following disclaimer in the documentation
|
||||
and/or other materials provided with the distribution.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
|
||||
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
||||
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
|
||||
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
|
||||
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
|
||||
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
|
||||
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
|
||||
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
19
node_modules/.store/extract-zip@2.0.1/node_modules/extract-zip/cli.js
generated
vendored
Normal file
19
node_modules/.store/extract-zip@2.0.1/node_modules/extract-zip/cli.js
generated
vendored
Normal file
@@ -0,0 +1,19 @@
|
||||
#!/usr/bin/env node
|
||||
|
||||
/* eslint-disable no-process-exit */
|
||||
|
||||
var extract = require('./')
|
||||
|
||||
var args = process.argv.slice(2)
|
||||
var source = args[0]
|
||||
var dest = args[1] || process.cwd()
|
||||
if (!source) {
|
||||
console.error('Usage: extract-zip foo.zip <targetDirectory>')
|
||||
process.exit(1)
|
||||
}
|
||||
|
||||
extract(source, { dir: dest })
|
||||
.catch(function (err) {
|
||||
console.error('error!', err)
|
||||
process.exit(1)
|
||||
})
|
||||
21
node_modules/.store/extract-zip@2.0.1/node_modules/extract-zip/index.d.ts
generated
vendored
Normal file
21
node_modules/.store/extract-zip@2.0.1/node_modules/extract-zip/index.d.ts
generated
vendored
Normal file
@@ -0,0 +1,21 @@
|
||||
// Based on the type definitions for extract-zip 1.6
|
||||
// Definitions by: Mizunashi Mana <https://github.com/mizunashi-mana>
|
||||
// Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped/blob/e69b58e/types/extract-zip/index.d.ts
|
||||
|
||||
import { Entry, ZipFile } from 'yauzl';
|
||||
|
||||
declare namespace extract {
|
||||
interface Options {
|
||||
dir: string;
|
||||
defaultDirMode?: number;
|
||||
defaultFileMode?: number;
|
||||
onEntry?: (entry: Entry, zipfile: ZipFile) => void;
|
||||
}
|
||||
}
|
||||
|
||||
declare function extract(
|
||||
zipPath: string,
|
||||
opts: extract.Options,
|
||||
): Promise<void>;
|
||||
|
||||
export = extract;
|
||||
173
node_modules/.store/extract-zip@2.0.1/node_modules/extract-zip/index.js
generated
vendored
Normal file
173
node_modules/.store/extract-zip@2.0.1/node_modules/extract-zip/index.js
generated
vendored
Normal file
@@ -0,0 +1,173 @@
|
||||
const debug = require('debug')('extract-zip')
|
||||
// eslint-disable-next-line node/no-unsupported-features/node-builtins
|
||||
const { createWriteStream, promises: fs } = require('fs')
|
||||
const getStream = require('get-stream')
|
||||
const path = require('path')
|
||||
const { promisify } = require('util')
|
||||
const stream = require('stream')
|
||||
const yauzl = require('yauzl')
|
||||
|
||||
const openZip = promisify(yauzl.open)
|
||||
const pipeline = promisify(stream.pipeline)
|
||||
|
||||
class Extractor {
|
||||
constructor (zipPath, opts) {
|
||||
this.zipPath = zipPath
|
||||
this.opts = opts
|
||||
}
|
||||
|
||||
async extract () {
|
||||
debug('opening', this.zipPath, 'with opts', this.opts)
|
||||
|
||||
this.zipfile = await openZip(this.zipPath, { lazyEntries: true })
|
||||
this.canceled = false
|
||||
|
||||
return new Promise((resolve, reject) => {
|
||||
this.zipfile.on('error', err => {
|
||||
this.canceled = true
|
||||
reject(err)
|
||||
})
|
||||
this.zipfile.readEntry()
|
||||
|
||||
this.zipfile.on('close', () => {
|
||||
if (!this.canceled) {
|
||||
debug('zip extraction complete')
|
||||
resolve()
|
||||
}
|
||||
})
|
||||
|
||||
this.zipfile.on('entry', async entry => {
|
||||
/* istanbul ignore if */
|
||||
if (this.canceled) {
|
||||
debug('skipping entry', entry.fileName, { cancelled: this.canceled })
|
||||
return
|
||||
}
|
||||
|
||||
debug('zipfile entry', entry.fileName)
|
||||
|
||||
if (entry.fileName.startsWith('__MACOSX/')) {
|
||||
this.zipfile.readEntry()
|
||||
return
|
||||
}
|
||||
|
||||
const destDir = path.dirname(path.join(this.opts.dir, entry.fileName))
|
||||
|
||||
try {
|
||||
await fs.mkdir(destDir, { recursive: true })
|
||||
|
||||
const canonicalDestDir = await fs.realpath(destDir)
|
||||
const relativeDestDir = path.relative(this.opts.dir, canonicalDestDir)
|
||||
|
||||
if (relativeDestDir.split(path.sep).includes('..')) {
|
||||
throw new Error(`Out of bound path "${canonicalDestDir}" found while processing file ${entry.fileName}`)
|
||||
}
|
||||
|
||||
await this.extractEntry(entry)
|
||||
debug('finished processing', entry.fileName)
|
||||
this.zipfile.readEntry()
|
||||
} catch (err) {
|
||||
this.canceled = true
|
||||
this.zipfile.close()
|
||||
reject(err)
|
||||
}
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
  /**
   * Extract a single zip entry to disk under this.opts.dir.
   *
   * Handles three cases derived from the entry's Unix mode bits:
   * directories (mkdir only), symlinks (the entry body is the link target),
   * and regular files (streamed to disk).
   *
   * @param {yauzl.Entry} entry - entry emitted by yauzl
   * @returns {Promise<void>}
   */
  async extractEntry (entry) {
    /* istanbul ignore if */
    // Bail out quickly if a previous entry already failed and extraction
    // was cancelled.
    if (this.canceled) {
      debug('skipping entry extraction', entry.fileName, { cancelled: this.canceled })
      return
    }

    // Give the caller a chance to observe each entry as it is processed.
    if (this.opts.onEntry) {
      this.opts.onEntry(entry, this.zipfile)
    }

    const dest = path.join(this.opts.dir, entry.fileName)

    // convert external file attr int into a fs stat mode int
    // (the high 16 bits hold the Unix st_mode when the archive was made on Unix)
    const mode = (entry.externalFileAttributes >> 16) & 0xFFFF
    // check if it's a symlink or dir (using stat mode constants)
    const IFMT = 61440 // 0o170000 — file-type bit mask
    const IFDIR = 16384 // 0o040000 — directory
    const IFLNK = 40960 // 0o120000 — symbolic link
    const symlink = (mode & IFMT) === IFLNK
    let isDir = (mode & IFMT) === IFDIR

    // Failsafe, borrowed from jsZip: directory entries conventionally end
    // with '/' even when the mode bits don't say so.
    if (!isDir && entry.fileName.endsWith('/')) {
      isDir = true
    }

    // check for windows weird way of specifying a directory
    // https://github.com/maxogden/extract-zip/issues/13#issuecomment-154494566
    const madeBy = entry.versionMadeBy >> 8
    if (!isDir) isDir = (madeBy === 0 && entry.externalFileAttributes === 16)

    debug('extracting entry', { filename: entry.fileName, isDir: isDir, isSymlink: symlink })

    // Resolve the permissions to apply, limited to the permission bits.
    const procMode = this.getExtractedMode(mode, isDir) & 0o777

    // always ensure folders are created
    const destDir = isDir ? dest : path.dirname(dest)

    const mkdirOptions = { recursive: true }
    if (isDir) {
      // Only apply the entry's mode when the entry itself is the directory;
      // implicitly created parent directories keep the default mode.
      mkdirOptions.mode = procMode
    }
    debug('mkdir', { dir: destDir, ...mkdirOptions })
    await fs.mkdir(destDir, mkdirOptions)
    if (isDir) return

    debug('opening read stream', dest)
    const readStream = await promisify(this.zipfile.openReadStream.bind(this.zipfile))(entry)

    if (symlink) {
      // For a symlink entry the file data is the link target path.
      // NOTE(review): the target is not validated against opts.dir here —
      // presumably containment is enforced by the destDir realpath check in
      // the caller; verify before relying on it for untrusted archives.
      const link = await getStream(readStream)
      debug('creating symlink', link, dest)
      await fs.symlink(link, dest)
    } else {
      await pipeline(readStream, createWriteStream(dest, { mode: procMode }))
    }
  }
|
||||
|
||||
getExtractedMode (entryMode, isDir) {
|
||||
let mode = entryMode
|
||||
// Set defaults, if necessary
|
||||
if (mode === 0) {
|
||||
if (isDir) {
|
||||
if (this.opts.defaultDirMode) {
|
||||
mode = parseInt(this.opts.defaultDirMode, 10)
|
||||
}
|
||||
|
||||
if (!mode) {
|
||||
mode = 0o755
|
||||
}
|
||||
} else {
|
||||
if (this.opts.defaultFileMode) {
|
||||
mode = parseInt(this.opts.defaultFileMode, 10)
|
||||
}
|
||||
|
||||
if (!mode) {
|
||||
mode = 0o644
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return mode
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = async function (zipPath, opts) {
|
||||
debug('creating target directory', opts.dir)
|
||||
|
||||
if (!path.isAbsolute(opts.dir)) {
|
||||
throw new Error('Target directory is expected to be absolute')
|
||||
}
|
||||
|
||||
await fs.mkdir(opts.dir, { recursive: true })
|
||||
opts.dir = await fs.realpath(opts.dir)
|
||||
return new Extractor(zipPath, opts).extract()
|
||||
}
|
||||
83
node_modules/.store/extract-zip@2.0.1/node_modules/extract-zip/package.json
generated
vendored
Normal file
83
node_modules/.store/extract-zip@2.0.1/node_modules/extract-zip/package.json
generated
vendored
Normal file
@@ -0,0 +1,83 @@
|
||||
{
|
||||
"name": "extract-zip",
|
||||
"version": "2.0.1",
|
||||
"description": "unzip a zip file into a directory using 100% javascript",
|
||||
"main": "index.js",
|
||||
"types": "index.d.ts",
|
||||
"bin": {
|
||||
"extract-zip": "cli.js"
|
||||
},
|
||||
"scripts": {
|
||||
"ava": "ava",
|
||||
"coverage": "nyc ava",
|
||||
"lint": "yarn lint:js && yarn lint:ts && yarn tsd",
|
||||
"lint:js": "eslint .",
|
||||
"lint:ts": "eslint --config .eslintrc.typescript.js --ext .ts .",
|
||||
"test": "yarn lint && ava",
|
||||
"tsd": "tsd"
|
||||
},
|
||||
"files": [
|
||||
"cli.js",
|
||||
"index.d.ts"
|
||||
],
|
||||
"author": "max ogden",
|
||||
"license": "BSD-2-Clause",
|
||||
"repository": "maxogden/extract-zip",
|
||||
"keywords": [
|
||||
"unzip",
|
||||
"zip",
|
||||
"extract"
|
||||
],
|
||||
"engines": {
|
||||
"node": ">= 10.17.0"
|
||||
},
|
||||
"dependencies": {
|
||||
"debug": "^4.1.1",
|
||||
"get-stream": "^5.1.0",
|
||||
"yauzl": "^2.10.0"
|
||||
},
|
||||
"optionalDependencies": {
|
||||
"@types/yauzl": "^2.9.1"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@typescript-eslint/eslint-plugin": "^3.2.0",
|
||||
"@typescript-eslint/parser": "^3.2.0",
|
||||
"ava": "^3.5.1",
|
||||
"eslint": "^7.2.0",
|
||||
"eslint-config-standard": "^14.1.1",
|
||||
"eslint-plugin-ava": "^10.2.0",
|
||||
"eslint-plugin-import": "^2.20.1",
|
||||
"eslint-plugin-node": "^11.0.0",
|
||||
"eslint-plugin-promise": "^4.2.1",
|
||||
"eslint-plugin-standard": "^4.0.1",
|
||||
"fs-extra": "^9.0.0",
|
||||
"husky": "^4.2.3",
|
||||
"lint-staged": "^10.0.9",
|
||||
"nyc": "^15.0.0",
|
||||
"tsd": "^0.11.0",
|
||||
"typescript": "^3.8.3"
|
||||
},
|
||||
"eslintConfig": {
|
||||
"extends": [
|
||||
"eslint:recommended",
|
||||
"plugin:ava/recommended",
|
||||
"plugin:import/errors",
|
||||
"plugin:import/warnings",
|
||||
"plugin:node/recommended",
|
||||
"plugin:promise/recommended",
|
||||
"standard"
|
||||
]
|
||||
},
|
||||
"husky": {
|
||||
"hooks": {
|
||||
"pre-commit": "lint-staged"
|
||||
}
|
||||
},
|
||||
"lint-staged": {
|
||||
"*.js": "yarn lint:js --fix",
|
||||
"*.ts": "yarn lint:ts --fix"
|
||||
},
|
||||
"__npminstall_done": true,
|
||||
"_from": "extract-zip@2.0.1",
|
||||
"_resolved": "https://registry.npmmirror.com/extract-zip/-/extract-zip-2.0.1.tgz"
|
||||
}
|
||||
57
node_modules/.store/extract-zip@2.0.1/node_modules/extract-zip/readme.md
generated
vendored
Normal file
57
node_modules/.store/extract-zip@2.0.1/node_modules/extract-zip/readme.md
generated
vendored
Normal file
@@ -0,0 +1,57 @@
|
||||
# extract-zip
|
||||
|
||||
Unzip written in pure JavaScript. Extracts a zip into a directory. Available as a library or a command line program.
|
||||
|
||||
Uses the [`yauzl`](http://npmjs.org/yauzl) ZIP parser.
|
||||
|
||||
[](https://npm.im/extract-zip)
|
||||
[](https://github.com/standard/standard)
|
||||
[](https://github.com/maxogden/extract-zip/actions?query=workflow%3ACI)
|
||||
|
||||
## Installation
|
||||
|
||||
Make sure you have Node 10 or greater installed.
|
||||
|
||||
Get the library:
|
||||
|
||||
```
|
||||
npm install extract-zip --save
|
||||
```
|
||||
|
||||
Install the command line program:
|
||||
|
||||
```
|
||||
npm install extract-zip -g
|
||||
```
|
||||
|
||||
## JS API
|
||||
|
||||
```javascript
|
||||
const extract = require('extract-zip')
|
||||
|
||||
async function main () {
|
||||
try {
|
||||
await extract(source, { dir: target })
|
||||
console.log('Extraction complete')
|
||||
} catch (err) {
|
||||
// handle any errors
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### Options
|
||||
|
||||
- `dir` (required) - the path to the directory where the extracted files are written
|
||||
- `defaultDirMode` - integer - Directory Mode (permissions), defaults to `0o755`
|
||||
- `defaultFileMode` - integer - File Mode (permissions), defaults to `0o644`
|
||||
- `onEntry` - function - if present, will be called with `(entry, zipfile)`, entry is every entry from the zip file forwarded from the `entry` event from yauzl. `zipfile` is the `yauzl` instance
|
||||
|
||||
Default modes are only used if no permissions are set in the zip file.
|
||||
|
||||
## CLI Usage
|
||||
|
||||
```
|
||||
extract-zip foo.zip <targetDirectory>
|
||||
```
|
||||
|
||||
If not specified, `targetDirectory` will default to `process.cwd()`.
|
||||
52
node_modules/.store/extract-zip@2.0.1/node_modules/get-stream/buffer-stream.js
generated
vendored
Normal file
52
node_modules/.store/extract-zip@2.0.1/node_modules/get-stream/buffer-stream.js
generated
vendored
Normal file
@@ -0,0 +1,52 @@
|
||||
'use strict';
|
||||
const {PassThrough: PassThroughStream} = require('stream');
|
||||
|
||||
module.exports = options => {
|
||||
options = {...options};
|
||||
|
||||
const {array} = options;
|
||||
let {encoding} = options;
|
||||
const isBuffer = encoding === 'buffer';
|
||||
let objectMode = false;
|
||||
|
||||
if (array) {
|
||||
objectMode = !(encoding || isBuffer);
|
||||
} else {
|
||||
encoding = encoding || 'utf8';
|
||||
}
|
||||
|
||||
if (isBuffer) {
|
||||
encoding = null;
|
||||
}
|
||||
|
||||
const stream = new PassThroughStream({objectMode});
|
||||
|
||||
if (encoding) {
|
||||
stream.setEncoding(encoding);
|
||||
}
|
||||
|
||||
let length = 0;
|
||||
const chunks = [];
|
||||
|
||||
stream.on('data', chunk => {
|
||||
chunks.push(chunk);
|
||||
|
||||
if (objectMode) {
|
||||
length = chunks.length;
|
||||
} else {
|
||||
length += chunk.length;
|
||||
}
|
||||
});
|
||||
|
||||
stream.getBufferedValue = () => {
|
||||
if (array) {
|
||||
return chunks;
|
||||
}
|
||||
|
||||
return isBuffer ? Buffer.concat(chunks, length) : chunks.join('');
|
||||
};
|
||||
|
||||
stream.getBufferedLength = () => length;
|
||||
|
||||
return stream;
|
||||
};
|
||||
108
node_modules/.store/extract-zip@2.0.1/node_modules/get-stream/index.d.ts
generated
vendored
Normal file
108
node_modules/.store/extract-zip@2.0.1/node_modules/get-stream/index.d.ts
generated
vendored
Normal file
@@ -0,0 +1,108 @@
|
||||
/// <reference types="node"/>
import {Stream} from 'stream';

// Runtime error class surfaced as `getStream.MaxBufferError`; thrown when the
// buffered data exceeds the caller-supplied `maxBuffer` limit.
declare class MaxBufferErrorClass extends Error {
	readonly name: 'MaxBufferError';
	constructor();
}

declare namespace getStream {
	interface Options {
		/**
		Maximum length of the returned string. If it exceeds this value before the stream ends, the promise will be rejected with a `MaxBufferError` error.

		@default Infinity
		*/
		readonly maxBuffer?: number;
	}

	interface OptionsWithEncoding<EncodingType = BufferEncoding> extends Options {
		/**
		[Encoding](https://nodejs.org/api/buffer.html#buffer_buffer) of the incoming stream.

		@default 'utf8'
		*/
		readonly encoding?: EncodingType;
	}

	// Alias so consumers can write `getStream.MaxBufferError` as a type.
	type MaxBufferError = MaxBufferErrorClass;
}

declare const getStream: {
	/**
	Get the `stream` as a string.

	@returns A promise that resolves when the end event fires on the stream, indicating that there is no more data to be read. The stream is switched to flowing mode.

	@example
	```
	import * as fs from 'fs';
	import getStream = require('get-stream');

	(async () => {
		const stream = fs.createReadStream('unicorn.txt');

		console.log(await getStream(stream));
		//               ,,))))))));,
		//            __)))))))))))))),
		// \|/       -\(((((''''((((((((.
		// -*-==//////((''  .     `)))))),
		// /|\      ))| o    ;-.    '(((((                                  ,(,
		//          ( `|    /  )    ;))))'                               ,_))^;(~
		//             |   |   |   ,))((((_     _____------~~~-.        %,;(;(>';'~
		//             o_);   ;    )))(((` ~---~  `::           \      %%~~)(v;(`('~
		//                   ;    ''''````         `:       `:::|\,__,%%    );`'; ~
		//                  |   _                )     /      `:|`----'     `-'
		//            ______/\/~    |                 /        /
		//          /~;;.____/;;'  /          ___--,-(   `;;;/
		//         / //  _;______;'------~~~~~    /;;/\    /
		//        //  | |                        / ;   \;;,\
		//       (<_  | ;                      /',/-----'  _>
		//        \_| ||_                     //~;~~~~~~~~~
		//            `\_|                   (,~~
		//                                    \~\
		//                                     ~~
	})();
	```
	*/
	(stream: Stream, options?: getStream.OptionsWithEncoding): Promise<string>;

	/**
	Get the `stream` as a buffer.

	It honors the `maxBuffer` option as above, but it refers to byte length rather than string length.
	*/
	buffer(
		stream: Stream,
		options?: getStream.OptionsWithEncoding
	): Promise<Buffer>;

	/**
	Get the `stream` as an array of values.

	It honors both the `maxBuffer` and `encoding` options. The behavior changes slightly based on the encoding chosen:

	- When `encoding` is unset, it assumes an [object mode stream](https://nodesource.com/blog/understanding-object-streams/) and collects values emitted from `stream` unmodified. In this case `maxBuffer` refers to the number of items in the array (not the sum of their sizes).
	- When `encoding` is set to `buffer`, it collects an array of buffers. `maxBuffer` refers to the summed byte lengths of every buffer in the array.
	- When `encoding` is set to anything else, it collects an array of strings. `maxBuffer` refers to the summed character lengths of every string in the array.
	*/
	array<StreamObjectModeType>(
		stream: Stream,
		options?: getStream.Options
	): Promise<StreamObjectModeType[]>;
	array(
		stream: Stream,
		options: getStream.OptionsWithEncoding<'buffer'>
	): Promise<Buffer[]>;
	array(
		stream: Stream,
		options: getStream.OptionsWithEncoding<BufferEncoding>
	): Promise<string[]>;

	MaxBufferError: typeof MaxBufferErrorClass;

	// TODO: Remove this for the next major release
	default: typeof getStream;
};

export = getStream;
|
||||
60
node_modules/.store/extract-zip@2.0.1/node_modules/get-stream/index.js
generated
vendored
Normal file
60
node_modules/.store/extract-zip@2.0.1/node_modules/get-stream/index.js
generated
vendored
Normal file
@@ -0,0 +1,60 @@
|
||||
'use strict';
const {constants: BufferConstants} = require('buffer');
const pump = require('pump');
const bufferStream = require('./buffer-stream');

// Error used (as a promise rejection) when the buffered data grows past the
// caller-supplied `maxBuffer` limit.
class MaxBufferError extends Error {
	constructor() {
		super('maxBuffer exceeded');
		this.name = 'MaxBufferError';
	}
}

/**
 * Consume `inputStream` completely and return its buffered contents
 * (string, Buffer, or array depending on `options` — see ./buffer-stream).
 *
 * @param {stream.Stream} inputStream - stream to drain
 * @param {object} [options]
 * @param {number} [options.maxBuffer=Infinity] - reject with MaxBufferError
 *   once the buffered length exceeds this value
 * @returns {Promise<*>} the buffered value once the stream ends
 */
async function getStream(inputStream, options) {
	if (!inputStream) {
		return Promise.reject(new Error('Expected a stream'));
	}

	options = {
		maxBuffer: Infinity,
		...options
	};

	const {maxBuffer} = options;

	let stream;
	await new Promise((resolve, reject) => {
		const rejectPromise = error => {
			// Don't retrieve an oversized buffer.
			if (error && stream.getBufferedLength() <= BufferConstants.MAX_LENGTH) {
				// Attach what was read so far so callers can inspect partial data.
				error.bufferedData = stream.getBufferedValue();
			}

			reject(error);
		};

		// pump pipes inputStream into the collector and invokes the callback
		// once, on completion or on the first error from either stream.
		stream = pump(inputStream, bufferStream(options), error => {
			if (error) {
				rejectPromise(error);
				return;
			}

			resolve();
		});

		// Enforce maxBuffer incrementally rather than after the whole stream
		// has been consumed.
		stream.on('data', () => {
			if (stream.getBufferedLength() > maxBuffer) {
				rejectPromise(new MaxBufferError());
			}
		});
	});

	return stream.getBufferedValue();
}

module.exports = getStream;
// TODO: Remove this for the next major release
module.exports.default = getStream;
module.exports.buffer = (stream, options) => getStream(stream, {...options, encoding: 'buffer'});
module.exports.array = (stream, options) => getStream(stream, {...options, array: true});
module.exports.MaxBufferError = MaxBufferError;
|
||||
9
node_modules/.store/extract-zip@2.0.1/node_modules/get-stream/license
generated
vendored
Normal file
9
node_modules/.store/extract-zip@2.0.1/node_modules/get-stream/license
generated
vendored
Normal file
@@ -0,0 +1,9 @@
|
||||
MIT License
|
||||
|
||||
Copyright (c) Sindre Sorhus <sindresorhus@gmail.com> (https://sindresorhus.com)
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||||
53
node_modules/.store/extract-zip@2.0.1/node_modules/get-stream/package.json
generated
vendored
Normal file
53
node_modules/.store/extract-zip@2.0.1/node_modules/get-stream/package.json
generated
vendored
Normal file
@@ -0,0 +1,53 @@
|
||||
{
|
||||
"name": "get-stream",
|
||||
"version": "5.2.0",
|
||||
"description": "Get a stream as a string, buffer, or array",
|
||||
"license": "MIT",
|
||||
"repository": "sindresorhus/get-stream",
|
||||
"funding": "https://github.com/sponsors/sindresorhus",
|
||||
"author": {
|
||||
"name": "Sindre Sorhus",
|
||||
"email": "sindresorhus@gmail.com",
|
||||
"url": "https://sindresorhus.com"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=8"
|
||||
},
|
||||
"scripts": {
|
||||
"test": "xo && ava && tsd"
|
||||
},
|
||||
"files": [
|
||||
"index.js",
|
||||
"index.d.ts",
|
||||
"buffer-stream.js"
|
||||
],
|
||||
"keywords": [
|
||||
"get",
|
||||
"stream",
|
||||
"promise",
|
||||
"concat",
|
||||
"string",
|
||||
"text",
|
||||
"buffer",
|
||||
"read",
|
||||
"data",
|
||||
"consume",
|
||||
"readable",
|
||||
"readablestream",
|
||||
"array",
|
||||
"object"
|
||||
],
|
||||
"dependencies": {
|
||||
"pump": "^3.0.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/node": "^12.0.7",
|
||||
"ava": "^2.0.0",
|
||||
"into-stream": "^5.0.0",
|
||||
"tsd": "^0.7.2",
|
||||
"xo": "^0.24.0"
|
||||
},
|
||||
"__npminstall_done": true,
|
||||
"_from": "get-stream@5.2.0",
|
||||
"_resolved": "https://registry.npmmirror.com/get-stream/-/get-stream-5.2.0.tgz"
|
||||
}
|
||||
124
node_modules/.store/extract-zip@2.0.1/node_modules/get-stream/readme.md
generated
vendored
Normal file
124
node_modules/.store/extract-zip@2.0.1/node_modules/get-stream/readme.md
generated
vendored
Normal file
@@ -0,0 +1,124 @@
|
||||
# get-stream [](https://travis-ci.com/github/sindresorhus/get-stream)
|
||||
|
||||
> Get a stream as a string, buffer, or array
|
||||
|
||||
## Install
|
||||
|
||||
```
|
||||
$ npm install get-stream
|
||||
```
|
||||
|
||||
## Usage
|
||||
|
||||
```js
|
||||
const fs = require('fs');
|
||||
const getStream = require('get-stream');
|
||||
|
||||
(async () => {
|
||||
const stream = fs.createReadStream('unicorn.txt');
|
||||
|
||||
console.log(await getStream(stream));
|
||||
/*
|
||||
,,))))))));,
|
||||
__)))))))))))))),
|
||||
\|/ -\(((((''''((((((((.
|
||||
-*-==//////(('' . `)))))),
|
||||
/|\ ))| o ;-. '((((( ,(,
|
||||
( `| / ) ;))))' ,_))^;(~
|
||||
| | | ,))((((_ _____------~~~-. %,;(;(>';'~
|
||||
o_); ; )))(((` ~---~ `:: \ %%~~)(v;(`('~
|
||||
; ''''```` `: `:::|\,__,%% );`'; ~
|
||||
| _ ) / `:|`----' `-'
|
||||
______/\/~ | / /
|
||||
/~;;.____/;;' / ___--,-( `;;;/
|
||||
/ // _;______;'------~~~~~ /;;/\ /
|
||||
// | | / ; \;;,\
|
||||
(<_ | ; /',/-----' _>
|
||||
\_| ||_ //~;~~~~~~~~~
|
||||
`\_| (,~~
|
||||
\~\
|
||||
~~
|
||||
*/
|
||||
})();
|
||||
```
|
||||
|
||||
## API
|
||||
|
||||
The methods return a promise that resolves when the `end` event fires on the stream, indicating that there is no more data to be read. The stream is switched to flowing mode.
|
||||
|
||||
### getStream(stream, options?)
|
||||
|
||||
Get the `stream` as a string.
|
||||
|
||||
#### options
|
||||
|
||||
Type: `object`
|
||||
|
||||
##### encoding
|
||||
|
||||
Type: `string`\
|
||||
Default: `'utf8'`
|
||||
|
||||
[Encoding](https://nodejs.org/api/buffer.html#buffer_buffer) of the incoming stream.
|
||||
|
||||
##### maxBuffer
|
||||
|
||||
Type: `number`\
|
||||
Default: `Infinity`
|
||||
|
||||
Maximum length of the returned string. If it exceeds this value before the stream ends, the promise will be rejected with a `getStream.MaxBufferError` error.
|
||||
|
||||
### getStream.buffer(stream, options?)
|
||||
|
||||
Get the `stream` as a buffer.
|
||||
|
||||
It honors the `maxBuffer` option as above, but it refers to byte length rather than string length.
|
||||
|
||||
### getStream.array(stream, options?)
|
||||
|
||||
Get the `stream` as an array of values.
|
||||
|
||||
It honors both the `maxBuffer` and `encoding` options. The behavior changes slightly based on the encoding chosen:
|
||||
|
||||
- When `encoding` is unset, it assumes an [object mode stream](https://nodesource.com/blog/understanding-object-streams/) and collects values emitted from `stream` unmodified. In this case `maxBuffer` refers to the number of items in the array (not the sum of their sizes).
|
||||
|
||||
- When `encoding` is set to `buffer`, it collects an array of buffers. `maxBuffer` refers to the summed byte lengths of every buffer in the array.
|
||||
|
||||
- When `encoding` is set to anything else, it collects an array of strings. `maxBuffer` refers to the summed character lengths of every string in the array.
|
||||
|
||||
## Errors
|
||||
|
||||
If the input stream emits an `error` event, the promise will be rejected with the error. The buffered data will be attached to the `bufferedData` property of the error.
|
||||
|
||||
```js
|
||||
(async () => {
|
||||
try {
|
||||
await getStream(streamThatErrorsAtTheEnd('unicorn'));
|
||||
} catch (error) {
|
||||
console.log(error.bufferedData);
|
||||
//=> 'unicorn'
|
||||
}
|
||||
})()
|
||||
```
|
||||
|
||||
## FAQ
|
||||
|
||||
### How is this different from [`concat-stream`](https://github.com/maxogden/concat-stream)?
|
||||
|
||||
This module accepts a stream instead of being one and returns a promise instead of using a callback. The API is simpler and it only supports returning a string, buffer, or array. It doesn't have a fragile type inference. You explicitly choose what you want. And it doesn't depend on the huge `readable-stream` package.
|
||||
|
||||
## Related
|
||||
|
||||
- [get-stdin](https://github.com/sindresorhus/get-stdin) - Get stdin as a string or buffer
|
||||
|
||||
---
|
||||
|
||||
<div align="center">
|
||||
<b>
|
||||
<a href="https://tidelift.com/subscription/pkg/npm-get-stream?utm_source=npm-get-stream&utm_medium=referral&utm_campaign=readme">Get professional support for this package with a Tidelift subscription</a>
|
||||
</b>
|
||||
<br>
|
||||
<sub>
|
||||
Tidelift helps make open source sustainable for maintainers while giving companies<br>assurances about security, maintenance, and licensing for their dependencies.
|
||||
</sub>
|
||||
</div>
|
||||
21
node_modules/.store/extract-zip@2.0.1/node_modules/yauzl/LICENSE
generated
vendored
Normal file
21
node_modules/.store/extract-zip@2.0.1/node_modules/yauzl/LICENSE
generated
vendored
Normal file
@@ -0,0 +1,21 @@
|
||||
The MIT License (MIT)
|
||||
|
||||
Copyright (c) 2014 Josh Wolfe
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
658
node_modules/.store/extract-zip@2.0.1/node_modules/yauzl/README.md
generated
vendored
Normal file
658
node_modules/.store/extract-zip@2.0.1/node_modules/yauzl/README.md
generated
vendored
Normal file
@@ -0,0 +1,658 @@
|
||||
# yauzl
|
||||
|
||||
[](https://travis-ci.org/thejoshwolfe/yauzl)
|
||||
[](https://coveralls.io/r/thejoshwolfe/yauzl)
|
||||
|
||||
yet another unzip library for node. For zipping, see
|
||||
[yazl](https://github.com/thejoshwolfe/yazl).
|
||||
|
||||
Design principles:
|
||||
|
||||
* Follow the spec.
|
||||
Don't scan for local file headers.
|
||||
Read the central directory for file metadata.
|
||||
(see [No Streaming Unzip API](#no-streaming-unzip-api)).
|
||||
* Don't block the JavaScript thread.
|
||||
Use and provide async APIs.
|
||||
* Keep memory usage under control.
|
||||
Don't attempt to buffer entire files in RAM at once.
|
||||
* Never crash (if used properly).
|
||||
Don't let malformed zip files bring down client applications who are trying to catch errors.
|
||||
* Catch unsafe file names.
|
||||
See `validateFileName()`.
|
||||
|
||||
## Usage
|
||||
|
||||
```js
|
||||
var yauzl = require("yauzl");
|
||||
|
||||
yauzl.open("path/to/file.zip", {lazyEntries: true}, function(err, zipfile) {
|
||||
if (err) throw err;
|
||||
zipfile.readEntry();
|
||||
zipfile.on("entry", function(entry) {
|
||||
if (/\/$/.test(entry.fileName)) {
|
||||
// Directory file names end with '/'.
|
||||
// Note that entries for directories themselves are optional.
|
||||
// An entry's fileName implicitly requires its parent directories to exist.
|
||||
zipfile.readEntry();
|
||||
} else {
|
||||
// file entry
|
||||
zipfile.openReadStream(entry, function(err, readStream) {
|
||||
if (err) throw err;
|
||||
readStream.on("end", function() {
|
||||
zipfile.readEntry();
|
||||
});
|
||||
readStream.pipe(somewhere);
|
||||
});
|
||||
}
|
||||
});
|
||||
});
|
||||
```
|
||||
|
||||
See also `examples/` for more usage examples.
|
||||
|
||||
## API
|
||||
|
||||
The default for every optional `callback` parameter is:
|
||||
|
||||
```js
|
||||
function defaultCallback(err) {
|
||||
if (err) throw err;
|
||||
}
|
||||
```
|
||||
|
||||
### open(path, [options], [callback])
|
||||
|
||||
Calls `fs.open(path, "r")` and reads the `fd` effectively the same as `fromFd()` would.
|
||||
|
||||
`options` may be omitted or `null`. The defaults are `{autoClose: true, lazyEntries: false, decodeStrings: true, validateEntrySizes: true, strictFileNames: false}`.
|
||||
|
||||
`autoClose` is effectively equivalent to:
|
||||
|
||||
```js
|
||||
zipfile.once("end", function() {
|
||||
zipfile.close();
|
||||
});
|
||||
```
|
||||
|
||||
`lazyEntries` indicates that entries should be read only when `readEntry()` is called.
|
||||
If `lazyEntries` is `false`, `entry` events will be emitted as fast as possible to allow `pipe()`ing
|
||||
file data from all entries in parallel.
|
||||
This is not recommended, as it can lead to out of control memory usage for zip files with many entries.
|
||||
See [issue #22](https://github.com/thejoshwolfe/yauzl/issues/22).
|
||||
If `lazyEntries` is `true`, an `entry` or `end` event will be emitted in response to each call to `readEntry()`.
|
||||
This allows processing of one entry at a time, and will keep memory usage under control for zip files with many entries.
|
||||
|
||||
`decodeStrings` is the default and causes yauzl to decode strings with `CP437` or `UTF-8` as required by the spec.
|
||||
The exact effects of turning this option off are:
|
||||
|
||||
* `zipfile.comment`, `entry.fileName`, and `entry.fileComment` will be `Buffer` objects instead of `String`s.
|
||||
* Any Info-ZIP Unicode Path Extra Field will be ignored. See `extraFields`.
|
||||
* Automatic file name validation will not be performed. See `validateFileName()`.
|
||||
|
||||
`validateEntrySizes` is the default and ensures that an entry's reported uncompressed size matches its actual uncompressed size.
|
||||
This check happens as early as possible, which is either before emitting each `"entry"` event (for entries with no compression),
|
||||
or during the `readStream` piping after calling `openReadStream()`.
|
||||
See `openReadStream()` for more information on defending against zip bomb attacks.
|
||||
|
||||
When `strictFileNames` is `false` (the default) and `decodeStrings` is `true`,
|
||||
all backslash (`\`) characters in each `entry.fileName` are replaced with forward slashes (`/`).
|
||||
The spec forbids file names with backslashes,
|
||||
but Microsoft's `System.IO.Compression.ZipFile` class in .NET versions 4.5.0 until 4.6.1
|
||||
creates non-conformant zipfiles with backslashes in file names.
|
||||
`strictFileNames` is `false` by default so that clients can read these
|
||||
non-conformant zipfiles without knowing about this Microsoft-specific bug.
|
||||
When `strictFileNames` is `true` and `decodeStrings` is `true`,
|
||||
entries with backslashes in their file names will result in an error. See `validateFileName()`.
|
||||
When `decodeStrings` is `false`, `strictFileNames` has no effect.
|
||||
|
||||
The `callback` is given the arguments `(err, zipfile)`.
|
||||
An `err` is provided if the End of Central Directory Record cannot be found, or if its metadata appears malformed.
|
||||
This kind of error usually indicates that this is not a zip file.
|
||||
Otherwise, `zipfile` is an instance of `ZipFile`.
|
||||
|
||||
### fromFd(fd, [options], [callback])
|
||||
|
||||
Reads from the fd, which is presumed to be an open .zip file.
|
||||
Note that random access is required by the zip file specification,
|
||||
so the fd cannot be an open socket or any other fd that does not support random access.
|
||||
|
||||
`options` may be omitted or `null`. The defaults are `{autoClose: false, lazyEntries: false, decodeStrings: true, validateEntrySizes: true, strictFileNames: false}`.
|
||||
|
||||
See `open()` for the meaning of the options and callback.
|
||||
|
||||
### fromBuffer(buffer, [options], [callback])
|
||||
|
||||
Like `fromFd()`, but reads from a RAM buffer instead of an open file.
|
||||
`buffer` is a `Buffer`.
|
||||
|
||||
If a `ZipFile` is acquired from this method,
|
||||
it will never emit the `close` event,
|
||||
and calling `close()` is not necessary.
|
||||
|
||||
`options` may be omitted or `null`. The defaults are `{lazyEntries: false, decodeStrings: true, validateEntrySizes: true, strictFileNames: false}`.
|
||||
|
||||
See `open()` for the meaning of the options and callback.
|
||||
The `autoClose` option is ignored for this method.
|
||||
|
||||
### fromRandomAccessReader(reader, totalSize, [options], [callback])
|
||||
|
||||
This method of reading a zip file allows clients to implement their own back-end file system.
|
||||
For example, a client might translate read calls into network requests.
|
||||
|
||||
The `reader` parameter must be of a type that is a subclass of
|
||||
[RandomAccessReader](#class-randomaccessreader) that implements the required methods.
|
||||
The `totalSize` is a Number and indicates the total file size of the zip file.
|
||||
|
||||
`options` may be omitted or `null`. The defaults are `{autoClose: true, lazyEntries: false, decodeStrings: true, validateEntrySizes: true, strictFileNames: false}`.
|
||||
|
||||
See `open()` for the meaning of the options and callback.
|
||||
|
||||
### dosDateTimeToDate(date, time)
|
||||
|
||||
Converts MS-DOS `date` and `time` data into a JavaScript `Date` object.
|
||||
Each parameter is a `Number` treated as an unsigned 16-bit integer.
|
||||
Note that this format does not support timezones,
|
||||
so the returned object will use the local timezone.
|
||||
|
||||
### validateFileName(fileName)
|
||||
|
||||
Returns `null` or a `String` error message depending on the validity of `fileName`.
|
||||
If `fileName` starts with `"/"` or `/[A-Za-z]:\//` or if it contains `".."` path segments or `"\\"`,
|
||||
this function returns an error message appropriate for use like this:
|
||||
|
||||
```js
|
||||
var errorMessage = yauzl.validateFileName(fileName);
|
||||
if (errorMessage != null) throw new Error(errorMessage);
|
||||
```
|
||||
|
||||
This function is automatically run for each entry, as long as `decodeStrings` is `true`.
|
||||
See `open()`, `strictFileNames`, and `Event: "entry"` for more information.
|
||||
|
||||
### Class: ZipFile
|
||||
|
||||
The constructor for the class is not part of the public API.
|
||||
Use `open()`, `fromFd()`, `fromBuffer()`, or `fromRandomAccessReader()` instead.
|
||||
|
||||
#### Event: "entry"
|
||||
|
||||
Callback gets `(entry)`, which is an `Entry`.
|
||||
See `open()` and `readEntry()` for when this event is emitted.
|
||||
|
||||
If `decodeStrings` is `true`, entries emitted via this event have already passed file name validation.
|
||||
See `validateFileName()` and `open()` for more information.
|
||||
|
||||
If `validateEntrySizes` is `true` and this entry's `compressionMethod` is `0` (stored without compression),
|
||||
this entry has already passed entry size validation.
|
||||
See `open()` for more information.
|
||||
|
||||
#### Event: "end"
|
||||
|
||||
Emitted after the last `entry` event has been emitted.
|
||||
See `open()` and `readEntry()` for more info on when this event is emitted.
|
||||
|
||||
#### Event: "close"
|
||||
|
||||
Emitted after the fd is actually closed.
|
||||
This is after calling `close()` (or after the `end` event when `autoClose` is `true`),
|
||||
and after all stream pipelines created from `openReadStream()` have finished reading data from the fd.
|
||||
|
||||
If this `ZipFile` was acquired from `fromRandomAccessReader()`,
|
||||
the "fd" in the previous paragraph refers to the `RandomAccessReader` implemented by the client.
|
||||
|
||||
If this `ZipFile` was acquired from `fromBuffer()`, this event is never emitted.
|
||||
|
||||
#### Event: "error"
|
||||
|
||||
Emitted in the case of errors with reading the zip file.
|
||||
(Note that other errors can be emitted from the streams created from `openReadStream()` as well.)
|
||||
After this event has been emitted, no further `entry`, `end`, or `error` events will be emitted,
|
||||
but the `close` event may still be emitted.
|
||||
|
||||
#### readEntry()
|
||||
|
||||
Causes this `ZipFile` to emit an `entry` or `end` event (or an `error` event).
|
||||
This method must only be called when this `ZipFile` was created with the `lazyEntries` option set to `true` (see `open()`).
|
||||
When this `ZipFile` was created with the `lazyEntries` option set to `true`,
|
||||
`entry` and `end` events are only ever emitted in response to this method call.
|
||||
|
||||
The event that is emitted in response to this method will not be emitted until after this method has returned,
|
||||
so it is safe to call this method before attaching event listeners.
|
||||
|
||||
After calling this method, calling this method again before the response event has been emitted will cause undefined behavior.
|
||||
Calling this method after the `end` event has been emitted will cause undefined behavior.
|
||||
Calling this method after calling `close()` will cause undefined behavior.
|
||||
|
||||
#### openReadStream(entry, [options], callback)
|
||||
|
||||
`entry` must be an `Entry` object from this `ZipFile`.
|
||||
`callback` gets `(err, readStream)`, where `readStream` is a `Readable Stream` that provides the file data for this entry.
|
||||
If this zipfile is already closed (see `close()`), the `callback` will receive an `err`.
|
||||
|
||||
`options` may be omitted or `null`, and has the following defaults:
|
||||
|
||||
```js
|
||||
{
|
||||
decompress: entry.isCompressed() ? true : null,
|
||||
decrypt: null,
|
||||
start: 0, // actually the default is null, see below
|
||||
end: entry.compressedSize, // actually the default is null, see below
|
||||
}
|
||||
```
|
||||
|
||||
If the entry is compressed (with a supported compression method),
|
||||
and the `decompress` option is `true` (or omitted),
|
||||
the read stream provides the decompressed data.
|
||||
Omitting the `decompress` option is what most clients should do.
|
||||
|
||||
The `decompress` option must be `null` (or omitted) when the entry is not compressed (see `isCompressed()`),
|
||||
and either `true` (or omitted) or `false` when the entry is compressed.
|
||||
Specifying `decompress: false` for a compressed entry causes the read stream
|
||||
to provide the raw compressed file data without going through a zlib inflate transform.
|
||||
|
||||
If the entry is encrypted (see `isEncrypted()`), clients may want to avoid calling `openReadStream()` on the entry entirely.
|
||||
Alternatively, clients may call `openReadStream()` for encrypted entries and specify `decrypt: false`.
|
||||
If the entry is also compressed, clients must *also* specify `decompress: false`.
|
||||
Specifying `decrypt: false` for an encrypted entry causes the read stream to provide the raw, still-encrypted file data.
|
||||
(This data includes the 12-byte header described in the spec.)
|
||||
|
||||
The `decrypt` option must be `null` (or omitted) for non-encrypted entries, and `false` for encrypted entries.
|
||||
Omitting the `decrypt` option (or specifying it as `null`) for an encrypted entry
|
||||
will result in the `callback` receiving an `err`.
|
||||
This default behavior is so that clients not accounting for encrypted files aren't surprised by bogus file data.
|
||||
|
||||
The `start` (inclusive) and `end` (exclusive) options are byte offsets into this entry's file data,
|
||||
and can be used to obtain part of an entry's file data rather than the whole thing.
|
||||
If either of these options are specified and non-`null`,
|
||||
then the above options must be used to obtain the file's raw data.
|
||||
Specifying `{start: 0, end: entry.compressedSize}` will result in the complete file,
|
||||
which is effectively the default values for these options,
|
||||
but note that unlike omitting the options, when you specify `start` or `end` as any non-`null` value,
|
||||
the above requirement is still enforced that you must also pass the appropriate options to get the file's raw data.
|
||||
|
||||
It's possible for the `readStream` provided to the `callback` to emit errors for several reasons.
|
||||
For example, if zlib cannot decompress the data, the zlib error will be emitted from the `readStream`.
|
||||
Two more error cases (when `validateEntrySizes` is `true`) are if the decompressed data has too many
|
||||
or too few actual bytes compared to the reported byte count from the entry's `uncompressedSize` field.
|
||||
yauzl notices this false information and emits an error from the `readStream`
|
||||
after some number of bytes have already been piped through the stream.
|
||||
|
||||
This check allows clients to trust the `uncompressedSize` field in `Entry` objects.
|
||||
Guarding against [zip bomb](http://en.wikipedia.org/wiki/Zip_bomb) attacks can be accomplished by
|
||||
doing some heuristic checks on the size metadata and then watching out for the above errors.
|
||||
Such heuristics are outside the scope of this library,
|
||||
but enforcing the `uncompressedSize` is implemented here as a security feature.
|
||||
|
||||
It is possible to destroy the `readStream` before it has piped all of its data.
|
||||
To do this, call `readStream.destroy()`.
|
||||
You must `unpipe()` the `readStream` from any destination before calling `readStream.destroy()`.
|
||||
If this zipfile was created using `fromRandomAccessReader()`, the `RandomAccessReader` implementation
|
||||
must provide readable streams that implement a `.destroy()` method (see `randomAccessReader._readStreamForRange()`)
|
||||
in order for calls to `readStream.destroy()` to work in this context.
|
||||
|
||||
#### close()
|
||||
|
||||
Causes all future calls to `openReadStream()` to fail,
|
||||
and closes the fd, if any, after all streams created by `openReadStream()` have emitted their `end` events.
|
||||
|
||||
If the `autoClose` option is set to `true` (see `open()`),
|
||||
this function will be called automatically effectively in response to this object's `end` event.
|
||||
|
||||
If the `lazyEntries` option is set to `false` (see `open()`) and this object's `end` event has not been emitted yet,
|
||||
this function causes undefined behavior.
|
||||
If the `lazyEntries` option is set to `true`,
|
||||
you can call this function instead of calling `readEntry()` to abort reading the entries of a zipfile.
|
||||
|
||||
It is safe to call this function multiple times; after the first call, successive calls have no effect.
|
||||
This includes situations where the `autoClose` option effectively calls this function for you.
|
||||
|
||||
If `close()` is never called, then the zipfile is "kept open".
|
||||
For zipfiles created with `fromFd()`, this will leave the `fd` open, which may be desirable.
|
||||
For zipfiles created with `open()`, this will leave the underlying `fd` open, thereby "leaking" it, which is probably undesirable.
|
||||
For zipfiles created with `fromRandomAccessReader()`, the reader's `close()` method will never be called.
|
||||
For zipfiles created with `fromBuffer()`, the `close()` function has no effect whether called or not.
|
||||
|
||||
Regardless of how this `ZipFile` was created, there are no resources other than those listed above that require cleanup from this function.
|
||||
This means it may be desirable to never call `close()` in some usecases.
|
||||
|
||||
#### isOpen
|
||||
|
||||
`Boolean`. `true` until `close()` is called; then it's `false`.
|
||||
|
||||
#### entryCount
|
||||
|
||||
`Number`. Total number of central directory records.
|
||||
|
||||
#### comment
|
||||
|
||||
`String`. Always decoded with `CP437` per the spec.
|
||||
|
||||
If `decodeStrings` is `false` (see `open()`), this field is the undecoded `Buffer` instead of a decoded `String`.
|
||||
|
||||
### Class: Entry
|
||||
|
||||
Objects of this class represent Central Directory Records.
|
||||
Refer to the zipfile specification for more details about these fields.
|
||||
|
||||
These fields are of type `Number`:
|
||||
|
||||
* `versionMadeBy`
|
||||
* `versionNeededToExtract`
|
||||
* `generalPurposeBitFlag`
|
||||
* `compressionMethod`
|
||||
* `lastModFileTime` (MS-DOS format, see `getLastModDateTime`)
|
||||
* `lastModFileDate` (MS-DOS format, see `getLastModDateTime`)
|
||||
* `crc32`
|
||||
* `compressedSize`
|
||||
* `uncompressedSize`
|
||||
* `fileNameLength` (bytes)
|
||||
* `extraFieldLength` (bytes)
|
||||
* `fileCommentLength` (bytes)
|
||||
* `internalFileAttributes`
|
||||
* `externalFileAttributes`
|
||||
* `relativeOffsetOfLocalHeader`
|
||||
|
||||
#### fileName
|
||||
|
||||
`String`.
|
||||
Following the spec, the bytes for the file name are decoded with
|
||||
`UTF-8` if `generalPurposeBitFlag & 0x800`, otherwise with `CP437`.
|
||||
Alternatively, this field may be populated from the Info-ZIP Unicode Path Extra Field
|
||||
(see `extraFields`).
|
||||
|
||||
This field is automatically validated by `validateFileName()` before yauzl emits an "entry" event.
|
||||
If this field would contain unsafe characters, yauzl emits an error instead of an entry.
|
||||
|
||||
If `decodeStrings` is `false` (see `open()`), this field is the undecoded `Buffer` instead of a decoded `String`.
|
||||
Therefore, `generalPurposeBitFlag` and any Info-ZIP Unicode Path Extra Field are ignored.
|
||||
Furthermore, no automatic file name validation is performed for this file name.
|
||||
|
||||
#### extraFields
|
||||
|
||||
`Array` with each entry in the form `{id: id, data: data}`,
|
||||
where `id` is a `Number` and `data` is a `Buffer`.
|
||||
|
||||
This library looks for and reads the ZIP64 Extended Information Extra Field (0x0001)
|
||||
in order to support ZIP64 format zip files.
|
||||
|
||||
This library also looks for and reads the Info-ZIP Unicode Path Extra Field (0x7075)
|
||||
in order to support some zipfiles that use it instead of General Purpose Bit 11
|
||||
to convey `UTF-8` file names.
|
||||
When the field is identified and verified to be reliable (see the zipfile spec),
|
||||
the file name in this field is stored in the `fileName` property,
|
||||
and the file name in the central directory record for this entry is ignored.
|
||||
Note that when `decodeStrings` is false, all Info-ZIP Unicode Path Extra Fields are ignored.
|
||||
|
||||
None of the other fields are considered significant by this library.
|
||||
Fields that this library reads are left unaltered in the `extraFields` array.
|
||||
|
||||
#### fileComment
|
||||
|
||||
`String` decoded with the charset indicated by `generalPurposeBitFlag & 0x800` as with the `fileName`.
|
||||
(The Info-ZIP Unicode Path Extra Field has no effect on the charset used for this field.)
|
||||
|
||||
If `decodeStrings` is `false` (see `open()`), this field is the undecoded `Buffer` instead of a decoded `String`.
|
||||
|
||||
Prior to yauzl version 2.7.0, this field was erroneously documented as `comment` instead of `fileComment`.
|
||||
For compatibility with any code that uses the field name `comment`,
|
||||
yauzl creates an alias field named `comment` which is identical to `fileComment`.
|
||||
|
||||
#### getLastModDate()
|
||||
|
||||
Effectively implemented as:
|
||||
|
||||
```js
|
||||
return dosDateTimeToDate(this.lastModFileDate, this.lastModFileTime);
|
||||
```
|
||||
|
||||
#### isEncrypted()
|
||||
|
||||
Returns whether this entry is encrypted with "Traditional Encryption".
|
||||
Effectively implemented as:
|
||||
|
||||
```js
|
||||
return (this.generalPurposeBitFlag & 0x1) !== 0;
|
||||
```
|
||||
|
||||
See `openReadStream()` for the implications of this value.
|
||||
|
||||
Note that "Strong Encryption" is not supported, and will result in an `"error"` event emitted from the `ZipFile`.
|
||||
|
||||
#### isCompressed()
|
||||
|
||||
Effectively implemented as:
|
||||
|
||||
```js
|
||||
return this.compressionMethod === 8;
|
||||
```
|
||||
|
||||
See `openReadStream()` for the implications of this value.
|
||||
|
||||
### Class: RandomAccessReader
|
||||
|
||||
This class is meant to be subclassed by clients and instantiated for the `fromRandomAccessReader()` function.
|
||||
|
||||
An example implementation can be found in `test/test.js`.
|
||||
|
||||
#### randomAccessReader._readStreamForRange(start, end)
|
||||
|
||||
Subclasses *must* implement this method.
|
||||
|
||||
`start` and `end` are Numbers and indicate byte offsets from the start of the file.
|
||||
`end` is exclusive, so `_readStreamForRange(0x1000, 0x2000)` would indicate to read `0x1000` bytes.
|
||||
`end - start` will always be at least `1`.
|
||||
|
||||
This method should return a readable stream which will be `pipe()`ed into another stream.
|
||||
It is expected that the readable stream will provide data in several chunks if necessary.
|
||||
If the readable stream provides too many or too few bytes, an error will be emitted.
|
||||
(Note that `validateEntrySizes` has no effect on this check,
|
||||
because this is a low-level API that should behave correctly regardless of the contents of the file.)
|
||||
Any errors emitted on the readable stream will be handled and re-emitted on the client-visible stream
|
||||
(returned from `zipfile.openReadStream()`) or provided as the `err` argument to the appropriate callback
|
||||
(for example, for `fromRandomAccessReader()`).
|
||||
|
||||
The returned stream *must* implement a method `.destroy()`
|
||||
if you call `readStream.destroy()` on streams you get from `openReadStream()`.
|
||||
If you never call `readStream.destroy()`, then streams returned from this method do not need to implement a method `.destroy()`.
|
||||
`.destroy()` should abort any streaming that is in progress and clean up any associated resources.
|
||||
`.destroy()` will only be called after the stream has been `unpipe()`d from its destination.
|
||||
|
||||
Note that the stream returned from this method might not be the same object that is provided by `openReadStream()`.
|
||||
The stream returned from this method might be `pipe()`d through one or more filter streams (for example, a zlib inflate stream).
|
||||
|
||||
#### randomAccessReader.read(buffer, offset, length, position, callback)
|
||||
|
||||
Subclasses may implement this method.
|
||||
The default implementation uses `createReadStream()` to fill the `buffer`.
|
||||
|
||||
This method should behave like `fs.read()`.
|
||||
|
||||
#### randomAccessReader.close(callback)
|
||||
|
||||
Subclasses may implement this method.
|
||||
The default implementation is effectively `setImmediate(callback);`.
|
||||
|
||||
`callback` takes parameters `(err)`.
|
||||
|
||||
This method is called once all the streams returned from `_readStreamForRange()` have ended,
|
||||
and no more `_readStreamForRange()` or `read()` requests will be issued to this object.
|
||||
|
||||
## How to Avoid Crashing
|
||||
|
||||
When a malformed zipfile is encountered, the default behavior is to crash (throw an exception).
|
||||
If you want to handle errors more gracefully than this,
|
||||
be sure to do the following:
|
||||
|
||||
* Provide `callback` parameters where they are allowed, and check the `err` parameter.
|
||||
* Attach a listener for the `error` event on any `ZipFile` object you get from `open()`, `fromFd()`, `fromBuffer()`, or `fromRandomAccessReader()`.
|
||||
* Attach a listener for the `error` event on any stream you get from `openReadStream()`.
|
||||
|
||||
Minor version updates to yauzl will not add any additional requirements to this list.
|
||||
|
||||
## Limitations
|
||||
|
||||
### No Streaming Unzip API
|
||||
|
||||
Due to the design of the .zip file format, it's impossible to interpret a .zip file from start to finish
|
||||
(such as from a readable stream) without sacrificing correctness.
|
||||
The Central Directory, which is the authority on the contents of the .zip file, is at the end of a .zip file, not the beginning.
|
||||
A streaming API would need to either buffer the entire .zip file to get to the Central Directory before interpreting anything
|
||||
(defeating the purpose of a streaming interface), or rely on the Local File Headers which are interspersed through the .zip file.
|
||||
However, the Local File Headers are explicitly denounced in the spec as being unreliable copies of the Central Directory,
|
||||
so trusting them would be a violation of the spec.
|
||||
|
||||
Any library that offers a streaming unzip API must make one of the above two compromises,
|
||||
which makes the library either dishonest or nonconformant (usually the latter).
|
||||
This library insists on correctness and adherence to the spec, and so does not offer a streaming API.
|
||||
|
||||
Here is a way to create a spec-conformant .zip file using the `zip` command line program (Info-ZIP)
|
||||
available in most unix-like environments, that is (nearly) impossible to parse correctly with a streaming parser:
|
||||
|
||||
```
|
||||
$ echo -ne '\x50\x4b\x07\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' > file.txt
|
||||
$ zip -q0 - file.txt | cat > out.zip
|
||||
```
|
||||
|
||||
This .zip file contains a single file entry that uses General Purpose Bit 3,
|
||||
which means the Local File Header doesn't know the size of the file.
|
||||
Any streaming parser that encounters this situation will either immediately fail,
|
||||
or attempt to search for the Data Descriptor after the file's contents.
|
||||
The file's contents is a sequence of 16-bytes crafted to exactly mimic a valid Data Descriptor for an empty file,
|
||||
which will fool any parser that gets this far into thinking that the file is empty rather than containing 16-bytes.
|
||||
What follows the file's real contents is the file's real Data Descriptor,
|
||||
which will likely cause some kind of signature mismatch error for a streaming parser (if one hasn't occurred already).
|
||||
|
||||
By using General Purpose Bit 3 (and compression method 0),
|
||||
it's possible to create arbitrarily ambiguous .zip files that
|
||||
distract parsers with file contents that contain apparently valid .zip file metadata.
|
||||
|
||||
### Limited ZIP64 Support
|
||||
|
||||
For ZIP64, only zip files smaller than `8PiB` are supported,
|
||||
not the full `16EiB` range that a 64-bit integer should be able to index.
|
||||
This is due to the JavaScript Number type being an IEEE 754 double precision float.
|
||||
|
||||
The Node.js `fs` module probably has this same limitation.
|
||||
|
||||
### ZIP64 Extensible Data Sector Is Ignored
|
||||
|
||||
The spec does not allow zip file creators to put arbitrary data here,
|
||||
but rather reserves its use for PKWARE and mentions something about Z390.
|
||||
This doesn't seem useful to expose in this library, so it is ignored.
|
||||
|
||||
### No Multi-Disk Archive Support
|
||||
|
||||
This library does not support multi-disk zip files.
|
||||
The multi-disk fields in the zipfile spec were intended for a zip file to span multiple floppy disks,
|
||||
which probably never happens now.
|
||||
If the "number of this disk" field in the End of Central Directory Record is not `0`,
|
||||
the `open()`, `fromFd()`, `fromBuffer()`, or `fromRandomAccessReader()` `callback` will receive an `err`.
|
||||
By extension the following zip file fields are ignored by this library and not provided to clients:
|
||||
|
||||
* Disk where central directory starts
|
||||
* Number of central directory records on this disk
|
||||
* Disk number where file starts
|
||||
|
||||
### Limited Encryption Handling
|
||||
|
||||
You can detect when a file entry is encrypted with "Traditional Encryption" via `isEncrypted()`,
|
||||
but yauzl will not help you decrypt it.
|
||||
See `openReadStream()`.
|
||||
|
||||
If a zip file contains file entries encrypted with "Strong Encryption", yauzl emits an error.
|
||||
|
||||
If the central directory is encrypted or compressed, yauzl emits an error.
|
||||
|
||||
### Local File Headers Are Ignored
|
||||
|
||||
Many unzip libraries mistakenly read the Local File Header data in zip files.
|
||||
This data is officially defined to be redundant with the Central Directory information,
|
||||
and is not to be trusted.
|
||||
Aside from checking the signature, yauzl ignores the content of the Local File Header.
|
||||
|
||||
### No CRC-32 Checking
|
||||
|
||||
This library provides the `crc32` field of `Entry` objects read from the Central Directory.
|
||||
However, this field is not used for anything in this library.
|
||||
|
||||
### versionNeededToExtract Is Ignored
|
||||
|
||||
The field `versionNeededToExtract` is ignored,
|
||||
because this library doesn't support the complete zip file spec at any version,
|
||||
|
||||
### No Support For Obscure Compression Methods
|
||||
|
||||
Regarding the `compressionMethod` field of `Entry` objects,
|
||||
only method `0` (stored with no compression)
|
||||
and method `8` (deflated) are supported.
|
||||
Any of the other 15 official methods will cause the `openReadStream()` `callback` to receive an `err`.
|
||||
|
||||
### Data Descriptors Are Ignored
|
||||
|
||||
There may or may not be Data Descriptor sections in a zip file.
|
||||
This library provides no support for finding or interpreting them.
|
||||
|
||||
### Archive Extra Data Record Is Ignored
|
||||
|
||||
There may or may not be an Archive Extra Data Record section in a zip file.
|
||||
This library provides no support for finding or interpreting it.
|
||||
|
||||
### No Language Encoding Flag Support
|
||||
|
||||
Zip files officially support charset encodings other than CP437 and UTF-8,
|
||||
but the zip file spec does not specify how it works.
|
||||
This library makes no attempt to interpret the Language Encoding Flag.
|
||||
|
||||
## Change History
|
||||
|
||||
* 2.10.0
|
||||
* Added support for non-conformant zipfiles created by Microsoft, and added option `strictFileNames` to disable the workaround. [issue #66](https://github.com/thejoshwolfe/yauzl/issues/66), [issue #88](https://github.com/thejoshwolfe/yauzl/issues/88)
|
||||
* 2.9.2
|
||||
* Removed `tools/hexdump-zip.js` and `tools/hex2bin.js`. Those tools are now located here: [thejoshwolfe/hexdump-zip](https://github.com/thejoshwolfe/hexdump-zip) and [thejoshwolfe/hex2bin](https://github.com/thejoshwolfe/hex2bin)
|
||||
* Worked around performance problem with zlib when using `fromBuffer()` and `readStream.destroy()` for large compressed files. [issue #87](https://github.com/thejoshwolfe/yauzl/issues/87)
|
||||
* 2.9.1
|
||||
* Removed `console.log()` accidentally introduced in 2.9.0. [issue #64](https://github.com/thejoshwolfe/yauzl/issues/64)
|
||||
* 2.9.0
|
||||
* Throw an exception if `readEntry()` is called without `lazyEntries:true`. Previously this caused undefined behavior. [issue #63](https://github.com/thejoshwolfe/yauzl/issues/63)
|
||||
* 2.8.0
|
||||
* Added option `validateEntrySizes`. [issue #53](https://github.com/thejoshwolfe/yauzl/issues/53)
|
||||
* Added `examples/promises.js`
|
||||
* Added ability to read raw file data via `decompress` and `decrypt` options. [issue #11](https://github.com/thejoshwolfe/yauzl/issues/11), [issue #38](https://github.com/thejoshwolfe/yauzl/issues/38), [pull #39](https://github.com/thejoshwolfe/yauzl/pull/39)
|
||||
* Added `start` and `end` options to `openReadStream()`. [issue #38](https://github.com/thejoshwolfe/yauzl/issues/38)
|
||||
* 2.7.0
|
||||
* Added option `decodeStrings`. [issue #42](https://github.com/thejoshwolfe/yauzl/issues/42)
|
||||
* Fixed documentation for `entry.fileComment` and added compatibility alias. [issue #47](https://github.com/thejoshwolfe/yauzl/issues/47)
|
||||
* 2.6.0
|
||||
* Support Info-ZIP Unicode Path Extra Field, used by WinRAR for Chinese file names. [issue #33](https://github.com/thejoshwolfe/yauzl/issues/33)
|
||||
* 2.5.0
|
||||
* Ignore malformed Extra Field that is common in Android .apk files. [issue #31](https://github.com/thejoshwolfe/yauzl/issues/31)
|
||||
* 2.4.3
|
||||
* Fix crash when parsing malformed Extra Field buffers. [issue #31](https://github.com/thejoshwolfe/yauzl/issues/31)
|
||||
* 2.4.2
|
||||
* Remove .npmignore and .travis.yml from npm package.
|
||||
* 2.4.1
|
||||
* Fix error handling.
|
||||
* 2.4.0
|
||||
* Add ZIP64 support. [issue #6](https://github.com/thejoshwolfe/yauzl/issues/6)
|
||||
* Add `lazyEntries` option. [issue #22](https://github.com/thejoshwolfe/yauzl/issues/22)
|
||||
* Add `readStream.destroy()` method. [issue #26](https://github.com/thejoshwolfe/yauzl/issues/26)
|
||||
* Add `fromRandomAccessReader()`. [issue #14](https://github.com/thejoshwolfe/yauzl/issues/14)
|
||||
* Add `examples/unzip.js`.
|
||||
* 2.3.1
|
||||
* Documentation updates.
|
||||
* 2.3.0
|
||||
* Check that `uncompressedSize` is correct, or else emit an error. [issue #13](https://github.com/thejoshwolfe/yauzl/issues/13)
|
||||
* 2.2.1
|
||||
* Update dependencies.
|
||||
* 2.2.0
|
||||
* Update dependencies.
|
||||
* 2.1.0
|
||||
* Remove dependency on `iconv`.
|
||||
* 2.0.3
|
||||
* Fix crash when trying to read a 0-byte file.
|
||||
* 2.0.2
|
||||
* Fix event behavior after errors.
|
||||
* 2.0.1
|
||||
* Fix bug with using `iconv`.
|
||||
* 2.0.0
|
||||
* Initial release.
|
||||
796
node_modules/.store/extract-zip@2.0.1/node_modules/yauzl/index.js
generated
vendored
Normal file
796
node_modules/.store/extract-zip@2.0.1/node_modules/yauzl/index.js
generated
vendored
Normal file
@@ -0,0 +1,796 @@
|
||||
var fs = require("fs");
|
||||
var zlib = require("zlib");
|
||||
var fd_slicer = require("fd-slicer");
|
||||
var crc32 = require("buffer-crc32");
|
||||
var util = require("util");
|
||||
var EventEmitter = require("events").EventEmitter;
|
||||
var Transform = require("stream").Transform;
|
||||
var PassThrough = require("stream").PassThrough;
|
||||
var Writable = require("stream").Writable;
|
||||
|
||||
exports.open = open;
|
||||
exports.fromFd = fromFd;
|
||||
exports.fromBuffer = fromBuffer;
|
||||
exports.fromRandomAccessReader = fromRandomAccessReader;
|
||||
exports.dosDateTimeToDate = dosDateTimeToDate;
|
||||
exports.validateFileName = validateFileName;
|
||||
exports.ZipFile = ZipFile;
|
||||
exports.Entry = Entry;
|
||||
exports.RandomAccessReader = RandomAccessReader;
|
||||
|
||||
function open(path, options, callback) {
  // Overload: open(path, callback) — shift arguments when options is omitted.
  if (typeof options === "function") {
    callback = options;
    options = null;
  }
  if (options == null) options = {};
  // Fill in the documented defaults without clobbering caller-provided values.
  // (A value of null or undefined counts as "not provided".)
  var defaults = {
    autoClose: true,
    lazyEntries: false,
    decodeStrings: true,
    validateEntrySizes: true,
    strictFileNames: false,
  };
  for (var key in defaults) {
    if (options[key] == null) options[key] = defaults[key];
  }
  if (callback == null) callback = defaultCallback;
  // Open the file ourselves, then delegate to fromFd() for all parsing.
  fs.open(path, "r", function(err, fd) {
    if (err) return callback(err);
    fromFd(fd, options, function(err, zipfile) {
      // On failure, close the fd we opened so it doesn't leak;
      // the caller never received a ZipFile to close it through.
      if (err) fs.close(fd, defaultCallback);
      callback(err, zipfile);
    });
  });
}
|
||||
|
||||
function fromFd(fd, options, callback) {
  // Overload: fromFd(fd, callback) — shift arguments when options is omitted.
  if (typeof options === "function") {
    callback = options;
    options = null;
  }
  if (options == null) options = {};
  // Defaults for this entry point. Note autoClose defaults to FALSE here
  // (unlike open()): the caller owns the fd, so we don't close it by default.
  var defaults = {
    autoClose: false,
    lazyEntries: false,
    decodeStrings: true,
    validateEntrySizes: true,
    strictFileNames: false,
  };
  for (var key in defaults) {
    if (options[key] == null) options[key] = defaults[key];
  }
  if (callback == null) callback = defaultCallback;
  // Determine the total size of the zipfile, then treat the fd as a
  // random access reader via fd-slicer.
  fs.fstat(fd, function(err, stats) {
    if (err) return callback(err);
    var reader = fd_slicer.createFromFd(fd, {autoClose: true});
    fromRandomAccessReader(reader, stats.size, options, callback);
  });
}
|
||||
|
||||
function fromBuffer(buffer, options, callback) {
  // Overload: fromBuffer(buffer, callback) — shift arguments when options is omitted.
  if (typeof options === "function") {
    callback = options;
    options = null;
  }
  if (options == null) options = {};
  // A buffer-backed zipfile has nothing to close, so autoClose is forced
  // off unconditionally (any caller-supplied value is overridden).
  options.autoClose = false;
  var defaults = {
    lazyEntries: false,
    decodeStrings: true,
    validateEntrySizes: true,
    strictFileNames: false,
  };
  for (var key in defaults) {
    if (options[key] == null) options[key] = defaults[key];
  }
  // limit the max chunk size. see https://github.com/thejoshwolfe/yauzl/issues/87
  var reader = fd_slicer.createFromBuffer(buffer, {maxChunkSize: 0x10000});
  fromRandomAccessReader(reader, buffer.length, options, callback);
}
|
||||
|
||||
// Open a zip file through an arbitrary RandomAccessReader implementation.
// totalSize is the byte length of the zip file and is required; it must be
// representable exactly as an IEEE 754 double. Options (all optional):
// autoClose (default true), lazyEntries (default false), decodeStrings
// (default true), validateEntrySizes (default true), strictFileNames
// (default false). callback(err, zipfile) receives a ZipFile on success.
// Argument-validation problems throw synchronously; anything discovered
// while reading the file is reported through callback(err).
function fromRandomAccessReader(reader, totalSize, options, callback) {
  if (typeof options === "function") {
    // options omitted; shift arguments.
    callback = options;
    options = null;
  }
  if (options == null) options = {};
  if (options.autoClose == null) options.autoClose = true;
  if (options.lazyEntries == null) options.lazyEntries = false;
  if (options.decodeStrings == null) options.decodeStrings = true;
  var decodeStrings = !!options.decodeStrings;
  if (options.validateEntrySizes == null) options.validateEntrySizes = true;
  if (options.strictFileNames == null) options.strictFileNames = false;
  if (callback == null) callback = defaultCallback;
  if (typeof totalSize !== "number") throw new Error("expected totalSize parameter to be a number");
  if (totalSize > Number.MAX_SAFE_INTEGER) {
    throw new Error("zip file too large. only file sizes up to 2^52 are supported due to JavaScript's Number type being an IEEE 754 double.");
  }

  // the matching unref() call is in zipfile.close()
  reader.ref();

  // eocdr means End of Central Directory Record.
  // search backwards for the eocdr signature.
  // the last field of the eocdr is a variable-length comment.
  // the comment size is encoded in a 2-byte field in the eocdr, which we can't find without trudging backwards through the comment to find it.
  // as a consequence of this design decision, it's possible to have ambiguous zip file metadata if a coherent eocdr was in the comment.
  // we search backwards for a eocdr signature, and hope that whoever made the zip file was smart enough to forbid the eocdr signature in the comment.
  var eocdrWithoutCommentSize = 22;
  var maxCommentSize = 0xffff; // 2-byte size
  // only the last 22 + 65535 bytes of the file can possibly hold the eocdr
  var bufferSize = Math.min(eocdrWithoutCommentSize + maxCommentSize, totalSize);
  var buffer = newBuffer(bufferSize);
  var bufferReadStart = totalSize - buffer.length;
  readAndAssertNoEof(reader, buffer, 0, bufferSize, bufferReadStart, function(err) {
    if (err) return callback(err);
    // scan backwards so we find the eocdr closest to the end of the file
    for (var i = bufferSize - eocdrWithoutCommentSize; i >= 0; i -= 1) {
      if (buffer.readUInt32LE(i) !== 0x06054b50) continue;
      // found eocdr
      var eocdrBuffer = buffer.slice(i);

      // 0 - End of central directory signature = 0x06054b50
      // 4 - Number of this disk
      var diskNumber = eocdrBuffer.readUInt16LE(4);
      if (diskNumber !== 0) {
        return callback(new Error("multi-disk zip files are not supported: found disk number: " + diskNumber));
      }
      // 6 - Disk where central directory starts
      // 8 - Number of central directory records on this disk
      // 10 - Total number of central directory records
      var entryCount = eocdrBuffer.readUInt16LE(10);
      // 12 - Size of central directory (bytes)
      // 16 - Offset of start of central directory, relative to start of archive
      var centralDirectoryOffset = eocdrBuffer.readUInt32LE(16);
      // 20 - Comment length
      var commentLength = eocdrBuffer.readUInt16LE(20);
      var expectedCommentLength = eocdrBuffer.length - eocdrWithoutCommentSize;
      // a mismatched comment length means this signature match was a false
      // positive (or the file is corrupt); report it rather than keep scanning
      if (commentLength !== expectedCommentLength) {
        return callback(new Error("invalid comment length. expected: " + expectedCommentLength + ". found: " + commentLength));
      }
      // 22 - Comment
      // the encoding is always cp437.
      var comment = decodeStrings ? decodeBuffer(eocdrBuffer, 22, eocdrBuffer.length, false)
                                  : eocdrBuffer.slice(22);

      // 0xffff / 0xffffffff are the sentinel values that redirect us to ZIP64 records
      if (!(entryCount === 0xffff || centralDirectoryOffset === 0xffffffff)) {
        return callback(null, new ZipFile(reader, centralDirectoryOffset, totalSize, entryCount, comment, options.autoClose, options.lazyEntries, decodeStrings, options.validateEntrySizes, options.strictFileNames));
      }

      // ZIP64 format

      // ZIP64 Zip64 end of central directory locator
      var zip64EocdlBuffer = newBuffer(20);
      // the locator sits immediately before the eocdr
      var zip64EocdlOffset = bufferReadStart + i - zip64EocdlBuffer.length;
      readAndAssertNoEof(reader, zip64EocdlBuffer, 0, zip64EocdlBuffer.length, zip64EocdlOffset, function(err) {
        if (err) return callback(err);

        // 0 - zip64 end of central dir locator signature = 0x07064b50
        if (zip64EocdlBuffer.readUInt32LE(0) !== 0x07064b50) {
          return callback(new Error("invalid zip64 end of central directory locator signature"));
        }
        // 4 - number of the disk with the start of the zip64 end of central directory
        // 8 - relative offset of the zip64 end of central directory record
        var zip64EocdrOffset = readUInt64LE(zip64EocdlBuffer, 8);
        // 16 - total number of disks

        // ZIP64 end of central directory record
        var zip64EocdrBuffer = newBuffer(56);
        readAndAssertNoEof(reader, zip64EocdrBuffer, 0, zip64EocdrBuffer.length, zip64EocdrOffset, function(err) {
          if (err) return callback(err);

          // 0 - zip64 end of central dir signature 4 bytes (0x06064b50)
          if (zip64EocdrBuffer.readUInt32LE(0) !== 0x06064b50) {
            return callback(new Error("invalid zip64 end of central directory record signature"));
          }
          // 4 - size of zip64 end of central directory record 8 bytes
          // 12 - version made by 2 bytes
          // 14 - version needed to extract 2 bytes
          // 16 - number of this disk 4 bytes
          // 20 - number of the disk with the start of the central directory 4 bytes
          // 24 - total number of entries in the central directory on this disk 8 bytes
          // 32 - total number of entries in the central directory 8 bytes
          entryCount = readUInt64LE(zip64EocdrBuffer, 32);
          // 40 - size of the central directory 8 bytes
          // 48 - offset of start of central directory with respect to the starting disk number 8 bytes
          centralDirectoryOffset = readUInt64LE(zip64EocdrBuffer, 48);
          // 56 - zip64 extensible data sector (variable size)
          return callback(null, new ZipFile(reader, centralDirectoryOffset, totalSize, entryCount, comment, options.autoClose, options.lazyEntries, decodeStrings, options.validateEntrySizes, options.strictFileNames));
        });
      });
      return;
    }
    // no signature anywhere in the window: not a zip file (or comment trickery)
    callback(new Error("end of central directory record signature not found"));
  });
}
|
||||
|
||||
util.inherits(ZipFile, EventEmitter);
// Represents an open zip archive. Emits "entry", "end", "error", and
// "close" events. Not constructed directly by clients; use fromFd,
// fromBuffer, or fromRandomAccessReader instead.
function ZipFile(reader, centralDirectoryOffset, fileSize, entryCount, comment, autoClose, lazyEntries, decodeStrings, validateEntrySizes, strictFileNames) {
  var zipfile = this;
  EventEmitter.call(zipfile);
  zipfile.reader = reader;
  // reader errors (e.g. while closing the fd) surface on this emitter
  zipfile.reader.on("error", function(err) {
    emitError(zipfile, err);
  });
  // forward close events
  zipfile.reader.once("close", function() {
    zipfile.emit("close");
  });
  // position of the next central directory record to read
  zipfile.readEntryCursor = centralDirectoryOffset;
  zipfile.fileSize = fileSize;
  zipfile.entryCount = entryCount;
  zipfile.comment = comment;
  zipfile.entriesRead = 0;
  zipfile.autoClose = !!autoClose;
  zipfile.lazyEntries = !!lazyEntries;
  zipfile.decodeStrings = !!decodeStrings;
  zipfile.validateEntrySizes = !!validateEntrySizes;
  zipfile.strictFileNames = !!strictFileNames;
  zipfile.isOpen = true;
  zipfile.emittedError = false;

  // eager mode starts pumping "entry" events immediately
  if (!zipfile.lazyEntries) zipfile._readEntry();
}
|
||||
// Release this zipfile's reference on the underlying reader. Idempotent:
// only the first call has any effect. The reader emits "close" (forwarded
// by this emitter) once its reference count drops to zero.
ZipFile.prototype.close = function() {
  if (this.isOpen) {
    this.isOpen = false;
    this.reader.unref();
  }
};
|
||||
|
||||
// Report a fatal error on a zipfile, honoring autoClose first so the
// underlying reader is released before the client observes the error.
function emitErrorAndAutoClose(self, err) {
  if (self.autoClose) {
    self.close();
  }
  emitError(self, err);
}
|
||||
// Emit err as an "error" event on self, at most once per zipfile lifetime.
// After the first error, later ones are silently dropped (emittedError is
// also checked throughout _readEntry to halt in-flight work).
function emitError(self, err) {
  if (!self.emittedError) {
    self.emittedError = true;
    self.emit("error", err);
  }
}
|
||||
|
||||
// Public API for lazyEntries mode: request that the next central directory
// record be read and delivered via a single "entry" (or "end") event.
// Calling this without lazyEntries:true is a programming error.
ZipFile.prototype.readEntry = function() {
  if (!this.lazyEntries) throw new Error("readEntry() called without lazyEntries:true");
  this._readEntry();
};
|
||||
// Internal: read the central directory record at self.readEntryCursor,
// build an Entry, and emit it as an "entry" event. Emits "end" (after
// auto-closing if configured) once entryCount entries have been read.
// Handles the ZIP64 extended information extra field (0x0001) and the
// Info-ZIP Unicode Path extra field (0x7075). All failures are routed
// through emitErrorAndAutoClose.
ZipFile.prototype._readEntry = function() {
  var self = this;
  if (self.entryCount === self.entriesRead) {
    // done with metadata
    setImmediate(function() {
      if (self.autoClose) self.close();
      if (self.emittedError) return;
      self.emit("end");
    });
    return;
  }
  if (self.emittedError) return;
  // 46 bytes = fixed-size portion of a central directory file header
  var buffer = newBuffer(46);
  readAndAssertNoEof(self.reader, buffer, 0, buffer.length, self.readEntryCursor, function(err) {
    if (err) return emitErrorAndAutoClose(self, err);
    if (self.emittedError) return;
    var entry = new Entry();
    // 0 - Central directory file header signature
    var signature = buffer.readUInt32LE(0);
    if (signature !== 0x02014b50) return emitErrorAndAutoClose(self, new Error("invalid central directory file header signature: 0x" + signature.toString(16)));
    // 4 - Version made by
    entry.versionMadeBy = buffer.readUInt16LE(4);
    // 6 - Version needed to extract (minimum)
    entry.versionNeededToExtract = buffer.readUInt16LE(6);
    // 8 - General purpose bit flag
    entry.generalPurposeBitFlag = buffer.readUInt16LE(8);
    // 10 - Compression method
    entry.compressionMethod = buffer.readUInt16LE(10);
    // 12 - File last modification time
    entry.lastModFileTime = buffer.readUInt16LE(12);
    // 14 - File last modification date
    entry.lastModFileDate = buffer.readUInt16LE(14);
    // 16 - CRC-32
    entry.crc32 = buffer.readUInt32LE(16);
    // 20 - Compressed size
    entry.compressedSize = buffer.readUInt32LE(20);
    // 24 - Uncompressed size
    entry.uncompressedSize = buffer.readUInt32LE(24);
    // 28 - File name length (n)
    entry.fileNameLength = buffer.readUInt16LE(28);
    // 30 - Extra field length (m)
    entry.extraFieldLength = buffer.readUInt16LE(30);
    // 32 - File comment length (k)
    entry.fileCommentLength = buffer.readUInt16LE(32);
    // 34 - Disk number where file starts
    // 36 - Internal file attributes
    entry.internalFileAttributes = buffer.readUInt16LE(36);
    // 38 - External file attributes
    entry.externalFileAttributes = buffer.readUInt32LE(38);
    // 42 - Relative offset of local file header
    entry.relativeOffsetOfLocalHeader = buffer.readUInt32LE(42);

    // general purpose bit 6 = strong encryption
    if (entry.generalPurposeBitFlag & 0x40) return emitErrorAndAutoClose(self, new Error("strong encryption is not supported"));

    self.readEntryCursor += 46;

    // read the three variable-length fields (name, extra, comment) in one go
    buffer = newBuffer(entry.fileNameLength + entry.extraFieldLength + entry.fileCommentLength);
    readAndAssertNoEof(self.reader, buffer, 0, buffer.length, self.readEntryCursor, function(err) {
      if (err) return emitErrorAndAutoClose(self, err);
      if (self.emittedError) return;
      // 46 - File name
      // general purpose bit 11 declares the name/comment to be UTF-8
      var isUtf8 = (entry.generalPurposeBitFlag & 0x800) !== 0;
      entry.fileName = self.decodeStrings ? decodeBuffer(buffer, 0, entry.fileNameLength, isUtf8)
                                          : buffer.slice(0, entry.fileNameLength);

      // 46+n - Extra field
      var fileCommentStart = entry.fileNameLength + entry.extraFieldLength;
      var extraFieldBuffer = buffer.slice(entry.fileNameLength, fileCommentStart);
      entry.extraFields = [];
      var i = 0;
      // each extra field record is: 2-byte id, 2-byte size, then data
      while (i < extraFieldBuffer.length - 3) {
        var headerId = extraFieldBuffer.readUInt16LE(i + 0);
        var dataSize = extraFieldBuffer.readUInt16LE(i + 2);
        var dataStart = i + 4;
        var dataEnd = dataStart + dataSize;
        if (dataEnd > extraFieldBuffer.length) return emitErrorAndAutoClose(self, new Error("extra field length exceeds extra field buffer size"));
        var dataBuffer = newBuffer(dataSize);
        extraFieldBuffer.copy(dataBuffer, 0, dataStart, dataEnd);
        entry.extraFields.push({
          id: headerId,
          data: dataBuffer,
        });
        i = dataEnd;
      }

      // 46+n+m - File comment
      entry.fileComment = self.decodeStrings ? decodeBuffer(buffer, fileCommentStart, fileCommentStart + entry.fileCommentLength, isUtf8)
                                             : buffer.slice(fileCommentStart, fileCommentStart + entry.fileCommentLength);
      // compatibility hack for https://github.com/thejoshwolfe/yauzl/issues/47
      entry.comment = entry.fileComment;

      self.readEntryCursor += buffer.length;
      self.entriesRead += 1;

      // 0xffffffff sentinels mean the real values live in the ZIP64 extra field
      if (entry.uncompressedSize === 0xffffffff ||
          entry.compressedSize === 0xffffffff ||
          entry.relativeOffsetOfLocalHeader === 0xffffffff) {
        // ZIP64 format
        // find the Zip64 Extended Information Extra Field
        var zip64EiefBuffer = null;
        for (var i = 0; i < entry.extraFields.length; i++) {
          var extraField = entry.extraFields[i];
          if (extraField.id === 0x0001) {
            zip64EiefBuffer = extraField.data;
            break;
          }
        }
        if (zip64EiefBuffer == null) {
          return emitErrorAndAutoClose(self, new Error("expected zip64 extended information extra field"));
        }
        // the field only contains values whose 32-bit slots were saturated,
        // in this fixed order, so the read index only advances as needed
        var index = 0;
        // 0 - Original Size 8 bytes
        if (entry.uncompressedSize === 0xffffffff) {
          if (index + 8 > zip64EiefBuffer.length) {
            return emitErrorAndAutoClose(self, new Error("zip64 extended information extra field does not include uncompressed size"));
          }
          entry.uncompressedSize = readUInt64LE(zip64EiefBuffer, index);
          index += 8;
        }
        // 8 - Compressed Size 8 bytes
        if (entry.compressedSize === 0xffffffff) {
          if (index + 8 > zip64EiefBuffer.length) {
            return emitErrorAndAutoClose(self, new Error("zip64 extended information extra field does not include compressed size"));
          }
          entry.compressedSize = readUInt64LE(zip64EiefBuffer, index);
          index += 8;
        }
        // 16 - Relative Header Offset 8 bytes
        if (entry.relativeOffsetOfLocalHeader === 0xffffffff) {
          if (index + 8 > zip64EiefBuffer.length) {
            return emitErrorAndAutoClose(self, new Error("zip64 extended information extra field does not include relative header offset"));
          }
          entry.relativeOffsetOfLocalHeader = readUInt64LE(zip64EiefBuffer, index);
          index += 8;
        }
        // 24 - Disk Start Number 4 bytes
      }

      // check for Info-ZIP Unicode Path Extra Field (0x7075)
      // see https://github.com/thejoshwolfe/yauzl/issues/33
      if (self.decodeStrings) {
        for (var i = 0; i < entry.extraFields.length; i++) {
          var extraField = entry.extraFields[i];
          if (extraField.id === 0x7075) {
            if (extraField.data.length < 6) {
              // too short to be meaningful
              continue;
            }
            // Version 1 byte version of this extra field, currently 1
            if (extraField.data.readUInt8(0) !== 1) {
              // > Changes may not be backward compatible so this extra
              // > field should not be used if the version is not recognized.
              continue;
            }
            // NameCRC32 4 bytes File Name Field CRC32 Checksum
            var oldNameCrc32 = extraField.data.readUInt32LE(1);
            if (crc32.unsigned(buffer.slice(0, entry.fileNameLength)) !== oldNameCrc32) {
              // > If the CRC check fails, this UTF-8 Path Extra Field should be
              // > ignored and the File Name field in the header should be used instead.
              continue;
            }
            // UnicodeName Variable UTF-8 version of the entry File Name
            entry.fileName = decodeBuffer(extraField.data, 5, extraField.data.length, true);
            break;
          }
        }
      }

      // validate file size
      // (only possible for stored entries, where compressed size must equal
      // uncompressed size plus any encryption header)
      if (self.validateEntrySizes && entry.compressionMethod === 0) {
        var expectedCompressedSize = entry.uncompressedSize;
        if (entry.isEncrypted()) {
          // traditional encryption prefixes the file data with a header
          expectedCompressedSize += 12;
        }
        if (entry.compressedSize !== expectedCompressedSize) {
          var msg = "compressed/uncompressed size mismatch for stored file: " + entry.compressedSize + " != " + entry.uncompressedSize;
          return emitErrorAndAutoClose(self, new Error(msg));
        }
      }

      if (self.decodeStrings) {
        if (!self.strictFileNames) {
          // allow backslash
          entry.fileName = entry.fileName.replace(/\\/g, "/");
        }
        // NOTE(review): validateFileName is declared with a single parameter;
        // the second argument (self.validateFileNameOptions, which is never
        // assigned anywhere visible in this file) is ignored — confirm intent.
        var errorMessage = validateFileName(entry.fileName, self.validateFileNameOptions);
        if (errorMessage != null) return emitErrorAndAutoClose(self, new Error(errorMessage));
      }
      self.emit("entry", entry);

      // eager mode keeps pumping until "end"; lazy mode waits for readEntry()
      if (!self.lazyEntries) self._readEntry();
    });
  });
};
|
||||
|
||||
// Open a readable stream over one entry's file data.
// options (all optional): decrypt (may only be false, and only for
// encrypted entries), decompress (true/false, only for compressed entries),
// start/end (byte range within the raw stored bytes; only allowed when the
// data will be neither inflated nor decrypted). callback(err, readStream).
// Caller mistakes throw synchronously; problems with the zipfile itself are
// delivered through the callback.
ZipFile.prototype.openReadStream = function(entry, options, callback) {
  var self = this;
  // parameter validation
  var relativeStart = 0;
  var relativeEnd = entry.compressedSize;
  if (callback == null) {
    // options omitted; shift arguments.
    callback = options;
    options = {};
  } else {
    // validate options that the caller has no excuse to get wrong
    if (options.decrypt != null) {
      if (!entry.isEncrypted()) {
        throw new Error("options.decrypt can only be specified for encrypted entries");
      }
      if (options.decrypt !== false) throw new Error("invalid options.decrypt value: " + options.decrypt);
      if (entry.isCompressed()) {
        if (options.decompress !== false) throw new Error("entry is encrypted and compressed, and options.decompress !== false");
      }
    }
    if (options.decompress != null) {
      if (!entry.isCompressed()) {
        throw new Error("options.decompress can only be specified for compressed entries");
      }
      if (!(options.decompress === false || options.decompress === true)) {
        throw new Error("invalid options.decompress value: " + options.decompress);
      }
    }
    if (options.start != null || options.end != null) {
      // a byte range only makes sense over the raw stored bytes
      if (entry.isCompressed() && options.decompress !== false) {
        throw new Error("start/end range not allowed for compressed entry without options.decompress === false");
      }
      if (entry.isEncrypted() && options.decrypt !== false) {
        throw new Error("start/end range not allowed for encrypted entry without options.decrypt === false");
      }
    }
    if (options.start != null) {
      relativeStart = options.start;
      if (relativeStart < 0) throw new Error("options.start < 0");
      if (relativeStart > entry.compressedSize) throw new Error("options.start > entry.compressedSize");
    }
    if (options.end != null) {
      relativeEnd = options.end;
      if (relativeEnd < 0) throw new Error("options.end < 0");
      if (relativeEnd > entry.compressedSize) throw new Error("options.end > entry.compressedSize");
      if (relativeEnd < relativeStart) throw new Error("options.end < options.start");
    }
  }
  // any further errors can either be caused by the zipfile,
  // or were introduced in a minor version of yauzl,
  // so should be passed to the client rather than thrown.
  if (!self.isOpen) return callback(new Error("closed"));
  if (entry.isEncrypted()) {
    if (options.decrypt !== false) return callback(new Error("entry is encrypted, and options.decrypt !== false"));
  }
  // make sure we don't lose the fd before we open the actual read stream
  self.reader.ref();
  // 30 bytes = fixed-size portion of the local file header
  var buffer = newBuffer(30);
  readAndAssertNoEof(self.reader, buffer, 0, buffer.length, entry.relativeOffsetOfLocalHeader, function(err) {
    // the finally clause releases the ref taken above, on every exit path
    try {
      if (err) return callback(err);
      // 0 - Local file header signature = 0x04034b50
      var signature = buffer.readUInt32LE(0);
      if (signature !== 0x04034b50) {
        return callback(new Error("invalid local file header signature: 0x" + signature.toString(16)));
      }
      // all this should be redundant
      // 4 - Version needed to extract (minimum)
      // 6 - General purpose bit flag
      // 8 - Compression method
      // 10 - File last modification time
      // 12 - File last modification date
      // 14 - CRC-32
      // 18 - Compressed size
      // 22 - Uncompressed size
      // 26 - File name length (n)
      var fileNameLength = buffer.readUInt16LE(26);
      // 28 - Extra field length (m)
      var extraFieldLength = buffer.readUInt16LE(28);
      // 30 - File name
      // 30+n - Extra field
      // the local header's name/extra lengths may differ from the central
      // directory's, so they must be read here to locate the file data
      var localFileHeaderEnd = entry.relativeOffsetOfLocalHeader + buffer.length + fileNameLength + extraFieldLength;
      var decompress;
      if (entry.compressionMethod === 0) {
        // 0 - The file is stored (no compression)
        decompress = false;
      } else if (entry.compressionMethod === 8) {
        // 8 - The file is Deflated
        decompress = options.decompress != null ? options.decompress : true;
      } else {
        return callback(new Error("unsupported compression method: " + entry.compressionMethod));
      }
      var fileDataStart = localFileHeaderEnd;
      var fileDataEnd = fileDataStart + entry.compressedSize;
      if (entry.compressedSize !== 0) {
        // bounds check now, because the read streams will probably not complain loud enough.
        // since we're dealing with an unsigned offset plus an unsigned size,
        // we only have 1 thing to check for.
        if (fileDataEnd > self.fileSize) {
          return callback(new Error("file data overflows file bounds: " +
              fileDataStart + " + " + entry.compressedSize + " > " + self.fileSize));
        }
      }
      var readStream = self.reader.createReadStream({
        start: fileDataStart + relativeStart,
        end: fileDataStart + relativeEnd,
      });
      var endpointStream = readStream;
      if (decompress) {
        var destroyed = false;
        var inflateFilter = zlib.createInflateRaw();
        readStream.on("error", function(err) {
          // setImmediate here because errors can be emitted during the first call to pipe()
          setImmediate(function() {
            if (!destroyed) inflateFilter.emit("error", err);
          });
        });
        readStream.pipe(inflateFilter);

        if (self.validateEntrySizes) {
          // assert the inflated output is exactly uncompressedSize bytes
          endpointStream = new AssertByteCountStream(entry.uncompressedSize);
          inflateFilter.on("error", function(err) {
            // forward zlib errors to the client-visible stream
            setImmediate(function() {
              if (!destroyed) endpointStream.emit("error", err);
            });
          });
          inflateFilter.pipe(endpointStream);
        } else {
          // the zlib filter is the client-visible stream
          endpointStream = inflateFilter;
        }
        // this is part of yauzl's API, so implement this function on the client-visible stream
        endpointStream.destroy = function() {
          destroyed = true;
          if (inflateFilter !== endpointStream) inflateFilter.unpipe(endpointStream);
          readStream.unpipe(inflateFilter);
          // TODO: the inflateFilter may cause a memory leak. see Issue #27.
          readStream.destroy();
        };
      }
      callback(null, endpointStream);
    } finally {
      self.reader.unref();
    }
  });
};
|
||||
|
||||
// One central directory record. Fields are filled in field-by-field by
// ZipFile.prototype._readEntry; this constructor intentionally does nothing.
function Entry() {
}

// Convert the entry's DOS-format date/time fields into a JavaScript Date.
Entry.prototype.getLastModDate = function() {
  return dosDateTimeToDate(this.lastModFileDate, this.lastModFileTime);
};

// General purpose bit 0 marks the entry as encrypted.
Entry.prototype.isEncrypted = function() {
  return Boolean(this.generalPurposeBitFlag & 0x1);
};

// Compression method 8 is DEFLATE; anything else is treated as not compressed.
Entry.prototype.isCompressed = function() {
  return this.compressionMethod === 8;
};
|
||||
|
||||
// Convert an MS-DOS date/time pair (as stored in zip headers) to a local Date.
// date bits: [15:9] = year - 1980, [8:5] = month (1-12), [4:0] = day (1-31)
// time bits: [15:11] = hour, [10:5] = minute, [4:0] = second / 2
function dosDateTimeToDate(date, time) {
  var day = date & 0x1f; // 1-31
  var month = ((date >> 5) & 0xf) - 1; // Date months are 0-based
  var year = ((date >> 9) & 0x7f) + 1980; // 1980-2107

  var hour = (time >> 11) & 0x1f; // 0-23
  var minute = (time >> 5) & 0x3f; // 0-59
  var second = (time & 0x1f) << 1; // stored at 2-second resolution

  // milliseconds are not representable in the DOS format; always 0
  return new Date(year, month, day, hour, minute, second, 0);
}
|
||||
|
||||
// Screen an entry's file name for patterns that are dangerous to extract.
// Returns a human-readable error message string, or null when acceptable.
// (Backslashes are normally rewritten to "/" before this runs, unless
// strictFileNames was requested.)
function validateFileName(fileName) {
  var problem = null;
  if (fileName.indexOf("\\") !== -1) {
    problem = "invalid characters in fileName: " + fileName;
  } else if (/^[a-zA-Z]:/.test(fileName) || /^\//.test(fileName)) {
    // drive-letter (C:...) or root-relative absolute path
    problem = "absolute path: " + fileName;
  } else if (fileName.split("/").indexOf("..") !== -1) {
    // any ".." path segment could escape the extraction directory
    problem = "invalid relative path: " + fileName;
  }
  return problem;
}
|
||||
|
||||
// Read exactly `length` bytes at `position` into buffer[offset..], treating
// any short read as an error. callback(err) — the bytes land in `buffer`.
function readAndAssertNoEof(reader, buffer, offset, length, position, callback) {
  if (length === 0) {
    // fs.read will throw an out-of-bounds error if you try to read 0 bytes
    // from a 0 byte file; short-circuit (asynchronously, to keep the
    // callback contract consistent).
    return setImmediate(function() { callback(null, newBuffer(0)); });
  }
  reader.read(buffer, offset, length, position, function(err, bytesRead) {
    if (err) return callback(err);
    if (bytesRead < length) return callback(new Error("unexpected EOF"));
    callback();
  });
}
|
||||
|
||||
util.inherits(AssertByteCountStream, Transform);
// Pass-through Transform that errors if the total number of bytes flowing
// through it differs from the expected count: too many bytes fail during
// _transform, too few fail at flush time.
function AssertByteCountStream(byteCount) {
  Transform.call(this);
  this.actualByteCount = 0;
  this.expectedByteCount = byteCount;
}
AssertByteCountStream.prototype._transform = function(chunk, encoding, cb) {
  this.actualByteCount += chunk.length;
  if (this.actualByteCount <= this.expectedByteCount) {
    cb(null, chunk);
  } else {
    cb(new Error("too many bytes in the stream. expected " + this.expectedByteCount + ". got at least " + this.actualByteCount));
  }
};
AssertByteCountStream.prototype._flush = function(cb) {
  if (this.actualByteCount >= this.expectedByteCount) {
    cb();
  } else {
    cb(new Error("not enough bytes in the stream. expected " + this.expectedByteCount + ". got only " + this.actualByteCount));
  }
};
|
||||
|
||||
util.inherits(RandomAccessReader, EventEmitter);
// Abstract base class for random-access input sources. Subclasses must
// implement _readStreamForRange(start, end); read() and close() have
// default implementations that may be overridden. Instances are reference
// counted: the underlying resource is closed when refCount returns to 0.
function RandomAccessReader() {
  EventEmitter.call(this);
  this.refCount = 0;
}
// Take a reference; every ref() must be paired with an unref().
RandomAccessReader.prototype.ref = function() {
  this.refCount += 1;
};
// Drop a reference. When the count reaches 0 the reader is close()d and
// "close" is emitted ("error" instead, if closing fails). Going below 0
// indicates a ref/unref pairing bug and throws.
RandomAccessReader.prototype.unref = function() {
  var self = this;
  self.refCount -= 1;

  if (self.refCount > 0) return;
  if (self.refCount < 0) throw new Error("invalid unref");

  self.close(onCloseDone);

  function onCloseDone(err) {
    if (err) return self.emit('error', err);
    self.emit('close');
  }
};
// Build a client-facing stream over the byte range [options.start,
// options.end). The returned stream holds a reference on this reader until
// it flushes or is destroy()ed, and asserts the range yields exactly
// end - start bytes.
RandomAccessReader.prototype.createReadStream = function(options) {
  var start = options.start;
  var end = options.end;
  if (start === end) {
    // zero-length range: hand back an already-ended empty stream
    var emptyStream = new PassThrough();
    setImmediate(function() {
      emptyStream.end();
    });
    return emptyStream;
  }
  var stream = this._readStreamForRange(start, end);

  var destroyed = false;
  // holds one ref on this reader for the lifetime of the pipeline
  var refUnrefFilter = new RefUnrefFilter(this);
  stream.on("error", function(err) {
    // setImmediate because errors can be emitted during the first pipe() call
    setImmediate(function() {
      if (!destroyed) refUnrefFilter.emit("error", err);
    });
  });
  refUnrefFilter.destroy = function() {
    stream.unpipe(refUnrefFilter);
    refUnrefFilter.unref();
    stream.destroy();
  };

  // enforce that the range delivers exactly end - start bytes
  var byteCounter = new AssertByteCountStream(end - start);
  refUnrefFilter.on("error", function(err) {
    setImmediate(function() {
      if (!destroyed) byteCounter.emit("error", err);
    });
  });
  byteCounter.destroy = function() {
    destroyed = true;
    refUnrefFilter.unpipe(byteCounter);
    refUnrefFilter.destroy();
  };

  // pipe() returns its destination, so the client receives the byteCounter
  return stream.pipe(refUnrefFilter).pipe(byteCounter);
};
// Subclass responsibility: return a readable stream over bytes [start, end).
RandomAccessReader.prototype._readStreamForRange = function(start, end) {
  throw new Error("not implemented");
};
// Default read() built on createReadStream: copy `length` bytes at
// `position` into buffer starting at `offset`, then callback(err).
RandomAccessReader.prototype.read = function(buffer, offset, length, position, callback) {
  var readStream = this.createReadStream({start: position, end: position + length});
  var writeStream = new Writable();
  var written = 0;
  writeStream._write = function(chunk, encoding, cb) {
    chunk.copy(buffer, offset + written, 0, chunk.length);
    written += chunk.length;
    cb();
  };
  writeStream.on("finish", callback);
  readStream.on("error", function(error) {
    callback(error);
  });
  readStream.pipe(writeStream);
};
// Default close() has nothing to release; it just completes asynchronously.
// Subclasses holding real resources (e.g. a file descriptor) override this.
RandomAccessReader.prototype.close = function(callback) {
  setImmediate(callback);
};
|
||||
|
||||
util.inherits(RefUnrefFilter, PassThrough);
// PassThrough stream that takes one reference on `context` (a
// RandomAccessReader) at construction and releases it exactly once, either
// when the stream flushes or when unref() is called explicitly (e.g. from
// a destroy() handler).
function RefUnrefFilter(context) {
  PassThrough.call(this);
  this.context = context;
  this.context.ref();
  this.unreffedYet = false;
}
// End of stream: release our reference, then signal completion.
RefUnrefFilter.prototype._flush = function(cb) {
  this.unref();
  cb();
};
// Release the reference at most once, no matter how many times this runs.
RefUnrefFilter.prototype.unref = function(cb) {
  if (!this.unreffedYet) {
    this.unreffedYet = true;
    this.context.unref();
  }
};
|
||||
|
||||
// Code page 437 lookup table: index = byte value, value = decoded character.
var cp437 = '\u0000☺☻♥♦♣♠•◘○◙♂♀♪♫☼►◄↕‼¶§▬↨↑↓→←∟↔▲▼ !"#$%&\'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~⌂ÇüéâäàåçêëèïîìÄÅÉæÆôöòûùÿÖÜ¢£¥₧ƒáíóúñѪº¿⌐¬½¼¡«»░▒▓│┤╡╢╖╕╣║╗╝╜╛┐└┴┬├─┼╞╟╚╔╩╦╠═╬╧╨╤╥╙╘╒╓╫╪┘┌█▄▌▐▀αßΓπΣσµτΦΘΩδ∞φε∩≡±≥≤⌠⌡÷≈°∙·√ⁿ²■ ';
// Decode buffer[start..end) into a string: UTF-8 when isUtf8 is set,
// otherwise each byte is mapped through the cp437 table (the zip format's
// legacy default encoding).
function decodeBuffer(buffer, start, end, isUtf8) {
  if (isUtf8) return buffer.toString("utf8", start, end);
  var chars = [];
  for (var index = start; index < end; index++) {
    chars.push(cp437[buffer[index]]);
  }
  return chars.join("");
}
|
||||
|
||||
// Read a little-endian unsigned 64-bit integer as a JavaScript Number.
// Numbers are IEEE 754 doubles with 53 bits of exact integer precision, so
// values beyond 2^53 lose precision; since 53 bits is a whole lot more than
// 32, we do our best anyway. The two 32-bit halves are combined
// arithmetically because JavaScript bit shifts truncate to 32 bits.
function readUInt64LE(buffer, offset) {
  var low = buffer.readUInt32LE(offset);
  var high = buffer.readUInt32LE(offset + 4);
  return high * 0x100000000 + low;
  // callers bounds-check results of this function against the total file
  // size, which catches any overflow, because the total file size was
  // already verified to be within reason.
}
|
||||
|
||||
// Node 10 deprecated new Buffer(); use Buffer.allocUnsafe where available
// (the result is uninitialized either way — every call site fully
// overwrites the buffer with read data).
var newBuffer = typeof Buffer.allocUnsafe === "function"
  ? function(len) { return Buffer.allocUnsafe(len); }
  : function(len) { return new Buffer(len); };
|
||||
|
||||
// Fallback for entry points invoked without a callback: surface failures
// by throwing instead of silently swallowing them.
function defaultCallback(err) {
  if (!err) return;
  throw err;
}
|
||||
43
node_modules/.store/extract-zip@2.0.1/node_modules/yauzl/package.json
generated
vendored
Normal file
43
node_modules/.store/extract-zip@2.0.1/node_modules/yauzl/package.json
generated
vendored
Normal file
@@ -0,0 +1,43 @@
|
||||
{
|
||||
"name": "yauzl",
|
||||
"version": "2.10.0",
|
||||
"description": "yet another unzip library for node",
|
||||
"main": "index.js",
|
||||
"scripts": {
|
||||
"test": "node test/test.js",
|
||||
"test-cov": "istanbul cover test/test.js",
|
||||
"test-travis": "istanbul cover --report lcovonly test/test.js"
|
||||
},
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "https://github.com/thejoshwolfe/yauzl.git"
|
||||
},
|
||||
"keywords": [
|
||||
"unzip",
|
||||
"zip",
|
||||
"stream",
|
||||
"archive",
|
||||
"file"
|
||||
],
|
||||
"author": "Josh Wolfe <thejoshwolfe@gmail.com>",
|
||||
"license": "MIT",
|
||||
"bugs": {
|
||||
"url": "https://github.com/thejoshwolfe/yauzl/issues"
|
||||
},
|
||||
"homepage": "https://github.com/thejoshwolfe/yauzl",
|
||||
"dependencies": {
|
||||
"fd-slicer": "~1.1.0",
|
||||
"buffer-crc32": "~0.2.3"
|
||||
},
|
||||
"devDependencies": {
|
||||
"bl": "~1.0.0",
|
||||
"istanbul": "~0.3.4",
|
||||
"pend": "~1.2.0"
|
||||
},
|
||||
"files": [
|
||||
"index.js"
|
||||
],
|
||||
"__npminstall_done": true,
|
||||
"_from": "yauzl@2.10.0",
|
||||
"_resolved": "https://registry.npmmirror.com/yauzl/-/yauzl-2.10.0.tgz"
|
||||
}
|
||||
Reference in New Issue
Block a user