Current Dev State

Tim Lorsbach
2025-06-23 20:13:54 +02:00
parent b4f9bb277d
commit ded50edaa2
22617 changed files with 4345095 additions and 174 deletions

static/js/ketcher2/node_modules/guppy-cli/README.md generated vendored Normal file

@ -0,0 +1,71 @@
# guppy-cli
> Install git-hooks for use with [git-guppy](https://github.com/therealklanni/git-guppy).
Special git-hooks that invoke git-guppy can be installed using the commandline utility or by requiring guppy-cli as a dependency and calling the provided methods.
## Commandline Usage
Install with `npm install -g guppy-cli`
```
Usage: guppy <hookname>|all [-d <path>]
Commands:
hookname Install a git-hook by name, for a list, see --hooks
all Install all available git-hooks (use caution)
Options:
-d, --dest Destination path for git-hook (default: ./.git/hooks/)
--hooks Print a complete list of git-hooks
-h, --help Show help
--version Show version number
Examples:
guppy pre-commit
guppy pre-commit -d some/where
guppy all
```
Existing git-hooks will be backed up the first time. If a backup already exists, it will not be overwritten. The extension `.guppy` will be appended to the filename of existing git-hooks when backing up.
## Pre-packaged installers
For convenience, each type of git-hook has an installer package.
Install any package as a dev-dependency to install the associated git-hook automatically.
```
npm install --save-dev <package>
```
- [guppy-applypatch-msg](https://github.com/therealklanni/guppy-applypatch-msg)
- [guppy-commit-msg](https://github.com/therealklanni/guppy-commit-msg)
- [guppy-post-applypatch](https://github.com/therealklanni/guppy-post-applypatch)
- [guppy-post-checkout](https://github.com/therealklanni/guppy-post-checkout)
- [guppy-post-commit](https://github.com/therealklanni/guppy-post-commit)
- [guppy-post-merge](https://github.com/therealklanni/guppy-post-merge)
- [guppy-post-receive](https://github.com/therealklanni/guppy-post-receive)
- [guppy-post-rewrite](https://github.com/therealklanni/guppy-post-rewrite)
- [guppy-post-update](https://github.com/therealklanni/guppy-post-update)
- [guppy-pre-applypatch](https://github.com/therealklanni/guppy-pre-applypatch)
- [guppy-pre-auto-gc](https://github.com/therealklanni/guppy-pre-auto-gc)
- [guppy-pre-commit](https://github.com/therealklanni/guppy-pre-commit)
- [guppy-pre-push](https://github.com/therealklanni/guppy-pre-push)
- [guppy-pre-rebase](https://github.com/therealklanni/guppy-pre-rebase)
- [guppy-pre-receive](https://github.com/therealklanni/guppy-pre-receive)
- [guppy-prepare-commit-msg](https://github.com/therealklanni/guppy-prepare-commit-msg)
- [guppy-update](https://github.com/therealklanni/guppy-update)
## Dependency Usage
To use guppy-cli as a dependency, install with:
```
npm install --save guppy-cli
```
Available methods:
- gup.install(*hookname*, *destination*, *callback(err, result)*) - Install the named hook to *destination*.
- gup.installAll(*destination*, *callback(err)*) - Install all hooks to *destination*.
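For example, a package's own install script might call these methods directly. This is a rough sketch; the explicit `./.git/hooks/` destination is only illustrative, since both methods fall back to the current repository's hooks directory when it is omitted:
```js
var gup = require('guppy-cli');

// Install a single hook into the repository's default hooks directory.
gup.install('pre-commit', function (err, installedPath) {
  if (err) throw err;
  console.log('Installed git-hook: ' + installedPath);
});

// Install every supported hook into an explicit destination.
gup.installAll('./.git/hooks/', function (err) {
  if (err) throw err;
  console.log('Installed all git-hooks');
});
```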

static/js/ketcher2/node_modules/guppy-cli/bin/index.js generated vendored Executable file

@ -0,0 +1,92 @@
#!/usr/bin/env node
/* global error */
'use strict';
require('shelljs/global');
var fs = require("fs");
var gup = require('../');
var yargs = require('yargs')
.usage('Usage: $0 <hookname>|all [-d <path>]')
.command('hookname', 'Install a git-hook by name, for a list, see --hooks')
.command('all', 'Install all available git-hooks (use caution)')
.alias('d', 'dest')
.describe('d', 'Destination path for git-hook (default: ./.git/hooks/)')
.describe('hooks', 'Print a complete list of git-hooks')
.help('h')
.alias('h', 'help')
.example('$0 pre-commit')
.example('$0 pre-commit -d some/where')
.example('$0 all')
.version(function() {
return require('../package').version;
})
.epilogue('Existing git-hooks will be backed up the first time. If a backup ' +
'already exists, it will not be overwritten.' +
'\n\nhttps://github.com/therealklanni/guppy-cli');
var argv = yargs.argv;
var hooks = ' applypatch-msg, commit-msg, post-applypatch, post-checkout, ' +
'post-commit,\n post-merge, post-receive, post-rewrite, post-update, ' +
'pre-applypatch,\n pre-auto-gc, pre-commit, pre-push, pre-rebase, ' +
'pre-receive,\n prepare-commit-msg, update';
if (argv.hooks) {
console.log('Available git-hooks are:\n' + hooks);
exit(0);
}
var dest;
if (argv.dest) {
dest = argv.dest;
} else {
var topLevel = exec('git rev-parse --show-toplevel', { silent: true })
.output.slice(0, -1);
if (test('-f', topLevel + '/.git')) {
// this is a sub module
var buf = fs.readFileSync(topLevel + '/.git', "utf8").trim();
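// a submodule's .git is a plain file containing a 'gitdir: <path>' pointer to the real git directory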
if (buf.substr(0,6) === 'gitdir') {
topLevel = topLevel + '/' + buf.substr(8).trim();
}
dest = topLevel + '/hooks/';
} else {
dest = topLevel + '/.git/hooks/';
}
if (error()) {
console.error('fatal: Not a git repository (or any of the parent directories): .git');
exit(1);
}
}
var hook = argv._[0];
if (!hook) {
yargs.showHelp();
exit(2);
}
if (hook.toLowerCase() === 'all') {
gup.installAll(dest, function (err) {
if (err) {
console.error('fatal: ' + err.message);
exit(3);
}
console.log('guppy: Installed all git-hooks to: ' + dest);
exit(0);
});
} else if (hooks.indexOf(hook) !== -1) {
gup.install(hook, dest, function (err, results) {
if (err) {
console.error('fatal: ' + err.message);
exit(4);
}
console.log('guppy: Installed git-hook: ' + results);
exit(0);
});
} else {
console.error('fatal: Invalid hook name: ' + hook);
exit(5);
}

static/js/ketcher2/node_modules/guppy-cli/index.js generated vendored Normal file

@ -0,0 +1,78 @@
/* global chmod */
'use strict';
require('shelljs/global');
var fs = require('vinyl-fs');
var map = require('map-stream');
var async = require('async');
var path = require('path');
var hooks = [
'applypatch-msg',
'commit-msg',
'post-applypatch',
'post-checkout',
'post-commit',
'post-merge',
'post-receive',
'post-rewrite',
'post-update',
'pre-applypatch',
'pre-auto-gc',
'pre-commit',
'pre-push',
'pre-rebase',
'pre-receive',
'prepare-commit-msg',
'update'
];
function install(hook, dest, cb) {
cb = cb || dest;
if (!cb) throw new Error('Callback must be supplied.');
if (typeof cb !== 'function') throw new Error('Callback must be a function.');
if (hooks.indexOf(hook) === -1) return cb(new Error('Invalid hook name.'));
dest = ((typeof dest === 'function' ? null : dest) ||
exec('git rev-parse --show-toplevel')
.output.slice(0, -1) + '/.git/hooks/');
var destHook = path.join(dest, hook);
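// back up an existing hook once as '<hook>.guppy'; an earlier backup is never overwritten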
if (test('-f', destHook)) {
var bakDest = destHook + '.guppy';
if (!test('-f', bakDest)) {
mv(destHook, bakDest);
}
}
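// copy the bundled hookfile template to '<dest>/<hook>' and mark it executable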
return fs.src(path.join(__dirname, 'scripts/hookfile'))
.pipe(map(function(file, next) {
file.path = file.path.replace('hookfile', hook);
next(null, file);
}))
.pipe(fs.dest(dest))
.on('finish', function () {
chmod('u+x', destHook);
cb(null, destHook);
})
.on('error', function (err) {
cb(err);
});
}
function installAll(dest, cb) {
async.parallel(
hooks.map(function (hook) {
return function (next) {
return install(hook, dest, next);
};
}),
cb
);
}
module.exports.install = install;
module.exports.installAll = installAll;


@ -0,0 +1,56 @@
'use strict';
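// insert a '-' before each upper-case letter that follows a lower-case character,
// so existing camelCase word boundaries survive the separator handling below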
function preserveCamelCase(str) {
var isLastCharLower = false;
for (var i = 0; i < str.length; i++) {
var c = str.charAt(i);
if (isLastCharLower && (/[a-zA-Z]/).test(c) && c.toUpperCase() === c) {
str = str.substr(0, i) + '-' + str.substr(i);
isLastCharLower = false;
i++;
} else {
isLastCharLower = (c.toLowerCase() === c);
}
}
return str;
}
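// join all arguments with '-', then collapse the '_', '.', '-' and space separators into camelCase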
module.exports = function () {
var str = [].map.call(arguments, function (str) {
return str.trim();
}).filter(function (str) {
return str.length;
}).join('-');
if (!str.length) {
return '';
}
if (str.length === 1) {
return str;
}
if (!(/[_.\- ]+/).test(str)) {
if (str === str.toUpperCase()) {
return str.toLowerCase();
}
if (str[0] !== str[0].toLowerCase()) {
return str[0].toLowerCase() + str.slice(1);
}
return str;
}
str = preserveCamelCase(str);
return str
.replace(/^[_.\- ]+/, '')
.toLowerCase()
.replace(/[_.\- ]+(\w|$)/g, function (m, p1) {
return p1.toUpperCase();
});
};


@ -0,0 +1,21 @@
The MIT License (MIT)
Copyright (c) Sindre Sorhus <sindresorhus@gmail.com> (sindresorhus.com)
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.


@ -0,0 +1,71 @@
{
"_from": "camelcase@^2.0.1",
"_id": "camelcase@2.1.1",
"_inBundle": false,
"_integrity": "sha1-fB0W1nmhu+WcoCys7PsBHiAfWh8=",
"_location": "/guppy-cli/camelcase",
"_phantomChildren": {},
"_requested": {
"type": "range",
"registry": true,
"raw": "camelcase@^2.0.1",
"name": "camelcase",
"escapedName": "camelcase",
"rawSpec": "^2.0.1",
"saveSpec": null,
"fetchSpec": "^2.0.1"
},
"_requiredBy": [
"/guppy-cli/yargs"
],
"_resolved": "https://registry.npmjs.org/camelcase/-/camelcase-2.1.1.tgz",
"_shasum": "7c1d16d679a1bbe59ca02cacecfb011e201f5a1f",
"_spec": "camelcase@^2.0.1",
"_where": "/home/manfred/enviPath/ketcher2/ketcher/node_modules/guppy-cli/node_modules/yargs",
"author": {
"name": "Sindre Sorhus",
"email": "sindresorhus@gmail.com",
"url": "http://sindresorhus.com"
},
"bugs": {
"url": "https://github.com/sindresorhus/camelcase/issues"
},
"bundleDependencies": false,
"deprecated": false,
"description": "Convert a dash/dot/underscore/space separated string to camelCase: foo-bar → fooBar",
"devDependencies": {
"ava": "*",
"xo": "*"
},
"engines": {
"node": ">=0.10.0"
},
"files": [
"index.js"
],
"homepage": "https://github.com/sindresorhus/camelcase#readme",
"keywords": [
"camelcase",
"camel-case",
"camel",
"case",
"dash",
"hyphen",
"dot",
"underscore",
"separator",
"string",
"text",
"convert"
],
"license": "MIT",
"name": "camelcase",
"repository": {
"type": "git",
"url": "git+https://github.com/sindresorhus/camelcase.git"
},
"scripts": {
"test": "xo && ava"
},
"version": "2.1.1"
}


@ -0,0 +1,57 @@
# camelcase [![Build Status](https://travis-ci.org/sindresorhus/camelcase.svg?branch=master)](https://travis-ci.org/sindresorhus/camelcase)
> Convert a dash/dot/underscore/space separated string to camelCase: `foo-bar` → `fooBar`
## Install
```
$ npm install --save camelcase
```
## Usage
```js
const camelCase = require('camelcase');
camelCase('foo-bar');
//=> 'fooBar'
camelCase('foo_bar');
//=> 'fooBar'
camelCase('Foo-Bar');
//=> 'fooBar'
camelCase('--foo.bar');
//=> 'fooBar'
camelCase('__foo__bar__');
//=> 'fooBar'
camelCase('foo bar');
//=> 'fooBar'
console.log(process.argv[3]);
//=> '--foo-bar'
camelCase(process.argv[3]);
//=> 'fooBar'
camelCase('foo', 'bar');
//=> 'fooBar'
camelCase('__foo__', '--bar');
//=> 'fooBar'
```
## Related
- [decamelize](https://github.com/sindresorhus/decamelize) - The inverse of this module
- [uppercamelcase](https://github.com/SamVerschueren/uppercamelcase) - Like this module, but to PascalCase instead of camelCase
## License
MIT © [Sindre Sorhus](http://sindresorhus.com)


@ -0,0 +1,15 @@
# Change Log
All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines.
<a name="3.2.0"></a>
# [3.2.0](https://github.com/yargs/cliui/compare/v3.1.2...v3.2.0) (2016-04-11)
### Bug Fixes
* reduces tarball size ([acc6c33](https://github.com/yargs/cliui/commit/acc6c33))
### Features
* adds standard-version for release management ([ff84e32](https://github.com/yargs/cliui/commit/ff84e32))


@ -0,0 +1,14 @@
Copyright (c) 2015, Contributors
Permission to use, copy, modify, and/or distribute this software
for any purpose with or without fee is hereby granted, provided
that the above copyright notice and this permission notice
appear in all copies.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES
OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE
LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES
OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS,
WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION,
ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.


@ -0,0 +1,110 @@
# cliui
[![Build Status](https://travis-ci.org/yargs/cliui.svg)](https://travis-ci.org/yargs/cliui)
[![Coverage Status](https://coveralls.io/repos/yargs/cliui/badge.svg?branch=)](https://coveralls.io/r/yargs/cliui?branch=)
[![NPM version](https://img.shields.io/npm/v/cliui.svg)](https://www.npmjs.com/package/cliui)
[![Standard Version](https://img.shields.io/badge/release-standard%20version-brightgreen.svg)](https://github.com/conventional-changelog/standard-version)
easily create complex multi-column command-line-interfaces.
## Example
```js
var ui = require('cliui')({
width: 80
})
ui.div('Usage: $0 [command] [options]')
ui.div({
text: 'Options:',
padding: [2, 0, 2, 0]
})
ui.div(
{
text: "-f, --file",
width: 20,
padding: [0, 4, 0, 4]
},
{
text: "the file to load." +
chalk.green("(if this description is long it wraps).")
,
width: 20
},
{
text: chalk.red("[required]"),
align: 'right'
}
)
console.log(ui.toString())
```
<img width="500" src="screenshot.png">
## Layout DSL
cliui exposes a simple layout DSL:
If you create a single `ui.row`, passing a string rather than an
object:
* `\n`: characters will be interpreted as new rows.
* `\t`: characters will be interpreted as new columns.
* `\s`: characters will be interpreted as padding.
**as an example...**
```js
var ui = require('./')({
width: 60
})
ui.div(
'Usage: node ./bin/foo.js\n' +
' <regex>\t provide a regex\n' +
' <glob>\t provide a glob\t [required]'
)
console.log(ui.toString())
```
**will output:**
```shell
Usage: node ./bin/foo.js
<regex> provide a regex
<glob> provide a glob [required]
```
## Methods
```js
cliui = require('cliui')
```
### cliui({width: integer})
Specify the maximum width of the UI being generated.
### cliui({wrap: boolean})
Enable or disable the wrapping of text in a column.
### cliui.div(column, column, column)
Create a row with any number of columns, a column
can either be a string, or an object with the following
options:
* **width:** the width of a column.
* **align:** alignment, `right` or `center`.
* **padding:** `[top, right, bottom, left]`.
* **border:** should a border be placed around the div?
### cliui.span(column, column, column)
Similar to `div`, except the next row will be appended without
a new line being created.
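As a rough sketch of `span` and `border` (the text, widths, and padding below are arbitrary and only for illustration), the second `div` is rendered on the same line as the preceding `span`:
```js
var ui = require('cliui')({ width: 60 })

// span: the following div is appended to the same rendered line
ui.span({ text: 'Usage: my-app', width: 24 })
ui.div({ text: '[options]', padding: [0, 0, 0, 24] })

// border: draw a box around the column
ui.div({ text: 'done', border: true })

console.log(ui.toString())
```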


@ -0,0 +1,316 @@
var stringWidth = require('string-width')
var stripAnsi = require('strip-ansi')
var wrap = require('wrap-ansi')
var align = {
right: alignRight,
center: alignCenter
}
var top = 0
var right = 1
var bottom = 2
var left = 3
function UI (opts) {
this.width = opts.width
this.wrap = opts.wrap
this.rows = []
}
UI.prototype.span = function () {
var cols = this.div.apply(this, arguments)
cols.span = true
}
UI.prototype.div = function () {
if (arguments.length === 0) this.div('')
if (this.wrap && this._shouldApplyLayoutDSL.apply(this, arguments)) {
return this._applyLayoutDSL(arguments[0])
}
var cols = []
for (var i = 0, arg; (arg = arguments[i]) !== undefined; i++) {
if (typeof arg === 'string') cols.push(this._colFromString(arg))
else cols.push(arg)
}
this.rows.push(cols)
return cols
}
UI.prototype._shouldApplyLayoutDSL = function () {
return arguments.length === 1 && typeof arguments[0] === 'string' &&
/[\t\n]/.test(arguments[0])
}
UI.prototype._applyLayoutDSL = function (str) {
var _this = this
var rows = str.split('\n')
var leftColumnWidth = 0
// simple heuristic for layout, make sure the
// second column lines up along the left-hand.
// don't allow the first column to take up more
// than 50% of the screen.
rows.forEach(function (row) {
var columns = row.split('\t')
if (columns.length > 1 && stringWidth(columns[0]) > leftColumnWidth) {
leftColumnWidth = Math.min(
Math.floor(_this.width * 0.5),
stringWidth(columns[0])
)
}
})
// generate a table:
// replacing ' ' with padding calculations.
// using the algorithmically generated width.
rows.forEach(function (row) {
var columns = row.split('\t')
_this.div.apply(_this, columns.map(function (r, i) {
return {
text: r.trim(),
padding: _this._measurePadding(r),
width: (i === 0 && columns.length > 1) ? leftColumnWidth : undefined
}
}))
})
return this.rows[this.rows.length - 1]
}
UI.prototype._colFromString = function (str) {
return {
text: str,
padding: this._measurePadding(str)
}
}
UI.prototype._measurePadding = function (str) {
// measure padding without ansi escape codes
var noAnsi = stripAnsi(str)
return [0, noAnsi.match(/\s*$/)[0].length, 0, noAnsi.match(/^\s*/)[0].length]
}
UI.prototype.toString = function () {
var _this = this
var lines = []
_this.rows.forEach(function (row, i) {
_this.rowToString(row, lines)
})
// don't display any lines with the
// hidden flag set.
lines = lines.filter(function (line) {
return !line.hidden
})
return lines.map(function (line) {
return line.text
}).join('\n')
}
UI.prototype.rowToString = function (row, lines) {
var _this = this
var padding
var rrows = this._rasterize(row)
var str = ''
var ts
var width
var wrapWidth
rrows.forEach(function (rrow, r) {
str = ''
rrow.forEach(function (col, c) {
ts = '' // temporary string used during alignment/padding.
width = row[c].width // the width with padding.
wrapWidth = _this._negatePadding(row[c]) // the width without padding.
ts += col
for (var i = 0; i < wrapWidth - stringWidth(col); i++) {
ts += ' '
}
// align the string within its column.
if (row[c].align && row[c].align !== 'left' && _this.wrap) {
ts = align[row[c].align](ts, wrapWidth)
if (stringWidth(ts) < wrapWidth) ts += new Array(width - stringWidth(ts)).join(' ')
}
// apply border and padding to string.
padding = row[c].padding || [0, 0, 0, 0]
if (padding[left]) str += new Array(padding[left] + 1).join(' ')
str += addBorder(row[c], ts, '| ')
str += ts
str += addBorder(row[c], ts, ' |')
if (padding[right]) str += new Array(padding[right] + 1).join(' ')
// if prior row is span, try to render the
// current row on the prior line.
if (r === 0 && lines.length > 0) {
str = _this._renderInline(str, lines[lines.length - 1])
}
})
// remove trailing whitespace.
lines.push({
text: str.replace(/ +$/, ''),
span: row.span
})
})
return lines
}
function addBorder (col, ts, style) {
if (col.border) {
if (/[.']-+[.']/.test(ts)) return ''
else if (ts.trim().length) return style
else return ' '
}
return ''
}
// if the full 'source' can render in
// the target line, do so.
UI.prototype._renderInline = function (source, previousLine) {
var leadingWhitespace = source.match(/^ */)[0].length
var target = previousLine.text
var targetTextWidth = stringWidth(target.trimRight())
if (!previousLine.span) return source
// if we're not applying wrapping logic,
// just always append to the span.
if (!this.wrap) {
previousLine.hidden = true
return target + source
}
if (leadingWhitespace < targetTextWidth) return source
previousLine.hidden = true
return target.trimRight() + new Array(leadingWhitespace - targetTextWidth + 1).join(' ') + source.trimLeft()
}
UI.prototype._rasterize = function (row) {
var _this = this
var i
var rrow
var rrows = []
var widths = this._columnWidths(row)
var wrapped
// word wrap all columns, and create
// a data-structure that is easy to rasterize.
row.forEach(function (col, c) {
// leave room for left and right padding.
col.width = widths[c]
if (_this.wrap) wrapped = wrap(col.text, _this._negatePadding(col), {hard: true}).split('\n')
else wrapped = col.text.split('\n')
if (col.border) {
wrapped.unshift('.' + new Array(_this._negatePadding(col) + 3).join('-') + '.')
wrapped.push("'" + new Array(_this._negatePadding(col) + 3).join('-') + "'")
}
// add top and bottom padding.
if (col.padding) {
for (i = 0; i < (col.padding[top] || 0); i++) wrapped.unshift('')
for (i = 0; i < (col.padding[bottom] || 0); i++) wrapped.push('')
}
wrapped.forEach(function (str, r) {
if (!rrows[r]) rrows.push([])
rrow = rrows[r]
for (var i = 0; i < c; i++) {
if (rrow[i] === undefined) rrow.push('')
}
rrow.push(str)
})
})
return rrows
}
UI.prototype._negatePadding = function (col) {
var wrapWidth = col.width
if (col.padding) wrapWidth -= (col.padding[left] || 0) + (col.padding[right] || 0)
if (col.border) wrapWidth -= 4
return wrapWidth
}
UI.prototype._columnWidths = function (row) {
var _this = this
var widths = []
var unset = row.length
var unsetWidth
var remainingWidth = this.width
// column widths can be set in config.
row.forEach(function (col, i) {
if (col.width) {
unset--
widths[i] = col.width
remainingWidth -= col.width
} else {
widths[i] = undefined
}
})
// any unset widths should be calculated.
if (unset) unsetWidth = Math.floor(remainingWidth / unset)
widths.forEach(function (w, i) {
if (!_this.wrap) widths[i] = row[i].width || stringWidth(row[i].text)
else if (w === undefined) widths[i] = Math.max(unsetWidth, _minWidth(row[i]))
})
return widths
}
// calculates the minimum width of
// a column, based on padding preferences.
function _minWidth (col) {
var padding = col.padding || []
var minWidth = 1 + (padding[left] || 0) + (padding[right] || 0)
if (col.border) minWidth += 4
return minWidth
}
function alignRight (str, width) {
str = str.trim()
var padding = ''
var strWidth = stringWidth(str)
if (strWidth < width) {
padding = new Array(width - strWidth + 1).join(' ')
}
return padding + str
}
function alignCenter (str, width) {
str = str.trim()
var padding = ''
var strWidth = stringWidth(str.trim())
if (strWidth < width) {
padding = new Array(parseInt((width - strWidth) / 2, 10) + 1).join(' ')
}
return padding + str
}
module.exports = function (opts) {
opts = opts || {}
return new UI({
width: (opts || {}).width || 80,
wrap: typeof opts.wrap === 'boolean' ? opts.wrap : true
})
}


@ -0,0 +1,96 @@
{
"_from": "cliui@^3.0.3",
"_id": "cliui@3.2.0",
"_inBundle": false,
"_integrity": "sha1-EgYBU3qRbSmUD5NNo7SNWFo5IT0=",
"_location": "/guppy-cli/cliui",
"_phantomChildren": {},
"_requested": {
"type": "range",
"registry": true,
"raw": "cliui@^3.0.3",
"name": "cliui",
"escapedName": "cliui",
"rawSpec": "^3.0.3",
"saveSpec": null,
"fetchSpec": "^3.0.3"
},
"_requiredBy": [
"/guppy-cli/yargs"
],
"_resolved": "https://registry.npmjs.org/cliui/-/cliui-3.2.0.tgz",
"_shasum": "120601537a916d29940f934da3b48d585a39213d",
"_spec": "cliui@^3.0.3",
"_where": "/home/manfred/enviPath/ketcher2/ketcher/node_modules/guppy-cli/node_modules/yargs",
"author": {
"name": "Ben Coe",
"email": "ben@npmjs.com"
},
"bugs": {
"url": "https://github.com/yargs/cliui/issues"
},
"bundleDependencies": false,
"config": {
"blanket": {
"pattern": [
"index.js"
],
"data-cover-never": [
"node_modules",
"test"
],
"output-reporter": "spec"
}
},
"dependencies": {
"string-width": "^1.0.1",
"strip-ansi": "^3.0.1",
"wrap-ansi": "^2.0.0"
},
"deprecated": false,
"description": "easily create complex multi-column command-line-interfaces",
"devDependencies": {
"chai": "^3.5.0",
"chalk": "^1.1.2",
"coveralls": "^2.11.8",
"mocha": "^2.4.5",
"nyc": "^6.4.0",
"standard": "^6.0.8",
"standard-version": "^2.1.2"
},
"files": [
"index.js"
],
"homepage": "https://github.com/yargs/cliui#readme",
"keywords": [
"cli",
"command-line",
"layout",
"design",
"console",
"wrap",
"table"
],
"license": "ISC",
"main": "index.js",
"name": "cliui",
"repository": {
"type": "git",
"url": "git+ssh://git@github.com/yargs/cliui.git"
},
"scripts": {
"coverage": "nyc --reporter=text-lcov mocha | coveralls",
"pretest": "standard",
"test": "nyc mocha",
"version": "standard-version"
},
"standard": {
"ignore": [
"**/example/**"
],
"globals": [
"it"
]
},
"version": "3.2.0"
}


@ -0,0 +1,15 @@
The ISC License
Copyright (c) 2015 Elan Shanker
Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.


@ -0,0 +1,109 @@
glob-parent [![Build Status](https://travis-ci.org/es128/glob-parent.svg)](https://travis-ci.org/es128/glob-parent) [![Coverage Status](https://img.shields.io/coveralls/es128/glob-parent.svg)](https://coveralls.io/r/es128/glob-parent?branch=master)
======
Javascript module to extract the non-magic parent path from a glob string.
[![NPM](https://nodei.co/npm/glob-parent.png?downloads=true&downloadRank=true&stars=true)](https://nodei.co/npm/glob-parent/)
[![NPM](https://nodei.co/npm-dl/glob-parent.png?height=3&months=9)](https://nodei.co/npm-dl/glob-parent/)
Usage
-----
```sh
npm install glob-parent --save
```
**Examples**
```js
var globParent = require('glob-parent');
globParent('path/to/*.js'); // 'path/to'
globParent('/root/path/to/*.js'); // '/root/path/to'
globParent('/*.js'); // '/'
globParent('*.js'); // '.'
globParent('**/*.js'); // '.'
globParent('path/{to,from}'); // 'path'
globParent('path/!(to|from)'); // 'path'
globParent('path/?(to|from)'); // 'path'
globParent('path/+(to|from)'); // 'path'
globParent('path/*(to|from)'); // 'path'
globParent('path/@(to|from)'); // 'path'
globParent('path/**/*'); // 'path'
// if provided a non-glob path, returns the nearest dir
globParent('path/foo/bar.js'); // 'path/foo'
globParent('path/foo/'); // 'path/foo'
globParent('path/foo'); // 'path' (see issue #3 for details)
```
## Escaping
The following characters have special significance in glob patterns and must be escaped if you want them to be treated as regular path characters:
- `?` (question mark)
- `*` (star)
- `|` (pipe)
- `(` (opening parenthesis)
- `)` (closing parenthesis)
- `{` (opening curly brace)
- `}` (closing curly brace)
- `[` (opening bracket)
- `]` (closing bracket)
**Example**
```js
globParent('foo/[bar]/') // 'foo'
globParent('foo/\\[bar]/') // 'foo/[bar]'
```
## Limitations
#### Braces & Brackets
This library attempts a quick and imperfect method of determining which path
parts have glob magic without fully parsing/lexing the pattern. There are some
advanced use cases that can trip it up, such as nested braces where the outer
pair is escaped and the inner one contains a path separator. If you find
yourself in the unlikely circumstance of being affected by this or need to
ensure higher-fidelity glob handling in your library, it is recommended that you
pre-process your input with [expand-braces] and/or [expand-brackets].
#### Windows
Backslashes are not valid path separators for globs. If a path with backslashes
is provided anyway, for simple cases, glob-parent will replace the path
separator for you and return the non-glob parent path (now with
forward-slashes, which are still valid as Windows path separators).
This cannot be used in conjunction with escape characters.
```js
// BAD
globParent('C:\\Program Files \\(x86\\)\\*.ext') // 'C:/Program Files /(x86/)'
// GOOD
globParent('C:/Program Files\\(x86\\)/*.ext') // 'C:/Program Files (x86)'
```
If you are using escape characters for a pattern without path parts (i.e.
relative to `cwd`), prefix with `./` to avoid confusing glob-parent.
```js
// BAD
globParent('foo \\[bar]') // 'foo '
globParent('foo \\[bar]*') // 'foo '
// GOOD
globParent('./foo \\[bar]') // 'foo [bar]'
globParent('./foo \\[bar]*') // '.'
```
Change Log
----------
[See release notes page on GitHub](https://github.com/es128/glob-parent/releases)
License
-------
[ISC](https://raw.github.com/es128/glob-parent/master/LICENSE)
[expand-braces]: https://github.com/jonschlinkert/expand-braces
[expand-brackets]: https://github.com/jonschlinkert/expand-brackets


@ -0,0 +1,24 @@
'use strict';
var path = require('path');
var isglob = require('is-glob');
var pathDirname = require('path-dirname');
var isWin32 = require('os').platform() === 'win32';
module.exports = function globParent(str) {
// flip windows path separators
if (isWin32 && str.indexOf('/') < 0) str = str.split('\\').join('/');
// special case for strings ending in enclosure containing path separator
if (/[\{\[].*[\/]*.*[\}\]]$/.test(str)) str += '/';
// preserves full path in case of trailing path separator
str += 'a';
// remove path parts that are globby
do {str = pathDirname.posix(str)}
while (isglob(str) || /(^|[^\\])([\{\[]|\([^\)]+$)/.test(str));
// remove escape chars and return result
return str.replace(/\\([\*\?\|\[\]\(\)\{\}])/g, '$1');
};


@ -0,0 +1,70 @@
{
"_from": "glob-parent@^3.0.0",
"_id": "glob-parent@3.1.0",
"_inBundle": false,
"_integrity": "sha1-nmr2KZ2NO9K9QEMIMr0RPfkGxa4=",
"_location": "/guppy-cli/glob-parent",
"_phantomChildren": {},
"_requested": {
"type": "range",
"registry": true,
"raw": "glob-parent@^3.0.0",
"name": "glob-parent",
"escapedName": "glob-parent",
"rawSpec": "^3.0.0",
"saveSpec": null,
"fetchSpec": "^3.0.0"
},
"_requiredBy": [
"/guppy-cli/glob-stream"
],
"_resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-3.1.0.tgz",
"_shasum": "9e6af6299d8d3bd2bd40430832bd113df906c5ae",
"_spec": "glob-parent@^3.0.0",
"_where": "/home/manfred/enviPath/ketcher2/ketcher/node_modules/guppy-cli/node_modules/glob-stream",
"author": {
"name": "Elan Shanker",
"url": "https://github.com/es128"
},
"bugs": {
"url": "https://github.com/es128/glob-parent/issues"
},
"bundleDependencies": false,
"dependencies": {
"is-glob": "^3.1.0",
"path-dirname": "^1.0.0"
},
"deprecated": false,
"description": "Strips glob magic from a string to provide the parent directory path",
"devDependencies": {
"coveralls": "^2.11.2",
"istanbul": "^0.3.5",
"mocha": "^2.1.0"
},
"files": [
"index.js"
],
"homepage": "https://github.com/es128/glob-parent",
"keywords": [
"glob",
"parent",
"strip",
"path",
"dirname",
"directory",
"base",
"wildcard"
],
"license": "ISC",
"main": "index.js",
"name": "glob-parent",
"repository": {
"type": "git",
"url": "git+https://github.com/es128/glob-parent.git"
},
"scripts": {
"ci-test": "istanbul cover _mocha && cat ./coverage/lcov.info | coveralls",
"test": "istanbul test node_modules/mocha/bin/_mocha"
},
"version": "3.1.0"
}


@ -0,0 +1,21 @@
The MIT License (MIT)
Copyright (c) 2015 Blaine Bublitz, Eric Schoffstall and other contributors
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.


@ -0,0 +1,95 @@
<p align="center">
<a href="http://gulpjs.com">
<img height="257" width="114" src="https://raw.githubusercontent.com/gulpjs/artwork/master/gulp-2x.png">
</a>
</p>
# glob-stream
[![NPM version][npm-image]][npm-url] [![Downloads][downloads-image]][npm-url] [![Build Status][travis-image]][travis-url] [![Coveralls Status][coveralls-image]][coveralls-url] [![Gitter chat][gitter-image]][gitter-url]
A wrapper around [node-glob][node-glob-url] to make it streamy.
## Usage
```javascript
var gs = require('glob-stream');
var stream = gs.create('./files/**/*.coffee', { /* options */ });
stream.on('data', function(file){
// file has path, base, and cwd attrs
});
```
You can pass any combination of globs. One caveat: you cannot pass only glob negations; you must give it at least one positive glob so it knows where to start. All given globs must match for a file to be returned.
## API
### create(globs, options)
Returns a stream for multiple globs or filters.
### createStream(positiveGlob, negativeGlobs, options)
Returns a stream for a single glob or filter.
### Options
- cwd
  - Default is `process.cwd()`
- base
  - Default is everything before a glob starts (see [glob-parent][glob-parent-url])
- cwdbase
  - Default is `false`
  - When true it is the same as saying opt.base = opt.cwd
- allowEmpty
  - Default is `false`
  - If true, won't emit an error when a glob pointing at a single file fails to match
- Any through2 related options are documented in [through2][through2-url]
This argument is passed directly to [node-glob][node-glob-url] so check there for more options
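A rough sketch combining the options above (the paths and option values are purely illustrative):
```js
var gs = require('glob-stream');

var stream = gs.create(['./src/**/*.js', '!./src/vendor/**'], {
  cwd: process.cwd(),
  cwdbase: true,    // base becomes opt.cwd
  allowEmpty: true  // don't error when a singular glob matches nothing
});

stream.on('data', function(file) {
  console.log(file.path + ' (base: ' + file.base + ')');
});
```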
### Glob
```js
var stream = gs.create(['./**/*.js', '!./node_modules/**/*']);
```
Globs are executed in order, so negations should follow positive globs. For example:
```js
gulp.src(['!b*.js', '*.js'])
```
would not exclude any files, but this would
```js
gulp.src(['*.js', '!b*.js'])
```
## Related
- [globby][globby-url] - Non-streaming `glob` wrapper with support for multiple patterns.
## License
MIT
[globby-url]: https://github.com/sindresorhus/globby
[through2-url]: https://github.com/rvagg/through2
[node-glob-url]: https://github.com/isaacs/node-glob
[glob-parent-url]: https://github.com/es128/glob-parent
[downloads-image]: http://img.shields.io/npm/dm/glob-stream.svg
[npm-url]: https://www.npmjs.com/package/glob-stream
[npm-image]: https://badge.fury.io/js/glob-stream.svg
[travis-url]: https://travis-ci.org/gulpjs/glob-stream
[travis-image]: https://travis-ci.org/gulpjs/glob-stream.svg?branch=master
[coveralls-url]: https://coveralls.io/r/gulpjs/glob-stream
[coveralls-image]: https://coveralls.io/repos/gulpjs/glob-stream/badge.svg
[gitter-url]: https://gitter.im/gulpjs/gulp
[gitter-image]: https://badges.gitter.im/gulpjs/gulp.png


@ -0,0 +1,209 @@
'use strict';
var through2 = require('through2');
var Combine = require('ordered-read-streams');
var unique = require('unique-stream');
var glob = require('glob');
var micromatch = require('micromatch');
var resolveGlob = require('to-absolute-glob');
var globParent = require('glob-parent');
var path = require('path');
var extend = require('extend');
var sepRe = (process.platform === 'win32' ? /[\/\\]/ : /\/+/);
var gs = {
// Creates a stream for a single glob or filter
createStream: function(ourGlob, negatives, opt) {
var ourOpt = extend({}, opt);
delete ourOpt.root;
// Extract base path from glob
var basePath = ourOpt.base || getBasePath(ourGlob, opt);
// Remove path relativity to make globs make sense
ourGlob = resolveGlob(ourGlob, opt);
// Create globbing stuff
var globber = new glob.Glob(ourGlob, ourOpt);
// Create stream and map events from globber to it
var stream = through2.obj(opt,
negatives.length ? filterNegatives : undefined);
var found = false;
globber.on('error', stream.emit.bind(stream, 'error'));
globber.once('end', function() {
if (opt.allowEmpty !== true && !found && globIsSingular(globber)) {
stream.emit('error',
new Error('File not found with singular glob: ' + ourGlob));
}
stream.end();
});
globber.on('match', function(filename) {
found = true;
stream.write({
cwd: opt.cwd,
base: basePath,
path: path.normalize(filename),
});
});
return stream;
function filterNegatives(filename, enc, cb) {
var matcha = isMatch.bind(null, filename);
if (negatives.every(matcha)) {
cb(null, filename); // Pass
} else {
cb(); // Ignore
}
}
},
// Creates a stream for multiple globs or filters
create: function(globs, opt) {
if (!opt) {
opt = {};
}
if (typeof opt.cwd !== 'string') {
opt.cwd = process.cwd();
}
if (typeof opt.dot !== 'boolean') {
opt.dot = false;
}
if (typeof opt.silent !== 'boolean') {
opt.silent = true;
}
if (typeof opt.nonull !== 'boolean') {
opt.nonull = false;
}
if (typeof opt.cwdbase !== 'boolean') {
opt.cwdbase = false;
}
if (opt.cwdbase) {
opt.base = opt.cwd;
}
// Only one glob no need to aggregate
if (!Array.isArray(globs)) {
globs = [globs];
}
var positives = [];
var negatives = [];
var ourOpt = extend({}, opt);
delete ourOpt.root;
globs.forEach(function(glob, index) {
if (typeof glob !== 'string' && !(glob instanceof RegExp)) {
throw new Error('Invalid glob at index ' + index);
}
var globArray = isNegative(glob) ? negatives : positives;
// Create Minimatch instances for negative glob patterns
if (globArray === negatives && typeof glob === 'string') {
var ourGlob = resolveGlob(glob, opt);
glob = micromatch.matcher(ourGlob, ourOpt);
}
globArray.push({
index: index,
glob: glob,
});
});
if (positives.length === 0) {
throw new Error('Missing positive glob');
}
// Only one positive glob no need to aggregate
if (positives.length === 1) {
return streamFromPositive(positives[0]);
}
// Create all individual streams
var streams = positives.map(streamFromPositive);
// Then just pipe them to a single unique stream and return it
var aggregate = new Combine(streams);
var uniqueStream = unique('path');
var returnStream = aggregate.pipe(uniqueStream);
aggregate.on('error', function(err) {
returnStream.emit('error', err);
});
return returnStream;
function streamFromPositive(positive) {
var negativeGlobs = negatives.filter(indexGreaterThan(positive.index))
.map(toGlob);
return gs.createStream(positive.glob, negativeGlobs, opt);
}
},
};
function isMatch(file, matcher) {
if (typeof matcher === 'function') {
return matcher(file.path);
}
if (matcher instanceof RegExp) {
return matcher.test(file.path);
}
}
function isNegative(pattern) {
if (typeof pattern === 'string') {
return pattern[0] === '!';
}
if (pattern instanceof RegExp) {
return true;
}
}
function indexGreaterThan(index) {
return function(obj) {
return obj.index > index;
};
}
function toGlob(obj) {
return obj.glob;
}
function globIsSingular(glob) {
var globSet = glob.minimatch.set;
if (globSet.length !== 1) {
return false;
}
return globSet[0].every(function isString(value) {
return typeof value === 'string';
});
}
function getBasePath(ourGlob, opt) {
var basePath;
var parent = globParent(ourGlob);
if (parent === '/' && opt && opt.root) {
basePath = path.normalize(opt.root);
} else {
basePath = resolveGlob(parent, opt);
}
if (!sepRe.test(basePath.charAt(basePath.length - 1))) {
basePath += path.sep;
}
return basePath;
}
module.exports = gs;


@ -0,0 +1,18 @@
Copyright Joyent, Inc. and other Node contributors. All rights reserved.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to
deal in the Software without restriction, including without limitation the
rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
sell copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
IN THE SOFTWARE.


@ -0,0 +1,15 @@
# readable-stream
***Node-core streams for userland***
[![NPM](https://nodei.co/npm/readable-stream.png?downloads=true&downloadRank=true)](https://nodei.co/npm/readable-stream/)
[![NPM](https://nodei.co/npm-dl/readable-stream.png?&months=6&height=3)](https://nodei.co/npm/readable-stream/)
This package is a mirror of the Streams2 and Streams3 implementations in Node-core.
If you want to guarantee a stable streams base, regardless of what version of Node you, or the users of your libraries are using, use **readable-stream** *only* and avoid the *"stream"* module in Node-core.
**readable-stream** comes in two major versions, v1.0.x and v1.1.x. The former tracks the Streams2 implementation in Node 0.10, including bug-fixes and minor improvements as they are added. The latter tracks Streams3 as it develops in Node 0.11; we will likely see a v1.2.x branch for Node 0.12.
**readable-stream** uses proper patch-level versioning so if you pin to `"~1.0.0"` you'll get the latest Node 0.10 Streams2 implementation, including any fixes and minor non-breaking improvements. The patch-level versions of 1.0.x and 1.1.x should mirror the patch-level versions of Node-core releases. You should prefer the **1.0.x** releases for now and when you're ready to start using Streams3, pin to `"~1.1.0"`.
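A minimal, illustrative sketch of consuming the module in place of core `stream` (the no-op `_read` and the pushed data are purely for demonstration):
```js
var Readable = require('readable-stream').Readable;

var rs = new Readable();
rs._read = function () {}; // no-op: the data below is pushed up front
rs.push('hello from readable-stream\n');
rs.push(null); // signal end-of-stream
rs.pipe(process.stdout);
```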


@ -0,0 +1 @@
module.exports = require("./lib/_stream_duplex.js")


@ -0,0 +1,89 @@
// Copyright Joyent, Inc. and other Node contributors.
//
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to permit
// persons to whom the Software is furnished to do so, subject to the
// following conditions:
//
// The above copyright notice and this permission notice shall be included
// in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
// USE OR OTHER DEALINGS IN THE SOFTWARE.
// a duplex stream is just a stream that is both readable and writable.
// Since JS doesn't have multiple prototypal inheritance, this class
// prototypally inherits from Readable, and then parasitically from
// Writable.
module.exports = Duplex;
/*<replacement>*/
var objectKeys = Object.keys || function (obj) {
var keys = [];
for (var key in obj) keys.push(key);
return keys;
}
/*</replacement>*/
/*<replacement>*/
var util = require('core-util-is');
util.inherits = require('inherits');
/*</replacement>*/
var Readable = require('./_stream_readable');
var Writable = require('./_stream_writable');
util.inherits(Duplex, Readable);
forEach(objectKeys(Writable.prototype), function(method) {
if (!Duplex.prototype[method])
Duplex.prototype[method] = Writable.prototype[method];
});
function Duplex(options) {
if (!(this instanceof Duplex))
return new Duplex(options);
Readable.call(this, options);
Writable.call(this, options);
if (options && options.readable === false)
this.readable = false;
if (options && options.writable === false)
this.writable = false;
this.allowHalfOpen = true;
if (options && options.allowHalfOpen === false)
this.allowHalfOpen = false;
this.once('end', onend);
}
// the no-half-open enforcer
function onend() {
// if we allow half-open state, or if the writable side ended,
// then we're ok.
if (this.allowHalfOpen || this._writableState.ended)
return;
// no more data can be written.
// But allow more writes to happen in this tick.
process.nextTick(this.end.bind(this));
}
function forEach (xs, f) {
for (var i = 0, l = xs.length; i < l; i++) {
f(xs[i], i);
}
}


@ -0,0 +1,46 @@
// Copyright Joyent, Inc. and other Node contributors.
//
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to permit
// persons to whom the Software is furnished to do so, subject to the
// following conditions:
//
// The above copyright notice and this permission notice shall be included
// in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
// USE OR OTHER DEALINGS IN THE SOFTWARE.
// a passthrough stream.
// basically just the most minimal sort of Transform stream.
// Every written chunk gets output as-is.
module.exports = PassThrough;
var Transform = require('./_stream_transform');
/*<replacement>*/
var util = require('core-util-is');
util.inherits = require('inherits');
/*</replacement>*/
util.inherits(PassThrough, Transform);
function PassThrough(options) {
if (!(this instanceof PassThrough))
return new PassThrough(options);
Transform.call(this, options);
}
PassThrough.prototype._transform = function(chunk, encoding, cb) {
cb(null, chunk);
};


@ -0,0 +1,982 @@
// Copyright Joyent, Inc. and other Node contributors.
//
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to permit
// persons to whom the Software is furnished to do so, subject to the
// following conditions:
//
// The above copyright notice and this permission notice shall be included
// in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
// USE OR OTHER DEALINGS IN THE SOFTWARE.
module.exports = Readable;
/*<replacement>*/
var isArray = require('isarray');
/*</replacement>*/
/*<replacement>*/
var Buffer = require('buffer').Buffer;
/*</replacement>*/
Readable.ReadableState = ReadableState;
var EE = require('events').EventEmitter;
/*<replacement>*/
if (!EE.listenerCount) EE.listenerCount = function(emitter, type) {
return emitter.listeners(type).length;
};
/*</replacement>*/
var Stream = require('stream');
/*<replacement>*/
var util = require('core-util-is');
util.inherits = require('inherits');
/*</replacement>*/
var StringDecoder;
util.inherits(Readable, Stream);
function ReadableState(options, stream) {
options = options || {};
// the point at which it stops calling _read() to fill the buffer
// Note: 0 is a valid value, means "don't call _read preemptively ever"
var hwm = options.highWaterMark;
this.highWaterMark = (hwm || hwm === 0) ? hwm : 16 * 1024;
// cast to ints.
this.highWaterMark = ~~this.highWaterMark;
this.buffer = [];
this.length = 0;
this.pipes = null;
this.pipesCount = 0;
this.flowing = false;
this.ended = false;
this.endEmitted = false;
this.reading = false;
// In streams that never have any data, and do push(null) right away,
// the consumer can miss the 'end' event if they do some I/O before
// consuming the stream. So, we don't emit('end') until some reading
// happens.
this.calledRead = false;
// a flag to be able to tell if the onwrite cb is called immediately,
// or on a later tick. We set this to true at first, because any
// actions that shouldn't happen until "later" should generally also
// not happen before the first write call.
this.sync = true;
// whenever we return null, then we set a flag to say
// that we're awaiting a 'readable' event emission.
this.needReadable = false;
this.emittedReadable = false;
this.readableListening = false;
// object stream flag. Used to make read(n) ignore n and to
// make all the buffer merging and length checks go away
this.objectMode = !!options.objectMode;
// Crypto is kind of old and crusty. Historically, its default string
// encoding is 'binary' so we have to make this configurable.
// Everything else in the universe uses 'utf8', though.
this.defaultEncoding = options.defaultEncoding || 'utf8';
// when piping, we only care about 'readable' events that happen
// after read()ing all the bytes and not getting any pushback.
this.ranOut = false;
// the number of writers that are awaiting a drain event in .pipe()s
this.awaitDrain = 0;
// if true, a maybeReadMore has been scheduled
this.readingMore = false;
this.decoder = null;
this.encoding = null;
if (options.encoding) {
if (!StringDecoder)
StringDecoder = require('string_decoder/').StringDecoder;
this.decoder = new StringDecoder(options.encoding);
this.encoding = options.encoding;
}
}
function Readable(options) {
if (!(this instanceof Readable))
return new Readable(options);
this._readableState = new ReadableState(options, this);
// legacy
this.readable = true;
Stream.call(this);
}
// Manually shove something into the read() buffer.
// This returns true if the highWaterMark has not been hit yet,
// similar to how Writable.write() returns true if you should
// write() some more.
Readable.prototype.push = function(chunk, encoding) {
var state = this._readableState;
if (typeof chunk === 'string' && !state.objectMode) {
encoding = encoding || state.defaultEncoding;
if (encoding !== state.encoding) {
chunk = new Buffer(chunk, encoding);
encoding = '';
}
}
return readableAddChunk(this, state, chunk, encoding, false);
};
// Unshift should *always* be something directly out of read()
Readable.prototype.unshift = function(chunk) {
var state = this._readableState;
return readableAddChunk(this, state, chunk, '', true);
};
function readableAddChunk(stream, state, chunk, encoding, addToFront) {
var er = chunkInvalid(state, chunk);
if (er) {
stream.emit('error', er);
} else if (chunk === null || chunk === undefined) {
state.reading = false;
if (!state.ended)
onEofChunk(stream, state);
} else if (state.objectMode || chunk && chunk.length > 0) {
if (state.ended && !addToFront) {
var e = new Error('stream.push() after EOF');
stream.emit('error', e);
} else if (state.endEmitted && addToFront) {
var e = new Error('stream.unshift() after end event');
stream.emit('error', e);
} else {
if (state.decoder && !addToFront && !encoding)
chunk = state.decoder.write(chunk);
// update the buffer info.
state.length += state.objectMode ? 1 : chunk.length;
if (addToFront) {
state.buffer.unshift(chunk);
} else {
state.reading = false;
state.buffer.push(chunk);
}
if (state.needReadable)
emitReadable(stream);
maybeReadMore(stream, state);
}
} else if (!addToFront) {
state.reading = false;
}
return needMoreData(state);
}
// if it's past the high water mark, we can push in some more.
// Also, if we have no data yet, we can stand some
// more bytes. This is to work around cases where hwm=0,
// such as the repl. Also, if the push() triggered a
// readable event, and the user called read(largeNumber) such that
// needReadable was set, then we ought to push more, so that another
// 'readable' event will be triggered.
function needMoreData(state) {
return !state.ended &&
(state.needReadable ||
state.length < state.highWaterMark ||
state.length === 0);
}
// backwards compatibility.
Readable.prototype.setEncoding = function(enc) {
if (!StringDecoder)
StringDecoder = require('string_decoder/').StringDecoder;
this._readableState.decoder = new StringDecoder(enc);
this._readableState.encoding = enc;
};
// Don't raise the hwm > 128MB
var MAX_HWM = 0x800000;
function roundUpToNextPowerOf2(n) {
if (n >= MAX_HWM) {
n = MAX_HWM;
} else {
// Get the next highest power of 2
n--;
for (var p = 1; p < 32; p <<= 1) n |= n >> p;
n++;
}
return n;
}
function howMuchToRead(n, state) {
if (state.length === 0 && state.ended)
return 0;
if (state.objectMode)
return n === 0 ? 0 : 1;
if (n === null || isNaN(n)) {
// only flow one buffer at a time
if (state.flowing && state.buffer.length)
return state.buffer[0].length;
else
return state.length;
}
if (n <= 0)
return 0;
// If we're asking for more than the target buffer level,
// then raise the water mark. Bump up to the next highest
// power of 2, to prevent increasing it excessively in tiny
// amounts.
if (n > state.highWaterMark)
state.highWaterMark = roundUpToNextPowerOf2(n);
// don't have that much. return null, unless we've ended.
if (n > state.length) {
if (!state.ended) {
state.needReadable = true;
return 0;
} else
return state.length;
}
return n;
}
// you can override either this method, or the async _read(n) below.
Readable.prototype.read = function(n) {
var state = this._readableState;
state.calledRead = true;
var nOrig = n;
var ret;
if (typeof n !== 'number' || n > 0)
state.emittedReadable = false;
// if we're doing read(0) to trigger a readable event, but we
// already have a bunch of data in the buffer, then just trigger
// the 'readable' event and move on.
if (n === 0 &&
state.needReadable &&
(state.length >= state.highWaterMark || state.ended)) {
emitReadable(this);
return null;
}
n = howMuchToRead(n, state);
// if we've ended, and we're now clear, then finish it up.
if (n === 0 && state.ended) {
ret = null;
// In cases where the decoder did not receive enough data
// to produce a full chunk, then immediately received an
// EOF, state.buffer will contain [<Buffer >, <Buffer 00 ...>].
// howMuchToRead will see this and coerce the amount to
// read to zero (because it's looking at the length of the
// first <Buffer > in state.buffer), and we'll end up here.
//
// This can only happen via state.decoder -- no other venue
// exists for pushing a zero-length chunk into state.buffer
// and triggering this behavior. In this case, we return our
// remaining data and end the stream, if appropriate.
if (state.length > 0 && state.decoder) {
ret = fromList(n, state);
state.length -= ret.length;
}
if (state.length === 0)
endReadable(this);
return ret;
}
// All the actual chunk generation logic needs to be
// *below* the call to _read. The reason is that in certain
// synthetic stream cases, such as passthrough streams, _read
// may be a completely synchronous operation which may change
// the state of the read buffer, providing enough data when
// before there was *not* enough.
//
// So, the steps are:
// 1. Figure out what the state of things will be after we do
// a read from the buffer.
//
// 2. If that resulting state will trigger a _read, then call _read.
// Note that this may be asynchronous, or synchronous. Yes, it is
// deeply ugly to write APIs this way, but that still doesn't mean
// that the Readable class should behave improperly, as streams are
// designed to be sync/async agnostic.
// Take note if the _read call is sync or async (ie, if the read call
// has returned yet), so that we know whether or not it's safe to emit
// 'readable' etc.
//
// 3. Actually pull the requested chunks out of the buffer and return.
// if we need a readable event, then we need to do some reading.
var doRead = state.needReadable;
// if we currently have less than the highWaterMark, then also read some
if (state.length - n <= state.highWaterMark)
doRead = true;
// however, if we've ended, then there's no point, and if we're already
// reading, then it's unnecessary.
if (state.ended || state.reading)
doRead = false;
if (doRead) {
state.reading = true;
state.sync = true;
// if the length is currently zero, then we *need* a readable event.
if (state.length === 0)
state.needReadable = true;
// call internal read method
this._read(state.highWaterMark);
state.sync = false;
}
// If _read called its callback synchronously, then `reading`
// will be false, and we need to re-evaluate how much data we
// can return to the user.
if (doRead && !state.reading)
n = howMuchToRead(nOrig, state);
if (n > 0)
ret = fromList(n, state);
else
ret = null;
if (ret === null) {
state.needReadable = true;
n = 0;
}
state.length -= n;
// If we have nothing in the buffer, then we want to know
// as soon as we *do* get something into the buffer.
if (state.length === 0 && !state.ended)
state.needReadable = true;
// If we happened to read() exactly the remaining amount in the
// buffer, and the EOF has been seen at this point, then make sure
// that we emit 'end' on the very next tick.
if (state.ended && !state.endEmitted && state.length === 0)
endReadable(this);
return ret;
};
function chunkInvalid(state, chunk) {
var er = null;
if (!Buffer.isBuffer(chunk) &&
'string' !== typeof chunk &&
chunk !== null &&
chunk !== undefined &&
!state.objectMode) {
er = new TypeError('Invalid non-string/buffer chunk');
}
return er;
}
function onEofChunk(stream, state) {
if (state.decoder && !state.ended) {
var chunk = state.decoder.end();
if (chunk && chunk.length) {
state.buffer.push(chunk);
state.length += state.objectMode ? 1 : chunk.length;
}
}
state.ended = true;
// if we've ended and we have some data left, then emit
// 'readable' now to make sure it gets picked up.
if (state.length > 0)
emitReadable(stream);
else
endReadable(stream);
}
// Don't emit readable right away in sync mode, because this can trigger
// another read() call => stack overflow. This way, it might trigger
// a nextTick recursion warning, but that's not so bad.
function emitReadable(stream) {
var state = stream._readableState;
state.needReadable = false;
if (state.emittedReadable)
return;
state.emittedReadable = true;
if (state.sync)
process.nextTick(function() {
emitReadable_(stream);
});
else
emitReadable_(stream);
}
function emitReadable_(stream) {
stream.emit('readable');
}
// at this point, the user has presumably seen the 'readable' event,
// and called read() to consume some data. that may have triggered
// in turn another _read(n) call, in which case reading = true if
// it's in progress.
// However, if we're not ended, or reading, and the length < hwm,
// then go ahead and try to read some more preemptively.
function maybeReadMore(stream, state) {
if (!state.readingMore) {
state.readingMore = true;
process.nextTick(function() {
maybeReadMore_(stream, state);
});
}
}
function maybeReadMore_(stream, state) {
var len = state.length;
while (!state.reading && !state.flowing && !state.ended &&
state.length < state.highWaterMark) {
stream.read(0);
if (len === state.length)
// didn't get any data, stop spinning.
break;
else
len = state.length;
}
state.readingMore = false;
}
// abstract method. to be overridden in specific implementation classes.
// call cb(er, data) where data is <= n in length.
// for virtual (non-string, non-buffer) streams, "length" is somewhat
// arbitrary, and perhaps not very meaningful.
Readable.prototype._read = function(n) {
this.emit('error', new Error('not implemented'));
};
Readable.prototype.pipe = function(dest, pipeOpts) {
var src = this;
var state = this._readableState;
switch (state.pipesCount) {
case 0:
state.pipes = dest;
break;
case 1:
state.pipes = [state.pipes, dest];
break;
default:
state.pipes.push(dest);
break;
}
state.pipesCount += 1;
var doEnd = (!pipeOpts || pipeOpts.end !== false) &&
dest !== process.stdout &&
dest !== process.stderr;
var endFn = doEnd ? onend : cleanup;
if (state.endEmitted)
process.nextTick(endFn);
else
src.once('end', endFn);
dest.on('unpipe', onunpipe);
function onunpipe(readable) {
if (readable !== src) return;
cleanup();
}
function onend() {
dest.end();
}
// when the dest drains, it reduces the awaitDrain counter
// on the source. This would be more elegant with a .once()
// handler in flow(), but adding and removing repeatedly is
// too slow.
var ondrain = pipeOnDrain(src);
dest.on('drain', ondrain);
function cleanup() {
// cleanup event handlers once the pipe is broken
dest.removeListener('close', onclose);
dest.removeListener('finish', onfinish);
dest.removeListener('drain', ondrain);
dest.removeListener('error', onerror);
dest.removeListener('unpipe', onunpipe);
src.removeListener('end', onend);
src.removeListener('end', cleanup);
// if the reader is waiting for a drain event from this
// specific writer, then it would cause it to never start
// flowing again.
// So, if this is awaiting a drain, then we just call it now.
// If we don't know, then assume that we are waiting for one.
if (!dest._writableState || dest._writableState.needDrain)
ondrain();
}
// if the dest has an error, then stop piping into it.
// however, don't suppress the throwing behavior for this.
function onerror(er) {
unpipe();
dest.removeListener('error', onerror);
if (EE.listenerCount(dest, 'error') === 0)
dest.emit('error', er);
}
// This is a brutally ugly hack to make sure that our error handler
// is attached before any userland ones. NEVER DO THIS.
if (!dest._events || !dest._events.error)
dest.on('error', onerror);
else if (isArray(dest._events.error))
dest._events.error.unshift(onerror);
else
dest._events.error = [onerror, dest._events.error];
// Both close and finish should trigger unpipe, but only once.
function onclose() {
dest.removeListener('finish', onfinish);
unpipe();
}
dest.once('close', onclose);
function onfinish() {
dest.removeListener('close', onclose);
unpipe();
}
dest.once('finish', onfinish);
function unpipe() {
src.unpipe(dest);
}
// tell the dest that it's being piped to
dest.emit('pipe', src);
// start the flow if it hasn't been started already.
if (!state.flowing) {
// the handler that waits for readable events after all
// the data gets sucked out in flow.
// This would be easier to follow with a .once() handler
// in flow(), but that is too slow.
this.on('readable', pipeOnReadable);
state.flowing = true;
process.nextTick(function() {
flow(src);
});
}
return dest;
};
function pipeOnDrain(src) {
return function() {
var dest = this;
var state = src._readableState;
state.awaitDrain--;
if (state.awaitDrain === 0)
flow(src);
};
}
function flow(src) {
var state = src._readableState;
var chunk;
state.awaitDrain = 0;
function write(dest, i, list) {
var written = dest.write(chunk);
if (false === written) {
state.awaitDrain++;
}
}
while (state.pipesCount && null !== (chunk = src.read())) {
if (state.pipesCount === 1)
write(state.pipes, 0, null);
else
forEach(state.pipes, write);
src.emit('data', chunk);
// if anyone needs a drain, then we have to wait for that.
if (state.awaitDrain > 0)
return;
}
// if every destination was unpiped, either before entering this
// function, or in the while loop, then stop flowing.
//
// NB: This is a pretty rare edge case.
if (state.pipesCount === 0) {
state.flowing = false;
// if there were data event listeners added, then switch to old mode.
if (EE.listenerCount(src, 'data') > 0)
emitDataEvents(src);
return;
}
// at this point, no one needed a drain, so we just ran out of data.
// On the next readable event, start it over again.
state.ranOut = true;
}
function pipeOnReadable() {
if (this._readableState.ranOut) {
this._readableState.ranOut = false;
flow(this);
}
}
Readable.prototype.unpipe = function(dest) {
var state = this._readableState;
// if we're not piping anywhere, then do nothing.
if (state.pipesCount === 0)
return this;
// just one destination. most common case.
if (state.pipesCount === 1) {
// passed in one, but it's not the right one.
if (dest && dest !== state.pipes)
return this;
if (!dest)
dest = state.pipes;
// got a match.
state.pipes = null;
state.pipesCount = 0;
this.removeListener('readable', pipeOnReadable);
state.flowing = false;
if (dest)
dest.emit('unpipe', this);
return this;
}
// slow case. multiple pipe destinations.
if (!dest) {
// remove all.
var dests = state.pipes;
var len = state.pipesCount;
state.pipes = null;
state.pipesCount = 0;
this.removeListener('readable', pipeOnReadable);
state.flowing = false;
for (var i = 0; i < len; i++)
dests[i].emit('unpipe', this);
return this;
}
// try to find the right one.
var i = indexOf(state.pipes, dest);
if (i === -1)
return this;
state.pipes.splice(i, 1);
state.pipesCount -= 1;
if (state.pipesCount === 1)
state.pipes = state.pipes[0];
dest.emit('unpipe', this);
return this;
};
// set up data events if they are asked for
// Ensure readable listeners eventually get something
Readable.prototype.on = function(ev, fn) {
var res = Stream.prototype.on.call(this, ev, fn);
if (ev === 'data' && !this._readableState.flowing)
emitDataEvents(this);
if (ev === 'readable' && this.readable) {
var state = this._readableState;
if (!state.readableListening) {
state.readableListening = true;
state.emittedReadable = false;
state.needReadable = true;
if (!state.reading) {
this.read(0);
} else if (state.length) {
emitReadable(this, state);
}
}
}
return res;
};
Readable.prototype.addListener = Readable.prototype.on;
// pause() and resume() are remnants of the legacy readable stream API
// If the user uses them, then switch into old mode.
Readable.prototype.resume = function() {
emitDataEvents(this);
this.read(0);
this.emit('resume');
};
Readable.prototype.pause = function() {
emitDataEvents(this, true);
this.emit('pause');
};
function emitDataEvents(stream, startPaused) {
var state = stream._readableState;
if (state.flowing) {
// https://github.com/isaacs/readable-stream/issues/16
throw new Error('Cannot switch to old mode now.');
}
var paused = startPaused || false;
var readable = false;
// convert to an old-style stream.
stream.readable = true;
stream.pipe = Stream.prototype.pipe;
stream.on = stream.addListener = Stream.prototype.on;
stream.on('readable', function() {
readable = true;
var c;
while (!paused && (null !== (c = stream.read())))
stream.emit('data', c);
if (c === null) {
readable = false;
stream._readableState.needReadable = true;
}
});
stream.pause = function() {
paused = true;
this.emit('pause');
};
stream.resume = function() {
paused = false;
if (readable)
process.nextTick(function() {
stream.emit('readable');
});
else
this.read(0);
this.emit('resume');
};
// now make it start, just in case it hadn't already.
stream.emit('readable');
}
// wrap an old-style stream as the async data source.
// This is *not* part of the readable stream interface.
// It is an ugly unfortunate mess of history.
Readable.prototype.wrap = function(stream) {
var state = this._readableState;
var paused = false;
var self = this;
stream.on('end', function() {
if (state.decoder && !state.ended) {
var chunk = state.decoder.end();
if (chunk && chunk.length)
self.push(chunk);
}
self.push(null);
});
stream.on('data', function(chunk) {
if (state.decoder)
chunk = state.decoder.write(chunk);
// don't skip over falsy values in objectMode
//if (state.objectMode && util.isNullOrUndefined(chunk))
if (state.objectMode && (chunk === null || chunk === undefined))
return;
else if (!state.objectMode && (!chunk || !chunk.length))
return;
var ret = self.push(chunk);
if (!ret) {
paused = true;
stream.pause();
}
});
// proxy all the other methods.
// important when wrapping filters and duplexes.
for (var i in stream) {
if (typeof stream[i] === 'function' &&
typeof this[i] === 'undefined') {
this[i] = function(method) { return function() {
return stream[method].apply(stream, arguments);
}}(i);
}
}
// proxy certain important events.
var events = ['error', 'close', 'destroy', 'pause', 'resume'];
forEach(events, function(ev) {
stream.on(ev, self.emit.bind(self, ev));
});
// when we try to consume some more bytes, simply unpause the
// underlying stream.
self._read = function(n) {
if (paused) {
paused = false;
stream.resume();
}
};
return self;
};
// exposed for testing purposes only.
Readable._fromList = fromList;
// Pluck off n bytes from an array of buffers.
// Length is the combined lengths of all the buffers in the list.
function fromList(n, state) {
var list = state.buffer;
var length = state.length;
var stringMode = !!state.decoder;
var objectMode = !!state.objectMode;
var ret;
// nothing in the list, definitely empty.
if (list.length === 0)
return null;
if (length === 0)
ret = null;
else if (objectMode)
ret = list.shift();
else if (!n || n >= length) {
// read it all, truncate the array.
if (stringMode)
ret = list.join('');
else
ret = Buffer.concat(list, length);
list.length = 0;
} else {
// read just some of it.
if (n < list[0].length) {
// just take a part of the first list item.
// slice is the same for buffers and strings.
var buf = list[0];
ret = buf.slice(0, n);
list[0] = buf.slice(n);
} else if (n === list[0].length) {
// first list is a perfect match
ret = list.shift();
} else {
// complex case.
// we have enough to cover it, but it spans past the first buffer.
if (stringMode)
ret = '';
else
ret = new Buffer(n);
var c = 0;
for (var i = 0, l = list.length; i < l && c < n; i++) {
var buf = list[0];
var cpy = Math.min(n - c, buf.length);
if (stringMode)
ret += buf.slice(0, cpy);
else
buf.copy(ret, c, 0, cpy);
if (cpy < buf.length)
list[0] = buf.slice(cpy);
else
list.shift();
c += cpy;
}
}
}
return ret;
}
function endReadable(stream) {
var state = stream._readableState;
// If we get here before consuming all the bytes, then that is a
// bug in node. Should never happen.
if (state.length > 0)
throw new Error('endReadable called on non-empty stream');
if (!state.endEmitted && state.calledRead) {
state.ended = true;
process.nextTick(function() {
// Check that we didn't get one last unshift.
if (!state.endEmitted && state.length === 0) {
state.endEmitted = true;
stream.readable = false;
stream.emit('end');
}
});
}
}
function forEach (xs, f) {
for (var i = 0, l = xs.length; i < l; i++) {
f(xs[i], i);
}
}
function indexOf (xs, x) {
for (var i = 0, l = xs.length; i < l; i++) {
if (xs[i] === x) return i;
}
return -1;
}

View File

@ -0,0 +1,210 @@
// Copyright Joyent, Inc. and other Node contributors.
//
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to permit
// persons to whom the Software is furnished to do so, subject to the
// following conditions:
//
// The above copyright notice and this permission notice shall be included
// in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
// USE OR OTHER DEALINGS IN THE SOFTWARE.
// a transform stream is a readable/writable stream where you do
// something with the data. Sometimes it's called a "filter",
// but that's not a great name for it, since that implies a thing where
// some bits pass through, and others are simply ignored. (That would
// be a valid example of a transform, of course.)
//
// While the output is causally related to the input, it's not a
// necessarily symmetric or synchronous transformation. For example,
// a zlib stream might take multiple plain-text writes(), and then
// emit a single compressed chunk some time in the future.
//
// Here's how this works:
//
// The Transform stream has all the aspects of the readable and writable
// stream classes. When you write(chunk), that calls _write(chunk,cb)
// internally, and returns false if there's a lot of pending writes
// buffered up. When you call read(), that calls _read(n) until
// there's enough pending readable data buffered up.
//
// In a transform stream, the written data is placed in a buffer. When
// _read(n) is called, it transforms the queued up data, calling the
// buffered _write cb's as it consumes chunks. If consuming a single
// written chunk would result in multiple output chunks, then the first
// outputted bit calls the readcb, and subsequent chunks just go into
// the read buffer, and will cause it to emit 'readable' if necessary.
//
// This way, back-pressure is actually determined by the reading side,
// since _read has to be called to start processing a new chunk. However,
// a pathological inflate type of transform can cause excessive buffering
// here. For example, imagine a stream where every byte of input is
// interpreted as an integer from 0-255, and then results in that many
// bytes of output. Writing the 4 bytes {ff,ff,ff,ff} would result in
// 1kb of data being output. In this case, you could write a very small
// amount of input, and end up with a very large amount of output. In
// such a pathological inflating mechanism, there'd be no way to tell
// the system to stop doing the transform. A single 4MB write could
// cause the system to run out of memory.
//
// However, even in such a pathological case, only a single written chunk
// would be consumed, and then the rest would wait (un-transformed) until
// the results of the previous transformed chunk were consumed.
module.exports = Transform;
var Duplex = require('./_stream_duplex');
/*<replacement>*/
var util = require('core-util-is');
util.inherits = require('inherits');
/*</replacement>*/
util.inherits(Transform, Duplex);
function TransformState(options, stream) {
this.afterTransform = function(er, data) {
return afterTransform(stream, er, data);
};
this.needTransform = false;
this.transforming = false;
this.writecb = null;
this.writechunk = null;
}
function afterTransform(stream, er, data) {
var ts = stream._transformState;
ts.transforming = false;
var cb = ts.writecb;
if (!cb)
return stream.emit('error', new Error('no writecb in Transform class'));
ts.writechunk = null;
ts.writecb = null;
if (data !== null && data !== undefined)
stream.push(data);
if (cb)
cb(er);
var rs = stream._readableState;
rs.reading = false;
if (rs.needReadable || rs.length < rs.highWaterMark) {
stream._read(rs.highWaterMark);
}
}
function Transform(options) {
if (!(this instanceof Transform))
return new Transform(options);
Duplex.call(this, options);
var ts = this._transformState = new TransformState(options, this);
// when the writable side finishes, then flush out anything remaining.
var stream = this;
// start out asking for a readable event once data is transformed.
this._readableState.needReadable = true;
// we have implemented the _read method, and done the other things
// that Readable wants before the first _read call, so unset the
// sync guard flag.
this._readableState.sync = false;
this.once('finish', function() {
if ('function' === typeof this._flush)
this._flush(function(er) {
done(stream, er);
});
else
done(stream);
});
}
Transform.prototype.push = function(chunk, encoding) {
this._transformState.needTransform = false;
return Duplex.prototype.push.call(this, chunk, encoding);
};
// This is the part where you do stuff!
// override this function in implementation classes.
// 'chunk' is an input chunk.
//
// Call `push(newChunk)` to pass along transformed output
// to the readable side. You may call 'push' zero or more times.
//
// Call `cb(err)` when you are done with this chunk. If you pass
// an error, then that'll put the hurt on the whole operation. If you
// never call cb(), then you'll never get another chunk.
Transform.prototype._transform = function(chunk, encoding, cb) {
throw new Error('not implemented');
};
Transform.prototype._write = function(chunk, encoding, cb) {
var ts = this._transformState;
ts.writecb = cb;
ts.writechunk = chunk;
ts.writeencoding = encoding;
if (!ts.transforming) {
var rs = this._readableState;
if (ts.needTransform ||
rs.needReadable ||
rs.length < rs.highWaterMark)
this._read(rs.highWaterMark);
}
};
// Doesn't matter what the args are here.
// _transform does all the work.
// That we got here means that the readable side wants more data.
Transform.prototype._read = function(n) {
var ts = this._transformState;
if (ts.writechunk !== null && ts.writecb && !ts.transforming) {
ts.transforming = true;
this._transform(ts.writechunk, ts.writeencoding, ts.afterTransform);
} else {
// mark that we need a transform, so that any data that comes in
// will get processed, now that we've asked for it.
ts.needTransform = true;
}
};
function done(stream, er) {
if (er)
return stream.emit('error', er);
// if there's nothing in the write buffer, then that means
// that nothing more will ever be provided
var ws = stream._writableState;
var rs = stream._readableState;
var ts = stream._transformState;
if (ws.length)
throw new Error('calling transform done when ws.length != 0');
if (ts.transforming)
throw new Error('calling transform done when still transforming');
return stream.push(null);
}

View File

@ -0,0 +1,386 @@
// Copyright Joyent, Inc. and other Node contributors.
//
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to permit
// persons to whom the Software is furnished to do so, subject to the
// following conditions:
//
// The above copyright notice and this permission notice shall be included
// in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
// USE OR OTHER DEALINGS IN THE SOFTWARE.
// A bit simpler than readable streams.
// Implement an async ._write(chunk, cb), and it'll handle all
// the drain event emission and buffering.
module.exports = Writable;
/*<replacement>*/
var Buffer = require('buffer').Buffer;
/*</replacement>*/
Writable.WritableState = WritableState;
/*<replacement>*/
var util = require('core-util-is');
util.inherits = require('inherits');
/*</replacement>*/
var Stream = require('stream');
util.inherits(Writable, Stream);
function WriteReq(chunk, encoding, cb) {
this.chunk = chunk;
this.encoding = encoding;
this.callback = cb;
}
function WritableState(options, stream) {
options = options || {};
// the point at which write() starts returning false
// Note: 0 is a valid value, means that we always return false if
// the entire buffer is not flushed immediately on write()
var hwm = options.highWaterMark;
this.highWaterMark = (hwm || hwm === 0) ? hwm : 16 * 1024;
// object stream flag to indicate whether or not this stream
// contains buffers or objects.
this.objectMode = !!options.objectMode;
// cast to ints.
this.highWaterMark = ~~this.highWaterMark;
this.needDrain = false;
// at the start of calling end()
this.ending = false;
// when end() has been called, and returned
this.ended = false;
// when 'finish' is emitted
this.finished = false;
// should we decode strings into buffers before passing to _write?
// this is here so that some node-core streams can optimize string
// handling at a lower level.
var noDecode = options.decodeStrings === false;
this.decodeStrings = !noDecode;
// Crypto is kind of old and crusty. Historically, its default string
// encoding is 'binary' so we have to make this configurable.
// Everything else in the universe uses 'utf8', though.
this.defaultEncoding = options.defaultEncoding || 'utf8';
// not an actual buffer we keep track of, but a measurement
// of how much we're waiting to get pushed to some underlying
// socket or file.
this.length = 0;
// a flag to see when we're in the middle of a write.
this.writing = false;
// a flag to be able to tell if the onwrite cb is called immediately,
// or on a later tick. We set this to true at first, because any
// actions that shouldn't happen until "later" should generally also
// not happen before the first write call.
this.sync = true;
// a flag to know if we're processing previously buffered items, which
// may call the _write() callback in the same tick, so that we don't
// end up in an overlapped onwrite situation.
this.bufferProcessing = false;
// the callback that's passed to _write(chunk,cb)
this.onwrite = function(er) {
onwrite(stream, er);
};
// the callback that the user supplies to write(chunk,encoding,cb)
this.writecb = null;
// the amount that is being written when _write is called.
this.writelen = 0;
this.buffer = [];
// True if the error was already emitted and should not be thrown again
this.errorEmitted = false;
}
function Writable(options) {
var Duplex = require('./_stream_duplex');
// Writable ctor is applied to Duplexes, though they're not
// instanceof Writable, they're instanceof Readable.
if (!(this instanceof Writable) && !(this instanceof Duplex))
return new Writable(options);
this._writableState = new WritableState(options, this);
// legacy.
this.writable = true;
Stream.call(this);
}
// Otherwise people can pipe Writable streams, which is just wrong.
Writable.prototype.pipe = function() {
this.emit('error', new Error('Cannot pipe. Not readable.'));
};
function writeAfterEnd(stream, state, cb) {
var er = new Error('write after end');
// TODO: defer error events consistently everywhere, not just the cb
stream.emit('error', er);
process.nextTick(function() {
cb(er);
});
}
// If we get something that is not a buffer, string, null, or undefined,
// and we're not in objectMode, then that's an error.
// Otherwise stream chunks are all considered to be of length=1, and the
// watermarks determine how many objects to keep in the buffer, rather than
// how many bytes or characters.
function validChunk(stream, state, chunk, cb) {
var valid = true;
if (!Buffer.isBuffer(chunk) &&
'string' !== typeof chunk &&
chunk !== null &&
chunk !== undefined &&
!state.objectMode) {
var er = new TypeError('Invalid non-string/buffer chunk');
stream.emit('error', er);
process.nextTick(function() {
cb(er);
});
valid = false;
}
return valid;
}
Writable.prototype.write = function(chunk, encoding, cb) {
var state = this._writableState;
var ret = false;
if (typeof encoding === 'function') {
cb = encoding;
encoding = null;
}
if (Buffer.isBuffer(chunk))
encoding = 'buffer';
else if (!encoding)
encoding = state.defaultEncoding;
if (typeof cb !== 'function')
cb = function() {};
if (state.ended)
writeAfterEnd(this, state, cb);
else if (validChunk(this, state, chunk, cb))
ret = writeOrBuffer(this, state, chunk, encoding, cb);
return ret;
};
function decodeChunk(state, chunk, encoding) {
if (!state.objectMode &&
state.decodeStrings !== false &&
typeof chunk === 'string') {
chunk = new Buffer(chunk, encoding);
}
return chunk;
}
// if we're already writing something, then just put this
// in the queue, and wait our turn. Otherwise, call _write
// If we return false, then we need a drain event, so set that flag.
function writeOrBuffer(stream, state, chunk, encoding, cb) {
chunk = decodeChunk(state, chunk, encoding);
if (Buffer.isBuffer(chunk))
encoding = 'buffer';
var len = state.objectMode ? 1 : chunk.length;
state.length += len;
var ret = state.length < state.highWaterMark;
// we must ensure that previous needDrain will not be reset to false.
if (!ret)
state.needDrain = true;
if (state.writing)
state.buffer.push(new WriteReq(chunk, encoding, cb));
else
doWrite(stream, state, len, chunk, encoding, cb);
return ret;
}
function doWrite(stream, state, len, chunk, encoding, cb) {
state.writelen = len;
state.writecb = cb;
state.writing = true;
state.sync = true;
stream._write(chunk, encoding, state.onwrite);
state.sync = false;
}
function onwriteError(stream, state, sync, er, cb) {
if (sync)
process.nextTick(function() {
cb(er);
});
else
cb(er);
stream._writableState.errorEmitted = true;
stream.emit('error', er);
}
function onwriteStateUpdate(state) {
state.writing = false;
state.writecb = null;
state.length -= state.writelen;
state.writelen = 0;
}
function onwrite(stream, er) {
var state = stream._writableState;
var sync = state.sync;
var cb = state.writecb;
onwriteStateUpdate(state);
if (er)
onwriteError(stream, state, sync, er, cb);
else {
// Check if we're actually ready to finish, but don't emit yet
var finished = needFinish(stream, state);
if (!finished && !state.bufferProcessing && state.buffer.length)
clearBuffer(stream, state);
if (sync) {
process.nextTick(function() {
afterWrite(stream, state, finished, cb);
});
} else {
afterWrite(stream, state, finished, cb);
}
}
}
function afterWrite(stream, state, finished, cb) {
if (!finished)
onwriteDrain(stream, state);
cb();
if (finished)
finishMaybe(stream, state);
}
// Must force callback to be called on nextTick, so that we don't
// emit 'drain' before the write() consumer gets the 'false' return
// value, and has a chance to attach a 'drain' listener.
function onwriteDrain(stream, state) {
if (state.length === 0 && state.needDrain) {
state.needDrain = false;
stream.emit('drain');
}
}
// if there's something in the buffer waiting, then process it
function clearBuffer(stream, state) {
state.bufferProcessing = true;
for (var c = 0; c < state.buffer.length; c++) {
var entry = state.buffer[c];
var chunk = entry.chunk;
var encoding = entry.encoding;
var cb = entry.callback;
var len = state.objectMode ? 1 : chunk.length;
doWrite(stream, state, len, chunk, encoding, cb);
// if we didn't call the onwrite immediately, then
// it means that we need to wait until it does.
// also, that means that the chunk and cb are currently
// being processed, so move the buffer counter past them.
if (state.writing) {
c++;
break;
}
}
state.bufferProcessing = false;
if (c < state.buffer.length)
state.buffer = state.buffer.slice(c);
else
state.buffer.length = 0;
}
Writable.prototype._write = function(chunk, encoding, cb) {
cb(new Error('not implemented'));
};
Writable.prototype.end = function(chunk, encoding, cb) {
var state = this._writableState;
if (typeof chunk === 'function') {
cb = chunk;
chunk = null;
encoding = null;
} else if (typeof encoding === 'function') {
cb = encoding;
encoding = null;
}
if (typeof chunk !== 'undefined' && chunk !== null)
this.write(chunk, encoding);
// ignore unnecessary end() calls.
if (!state.ending && !state.finished)
endWritable(this, state, cb);
};
function needFinish(stream, state) {
return (state.ending &&
state.length === 0 &&
!state.finished &&
!state.writing);
}
function finishMaybe(stream, state) {
var need = needFinish(stream, state);
if (need) {
state.finished = true;
stream.emit('finish');
}
return need;
}
function endWritable(stream, state, cb) {
state.ending = true;
finishMaybe(stream, state);
if (cb) {
if (state.finished)
process.nextTick(cb);
else
stream.once('finish', cb);
}
state.ended = true;
}

View File

@ -0,0 +1,65 @@
{
"_from": "readable-stream@>=1.0.33-1 <1.1.0-0",
"_id": "readable-stream@1.0.34",
"_inBundle": false,
"_integrity": "sha1-Elgg40vIQtLyqq+v5MKRbuMsFXw=",
"_location": "/guppy-cli/glob-stream/readable-stream",
"_phantomChildren": {},
"_requested": {
"type": "range",
"registry": true,
"raw": "readable-stream@>=1.0.33-1 <1.1.0-0",
"name": "readable-stream",
"escapedName": "readable-stream",
"rawSpec": ">=1.0.33-1 <1.1.0-0",
"saveSpec": null,
"fetchSpec": ">=1.0.33-1 <1.1.0-0"
},
"_requiredBy": [
"/guppy-cli/glob-stream/through2"
],
"_resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-1.0.34.tgz",
"_shasum": "125820e34bc842d2f2aaafafe4c2916ee32c157c",
"_spec": "readable-stream@>=1.0.33-1 <1.1.0-0",
"_where": "/home/manfred/enviPath/ketcher2/ketcher/node_modules/guppy-cli/node_modules/glob-stream/node_modules/through2",
"author": {
"name": "Isaac Z. Schlueter",
"email": "i@izs.me",
"url": "http://blog.izs.me/"
},
"browser": {
"util": false
},
"bugs": {
"url": "https://github.com/isaacs/readable-stream/issues"
},
"bundleDependencies": false,
"dependencies": {
"core-util-is": "~1.0.0",
"inherits": "~2.0.1",
"isarray": "0.0.1",
"string_decoder": "~0.10.x"
},
"deprecated": false,
"description": "Streams2, a user-land copy of the stream library from Node.js v0.10.x",
"devDependencies": {
"tap": "~0.2.6"
},
"homepage": "https://github.com/isaacs/readable-stream#readme",
"keywords": [
"readable",
"stream",
"pipe"
],
"license": "MIT",
"main": "readable.js",
"name": "readable-stream",
"repository": {
"type": "git",
"url": "git://github.com/isaacs/readable-stream.git"
},
"scripts": {
"test": "tap test/simple/*.js"
},
"version": "1.0.34"
}

View File

@ -0,0 +1 @@
module.exports = require("./lib/_stream_passthrough.js")

View File

@ -0,0 +1,11 @@
var Stream = require('stream'); // hack to fix a circular dependency issue when used with browserify
exports = module.exports = require('./lib/_stream_readable.js');
exports.Stream = Stream;
exports.Readable = exports;
exports.Writable = require('./lib/_stream_writable.js');
exports.Duplex = require('./lib/_stream_duplex.js');
exports.Transform = require('./lib/_stream_transform.js');
exports.PassThrough = require('./lib/_stream_passthrough.js');
if (!process.browser && process.env.READABLE_STREAM === 'disable') {
module.exports = require('stream');
}

View File

@ -0,0 +1 @@
module.exports = require("./lib/_stream_transform.js")

View File

@ -0,0 +1 @@
module.exports = require("./lib/_stream_writable.js")

View File

@ -0,0 +1,39 @@
Copyright 2013, Rod Vagg (the "Original Author")
All rights reserved.
MIT +no-false-attribs License
Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
files (the "Software"), to deal in the Software without
restriction, including without limitation the rights to use,
copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
Distributions of all or part of the Software intended to be used
by the recipients as they would use the unmodified Software,
containing modifications that substantially alter, remove, or
disable functionality of the Software, outside of the documented
configuration mechanisms provided by the Software, shall be
modified such that the Original Author's bug reporting email
addresses and urls are either replaced with the contact information
of the parties responsible for the changes, or removed entirely.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
Except where noted, this license applies to any and all software
programs and associated documentation files created by the
Original Author, when distributed with the Software.

View File

@ -0,0 +1,132 @@
# through2
[![NPM](https://nodei.co/npm/through2.png?downloads&downloadRank)](https://nodei.co/npm/through2/)
**A tiny wrapper around Node streams.Transform (Streams2) to avoid explicit subclassing noise**
Inspired by [Dominic Tarr](https://github.com/dominictarr)'s [through](https://github.com/dominictarr/through) in that it's so much easier to make a stream out of a function than it is to set up the prototype chain properly: `through(function (chunk) { ... })`.
Note: A **Streams3** version of through2 is available in npm with the tag `"1.0"` rather than `"latest"`, so an `npm install through2` will get you the current Streams2 version (version number is 0.x.x). To use a Streams3 version, use `npm install through2@1` to fetch the latest 1.x.x release. For more information about Streams2 vs Streams3 and recommendations, see the article **[Why I don't use Node's core 'stream' module](http://r.va.gg/2014/06/why-i-dont-use-nodes-core-stream-module.html)**.
```js
fs.createReadStream('ex.txt')
.pipe(through2(function (chunk, enc, callback) {
for (var i = 0; i < chunk.length; i++)
if (chunk[i] == 97)
chunk[i] = 122 // swap 'a' for 'z'
this.push(chunk)
callback()
}))
.pipe(fs.createWriteStream('out.txt'))
```
Or object streams:
```js
var all = []
fs.createReadStream('data.csv')
.pipe(csv2())
.pipe(through2.obj(function (chunk, enc, callback) {
var data = {
name : chunk[0]
, address : chunk[3]
, phone : chunk[10]
}
this.push(data)
callback()
}))
.on('data', function (data) {
all.push(data)
})
.on('end', function () {
doSomethingSpecial(all)
})
```
Note that `through2.obj(fn)` is a convenience wrapper around `through2({ objectMode: true }, fn)`.
## API
<b><code>through2([ options, ] [ transformFunction ] [, flushFunction ])</code></b>
Consult the **[stream.Transform](http://nodejs.org/docs/latest/api/stream.html#stream_class_stream_transform)** documentation for the exact rules of the `transformFunction` (i.e. `this._transform`) and the optional `flushFunction` (i.e. `this._flush`).
### options
The options argument is optional and is passed straight through to `stream.Transform`. So you can use `objectMode:true` if you are processing non-binary streams (or just use `through2.obj()`).
The `options` argument is first, unlike standard convention, because if I'm passing in an anonymous function then I'd prefer for the options argument to not get lost at the end of the call:
```js
fs.createReadStream('/tmp/important.dat')
.pipe(through2({ objectMode: true, allowHalfOpen: false },
function (chunk, enc, cb) {
cb(null, 'wut?') // note we can use the second argument on the callback
// to provide data as an alternative to this.push('wut?')
}
))
.pipe(fs.createWriteStream('/tmp/wut.txt'))
```
### transformFunction
The `transformFunction` must have the following signature: `function (chunk, encoding, callback) {}`. A minimal implementation should call the `callback` function to indicate that the transformation is done, even if that transformation means discarding the chunk.
To queue a new chunk, call `this.push(chunk)`&mdash;this can be called as many times as required before the `callback()` if you have multiple pieces to send on.
Alternatively, you may use `callback(err, chunk)` as shorthand for emitting a single chunk or an error.
If you **do not provide a `transformFunction`** then you will get a simple pass-through stream.
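As a rough sketch of the two styles described above (the `callback(err, chunk)` shorthand and the bare pass-through), assuming a local file `in.txt` exists:
```js
var fs = require('fs')
var through2 = require('through2')

// shorthand: hand the transformed chunk back via the callback
var upper = through2(function (chunk, enc, callback) {
  callback(null, chunk.toString().toUpperCase())
})

// no transformFunction at all: a simple pass-through stream
var passthrough = through2()

fs.createReadStream('in.txt')
  .pipe(upper)
  .pipe(passthrough)
  .pipe(process.stdout)
```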
### flushFunction
The optional `flushFunction`, provided as the last argument (2nd or 3rd, depending on whether you've supplied options), is called just prior to the stream ending. It can be used to finish up any processing that may be in progress.
```js
fs.createReadStream('/tmp/important.dat')
.pipe(through2(
function (chunk, enc, cb) { cb(null, chunk) }, // transform is a noop
function (cb) { // flush function
this.push('tacking on an extra buffer to the end');
cb();
}
))
.pipe(fs.createWriteStream('/tmp/wut.txt'));
```
<b><code>through2.ctor([ options, ] transformFunction[, flushFunction ])</code></b>
Instead of returning a `stream.Transform` instance, `through2.ctor()` returns a **constructor** for a custom Transform. This is useful when you want to use the same transform logic in multiple instances.
```js
var FToC = through2.ctor({objectMode: true}, function (record, encoding, callback) {
if (record.temp != null && record.unit == "F") {
record.temp = ( ( record.temp - 32 ) * 5 ) / 9
record.unit = "C"
}
this.push(record)
callback()
})
// Create instances of FToC like so:
var converter = new FToC()
// Or:
var converter = FToC()
// Or specify/override options when you instantiate, if you prefer:
var converter = FToC({objectMode: true})
```
## See Also
- [through2-map](https://github.com/brycebaril/through2-map) - Array.prototype.map analog for streams.
- [through2-filter](https://github.com/brycebaril/through2-filter) - Array.prototype.filter analog for streams.
- [through2-reduce](https://github.com/brycebaril/through2-reduce) - Array.prototype.reduce analog for streams.
- [through2-spy](https://github.com/brycebaril/through2-spy) - Wrapper for simple stream.PassThrough spies.
## License
**through2** is Copyright (c) 2013 Rod Vagg [@rvagg](https://twitter.com/rvagg) and licenced under the MIT licence. All rights not explicitly granted in the MIT license are reserved. See the included LICENSE file for more details.

View File

@ -0,0 +1,64 @@
{
"_from": "through2@^0.6.0",
"_id": "through2@0.6.5",
"_inBundle": false,
"_integrity": "sha1-QaucZ7KdVyCQcUEOHXp6lozTrUg=",
"_location": "/guppy-cli/glob-stream/through2",
"_phantomChildren": {},
"_requested": {
"type": "range",
"registry": true,
"raw": "through2@^0.6.0",
"name": "through2",
"escapedName": "through2",
"rawSpec": "^0.6.0",
"saveSpec": null,
"fetchSpec": "^0.6.0"
},
"_requiredBy": [
"/guppy-cli/glob-stream"
],
"_resolved": "https://registry.npmjs.org/through2/-/through2-0.6.5.tgz",
"_shasum": "41ab9c67b29d57209071410e1d7a7a968cd3ad48",
"_spec": "through2@^0.6.0",
"_where": "/home/manfred/enviPath/ketcher2/ketcher/node_modules/guppy-cli/node_modules/glob-stream",
"author": {
"name": "Rod Vagg",
"email": "r@va.gg",
"url": "https://github.com/rvagg"
},
"bugs": {
"url": "https://github.com/rvagg/through2/issues"
},
"bundleDependencies": false,
"dependencies": {
"readable-stream": ">=1.0.33-1 <1.1.0-0",
"xtend": ">=4.0.0 <4.1.0-0"
},
"deprecated": false,
"description": "A tiny wrapper around Node streams2 Transform to avoid explicit subclassing noise",
"devDependencies": {
"bl": ">=0.9.0 <0.10.0-0",
"stream-spigot": ">=3.0.4 <3.1.0-0",
"tape": ">=2.14.0 <2.15.0-0"
},
"homepage": "https://github.com/rvagg/through2#readme",
"keywords": [
"stream",
"streams2",
"through",
"transform"
],
"license": "MIT",
"main": "through2.js",
"name": "through2",
"repository": {
"type": "git",
"url": "git+https://github.com/rvagg/through2.git"
},
"scripts": {
"test": "node test/test.js",
"test-local": "brtapsauce-local test/basic-test.js"
},
"version": "0.6.5"
}

View File

@ -0,0 +1,96 @@
var Transform = require('readable-stream/transform')
, inherits = require('util').inherits
, xtend = require('xtend')
function DestroyableTransform(opts) {
Transform.call(this, opts)
this._destroyed = false
}
inherits(DestroyableTransform, Transform)
DestroyableTransform.prototype.destroy = function(err) {
if (this._destroyed) return
this._destroyed = true
var self = this
process.nextTick(function() {
if (err)
self.emit('error', err)
self.emit('close')
})
}
// a noop _transform function
function noop (chunk, enc, callback) {
callback(null, chunk)
}
// create a new export function, used by both the main export and
// the .ctor export, contains common logic for dealing with arguments
function through2 (construct) {
return function (options, transform, flush) {
if (typeof options == 'function') {
flush = transform
transform = options
options = {}
}
if (typeof transform != 'function')
transform = noop
if (typeof flush != 'function')
flush = null
return construct(options, transform, flush)
}
}
// main export, just make me a transform stream!
module.exports = through2(function (options, transform, flush) {
var t2 = new DestroyableTransform(options)
t2._transform = transform
if (flush)
t2._flush = flush
return t2
})
// make me a reusable prototype that I can `new`, or implicitly `new`
// with a constructor call
module.exports.ctor = through2(function (options, transform, flush) {
function Through2 (override) {
if (!(this instanceof Through2))
return new Through2(override)
this.options = xtend(options, override)
DestroyableTransform.call(this, this.options)
}
inherits(Through2, DestroyableTransform)
Through2.prototype._transform = transform
if (flush)
Through2.prototype._flush = flush
return Through2
})
module.exports.obj = through2(function (options, transform, flush) {
var t2 = new DestroyableTransform(xtend({ objectMode: true, highWaterMark: 16 }, options))
t2._transform = transform
if (flush)
t2._flush = flush
return t2
})

View File

@ -0,0 +1,92 @@
{
"_from": "glob-stream@^5.3.2",
"_id": "glob-stream@5.3.5",
"_inBundle": false,
"_integrity": "sha1-pVZlqajM3EGRWofHAeMtTgFvrSI=",
"_location": "/guppy-cli/glob-stream",
"_phantomChildren": {
"core-util-is": "1.0.2",
"inherits": "2.0.3",
"isarray": "0.0.1",
"string_decoder": "0.10.31",
"xtend": "4.0.1"
},
"_requested": {
"type": "range",
"registry": true,
"raw": "glob-stream@^5.3.2",
"name": "glob-stream",
"escapedName": "glob-stream",
"rawSpec": "^5.3.2",
"saveSpec": null,
"fetchSpec": "^5.3.2"
},
"_requiredBy": [
"/guppy-cli/vinyl-fs"
],
"_resolved": "https://registry.npmjs.org/glob-stream/-/glob-stream-5.3.5.tgz",
"_shasum": "a55665a9a8ccdc41915a87c701e32d4e016fad22",
"_spec": "glob-stream@^5.3.2",
"_where": "/home/manfred/enviPath/ketcher2/ketcher/node_modules/guppy-cli/node_modules/vinyl-fs",
"author": {
"name": "Gulp Team",
"email": "team@gulpjs.com",
"url": "http://gulpjs.com/"
},
"bugs": {
"url": "https://github.com/gulpjs/glob-stream/issues"
},
"bundleDependencies": false,
"contributors": [],
"dependencies": {
"extend": "^3.0.0",
"glob": "^5.0.3",
"glob-parent": "^3.0.0",
"micromatch": "^2.3.7",
"ordered-read-streams": "^0.3.0",
"through2": "^0.6.0",
"to-absolute-glob": "^0.1.1",
"unique-stream": "^2.0.2"
},
"deprecated": false,
"description": "A wrapper around node-glob to make it streamy",
"devDependencies": {
"coveralls": "^2.11.2",
"eslint": "^1.7.3",
"eslint-config-gulp": "^2.0.0",
"istanbul": "^0.3.0",
"istanbul-coveralls": "^1.0.1",
"jscs": "^2.3.5",
"jscs-preset-gulp": "^1.0.0",
"mocha": "^2.0.0",
"mocha-lcov-reporter": "^0.0.2",
"rimraf": "^2.2.5",
"should": "^7.1.0",
"stream-sink": "^1.2.0"
},
"engines": {
"node": ">= 0.10"
},
"files": [
"index.js"
],
"homepage": "http://gulpjs.com",
"keywords": [
"glob",
"stream"
],
"license": "MIT",
"main": "index.js",
"name": "glob-stream",
"repository": {
"type": "git",
"url": "git+https://github.com/gulpjs/glob-stream.git"
},
"scripts": {
"coveralls": "istanbul cover _mocha --report lcovonly && istanbul-coveralls",
"lint": "eslint . && jscs . test/",
"pretest": "npm run lint",
"test": "mocha"
},
"version": "5.3.5"
}

View File

@ -0,0 +1,15 @@
The ISC License
Copyright (c) Isaac Z. Schlueter and Contributors
Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.

View File

@ -0,0 +1,377 @@
[![Build Status](https://travis-ci.org/isaacs/node-glob.svg?branch=master)](https://travis-ci.org/isaacs/node-glob/) [![Dependency Status](https://david-dm.org/isaacs/node-glob.svg)](https://david-dm.org/isaacs/node-glob) [![devDependency Status](https://david-dm.org/isaacs/node-glob/dev-status.svg)](https://david-dm.org/isaacs/node-glob#info=devDependencies) [![optionalDependency Status](https://david-dm.org/isaacs/node-glob/optional-status.svg)](https://david-dm.org/isaacs/node-glob#info=optionalDependencies)
# Glob
Match files using the patterns the shell uses, like stars and stuff.
This is a glob implementation in JavaScript. It uses the `minimatch`
library to do its matching.
![](oh-my-glob.gif)
## Usage
```javascript
var glob = require("glob")
// options is optional
glob("**/*.js", options, function (er, files) {
// files is an array of filenames.
// If the `nonull` option is set, and nothing
// was found, then files is ["**/*.js"]
// er is an error object or null.
})
```
## Glob Primer
"Globs" are the patterns you type when you do stuff like `ls *.js` on
the command line, or put `build/*` in a `.gitignore` file.
Before parsing the path part patterns, braced sections are expanded
into a set. Braced sections start with `{` and end with `}`, with any
number of comma-delimited sections within. Braced sections may contain
slash characters, so `a{/b/c,bcd}` would expand into `a/b/c` and `abcd`.
The following characters have special magic meaning when used in a
path portion:
* `*` Matches 0 or more characters in a single path portion
* `?` Matches 1 character
* `[...]` Matches a range of characters, similar to a RegExp range.
If the first character of the range is `!` or `^` then it matches
any character not in the range.
* `!(pattern|pattern|pattern)` Matches anything that does not match
any of the patterns provided.
* `?(pattern|pattern|pattern)` Matches zero or one occurrence of the
patterns provided.
* `+(pattern|pattern|pattern)` Matches one or more occurrences of the
patterns provided.
* `*(a|b|c)` Matches zero or more occurrences of the patterns provided
* `@(pattern|pat*|pat?erN)` Matches exactly one of the patterns
provided
* `**` If a "globstar" is alone in a path portion, then it matches
zero or more directories and subdirectories searching for matches.
It does not crawl symlinked directories.
### Dots
If a file or directory path portion has a `.` as the first character,
then it will not match any glob pattern unless that pattern's
corresponding path part also has a `.` as its first character.
For example, the pattern `a/.*/c` would match the file at `a/.b/c`.
However the pattern `a/*/c` would not, because `*` does not start with
a dot character.
You can make glob treat dots as normal characters by setting
`dot:true` in the options.
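A quick sketch of the dot rule, assuming a (hypothetical) file at `a/.b/c`:
```javascript
var glob = require("glob")

glob.sync("a/.*/c")               // matches a/.b/c: the pattern part starts with a dot
glob.sync("a/*/c")                // does not match a/.b/c by default
glob.sync("a/*/c", { dot: true }) // matches once dots are treated as normal characters
```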
### Basename Matching
If you set `matchBase:true` in the options, and the pattern has no
slashes in it, then it will seek for any file anywhere in the tree
with a matching basename. For example, `*.js` would match
`test/simple/basic.js`.
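A minimal sketch (the file layout is hypothetical):
```javascript
var glob = require("glob")

// with matchBase, a slash-less pattern matches basenames anywhere in the
// tree, so this can return entries such as "test/simple/basic.js"
var jsFiles = glob.sync("*.js", { matchBase: true })
```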
### Negation
The intent for negation would be for a pattern starting with `!` to
match everything that *doesn't* match the supplied pattern. However,
the implementation is weird, and for the time being, this should be
avoided. The behavior is deprecated in version 5, and will be removed
entirely in version 6.
### Empty Sets
If no matching files are found, then an empty array is returned. This
differs from the shell, where the pattern itself is returned. For
example:
$ echo a*s*d*f
a*s*d*f
To get the bash-style behavior, set the `nonull:true` in the options.
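A small sketch of the difference:
```javascript
var glob = require("glob")

glob.sync("a*s*d*f")                   // [] when nothing matches
glob.sync("a*s*d*f", { nonull: true }) // [ "a*s*d*f" ], the bash-style behavior
```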
### See Also:
* `man sh`
* `man bash` (Search for "Pattern Matching")
* `man 3 fnmatch`
* `man 5 gitignore`
* [minimatch documentation](https://github.com/isaacs/minimatch)
## glob.hasMagic(pattern, [options])
Returns `true` if there are any special characters in the pattern, and
`false` otherwise.
Note that the options affect the results. If `noext:true` is set in
the options object, then `+(a|b)` will not be considered a magic
pattern. If the pattern has a brace expansion, like `a/{b/c,x/y}`
then that is considered magical, unless `nobrace:true` is set in the
options.
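A brief sketch, using the `noext` behavior described above:
```javascript
var glob = require("glob")

glob.hasMagic("a/b/c")                   // false, plain path
glob.hasMagic("a/*/c")                   // true
glob.hasMagic("+(a|b)")                  // true
glob.hasMagic("+(a|b)", { noext: true }) // false, extglobs are not magic here
```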
## glob(pattern, [options], cb)
* `pattern` {String} Pattern to be matched
* `options` {Object}
* `cb` {Function}
* `err` {Error | null}
* `matches` {Array<String>} filenames found matching the pattern
Perform an asynchronous glob search.
## glob.sync(pattern, [options])
* `pattern` {String} Pattern to be matched
* `options` {Object}
* return: {Array<String>} filenames found matching the pattern
Perform a synchronous glob search.
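A minimal sketch of the synchronous form:
```javascript
var glob = require("glob")

// returns an array of matching filenames directly, no callback involved
var files = glob.sync("**/*.js")
console.log(files)
```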
## Class: glob.Glob
Create a Glob object by instantiating the `glob.Glob` class.
```javascript
var Glob = require("glob").Glob
var mg = new Glob(pattern, options, cb)
```
It's an EventEmitter, and starts walking the filesystem to find matches
immediately.
### new glob.Glob(pattern, [options], [cb])
* `pattern` {String} pattern to search for
* `options` {Object}
* `cb` {Function} Called when an error occurs, or matches are found
* `err` {Error | null}
* `matches` {Array<String>} filenames found matching the pattern
Note that if the `sync` flag is set in the options, then matches will
be immediately available on the `g.found` member.
### Properties
* `minimatch` The minimatch object that the glob uses.
* `options` The options object passed in.
* `aborted` Boolean which is set to true when calling `abort()`. There
is no way at this time to continue a glob search after aborting, but
you can re-use the statCache to avoid having to duplicate syscalls.
* `cache` Convenience object. Each field has the following possible
values:
* `false` - Path does not exist
* `true` - Path exists
* `'DIR'` - Path exists, and is a directory
* `'FILE'` - Path exists, and is not a directory
* `[file, entries, ...]` - Path exists, is a directory, and the
array value is the results of `fs.readdir`
* `statCache` Cache of `fs.stat` results, to prevent statting the same
path multiple times.
* `symlinks` A record of which paths are symbolic links, which is
relevant in resolving `**` patterns.
* `realpathCache` An optional object which is passed to `fs.realpath`
to minimize unnecessary syscalls. It is stored on the instantiated
Glob object, and may be re-used.
### Events
* `end` When the matching is finished, this is emitted with all the
matches found. If the `nonull` option is set, and no match was found,
then the `matches` list contains the original pattern. The matches
are sorted, unless the `nosort` flag is set.
* `match` Every time a match is found, this is emitted with the matched filename (see the sketch after the Methods list below).
* `error` Emitted when an unexpected error is encountered, or whenever
any fs error occurs if `options.strict` is set.
* `abort` When `abort()` is called, this event is raised.
### Methods
* `pause` Temporarily stop the search
* `resume` Resume the search
* `abort` Stop the search forever
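A rough sketch tying the events and methods together (the pattern is arbitrary):
```javascript
var Glob = require("glob").Glob

var mg = new Glob("**/*.js")

mg.on("match", function (file) {
  console.log("matched:", file)
})

mg.on("error", function (er) {
  console.error("glob error:", er)
})

mg.on("end", function (matches) {
  console.log("done, found", matches.length, "files")
})

// mg.pause(), mg.resume() and mg.abort() are available on the same object
```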
### Options
All the options that can be passed to Minimatch can also be passed to
Glob to change pattern matching behavior. Also, some have been added,
or have glob-specific ramifications.
All options are false by default, unless otherwise noted.
All options are added to the Glob object, as well.
If you are running many `glob` operations, you can pass a Glob object
as the `options` argument to a subsequent operation to shortcut some
`stat` and `readdir` calls. At the very least, you may pass in shared
`symlinks`, `statCache`, `realpathCache`, and `cache` options, so that
parallel glob operations will be sped up by sharing information about
the filesystem.
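A rough sketch of reusing one search's caches for the next, as described above (the patterns here are made up):
```javascript
var glob = require("glob")

var g = new glob.Glob("lib/**/*.js", function (er, jsFiles) {
  if (er) throw er

  // reuse the filesystem information gathered by the first search
  glob("lib/**/*.json", {
    cache: g.cache,
    statCache: g.statCache,
    symlinks: g.symlinks,
    realpathCache: g.realpathCache
  }, function (er, jsonFiles) {
    if (er) throw er
    console.log(jsFiles.length, jsonFiles.length)
  })
})
```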
* `cwd` The current working directory in which to search. Defaults
to `process.cwd()`.
* `root` The place where patterns starting with `/` will be mounted
onto. Defaults to `path.resolve(options.cwd, "/")` (`/` on Unix
systems, and `C:\` or some such on Windows.)
* `dot` Include `.dot` files in normal matches and `globstar` matches.
Note that an explicit dot in a portion of the pattern will always
match dot files.
* `nomount` By default, a pattern starting with a forward-slash will be
"mounted" onto the root setting, so that a valid filesystem path is
returned. Set this flag to disable that behavior.
* `mark` Add a `/` character to directory matches. Note that this
requires additional stat calls.
* `nosort` Don't sort the results.
* `stat` Set to true to stat *all* results. This reduces performance
somewhat, and is completely unnecessary, unless `readdir` is presumed
to be an untrustworthy indicator of file existence.
* `silent` When an unusual error is encountered when attempting to
read a directory, a warning will be printed to stderr. Set the
`silent` option to true to suppress these warnings.
* `strict` When an unusual error is encountered when attempting to
read a directory, the process will just continue on in search of
other matches. Set the `strict` option to raise an error in these
cases.
* `cache` See `cache` property above. Pass in a previously generated
cache object to save some fs calls.
* `statCache` A cache of results of filesystem information, to prevent
unnecessary stat calls. While it should not normally be necessary
to set this, you may pass the statCache from one glob() call to the
options object of another, if you know that the filesystem will not
change between calls. (See "Race Conditions" below.)
* `symlinks` A cache of known symbolic links. You may pass in a
previously generated `symlinks` object to save `lstat` calls when
resolving `**` matches.
* `sync` DEPRECATED: use `glob.sync(pattern, opts)` instead.
* `nounique` In some cases, brace-expanded patterns can result in the
same file showing up multiple times in the result set. By default,
this implementation prevents duplicates in the result set. Set this
flag to disable that behavior.
* `nonull` Set to never return an empty set, instead returning a set
containing the pattern itself. This is the default in glob(3).
* `debug` Set to enable debug logging in minimatch and glob.
* `nobrace` Do not expand `{a,b}` and `{1..3}` brace sets.
* `noglobstar` Do not match `**` against multiple filenames. (I.e.,
treat it as a normal `*` instead.)
* `noext` Do not match `+(a|b)` "extglob" patterns.
* `nocase` Perform a case-insensitive match. Note: on
case-insensitive filesystems, non-magic patterns will match by
default, since `stat` and `readdir` will not raise errors.
* `matchBase` Perform a basename-only match if the pattern does not
contain any slash characters. That is, `*.js` would be treated as
equivalent to `**/*.js`, matching all js files in all directories.
* `nodir` Do not match directories, only files. (Note: to match
*only* directories, simply put a `/` at the end of the pattern.)
* `ignore` Add a pattern or an array of patterns to exclude matches.
* `follow` Follow symlinked directories when expanding `**` patterns.
Note that this can result in a lot of duplicate references in the
presence of cyclic links.
* `realpath` Set to true to call `fs.realpath` on all of the results.
In the case of a symlink that cannot be resolved, the full absolute
path to the matched entry is returned (though it will usually be a
broken symlink)
* `nonegate` Suppress deprecated `negate` behavior. (See below.)
Default=true
* `nocomment` Suppress deprecated `comment` behavior. (See below.)
Default=true
## Comparisons to other fnmatch/glob implementations
While strict compliance with the existing standards is a worthwhile
goal, some discrepancies exist between node-glob and other
implementations, and are intentional.
The double-star character `**` is supported by default, unless the
`noglobstar` flag is set. This is supported in the manner of bsdglob
and bash 4.3, where `**` only has special significance if it is the only
thing in a path part. That is, `a/**/b` will match `a/x/y/b`, but
`a/**b` will not.
Note that symlinked directories are not crawled as part of a `**`,
though their contents may match against subsequent portions of the
pattern. This prevents infinite loops and duplicates and the like.
If an escaped pattern has no matches, and the `nonull` flag is set,
then glob returns the pattern as-provided, rather than
interpreting the character escapes. For example,
`glob.match([], "\\*a\\?")` will return `"\\*a\\?"` rather than
`"*a?"`. This is akin to setting the `nullglob` option in bash, except
that it does not resolve escaped pattern characters.
If brace expansion is not disabled, then it is performed before any
other interpretation of the glob pattern. Thus, a pattern like
`+(a|{b),c)}`, which would not be valid in bash or zsh, is expanded
**first** into the set of `+(a|b)` and `+(a|c)`, and those patterns are
checked for validity. Since those two are valid, matching proceeds.
### Comments and Negation
**Note**: In version 5 of this module, negation and comments are
**disabled** by default. You can explicitly set `nonegate:false` or
`nocomment:false` to re-enable them. They are going away entirely in
version 6.
The intent for negation would be for a pattern starting with `!` to
match everything that *doesn't* match the supplied pattern. However,
the implementation is weird. It is better to use the `ignore` option
to set a pattern or set of patterns to exclude from matches. If you
want the "everything except *x*" type of behavior, you can use `**` as
the main pattern, and set an `ignore` for the things to exclude.
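For example, an "everything except the build output" search might look like this (a sketch; the excluded directory names are assumptions):
```javascript
var glob = require('glob');

// Match everything, then carve out the exclusions with `ignore`
// instead of relying on the deprecated `!` negation.
glob('**/*.js', { ignore: ['dist/**', 'node_modules/**'] }, function (er, files) {
  if (er) throw er;
  console.log(files);
});
```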
The comments feature is added in minimatch, primarily to more easily
support use cases like ignore files, where a `#` at the start of a
line makes the pattern "empty". However, in the context of a
straightforward filesystem globber, "comments" don't make much sense.
## Windows
**Please only use forward-slashes in glob expressions.**
Though windows uses either `/` or `\` as its path separator, only `/`
characters are used by this glob implementation. You must use
forward-slashes **only** in glob expressions. Back-slashes will always
be interpreted as escape characters, not path separators.
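If a pattern is assembled from native Windows paths, normalize the separators before globbing (a small sketch, not part of glob itself):
```javascript
var glob = require('glob');
var path = require('path');

// path.join produces back-slashes on Windows, so convert them
// before using the result as a glob pattern.
var pattern = path.join(process.cwd(), 'src', '**', '*.js').replace(/\\/g, '/');

glob(pattern, function (er, files) {
  if (er) throw er;
  console.log(files);
});
```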
Results from absolute patterns such as `/foo/*` are mounted onto the
root setting using `path.join`. On windows, this will by default result
in `/foo/*` matching `C:\foo\bar.txt`.
## Race Conditions
Glob searching, by its very nature, is susceptible to race conditions,
since it relies on directory walking and such.
As a result, it is possible that a file that exists when glob looks for
it may have been deleted or modified by the time it returns the result.
As part of its internal implementation, this program caches all stat
and readdir calls that it makes, in order to cut down on system
overhead. However, this also makes it even more susceptible to races,
especially if the cache or statCache objects are reused between glob
calls.
Users are thus advised not to use a glob result as a guarantee of
filesystem state in the face of rapid changes. For the vast majority
of operations, this is never a problem.
## Contributing
Any change to behavior (including bugfixes) must come with a test.
Patches that fail tests or reduce performance will be rejected.
```
# to run tests
npm test
# to re-generate test fixtures
npm run test-regen
# to benchmark against bash/zsh
npm run bench
# to profile javascript
npm run prof
```

View File

@ -0,0 +1,245 @@
exports.alphasort = alphasort
exports.alphasorti = alphasorti
exports.setopts = setopts
exports.ownProp = ownProp
exports.makeAbs = makeAbs
exports.finish = finish
exports.mark = mark
exports.isIgnored = isIgnored
exports.childrenIgnored = childrenIgnored
function ownProp (obj, field) {
return Object.prototype.hasOwnProperty.call(obj, field)
}
var path = require("path")
var minimatch = require("minimatch")
var isAbsolute = require("path-is-absolute")
var Minimatch = minimatch.Minimatch
function alphasorti (a, b) {
return a.toLowerCase().localeCompare(b.toLowerCase())
}
function alphasort (a, b) {
return a.localeCompare(b)
}
function setupIgnores (self, options) {
self.ignore = options.ignore || []
if (!Array.isArray(self.ignore))
self.ignore = [self.ignore]
if (self.ignore.length) {
self.ignore = self.ignore.map(ignoreMap)
}
}
function ignoreMap (pattern) {
var gmatcher = null
if (pattern.slice(-3) === '/**') {
var gpattern = pattern.replace(/(\/\*\*)+$/, '')
gmatcher = new Minimatch(gpattern)
}
return {
matcher: new Minimatch(pattern),
gmatcher: gmatcher
}
}
function setopts (self, pattern, options) {
if (!options)
options = {}
// base-matching: just use globstar for that.
if (options.matchBase && -1 === pattern.indexOf("/")) {
if (options.noglobstar) {
throw new Error("base matching requires globstar")
}
pattern = "**/" + pattern
}
self.silent = !!options.silent
self.pattern = pattern
self.strict = options.strict !== false
self.realpath = !!options.realpath
self.realpathCache = options.realpathCache || Object.create(null)
self.follow = !!options.follow
self.dot = !!options.dot
self.mark = !!options.mark
self.nodir = !!options.nodir
if (self.nodir)
self.mark = true
self.sync = !!options.sync
self.nounique = !!options.nounique
self.nonull = !!options.nonull
self.nosort = !!options.nosort
self.nocase = !!options.nocase
self.stat = !!options.stat
self.noprocess = !!options.noprocess
self.maxLength = options.maxLength || Infinity
self.cache = options.cache || Object.create(null)
self.statCache = options.statCache || Object.create(null)
self.symlinks = options.symlinks || Object.create(null)
setupIgnores(self, options)
self.changedCwd = false
var cwd = process.cwd()
if (!ownProp(options, "cwd"))
self.cwd = cwd
else {
self.cwd = options.cwd
self.changedCwd = path.resolve(options.cwd) !== cwd
}
self.root = options.root || path.resolve(self.cwd, "/")
self.root = path.resolve(self.root)
if (process.platform === "win32")
self.root = self.root.replace(/\\/g, "/")
self.nomount = !!options.nomount
// disable comments and negation unless the user explicitly
// passes in false as the option.
options.nonegate = options.nonegate === false ? false : true
options.nocomment = options.nocomment === false ? false : true
deprecationWarning(options)
self.minimatch = new Minimatch(pattern, options)
self.options = self.minimatch.options
}
// TODO(isaacs): remove entirely in v6
// exported to reset in tests
exports.deprecationWarned
function deprecationWarning(options) {
if (!options.nonegate || !options.nocomment) {
if (process.noDeprecation !== true && !exports.deprecationWarned) {
var msg = 'glob WARNING: comments and negation will be disabled in v6'
if (process.throwDeprecation)
throw new Error(msg)
else if (process.traceDeprecation)
console.trace(msg)
else
console.error(msg)
exports.deprecationWarned = true
}
}
}
function finish (self) {
var nou = self.nounique
var all = nou ? [] : Object.create(null)
for (var i = 0, l = self.matches.length; i < l; i ++) {
var matches = self.matches[i]
if (!matches || Object.keys(matches).length === 0) {
if (self.nonull) {
// do like the shell, and spit out the literal glob
var literal = self.minimatch.globSet[i]
if (nou)
all.push(literal)
else
all[literal] = true
}
} else {
// had matches
var m = Object.keys(matches)
if (nou)
all.push.apply(all, m)
else
m.forEach(function (m) {
all[m] = true
})
}
}
if (!nou)
all = Object.keys(all)
if (!self.nosort)
all = all.sort(self.nocase ? alphasorti : alphasort)
// at *some* point we statted all of these
if (self.mark) {
for (var i = 0; i < all.length; i++) {
all[i] = self._mark(all[i])
}
if (self.nodir) {
all = all.filter(function (e) {
return !(/\/$/.test(e))
})
}
}
if (self.ignore.length)
all = all.filter(function(m) {
return !isIgnored(self, m)
})
self.found = all
}
function mark (self, p) {
var abs = makeAbs(self, p)
var c = self.cache[abs]
var m = p
if (c) {
var isDir = c === 'DIR' || Array.isArray(c)
var slash = p.slice(-1) === '/'
if (isDir && !slash)
m += '/'
else if (!isDir && slash)
m = m.slice(0, -1)
if (m !== p) {
var mabs = makeAbs(self, m)
self.statCache[mabs] = self.statCache[abs]
self.cache[mabs] = self.cache[abs]
}
}
return m
}
// lotta situps...
function makeAbs (self, f) {
var abs = f
if (f.charAt(0) === '/') {
abs = path.join(self.root, f)
} else if (isAbsolute(f) || f === '') {
abs = f
} else if (self.changedCwd) {
abs = path.resolve(self.cwd, f)
} else {
abs = path.resolve(f)
}
return abs
}
// Return true, if pattern ends with globstar '**', for the accompanying parent directory.
// Ex:- If node_modules/** is the pattern, add 'node_modules' to ignore list along with it's contents
function isIgnored (self, path) {
if (!self.ignore.length)
return false
return self.ignore.some(function(item) {
return item.matcher.match(path) || !!(item.gmatcher && item.gmatcher.match(path))
})
}
function childrenIgnored (self, path) {
if (!self.ignore.length)
return false
return self.ignore.some(function(item) {
return !!(item.gmatcher && item.gmatcher.match(path))
})
}

View File

@ -0,0 +1,752 @@
// Approach:
//
// 1. Get the minimatch set
// 2. For each pattern in the set, PROCESS(pattern, false)
// 3. Store matches per-set, then uniq them
//
// PROCESS(pattern, inGlobStar)
// Get the first [n] items from pattern that are all strings
// Join these together. This is PREFIX.
// If there is no more remaining, then stat(PREFIX) and
// add to matches if it succeeds. END.
//
// If inGlobStar and PREFIX is symlink and points to dir
// set ENTRIES = []
// else readdir(PREFIX) as ENTRIES
// If fail, END
//
// with ENTRIES
// If pattern[n] is GLOBSTAR
// // handle the case where the globstar match is empty
// // by pruning it out, and testing the resulting pattern
// PROCESS(pattern[0..n] + pattern[n+1 .. $], false)
// // handle other cases.
// for ENTRY in ENTRIES (not dotfiles)
// // attach globstar + tail onto the entry
// // Mark that this entry is a globstar match
// PROCESS(pattern[0..n] + ENTRY + pattern[n .. $], true)
//
// else // not globstar
// for ENTRY in ENTRIES (not dotfiles, unless pattern[n] is dot)
// Test ENTRY against pattern[n]
// If fails, continue
// If passes, PROCESS(pattern[0..n] + item + pattern[n+1 .. $])
//
// Caveat:
// Cache all stats and readdirs results to minimize syscall. Since all
// we ever care about is existence and directory-ness, we can just keep
// `true` for files, and [children,...] for directories, or `false` for
// things that don't exist.
module.exports = glob
var fs = require('fs')
var minimatch = require('minimatch')
var Minimatch = minimatch.Minimatch
var inherits = require('inherits')
var EE = require('events').EventEmitter
var path = require('path')
var assert = require('assert')
var isAbsolute = require('path-is-absolute')
var globSync = require('./sync.js')
var common = require('./common.js')
var alphasort = common.alphasort
var alphasorti = common.alphasorti
var setopts = common.setopts
var ownProp = common.ownProp
var inflight = require('inflight')
var util = require('util')
var childrenIgnored = common.childrenIgnored
var isIgnored = common.isIgnored
var once = require('once')
function glob (pattern, options, cb) {
if (typeof options === 'function') cb = options, options = {}
if (!options) options = {}
if (options.sync) {
if (cb)
throw new TypeError('callback provided to sync glob')
return globSync(pattern, options)
}
return new Glob(pattern, options, cb)
}
glob.sync = globSync
var GlobSync = glob.GlobSync = globSync.GlobSync
// old api surface
glob.glob = glob
glob.hasMagic = function (pattern, options_) {
var options = util._extend({}, options_)
options.noprocess = true
var g = new Glob(pattern, options)
var set = g.minimatch.set
if (set.length > 1)
return true
for (var j = 0; j < set[0].length; j++) {
if (typeof set[0][j] !== 'string')
return true
}
return false
}
glob.Glob = Glob
inherits(Glob, EE)
function Glob (pattern, options, cb) {
if (typeof options === 'function') {
cb = options
options = null
}
if (options && options.sync) {
if (cb)
throw new TypeError('callback provided to sync glob')
return new GlobSync(pattern, options)
}
if (!(this instanceof Glob))
return new Glob(pattern, options, cb)
setopts(this, pattern, options)
this._didRealPath = false
// process each pattern in the minimatch set
var n = this.minimatch.set.length
// The matches are stored as {<filename>: true,...} so that
// duplicates are automagically pruned.
// Later, we do an Object.keys() on these.
// Keep them as a list so we can fill in when nonull is set.
this.matches = new Array(n)
if (typeof cb === 'function') {
cb = once(cb)
this.on('error', cb)
this.on('end', function (matches) {
cb(null, matches)
})
}
var self = this
var n = this.minimatch.set.length
this._processing = 0
this.matches = new Array(n)
this._emitQueue = []
this._processQueue = []
this.paused = false
if (this.noprocess)
return this
if (n === 0)
return done()
for (var i = 0; i < n; i ++) {
this._process(this.minimatch.set[i], i, false, done)
}
function done () {
--self._processing
if (self._processing <= 0)
self._finish()
}
}
Glob.prototype._finish = function () {
assert(this instanceof Glob)
if (this.aborted)
return
if (this.realpath && !this._didRealpath)
return this._realpath()
common.finish(this)
this.emit('end', this.found)
}
Glob.prototype._realpath = function () {
if (this._didRealpath)
return
this._didRealpath = true
var n = this.matches.length
if (n === 0)
return this._finish()
var self = this
for (var i = 0; i < this.matches.length; i++)
this._realpathSet(i, next)
function next () {
if (--n === 0)
self._finish()
}
}
Glob.prototype._realpathSet = function (index, cb) {
var matchset = this.matches[index]
if (!matchset)
return cb()
var found = Object.keys(matchset)
var self = this
var n = found.length
if (n === 0)
return cb()
var set = this.matches[index] = Object.create(null)
found.forEach(function (p, i) {
// If there's a problem with the stat, then it means that
// one or more of the links in the realpath couldn't be
// resolved. just return the abs value in that case.
p = self._makeAbs(p)
fs.realpath(p, self.realpathCache, function (er, real) {
if (!er)
set[real] = true
else if (er.syscall === 'stat')
set[p] = true
else
self.emit('error', er) // srsly wtf right here
if (--n === 0) {
self.matches[index] = set
cb()
}
})
})
}
Glob.prototype._mark = function (p) {
return common.mark(this, p)
}
Glob.prototype._makeAbs = function (f) {
return common.makeAbs(this, f)
}
Glob.prototype.abort = function () {
this.aborted = true
this.emit('abort')
}
Glob.prototype.pause = function () {
if (!this.paused) {
this.paused = true
this.emit('pause')
}
}
Glob.prototype.resume = function () {
if (this.paused) {
this.emit('resume')
this.paused = false
if (this._emitQueue.length) {
var eq = this._emitQueue.slice(0)
this._emitQueue.length = 0
for (var i = 0; i < eq.length; i ++) {
var e = eq[i]
this._emitMatch(e[0], e[1])
}
}
if (this._processQueue.length) {
var pq = this._processQueue.slice(0)
this._processQueue.length = 0
for (var i = 0; i < pq.length; i ++) {
var p = pq[i]
this._processing--
this._process(p[0], p[1], p[2], p[3])
}
}
}
}
Glob.prototype._process = function (pattern, index, inGlobStar, cb) {
assert(this instanceof Glob)
assert(typeof cb === 'function')
if (this.aborted)
return
this._processing++
if (this.paused) {
this._processQueue.push([pattern, index, inGlobStar, cb])
return
}
//console.error('PROCESS %d', this._processing, pattern)
// Get the first [n] parts of pattern that are all strings.
var n = 0
while (typeof pattern[n] === 'string') {
n ++
}
// now n is the index of the first one that is *not* a string.
// see if there's anything else
var prefix
switch (n) {
// if not, then this is rather simple
case pattern.length:
this._processSimple(pattern.join('/'), index, cb)
return
case 0:
// pattern *starts* with some non-trivial item.
// going to readdir(cwd), but not include the prefix in matches.
prefix = null
break
default:
// pattern has some string bits in the front.
// whatever it starts with, whether that's 'absolute' like /foo/bar,
// or 'relative' like '../baz'
prefix = pattern.slice(0, n).join('/')
break
}
var remain = pattern.slice(n)
// get the list of entries.
var read
if (prefix === null)
read = '.'
else if (isAbsolute(prefix) || isAbsolute(pattern.join('/'))) {
if (!prefix || !isAbsolute(prefix))
prefix = '/' + prefix
read = prefix
} else
read = prefix
var abs = this._makeAbs(read)
//if ignored, skip _processing
if (childrenIgnored(this, read))
return cb()
var isGlobStar = remain[0] === minimatch.GLOBSTAR
if (isGlobStar)
this._processGlobStar(prefix, read, abs, remain, index, inGlobStar, cb)
else
this._processReaddir(prefix, read, abs, remain, index, inGlobStar, cb)
}
Glob.prototype._processReaddir = function (prefix, read, abs, remain, index, inGlobStar, cb) {
var self = this
this._readdir(abs, inGlobStar, function (er, entries) {
return self._processReaddir2(prefix, read, abs, remain, index, inGlobStar, entries, cb)
})
}
Glob.prototype._processReaddir2 = function (prefix, read, abs, remain, index, inGlobStar, entries, cb) {
// if the abs isn't a dir, then nothing can match!
if (!entries)
return cb()
// It will only match dot entries if it starts with a dot, or if
// dot is set. Stuff like @(.foo|.bar) isn't allowed.
var pn = remain[0]
var negate = !!this.minimatch.negate
var rawGlob = pn._glob
var dotOk = this.dot || rawGlob.charAt(0) === '.'
var matchedEntries = []
for (var i = 0; i < entries.length; i++) {
var e = entries[i]
if (e.charAt(0) !== '.' || dotOk) {
var m
if (negate && !prefix) {
m = !e.match(pn)
} else {
m = e.match(pn)
}
if (m)
matchedEntries.push(e)
}
}
//console.error('prd2', prefix, entries, remain[0]._glob, matchedEntries)
var len = matchedEntries.length
// If there are no matched entries, then nothing matches.
if (len === 0)
return cb()
// if this is the last remaining pattern bit, then no need for
// an additional stat *unless* the user has specified mark or
// stat explicitly. We know they exist, since readdir returned
// them.
if (remain.length === 1 && !this.mark && !this.stat) {
if (!this.matches[index])
this.matches[index] = Object.create(null)
for (var i = 0; i < len; i ++) {
var e = matchedEntries[i]
if (prefix) {
if (prefix !== '/')
e = prefix + '/' + e
else
e = prefix + e
}
if (e.charAt(0) === '/' && !this.nomount) {
e = path.join(this.root, e)
}
this._emitMatch(index, e)
}
// This was the last one, and no stats were needed
return cb()
}
// now test all matched entries as stand-ins for that part
// of the pattern.
remain.shift()
for (var i = 0; i < len; i ++) {
var e = matchedEntries[i]
var newPattern
if (prefix) {
if (prefix !== '/')
e = prefix + '/' + e
else
e = prefix + e
}
this._process([e].concat(remain), index, inGlobStar, cb)
}
cb()
}
Glob.prototype._emitMatch = function (index, e) {
if (this.aborted)
return
if (this.matches[index][e])
return
if (isIgnored(this, e))
return
if (this.paused) {
this._emitQueue.push([index, e])
return
}
var abs = this._makeAbs(e)
if (this.nodir) {
var c = this.cache[abs]
if (c === 'DIR' || Array.isArray(c))
return
}
if (this.mark)
e = this._mark(e)
this.matches[index][e] = true
var st = this.statCache[abs]
if (st)
this.emit('stat', e, st)
this.emit('match', e)
}
Glob.prototype._readdirInGlobStar = function (abs, cb) {
if (this.aborted)
return
// follow all symlinked directories forever
// just proceed as if this is a non-globstar situation
if (this.follow)
return this._readdir(abs, false, cb)
var lstatkey = 'lstat\0' + abs
var self = this
var lstatcb = inflight(lstatkey, lstatcb_)
if (lstatcb)
fs.lstat(abs, lstatcb)
function lstatcb_ (er, lstat) {
if (er)
return cb()
var isSym = lstat.isSymbolicLink()
self.symlinks[abs] = isSym
// If it's not a symlink or a dir, then it's definitely a regular file.
// don't bother doing a readdir in that case.
if (!isSym && !lstat.isDirectory()) {
self.cache[abs] = 'FILE'
cb()
} else
self._readdir(abs, false, cb)
}
}
Glob.prototype._readdir = function (abs, inGlobStar, cb) {
if (this.aborted)
return
cb = inflight('readdir\0'+abs+'\0'+inGlobStar, cb)
if (!cb)
return
//console.error('RD %j %j', +inGlobStar, abs)
if (inGlobStar && !ownProp(this.symlinks, abs))
return this._readdirInGlobStar(abs, cb)
if (ownProp(this.cache, abs)) {
var c = this.cache[abs]
if (!c || c === 'FILE')
return cb()
if (Array.isArray(c))
return cb(null, c)
}
var self = this
fs.readdir(abs, readdirCb(this, abs, cb))
}
function readdirCb (self, abs, cb) {
return function (er, entries) {
if (er)
self._readdirError(abs, er, cb)
else
self._readdirEntries(abs, entries, cb)
}
}
Glob.prototype._readdirEntries = function (abs, entries, cb) {
if (this.aborted)
return
// if we haven't asked to stat everything, then just
// assume that everything in there exists, so we can avoid
// having to stat it a second time.
if (!this.mark && !this.stat) {
for (var i = 0; i < entries.length; i ++) {
var e = entries[i]
if (abs === '/')
e = abs + e
else
e = abs + '/' + e
this.cache[e] = true
}
}
this.cache[abs] = entries
return cb(null, entries)
}
Glob.prototype._readdirError = function (f, er, cb) {
if (this.aborted)
return
// handle errors, and cache the information
switch (er.code) {
case 'ENOTSUP': // https://github.com/isaacs/node-glob/issues/205
case 'ENOTDIR': // totally normal. means it *does* exist.
this.cache[this._makeAbs(f)] = 'FILE'
break
case 'ENOENT': // not terribly unusual
case 'ELOOP':
case 'ENAMETOOLONG':
case 'UNKNOWN':
this.cache[this._makeAbs(f)] = false
break
default: // some unusual error. Treat as failure.
this.cache[this._makeAbs(f)] = false
if (this.strict) {
this.emit('error', er)
// If the error is handled, then we abort
// if not, we threw out of here
this.abort()
}
if (!this.silent)
console.error('glob error', er)
break
}
return cb()
}
Glob.prototype._processGlobStar = function (prefix, read, abs, remain, index, inGlobStar, cb) {
var self = this
this._readdir(abs, inGlobStar, function (er, entries) {
self._processGlobStar2(prefix, read, abs, remain, index, inGlobStar, entries, cb)
})
}
Glob.prototype._processGlobStar2 = function (prefix, read, abs, remain, index, inGlobStar, entries, cb) {
//console.error('pgs2', prefix, remain[0], entries)
// no entries means not a dir, so it can never have matches
// foo.txt/** doesn't match foo.txt
if (!entries)
return cb()
// test without the globstar, and with every child both below
// and replacing the globstar.
var remainWithoutGlobStar = remain.slice(1)
var gspref = prefix ? [ prefix ] : []
var noGlobStar = gspref.concat(remainWithoutGlobStar)
// the noGlobStar pattern exits the inGlobStar state
this._process(noGlobStar, index, false, cb)
var isSym = this.symlinks[abs]
var len = entries.length
// If it's a symlink, and we're in a globstar, then stop
if (isSym && inGlobStar)
return cb()
for (var i = 0; i < len; i++) {
var e = entries[i]
if (e.charAt(0) === '.' && !this.dot)
continue
// these two cases enter the inGlobStar state
var instead = gspref.concat(entries[i], remainWithoutGlobStar)
this._process(instead, index, true, cb)
var below = gspref.concat(entries[i], remain)
this._process(below, index, true, cb)
}
cb()
}
Glob.prototype._processSimple = function (prefix, index, cb) {
// XXX review this. Shouldn't it be doing the mounting etc
// before doing stat? kinda weird?
var self = this
this._stat(prefix, function (er, exists) {
self._processSimple2(prefix, index, er, exists, cb)
})
}
Glob.prototype._processSimple2 = function (prefix, index, er, exists, cb) {
//console.error('ps2', prefix, exists)
if (!this.matches[index])
this.matches[index] = Object.create(null)
// If it doesn't exist, then just mark the lack of results
if (!exists)
return cb()
if (prefix && isAbsolute(prefix) && !this.nomount) {
var trail = /[\/\\]$/.test(prefix)
if (prefix.charAt(0) === '/') {
prefix = path.join(this.root, prefix)
} else {
prefix = path.resolve(this.root, prefix)
if (trail)
prefix += '/'
}
}
if (process.platform === 'win32')
prefix = prefix.replace(/\\/g, '/')
// Mark this as a match
this._emitMatch(index, prefix)
cb()
}
// Returns either 'DIR', 'FILE', or false
Glob.prototype._stat = function (f, cb) {
var abs = this._makeAbs(f)
var needDir = f.slice(-1) === '/'
if (f.length > this.maxLength)
return cb()
if (!this.stat && ownProp(this.cache, abs)) {
var c = this.cache[abs]
if (Array.isArray(c))
c = 'DIR'
// It exists, but maybe not how we need it
if (!needDir || c === 'DIR')
return cb(null, c)
if (needDir && c === 'FILE')
return cb()
// otherwise we have to stat, because maybe c=true
// if we know it exists, but not what it is.
}
var exists
var stat = this.statCache[abs]
if (stat !== undefined) {
if (stat === false)
return cb(null, stat)
else {
var type = stat.isDirectory() ? 'DIR' : 'FILE'
if (needDir && type === 'FILE')
return cb()
else
return cb(null, type, stat)
}
}
var self = this
var statcb = inflight('stat\0' + abs, lstatcb_)
if (statcb)
fs.lstat(abs, statcb)
function lstatcb_ (er, lstat) {
if (lstat && lstat.isSymbolicLink()) {
// If it's a symlink, then treat it as the target, unless
// the target does not exist, then treat it as a file.
return fs.stat(abs, function (er, stat) {
if (er)
self._stat2(f, abs, null, lstat, cb)
else
self._stat2(f, abs, er, stat, cb)
})
} else {
self._stat2(f, abs, er, lstat, cb)
}
}
}
Glob.prototype._stat2 = function (f, abs, er, stat, cb) {
if (er) {
this.statCache[abs] = false
return cb()
}
var needDir = f.slice(-1) === '/'
this.statCache[abs] = stat
if (abs.slice(-1) === '/' && !stat.isDirectory())
return cb(null, false, stat)
var c = stat.isDirectory() ? 'DIR' : 'FILE'
this.cache[abs] = this.cache[abs] || c
if (needDir && c !== 'DIR')
return cb()
return cb(null, c, stat)
}

View File

@ -0,0 +1,75 @@
{
"_from": "glob@^5.0.3",
"_id": "glob@5.0.15",
"_inBundle": false,
"_integrity": "sha1-G8k2ueAvSmA/zCIuz3Yz0wuLk7E=",
"_location": "/guppy-cli/glob",
"_phantomChildren": {},
"_requested": {
"type": "range",
"registry": true,
"raw": "glob@^5.0.3",
"name": "glob",
"escapedName": "glob",
"rawSpec": "^5.0.3",
"saveSpec": null,
"fetchSpec": "^5.0.3"
},
"_requiredBy": [
"/guppy-cli/glob-stream"
],
"_resolved": "https://registry.npmjs.org/glob/-/glob-5.0.15.tgz",
"_shasum": "1bc936b9e02f4a603fcc222ecf7633d30b8b93b1",
"_spec": "glob@^5.0.3",
"_where": "/home/manfred/enviPath/ketcher2/ketcher/node_modules/guppy-cli/node_modules/glob-stream",
"author": {
"name": "Isaac Z. Schlueter",
"email": "i@izs.me",
"url": "http://blog.izs.me/"
},
"bugs": {
"url": "https://github.com/isaacs/node-glob/issues"
},
"bundleDependencies": false,
"dependencies": {
"inflight": "^1.0.4",
"inherits": "2",
"minimatch": "2 || 3",
"once": "^1.3.0",
"path-is-absolute": "^1.0.0"
},
"deprecated": false,
"description": "a little globber",
"devDependencies": {
"mkdirp": "0",
"rimraf": "^2.2.8",
"tap": "^1.1.4",
"tick": "0.0.6"
},
"engines": {
"node": "*"
},
"files": [
"glob.js",
"sync.js",
"common.js"
],
"homepage": "https://github.com/isaacs/node-glob#readme",
"license": "ISC",
"main": "glob.js",
"name": "glob",
"repository": {
"type": "git",
"url": "git://github.com/isaacs/node-glob.git"
},
"scripts": {
"bench": "bash benchmark.sh",
"benchclean": "node benchclean.js",
"prepublish": "npm run benchclean",
"prof": "bash prof.sh && cat profile.txt",
"profclean": "rm -f v8.log profile.txt",
"test": "tap test/*.js --cov",
"test-regen": "npm run profclean && TEST_REGEN=1 node test/00-setup.js"
},
"version": "5.0.15"
}

View File

@ -0,0 +1,460 @@
module.exports = globSync
globSync.GlobSync = GlobSync
var fs = require('fs')
var minimatch = require('minimatch')
var Minimatch = minimatch.Minimatch
var Glob = require('./glob.js').Glob
var util = require('util')
var path = require('path')
var assert = require('assert')
var isAbsolute = require('path-is-absolute')
var common = require('./common.js')
var alphasort = common.alphasort
var alphasorti = common.alphasorti
var setopts = common.setopts
var ownProp = common.ownProp
var childrenIgnored = common.childrenIgnored
function globSync (pattern, options) {
if (typeof options === 'function' || arguments.length === 3)
throw new TypeError('callback provided to sync glob\n'+
'See: https://github.com/isaacs/node-glob/issues/167')
return new GlobSync(pattern, options).found
}
function GlobSync (pattern, options) {
if (!pattern)
throw new Error('must provide pattern')
if (typeof options === 'function' || arguments.length === 3)
throw new TypeError('callback provided to sync glob\n'+
'See: https://github.com/isaacs/node-glob/issues/167')
if (!(this instanceof GlobSync))
return new GlobSync(pattern, options)
setopts(this, pattern, options)
if (this.noprocess)
return this
var n = this.minimatch.set.length
this.matches = new Array(n)
for (var i = 0; i < n; i ++) {
this._process(this.minimatch.set[i], i, false)
}
this._finish()
}
GlobSync.prototype._finish = function () {
assert(this instanceof GlobSync)
if (this.realpath) {
var self = this
this.matches.forEach(function (matchset, index) {
var set = self.matches[index] = Object.create(null)
for (var p in matchset) {
try {
p = self._makeAbs(p)
var real = fs.realpathSync(p, self.realpathCache)
set[real] = true
} catch (er) {
if (er.syscall === 'stat')
set[self._makeAbs(p)] = true
else
throw er
}
}
})
}
common.finish(this)
}
GlobSync.prototype._process = function (pattern, index, inGlobStar) {
assert(this instanceof GlobSync)
// Get the first [n] parts of pattern that are all strings.
var n = 0
while (typeof pattern[n] === 'string') {
n ++
}
// now n is the index of the first one that is *not* a string.
// See if there's anything else
var prefix
switch (n) {
// if not, then this is rather simple
case pattern.length:
this._processSimple(pattern.join('/'), index)
return
case 0:
// pattern *starts* with some non-trivial item.
// going to readdir(cwd), but not include the prefix in matches.
prefix = null
break
default:
// pattern has some string bits in the front.
// whatever it starts with, whether that's 'absolute' like /foo/bar,
// or 'relative' like '../baz'
prefix = pattern.slice(0, n).join('/')
break
}
var remain = pattern.slice(n)
// get the list of entries.
var read
if (prefix === null)
read = '.'
else if (isAbsolute(prefix) || isAbsolute(pattern.join('/'))) {
if (!prefix || !isAbsolute(prefix))
prefix = '/' + prefix
read = prefix
} else
read = prefix
var abs = this._makeAbs(read)
//if ignored, skip processing
if (childrenIgnored(this, read))
return
var isGlobStar = remain[0] === minimatch.GLOBSTAR
if (isGlobStar)
this._processGlobStar(prefix, read, abs, remain, index, inGlobStar)
else
this._processReaddir(prefix, read, abs, remain, index, inGlobStar)
}
GlobSync.prototype._processReaddir = function (prefix, read, abs, remain, index, inGlobStar) {
var entries = this._readdir(abs, inGlobStar)
// if the abs isn't a dir, then nothing can match!
if (!entries)
return
// It will only match dot entries if it starts with a dot, or if
// dot is set. Stuff like @(.foo|.bar) isn't allowed.
var pn = remain[0]
var negate = !!this.minimatch.negate
var rawGlob = pn._glob
var dotOk = this.dot || rawGlob.charAt(0) === '.'
var matchedEntries = []
for (var i = 0; i < entries.length; i++) {
var e = entries[i]
if (e.charAt(0) !== '.' || dotOk) {
var m
if (negate && !prefix) {
m = !e.match(pn)
} else {
m = e.match(pn)
}
if (m)
matchedEntries.push(e)
}
}
var len = matchedEntries.length
// If there are no matched entries, then nothing matches.
if (len === 0)
return
// if this is the last remaining pattern bit, then no need for
// an additional stat *unless* the user has specified mark or
// stat explicitly. We know they exist, since readdir returned
// them.
if (remain.length === 1 && !this.mark && !this.stat) {
if (!this.matches[index])
this.matches[index] = Object.create(null)
for (var i = 0; i < len; i ++) {
var e = matchedEntries[i]
if (prefix) {
if (prefix.slice(-1) !== '/')
e = prefix + '/' + e
else
e = prefix + e
}
if (e.charAt(0) === '/' && !this.nomount) {
e = path.join(this.root, e)
}
this.matches[index][e] = true
}
// This was the last one, and no stats were needed
return
}
// now test all matched entries as stand-ins for that part
// of the pattern.
remain.shift()
for (var i = 0; i < len; i ++) {
var e = matchedEntries[i]
var newPattern
if (prefix)
newPattern = [prefix, e]
else
newPattern = [e]
this._process(newPattern.concat(remain), index, inGlobStar)
}
}
GlobSync.prototype._emitMatch = function (index, e) {
var abs = this._makeAbs(e)
if (this.mark)
e = this._mark(e)
if (this.matches[index][e])
return
if (this.nodir) {
var c = this.cache[this._makeAbs(e)]
if (c === 'DIR' || Array.isArray(c))
return
}
this.matches[index][e] = true
if (this.stat)
this._stat(e)
}
GlobSync.prototype._readdirInGlobStar = function (abs) {
// follow all symlinked directories forever
// just proceed as if this is a non-globstar situation
if (this.follow)
return this._readdir(abs, false)
var entries
var lstat
var stat
try {
lstat = fs.lstatSync(abs)
} catch (er) {
// lstat failed, doesn't exist
return null
}
var isSym = lstat.isSymbolicLink()
this.symlinks[abs] = isSym
// If it's not a symlink or a dir, then it's definitely a regular file.
// don't bother doing a readdir in that case.
if (!isSym && !lstat.isDirectory())
this.cache[abs] = 'FILE'
else
entries = this._readdir(abs, false)
return entries
}
GlobSync.prototype._readdir = function (abs, inGlobStar) {
var entries
if (inGlobStar && !ownProp(this.symlinks, abs))
return this._readdirInGlobStar(abs)
if (ownProp(this.cache, abs)) {
var c = this.cache[abs]
if (!c || c === 'FILE')
return null
if (Array.isArray(c))
return c
}
try {
return this._readdirEntries(abs, fs.readdirSync(abs))
} catch (er) {
this._readdirError(abs, er)
return null
}
}
GlobSync.prototype._readdirEntries = function (abs, entries) {
// if we haven't asked to stat everything, then just
// assume that everything in there exists, so we can avoid
// having to stat it a second time.
if (!this.mark && !this.stat) {
for (var i = 0; i < entries.length; i ++) {
var e = entries[i]
if (abs === '/')
e = abs + e
else
e = abs + '/' + e
this.cache[e] = true
}
}
this.cache[abs] = entries
// mark and cache dir-ness
return entries
}
GlobSync.prototype._readdirError = function (f, er) {
// handle errors, and cache the information
switch (er.code) {
case 'ENOTSUP': // https://github.com/isaacs/node-glob/issues/205
case 'ENOTDIR': // totally normal. means it *does* exist.
this.cache[this._makeAbs(f)] = 'FILE'
break
case 'ENOENT': // not terribly unusual
case 'ELOOP':
case 'ENAMETOOLONG':
case 'UNKNOWN':
this.cache[this._makeAbs(f)] = false
break
default: // some unusual error. Treat as failure.
this.cache[this._makeAbs(f)] = false
if (this.strict)
throw er
if (!this.silent)
console.error('glob error', er)
break
}
}
GlobSync.prototype._processGlobStar = function (prefix, read, abs, remain, index, inGlobStar) {
var entries = this._readdir(abs, inGlobStar)
// no entries means not a dir, so it can never have matches
// foo.txt/** doesn't match foo.txt
if (!entries)
return
// test without the globstar, and with every child both below
// and replacing the globstar.
var remainWithoutGlobStar = remain.slice(1)
var gspref = prefix ? [ prefix ] : []
var noGlobStar = gspref.concat(remainWithoutGlobStar)
// the noGlobStar pattern exits the inGlobStar state
this._process(noGlobStar, index, false)
var len = entries.length
var isSym = this.symlinks[abs]
// If it's a symlink, and we're in a globstar, then stop
if (isSym && inGlobStar)
return
for (var i = 0; i < len; i++) {
var e = entries[i]
if (e.charAt(0) === '.' && !this.dot)
continue
// these two cases enter the inGlobStar state
var instead = gspref.concat(entries[i], remainWithoutGlobStar)
this._process(instead, index, true)
var below = gspref.concat(entries[i], remain)
this._process(below, index, true)
}
}
GlobSync.prototype._processSimple = function (prefix, index) {
// XXX review this. Shouldn't it be doing the mounting etc
// before doing stat? kinda weird?
var exists = this._stat(prefix)
if (!this.matches[index])
this.matches[index] = Object.create(null)
// If it doesn't exist, then just mark the lack of results
if (!exists)
return
if (prefix && isAbsolute(prefix) && !this.nomount) {
var trail = /[\/\\]$/.test(prefix)
if (prefix.charAt(0) === '/') {
prefix = path.join(this.root, prefix)
} else {
prefix = path.resolve(this.root, prefix)
if (trail)
prefix += '/'
}
}
if (process.platform === 'win32')
prefix = prefix.replace(/\\/g, '/')
// Mark this as a match
this.matches[index][prefix] = true
}
// Returns either 'DIR', 'FILE', or false
GlobSync.prototype._stat = function (f) {
var abs = this._makeAbs(f)
var needDir = f.slice(-1) === '/'
if (f.length > this.maxLength)
return false
if (!this.stat && ownProp(this.cache, abs)) {
var c = this.cache[abs]
if (Array.isArray(c))
c = 'DIR'
// It exists, but maybe not how we need it
if (!needDir || c === 'DIR')
return c
if (needDir && c === 'FILE')
return false
// otherwise we have to stat, because maybe c=true
// if we know it exists, but not what it is.
}
var exists
var stat = this.statCache[abs]
if (!stat) {
var lstat
try {
lstat = fs.lstatSync(abs)
} catch (er) {
return false
}
if (lstat.isSymbolicLink()) {
try {
stat = fs.statSync(abs)
} catch (er) {
stat = lstat
}
} else {
stat = lstat
}
}
this.statCache[abs] = stat
var c = stat.isDirectory() ? 'DIR' : 'FILE'
this.cache[abs] = this.cache[abs] || c
if (needDir && c !== 'DIR')
return false
return c
}
GlobSync.prototype._mark = function (p) {
return common.mark(this, p)
}
GlobSync.prototype._makeAbs = function (f) {
return common.makeAbs(this, f)
}

View File

@ -0,0 +1,13 @@
Copyright (c) 2014, Florian Reiterer
Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.

View File

@ -0,0 +1,252 @@
## gulp-sourcemaps [![NPM version][npm-image]][npm-url] [![build status][travis-image]][travis-url] [![Test coverage][coveralls-image]][coveralls-url]
### Usage
#### Write inline source maps
Inline source maps are embedded in the source file.
Example:
```javascript
var gulp = require('gulp');
var plugin1 = require('gulp-plugin1');
var plugin2 = require('gulp-plugin2');
var sourcemaps = require('gulp-sourcemaps');
gulp.task('javascript', function() {
gulp.src('src/**/*.js')
.pipe(sourcemaps.init())
.pipe(plugin1())
.pipe(plugin2())
.pipe(sourcemaps.write())
.pipe(gulp.dest('dist'));
});
```
All plugins between `sourcemaps.init()` and `sourcemaps.write()` need to have support for `gulp-sourcemaps`. You can find a list of such plugins in the [wiki](https://github.com/floridoo/gulp-sourcemaps/wiki/Plugins-with-gulp-sourcemaps-support).
#### Write external source map files
To write external source map files, pass a path relative to the destination to `sourcemaps.write()`.
Example:
```javascript
var gulp = require('gulp');
var plugin1 = require('gulp-plugin1');
var plugin2 = require('gulp-plugin2');
var sourcemaps = require('gulp-sourcemaps');
gulp.task('javascript', function() {
gulp.src('src/**/*.js')
.pipe(sourcemaps.init())
.pipe(plugin1())
.pipe(plugin2())
.pipe(sourcemaps.write('../maps'))
.pipe(gulp.dest('dist'));
});
```
#### Load existing source maps
To load existing source maps, pass the option `loadMaps: true` to `sourcemaps.init()`.
Example:
```javascript
var gulp = require('gulp');
var plugin1 = require('gulp-plugin1');
var plugin2 = require('gulp-plugin2');
var sourcemaps = require('gulp-sourcemaps');
gulp.task('javascript', function() {
gulp.src('src/**/*.js')
.pipe(sourcemaps.init({loadMaps: true}))
.pipe(plugin1())
.pipe(plugin2())
.pipe(sourcemaps.write())
.pipe(gulp.dest('dist'));
});
```
#### Handle source files from different directories
Use the `base` option on `gulp.src` to make sure all files are relative to a common base directory.
Example:
```javascript
var gulp = require('gulp');
var plugin1 = require('gulp-plugin1');
var plugin2 = require('gulp-plugin2');
var sourcemaps = require('gulp-sourcemaps');
gulp.task('javascript', function() {
gulp.src(['src/test.js', 'src/testdir/test2.js'], { base: 'src' })
.pipe(sourcemaps.init())
.pipe(plugin1())
.pipe(plugin2())
.pipe(sourcemaps.write('../maps'))
.pipe(gulp.dest('dist'));
});
```
### Init Options
- `loadMaps`
Set to true to load existing maps for source files. Supports the following:
- inline source maps
- source map files referenced by a `sourceMappingURL=` comment
- source map files with the same name (plus .map) in the same directory
- `debug`
Set this to `true` to output debug messages (e.g. about missing source content).
### Write Options
- `addComment`
By default a comment containing / referencing the source map is added. Set this to `false` to disable the comment (e.g. if you want to load the source maps by header).
Example:
```javascript
gulp.task('javascript', function() {
var stream = gulp.src('src/**/*.js')
.pipe(sourcemaps.init())
.pipe(plugin1())
.pipe(plugin2())
.pipe(sourcemaps.write('../maps', {addComment: false}))
.pipe(gulp.dest('dist'));
});
```
- `includeContent`
By default the source maps include the source code. Pass `false` to use the original files.
Including the content is the recommended way, because it "just works". When setting this to `false` you have to host the source files and set the correct `sourceRoot`.
- `sourceRoot`
Set the path where the source files are hosted (use this when `includeContent` is set to `false`). This is a URL (or subpath) relative to the source map, not a local file system path. If you have sources in different subpaths, an absolute path (from the domain root) pointing to the source file root is recommended, or define it with a function.
Example:
```javascript
gulp.task('javascript', function() {
var stream = gulp.src('src/**/*.js')
.pipe(sourcemaps.init())
.pipe(plugin1())
.pipe(plugin2())
.pipe(sourcemaps.write({includeContent: false, sourceRoot: '/src'}))
.pipe(gulp.dest('dist'));
});
```
Example (using a function):
```javascript
gulp.task('javascript', function() {
var stream = gulp.src('src/**/*.js')
.pipe(sourcemaps.init())
.pipe(plugin1())
.pipe(plugin2())
.pipe(sourcemaps.write({
includeContent: false,
sourceRoot: function(file) {
return '/src';
}
}))
.pipe(gulp.dest('dist'));
});
```
- `sourceMappingURLPrefix`
Specify a prefix to be prepended onto the source map URL when writing external source maps. Relative paths will have their leading dots stripped.
Example:
```javascript
gulp.task('javascript', function() {
var stream = gulp.src('src/**/*.js')
.pipe(sourcemaps.init())
.pipe(plugin1())
.pipe(plugin2())
.pipe(sourcemaps.write('../maps', {
sourceMappingURLPrefix: 'https://asset-host.example.com/assets'
}))
.pipe(gulp.dest('public/scripts'));
});
```
This will result in a source mapping URL comment like `sourceMappingURL=https://asset-host.example.com/assets/maps/helloworld.js.map`.
- `sourceMappingURL`
If you need full control over the source map URL you can pass a function to this option. The output of the function must be the full URL to the source map for the given output file.
Example:
```javascript
gulp.task('javascript', function() {
var stream = gulp.src('src/**/*.js')
.pipe(sourcemaps.init())
.pipe(plugin1())
.pipe(plugin2())
.pipe(sourcemaps.write('../maps', {
sourceMappingURL: function(file) {
return 'https://asset-host.example.com/' + file.relative + '.map';
}
}))
.pipe(gulp.dest('public/scripts'));
});
```
This will result in a source mapping URL comment like `sourceMappingURL=https://asset-host.example.com/helloworld.js.map`.
- `debug`
Set this to `true` to output debug messages (e.g. about missing source content).
### Plugin developers only: How to add source map support to plugins
- Generate a source map for the transformation the plugin is applying
- **Important**: Make sure the paths in the generated source map (`file` and `sources`) are relative to `file.base` (e.g. use `file.relative`).
- Apply this source map to the vinyl `file`. E.g. by using [vinyl-sourcemaps-apply](https://github.com/floridoo/vinyl-sourcemaps-apply).
This combines the source map of this plugin with the source maps coming from plugins further up the chain.
- Add your plugin to the [wiki page](https://github.com/floridoo/gulp-sourcemaps/wiki/Plugins-with-gulp-sourcemaps-support)
#### Example:
```javascript
var through = require('through2');
var applySourceMap = require('vinyl-sourcemaps-apply');
var myTransform = require('myTransform');
module.exports = function(options) {
function transform(file, encoding, callback) {
// generate source maps if plugin source-map present
if (file.sourceMap) {
options.makeSourceMaps = true;
}
// do normal plugin logic
var result = myTransform(file.contents, options);
file.contents = new Buffer(result.code);
// apply source map to the chain
if (file.sourceMap) {
applySourceMap(file, result.map);
}
this.push(file);
callback();
}
return through.obj(transform);
};
```
[npm-image]: https://img.shields.io/npm/v/gulp-sourcemaps.svg
[npm-url]: https://www.npmjs.com/package/gulp-sourcemaps
[travis-image]: https://img.shields.io/travis/floridoo/gulp-sourcemaps.svg
[travis-url]: https://travis-ci.org/floridoo/gulp-sourcemaps
[coveralls-image]: https://img.shields.io/coveralls/floridoo/gulp-sourcemaps.svg
[coveralls-url]: https://coveralls.io/r/floridoo/gulp-sourcemaps?branch=master

View File

@ -0,0 +1,269 @@
'use strict';
var through = require('through2');
var fs = require('graceful-fs');
var path = require('path');
var File = require('vinyl');
var convert = require('convert-source-map');
var stripBom = require('strip-bom');
var PLUGIN_NAME = 'gulp-sourcemap';
var urlRegex = /^(https?|webpack(-[^:]+)?):\/\//;
/**
* Initialize source mapping chain
*/
module.exports.init = function init(options) {
function sourceMapInit(file, encoding, callback) {
/*jshint validthis:true */
// pass through if file is null or already has a source map
if (file.isNull() || file.sourceMap) {
this.push(file);
return callback();
}
if (file.isStream()) {
return callback(new Error(PLUGIN_NAME + '-init: Streaming not supported'));
}
var fileContent = file.contents.toString();
var sourceMap;
if (options && options.loadMaps) {
var sourcePath = ''; //root path for the sources in the map
// Try to read inline source map
sourceMap = convert.fromSource(fileContent);
if (sourceMap) {
sourceMap = sourceMap.toObject();
// sources in map are relative to the source file
sourcePath = path.dirname(file.path);
fileContent = convert.removeComments(fileContent);
} else {
// look for source map comment referencing a source map file
var mapComment = convert.mapFileCommentRegex.exec(fileContent);
var mapFile;
if (mapComment) {
mapFile = path.resolve(path.dirname(file.path), mapComment[1] || mapComment[2]);
fileContent = convert.removeMapFileComments(fileContent);
// if no comment try map file with same name as source file
} else {
mapFile = file.path + '.map';
}
// sources in external map are relative to map file
sourcePath = path.dirname(mapFile);
try {
sourceMap = JSON.parse(stripBom(fs.readFileSync(mapFile, 'utf8')));
} catch(e) {}
}
// fix source paths and sourceContent for imported source map
if (sourceMap) {
sourceMap.sourcesContent = sourceMap.sourcesContent || [];
sourceMap.sources.forEach(function(source, i) {
if (source.match(urlRegex)) {
sourceMap.sourcesContent[i] = sourceMap.sourcesContent[i] || null;
return;
}
var absPath = path.resolve(sourcePath, source);
sourceMap.sources[i] = unixStylePath(path.relative(file.base, absPath));
if (!sourceMap.sourcesContent[i]) {
var sourceContent = null;
if (sourceMap.sourceRoot) {
if (sourceMap.sourceRoot.match(urlRegex)) {
sourceMap.sourcesContent[i] = null;
return;
}
absPath = path.resolve(sourcePath, sourceMap.sourceRoot, source);
}
// if current file: use content
if (absPath === file.path) {
sourceContent = fileContent;
// else load content from file
} else {
try {
if (options.debug)
console.log(PLUGIN_NAME + '-init: No source content for "' + source + '". Loading from file.');
sourceContent = stripBom(fs.readFileSync(absPath, 'utf8'));
} catch (e) {
if (options.debug)
console.warn(PLUGIN_NAME + '-init: source file not found: ' + absPath);
}
}
sourceMap.sourcesContent[i] = sourceContent;
}
});
// remove source map comment from source
file.contents = new Buffer(fileContent, 'utf8');
}
}
if (!sourceMap) {
// Make an empty source map
sourceMap = {
version : 3,
names: [],
mappings: '',
sources: [unixStylePath(file.relative)],
sourcesContent: [fileContent]
};
}
sourceMap.file = unixStylePath(file.relative);
file.sourceMap = sourceMap;
this.push(file);
callback();
}
return through.obj(sourceMapInit);
};
/**
* Write the source map
*
* @param options options to change the way the source map is written
*
*/
module.exports.write = function write(destPath, options) {
if (options === undefined && Object.prototype.toString.call(destPath) === '[object Object]') {
options = destPath;
destPath = undefined;
}
options = options || {};
// set defaults for options if unset
if (options.includeContent === undefined)
options.includeContent = true;
if (options.addComment === undefined)
options.addComment = true;
function sourceMapWrite(file, encoding, callback) {
/*jshint validthis:true */
if (file.isNull() || !file.sourceMap) {
this.push(file);
return callback();
}
if (file.isStream()) {
return callback(new Error(PLUGIN_NAME + '-write: Streaming not supported'));
}
var sourceMap = file.sourceMap;
// fix paths if Windows style paths
sourceMap.file = unixStylePath(file.relative);
sourceMap.sources = sourceMap.sources.map(function(filePath) {
return unixStylePath(filePath);
});
if (typeof options.sourceRoot === 'function') {
sourceMap.sourceRoot = options.sourceRoot(file);
} else {
sourceMap.sourceRoot = options.sourceRoot;
}
if (options.includeContent) {
sourceMap.sourcesContent = sourceMap.sourcesContent || [];
// load missing source content
for (var i = 0; i < file.sourceMap.sources.length; i++) {
if (!sourceMap.sourcesContent[i]) {
var sourcePath = path.resolve(sourceMap.sourceRoot || file.base, sourceMap.sources[i]);
try {
if (options.debug)
console.log(PLUGIN_NAME + '-write: No source content for "' + sourceMap.sources[i] + '". Loading from file.');
sourceMap.sourcesContent[i] = stripBom(fs.readFileSync(sourcePath, 'utf8'));
} catch (e) {
if (options.debug)
console.warn(PLUGIN_NAME + '-write: source file not found: ' + sourcePath);
}
}
}
if (sourceMap.sourceRoot === undefined) {
sourceMap.sourceRoot = '/source/';
} else if (sourceMap.sourceRoot === null) {
sourceMap.sourceRoot = undefined;
}
} else {
delete sourceMap.sourcesContent;
}
var extension = file.relative.split('.').pop();
var commentFormatter;
switch (extension) {
case 'css':
commentFormatter = function(url) { return "\n/*# sourceMappingURL=" + url + " */\n"; };
break;
case 'js':
commentFormatter = function(url) { return "\n//# sourceMappingURL=" + url + "\n"; };
break;
default:
commentFormatter = function(url) { return ""; };
}
var comment, sourceMappingURLPrefix;
if (!destPath) {
// encode source map into comment
var base64Map = new Buffer(JSON.stringify(sourceMap)).toString('base64');
comment = commentFormatter('data:application/json;base64,' + base64Map);
} else {
var sourceMapPath = path.join(file.base, destPath, file.relative) + '.map';
// add new source map file to stream
var sourceMapFile = new File({
cwd: file.cwd,
base: file.base,
path: sourceMapPath,
contents: new Buffer(JSON.stringify(sourceMap)),
stat: {
isFile: function () { return true; },
isDirectory: function () { return false; },
isBlockDevice: function () { return false; },
isCharacterDevice: function () { return false; },
isSymbolicLink: function () { return false; },
isFIFO: function () { return false; },
isSocket: function () { return false; }
}
});
this.push(sourceMapFile);
var sourceMapPathRelative = path.relative(path.dirname(file.path), sourceMapPath);
if (options.sourceMappingURLPrefix) {
var prefix = '';
if (typeof options.sourceMappingURLPrefix === 'function') {
prefix = options.sourceMappingURLPrefix(file);
} else {
prefix = options.sourceMappingURLPrefix;
}
sourceMapPathRelative = prefix+path.join('/', sourceMapPathRelative);
}
comment = commentFormatter(unixStylePath(sourceMapPathRelative));
if (options.sourceMappingURL && typeof options.sourceMappingURL === 'function') {
comment = commentFormatter(options.sourceMappingURL(file));
}
}
// append source map comment
if (options.addComment)
file.contents = Buffer.concat([file.contents, new Buffer(comment)]);
this.push(file);
callback();
}
return through.obj(sourceMapWrite);
};
function unixStylePath(filePath) {
return filePath.split(path.sep).join('/');
}

View File

@ -0,0 +1,76 @@
{
"_from": "gulp-sourcemaps@1.6.0",
"_id": "gulp-sourcemaps@1.6.0",
"_inBundle": false,
"_integrity": "sha1-uG/zSdgBzrVuHZ59x7vLS33uYAw=",
"_location": "/guppy-cli/gulp-sourcemaps",
"_phantomChildren": {},
"_requested": {
"type": "version",
"registry": true,
"raw": "gulp-sourcemaps@1.6.0",
"name": "gulp-sourcemaps",
"escapedName": "gulp-sourcemaps",
"rawSpec": "1.6.0",
"saveSpec": null,
"fetchSpec": "1.6.0"
},
"_requiredBy": [
"/guppy-cli/vinyl-fs"
],
"_resolved": "https://registry.npmjs.org/gulp-sourcemaps/-/gulp-sourcemaps-1.6.0.tgz",
"_shasum": "b86ff349d801ceb56e1d9e7dc7bbcb4b7dee600c",
"_spec": "gulp-sourcemaps@1.6.0",
"_where": "/home/manfred/enviPath/ketcher2/ketcher/node_modules/guppy-cli/node_modules/vinyl-fs",
"author": {
"name": "Florian Reiterer",
"email": "me@florianreiterer.com"
},
"bugs": {
"url": "https://github.com/floridoo/gulp-sourcemaps/issues"
},
"bundleDependencies": false,
"dependencies": {
"convert-source-map": "^1.1.1",
"graceful-fs": "^4.1.2",
"strip-bom": "^2.0.0",
"through2": "^2.0.0",
"vinyl": "^1.0.0"
},
"deprecated": false,
"description": "Source map support for Gulp.js",
"devDependencies": {
"coveralls": "^2.11.4",
"faucet": "0.0.1",
"istanbul": "^0.3.21",
"jshint": "^2.8.0",
"tape": "^4.2.0"
},
"files": [
"index.js",
"package.json",
"README.md",
"LICENSE.md"
],
"homepage": "http://github.com/floridoo/gulp-sourcemaps",
"keywords": [
"gulpplugin",
"gulp",
"source maps",
"sourcemaps"
],
"license": "ISC",
"main": "index.js",
"name": "gulp-sourcemaps",
"repository": {
"type": "git",
"url": "git://github.com/floridoo/gulp-sourcemaps.git"
},
"scripts": {
"cover": "istanbul cover --dir reports/coverage tape \"test/*.js\"",
"coveralls": "istanbul cover tape \"test/*.js\" --report lcovonly && cat ./coverage/lcov.info | coveralls && rm -rf ./coverage",
"tap": "tape test/*.js",
"test": "jshint *.js test/*.js && faucet test/*.js"
},
"version": "1.6.0"
}

View File

@ -0,0 +1,21 @@
The MIT License (MIT)
Copyright (c) 2014-2016, Jon Schlinkert
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.

View File

@ -0,0 +1,107 @@
# is-extglob [![NPM version](https://img.shields.io/npm/v/is-extglob.svg?style=flat)](https://www.npmjs.com/package/is-extglob) [![NPM downloads](https://img.shields.io/npm/dm/is-extglob.svg?style=flat)](https://npmjs.org/package/is-extglob) [![Build Status](https://img.shields.io/travis/jonschlinkert/is-extglob.svg?style=flat)](https://travis-ci.org/jonschlinkert/is-extglob)
> Returns true if a string has an extglob.
## Install
Install with [npm](https://www.npmjs.com/):
```sh
$ npm install --save is-extglob
```
## Usage
```js
var isExtglob = require('is-extglob');
```
**True**
```js
isExtglob('?(abc)');
isExtglob('@(abc)');
isExtglob('!(abc)');
isExtglob('*(abc)');
isExtglob('+(abc)');
```
**False**
Escaped extglobs:
```js
isExtglob('\\?(abc)');
isExtglob('\\@(abc)');
isExtglob('\\!(abc)');
isExtglob('\\*(abc)');
isExtglob('\\+(abc)');
```
Everything else...
```js
isExtglob('foo.js');
isExtglob('!foo.js');
isExtglob('*.js');
isExtglob('**/abc.js');
isExtglob('abc/*.js');
isExtglob('abc/(aaa|bbb).js');
isExtglob('abc/[a-z].js');
isExtglob('abc/{a,b}.js');
isExtglob('abc/?.js');
isExtglob('abc.js');
isExtglob('abc/def/ghi.js');
```
## History
**v2.0**
Adds support for escaping. Escaped extglobs no longer return true.
## About
### Related projects
* [has-glob](https://www.npmjs.com/package/has-glob): Returns `true` if an array has a glob pattern. | [homepage](https://github.com/jonschlinkert/has-glob "Returns `true` if an array has a glob pattern.")
* [is-glob](https://www.npmjs.com/package/is-glob): Returns `true` if the given string looks like a glob pattern or an extglob pattern… [more](https://github.com/jonschlinkert/is-glob) | [homepage](https://github.com/jonschlinkert/is-glob "Returns `true` if the given string looks like a glob pattern or an extglob pattern. This makes it easy to create code that only uses external modules like node-glob when necessary, resulting in much faster code execution and initialization time, and a bet")
* [micromatch](https://www.npmjs.com/package/micromatch): Glob matching for javascript/node.js. A drop-in replacement and faster alternative to minimatch and multimatch. | [homepage](https://github.com/jonschlinkert/micromatch "Glob matching for javascript/node.js. A drop-in replacement and faster alternative to minimatch and multimatch.")
### Contributing
Pull requests and stars are always welcome. For bugs and feature requests, [please create an issue](../../issues/new).
### Building docs
_(This document was generated by [verb-generate-readme](https://github.com/verbose/verb-generate-readme) (a [verb](https://github.com/verbose/verb) generator), please don't edit the readme directly. Any changes to the readme must be made in [.verb.md](.verb.md).)_
To generate the readme and API documentation with [verb](https://github.com/verbose/verb):
```sh
$ npm install -g verb verb-generate-readme && verb
```
### Running tests
Install dev dependencies:
```sh
$ npm install -d && npm test
```
### Author
**Jon Schlinkert**
* [github/jonschlinkert](https://github.com/jonschlinkert)
* [twitter/jonschlinkert](http://twitter.com/jonschlinkert)
### License
Copyright © 2016, [Jon Schlinkert](https://github.com/jonschlinkert).
Released under the [MIT license](https://github.com/jonschlinkert/is-extglob/blob/master/LICENSE).
***
_This file was generated by [verb-generate-readme](https://github.com/verbose/verb-generate-readme), v0.1.31, on October 12, 2016._

View File

@ -0,0 +1,20 @@
/*!
* is-extglob <https://github.com/jonschlinkert/is-extglob>
*
* Copyright (c) 2014-2016, Jon Schlinkert.
* Licensed under the MIT License.
*/
module.exports = function isExtglob(str) {
if (typeof str !== 'string' || str === '') {
return false;
}
var match;
while ((match = /(\\).|([@?!+*]\(.*\))/g.exec(str))) {
if (match[2]) return true;
str = str.slice(match.index + match[0].length);
}
return false;
};

View File

@ -0,0 +1,100 @@
{
"_from": "is-extglob@^2.1.0",
"_id": "is-extglob@2.1.1",
"_inBundle": false,
"_integrity": "sha1-qIwCU1eR8C7TfHahueqXc8gz+MI=",
"_location": "/guppy-cli/is-extglob",
"_phantomChildren": {},
"_requested": {
"type": "range",
"registry": true,
"raw": "is-extglob@^2.1.0",
"name": "is-extglob",
"escapedName": "is-extglob",
"rawSpec": "^2.1.0",
"saveSpec": null,
"fetchSpec": "^2.1.0"
},
"_requiredBy": [
"/guppy-cli/is-glob"
],
"_resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz",
"_shasum": "a88c02535791f02ed37c76a1b9ea9773c833f8c2",
"_spec": "is-extglob@^2.1.0",
"_where": "/home/manfred/enviPath/ketcher2/ketcher/node_modules/guppy-cli/node_modules/is-glob",
"author": {
"name": "Jon Schlinkert",
"url": "https://github.com/jonschlinkert"
},
"bugs": {
"url": "https://github.com/jonschlinkert/is-extglob/issues"
},
"bundleDependencies": false,
"deprecated": false,
"description": "Returns true if a string has an extglob.",
"devDependencies": {
"gulp-format-md": "^0.1.10",
"mocha": "^3.0.2"
},
"engines": {
"node": ">=0.10.0"
},
"files": [
"index.js"
],
"homepage": "https://github.com/jonschlinkert/is-extglob",
"keywords": [
"bash",
"braces",
"check",
"exec",
"expression",
"extglob",
"glob",
"globbing",
"globstar",
"is",
"match",
"matches",
"pattern",
"regex",
"regular",
"string",
"test"
],
"license": "MIT",
"main": "index.js",
"name": "is-extglob",
"repository": {
"type": "git",
"url": "git+https://github.com/jonschlinkert/is-extglob.git"
},
"scripts": {
"test": "mocha"
},
"verb": {
"toc": false,
"layout": "default",
"tasks": [
"readme"
],
"plugins": [
"gulp-format-md"
],
"related": {
"list": [
"has-glob",
"is-glob",
"micromatch"
]
},
"reflinks": [
"verb",
"verb-generate-readme"
],
"lint": {
"reflinks": true
}
},
"version": "2.1.1"
}

View File

@ -0,0 +1,21 @@
The MIT License (MIT)
Copyright (c) 2014-2016, Jon Schlinkert.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.

View File

@ -0,0 +1,142 @@
# is-glob [![NPM version](https://img.shields.io/npm/v/is-glob.svg?style=flat)](https://www.npmjs.com/package/is-glob) [![NPM downloads](https://img.shields.io/npm/dm/is-glob.svg?style=flat)](https://npmjs.org/package/is-glob) [![Build Status](https://img.shields.io/travis/jonschlinkert/is-glob.svg?style=flat)](https://travis-ci.org/jonschlinkert/is-glob)
> Returns `true` if the given string looks like a glob pattern or an extglob pattern. This makes it easy to create code that only uses external modules like node-glob when necessary, resulting in much faster code execution and initialization time, and a better user experience.
## Install
Install with [npm](https://www.npmjs.com/):
```sh
$ npm install --save is-glob
```
You might also be interested in [is-valid-glob](https://github.com/jonschlinkert/is-valid-glob) and [has-glob](https://github.com/jonschlinkert/has-glob).
## Usage
```js
var isGlob = require('is-glob');
```
**True**
Patterns that have glob characters or regex patterns will return `true`:
```js
isGlob('!foo.js');
isGlob('*.js');
isGlob('**/abc.js');
isGlob('abc/*.js');
isGlob('abc/(aaa|bbb).js');
isGlob('abc/[a-z].js');
isGlob('abc/{a,b}.js');
isGlob('abc/?.js');
//=> true
```
Extglobs
```js
isGlob('abc/@(a).js');
isGlob('abc/!(a).js');
isGlob('abc/+(a).js');
isGlob('abc/*(a).js');
isGlob('abc/?(a).js');
//=> true
```
**False**
Escaped globs or extglobs return `false`:
```js
isGlob('abc/\\@(a).js');
isGlob('abc/\\!(a).js');
isGlob('abc/\\+(a).js');
isGlob('abc/\\*(a).js');
isGlob('abc/\\?(a).js');
isGlob('\\!foo.js');
isGlob('\\*.js');
isGlob('\\*\\*/abc.js');
isGlob('abc/\\*.js');
isGlob('abc/\\(aaa|bbb).js');
isGlob('abc/\\[a-z].js');
isGlob('abc/\\{a,b}.js');
isGlob('abc/\\?.js');
//=> false
```
Patterns that do not have glob patterns return `false`:
```js
isGlob('abc.js');
isGlob('abc/def/ghi.js');
isGlob('foo.js');
isGlob('abc/@.js');
isGlob('abc/+.js');
isGlob();
isGlob(null);
//=> false
```
Arrays are also `false` (If you want to check if an array has a glob pattern, use [has-glob](https://github.com/jonschlinkert/has-glob)):
```js
isGlob(['**/*.js']);
isGlob(['foo.js']);
//=> false
```
## About
### Related projects
* [assemble](https://www.npmjs.com/package/assemble): Get the rocks out of your socks! Assemble makes you fast at creating web projects… [more](https://github.com/assemble/assemble) | [homepage](https://github.com/assemble/assemble "Get the rocks out of your socks! Assemble makes you fast at creating web projects. Assemble is used by thousands of projects for rapid prototyping, creating themes, scaffolds, boilerplates, e-books, UI components, API documentation, blogs, building websit")
* [base](https://www.npmjs.com/package/base): base is the foundation for creating modular, unit testable and highly pluggable node.js applications, starting… [more](https://github.com/node-base/base) | [homepage](https://github.com/node-base/base "base is the foundation for creating modular, unit testable and highly pluggable node.js applications, starting with a handful of common methods, like `set`, `get`, `del` and `use`.")
* [update](https://www.npmjs.com/package/update): Be scalable! Update is a new, open source developer framework and CLI for automating updates… [more](https://github.com/update/update) | [homepage](https://github.com/update/update "Be scalable! Update is a new, open source developer framework and CLI for automating updates of any kind in code projects.")
* [verb](https://www.npmjs.com/package/verb): Documentation generator for GitHub projects. Verb is extremely powerful, easy to use, and is used… [more](https://github.com/verbose/verb) | [homepage](https://github.com/verbose/verb "Documentation generator for GitHub projects. Verb is extremely powerful, easy to use, and is used on hundreds of projects of all sizes to generate everything from API docs to readmes.")
### Contributing
Pull requests and stars are always welcome. For bugs and feature requests, [please create an issue](../../issues/new).
### Contributors
| **Commits** | **Contributor**<br/> |
| --- | --- |
| 40 | [jonschlinkert](https://github.com/jonschlinkert) |
| 1 | [tuvistavie](https://github.com/tuvistavie) |
### Building docs
_(This document was generated by [verb-generate-readme](https://github.com/verbose/verb-generate-readme) (a [verb](https://github.com/verbose/verb) generator), please don't edit the readme directly. Any changes to the readme must be made in [.verb.md](.verb.md).)_
To generate the readme and API documentation with [verb](https://github.com/verbose/verb):
```sh
$ npm install -g verb verb-generate-readme && verb
```
### Running tests
Install dev dependencies:
```sh
$ npm install -d && npm test
```
### Author
**Jon Schlinkert**
* [github/jonschlinkert](https://github.com/jonschlinkert)
* [twitter/jonschlinkert](http://twitter.com/jonschlinkert)
### License
Copyright © 2016, [Jon Schlinkert](https://github.com/jonschlinkert).
Released under the [MIT license](https://github.com/jonschlinkert/is-glob/blob/master/LICENSE).
***
_This file was generated by [verb-generate-readme](https://github.com/verbose/verb-generate-readme), v0.1.31, on October 12, 2016._

View File

@ -0,0 +1,25 @@
/*!
* is-glob <https://github.com/jonschlinkert/is-glob>
*
* Copyright (c) 2014-2016, Jon Schlinkert.
* Licensed under the MIT License.
*/
var isExtglob = require('is-extglob');
module.exports = function isGlob(str) {
if (typeof str !== 'string' || str === '') {
return false;
}
if (isExtglob(str)) return true;
var regex = /(\\).|([*?]|\[.*\]|\{.*\}|\(.*\|.*\)|^!)/;
var match;
while ((match = regex.exec(str))) {
if (match[2]) return true;
str = str.slice(match.index + match[0].length);
}
return false;
};

View File

@ -0,0 +1,119 @@
{
"_from": "is-glob@^3.1.0",
"_id": "is-glob@3.1.0",
"_inBundle": false,
"_integrity": "sha1-e6WuJCF4BKxwcHuWkiVnSGzD6Eo=",
"_location": "/guppy-cli/is-glob",
"_phantomChildren": {},
"_requested": {
"type": "range",
"registry": true,
"raw": "is-glob@^3.1.0",
"name": "is-glob",
"escapedName": "is-glob",
"rawSpec": "^3.1.0",
"saveSpec": null,
"fetchSpec": "^3.1.0"
},
"_requiredBy": [
"/guppy-cli/glob-parent"
],
"_resolved": "https://registry.npmjs.org/is-glob/-/is-glob-3.1.0.tgz",
"_shasum": "7ba5ae24217804ac70707b96922567486cc3e84a",
"_spec": "is-glob@^3.1.0",
"_where": "/home/manfred/enviPath/ketcher2/ketcher/node_modules/guppy-cli/node_modules/glob-parent",
"author": {
"name": "Jon Schlinkert",
"url": "https://github.com/jonschlinkert"
},
"bugs": {
"url": "https://github.com/jonschlinkert/is-glob/issues"
},
"bundleDependencies": false,
"contributors": [
{
"name": "Daniel Perez",
"email": "daniel@claudetech.com",
"url": "http://tuvistavie.com"
},
{
"name": "Jon Schlinkert",
"email": "jon.schlinkert@sellside.com",
"url": "http://twitter.com/jonschlinkert"
}
],
"dependencies": {
"is-extglob": "^2.1.0"
},
"deprecated": false,
"description": "Returns `true` if the given string looks like a glob pattern or an extglob pattern. This makes it easy to create code that only uses external modules like node-glob when necessary, resulting in much faster code execution and initialization time, and a better user experience.",
"devDependencies": {
"gulp-format-md": "^0.1.10",
"mocha": "^3.0.2"
},
"engines": {
"node": ">=0.10.0"
},
"files": [
"index.js"
],
"homepage": "https://github.com/jonschlinkert/is-glob",
"keywords": [
"bash",
"braces",
"check",
"exec",
"expression",
"extglob",
"glob",
"globbing",
"globstar",
"is",
"match",
"matches",
"pattern",
"regex",
"regular",
"string",
"test"
],
"license": "MIT",
"main": "index.js",
"name": "is-glob",
"repository": {
"type": "git",
"url": "git+https://github.com/jonschlinkert/is-glob.git"
},
"scripts": {
"test": "mocha"
},
"verb": {
"layout": "default",
"plugins": [
"gulp-format-md"
],
"related": {
"list": [
"assemble",
"base",
"update",
"verb"
]
},
"reflinks": [
"assemble",
"bach",
"base",
"composer",
"gulp",
"has-glob",
"is-valid-glob",
"micromatch",
"npm",
"scaffold",
"verb",
"vinyl"
]
},
"version": "3.1.0"
}

View File

@ -0,0 +1,54 @@
# isarray
`Array#isArray` for older browsers.
## Usage
```js
var isArray = require('isarray');
console.log(isArray([])); // => true
console.log(isArray({})); // => false
```
## Installation
With [npm](http://npmjs.org) do
```bash
$ npm install isarray
```
Then bundle for the browser with
[browserify](https://github.com/substack/browserify).
With [component](http://component.io) do
```bash
$ component install juliangruber/isarray
```
## License
(MIT)
Copyright (c) 2013 Julian Gruber &lt;julian@juliangruber.com&gt;
Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
of the Software, and to permit persons to whom the Software is furnished to do
so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

View File

@ -0,0 +1,209 @@
/**
* Require the given path.
*
* @param {String} path
* @return {Object} exports
* @api public
*/
function require(path, parent, orig) {
var resolved = require.resolve(path);
// lookup failed
if (null == resolved) {
orig = orig || path;
parent = parent || 'root';
var err = new Error('Failed to require "' + orig + '" from "' + parent + '"');
err.path = orig;
err.parent = parent;
err.require = true;
throw err;
}
var module = require.modules[resolved];
// perform real require()
// by invoking the module's
// registered function
if (!module.exports) {
module.exports = {};
module.client = module.component = true;
module.call(this, module.exports, require.relative(resolved), module);
}
return module.exports;
}
/**
* Registered modules.
*/
require.modules = {};
/**
* Registered aliases.
*/
require.aliases = {};
/**
* Resolve `path`.
*
* Lookup:
*
* - PATH/index.js
* - PATH.js
* - PATH
*
* @param {String} path
* @return {String} path or null
* @api private
*/
require.resolve = function(path) {
if (path.charAt(0) === '/') path = path.slice(1);
var index = path + '/index.js';
var paths = [
path,
path + '.js',
path + '.json',
path + '/index.js',
path + '/index.json'
];
for (var i = 0; i < paths.length; i++) {
var path = paths[i];
if (require.modules.hasOwnProperty(path)) return path;
}
if (require.aliases.hasOwnProperty(index)) {
return require.aliases[index];
}
};
/**
* Normalize `path` relative to the current path.
*
* @param {String} curr
* @param {String} path
* @return {String}
* @api private
*/
require.normalize = function(curr, path) {
var segs = [];
if ('.' != path.charAt(0)) return path;
curr = curr.split('/');
path = path.split('/');
for (var i = 0; i < path.length; ++i) {
if ('..' == path[i]) {
curr.pop();
} else if ('.' != path[i] && '' != path[i]) {
segs.push(path[i]);
}
}
return curr.concat(segs).join('/');
};
/**
* Register module at `path` with callback `definition`.
*
* @param {String} path
* @param {Function} definition
* @api private
*/
require.register = function(path, definition) {
require.modules[path] = definition;
};
/**
* Alias a module definition.
*
* @param {String} from
* @param {String} to
* @api private
*/
require.alias = function(from, to) {
if (!require.modules.hasOwnProperty(from)) {
throw new Error('Failed to alias "' + from + '", it does not exist');
}
require.aliases[to] = from;
};
/**
* Return a require function relative to the `parent` path.
*
* @param {String} parent
* @return {Function}
* @api private
*/
require.relative = function(parent) {
var p = require.normalize(parent, '..');
/**
* lastIndexOf helper.
*/
function lastIndexOf(arr, obj) {
var i = arr.length;
while (i--) {
if (arr[i] === obj) return i;
}
return -1;
}
/**
* The relative require() itself.
*/
function localRequire(path) {
var resolved = localRequire.resolve(path);
return require(resolved, parent, path);
}
/**
* Resolve relative to the parent.
*/
localRequire.resolve = function(path) {
var c = path.charAt(0);
if ('/' == c) return path.slice(1);
if ('.' == c) return require.normalize(p, path);
// resolve deps by returning
// the dep in the nearest "deps"
// directory
var segs = parent.split('/');
var i = lastIndexOf(segs, 'deps') + 1;
if (!i) i = 0;
path = segs.slice(0, i + 1).join('/') + '/deps/' + path;
return path;
};
/**
* Check if module is defined at `path`.
*/
localRequire.exists = function(path) {
return require.modules.hasOwnProperty(localRequire.resolve(path));
};
return localRequire;
};
require.register("isarray/index.js", function(exports, require, module){
module.exports = Array.isArray || function (arr) {
return Object.prototype.toString.call(arr) == '[object Array]';
};
});
require.alias("isarray/index.js", "isarray/index.js");

View File

@ -0,0 +1,19 @@
{
"name" : "isarray",
"description" : "Array#isArray for older browsers",
"version" : "0.0.1",
"repository" : "juliangruber/isarray",
"homepage": "https://github.com/juliangruber/isarray",
"main" : "index.js",
"scripts" : [
"index.js"
],
"dependencies" : {},
"keywords": ["browser","isarray","array"],
"author": {
"name": "Julian Gruber",
"email": "mail@juliangruber.com",
"url": "http://juliangruber.com"
},
"license": "MIT"
}

View File

@ -0,0 +1,3 @@
module.exports = Array.isArray || function (arr) {
return Object.prototype.toString.call(arr) == '[object Array]';
};

View File

@ -0,0 +1,57 @@
{
"_from": "isarray@0.0.1",
"_id": "isarray@0.0.1",
"_inBundle": false,
"_integrity": "sha1-ihis/Kmo9Bd+Cav8YDiTmwXR7t8=",
"_location": "/guppy-cli/isarray",
"_phantomChildren": {},
"_requested": {
"type": "version",
"registry": true,
"raw": "isarray@0.0.1",
"name": "isarray",
"escapedName": "isarray",
"rawSpec": "0.0.1",
"saveSpec": null,
"fetchSpec": "0.0.1"
},
"_requiredBy": [
"/guppy-cli/glob-stream/readable-stream"
],
"_resolved": "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz",
"_shasum": "8a18acfca9a8f4177e09abfc6038939b05d1eedf",
"_spec": "isarray@0.0.1",
"_where": "/home/manfred/enviPath/ketcher2/ketcher/node_modules/guppy-cli/node_modules/glob-stream/node_modules/readable-stream",
"author": {
"name": "Julian Gruber",
"email": "mail@juliangruber.com",
"url": "http://juliangruber.com"
},
"bugs": {
"url": "https://github.com/juliangruber/isarray/issues"
},
"bundleDependencies": false,
"dependencies": {},
"deprecated": false,
"description": "Array#isArray for older browsers",
"devDependencies": {
"tap": "*"
},
"homepage": "https://github.com/juliangruber/isarray",
"keywords": [
"browser",
"isarray",
"array"
],
"license": "MIT",
"main": "index.js",
"name": "isarray",
"repository": {
"type": "git",
"url": "git://github.com/juliangruber/isarray.git"
},
"scripts": {
"test": "tap test/*.js"
},
"version": "0.0.1"
}

View File

@ -0,0 +1,18 @@
This software is released under the MIT license:
Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
the Software, and to permit persons to whom the Software is furnished to do so,
subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

View File

@ -0,0 +1,7 @@
var stringify = require('../');
var obj = { c: 8, b: [{z:6,y:5,x:4},7], a: 3 };
var s = stringify(obj, function (a, b) {
return a.key < b.key ? 1 : -1;
});
console.log(s);

View File

@ -0,0 +1,3 @@
var stringify = require('../');
var obj = { c: 8, b: [{z:6,y:5,x:4},7], a: 3 };
console.log(stringify(obj));

View File

@ -0,0 +1,3 @@
var stringify = require('../');
var obj = { c: 6, b: [4,5], a: 3 };
console.log(stringify(obj));

View File

@ -0,0 +1,7 @@
var stringify = require('../');
var obj = { d: 6, c: 5, b: [{z:3,y:2,x:1},9], a: 10 };
var s = stringify(obj, function (a, b) {
return a.value < b.value ? 1 : -1;
});
console.log(s);

View File

@ -0,0 +1,84 @@
var json = typeof JSON !== 'undefined' ? JSON : require('jsonify');
module.exports = function (obj, opts) {
if (!opts) opts = {};
if (typeof opts === 'function') opts = { cmp: opts };
var space = opts.space || '';
if (typeof space === 'number') space = Array(space+1).join(' ');
var cycles = (typeof opts.cycles === 'boolean') ? opts.cycles : false;
var replacer = opts.replacer || function(key, value) { return value; };
var cmp = opts.cmp && (function (f) {
return function (node) {
return function (a, b) {
var aobj = { key: a, value: node[a] };
var bobj = { key: b, value: node[b] };
return f(aobj, bobj);
};
};
})(opts.cmp);
var seen = [];
return (function stringify (parent, key, node, level) {
var indent = space ? ('\n' + new Array(level + 1).join(space)) : '';
var colonSeparator = space ? ': ' : ':';
if (node && node.toJSON && typeof node.toJSON === 'function') {
node = node.toJSON();
}
node = replacer.call(parent, key, node);
if (node === undefined) {
return;
}
if (typeof node !== 'object' || node === null) {
return json.stringify(node);
}
if (isArray(node)) {
var out = [];
for (var i = 0; i < node.length; i++) {
var item = stringify(node, i, node[i], level+1) || json.stringify(null);
out.push(indent + space + item);
}
return '[' + out.join(',') + indent + ']';
}
else {
if (seen.indexOf(node) !== -1) {
if (cycles) return json.stringify('__cycle__');
throw new TypeError('Converting circular structure to JSON');
}
else seen.push(node);
var keys = objectKeys(node).sort(cmp && cmp(node));
var out = [];
for (var i = 0; i < keys.length; i++) {
var key = keys[i];
var value = stringify(node, key, node[key], level+1);
if(!value) continue;
var keyValue = json.stringify(key)
+ colonSeparator
+ value;
out.push(indent + space + keyValue);
}
seen.splice(seen.indexOf(node), 1);
return '{' + out.join(',') + indent + '}';
}
})({ '': obj }, '', obj, 0);
};
var isArray = Array.isArray || function (x) {
return {}.toString.call(x) === '[object Array]';
};
var objectKeys = Object.keys || function (obj) {
var has = Object.prototype.hasOwnProperty || function () { return true };
var keys = [];
for (var key in obj) {
if (has.call(obj, key)) keys.push(key);
}
return keys;
};

View File

@ -0,0 +1,74 @@
{
"_from": "json-stable-stringify@^1.0.0",
"_id": "json-stable-stringify@1.0.1",
"_inBundle": false,
"_integrity": "sha1-mnWdOcXy/1A/1TAGRu1EX4jE+a8=",
"_location": "/guppy-cli/json-stable-stringify",
"_phantomChildren": {},
"_requested": {
"type": "range",
"registry": true,
"raw": "json-stable-stringify@^1.0.0",
"name": "json-stable-stringify",
"escapedName": "json-stable-stringify",
"rawSpec": "^1.0.0",
"saveSpec": null,
"fetchSpec": "^1.0.0"
},
"_requiredBy": [
"/guppy-cli/unique-stream"
],
"_resolved": "https://registry.npmjs.org/json-stable-stringify/-/json-stable-stringify-1.0.1.tgz",
"_shasum": "9a759d39c5f2ff503fd5300646ed445f88c4f9af",
"_spec": "json-stable-stringify@^1.0.0",
"_where": "/home/manfred/enviPath/ketcher2/ketcher/node_modules/guppy-cli/node_modules/unique-stream",
"author": {
"name": "James Halliday",
"email": "mail@substack.net",
"url": "http://substack.net"
},
"bugs": {
"url": "https://github.com/substack/json-stable-stringify/issues"
},
"bundleDependencies": false,
"dependencies": {
"jsonify": "~0.0.0"
},
"deprecated": false,
"description": "deterministic JSON.stringify() with custom sorting to get deterministic hashes from stringified results",
"devDependencies": {
"tape": "~1.0.4"
},
"homepage": "https://github.com/substack/json-stable-stringify",
"keywords": [
"json",
"stringify",
"deterministic",
"hash",
"sort",
"stable"
],
"license": "MIT",
"main": "index.js",
"name": "json-stable-stringify",
"repository": {
"type": "git",
"url": "git://github.com/substack/json-stable-stringify.git"
},
"scripts": {
"test": "tape test/*.js"
},
"testling": {
"files": "test/*.js",
"browsers": [
"ie/8..latest",
"ff/5",
"ff/latest",
"chrome/15",
"chrome/latest",
"safari/latest",
"opera/latest"
]
},
"version": "1.0.1"
}

View File

@ -0,0 +1,130 @@
# json-stable-stringify
deterministic version of `JSON.stringify()` so you can get a consistent hash
from stringified results
You can also pass in a custom comparison function.
[![browser support](https://ci.testling.com/substack/json-stable-stringify.png)](https://ci.testling.com/substack/json-stable-stringify)
[![build status](https://secure.travis-ci.org/substack/json-stable-stringify.png)](http://travis-ci.org/substack/json-stable-stringify)
# example
``` js
var stringify = require('json-stable-stringify');
var obj = { c: 8, b: [{z:6,y:5,x:4},7], a: 3 };
console.log(stringify(obj));
```
output:
```
{"a":3,"b":[{"x":4,"y":5,"z":6},7],"c":8}
```
# methods
``` js
var stringify = require('json-stable-stringify')
```
## var str = stringify(obj, opts)
Return a deterministic stringified string `str` from the object `obj`.
## options
### cmp
If `opts` is given, you can supply an `opts.cmp` to have a custom comparison
function for object keys. Your function `opts.cmp` is called with these
parameters:
``` js
opts.cmp({ key: akey, value: avalue }, { key: bkey, value: bvalue })
```
For example, to sort on the object key names in reverse order you could write:
``` js
var stringify = require('json-stable-stringify');
var obj = { c: 8, b: [{z:6,y:5,x:4},7], a: 3 };
var s = stringify(obj, function (a, b) {
return a.key < b.key ? 1 : -1;
});
console.log(s);
```
which results in the output string:
```
{"c":8,"b":[{"z":6,"y":5,"x":4},7],"a":3}
```
Or if you wanted to sort on the object values in reverse order, you could write:
```
var stringify = require('json-stable-stringify');
var obj = { d: 6, c: 5, b: [{z:3,y:2,x:1},9], a: 10 };
var s = stringify(obj, function (a, b) {
return a.value < b.value ? 1 : -1;
});
console.log(s);
```
which outputs:
```
{"d":6,"c":5,"b":[{"z":3,"y":2,"x":1},9],"a":10}
```
### space
If you specify `opts.space`, it will indent the output for pretty-printing.
Valid values are strings (e.g. `{space: '\t'}`) or a number of spaces
(`{space: 3}`).
For example:
```js
var obj = { b: 1, a: { foo: 'bar', and: [1, 2, 3] } };
var s = stringify(obj, { space: ' ' });
console.log(s);
```
which outputs:
```
{
"a": {
"and": [
1,
2,
3
],
"foo": "bar"
},
"b": 1
}
```
### replacer
The replacer parameter is a function `opts.replacer(key, value)` that behaves
the same as the replacer
[from the core JSON object](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Guide/Using_native_JSON#The_replacer_parameter).
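For illustration, a minimal sketch of the replacer option (not from the upstream readme; the `secret` key name is just an example):
```js
var stringify = require('json-stable-stringify');
var obj = { user: 'alice', secret: 'hunter2' };
// drop any key named "secret"; keep every other value unchanged
var s = stringify(obj, {
    replacer: function (key, value) {
        return key === 'secret' ? undefined : value;
    }
});
console.log(s); // {"user":"alice"}
```
Returning `undefined` from the replacer omits that key from the output, just like the native `JSON.stringify` behavior.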
# install
With [npm](https://npmjs.org) do:
```
npm install json-stable-stringify
```
# license
MIT

View File

@ -0,0 +1,11 @@
var test = require('tape');
var stringify = require('../');
test('custom comparison function', function (t) {
t.plan(1);
var obj = { c: 8, b: [{z:6,y:5,x:4},7], a: 3 };
var s = stringify(obj, function (a, b) {
return a.key < b.key ? 1 : -1;
});
t.equal(s, '{"c":8,"b":[{"z":6,"y":5,"x":4},7],"a":3}');
});

View File

@ -0,0 +1,35 @@
var test = require('tape');
var stringify = require('../');
test('nested', function (t) {
t.plan(1);
var obj = { c: 8, b: [{z:6,y:5,x:4},7], a: 3 };
t.equal(stringify(obj), '{"a":3,"b":[{"x":4,"y":5,"z":6},7],"c":8}');
});
test('cyclic (default)', function (t) {
t.plan(1);
var one = { a: 1 };
var two = { a: 2, one: one };
one.two = two;
try {
stringify(one);
} catch (ex) {
t.equal(ex.toString(), 'TypeError: Converting circular structure to JSON');
}
});
test('cyclic (specifically allowed)', function (t) {
t.plan(1);
var one = { a: 1 };
var two = { a: 2, one: one };
one.two = two;
t.equal(stringify(one, {cycles:true}), '{"a":1,"two":{"a":2,"one":"__cycle__"}}');
});
test('repeated non-cyclic value', function(t) {
t.plan(1);
var one = { x: 1 };
var two = { a: one, b: one };
t.equal(stringify(two), '{"a":{"x":1},"b":{"x":1}}');
});

View File

@ -0,0 +1,74 @@
var test = require('tape');
var stringify = require('../');
test('replace root', function (t) {
t.plan(1);
var obj = { a: 1, b: 2, c: false };
var replacer = function(key, value) { return 'one'; };
t.equal(stringify(obj, { replacer: replacer }), '"one"');
});
test('replace numbers', function (t) {
t.plan(1);
var obj = { a: 1, b: 2, c: false };
var replacer = function(key, value) {
if(value === 1) return 'one';
if(value === 2) return 'two';
return value;
};
t.equal(stringify(obj, { replacer: replacer }), '{"a":"one","b":"two","c":false}');
});
test('replace with object', function (t) {
t.plan(1);
var obj = { a: 1, b: 2, c: false };
var replacer = function(key, value) {
if(key === 'b') return { d: 1 };
if(value === 1) return 'one';
return value;
};
t.equal(stringify(obj, { replacer: replacer }), '{"a":"one","b":{"d":"one"},"c":false}');
});
test('replace with undefined', function (t) {
t.plan(1);
var obj = { a: 1, b: 2, c: false };
var replacer = function(key, value) {
if(value === false) return;
return value;
};
t.equal(stringify(obj, { replacer: replacer }), '{"a":1,"b":2}');
});
test('replace with array', function (t) {
t.plan(1);
var obj = { a: 1, b: 2, c: false };
var replacer = function(key, value) {
if(key === 'b') return ['one', 'two'];
return value;
};
t.equal(stringify(obj, { replacer: replacer }), '{"a":1,"b":["one","two"],"c":false}');
});
test('replace array item', function (t) {
t.plan(1);
var obj = { a: 1, b: 2, c: [1,2] };
var replacer = function(key, value) {
if(value === 1) return 'one';
if(value === 2) return 'two';
return value;
};
t.equal(stringify(obj, { replacer: replacer }), '{"a":"one","b":"two","c":["one","two"]}');
});

View File

@ -0,0 +1,59 @@
var test = require('tape');
var stringify = require('../');
test('space parameter', function (t) {
t.plan(1);
var obj = { one: 1, two: 2 };
t.equal(stringify(obj, {space: ' '}), ''
+ '{\n'
+ ' "one": 1,\n'
+ ' "two": 2\n'
+ '}'
);
});
test('space parameter (with tabs)', function (t) {
t.plan(1);
var obj = { one: 1, two: 2 };
t.equal(stringify(obj, {space: '\t'}), ''
+ '{\n'
+ '\t"one": 1,\n'
+ '\t"two": 2\n'
+ '}'
);
});
test('space parameter (with a number)', function (t) {
t.plan(1);
var obj = { one: 1, two: 2 };
t.equal(stringify(obj, {space: 3}), ''
+ '{\n'
+ ' "one": 1,\n'
+ ' "two": 2\n'
+ '}'
);
});
test('space parameter (nested objects)', function (t) {
t.plan(1);
var obj = { one: 1, two: { b: 4, a: [2,3] } };
t.equal(stringify(obj, {space: ' '}), ''
+ '{\n'
+ ' "one": 1,\n'
+ ' "two": {\n'
+ ' "a": [\n'
+ ' 2,\n'
+ ' 3\n'
+ ' ],\n'
+ ' "b": 4\n'
+ ' }\n'
+ '}'
);
});
test('space parameter (same as native)', function (t) {
t.plan(1);
// for this test, properties need to be in alphabetical order
var obj = { one: 1, two: { a: [2,3], b: 4 } };
t.equal(stringify(obj, {space: ' '}), JSON.stringify(obj, null, ' '));
});

View File

@ -0,0 +1,32 @@
var test = require('tape');
var stringify = require('../');
test('simple object', function (t) {
t.plan(1);
var obj = { c: 6, b: [4,5], a: 3, z: null };
t.equal(stringify(obj), '{"a":3,"b":[4,5],"c":6,"z":null}');
});
test('object with undefined', function (t) {
t.plan(1);
var obj = { a: 3, z: undefined };
t.equal(stringify(obj), '{"a":3}');
});
test('array with undefined', function (t) {
t.plan(1);
var obj = [4, undefined, 6];
t.equal(stringify(obj), '[4,null,6]');
});
test('object with empty string', function (t) {
t.plan(1);
var obj = { a: 3, z: '' };
t.equal(stringify(obj), '{"a":3,"z":""}');
});
test('array with empty string', function (t) {
t.plan(1);
var obj = [4, '', 6];
t.equal(stringify(obj), '[4,"",6]');
});

View File

@ -0,0 +1,20 @@
var test = require('tape');
var stringify = require('../');
test('toJSON function', function (t) {
t.plan(1);
var obj = { one: 1, two: 2, toJSON: function() { return { one: 1 }; } };
t.equal(stringify(obj), '{"one":1}' );
});
test('toJSON returns string', function (t) {
t.plan(1);
var obj = { one: 1, two: 2, toJSON: function() { return 'one'; } };
t.equal(stringify(obj), '"one"');
});
test('toJSON returns array', function (t) {
t.plan(1);
var obj = { one: 1, two: 2, toJSON: function() { return ['one']; } };
t.equal(stringify(obj), '["one"]');
});

View File

@ -0,0 +1,20 @@
The MIT License (MIT)
Copyright (c) 2014 Artem Medeusheyev
Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
the Software, and to permit persons to whom the Software is furnished to do so,
subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

View File

@ -0,0 +1,65 @@
# ordered-read-streams [![NPM version](https://badge.fury.io/js/ordered-read-streams.png)](http://badge.fury.io/js/ordered-read-streams) [![Build Status](https://travis-ci.org/armed/ordered-read-streams.png?branch=master)](https://travis-ci.org/armed/ordered-read-streams)
Combines array of streams into one read stream in strict order.
## Installation
`npm install ordered-read-streams`
## Overview
`ordered-read-streams` handles all data/errors from input streams in parallel, but emits data/errors in the strict order in which the streams are passed to the constructor. This is an `objectMode = true` stream.
## Example
```js
var through = require('through2');
var Ordered = require('ordered-read-streams');
var s1 = through.obj(function (data, enc, next) {
var self = this;
setTimeout(function () {
self.push(data);
next();
}, 200)
});
var s2 = through.obj(function (data, enc, next) {
var self = this;
setTimeout(function () {
self.push(data);
next();
}, 30)
});
var s3 = through.obj(function (data, enc, next) {
var self = this;
setTimeout(function () {
self.push(data);
next();
}, 100)
});
var streams = new Ordered([s1, s2, s3]);
streams.on('data', function (data) {
console.log(data);
})
s1.write('stream 1');
s1.end();
s2.write('stream 2');
s2.end();
s3.write('stream 3');
s3.end();
```
Output will be:
```
stream 1
stream 2
stream 3
```
## Licence
MIT

View File

@ -0,0 +1,81 @@
var Readable = require('readable-stream/readable');
var isReadable = require('is-stream').readable;
var util = require('util');
function addStream(streams, stream)
{
if(!isReadable(stream)) throw new Error('All input streams must be readable');
var self = this;
stream._buffer = [];
stream.on('readable', function()
{
var chunk = stream.read();
if (chunk === null)
return;
if(this === streams[0])
self.push(chunk);
else
this._buffer.push(chunk);
});
stream.on('end', function()
{
for(var stream = streams[0];
stream && stream._readableState.ended;
stream = streams[0])
{
while(stream._buffer.length)
self.push(stream._buffer.shift());
streams.shift();
}
if(!streams.length) self.push(null);
});
stream.on('error', this.emit.bind(this, 'error'));
streams.push(stream);
}
function OrderedStreams(streams, options) {
if (!(this instanceof(OrderedStreams))) {
return new OrderedStreams(streams, options);
}
streams = streams || [];
options = options || {};
options.objectMode = true;
Readable.call(this, options);
if(!Array.isArray(streams)) streams = [streams];
if(!streams.length) return this.push(null); // no streams, close
var addStream_bind = addStream.bind(this, []);
streams.forEach(function(item)
{
if(Array.isArray(item))
item.forEach(addStream_bind);
else
addStream_bind(item);
});
}
util.inherits(OrderedStreams, Readable);
OrderedStreams.prototype._read = function () {};
module.exports = OrderedStreams;

View File

@ -0,0 +1,60 @@
{
"_from": "ordered-read-streams@^0.3.0",
"_id": "ordered-read-streams@0.3.0",
"_inBundle": false,
"_integrity": "sha1-cTfmmzKYuzQiR6G77jiByA4v14s=",
"_location": "/guppy-cli/ordered-read-streams",
"_phantomChildren": {},
"_requested": {
"type": "range",
"registry": true,
"raw": "ordered-read-streams@^0.3.0",
"name": "ordered-read-streams",
"escapedName": "ordered-read-streams",
"rawSpec": "^0.3.0",
"saveSpec": null,
"fetchSpec": "^0.3.0"
},
"_requiredBy": [
"/guppy-cli/glob-stream"
],
"_resolved": "https://registry.npmjs.org/ordered-read-streams/-/ordered-read-streams-0.3.0.tgz",
"_shasum": "7137e69b3298bb342247a1bbee3881c80e2fd78b",
"_spec": "ordered-read-streams@^0.3.0",
"_where": "/home/manfred/enviPath/ketcher2/ketcher/node_modules/guppy-cli/node_modules/glob-stream",
"author": {
"name": "Artem Medeusheyev",
"email": "artem.medeusheyev@gmail.com"
},
"bugs": {
"url": "https://github.com/armed/ordered-read-streams/issues"
},
"bundleDependencies": false,
"dependencies": {
"is-stream": "^1.0.1",
"readable-stream": "^2.0.1"
},
"deprecated": false,
"description": "Combines array of streams into one read stream in strict order",
"devDependencies": {
"jshint": "^2.8.0",
"mocha": "^2.2.5",
"pre-commit": "^1.0.10",
"should": "^7.0.1",
"through2": "^2.0.0"
},
"files": [
"index.js"
],
"homepage": "https://github.com/armed/ordered-read-streams#readme",
"license": "MIT",
"name": "ordered-read-streams",
"repository": {
"type": "git",
"url": "git+https://github.com/armed/ordered-read-streams.git"
},
"scripts": {
"test": "jshint *.js test/*.js && mocha"
},
"version": "0.3.0"
}

View File

@ -0,0 +1,26 @@
Copyright (c) 2012, Artur Adib <aadib@mozilla.com>
All rights reserved.
You may use this project under the terms of the New BSD license as follows:
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
* Neither the name of Artur Adib nor the
names of the contributors may be used to endorse or promote products
derived from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL ARTUR ADIB BE LIABLE FOR ANY
DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

View File

@ -0,0 +1,579 @@
# ShellJS - Unix shell commands for Node.js [![Build Status](https://secure.travis-ci.org/arturadib/shelljs.png)](http://travis-ci.org/arturadib/shelljs)
ShellJS is a portable **(Windows/Linux/OS X)** implementation of Unix shell commands on top of the Node.js API. You can use it to eliminate your shell script's dependency on Unix while still keeping its familiar and powerful commands. You can also install it globally so you can run it from outside Node projects - say goodbye to those gnarly Bash scripts!
The project is [unit-tested](http://travis-ci.org/arturadib/shelljs) and battle-tested in projects like:
+ [PDF.js](http://github.com/mozilla/pdf.js) - Firefox's next-gen PDF reader
+ [Firebug](http://getfirebug.com/) - Firefox's infamous debugger
+ [JSHint](http://jshint.com) - Most popular JavaScript linter
+ [Zepto](http://zeptojs.com) - jQuery-compatible JavaScript library for modern browsers
+ [Yeoman](http://yeoman.io/) - Web application stack and development tool
+ [Deployd.com](http://deployd.com) - Open source PaaS for quick API backend generation
and [many more](https://npmjs.org/browse/depended/shelljs).
Connect with [@r2r](http://twitter.com/r2r) on Twitter for questions, suggestions, etc.
## Installing
Via npm:
```bash
$ npm install [-g] shelljs
```
If the global option `-g` is specified, the binary `shjs` will be installed. This makes it possible to
run ShellJS scripts much like any shell script from the command line, i.e. without requiring a `node_modules` folder:
```bash
$ shjs my_script
```
You can also just copy `shell.js` into your project's directory, and `require()` accordingly.
## Examples
### JavaScript
```javascript
require('shelljs/global');
if (!which('git')) {
echo('Sorry, this script requires git');
exit(1);
}
// Copy files to release dir
mkdir('-p', 'out/Release');
cp('-R', 'stuff/*', 'out/Release');
// Replace macros in each .js file
cd('lib');
ls('*.js').forEach(function(file) {
sed('-i', 'BUILD_VERSION', 'v0.1.2', file);
sed('-i', /.*REMOVE_THIS_LINE.*\n/, '', file);
sed('-i', /.*REPLACE_LINE_WITH_MACRO.*\n/, cat('macro.js'), file);
});
cd('..');
// Run external tool synchronously
if (exec('git commit -am "Auto-commit"').code !== 0) {
echo('Error: Git commit failed');
exit(1);
}
```
### CoffeeScript
```coffeescript
require 'shelljs/global'
if not which 'git'
echo 'Sorry, this script requires git'
exit 1
# Copy files to release dir
mkdir '-p', 'out/Release'
cp '-R', 'stuff/*', 'out/Release'
# Replace macros in each .js file
cd 'lib'
for file in ls '*.js'
sed '-i', 'BUILD_VERSION', 'v0.1.2', file
sed '-i', /.*REMOVE_THIS_LINE.*\n/, '', file
sed '-i', /.*REPLACE_LINE_WITH_MACRO.*\n/, cat 'macro.js', file
cd '..'
# Run external tool synchronously
if (exec 'git commit -am "Auto-commit"').code != 0
echo 'Error: Git commit failed'
exit 1
```
## Global vs. Local
The example above uses the convenience script `shelljs/global` to reduce verbosity. If polluting your global namespace is not desirable, simply require `shelljs`.
Example:
```javascript
var shell = require('shelljs');
shell.echo('hello world');
```
## Make tool
A convenience script `shelljs/make` is also provided to mimic the behavior of a Unix Makefile. In this case all shell objects are global, and command line arguments will cause the script to execute only the corresponding function in the global `target` object. To avoid redundant calls, target functions are executed only once per script.
Example (CoffeeScript):
```coffeescript
require 'shelljs/make'
target.all = ->
target.bundle()
target.docs()
target.bundle = ->
cd __dirname
mkdir 'build'
cd 'lib'
(cat '*.js').to '../build/output.js'
target.docs = ->
cd __dirname
mkdir 'docs'
cd 'lib'
for file in ls '*.js'
text = grep '//@', file # extract special comments
text.replace '//@', '' # remove comment tags
text.to 'docs/my_docs.md'
```
To run the target `all`, call the above script without arguments: `$ node make`. To run the target `docs`: `$ node make docs`.
You can also pass arguments to your targets by using the `--` separator. For example, to pass `arg1` and `arg2` to a target `bundle`, do `$ node make bundle -- arg1 arg2`:
```javascript
require('shelljs/make');
target.bundle = function(argsArray) {
// argsArray = ['arg1', 'arg2']
/* ... */
}
```
<!-- DO NOT MODIFY BEYOND THIS POINT - IT'S AUTOMATICALLY GENERATED -->
## Command reference
All commands run synchronously, unless otherwise stated.
### cd('dir')
Changes to directory `dir` for the duration of the script
### pwd()
Returns the current directory.
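For illustration, a small sketch combining the two (the `/tmp` path is just an example):
```javascript
require('shelljs/global');
cd('/tmp');       // change the script's working directory
var cwd = pwd();  // current directory as a string
echo(cwd);        // "/tmp"
```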
### ls([options ,] path [,path ...])
### ls([options ,] path_array)
Available options:
+ `-R`: recursive
+ `-A`: all files (include files beginning with `.`, except for `.` and `..`)
Examples:
```javascript
ls('projs/*.js');
ls('-R', '/users/me', '/tmp');
ls('-R', ['/users/me', '/tmp']); // same as above
```
Returns array of files in the given path, or in current directory if no path provided.
### find(path [,path ...])
### find(path_array)
Examples:
```javascript
find('src', 'lib');
find(['src', 'lib']); // same as above
find('.').filter(function(file) { return file.match(/\.js$/); });
```
Returns array of all files (however deep) in the given paths.
The main difference from `ls('-R', path)` is that the resulting file names
include the base directories, e.g. `lib/resources/file1` instead of just `file1`.
### cp([options ,] source [,source ...], dest)
### cp([options ,] source_array, dest)
Available options:
+ `-f`: force
+ `-r, -R`: recursive
Examples:
```javascript
cp('file1', 'dir1');
cp('-Rf', '/tmp/*', '/usr/local/*', '/home/tmp');
cp('-Rf', ['/tmp/*', '/usr/local/*'], '/home/tmp'); // same as above
```
Copies files. The wildcard `*` is accepted.
### rm([options ,] file [, file ...])
### rm([options ,] file_array)
Available options:
+ `-f`: force
+ `-r, -R`: recursive
Examples:
```javascript
rm('-rf', '/tmp/*');
rm('some_file.txt', 'another_file.txt');
rm(['some_file.txt', 'another_file.txt']); // same as above
```
Removes files. The wildcard `*` is accepted.
### mv(source [, source ...], dest)
### mv(source_array, dest)
Available options:
+ `f`: force
Examples:
```javascript
mv('-f', 'file', 'dir/');
mv('file1', 'file2', 'dir/');
mv(['file1', 'file2'], 'dir/'); // same as above
```
Moves files. The wildcard `*` is accepted.
### mkdir([options ,] dir [, dir ...])
### mkdir([options ,] dir_array)
Available options:
+ `p`: full path (will create intermediate dirs if necessary)
Examples:
```javascript
mkdir('-p', '/tmp/a/b/c/d', '/tmp/e/f/g');
mkdir('-p', ['/tmp/a/b/c/d', '/tmp/e/f/g']); // same as above
```
Creates directories.
### test(expression)
Available expression primaries:
+ `'-b', 'path'`: true if path is a block device
+ `'-c', 'path'`: true if path is a character device
+ `'-d', 'path'`: true if path is a directory
+ `'-e', 'path'`: true if path exists
+ `'-f', 'path'`: true if path is a regular file
+ `'-L', 'path'`: true if path is a symbolic link
+ `'-p', 'path'`: true if path is a pipe (FIFO)
+ `'-S', 'path'`: true if path is a socket
Examples:
```javascript
if (test('-d', path)) { /* do something with dir */ };
if (!test('-f', path)) continue; // skip if it's a regular file
```
Evaluates expression using the available primaries and returns corresponding value.
### cat(file [, file ...])
### cat(file_array)
Examples:
```javascript
var str = cat('file*.txt');
var str = cat('file1', 'file2');
var str = cat(['file1', 'file2']); // same as above
```
Returns a string containing the given file, or a concatenated string
containing the files if more than one file is given (a new line character is
introduced between each file). Wildcard `*` accepted.
### 'string'.to(file)
Examples:
```javascript
cat('input.txt').to('output.txt');
```
Analogous to the redirection operator `>` in Unix, but works with JavaScript strings (such as
those returned by `cat`, `grep`, etc). _Like Unix redirections, `to()` will overwrite any existing file!_
### 'string'.toEnd(file)
Examples:
```javascript
cat('input.txt').toEnd('output.txt');
```
Analogous to the redirect-and-append operator `>>` in Unix, but works with JavaScript strings (such as
those returned by `cat`, `grep`, etc).
### sed([options ,] search_regex, replacement, file)
Available options:
+ `-i`: Replace contents of 'file' in-place. _Note that no backups will be created!_
Examples:
```javascript
sed('-i', 'PROGRAM_VERSION', 'v0.1.3', 'source.js');
sed(/.*DELETE_THIS_LINE.*\n/, '', 'source.js');
```
Reads an input string from `file` and performs a JavaScript `replace()` on the input
using the given search regex and replacement string or function. Returns the new string after replacement.
### grep([options ,] regex_filter, file [, file ...])
### grep([options ,] regex_filter, file_array)
Available options:
+ `-v`: Inverse the sense of the regex and print the lines not matching the criteria.
Examples:
```javascript
grep('-v', 'GLOBAL_VARIABLE', '*.js');
grep('GLOBAL_VARIABLE', '*.js');
```
Reads input string from given files and returns a string containing all lines of the
file that match the given `regex_filter`. Wildcard `*` accepted.
### which(command)
Examples:
```javascript
var nodeExec = which('node');
```
Searches for `command` in the system's PATH. On Windows looks for `.exe`, `.cmd`, and `.bat` extensions.
Returns string containing the absolute path to the command.
### echo(string [,string ...])
Examples:
```javascript
echo('hello world');
var str = echo('hello world');
```
Prints string to stdout, and returns string with additional utility methods
like `.to()`.
### pushd([options,] [dir | '-N' | '+N'])
Available options:
+ `-n`: Suppresses the normal change of directory when adding directories to the stack, so that only the stack is manipulated.
Arguments:
+ `dir`: Makes the current working directory be the top of the stack, and then executes the equivalent of `cd dir`.
+ `+N`: Brings the Nth directory (counting from the left of the list printed by dirs, starting with zero) to the top of the list by rotating the stack.
+ `-N`: Brings the Nth directory (counting from the right of the list printed by dirs, starting with zero) to the top of the list by rotating the stack.
Examples:
```javascript
// process.cwd() === '/usr'
pushd('/etc'); // Returns /etc /usr
pushd('+1'); // Returns /usr /etc
```
Save the current directory on the top of the directory stack and then cd to `dir`. With no arguments, pushd exchanges the top two directories. Returns an array of paths in the stack.
### popd([options,] ['-N' | '+N'])
Available options:
+ `-n`: Suppresses the normal change of directory when removing directories from the stack, so that only the stack is manipulated.
Arguments:
+ `+N`: Removes the Nth directory (counting from the left of the list printed by dirs), starting with zero.
+ `-N`: Removes the Nth directory (counting from the right of the list printed by dirs), starting with zero.
Examples:
```javascript
echo(process.cwd()); // '/usr'
pushd('/etc'); // '/etc /usr'
echo(process.cwd()); // '/etc'
popd(); // '/usr'
echo(process.cwd()); // '/usr'
```
When no arguments are given, popd removes the top directory from the stack and performs a cd to the new top directory. The elements are numbered from 0 starting at the first directory listed with dirs; i.e., popd is equivalent to popd +0. Returns an array of paths in the stack.
### dirs([options | '+N' | '-N'])
Available options:
+ `-c`: Clears the directory stack by deleting all of the elements.
Arguments:
+ `+N`: Displays the Nth directory (counting from the left of the list printed by dirs when invoked without options), starting with zero.
+ `-N`: Displays the Nth directory (counting from the right of the list printed by dirs when invoked without options), starting with zero.
Display the list of currently remembered directories. Returns an array of paths in the stack, or a single path if +N or -N was specified.
See also: pushd, popd
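For illustration, a minimal sketch building on the pushd example above (paths are illustrative):
```javascript
// process.cwd() === '/usr'
pushd('/etc');  // stack is now ['/etc', '/usr']
dirs();         // ['/etc', '/usr']
dirs('+1');     // '/usr' (a single path when +N or -N is given)
dirs('-c');     // clears the directory stack
```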
### ln(options, source, dest)
### ln(source, dest)
Available options:
+ `s`: symlink
+ `f`: force
Examples:
```javascript
ln('file', 'newlink');
ln('-sf', 'file', 'existing');
```
Links `source` to `dest`. Use `-f` to force the link, should `dest` already exist.
### exit(code)
Exits the current process with the given exit code.
### env['VAR_NAME']
Object containing environment variables (both getter and setter). Shortcut to process.env.
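Example (an illustrative sketch of `env` and `exit()`; the variable names `DEBUG` and `API_KEY` are made up for this example):
```javascript
// Read and set environment variables through the env shortcut
var home = env['HOME'];   // value depends on the environment
env['DEBUG'] = 'true';    // visible to child processes started with exec()

// Exit with a non-zero code when a required variable is missing
if (!env['API_KEY']) {
  echo('API_KEY is not set');
  exit(1);
}
```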
### exec(command [, options] [, callback])
Available options (all `false` by default):
+ `async`: Asynchronous execution. Defaults to true if a callback is provided.
+ `silent`: Do not echo program output to console.
Examples:
```javascript
var version = exec('node --version', {silent:true}).output;
var child = exec('some_long_running_process', {async:true});
child.stdout.on('data', function(data) {
/* ... do something with data ... */
});
exec('some_long_running_process', function(code, output) {
console.log('Exit code:', code);
console.log('Program output:', output);
});
```
Executes the given `command` _synchronously_, unless otherwise specified.
When in synchronous mode returns the object `{ code:..., output:... }`, containing the program's
`output` (stdout + stderr) and its exit `code`. Otherwise returns the child process object, and
the `callback` gets the arguments `(code, output)`.
**Note:** For long-lived processes, it's best to run `exec()` asynchronously as
the current synchronous implementation uses a lot of CPU. This should be getting
fixed soon.
### chmod(octal_mode || octal_string, file)
### chmod(symbolic_mode, file)
Available options:
+ `-v`: output a diagnostic for every file processed
+ `-c`: like verbose but report only when a change is made
+ `-R`: change files and directories recursively
Examples:
```javascript
chmod(755, '/Users/brandon');
chmod('755', '/Users/brandon'); // same as above
chmod('u+x', '/Users/brandon');
```
Alters the permissions of a file or directory by either specifying the
absolute permissions in octal form or expressing the changes in symbols.
This command tries to mimic the POSIX behavior as much as possible.
Notable exceptions:
+ In symbolic modes, 'a-r' and '-r' are identical. No consideration is
given to the umask.
+ There is no "quiet" option since default behavior is to run silent.
## Non-Unix commands
### tempdir()
Examples:
```javascript
var tmp = tempdir(); // "/tmp" for most *nix platforms
```
Searches for and returns a string containing a writeable, platform-dependent temporary directory.
Follows Python's [tempfile algorithm](http://docs.python.org/library/tempfile.html#tempfile.tempdir).
### error()
Tests if an error occurred in the last command. Returns `null` if no error occurred;
otherwise returns a string explaining the error.
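Example (a hedged sketch; the file names are hypothetical):
```javascript
cp('config.sample.json', 'config.json'); // hypothetical file names
if (error()) {
  echo('copy failed: ' + error());
  exit(1);
}
```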
## Configuration
### config.silent
Example:
```javascript
var sh = require('shelljs');
var silentState = sh.config.silent; // save old silent state
sh.config.silent = true;
/* ... */
sh.config.silent = silentState; // restore old silent state
```
Suppresses all command output if `true`, except for `echo()` calls.
Default is `false`.
### config.fatal
Example:
```javascript
require('shelljs/global');
config.fatal = true;
cp('this_file_does_not_exist', '/dev/null'); // dies here
/* more commands... */
```
If `true` the script will die on errors. Default is `false`.

View File

@ -0,0 +1,9 @@
# Release steps
* Ensure master passes CI tests
* Bump the version in package.json. Any breaking change or new feature should bump the minor (or even major) version; non-breaking changes or fixes can just bump the patch version.
* Update the README manually if the changes are not documented in-code; if they are, run `scripts/generate-docs.js`.
* Commit
* `$ git tag <version>` (see `git tag -l` for latest)
* `$ git push origin master --tags`
* `$ npm publish .`
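Since shelljs is itself a scripting tool, the tag-and-publish steps can be scripted with it. A minimal sketch, assuming it is run from the repository root after the version bump and commit above:
```javascript
// Hedged sketch of the release tail end using shelljs' own global API.
require('shelljs/global');
config.fatal = true; // abort on the first failing command

var version = require('./package.json').version;
exec('git tag ' + version);
exec('git push origin master --tags');
exec('npm publish .');
```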

View File

@ -0,0 +1,51 @@
#!/usr/bin/env node
require('../global');
if (process.argv.length < 3) {
console.log('ShellJS: missing argument (script name)');
console.log();
process.exit(1);
}
var args,
scriptName = process.argv[2];
env['NODE_PATH'] = __dirname + '/../..';
if (!scriptName.match(/\.js/) && !scriptName.match(/\.coffee/)) {
if (test('-f', scriptName + '.js'))
scriptName += '.js';
if (test('-f', scriptName + '.coffee'))
scriptName += '.coffee';
}
if (!test('-f', scriptName)) {
console.log('ShellJS: script not found ('+scriptName+')');
console.log();
process.exit(1);
}
args = process.argv.slice(3);
for (var i = 0, l = args.length; i < l; i++) {
if (args[i][0] !== "-"){
args[i] = '"' + args[i] + '"'; // fixes arguments with multiple words
}
}
if (scriptName.match(/\.coffee$/)) {
//
// CoffeeScript
//
if (which('coffee')) {
exec('coffee ' + scriptName + ' ' + args.join(' '), { async: true });
} else {
console.log('ShellJS: CoffeeScript interpreter not found');
console.log();
process.exit(1);
}
} else {
//
// JavaScript
//
exec('node ' + scriptName + ' ' + args.join(' '), { async: true });
}

View File

@ -0,0 +1,3 @@
var shell = require('./shell.js');
for (var cmd in shell)
global[cmd] = shell[cmd];

View File

@ -0,0 +1,56 @@
require('./global');
global.config.fatal = true;
global.target = {};
var args = process.argv.slice(2),
targetArgs,
dashesLoc = args.indexOf('--');
// split args, everything after -- if only for targets
if (dashesLoc > -1) {
targetArgs = args.slice(dashesLoc + 1, args.length);
args = args.slice(0, dashesLoc);
}
// This ensures we only execute the script targets after the entire script has
// been evaluated
setTimeout(function() {
var t;
if (args.length === 1 && args[0] === '--help') {
console.log('Available targets:');
for (t in global.target)
console.log(' ' + t);
return;
}
// Wrap targets to prevent duplicate execution
for (t in global.target) {
(function(t, oldTarget){
// Wrap it
global.target[t] = function() {
if (oldTarget.done)
return;
oldTarget.done = true;
return oldTarget.apply(oldTarget, arguments);
};
})(t, global.target[t]);
}
// Execute desired targets
if (args.length > 0) {
args.forEach(function(arg) {
if (arg in global.target)
global.target[arg](targetArgs);
else {
console.log('no such target: ' + arg);
}
});
} else if ('all' in global.target) {
global.target.all(targetArgs);
}
}, 0);

View File

@ -0,0 +1,66 @@
{
"_from": "shelljs@^0.5.3",
"_id": "shelljs@0.5.3",
"_inBundle": false,
"_integrity": "sha1-xUmCuZbHbvDB5rWfvcWCX1txMRM=",
"_location": "/guppy-cli/shelljs",
"_phantomChildren": {},
"_requested": {
"type": "range",
"registry": true,
"raw": "shelljs@^0.5.3",
"name": "shelljs",
"escapedName": "shelljs",
"rawSpec": "^0.5.3",
"saveSpec": null,
"fetchSpec": "^0.5.3"
},
"_requiredBy": [
"/guppy-cli"
],
"_resolved": "https://registry.npmjs.org/shelljs/-/shelljs-0.5.3.tgz",
"_shasum": "c54982b996c76ef0c1e6b59fbdc5825f5b713113",
"_spec": "shelljs@^0.5.3",
"_where": "/home/manfred/enviPath/ketcher2/ketcher/node_modules/guppy-cli",
"author": {
"name": "Artur Adib",
"email": "arturadib@gmail.com"
},
"bin": {
"shjs": "./bin/shjs"
},
"bugs": {
"url": "https://github.com/arturadib/shelljs/issues"
},
"bundleDependencies": false,
"dependencies": {},
"deprecated": false,
"description": "Portable Unix shell commands for Node.js",
"devDependencies": {
"jshint": "~2.1.11"
},
"engines": {
"node": ">=0.8.0"
},
"homepage": "http://github.com/arturadib/shelljs",
"keywords": [
"unix",
"shell",
"makefile",
"make",
"jake",
"synchronous"
],
"license": "BSD*",
"main": "./shell.js",
"name": "shelljs",
"optionalDependencies": {},
"repository": {
"type": "git",
"url": "git://github.com/arturadib/shelljs.git"
},
"scripts": {
"test": "node scripts/run-tests"
},
"version": "0.5.3"
}

View File

@ -0,0 +1,21 @@
#!/usr/bin/env node
require('../global');
echo('Appending docs to README.md');
cd(__dirname + '/..');
// Extract docs from shell.js
var docs = grep('//@', 'shell.js');
docs = docs.replace(/\/\/\@include (.+)/g, function(match, path) {
var file = path.match('.js$') ? path : path+'.js';
return grep('//@', file);
});
// Remove '//@'
docs = docs.replace(/\/\/\@ ?/g, '');
// Append docs to README
sed('-i', /## Command reference(.|\n)*/, '## Command reference\n\n' + docs, 'README.md');
echo('All done.');

View File

@ -0,0 +1,50 @@
#!/usr/bin/env node
require('../global');
var path = require('path');
var failed = false;
//
// Lint
//
JSHINT_BIN = './node_modules/jshint/bin/jshint';
cd(__dirname + '/..');
if (!test('-f', JSHINT_BIN)) {
echo('JSHint not found. Run `npm install` in the root dir first.');
exit(1);
}
if (exec(JSHINT_BIN + ' *.js test/*.js').code !== 0) {
failed = true;
echo('*** JSHINT FAILED! (return code != 0)');
echo();
} else {
echo('All JSHint tests passed');
echo();
}
//
// Unit tests
//
cd(__dirname + '/../test');
ls('*.js').forEach(function(file) {
echo('Running test:', file);
if (exec('node ' + file).code !== 123) { // 123 avoids false positives (e.g. premature exit)
failed = true;
echo('*** TEST FAILED! (missing exit code "123")');
echo();
}
});
if (failed) {
echo();
echo('*******************************************************');
echo('WARNING: Some tests did not pass!');
echo('*******************************************************');
exit(1);
} else {
echo();
echo('All tests passed.');
}

View File

@ -0,0 +1,159 @@
//
// ShellJS
// Unix shell commands on top of Node's API
//
// Copyright (c) 2012 Artur Adib
// http://github.com/arturadib/shelljs
//
var common = require('./src/common');
//@
//@ All commands run synchronously, unless otherwise stated.
//@
//@include ./src/cd
var _cd = require('./src/cd');
exports.cd = common.wrap('cd', _cd);
//@include ./src/pwd
var _pwd = require('./src/pwd');
exports.pwd = common.wrap('pwd', _pwd);
//@include ./src/ls
var _ls = require('./src/ls');
exports.ls = common.wrap('ls', _ls);
//@include ./src/find
var _find = require('./src/find');
exports.find = common.wrap('find', _find);
//@include ./src/cp
var _cp = require('./src/cp');
exports.cp = common.wrap('cp', _cp);
//@include ./src/rm
var _rm = require('./src/rm');
exports.rm = common.wrap('rm', _rm);
//@include ./src/mv
var _mv = require('./src/mv');
exports.mv = common.wrap('mv', _mv);
//@include ./src/mkdir
var _mkdir = require('./src/mkdir');
exports.mkdir = common.wrap('mkdir', _mkdir);
//@include ./src/test
var _test = require('./src/test');
exports.test = common.wrap('test', _test);
//@include ./src/cat
var _cat = require('./src/cat');
exports.cat = common.wrap('cat', _cat);
//@include ./src/to
var _to = require('./src/to');
String.prototype.to = common.wrap('to', _to);
//@include ./src/toEnd
var _toEnd = require('./src/toEnd');
String.prototype.toEnd = common.wrap('toEnd', _toEnd);
//@include ./src/sed
var _sed = require('./src/sed');
exports.sed = common.wrap('sed', _sed);
//@include ./src/grep
var _grep = require('./src/grep');
exports.grep = common.wrap('grep', _grep);
//@include ./src/which
var _which = require('./src/which');
exports.which = common.wrap('which', _which);
//@include ./src/echo
var _echo = require('./src/echo');
exports.echo = _echo; // don't common.wrap() as it could parse '-options'
//@include ./src/dirs
var _dirs = require('./src/dirs').dirs;
exports.dirs = common.wrap("dirs", _dirs);
var _pushd = require('./src/dirs').pushd;
exports.pushd = common.wrap('pushd', _pushd);
var _popd = require('./src/dirs').popd;
exports.popd = common.wrap("popd", _popd);
//@include ./src/ln
var _ln = require('./src/ln');
exports.ln = common.wrap('ln', _ln);
//@
//@ ### exit(code)
//@ Exits the current process with the given exit code.
exports.exit = process.exit;
//@
//@ ### env['VAR_NAME']
//@ Object containing environment variables (both getter and setter). Shortcut to process.env.
exports.env = process.env;
//@include ./src/exec
var _exec = require('./src/exec');
exports.exec = common.wrap('exec', _exec, {notUnix:true});
//@include ./src/chmod
var _chmod = require('./src/chmod');
exports.chmod = common.wrap('chmod', _chmod);
//@
//@ ## Non-Unix commands
//@
//@include ./src/tempdir
var _tempDir = require('./src/tempdir');
exports.tempdir = common.wrap('tempdir', _tempDir);
//@include ./src/error
var _error = require('./src/error');
exports.error = _error;
//@
//@ ## Configuration
//@
exports.config = common.config;
//@
//@ ### config.silent
//@ Example:
//@
//@ ```javascript
//@ var sh = require('shelljs');
//@ var silentState = sh.config.silent; // save old silent state
//@ sh.config.silent = true;
//@ /* ... */
//@ sh.config.silent = silentState; // restore old silent state
//@ ```
//@
//@ Suppresses all command output if `true`, except for `echo()` calls.
//@ Default is `false`.
//@
//@ ### config.fatal
//@ Example:
//@
//@ ```javascript
//@ require('shelljs/global');
//@ config.fatal = true;
//@ cp('this_file_does_not_exist', '/dev/null'); // dies here
//@ /* more commands... */
//@ ```
//@
//@ If `true` the script will die on errors. Default is `false`.

View File

@ -0,0 +1,43 @@
var common = require('./common');
var fs = require('fs');
//@
//@ ### cat(file [, file ...])
//@ ### cat(file_array)
//@
//@ Examples:
//@
//@ ```javascript
//@ var str = cat('file*.txt');
//@ var str = cat('file1', 'file2');
//@ var str = cat(['file1', 'file2']); // same as above
//@ ```
//@
//@ Returns a string containing the given file, or a concatenated string
//@ containing the files if more than one file is given (a new line character is
//@ introduced between each file). Wildcard `*` accepted.
function _cat(options, files) {
var cat = '';
if (!files)
common.error('no paths given');
if (typeof files === 'string')
files = [].slice.call(arguments, 1);
// if it's array leave it as it is
files = common.expand(files);
files.forEach(function(file) {
if (!fs.existsSync(file))
common.error('no such file or directory: ' + file);
cat += fs.readFileSync(file, 'utf8') + '\n';
});
if (cat[cat.length-1] === '\n')
cat = cat.substring(0, cat.length-1);
return common.ShellString(cat);
}
module.exports = _cat;

View File

@ -0,0 +1,19 @@
var fs = require('fs');
var common = require('./common');
//@
//@ ### cd('dir')
//@ Changes to directory `dir` for the duration of the script
function _cd(options, dir) {
if (!dir)
common.error('directory not specified');
if (!fs.existsSync(dir))
common.error('no such file or directory: ' + dir);
if (!fs.statSync(dir).isDirectory())
common.error('not a directory: ' + dir);
process.chdir(dir);
}
module.exports = _cd;

View File

@ -0,0 +1,208 @@
var common = require('./common');
var fs = require('fs');
var path = require('path');
var PERMS = (function (base) {
return {
OTHER_EXEC : base.EXEC,
OTHER_WRITE : base.WRITE,
OTHER_READ : base.READ,
GROUP_EXEC : base.EXEC << 3,
GROUP_WRITE : base.WRITE << 3,
GROUP_READ : base.READ << 3,
OWNER_EXEC : base.EXEC << 6,
OWNER_WRITE : base.WRITE << 6,
OWNER_READ : base.READ << 6,
// Literal octal numbers are apparently not allowed in "strict" javascript. Using parseInt is
// the preferred way, else a jshint warning is thrown.
STICKY : parseInt('01000', 8),
SETGID : parseInt('02000', 8),
SETUID : parseInt('04000', 8),
TYPE_MASK : parseInt('0770000', 8)
};
})({
EXEC : 1,
WRITE : 2,
READ : 4
});
//@
//@ ### chmod(octal_mode || octal_string, file)
//@ ### chmod(symbolic_mode, file)
//@
//@ Available options:
//@
//@ + `-v`: output a diagnostic for every file processed
//@ + `-c`: like verbose but report only when a change is made
//@ + `-R`: change files and directories recursively
//@
//@ Examples:
//@
//@ ```javascript
//@ chmod(755, '/Users/brandon');
//@ chmod('755', '/Users/brandon'); // same as above
//@ chmod('u+x', '/Users/brandon');
//@ ```
//@
//@ Alters the permissions of a file or directory by either specifying the
//@ absolute permissions in octal form or expressing the changes in symbols.
//@ This command tries to mimic the POSIX behavior as much as possible.
//@ Notable exceptions:
//@
//@ + In symbolic modes, 'a-r' and '-r' are identical. No consideration is
//@ given to the umask.
//@ + There is no "quiet" option since default behavior is to run silent.
function _chmod(options, mode, filePattern) {
if (!filePattern) {
if (options.length > 0 && options.charAt(0) === '-') {
// Special case where the specified file permissions started with - to subtract perms, which
// get picked up by the option parser as command flags.
// If we are down by one argument and options starts with -, shift everything over.
filePattern = mode;
mode = options;
options = '';
}
else {
common.error('You must specify a file.');
}
}
options = common.parseOptions(options, {
'R': 'recursive',
'c': 'changes',
'v': 'verbose'
});
if (typeof filePattern === 'string') {
filePattern = [ filePattern ];
}
var files;
if (options.recursive) {
files = [];
common.expand(filePattern).forEach(function addFile(expandedFile) {
var stat = fs.lstatSync(expandedFile);
if (!stat.isSymbolicLink()) {
files.push(expandedFile);
if (stat.isDirectory()) { // intentionally does not follow symlinks.
fs.readdirSync(expandedFile).forEach(function (child) {
addFile(expandedFile + '/' + child);
});
}
}
});
}
else {
files = common.expand(filePattern);
}
files.forEach(function innerChmod(file) {
file = path.resolve(file);
if (!fs.existsSync(file)) {
common.error('File not found: ' + file);
}
// When recursing, don't follow symlinks.
if (options.recursive && fs.lstatSync(file).isSymbolicLink()) {
return;
}
var perms = fs.statSync(file).mode;
var type = perms & PERMS.TYPE_MASK;
var newPerms = perms;
if (isNaN(parseInt(mode, 8))) {
// parse options
mode.split(',').forEach(function (symbolicMode) {
/*jshint regexdash:true */
var pattern = /([ugoa]*)([=\+-])([rwxXst]*)/i;
var matches = pattern.exec(symbolicMode);
if (matches) {
var applyTo = matches[1];
var operator = matches[2];
var change = matches[3];
var changeOwner = applyTo.indexOf('u') != -1 || applyTo === 'a' || applyTo === '';
var changeGroup = applyTo.indexOf('g') != -1 || applyTo === 'a' || applyTo === '';
var changeOther = applyTo.indexOf('o') != -1 || applyTo === 'a' || applyTo === '';
var changeRead = change.indexOf('r') != -1;
var changeWrite = change.indexOf('w') != -1;
var changeExec = change.indexOf('x') != -1;
var changeSticky = change.indexOf('t') != -1;
var changeSetuid = change.indexOf('s') != -1;
var mask = 0;
if (changeOwner) {
mask |= (changeRead ? PERMS.OWNER_READ : 0) + (changeWrite ? PERMS.OWNER_WRITE : 0) + (changeExec ? PERMS.OWNER_EXEC : 0) + (changeSetuid ? PERMS.SETUID : 0);
}
if (changeGroup) {
mask |= (changeRead ? PERMS.GROUP_READ : 0) + (changeWrite ? PERMS.GROUP_WRITE : 0) + (changeExec ? PERMS.GROUP_EXEC : 0) + (changeSetuid ? PERMS.SETGID : 0);
}
if (changeOther) {
mask |= (changeRead ? PERMS.OTHER_READ : 0) + (changeWrite ? PERMS.OTHER_WRITE : 0) + (changeExec ? PERMS.OTHER_EXEC : 0);
}
// Sticky bit is special - it's not tied to user, group or other.
if (changeSticky) {
mask |= PERMS.STICKY;
}
switch (operator) {
case '+':
newPerms |= mask;
break;
case '-':
newPerms &= ~mask;
break;
case '=':
newPerms = type + mask;
// According to POSIX, when using = to explicitly set the permissions, setuid and setgid can never be cleared.
if (fs.statSync(file).isDirectory()) {
newPerms |= (PERMS.SETUID + PERMS.SETGID) & perms;
}
break;
}
if (options.verbose) {
log(file + ' -> ' + newPerms.toString(8));
}
if (perms != newPerms) {
if (!options.verbose && options.changes) {
log(file + ' -> ' + newPerms.toString(8));
}
fs.chmodSync(file, newPerms);
}
}
else {
common.error('Invalid symbolic mode change: ' + symbolicMode);
}
});
}
else {
// they gave us a full number
newPerms = type + parseInt(mode, 8);
// POSIX rules are that setuid and setgid can only be added using numeric form, but not cleared.
if (fs.statSync(file).isDirectory()) {
newPerms |= (PERMS.SETUID + PERMS.SETGID) & perms;
}
fs.chmodSync(file, newPerms);
}
});
}
module.exports = _chmod;

View File

@ -0,0 +1,203 @@
var os = require('os');
var fs = require('fs');
var _ls = require('./ls');
// Module globals
var config = {
silent: false,
fatal: false
};
exports.config = config;
var state = {
error: null,
currentCmd: 'shell.js',
tempDir: null
};
exports.state = state;
var platform = os.type().match(/^Win/) ? 'win' : 'unix';
exports.platform = platform;
function log() {
if (!config.silent)
console.log.apply(this, arguments);
}
exports.log = log;
// Shows error message. Throws unless _continue or config.fatal are true
function error(msg, _continue) {
if (state.error === null)
state.error = '';
state.error += state.currentCmd + ': ' + msg + '\n';
if (msg.length > 0)
log(state.error);
if (config.fatal)
process.exit(1);
if (!_continue)
throw '';
}
exports.error = error;
// In the future, when Proxies are default, we can add methods like `.to()` to primitive strings.
// For now, this is a dummy function to bookmark places we need such strings
function ShellString(str) {
return str;
}
exports.ShellString = ShellString;
// Returns {'alice': true, 'bob': false} when passed a dictionary, e.g.:
// parseOptions('-a', {'a':'alice', 'b':'bob'});
function parseOptions(str, map) {
if (!map)
error('parseOptions() internal error: no map given');
// All options are false by default
var options = {};
for (var letter in map)
options[map[letter]] = false;
if (!str)
return options; // defaults
if (typeof str !== 'string')
error('parseOptions() internal error: wrong str');
// e.g. match[1] = 'Rf' for str = '-Rf'
var match = str.match(/^\-(.+)/);
if (!match)
return options;
// e.g. chars = ['R', 'f']
var chars = match[1].split('');
chars.forEach(function(c) {
if (c in map)
options[map[c]] = true;
else
error('option not recognized: '+c);
});
return options;
}
exports.parseOptions = parseOptions;
// Expands wildcards with matching (ie. existing) file names.
// For example:
// expand(['file*.js']) = ['file1.js', 'file2.js', ...]
// (if the files 'file1.js', 'file2.js', etc, exist in the current dir)
function expand(list) {
var expanded = [];
list.forEach(function(listEl) {
// Wildcard present on directory names ?
if(listEl.search(/\*[^\/]*\//) > -1 || listEl.search(/\*\*[^\/]*\//) > -1) {
var match = listEl.match(/^([^*]+\/|)(.*)/);
var root = match[1];
var rest = match[2];
var restRegex = rest.replace(/\*\*/g, ".*").replace(/\*/g, "[^\\/]*");
restRegex = new RegExp(restRegex);
_ls('-R', root).filter(function (e) {
return restRegex.test(e);
}).forEach(function(file) {
expanded.push(file);
});
}
// Wildcard present on file names ?
else if (listEl.search(/\*/) > -1) {
_ls('', listEl).forEach(function(file) {
expanded.push(file);
});
} else {
expanded.push(listEl);
}
});
return expanded;
}
exports.expand = expand;
// Normalizes _unlinkSync() across platforms to match Unix behavior, i.e.
// file can be unlinked even if it's read-only, see https://github.com/joyent/node/issues/3006
function unlinkSync(file) {
try {
fs.unlinkSync(file);
} catch(e) {
// Try to override file permission
if (e.code === 'EPERM') {
fs.chmodSync(file, '0666');
fs.unlinkSync(file);
} else {
throw e;
}
}
}
exports.unlinkSync = unlinkSync;
// e.g. 'shelljs_a5f185d0443ca...'
function randomFileName() {
function randomHash(count) {
if (count === 1)
return parseInt(16*Math.random(), 10).toString(16);
else {
var hash = '';
for (var i=0; i<count; i++)
hash += randomHash(1);
return hash;
}
}
return 'shelljs_'+randomHash(20);
}
exports.randomFileName = randomFileName;
// extend(target_obj, source_obj1 [, source_obj2 ...])
// Shallow extend, e.g.:
// extend({A:1}, {b:2}, {c:3}) returns {A:1, b:2, c:3}
function extend(target) {
var sources = [].slice.call(arguments, 1);
sources.forEach(function(source) {
for (var key in source)
target[key] = source[key];
});
return target;
}
exports.extend = extend;
// Common wrapper for all Unix-like commands
function wrap(cmd, fn, options) {
return function() {
var retValue = null;
state.currentCmd = cmd;
state.error = null;
try {
var args = [].slice.call(arguments, 0);
if (options && options.notUnix) {
retValue = fn.apply(this, args);
} else {
if (args.length === 0 || typeof args[0] !== 'string' || args[0][0] !== '-')
args.unshift(''); // only add dummy option if '-option' not already present
retValue = fn.apply(this, args);
}
} catch (e) {
if (!state.error) {
// If state.error hasn't been set it's an error thrown by Node, not us - probably a bug...
console.log('shell.js: internal error');
console.log(e.stack || e);
process.exit(1);
}
if (config.fatal)
throw e;
}
state.currentCmd = 'shell.js';
return retValue;
};
} // wrap
exports.wrap = wrap;

View File

@ -0,0 +1,204 @@
var fs = require('fs');
var path = require('path');
var common = require('./common');
var os = require('os');
// Buffered file copy, synchronous
// (Using readFileSync() + writeFileSync() could easily cause a memory overflow
// with large files)
function copyFileSync(srcFile, destFile) {
if (!fs.existsSync(srcFile))
common.error('copyFileSync: no such file or directory: ' + srcFile);
var BUF_LENGTH = 64*1024,
buf = new Buffer(BUF_LENGTH),
bytesRead = BUF_LENGTH,
pos = 0,
fdr = null,
fdw = null;
try {
fdr = fs.openSync(srcFile, 'r');
} catch(e) {
common.error('copyFileSync: could not read src file ('+srcFile+')');
}
try {
fdw = fs.openSync(destFile, 'w');
} catch(e) {
common.error('copyFileSync: could not write to dest file (code='+e.code+'):'+destFile);
}
while (bytesRead === BUF_LENGTH) {
bytesRead = fs.readSync(fdr, buf, 0, BUF_LENGTH, pos);
fs.writeSync(fdw, buf, 0, bytesRead);
pos += bytesRead;
}
fs.closeSync(fdr);
fs.closeSync(fdw);
fs.chmodSync(destFile, fs.statSync(srcFile).mode);
}
// Recursively copies 'sourceDir' into 'destDir'
// Adapted from https://github.com/ryanmcgrath/wrench-js
//
// Copyright (c) 2010 Ryan McGrath
// Copyright (c) 2012 Artur Adib
//
// Licensed under the MIT License
// http://www.opensource.org/licenses/mit-license.php
function cpdirSyncRecursive(sourceDir, destDir, opts) {
if (!opts) opts = {};
/* Create the directory where all our junk is moving to; read the mode of the source directory and mirror it */
var checkDir = fs.statSync(sourceDir);
try {
fs.mkdirSync(destDir, checkDir.mode);
} catch (e) {
//if the directory already exists, that's okay
if (e.code !== 'EEXIST') throw e;
}
var files = fs.readdirSync(sourceDir);
for (var i = 0; i < files.length; i++) {
var srcFile = sourceDir + "/" + files[i];
var destFile = destDir + "/" + files[i];
var srcFileStat = fs.lstatSync(srcFile);
if (srcFileStat.isDirectory()) {
/* recursion this thing right on back. */
cpdirSyncRecursive(srcFile, destFile, opts);
} else if (srcFileStat.isSymbolicLink()) {
var symlinkFull = fs.readlinkSync(srcFile);
fs.symlinkSync(symlinkFull, destFile, os.platform() === "win32" ? "junction" : null);
} else {
/* At this point, we've hit a file actually worth copying... so copy it on over. */
if (fs.existsSync(destFile) && !opts.force) {
common.log('skipping existing file: ' + files[i]);
} else {
copyFileSync(srcFile, destFile);
}
}
} // for files
} // cpdirSyncRecursive
//@
//@ ### cp([options ,] source [,source ...], dest)
//@ ### cp([options ,] source_array, dest)
//@ Available options:
//@
//@ + `-f`: force
//@ + `-r, -R`: recursive
//@
//@ Examples:
//@
//@ ```javascript
//@ cp('file1', 'dir1');
//@ cp('-Rf', '/tmp/*', '/usr/local/*', '/home/tmp');
//@ cp('-Rf', ['/tmp/*', '/usr/local/*'], '/home/tmp'); // same as above
//@ ```
//@
//@ Copies files. The wildcard `*` is accepted.
function _cp(options, sources, dest) {
options = common.parseOptions(options, {
'f': 'force',
'R': 'recursive',
'r': 'recursive'
});
// Get sources, dest
if (arguments.length < 3) {
common.error('missing <source> and/or <dest>');
} else if (arguments.length > 3) {
sources = [].slice.call(arguments, 1, arguments.length - 1);
dest = arguments[arguments.length - 1];
} else if (typeof sources === 'string') {
sources = [sources];
} else if ('length' in sources) {
sources = sources; // no-op for array
} else {
common.error('invalid arguments');
}
var exists = fs.existsSync(dest),
stats = exists && fs.statSync(dest);
// Dest is not existing dir, but multiple sources given
if ((!exists || !stats.isDirectory()) && sources.length > 1)
common.error('dest is not a directory (too many sources)');
// Dest is an existing file, but no -f given
if (exists && stats.isFile() && !options.force)
common.error('dest file already exists: ' + dest);
if (options.recursive) {
// Recursive allows the shortcut syntax "sourcedir/" for "sourcedir/*"
// (see Github issue #15)
sources.forEach(function(src, i) {
if (src[src.length - 1] === '/')
sources[i] += '*';
});
// Create dest
try {
fs.mkdirSync(dest, parseInt('0777', 8));
} catch (e) {
// like Unix's cp, keep going even if we can't create dest dir
}
}
sources = common.expand(sources);
sources.forEach(function(src) {
if (!fs.existsSync(src)) {
common.error('no such file or directory: '+src, true);
return; // skip file
}
// If here, src exists
if (fs.statSync(src).isDirectory()) {
if (!options.recursive) {
// Non-Recursive
common.log(src + ' is a directory (not copied)');
} else {
// Recursive
// 'cp /a/source dest' should create 'source' in 'dest'
var newDest = path.join(dest, path.basename(src)),
checkDir = fs.statSync(src);
try {
fs.mkdirSync(newDest, checkDir.mode);
} catch (e) {
//if the directory already exists, that's okay
if (e.code !== 'EEXIST') {
common.error('dest file no such file or directory: ' + newDest, true);
throw e;
}
}
cpdirSyncRecursive(src, newDest, {force: options.force});
}
return; // done with dir
}
// If here, src is a file
// When copying to '/path/dir':
// thisDest = '/path/dir/file1'
var thisDest = dest;
if (fs.existsSync(dest) && fs.statSync(dest).isDirectory())
thisDest = path.normalize(dest + '/' + path.basename(src));
if (fs.existsSync(thisDest) && !options.force) {
common.error('dest file already exists: ' + thisDest, true);
return; // skip file
}
copyFileSync(src, thisDest);
}); // forEach(src)
}
module.exports = _cp;

View File

@ -0,0 +1,191 @@
var common = require('./common');
var _cd = require('./cd');
var path = require('path');
// Pushd/popd/dirs internals
var _dirStack = [];
function _isStackIndex(index) {
return (/^[\-+]\d+$/).test(index);
}
function _parseStackIndex(index) {
if (_isStackIndex(index)) {
if (Math.abs(index) < _dirStack.length + 1) { // +1 for pwd
return (/^-/).test(index) ? Number(index) - 1 : Number(index);
} else {
common.error(index + ': directory stack index out of range');
}
} else {
common.error(index + ': invalid number');
}
}
function _actualDirStack() {
return [process.cwd()].concat(_dirStack);
}
//@
//@ ### pushd([options,] [dir | '-N' | '+N'])
//@
//@ Available options:
//@
//@ + `-n`: Suppresses the normal change of directory when adding directories to the stack, so that only the stack is manipulated.
//@
//@ Arguments:
//@
//@ + `dir`: Makes the current working directory be the top of the stack, and then executes the equivalent of `cd dir`.
//@ + `+N`: Brings the Nth directory (counting from the left of the list printed by dirs, starting with zero) to the top of the list by rotating the stack.
//@ + `-N`: Brings the Nth directory (counting from the right of the list printed by dirs, starting with zero) to the top of the list by rotating the stack.
//@
//@ Examples:
//@
//@ ```javascript
//@ // process.cwd() === '/usr'
//@ pushd('/etc'); // Returns /etc /usr
//@ pushd('+1'); // Returns /usr /etc
//@ ```
//@
//@ Save the current directory on the top of the directory stack and then cd to `dir`. With no arguments, pushd exchanges the top two directories. Returns an array of paths in the stack.
function _pushd(options, dir) {
if (_isStackIndex(options)) {
dir = options;
options = '';
}
options = common.parseOptions(options, {
'n' : 'no-cd'
});
var dirs = _actualDirStack();
if (dir === '+0') {
return dirs; // +0 is a noop
} else if (!dir) {
if (dirs.length > 1) {
dirs = dirs.splice(1, 1).concat(dirs);
} else {
return common.error('no other directory');
}
} else if (_isStackIndex(dir)) {
var n = _parseStackIndex(dir);
dirs = dirs.slice(n).concat(dirs.slice(0, n));
} else {
if (options['no-cd']) {
dirs.splice(1, 0, dir);
} else {
dirs.unshift(dir);
}
}
if (options['no-cd']) {
dirs = dirs.slice(1);
} else {
dir = path.resolve(dirs.shift());
_cd('', dir);
}
_dirStack = dirs;
return _dirs('');
}
exports.pushd = _pushd;
//@
//@ ### popd([options,] ['-N' | '+N'])
//@
//@ Available options:
//@
//@ + `-n`: Suppresses the normal change of directory when removing directories from the stack, so that only the stack is manipulated.
//@
//@ Arguments:
//@
//@ + `+N`: Removes the Nth directory (counting from the left of the list printed by dirs), starting with zero.
//@ + `-N`: Removes the Nth directory (counting from the right of the list printed by dirs), starting with zero.
//@
//@ Examples:
//@
//@ ```javascript
//@ echo(process.cwd()); // '/usr'
//@ pushd('/etc'); // '/etc /usr'
//@ echo(process.cwd()); // '/etc'
//@ popd(); // '/usr'
//@ echo(process.cwd()); // '/usr'
//@ ```
//@
//@ When no arguments are given, popd removes the top directory from the stack and performs a cd to the new top directory. The elements are numbered from 0 starting at the first directory listed with dirs; i.e., popd is equivalent to popd +0. Returns an array of paths in the stack.
function _popd(options, index) {
if (_isStackIndex(options)) {
index = options;
options = '';
}
options = common.parseOptions(options, {
'n' : 'no-cd'
});
if (!_dirStack.length) {
return common.error('directory stack empty');
}
index = _parseStackIndex(index || '+0');
if (options['no-cd'] || index > 0 || _dirStack.length + index === 0) {
index = index > 0 ? index - 1 : index;
_dirStack.splice(index, 1);
} else {
var dir = path.resolve(_dirStack.shift());
_cd('', dir);
}
return _dirs('');
}
exports.popd = _popd;
//@
//@ ### dirs([options | '+N' | '-N'])
//@
//@ Available options:
//@
//@ + `-c`: Clears the directory stack by deleting all of the elements.
//@
//@ Arguments:
//@
//@ + `+N`: Displays the Nth directory (counting from the left of the list printed by dirs when invoked without options), starting with zero.
//@ + `-N`: Displays the Nth directory (counting from the right of the list printed by dirs when invoked without options), starting with zero.
//@
//@ Display the list of currently remembered directories. Returns an array of paths in the stack, or a single path if +N or -N was specified.
//@
//@ See also: pushd, popd
function _dirs(options, index) {
if (_isStackIndex(options)) {
index = options;
options = '';
}
options = common.parseOptions(options, {
'c' : 'clear'
});
if (options['clear']) {
_dirStack = [];
return _dirStack;
}
var stack = _actualDirStack();
if (index) {
index = _parseStackIndex(index);
if (index < 0) {
index = stack.length + index;
}
common.log(stack[index]);
return stack[index];
}
common.log(stack.join(' '));
return stack;
}
exports.dirs = _dirs;

View File

@ -0,0 +1,20 @@
var common = require('./common');
//@
//@ ### echo(string [,string ...])
//@
//@ Examples:
//@
//@ ```javascript
//@ echo('hello world');
//@ var str = echo('hello world');
//@ ```
//@
//@ Prints string to stdout, and returns string with additional utility methods
//@ like `.to()`.
function _echo() {
var messages = [].slice.call(arguments, 0);
console.log.apply(this, messages);
return common.ShellString(messages.join(' '));
}
module.exports = _echo;

View File

@ -0,0 +1,10 @@
var common = require('./common');
//@
//@ ### error()
//@ Tests if error occurred in the last command. Returns `null` if no error occurred,
//@ otherwise returns string explaining the error
function error() {
return common.state.error;
};
module.exports = error;

View File

@ -0,0 +1,216 @@
var common = require('./common');
var _tempDir = require('./tempdir');
var _pwd = require('./pwd');
var path = require('path');
var fs = require('fs');
var child = require('child_process');
// Hack to run child_process.exec() synchronously (sync avoids callback hell)
// Uses a custom wait loop that checks for a flag file, created when the child process is done.
// (Can't do a wait loop that checks for internal Node variables/messages as
// Node is single-threaded; callbacks and other internal state changes are done in the
// event loop).
function execSync(cmd, opts) {
var tempDir = _tempDir();
var stdoutFile = path.resolve(tempDir+'/'+common.randomFileName()),
codeFile = path.resolve(tempDir+'/'+common.randomFileName()),
scriptFile = path.resolve(tempDir+'/'+common.randomFileName()),
sleepFile = path.resolve(tempDir+'/'+common.randomFileName());
var options = common.extend({
silent: common.config.silent
}, opts);
var previousStdoutContent = '';
// Echoes stdout changes from running process, if not silent
function updateStdout() {
if (options.silent || !fs.existsSync(stdoutFile))
return;
var stdoutContent = fs.readFileSync(stdoutFile, 'utf8');
// No changes since last time?
if (stdoutContent.length <= previousStdoutContent.length)
return;
process.stdout.write(stdoutContent.substr(previousStdoutContent.length));
previousStdoutContent = stdoutContent;
}
function escape(str) {
return (str+'').replace(/([\\"'])/g, "\\$1").replace(/\0/g, "\\0");
}
if (fs.existsSync(scriptFile)) common.unlinkSync(scriptFile);
if (fs.existsSync(stdoutFile)) common.unlinkSync(stdoutFile);
if (fs.existsSync(codeFile)) common.unlinkSync(codeFile);
var execCommand = '"'+process.execPath+'" '+scriptFile;
var execOptions = {
env: process.env,
cwd: _pwd(),
maxBuffer: 20*1024*1024
};
if (typeof child.execSync === 'function') {
var script = [
"var child = require('child_process')",
" , fs = require('fs');",
"var childProcess = child.exec('"+escape(cmd)+"', {env: process.env, maxBuffer: 20*1024*1024}, function(err) {",
" fs.writeFileSync('"+escape(codeFile)+"', err ? err.code.toString() : '0');",
"});",
"var stdoutStream = fs.createWriteStream('"+escape(stdoutFile)+"');",
"childProcess.stdout.pipe(stdoutStream, {end: false});",
"childProcess.stderr.pipe(stdoutStream, {end: false});",
"childProcess.stdout.pipe(process.stdout);",
"childProcess.stderr.pipe(process.stderr);",
"var stdoutEnded = false, stderrEnded = false;",
"function tryClosing(){ if(stdoutEnded && stderrEnded){ stdoutStream.end(); } }",
"childProcess.stdout.on('end', function(){ stdoutEnded = true; tryClosing(); });",
"childProcess.stderr.on('end', function(){ stderrEnded = true; tryClosing(); });"
].join('\n');
fs.writeFileSync(scriptFile, script);
if (options.silent) {
execOptions.stdio = 'ignore';
} else {
execOptions.stdio = [0, 1, 2];
}
// Welcome to the future
child.execSync(execCommand, execOptions);
} else {
cmd += ' > '+stdoutFile+' 2>&1'; // works on both win/unix
var script = [
"var child = require('child_process')",
" , fs = require('fs');",
"var childProcess = child.exec('"+escape(cmd)+"', {env: process.env, maxBuffer: 20*1024*1024}, function(err) {",
" fs.writeFileSync('"+escape(codeFile)+"', err ? err.code.toString() : '0');",
"});"
].join('\n');
fs.writeFileSync(scriptFile, script);
child.exec(execCommand, execOptions);
// The wait loop
// sleepFile is used as a dummy I/O op to mitigate unnecessary CPU usage
// (tried many I/O sync ops, writeFileSync() seems to be only one that is effective in reducing
// CPU usage, though apparently not so much on Windows)
while (!fs.existsSync(codeFile)) { updateStdout(); fs.writeFileSync(sleepFile, 'a'); }
while (!fs.existsSync(stdoutFile)) { updateStdout(); fs.writeFileSync(sleepFile, 'a'); }
}
// At this point codeFile exists, but it's not necessarily flushed yet.
// Keep reading it until it is.
var code = parseInt('', 10);
while (isNaN(code)) {
code = parseInt(fs.readFileSync(codeFile, 'utf8'), 10);
}
var stdout = fs.readFileSync(stdoutFile, 'utf8');
// No biggie if we can't erase the files now -- they're in a temp dir anyway
try { common.unlinkSync(scriptFile); } catch(e) {}
try { common.unlinkSync(stdoutFile); } catch(e) {}
try { common.unlinkSync(codeFile); } catch(e) {}
try { common.unlinkSync(sleepFile); } catch(e) {}
// some shell return codes are defined as errors, per http://tldp.org/LDP/abs/html/exitcodes.html
if (code === 1 || code === 2 || code >= 126) {
common.error('', true); // unix/shell doesn't really give an error message after non-zero exit codes
}
// True if successful, false if not
var obj = {
code: code,
output: stdout
};
return obj;
} // execSync()
// Wrapper around exec() to enable echoing output to console in real time
function execAsync(cmd, opts, callback) {
var output = '';
var options = common.extend({
silent: common.config.silent
}, opts);
var c = child.exec(cmd, {env: process.env, maxBuffer: 20*1024*1024}, function(err) {
if (callback)
callback(err ? err.code : 0, output);
});
c.stdout.on('data', function(data) {
output += data;
if (!options.silent)
process.stdout.write(data);
});
c.stderr.on('data', function(data) {
output += data;
if (!options.silent)
process.stdout.write(data);
});
return c;
}
//@
//@ ### exec(command [, options] [, callback])
//@ Available options (all `false` by default):
//@
//@ + `async`: Asynchronous execution. Defaults to true if a callback is provided.
//@ + `silent`: Do not echo program output to console.
//@
//@ Examples:
//@
//@ ```javascript
//@ var version = exec('node --version', {silent:true}).output;
//@
//@ var child = exec('some_long_running_process', {async:true});
//@ child.stdout.on('data', function(data) {
//@ /* ... do something with data ... */
//@ });
//@
//@ exec('some_long_running_process', function(code, output) {
//@ console.log('Exit code:', code);
//@ console.log('Program output:', output);
//@ });
//@ ```
//@
//@ Executes the given `command` _synchronously_, unless otherwise specified.
//@ When in synchronous mode returns the object `{ code:..., output:... }`, containing the program's
//@ `output` (stdout + stderr) and its exit `code`. Otherwise returns the child process object, and
//@ the `callback` gets the arguments `(code, output)`.
//@
//@ **Note:** For long-lived processes, it's best to run `exec()` asynchronously as
//@ the current synchronous implementation uses a lot of CPU. This should be getting
//@ fixed soon.
function _exec(command, options, callback) {
if (!command)
common.error('must specify command');
// Callback is defined instead of options.
if (typeof options === 'function') {
callback = options;
options = { async: true };
}
// Callback is defined with options.
if (typeof options === 'object' && typeof callback === 'function') {
options.async = true;
}
options = common.extend({
silent: common.config.silent,
async: false
}, options);
if (options.async)
return execAsync(command, options, callback);
else
return execSync(command, options);
}
module.exports = _exec;

View File

@ -0,0 +1,51 @@
var fs = require('fs');
var common = require('./common');
var _ls = require('./ls');
//@
//@ ### find(path [,path ...])
//@ ### find(path_array)
//@ Examples:
//@
//@ ```javascript
//@ find('src', 'lib');
//@ find(['src', 'lib']); // same as above
//@ find('.').filter(function(file) { return file.match(/\.js$/); });
//@ ```
//@
//@ Returns array of all files (however deep) in the given paths.
//@
//@ The main difference from `ls('-R', path)` is that the resulting file names
//@ include the base directories, e.g. `lib/resources/file1` instead of just `file1`.
function _find(options, paths) {
if (!paths)
common.error('no path specified');
else if (typeof paths === 'object')
paths = paths; // assume array
else if (typeof paths === 'string')
paths = [].slice.call(arguments, 1);
var list = [];
function pushFile(file) {
if (common.platform === 'win')
file = file.replace(/\\/g, '/');
list.push(file);
}
// why not simply do ls('-R', paths)? because the output wouldn't give the base dirs
// to get the base dir in the output, we need instead ls('-R', 'dir/*') for every directory
paths.forEach(function(file) {
pushFile(file);
if (fs.statSync(file).isDirectory()) {
_ls('-RA', file+'/*').forEach(function(subfile) {
pushFile(subfile);
});
}
});
return list;
}
module.exports = _find;

View File

@ -0,0 +1,52 @@
var common = require('./common');
var fs = require('fs');
//@
//@ ### grep([options ,] regex_filter, file [, file ...])
//@ ### grep([options ,] regex_filter, file_array)
//@ Available options:
//@
//@ + `-v`: Inverse the sense of the regex and print the lines not matching the criteria.
//@
//@ Examples:
//@
//@ ```javascript
//@ grep('-v', 'GLOBAL_VARIABLE', '*.js');
//@ grep('GLOBAL_VARIABLE', '*.js');
//@ ```
//@
//@ Reads input string from given files and returns a string containing all lines of the
//@ file that match the given `regex_filter`. Wildcard `*` accepted.
function _grep(options, regex, files) {
options = common.parseOptions(options, {
'v': 'inverse'
});
if (!files)
common.error('no paths given');
if (typeof files === 'string')
files = [].slice.call(arguments, 2);
// if it's array leave it as it is
files = common.expand(files);
var grep = '';
files.forEach(function(file) {
if (!fs.existsSync(file)) {
common.error('no such file or directory: ' + file, true);
return;
}
var contents = fs.readFileSync(file, 'utf8'),
lines = contents.split(/\r*\n/);
lines.forEach(function(line) {
var matched = line.match(regex);
if ((options.inverse && !matched) || (!options.inverse && matched))
grep += line + '\n';
});
});
return common.ShellString(grep);
}
module.exports = _grep;

View File

@ -0,0 +1,53 @@
var fs = require('fs');
var path = require('path');
var common = require('./common');
var os = require('os');
//@
//@ ### ln(options, source, dest)
//@ ### ln(source, dest)
//@ Available options:
//@
//@ + `s`: symlink
//@ + `f`: force
//@
//@ Examples:
//@
//@ ```javascript
//@ ln('file', 'newlink');
//@ ln('-sf', 'file', 'existing');
//@ ```
//@
//@ Links source to dest. Use -f to force the link, should dest already exist.
function _ln(options, source, dest) {
options = common.parseOptions(options, {
's': 'symlink',
'f': 'force'
});
if (!source || !dest) {
common.error('Missing <source> and/or <dest>');
}
source = path.resolve(process.cwd(), String(source));
dest = path.resolve(process.cwd(), String(dest));
if (!fs.existsSync(source)) {
common.error('Source file does not exist', true);
}
if (fs.existsSync(dest)) {
if (!options.force) {
common.error('Destination file exists', true);
}
fs.unlinkSync(dest);
}
if (options.symlink) {
fs.symlinkSync(source, dest, os.platform() === "win32" ? "junction" : null);
} else {
fs.linkSync(source, dest, os.platform() === "win32" ? "junction" : null);
}
}
module.exports = _ln;

Some files were not shown because too many files have changed in this diff.