structure, layout and automation

This commit is contained in:
Tancre
2020-09-16 14:23:28 +02:00
commit 0efda7fffe
15549 changed files with 1280031 additions and 0 deletions

30
node_modules/tar/lib/buffer-entry.js generated vendored Normal file
View File

@ -0,0 +1,30 @@
// just like the Entry class, but it buffers the contents
//
// XXX It would be good to set a maximum BufferEntry filesize,
// since it eats up memory. In normal operation,
// these are only for long filenames or link names, which are
// rarely very big.
module.exports = BufferEntry
var inherits = require("inherits")
, Entry = require("./entry.js")
// Collects an entry's entire contents in memory, exposing them as
// this.body once the "end" event fires.  Used for long filenames and
// link names, which are rarely very big.
function BufferEntry () {
  Entry.apply(this, arguments)
  // SECURITY/BUGFIX: new Buffer(n) returns *uninitialized* memory; if the
  // entry's data is ever shorter than props.size, stale heap bytes would
  // leak into this.body.  Zero the buffer up front.
  this._buffer = new Buffer(this.props.size)
  this._buffer.fill(0)
  this._offset = 0
  this.body = ""
  this.on("end", function () {
    // drop the trailing NUL that tar appends to name/linkname data
    this.body = this._buffer.toString().slice(0, -1)
  })
}

inherits(BufferEntry, Entry)
// Collect the bytes as they come in: copy each chunk into the
// preallocated buffer, then let Entry handle the stream bookkeeping.
BufferEntry.prototype.write = function (chunk) {
  var pos = this._offset
  chunk.copy(this._buffer, pos)
  this._offset = pos + chunk.length
  Entry.prototype.write.call(this, chunk)
}

169
node_modules/tar/lib/entry-writer.js generated vendored Normal file
View File

@ -0,0 +1,169 @@
module.exports = EntryWriter
var tar = require("../tar.js")
, TarHeader = require("./header.js")
, Entry = require("./entry.js")
, inherits = require("inherits")
, BlockStream = require("block-stream")
, ExtendedHeaderWriter
, Stream = require("stream").Stream
, EOF = {}
inherits(EntryWriter, Stream)

// A readable/writable stream representing one entry being written into a
// tarball.  Data written in is re-blocked into 512-byte chunks (via
// BlockStream) and re-emitted as "data" events for the Pack stream.
function EntryWriter (props) {
  var me = this

  if (!(me instanceof EntryWriter)) {
    return new EntryWriter(props)
  }

  Stream.apply(this)

  me.writable = true
  me.readable = true

  // tar entry bodies are padded out to whole 512-byte blocks
  me._stream = new BlockStream(512)

  me._stream.on("data", function (c) {
    me.emit("data", c)
  })

  me._stream.on("drain", function () {
    me.emit("drain")
  })

  me._stream.on("end", function () {
    me.emit("end")
    me.emit("close")
  })

  me.props = props
  if (props.type === "Directory") {
    props.size = 0
  }
  props.ustar = "ustar\0"
  props.ustarver = "00"

  me.path = props.path

  me._buffer = []
  me._didHeader = false
  // _meta is true for extended-header pseudo-entries (see
  // extended-header-writer.js) so they don't recurse into another
  // extended header of their own.
  me._meta = false

  me.on("pipe", function () {
    me._process()
  })
}
// Queue a chunk and attempt to flush it through the block stream.
// Returns false when the caller should wait for "drain" before writing
// more.
EntryWriter.prototype.write = function (chunk) {
  if (this._ended) {
    return this.emit("error", new Error("write after end"))
  }
  this._buffer.push(chunk)
  this._process()
  var pending = this._buffer.length
  this._needDrain = pending > 0
  return pending === 0
}
// Finish the entry: optionally queue one last chunk, then the EOF
// sentinel, and flush whatever is buffered.
EntryWriter.prototype.end = function (chunk) {
  this._ended = true
  if (chunk) {
    this._buffer.push(chunk)
  }
  this._buffer.push(EOF)
  this._process()
  this._needDrain = this._buffer.length > 0
}
// Stop pushing blocks; _process() will stop mid-buffer if necessary.
EntryWriter.prototype.pause = function () {
  this._paused = true
  this.emit("pause")
}
// Resume pushing blocks and flush anything buffered while paused.
EntryWriter.prototype.resume = function () {
  this._paused = false
  this.emit("resume")
  this._process()
}
// Delegate a new entry to the parent Pack stream, finishing this entry
// first so its header and trailing blocks are fully emitted and the
// parent's _currentEntry link is cleared.
EntryWriter.prototype.add = function (entry) {
  if (!this.parent) {
    return this.emit("error", new Error("no parent"))
  }
  if (!this._ended) {
    this.end()
  }
  return this.parent.add(entry)
}
// Emit the 512-byte ustar header for this entry, exactly once.  If the
// properties don't fit in a plain header, a pax extended header entry
// is emitted before it.
EntryWriter.prototype._header = function () {
  if (this._didHeader) return
  this._didHeader = true

  var headerBlock = TarHeader.encode(this.props)

  // TarHeader.encode sets needExtended as a side effect; meta entries
  // (extended headers themselves) never get another extended header.
  if (this.props.needExtended && !this._meta) {
    var me = this

    // lazy require breaks the circular dependency with
    // extended-header-writer.js
    ExtendedHeaderWriter = ExtendedHeaderWriter ||
      require("./extended-header-writer.js")

    ExtendedHeaderWriter(this.props)
      .on("data", function (c) {
        me.emit("data", c)
      })
      .on("error", function (er) {
        me.emit("error", er)
      })
      .end()
  }

  this.emit("data", headerBlock)
  this.emit("header")
}
// Flush buffered chunks into the 512-byte block stream.  Makes sure the
// header goes out first, respects pause() mid-buffer, and guards against
// re-entrant calls triggered by "data" listeners.
EntryWriter.prototype._process = function () {
  if (!this._didHeader && !this._meta) {
    this._header()
  }

  if (this._paused || this._processing) {
    return
  }

  this._processing = true

  var buf = this._buffer
  for (var i = 0; i < buf.length; i ++) {
    var c = buf[i]

    // EOF is the sentinel object pushed by end()
    if (c === EOF) this._stream.end()
    else this._stream.write(c)

    if (this._paused) {
      // paused mid-emission: keep whatever wasn't sent yet and mark
      // that a "drain" is owed once we resume
      this._processing = false
      if (i < buf.length) {
        this._needDrain = true
        this._buffer = buf.slice(i + 1)
      }
      return
    }
  }

  this._buffer.length = 0
  this._processing = false

  this.emit("drain")
}
// no-op: entries hold no resources to tear down, but Stream consumers
// expect destroy() to exist
EntryWriter.prototype.destroy = function () {}

220
node_modules/tar/lib/entry.js generated vendored Normal file
View File

@ -0,0 +1,220 @@
// A passthrough read/write stream that sets its properties
// based on a header, extendedHeader, and globalHeader
//
// Can be either a file system object of some sort, or
// a pax/ustar metadata entry.
module.exports = Entry
var TarHeader = require("./header.js")
, tar = require("../tar")
, assert = require("assert").ok
, Stream = require("stream").Stream
, inherits = require("inherits")
, fstream = require("fstream").Abstract
// A passthrough read/write stream whose properties are derived from a
// tar header block, plus any pax extended header and global header that
// preceded it (see _setProps).
function Entry (header, extended, global) {
  Stream.call(this)
  this.readable = true
  this.writable = true

  this._needDrain = false   // a write returned false; owe a "drain" event
  this._paused = false
  this._reading = false     // re-entrancy guard for _read()
  this._ending = false
  this._ended = false
  this._remaining = 0       // bytes of file content still expected
  this._abort = false       // when true, the parser skips this entry's data
  this._queue = []          // buffered chunks awaiting emission
  this._index = 0           // next queue slot to emit
  this._queueLen = 0

  // pre-bind so _read can be passed directly as an event handler
  this._read = this._read.bind(this)

  this.props = {}
  this._header = header
  this._extended = extended || {}

  // globals can change throughout the course of
  // a file parse operation.  Freeze it at its current state.
  this._global = {}
  var me = this
  Object.keys(global || {}).forEach(function (g) {
    me._global[g] = global[g]
  })

  this._setProps()
}

inherits(Entry, Stream)
// Accept a chunk of file content from the parser.  Chunks arrive in
// 512-byte blocks, so anything past the declared size is zero padding
// and gets trimmed off.  Returns false when the producer should wait
// for "drain".
Entry.prototype.write = function (c) {
  if (this._ending) this.error("write() after end()", null, true)

  if (this._remaining === 0) {
    this.error("invalid bytes past eof")
  }

  // often we'll get a bunch of \0 at the end of the last write,
  // since chunks will always be 512 bytes when reading a tarball.
  if (c.length > this._remaining) {
    c = c.slice(0, this._remaining)
  }
  this._remaining -= c.length

  // put it on the stack.
  var ql = this._queueLen
  this._queue.push(c)
  this._queueLen ++

  this._read()

  // either paused, or buffered
  if (this._paused || ql > 0) {
    this._needDrain = true
    return false
  }

  return true
}
// Signal that all content has arrived; the "end" event itself is
// deferred until the queue fully drains (see _read).
Entry.prototype.end = function (c) {
  if (c) this.write(c)
  this._ending = true
  this._read()
}
// Stop emitting "data" until resume() is called.
Entry.prototype.pause = function () {
  this._paused = true
  this.emit("pause")
}
// Resume emitting buffered chunks.  Returns true when more than one
// chunk is still queued after the flush.
Entry.prototype.resume = function () {
  this.emit("resume")
  this._paused = false
  this._read()
  return this._queueLen - this._index > 1
}
// This is bound to the instance.
// Flush queued chunks to "data" listeners, then handle drain/end
// bookkeeping once the queue is empty.
Entry.prototype._read = function () {
  if (this._paused || this._reading || this._ended) return

  // set this flag so that event handlers don't inadvertently
  // get multiple _read() calls running.
  this._reading = true

  // have any data to emit?
  while (this._index < this._queueLen && !this._paused) {
    var chunk = this._queue[this._index ++]
    this.emit("data", chunk)
  }

  // check if we're drained
  if (this._index >= this._queueLen) {
    this._queue.length = this._queueLen = this._index = 0
    if (this._needDrain) {
      this._needDrain = false
      this.emit("drain")
    }
    if (this._ending) {
      this._ended = true
      this.emit("end")
    }
  }

  // if the queue gets too big, then pluck off whatever we can.
  // this should be fairly rare.
  // NOTE(review): _maxQueueLen is never assigned anywhere in this file,
  // so mql is undefined and this trimming branch can never run —
  // confirm whether a default limit was intended.
  var mql = this._maxQueueLen
  if (this._queueLen > mql && this._index > 0) {
    mql = Math.min(this._index, mql)
    this._index -= mql
    this._queueLen -= mql
    this._queue = this._queue.slice(mql)
  }

  this._reading = false
}
// Derive this.props (plus type/path/size) by layering the sources in
// precedence order: plain header, then the global pax header, then this
// entry's own extended header.
Entry.prototype._setProps = function () {
  // props = extended->global->header->{}
  var header = this._header
    , extended = this._extended
    , global = this._global
    , props = this.props

  // first get the values from the normal header.
  var fields = tar.fields
  for (var f = 0; fields[f] !== null; f ++) {
    var field = fields[f]
      , val = header[field]
    if (typeof val !== "undefined") props[field] = val
  }

  // next, the global header for this file.
  // numeric values, etc, will have already been parsed.
  ;[global, extended].forEach(function (p) {
    Object.keys(p).forEach(function (f) {
      if (typeof p[f] !== "undefined") props[f] = p[f]
    })
  })

  // no nulls allowed in path or linkpath
  ;["path", "linkpath"].forEach(function (p) {
    if (props.hasOwnProperty(p)) {
      props[p] = props[p].split("\0")[0]
    }
  })

  // set date fields to be a proper date (headers carry epoch seconds)
  ;["mtime", "ctime", "atime"].forEach(function (p) {
    if (props.hasOwnProperty(p)) {
      props[p] = new Date(props[p] * 1000)
    }
  })

  // set the type so that we know what kind of file to create
  var type
  switch (tar.types[props.type]) {
    case "OldFile":
    case "ContiguousFile":
      type = "File"
      break

    case "GNUDumpDir":
      type = "Directory"
      break

    case undefined:
      type = "Unknown"
      break

    case "Link":
    case "SymbolicLink":
    case "CharacterDevice":
    case "BlockDevice":
    case "Directory":
    case "FIFO":
    default:
      type = tar.types[props.type]
  }

  this.type = type
  this.path = props.path
  this.size = props.size

  // size is special, since it signals when the file needs to end.
  this._remaining = props.size
}
// the parser may not call write if _abort is true.
// useful for skipping data from some files quickly.
Entry.prototype.abort = function(){
  this._abort = true
}
// borrow fstream's warn/error helpers so Entry errors carry the same
// shape as the rest of the fstream/tar ecosystem
Entry.prototype.warn = fstream.warn
Entry.prototype.error = fstream.error

191
node_modules/tar/lib/extended-header-writer.js generated vendored Normal file
View File

@ -0,0 +1,191 @@
module.exports = ExtendedHeaderWriter
var inherits = require("inherits")
, EntryWriter = require("./entry-writer.js")
inherits(ExtendedHeaderWriter, EntryWriter)
var tar = require("../tar.js")
, path = require("path")
, TarHeader = require("./header.js")
// props is the props of the thing we need to write an
// extended header for.
// Don't be shy with it.  Just encode everything.
function ExtendedHeaderWriter (props) {
  var me = this

  if (!(me instanceof ExtendedHeaderWriter)) {
    return new ExtendedHeaderWriter(props)
  }

  // keep the original entry's props around; they become the pax records
  me.fields = props

  // header props for the "x" (pax extended header) pseudo-entry itself
  var p =
    { path : ("PaxHeader" + path.join("/", props.path || ""))
             .replace(/\\/g, "/").substr(0, 100)
    , mode : props.mode || 0666
    , uid : props.uid || 0
    , gid : props.gid || 0
    , size : 0 // will be set later
    , mtime : props.mtime || Date.now() / 1000
    , type : "x"
    , linkpath : ""
    , ustar : "ustar\0"
    , ustarver : "00"
    , uname : props.uname || ""
    , gname : props.gname || ""
    , devmaj : props.devmaj || 0
    , devmin : props.devmin || 0
    }

  EntryWriter.call(me, p)
  me.props = p
  // mark as metadata so EntryWriter doesn't give the extended header
  // an extended header of its own
  me._meta = true
}
// Encode the pax records, then write the header block followed by the
// record bodies into the underlying block stream, and end it.
ExtendedHeaderWriter.prototype.end = function () {
  if (this._ended) return
  this._ended = true

  this._encodeFields()

  if (this.props.size === 0) {
    // nothing to write!
    this._ready = true
    this._stream.end()
    return
  }

  this._stream.write(TarHeader.encode(this.props))
  var stream = this._stream
  this.body.forEach(function (l) {
    stream.write(l)
  })
  this._ready = true

  this._stream.end()
}
// Build this.body (an array of buffers, one pax record per field) and
// accumulate their total byte length into props.size.
ExtendedHeaderWriter.prototype._encodeFields = function () {
  this.body = []

  // fold a split ustar prefix back into the full path before encoding
  if (this.fields.prefix) {
    this.fields.path = this.fields.prefix + "/" + this.fields.path
    this.fields.prefix = ""
  }

  encodeFields(this.fields, "", this.body, this.fields.noProprietary)

  var total = this.props.size
  for (var i = 0; i < this.body.length; i ++) {
    total += this.body[i].length
  }
  this.props.size = total
}
// Recursively encode `fields` into pax records, appending the resulting
// [lengthBuf, recordBuf] pairs to `body`.  Nested objects are flattened
// using dotted key prefixes.
//
// "%d %s=%s\n", <length>, <keyword>, <value>
// The length is a decimal number, and includes itself and the \n
// Numeric values are decimal strings.
function encodeFields (fields, prefix, body, nop) {
  Object.keys(fields).forEach(function (k) {
    var val = fields[k]
      , numeric = tar.numeric[k]

    if (prefix) k = prefix + "." + k

    // already including NODETAR.type, don't need File=true also
    if (k === fields.type && val === true) return

    switch (k) {
      // don't include anything that's always handled just fine
      // in the normal header, or only meaningful in the context
      // of nodetar
      case "mode":
      case "cksum":
      case "ustar":
      case "ustarver":
      case "prefix":
      case "basename":
      case "dirname":
      case "needExtended":
      case "block":
      case "filter":
        return

      case "rdev":
        if (val === 0) return
        break

      // these belong in the SCHILY.* namespace, from Jörg Schilling's
      // star, which originated the pax extended header format
      case "nlink":
      case "dev":
      case "ino":
        k = "SCHILY." + k
        break

      default: break
    }

    if (val && typeof val === "object" &&
        !Buffer.isBuffer(val)) encodeFields(val, k, body, nop)
    else if (val === null || val === undefined) return
    else body.push.apply(body, encodeField(k, val, nop))
  })

  return body
}
// Encode a single pax extended-header record:
//   "%d %s=%s\n" — the decimal length counts itself and the \n.
// Returns [lengthBuf, recordBuf], or [] when the key is suppressed.
function encodeField (k, v, nop) {
  // lowercase keys must be in the known-extended set; anything else is
  // namespaced under "NODETAR." so other implementations ignore it
  if (k.charAt(0) === k.charAt(0).toLowerCase()) {
    var m = k.split(".")[0]
    if (!tar.knownExtended[m]) k = "NODETAR." + k
  }

  // noProprietary mode: drop vendor-namespaced (capitalized) keys
  if (nop && k.charAt(0) !== k.charAt(0).toLowerCase()) {
    return []
  }

  // BUGFIX: this previously tested and assigned an undeclared `val`
  // instead of the parameter `v`, so the branch was dead code.  (The
  // string concatenation below produced the same decimal text, but the
  // explicit conversion was clearly intended.)
  if (typeof v === "number") v = v.toString(10)

  var s = new Buffer(" " + k + "=" + v + "\n")
    , digits = Math.floor(Math.log(s.length) / Math.log(10)) + 1

  // The record length includes its own decimal digits, so adding them
  // can push the total across a power of ten, requiring one more digit.
  // For example, " foo=bar\n" is 9 chars; with "9 " prefixed that's 10,
  // but "10 foo=bar\n" is 11 chars, so the length must be bumped again.
  if (s.length + digits >= Math.pow(10, digits)) digits += 1

  var len = digits + s.length

  var lenBuf = new Buffer("" + len)
  if (lenBuf.length + s.length !== len) {
    throw new Error("Bad length calculation\n"+
                    "len="+len+"\n"+
                    "lenBuf="+JSON.stringify(lenBuf.toString())+"\n"+
                    "lenBuf.length="+lenBuf.length+"\n"+
                    "digits="+digits+"\n"+
                    "s="+JSON.stringify(s.toString())+"\n"+
                    "s.length="+s.length)
  }

  return [lenBuf, s]
}

140
node_modules/tar/lib/extended-header.js generated vendored Normal file
View File

@ -0,0 +1,140 @@
// An Entry consisting of:
//
// "%d %s=%s\n", <length>, <keyword>, <value>
//
// The length is a decimal number, and includes itself and the \n
// \0 does not terminate anything. Only the length terminates the string.
// Numeric values are decimal strings.
module.exports = ExtendedHeader
var Entry = require("./entry.js")
, inherits = require("inherits")
, tar = require("../tar.js")
, numeric = tar.numeric
, keyTrans = { "SCHILY.dev": "dev"
, "SCHILY.ino": "ino"
, "SCHILY.nlink": "nlink" }
// Parses its own "data" events as pax extended-header records
// ("%d %s=%s\n") into this.fields.
function ExtendedHeader () {
  Entry.apply(this, arguments)
  this.on("data", this._parse)
  this.fields = {}
  this._position = 0   // absolute byte offset, for error messages
  this._fieldPos = 0   // byte offset within the current record
  this._state = SIZE   // SIZE -> KEY -> VAL, repeating per record
  this._sizeBuf = []
  this._keyBuf = []
  this._valBuf = []
  this._size = -1      // declared length of the current record
  this._key = ""
}

inherits(ExtendedHeader, Entry)
ExtendedHeader.prototype._parse = parse

// parser states for the "%d %s=%s\n" record grammar
var s = 0
  , states = ExtendedHeader.states = {}
  , SIZE = states.SIZE = s++
  , KEY = states.KEY = s++
  , VAL = states.VAL = s++
  , ERR = states.ERR = s++

// also map each numeric state value onto itself for reverse lookups
Object.keys(states).forEach(function (s) {
  states[states[s]] = states[s]
})

states[s] = null

// char code values for comparison
// BUGFIX: `a` was declared twice and uppercase `A` was missing — the
// second declaration shadowed the first.  (None of A/Z/a/z/point are
// currently read by the parser, but restore the intended set.)
var _0 = "0".charCodeAt(0)
  , _9 = "9".charCodeAt(0)
  , point = ".".charCodeAt(0)
  , A = "A".charCodeAt(0)
  , Z = "Z".charCodeAt(0)
  , a = "a".charCodeAt(0)
  , z = "z".charCodeAt(0)
  , space = " ".charCodeAt(0)
  , eq = "=".charCodeAt(0)
  , cr = "\n".charCodeAt(0)
// Byte-at-a-time state machine over incoming chunks.
// SIZE: decimal digits up to a space; KEY: bytes up to "="; VAL: the
// remainder of the record, which must end with \n exactly at the
// declared length.
function parse (c) {
  if (this._state === ERR) return

  for ( var i = 0, l = c.length
      ; i < l
      ; this._position++, this._fieldPos++, i++) {
    var b = c[i]

    // a record may never run past its declared length
    if (this._size >= 0 && this._fieldPos > this._size) {
      error(this, "field exceeds length="+this._size)
      return
    }

    switch (this._state) {
      case ERR: return

      case SIZE:
        if (b === space) {
          this._state = KEY
          this._size = parseInt(new Buffer(this._sizeBuf).toString(), 10)
          this._sizeBuf.length = 0
          continue
        }
        if (b < _0 || b > _9) {
          error(this, "expected [" + _0 + ".." + _9 + "], got " + b)
          return
        }
        this._sizeBuf.push(b)
        continue

      case KEY:
        // can be any char except =, not > size.
        if (b === eq) {
          this._state = VAL
          this._key = new Buffer(this._keyBuf).toString()
          // normalize SCHILY.* keys to their plain nodetar names
          if (keyTrans[this._key]) this._key = keyTrans[this._key]
          this._keyBuf.length = 0
          continue
        }
        this._keyBuf.push(b)
        continue

      case VAL:
        // field must end with cr
        if (this._fieldPos === this._size - 1) {
          if (b !== cr) {
            error(this, "expected \\n at end of field")
            return
          }
          var val = new Buffer(this._valBuf).toString()
          if (numeric[this._key]) {
            val = parseFloat(val)
          }
          this.fields[this._key] = val

          // reset for the next record
          this._valBuf.length = 0
          this._state = SIZE
          this._size = -1
          this._fieldPos = -1
          continue
        }
        this._valBuf.push(b)
        continue
    }
  }
}
// Report a malformed header and latch the parser into the ERR state so
// subsequent chunks are ignored.
function error (me, msg) {
  msg = "invalid header: " + msg
      + "\nposition=" + me._position
      + "\nfield position=" + me._fieldPos

  me.error(msg)
  // BUGFIX: this wrote `me.state`, but parse() checks `me._state`, so a
  // malformed record never actually stopped the parser on later chunks.
  me._state = ERR
}

94
node_modules/tar/lib/extract.js generated vendored Normal file
View File

@ -0,0 +1,94 @@
// give it a tarball and a path, and it'll dump the contents
module.exports = Extract
var tar = require("../tar.js")
, fstream = require("fstream")
, inherits = require("inherits")
, path = require("path")
// A Parse stream wired to an fstream directory Writer: pipe a tarball
// in, and its contents are written under opts.path.
function Extract (opts) {
  if (!(this instanceof Extract)) return new Extract(opts)
  tar.Parse.apply(this)

  // accept either an options object or a bare destination path string
  if (typeof opts !== "object") {
    opts = { path: opts }
  }

  // better to drop in cwd? seems more standard.
  opts.path = opts.path || path.resolve("node-tar-extract")
  opts.type = "Directory"
  opts.Directory = true

  // similar to --strip or --strip-components
  opts.strip = +opts.strip
  if (!opts.strip || opts.strip <= 0) opts.strip = 0

  this._fst = fstream.Writer(opts)

  // hold back data until the target directory is ready (see "ready")
  this.pause()
  var me = this

  // Hardlinks in tarballs are relative to the root
  // of the tarball.  So, they need to be resolved against
  // the target directory in order to be created properly.
  me.on("entry", function (entry) {
    // if there's a "strip" argument, then strip off that many
    // path components.
    if (opts.strip) {
      var p = entry.path.split("/").slice(opts.strip).join("/")
      entry.path = entry.props.path = p
      if (entry.linkpath) {
        var lp = entry.linkpath.split("/").slice(opts.strip).join("/")
        entry.linkpath = entry.props.linkpath = lp
      }
    }
    if (entry.type === "Link") {
      entry.linkpath = entry.props.linkpath =
        path.join(opts.path, path.join("/", entry.props.linkpath))
    }

    if (entry.type === "SymbolicLink") {
      // keep symlink targets inside the extraction root: rebase any
      // target that would resolve outside opts.path
      var dn = path.dirname(entry.path) || ""
      var linkpath = entry.props.linkpath
      var target = path.resolve(opts.path, dn, linkpath)
      if (target.indexOf(opts.path) !== 0) {
        linkpath = path.join(opts.path, path.join("/", linkpath))
      }
      entry.linkpath = entry.props.linkpath = linkpath
    }
  })

  this._fst.on("ready", function () {
    me.pipe(me._fst, { end: false })
    me.resume()
  })

  this._fst.on('error', function(err) {
    me.emit('error', err)
  })

  this._fst.on('drain', function() {
    me.emit('drain')
  })

  // the writer's "close" marks everything flushed to disk; only then do
  // we signal completion to our own consumers
  this._fst.on("close", function () {
    me.emit("finish")
    me.emit("end")
    me.emit("close")
  })
}

inherits(Extract, tar.Parse)
// Called by the parser when the tar stream itself is exhausted; the
// writer's "close" handler (wired in the constructor) emits our own
// end/close later.
Extract.prototype._streamEnd = function () {
  if (!this._ended || this._entry) this.error("unexpected eof")
  this._fst.end()
}

14
node_modules/tar/lib/global-header-writer.js generated vendored Normal file
View File

@ -0,0 +1,14 @@
module.exports = GlobalHeaderWriter
var ExtendedHeaderWriter = require("./extended-header-writer.js")
, inherits = require("inherits")
inherits(GlobalHeaderWriter, ExtendedHeaderWriter)
// Identical to an ExtendedHeaderWriter except the entry type is "g",
// marking the records as a pax *global* header that applies to every
// subsequent entry in the archive.
function GlobalHeaderWriter (props) {
  if (!(this instanceof GlobalHeaderWriter)) {
    return new GlobalHeaderWriter(props)
  }
  ExtendedHeaderWriter.call(this, props)
  this.props.type = "g"
}

385
node_modules/tar/lib/header.js generated vendored Normal file
View File

@ -0,0 +1,385 @@
// parse a 512-byte header block to a data object, or vice-versa
// If the data won't fit nicely in a simple header, then generate
// the appropriate extended header file, and return that.
module.exports = TarHeader
var tar = require("../tar.js")
, fields = tar.fields
, fieldOffs = tar.fieldOffs
, fieldEnds = tar.fieldEnds
, fieldSize = tar.fieldSize
, numeric = tar.numeric
, assert = require("assert").ok
, space = " ".charCodeAt(0)
, slash = "/".charCodeAt(0)
, bslash = process.platform === "win32" ? "\\".charCodeAt(0) : null
// Construct a TarHeader, optionally decoding a 512-byte block right away.
function TarHeader (block) {
  if (!(this instanceof TarHeader)) return new TarHeader(block)
  if (block) this.decode(block)
}

TarHeader.prototype =
  { decode : decode
  , encode: encode
  , calcSum: calcSum
  , checkSum: checkSum
  }

// static forms, usable without constructing a TarHeader instance
TarHeader.parseNumeric = parseNumeric
TarHeader.encode = encode
TarHeader.decode = decode
// note that this will only do the normal ustar header, not any kind
// of extended posix header file. If something doesn't fit comfortably,
// then it will set obj.needExtended = true, and set the block to
// the closest approximation.
function encode (obj) {
if (!obj && !(this instanceof TarHeader)) throw new Error(
"encode must be called on a TarHeader, or supplied an object")
obj = obj || this
var block = obj.block = new Buffer(512)
// if the object has a "prefix", then that's actually an extension of
// the path field.
if (obj.prefix) {
// console.error("%% header encoding, got a prefix", obj.prefix)
obj.path = obj.prefix + "/" + obj.path
// console.error("%% header encoding, prefixed path", obj.path)
obj.prefix = ""
}
obj.needExtended = false
if (obj.mode) {
if (typeof obj.mode === "string") obj.mode = parseInt(obj.mode, 8)
obj.mode = obj.mode & 0777
}
for (var f = 0; fields[f] !== null; f ++) {
var field = fields[f]
, off = fieldOffs[f]
, end = fieldEnds[f]
, ret
switch (field) {
case "cksum":
// special, done below, after all the others
break
case "prefix":
// special, this is an extension of the "path" field.
// console.error("%% header encoding, skip prefix later")
break
case "type":
// convert from long name to a single char.
var type = obj.type || "0"
if (type.length > 1) {
type = tar.types[obj.type]
if (!type) type = "0"
}
writeText(block, off, end, type)
break
case "path":
// uses the "prefix" field if > 100 bytes, but <= 255
var pathLen = Buffer.byteLength(obj.path)
, pathFSize = fieldSize[fields.path]
, prefFSize = fieldSize[fields.prefix]
// paths between 100 and 255 should use the prefix field.
// longer than 255
if (pathLen > pathFSize &&
pathLen <= pathFSize + prefFSize) {
// need to find a slash somewhere in the middle so that
// path and prefix both fit in their respective fields
var searchStart = pathLen - 1 - pathFSize
, searchEnd = prefFSize
, found = false
, pathBuf = new Buffer(obj.path)
for ( var s = searchStart
; (s <= searchEnd)
; s ++ ) {
if (pathBuf[s] === slash || pathBuf[s] === bslash) {
found = s
break
}
}
if (found !== false) {
prefix = pathBuf.slice(0, found).toString("utf8")
path = pathBuf.slice(found + 1).toString("utf8")
ret = writeText(block, off, end, path)
off = fieldOffs[fields.prefix]
end = fieldEnds[fields.prefix]
// console.error("%% header writing prefix", off, end, prefix)
ret = writeText(block, off, end, prefix) || ret
break
}
}
// paths less than 100 chars don't need a prefix
// and paths longer than 255 need an extended header and will fail
// on old implementations no matter what we do here.
// Null out the prefix, and fallthrough to default.
// console.error("%% header writing no prefix")
var poff = fieldOffs[fields.prefix]
, pend = fieldEnds[fields.prefix]
writeText(block, poff, pend, "")
// fallthrough
// all other fields are numeric or text
default:
ret = numeric[field]
? writeNumeric(block, off, end, obj[field])
: writeText(block, off, end, obj[field] || "")
break
}
obj.needExtended = obj.needExtended || ret
}
var off = fieldOffs[fields.cksum]
, end = fieldEnds[fields.cksum]
writeNumeric(block, off, end, calcSum.call(this, block))
return block
}
// if it's a negative number, or greater than will fit,
// then use write256.
// These are the largest values representable in an octal field of each
// width (one byte is reserved for the trailing NUL/space terminator).
var MAXNUM = { 12: 077777777777
             , 11: 07777777777
             , 8 : 07777777
             , 7 : 0777777 }
// Write `num` into block[off..end) as a NUL/space-terminated octal
// string, falling back to base-256 when the value is negative or too
// large.  Returns true when an extended header is needed to represent
// the value faithfully.
function writeNumeric (block, off, end, num) {
  var writeLen = end - off
    , maxNum = MAXNUM[writeLen] || 0

  num = num || 0

  // dates are stored as epoch seconds
  if (num instanceof Date ||
      Object.prototype.toString.call(num) === "[object Date]") {
    num = num.getTime() / 1000
  }

  if (num > maxNum || num < 0) {
    write256(block, off, end, num)
    // need an extended header if negative or too big.
    return true
  }

  // god, tar is so annoying
  // if the string is small enough, you should put a space
  // between the octal string and the \0, but if it doesn't
  // fit, then don't.
  var numStr = Math.floor(num).toString(8)
  if (num < MAXNUM[writeLen - 1]) numStr += " "

  // pad with "0" chars
  if (numStr.length < writeLen) {
    numStr = (new Array(writeLen - numStr.length).join("0")) + numStr
  }

  if (numStr.length !== writeLen - 1) {
    throw new Error("invalid length: " + JSON.stringify(numStr) + "\n" +
                    "expected: "+writeLen)
  }

  block.write(numStr, off, writeLen, "utf8")
  // always NUL-terminate the field
  block[end - 1] = 0
}
// Write `num` into block[off..end) in base-256 ("binary") tar encoding,
// used when a value is negative or too large for the octal field.
// Byte 0 is the flag: 0x80 for positive, 0xFF for negative (2's comp).
function write256 (block, off, end, num) {
  var buf = block.slice(off, end)
  var positive = num >= 0
  buf[0] = positive ? 0x80 : 0xFF

  // get the number as a base-256 tuple, least-significant byte first
  if (!positive) num *= -1
  var tuple = []
  do {
    var n = num % 256
    tuple.push(n)
    num = (num - n) / 256
  } while (num)

  var bytes = tuple.length

  var fill = buf.length - bytes
  for (var i = 1; i < fill; i ++) {
    buf[i] = positive ? 0 : 0xFF
  }

  // tuple is a base256 number, with [0] as the *least* significant byte,
  // which belongs at the very end of the field.
  // if it's negative, then we need to flip all the bits once we hit the
  // first non-zero bit.  The 2's-complement is (0x100 - n), and the 1's-
  // complement is (0xFF - n).
  // BUGFIX: the indices were off by one (buf[fill + i]), which wrote the
  // least-significant byte past the end of the field (silently dropped)
  // and left buf[fill] unwritten; parse256() reads the layout written
  // here, with the least-significant byte at buf[buf.length - 1].
  var zero = true
  for (i = bytes; i > 0; i --) {
    var byte = tuple[bytes - i]
    if (positive) buf[fill + i - 1] = byte
    else if (zero && byte === 0) buf[fill + i - 1] = 0
    else if (zero) {
      zero = false
      buf[fill + i - 1] = 0x100 - byte
    } else buf[fill + i - 1] = 0xFF - byte
  }
}
// Write `str` into block[off..end) as UTF-8, NUL-padding the remainder.
// Returns true when the string is non-ascii or too long to fit, meaning
// an extended header is required to represent it faithfully.
function writeText (block, off, end, str) {
  var capacity = end - off
  var byteLen = Buffer.byteLength(str)
  var writeLen = Math.min(byteLen, capacity)

  // non-ascii fields need extended headers; long fields get truncated
  var needExtended = byteLen !== str.length || byteLen > writeLen

  if (writeLen > 0) block.write(str, off, writeLen, "utf8")

  // null-pad the rest of the field
  var i = off + writeLen
  while (i < end) {
    block[i] = 0
    i ++
  }

  return needExtended
}
// Compute the ustar checksum of a 512-byte header block: the sum of all
// bytes, with the 8 cksum bytes counted as ASCII spaces.
function calcSum (block) {
  block = block || this.block
  // BUGFIX: the null check used to sit *after* the assert, which already
  // threw on a missing block, making it dead code.  Check absence first
  // so the caller gets the intended error message.
  if (!block) throw new Error("Need block to checksum")
  assert(Buffer.isBuffer(block) && block.length === 512)

  // now figure out what it would be if the cksum was "        "
  var sum = 0
    , start = fieldOffs[fields.cksum]
    , end = fieldEnds[fields.cksum]

  // everything before the cksum field
  for (var i = 0; i < start; i ++) {
    sum += block[i]
  }
  // the cksum field itself counts as all spaces
  for (var i = start; i < end; i ++) {
    sum += space
  }
  // everything after the cksum field
  for (var i = end; i < 512; i ++) {
    sum += block[i]
  }
  return sum
}
// Verify that the checksum stored in the block matches the computed one.
function checkSum (block) {
  var expected = calcSum.call(this, block)
  block = block || this.block
  var stored = block.slice(fieldOffs[fields.cksum], fieldEnds[fields.cksum])
  return parseNumeric(stored) === expected
}
// Populate this header's fields from a 512-byte block, validating the
// checksum and handling the ustar prefix / xstar atime+ctime layouts.
function decode (block) {
  block = block || this.block
  assert(Buffer.isBuffer(block) && block.length === 512)

  this.block = block
  this.cksumValid = this.checkSum()

  var prefix = null

  // slice off each field.
  for (var f = 0; fields[f] !== null; f ++) {
    var field = fields[f]
      , val = block.slice(fieldOffs[f], fieldEnds[f])

    switch (field) {
      case "ustar":
        // if not ustar, then everything after that is just padding.
        if (val.toString() !== "ustar\0") {
          this.ustar = false
          return
        } else {
          this.ustar = val.toString()
        }
        break

      // prefix is special, since it might signal the xstar header
      case "prefix":
        // xstar repurposes the tail of the prefix field for atime and
        // ctime; detect that layout before treating it as a path prefix
        var atime = parseNumeric(val.slice(131, 131 + 12))
          , ctime = parseNumeric(val.slice(131 + 12, 131 + 12 + 12))
        if ((val[130] === 0 || val[130] === space) &&
            typeof atime === "number" &&
            typeof ctime === "number" &&
            val[131 + 12] === space &&
            val[131 + 12 + 12] === space) {
          this.atime = atime
          this.ctime = ctime
          val = val.slice(0, 130)
        }
        prefix = val.toString("utf8").replace(/\0+$/, "")
        break

      // all other fields are null-padding text
      // or a number.
      default:
        if (numeric[field]) {
          this[field] = parseNumeric(val)
        } else {
          this[field] = val.toString("utf8").replace(/\0+$/, "")
        }
        break
    }
  }

  // if we got a prefix, then prepend it to the path.
  if (prefix) {
    this.path = prefix + "/" + this.path
  }
}
// Decode a base-256 ("binary") numeric field, as written by write256.
// The first byte MUST be 0x80 (positive) or 0xFF (negative, stored as
// 2's complement); returns null for anything else.
function parse256 (buf) {
  var positive
  if (buf[0] === 0x80) positive = true
  else if (buf[0] === 0xFF) positive = false
  else return null

  // build up a base-256 tuple from the least sig (end of buf) to the
  // highest.
  // BUGFIX: `zero` must start out true — it tracks whether we are still
  // in the run of low-order zero bytes of a negative value (first
  // non-zero stored byte is 0x100 - n, everything above it 0xFF - n).
  // Starting it false made every negative value decode as 0, which is
  // inconsistent with write256's encoding.
  var zero = true
    , tuple = []
  for (var i = buf.length - 1; i > 0; i --) {
    var byte = buf[i]
    if (positive) tuple.push(byte)
    else if (zero && byte === 0) tuple.push(0)
    else if (zero) {
      zero = false
      tuple.push(0x100 - byte)
    } else tuple.push(0xFF - byte)
  }

  for (var sum = 0, i = 0, l = tuple.length; i < l; i ++) {
    sum += tuple[i] * Math.pow(256, i)
  }

  return positive ? sum : -1 * sum
}
// Parse a numeric header field: base-256 binary when the high bit of
// the first byte is set, otherwise a NUL/space-terminated octal string.
// Returns null when the field holds no parseable number.
function parseNumeric (f) {
  if (f[0] & 0x80) return parse256(f)

  var text = f.toString("utf8").split("\0")[0].trim()
  var parsed = parseInt(text, 8)
  return isNaN(parsed) ? null : parsed
}

236
node_modules/tar/lib/pack.js generated vendored Normal file
View File

@ -0,0 +1,236 @@
// pipe in an fstream, and it'll make a tarball.
// key-value pair argument is global extended header props.
module.exports = Pack
var EntryWriter = require("./entry-writer.js")
, Stream = require("stream").Stream
, path = require("path")
, inherits = require("inherits")
, GlobalHeaderWriter = require("./global-header-writer.js")
, collect = require("fstream").collect
  , eof = new Buffer(512)

// a tar archive ends with two 512-byte blocks of zeroes; zero this one
// explicitly, since new Buffer() memory is uninitialized
for (var i = 0; i < 512; i ++) eof[i] = 0

inherits(Pack, Stream)
// Pipe in an fstream and it emits a tarball as "data" events.  `props`
// doubles as the pax *global* header key/value set.
function Pack (props) {
  var me = this

  if (!(me instanceof Pack)) return new Pack(props)

  if (props) me._noProprietary = props.noProprietary
  else me._noProprietary = false

  me._global = props

  me.readable = true
  me.writable = true
  me._buffer = []
  me._currentEntry = null
  me._processing = false

  me._pipeRoot = null
  me.on("pipe", function (src) {
    // ignore re-pipes of children belonging to the same fstream root
    if (src.root === me._pipeRoot) return
    me._pipeRoot = src
    src.on("end", function () {
      me._pipeRoot = null
    })
    me.add(src)
  })
}
// Emit the pax global header ("g" entry) exactly once, before any real
// entries are written.
Pack.prototype.addGlobal = function (props) {
  if (this._didGlobal) return
  this._didGlobal = true

  var self = this
  GlobalHeaderWriter(props)
    .on("data", function (chunk) {
      self.emit("data", chunk)
    })
    .end()
}
// Queue an fstream entry for packing.  Returns false when the caller
// should wait for "drain" before adding more.
Pack.prototype.add = function (stream) {
  if (this._global && !this._didGlobal) {
    this.addGlobal(this._global)
  }

  if (this._ended) {
    return this.emit("error", new Error("add after end"))
  }

  collect(stream)
  this._buffer.push(stream)
  this._process()
  var pending = this._buffer.length
  this._needDrain = pending > 0
  return pending === 0
}
// Pause output, propagating to the entry currently being written.
Pack.prototype.pause = function () {
  this._paused = true
  if (this._currentEntry) this._currentEntry.pause()
  this.emit("pause")
}
// Resume output, propagating to the entry currently being written.
Pack.prototype.resume = function () {
  this._paused = false
  if (this._currentEntry) this._currentEntry.resume()
  this.emit("resume")
  this._process()
}
// Finish the archive: the eof sentinel makes _process emit the two
// all-zero 512-byte terminator blocks.
Pack.prototype.end = function () {
  this._ended = true
  this._buffer.push(eof)
  this._process()
}
// Pull the next queued entry (or the eof sentinel) off the buffer and
// stream it out as tar-formatted bytes.  Re-entrancy is prevented with
// the _processing flag; _paused defers all work until resume().
Pack.prototype._process = function () {
  var me = this
  if (me._paused || me._processing) {
    return
  }

  var entry = me._buffer.shift()

  if (!entry) {
    if (me._needDrain) {
      me.emit("drain")
    }
    return
  }

  if (entry.ready === false) {
    // Entry hasn't finished stat'ing yet; put it back and retry once
    // it announces readiness.
    me._buffer.unshift(entry)
    entry.on("ready", function () {
      me._process()
    })
    return
  }

  me._processing = true

  if (entry === eof) {
    // A valid tarball ends with at least two 512-byte null blocks.
    me.emit("data", eof)
    me.emit("data", eof)
    me.emit("end")
    me.emit("close")
    return
  }

  // Change the path to be relative to the root dir that was
  // added to the tarball.
  //
  // XXX This should be more like how -C works, so you can
  // explicitly set a root dir, and also explicitly set a pathname
  // in the tarball to use. That way we can skip a lot of extra
  // work when resolving symlinks for bundled dependencies in npm.

  var root = path.dirname((entry.root || entry).path);
  if (me._global && me._global.fromBase && entry.root && entry.root.path) {
    // user set 'fromBase: true' indicating tar root should be directory itself
    root = entry.root.path;
  }

  var wprops = {}

  Object.keys(entry.props || {}).forEach(function (k) {
    wprops[k] = entry.props[k]
  })

  if (me._noProprietary) wprops.noProprietary = true

  wprops.path = path.relative(root, entry.path || '')

  // actually not a matter of opinion or taste.
  if (process.platform === "win32") {
    wprops.path = wprops.path.replace(/\\/g, "/")
  }

  if (!wprops.type)
    wprops.type = 'Directory'

  switch (wprops.type) {
    // sockets not supported
    case "Socket":
      // BUGFIX: previously this just returned, leaving _processing
      // stuck at true and stalling the queue forever after a socket.
      // Skip the entry and keep processing.
      me._processing = false
      me._process()
      return

    case "Directory":
      wprops.path += "/"
      wprops.size = 0
      break

    case "Link":
      var lp = path.resolve(path.dirname(entry.path), entry.linkpath)
      wprops.linkpath = path.relative(root, lp) || "."
      wprops.size = 0
      break

    case "SymbolicLink":
      var lp = path.resolve(path.dirname(entry.path), entry.linkpath)
      wprops.linkpath = path.relative(path.dirname(entry.path), lp) || "."
      wprops.size = 0
      break
  }

  var writer = me._currentEntry = EntryWriter(wprops)
  writer.parent = me

  writer.on("data", function (c) {
    me.emit("data", c)
  })

  writer.on("header", function () {
    // BUGFIX: removed a debug-era override of Buffer.prototype.toJSON
    // that used to live here — mutating a native prototype changed
    // JSON.stringify output for every Buffer in the process.
    if (writer.props.size === 0) nextEntry()
  })
  writer.on("close", nextEntry)

  // nextEntry may be triggered by both "header" (zero-size entries)
  // and "close"; the `ended` latch makes it idempotent.
  var ended = false
  function nextEntry () {
    if (ended) return
    ended = true

    me._currentEntry = null
    me._processing = false
    me._process()
  }

  writer.on("error", function (er) {
    me.emit("error", er)
  })

  // if it's the root, then there's no need to add its entries,
  // or data, since they'll be added directly.
  if (entry === me._pipeRoot) {
    writer.add = null
  }

  entry.pipe(writer)
}
// Intentional no-op: Pack drives its own shutdown via the eof sentinel
// queued by end(); destroy() exists only to satisfy the Stream interface.
Pack.prototype.destroy = function () {}
// Intentional no-op: entries arrive via add()/pipe(), and the output
// bytes are produced by the EntryWriters — raw write()s are ignored.
Pack.prototype.write = function () {}

281
node_modules/tar/lib/parse.js generated vendored Normal file
View File

@ -0,0 +1,281 @@
// A writable stream.
// It emits "entry" events, which provide a readable stream that has
// header info attached.
module.exports = Parse.create = Parse
var stream = require("stream")
, Stream = stream.Stream
, BlockStream = require("block-stream")
, tar = require("../tar.js")
, TarHeader = require("./header.js")
, Entry = require("./entry.js")
, BufferEntry = require("./buffer-entry.js")
, ExtendedHeader = require("./extended-header.js")
, assert = require("assert").ok
, inherits = require("inherits")
, fstream = require("fstream")
// reading a tar is a lot like reading a directory
// However, we're actually not going to run the ctor,
// since it does a stat and various other stuff.
// This inheritance gives us the pause/resume/pipe
// behavior that is desired.
inherits(Parse, fstream.Reader)
// Parse is a writable stream that consumes raw tarball bytes and emits
// "entry" events carrying readable entry streams with header info.
function Parse () {
  var self = this
  if (!(self instanceof Parse)) return new Parse()

  // Deliberately does NOT run the fstream.Reader ctor: we don't want
  // it to stat anything.  We inherit only for the entry/add logic in
  // .pipe(), so apply the plain Stream ctor instead.
  Stream.apply(self)

  self.writable = true
  self.readable = true

  // Tar data is processed in exact 512-byte blocks.
  self._stream = new BlockStream(512)
  self.position = 0
  self._ended = false
  self._hardLinks = {}

  self._stream.on("error", function (er) {
    self.emit("error", er)
  })
  self._stream.on("data", function (block) {
    self._process(block)
  })
  self._stream.on("end", function () {
    self._streamEnd()
  })
  self._stream.on("drain", function () {
    self.emit("drain")
  })
}
// Emit "end" once the underlying block stream is exhausted.
// Overridden in the Extract class, which must wait for its DirWriter
// part to finish before emitting "end".
Parse.prototype._streamEnd = function () {
  var self = this
  // The archive must have been explicitly end()ed and must not stop
  // in the middle of an entry.
  if (!self._ended || self._entry) self.error("unexpected eof")
  self.emit("end")
}
// a tar reader is actually a filter, not just a readable stream.
// So, you should pipe a tarball stream into it, and it needs these
// write/end methods to do that.
//
// Accepts a chunk of raw tar data and feeds it to the block stream.
// After end(), only null padding is tolerated.
Parse.prototype.write = function (c) {
  if (this._ended) {
    // gnutar puts a LOT of nulls at the end.
    // you can keep writing these things forever.
    // Just ignore them.
    // BUGFIX: the loop condition was "i > l", so the body never ran
    // and non-null data written after end() was silently accepted
    // instead of raising "write() after end()".
    for (var i = 0, l = c.length; i < l; i ++) {
      if (c[i] !== 0) return this.error("write() after end()")
    }
    return
  }
  return this._stream.write(c)
}
// End of input: flush the final chunk into the block stream.
// _streamEnd() will later verify the archive terminated properly.
Parse.prototype.end = function (c) {
  // any further write()s may only contain null padding
  this._ended = true
  return this._stream.end(c)
}
// don't need to do anything, since we're just
// proxying the data up from the _stream.
// Just need to override the parent's "Not Implemented"
// error-thrower.
// (fstream.Reader subclasses normally pull data in _read; here the
// data is pushed in via write() instead.)
Parse.prototype._read = function () {}
// Consume exactly one 512-byte block: either body data for the entry
// currently in progress, a null-padding block, or a new header.
Parse.prototype._process = function (c) {
  assert(c && c.length === 512, "block size should be 512")

  // one of three cases.
  // 1. A new header
  // 2. A part of a file/extended header
  // 3. One of two or more EOF null blocks

  if (this._entry) {
    var entry = this._entry
    // If the entry was aborted, just account for the bytes without
    // buffering them, so we still know when its data region ends.
    if(!entry._abort) entry.write(c)
    else {
      entry._remaining -= c.length
      if(entry._remaining < 0) entry._remaining = 0
    }
    if (entry._remaining === 0) {
      entry.end()
      this._entry = null
    }
  } else {
    // either zeroes or a header
    var zero = true
    for (var i = 0; i < 512 && zero; i ++) {
      zero = c[i] === 0
    }

    // eof is *at least* 2 blocks of nulls, and then the end of the
    // file.  you can put blocks of nulls between entries anywhere,
    // so appending one tarball to another is technically valid.
    // ending without the eof null blocks is not allowed, however.
    if (zero) {
      // two consecutive null blocks mark a valid end of archive
      if (this._eofStarted)
        this._ended = true
      this._eofStarted = true
    } else {
      this._eofStarted = false
      this._startEntry(c)
    }
  }

  // position is a byte offset into the tarball, used for error reports.
  this.position += 512
}
// take a header chunk, start the right kind of entry.
//
// Parses the 512-byte header block, picks an entry class based on the
// tar type flag (regular entries, pax/gnu metadata entries, or ignored
// types), wires up event proxying, and emits the appropriate event.
Parse.prototype._startEntry = function (c) {
  // BUGFIX: `self` is now declared up front.  Previously a second
  // alias (`var me = this`) was declared near the bottom but closed
  // over by handlers attached above it, working only because var
  // hoisting plus deferred event firing hid the ordering hazard.
  var self = this
    , header = new TarHeader(c)
    , entry
    , ev
    , EntryType
    , onend
    , meta = false

  if (null === header.size || !header.cksumValid) {
    var e = new Error("invalid tar file")
    e.header = header
    e.tar_file_offset = this.position
    e.tar_block = this.position / 512
    return this.emit("error", e)
  }

  switch (tar.types[header.type]) {
    case "File":
    case "OldFile":
    case "Link":
    case "SymbolicLink":
    case "CharacterDevice":
    case "BlockDevice":
    case "Directory":
    case "FIFO":
    case "ContiguousFile":
    case "GNUDumpDir":
      // start a file.
      // pass in any extended headers
      // These ones consumers are typically most interested in.
      EntryType = Entry
      ev = "entry"
      break

    case "GlobalExtendedHeader":
      // extended headers that apply to the rest of the tarball
      EntryType = ExtendedHeader
      onend = function () {
        self._global = self._global || {}
        Object.keys(entry.fields).forEach(function (k) {
          self._global[k] = entry.fields[k]
        })
      }
      ev = "globalExtendedHeader"
      meta = true
      break

    case "ExtendedHeader":
    case "OldExtendedHeader":
      // extended headers that apply to the next entry
      EntryType = ExtendedHeader
      onend = function () {
        self._extended = entry.fields
      }
      ev = "extendedHeader"
      meta = true
      break

    case "NextFileHasLongLinkpath":
      // set linkpath=<contents> in extended header
      EntryType = BufferEntry
      onend = function () {
        self._extended = self._extended || {}
        self._extended.linkpath = entry.body
      }
      ev = "longLinkpath"
      meta = true
      break

    case "NextFileHasLongPath":
    case "OldGnuLongPath":
      // set path=<contents> in file-extended header
      EntryType = BufferEntry
      onend = function () {
        self._extended = self._extended || {}
        self._extended.path = entry.body
      }
      ev = "longPath"
      meta = true
      break

    default:
      // all the rest we skip, but still set the _entry
      // member, so that we can skip over their data appropriately.
      // emit an event to say that this is an ignored entry type?
      EntryType = Entry
      ev = "ignoredEntry"
      break
  }

  // Metadata entries never receive the accumulated pax/gnu state;
  // real entries consume it.  (Also fixes a redundant re-declaration
  // of `global`/`extended` inside the else branch.)
  var global, extended
  if (meta) {
    global = extended = null
  } else {
    global = this._global
    extended = this._extended

    // extendedHeader only applies to one entry, so once we start
    // an entry, it's over.
    this._extended = null
  }
  entry = new EntryType(header, extended, global)
  entry.meta = meta

  // only proxy data events of normal files.
  if (!meta) {
    entry.on("data", function (c) {
      self.emit("data", c)
    })
  }

  if (onend) entry.on("end", onend)

  this._entry = entry

  // remember hardlink sources so later links can refer back to them
  if (entry.type === "Link") {
    this._hardLinks[entry.path] = entry
  }

  entry.on("pause", function () {
    self.pause()
  })

  entry.on("resume", function () {
    self.resume()
  })

  if (this.listeners("*").length) {
    this.emit("*", ev, entry)
  }

  this.emit(ev, entry)

  // Zero-byte entry. End immediately.
  if (entry.props.size === 0) {
    entry.end()
    this._entry = null
  }
}