Configuring Node.js.
This commit is contained in:
26
node_modules/jszip/lib/stream/ConvertWorker.js
generated
vendored
Normal file
26
node_modules/jszip/lib/stream/ConvertWorker.js
generated
vendored
Normal file
@@ -0,0 +1,26 @@
|
||||
'use strict';
|
||||
|
||||
var GenericWorker = require('./GenericWorker');
|
||||
var utils = require('../utils');
|
||||
|
||||
/**
 * A worker which converts each chunk flowing through it to a given type.
 * @constructor
 * @param {String} destType the destination type.
 */
function ConvertWorker(destType) {
    // remember the target type before wiring the generic machinery
    this.destType = destType;
    GenericWorker.call(this, "ConvertWorker to " + destType);
}
utils.inherits(ConvertWorker, GenericWorker);
|
||||
|
||||
/**
 * Convert the chunk's data and pass it downstream.
 * @see GenericWorker.processChunk
 */
ConvertWorker.prototype.processChunk = function (chunk) {
    var converted = utils.transformTo(this.destType, chunk.data);
    this.push({
        data : converted,
        meta : chunk.meta
    });
};
module.exports = ConvertWorker;
|
||||
24
node_modules/jszip/lib/stream/Crc32Probe.js
generated
vendored
Normal file
24
node_modules/jszip/lib/stream/Crc32Probe.js
generated
vendored
Normal file
@@ -0,0 +1,24 @@
|
||||
'use strict';
|
||||
|
||||
var GenericWorker = require('./GenericWorker');
|
||||
var crc32 = require('../crc32');
|
||||
var utils = require('../utils');
|
||||
|
||||
/**
 * A worker computing the crc32 of the data flowing through it.
 * The running checksum is exposed on the chain's streamInfo as "crc32".
 * @constructor
 */
function Crc32Probe() {
    GenericWorker.call(this, "Crc32Probe");
    // start the shared checksum at zero
    this.withStreamInfo("crc32", 0);
}
utils.inherits(Crc32Probe, GenericWorker);
|
||||
|
||||
/**
 * Fold the chunk into the running crc32, then forward it untouched.
 * @see GenericWorker.processChunk
 */
Crc32Probe.prototype.processChunk = function (chunk) {
    var previous = this.streamInfo.crc32 || 0;
    this.streamInfo.crc32 = crc32(chunk.data, previous);
    this.push(chunk);
};
module.exports = Crc32Probe;
|
||||
29
node_modules/jszip/lib/stream/DataLengthProbe.js
generated
vendored
Normal file
29
node_modules/jszip/lib/stream/DataLengthProbe.js
generated
vendored
Normal file
@@ -0,0 +1,29 @@
|
||||
'use strict';
|
||||
|
||||
var utils = require('../utils');
|
||||
var GenericWorker = require('./GenericWorker');
|
||||
|
||||
/**
 * A worker measuring the total length of the data flowing through it.
 * The running total is exposed on the chain's streamInfo under `propName`.
 * @constructor
 * @param {String} propName the name used to expose the length
 */
function DataLengthProbe(propName) {
    GenericWorker.call(this, "DataLengthProbe for " + propName);
    this.propName = propName;
    // start the shared counter at zero
    this.withStreamInfo(propName, 0);
}
utils.inherits(DataLengthProbe, GenericWorker);
|
||||
|
||||
/**
 * Add the chunk's size to the running total, then forward it untouched.
 * @see GenericWorker.processChunk
 */
DataLengthProbe.prototype.processChunk = function (chunk) {
    if (chunk) {
        var soFar = this.streamInfo[this.propName] || 0;
        this.streamInfo[this.propName] = soFar + chunk.data.length;
    }
    GenericWorker.prototype.processChunk.call(this, chunk);
};
module.exports = DataLengthProbe;
|
||||
|
||||
116
node_modules/jszip/lib/stream/DataWorker.js
generated
vendored
Normal file
116
node_modules/jszip/lib/stream/DataWorker.js
generated
vendored
Normal file
@@ -0,0 +1,116 @@
|
||||
'use strict';
|
||||
|
||||
var utils = require('../utils');
|
||||
var GenericWorker = require('./GenericWorker');
|
||||
|
||||
// the size of the generated chunks
// TODO expose this as a public variable
var DEFAULT_BLOCK_SIZE = 16 * 1024;

/**
 * A worker waiting for a promise of content, then emitting that content
 * as fixed-size chunks.
 * @constructor
 * @param {Promise} dataP the promise of the data to split
 */
function DataWorker(dataP) {
    GenericWorker.call(this, "DataWorker");

    // nothing to emit until the promise resolves
    this.dataIsReady = false;
    this.index = 0;
    this.max = 0;
    this.data = null;
    this.type = "";

    this._tickScheduled = false;

    var self = this;
    dataP.then(function (data) {
        // remember the content and start emitting, unless we are paused
        self.dataIsReady = true;
        self.data = data;
        self.max = data && data.length || 0;
        self.type = utils.getTypeOf(data);
        if (!self.isPaused) {
            self._tickAndRepeat();
        }
    }, function (e) {
        // a rejected promise ends the whole chain with an error
        self.error(e);
    });
}

utils.inherits(DataWorker, GenericWorker);
|
||||
|
||||
/**
 * Release resources when the worker ends.
 * @see GenericWorker.cleanUp
 */
DataWorker.prototype.cleanUp = function () {
    GenericWorker.prototype.cleanUp.call(this);
    // drop the (potentially large) input so it can be garbage collected
    this.data = null;
};
|
||||
|
||||
/**
 * Resume the flow of chunks, scheduling a tick if one is not pending.
 * @see GenericWorker.resume
 */
DataWorker.prototype.resume = function () {
    if (!GenericWorker.prototype.resume.call(this)) {
        return false;
    }

    // if the data is not ready yet, the promise callback will start the
    // ticking instead
    var canTick = this.dataIsReady && !this._tickScheduled;
    if (canTick) {
        this._tickScheduled = true;
        utils.delay(this._tickAndRepeat, [], this);
    }
    return true;
};
|
||||
|
||||
/**
 * Trigger a tick and schedule another call to this function.
 */
DataWorker.prototype._tickAndRepeat = function () {
    this._tickScheduled = false;
    if (this.isPaused || this.isFinished) {
        return;
    }
    this._tick();
    // keep going as long as the end of the data has not been reached
    if (!this.isFinished) {
        utils.delay(this._tickAndRepeat, [], this);
        this._tickScheduled = true;
    }
};
|
||||
|
||||
/**
 * Read and push one chunk of at most DEFAULT_BLOCK_SIZE elements.
 * @return {Boolean|Object} false when nothing was done, otherwise the
 * result of end() / push().
 */
DataWorker.prototype._tick = function () {
    if (this.isPaused || this.isFinished) {
        return false;
    }

    if (this.index >= this.max) {
        // every element has been sent: signal the end of the stream
        return this.end();
    }

    var nextIndex = Math.min(this.max, this.index + DEFAULT_BLOCK_SIZE);
    var data = null;
    // each supported type has its own way of extracting a sub-sequence
    switch (this.type) {
        case "string":
            data = this.data.substring(this.index, nextIndex);
            break;
        case "uint8array":
            data = this.data.subarray(this.index, nextIndex);
            break;
        case "array":
        case "nodebuffer":
            data = this.data.slice(this.index, nextIndex);
            break;
    }
    this.index = nextIndex;
    return this.push({
        data : data,
        meta : {
            percent : this.max ? this.index / this.max * 100 : 0
        }
    });
};

module.exports = DataWorker;
|
||||
263
node_modules/jszip/lib/stream/GenericWorker.js
generated
vendored
Normal file
263
node_modules/jszip/lib/stream/GenericWorker.js
generated
vendored
Normal file
@@ -0,0 +1,263 @@
|
||||
'use strict';
|
||||
|
||||
/**
 * A worker that does nothing but passing chunks to the next one. This is like
 * a nodejs stream but with some differences. On the good side :
 * - it works on IE 6-9 without any issue / polyfill
 * - it weights less than the full dependencies bundled with browserify
 * - it forwards errors (no need to declare an error handler EVERYWHERE)
 *
 * A chunk is an object with 2 attributes : `meta` and `data`. The former is an
 * object containing anything (`percent` for example), see each worker for more
 * details. The latter is the real data (String, Uint8Array, etc).
 *
 * @constructor
 * @param {String} name the name of the stream (mainly used for debugging purposes)
 */
function GenericWorker(name) {
    // human readable name, mainly used by toString() for debugging
    this.name = name || "default";
    // metadata shared by every worker of the chain
    this.streamInfo = {};
    // an error raised while this worker was paused, replayed on resume()
    this.generatedError = null;
    // this worker's own metadata, merged into streamInfo when chained
    this.extraStreamInfo = {};
    // workers start paused; resume() must be called to get data flowing
    this.isPaused = true;
    // once finished, a worker ignores any further call
    this.isFinished = false;
    // once locked, the structure of the chain cannot change anymore
    this.isLocked = false;
    // registered callbacks, per event name
    this._listeners = {
        'data': [],
        'end': [],
        'error': []
    };
    // the upstream worker, if any
    this.previous = null;
}

GenericWorker.prototype = {
    /**
     * Push a chunk to the next workers.
     * @param {Object} chunk the chunk to push
     */
    push : function (chunk) {
        this.emit("data", chunk);
    },
    /**
     * End the stream.
     * @return {Boolean} true if this call ended the worker, false otherwise.
     */
    end : function () {
        if (this.isFinished) {
            return false;
        }

        this.flush();
        try {
            this.emit("end");
            this.cleanUp();
            this.isFinished = true;
        } catch (e) {
            // an 'end' listener blew up: report it downstream instead
            this.emit("error", e);
        }
        return true;
    },
    /**
     * End the stream with an error.
     * @param {Error} e the error which caused the premature end.
     * @return {Boolean} true if this call ended the worker with an error, false otherwise.
     */
    error : function (e) {
        if (this.isFinished) {
            return false;
        }

        if (this.isPaused) {
            // keep the error around: resume() will replay it
            this.generatedError = e;
            return true;
        }

        this.isFinished = true;
        this.emit("error", e);

        // if the chain exploded in the middle, the error event only goes
        // downstream; the upstream workers must also be notified.
        if (this.previous) {
            this.previous.error(e);
        }

        this.cleanUp();
        return true;
    },
    /**
     * Add a callback on an event.
     * @param {String} name the name of the event (data, end, error)
     * @param {Function} listener the function to call when the event is triggered
     * @return {GenericWorker} the current object for chainability
     */
    on : function (name, listener) {
        this._listeners[name].push(listener);
        return this;
    },
    /**
     * Clean any references when a worker is ending.
     */
    cleanUp : function () {
        this.streamInfo = null;
        this.generatedError = null;
        this.extraStreamInfo = null;
        this._listeners = [];
    },
    /**
     * Trigger an event. This will call registered callback with the provided arg.
     * @param {String} name the name of the event (data, end, error)
     * @param {Object} arg the argument to call the callback with.
     */
    emit : function (name, arg) {
        // re-read the listeners list on every step: a callback may clean up
        // the worker (and replace _listeners) while we iterate
        if (this._listeners[name]) {
            for (var i = 0; i < this._listeners[name].length; i++) {
                this._listeners[name][i].call(this, arg);
            }
        }
    },
    /**
     * Chain a worker with an other.
     * @param {Worker} next the worker receiving events from the current one.
     * @return {worker} the next worker for chainability
     */
    pipe : function (next) {
        return next.registerPrevious(this);
    },
    /**
     * Same as `pipe` in the other direction.
     * Using an API with `pipe(next)` is very easy.
     * Implementing the API with the point of view of the next one registering
     * a source is easier, see the ZipFileWorker.
     * @param {Worker} previous the previous worker, sending events to this one
     * @return {Worker} the current worker for chainability
     */
    registerPrevious : function (previous) {
        if (this.isLocked) {
            throw new Error("The stream '" + this + "' has already been used.");
        }

        // share the chain-wide metadata and contribute our own entries
        this.streamInfo = previous.streamInfo;
        this.mergeStreamInfo();
        this.previous = previous;

        // forward every upstream event to the matching method of this worker
        var self = this;
        previous.on('data', function (chunk) {
            self.processChunk(chunk);
        });
        previous.on('end', function () {
            self.end();
        });
        previous.on('error', function (e) {
            self.error(e);
        });
        return this;
    },
    /**
     * Pause the stream so it doesn't send events anymore.
     * @return {Boolean} true if this call paused the worker, false otherwise.
     */
    pause : function () {
        if (this.isPaused || this.isFinished) {
            return false;
        }
        this.isPaused = true;

        // pausing propagates upstream to stop the source
        if (this.previous) {
            this.previous.pause();
        }
        return true;
    },
    /**
     * Resume a paused stream.
     * @return {Boolean} true if this call resumed the worker, false otherwise.
     */
    resume : function () {
        if (!this.isPaused || this.isFinished) {
            return false;
        }
        this.isPaused = false;

        // replay an error which happened while we were paused
        var failed = false;
        if (this.generatedError) {
            this.error(this.generatedError);
            failed = true;
        }

        // resuming propagates upstream to restart the source
        if (this.previous) {
            this.previous.resume();
        }

        return !failed;
    },
    /**
     * Flush any remaining bytes as the stream is ending.
     */
    flush : function () {},
    /**
     * Process a chunk. This is usually the method overridden.
     * @param {Object} chunk the chunk to process.
     */
    processChunk : function (chunk) {
        this.push(chunk);
    },
    /**
     * Add a key/value to be added in the workers chain streamInfo once activated.
     * @param {String} key the key to use
     * @param {Object} value the associated value
     * @return {Worker} the current worker for chainability
     */
    withStreamInfo : function (key, value) {
        this.extraStreamInfo[key] = value;
        this.mergeStreamInfo();
        return this;
    },
    /**
     * Merge this worker's streamInfo into the chain's streamInfo.
     */
    mergeStreamInfo : function () {
        for (var key in this.extraStreamInfo) {
            if (this.extraStreamInfo.hasOwnProperty(key)) {
                this.streamInfo[key] = this.extraStreamInfo[key];
            }
        }
    },

    /**
     * Lock the stream to prevent further updates on the workers chain.
     * After calling this method, all calls to pipe will fail.
     */
    lock : function () {
        if (this.isLocked) {
            throw new Error("The stream '" + this + "' has already been used.");
        }
        this.isLocked = true;
        // locking propagates upstream: the whole chain becomes read-only
        if (this.previous) {
            this.previous.lock();
        }
    },

    /**
     *
     * Pretty print the workers chain.
     */
    toString : function () {
        var label = "Worker " + this.name;
        return this.previous ? this.previous + " -> " + label : label;
    }
};
|
||||
// Export the base worker so the other stream workers can inherit from it.
module.exports = GenericWorker;
|
||||
212
node_modules/jszip/lib/stream/StreamHelper.js
generated
vendored
Normal file
212
node_modules/jszip/lib/stream/StreamHelper.js
generated
vendored
Normal file
@@ -0,0 +1,212 @@
|
||||
'use strict';
|
||||
|
||||
var utils = require('../utils');
|
||||
var ConvertWorker = require('./ConvertWorker');
|
||||
var GenericWorker = require('./GenericWorker');
|
||||
var base64 = require('../base64');
|
||||
var support = require("../support");
|
||||
var external = require("../external");
|
||||
|
||||
// The nodejs stream adapter is optional: it is only loaded when the current
// platform supports nodejs streams, and a load failure leaves it null
// (toNodejsStream will then fail when called, not at module load time).
var NodejsStreamOutputAdapter = null;
if (support.nodestream) {
    try {
        NodejsStreamOutputAdapter = require('../nodejs/NodejsStreamOutputAdapter');
    } catch(e) {} // best effort: keep null when the adapter cannot be loaded
}
|
||||
|
||||
/**
 * Apply the final transformation of the data. If the user wants a Blob for
 * example, it's easier to work with an U8intArray and finally do the
 * ArrayBuffer/Blob conversion.
 * @param {String} type the name of the final type
 * @param {String|Uint8Array|Buffer} content the content to transform
 * @param {String} mimeType the mime type of the content, if applicable.
 * @return {String|Uint8Array|ArrayBuffer|Buffer|Blob} the content in the right format.
 */
function transformZipOutput(type, content, mimeType) {
    if (type === "blob") {
        // a Blob is built from an ArrayBuffer, itself built from the content
        return utils.newBlob(utils.transformTo("arraybuffer", content), mimeType);
    }
    if (type === "base64") {
        return base64.encode(content);
    }
    return utils.transformTo(type, content);
}
|
||||
|
||||
/**
 * Concatenate an array of data of the given type.
 * @param {String} type the type of the data in the given array.
 * @param {Array} dataArray the array containing the data chunks to concatenate
 * @return {String|Uint8Array|Buffer} the concatenated data
 * @throws Error if the asked type is unsupported
 */
function concat (type, dataArray) {
    var i;
    // pre-compute the final size (only needed for typed arrays, but cheap)
    var totalLength = 0;
    for (i = 0; i < dataArray.length; i++) {
        totalLength += dataArray[i].length;
    }

    switch (type) {
        case "string":
            return dataArray.join("");
        case "array":
            return Array.prototype.concat.apply([], dataArray);
        case "uint8array":
            var merged = new Uint8Array(totalLength);
            var offset = 0;
            for (i = 0; i < dataArray.length; i++) {
                merged.set(dataArray[i], offset);
                offset += dataArray[i].length;
            }
            return merged;
        case "nodebuffer":
            return Buffer.concat(dataArray);
        default:
            throw new Error("concat : unsupported type '" + type + "'");
    }
}
|
||||
|
||||
/**
 * Listen a StreamHelper, accumulate its content and concatenate it into a
 * complete block.
 * @param {StreamHelper} helper the helper to use.
 * @param {Function} updateCallback a callback called on each update. Called
 * with one arg :
 * - the metadata linked to the update received.
 * @return Promise the promise for the accumulation.
 */
function accumulate(helper, updateCallback) {
    return new external.Promise(function (resolve, reject) {
        var pieces = [];
        var chunkType = helper._internalType;
        var resultType = helper._outputType;
        var mimeType = helper._mimeType;

        helper
        .on('data', function (data, meta) {
            // collect raw chunks; report progress if the caller asked for it
            pieces.push(data);
            if (updateCallback) {
                updateCallback(meta);
            }
        })
        .on('error', function (err) {
            // release the collected chunks before failing
            pieces = [];
            reject(err);
        })
        .on('end', function () {
            try {
                var assembled = concat(chunkType, pieces);
                resolve(transformZipOutput(resultType, assembled, mimeType));
            } catch (e) {
                reject(e);
            }
            pieces = [];
        })
        .resume();
    });
}
|
||||
|
||||
/**
 * An helper to easily use workers outside of JSZip.
 * @constructor
 * @param {Worker} worker the worker to wrap
 * @param {String} outputType the type of data expected by the use
 * @param {String} mimeType the mime type of the content, if applicable.
 */
function StreamHelper(worker, outputType, mimeType) {
    // blob/arraybuffer outputs are assembled from uint8arrays,
    // base64 output is assembled from a string
    var internalType = outputType;
    if (outputType === "blob" || outputType === "arraybuffer") {
        internalType = "uint8array";
    } else if (outputType === "base64") {
        internalType = "string";
    }

    try {
        // the type used internally
        this._internalType = internalType;
        // the type used to output results
        this._outputType = outputType;
        // the mime type
        this._mimeType = mimeType;
        utils.checkSupport(internalType);
        this._worker = worker.pipe(new ConvertWorker(internalType));
        // the last workers can be rewired without issues but we need to
        // prevent any updates on previous workers.
        worker.lock();
    } catch (e) {
        // on failure, replace the chain with a worker that only emits the error
        this._worker = new GenericWorker("error");
        this._worker.error(e);
    }
}
|
||||
|
||||
StreamHelper.prototype = {
    /**
     * Listen a StreamHelper, accumulate its content and concatenate it into a
     * complete block.
     * @param {Function} updateCb the update callback.
     * @return Promise the promise for the accumulation.
     */
    accumulate : function (updateCb) {
        return accumulate(this, updateCb);
    },
    /**
     * Add a listener on an event triggered on a stream.
     * @param {String} evt the name of the event
     * @param {Function} fn the listener
     * @return {StreamHelper} the current helper.
     */
    on : function (evt, fn) {
        var self = this;

        if (evt === "data") {
            // unwrap the chunk: the listener receives (data, meta)
            this._worker.on(evt, function (chunk) {
                fn.call(self, chunk.data, chunk.meta);
            });
        } else {
            // deliver other events asynchronously
            this._worker.on(evt, function () {
                utils.delay(fn, arguments, self);
            });
        }
        return this;
    },
    /**
     * Resume the flow of chunks.
     * @return {StreamHelper} the current helper.
     */
    resume : function () {
        utils.delay(this._worker.resume, [], this._worker);
        return this;
    },
    /**
     * Pause the flow of chunks.
     * @return {StreamHelper} the current helper.
     */
    pause : function () {
        this._worker.pause();
        return this;
    },
    /**
     * Return a nodejs stream for this helper.
     * @param {Function} updateCb the update callback.
     * @return {NodejsStreamOutputAdapter} the nodejs stream.
     */
    toNodejsStream : function (updateCb) {
        utils.checkSupport("nodestream");
        if (this._outputType !== "nodebuffer") {
            // an object stream containing blob/arraybuffer/uint8array/string
            // is strange and I don't know if it would be useful.
            // If you find this comment and have a good usecase, please open a
            // bug report !
            throw new Error(this._outputType + " is not supported by this method");
        }

        return new NodejsStreamOutputAdapter(this, {
            objectMode : this._outputType !== "nodebuffer"
        }, updateCb);
    }
};

module.exports = StreamHelper;
|
||||
Reference in New Issue
Block a user