Skip to content

Commit

Permalink
ADDED: option fields to configure whether it should reject or not if …
Browse files Browse the repository at this point in the history
…the `partsLimit`, `filesLimit`, `fieldsLimit`, or `limit` event is emitted
  • Loading branch information
Mr-M1M3 committed Jul 5, 2022
1 parent 80c581f commit e74f779
Showing 1 changed file with 56 additions and 15 deletions.
71 changes: 56 additions & 15 deletions lib/parter.js
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,9 @@ import {
IncomingMessage
} from "http";

import {Buffer} from "buffer";
import {
Buffer
} from "buffer";

import busboy from "busboy";

Expand Down Expand Up @@ -65,8 +67,11 @@ export default async function parter(req, options) {

bb.on("error", busboyError); // when any error happens
bb.on("file", appendFile); // when a file is received, calls appendFile
bb.on("field", appendField); // calls appendField when a field is received
bb.on("finish", finishParse);
bb.on("field", appendField); // calls appendField when a field is received
bb.on("fieldsLimit", fieldsLimit); // if more than configured number field is reached
bb.on("filesLimit", filesLimit); // if more than configured number of file is reached
bb.on("partsLimit", parseLimit);// if more than configured number of parts received
bb.on("finish", finishParse); // after finishing parsing

function appendFile(name, stream, info) {
// TODO: Process received file
Expand All @@ -76,26 +81,28 @@ export default async function parter(req, options) {

stream.on("error", streamError); // if any error happens, calls streamError function
stream.on("data", appendBuffer); // calls appendBuffer when a new chunk is received
stream.on("limit", fileLimit);
stream.on("limit", fileLimit); // if file is larger than configured size
stream.on("end", done); // when parsing is done

function appendBuffer(chunk){ // just pushes new chunk to `bufferArray`
function appendBuffer(part) { // stash each incoming chunk until the stream ends
    bufferArray.push(part);
}

function fileLimit(){
if(options.errorOnLargerFileSize){
rej('file is larger than configured size');
}else{
function fileLimit() {
    // Fired by busboy when this file's stream exceeds the configured
    // `fileSize` limit (the stream is truncated at that point).
    if (options.errorOnLargerFileSize) {
        // Reject with a real Error (carries a stack trace and satisfies
        // `instanceof Error` checks) rather than a bare object literal.
        rej(new Error("file is larger than configured size"));
    }
    // otherwise: keep the truncated file silently
}

function done(){
function done() {
let bufferLength = 0; // initially, total buffer length is assumed 0

for(let _buffer of bufferArray){ // determine the whole length of total buffers
for (let _buffer of bufferArray) { // determine the whole length of total buffers
bufferLength += _buffer.length;
}

Expand Down Expand Up @@ -128,13 +135,47 @@ export default async function parter(req, options) {
}
}

function appendField(name, value, info){
function appendField(name, value, info) {
    // record a plain (non-file) form field under its name;
    // a duplicate field name overwrites the earlier entry
    data.fields[name] = { value, info };
}

function fieldsLimit() {
    // Fired by busboy once `options.limits.fields` non-file fields have
    // been seen; further "field" events are suppressed by busboy itself.
    if (options.errorOnMoreFields) {
        // Reject with a real Error instead of a bare object literal so
        // callers get a stack trace and can use `instanceof Error`.
        rej(new Error("more than configured `options.limits.fields` received"));
    }
    // otherwise: silently drop the extra fields
}

function filesLimit() {
    // Fired by busboy once `options.limits.files` file parts have been
    // seen; further "file" events are suppressed by busboy itself.
    if (options.errorOnMoreFiles) {
        // Reject with a real Error instead of a bare object literal so
        // callers get a stack trace and can use `instanceof Error`.
        rej(new Error("more than configured `options.limits.files` received"));
    }
    // otherwise: silently drop the extra files
}

function parseLimit() {
    // Fired by busboy (as "partsLimit") once `options.limits.parts`
    // total parts — files plus fields — have been seen.
    if (options.errorOnMoreParts) {
        // Reject with a real Error instead of a bare object literal so
        // callers get a stack trace and can use `instanceof Error`.
        rej(new Error("more than configured `options.limits.parts` received"));
    }
    // otherwise: silently ignore the extra parts
}

function finishParse(){
function finishParse() {
    // Busboy is done: detach every listener first, then resolve the
    // parse promise with the accumulated fields/files.
    cleanUp();
    acc(data);
}

Expand All @@ -148,7 +189,7 @@ export default async function parter(req, options) {
})
}

function cleanUp(){
function cleanUp() {
bb.removeListener("file", appendFile);
bb.removeListener("finish", finishParse);
bb.removeListener("error", busboyError);
Expand Down

0 comments on commit e74f779

Please sign in to comment.