[Salesforce] Parsing large CSV file in LWC

I have implemented the following code to parse a CSV file, convert it to a JSON array, and send the JSON result to an Apex controller, which invokes a batch class to perform the DML operations for the OpportunityLineItem object.
The code works fine up to a maximum of 4000 rows (the files have 22 columns with values). When there are 5000 records, the process throws an error and stops (it never calls the Apex server).
Why does it stop when there are 5000 records? Is there any limit on parsing CSV records in LWC?

Code:

if (!this.csvFile) {
    console.log("file not found");
    return;
}
this.showprogressbar = true;
let reader = new FileReader();
let ctx = this; // to control 'this' property in an event handler
reader.readAsText(this.csvFile, "Shift_JIS");
reader.onload = function (evt) {
    console.log('reader:'+evt.target.result);
    let payload = ctx.CSV2JSON(evt.target.result, ctx.CSVToArray);
    let json = null;
    let error = null;
    console.log("payload:" + payload);
    setJSON({
        payload: payload,
        crud: crud,
        csvFile:ctx.csvFile
    })
    .then((result) => {
        json = result;
        var err = json.includes("Error");
        console.log('err====='+err);
        if(err) {
            console.log('json==###=='+json);
            ctx.error = json;
            console.log("error:"+ctx.error);
            // alert('error');

            ctx.showloader=false;
            ctx.hasError=true;
        }
        else {
            ctx.jobinfo(json);
            console.log("apex call setJSON() ===> success: " + json);
            //ctx.error = undefined;
        }
    })
    .catch((err) => {
        ctx.error = err;
        console.log("error:" + err.errorCode + ', message ' + err.message);
        // alert('error');

        ctx.showloader = false;
        ctx.hasError = true;

        if (err && err.message) {
            json = JSON.stringify({ no: 1, status: 'Error', msg: err.message });
        } else {
            json = JSON.stringify({ no: 1, status: 'Error', msg: 'Unknown error' });
        }
    });
};

reader.onerror = function (evt) {
    /*ctx.mycolumns = [
        { label: "no", fieldName: "no", type: "number", initialWidth: 50 },
        { label: "status", fieldName: "status", type: "text", initialWidth: 100 },
        { label: "message", fieldName: "msg", type: "text" }
    ];
    ctx.mydata = [{ no: "1", status: "Error", msg: "error reading file" }];
    */
    //$A.util.toggleClass(spinner, "slds-hide"); // hide spinner
};

//  ctx.showloader=false;
console.log("mydata:===" + ctx.mydata);
alert('onerror');

}

CSV2JSON(csv, csv2array) {
    let array = csv2array(csv);
    //console.log("csv:::"+csv);
    //console.log("csv2array:"+csv2array);
    let objArray = [];
    //console.log("objArray:"+objArray);
    var headervar = oppheader;//'Name,BillingCity,Type,Industry';
    console.log('headervar:::'+headervar);
    let headerArray = headervar.split(',');
    for (let i = 1; i < array.length; i++) {
        objArray[i - 1] = {};
        /*for (let k = 0; k < array[0].length && k < array[i].length; k++) {
            let key = array[0][k];
            if(key === 'DW予定日')

            elseif(key === 'DW予定日')

            elseif(key === 'DW予定日')

            console.log("key:"+key);
            this.hearder=key;
            objArray[i - 1][key] = array[i][k];
        }*/
        for (let k = 0; k < headerArray.length; k++) {
            let key = headerArray[k];
            console.log("key====:"+key);
            this.header = key;
            objArray[i - 1][key] = array[i][k];
        }
    }
    objArray.pop(); // drops the last entry (the empty row produced by the file's trailing line break)
    //console.log("objArray:==="+objArray.length);
    this.rowCount = objArray.length;
    //console.log("rowCount+++++++" + this.rowCount);
    let json = JSON.stringify(objArray);
    //console.log("json:==="+json.length);
    let str = json.replace(/},/g, "},\r\n");
    //console.log("str:======="+str);
    return str;
}
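
As a point of reference, the size of the string this method returns can be logged before it is handed to Apex; Blob is a standard browser API that is available in LWC, and csvText below simply stands for the FileReader result:

// Rough size of the serialized payload, in UTF-8 bytes.
const payload = this.CSV2JSON(csvText, this.CSVToArray);
console.log('payload size in bytes: ' + new Blob([payload]).size);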

CSVToArray(strData, strDelimiter) {
    console.log('CSVToArray');
    // Check to see if the delimiter is defined. If not,
    // then default to comma.
    //console.log('strData:'+strData);
    //console.log("strDelimiter::" + strDelimiter);
    strDelimiter = strDelimiter || ",";
    //console.log("strDelimiter:" + strDelimiter);
    // Create a regular expression to parse the CSV values.
    var objPattern = new RegExp(
        // Delimiters.
        "(\\" +
            strDelimiter +
            "|\\r?\\n|\\r|^)" +
            // Quoted fields.
            '(?:"([^"]*(?:""[^"]*)*)"|' +
            // Standard fields.
            '([^"\\' +
            strDelimiter +
            "\\r\\n]*))",
        "gi"
    );
    // Create an array to hold our data. Give the array
    // a default empty first row.
    // console.log("objPattern:" + objPattern);
    var arrData = [[]];
    // Create an array to hold our individual pattern
    // matching groups.
    // console.log("arrData:" + arrData);
    var arrMatches = null;
    // Keep looping over the regular expression matches
    // until we can no longer find a match.
    while ((arrMatches = objPattern.exec(strData))) {
        // Get the delimiter that was found.
        var strMatchedDelimiter = arrMatches[1];
        // Check to see if the given delimiter has a length
        // (is not the start of string) and if it matches
        // field delimiter. If it does not, then we know
        // that this delimiter is a row delimiter.
        if (strMatchedDelimiter.length && strMatchedDelimiter != strDelimiter) {
            // Since we have reached a new row of data,
            // add an empty row to our data array.
            arrData.push([]);
        }
        // Now that we have our delimiter out of the way,
        // let's check to see which kind of value we
        // captured (quoted or unquoted).
        var strMatchedValue;
        if (arrMatches[2]) {
            // We found a quoted value. When we capture
            // this value, unescape any double quotes.
            strMatchedValue = arrMatches[2].replace(new RegExp('""', "g"), '"');
        } else {
            // We found a non-quoted value.
            strMatchedValue = arrMatches[3];
        }
        // Now that we have our value string, let's add
        // it to the data array.
        arrData[arrData.length - 1].push(strMatchedValue);
    }
    // Return the parsed data.
    return arrData;
}
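
For reference, on a minimal input the two methods above behave as follows (illustrative values only; this assumes oppheader holds the same comma-separated header names as the file, e.g. 'Name,Quantity'):

// A two-row CSV with a trailing line break.
const sample = 'Name,Quantity\r\nWidget,3\r\n';
const rows = this.CSVToArray(sample);
// rows => [['Name', 'Quantity'], ['Widget', '3'], ['']]
const payload = this.CSV2JSON(sample, this.CSVToArray);
// payload => '[{"Name":"Widget","Quantity":"3"}]'
// (the header row is skipped and the trailing empty row is removed by objArray.pop())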

Best Answer

There's a maximum payload size of 4 MB when calling Apex, which is why the process fails before your Apex controller is ever invoked. You will need to split your file into parts in order to create/update/etc. all of the records.
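
A minimal sketch of one way to do that, in case it helps: it splits the parsed records into payloads that stay below the limit and calls the same setJSON method once per chunk. sendInChunks and MAX_PAYLOAD_BYTES are hypothetical names, the 3 MB threshold is just a conservative margin under the 4 MB cap, and it assumes the Apex/batch side can be invoked more than once per file:

async sendInChunks(records, crud) {
    const MAX_PAYLOAD_BYTES = 3 * 1024 * 1024; // stay well under the 4 MB cap
    let chunk = [];
    let chunkBytes = 2; // the enclosing "[]" of the JSON array
    for (const record of records) {
        // Blob.size counts UTF-8 bytes, which matters for multibyte (Shift_JIS-sourced) values.
        const recordBytes = new Blob([JSON.stringify(record)]).size + 1; // +1 for the ","
        if (chunk.length > 0 && chunkBytes + recordBytes > MAX_PAYLOAD_BYTES) {
            await setJSON({ payload: JSON.stringify(chunk), crud: crud, csvFile: this.csvFile });
            chunk = [];
            chunkBytes = 2;
        }
        chunk.push(record);
        chunkBytes += recordBytes;
    }
    if (chunk.length > 0) {
        await setJSON({ payload: JSON.stringify(chunk), crud: crud, csvFile: this.csvFile });
    }
}

Here records is the object array that CSV2JSON builds (objArray) rather than the already stringified result, and each chunk arrives as a separate Apex call, so the batch class needs to tolerate being kicked off more than once per file.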
