// Empty stub captured by the page scrape; `count` is unused and the body does
// nothing — presumably site boilerplate, not part of the code samples below.
// NOTE(review): likely safe to delete — TODO confirm nothing references it.
function readOnly(count){ }
Starting November 20, the site will be set to read-only. On December 4, 2023,
forum discussions will move to the Trailblazer Community.
+ Start a Discussion
venkateshyadav1243

Parse CSV file to bulk upload

Hi,
I need to insert/update more than 100,000 (1 lakh) records in Salesforce.
How can I do it? I tried the normal file upload functionality but I am getting a "regex" error.
Can anyone help me figure out how to complete this task?



Regards,
Venkatesh
@GM@GM
https://developer.salesforce.com/page/Code_Samples01


/**
 * Parses a CSV string into rows of field values.
 *
 * Quoted fields containing commas are handled via a DBLQT placeholder trick:
 * doubled double-quotes are temporarily replaced, the line is split on commas,
 * and quoted pieces are stitched back together. Fields containing embedded
 * newlines are NOT supported.
 *
 * @param contents    raw CSV text
 * @param skipHeaders when true, the first parsed row is dropped
 * @return one List<String> of field values per CSV row
 */
public static List<List<String>> parseCSV(String contents, Boolean skipHeaders) {
    List<List<String>> allFields = new List<List<String>>();

    // Replace instances where a double quote begins a field containing a comma:
    // in this case you get a double quote followed by a doubled double quote.
    // Do this for the beginning and the end of a field.
    contents = contents.replaceAll(',"""', ',"DBLQT').replaceAll('""",', 'DBLQT",');
    // Now replace all remaining doubled double quotes, so that fields with
    // commas inside can be reconstructed, assuming they begin and end with
    // a double quote.
    contents = contents.replaceAll('""', 'DBLQT');

    // Embedded newlines inside fields are not handled; split on newline to
    // get the spreadsheet rows.
    List<String> lines = new List<String>();
    try {
        lines = contents.split('\n');
    } catch (System.ListException e) {
        System.debug('Limits exceeded?' + e.getMessage());
    }

    for (String line : lines) {
        // Skip blank CSV lines (only commas). The original used `break` here,
        // which silently discarded every row after the first blank line;
        // `continue` processes the rest of the file.
        if (line.replaceAll(',', '').trim().length() == 0) continue;

        List<String> fields = line.split(',');
        List<String> cleanFields = new List<String>();
        String compositeField;
        Boolean makeCompositeField = false;
        for (String field : fields) {
            if (field.startsWith('"') && field.endsWith('"')) {
                // Fully quoted field with no embedded comma.
                cleanFields.add(field.replaceAll('DBLQT', '"'));
            } else if (field.startsWith('"')) {
                // Opening piece of a quoted field that was split on an
                // embedded comma; start accumulating.
                makeCompositeField = true;
                compositeField = field;
            } else if (field.endsWith('"')) {
                // Closing piece: reattach and emit the reconstructed field.
                compositeField += ',' + field;
                cleanFields.add(compositeField.replaceAll('DBLQT', '"'));
                makeCompositeField = false;
            } else if (makeCompositeField) {
                // Middle piece of a quoted field containing several commas.
                compositeField += ',' + field;
            } else {
                cleanFields.add(field.replaceAll('DBLQT', '"'));
            }
        }

        allFields.add(cleanFields);
    }

    // Guard against an empty result before dropping the header row
    // (the unguarded remove(0) threw on empty input).
    if (skipHeaders && !allFields.isEmpty()) allFields.remove(0);
    return allFields;
}
Gaurav Nirwal
/// <summary>
/// Contract for parsers that stream a source file as chunk-sized DataTables
/// and bulk-write each chunk to a destination table.
/// </summary>
public interface IFileParser
{
    /// <summary>Reads the source and lazily yields the data in DataTable chunks.</summary>
    IEnumerable<DataTable> GetFileData(string sourceDirectory);
    /// <summary>Writes one chunk to the destination table using the source-to-destination column map.</summary>
    /// <remarks>NOTE(review): "distinationTable" is misspelled, but renaming it would break implementers.</remarks>
    void WriteChunkData(DataTable table, string distinationTable, 
         IList<KeyValuePair<string,   string>> mapList);
}
You can try this code 
Gaurav Nirwal
/// <summary>
/// Lazily reads <paramref name="sourceFileFullName"/> line by line, treating
/// the first line as the column header, and yields the remaining rows grouped
/// into DataTable chunks of _chunkRowLimit rows each. The final chunk may
/// contain fewer rows.
/// </summary>
IEnumerable<DataTable> IFileParser.GetFileData(string sourceFileFullName)
{
    var headerSeen = false;
    string headerLine = null;
    DataTable currentChunk = null;
    var rowsInChunk = 0;
    var needNewChunk = true;

    using (var reader = new StreamReader(sourceFileFullName))
    {
        string record;
        // Read lines until the end of the file is reached.
        while ((record = reader.ReadLine()) != null)
        {
            // The very first line is the column list; remember it so each
            // chunk's DataTable can be built from it, then move on.
            if (!headerSeen)
            {
                headerLine = record;
                headerSeen = true;
                continue;
            }

            // Start a fresh table whenever the previous chunk was emitted.
            if (needNewChunk)
            {
                currentChunk = CreateEmptyDataTable(headerLine);
                needNewChunk = false;
            }

            AddRow(currentChunk, record);
            rowsInChunk++;

            // Chunk is full: hand it to the caller and reset for the next one.
            if (rowsInChunk == _chunkRowLimit)
            {
                needNewChunk = true;
                rowsInChunk = 0;
                yield return currentChunk;
                currentChunk = null;
            }
        }
    }

    // Emit whatever is left over — a partial chunk smaller than the limit.
    if (currentChunk != null)
        yield return currentChunk;
}