SqlBulkCopy ColumnMapping error CSV to DataTable to SQL Server Table

c# csv-import datatable sqlbulkcopy sql-server

Question

I am trying to bulk insert a CSV file into a SQL Server database.

The process is .CSV file to DataTable to SqlBulkCopy to SQL Server.

When I run this I get this error:

The given ColumnMapping does not match up with any column in the source or destination

When I import the CSV directly into SQL Server via Management Studio it works, so I think the problem is in my DataTable conversion.
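
One way to narrow this down is to list the destination table's columns and compare them (including casing and any stray whitespace picked up from the CSV header) against the DataTable's column names. A minimal sketch, assuming System.Data.SqlClient; the helper name PrintDestinationColumns is just for illustration:

public static void PrintDestinationColumns(string connectionString, string tableName)
{
    using (var connection = new SqlConnection(connectionString))
    using (var command = new SqlCommand(
        "SELECT COLUMN_NAME FROM INFORMATION_SCHEMA.COLUMNS WHERE TABLE_NAME = @table",
        connection))
    {
        command.Parameters.AddWithValue("@table", tableName);
        connection.Open();
        using (var reader = command.ExecuteReader())
        {
            // Print each destination column name so it can be compared
            // with the names produced by the CSV-to-DataTable conversion.
            while (reader.Read())
            {
                Console.WriteLine(reader.GetString(0));
            }
        }
    }
}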

Here is my code to go from .CSV to DataTable:

public DataTable CsvFileToDataTable(string filePath)
{
    var csvData = new DataTable("Table1");
    using (var csvReader = new TextFieldParser(filePath))
    {
        csvReader.SetDelimiters(new string[] { "," });
        csvReader.HasFieldsEnclosedInQuotes = true;
        var readFields = csvReader.ReadFields();

        foreach (var dataColumn in readFields.Select(column => new DataColumn(column) {AllowDBNull = true }))
        {
            csvData.Columns.Add(dataColumn);
        }
        while (!csvReader.EndOfData)
        {
            var data = csvReader.ReadFields();
            for (var i = 0; i < data.Length; i++)
            {
                if (data[i] == "")
                {
                    data[i] = null;
                }
            }
            csvData.Rows.Add(data);
        }
    }

    return csvData;
}

And here is the code for the bulk copy insert:

public void InsertData(DataTable table)
{
    using (var transactionScope = new TransactionScope())
    {
        using (var sqlConnection = new SqlConnection(this.ConnectionString))
        {
            sqlConnection.Open();

            using (var sqlBulkCopy = new SqlBulkCopy(sqlConnection))
            {
                sqlBulkCopy.DestinationTableName = table.TableName;

                foreach (var column in table.Columns)
                {
                    sqlBulkCopy.ColumnMappings.Add(column.ToString(), column.ToString());
                }

                sqlBulkCopy.WriteToServer(table);
            }

            transactionScope.Complete();
        }
    }
}

Does anyone have any suggestions?

Thanks

Accepted Answer

It turns out it works if I tweak each method. In the first method, the column creation becomes:

foreach (var dataColumn in readFields.Select(column => new DataColumn(column, typeof(string)) { AllowDBNull = true, DefaultValue = string.Empty }))
{
    csvData.Columns.Add(dataColumn);
}

and in the second method I changed the column mappings to use ordinal indexes rather than column names:

for (var count = 0; count < table.Columns.Count; count++)
{
    sqlBulkCopy.ColumnMappings.Add(count, count);
}
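
For context, with that change the bulk copy method from the question looks roughly like this (same transaction and connection structure as before, only the name-based mappings replaced by ordinal ones):

public void InsertData(DataTable table)
{
    using (var transactionScope = new TransactionScope())
    {
        using (var sqlConnection = new SqlConnection(this.ConnectionString))
        {
            sqlConnection.Open();

            using (var sqlBulkCopy = new SqlBulkCopy(sqlConnection))
            {
                sqlBulkCopy.DestinationTableName = table.TableName;

                // Map source to destination by ordinal position, so the copy no
                // longer depends on the CSV header names matching exactly.
                for (var count = 0; count < table.Columns.Count; count++)
                {
                    sqlBulkCopy.ColumnMappings.Add(count, count);
                }

                sqlBulkCopy.WriteToServer(table);
            }

            transactionScope.Complete();
        }
    }
}

Note that mapping by ordinal assumes the DataTable columns are in the same order as the destination table's columns.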

Popular Answer

I was able to use these code samples to build a working solution that reads a CSV, checks whether it contains any data, and if it does, clears out the destination table and imports all of the CSV data:

private static void ProcessFile(string FilePath, string TableName)
{
    var dt = GetDataTable(FilePath, TableName);
    if (dt == null)
    {
        return;
    }
    if (dt.Rows.Count == 0)
    {
        AuditLog.AddInfo("No rows imported after reading file " + FilePath);
        return;
    }
    ClearData(TableName);
    InsertData(dt);
}
private static DataTable GetDataTable(string FilePath, string TableName)
{
    var dt = new DataTable(TableName);
    using (var csvReader = new TextFieldParser(FilePath))
    {
        csvReader.SetDelimiters(new string[] { "," });
        csvReader.HasFieldsEnclosedInQuotes = true;
        var readFields = csvReader.ReadFields();
        if (readFields == null)
        {
            AuditLog.AddInfo("Could not read header fields for file " + FilePath);
            return null;
        }
        foreach (var dataColumn in readFields.Select(column => new DataColumn(column, typeof(string)) { AllowDBNull = true, DefaultValue = string.Empty }))
        {
            dt.Columns.Add(dataColumn);
        }
        while (!csvReader.EndOfData)
        {
            var data = csvReader.ReadFields();
            if (data == null)
            {
                AuditLog.AddInfo(string.Format("Could not read fields on line {0} for file {1}", csvReader.LineNumber, FilePath));
                continue;
            }
            var dr = dt.NewRow();
            for (var i = 0; i < data.Length; i++)
            {
                if (!string.IsNullOrEmpty(data[i]))
                {
                    dr[i] = data[i];
                }
            }
            dt.Rows.Add(dr);
        }
    }
    return dt;
}
private static void ClearData(string TableName)
{
    SqlHelper.ExecuteNonQuery(ConfigurationUtil.ConnectionString, CommandType.Text, "TRUNCATE TABLE " + TableName);
}
private static void InsertData(DataTable table)
{
    using (var sqlBulkCopy = new SqlBulkCopy(ConfigurationUtil.ConnectionString))
    {
        sqlBulkCopy.DestinationTableName = table.TableName;
        for (var count = 0; count < table.Columns.Count; count++)
        {
            sqlBulkCopy.ColumnMappings.Add(count, count);
        }
        sqlBulkCopy.WriteToServer(table);
    }
}
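
A call site might look like the following; the file path and table name are placeholders, and the destination table is assumed to already exist with its columns in the same order as the CSV header:

ProcessFile(@"C:\imports\Customers.csv", "dbo.Customers");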


Licensed under: CC-BY-SA with attribution