I have a CSV file which contains the following:
ProductName,EmployeeID,EmployeeName,ContactNo,Address
iPad,1233,Tom,89897898,34 Pitt st
iPad,1573,Jack,8978 9689,50 George st
iPad,1893,Peter,8878 8989,32 Martin st
The following code will insert into one table. What I am trying to achieve is to insert into 2 tables:
Product table (parent table): ProductId (PK), ProductName. Employee table (child table): EmployeeId (PK), ProductId (FK), EmployeeName, ContactNo, Address.
So I need to basically insert the record first into Product table and then into Employee table from the CSV file.
Controller.cs
[HttpPost]
public ActionResult Index(HttpPostedFileBase FileUpload)
{
    // Guard clause: nothing to do unless the request actually carried a file.
    if (FileUpload == null || FileUpload.ContentLength == 0)
    {
        ViewData["Feedback"] = "Please select a file";
        return View("Index", ViewData["Feedback"]);
    }

    // Work out where to save the upload.
    string fileName = Path.GetFileName(FileUpload.FileName);
    string path = Path.Combine(Server.MapPath("~/App_Data/uploads"), fileName);

    try
    {
        FileUpload.SaveAs(path);

        // Parse the CSV first — the product name lives inside the file,
        // so we cannot insert the parent row before reading it.
        using (DataTable dt = ProcessCSV(path))
        {
            // ProductName is the first column of the first data row.
            string productName = dt.Rows.Count > 0 ? dt.Rows[0][0].ToString() : null;
            if (string.IsNullOrEmpty(productName))
            {
                ViewData["Feedback"] = "Please select a file";
                return View("Index", ViewData["Feedback"]);
            }

            // Insert the parent (Product) row first so the Employee rows
            // can reference its key.
            Guid productId = Guid.NewGuid();
            using (SqlConnection conn = new SqlConnection(connString))
            // The placeholder names in the SQL text must match the parameter
            // names added below (the original mixed @ReferralListID/@ProductId).
            using (SqlCommand cmd = new SqlCommand(
                "INSERT INTO Product (ProductId, ProductName) VALUES (@ProductId, @ProductName)", conn))
            {
                cmd.Parameters.AddWithValue("@ProductId", productId);
                cmd.Parameters.AddWithValue("@ProductName", productName);
                conn.Open();
                cmd.ExecuteNonQuery();
            }

            // Now bulk-copy the child (Employee) rows.
            ViewData["Feedback"] = ProcessBulkCopy(dt);
        }
    }
    catch (Exception ex)
    {
        // Surface any upload/parse/database error to the view.
        ViewData["Feedback"] = ex.Message;
    }

    return View("Index", ViewData["Feedback"]);
}
/// <summary>
/// Process the file supplied and process the CSV to a dynamic datatable
/// </summary>
/// <param name="fileName">String</param>
/// <returns>DataTable</returns>
/// <summary>
/// Parse the CSV file at <paramref name="fileName"/> into a DataTable.
/// Columns are created dynamically from the header row; commas inside
/// double-quoted values are NOT treated as delimiters.
/// </summary>
/// <param name="fileName">Full path of the CSV file to read.</param>
/// <returns>
/// A DataTable with one row per CSV data line. Quotes are kept in the cell
/// values exactly as they appear in the file. Empty file => empty table.
/// </returns>
private static DataTable ProcessCSV(string fileName)
{
    DataTable dt = new DataTable();
    // Split on commas followed by an even number of quotes, i.e. commas
    // that are outside any quoted section.
    Regex splitter = new Regex(",(?=(?:[^\"]*\"[^\"]*\")*(?![^\"]*\"))");

    // 'using' guarantees the reader is closed even if parsing throws
    // (the original only disposed on the success path).
    using (StreamReader sr = new StreamReader(fileName))
    {
        // The header line defines how many columns the table has.
        string header = sr.ReadLine();
        if (header == null)
        {
            // Empty file: return an empty table instead of throwing NRE.
            return dt;
        }
        Array.ForEach(splitter.Split(header), s => dt.Columns.Add(new DataColumn()));

        // Every remaining line becomes one data row.
        string line;
        while ((line = sr.ReadLine()) != null)
        {
            DataRow row = dt.NewRow();
            row.ItemArray = splitter.Split(line);
            dt.Rows.Add(row);
        }
    }
    return dt;
}
/// <summary>
/// Take the DataTable and using WriteToServer(DataTable) send it all to the database table "BulkImportDetails" in one go
/// </summary>
/// <param name="dt">DataTable</param>
/// <returns>String</returns>
/// <summary>
/// Bulk-copy every row of the supplied DataTable into the "Employee" table
/// in a single batch, and report the outcome as a user-facing string.
/// </summary>
/// <param name="dt">Rows to upload; column order must match the Employee table.</param>
/// <returns>"Upload complete" on success, otherwise the exception message.</returns>
private static String ProcessBulkCopy(DataTable dt)
{
    string feedback = string.Empty;
    string connString = ConfigurationManager.ConnectionStrings["DataBaseConnectionString"].ConnectionString;

    // Both the connection and the bulk-copy object are disposed when done.
    using (SqlConnection connection = new SqlConnection(connString))
    using (var bulkCopy = new SqlBulkCopy(connection))
    {
        connection.Open();
        bulkCopy.DestinationTableName = "Employee";
        // One batch containing every row.
        bulkCopy.BatchSize = dt.Rows.Count;
        try
        {
            bulkCopy.WriteToServer(dt);
            feedback = "Upload complete";
        }
        catch (Exception ex)
        {
            // Report the failure text back to the caller instead of throwing.
            feedback = ex.Message;
        }
    }

    return feedback;
}
View.aspx
<asp:Content ID="Content1" ContentPlaceHolderID="TitleContent" runat="server">
Home Page
</asp:Content>
<asp:Content ID="Content2" ContentPlaceHolderID="MainContent" runat="server">
<h2>CSV Bulk Upload</h2>
<%-- Empty action/controller names post back to the current action.
     enctype="multipart/form-data" is required for file uploads, and the
     input name "FileUpload" must match the action's parameter name. --%>
<% using (Html.BeginForm("","",FormMethod.Post, new {enctype="multipart/form-data"})){ %>
<input type="file" name="FileUpload" />
<input type="submit" name="Submit" id="Submit" value="Upload" />
<% } %>
<%-- Status/error message set by the controller in ViewData. --%>
<p><%= Html.Encode(ViewData["Feedback"]) %></p>
</asp:Content>
Stored Procedure
USE [BULkDatabase]
GO
SET ANSI_NULLS ON
GO
SET QUOTED_IDENTIFIER OFF
GO
-- Upsert a Product by name, then insert one Employee row linked to it.
-- (Procedure name kept as-is, including the original spelling, so existing
-- callers keep working. @ContactNo is new but optional.)
CREATE PROCEDURE [dbo].[InsertProdutInfo]
(
    @ProductName     varchar (50),
    @EmployeeName    varchar (50),
    @EmployeeAddress varchar (50),
    @ContactNo       varchar (50) = NULL  -- optional: backward compatible
)
AS
BEGIN
    SET NOCOUNT ON;
    DECLARE @ProductId uniqueidentifier;

    -- One transaction covering both the parent and the child insert.
    -- (The original opened BEGIN TRAN twice but committed only once.)
    BEGIN TRAN;

    -- Schema and table must be bracketed separately: [dbo].[Product],
    -- not [dbo.Product] (which names a single table called "dbo.Product").
    SELECT @ProductId = [ProductId]
    FROM [dbo].[Product]
    WHERE [ProductName] = @ProductName;

    IF @ProductId IS NULL
    BEGIN
        -- No such product: create it. SCOPE_IDENTITY() only works for
        -- IDENTITY columns, never for NEWID() uniqueidentifier keys, so
        -- generate the key up front and insert it explicitly.
        SET @ProductId = NEWID();
        INSERT INTO [dbo].[Product] ([ProductId], [ProductName])
        VALUES (@ProductId, @ProductName);
    END

    -- Child row. Table name matches the "Employee" table used by the
    -- bulk-copy code; explicit column list so the VALUES order is safe.
    INSERT INTO [dbo].[Employee]
        ([EmployeeId], [ProductId], [EmployeeName], [ContactNo], [Address])
    VALUES (NEWID(), @ProductId, @EmployeeName, @ContactNo, @EmployeeAddress);

    COMMIT TRAN;
END
CSV stands for Comma Separated Values. The data fields in a CSV file are separated/delimited by a comma (‘, ‘) and the individual rows are separated by a newline (‘\n’). CSV File management in C++ is similar to text-type file management, except for a few modifications.
A basic CSV file relies on a comma-separated data format, which makes it straightforward to write to this file type from a C program. First you will want to create a file pointer, which will be used to access and write to a file.
Data Addition deals with opening an existing CSV file, taking user inputs for the data to be added to the file, and then adding this data to the CSV file. Open CSV File using File Pointer in append mode which will place a pointer to the end of the file.
The create operation is similar to creating a text file, i.e. input data from the user and write it to the CSV file using the file pointer and appropriate delimiters (',') between different columns and '\n' after the end of each row. Using getline(), the file pointer and '\n' as the delimiter, read an entire row and store it in a string variable.
first of all you should decouple the Controller from your database code, just simply create a new Class project and host all database access there, so you can have in your Controller something like:
[HttpPost]
public ActionResult UploadFile(HttpPostedFileBase FileUpload)
{
    // No file posted: send the user back to Index with an error message.
    if (FileUpload.ContentLength <= 0)
    {
        return RedirectToAction("Index", new { error = "Please upload a file..." });
    }

    // Delegate all persistence work to the data-access layer; the
    // controller only routes the result to the right view.
    var uploaded = db.UploadProductFile(FileUpload);
    return uploaded ? View("UploadSuccess") : View("UploadFail");
}
public ActionResult Index(string error)
{
...
}
This way, the controller does not really care what do you do with the uploaded file as it's not the Controller
concern to know such thing, it has the task to know that it needs to delegate that job and process the result, that's it.
Please see that the action method is called UploadFile
and not Index
. It's not good practice to post to the same action: using a separate action avoids re-submitting the form when the user refreshes the page.
I also suggest you to use ADO.NET Entity Model, there are plenty of Videos out there, in ASP.NET website as well, and it will greatly help you use the database in a simpler and clean way.
back to your question... Inside your Database class, the method UploadProductFile
should be something like, and assuming that you don't have more than 200 records to process it's better to use the memory to deal with the file rather than spend time to save and read again (for more, you should save the file and process it, like you already do):
/// <summary>
/// Read the uploaded CSV entirely from memory and insert one
/// product/employee record per row. Suitable for small files; larger
/// uploads should be saved to disk and processed from there.
/// </summary>
/// <param name="FileUpload">The posted file; its InputStream is consumed.</param>
/// <returns>true only if every single row was inserted successfully.</returns>
private bool UploadProductFile(HttpPostedFileBase FileUpload)
{
    System.Data.DataTable dt;
    // 'using' releases the reader and the underlying stream even on error
    // (the original never disposed it).
    using (StreamReader reader = new StreamReader(FileUpload.InputStream))
    {
        // get a DataTable representing the uploaded CSV content
        dt = ProcessCSV(reader.ReadToEnd());
    }

    bool success = true;
    foreach (System.Data.DataRow row in dt.Rows)
    {
        // BUG FIX: the original overwrote 'success' on every iteration, so
        // an early failure was masked by a later success. Latch failures.
        if (!db.InsertProdutInfo(row))
        {
            success = false;
        }
    }
    return success;
}
the method InsertProdutInfo
would fire a store procedure that would be something like:
-- Upsert pattern: touch the product row if it exists, otherwise create it,
-- then insert the employee row referencing the product's key.
declare @product_key int
begin tran
-- The SET [name] = @product_name is a deliberate no-op on the name itself;
-- its purpose is to refresh [last_update] when the product already exists.
update [tbl_products]
set [name] = @product_name, [last_update] = getdate()
where [name] = @product_name;
-- get product id
select @product_key = [id]
from [tbl_products]
where [name] = @product_name;
-- NOTE(review): @@rowcount reflects the SELECT immediately above (each
-- statement resets it), so this tests whether the lookup found a row.
if @@rowcount = 0
begin
-- there's no such product, let's create it
insert into [tbl_products] (name, last_update)
values (@product_name, getdate());
-- SCOPE_IDENTITY() works here because [id] is assumed to be an int
-- IDENTITY column -- TODO confirm against the tbl_products schema.
select @product_key = SCOPE_IDENTITY()
end
-- now that we know we have added the product and have the id, let's add the rest
insert into [tbl_Employees] (id, product_id, name, contact, address)
values (@employee_id, @product_key, @employee_name,
@employee_contact, @employee_address);
commit tran
this way you will have everything you need.
If you found this helpful, you can donate to us via PayPal or buy me a coffee so we can maintain and grow. Thank you!
Donate us with: