Insert a list using dapper.NET C# - c#

I'd like to insert a list of objects in an SQL table.
I know about this question here, but I don't understand the answer.
Here is my class :
public class MyObject
{
    public int? ID { get; set; }
    public string ObjectType { get; set; }
    public string Content { get; set; }
    public string PreviewContent { get; set; }

    /// <summary>
    /// Inserts every element of <paramref name="lst"/> into the MyObject table.
    /// Dapper executes the parameterized INSERT once per list element, binding
    /// each @-parameter to the property with the same name.
    /// </summary>
    public static void SaveList(List<MyObject> lst)
    {
        using (DBConnection connection = new DBConnection())
        {
            if (connection.Connection.State != ConnectionState.Open)
                connection.Connection.Open();
            // fix: "INSERT INTO [MyObject] VALUE()" is not valid SQL — name the
            // columns and bind them to the object's properties.
            connection.Connection.Execute(
                "INSERT INTO [MyObject] (ID, ObjectType, Content, PreviewContent) VALUES (@ID, @ObjectType, @Content, @PreviewContent)",
                lst);
        }
    }
}
I'd like to know how could I insert my list using Dapper, I don't want to iterate on the list and save them one by one, I would like to insert all of them in one request.

You can insert these just as you would INSERT a single line:
public class MyObject
{
    public int? ID { get; set; }
    public string ObjectType { get; set; }
    public string Content { get; set; }
    public string PreviewContent { get; set; }

    /// <summary>
    /// Saves the whole list in one call: Dapper runs the parameterized INSERT
    /// once per element, binding each parameter to the same-named property.
    /// </summary>
    public static void SaveList(List<MyObject> lst)
    {
        using (DBConnection connection = new DBConnection())
        {
            if (connection.Connection.State != ConnectionState.Open)
                connection.Connection.Open();
            // fix: SQL parameter placeholders use '@', not '#'
            connection.Connection.Execute(
                "INSERT INTO [MyObject] (Id, ObjectType, Content, PreviewContent) VALUES(@Id, @ObjectType, @Content, @PreviewContent)",
                lst);
        }
    }
}
Dapper will look for class members named after your SQL parameters (@Id, @ObjectType, @Content, @PreviewContent) and bind them accordingly.

You need to pass a table-valued parameter.
1. Create table type in your sql database.
2. Create DynamicParameters and add the datatable (new values) to it.
3. Execute.
SQL:
-- Table type whose columns mirror the MyObject table; passed as the
-- table-valued parameter (@MyObjects) in the C# snippet below.
CREATE TYPE [dbo].[tvMyObjects] AS TABLE(
[ID] INT,
[ObjectType] [varchar](70), /*Length on your table*/
[Content] [varchar](70), /*Length on your table*/
[PreviewContent] [varchar](70) /*Length on your table*/
)
C#:
// Bind the list as a table-valued parameter; the column order must match the
// dbo.tvMyObjects type definition. (fix: '@' parameter prefix and a verbatim
// @"..." string literal — the scrape had replaced '@' with '#'.)
var dynamicParameters = new DynamicParameters();
dynamicParameters.Add("@MyObjects", lst
    .AsTableValuedParameter("dbo.tvMyObjects", new[]
    {
        "ID",
        "ObjectType",
        "Content",
        "PreviewContent"
    }));
connection.Connection.Execute(@"
    INSERT INTO [MyObject] (Id, ObjectType, Content, PreviewContent)
    SELECT Id,
           ObjectType,
           Content,
           PreviewContent
    FROM @MyObjects", dynamicParameters);
More info: https://www.codeproject.com/Articles/835519/Passing-Table-Valued-Parameters-with-Dapper

You would just change your SQL to a valid insert statement that has parameters matching the names of the properties on your class.
INSERT INTO MyObject VALUES(@Id, @ObjectType, @Content, @PreviewContent)
Or if you need to specify the table columns (where these aren't all the columns in the table, for example):
INSERT INTO MyObject (Id, ObjectType, Content, PreviewContent)
VALUES(@Id, @ObjectType, @Content, @PreviewContent)

You can use the Dapper.Contrib extensions to simplify the code. I've found this works well for a few hundred records, but for very large inserts, I switch to SqlBulkCopy. The synchronous version is Insert instead of InsertAsync (as you'd guess). Make sure your entities are named as Dapper expects and have a primary key, Id, or, add annotations for table name and key to your entity.
using Dapper.Contrib.Extensions; // NuGet: Dapper.Contrib
/// <summary>
/// Bulk-saves the given entities with Dapper.Contrib's InsertAsync.
/// </summary>
public async Task SaveChangesAsync(IList<MyEntity> myEntityValues)
{
    // fix: the original leaked the connection if InsertAsync threw; a using
    // block guarantees Close/Dispose on every path.
    using (var conn = new SqlConnection(myconnectionString))
    {
        await conn.OpenAsync();
        await conn.InsertAsync(myEntityValues);
    }
}

In case you need new naming or combined sources:
// Project each item onto an anonymous type so the SQL parameters can be
// renamed or combined from other sources before the insert.
// fix: anonymous-type members are declared as "Name = value" — the original
// "x.ContentList = ..." is not valid C#.
await connection.ExecuteAsyncWithRetry(SqlSave,
    list.Select(x =>
        new
        {
            x.ID,
            SomeNew = NewSource.SomeNew,                 // From other source
            NameNew = x.NameOld,                         // New naming
            x.ObjectType,
            x.Content,
            ContentList = String.Join(",", x.Content)    // Data transform
        }));

Related

Deserialize JSON and then insert into table

I want to insert JSON string into table i deserialize json string to object list and then insert into table but not working
My code :
My JSON data
{"data":[{"ID":"1","Personnel_Number":"1001","Employee_Name":"Employee 1","Password":"12345","Gender":"M","Grade":"W5","Designation":"4","SBU_ID":"1","Department":"1","Category":"1","Email_ID_Official":"","Email_ID_Personal":"","Mobile_Number":"","Current_Address":"","Permanent_Address":"","Aadhaar_No":"","DOB":"","DOJ":"","DOM":"","Marital_Status":"","Profile_Pic_Path":"http://iprofileapi.dev.app6.in/UploadedFiles/6365794593150041732018329.jpg","Remarks":"","Status":"Active","Created_By":"dbo","Created_On":"20-09-2017 19:35:13","Updated_By":"3872","Updated_Date":"09-01-2019 12:48:00"},{"ID":"2","Personnel_Number":"1002","Employee_Name":"Employee 2","Password":"12345","Gender":"M","Grade":"W5","Designation":"153","SBU_ID":"6","Department":"28","Category":"1","Email_ID_Official":"","Email_ID_Personal":"","Mobile_Number":"","Current_Address":"","Permanent_Address":"","Aadhaar_No":"","DOB":"","DOJ":"","DOM":"","Marital_Status":"","Profile_Pic_Path":"","Remarks":"","Status":"Active","Created_By":"dbo","Created_On":"20-09-2017 19:35:13","Updated_By":"4957","Updated_Date":"10-02-2018 19:06:58"},{"ID":"3","Personnel_Number":"1003","Employee_Name":"Employee 3","Password":"12345","Gender":"M","Grade":"W5","Designation":"41","SBU_ID":"1","Department":"3","Category":"1","Email_ID_Official":"","Email_ID_Personal":"","Mobile_Number":"","Current_Address":"","Permanent_Address":"","Aadhaar_No":"","DOB":"","DOJ":"","DOM":"","Marital_Status":"","Profile_Pic_Path":"","Remarks":"","Status":"Active","Created_By":"dbo","Created_On":"20-09-2017 19:35:13","Updated_By":"5482","Updated_Date":"05-10-2017 15:36:16"},{"ID":"4","Personnel_Number":"1004","Employee_Name":"Employee 
4","Password":"12345","Gender":"M","Grade":"W2","Designation":"120","SBU_ID":"26","Department":"88","Category":"1","Email_ID_Official":"","Email_ID_Personal":"","Mobile_Number":"","Current_Address":"","Permanent_Address":"","Aadhaar_No":"","DOB":"","DOJ":"","DOM":"","Marital_Status":"","Profile_Pic_Path":"","Remarks":"","Status":"Active","Created_By":"dbo","Created_On":"20-09-2017 19:35:13","Updated_By":"6298","Updated_Date":"06-10-2017 13:30:20"},{"ID":"5","Personnel_Number":"1005","Employee_Name":"Employee
And So On......
first i make class for all items in same basis i am not posting all data because it will be too lengthy its a sample data:
// DTO for one employee record from the JSON feed.
// NOTE(review): every field is a string (including ID) because the sample feed
// delivers all values as JSON strings; convert/validate before inserting.
public class tblEmployee
{
public string ID { get; set; }
public string Personnel_Number { get; set; }
public string Employee_Name { get; set; }
public string Password { get; set; }
}
Then i create another class for list of items:
// Envelope type for the deserialized feed: the employee rows plus a count.
// NOTE(review): the JSON root property is named "data" (see the sample above),
// but this property is named "transactions" — without a [JsonProperty("data")]
// mapping the deserializer leaves it null, which matches the reported symptom.
public class SalesTransactions
{
public List<tblEmployee> transactions { get; set; }
public int count { get; set; }
}
And then i have create insert data into sql server table on button click
/// <summary>
/// Downloads the JSON feed, deserializes it, and inserts each employee row
/// with a parameterized command (fixes the broken "+ table" concatenation,
/// which appended an object's type name to the SQL, and removes the
/// SQL-injection risk).
/// </summary>
protected void btninsertjsondata_Click(object sender, EventArgs e)
{
    var conString = ConfigurationManager.ConnectionStrings["SQLDBConnection1"];
    string strConnString = conString.ConnectionString;
    string data = HttpContent("https://localhost:45333/Retrive_Jsondata.aspx");
    // NOTE(review): the JSON root property is "data" while SalesTransactions
    // exposes "transactions" — map them or table.transactions stays null.
    SalesTransactions table = JsonConvert.DeserializeObject<SalesTransactions>(data);
    int inserted = 0;
    using (SqlConnection conn = new SqlConnection(strConnString))
    {
        conn.Open();
        foreach (tblEmployee row in table.transactions ?? new List<tblEmployee>())
        {
            using (SqlCommand com = new SqlCommand(
                "INSERT INTO dbo.tbl_Iprofile_Employee (ID, Personnel_Number, Employee_Name, Password) " +
                "VALUES (@ID, @Personnel_Number, @Employee_Name, @Password)", conn))
            {
                com.CommandType = CommandType.Text;
                com.Parameters.AddWithValue("@ID", row.ID);
                com.Parameters.AddWithValue("@Personnel_Number", row.Personnel_Number);
                com.Parameters.AddWithValue("@Employee_Name", row.Employee_Name);
                com.Parameters.AddWithValue("@Password", row.Password);
                inserted += com.ExecuteNonQuery();
            }
        }
    }
    if (inserted > 0)
    {
        Response.Write("{\"response\":{\"status\":\"success\",\"msg\":\"Details Saved Successfully..\"}}");
    }
    else
    {
        Response.Write("{\"response\":{\"status\":\"fail\",\"msg\":\"oops!! something went wrong\"}}");
    }
}
While debugging my Code in table i am getting Count 0 and transactions null that's why my sql query is not working and i am getting 'Incorrect syntax near '+'.' In Data i am getting my all JSON string but unable to convert into table form
where i am doing wrong please help me should i add for loop for mapping all column and row ???
Edit:
Check how your query looks at runtime in the debugger. You're appending an object — not a string containing the table name — to your SELECT statement.
// Insert one parameterized row per transaction. Parameters fix both the
// injection vulnerability and the quoting/syntax errors that string.Format
// with raw values produced.
foreach (var row in table.transactions)
{
    com = new SqlCommand(
        "INSERT INTO dbo.tbl_Iprofile_Employee (ID, Personnel_Number, Employee_Name, Password) " +
        "VALUES (@ID, @Personnel_Number, @Employee_Name, @Password);",
        conn);
    com.CommandType = CommandType.Text;
    com.Parameters.AddWithValue("@ID", row.ID);
    com.Parameters.AddWithValue("@Personnel_Number", row.Personnel_Number);
    com.Parameters.AddWithValue("@Employee_Name", row.Employee_Name);
    com.Parameters.AddWithValue("@Password", row.Password);
    int refId = com.ExecuteNonQuery();
}
Beware: building SQL by concatenating raw values, as in the original snippets, is vulnerable to SQL injection — always use parameterized commands.
For better readability and sustainability try to extract your DB logic in independent classes and methods first. A common way to do this is the Repository Pattern.
For its implementation try to use a micro ORM like Dapper.
This whole approach will help you with time-consuming tasks like debugging, testing, and maintaining your application.

How can I add new row to database with values stored in ObservableCollection?

So I have two ObservableCollections:
ObservableCollection<Student> studentslist
ObservableCollection<Subject> subjectslist
and I'm passing them to class where I have set up whole communication with sql database.
studentslist contains values passed by:
StudentsList.Add(new Student { IdStudent = newStudentId, Name = NameBox.Text, Surname = SurnameBox.Text, Index = IndexBox.Text })
subjectlist contains values from checked checkboxes in my listbox (I'm still not sure if this is correct):
var selectedSubjects = SubjectList.Where(subjects => subjects.IsChecked == true);
var selectedSubjectsCollection = new ObservableCollection<Subject>(selectedSubjects);
and this is my Subject class:
// One selectable subject entry; IsChecked backs the checkbox state in the list UI.
public class Subject
{
    public int IdSubject { get; set; }
    public string subject { get; set; }
    public bool IsChecked { get; set; }

    // Display only the subject name (same formatting as the interpolated original).
    public override string ToString() => $"{subject}";
}
Now in my class, responsible for connection with sql data base, I've created method which grabs both of this collections and based on their values I would like to create new record in database:
// Inserts the given students and their selected subjects into the database
// (body elided in the question).
// NOTE(review): the parameter type is ObservableCollection<Przedmiot> while the
// rest of the post uses Subject — confirm these refer to the same type.
public void addRecord(ObservableCollection<Student> studentslist, ObservableCollection<Przedmiot> subjectslist)
{
OpenConection();
//...
CloseConnection();
}
In database I have Student table:
Student(IdStudent, FirstName, LastName, IndexNumber)
and Subject table:
Subject(IdSubject, Name)
And while I can read data from my database, I don't know the correct way to pass these values to the corresponding columns in the database.
I like to do this kind of thing using stored procedures to minimise the amount of T-SQL code in the app.
An example would be:
/// <summary>
/// Inserts one subject via the dbo.InsertSubject stored procedure.
/// fix: parameter names use '@' (not '#'); the command is disposed via using;
/// and the empty catch that silently swallowed all exceptions is removed so
/// failures surface to the caller.
/// </summary>
public static void InsertSubject(Przedmiot subject)
{
    using (SqlConnection conn = new SqlConnection("Connection String"))
    using (SqlCommand command = new SqlCommand("dbo.InsertSubject", conn) { CommandType = CommandType.StoredProcedure })
    {
        command.Parameters.Add(new SqlParameter("@IdSubject", subject.IdSubject));
        command.Parameters.Add(new SqlParameter("@Name", subject.subject));
        conn.Open();
        command.ExecuteNonQuery();
    }
}
You would then have a stored procedure with the same signature to perform the INSERT, i.e.:
-- Stored procedure matching the C# call above.
-- fix: T-SQL parameter names start with '@', not '#'.
CREATE PROCEDURE dbo.InsertSubject
    @IdSubject int
    , @Name varchar(50)
AS
...
GO
If you want to pass in the entire collection you could implement a foreach loop in the above example.
Personally I like to call these methods once per insert and have it as an int returning method (for example) so I can pass some data out, e.g., the ID of the inserted row.

C# - MySql - Best way to convert data from MySqlDataReader to different models

I have experience in working and fixing bugs with existing code bases that implement MySql code, but have to design a new program from scratch at my new job. I am not sure what is the best way to return data from MySqlDataReader to my custom models. Please advise!
Here's what I have,
Folder structure:
Models (folder)
Metadata.cs
User.cs
MySqlDb.cs
Metadata.cs: Represents data from the metadata table
// Plain model for one row of the metadata table; hydrated by MySqlDb.
public class Metadata
{
public int Id { get; set; }
public string Title { get; set; }
public string Sku { get; set; }
public bool IsLive { get; set; }
}
User.cs: Represents data from user table
// Plain model for one row of the user table; hydrated by MySqlDb.
public class User
{
public int Id { get; set; }
public string UserName { get; set; }
public int Age { get; set; }
public string Address { get; set; }
}
MySqlDb.cs
using MySql.Data;
using MySql.Data.MySqlClient;
public class MySqlDb
{
    public MySqlConnection Connection { get; set; }

    public MySqlDb(string connectionString)
    {
        Connection = new MySqlConnection(connectionString);
    }

    /// <summary>
    /// Runs a SELECT against the metadata table and maps each row to a Metadata.
    /// </summary>
    public List<Metadata> RunSelectQueryForMetadata(string query)
    {
        var metadata = new List<Metadata>();
        using (var cmd = new MySqlCommand(query, Connection))
        {
            // fix: ExecuteReader requires an open connection; the original never opened it
            if (Connection.State != ConnectionState.Open)
                Connection.Open();
            using (var rdr = cmd.ExecuteReader())
            {
                while (rdr.Read())
                {
                    // fix: the reader's indexer returns object — convert explicitly
                    metadata.Add(
                        new Metadata
                        {
                            Id = Convert.ToInt32(rdr["id"]),
                            Title = Convert.ToString(rdr["title"]),
                            Sku = Convert.ToString(rdr["sku"]),
                            IsLive = Convert.ToBoolean(rdr["islive"]),
                        });
                } // while
            } // using reader
        } // using command
        return metadata;
    } // public List<Metadata> RunSelectQueryForMetadata(string query)
} // public class MySqlDb
If I try to get Users data, I am thinking of writing another method (RunSelectQueryForUsers). I would like to avoid writing different methods for different tables. I am not sure how to use one method for retrieving data from different tables with different data structures and typecast them to the Model I want.
Any help is greatly appreciated!!
One way is to use micro-orm such as Dapper which is a simple object mapper built for .Net. Dapper extends the IDbConnection by providing useful extension methods to query your database.
Example of implementing Dapper within your current method:
/// <summary>
/// Runs a SELECT with Dapper and maps the rows to Metadata.
/// fix: Close moved to finally so an exception in Query no longer leaks an
/// open connection, and errors are no longer silently swallowed.
/// </summary>
public List<Metadata> RunSelectQueryForMetadata(string query)
{
    var metadata = new List<Metadata>();
    try
    {
        Connection.Open();
        metadata = Connection.Query<Metadata>(query).ToList();
    }
    finally
    {
        if (Connection.State == ConnectionState.Open)
            Connection.Close();
    }
    return metadata;
}
Some useful links:
Dapper Github
Dapper Tutorial
Converting it to generic method: (not tested right now)
/// <summary>
/// Generic version: runs a SELECT with Dapper and maps the rows to T.
/// fix: the original referenced an undeclared 'metadata' local and would not
/// compile; Close also moved to finally so errors don't leak the connection.
/// </summary>
public List<T> RunSelectQuery<T>(string query)
{
    var results = new List<T>();
    try
    {
        Connection.Open();
        results = Connection.Query<T>(query).ToList();
    }
    finally
    {
        if (Connection.State == ConnectionState.Open)
            Connection.Close();
    }
    return results;
}
and use something like this below:
List<Metadata> myMetadata = RunSelectQuery<Metadata>(query);
I prefer a pattern more like this:
public class MySqlDb
{
    //1. This should not be public!
    //   Keeping it private forces other code to go through your public methods,
    //   rather than using the connection directly. Even better if the class knows
    //   how to read the string from a config file rather than accepting it via
    //   the constructor.
    //2. Don't save a connection object for re-use. ADO.NET has a connection
    //   pooling feature that works when you create new objects for most queries.
    private string ConnectionString { get; set; }

    public MySqlDb(string connectionString)
    {
        ConnectionString = connectionString;
    }

    //1. IEnumerable + yield streams rows instead of pulling all results into
    //   memory at once.
    //2. Methods that accept query strings should also accept parameters —
    //   otherwise you are forced to build SQL in injection-prone ways.
    public IEnumerable<Metadata> RunSelectQueryForMetadata(string query, IEnumerable<MySqlParameter> parameters)
    {
        using (var cn = new MySqlConnection(ConnectionString))
        using (var cmd = new MySqlCommand(query, cn))
        {
            if (parameters != null)
            {
                cmd.Parameters.AddRange(parameters.ToArray());
            }
            cn.Open();
            using (var rdr = cmd.ExecuteReader())
            {
                while (rdr.Read())
                {
                    // fix: the reader's indexer returns object, which cannot be
                    // assigned to int/bool properties — convert explicitly
                    yield return new Metadata
                    {
                        Id = Convert.ToInt32(rdr["id"]),
                        Title = Convert.ToString(rdr["title"]),
                        Sku = Convert.ToString(rdr["sku"]),
                        IsLive = Convert.ToBoolean(rdr["islive"]),
                    };
                }
            }
        }
    }
}
Ultimately, the ideal is for the RunSelectQuery__() method to be generic and private, and for public methods to not accept SQL statements. The goal is to force all SQL in your program to live in the MySqlDb class. Each query has a method that accepts specific typed inputs, and returns typed output. The reason you have that goal is to make it easy to manage your database access and easy to audit that all of your SQL code is safely using parameters (and not vulnerable to sql injection attacks! ). You want something like this:
//updated to remove the earlier explanatory comments
// and show example methods for isolating SQL from the rest of the application.
public class MySqlDb
{
private string ConnectionString { get; set;}
private string ReadConnectionStringFromConfigFile()
{
//TODO
throw NotImplementedException();
}
public MySqlDb()
{
ConnectionString = ReadConnectionStringFromConfigFile();
}
//This is now PRIVATE and generic, and allows for parameterized queries
private IEnumerable<T> RunSelectQuery(string query, Func<IDataReader, T> translateRecord, IEnumerable<MySqlParameter> parameters)
{
using (var cn = new MySqlConnection(ConnectionString))
using (var cmd = new MySqlCommand(query, cn))
{
if (parameters != null)
{
cmd.Parameters.AddRange(parameters.ToArray());
}
cn.Open();
using(var rdr = cmd.ExecuteReader())
{
while(rdr.Read())
{
yield return translateRecord(rdr);
}
rdr.Close();
}
}
}
////// Example methods showing how to use the generic method above
// These methods are the only public part of your class
public MetaData GetMetaDataById(int ID)
{
string sql = "SELECT * FROM MetatData WHERE ID= #ID";
var parameters = new List<MySqlParameters> {
new MySqlParameter() {
ParameterName = "#ID",
MySqlDbType = MySqlDbType.Int32,
Value = ID
}
};
return RunSelectQuery<MetaData>(sql, parameters, r =>
new Metadata {
Id = r["id"],
Title = r["title"],
Sku = r["sku"],
IsLive = r["islive"],
}).FirstOrDefault();
}
public IEnumerable<MetaData> GetAllMetaData()
{
string sql = "SELECT * FROM MetatData";
return RunSelectQuery<MetaData>(sql, null, r =>
new Metadata {
Id = r["id"],
Title = r["title"],
Sku = r["sku"],
IsLive = r["islive"],
});
}
public User GetUserByID(int ID)
{
string sql = "SELECT * FROM User WHERE ID= #ID";
var parameters = new List<MySqlParameters> {
new MySqlParameter() {
ParameterName = "#ID",
MySqlDbType = MySqlDbType.Int32,
Value = ID
}
};
return RunSelectQuery<User>(sql, parameters, r =>
new Metadata {
Id = r["id"],
UserName = r["UserName"],
Age = r["Age"],
Address = r["Address"],
}).FirstOrDefault();
}
public User GetUserByUsername(string UserName)
{
string sql = "SELECT * FROM User WHERE Username= #UserName";
var parameters = new List<MySqlParameters> {
new MySqlParameter() {
ParameterName = "#UserName",
MySqlDbType = MySqlDbType.VarChar,
Size = 20, //guessing at username lenght
Value = UserName
}
};
return RunSelectQuery<User>(sql, parameters, r =>
new Metadata {
Id = r["id"],
UserName = r["UserName"],
Age = r["Age"],
Address = r["Address"],
}).FirstOrDefault();
}
public IEnumerable<User> FindUsersByAge(int Age)
{
string sql = "SELECT * FROM User WHERE Age = #Age";
var parameters = new List<MySqlParameters> {
new MySqlParameter() {
ParameterName = "#Age",
MySqlDbType = MySqlDbType.Int32,
Value = Age
}
};
return RunSelectQuery<User>(sql, parameters, r =>
new Metadata {
Id = r["id"],
UserName = r["UserName"],
Age = r["Age"],
Address = r["Address"],
});
}
}
In larger applications, you abstract this further into a separate project, with a private class for the lower-level methods that are private here, and a public class for each of the object types you use via that database. You might even go full-blown service-oriented architecture, where you get all your data via web service calls, and only the service layer talks directly to any database.
Of course, at this level you can also use a mirco-ORM like Dapper. Micro-ORMs will help you avoid re-writing the same mapping code over and over, and also help more with the INSERT/UPDATE side of data operations. Their goal is to take over as much of the boilerplate code for you as they can.
The advantage of a micro-ORM over a full ORM is it keeps you closer to the SQL. This is a good thing. Full-blown ORMs like Entity Framework or NHibernate effectively force you to learn a whole new language on top of the SQL, while mostly limiting you to basic SQL statements that often lose the advantages from the "relational" part of a relational database. Eventually, you often end up needing to understand and write complex raw SQL anyway to optimize performace. Micro-ORMs try to offer a happy-medium... taking away as much of the boiler plate code needed to talk to a database as they can, while still leaving you to write your own SQL.
While not tailored to using MySql and straight up sql, the below code snippets provide a means to do what you're asking using generics. Could use some improvements though...
using System;
using System.Collections.Generic;
using System.Data;
using System.Data.SqlClient;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Data.General
{
    // Base class for DB-backed objects: a subclass implements Initialize() to
    // map one data record onto itself, and calls ReadObject/ReadObjects with a
    // stored procedure name and parameters.
    public abstract class DataObject
    {
        protected abstract void Initialize(IDataRecord dataRow);

        private static string _connectionString = "";

        /// <summary>
        /// Loads a single data object from the results of a stored procedure.
        /// Returns null when the procedure yields no rows.
        /// fix: T is constrained to DataObject so the cast and the Initialize
        /// call compile; dataType is kept only for signature compatibility.
        /// </summary>
        protected static T ReadObject<T>(string procedureName, SqlParameter[] sqlParameters, Type dataType)
            where T : DataObject
        {
            DataObject returnItem = null;
            using (SqlConnection sqlConnection = new SqlConnection(GetConnectionString()))
            using (SqlCommand command = BuildCommand(sqlConnection, procedureName, sqlParameters))
            {
                sqlConnection.Open();
                //Execute the reader for the given stored proc and sql parameters
                using (IDataReader reader = command.ExecuteReader())
                {
                    //If we get no records back we'll still return null
                    if (reader.Read())
                    {
                        returnItem = (DataObject)Activator.CreateInstance(typeof(T));
                        returnItem.Initialize(reader);
                    }
                }
            }
            //Return our DataObject
            return (T)returnItem;
        }

        /// <summary>
        /// Reads a collection of data objects from a stored procedure.
        /// fix: the same where-constraint replaces the invalid 'as DataObject'
        /// cast on an unconstrained type parameter.
        /// </summary>
        protected static List<T> ReadObjects<T>(string procedureName, SqlParameter[] sqlParameters)
            where T : DataObject
        {
            List<T> returnItems = new List<T>();
            T dataObject;
            using (SqlConnection sqlConnection = new SqlConnection(GetConnectionString()))
            using (SqlCommand command = BuildCommand(sqlConnection, procedureName, sqlParameters, null))
            {
                sqlConnection.Open();
                //Execute the reader for the given stored proc and sql parameters
                using (IDataReader reader = command.ExecuteReader())
                {
                    while (reader.Read())
                    {
                        dataObject = (T)Activator.CreateInstance(typeof(T));
                        dataObject.Initialize(reader);
                        returnItems.Add(dataObject);
                    }
                }
            }
            //Return the DataObjects
            return returnItems;
        }

        /// <summary>
        /// Builds a SQL Command object that can be used to execute the given stored procedure.
        /// Each parameter is cloned because a SqlParameter cannot belong to two commands.
        /// </summary>
        private static SqlCommand BuildCommand(SqlConnection sqlConnection, string procedureName, SqlParameter[] sqlParameters, SqlTransaction sqlTransaction = null)
        {
            SqlCommand cmd = new SqlCommand(procedureName, sqlConnection);
            if (sqlTransaction != null)
            {
                cmd.Transaction = sqlTransaction;
            }
            cmd.CommandType = CommandType.StoredProcedure;
            // Add SQL Parameters (if any).
            // fix: the original passed parameter.DbType to the SqlParameter
            // (string, object) overload, briefly storing the enum itself as the
            // value; copy the SqlDbType and Value explicitly instead. A null
            // guard also allows procedures without parameters.
            if (sqlParameters != null)
            {
                foreach (SqlParameter parameter in sqlParameters)
                {
                    SqlParameter param = new SqlParameter(parameter.ParameterName, parameter.SqlDbType)
                    {
                        Value = parameter.Value
                    };
                    cmd.Parameters.Add(param);
                }
            }
            return cmd;
        }

        private static string GetConnectionString()
        {
            return _connectionString;
        }

        public static void SetConnectionString(string connectionString)
        {
            _connectionString = connectionString;
        }
    }
}
namespace Data.Library
{
    // Data-layer wrapper around Data.Model.Metadata: knows how to hydrate the
    // model from a data record and which stored procedures load it.
    public class Metadata : General.DataObject
    {
        protected Data.Model.Metadata _metaData;

        public Data.Model.Metadata BaseModel
        {
            get { return _metaData; }
            set { _metaData = value; }
        }

        //Typically I have properties in here pointing to the Data.Model class
        protected override void Initialize(System.Data.IDataRecord dataRow)
        {
            _metaData = new Model.Metadata();
            _metaData.Id = Convert.ToInt32(dataRow["Id"].ToString());
            _metaData.Title = (dataRow["Title"].ToString());
            _metaData.Sku = (dataRow["Sku"].ToString());
            _metaData.IsLive = Convert.ToBoolean(dataRow["IsLive"].ToString());
        }

        public static Metadata ReadByID(int id)
        {
            // fix: SQL Server parameter names start with '@', not '#'
            return General.DataObject.ReadObject<Metadata>("dbo.s_MetadataGet", new[] { new SqlParameter("@ID", id) },
                typeof(Metadata));
        }

        public static Metadata[] ReadBySku(string sku)
        {
            List<Metadata> metaDatas = General.DataObject.ReadObjects<Metadata>("dbo.s_MetadataGetBySku", new[] { new SqlParameter("@Sku", sku) });
            return metaDatas.ToArray();
        }
    }
}
namespace Data.Model
{
// Plain data model for one metadata row; hydrated by Data.Library.Metadata.Initialize.
public class Metadata
{
public int Id { get; set; }
public string Title { get; set; }
public string Sku { get; set; }
public bool IsLive { get; set; }
}
}

In Entity Framework database-first approach how to return multiple result sets from a stored procedure?

I am working with Entity Framework using the database-first approach. I created a stored procedure in MySQL that returns multiple sets, I want to get all result sets on a single hit not only the top one result set.
In my stored procedure I am passing arguments too.
You can use SQLQuery to execute your procedure:
public partial class NorthwindContext
{
    /// <summary>
    /// Calls the CustOrderHist stored procedure via raw SQL; EF maps the single
    /// result set onto CustomerOrderHistory by column name.
    /// fix: parameter names use '@', and a null argument is sent as
    /// DBNull.Value — the original passed typeof(string) as the value.
    /// </summary>
    public IEnumerable<CustomerOrderHistory> CustomerOrderHistory(string customerID)
    {
        var customerIDParameter = customerID != null ?
            new SqlParameter("@CustomerID", customerID) :
            new SqlParameter("@CustomerID", (object)DBNull.Value);
        return Database.SqlQuery<CustomerOrderHistory>("CustOrderHist @CustomerID", customerIDParameter);
    }
}
and the model looks like:
// Shape of one row returned by the CustOrderHist stored procedure; property
// names must match the result set's column names for SqlQuery to map them.
public partial class CustomerOrderHistory
{
public string ProductName { get; set; }
public Nullable<int> Total { get; set; }
}
It's sample based on Northwind Database
Use a basic data reader and make use of the reader.NextResult method.
// Execute one command that returns two result sets and use
// ObjectContext.Translate to map each set onto an entity type.
var cmd = db.Database.Connection.CreateCommand();
cmd.CommandText = "some command here"; // must return the Books set first, then Authors
try
{
db.Database.Connection.Open();
// NOTE(review): the reader is never disposed here, and Translate results are
// typically streamed from it — enumerate 'books' before calling NextResult,
// and dispose the reader after both sets are consumed. Confirm against the
// EF "multiple result sets" documentation.
var reader = cmd.ExecuteReader();
var books = ((IObjectContextAdapter)db)
.ObjectContext
.Translate<Book>(reader, "Books", MergeOption.AppendOnly);
// Move to second result set and read authors
reader.NextResult();
var authors = ((IObjectContextAdapter)db)
.ObjectContext
.Translate<Author>(reader, "Authors", MergeOption.AppendOnly);
}
finally
{
// Close the connection whether or not reading succeeded.
db.Database.Connection.Close();
}

SQLBulkCopy inserts using Entity Framework with Foreign Key object

I am using EF6 as the Load layer of an ETL tool. I acknowledge there are better tools (such as SSIS, direct SQL queries, etc) however due to the complexity of the transformation, it needed to be done in code, and the target DB is built from an EF Model. The number of records inserted in a batch can exceed 100,000 records. This is not incredibly slow to do (using the DBContext.AddRange() method) but the memory usage is extremely high (exceeding 1GB)
For examples sake, i have the following data classes (which are created in memory)
// Parent entity; Bars holds the child rows that point back via FooID.
public class Foo
{
public long FooID { get; set; }
public string SomeProperty { get; set; }
public decimal AverageFlightSpeedOfUnladenSwallow { get; set; }
public IEnumerable<Bar> Bars { get; set; }
}
// Child entity: carries both the navigation property (Foo) and the foreign-key
// column (FooID). Per the question, only Foo is populated in memory; FooID is
// what the bulk-insert path still needs to fill in.
public class Bar
{
public long BarID { get; set; }
public Foo Foo { get; set; }
public long FooID { get; set; }
public string FavoriteColour { get; set; }
}
// EF change tracking resolves the Bar->Foo FKs, but tracking 100k+ entities in
// one SaveChanges is what drives the high memory usage described above.
dbContext.Foos.AddRange(ListOfFoos); //Pre constructed list of Foos
dbContext.Bars.AddRange(ListOfBars); //Pre constructed list of Bars (parent Foo items populated, FooID is not)
dbContext.SaveChanges();
I am looking at using the LINQ Entity Data reader to enable the conversion of IList<Foo> to a data reader so i can import it using SQLBulkCopy (SqlBulkCopy and Entity Framework,
http://archive.msdn.microsoft.com/LinqEntityDataReader/Release/ProjectReleases.aspx?ReleaseId=389).
Requirement
The List<Bar> will not have the ID's of the parent Foo class. Entity framework handles this just fine, but i am not sure how to get this same functionality in SqlBulkCopy. Is there some way to get it done?
No, there is no direct way to do this with SqlBulkCopy.
SQL Bulkcopy is very close to the database, therefore it is very fast.
The ORM handles the FK/PK relations but has a disadvantage of being slow.
Depending on your datamodel, you could do something like it is in this question: populate batches of datatables
SQL Bulkcopy YYYYMMDD problem
So,
Provided your EF user has the ability to alter schema on the database, you can pursue this method of solving the problem:
Add a GUID column to the table
Identify each object in memory by the GUID
Insert the values via bulk Insert including the identifying GUID
Select them back and map them to the inserted GUIDs
Drop the GUID column
Here is some code to do just that. It's a little dirty and not optimised, but it reduced the original task to 30MB of memory and 1 minute of processing.
// Bulk-inserts EF entities with SqlBulkCopy while preserving FK links, using the
// 5 steps described above: add a temporary GUID column, tag each in-memory
// entity with a GUID, bulk insert, select the rows back to map generated keys
// onto the entities, then drop the GUID column.
public static class ForeignKeyBulkInsert
{
// Name of the temporary surrogate-key column added to the target table.
private const string GUID_COLUMN_NAME = "GUID_SURROGATE_KEY";
// Derives the physical table name for T by parsing the trace SQL of an ObjectSet.
public static string GetTableName<T>(this ObjectContext context) where T : class
{
string sql = context.CreateObjectSet<T>().ToTraceString();
Regex regex = new Regex("FROM (?<table>.*) AS");
Match match = regex.Match(sql);
string table = match.Groups["table"].Value;
return table;
}
// Bulk-inserts 'range'. When importForeignKeyIDs is true, FK id columns are
// populated from each entity's parent navigation property via reflection first.
public static void AddRange<TEntity>(this DbContext db, IEnumerable<TEntity> range, bool importForeignKeyIDs = false)
where TEntity : class
{
// Maps surrogate GUID -> entity so generated keys can be written back later.
// NOTE(review): presumably populated inside AsDataReader below — confirm.
Dictionary<Guid, TEntity> lookup = new Dictionary<Guid, TEntity>();
var objectContext = ((IObjectContextAdapter)db).ObjectContext;
var os = objectContext.CreateObjectSet<TEntity>();
// NOTE(review): this is true whenever the entity has any key property, not
// only store-generated ones — the name overstates what is checked.
bool hasAutoGeneratedKey = os.EntitySet.ElementType.KeyProperties.Any();
Type entityType = typeof(TEntity);
if (importForeignKeyIDs)
{
// For each required (multiplicity One) parent relationship, copy the
// parent's key value into the entity's FK id property.
var foreignKeyProperties = os.EntitySet.ElementType.NavigationProperties.Where(x => x.ToEndMember.RelationshipMultiplicity == RelationshipMultiplicity.One);
foreach (var foreignKeyProperty in foreignKeyProperties)
{
var foreignKeyIdProperty = foreignKeyProperty.GetDependentProperties().First();
var parentKeyProperty = foreignKeyProperty.ToEndMember.GetEntityType().KeyMembers.First();
// PropertyInfo lookups are cached on first iteration to limit reflection cost.
PropertyInfo foreignKeyPropertyInfo = null;
Type parentType = null;
PropertyInfo parentKeyPropertyInfo = null;
PropertyInfo foreignKeyIdPropertyInfo = null;
foreach (var item in range)
{
// NOTE(review): this GetValue result is discarded — the line appears redundant.
entityType.GetProperty(foreignKeyProperty.Name).GetValue(item);
if (foreignKeyPropertyInfo == null)
foreignKeyPropertyInfo = entityType.GetProperty(foreignKeyProperty.Name);
if (parentType == null)
parentType = foreignKeyPropertyInfo.GetValue(item).GetType();
if (parentKeyPropertyInfo == null)
parentKeyPropertyInfo = parentType.GetProperty(parentKeyProperty.Name);
if (foreignKeyIdPropertyInfo == null)
foreignKeyIdPropertyInfo = entityType.GetProperty(foreignKeyIdProperty.Name);
var foreignKey = foreignKeyPropertyInfo.GetValue(item);
// NOTE(review): break abandons ALL remaining items on the first null
// navigation property — confirm 'continue' wasn't intended.
if (foreignKey == null)
break;
var parentKey = parentKeyPropertyInfo.GetValue(foreignKey);
foreignKeyIdPropertyInfo.SetValue(item, parentKey);
}
}
}
string tableName = objectContext.GetTableName<TEntity>();
var entityReader = range.AsDataReader(GUID_COLUMN_NAME, lookup);
if (hasAutoGeneratedKey)
{
// Add the surrogate GUID column; if it already exists (e.g. a previous run
// failed before cleanup), drop and re-add it.
try
{
db.Database.ExecuteSqlCommand(string.Format("ALTER TABLE {0} ADD [{1}] uniqueidentifier null", tableName, GUID_COLUMN_NAME));
}
catch (Exception)
{
db.Database.ExecuteSqlCommand(string.Format("ALTER TABLE {0} DROP COLUMN [{1}]", tableName, GUID_COLUMN_NAME));
db.Database.ExecuteSqlCommand(string.Format("ALTER TABLE {0} ADD [{1}] uniqueidentifier null", tableName, GUID_COLUMN_NAME));
}
}
try
{
// Stream the entities into the table. NOTE(review): the 'as SqlConnection'
// cast yields null (and a NullReferenceException) for non-SQL-Server providers.
var connection = db.Database.Connection as SqlConnection;
connection.Open();
using (SqlBulkCopy cpy = new SqlBulkCopy(connection))
{
cpy.BulkCopyTimeout = 0; // no timeout: batches can exceed 100k rows
cpy.DestinationTableName = tableName;
cpy.WriteToServer(entityReader);
connection.Close();
}
if (hasAutoGeneratedKey)
{
// Select the just-inserted rows (identified by a non-null surrogate GUID)
// and copy each generated key back onto the matching in-memory entity.
db.Database.Connection.Open();
var comm = db.Database.Connection.CreateCommand();
comm.CommandText = string.Format("SELECT * FROM {0} WHERE [{1}] is not null", tableName, GUID_COLUMN_NAME);
try
{
using (var reader = comm.ExecuteReader())
{
while (reader.Read())
{
Guid surrogateKey = Guid.Parse(reader[GUID_COLUMN_NAME].ToString());
TEntity entity = lookup[surrogateKey];
var keyProperty = entityType.GetProperty(os.EntitySet.ElementType.KeyMembers.First().Name);
keyProperty.SetValue(entity, reader[keyProperty.Name]);
}
}
}
catch (Exception)
{
throw;
}
finally
{
//This should never occur
db.Database.Connection.Close();
}
}
}
catch (Exception)
{
throw;
}
finally
{
// Always remove the temporary surrogate column, even on failure.
if (hasAutoGeneratedKey)
db.Database.ExecuteSqlCommand(string.Format("ALTER TABLE {0} DROP COLUMN [{1}]", tableName, GUID_COLUMN_NAME));
}
}
}

Categories

Resources