OutOfMemoryException when querying very large varbinary data with Entity Framework
I'm trying to query a varbinary column that contains a file (1.2 GB) using Entity Framework. See below:
Database to test
CREATE TABLE [dbo].[BIGDATA]
(
[id] [bigint] IDENTITY(1,1) NOT NULL,
[BIGDATA] [varbinary](max) NULL,
CONSTRAINT [PK_BIGDATA] PRIMARY KEY CLUSTERED ([id] ASC)
) ON [PRIMARY]
Data to test (any file of about 1 GB)
INSERT INTO [dbo].[BIGDATA]([BIGDATA])
VALUES
((SELECT BulkColumn FROM OPENROWSET(BULK N'C:\BigTest.txt', SINGLE_BLOB) AS Document))
Controller to download file
public FileResult Download()
{
try
{
var context = new Models.ELOGTESTEEntities();
var idArquivo = Convert.ToInt32(1);
// The problem is here: the exception is thrown when the query is sent to SQL Server to read the row
var arquivo = (from item in context.BIGDATA
where item.id.Equals(idArquivo)
select item).Single();
var mimeType = ".txt";
byte[] bytes = System.Text.Encoding.GetEncoding("iso-8859-8").GetBytes("BigTest.txt");
return File(arquivo.BIGDATA1, mimeType, System.Text.Encoding.UTF8.GetString(bytes));
}
catch (Exception ex)
{
throw ex;
}
}
I can query the table normally in SQL Server with Select * From BigData.
But in Entity Framework (or with a plain ADO.NET command) I get this exception:
System.OutOfMemoryException
Does anyone know how to fix this problem?
Wow, that is a lot of data. I really think you need to not use EF to get this data, but instead use the good ol' SqlDataReader.
Given your .NET 4.0 restriction, I found a custom implementation of streaming a read from a massive varbinary column. I can't take any credit for this other than reviewing the code and making sure there are no .NET 4.5 shortcuts in it:
http://www.syntaxwarriors.com/2013/stream-varbinary-data-to-and-from-mssql-using-csharp/
Mods - let me know if something like this should be copy/pasted into the answer as the original URL may not be persistent.
Edit: Here is the code from the link in case the URL goes away:
Usage:
// reading and returning data to the client
VarbinaryStream filestream = new VarbinaryStream(
DbContext.Database.Connection.ConnectionString,
"FileContents",
"Content",
"ID",
(int)filepost.ID,
true);
// Do what you want with the stream here.
The code:
using System;
using System.Data;
using System.Data.SqlClient;
using System.IO;
public class VarbinaryStream : Stream
{
private SqlConnection _Connection;
private string _TableName;
private string _BinaryColumn;
private string _KeyColumn;
private int _KeyValue;
private long _Offset;
private SqlDataReader _SQLReader;
private long _SQLReadPosition;
private bool _AllowedToRead = false;
public VarbinaryStream(
string ConnectionString,
string TableName,
string BinaryColumn,
string KeyColumn,
int KeyValue,
bool AllowRead = false)
{
// create own connection with the connection string.
_Connection = new SqlConnection(ConnectionString);
_TableName = TableName;
_BinaryColumn = BinaryColumn;
_KeyColumn = KeyColumn;
_KeyValue = KeyValue;
// only query the database for a result if we are going to be reading, otherwise skip.
_AllowedToRead = AllowRead;
if (_AllowedToRead == true)
{
try
{
if (_Connection.State != ConnectionState.Open)
_Connection.Open();
SqlCommand cmd = new SqlCommand(
@"SELECT TOP 1 [" + _BinaryColumn + @"]
FROM [dbo].[" + _TableName + @"]
WHERE [" + _KeyColumn + "] = @id",
_Connection);
cmd.Parameters.Add(new SqlParameter("@id", _KeyValue));
_SQLReader = cmd.ExecuteReader(
CommandBehavior.SequentialAccess |
CommandBehavior.SingleResult |
CommandBehavior.SingleRow |
CommandBehavior.CloseConnection);
_SQLReader.Read();
}
catch (Exception e)
{
// log errors here
}
}
}
// this method will be called as part of the Stream implementation when we try to write to our VarbinaryStream class.
public override void Write(byte[] buffer, int index, int count)
{
try
{
if (_Connection.State != ConnectionState.Open)
_Connection.Open();
if (_Offset == 0)
{
// for the first write we just send the bytes to the Column
SqlCommand cmd = new SqlCommand(
@"UPDATE [dbo].[" + _TableName + @"]
SET [" + _BinaryColumn + @"] = @firstchunk
WHERE [" + _KeyColumn + "] = @id",
_Connection);
cmd.Parameters.Add(new SqlParameter("@firstchunk", buffer));
cmd.Parameters.Add(new SqlParameter("@id", _KeyValue));
cmd.ExecuteNonQuery();
_Offset = count;
}
else
{
// for all updates after the first one we use the TSQL command .WRITE() to append the data in the database
SqlCommand cmd = new SqlCommand(
@"UPDATE [dbo].[" + _TableName + @"]
SET [" + _BinaryColumn + @"].WRITE(@chunk, NULL, @length)
WHERE [" + _KeyColumn + "] = @id",
_Connection);
cmd.Parameters.Add(new SqlParameter("@chunk", buffer));
cmd.Parameters.Add(new SqlParameter("@length", count));
cmd.Parameters.Add(new SqlParameter("@id", _KeyValue));
cmd.ExecuteNonQuery();
_Offset += count;
}
}
catch (Exception e)
{
// log errors here
}
}
// this method will be called as part of the Stream implementation when we try to read from our VarbinaryStream class.
public override int Read(byte[] buffer, int offset, int count)
{
try
{
long bytesRead = _SQLReader.GetBytes(0, _SQLReadPosition, buffer, offset, count);
_SQLReadPosition += bytesRead;
return (int)bytesRead;
}
catch (Exception e)
{
// log errors here
}
return -1;
}
public override bool CanRead
{
get { return _AllowedToRead; }
}
#region unimplemented methods
public override bool CanSeek
{
get { return false; }
}
public override bool CanWrite
{
get { return true; }
}
public override void Flush()
{
throw new NotImplementedException();
}
public override long Length
{
get { throw new NotImplementedException(); }
}
public override long Position
{
get
{
throw new NotImplementedException();
}
set
{
throw new NotImplementedException();
}
}
public override long Seek(long offset, SeekOrigin origin)
{
throw new NotImplementedException();
}
public override void SetLength(long value)
{
throw new NotImplementedException();
}
#endregion unimplemented methods
}
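For the table in the question, the usage could look roughly like this. This is a sketch, not part of the original post: it assumes the question's Models.ELOGTESTEEntities context is DbContext-based and reuses the BIGDATA table/column names, and it returns a FileStreamResult so ASP.NET MVC streams the response instead of buffering a byte[]:
public FileResult Download()
{
    var context = new Models.ELOGTESTEEntities();

    // Stream the BIGDATA column of the row with id = 1 straight from SQL Server.
    var stream = new VarbinaryStream(
        context.Database.Connection.ConnectionString,
        "BIGDATA",   // table name
        "BIGDATA",   // varbinary(max) column
        "id",        // key column
        1,           // key value
        true);       // open for reading

    // File(Stream, ...) writes the stream to the response in chunks.
    return File(stream, "text/plain", "BigTest.txt");
}
One caveat: VarbinaryStream never closes its reader or connection (it does not override Dispose), so for production use you would want to add that, for example by overriding Close to dispose _SQLReader and _Connection.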
Try loading the data with EF's AsNoTracking() option.
Sample: MyContext.MyTable.AsNoTracking().Where( x => x.....)
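Applied to the query from the question, that would look roughly like this (a sketch; note that AsNoTracking only skips change tracking, so the 1.2 GB byte[] is still materialized in memory and this alone may not avoid the OutOfMemoryException):
// Same query as in the question's controller, without change tracking.
var arquivo = (from item in context.BIGDATA.AsNoTracking()
               where item.id == idArquivo
               select item).Single();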
It appears Entity Framework doesn't support streaming data to or from a varbinary column.
You have a couple of options.
Edit: Assuming you're using .NET 4.5, you should use SqlDataReader.GetStream. This lets you stream the file without loading the whole thing into memory.
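A minimal sketch of that approach against the question's table (the helper class and method names and the 80 KB chunk size are my own, not from the original answer):
using System.Data;
using System.Data.SqlClient;
using System.IO;

public static class BigDataStreaming
{
    // Copies the varbinary(max) value of one row to any destination stream
    // (Response.OutputStream, a FileStream, ...) without buffering it in memory.
    public static void CopyBigDataTo(string connectionString, long id, Stream destination)
    {
        using (var connection = new SqlConnection(connectionString))
        using (var cmd = new SqlCommand(
            "SELECT [BIGDATA] FROM [dbo].[BIGDATA] WHERE [id] = @id", connection))
        {
            cmd.Parameters.AddWithValue("@id", id);
            connection.Open();

            // SequentialAccess keeps SqlDataReader from buffering the whole blob.
            using (var reader = cmd.ExecuteReader(CommandBehavior.SequentialAccess))
            {
                if (reader.Read())
                {
                    // GetStream (.NET 4.5+) exposes the column as a Stream.
                    using (var blob = reader.GetStream(0))
                    {
                        blob.CopyTo(destination, 81920); // copy in 80 KB chunks
                    }
                }
            }
        }
    }
}
From the Download action you could call BigDataStreaming.CopyBigDataTo(connectionString, 1, Response.OutputStream) after setting the Content-Type and Content-Disposition headers yourself, or adapt it to return the stream from GetStream wrapped in a FileStreamResult.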