I have the following file upload handler:
using System;
using System.IO;
using System.Web;

public class FileUploader : IHttpHandler
{
    public void ProcessRequest(HttpContext context)
    {
        HttpRequest request = context.Request;
        context.Response.ContentType = "text/html";
        context.Response.ContentEncoding = System.Text.Encoding.UTF8;
        context.Response.Cache.SetCacheability(HttpCacheability.NoCache);

        var tempPath = request.PhysicalApplicationPath + "\\Files\\TempFiles\\";

        // The entire request body is buffered into memory here
        byte[] buffer = new byte[request.ContentLength];
        using (BinaryReader br = new BinaryReader(request.InputStream))
        {
            br.Read(buffer, 0, buffer.Length);
        }

        var tempName = WriteTempFile(buffer, tempPath);

        context.Response.Write("{\"success\":true}");
        context.Response.End();
    }

    public bool IsReusable
    {
        get { return true; }
    }

    private string WriteTempFile(byte[] buffer, string tempPath)
    {
        var fileName = GetUniqueFileName(tempPath);
        File.WriteAllBytes(tempPath + fileName, buffer);
        return fileName;
    }

    private string GetUniqueFileName(string tempPath)
    {
        var guid = Guid.NewGuid().ToString().ToUpper();
        while (File.Exists(tempPath + guid))
        {
            guid = Guid.NewGuid().ToString().ToUpper();
        }
        return guid;
    }
}
When I upload large files, this causes an OutOfMemoryException. Could someone tell me the right way to upload large files with a handler like this?
There is no need to load the whole file into memory just to write it somewhere else. You should be using a small buffer (maybe 8 KB) and looping over the streams, or, on .NET 4.0, the Stream.CopyTo method (a hand-rolled version of that loop is sketched further below for older frameworks). For example:
// tempPath here must be the full path of the destination file, not just the folder
using (var newFile = File.Create(tempPath))
{
    request.InputStream.CopyTo(newFile);
}
(which does the small-buffer/loop for you, using a 4 KB buffer by default, or allowing a custom buffer size to be passed via an overload)
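If you are on a framework earlier than 4.0, where Stream.CopyTo is not available, the same idea can be written by hand. Here is a minimal sketch of such a loop (the 8 KB buffer size is just the figure suggested above, not a requirement, and the CopyStream helper name is my own):

// Hand-rolled equivalent of CopyTo: read the input stream in small
// chunks and write each chunk out, so the full file never sits in memory.
private static void CopyStream(Stream input, Stream output)
{
    var buffer = new byte[8 * 1024]; // ~8 KB working buffer
    int bytesRead;
    while ((bytesRead = input.Read(buffer, 0, buffer.Length)) > 0)
    {
        output.Write(buffer, 0, bytesRead);
    }
}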
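To tie this back to the handler in the question, WriteTempFile could take the request stream instead of a byte array and copy it straight to the destination file. A rough sketch reusing the question's GetUniqueFileName helper (the stream parameter and the Path.Combine call are my own additions, not part of the original code):

// Sketch: stream the upload directly to disk instead of buffering it all.
private string WriteTempFile(Stream input, string tempPath)
{
    var fileName = GetUniqueFileName(tempPath);
    using (var newFile = File.Create(Path.Combine(tempPath, fileName)))
    {
        input.CopyTo(newFile); // copies in small chunks
    }
    return fileName;
}

ProcessRequest would then call WriteTempFile(request.InputStream, tempPath) and skip the byte[] buffer entirely.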