I'm trying to transfer a file over an IHttpHandler; the code is pretty simple. However, when I start a single transfer it uses about 20% of the CPU. If I were to scale this to 20 simultaneous transfers, the CPU usage would be very high. Is there a better way I can be doing this to keep the CPU lower? The client code just sends over chunks of the file, 64 KB at a time.
public void ProcessRequest(HttpContext context)
{
    // Only read the secret key if one was supplied.
    if (context.Request.Params["secretKey"] != null)
    {
        accessCode = context.Request.Params["secretKey"];
    }

    if (accessCode == "test")
    {
        string fileName = context.Request.Params["fileName"];
        byte[] buffer = Convert.FromBase64String(context.Request.Form["data"]);
        string fileGuid = context.Request.Params["smGuid"];
        string user = context.Request.Params["user"];
        SaveFile(fileName, buffer, user);
    }
}
public void SaveFile(string fileName, byte[] buffer, string user)
{
    string dirPath = @"E:\Filestorage\" + user + @"\";
    if (!Directory.Exists(dirPath))
    {
        Directory.CreateDirectory(dirPath);
    }

    string filePath = Path.Combine(dirPath, fileName);
    // Append if the file already exists so successive chunks accumulate in order.
    using (FileStream writer = new FileStream(filePath, File.Exists(filePath) ? FileMode.Append : FileMode.Create, FileAccess.Write, FileShare.ReadWrite))
    {
        writer.Write(buffer, 0, buffer.Length);
    }
}
Here is my client code:
// Set the file name from the object.
string FileName = System.IO.Path.GetFileName(pubAttFullPath.ToString());

// Open the source file from its full path.
FileStream fileStream = new FileStream(pubAttFullPath.ToString(), FileMode.Open, FileAccess.Read);

// Chunk size that will be sent to the server.
int chunkSize = 65536;

// Unique file name.
string fileName = smGuid.ToString() + "_" + FileName;

int totalChunks = (int)Math.Ceiling((double)fileStream.Length / chunkSize);
// Loop through the whole stream and send it chunk by chunk.
for (int i = 0; i < totalChunks; i++)
{
    bool doReceive = true;
    int cpt = 0;

    // Read the current chunk once, before the retry loop, so a retry
    // resends the same bytes instead of reading past them in the stream.
    long startIndex = (long)i * chunkSize;
    long endIndex = startIndex + chunkSize > fileStream.Length ? fileStream.Length : startIndex + chunkSize;
    int length = (int)(endIndex - startIndex);
    byte[] bytes = new byte[length];
    fileStream.Read(bytes, 0, bytes.Length);

    do
    {
        // Request URL, method (POST), content length and data.
        string requestURL = "http://localhost:16935/Transfer.doit";
        HttpWebRequest request = (HttpWebRequest)WebRequest.Create(requestURL);

        // Wait 5 minutes for an answer before closing the connection.
        request.Timeout = 300000;
        request.Method = "POST";
        request.ContentType = "application/x-www-form-urlencoded";

        // The chunk (buffer) is converted to a Base64 string that the handler converts back to bytes.
        string requestParameters = @"fileName=" + fileName + @"&secretKey=test" + @"&currentChunk=" + i + @"&totalChunks=" + totalChunks + @"&smGuid=" + smGuid
            + "&user=" + userSID.ToString() +
            "&data=" + HttpUtility.UrlEncode(Convert.ToBase64String(bytes));

        // Finally, the whole request is converted to bytes that are transferred to the HttpHandler.
        byte[] byteData = Encoding.UTF8.GetBytes(requestParameters);
        request.ContentLength = byteData.Length;
        try
        {
            using (Stream writer = request.GetRequestStream())
            {
                writer.Write(byteData, 0, byteData.Length);
            }

            // Here we receive the response from the HttpHandler.
            using (StreamReader stIn = new StreamReader(request.GetResponse().GetResponseStream()))
            {
                string strResponse = stIn.ReadToEnd();
            }

            doReceive = true;
        }
        catch (WebException webException)
        {
            if (webException.Status == WebExceptionStatus.ConnectFailure ||
                webException.Status == WebExceptionStatus.ConnectionClosed ||
                webException.Status == WebExceptionStatus.ReceiveFailure ||
                webException.Status == WebExceptionStatus.SendFailure ||
                webException.Status == WebExceptionStatus.Timeout)
            {
                // Transient network error: wait five seconds, then retry this chunk.
                Thread.Sleep(5000);
                doReceive = false;
                cpt++;
            }
            else
            {
                // Any other kind of WebException is not retried.
                doReceive = true;
            }
        }
        catch (Exception e)
        {
            doReceive = true;
        }
    }
    // Retry the current chunk up to 3 times before giving up.
    while (doReceive == false && cpt < 3);
}

// Close the source file once all chunks have been sent.
fileStream.Close();
I haven't tested this theory, but working with FromBase64String
may be the cause. I found this case where someone was running out of memory using this method.
You might try FromBase64Transform instead, which is designed to handle a stream of data.
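For example, here is a rough sketch of what that could look like. It assumes the handler can get at the Base64 text as a plain stream (e.g. the client posts it as the raw request body rather than a URL-encoded form field); the stream and path parameters are just placeholders for illustration:

using System.IO;
using System.Security.Cryptography;

// Decode Base64 from a stream and append it to a file without ever
// materializing the whole decoded chunk as one large byte[].
public void DecodeBase64ToFile(Stream base64Input, string filePath)
{
    using (var transform = new FromBase64Transform())
    using (var decoded = new CryptoStream(base64Input, transform, CryptoStreamMode.Read))
    using (var output = new FileStream(filePath, FileMode.Append, FileAccess.Write))
    {
        byte[] buffer = new byte[8192];
        int read;
        // CryptoStream runs the FromBase64Transform as we read, 8 KB at a time.
        while ((read = decoded.Read(buffer, 0, buffer.Length)) > 0)
        {
            output.Write(buffer, 0, read);
        }
    }
}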
Or, if you don't need Base64 for any particular reason, check out this solution from Scott Hanselman.
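If you go that route, the general idea (this is my own hedged sketch, not necessarily the exact linked solution) is to POST the raw chunk bytes as the request body, put the metadata on the query string, and have the handler stream Request.InputStream straight to disk. It reuses the path and parameter names from your code and assumes .NET 4+ for Stream.CopyTo:

// The client would send: POST Transfer.doit?secretKey=...&fileName=...&user=...
// with ContentType = "application/octet-stream" and the raw chunk bytes as the body.
public void ProcessRequest(HttpContext context)
{
    if (context.Request.QueryString["secretKey"] != "test")
    {
        context.Response.StatusCode = 403;
        return;
    }

    string user = context.Request.QueryString["user"];
    string fileName = context.Request.QueryString["fileName"];
    string dirPath = Path.Combine(@"E:\Filestorage\", user);
    Directory.CreateDirectory(dirPath); // no-op if the directory already exists

    string filePath = Path.Combine(dirPath, fileName);
    using (var output = new FileStream(filePath, FileMode.Append, FileAccess.Write, FileShare.Read))
    {
        // No Base64, no URL decoding: copy the request body straight to the file.
        context.Request.InputStream.CopyTo(output);
    }
}

On the client side that removes the Convert.ToBase64String and HttpUtility.UrlEncode calls entirely, which should cut both CPU usage and the roughly 33% size overhead that Base64 adds to each chunk.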