mardi 6 mai 2014

c# - morceaux au téléchargement multipartie vers Amazon S3 utilisant des url préalablement signés en utilisant PUT demande fail beaucoup - Stack Overflow


I am trying to upload files to Amazon S3 servers using pre-signed urls. The pre-signed urls are returned to the application by a webserver.


The problem that I am facing is that the chunks keep failing more often then they succeed, this means that it take a lot of retries to "eventually" get the file onto the S3 servers.


When a chunk fails, I get this exception "The request was aborted: The request was canceled" I have been stuck with this problem for more than a couple of days and I am not sure why I am receiving that exception over and over again.


Please note that sometimes the file does manage to get transferred to the S3 servers, so there are no issues related to authentication headers etc..


I am new to working with C# and would appreciate all the help that I can get.


Also, I am trying to upload 1% of a given chunk at a time to monitor progress. This is the code that I am using for uploading the files


WebResponse response = null;
HttpWebRequest httpRequest = null;
try
{
httpRequest = WebRequest.Create(url) as HttpWebRequest;
httpRequest.Method = "PUT";

//>>Various parameter settings that I have tried.<<//
//wr.ContentType = "multipart/form-data; boundary=" + boundary;
//httpRequest.KeepAlive = false;
//httpRequest.Timeout = 60000;
//httpRequest.Timeout = -1;
//httpRequest.ReadWriteTimeout = -1;

httpRequest.ContentLength = bytesForUpload.Length;
httpRequest.Headers["Authorization"] = authHeader;
httpRequest.Headers["x-amz-date"] = dateHeader;

using (Stream dataStream = httpRequest.GetRequestStream())
{
var bytesLeft = bytesForUpload.Length; // assuming array is an array of bytes
var bytesWritten = 0;
double progress = 0.0;
int onePercent;

//Calculate 1% size of data to upload.
if (upload_filePaths.Length == 1)
{
onePercent = (int)Math.Ceiling((fileSize / 100.0));
}
else
{
//onePercent = (int)Math.Ceiling(((1.0 * 100) / fileWeightForProgress));
onePercent = (int)Math.Ceiling(((1.0 / fileWeightForProgress) * fileSize) / 100);
}

double relativeChunkProgress = 0.0;
while (bytesLeft > 0)
{
if (shouldCancel)
{
worker.CancelAsync();
return;
}
else
{
if (bytesLeft < onePercent)
{
onePercent = bytesLeft;
}

var byteStreamChunk = Math.Min(onePercent, bytesLeft);
dataStream.Write(bytesForUpload, bytesWritten, byteStreamChunk);
bytesWritten += byteStreamChunk;
bytesLeft -= byteStreamChunk;

progress = bytesWritten * 100 * 1.0 / bytesForUpload.Length;

int chunkProgress = (int)((((byteStreamChunk * 1.0) / Convert.ToInt32(fileSize)) * 100) * fileWeightForProgress);
if (chunkProgress == 0)
{
chunkProgress = (int)Math.Ceiling(((((byteStreamChunk * 1.0) / Convert.ToInt32(fileSize)) * 100) * fileWeightForProgress));
}

relativeChunkProgress = relativeChunkProgress + chunkProgress;
partsStatus[partNumber, 1] = relativeChunkProgress;

Console.WriteLine("ChunkProgress Progress : {0}", chunkProgress);
ProgressChangedDouble(this, chunkProgress);
}

response = httpRequest.GetResponse() as HttpWebResponse;
Stream stream2 = response.GetResponseStream();
StreamReader reader2 = new StreamReader(stream2);

Console.WriteLine("[RESPONSE FROM S3] : {0}", reader2.ReadToEnd());
var etag = response.Headers.Get("ETag");
Console.WriteLine("[RESPONSE FROM S3: ETAG] : {0}", etag);
}
}
catch (Exception ex)
{
//log.Error("Error uploading file", ex);
if (response != null)
{
response.Close();
response = null;
}
throw ex;
}
finally
{
httpRequest = null;
}

Thank You



I am trying to upload files to Amazon S3 servers using pre-signed urls. The pre-signed urls are returned to the application by a webserver.


The problem that I am facing is that the chunks keep failing more often then they succeed, this means that it take a lot of retries to "eventually" get the file onto the S3 servers.


When a chunk fails, I get this exception "The request was aborted: The request was canceled" I have been stuck with this problem for more than a couple of days and I am not sure why I am receiving that exception over and over again.


Please note that sometimes the file does manage to get transferred to the S3 servers, so there are no issues related to authentication headers etc..


I am new to working with C# and would appreciate all the help that I can get.


Also, I am trying to upload 1% of a given chunk at a time to monitor progress. This is the code that I am using for uploading the files


WebResponse response = null;
HttpWebRequest httpRequest = null;
try
{
httpRequest = WebRequest.Create(url) as HttpWebRequest;
httpRequest.Method = "PUT";

//>>Various parameter settings that I have tried.<<//
//wr.ContentType = "multipart/form-data; boundary=" + boundary;
//httpRequest.KeepAlive = false;
//httpRequest.Timeout = 60000;
//httpRequest.Timeout = -1;
//httpRequest.ReadWriteTimeout = -1;

httpRequest.ContentLength = bytesForUpload.Length;
httpRequest.Headers["Authorization"] = authHeader;
httpRequest.Headers["x-amz-date"] = dateHeader;

using (Stream dataStream = httpRequest.GetRequestStream())
{
var bytesLeft = bytesForUpload.Length; // assuming array is an array of bytes
var bytesWritten = 0;
double progress = 0.0;
int onePercent;

//Calculate 1% size of data to upload.
if (upload_filePaths.Length == 1)
{
onePercent = (int)Math.Ceiling((fileSize / 100.0));
}
else
{
//onePercent = (int)Math.Ceiling(((1.0 * 100) / fileWeightForProgress));
onePercent = (int)Math.Ceiling(((1.0 / fileWeightForProgress) * fileSize) / 100);
}

double relativeChunkProgress = 0.0;
while (bytesLeft > 0)
{
if (shouldCancel)
{
worker.CancelAsync();
return;
}
else
{
if (bytesLeft < onePercent)
{
onePercent = bytesLeft;
}

var byteStreamChunk = Math.Min(onePercent, bytesLeft);
dataStream.Write(bytesForUpload, bytesWritten, byteStreamChunk);
bytesWritten += byteStreamChunk;
bytesLeft -= byteStreamChunk;

progress = bytesWritten * 100 * 1.0 / bytesForUpload.Length;

int chunkProgress = (int)((((byteStreamChunk * 1.0) / Convert.ToInt32(fileSize)) * 100) * fileWeightForProgress);
if (chunkProgress == 0)
{
chunkProgress = (int)Math.Ceiling(((((byteStreamChunk * 1.0) / Convert.ToInt32(fileSize)) * 100) * fileWeightForProgress));
}

relativeChunkProgress = relativeChunkProgress + chunkProgress;
partsStatus[partNumber, 1] = relativeChunkProgress;

Console.WriteLine("ChunkProgress Progress : {0}", chunkProgress);
ProgressChangedDouble(this, chunkProgress);
}

response = httpRequest.GetResponse() as HttpWebResponse;
Stream stream2 = response.GetResponseStream();
StreamReader reader2 = new StreamReader(stream2);

Console.WriteLine("[RESPONSE FROM S3] : {0}", reader2.ReadToEnd());
var etag = response.Headers.Get("ETag");
Console.WriteLine("[RESPONSE FROM S3: ETAG] : {0}", etag);
}
}
catch (Exception ex)
{
//log.Error("Error uploading file", ex);
if (response != null)
{
response.Close();
response = null;
}
throw ex;
}
finally
{
httpRequest = null;
}

Thank You


Related Posts:

0 commentaires:

Enregistrer un commentaire