I want to transfer large files around 20-200 GG from Blob to FTP.
I am using below code to perform the same but getting memory out of exception after the 500 MB is reached.
public void TriggerEfaTrasfer()
{
string blobConnectionString = ConfigurationManager.AppSettings["BlobConnectionString"];
string blobContainer = ConfigurationManager.AppSettings["FileContainer"];
CloudStorageAccount storageaccount = CloudStorageAccount.Parse(blobConnectionString);
var blobClient = storageaccount.CreateCloudBlobClient();
var container = blobClient.GetContainerReference(blobContainer);
var blobs = container.ListBlobs();
TransferFilesFromBlobToEfa(blobs);
}
private void TransferFilesFromBlobToEfa(IEnumerable<IListBlobItem> blobitem)
{
try
{
OpenConnection();
foreach (var item in blobitem.Where((blobItem, type) => blobItem is CloudBlockBlob))
{
UploadFileStream(item as CloudBlockBlob);
}
// List all additional subdirectories in the current directory, and call recursively:
foreach (var item in blobitem.Where((blobItem, type) => blobItem is CloudBlobDirectory))
{
var directory = item as CloudBlobDirectory;
Directory.CreateDirectory(directory.Prefix);
TransferFilesFromBlobToEfa(directory.ListBlobs());
}
}
catch (Exception ex)
{
// throw ex.InnerException;
log.Error(ex);
}
finally
{
ftpConnection.Close();
}
}
private void UploadFileStream(CloudBlockBlob blobFile)
{
string fileName = Path.GetFileName(blobFile.Name);
FtpWebRequest request = (FtpWebRequest)WebRequest.Create("ftp://" + ConfigurationManager.AppSettings["StagingServerHostName"] + "//" + fileName);
request.Method = WebRequestMethods.Ftp.UploadFile;
request.Credentials = new NetworkCredential(ConfigurationManager.AppSettings["FtpUserName"], ConfigurationManager.AppSettings["FtpPassword"]);
request.UsePassive = true;
request.UseBinary = true;
request.KeepAlive = true;
request.Timeout = 700000;
try
{
byte[] chunk = new byte[4000000];
long blobTotalLength = blobFile.Properties.Length;
long totalChunks = (blobTotalLength - 1) / 4000000 + 1;
using (Stream reqStream = request.GetRequestStream())
{
using (var fileStream = new MemoryStream())
{
if (totalChunks > 1)
{
for (int index = 0; index < totalChunks; index++)
{
try
{
if (index == 0)
{
blobFile.DownloadRangeToStream(fileStream, 0, chunk.Length);
reqStream.Write(fileStream.ToArray(), 0, chunk.Length);
}
//last chunk
else if (index == totalChunks - 1)
{
long remaningContent = blobTotalLength - (index * 4000000);
blobFile.DownloadRangeToStream(fileStream, index * 4000000, remaningContent);
reqStream.Write(fileStream.ToArray(), index * 4000000, (Int32)remaningContent);
}
else
{
blobFile.DownloadRangeToStream(fileStream, index * 4000000, chunk.Length);
reqStream.Write(fileStream.ToArray(), index * 4000000, chunk.Length);
}
}
catch (Exception ex)
{
}
}
}
else
{
blobFile.DownloadToStream(fileStream);
reqStream.Write(fileStream.ToArray(), 0, fileStream.ToArray().Length);
}
fileStream.Flush();
fileStream.Close();
reqStream.Flush();
reqStream.Close();
}
}
//Gets the FtpWebResponse of the uploading operation
FtpWebResponse response = (FtpWebResponse)request.GetResponse();
log.Warn("Response for " + fileName + " " + response.StatusDescription);
response.Close();
}
catch (Exception ex)
{
log.Error(ex);
}
}
From your description, I think the problem is because the read file is too large. The application read the full file into memory before processing it. You need to split your file into multiple files, then upload to FTP.
I suggest you using
BlobURL.download
to implement it.Here
is the detailed introduction and the sample code. You could also use Fiddler to monitor the whole process. This could help you know which part of them occurs to error.And you should also notice the FTP upload restriction. About the restriction, you can refer to this article.
If you still have questions, please let me know.