
Upload Large File to Azure Storage using Chunk Upload...

Hi, I am struggling to upload a large file to Azure Blob Storage using chunk upload, but it fails: only a single chunk ever reaches the blob. If I upload a 10 MB file, the blob only contains 34.3 KB. Can you help me, please? What am I missing? Below is the exact code, and I have attached my solution as a zip.

index.cshtml

@{
    ViewData["Title"] = "Home Page";
}

Game Upload


@{
    var asyncSettings = new Syncfusion.EJ2.Inputs.UploaderAsyncSettings { SaveUrl = "https://localhost:44357/api/Default/Save", RemoveUrl = "https://localhost:44357/api/Default/Remove", RetryCount = 5, RetryAfterDelay = 3000, ChunkSize = 102400 };
}
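
The Uploader element itself appears to have been stripped when the post was rendered. A minimal sketch of the missing markup, assuming the EJ2 tag helpers are registered in _ViewImports.cshtml (the id is illustrative):

<!-- Binds the Uploader to the asyncSettings declared above; -->
<!-- chunk upload is enabled because ChunkSize is set in the settings. -->
<ejs-uploader id="gameUploader" asyncSettings="@asyncSettings"></ejs-uploader>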


Server-Side Scripts


[HttpPost]
[Route("Save")]
public async Task Save(IList<IFormFile> chunkFile, IList<IFormFile> UploadFiles)
{
    long size = 0;
    try
    {
        string blobName = "mytest1.zip";
        List<byte[]> b = new List<byte[]>();

        // For chunk upload: buffer each posted chunk into memory.
        foreach (var file in chunkFile)
        {
            size += file.Length;
            var ms = new MemoryStream();
            file.OpenReadStream().CopyTo(ms);
            byte[] value = ms.ToArray();
            b.Add(value);
        }

        // Flatten the buffered chunks into a single byte array.
        IEnumerable<byte> result = Enumerable.Empty<byte>();
        foreach (byte[] bytes in b)
        {
            result = result.Concat(bytes);
        }
        byte[] newArray = result.ToArray();

        var storageCredentials = new StorageCredentials("", "");
        var cloudStorageAccount = new CloudStorageAccount(storageCredentials, true);
        var cloudBlobClient = cloudStorageAccount.CreateCloudBlobClient();
        var container = cloudBlobClient.GetContainerReference("filo");
        await container.CreateIfNotExistsAsync();
        var newBlob = container.GetBlockBlobReference(blobName);

        // This sets the size of the blocks used when a Put Blob is broken
        // into block uploads because the file is larger than
        // SingleBlobUploadThresholdInBytes. The default is 4 MB (4 * 1024 * 1024).
        newBlob.StreamWriteSizeInBytes = 6 * 1024 * 1024;
        newBlob.StreamMinimumReadSizeInBytes = 6 * 1024 * 1024;

        // Set the blob upload timeout and retry strategy.
        BlobRequestOptions options = new BlobRequestOptions();
        options.ServerTimeout = new TimeSpan(0, 180, 0);
        options.RetryPolicy = new ExponentialRetry(TimeSpan.Zero, 20);

        // Split the buffered data into 2 MB blocks and upload each one.
        HashSet<string> blocklist = new HashSet<string>();
        foreach (FileBlock block in GetFileBlocks(newArray))
        {
            await newBlob.PutBlockAsync(
                block.Id,
                new MemoryStream(block.Content, true), null,
                null, options, null);

            blocklist.Add(block.Id);
        }

        // Commit the blocks uploaded in the loop above.
        await newBlob.PutBlockListAsync(blocklist, null, options, null);

        Console.WriteLine("Done");
    }
    catch (Exception e)
    {
        // Response.Clear();
        Response.StatusCode = 204;
        Response.HttpContext.Features.Get<IHttpResponseFeature>().ReasonPhrase = e.Message;
    }
}
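
The code above calls a GetFileBlocks helper and a FileBlock type that are not shown here (they live in the attached solution). A minimal sketch of what they might look like, assuming 2 MB blocks and base64 block IDs as the comments describe (PutBlockAsync requires base64 IDs of equal length within one blob):

// Hypothetical reconstruction of the helper used above.
public class FileBlock
{
    public string Id { get; set; }
    public byte[] Content { get; set; }
}

private static IEnumerable<FileBlock> GetFileBlocks(byte[] fileContent)
{
    const int blockSize = 2 * 1024 * 1024; // 2 MB per block
    int blockId = 0;
    for (int offset = 0; offset < fileContent.Length; offset += blockSize)
    {
        int length = Math.Min(blockSize, fileContent.Length - offset);
        byte[] content = new byte[length];
        Buffer.BlockCopy(fileContent, offset, content, 0, length);
        yield return new FileBlock
        {
            // 4-byte counter encodes to a fixed-length base64 string.
            Id = Convert.ToBase64String(BitConverter.GetBytes(blockId++)),
            Content = content
        };
    }
}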



Attachment: svc_69be2b70.zip

6 Replies

BC Berly Christopher Syncfusion Team January 14, 2020 11:42 PM UTC

Hi Gich, 
 
Greetings from Syncfusion support. 
 
We are encountering some issues while saving the file to Azure storage and are investigating them with high priority. We will share further details within three business days (by 21st January 2020). We appreciate your patience until then. 
 
Regards, 
Berly B.C 



GG Gopi Govindasamy Syncfusion Team January 23, 2020 02:56 PM UTC

Hi Gich,

Thanks for your patience.

We have validated the reported scenario of uploading a large file to Azure storage. Each chunk arrives in a separate request, and Azure Blob Storage does not merge those separate chunk streams into a single blob by default. We suggest accumulating the chunks in session state: append each incoming chunk to the value stored under a session key, and once the final chunk arrives, read the assembled file from the session and save it to the Azure location. We have implemented this scenario; please find the code snippet and sample below for your reference.

Save.cs

[HttpPost]
[Route("Save")]
public async Task Save(IList<IFormFile> chunkFile)
{
    try
    {
        foreach (var file in chunkFile)
        {
            var httpPostedChunkFile = HttpContext.Request.Form.Files["chunkFile"];
            var chunkIndex = HttpContext.Request.Form["chunk-index"];
            var totalChunk = HttpContext.Request.Form["total-chunk"];
            using (var fileStream = file.OpenReadStream())
            {
                if (Convert.ToInt32(chunkIndex) <= Convert.ToInt32(totalChunk))
                {
                    // Buffer the current chunk and append it to the bytes
                    // accumulated in session so far.
                    var streamReader = new MemoryStream();
                    fileStream.CopyTo(streamReader);
                    var byteArr = streamReader.ToArray();
                    var content = new byte[] { };
                    if (HttpContext.Session.Get("streamFile") != null)
                    {
                        content = HttpContext.Session.Get("streamFile").Concat(byteArr).ToArray();
                    }
                    else
                    {
                        content = byteArr;
                    }
                    HttpContext.Session.Set("streamFile", content);
                }
                if (Convert.ToInt32(chunkIndex) == Convert.ToInt32(totalChunk) - 1)
                {
                    // Last chunk received: write the assembled bytes to a
                    // local file and upload that to the blob container.
                    var fileArray = HttpContext.Session.Get("streamFile");
                    var storageCredentials = new StorageCredentials("", "");
                    var cloudStorageAccount = new CloudStorageAccount(storageCredentials, true);
                    var cloudBlobClient = cloudStorageAccount.CreateCloudBlobClient();
                    var container = cloudBlobClient.GetContainerReference("filo");
                    CloudBlockBlob blockBlob = container.GetBlockBlobReference(httpPostedChunkFile.FileName);
                    using (FileStream fileStreams = new FileStream(httpPostedChunkFile.FileName, FileMode.Create))
                    {
                        for (int i = 0; i < fileArray.Length; i++)
                        {
                            fileStreams.WriteByte(fileArray[i]);
                        }
                        fileStreams.Seek(0, SeekOrigin.Begin);
                        HttpContext.Session.Remove("streamFile");
                        await blockBlob.UploadFromStreamAsync(fileStreams);
                    }
                }
            }
        }
    }
    catch (Exception e)
    {
        . . .
    }
}


Startup.cs

public void ConfigureServices(IServiceCollection services)
{
    . . .

    services.AddSession(options => {
        options.IdleTimeout = TimeSpan.FromMinutes(30);
    });

    . . .
}

public void Configure(IApplicationBuilder app, IWebHostEnvironment env)
{
    . . .

    app.UseSession();

    . . .
}
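
One detail worth noting: ASP.NET Core session state is persisted through an IDistributedCache, so ConfigureServices typically also needs a cache registration before AddSession. A sketch (the elided parts above may already include it):

// Without a registered IDistributedCache implementation, the session
// middleware cannot resolve its backing store and session reads fail.
services.AddDistributedMemoryCache();
services.AddSession(options => {
    options.IdleTimeout = TimeSpan.FromMinutes(30);
});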

 

Sample Link:  https://www.syncfusion.com/downloads/support/forum/150303/ze/svc598742026

 

Regards,

Gopi G.

 



MP Megha Patel May 4, 2021 07:44 AM UTC

For every chunk, HttpContext.Session.Get("streamFile") returns null. What should I do?


BC Berly Christopher Syncfusion Team May 5, 2021 03:29 PM UTC

Hi Megha, 
  
Can you please share the size of the file being uploaded through the Uploader component? Based on those details, we will investigate and update you from our end. 
  
Regards, 
Berly B.C 



MP Megha Patel June 22, 2021 10:16 AM UTC

[With multiple requests, the file sometimes ends up only half uploaded to Azure.] Can you please provide me a solution?

public async Task UploadFile(IList<IFormFile> UploadFiles)
{
    try
    {
        IFormFile chunkFile = Request.Form.Files[0];
        if (chunkFile.Length > 0)
        {
            var ProductName = HttpContext.Request.Form["ProductName"];
            var ProductReleaseNo = HttpContext.Request.Form["ProductReleaseNo"];
            var filehash = HttpContext.Request.Form["fileHash"];
            var chunkIndex = HttpContext.Request.Form["chunk-index"];
            var totalChunk = HttpContext.Request.Form["total-chunk"];
            bool SupportFile = Convert.ToBoolean(HttpContext.Request.Form["SupportFile"]);

            if (ProductName != "" && ProductReleaseNo != "")
            {
                using (var fileStream = chunkFile.OpenReadStream())
                {
                    if (Convert.ToInt32(chunkIndex) <= Convert.ToInt32(totalChunk))
                    {
                        // Append the current chunk to the accumulated bytes.
                        var streamReader = new MemoryStream();
                        fileStream.CopyTo(streamReader);
                        var byteArr = streamReader.ToArray();
                        if (content.Length > 0)
                        {
                            content = content.Concat(byteArr).ToArray();
                        }
                        else
                        {
                            content = byteArr;
                        }
                    }

                    if (Convert.ToInt32(chunkIndex) == Convert.ToInt32(totalChunk) - 1 || Convert.ToInt32(chunkIndex) == Convert.ToInt32(totalChunk))
                    {
                        // Final chunk: resolve the target blob and upload the
                        // assembled bytes from memory.
                        String strorageconn = Sensit.Activation.API.Helpers.Constants.AzureConnectionString;
                        CloudStorageAccount cloudStorageAccount = CloudStorageAccount.Parse(strorageconn);
                        var cloudBlobClient = cloudStorageAccount.CreateCloudBlobClient();
                        CloudBlobContainer container = cloudBlobClient.GetContainerReference("clickonce-app-container");
                        await container.CreateIfNotExistsAsync();
                        CloudBlobDirectory folder = container.GetDirectoryReference(ProductName);
                        CloudBlobDirectory ReleaseFolder = folder.GetDirectoryReference(ProductReleaseNo);
                        CloudBlockBlob blockBlob = ReleaseFolder.GetBlockBlobReference(chunkFile.FileName);

                        bytes = GetHashSha256(content);
                        var UploadFileSHA256 = BitConverter.ToString(bytes).Replace("-", string.Empty).ToLower();
                        //bool compare = CompareHashKey(UploadFileSHA256, filehash);
                        //if (compare == false)
                        //{
                        //    Response.Clear();
                        //    Response.StatusCode = 419;
                        //}
                        //else
                        //{
                        using (MemoryStream ms = new MemoryStream(content))
                        {
                            await blockBlob.UploadFromStreamAsync(ms);
                            ms.Flush();
                            content = new byte[] { };
                        }
                        Response.Clear();
                        Response.StatusCode = 200;
                        //}
                    }

                    #region Commented code [this version created a file inside the project, then uploaded it to Azure]
                    //using (FileStream fileStreams = new FileStream(chunkFile.FileName, FileMode.Create))
                    //{
                    //    for (int i = 0; i < content.Length; i++)
                    //    {
                    //        fileStreams.WriteByte(content[i]);
                    //    }
                    //    fileStreams.Seek(0, SeekOrigin.Begin);
                    //    // Hash all bytes and compare against the client-side hash.
                    //    bytes = GetHashSha256(content);
                    //    var UploadFileSHA256 = BitConverter.ToString(bytes).Replace("-", string.Empty).ToLower();
                    //    bool compare = CompareHashKey(UploadFileSHA256, filehash);
                    //    if (compare == false)
                    //    {
                    //        Response.Clear();
                    //        Response.StatusCode = 419;
                    //    }
                    //    else
                    //    {
                    //        content = new byte[] { };
                    //        await blockBlob.UploadFromStreamAsync(fileStream);
                    //        Response.Clear();
                    //        Response.StatusCode = 200;
                    //    }
                    //}
                    #endregion
                }
            }
            else
            {
                Response.Clear();
                Response.StatusCode = 404;
            }
        }
        else
        {
            Response.Clear();
            Response.StatusCode = 404;
        }
    }
    catch (Exception e)
    {
        content = new byte[] { };
        Response.Clear();
        Response.StatusCode = 400;
    }
}
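
For context, the snippet above uses content, bytes, GetHashSha256 and CompareHashKey without declaring them, so content and bytes are presumably controller-level fields. If so, that state is shared between overlapping requests, which could explain files arriving half uploaded. A sketch of a per-file buffer instead of a shared field (names are illustrative, not part of the original code):

// Requires: using System.Collections.Concurrent; using System.Linq;
// Assumed original declarations (hypothetical):
//   private static byte[] content = new byte[] { };
//   private byte[] bytes;
// Keying the buffer by file name isolates concurrent uploads:
private static readonly ConcurrentDictionary<string, byte[]> buffers =
    new ConcurrentDictionary<string, byte[]>();

private static void AppendChunk(string fileName, byte[] chunk)
{
    // Atomically append this chunk to the buffer for the given file.
    buffers.AddOrUpdate(fileName, chunk,
        (key, existing) => existing.Concat(chunk).ToArray());
}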





BC Berly Christopher Syncfusion Team June 23, 2021 02:58 PM UTC

Hi Megha, 
  
As requested earlier, please share the size of the file uploaded through the Uploader component; that will help us proceed further from our end.  
  
Regards, 
Berly B.C 

