GA4

Tuesday, February 26, 2013

Downloading Large Size Blobs from MS Azure

To download files from Azure blob storage that are bigger in size, you can try the following code:

 Downloading Large Size Blobs in MS Azure

// Downloads a blob from Azure storage in fixed-size chunks so that large
// blobs never have to be held in memory in one piece.
internal class DownloadAzureBlob
{
    // Call Download(fileName, localFilePath) where fileName is the name of the
    // blob you want to retrieve and localFilePath is the path where you want
    // to store it on the local system.
    private static void Download(string fileName, string localFilePath)
    {
        int bufferSize = 400 * 1024; // Chunk size: 400 KB per request.
        // 'using' guarantees the file handle is released even when a
        // download block throws mid-transfer.
        using (FileStream fileStream = File.OpenWrite(localFilePath))
        {
            long fileSize = FileSize(fileName);
            // Ceiling division: the original "(size / buffer) + 1" requested an
            // extra empty block whenever the blob size was an exact multiple
            // of the buffer size.
            int blockCount = (int)((fileSize + bufferSize - 1) / bufferSize);
            for (int i = 0; i < blockCount; i++)
            {
                // Widen before multiplying so the offset does not overflow
                // Int32 for blobs larger than ~2 GB.
                long offSet = (long)bufferSize * i;
                string downloaded = DownLoadBlock(fileName, offSet, bufferSize);
                byte[] bufferBytes = Convert.FromBase64String(downloaded);
                fileStream.Write(bufferBytes, 0, bufferBytes.Length);
            }
        }
    }

    // Resolves the named block blob inside the "photos" container.
    private static CloudBlockBlob GetBlockBlob(string fileName)
    {
        CloudBlobClient blobStorage = Main._cloudBlobClient;
        CloudBlobContainer container = blobStorage.GetContainerReference("photos");
        return container.GetBlockBlobReference(fileName);
    }

    // Returns the blob's size in bytes, or 0 when the blob does not exist.
    private static long FileSize(string fileName)
    {
        var blob = GetBlockBlob(fileName);
        if (blob.Exists())
        {
            return blob.Properties.Length;
        }
        return 0;
    }

    // Reads up to blockSize bytes starting at offSet and returns them
    // base64-encoded; returns an empty string when the blob does not exist.
    private static string DownLoadBlock(string fileName, long offSet, int blockSize)
    {
        var blob = GetBlockBlob(fileName);
        if (blob.Exists())
        {
            // Dispose the blob stream once the block has been read.
            using (BlobStream reader = blob.OpenRead())
            {
                reader.Seek(offSet, SeekOrigin.Begin);
                byte[] bufferBytes = new byte[blockSize];
                // NOTE(review): Stream.Read may return fewer bytes than
                // requested; the caller tolerates short blocks because the
                // base64 payload carries its own length.
                int total = reader.Read(bufferBytes, 0, blockSize);
                return Convert.ToBase64String(bufferBytes, 0, total);
            }
        }
        return string.Empty;
    }
}

public static class BlobExtensions
{
    // Returns true when the blob exists in storage, false when the service
    // reports ResourceNotFound; any other storage error is rethrown.
    public static bool Exists(this CloudBlob blob)
    {
        try
        {
            // FetchAttributes performs a lightweight HEAD-style request that
            // fails with ResourceNotFound for a missing blob.
            blob.FetchAttributes();
        }
        catch (StorageClientException e)
        {
            // Guard clause: only "not found" maps to false; everything else
            // propagates with its original stack trace.
            if (e.ErrorCode != StorageErrorCode.ResourceNotFound)
            {
                throw;
            }
            return false;
        }
        return true;
    }
}


Now you can play around with the large blob by changing the size of the buffer. 

Uploading the blob in the Azure Storage

Uploading large size blob in the Azure Storage

// Upload the posted file to blob storage, then read it back into memory.
// NOTE(review): the "yyyy/MM/dd" slashes become virtual directories in the
// blob name — confirm that is intentional.
string fileName = string.Format("{0}_{1}", DateTime.Now.ToString("yyyy/MM/dd"), FileUpload.FileName);
string blobUrl = Storage.PutBlog(FileUpload.PostedFile.InputStream, fileName);
Stream st = Storage.GetBlog(blobUrl);
st.Position = 0;
byte[] data = new byte[st.Length];
// Stream.Read is not guaranteed to fill the buffer in a single call;
// loop until the whole blob has been read (or the stream ends early).
int read = 0;
while (read < data.Length)
{
    int n = st.Read(data, read, data.Length - read);
    if (n == 0)
    {
        break; // end of stream reached before the expected length
    }
    read += n;
}