Recursive Upload from Disk to Azure Storage

  • 2016 Aug 15 – Updated to work with the latest C# client libraries

using System;

using System.IO;

using System.Threading.Tasks;

using Microsoft.WindowsAzure.Storage;

using Microsoft.WindowsAzure.Storage.Blob;



namespace UploadToStorage
{
    /// <summary>
    /// Console utility that recursively walks a local folder tree and uploads
    /// every file to an Azure Blob Storage container, preserving the relative
    /// folder path as the blob name prefix (backslashes converted to '/').
    /// Folders and files are processed with bounded parallelism.
    /// </summary>
    class Program
    {
        // NOTE(review): placeholders below must be filled in before running.
        // Never commit real account keys to source control — prefer an
        // environment variable or configuration store.
        const string StorageAccountConnectionString =
            "DefaultEndpointsProtocol=https;AccountName=[your account];AccountKey=[your key]";

        // Name of the target container. TODO: the original value was lost in
        // extraction — set this to your container name.
        const string RootContainerName = "[your container]";

        // Local root folder whose contents will be uploaded.
        const string startingFolder = @"C:\[your path]";

        // Collects messages for files/folders that could not be processed
        // (e.g. access denied) so they can be reported at the end.
        static System.Collections.Specialized.StringCollection log =
            new System.Collections.Specialized.StringCollection();

        // Counters are updated from parallel lambdas, so all increments go
        // through Interlocked (plain ++ on a shared Int64 is a data race).
        static Int64 folderCount = 0;
        static Int64 fileCount = 0;

        static CloudStorageAccount storageAccount;
        static CloudBlobClient blobClient;
        static CloudBlobContainer rootContainer;

        static void Main(string[] args)
        {
            // Open the storage account from the connection string.
            storageAccount = CloudStorageAccount.Parse(StorageAccountConnectionString);

            // Create a blob client.
            blobClient = storageAccount.CreateCloudBlobClient();

            // Get a reference to the container which we'll use to create the blobs.
            rootContainer = blobClient.GetContainerReference(RootContainerName);

            // Get the DirectoryInfo for the starting local folder that will be recursed.
            DirectoryInfo rootDir = new DirectoryInfo(startingFolder);

            // Get started.
            WalkDirectoryTree(rootDir, rootContainer);

            // Write out all the files that could not be processed.
            Console.WriteLine("Files with restricted access:");
            foreach (string s in log)
            {
                Console.WriteLine(s);
            }

            // Keep the console window open in debug mode.
            Console.WriteLine("Press any key");
            Console.ReadKey();
        }

        /// <summary>
        /// Recursively walks <paramref name="currentPath"/>, uploading each file
        /// to <paramref name="cloudContainer"/> under a blob name derived from
        /// its path relative to <see cref="startingFolder"/>. Files within a
        /// folder and sibling folders are processed in parallel.
        /// </summary>
        /// <param name="currentPath">Local folder currently being processed.</param>
        /// <param name="cloudContainer">Target Azure blob container.</param>
        //  Co-opted initially from the MSDN directory-walking sample;
        //  modified for Azure upload and parallel processing.
        private static void WalkDirectoryTree(System.IO.DirectoryInfo currentPath,
            CloudBlobContainer cloudContainer)
        {
            System.IO.FileInfo[] files = null;
            System.IO.DirectoryInfo[] subDirs = null;

            // Path of the current folder relative to the upload root.
            string pathFromRoot = currentPath.FullName.Substring(startingFolder.Length);

            // Replace the slashes in order to transition from a local filename
            // path to an Azure Storage blob name.
            string cloudFileNamePathPrefix = pathFromRoot.Replace(@"\", @"/");

            // Fix up the terminal character if there is a non-empty path:
            // at the root no end delimiter is needed, otherwise append one.
            cloudFileNamePathPrefix = cloudFileNamePathPrefix.Length == 0
                ? ""
                : cloudFileNamePathPrefix + "/";

            // Strip any leading "/" so blob names don't start with a delimiter.
            if (cloudFileNamePathPrefix.Length > 0)
            {
                if (cloudFileNamePathPrefix.Substring(0, 1).Equals("/")) { cloudFileNamePathPrefix = cloudFileNamePathPrefix.Substring(1); }
            }

            Console.WriteLine("Processing folder " + currentPath.FullName);
            System.Threading.Interlocked.Increment(ref folderCount);

            // First, process all the files directly under this folder.
            try
            {
                files = currentPath.GetFiles("*.*");
            }
            // This is thrown if even one of the files requires more permissions
            // than the application provides.
            catch (UnauthorizedAccessException e)
            {
                // This code just writes out the message and continues to recurse.
                log.Add(e.Message);
            }
            catch (System.IO.DirectoryNotFoundException e)
            {
                Console.WriteLine(e.Message);
            }

            // Set up to run as many as 5 folders and 10 files per folder
            // uploading at a time; adjust according to bandwidth.
            var fileParallelOptions = new ParallelOptions() { MaxDegreeOfParallelism = 10 };
            var folderParallelOptions = new ParallelOptions() { MaxDegreeOfParallelism = 5 };

            if (files != null)
            {
                // Blob name prefix was built above from the delta of the root
                // folder path to the current path.
                Parallel.ForEach(files, fileParallelOptions, currentFileInfo =>
                {
                    // Just keeping a count of files (thread-safe — see field note).
                    System.Threading.Interlocked.Increment(ref fileCount);

                    // Construct the new filename for the cloud; includes the
                    // path information relative to the upload root.
                    string cloudFilename = cloudFileNamePathPrefix + currentFileInfo.Name;

                    try
                    {
                        var blobRef = rootContainer.GetBlockBlobReference(cloudFilename);

                        // If the blob already exists, don't upload. If uploading
                        // to an empty location this check could be skipped.
                        if (!blobRef.Exists())
                        {
                            // Upload the blob. FileMode refers to access on the
                            // local file, not the Azure blob.
                            blobRef.UploadFromFile(currentFileInfo.FullName, FileMode.Open, null, null, null);
                            Console.WriteLine("Upload Complete for " + cloudFilename);
                        }
                    }
                    catch (Exception ex)
                    {
                        // Record the failure and keep going; one bad file
                        // shouldn't abort the whole walk.
                        log.Add(ex.Message);
                    }
                }); // end Parallel.ForEach

                // Now find all the subdirectories under this directory.
                subDirs = currentPath.GetDirectories();

                Parallel.ForEach(subDirs, folderParallelOptions, currentDirInfo =>
                {
                    // Recursive call for each subdirectory.
                    WalkDirectoryTree(currentDirInfo, cloudContainer);
                });
            } // end if (files != null)

            Console.WriteLine("Processing complete for folder " + currentPath.FullName);
            Console.WriteLine("Processed {0} folders and {1} files ", folderCount, fileCount);
        }
    }
}


Comments (1)

  1. Bob says:

    This saved me a lot of time!  Thanks!

Skip to main content