I'm really not sure it will be worth your while... consider the following program:
using System;
using System.Collections.Generic;
using System.IO;

class Program
{
    // Orders files by last write time, breaking ties by full path so that
    // no two distinct files ever compare as equal.
    class DateCompare : IComparer<FileInfo>
    {
        public int Compare(FileInfo a, FileInfo b)
        {
            int result = a.LastWriteTime.CompareTo(b.LastWriteTime);
            if (result == 0)
                return StringComparer.OrdinalIgnoreCase.Compare(a.FullName, b.FullName);
            return result;
        }
    }

    public static void Main(string[] args)
    {
        DirectoryInfo root = new DirectoryInfo("c:\\Projects\\");
        DateTime start = DateTime.Now;
        long memory = GC.GetTotalMemory(false);

        FileInfo[] allfiles = root.GetFiles("*", SearchOption.AllDirectories);
        DateTime sortStart = DateTime.Now;

        List<FileInfo> files = new List<FileInfo>(20000);
        IComparer<FileInfo> cmp = new DateCompare();
        foreach (FileInfo file in allfiles)
        {
            // BinarySearch returns the bitwise complement of the insertion
            // index when the item is not found; the FullName tie-break above
            // guarantees distinct files are never reported as "found".
            int pos = files.BinarySearch(file, cmp);
            if (pos < 0) pos = ~pos;
            files.Insert(pos, file);
        }

        Console.WriteLine("Count = {0:#,###}, Read = {1}, Sort = {2}, Memory = {3:#,###}",
            files.Count, sortStart - start, DateTime.Now - sortStart,
            GC.GetTotalMemory(false) - memory);
    }
}
This is the output of the above program:
Count = 16,357, Read = 00:00:03.5793579, Sort = 00:00:06.7776777, Memory = 5,758,976
Count = 16,357, Read = 00:00:03.2173217, Sort = 00:00:06.1616161, Memory = 7,339,920
Count = 16,357, Read = 00:00:03.5083508, Sort = 00:00:06.7556755, Memory = 10,346,504
That reads the tree in about 3 seconds and allocates between 5 and 10 MB while crawling 6,931 directories and returning 16k file names. That is three times the volume you're talking about, and I'd bet most of the time is spent crawling the directory tree (I don't have a directory with 5k files in it to test against). The worst expense is always going to be the sort; if you can throw files out early by matching on file names, I would recommend doing that.
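For instance, a rough sketch of that kind of filtering, assuming the files you want can be matched with a search pattern (the "*.log" pattern and the path here are just placeholders, not anything from your setup):

using System;
using System.IO;

class FilterExample
{
    static void Main()
    {
        // Hypothetical root and pattern -- substitute your own.
        DirectoryInfo root = new DirectoryInfo("c:\\Projects\\");

        // Let the file system do the name matching up front so the sort
        // only ever sees the files you actually care about.
        FileInfo[] candidates = root.GetFiles("*.log", SearchOption.AllDirectories);

        // Sorting a much smaller array is cheap compared to sorting everything.
        Array.Sort(candidates, (a, b) => a.LastWriteTime.CompareTo(b.LastWriteTime));

        foreach (FileInfo file in candidates)
            Console.WriteLine("{0}  {1}", file.LastWriteTime, file.FullName);
    }
}

The smaller you can make the candidate set before sorting, the less the sort matters at all.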