I have the following static class that enumerates the subdirectories of a folder, then searches every file in those directories (it seems to only work with text files, even though I don't explicitly specify that) for a given string, and returns an IEnumerable<string> holding the matching lines.
It takes about 15 seconds to go through 40 text files of roughly 250 KB each, and I think it could be faster. Could I use a better algorithm, or is there a better method of achieving this?
using System;
using System.Collections.Generic;
using System.IO;

public static class LogFileReader
{
    public static IEnumerable<string> GetLines(string path, string searchterm)
    {
        // Walk each immediate subdirectory of the root path.
        var dirs = Directory.EnumerateDirectories(path);
        List<string> thelines = new List<string>();
        foreach (var dir in dirs)
        {
            var files = Directory.EnumerateFiles(dir);
            foreach (var file in files)
            {
                using (StreamReader sr = new StreamReader(file))
                {
                    // Read line by line, keeping any line that contains
                    // the search term (case-insensitive).
                    string line = string.Empty;
                    while ((line = sr.ReadLine()) != null)
                    {
                        if (line.IndexOf(searchterm, StringComparison.CurrentCultureIgnoreCase) >= 0)
                        {
                            thelines.Add(line);
                        }
                    }
                }
            }
        }
        return thelines;
    }
}
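
For what it's worth, here is a lazier variant I sketched while experimenting: it keeps the same directory traversal and the same case-insensitive match, but streams matches with yield return and File.ReadLines instead of buffering everything into a List<string>. The class name is just a placeholder for illustration, and I haven't measured whether it is actually faster end to end, only that it starts returning results sooner:

using System;
using System.Collections.Generic;
using System.IO;

public static class LazyLogFileReader
{
    // Minimal sketch: same search as above, but with deferred execution.
    // Matches are yielded as they are found rather than collected up front.
    public static IEnumerable<string> GetLines(string path, string searchterm)
    {
        foreach (var dir in Directory.EnumerateDirectories(path))
        {
            foreach (var file in Directory.EnumerateFiles(dir))
            {
                // File.ReadLines is itself lazy, unlike File.ReadAllLines,
                // so only one line needs to be held in memory at a time.
                foreach (var line in File.ReadLines(file))
                {
                    if (line.IndexOf(searchterm, StringComparison.CurrentCultureIgnoreCase) >= 0)
                    {
                        yield return line;
                    }
                }
            }
        }
    }
}

I also wondered whether reading the files in parallel (for example, PLINQ's AsParallel over the file list) would help here, or whether this workload is I/O-bound enough that it wouldn't make much difference.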