Use the Task Parallel Library with its default scheduler, which runs tasks on the ThreadPool.
Here is a minimal implementation that keeps at most 50 URLs queued at a time:
/// <summary>
/// Pulls URLs from <paramref name="getNextUrlToCrawl"/> and crawls each on a
/// ThreadPool task, keeping at most <c>maxQueueLength</c> crawls in flight.
/// Blocks until the source is exhausted AND all queued crawls have finished.
/// </summary>
/// <param name="getNextUrlToCrawl">Returns the next URL, or null when there are no more.</param>
/// <param name="crawlUrl">Action invoked (on a worker task) to crawl one URL.</param>
/// <param name="pauseInMilli">Poll interval, in milliseconds, while waiting for capacity.</param>
public static void WebCrawl(Func<string> getNextUrlToCrawl, // returns a URL or null if no more URLs
    Action<string> crawlUrl, // action to crawl the URL
    int pauseInMilli // if all threads engaged, waits for n milliseconds
    )
{
    const int maxQueueLength = 50;
    int queueLength = 0; // number of crawls queued or running; touched only via Interlocked/Volatile

    string currentUrl;
    while ((currentUrl = getNextUrlToCrawl()) != null)
    {
        // Wait for capacity BEFORE scheduling this URL. The original slept in
        // an 'else' branch after already consuming the URL, silently dropping it.
        while (Volatile.Read(ref queueLength) >= maxQueueLength)
        {
            Thread.Sleep(pauseInMilli);
        }

        string urlToCrawl = currentUrl; // per-iteration copy for the closure

        // Count the task as in-flight before it is scheduled. The original
        // incremented inside the task body, so this loop could out-run the
        // scheduler and queue far more than maxQueueLength tasks.
        Interlocked.Increment(ref queueLength);

        Task.Factory.StartNew(() => crawlUrl(urlToCrawl))
            .ContinueWith(t =>
            {
                if (t.IsFaulted)
                    Console.WriteLine(t.Exception.ToString());
                else
                    Console.WriteLine("Successfully done!");
                // Release the slot only after the result has been observed,
                // so IsFaulted exceptions are never left unobserved.
                Interlocked.Decrement(ref queueLength);
            });
    }

    // Drain: the original returned while crawls could still be running,
    // so callers had no way to know when the crawl was actually complete.
    while (Volatile.Read(ref queueLength) > 0)
    {
        Thread.Sleep(pauseInMilli);
    }
}
Dummy usage:
static void Main(string[] args)
{
    // Dummy driver: feed WebCrawl randomly generated "URLs" until the random
    // value happens to be divisible by 100, which ends the crawl.
    var rng = new Random();
    int lastValue = 0;

    WebCrawl(
        () =>
        {
            lastValue = rng.Next();
            return lastValue % 100 == 0 ? null : "Some URL: " + lastValue;
        },
        Console.WriteLine,
        500);

    Console.Read(); // keep the console window open until a key is pressed
}