// NOTE(review): "Newer" / "Older" below are diff-viewer navigation labels that
// leaked into this file during a copy/scrape — commented out so the file compiles.
// Newer
// Older
//
// Created by Ben Bergkamp on 1/31/18.
//
#include "crawler.h"
/*
*
* @param num_spiders  number of spiders to create
* Creates a number of spiders and starts new threads for them
*
*/
{
	// Spawn num_spiders workers. Ownership of each Spider is held in
	// this->spiders until the wait loop below reclaims it.
	// FIXME: prefer std::unique_ptr<Spider> so ownership is explicit.
	for ( size_t i = 0; i < num_spiders; i++ )
		{
		Spider *temp = new Spider( this->mode, this->urlFrontier, this->IndexerQueue );
		temp->StartThread( );
		this->spiders.push_back( temp );
		}

	/*
	 * Wait for all of the spider threads to finish running,
	 * reclaiming each Spider as its thread joins.
	 */
	cout << "Waiting for spiders to finish...\n";
	// NOTE(review): a stray "*/" that followed this line in the original was
	// removed — it had no matching "/*" and broke compilation.
	while ( !spiders.empty( ) )
		{
		Spider *spider = spiders.back( );
		spiders.pop_back( );
		spider->WaitForFinish( );
		// BUG FIX: the original assigned spider = 0 *before* delete, so
		// "delete spider" deleted a null pointer (a no-op) and every Spider
		// object leaked. Delete the still-valid pointer instead.
		delete spider;
		}
}
{
//cout << "Waiting for spiders to finish...\n";
for ( Spider *spider : spiders )
{
spider->kill( );
//delete spider; //FIXME do this in destructor?