#include "crawler/crawler.h"
#include <getopt.h>
#include <cstdlib>
#include <iostream>
#include <string>
#include <unordered_map>
// ProducerConsumerQueue and util are assumed to be pulled in through crawler/crawler.h.

#define PATH_TO_BLACKLIST "/bin/blacklist.txt"
#define PATH_TO_VISITED_URL "bin/urls.txt"
#define PATH_TO_HTML_DIR "bin/html/"
#define PATH_TO_INDEX "bin/index/wordIDX"
#define PATH_TO_DOC_INDEX "bin/index/docIDX"
using namespace std;

int main( int argc, char *argv[] )
{
/*
 *
 * Settings flags to control the program on start-up,
 * read in via the command line, with default values.
 *
 * string : Mode : getting content from the web vs. locally
 *
 * string : Seed : filename of the list of starting URLs
 *
 * int : numberOfSpiders : number of spiders the crawler spawns
 *
 * int : numberOfParsers : number of parsers spawned
 *
 * bool : restoreFromLog : whether the program should load from a saved state
 */
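// Example invocation (the binary name "crawler" is an assumption; only --mode and
// --num_crawlers are parsed below):
//
//     ./crawler --mode=local --num_crawlers=4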
// Default settings; the command-line options parsed below may override them.
// (Defaulting mode to "web" is an assumption.)
string mode = "web";
int numberOfSpiders = 1;
opterr = true;
int choice;
int option_index = 0;
option long_options[] = {
{"mode", required_argument, nullptr, 'm'},
{"num_crawlers", required_argument, nullptr, 'c'},
{nullptr, 0, nullptr, 0}    // getopt_long requires a zero-filled terminating entry
};
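// getopt_long returns the short character of each matched option and -1 once the
// arguments are exhausted; the "m:c:" optstring marks both options as taking a value.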
while ((choice = getopt_long(argc, argv, "m:c:", long_options, &option_index)) != -1) {
switch (choice) {
case 'm':
mode = optarg;
if (mode != "web" && mode != "local") {
cerr << "Mode must be \"web\" or \"local\"";
exit(1);
}
break;
case 'c':
numberOfSpiders = atoi(optarg);
if (numberOfSpiders < 1) {
cerr << "Invalid number of crawlers!";
exit(1);
}
if (numberOfSpiders > 100) {
cerr << "Too many crawlers!";
exit(1);
}
break;
default:
cerr << "Unknown input option";
exit(1);
}
}
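// Shared URL frontier: the queue of URLs waiting to be crawled. ProducerConsumerQueue
// is assumed to be the project's thread-safe blocking queue, so spider threads can
// Pop from it while this thread Pushes the seed URLs.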
ProducerConsumerQueue < string > urlFrontier;
char *seeds;
if ( mode == "local" )
seeds = util::getFileMap( "/tests/localSeed.txt" );
else
seeds = util::getFileMap( "/tests/webSeed.txt" );
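// Walk the mapped seed file character by character, treating each newline-terminated
// line as one URL to push onto the frontier.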
string testFile;
while ( *seeds )
{
if ( *seeds == '\n')
{
urlFrontier.Push(testFile);
testFile = "";
}
else
testFile.push_back(*seeds);
++seeds;
}
if ( !testFile.empty() )
urlFrontier.Push(testFile);    // push the final seed, since the file may not end with a newline
cout << "Pushed File\n";
//urlFrontier.Push("tests/store.html");
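// Map from document URL to document ID, handed to the spiders so they can record
// which pages have already been fetched (the exact semantics are an assumption; see crawler.h).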
unordered_map < string, int > *docMapLookUp = new unordered_map < string, int >( );
Crawler crawler( mode, &urlFrontier );
crawler.SpawnSpiders(numberOfSpiders , docMapLookUp);