From 1cef72a4e285ccdfce6bce387f857b0ab317155d Mon Sep 17 00:00:00 2001
From: vcday <vcday@umich.edu>
Date: Tue, 20 Mar 2018 18:05:44 -0400
Subject: [PATCH] Remove leftover merge conflict markers in crawler/spider.cpp

Drop the conflict markers and the dead HEAD-side lookup block that an
earlier merge left behind inside Spider::shouldURLbeCrawled().

---
 crawler/spider.cpp | 20 --------------------
 1 file changed, 20 deletions(-)

diff --git a/crawler/spider.cpp b/crawler/spider.cpp
index b1818d7..42a4d96 100644
--- a/crawler/spider.cpp
+++ b/crawler/spider.cpp
@@ -157,26 +157,6 @@ bool Spider::shouldURLbeCrawled( size_t docID )
 		this->duplicateUrlMap->insert(std::make_pair(docID, 1));
 		return true;
 		}
-
-<<<<<<< HEAD
-	auto locationOnDisk = this->docMapLookup->find( url.CompleteUrl );
-
-	//bool protectedByRobots = checkRobots( url );
-	//if it doesnt find anything for that url key
-	if ( locationOnDisk == this->docMapLookup->end( ) )
-		{
-		return true;
-		}
-	else
-		{
-		//Just for testing
-		Document::PrintDocMap( url.CompleteUrl, locationOnDisk->second );
-		}
-	return false;
-	 */
-	return true;
-=======
->>>>>>> 36fc45a221e65d4a3a55422486c6e3b8d4aae369
 	}
 
 /*
-- 
GitLab