-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathcrawler.js
More file actions
105 lines (81 loc) · 2.44 KB
/
crawler.js
File metadata and controls
105 lines (81 loc) · 2.44 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
const redis = require('./redis-client');
const axios = require('axios');
const cheerio = require('cheerio');
const utils = require('./utils/index');
// Running count of every URL recorded across the whole crawl; incremented in
// _urlCount and reported on the crawl record as `urls_crawled`.
let total_crawled = 0;
// Crawl-record id for the current job; assigned from the master's message.
let id = '';
// Recursively crawl starting at `url`: record every link found on the page,
// then descend into each discovered link until `level` is exhausted.
async function _crawl(url, level) {
  // Base case: depth exhausted (also guards against negative input).
  if (level <= 0) return;
  let urls = {};
  // Check request cache to see if this page's links are already stored.
  const request_cached = await redis.getRequestCache(url);
  if (utils.isnull(request_cached)) {
    // Not cached: fetch the page and extract its absolute links.
    urls = await _getPage(url).then(_getLinks);
    // Store links in cache
    try {
      await redis.setExRequestCache(url, JSON.stringify(urls));
    } catch (err) {
      console.log('caught error when requesting cache');
      throw err;
    }
  } else urls = JSON.parse(request_cached);
  // Add newly crawled urls to record
  let record = JSON.parse(await redis.getCrawlRecord(id));
  let updatedRecord = _urlCount(urls, record);
  updatedRecord.urls_crawled = total_crawled;
  // Update record in datastore
  await redis.setCrawlRecord(id, JSON.stringify(updatedRecord));
  // BUG FIX: the original recursed on the same `url` (`_crawl(url, level-1)`),
  // so it only re-counted the same cached link set once per level and never
  // visited any discovered page. Descend into each found URL instead.
  for (const next of Object.values(urls)) {
    await _crawl(next, level - 1);
  }
}
// Fold the URLs discovered on one page into the crawl record's per-URL hit
// counts, bumping the module-wide crawl total for each one. Mutates and
// returns the given record.
function _urlCount(urls, record) {
  for (const found of Object.values(urls)) {
    record.urls[found] = record.urls[found] === undefined ? 1 : record.urls[found] + 1;
    total_crawled++;
  }
  return record;
}
// Scan a loaded page for anchor tags and collect the absolute (http/https)
// hrefs, keyed by the anchor's position among all anchors on the page.
async function _getLinks($) {
  const found = {};
  const absolutePattern = /^https?:\/\//i;
  const anchors = $('a');
  $(anchors).each((index, anchor) => {
    const href = $(anchor).attr('href');
    // Skip relative links and anchors without an href.
    if (absolutePattern.test(href)) found[index] = href;
  });
  return found;
}
// Fetch the page at `url` and return its HTML loaded into a cheerio handle.
async function _getPage(url) {
  const { data } = await axios.get(url);
  return cheerio.load(data);
}
// Receive a crawl job from the master process: { id, seedurl, levels }.
process.on('message', async (message) => {
  id = message.id;
  try {
    await _crawl(message.seedurl, message.levels);
  } catch (err) {
    console.log('caught error when triggering crawl');
    // FIX: the original rethrew without updating the record, leaving its
    // status permanently stale. Mark the record failed first, then rethrow.
    // NOTE(review): the rethrow escapes this async listener as an unhandled
    // rejection — the process.on('error') handler below does NOT catch
    // promise rejections — presumably the master detects the resulting
    // worker crash; confirm against the master's exit handling.
    try {
      const failed = JSON.parse(await redis.getCrawlRecord(id));
      failed.status = 'error';
      await redis.setCrawlRecord(id, JSON.stringify(failed));
    } catch (recordErr) {
      // Best-effort bookkeeping: still surface the original crawl error.
      console.log(recordErr);
    }
    throw err;
  }
  // Get record and mark as complete
  const final_record = JSON.parse(await redis.getCrawlRecord(id));
  final_record.status = 'complete';
  await redis.setCrawlRecord(id, JSON.stringify(final_record));
});
// Last-resort logging for 'error' events emitted on this worker process.
process.on('error', function handleProcessError(err) {
  console.log('in process error handler!!!');
  console.log(err);
});