This repository was archived by the owner on Mar 3, 2022. It is now read-only.
-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathworker.js
More file actions
69 lines (59 loc) · 2.33 KB
/
worker.js
File metadata and controls
69 lines (59 loc) · 2.33 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
/**
* Filename: worker.js
*
* Worker process that handles network expensive requests
* in the background, while the server itself is allowed
* to listen for new, incoming requests.
*
* This is so that we won't stall the web process (server) itself
* when processing a network expensive request.
*
* A request is added to a FCFS scheduler (see definition in `controllers/proj2/textController.js`),
* and all information within that job is cached using Redis.
*/
/* Modules */
const throng = require("throng");
const Queue = require("bull");
const { MongoClient } = require("mongodb");
/* Setup MongoDB instance */
const uri = process.env.MONGODB_URI;
const client = new MongoClient(uri, { useUnifiedTopology: true });
const dbName = 'datasets';
/* Connect to Heroku provided URL for Redis */
const REDIS_URL = process.env.REDIS_URL;
/* Number of clustered worker processes.
   Environment variables are strings (or undefined); coerce to a number
   and fall back to 1 (throng's own default) so throng always receives
   a valid worker count. */
const workers = Number(process.env.WEB_CONCURRENCY) || 1;
/* Maximum number of jobs a single worker should process concurrently */
const maxJobsPerWorker = 50;
/**
 * Start a clustered worker: connect to the named Redis-backed work
 * queue and process queued upload jobs by persisting them to MongoDB.
 *
 * Each job's `data` is expected to carry:
 *   - `collection`: name of the target MongoDB collection
 *   - `content`: a single document, or an array of documents, to insert
 */
function start() {
  /* Connect to named work queue.
     We can have each controller define its specific work queue,
     and have the specific work queue's name globally passed to `worker.js` */
  const workQueue = new Queue("work", REDIS_URL);
  /* Process queued jobs */
  workQueue.process(maxJobsPerWorker, async (job) => {
    /* Connect to MongoDB instance.
       Rethrow on failure so Bull marks the job as failed (and can retry
       it) instead of proceeding with an unusable client. */
    try {
      await client.connect();
    } catch (err) {
      console.log(err);
      throw err;
    }
    /* DB information */
    const db = client.db(dbName);
    const collection = db.collection(job.data.collection);
    /* Get cached request data from job information */
    const data = job.data.content;
    if (Array.isArray(data)) { // Case for multiple documents being uploaded
      console.log("Received the following documents: ", data);
      await collection.insertMany(data);
    } else { // Case for single document being uploaded
      console.log("Received the following document: ", data);
      await collection.insertOne(data);
    }
    /* An async function resolves implicitly on return; no explicit
       Promise.resolve() is needed. */
  });
}
// Initialize the clustered worker process: throng forks `workers`
// child processes and invokes `start` in each of them.
// See: https://devcenter.heroku.com/articles/node-concurrency for more info
throng({ workers, start });