I am using firebase-queue
to handle some server side work. When a user registers, the server will process three tasks
// Queue specs for three chained tasks. Each task's start_state equals the
// previous task's finished_state, so a task flows:
// save_user_to_firebase -> fetch_from_third_party_API -> save_to_google_datastore
var customSpecs = {
  'queue': {
    'specs': {
      // First task: no start_state, so it claims newly pushed tasks.
      'save_user_to_firebase': {
        'in_progress_state': 'save_user_to_firebase_in_progress',
        'finished_state': 'save_user_to_firebase_finished',
        'retries': 3
      },
      'fetch_from_third_party_API': {
        'start_state': 'save_user_to_firebase_finished',
        'in_progress_state': 'fetch_from_third_party_API_in_progress',
        'finished_state': 'fetch_from_third_party_API_finished',
        'retries': 3
      },
      // Last task: no finished_state, so the task is removed on completion.
      'save_to_google_datastore': {
        'start_state': 'fetch_from_third_party_API_finished',
        // BUG FIX: was 'save_to_google_datastore_finished' (copy-paste error),
        // which would label an in-flight task with a "finished" state name.
        'in_progress_state': 'save_to_google_datastore_in_progress',
        'retries': 3
      }
    }
  }
}
I wrote test code with no real functionality inside the task handlers. To measure the performance of firebase-queue, I log the time at which the save_user_to_firebase
task starts for every user.
First queue
// First queue: logs the wall-clock time at which each save_user_to_firebase
// task starts, plus how long the (otherwise empty) handler body took.
var options = {
  'specId': 'save_user_to_firebase',
  'numWorkers': 100
};
var saveUserQueue = new Queue({ tasksRef: taskRef, specsRef: specsObjectRef }, options, function (data, progress, resolve, reject) {
  var t0 = process.hrtime();
  var testUser = data.test_user;
  var now = new Date();
  // BUG FIX: removed the stray unary '+' that preceded now.getHours().
  console.log("started %s %d:%d:%d:%d", testUser, now.getHours(), now.getMinutes(), now.getSeconds(), now.getMilliseconds());
  var t1 = process.hrtime(t0);
  console.log("save_user_to_firebase completed in %s %ds %dms", testUser, t1[0], t1[1] / 1000000);
  resolve(data);
}); // BUG FIX: the original snippet never closed the new Queue(...) call.
Second queue
// Second queue: starts once save_user_to_firebase has finished. The API call
// itself is stubbed out; the handler only times its own (empty) body.
var options = {
  'specId': 'fetch_from_third_party_API',
  'numWorkers': 100
};
var fetchFromAPI = new Queue(
  { tasksRef: taskRef, specsRef: specsObjectRef },
  options,
  function (data, progress, resolve, reject) {
    var testUser = data.test_user;
    var start = process.hrtime();
    //Add code for fetching from API
    var elapsed = process.hrtime(start);
    console.log("fetchFromAPI completed in %s %ds %dms", testUser, elapsed[0], elapsed[1] / 1000000);
    resolve(data);
  }
);
Third queue
// Third queue: starts once fetch_from_third_party_API has finished. The
// Datastore insert is stubbed out; the handler only times its (empty) body.
var options = {
  'specId': 'save_to_google_datastore',
  'numWorkers': 100
};
var save_to_google_datastoreQueue = new Queue({ tasksRef: taskRef, specsRef: specsObjectRef }, options, function (data, progress, resolve, reject) {
  var testUser = data.test_user;
  var t0 = process.hrtime();
  // TODO: add the actual Datastore insert here.
  var t1 = process.hrtime(t0);
  // BUG FIX: the log label said "datastoreInsertActivitiesQueue", which does
  // not match this queue's name; also removed an unused `var now = new Date();`.
  console.log("save_to_google_datastore completed %s %ds %dms", testUser, t1[0], t1[1] / 1000000);
  resolve(data);
}); // BUG FIX: added the missing statement-terminating semicolon.
I pushed 40 tasks with a single update() call, and I use 100 workers for each queue. I saw a significant delay for the save_user_to_firebase tasks,
even though the queue handlers contain no real functionality; the results below were produced by the code above.
For every user, I measure the difference between the start time of that user's save_user_to_firebase
task and the start time of the first user's task in the queue.
started user1 at 13:5:13:575
……
started user40 at 13:5:34:545
I wrote a script that parses the logs and calculates the delay for each user. Below is the output:
user1 delay = 0:0
user3 delay = 0:0
user4 delay = 0:0
user5 delay = 0:1
user6 delay = 0:2
user7 delay = 0:2
user2 delay = 0:2
user9 delay = 0:3
user10 delay = 0:4
user11 delay = 0:4
user12 delay = 0:5
user13 delay = 0:5
user14 delay = 0:6
user8 delay = 0:7
user16 delay = 0:7
user15 delay = 0:8
user18 delay = 0:9
user19 delay = 0:10
user20 delay = 0:10
user21 delay = 0:11
user22 delay = 0:12
user17 delay = 0:12
user24 delay = 0:13
user23 delay = 0:13
user26 delay = 0:14
user27 delay = 0:14
user28 delay = 0:14
user29 delay = 0:15
user30 delay = 0:16
user25 delay = 0:16
user32 delay = 0:17
user31 delay = 0:17
user34 delay = 0:18
user35 delay = 0:18
user36 delay = 0:18
user37 delay = 0:19
user38 delay = 0:20
user33 delay = 0:21
user40 delay = 0:20
user39 delay = 0:21
Is this a normal performance rate?
The Firebase queue library uses Firebase Database transactions to ensure that only one worker process can grab a task. This means that the maximum throughput depends in large part on the size of each task. The more workers you have and the shorter the tasks, the higher the contention on the queue will be. For short tasks we recommend using no more than half a dozen workers. Beyond that you will see throughput gains level off and potentially even decrease.