
Commit 2223aad

[crashes] Update aggregator
1 parent 2097568 commit 2223aad

1 file changed

plugins/crashes/api/aggregator.js

Lines changed: 46 additions & 60 deletions
@@ -6,11 +6,11 @@ const UnifiedEventSource = require('../../../api/eventSource/UnifiedEventSource.
 const { WriteBatcher } = require('../../../api/parts/data/batcher.js');
 const log = require('../../../api/utils/log.js')('crashes:aggregator');
 
-var ranges = ['ram', 'bat', 'disk', 'run', 'session'];
-var segments = ['os_version', 'os_name', 'manufacture', 'device', 'resolution', 'app_version', 'cpu', 'opengl', 'orientation', 'view', 'browser'];
-var bools = { root: true, online: true, muted: true, signal: true, background: true };
+const ranges = ['ram', 'bat', 'disk', 'run', 'session'];
+const segments = ['os_version', 'os_name', 'manufacture', 'device', 'resolution', 'app_version', 'cpu', 'opengl', 'orientation', 'view', 'browser'];
+const bools = { root: true, online: true, muted: true, signal: true, background: true };
 
-var props = [
+const props = [
     //device metrics
     'os',
     'os_version',
@@ -67,9 +67,9 @@ var props = [
 
 const recordCustomMetric = function(params, collection, id, metrics, value, segm, uniques, lastTimestamp, token, localBatcher) {
     value = value || 1;
-    var updateUsersZero = {},
-        updateUsersMonth = {},
-        tmpSet = {};
+    const updateUsersZero = {};
+    const updateUsersMonth = {};
+    const tmpSet = {};
 
     if (metrics) {
         for (let i = 0; i < metrics.length; i++) {
@@ -82,10 +82,10 @@ const recordCustomMetric = function(params, collection, id, metrics, value, segm
                 tmpSet, updateUsersZero, updateUsersMonth);
         }
     }
-    var dbDateIds = common.getDateIds(params);
+    const dbDateIds = common.getDateIds(params);
 
     if (Object.keys(updateUsersZero).length || Object.keys(tmpSet).length) {
-        var update = {
+        const update = {
             $set: {
                 m: dbDateIds.zero,
                 a: `${params.app_id}`,
@@ -232,7 +232,7 @@ const recordCustomMetric = function(params, collection, id, metrics, value, segm
 
     let AllUsersUpdate = {$set: {group: 0, 'uid': currEvent.uid}};
     if (!user || !user.reports) {
-        var inc = {crashes: 1};
+        const inc = {crashes: 1};
         if (groupSet.nonfatal === false) {
             inc.usersfatal = 1;
             inc.fatal = 1;
@@ -249,7 +249,7 @@ const recordCustomMetric = function(params, collection, id, metrics, value, segm
         set.sessions = currEvent.up.sc;
     }
 
-    var userAll = await common.db.collection('app_crashusers' + params.app_id).findOneAndUpdate({group: 0, 'uid': currEvent.uid}, AllUsersUpdate, {upsert: true, new: false, returnDocument: 'before', returnNewDocument: false});
+    const userAll = await common.db.collection('app_crashusers' + params.app_id).findOneAndUpdate({group: 0, 'uid': currEvent.uid}, AllUsersUpdate, {upsert: true, new: false, returnDocument: 'before', returnNewDocument: false});
 
     if ((currEvent.sg.nonfatal === true) && currEvent.up.sc && currEvent.up.sc > 0 && currEvent.up.tp) {
         metaInc.loss = currEvent.up.tp / currEvent.up.sc;
@@ -295,7 +295,7 @@ const recordCustomMetric = function(params, collection, id, metrics, value, segm
         // Segment type range
         if (ranges.includes(props[i])) {
             if (currEvent.sg[props[i] + '_current'] && currEvent.sg[props[i] + '_total']) {
-                var ratio = ((parseInt(currEvent.sg[props[i] + '_current']) / parseInt(currEvent.sg[props[i] + '_total'])) * 100).toFixed(2);
+                const ratio = ((parseInt(currEvent.sg[props[i] + '_current']) / parseInt(currEvent.sg[props[i] + '_total'])) * 100).toFixed(2);
                 groupInc[props[i] + '.total'] = parseFloat(ratio);
                 groupInc[props[i] + '.count'] = 1;
                 groupMin[props[i] + '.min'] = parseFloat(ratio);
@@ -380,17 +380,17 @@ const recordCustomMetric = function(params, collection, id, metrics, value, segm
         app_version_list: currEvent.sg.app_version,
     };
 
-    var crashGroup = await common.db.collection('app_crashgroups' + params.app_id).findOneAndUpdate({'groups': {$elemMatch: {$eq: hash}}}, update, {upsert: true, new: false, returnDocument: 'before', returnNewDocument: false});
+    const crashGroup = await common.db.collection('app_crashgroups' + params.app_id).findOneAndUpdate({'groups': {$elemMatch: {$eq: hash}}}, update, {upsert: true, new: false, returnDocument: 'before', returnNewDocument: false});
     if (!crashGroup) {
         metaInc.isnew = 1;
         metaInc.crashes = 1;
     }
-    var lastTs;
+    let lastTs;
 
     if (crashGroup) {
         lastTs = crashGroup.lastTs;
         if (crashGroup.latest_version !== currEvent.sg.app_version) {
-            var group = {};
+            let group = {};
             if (crashGroup.latest_version && common.versionCompare(currEvent.sg.app_version.replace(/\./g, ':'), crashGroup.latest_version.replace(/\./g, ':')) > 0) {
                 group.latest_version = currEvent.sg.app_version;
                 group.latest_version_for_sort = common.transformAppVersion(currEvent.sg.app_version);
@@ -448,7 +448,7 @@ const recordCustomMetric = function(params, collection, id, metrics, value, segm
             {
                 $match: {
                     operationType: 'insert',
-                    'fullDocument.e': { $in: ['[CLY]_session'] },
+                    'fullDocument.e': { $in: ['[CLY]_session_begin'] },
                 },
             },
             {
@@ -469,7 +469,7 @@ const recordCustomMetric = function(params, collection, id, metrics, value, segm
         pipeline: [
             {
                 $match: {
-                    e: { $in: ['[CLY]_session'] }
+                    e: { $in: ['[CLY]_session_begin'] }
                 },
             },
         ],
@@ -487,7 +487,7 @@ const recordCustomMetric = function(params, collection, id, metrics, value, segm
     for (let idx = 0; idx < events.length; idx += 1) {
         const currEvent = events[idx];
         // Kafka will send all events here, so filter out if needed.
-        if (currEvent.e === '[CLY]_session' && 'a' in currEvent) {
+        if (currEvent.e === '[CLY]_session_begin' && 'a' in currEvent) {
             common.readBatcher.getOne('apps', common.db.ObjectID(currEvent.a), async(err, app) => {
                 if (err) {
                     log.e('Error getting app data for session', err);
@@ -496,17 +496,17 @@ const recordCustomMetric = function(params, collection, id, metrics, value, segm
 
                 // record event totals in aggregated data
                 if (app && '_id' in app) {
-                    var params = {
+                    const params = {
                         'app_id': currEvent.a,
                         'app': app,
                         'time': common.initTimeObj(app.timezone, currEvent.ts),
                         'appTimezone': (app.timezone || 'UTC'),
                     };
 
-                    var metrics = ['cr_s', 'cr_u'];
+                    const metrics = ['cr_s', 'cr_u'];
                     const platform = currEvent.up?.p;
                     const version = currEvent.up?.av;
-                    var lastTs = currEvent.sg?.prev_start || 0;
+                    const lastTs = currEvent.sg?.prev_start || 0;
 
                     //WE DON"T know platfirm and version from previous session. So it if changes - new model is not recording that.
                     recordCustomMetric(params, 'crashdata', params.app_id, metrics, 1, null, ['cr_u'], lastTs, token, localWriteBatcher);
@@ -541,8 +541,8 @@ const recordCustomMetric = function(params, collection, id, metrics, value, segm
         pipeline: [
             {
                 $match: {
-                    operationType: 'update',
-                    'fullDocument.e': { $in: ['[CLY]_session_update'] }
+                    operationType: 'insert',
+                    'fullDocument.e': { $in: ['[CLY]_session'] }
                 },
             },
             {
@@ -564,7 +564,7 @@ const recordCustomMetric = function(params, collection, id, metrics, value, segm
         pipeline: [
             {
                 $match: {
-                    e: { $in: ['[CLY]_session_update'] },
+                    e: { $in: ['[CLY]_session'] },
                 },
             },
         ],
@@ -580,37 +580,30 @@ const recordCustomMetric = function(params, collection, id, metrics, value, segm
     for (let idx = 0; idx < events.length; idx += 1) {
         const currEvent = events[idx];
         // Kafka will send all events here, so filter out if needed.
-        if (currEvent.e === '[CLY]_session_update' && 'a' in currEvent) {
-            common.readBatcher.getOne('apps', common.db.ObjectID(currEvent.a), async(err, app) => {
-                if (err) {
-                    log.e('Error getting app data for session update', err);
+        if (currEvent.e === '[CLY]_session' && 'a' in currEvent) {
+            common.readBatcher.getOne('apps', common.db.ObjectID(currEvent.a), async(appErr, app) => {
+                if (appErr) {
+                    log.e('Error getting app data for session update', appErr);
                     return;
                 }
 
                 if (app && '_id' in app) {
                     const params = {app_id: currEvent.a, app, time: common.initTimeObj(app.timezone, currEvent.ts), appTimezone: (app.timezone || 'UTC')};
-                    const currUser = await common.db.collection(`app_users${currEvent.a}`).findOne({ _id: currEvent._uid }, { ls: 1, _id: 0 });
+                    const platform = currEvent.up?.p;
+                    const version = currEvent.up?.av;
+
                     // check if it is not user's first session
-                    if (currUser?.ls) {
-                        //record crash free session
-                        const fatalCrash = await common.drillDb.collection('drill_events').findOne({
-                            e: '[CLY]_crash',
-                            a: currEvent.a,
-                            uid: currEvent.uid,
-                            ts: { $gte: (currUser?.ls * 1000) },
-                            'sg.nonfatal': false,
-                        }, {ts: 1, _id: 0});
+                    if (currEvent.up?.ls) {
+                        // get app user to get more details about crash user
+                        const currUser = await common.db.collection(`app_users${currEvent.a}`).findOne({ _id: currEvent._uid });
 
                         const fatalMetrics = [];
 
-                        if (!fatalCrash) {
+                        if (!currUser?.hadFatalCrash) {
                             fatalMetrics.push('crfses');
                             fatalMetrics.push('crauf');
                         }
 
-                        const platform = currUser?.p;
-                        const version = currUser?.av;
-
                         if (fatalMetrics.length) {
                             const ts = currEvent.sg?.prev_start || currUser?.hadAnyFatalCrash || 0;
 
@@ -620,27 +613,20 @@ const recordCustomMetric = function(params, collection, id, metrics, value, segm
                             recordCustomMetric(params, 'crashdata', `any**${version}**${params.app_id}`, fatalMetrics, 1, null, ['crauf'], ts, token, localWriteBatcher);
                         }
 
-                        var nonFatalMetrics = [];
-                        var nonFatalCrash = await common.drillDb.collection('drill_events').findOne({
-                            e: '[CLY]_crash',
-                            a: currEvent.a,
-                            uid: currEvent.uid,
-                            ts: {$gte: (currUser?.ls * 1000)},
-                            'sg.nonfatal': true,
-                        }, {ts: 1, _id: 0});
-
-                        if (!nonFatalCrash) {
-                            nonFatalMetrics.push('craunf');
-                            nonFatalMetrics.push('crnfses');
+                        const nonfatalMetrics = [];
+
+                        if (!currUser?.hadNonfatalCrash) {
+                            nonfatalMetrics.push('craunf');
+                            nonfatalMetrics.push('crnfses');
                         }
 
-                        if (nonFatalMetrics.length) {
-                            const ts = currEvent.sg?.prev_start || currUser?.hadAnyNonFatalCrash || 0;
+                        if (nonfatalMetrics.length) {
+                            const ts = currEvent.sg?.prev_start || currUser?.hadAnyNonfatalCrash || 0;
 
-                            recordCustomMetric(params, 'crashdata', params.app_id, nonFatalMetrics, 1, null, ['craunf'], ts, token, localWriteBatcher);
-                            recordCustomMetric(params, 'crashdata', `${platform}**${version}**${params.app_id}`, nonFatalMetrics, 1, null, ['craunf'], ts, token, localWriteBatcher);
-                            recordCustomMetric(params, 'crashdata', `${platform}**any**${params.app_id}`, nonFatalMetrics, 1, null, ['craunf'], ts, token, localWriteBatcher);
-                            recordCustomMetric(params, 'crashdata', `any**${version}**${params.app_id}`, nonFatalMetrics, 1, null, ['craunf'], ts, token, localWriteBatcher);
+                            recordCustomMetric(params, 'crashdata', params.app_id, nonfatalMetrics, 1, null, ['craunf'], ts, token, localWriteBatcher);
+                            recordCustomMetric(params, 'crashdata', `${platform}**${version}**${params.app_id}`, nonfatalMetrics, 1, null, ['craunf'], ts, token, localWriteBatcher);
+                            recordCustomMetric(params, 'crashdata', `${platform}**any**${params.app_id}`, nonfatalMetrics, 1, null, ['craunf'], ts, token, localWriteBatcher);
+                            recordCustomMetric(params, 'crashdata', `any**${version}**${params.app_id}`, nonfatalMetrics, 1, null, ['craunf'], ts, token, localWriteBatcher);
                         }
                     }
                 }
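For readers following the change, here is a minimal sketch of the crash-free metric selection the rewritten session consumer performs. It only illustrates the branching: the hadFatalCrash / hadNonfatalCrash flags are assumed to be maintained on the app user document elsewhere in the aggregator (not shown in this diff), and the DB lookup, batching and recordCustomMetric calls are omitted.

// Sketch only, not the aggregator's exact code: given the app user document of a
// returning user, decide which crash-free session/user metrics to record.
// Assumes currUser may carry hadFatalCrash / hadNonfatalCrash flags (an assumption;
// they are set outside this diff).
function crashFreeMetricsForSession(currUser) {
    const fatalMetrics = [];
    if (!currUser?.hadFatalCrash) {
        // user has never had a fatal crash: count a crash-free session and user
        fatalMetrics.push('crfses', 'crauf');
    }
    const nonfatalMetrics = [];
    if (!currUser?.hadNonfatalCrash) {
        // user has never had a non-fatal crash
        nonfatalMetrics.push('craunf', 'crnfses');
    }
    return { fatalMetrics, nonfatalMetrics };
}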
