// Grouped-consumption handler tests. Each test stubs the data proxy via
// withDataProxy (single ork, net_r0.jRequest mock) and exercises
// getConsumption with a `groupBy` query param, which dispatches to the
// grouped code path. Timestamps/values mirror the hashrate grouped tests.

// groupBy=miner: the handler must request the *_type_group_sum field and
// its _aggr counterpart, map the grouped power object through to the log,
// and produce both a site-wide and a per-miner-type summary.
// Consumption math: daily (STAT_1D) avg watts * 24h / 1e6 => MWh per day.
test('getConsumption - grouped by miner uses type group aggregation', async (t) => {
  let capturedPayload = null
  const mockCtx = withDataProxy({
    conf: { orks: [{ rpcPublicKey: 'key1' }] },
    net_r0: {
      jRequest: async (key, method, payload) => {
        capturedPayload = payload
        // One daily row with per-miner-type power sums (watts).
        return [{
          ts: 1700006400000,
          power_w_type_group_sum_aggr: { 'S19-Pro': 3000000, S21: 2000000 }
        }]
      }
    }
  })

  const result = await getConsumption(mockCtx, {
    query: { start: 1700000000000, end: 1700100000000, groupBy: 'miner' }
  })

  t.is(capturedPayload.fields.power_w_type_group_sum, 1, 'should request type-group source field')
  t.is(capturedPayload.aggrFields.power_w_type_group_sum_aggr, 1, 'should request type-group aggregate field')
  t.is(result.log.length, 1, 'should map one grouped row')
  t.alike(result.log[0].powerW, { 'S19-Pro': 3000000, S21: 2000000 }, 'should map grouped power value')
  t.ok(result.log[0].consumptionMWh, 'should have consumptionMWh object')
  // Site-wide total: (3000000 + 2000000) W over one day.
  t.is(result.summary.totalConsumptionMWh, (5000000 * 24) / 1000000, 'should have site-wide total consumption')
  t.ok(result.summary.groupedBy, 'should have per-miner breakdown')
  t.is(result.summary.groupedBy['S19-Pro'].totalConsumptionMWh, (3000000 * 24) / 1000000, 'should have per-miner total')
  t.is(result.summary.groupedBy.S21.totalConsumptionMWh, (2000000 * 24) / 1000000, 'should have per-miner total')
  t.pass()
})

// groupBy=container: same shape as the miner test, but the handler must
// select the *_container_group_sum field pair and key the breakdown by
// container name instead of miner type.
test('getConsumption - grouped by container uses container group aggregation', async (t) => {
  let capturedPayload = null
  const mockCtx = withDataProxy({
    conf: { orks: [{ rpcPublicKey: 'key1' }] },
    net_r0: {
      jRequest: async (key, method, payload) => {
        capturedPayload = payload
        return [{
          ts: 1700006400000,
          power_w_container_group_sum_aggr: { 'container-A': 4000000, 'container-B': 1000000 }
        }]
      }
    }
  })

  const result = await getConsumption(mockCtx, {
    query: { start: 1700000000000, end: 1700100000000, groupBy: 'container' }
  })

  t.is(capturedPayload.fields.power_w_container_group_sum, 1, 'should request container-group source field')
  t.is(capturedPayload.aggrFields.power_w_container_group_sum_aggr, 1, 'should request container-group aggregate field')
  t.is(result.log.length, 1, 'should map grouped row')
  t.alike(result.log[0].powerW, { 'container-A': 4000000, 'container-B': 1000000 }, 'should map container grouped power value')
  t.is(result.summary.totalConsumptionMWh, (5000000 * 24) / 1000000, 'should have site-wide total consumption')
  t.ok(result.summary.groupedBy, 'should have per-container breakdown')
  t.is(result.summary.groupedBy['container-A'].totalConsumptionMWh, (4000000 * 24) / 1000000, 'should have per-container total')
  t.is(result.summary.groupedBy['container-B'].totalConsumptionMWh, (1000000 * 24) / 1000000, 'should have per-container total')
  t.pass()
})

// Empty upstream result: the grouped path must degrade to an empty log and
// the empty-summary shape (null avg, zero total) rather than throwing.
test('getConsumption - grouped mode handles empty results', async (t) => {
  const mockCtx = withDataProxy({
    conf: { orks: [{ rpcPublicKey: 'key1' }] },
    net_r0: { jRequest: async () => [] }
  })

  const result = await getConsumption(mockCtx, {
    query: { start: 1700000000000, end: 1700100000000, groupBy: 'miner' }
  })

  t.is(result.log.length, 0, 'grouped log should be empty when no data is returned')
  t.is(result.summary.avgPowerW, null, 'grouped empty summary should have null avg')
  t.is(result.summary.totalConsumptionMWh, 0, 'grouped empty summary should have zero total')
  t.pass()
})

// Direct unit test of the summary calculator: per-group averages are over
// the number of samples that contain the group; the site-wide average is
// total power over the number of log entries (2 here).
test('calculateGroupedConsumptionSummary - calculates per-group and site-wide stats', (t) => {
  const log = [
    { ts: 1700006400000, powerW: { 'S19-Pro': 3000000, S21: 2000000 } },
    { ts: 1700092800000, powerW: { 'S19-Pro': 3500000, S21: 1500000 } }
  ]

  const summary = calculateGroupedConsumptionSummary(log, 'miner')
  // Site total = 3.0M + 2.0M + 3.5M + 1.5M = 10M W across two daily rows.
  t.is(summary.totalConsumptionMWh, (10000000 * 24) / 1000000, 'should have site-wide total')
  t.is(summary.avgPowerW, 5000000, 'should have site-wide average')
  t.ok(summary.groupedBy, 'should have per-group breakdown')
  t.is(summary.groupedBy['S19-Pro'].avgPowerW, 3250000, 'should average per-group power')
  t.is(summary.groupedBy['S19-Pro'].totalConsumptionMWh, (6500000 * 24) / 1000000, 'should sum per-group consumption')
  t.is(summary.groupedBy.S21.avgPowerW, 1750000, 'should average per-group power')
  t.pass()
})

// Empty-log guard: must return the sentinel shape without a groupedBy key.
test('calculateGroupedConsumptionSummary - handles empty log', (t) => {
  const summary = calculateGroupedConsumptionSummary([], 'miner')
  t.is(summary.avgPowerW, null, 'should be null')
  t.is(summary.totalConsumptionMWh, 0, 'should be zero')
  t.pass()
})
/**
 * Grouped consumption handler: returns a per-group power/consumption time
 * series plus a summary, grouped by miner type or by container.
 *
 * Dispatched from getConsumption when `req.query.groupBy` is set; start/end
 * have already passed validateStartEnd there.
 *
 * @param {object} ctx - worker context; uses ctx.dataProxy.requestData
 * @param {object} req - request with query { groupBy, start, end }
 * @returns {Promise<{log: Array<object>, summary: object}>}
 */
async function getGroupedConsumption (ctx, req) {
  const { groupBy, start, end } = req.query

  const isMinerGroup = groupBy === WORKER_TYPES.MINER

  // Miner grouping aggregates per miner type; anything else (schema limits
  // it to 'container') aggregates per container.
  const field = isMinerGroup
    ? LOG_FIELDS.POWER_W_TYPE_GROUP_SUM
    : LOG_FIELDS.POWER_W_CONTAINER_GROUP_SUM

  const aggrField = isMinerGroup
    ? AGGR_FIELDS.POWER_W_TYPE_GROUP_SUM
    : AGGR_FIELDS.POWER_W_CONTAINER_GROUP_SUM

  const res = await ctx.dataProxy.requestData(RPC_METHODS.TAIL_LOG, {
    type: WORKER_TYPES.MINER,
    tag: WORKER_TAGS.MINER,
    key: LOG_KEYS.STAT_1D,
    start,
    end,
    fields: { [field]: 1 },
    aggrFields: { [aggrField]: 1 }
  })

  // res[0] holds the rows of the first (sole) result set; guard so an empty
  // proxy response yields an empty log instead of a TypeError.
  const rows = (res && res[0]) || []

  const log = rows.map((row) => {
    const powerW = row[aggrField]
    const isGrouped = typeof powerW === 'object' && powerW !== null

    return {
      ts: row.ts,
      powerW,
      // Daily (STAT_1D) average watts -> MWh for that day: W * 24h / 1e6.
      // Null when the row carries no grouped power object.
      consumptionMWh: isGrouped
        ? Object.fromEntries(
          Object.entries(powerW).map(([name, w]) => [name, ((Number(w) || 0) * 24) / 1000000])
        )
        : null
    }
  })

  const summary = calculateGroupedConsumptionSummary(log, groupBy)

  return { log, summary }
}

/**
 * Computes per-group and site-wide consumption statistics from a grouped
 * consumption log (entries shaped { ts, powerW: { [group]: watts } }).
 *
 * Per-group average is over the number of entries containing that group;
 * the site-wide average is total watts over the number of log entries.
 * Consumption assumes daily rows: MWh = watts * 24 / 1e6.
 *
 * @param {Array<object>} log - grouped log entries
 * @param {string} groupBy - grouping key ('miner'|'container'); currently
 *   unused, kept for signature symmetry with the grouped hashrate summary
 * @returns {object} { avgPowerW, totalConsumptionMWh, groupedBy? }
 */
function calculateGroupedConsumptionSummary (log, groupBy) {
  // Empty-log sentinel: no groupedBy key, null average, zero total.
  if (!log.length) {
    return {
      avgPowerW: null,
      totalConsumptionMWh: 0
    }
  }

  // Accumulate total watts and sample counts per group name.
  const groupTotals = {}
  const groupCounts = {}

  for (const entry of log) {
    const powerW = entry.powerW
    if (typeof powerW === 'object' && powerW !== null) {
      for (const [name, val] of Object.entries(powerW)) {
        const watts = Number(val) || 0
        groupTotals[name] = (groupTotals[name] || 0) + watts
        groupCounts[name] = (groupCounts[name] || 0) + 1
      }
    }
  }

  const byGroup = {}
  let siteTotal = 0
  for (const [name, total] of Object.entries(groupTotals)) {
    byGroup[name] = {
      avgPowerW: safeDiv(total, groupCounts[name]),
      totalConsumptionMWh: (total * 24) / 1000000
    }
    siteTotal += total
  }

  return {
    // Site-wide average divides by entry count, not per-group sample count.
    avgPowerW: safeDiv(siteTotal, log.length),
    totalConsumptionMWh: (siteTotal * 24) / 1000000,
    groupedBy: byGroup
  }
}
index c6c6530..aec1061 100644 --- a/workers/lib/server/schemas/metrics.schemas.js +++ b/workers/lib/server/schemas/metrics.schemas.js @@ -17,6 +17,7 @@ const schemas = { properties: { start: { type: 'integer', minimum: 0 }, end: { type: 'integer', minimum: 0 }, + groupBy: { type: 'string', enum: ['miner', 'container'] }, overwriteCache: { type: 'boolean' } }, required: ['start', 'end']