measure nanoseconds

Para Dox
2025-06-02 01:02:19 +07:00
parent 9ef4a1f02d
commit 8b75dd9bd7


@@ -135,6 +135,10 @@ class RPCProxy {
  async handleRequest(req, res) {
    const requestId = this.generateRequestId();
    const startTime = Date.now();
+   // Add high-resolution timing
+   const hrStartTime = process.hrtime.bigint();
    const requestBody = req.body;
    // Validate request body
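For reference (not part of the diff above): process.hrtime.bigint() returns a monotonic clock reading as a BigInt in nanoseconds, so a duration is simply the difference of two readings. A minimal standalone sketch of the pattern this hunk introduces; doWork() is a hypothetical placeholder for the code being timed:

// Sketch only: measure an operation in nanoseconds with Node's monotonic clock.
function doWork() {
  let sum = 0;
  for (let i = 0; i < 1e6; i += 1) sum += i; // stand-in workload
  return sum;
}

const start = process.hrtime.bigint();        // BigInt, nanoseconds
doWork();
const elapsedNs = process.hrtime.bigint() - start; // still a BigInt
console.log(`elapsed: ${elapsedNs} ns (~${Number(elapsedNs) / 1e6} ms)`);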
@@ -374,6 +378,9 @@ class RPCProxy {
      streamEndpoint: this.streamEndpoint,
    }, 'Making upstream request');
+   // Measure time to upstream request
+   const upstreamStartTime = process.hrtime.bigint();
    let response;
    try {
      response = await client.post('/', requestBody, {
@@ -409,11 +416,16 @@ class RPCProxy {
    statusCode = response.status;
    const streamLatency = Date.now() - startTime;
+   // Calculate pre-streaming overhead in nanoseconds
+   const preStreamOverheadNs = Number(process.hrtime.bigint() - hrStartTime);
    logger.info({
      requestId,
      endpoint: 'stream',
      latencyMs: streamLatency,
      statusCode: response.status,
+     preStreamOverheadNs,
+     upstreamConnectNs: Number(process.hrtime.bigint() - upstreamStartTime),
    }, 'Stream response started');
    // Set response headers if not already sent
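A note on the Number(...) conversions in this hunk (an observation, not part of the commit): BigInt values cannot be passed through JSON.stringify, so a raw hrtime delta would break JSON-based log serialization. Converting to a plain number is lossless here, since deltas stay exact up to 2 ** 53 nanoseconds (roughly 104 days). A small sketch of that conversion:

// Sketch only: convert a BigInt nanosecond delta before logging it.
const startNs = process.hrtime.bigint();
for (let i = 0; i < 1e5; i += 1) Math.sqrt(i); // stand-in for real work
const deltaNs = process.hrtime.bigint() - startNs;   // BigInt
const deltaAsNumber = Number(deltaNs);               // exact below 2 ** 53 ns (~104 days)

// JSON.stringify({ deltaNs }) would throw ("Do not know how to serialize a BigInt"),
// which is why the value is converted before being handed to the logger.
console.log(JSON.stringify({ preStreamOverheadNs: deltaAsNumber, approxMs: deltaAsNumber / 1e6 }));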
@@ -481,6 +493,9 @@ class RPCProxy {
    let writeQueue = Promise.resolve();
    response.data.on('data', (chunk) => {
+     // Measure per-chunk overhead
+     const chunkStartTime = process.hrtime.bigint();
      // Always capture raw chunks for comparison
      chunks.push(chunk);
@@ -490,6 +505,15 @@ class RPCProxy {
      writeQueue = writeQueue.then(() => new Promise((resolve) => {
        try {
          const canContinue = res.write(chunk, (err) => {
+           // Log per-chunk overhead
+           const chunkOverheadNs = Number(process.hrtime.bigint() - chunkStartTime);
+           if (chunkOverheadNs > 100000) { // Log if over 100 microseconds
+             logger.debug({
+               requestId,
+               chunkOverheadNs,
+               chunkSize: chunk.length,
+             }, 'High chunk processing overhead');
+           }
            if (err) {
              logger.error({
                requestId,
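The 100000 threshold in this hunk is 100 microseconds expressed in nanoseconds, so only unusually slow chunks produce a debug log and the hot path stays quiet. Note that the delta is taken inside the res.write callback, so it also includes time the chunk spends queued behind earlier writes, not just the processing itself. A standalone sketch of the thresholded pattern; THRESHOLD_NS and onChunk() are illustrative names, not part of the commit:

// Sketch only: log per-chunk overhead when it crosses a threshold.
const THRESHOLD_NS = 100_000; // 100 microseconds in nanoseconds

function onChunk(chunk) {
  const chunkStart = process.hrtime.bigint();

  // ... per-chunk work (buffering, writing to the response, etc.) would go here ...

  const chunkOverheadNs = Number(process.hrtime.bigint() - chunkStart);
  if (chunkOverheadNs > THRESHOLD_NS) {
    // Only slow chunks are reported, keeping the steady-state path quiet.
    console.debug({ chunkOverheadNs, chunkSize: chunk.length }, 'High chunk processing overhead');
  }
}

onChunk(Buffer.from('example payload'));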