
Commit

Merge e9c9157 into 2fbfea0
hyj1991 committed Jan 12, 2023
2 parents 2fbfea0 + e9c9157 commit a13fdf0
Showing 5 changed files with 83 additions and 44 deletions.
5 changes: 2 additions & 3 deletions src/hooks/heap_limit.cc
@@ -21,9 +21,8 @@ size_t NearHeapLimitCallback(void* data, size_t current_heap_limit,

EnvironmentData* env_data = static_cast<EnvironmentData*>(data);
InfoT(module_type, env_data->thread_id(),
"current_heap_limit is %d, initial_heap_limit is %d, "
"auto_incr_heap_limit_size is %d, increased_heap is "
"%d.",
"current_heap_limit: %d, initial_heap_limit: %d, "
"auto_incr_heap_limit_size: %d, increased_heap: %d",
current_heap_limit, initial_heap_limit, auto_incr_heap_limit_size,
increased_heap);

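The only substantive change in this hunk is the wording of the heap-limit log detail: fields now use the "key: value" form instead of "key is value", and the trailing period is gone, so the line can be consumed by the shared parseLog helper added below. A minimal sketch of what the new detail text looks like, mirroring the printf-style format string; the concrete numbers are only an assumption taken from the 128 MB constants in test/limit.test.js (first auto increase):

const util = require('util');
const MB = 1024 * 1024;
// Same placeholders as the InfoT format string above, with illustrative values.
const detail = util.format(
  'current_heap_limit: %d, initial_heap_limit: %d, '
    + 'auto_incr_heap_limit_size: %d, increased_heap: %d',
  128 * MB, 128 * MB, 128, 256 * MB);
console.log(detail);
// => current_heap_limit: 134217728, initial_heap_limit: 134217728, auto_incr_heap_limit_size: 128, increased_heap: 268435456
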
13 changes: 11 additions & 2 deletions test/fixtures/cases/limit.js
@@ -6,15 +6,24 @@ const { filterTestCaseByPlatform } = require('../utils');

const exitFatalErrorScriptPath = path.join(__dirname, '../scripts/fatal_error.js');

const increasedHeapLogStructure = {
current_heap_limit: /^\d+$/,
initial_heap_limit: /^\d+$/,
auto_incr_heap_limit_size: /^\d+$/,
increased_heap: /^\d+$/,
};

exports = module.exports = function () {
const list = [
{
title: 'limit hook is valid',
subTitle: 'auto increase heap limit is ok.',
subTitle: 'auto increase heap limit',
jspath: exitFatalErrorScriptPath,
skip: os.platform() === 'win32'
}
];

return filterTestCaseByPlatform(list);
};
};

exports.increasedHeapLogStructure = increasedHeapLogStructure;
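
The exported increasedHeapLogStructure maps each field of the new heap-limit log detail to a validation regexp; every value is expected to be a plain integer string. A quick illustration (the require path assumes the repo root; the values are made up):

const { increasedHeapLogStructure } = require('./test/fixtures/cases/limit');

increasedHeapLogStructure.current_heap_limit.test('134217728'); // true
increasedHeapLogStructure.increased_heap.test('not-a-number');  // false
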
34 changes: 34 additions & 0 deletions test/fixtures/cases/logbypass.js
@@ -6,6 +6,38 @@ const utils = require('../utils');
const blocking = path.join(__dirname, '../scripts/process_blocking.js');
const nonBlocking = path.join(__dirname, '../scripts/process_normal.js');

function parseLog(component, content, patt, alinode) {
console.log(`parse log ${component}: ${JSON.stringify(content)}`);
const reg = /([^\s]*): (\d+(\.\d{0,2})?)/g;
let matched;
const res = { prefix: {}, detail: {} };
while ((matched = patt.exec(content)) !== null) {
if (!matched || matched[2] !== component) {
continue;
}

// set prefix;
res.prefix.level = matched[1];
res.prefix.component = matched[2];
res.prefix.pid = Number(matched[3]);
let detail;
if (alinode) {
detail = matched[4];
} else {
res.prefix.tid = Number(matched[4]);
res.prefix.version = matched[5];
detail = matched[6];
}

// set detail
let pair;
while ((pair = reg.exec(detail)) !== null) {
res.detail[pair[1]] = pair[2];
}
}
return res;
}

function setRules(list, alinode, { alinodeRule, xprofilerRule }) {
const rules = {};
for (const r of list) {
@@ -177,3 +209,5 @@ function getTestCases(title, logdirBlocking, logdirNonBlocking, envConfig, struc
exports = module.exports = getTestCases;

exports.getUvRules = getUvRules;

exports.parseLog = parseLog;
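
parseLog moves out of test/logbypass.test.js into this shared fixture so that test/limit.test.js can reuse it. It scans the captured output with a caller-supplied prefix pattern, keeps only matches whose second capture group equals the requested component, and splits the detail part into "key: value" pairs (integers, or numbers with up to two decimals). A rough usage sketch; the prefix pattern and sample line below are invented for illustration and are not the real utils.xprofilerPrefixRegexp or real xprofiler output:

const { parseLog } = require('./test/fixtures/cases/logbypass');

// Hypothetical prefix layout: [level] [component] [pid] [tid] [version] detail
const samplePatt = /\[(\w+)\] \[(\w+)\] \[(\d+)\] \[(\d+)\] \[([\d.]+)\] (.*)/g;
const sampleLine = '[info] [heap_limit] [4321] [0] [1.0.0] '
  + 'current_heap_limit: 134217728, increased_heap: 268435456';

const parsed = parseLog('heap_limit', sampleLine, samplePatt, false);
// parsed.prefix => { level: 'info', component: 'heap_limit', pid: 4321, tid: 0, version: '1.0.0' }
// parsed.detail => { current_heap_limit: '134217728', increased_heap: '268435456' }
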
42 changes: 35 additions & 7 deletions test/limit.test.js
@@ -9,7 +9,10 @@ const promisify = require('util').promisify;
const readdir = promisify(fs.readdir);
const unlink = promisify(fs.unlink);
const utils = require('./fixtures/utils');
const cases = require('./fixtures/cases/limit')();
const { parseLog } = require('./fixtures/cases/logbypass');
const limit = require('./fixtures/cases/limit');
const { increasedHeapLogStructure: struct } = limit;
const cases = limit();

const currentPlatform = os.platform();

@@ -23,6 +26,8 @@ for (const cse of cases) {
const initialHeapLimit = 128;
const autoIncreaseHeapLimitSize = 128;
const MB = 1024 * 1024;
const component = 'heap_limit';
let parsed;

let stdout = '';
let subprocess = null;
@@ -35,14 +40,15 @@ for (const cse of cases) {
XPROFILER_LOG_TYPE: 1,
XPROFILER_ENABLE_AUTO_INCR_HEAP_LIMIT: 'YES',
XPROFILER_AUTO_INCR_HEAP_LIMIT_SIZE: autoIncreaseHeapLimitSize,
XPROFILER_FATAL_ERROR_INTERVAL: 500,
XPROFILER_FATAL_ERROR_INTERVAL: 250,
}, cse.env),
stdio: [0, 'pipe', 'pipe', 'ipc'],
});
subprocess.stdout.on('data', chunk => stdout += chunk.toString());
await utils.sleep(5000);
await utils.sleep(10000);
subprocess.kill();
console.log('========= stdout =========\n\n', stdout, '\n========= end =========');
parsed = parseLog(component, stdout, utils.xprofilerPrefixRegexp, false);
});
after(async function () {
const files = await readdir(logdir);
@@ -58,13 +64,35 @@

for (let i = 1; i < 3; i++) {
(cse.skip ? it.skip : it)(`${cse.subTitle} with ${i} times heap increase factor`, function () {
const increaseLog = `current_heap_limit is ${(initialHeapLimit + (i - 1) * autoIncreaseHeapLimitSize) * MB}, `
+ `initial_heap_limit is ${initialHeapLimit * MB}, `
+ `auto_incr_heap_limit_size is ${autoIncreaseHeapLimitSize}, `
+ `increased_heap is ${initialHeapLimit * MB + i * autoIncreaseHeapLimitSize * MB}`;
const increaseLog = `current_heap_limit: ${(initialHeapLimit + (i - 1) * autoIncreaseHeapLimitSize) * MB}, `
+ `initial_heap_limit: ${initialHeapLimit * MB}, `
+ `auto_incr_heap_limit_size: ${autoIncreaseHeapLimitSize}, `
+ `increased_heap: ${initialHeapLimit * MB + i * autoIncreaseHeapLimitSize * MB}`;
console.log('increaseLog:', increaseLog);
expect(stdout).to.contain(increaseLog);
});
}

(cse.skip ? it.skip : it)(`${cse.subTitle} should have component: ${component}`, function () {
expect(parsed.prefix.component).to.be(component);
});

(cse.skip ? it.skip : it)(`component [${component}] should be as expected`, function () {
const detail = parsed.detail;
describe(`${cse.subTitle} content should be ok`, function () {
for (const key of Object.keys(detail)) {
const key2 = utils.formatKey(key);
const regexp = key2 !== key ? struct[key2].regexp : struct[key2];
it(`${key}: ${detail[key]} should be ${regexp}`, function () {
if (regexp instanceof RegExp) {
expect(regexp.test(detail[key])).to.be.ok();
}
if (typeof regexp === 'function') {
expect(regexp(detail[key])).to.be.ok();
}
});
}
});
});
});
}
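
The expected values in the increase loop follow directly from the 128 MB test constants: every auto increase adds another 128 MB on top of the previous limit, and, judging by the expected strings above, the two heap sizes are logged in bytes while auto_incr_heap_limit_size stays at 128. A worked example of what increaseLog evaluates to for i = 1 (same constants as in the test):

const MB = 1024 * 1024;
const initialHeapLimit = 128;
const autoIncreaseHeapLimitSize = 128;
const i = 1;
const currentHeapLimit = (initialHeapLimit + (i - 1) * autoIncreaseHeapLimitSize) * MB; // 134217728
const increasedHeap = initialHeapLimit * MB + i * autoIncreaseHeapLimitSize * MB;       // 268435456
// For i = 2 both values grow by another 134217728 bytes (one more 128 MB step).
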
33 changes: 1 addition & 32 deletions test/logbypass.test.js
@@ -7,6 +7,7 @@ const moment = require('moment');
const pack = require('../package.json');
const utils = require('./fixtures/utils');
const getTestCases = require('./fixtures/cases/logbypass');
const { parseLog } = getTestCases;

const logdirBlocking = utils.createLogDir('log_bypass_blocking');
const logdirNonBlocking = utils.createLogDir('log_bypass_non_blocking');
@@ -41,38 +42,6 @@ const casesForHttp = getTestCases('performance log correctly XPROFILER_PATCH_HT
// compose cases
cases = cases.concat(casesForLibuv).concat(casesForHttp);

function parseLog(component, content, patt, alinode) {
console.log(`parse log ${component}: ${JSON.stringify(content)}`);
const reg = /([^\s]*): (\d+(\.\d{0,2})?)/g;
let matched;
const res = { prefix: {}, detail: {} };
while ((matched = patt.exec(content)) !== null) {
if (!matched || matched[2] !== component) {
continue;
}

// set prefix;
res.prefix.level = matched[1];
res.prefix.component = matched[2];
res.prefix.pid = Number(matched[3]);
let detail;
if (alinode) {
detail = matched[4];
} else {
res.prefix.tid = Number(matched[4]);
res.prefix.version = matched[5];
detail = matched[6];
}

// set detail
let pair;
while ((pair = reg.exec(detail)) !== null) {
res.detail[pair[1]] = pair[2];
}
}
return res;
}

for (const testCase of cases) {
for (const target of testCase.targets) {
describe(`${testCase.title} ${target.title}`, function () {
