Commit

deps: bump package versions
Bumps some major package versions so that they are up to date.  Notably,
this includes:
  1. `mkdirp`
  2. `mocha`
  3. `cross-env`

Additionally, I've removed lzma-native, since it sometimes causes crashes
on Windows, and replaced it with the native zlib module.  This also fits
into our regular backup strategy.

Closes #4126.
Closes #4127.
Closes #4129.
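
For context, the mkdirp bump crosses the 0.x → 1.x boundary, where the library dropped its callback API in favour of a promise-returning one; the async rewrites further down follow from that. A minimal sketch of the difference (the path is illustrative, not taken from this repository):

const mkdirp = require('mkdirp');

// mkdirp 0.x: mkdirp('server/reports', (err, made) => { ... });
// mkdirp 1.x: returns a promise that resolves with the first directory it
// created, or undefined if the full path already existed.
async function ensureReportsDirectory() {
  const made = await mkdirp('server/reports'); // illustrative path
  console.log(made ? `created ${made}` : 'directory already existed');
}

ensureReportsDirectory().catch(console.error);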
jniles committed Jan 27, 2020
1 parent 95f3654 commit 7088bcd
Showing 5 changed files with 334 additions and 437 deletions.
9 changes: 7 additions & 2 deletions gulpfile.js/server.js
@@ -45,8 +45,13 @@ function moveServerFiles() {
* NOTE(@jniles) - there is an open issue (#3650) to move this to an environment
* variable.
*/
function createReportsDirectory(cb) {
mkdirp(path.join(SERVER_FOLDER, 'reports/'), cb);
async function createReportsDirectory(cb) {
try {
const res = await mkdirp(path.join(SERVER_FOLDER, 'reports/'));
cb(null, res);
} catch (e) {
cb(e);
}
}

// expose the gulp functions to the outside world
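As a side note, gulp 4 also treats a returned promise as task completion, so the callback wrapper above could in principle be dropped entirely. A hedged alternative sketch, assuming the same SERVER_FOLDER constant from the gulpfile (its value here is illustrative):

const path = require('path');
const mkdirp = require('mkdirp');

const SERVER_FOLDER = 'bin/server'; // assumed value, for illustration only

// Returning the promise from mkdirp@1 lets gulp know when the task is done,
// without adapting it back to the callback convention.
function createReportsDirectory() {
  return mkdirp(path.join(SERVER_FOLDER, 'reports/'));
}

module.exports = { createReportsDirectory };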
7 changes: 3 additions & 4 deletions package.json
@@ -83,9 +83,8 @@
"json-2-csv": "^3.2.0",
"juice": "^6.0.0",
"lodash": "^4.17.11",
"lzma-native": "^6.0.0",
"mailgun-js": "^0.22.0",
"mkdirp": "^0.5.1",
"mkdirp": "^1.0.0",
"moment": "^2.24.0",
"morgan": "^1.9.1",
"multer": "^1.4.1",
@@ -142,15 +141,15 @@
"karma-mocha": "^1.3.0",
"karma-ng-html2js-preprocessor": "^1.0.0",
"merge-stream": "^2.0.0",
"mocha": "^6.0.2",
"mocha": "^7.0.1",
"mochawesome": "^4.0.0",
"protractor": "^5.4.2",
"qs": "^6.9.1",
"release-it": "^12.3.5",
"sinon": "^8.0.2",
"snyk": "^1.210.0",
"standard-version": "^7.0.0",
"typescript": "^3.3.1"
"typescript": "^3.7.5"
},
"homepage": "https://github.com/IMA-WorldHealth/bhima#readme",
"directories": {
52 changes: 24 additions & 28 deletions server/lib/backup.js
@@ -8,7 +8,7 @@

const debug = require('debug')('backups');
const tmp = require('tempy');
const lzma = require('lzma-native');
const zlib = require('zlib');
const streamToPromise = require('stream-to-promise');
const fs = require('fs');
const moment = require('moment');
@@ -28,7 +28,7 @@ function backup(filename) {
debug(`#backup() beginning backup routine.`);

return mysqldump(file)
.then(() => xz(file))
.then(() => gzip(file))
.then(upload);
}

@@ -97,45 +97,41 @@ function mysqldump(file, options = {}) {
}

/**
* @function xz
* @function gzip
*
* @description
* This function uses the lzma-native library for ultra-fast compression of the
* This function uses the native zlib library for ultra-fast compression of the
* backup file. Since streams are used, the memory requirements should stay
* relatively low.
*/
function xz(file) {
const outfile = `${file}.xz`;
async function gzip(file) {
const outfile = `${file}.gz`;

debug(`#xz() compressing ${file} into ${outfile}.`);
debug(`#gzip() compressing ${file} into ${outfile}.`);

const compressor = lzma.createCompressor();
const input = fs.createReadStream(file);
const output = fs.createWriteStream(outfile);

let beforeSizeInMegabytes;
let afterSizeInMegabytes;
const stats = await fs.promises.stat(file);
const beforeSizeInMegabytes = stats.size / 1000000.0;
debug(`#gzip() ${file} is ${beforeSizeInMegabytes}MB`);

return fs.promises.stat(file)
.then(stats => {
beforeSizeInMegabytes = stats.size / 1000000.0;
debug(`#xz() ${file} is ${beforeSizeInMegabytes}MB`);
// start the compression
const streams = input
    .pipe(zlib.createGzip()) // compression stream (createGunzip() would decompress)
.pipe(output);

// start the compresion
const streams = input.pipe(compressor).pipe(output);
return streamToPromise(streams);
})
.then(() => util.statp(outfile))
.then(stats => {
afterSizeInMegabytes = stats.size / 1000000.0;
debug(`#xz() ${outfile} is ${afterSizeInMegabytes}MB`);
await streamToPromise(streams);

const ratio = Number(beforeSizeInMegabytes / afterSizeInMegabytes).toFixed(2);
const statsAfter = await fs.promises.stat(outfile);
const afterSizeInMegabytes = statsAfter.size / 1000000.0;
debug(`#gzip() ${outfile} is ${afterSizeInMegabytes}MB`);

debug(`#xz() compression ratio: ${ratio}`);
const ratio = Number(beforeSizeInMegabytes / afterSizeInMegabytes).toFixed(2);

return outfile;
});
debug(`#gzip() compression ratio: ${ratio}`);

return outfile;
}

/**
@@ -148,11 +144,11 @@ function upload(file, options = {}) {
debug(`#upload() Not Implemented Yet!`);

if (!options.name) {
options.name = `${process.env.DB_NAME}-${moment().format('YYYY-MM-DD')}.sql.xz`;
options.name = `${process.env.DB_NAME}-${moment().format('YYYY-MM-DD')}.sql.gz`;
}
}

exports.backup = backup;
exports.mysqldump = mysqldump;
exports.upload = upload;
exports.xz = xz;
exports.gzip = gzip;
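
For reference, the same compression step can also be written with Node's stream.pipeline helper, which forwards errors from any stage of the pipe chain and cleans the streams up on failure. A sketch under the assumption that `file` is an existing dump on disk (note that zlib.createGzip() is the compressing side, createGunzip() the decompressing one):

const fs = require('fs');
const zlib = require('zlib');
const { promisify } = require('util');
const pipeline = promisify(require('stream').pipeline);

// Compress `file` into `${file}.gz` with the built-in zlib gzip stream.
async function gzipFile(file) {
  const outfile = `${file}.gz`;
  await pipeline(
    fs.createReadStream(file),
    zlib.createGzip(),           // compression; createGunzip() would decompress
    fs.createWriteStream(outfile),
  );
  return outfile;
}

module.exports = { gzipFile };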
10 changes: 8 additions & 2 deletions server/lib/uploader.js
@@ -69,12 +69,18 @@ function Uploader(prefix, fields) {

// configure the storage space using multer's diskStorage. This will allow
const storage = multer.diskStorage({
destination : (req, file, cb) => {
destination : async (req, file, cb) => {
// note: need absolute path here for mkdirp
const folder = path.join(process.cwd(), directory);
debug(`upload dirctory ${folder} does not exist.`);
debug(`creating upload directory ${folder}.`);
mkdirp(folder, err => cb(err, folder));

try {
await mkdirp(folder);
cb(null, folder);
} catch (err) {
cb(err);
}
},
filename : (req, file, cb) => {
const id = uuid();
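For completeness, a hedged usage sketch of how a diskStorage configured this way is typically wired into an Express route; the field name, route, and target folder are illustrative and not taken from this repository:

const express = require('express');
const multer = require('multer');
const path = require('path');
const mkdirp = require('mkdirp');

const app = express();

// Minimal diskStorage mirroring the pattern above: make sure the target
// folder exists before multer writes the incoming file into it.
const storage = multer.diskStorage({
  destination : async (req, file, cb) => {
    const folder = path.join(process.cwd(), 'uploads'); // illustrative folder
    try {
      await mkdirp(folder);
      cb(null, folder);
    } catch (err) {
      cb(err);
    }
  },
});

// Accept a single file under the (illustrative) field name 'report'.
app.post('/uploads', multer({ storage }).single('report'), (req, res) => {
  res.json({ filename : req.file.filename });
});

app.listen(3000);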
