Merge pull request #111 from camicroscope/develop
For 3.9.4
birm authored Jun 17, 2021
2 parents 982495b + 4525c9a commit 91fec6d
Showing 17 changed files with 1,288 additions and 569 deletions.
1 change: 1 addition & 0 deletions .dockerignore
@@ -0,0 +1 @@
node_modules
2 changes: 1 addition & 1 deletion Dockerfile
@@ -1,4 +1,4 @@
FROM node:14-stretch-slim
FROM node:16-stretch-slim
RUN apt-get update && apt-get upgrade -y && apt-get install -y git build-essential python3
RUN mkdir /src
COPY . /src
11 changes: 11 additions & 0 deletions README.md
@@ -54,3 +54,14 @@ userFilter -- list of values which must be present in given field in filtered da
\*\* -- immune to filters (can see everything)
Public -- users with no userFilter are assigned this filter
An item with no filter value is returned in all cases, and is thus also public.

## Local Development Environment
To quickly set up a development environment, use the `setup_script.sh` script. It sets up the project, initializes and seeds the database configuration, imports routes, initializes the environment config files, and generates the required keys.

First, clone the <a href="https://github.com/camicroscope/Caracal/tree/backup-dev">Caracal (backup-dev branch)</a>, <a href="https://github.com/camicroscope/caMicroscope">caMicroscope</a>, and <a href="https://github.com/camicroscope/Distro">Distro</a> repositories, and make sure they all share the same parent directory.

Run the script with `./setup_script.sh` or `bash ./setup_script.sh`.

By default, the script loads a database named `camic` from the server at `127.0.0.1`. To use a different host and database name, pass them as arguments, e.g. `./setup_script.sh custom_host custom_database_name`.

Run `npm start` to start the application; it will be available at `localhost:4010`.
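
To confirm the server actually came up, a quick liveness probe from Node is enough. The snippet below is only an illustration (it is not part of the repository) and assumes the default port 4010 mentioned above:

```javascript
// Minimal liveness check against a locally running Caracal instance.
// Any route works for this purpose; only reachability matters here.
const http = require('http');

http.get('http://localhost:4010/', (res) => {
  console.log(`Caracal responded with HTTP ${res.statusCode}`);
  res.resume(); // discard the response body
}).on('error', (err) => {
  console.error(`Caracal is not reachable: ${err.message}`);
});
```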
13 changes: 11 additions & 2 deletions caracal.js
@@ -24,6 +24,8 @@ const Model = require('./handlers/modelTrainer.js');
const DataTransformationHandler = require('./handlers/dataTransformationHandler.js');
// TODO validation of data

const {connector} = require("./service/database/connector");

var WORKERS = process.env.NUM_THREADS || 4;

var PORT = process.env.PORT || 4010;
@@ -196,7 +198,14 @@ var startApp = function(app) {

throng(WORKERS, startApp(app));

const handler = new DataTransformationHandler(MONGO_URI, './json/configuration.json');
handler.startHandler();
/** initialize DataTransformationHandler only after database is ready */
connector.init().then(() => {
const handler = new DataTransformationHandler(MONGO_URI, './json/configuration.json');
handler.startHandler();
}).catch((e) => {
console.error("error connecting to database");
process.exit(1);
});

module.exports = app; // for tests

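The `service/database/connector` module required above is among the files not expanded in this view. Judging only from how it is used here — `connector.init()` returns a promise that resolves once MongoDB is reachable — a minimal sketch of that pattern could look as follows; the `getClient()` helper is an assumption for illustration, not the actual implementation:

```javascript
// Hypothetical sketch of a shared-connection module in the spirit of
// service/database/connector (the real file is not shown in this view).
const {MongoClient} = require('mongodb');

const MONGO_URI = process.env.MONGO_URI || 'mongodb://localhost';

let client = null;

async function init() {
  // Connect exactly once; callers (caracal.js above) decide what to do on failure.
  if (!client) {
    client = new MongoClient(MONGO_URI);
    await client.connect();
  }
  return client;
}

function getClient() {
  if (!client) {
    throw new Error('connector.init() has not completed');
  }
  return client;
}

module.exports = {connector: {init, getClient}};
```

Gating `DataTransformationHandler` behind `connector.init()`, as the hunk above does, means the handler never starts against a database that is not yet accepting connections, and a failed connection terminates the process instead of leaving it half-initialized.
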
8 changes: 4 additions & 4 deletions handlers/authHandlers.js
@@ -45,13 +45,13 @@ try {
}

try {
const prikeyPath = './keys/key.pub';
if (fs.existsSync(prikeyPath)) {
var PUBKEY = fs.readFileSync(prikeyPath, 'utf8');
const pubkeyPath = './keys/key.pub';
if (fs.existsSync(pubkeyPath)) {
var PUBKEY = fs.readFileSync(pubkeyPath, 'utf8');
} else {
if (DISABLE_SEC || ENABLE_SECURITY_AT && Date.parse(ENABLE_SECURITY_AT) > Date.now()) {
PUBKEY = '';
console.warn('pubkey null since DISABLE_SEC and no prikey provided');
console.warn('pubkey null since DISABLE_SEC and no pubkey provided');
} else {
console.error('pubkey does not exist');
}
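For context, the `PUBKEY` loaded here is what token verification ultimately relies on; the verification code itself lies outside this hunk. The following is a generic illustration of RS256 verification with the `jsonwebtoken` package — an assumption for illustration, not code quoted from `authHandlers.js`:

```javascript
// Illustration only: verifying an RS256-signed JWT against a PEM public key
// such as ./keys/key.pub. Names below are not taken from authHandlers.js.
const fs = require('fs');
const jwt = require('jsonwebtoken');

const PUBKEY = fs.readFileSync('./keys/key.pub', 'utf8');

function verifyToken(token) {
  try {
    // Throws if the signature, expiry, or algorithm does not check out.
    return jwt.verify(token, PUBKEY, {algorithms: ['RS256']});
  } catch (err) {
    console.error('token rejected:', err.message);
    return null;
  }
}
```
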
208 changes: 17 additions & 191 deletions handlers/dataHandlers.js
@@ -1,185 +1,11 @@
var mongo = require('mongodb');

var MONGO_URI = process.env.MONGO_URI || 'mongodb://localhost';
var DISABLE_SEC = (process.env.DISABLE_SEC === 'true') || false;


function mongoFind(database, collection, query) {
return new Promise(function(res, rej) {
try {
mongo.MongoClient.connect(MONGO_URI, function(err, db) {
if (err) {
rej(err);
} else {
if (query['_id']) {
query['_id'] = new mongo.ObjectID(query['_id']);
}
var dbo = db.db(database);
dbo.collection(collection).find(query).toArray(function(err, result) {
if (err) {
rej(err);
}
// compatible wiht bindaas odd format
result.forEach((x) => {
x['_id'] = {
'$oid': x['_id'],
};
});
res(result);
db.close();
});
}
});
} catch (error) {
rej(error);
}
});
}

function mongoDistinct(database, collection, upon, query) {
return new Promise(function(res, rej) {
try {
mongo.MongoClient.connect(MONGO_URI, function(err, db) {
if (err) {
rej(err);
} else {
var dbo = db.db(database);
dbo.collection(collection).distinct(upon, query, function(err, result) {
if (err) {
rej(err);
}
res(result);
db.close();
});
}
});
} catch (error) {
console.error(error);
rej(error);
}
});
}

function mongoAdd(database, collection, data) {
return new Promise(function(res, rej) {
// if data not array, make it one
if (!Array.isArray(data)) {
data = [data];
}
try {
mongo.MongoClient.connect(MONGO_URI, function(err, db) {
if (err) {
rej(err);
} else {
var dbo = db.db(database);
dbo.collection(collection).insertMany(data, function(err, result) {
if (err) {
rej(err);
}
res(result);
db.close();
});
}
});
} catch (error) {
console.error(error);
rej(error);
}
});
}

function mongoDelete(database, collection, query) {
return new Promise(function(res, rej) {
mongo.MongoClient.connect(MONGO_URI, function(err, db) {
try {
if (err) {
rej(err);
} else {
var dbo = db.db(database);
if (query['_id']) {
query['_id'] = new mongo.ObjectID(query['_id']);
}
dbo.collection(collection).deleteOne(query, function(err, result) {
if (err) {
rej(err);
}
delete result.connection;
res(result);
db.close();
});
}
} catch (error) {
console.error(error);
rej(error);
}
});
});
}

function mongoAggregate(database, collection, pipeline) {
return new Promise(function(res, rej) {
mongo.MongoClient.connect(MONGO_URI, function(err, db) {
try {
if (err) {
rej(err);
} else {
var dbo = db.db(database);
dbo.collection(collection).aggregate(pipeline).toArray(function(err, result) {
if (err) {
rej(err);
}
// compatible wiht bindaas odd format
// result.forEach((x) => {
// x['_id'] = {
// '$oid': x['_id'],
// };
// });
res(result);
db.close();
});
}
} catch (error) {
console.error(error);
rej(error);
}
});
});
}

function mongoUpdate(database, collection, query, newVals) {
return new Promise(function(res, rej) {
try {
mongo.MongoClient.connect(MONGO_URI, function(err, db) {
if (err) {
rej(err);
} else {
var dbo = db.db(database);
if (query['_id']) {
query['_id'] = new mongo.ObjectID(query['_id']);
}
dbo.collection(collection).updateOne(query, newVals, function(err, result) {
if (err) {
console.log(err);
rej(err);
}
delete result.connection;
res(result);
db.close();
});
}
});
} catch (error) {
rej(error);
}
});
}
const DISABLE_SEC = (process.env.DISABLE_SEC === 'true') || false;
const mongoDB = require("../service/database");

var General = {};
General.find = function(db, collection) {
return function(req, res, next) {
var query = req.query;
delete query.token;
mongoFind(db, collection, query).then((x) => {
mongoDB.find(db, collection, query).then((x) => {
req.data = x;
next();
}).catch((e) => next(e));
@@ -190,7 +16,7 @@ General.get = function(db, collection) {
return function(req, res, next) {
var query = req.query;
delete query.token;
mongoFind(db, collection, {_id: req.query.id}).then((x) => {
mongoDB.find(db, collection, {_id: req.query.id}).then((x) => {
req.data = x;
next();
}).catch((e) => next(e));
@@ -201,7 +27,7 @@ General.distinct = function(db, collection, upon) {
return function(req, res, next) {
var query = req.query;
delete query.token;
mongoDistinct(db, collection, upon, query).then((x) => {
mongoDB.distinct(db, collection, upon, query).then((x) => {
req.data = x;
next();
}).catch((e) => next(e));
@@ -211,7 +37,7 @@ General.distinct = function(db, collection, upon) {
General.add = function(db, collection) {
return function(req, res, next) {
var data = JSON.parse(req.body);
mongoAdd(db, collection, data).then((x) => {
mongoDB.add(db, collection, data).then((x) => {
req.data = x;
next();
}).catch((e) => next(e));
@@ -225,7 +51,7 @@ General.update = function(db, collection) {
var newVals = {
$set: JSON.parse(req.body),
};
mongoUpdate(db, collection, query, newVals).then((x) => {
mongoDB.update(db, collection, query, newVals).then((x) => {
req.data = x;
next();
}).catch((e) => next(e));
@@ -236,7 +62,7 @@ General.delete = function(db, collection) {
return function(req, res, next) {
var query = req.query;
delete query.token;
mongoDelete(db, collection, query).then((x) => {
mongoDB.delete(db, collection, query).then((x) => {
req.data = x;
next();
}).catch((e) => next(e));
@@ -249,7 +75,7 @@ Presetlabels.add = function(req, res, next) {
var query = req.query;
delete query.token;
var labels = JSON.parse(req.body);
mongoUpdate('camic', 'configuration', {'config_name': 'preset_label'}, {$push: {configuration: labels}}).then((x) => {
mongoDB.update('camic', 'configuration', {'config_name': 'preset_label'}, {$push: {configuration: labels}}).then((x) => {
req.data = x;
next();
}).catch((e) => next(e));
@@ -287,7 +113,7 @@ Presetlabels.update = function(req, res, next) {
newVals['$unset']['configuration.$.key'] = 1;
}

mongoUpdate('camic', 'configuration',
mongoDB.update('camic', 'configuration',
{
'config_name': 'preset_label',
'configuration.id': query.id,
@@ -301,7 +127,7 @@ Presetlabels.update = function(req, res, next) {
Presetlabels.remove = function(req, res, next) {
var query = req.query;
delete query.token;
mongoUpdate('camic', 'configuration',
mongoDB.update('camic', 'configuration',
{
'config_name': 'preset_label',
}, {$pull: {configuration: {id: query.id}}}).then((x) => {
@@ -337,7 +163,7 @@ Mark.spatial = function(req, res, next) {
'$gt': parseFloat(query.footprint),
};
}
mongoFind('camic', 'mark', query).then((x) => {
mongoDB.find('camic', 'mark', query).then((x) => {
req.data = x;
next();
}).catch((e) => next(e));
@@ -383,7 +209,7 @@ Mark.multi = function(req, res, next) {
};
}

mongoFind('camic', 'mark', query).then((x) => {
mongoDB.find('camic', 'mark', query).then((x) => {
req.data = x;
next();
}).catch((e) => next(e));
@@ -415,12 +241,12 @@ Mark.findMarkTypes = function(req, res, next) {
},
},
];
mongoAggregate('camic', 'mark', pipeline).then((x) => {
mongoDB.aggregate('camic', 'mark', pipeline).then((x) => {
req.data = x;
next();
}).catch((e) => next(e));
} else {
mongoDistinct('camic', 'mark', 'provenance.analysis', query).then((x) => {
mongoDB.distinct('camic', 'mark', 'provenance.analysis', query).then((x) => {
req.data = x;
next();
}).catch((e) => next(e));
@@ -431,7 +257,7 @@ var Heatmap = {};
Heatmap.types = function(req, res, next) {
var query = req.query;
delete query.token;
mongoFind('camic', 'heatmap', query, {
mongoDB.find('camic', 'heatmap', query, {
'data': 0,
}).then((x) => {
x.forEach((x)=>delete x.data);
@@ -443,7 +269,7 @@ Heatmap.types = function(req, res, next) {
var User = {};

User.forLogin = function(email) {
return mongoFind('camic', 'user', {'email': email});
return mongoDB.find('camic', 'user', {'email': email});
};

User.wcido = function(req, res, next) {
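The net effect of this file's changes: the per-request `MongoClient.connect` helpers deleted above are replaced by the shared `mongoDB` service (`require("../service/database")`), whose implementation is not expanded in this view. A sketch of what one of its methods plausibly looks like — reusing the hypothetical `getClient()` from the connector sketch earlier and mirroring the `$oid` formatting of the deleted `mongoFind` — is shown below:

```javascript
// Hypothetical shape of service/database (not shown in this view): the same
// operations as the deleted helpers, run on the single client opened by
// connector.init() instead of a new connection per request.
const mongo = require('mongodb');
const {connector} = require('./connector');

async function find(database, collection, query) {
  if (query['_id']) {
    query['_id'] = new mongo.ObjectID(query['_id']);
  }
  const result = await connector.getClient()
      .db(database)
      .collection(collection)
      .find(query)
      .toArray();
  // Keep the bindaas-compatible {_id: {$oid: ...}} shape the old mongoFind produced.
  result.forEach((x) => {
    x['_id'] = {'$oid': x['_id']};
  });
  return result;
}

module.exports = {find /* distinct, add, update, delete, aggregate follow the same pattern */};
```

Routing every handler through one shared client removes the connect/close cost the old helpers paid on each request and gives `caracal.js` a single place, `connector.init()`, to decide whether the process should start at all.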
