diff --git a/api/controllers/datalad.js b/api/controllers/datalad.js
index 03e43e2f..106cde28 100644
--- a/api/controllers/datalad.js
+++ b/api/controllers/datalad.js
@@ -74,6 +74,11 @@ router.post('/import/:dataset_id', common.jwt(), (req, res, next)=>{
if(project.members.includes(req.user.sub)) canedit = true;
if(!canedit) return next("you can't import to this project");
+    if(dataset.phenotypes) {
+        console.log("importing phenotype files");
+        //copy the Phenotype references so the project can resolve them later
+        project.phenotypes = dataset.phenotypes;
+    }
+
//update participants info
let participants = new db.Participants({
project,
@@ -82,7 +87,7 @@ router.post('/import/:dataset_id', common.jwt(), (req, res, next)=>{
subjects: dataset.participants,
columns: dataset.participants_info, //might be missing
});
- common.publish("participant.create."+req.user.sub+"."+project._id, participants); //too much data?
+ // common.publish("participant.create."+req.user.sub+"."+project._id, participants); //too much data?
participants.save();
db.DLDatasets.updateOne({_id: dataset._id}, {$inc: {import_count: 1} }).then(err=>{
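The controller change above stores Phenotype ObjectId references on the project rather than embedding the documents. A minimal sketch of how a consumer could resolve those references; the `db.Projects` export name and require path are assumptions, not shown in this diff:

```js
const db = require('../models'); //warehouse models (path assumed)

//resolve the phenotype references stored on an imported project
async function getProjectPhenotypes(projectId) {
    //populate() swaps each ObjectId in project.phenotypes for the full document
    const project = await db.Projects.findById(projectId).populate('phenotypes');
    return project.phenotypes; //[{name, file, sidecar, columns, data, dldataset}, ...]
}
```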
diff --git a/api/models.js b/api/models.js
index 72afa308..3e9599c5 100644
--- a/api/models.js
+++ b/api/models.js
@@ -41,6 +41,11 @@ var projectSchema = mongoose.Schema({
avatar: String, //url for avatar
+    //Phenotype records copied from the source datalad dataset at import time
+    phenotypes: [{
+        type: mongoose.Schema.Types.ObjectId,
+        ref: 'Phenotype'
+    }],
+
//access control
//* private - only the project member can access
//* public - accessible by anyone
@@ -805,6 +810,20 @@ ruleSchema.pre('save', function(next) {
});
exports.Rules = mongoose.model('Rules', ruleSchema);
+const phenotypeSchema = mongoose.Schema({
+    name: String, //phenotype name
+    file: String, //source file path (phenotype/*.tsv under the BIDS convention)
+    sidecar: String, //path to the .json sidecar describing the columns
+    columns: Object, //column definitions from the sidecar
+    data: Array, //rows parsed from the .tsv
+    dldataset: {
+        type: mongoose.Schema.Types.ObjectId,
+        ref: 'DLDataset'
+    }
+});
+
+exports.Phenotype = mongoose.model('Phenotype', phenotypeSchema);
+
//////////////////////////////////////////////////////////////
//
// datalad collections
@@ -840,6 +859,11 @@ var dlDatasetSchema = mongoose.Schema({
],
participants_info: mongoose.Schema.Types.Mixed, //metadata for participants info
+    //phenotype files discovered in this dataset
+    phenotypes: [{
+        type: mongoose.Schema.Types.ObjectId,
+        ref: 'Phenotype'
+    }],
+
stats: {
subjects: Number,
sessions: Number,
@@ -881,4 +905,3 @@ var commentSchema = mongoose.Schema({
});
exports.Comments = mongoose.model('Comments', commentSchema);
-
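For reference, a sketch of the document shape the new `phenotypeSchema` accepts. The field values below follow the BIDS `phenotype/` convention and are illustrative assumptions, not data taken from this diff; the require path is also assumed:

```js
const mongoose = require('mongoose');
const db = require('./models'); //assumed path to this file's exports

//validate() checks the shape against phenotypeSchema without a db connection
const phenotype = new db.Phenotype({
    name: 'adhd',                    //one phenotype measurement per file
    file: 'phenotype/adhd.tsv',      //tabular data, one row per participant
    sidecar: 'phenotype/adhd.json',  //describes each column in the .tsv
    columns: { age: { Description: 'age of participant', Units: 'years' } },
    data: [ { participant_id: 'sub-01', age: 25 } ],
    dldataset: new mongoose.Types.ObjectId(), //placeholder parent dataset id
});
phenotype.validate(err=>console.log(err || 'schema accepts this shape'));
```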
diff --git a/bin/importdatalad.js b/bin/importdatalad.js
index 16e0456d..58fb8171 100755
--- a/bin/importdatalad.js
+++ b/bin/importdatalad.js
@@ -10,9 +10,13 @@ const child_process = require('child_process');
const cli = require('brainlife');
const axios = require('axios');
-process.chdir('/mnt/datalad');
-
+if(config.dataladDirectory) process.chdir(config.dataladDirectory);
+else {
+ console.error("config.dataladDirectory is not set");
+ process.exit(1);
+}
console.log("connecting");
+
db.init(async err=>{
if(err) throw err;
await load_datatypes();
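The hardcoded `/mnt/datalad` path now comes from configuration. A hypothetical config entry this script expects; only the key name `dataladDirectory` comes from the diff, the config file location is an assumption:

```js
//in the warehouse config module loaded by importdatalad.js
exports.dataladDirectory = '/mnt/datalad'; //root directory containing the cloned datalad datasets
```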
@@ -28,8 +32,8 @@ db.init(async err=>{
*/
console.log("loading dataset_description.json");
- //let datasets = child_process.execSync("find ./ -name dataset_description.json", {encoding: "utf8"}).split("\n").filter(dataset=>{
- const datasets = fs.readFileSync(process.argv[2], "utf8").split("\n").filter(dataset=>{
+ let datasets = child_process.execSync("find ./ -name dataset_description.json", {encoding: "utf8"}).split("\n").filter(dataset=>{
+ // const datasets = fs.readFileSync(process.argv[2], "utf8").split("\n").filter(dataset=>{
//ignore some datasets
if(dataset.startsWith("datasets.datalad.org/openneuro")) return false;
if(dataset.startsWith("datasets.datalad.org/openfmri")) return false;
@@ -108,7 +112,7 @@ async function load_datatypes() {
function handle_bids(key, bids, cb) {
//upsert dl-dataset record
- db.DLDatasets.findOne(key, (err, dldataset)=>{
+ db.DLDatasets.findOne(key, async (err, dldataset)=>{
if(err) return cb(err);
if(!bids.dataset_description) return cb();
@@ -123,7 +127,31 @@ function handle_bids(key, bids, cb) {
if(bids.dataset_description) dldataset.dataset_description = bids.dataset_description;
if(bids.participants) dldataset.participants = bids.participants;
if(bids.participants_json) dldataset.participants_info = bids.participants_json;
+    if(bids.phenotypes) {
+        let phenotypeIds = [];
+        for(let phenotypeData of bids.phenotypes) {
+            //create a Phenotype record for this file, linked to the parent dataset
+            const phenotype = new db.Phenotype({
+                name: phenotypeData.name,
+                file: phenotypeData.file,
+                sidecar: phenotypeData.sidecar,
+                columns: phenotypeData.columns,
+                data: phenotypeData.data,
+                dldataset: dldataset._id
+            });
+            const savedPhenotype = await phenotype.save();
+            phenotypeIds.push(savedPhenotype._id);
+        }
+        dldataset.phenotypes = phenotypeIds;
+        console.log("phenotypes added");
+    }
+
//count
let unique_subjects = [];
let unique_sessions = [];
@@ -154,7 +182,6 @@ function handle_bids(key, bids, cb) {
dldataset.save(err=>{
if(err) throw err;
-
//handle each items
async.eachSeries(bids.datasets, (item, next_dataset)=>{
item.dataset.datatype = datatype_ids[item.dataset.datatype];
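One caveat with the loop above: `handle_bids` upserts the dl-dataset record, so re-running the importer appends a fresh set of Phenotype documents on every pass. A possible refinement (an assumption, not part of this change) that keeps the import idempotent and batches the writes; it would run inside the same now-async `findOne` callback:

```js
if(bids.phenotypes) {
    //drop Phenotype records left over from a previous import of this dataset
    await db.Phenotype.deleteMany({dldataset: dldataset._id});
    //insertMany issues one bulk write instead of one save() per file
    const saved = await db.Phenotype.insertMany(bids.phenotypes.map(p=>({
        name: p.name,
        file: p.file,
        sidecar: p.sidecar,
        columns: p.columns,
        data: p.data,
        dldataset: dldataset._id,
    })));
    dldataset.phenotypes = saved.map(doc=>doc._id);
}
```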
diff --git a/ui/src/project.vue b/ui/src/project.vue
index 52eca641..3a9ed719 100644
--- a/ui/src/project.vue
+++ b/ui/src/project.vue
@@ -255,6 +255,15 @@
+
We found the following journals/articles related to this project based on name/description
@@ -357,7 +377,7 @@