Skip to content

Commit

Permalink
Update dependencies (#387)
Browse files Browse the repository at this point in the history
* Timer useless packages removed

* Main useless packages removed

* Engine : removed useless packages

* Renamed files http consumer, provider and http file consumer

* Revert "Renamed files http consumer, provider and http file consumer"

This reverts commit 960a206.

* Renamed files postConsumer restApiPost restGetFile

* Renamed component finished

* Removed restGetJson and restApiGet component

* Resolved CVE critical packages bson, mongoose, json-schema and url-parse

* sequelize package update

* nodemailer && bugs solved

* last changes for package xmldom

* Deleted console.log

* Bugs file httpConsumer + rdf_traitment

* Corrected error in SFTP consumer + restApiGet

* Put back rest api get & rest get json editors in place
  • Loading branch information
Mikoteko authored Feb 1, 2024
1 parent edfabd6 commit ca8ab0c
Show file tree
Hide file tree
Showing 48 changed files with 6,955 additions and 15,912 deletions.
20 changes: 19 additions & 1 deletion config.json
Original file line number Diff line number Diff line change
Expand Up @@ -32,6 +32,9 @@
"httpGet": {
"price": 40
},
"httpConsumerFile": {
"price": 40
},
"sftpConsumer":{
"price": 40
},
Expand Down Expand Up @@ -92,6 +95,12 @@
"postConsumer":{
"price": 20
},
"httpConsumer":{
"price": 20
},
"httpProvider":{
"price": 10
},
"flat":{
"price": 20
},
Expand Down Expand Up @@ -129,5 +138,14 @@
"port": 8081,
"secret": "secret",
"target": "http://localhost:8080"
},
"smtp": {
"host": "",
"port": 587,
"debug": true,
"auth": {
"user": "",
"pass": ""
}
}
}
}
35 changes: 19 additions & 16 deletions core/dataTraitmentLibrary/file_convertor.js
Original file line number Diff line number Diff line change
@@ -1,3 +1,9 @@
// --------------------------------------------------------------------------------
// TODO =>
// remettre à jour les packages utilisés dans le fichier
// rdf_traitment.js pour les fichiers owl, rdf et ttl
// --------------------------------------------------------------------------------

var exel = require('./exel/exel_traitment.js');
var rdf = require('./rdf/rdf_traitment.js');
var xml = require('./xml/xml_traitment.js');
Expand Down Expand Up @@ -74,7 +80,6 @@ function _data_from_file(filename, dataBuffer, contentType) {
// console.log('filename',filename);
// console.log('contentType',contentType);
const extension= _extension(filename, contentType);
console.log(extension)

return new Promise(async function(resolve, reject) {
switch (extension) {
Expand Down Expand Up @@ -149,26 +154,24 @@ function _data_from_file(filename, dataBuffer, contentType) {

// RDF TTL DONE
case ("ttl"):
rdf.rdf_traitmentTTL(dataBuffer.toString()).then((result) => {
//console.log(reusltat)
resolve({
data: result
})
}, function(err) {
reject("votre fichier n'est pas au norme ou pas du bon format " + extension)
})
// rdf.rdf_traitmentTTL(dataBuffer.toString()).then((result) => {
// //console.log(reusltat)
// resolve({
// data: result
// })
// }, function(err) {
reject("votre fichier n'est pas au norme ou pas du bon format " + extension)
// })
break;

// RDF XML DONE IF TEST PARSE
case ("rdf"):
case ("owl"):
rdf.rdf_traitmentXML(dataBuffer.toString()).then(result => {
// console.log("RDF", result)
//console.log(JSON.stringify(reusltat))
resolve(result)
}, function(err) {
reject("votre fichier n'est pas au norme ou pas du bon format " + extension)
})
// rdf.rdf_traitmentXML(dataBuffer.toString()).then(result => {
// resolve(result)
// }, function(err) {
reject("votre fichier n'est pas au norme ou pas du bon format " + extension)
// })
break;

// EXEL/CSV/XLSX DONE
Expand Down
68 changes: 45 additions & 23 deletions core/dataTraitmentLibrary/rdf/rdf_traitment.js
Original file line number Diff line number Diff line change
@@ -1,6 +1,11 @@
// --------------------------------------------------------------------------------
// TODO =>
// changer les packages importés obsoletes
// remplacer rdf-parser-rdfxml voir même jsonld-streaming-parser
// --------------------------------------------------------------------------------

var RdfXmlParser = require('rdf-parser-rdfxml');
var JsonLdSerializer = require('rdf-serializer-jsonld');
// var RdfXmlParser = require('rdf-parser-rdfxml');
// var JsonLdSerializer = require('rdf-serializer-jsonld');
var N3Parser = require('rdf-parser-n3');

// const jsonld = require('jsonld');
Expand All @@ -22,25 +27,44 @@ module.exports = {

// --------------------------------------------------------------------------------

const serializer = new JsonLdSerializer({outputFormat: 'string', compact: false});
const parser = new RdfXmlParser();
// const serializer = new JsonLdSerializer({outputFormat: 'string', compact: false});
// const parser = new RdfXmlParser();
const n3Parser = new N3Parser()


function _rdf_traitmentXML (dataXML) {
let n3Tab = []
return new Promise(function(resolve,reject){
new Promise(function (resolve, reject) {
parser.process(dataXML, function (n3) {
n3Tab.push(n3)
resolve(n3Tab)
})
}).then(function (n3T) {
serializer.serialize(n3T).then(function (jsonld) {
resolve(jsonld)
})
})
})
// let n3Tab = []
// return new Promise(function(resolve,reject){
// new Promise(function (resolve, reject) {
// parser.process(dataXML, function (n3) {
// n3Tab.push(n3)
// resolve(n3Tab)
// })
// }).then(function (n3T) {
// serializer.serialize(n3T).then(function (jsonld) {
// resolve(jsonld)
// })
// })
// })
// .catch((error) => {
// console.error('Error parsing RDF/XML:', error);
// });
}

/**
 * Parse an RDF/XML string into an array of triples.
 *
 * NOTE(review): disabled stub — the whole streaming-parser body below is
 * commented out (presumably while the obsolete `rdf-parser-rdfxml` package
 * was being replaced; see the TODO header of this file). As written, calling
 * this function does nothing and returns `undefined`.
 *
 * @param {string} dataXML - RDF/XML document (currently unused).
 * @returns {undefined} until the implementation is restored.
 */
function parseRDF(dataXML) {
// return new Promise((resolve, reject) => {
// const readableStream = Readable.from([dataXML]);
// const triples = [];
// parser.import(readableStream, (triple) => {
// triples.push(triple);
// }, (error) => {
// if (error) {
// reject(error);
// } else {
// resolve(triples);
// }
// });
// });
}

function _rdf_traitmentJSONLD (dataJSON) {
Expand Down Expand Up @@ -69,7 +93,7 @@ function _rdf_traitmentTTL(dataTTL) {
}

/**
 * Handle RDFa (HTML-embedded RDF) input.
 *
 * Under development — placeholder with no RDFa handling implemented yet;
 * calling it is a no-op that returns `undefined`.
 *
 * @param {string} dataHTMLRDFA - HTML document containing RDFa (unused).
 * @returns {undefined}
 */
function _rdf_traitmentRDFA (dataHTMLRDFA) {
// Under development (EN COURS DE DEV)
}

async function _json_to_rdf (jsonData, header){
Expand Down Expand Up @@ -114,9 +138,7 @@ async function _json_to_rdf (jsonData, header){



////commentaires////

//commentaires//

//// DBPEDIA PB ==> http://dbpedia.org/resource/Eiffel_Tower PROXY pour format ttl je pense
//redirection vers http://dbpedia.org/page/Eiffel_Tower qui est rdfa et qui ne peux pas ce convertir en n3 du a probleme de lib //
// ISSUE RDF-EXT : https://github.com/rdf-ext/rdf-parser-rdfxml/issues/1 ///
// DBPEDIA PB ==> http://dbpedia.org/resource/Eiffel_Tower PROXY pour format ttl je pense
// redirection vers http://dbpedia.org/page/Eiffel_Tower qui est rdfa et qui ne peux pas ce convertir en n3 du a probleme de lib //
105 changes: 57 additions & 48 deletions core/lib/user_lib.js
Original file line number Diff line number Diff line change
Expand Up @@ -300,15 +300,15 @@ function _getWithRelations(userID,config) {

function _userGraph(userId) {
return new Promise(resolve => {
historiqueModel.getInstance().model.aggregate(
[{
historiqueModel.getInstance().model.aggregate([
{
$match: {
userId: userId
}
},
{
$group: {
_id: {workspaceId :{ workspaceId: "$workspaceId"}, roundDate : { $dayOfMonth: "$date" }},
_id: { workspaceId: "$workspaceId", roundDate: { $dayOfMonth: "$date" } },
totalPrice: {
$sum: "$totalPrice"
},
Expand All @@ -319,50 +319,61 @@ function _userGraph(userId) {
$push: "$$ROOT"
}
}
}],
(_err, result) => {
if(result && result[0]){
const c = {}
const array = []
result[0].workspaces.forEach((histo) => {
}
])
.then(result => {
if (result && result[0]) {
const c = {};
const array = [];

result[0].workspaces.forEach(histo => {
let id = histo.workflowId + histo.roundDate;
if (c[id]) {
c[id].totalPrice += histo.totalPrice;
c[id].totalMo += histo.moCount
c[id].totalMo += histo.moCount;
} else {
c[id] = {};
c[id].totalPrice = histo.totalPrice
c[id].roundDate = histo.roundDate
c[id].totalMo = histo.moCount
c[id].id = histo.workflowId
c[id].totalPrice = histo.totalPrice;
c[id].roundDate = histo.roundDate;
c[id].totalMo = histo.moCount;
c[id].id = histo.workflowId;
}
})
});

for (const workspaceId in c) {
array.push(new Promise(resolve => {
array.push(
workspaceModel.getInstance().model.find({ _id: c[workspaceId].id })
.then((workspace) => {
if(c[workspaceId] && workspace[0]){
c[workspaceId].name = workspace[0].name
c[workspaceId].componentNumber = workspace[0].components ? workspace[0].components.length : 0
c[workspaceId].description = workspace[0].description
resolve(c[workspaceId])
}else {
resolve(c)
}
});
}))
.then(workspace => {
if (c[workspaceId] && workspace[0]) {
c[workspaceId].name = workspace[0].name;
c[workspaceId].componentNumber = workspace[0].components ? workspace[0].components.length : 0;
c[workspaceId].description = workspace[0].description;
return c[workspaceId];
} else {
return c;
}
})
);
}

Promise.all(array)
.then(WorspaceWithConsuption => (graphTraitement.formatDataUserGraph(WorspaceWithConsuption)))
.then(worspaceTraited => (resolve(worspaceTraited)))
.then(WorspaceWithConsuption => graphTraitement.formatDataUserGraph(WorspaceWithConsuption))
.then(worspaceTraited => resolve(worspaceTraited))
.catch(error => {
console.error('Error in processing workspaces:', error);
resolve(null);
});
} else {
resolve(null)
resolve(null);
}
}
);
})
.catch(error => {
console.error('Error in aggregating data:', error);
resolve(null);
});
});
} // <= _userGraph
}


/**
* @param {string} id
Expand Down Expand Up @@ -687,7 +698,7 @@ function _hash_password(password, passwordConfirm) {
}
}
if (!pattern.password.test(password)) {
return reject(new Error.PropertyValidationError("password"))
return reject("Le mot de passe doit avoir entre 6 et 20 caractères.")
}

bcrypt.genSalt(10, function (err, salt) {
Expand Down Expand Up @@ -747,20 +758,18 @@ function _is_google_user(user) {

// --------------------------------------------------------------------------------

function _createUpdatePasswordEntity(userMail, token ) {
return new Promise(function (resolve, reject) {
SecureMailModel.get()
.update({userMail}, {userMail, token}, {upsert: true, setDefaultsOnInsert: true})
.exec(function (err, userData) {
if(err){
return reject(new Error.DataBaseProcessError(err))
} else {
resolve(true);
}
});
});
} // <= _createUpdatePasswordEntity

/**
 * Create or update the password-reset entry for a user (upsert keyed on mail).
 *
 * @param {string} userMail - e-mail address identifying the user.
 * @param {string} token - reset token stored alongside the mail.
 * @returns {Promise<Object>} the Mongoose `updateOne` write result.
 * @throws {Error} 'DataBaseProcessError' wrapping the underlying DB failure.
 */
async function _createUpdatePasswordEntity(userMail, token) {
  try {
    const result = await SecureMailModel.updateOne(
      { userMail },
      { userMail, token },
      { upsert: true, setDefaultsOnInsert: true }
    );
    return result;
  } catch (error) {
    // BUG FIX: the previous code called `reject(...)` here, but this is a
    // plain async function — no `reject` is in scope, so any database error
    // raised a ReferenceError that masked the real failure. Rethrow instead,
    // preserving the original error via the `cause` option.
    throw new Error('DataBaseProcessError', { cause: error });
  }
}

// --------------------------------------------------------------------------------

Expand Down
20 changes: 10 additions & 10 deletions core/lib/workspace_component_lib.js
Original file line number Diff line number Diff line change
Expand Up @@ -91,18 +91,18 @@ function _get_all_withConsomation(filter) {
return new Promise(function(resolve, reject) {
workspaceComponentModel.getInstance().model.find(filter)
.lean()
.exec(function(err, workspaceComponents) {
if (err) {
reject(new Error.DataBaseProcessError(err))
} else {
workspaceComponents.forEach(c => {
c.specificData = c.specificData || {}
}); //protection against empty specificData : corrupt data
resolve(workspaceComponents);
}
.exec()
.then(workspaceComponents => {
workspaceComponents.forEach(c => {
c.specificData = c.specificData || {}
});
resolve(workspaceComponents);
})
.catch(err => {
reject(new Error.DataBaseProcessError(err))
});
})
} // <= _get_all
}


// --------------------------------------------------------------------------------
Expand Down
7 changes: 1 addition & 6 deletions core/models/security_mail.js
Original file line number Diff line number Diff line change
Expand Up @@ -3,11 +3,6 @@
const MongoClient = require('../db/mongo_client');
const SecureMailShema = require('../model_schemas/security_mail');

class SecureMailModel {

static get(){
return MongoClient.getInstance().connection.model('secureMail', SecureMailShema);
}
}
// Register (or reuse) the 'secureMail' model on the shared Mongo connection.
// NOTE(review): model() is now invoked eagerly at require time (the previous
// class wrapper deferred it to a static get()); the Mongo connection must
// therefore already be initialised when this module is first loaded — confirm
// against application start-up order.
const SecureMailModel = MongoClient.getInstance().connection.model('secureMail', SecureMailShema);

module.exports = SecureMailModel;
Loading

0 comments on commit ca8ab0c

Please sign in to comment.