diff --git a/index.js b/index.js
index e95e321..09da26b 100644
--- a/index.js
+++ b/index.js
@@ -4,6 +4,11 @@
 const remove = require('./lib/remove');
 const create = require('./lib/create');
 const update = require('./lib/update');
 const validate = require('./lib/validation');
+const importer = require('./lib/import');
+const exporter= require('./lib/export');
+
+exports.toRDF = importer.toRDF;
+exports.toJSON = exporter.toJSON;
 
 exports.search = () => {};
diff --git a/lib/export.js b/lib/export.js
new file mode 100644
index 0000000..a824129
--- /dev/null
+++ b/lib/export.js
@@ -0,0 +1,64 @@
+"use strict"
+
+const fs = require("fs");
+const path = require("path");
+
+function normalizeDirPath (target) {
+
+    if (path.extname(target)) {
+        target = path.dirname(target);
+    }
+
+    return path.normalize(target) + "/";
+}
+
+exports.toJSON = (target) => {
+
+    target = normalizeDirPath(target);
+
+    // ..open writable file stream
+    const rdf_file = fs.createWriteStream(path.resolve(target + "../__rdf.nq"));
+
+    const parser = new Promise((resolve, reject) => {
+        fs.readdir(target, (err, files) => {
+            if (err) {
+                reject(err);
+            } else {
+                resolve(files);
+            }
+        });
+    })
+    .then((files) => {
+
+        return new Promise((resolve, reject) => {
+            let index = 0;
+            const next = (file) => {
+                fs.readFile(target + file, (err, data) => {
+                    if (err) {
+                        return reject(err);
+                    }
+
+                    try {
+                        convertToRdf(rdf_file, JSON.parse(data.toString()));
+                    } catch (err) {
+                        reject(err);
+                    }
+
+                    if (!files[++index]) {
+                        return resolve("parsing finished!");
+                    }
+
+                    next(files[index]);
+                });
+            };
+            next(files[index]);
+        });
+    })
+    .then((message) => {
+        console.log("Resolved:", message);
+    })
+    .catch((err) => {
+        // ..remove file
+        console.log("Rejected:", err);
+    });
+};
diff --git a/lib/import.js b/lib/import.js
new file mode 100644
index 0000000..f4ae8eb
--- /dev/null
+++ b/lib/import.js
@@ -0,0 +1,417 @@
+"use strict"
+
+const fs = require("fs");
+const path = require("path");
+
+function normalizeDirPath (target) {
+
+    if (path.extname(target)) {
+        target = path.dirname(target);
+    }
+
+    return path.normalize(target) + "/";
+}
+
+function writeTriple (writable, subject, predicate, object) {
+    writable.write(subject + ' <' + predicate + '> ' + object + ' .\n');
+}
+
+function convertToRdf (writable, seq_id, jsonSeq) {
+console.log("Convert to RDF:", seq_id);
+    // ..convert to rdf
+    jsonSeq[0].forEach((handler) => {
+        // ..handler id
+        // ..handler fn
+        // ..handler state
+        // ..handler args
+    });
+
+    if (jsonSeq[1]) {
+        jsonSeq[1].A && writeTriple(writable, seq_id, "args", JSON.stringify(jsonSeq[1].A));
+        // ..seq args
+        // ..seq roles
+        // ..seq error
+    }
+}
+
+function getHash (string, name, type) {
+    let hash = '_:' + crypto.createHash('md5').update(string).digest('hex');
+    if (!hashids[hash]) {
+        hashids[hash] = 1;
+        write(hash, rdf_syntax + 'string', '"' + string.replace(/"/g, '\\"') + '"');
+        write(hash, rdf_syntax + 'type', '');
+
+        if (name) {
+            write(hash, 'http://schema.org/name', getHash(name));
+        }
+    }
+
+    return hash;
+}
+
+exports.toRDF = (target) => {
+
+    target = normalizeDirPath(target);
+
+    // ..open writable file stream
+    const rdf_file = fs.createWriteStream(path.resolve(target + "../__rdf.nq"));
+
+    const parser = new Promise((resolve, reject) => {
+        fs.readdir(target, (err, files) => {
+            if (err) {
+                reject(err);
+            } else {
+                resolve(files);
+            }
+        });
+    })
+    .then((files) => {
+
+        return new Promise((resolve, reject) => {
+            let index = 0;
+            const next = (file) => {
+                fs.readFile(target + file, (err, data) => {
+                    if (err) {
+                        return reject(err);
+                    }
+
+                    try {
+                        convertToRdf(rdf_file, file.split(".")[0], JSON.parse(data.toString()));
+                    } catch (err) {
+                        reject(err);
+                    }
+
+                    if (!files[++index]) {
+                        return resolve("parsing finished!");
+                    }
+
+                    next(files[index]);
+                });
+            };
+            next(files[index]);
+        });
+    })
+    .then((message) => {
+        console.log("Resolved:", message);
+    })
+    .catch((err) => {
+        // ..remove file
+        console.log("Rejected:", err);
+    });
+};
+
+/*
+const crypto = require('crypto');
+const resolve = require('path').resolve;
+const suffixTest = /\.json$/;
+const root = resolve(process.argv[2] || '.') + '/';
+const env_config = require(root + 'flow.json');
+const path = root + 'network/';
+const sequences = {};
+const rdf_syntax = 'http://www.w3.org/1999/02/22-rdf-syntax-ns#';
+const hashids = {};
+const hashlbs = {};
+const envs = {};
+const fn_states = {};
+const temp_index = {};
+
+function write (subject, predicate, object) {
+    process.stdout.write(subject + ' <' + predicate + '> ' + object + ' .\n');
+}
+
+function UID (len) {
+    len = len || 23;
+    let i = 0, random = '';
+    for (; i < len; ++i) {
+        random += '0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz'[0 | Math.random() * 62];
+    }
+    return '_:' + crypto.createHash('md5').update(random).digest('hex');
+}
+
+function getFnState (name) {
+
+    if (!fn_states[name]) {
+
+        fn_states[name] = UID();
+
+        write(
+            fn_states[name],
+            rdf_syntax + 'type',
+            ''
+        );
+
+        // role name
+        write(
+            fn_states[name],
+            'http://schema.org/name',
+            getHash(name)
+        );
+    }
+
+    return fn_states[name];
+}
+
+// public role
+const public_role = getHash('*', "Public Role", "Role");
+
+// Convert composition files to triples
+files.forEach(file => {
+
+    if (!suffixTest.test(file)) {
+        return;
+    }
+
+    let group;
+    try {
+        group = JSON.parse(fs.readFileSync(path + file));
+    } catch (error) {
+        throw new Error(path + file + '\n' + error);
+    }
+
+    Object.keys(group).forEach((sequence) => {
+
+        if (sequence[sequence]) {
+            throw new Error('Converter: Duplicate sequence "' + sequence + '".');
+        }
+
+        sequences[sequence] = group[sequence];
+    });
+});
+
+// create env objects
+if (env_config) {
+
+    if (env_config.environments) {
+        env_config.environments.forEach(env => {
+            const env_uid = UID();
+
+            // create json edge
+            write(
+                env_uid,
+                'http://schema.jillix.net/vocab/json',
+                getHash(JSON.stringify(env.vars))
+            );
+
+            // evnironment type
+            write(
+                env_uid,
+                rdf_syntax + 'type',
+                ''
+            );
+
+            // environment name
+            write(
+                env_uid,
+                'http://schema.org/name',
+                getHash(env.name)
+            );
+
+            envs[env.name] = env_uid;
+        });
+    }
+
+    if (env_config.entrypoints) {
+        env_config.entrypoints.forEach(ep => {
+
+            const entrypoint_id = '_:' + crypto.createHash('md5').update(ep.emit).digest('hex');
+
+            // entrypoint environment
+            if (ep.env) {
+                ep.env.forEach(env => {
+                    if (envs[env]) {
+                        write(
+                            entrypoint_id,
+                            'http://schema.jillix.net/vocab/args',
+                            envs[env]
+                        );
+                    }
+                });
+            }
+        });
+    }
+}
+
+// sequences
+for (let sequence in sequences) {
+    let seq = sequences[sequence];
+    let sequence_id = '_:' + crypto.createHash('md5').update(sequence).digest('hex');
+
+    if (!seq[0] || !seq[0].length) {
+        continue;
+    }
+
+    // name
+    write(
+        sequence_id,
+        'http://schema.org/name',
+        getHash(sequence.toUpperCase())
+    );
+
+    // type
+    write(
+        sequence_id,
+        rdf_syntax + 'type',
+        ''
+    );
+
+    // roles
+    write(
+        sequence_id,
+        'http://schema.jillix.net/vocab/role',
+        public_role
+    );
+
+    // end event
+    if (seq[2]) {
+        write(
+            sequence_id,
+            'http://schema.jillix.net/vocab/onEnd',
+            '_:' + crypto.createHash('md5').update(seq[2]).digest('hex')
+        );
+    }
+
+    // error event
+    if (seq[1]) {
+        write(
+            sequence_id,
+            'http://schema.jillix.net/vocab/error',
+            '_:' + crypto.createHash('md5').update(seq[1]).digest('hex')
+        );
+    }
+
+    // handlers
+    let previous;
+
+    // handler
+    seq[0].forEach((handler, index) => {
+
+        let handler_id = UID();
+        let handler_name = typeof handler === 'string' ? 'Emit:' + handler : handler[1] + '/' + handler[2];
+
+        // name
+        write(
+            handler_id,
+            'http://schema.org/name',
+            getHash(handler_name)
+        );
+
+        // sequence emit
+        if (typeof handler === 'string') {
+
+            // type Emit
+            write(
+                handler_id,
+                rdf_syntax + 'type',
+                ''
+            );
+
+            // sequence
+            write(
+                handler_id,
+                'http://schema.jillix.net/vocab/sequence',
+                '_:' + crypto.createHash('md5').update(handler).digest('hex')
+            );
+
+        // data handler
+        } else {
+
+            // state
+            if (typeof handler[3] === 'string') {
+                write(
+                    handler_id,
+                    'http://schema.jillix.net/vocab/state',
+                    getFnState(handler[3])
+                );
+            }
+
+            // type data
+            write(
+                handler_id,
+                rdf_syntax + 'type',
+                ''
+            );
+
+            // function
+            write(
+                handler_id,
+                'http://schema.jillix.net/vocab/fn',
+                '<' + handler[0] + '/' + handler[1] + '?' + handler[2] + '>'
+            );
+        }
+
+        // next
+        write(
+            index === 0 ? sequence_id : previous,
+            'http://schema.jillix.net/vocab/next',
+            handler_id
+        );
+
+        previous = handler_id;
+
+        // link back to sequence (owner)
+        write(
+            sequence_id,
+            'http://schema.jillix.net/vocab/handler',
+            handler_id
+        );
+
+        // method args
+        if (typeof handler !== 'string' && handler[4]) {
+            let args = JSON.stringify(handler[4]);
+
+            // potential emits from args
+            let potential_emits = args.match(/\{FLOW\:([^\}]+)\}/g);
+            let emits = [];
+            if (potential_emits) {
+                potential_emits.forEach(emit => {
+                    let replace = emit;
+                    emit = emit.slice(6, -1);
+                    emit = '_:' + crypto.createHash('md5').update(emit).digest('hex');
+                    args = args.replace(replace, emit);
+                    emits.push(emit);
+                });
+            }
+
+            const args_uid = UID();
+
+            write(
+                handler_id,
+                'http://schema.jillix.net/vocab/args',
+                args_uid
+            );
+
+            // create json edge
+            write(
+                args_uid,
+                'http://schema.jillix.net/vocab/json',
+                getHash(args)
+            );
+
+            // args type
+            write(
+                args_uid,
+                rdf_syntax + 'type',
+                ''
+            );
+
+            // args name
+            write(
+                args_uid,
+                'http://schema.org/name',
+                getHash("Args:" + handler[2])
+            );
+
+            emits.forEach(emit => {
+                let key = args_uid + emit;
+                if (!temp_index[key]) {
+                    temp_index[key] = 1;
+                    write(
+                        args_uid,
+                        'http://schema.jillix.net/vocab/sequence',
+                        emit
+                    );
+                }
+            });
+        }
+    });
+}*/
diff --git a/lib/stores/cayley/index.js b/lib/stores/cayley/index.js
index fccc29e..d1f19c5 100644
--- a/lib/stores/cayley/index.js
+++ b/lib/stores/cayley/index.js
@@ -60,16 +60,9 @@ module.exports = (config) => {
         sequence: (sequence_id, role, objectMode) => {
             return Triples.parse(sequence(client, sequence_id, role), objectMode);
         },
-        entrypoint: (entrypoint_name, objectMode) => {
-            return Triples.parse(entrypoint(client, entrypoint_name), objectMode);
-        },
         outNodes: (id) => {
             return Triples.parse(visualization.outNodes(client, id));
         },
-        // TODO move to service api
-        networks: (id) => {
-            return Triples.parse(visualization.networks(client, id));
-        },
         getObject: (id) => {
             return Triples.parse(getObject(id));
         },
diff --git a/lib/stores/cayley/visualization.js b/lib/stores/cayley/visualization.js
index bc325c1..be02ae8 100644
--- a/lib/stores/cayley/visualization.js
+++ b/lib/stores/cayley/visualization.js
@@ -5,8 +5,6 @@
 const RDF_SYNTAX = '',
-    FLOW_VOCAB + 'environment>',
     FLOW_VOCAB + 'sequence>',
     FLOW_VOCAB + 'error>',
     FLOW_VOCAB + 'next>',
@@ -37,5 +35,12 @@ exports.outNodes = (store, node_id) => {
         .Tag('subject')
         .Out(FLOW_VOCAB + 'handler>', 'predicate')
         .All()
-    ];
+    ]
+}
+
+// TODO write query
+// get all sequences for sub types
+exports.inSubSequence = (store, user_id) => {
+    return [store.g.V().Has(RDF_SYNTAX + 'type>', FLOW_VOCAB + 'Network>')
+        .Tag('subject').All()];
 };
diff --git a/test.js b/test.js
new file mode 100644
index 0000000..a173664
--- /dev/null
+++ b/test.js
@@ -0,0 +1,13 @@
+"use strict"
+
+const resolve = require("path").resolve;
+
+if (!process.argv[2]) {
+    throw new Error("Please specify a path to a sequence folder.");
+}
+
+const seq_path = resolve(process.argv[2]);
+const API = require(__dirname + "/index.js");
+
+// test converter toRDF
+API.toRDF(seq_path);