diff --git a/input.ttl b/input.ttl
index 8887d5e..48668a5 100644
--- a/input.ttl
+++ b/input.ttl
@@ -19,7 +19,7 @@
  a js:JsReaderChannel.
  a js:JsWriterChannel.
 [ ] a js:Send;
-  js:msg "Hallo world";
+  js:msg "Hello";
   js:sendWriter .
 [ ] a js:Resc;
diff --git a/ontology.ttl b/ontology.ttl
index f3d4a92..83f4577 100644
--- a/ontology.ttl
+++ b/ontology.ttl
@@ -7,6 +7,7 @@
 @prefix rdfs: .
 @prefix rdf: .
 @prefix dc: .
+@prefix rdfl: .

 [ ] a sh:NodeShape;
   sh:targetClass :Channel;
@@ -14,6 +15,7 @@
     sh:path ( );
     sh:name "id";
     sh:maxCount 1;
+    sh:minCount 1;
     sh:datatype xsd:iri;
   ], [
     sh:path :reader;
@@ -30,34 +32,56 @@
 [ ] a sh:NodeShape;
   sh:targetClass :ReaderChannel;
   sh:property [
-    sh:path [ sh:inversePath :reader ];
-    sh:name "channel";
-    sh:maxCount 1;
-    sh:class :Channel;
-  ], [
     sh:path rdf:type;
     sh:name "ty";
     sh:maxCount 1;
     sh:datatype xsd:iri;
+  ], [
+    sh:path ( );
+    sh:name "config";
+    sh:maxCount 1;
+    sh:minCount 1;
+    sh:class rdfl:TypedExtract;
   ].

 [ ] a sh:NodeShape;
   sh:targetClass :WriterChannel;
   sh:property [
-    sh:path [ sh:inversePath :writer ];
-    sh:name "channel";
-    sh:maxCount 1;
-    sh:class :Channel;
-  ], [
     sh:path rdf:type;
     sh:name "ty";
     sh:maxCount 1;
+    sh:minCount 1;
     sh:datatype xsd:iri;
+  ], [
+    sh:path ( );
+    sh:name "config";
+    sh:maxCount 1;
+    sh:minCount 1;
+    sh:class rdfl:TypedExtract;
   ].

 js:JsChannel rdfs:subClassOf :Channel.

 js:JsReaderChannel rdfs:subClassOf :ReaderChannel.
+[ ] a sh:NodeShape;
+  sh:targetClass js:JsReaderChannel;
+  sh:property [
+    sh:path [ sh:inversePath :reader ];
+    sh:name "channel";
+    sh:maxCount 1;
+    sh:minCount 1;
+    sh:class :Channel;
+  ].
+
 js:JsWriterChannel rdfs:subClassOf :WriterChannel.
+[ ] a sh:NodeShape;
+  sh:targetClass js:JsWriterChannel;
+  sh:property [
+    sh:path [ sh:inversePath :writer ];
+    sh:name "channel";
+    sh:maxCount 1;
+    sh:minCount 1;
+    sh:class :Channel;
+  ].
 # js:JsChannel a :Channel;
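Note on the ontology change above: the generic `:ReaderChannel`/`:WriterChannel` shapes now expose only the `ty` term plus a typed `config` payload (`rdfl:TypedExtract`), while the inverse `:reader`/`:writer` links move to the JS-specific node shapes. On the data side a channel is still declared as before; a minimal sketch, kept as a TypeScript template literal in the style of the test files (`<jr>`/`<jw>` are illustrative IRIs, not names from this PR):

```ts
// Hypothetical channel declaration satisfying the reworked shapes.
const channelTtl = `
[ ] a :Channel;
  :reader <jr>;
  :writer <jw>.

<jr> a js:JsReaderChannel.
<jw> a js:JsWriterChannel.
`;
```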
diff --git a/package-lock.json b/package-lock.json
index 11be47f..a5bce23 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -1,12 +1,12 @@
 {
   "name": "@ajuvercr/js-runner",
-  "version": "0.1.16",
+  "version": "0.1.18-alpha.0",
   "lockfileVersion": 3,
   "requires": true,
   "packages": {
     "": {
       "name": "@ajuvercr/js-runner",
-      "version": "0.1.16",
+      "version": "0.1.18-alpha.0",
       "license": "MIT",
       "dependencies": {
         "@rdfjs/types": "^1.1.0",
@@ -15,7 +15,7 @@
         "command-line-usage": "^6.1.3",
         "kafkajs": "^2.2.4",
         "n3": "^1.17.1",
-        "rdf-lens": "^1.1.3",
+        "rdf-lens": "^1.2.3",
         "stream-to-array": "^2.3.0",
         "ws": "^8.14.2"
       },
@@ -3219,9 +3219,9 @@
       }
     },
     "node_modules/rdf-lens": {
-      "version": "1.1.3",
-      "resolved": "https://registry.npmjs.org/rdf-lens/-/rdf-lens-1.1.3.tgz",
-      "integrity": "sha512-Q80k5CNCPFkxK0duJr7cXW9zxr72aUgcAKQlHbX6wf0uDwayCakMu3GeWJ0wf9uTyYF1ImLl8Y53IPjXYMwpBQ==",
+      "version": "1.2.3",
+      "resolved": "https://registry.npmjs.org/rdf-lens/-/rdf-lens-1.2.3.tgz",
+      "integrity": "sha512-FxYs7K9d4CNR4RcUa/1tf/pPIkiniRsyfCRWt/RdwAym2ox+dxo13hMQsQuM1ByycemD/OBtoX6Yt2mNiJIk2Q==",
       "dependencies": {
         "@rdfjs/types": "^1.1.0",
         "@treecg/types": "^0.4.5"
diff --git a/package.json b/package.json
index 2852c3d..c830d42 100644
--- a/package.json
+++ b/package.json
@@ -34,7 +34,7 @@
     "command-line-usage": "^6.1.3",
     "kafkajs": "^2.2.4",
     "n3": "^1.17.1",
-    "rdf-lens": "^1.1.3",
+    "rdf-lens": "^1.2.3",
     "stream-to-array": "^2.3.0",
     "ws": "^8.14.2"
   },
diff --git a/processor/test.js b/processor/test.js
index 57fb468..ade9720 100644
--- a/processor/test.js
+++ b/processor/test.js
@@ -11,10 +11,12 @@ function streamToString(ev) {
 export async function send(msg, writer) {
     const host = "0.0.0.0";
     const port = 8000;
-    const requestListener = function (req, res) {
-        streamToString(req).then((st) => writer.push(st.toString()));
+    const requestListener = async function (req, res) {
+        const data = await streamToString(req);
+        const ret = `${msg} ${data}`;
+        await writer.push(ret);
         res.writeHead(200);
-        res.end(msg);
+        res.end(ret);
     };

     const server = http.createServer(requestListener);
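Behavioral note on `processor/test.js`: the listener no longer replies with the static `msg`; it awaits the request body, prefixes it with `msg`, pushes that string to the channel writer, and echoes the same string in the HTTP response. A sketch of the observable behavior (Node 18+ global `fetch`; host and port are the hard-coded values above):

```ts
// Assuming send("Hello", writer) is already listening on 0.0.0.0:8000:
const res = await fetch("http://0.0.0.0:8000", {
    method: "POST",
    body: "world",
});
console.log(await res.text()); // "Hello world" — the same string pushed to `writer`
```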
diff --git a/src/connectors.ts b/src/connectors.ts
index 4aa9f2d..169c9f5 100644
--- a/src/connectors.ts
+++ b/src/connectors.ts
@@ -2,7 +2,6 @@ import { createTermNamespace } from "@treecg/types";
 import { NamedNode, Term } from "@rdfjs/types";
 import {
-    FileReaderConfig,
     FileWriterConfig,
     startFileStreamReader,
     startFileStreamWriter,
@@ -12,7 +11,6 @@ export * from "./connectors/file";
 import {
     startWsStreamReader,
     startWsStreamWriter,
-    WsReaderConfig,
     WsWriterConfig,
 } from "./connectors/ws";
 export * from "./connectors/ws";
@@ -43,18 +41,21 @@ export const Conn = createTermNamespace(
     "KafkaWriterChannel",
     "WsReaderChannel",
     "WsWriterChannel",
+    "WriterChannel",
+    "ReaderChannel",
 );

-export interface Config {
+export interface Config<T> {
     ty: NamedNode;
+    config: T;
 }

-export type ReaderConstructor<C extends Config> = (config: C) => {
+export type ReaderConstructor<C> = (config: C) => {
     reader: Stream;
     init: () => Promise;
 };

-export type WriterConstructor<C extends Config> = (config: C) => {
+export type WriterConstructor<C> = (config: C) => {
     writer: Writer;
     init: () => Promise;
 };
@@ -67,7 +68,7 @@ export const JsOntology = createTermNamespace(
     "JsWriterChannel",
 );
 type JsChannel = {
-    channel?: {
+    channel: {
         id: Term;
     };
 };
@@ -77,16 +78,16 @@ export class ChannelFactory {
     private jsChannelsNamedNodes: { [label: string]: SimpleStream } = {};
     private jsChannelsBlankNodes: { [label: string]: SimpleStream } = {};

-    createReader(config: Config): Stream {
+    createReader(config: Config): Stream {
         if (config.ty.equals(Conn.FileReaderChannel)) {
-            const { reader, init } = startFileStreamReader(config);
+            const { reader, init } = startFileStreamReader(config.config);
             this.inits.push(init);
             return reader;
         }

         if (config.ty.equals(Conn.WsReaderChannel)) {
-            const { reader, init } = startWsStreamReader(config);
+            const { reader, init } = startWsStreamReader(config.config);
             this.inits.push(init);
             return reader;
@@ -94,20 +95,22 @@
         if (config.ty.equals(Conn.KafkaReaderChannel)) {
             const { reader, init } = startKafkaStreamReader(
-                config,
+                config.config,
             );
             this.inits.push(init);
             return reader;
         }

         if (config.ty.equals(Conn.HttpReaderChannel)) {
-            const { reader, init } = startHttpStreamReader(config);
+            const { reader, init } = startHttpStreamReader(
+                config.config,
+            );
             this.inits.push(init);
             return reader;
         }

         if (config.ty.equals(JsOntology.JsReaderChannel)) {
-            const c = config;
+            const c = config.config;
             if (c.channel) {
                 const id = c.channel.id.value;
                 if (c.channel.id.termType === "NamedNode") {
@@ -131,16 +134,20 @@
         throw "Unknown reader channel " + config.ty.value;
     }

-    createWriter(config: Config): Writer {
+    createWriter(config: Config): Writer {
         if (config.ty.equals(Conn.FileWriterChannel)) {
-            const { writer, init } = startFileStreamWriter(config);
+            const { writer, init } = startFileStreamWriter(
+                config.config,
+            );
             this.inits.push(init);
             return writer;
         }

         if (config.ty.equals(Conn.WsWriterChannel)) {
-            const { writer, init } = startWsStreamWriter(config);
+            const { writer, init } = startWsStreamWriter(
+                config.config,
+            );
             this.inits.push(init);
             return writer;
@@ -148,20 +155,22 @@
         if (config.ty.equals(Conn.KafkaWriterChannel)) {
             const { writer, init } = startKafkaStreamWriter(
-                config,
+                config.config,
             );
             this.inits.push(init);
             return writer;
         }

         if (config.ty.equals(Conn.HttpWriterChannel)) {
-            const { writer, init } = startHttpStreamWriter(config);
+            const { writer, init } = startHttpStreamWriter(
+                config.config,
+            );
             this.inits.push(init);
             return writer;
         }

         if (config.ty.equals(JsOntology.JsWriterChannel)) {
-            const c = config;
+            const c = config.config;
             if (c.channel) {
                 const id = c.channel.id.value;
                 if (c.channel.id.termType === "NamedNode") {
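The heart of the change: `Config<T>` is now an envelope that separates connector selection (`ty`) from connector settings (`config`), and `ChannelFactory` unwraps `config.config` before calling the connector constructors. A minimal sketch of the new call shape, mirroring the updated tests further down (top-level `await` assumes an ES module context):

```ts
import * as conn from "./connectors";
import { FileReaderConfig } from "./connectors/file";

const factory = new conn.ChannelFactory();

// Connector settings are a plain payload; `ty` alone selects the connector.
const fileConfig: FileReaderConfig = {
    path: "/tmp/test.txt",
    onReplace: true,
    encoding: "utf-8",
};

const reader = factory.createReader({
    ty: conn.Conn.FileReaderChannel,
    config: fileConfig,
});
reader.data((x) => console.log("received:", x));
await factory.init(); // starts watchers/servers for all created channels
```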
diff --git a/src/connectors/file.ts b/src/connectors/file.ts
index f0e7b7c..34bdef0 100644
--- a/src/connectors/file.ts
+++ b/src/connectors/file.ts
@@ -3,7 +3,6 @@ import { appendFile, readFile, stat, writeFile } from "fs/promises";
 import { isAbsolute } from "path";
 import { watch } from "node:fs";
 import {
-    Config,
     ReaderConstructor,
     SimpleStream,
     WriterConstructor,
@@ -13,14 +12,14 @@ interface FileError extends Error {
     code: string;
 }

-export interface FileReaderConfig extends Config {
+export interface FileReaderConfig {
     path: string;
     onReplace: boolean;
     readFirstContent?: boolean;
     encoding?: string;
 }

-export interface FileWriterConfig extends Config {
+export interface FileWriterConfig {
     path: string;
     onReplace: boolean;
     readFirstContent?: boolean;
@@ -99,7 +98,6 @@ export const startFileStreamReader: ReaderConstructor = (
     );

     if (config.onReplace && config.readFirstContent) {
-        console.log("reading first content");
         const content = await readFile(path, { encoding });
         await reader.push(content);
     }
diff --git a/src/connectors/http.ts b/src/connectors/http.ts
index cc3243e..ac50aa7 100644
--- a/src/connectors/http.ts
+++ b/src/connectors/http.ts
@@ -31,7 +31,7 @@ function streamToString(
     });
 }

-export interface HttpReaderConfig extends Config {
+export interface HttpReaderConfig {
     endpoint: string;
     port: number;
     binary: boolean;
@@ -92,7 +92,7 @@ export const startHttpStreamReader: ReaderConstructor = (
     return { reader: stream, init };
 };

-export interface HttpWriterConfig extends Config {
+export interface HttpWriterConfig {
     endpoint: string;
     method: string;
 }
diff --git a/src/connectors/kafka.ts b/src/connectors/kafka.ts
index 509a63a..1b2faa6 100644
--- a/src/connectors/kafka.ts
+++ b/src/connectors/kafka.ts
@@ -40,7 +40,7 @@ export interface CSTopic {
     topic: string;
     fromBeginning?: boolean;
 }
-export interface KafkaReaderConfig extends Config {
+export interface KafkaReaderConfig {
     topic: {
         name: string;
         fromBeginning?: boolean;
@@ -106,7 +106,7 @@ export const startKafkaStreamReader: ReaderConstructor = (
     return { reader: stream, init };
 };

-export interface KafkaWriterConfig extends Config {
+export interface KafkaWriterConfig {
     topic: {
         name: string;
     };
diff --git a/src/connectors/ws.ts b/src/connectors/ws.ts
index 1a7d19c..3c13d77 100644
--- a/src/connectors/ws.ts
+++ b/src/connectors/ws.ts
@@ -7,7 +7,7 @@ import {
 import { RawData, WebSocket } from "ws";
 import { WebSocketServer } from "ws";

-export interface WsWriterConfig extends Config {
+export interface WsWriterConfig {
     url: string;
 }

@@ -27,7 +27,7 @@ function connectWs(url: string): Promise {
     return new Promise((res) => _connectWs(url, res));
 }

-export interface WsReaderConfig extends Config {
+export interface WsReaderConfig {
     host: string;
     port: number;
 }
diff --git a/src/index.ts b/src/index.ts
index 67185a1..0a997de 100644
--- a/src/index.ts
+++ b/src/index.ts
@@ -3,14 +3,14 @@ import { getArgs } from "./args";
 import { load_store } from "./util";

 export * from "./connectors";
-export * from "./shacl";

 import path from "path";
-import { extractShapes, Shapes } from "./shacl";
 import { RDF } from "@treecg/types";
 import { ChannelFactory, Conn, JsOntology } from "./connectors";
 import { Quad, Term } from "@rdfjs/types";

+import { extractShapes, Shapes } from "rdf-lens";
+
 function safeJoin(a: string, b: string) {
     if (b.startsWith("/")) {
         return b;
@@ -56,7 +56,6 @@ export async function extractProcessors(
             x.object.equals(JsOntology.JsProcess),
         )
         .map((x) => x.subject);
-    console.log("Finding", JsOntology.JsChannel.value)

     const processorLens = config.lenses[JsOntology.JsProcess.value];
     const processors = subjects.map((id) => processorLens.execute({ id, quads }));
     return { processors, quads, shapes: config };
@@ -93,7 +92,6 @@ export function extractSteps(
 }

 export async function jsRunner() {
-    console.log("JS runner is running!");
     const args = getArgs();
     const cwd = process.cwd();

@@ -105,25 +103,8 @@ export async function jsRunner() {
     const factory = new ChannelFactory();
     /// Small hack, if something is extracted from these types, that should be converted to a reader/writer
     const apply: { [label: string]: (item: any) => any } = {};
-    for (let ty of [
-        Conn.FileReaderChannel,
-        Conn.WsReaderChannel,
-        Conn.HttpReaderChannel,
-        Conn.KafkaReaderChannel,
-        JsOntology.JsReaderChannel,
-    ]) {
-        apply[ty.value] = (x) => factory.createReader(x);
-    }
-
-    for (let ty of [
-        Conn.FileWriterChannel,
-        Conn.WsWriterChannel,
-        Conn.HttpWriterChannel,
-        Conn.KafkaWriterChannel,
-        JsOntology.JsWriterChannel,
-    ]) {
-        apply[ty.value] = (x) => factory.createWriter(x);
-    }
+    apply[Conn.ReaderChannel.value] = factory.createReader.bind(factory);
+    apply[Conn.WriterChannel.value] = factory.createWriter.bind(factory);

     const {
         processors,
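With the two `apply` hooks above, the runner no longer enumerates every channel type: rdf-lens's `TypedExtract` resolves the concrete subclass from the graph and yields the `{ ty, config }` envelope, which the hook immediately turns into a live stream. A sketch of the wiring (assuming rdf-lens's `extractShapes(quads, apply)` keeps the signature of the deleted local version; `shapeQuads` is a placeholder):

```ts
import { Quad } from "@rdfjs/types";
import { extractShapes } from "rdf-lens";
import { ChannelFactory, Conn } from "./connectors";

declare const shapeQuads: Quad[]; // placeholder: the parsed ontology graph

const factory = new ChannelFactory();
const apply: { [label: string]: (item: any) => any } = {};
// Every ReaderChannel/WriterChannel extracted from the config graph is
// replaced on the spot by a Stream/Writer built by the factory.
apply[Conn.ReaderChannel.value] = factory.createReader.bind(factory);
apply[Conn.WriterChannel.value] = factory.createWriter.bind(factory);

const shapes = extractShapes(shapeQuads, apply);
```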
diff --git a/src/shacl.ts b/src/shacl.ts
deleted file mode 100644
index 3a18e49..0000000
--- a/src/shacl.ts
+++ /dev/null
@@ -1,429 +0,0 @@
-import { Quad, Term } from "@rdfjs/types";
-import { createTermNamespace, RDF, XSD } from "@treecg/types";
-import {
-    BasicLens,
-    BasicLensM,
-    Cont,
-    empty,
-    invPred,
-    pred,
-    subjects,
-    unique,
-} from "rdf-lens";
-import { CONN2, literal } from "./util";
-
-export const RDFS = createTermNamespace(
-    "http://www.w3.org/2000/01/rdf-schema#",
-    "subClassOf",
-);
-
-export const SHACL = createTermNamespace(
-    "http://www.w3.org/ns/shacl#",
-    // Basics
-    "Shape",
-    "NodeShape",
-    "PropertyShape",
-    // SHACL target constraints
-    "targetNode",
-    "targetClass",
-    "targetSubjectsOf",
-    "targetObjectsOf",
-    // Property things
-    "property",
-    "path",
-    "class",
-    "name",
-    "description",
-    "defaultValue",
-    // Path things
-    "alternativePath",
-    "zeroOrMorePath",
-    "inversePath",
-    "minCount",
-    "maxCount",
-    "datatype",
-);
-
-export interface ShapeField {
-    name: string;
-    path: BasicLensM;
-    minCount?: number;
-    maxCount?: number;
-    extract: BasicLens;
-    // extract: (term: Term, quads: Quad[]) => any;
-}
-
-export interface Shape {
-    id: string;
-    ty: Term;
-    description?: string;
-    fields: ShapeField[];
-}
-
-export function toLens(
-    shape: Shape,
-): BasicLens {
-    if (shape.fields.length === 0) return empty().map(() => ({}));
-
-    const fields = shape.fields.map((field) => {
-        const minCount = field.minCount || 0;
-        const maxCount = field.maxCount || Number.MAX_SAFE_INTEGER;
-        const base =
-            maxCount < 2 // There will be at most one
-                ? field.path.one().then(field.extract)
-                : field.path.thenAll(field.extract).map((xs) => {
-                      if (xs.length < minCount) {
-                          throw `${shape.ty}:${field.name} required at least ${minCount} elements, found ${xs.length}`;
-                      }
-                      if (xs.length > maxCount) {
-                          throw `${shape.ty}:${field.name} required at most ${maxCount} elements, found ${xs.length}`;
-                      }
-                      return xs;
-                  });
-
-        const asField = base.map((x) => {
-            const out = <{ [label: string]: any }>{};
-            out[field.name] = x;
-            return out;
-        });
-
-        return minCount > 0 ? asField : asField.or(empty().map(() => ({})));
-    });
-
-    return fields[0]
-        .and(...fields.slice(1))
-        .map((xs) => Object.assign({}, ...xs));
-}
-
-const RDFListElement = pred(RDF.terms.first)
-    .one()
-    .and(pred(RDF.terms.rest).one());
-export const RdfList: BasicLens = new BasicLens((c) => {
-    if (c.id.equals(RDF.terms.nil)) {
-        return [];
-    }
-
-    const [first, rest] = RDFListElement.execute(c);
-    const els = RdfList.execute(rest);
-    els.unshift(first.id);
-    return els;
-});
-
-export const ShaclSequencePath: BasicLens<
-    Cont,
-    BasicLensM
-> = new BasicLens((c) => {
-    const pathList = RdfList.execute(c);
-
-    if (pathList.length === 0) {
-        return new BasicLensM((c) => [c]);
-    }
-
-    let start = pred(pathList[0]);
-
-    for (let i = 1; i < pathList.length; i++) {
-        start = start.thenFlat(pred(pathList[i]));
-    }
-
-    return start;
-});
-
-export const ShaclAlternativepath: BasicLens<
-    Cont,
-    BasicLensM
-> = new BasicLens((c) => {
-    const options = pred(SHACL.alternativePath).one().then(RdfList).execute(c);
-    const optionLenses = options.map((id) =>
-        ShaclPath.execute({ id, quads: c.quads }),
-    );
-    return optionLenses[0].orAll(...optionLenses.slice(1));
-});
-
-export const ShaclPredicatePath: BasicLens<
-    Cont,
-    BasicLensM
-> = new BasicLens((c) => {
-    return pred(c.id);
-});
-
-export const ShaclInversePath: BasicLens> = pred(
-    SHACL.inversePath,
-)
-    .one()
-    .then(
-        new BasicLens>((c) => {
-            const pathList = RdfList.execute(c);
-
-            if (pathList.length === 0) {
-                return new BasicLensM((c) => [c]);
-            }
-
-            pathList.reverse();
-
-            let start = invPred(pathList[0]);
-
-            for (let i = 1; i < pathList.length; i++) {
-                start = start.thenFlat(invPred(pathList[i]));
-            }
-
-            return start;
-        }).or(
-            new BasicLens>((c) => {
-                return invPred(c.id);
-            }),
-        ),
-    );
-
-export const ShaclPath = ShaclSequencePath.or(
-    ShaclAlternativepath,
-    ShaclInversePath,
-    ShaclPredicatePath,
-);
-
-function field(
-    predicate: Term,
-    name: T,
-    convert?: (inp: string) => O,
-): BasicLens {
-    const conv = convert || ((x: string) => x);
-
-    return pred(predicate)
-        .one()
-        .map(({ id }) => {
-            const out = <{ [F in T]: O }>{};
-            out[name] = conv(id.value);
-            return out;
-        });
-}
-
-function optionalField(
-    predicate: Term,
-    name: T,
-    convert?: (inp: string) => O | undefined,
-): BasicLens {
-    const conv = convert || ((x: string) => x);
-
-    return pred(predicate)
-        .one(undefined)
-        .map((inp) => {
-            const out = <{ [F in T]: O | undefined }>{};
-            if (inp) {
-                out[name] = conv(inp.id.value);
-            }
-            return out;
-        });
-}
-
-function dataTypeToExtract(dataType: Term, t: Term): any {
-    if (dataType.equals(XSD.terms.integer)) return +t.value;
-    if (dataType.equals(XSD.terms.custom("float"))) return +t.value;
-    if (dataType.equals(XSD.terms.custom("double"))) return +t.value;
-    if (dataType.equals(XSD.terms.custom("decimal"))) return +t.value;
-    if (dataType.equals(XSD.terms.string)) return t.value;
-    if (dataType.equals(XSD.terms.dateTime)) return new Date(t.value);
-    if (dataType.equals(XSD.terms.custom("boolean"))) return t.value === "true";
-
-    return t;
-}
-
-function envLens(dataType: Term): BasicLens {
-    const checkType = pred(RDF.terms.type)
-        .thenSome(
-            new BasicLens(({ id }) => {
-                if (!id.equals(CONN2.terms.EnvVariable)) {
-                    throw "expected type " + CONN2.EnvVariable;
-                }
-                return { checked: true };
-            }),
-        )
-        .expectOne();
-
-    const envName = pred(CONN2.terms.envKey)
-        .one()
-        .map(({ id }) => ({
-            key: id.value,
-        }));
-
-    const defaultValue = pred(CONN2.terms.envDefault)
-        .one(undefined)
-        .map((found) => ({
-            defaultValue: found?.id.value,
-        }));
-
-    return checkType
-        .and(envName, defaultValue)
-        .map(([_, { key }, { defaultValue }]) => {
-            const value = process.env[key] || defaultValue;
-            if (value) {
-                return dataTypeToExtract(dataType, literal(value));
-            } else {
-                throw "Nothing set for ENV " + key + ". No default was set either!";
-            }
-        });
-}
-
-type Cache = {
-    [clazz: string]: BasicLens;
-};
-
-type SubClasses = {
-    [clazz: string]: string;
-};
-
-function extractProperty(
-    cache: Cache,
-    subClasses: SubClasses,
-    apply: { [clazz: string]: (item: any) => any },
-): BasicLens {
-    const pathLens = pred(SHACL.path)
-        .one()
-        .then(ShaclPath)
-        .map((path) => ({
-            path,
-        }));
-    const nameLens = field(SHACL.name, "name");
-    const minCount = optionalField(SHACL.minCount, "minCount", (x) => +x);
-    const maxCount = optionalField(SHACL.maxCount, "maxCount", (x) => +x);
-
-    const dataTypeLens: BasicLens = pred(SHACL.datatype)
-        .one()
-        .map(({ id }) => ({
-            extract: envLens(id).or(
-                empty().map((item) => dataTypeToExtract(id, item.id)),
-            ),
-        }));
-
-    const clazzLens: BasicLens = field(
-        SHACL.class,
-        "clazz",
-    ).map(({ clazz: expected_class }) => {
-        return {
-            extract: new BasicLens(({ id, quads }) => {
-                // How do I extract this value: use a pre
-                let found_class = false;
-
-                const ty = quads.find(
-                    (q) => q.subject.equals(id) && q.predicate.equals(RDF.terms.type),
-                )?.object.value;
-
-                if (!ty) {
-                    // We did not find a type, so use the expected class lens
-                    const lens = cache[expected_class];
-                    if (!lens) {
-                        throw `Tried extracting class ${expected_class} but no shape was defined`;
-                    }
-                    return lens.execute({ id, quads });
-                }
-
-                // We found a type, let's see if the expected class is inside the class hierachry
-                const lenses: (typeof cache)[string][] = [];
-
-                let current = ty;
-                while (!!current) {
-                    if (lenses.length < 2) {
-                        const lens = cache[current];
-                        if (lens) {
-                            lenses.push(lens);
-                        }
-                    }
-                    found_class = found_class || current === expected_class;
-                    current = subClasses[current];
-                }
-
-                if (!found_class) {
-                    throw `${ty} is not a subClassOf ${expected_class}`;
-                }
-
-                if (lenses.length === 0) {
-                    throw `Tried the classhierarchy for ${ty}, but found no shape definition`;
-                }
-
-                const finalLens =
-                    lenses.length == 1
-                        ? lenses[0]
-                        : lenses[0].and(lenses[1]).map(([a, b]) => Object.assign({}, a, b));
-
-                if (apply[ty]) {
-                    return finalLens.map(apply[ty]).execute({ id, quads });
-                } else {
-                    return finalLens.execute({ id, quads });
-                }
-            }),
-        };
-    });
-
-    return pathLens
-        .and(nameLens, minCount, maxCount, clazzLens.or(dataTypeLens))
-        .map((xs) => Object.assign({}, ...xs));
-}
-
-export function extractShape(
-    cache: Cache,
-    subclasses: { [label: string]: string },
-    apply: { [label: string]: (item: any) => any },
-): BasicLens {
-    const checkTy = pred(RDF.terms.type)
-        .one()
-        .map(({ id }) => {
-            if (id.equals(SHACL.NodeShape)) return {};
-            throw "Shape is not sh:NodeShape";
-        });
-
-    const idLens = empty().map(({ id }) => ({ id: id.value }));
-    const clazzs = pred(SHACL.targetClass);
-
-    const multiple = clazzs.thenAll(empty().map(({ id }) => ({ ty: id })));
-
-    // TODO: Add implictTargetClass
-    const descriptionClassLens = optionalField(SHACL.description, "description");
-    const fields = pred(SHACL.property)
-        .thenSome(extractProperty(cache, subclasses, apply))
-        .map((fields) => ({ fields }));
-
-    return multiple
-        .and(checkTy, idLens, descriptionClassLens, fields)
-        .map(([multiple, ...others]) =>
-            multiple.map((xs) => Object.assign({}, xs, ...others)),
-        );
-}
-
-export type Shapes = {
-    shapes: Shape[];
-    lenses: Cache;
-    subClasses: SubClasses;
-};
-
-export function extractShapes(
-    quads: Quad[],
-    apply: { [label: string]: (item: any) => any } = {},
-): Shapes {
-    const cache: Cache = {};
-    const subClasses: SubClasses = {};
-    quads
-        .filter((x) => x.predicate.equals(RDFS.subClassOf))
-        .forEach((x) => (subClasses[x.subject.value] = x.object.value));
-    const shapes = subjects()
-        .then(unique())
-        .asMulti()
-        .thenSome(extractShape(cache, subClasses, apply))
-        .execute(quads)
-        .flat();
-    const lenses = [];
-
-    // Populate cache
-    for (let shape of shapes) {
-        const lens = toLens(shape);
-        const target = cache[shape.ty.value];
-
-        if (target) {
-            cache[shape.ty.value] = target.or(lens);
-            // subClasses: shape.subTypes,
-        } else {
-            cache[shape.ty.value] = lens;
-        }
-        lenses.push(lens);
-    }
-
-    return { lenses: cache, shapes, subClasses };
-}
diff --git a/src/util.ts b/src/util.ts
index 1a5a066..c3b8d11 100644
--- a/src/util.ts
+++ b/src/util.ts
@@ -56,7 +56,6 @@ async function get_readstream(location: string): Promise {
 export async function load_quads(location: string, baseIRI?: string) {
     try {
-        console.log("load_quads", location, baseIRI);
         const parser = new StreamParser({ baseIRI: baseIRI || location });
         const rdfStream = await get_readstream(location);
         rdfStream.pipe(parser);
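Everything deleted above — SHACL path handling, cardinality checks, env-variable extraction, subclass resolution — now comes from rdf-lens ≥ 1.2.3 instead of a local copy. A minimal consumer sketch in the style of the updated tests (`ttl` and `subject` are placeholders):

```ts
import { Term } from "@rdfjs/types";
import { Parser } from "n3";
import { extractShapes } from "rdf-lens";

declare const ttl: string; // placeholder: SHACL shapes + data in Turtle
declare const subject: Term; // placeholder: focus node to extract

const quads = new Parser().parse(ttl);
const shapes = extractShapes(quads);

// Each sh:targetClass yields a lens projecting a focus node onto a JS object.
const obj = shapes.lenses["https://w3id.org/conn/js#JsProcess"].execute({
    id: subject,
    quads,
});
```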
diff --git a/test/configuration.test.ts b/test/configuration.test.ts
index bbbe6cc..bafe28c 100644
--- a/test/configuration.test.ts
+++ b/test/configuration.test.ts
@@ -3,7 +3,7 @@ import { Quad } from "@rdfjs/types";
 import { RDF } from "@treecg/types";
 import { readFileSync } from "fs";
 import { DataFactory, Parser } from "n3";
-import { extractShapes } from "../src/shacl";
+import { extractShapes } from "rdf-lens";

 function parseQuads(inp: string): Quad[] {
     return new Parser().parse(inp);
@@ -19,7 +19,7 @@ const JsProcessor = DataFactory.namedNode("https://w3id.org/conn/js#JsProcess");
 describe("Input test", () => {
     test("Parse configuration", () => {
         const output = parseConfig();
-        expect(output.shapes.length).toBe(6);
+        expect(output.shapes.length).toBe(8);
         expect(output.lenses[JsProcessor.value]).toBeDefined();
     });

@@ -61,7 +61,7 @@
             .map((x) => x.subject);

         const processorLens = config.lenses[JsProcessor.value];
         const processors = subjects.map((id) =>
-            processorLens.execute({ id, quads }),
+            processorLens.execute({ id, quads: quads }),
         );

         const found: any[] = [];
@@ -78,7 +78,9 @@
             );
             const processorLens = config.lenses[proc.ty.value];
-            found.push(...subjects.map((id) => processorLens.execute({ id, quads })));
+            found.push(
+                ...subjects.map((id) => processorLens.execute({ id, quads: quads })),
+            );
         }

         console.log(
diff --git a/test/connectors/file.test.ts b/test/connectors/file.test.ts
index 455343b..1a6251a 100644
--- a/test/connectors/file.test.ts
+++ b/test/connectors/file.test.ts
@@ -6,13 +6,11 @@ import * as conn from "../../src/connectors";
 describe("File Channel", () => {
     test("Reader - Writer", async () => {
         const config: FileReaderConfig = {
-            ty: conn.Conn.FileReaderChannel,
             path: "/tmp/test.txt",
             onReplace: true,
             encoding: "utf-8",
         };
-        const writerConfig: FileReaderConfig = {
-            ty: conn.Conn.FileWriterChannel,
+        const writerConfig: FileWriterConfig = {
             path: "/tmp/test.txt",
             onReplace: true,
             encoding: "utf-8",
@@ -23,14 +21,20 @@
         const factory = new conn.ChannelFactory();
         const items: string[] = [];

-        const reader = factory.createReader(config);
+        const reader = factory.createReader({
+            config,
+            ty: conn.Conn.FileReaderChannel,
+        });
         expect(reader).toBeInstanceOf(conn.SimpleStream);

         reader.data((x) => {
-            items.push(x);
+            items.push(x);
         });

-        const writer = factory.createWriter(writerConfig);
+        const writer = factory.createWriter({
+            config: writerConfig,
+            ty: conn.Conn.FileWriterChannel,
+        });
         await factory.init();

         await writer.push("Number 1 " + Math.random());
diff --git a/test/connectors/http.test.ts b/test/connectors/http.test.ts
index ea7bfa7..4bdd2c9 100644
--- a/test/connectors/http.test.ts
+++ b/test/connectors/http.test.ts
@@ -8,17 +8,21 @@ describe("connector-http", () => {
         endpoint: "localhost",
         port: 8080,
         binary: false,
-        ty: conn.Conn.HttpReaderChannel,
     };
     const writerConfig: HttpWriterConfig = {
         endpoint: "http://localhost:8080",
         method: "POST",
-        ty: conn.Conn.HttpWriterChannel,
     };

     const factory = new conn.ChannelFactory();
-    const reader = factory.createReader(readerConfig);
-    const writer = factory.createWriter(writerConfig);
+    const reader = factory.createReader({
+        config: readerConfig,
+        ty: conn.Conn.HttpReaderChannel,
+    });
+    const writer = factory.createWriter({
+        config: writerConfig,
+        ty: conn.Conn.HttpWriterChannel,
+    });

     reader.data((data) => {
         items.push(data);
@@ -44,17 +48,21 @@
         port: 8081,
         binary: true,
         waitHandled: false,
-        ty: conn.Conn.HttpReaderChannel,
     };
     const writerConfig: HttpWriterConfig = {
         endpoint: "http://localhost:8081",
         method: "POST",
-        ty: conn.Conn.HttpWriterChannel,
     };

     const factory = new conn.ChannelFactory();
-    const reader = factory.createReader(readerConfig);
-    const writer = factory.createWriter(writerConfig);
+    const reader = factory.createReader({
+        config: readerConfig,
+        ty: conn.Conn.HttpReaderChannel,
+    });
+    const writer = factory.createWriter({
+        config: writerConfig,
+        ty: conn.Conn.HttpWriterChannel,
+    });

     reader.data((data) => {
         console.log("This reader works");
@@ -82,17 +90,21 @@
         port: 8082,
         binary: true,
         waitHandled: true,
-        ty: conn.Conn.HttpReaderChannel,
     };
     const writerConfig: HttpWriterConfig = {
         endpoint: "http://localhost:8082",
         method: "POST",
-        ty: conn.Conn.HttpWriterChannel,
     };

     const factory = new conn.ChannelFactory();
-    const reader = factory.createReader(readerConfig);
-    const writer = factory.createWriter(writerConfig);
+    const reader = factory.createReader({
+        config: readerConfig,
+        ty: conn.Conn.HttpReaderChannel,
+    });
+    const writer = factory.createWriter({
+        config: writerConfig,
+        ty: conn.Conn.HttpWriterChannel,
+    });

     reader.data(async (data) => {
         expect(Buffer.isBuffer(data)).toBeTruthy();
@@ -121,11 +133,13 @@
         port: 8083,
         binary: false,
         responseCode: 202,
-        ty: conn.Conn.HttpReaderChannel,
     };

     const factory = new conn.ChannelFactory();
-    const reader = factory.createReader(readerConfig);
+    const reader = factory.createReader({
+        config: readerConfig,
+        ty: conn.Conn.HttpReaderChannel,
+    });

     reader.data((data) => {
         items.push(data);
diff --git a/test/connectors/ws.test.ts b/test/connectors/ws.test.ts
index e97b196..5369666 100644
--- a/test/connectors/ws.test.ts
+++ b/test/connectors/ws.test.ts
@@ -5,19 +5,23 @@ import { WsReaderConfig, WsWriterConfig } from "../../src/connectors/ws";
 describe("connector-ws", () => {
     test("Should write -> WebSocket -> read", async () => {
         const readerConfig: WsReaderConfig = {
-            ty: conn.Conn.WsReaderChannel,
             host: "0.0.0.0",
             port: 8123,
         };
         const writerConfig: WsWriterConfig = {
-            ty: conn.Conn.WsWriterChannel,
             url: "ws://127.0.0.1:8123",
         };

         const factory = new conn.ChannelFactory();
-        const reader = factory.createReader(readerConfig);
-        const writer = factory.createWriter(writerConfig);
+        const reader = factory.createReader({
+            config: readerConfig,
+            ty: conn.Conn.WsReaderChannel,
+        });
+        const writer = factory.createWriter({
+            config: writerConfig,
+            ty: conn.Conn.WsWriterChannel,
+        });

         const items: unknown[] = [];
         reader.data((x) => {
             items.push(x);
diff --git a/test/processors.test.ts b/test/processors.test.ts
index 03ac38b..7e3ada8 100644
--- a/test/processors.test.ts
+++ b/test/processors.test.ts
@@ -8,6 +8,7 @@ const prefixes = `
 @prefix rdfs: .
 @prefix xsd: .
 @prefix sh: .
+@prefix rdfl: .
 `;

 const JS = "https://w3id.org/conn/js#";
@@ -47,8 +48,8 @@ describe("test existing processors", () => {
     const [[arg]] = argss;
     expect(arg).toBeInstanceOf(Object);
-    expect(arg.channel).toBeDefined();
-    expect(arg.channel.id).toBeDefined();
+    expect(arg.config.channel).toBeDefined();
+    expect(arg.config.channel.id).toBeDefined();
     expect(arg.ty).toBeDefined();
   });

@@ -90,8 +91,8 @@
     const [[msg, writer]] = argss;
     expect(msg).toBe("Hello world!");
     expect(writer).toBeInstanceOf(Object);
-    expect(writer.channel).toBeDefined();
-    expect(writer.channel.id).toBeDefined();
+    expect(writer.config.channel).toBeDefined();
+    expect(writer.config.channel.id).toBeDefined();
     expect(writer.ty).toBeDefined();
   });

@@ -106,9 +107,9 @@
  a js:JsWriterChannel.
 [ ] a js:Send;
   js:msg [
-    a :EnvVariable;
-    :envDefault "FromEnv";
-    :envKey "msg"
+    a rdfl:EnvVariable;
+    rdfl:envDefault "FromEnv";
+    rdfl:envKey "msg"
   ];
   js:sendWriter .
 `;
@@ -138,8 +139,8 @@
     const [[msg, writer]] = argss;
     expect(msg).toBe("FromEnv");
     expect(writer).toBeInstanceOf(Object);
-    expect(writer.channel).toBeDefined();
-    expect(writer.channel.id).toBeDefined();
+    expect(writer.config.channel).toBeDefined();
+    expect(writer.config.channel.id).toBeDefined();
     expect(writer.ty).toBeDefined();
   });

@@ -155,8 +156,8 @@
     const [[msg, writer]] = argss;
     expect(msg).toBe("FROM ENV");
     expect(writer).toBeInstanceOf(Object);
-    expect(writer.channel).toBeDefined();
-    expect(writer.channel.id).toBeDefined();
+    expect(writer.config.channel).toBeDefined();
+    expect(writer.config.channel.id).toBeDefined();
     expect(writer.ty).toBeDefined();
   });
 });

@@ -168,8 +169,10 @@
 [ ] a :Channel;
   :reader ;
   :writer .
+
  a js:JsReaderChannel.
  a js:JsWriterChannel.
+
 [ ] a js:Echo;
   js:input ;
   js:output .
@@ -194,16 +197,20 @@
     const argss = extractSteps(proc!, quads, config);
     expect(argss.length).toBe(1);
     expect(argss[0].length).toBe(2);
+    console.log(argss);
+
     const [[reader, writer]] = argss;
+    console.log(reader.ty.value);
+    console.log(writer.ty.value);

     expect(reader).toBeInstanceOf(Object);
-    expect(reader.channel).toBeDefined();
-    expect(reader.channel.id).toBeDefined();
+    expect(reader.config.channel).toBeDefined();
+    expect(reader.config.channel.id).toBeDefined();
     expect(reader.ty).toBeDefined();

     expect(writer).toBeInstanceOf(Object);
-    expect(writer.channel).toBeDefined();
-    expect(writer.channel.id).toBeDefined();
+    expect(writer.config.channel).toBeDefined();
+    expect(writer.config.channel.id).toBeDefined();
     expect(writer.ty).toBeDefined();
   });
 });
diff --git a/test/shacl.test.ts b/test/shacl.test.ts
deleted file mode 100644
index 9c7887a..0000000
--- a/test/shacl.test.ts
+++ /dev/null
@@ -1,301 +0,0 @@
-import { describe, expect, test } from "@jest/globals";
-import { Quad } from "@rdfjs/types";
-import { RDF } from "@treecg/types";
-import { Parser } from "n3";
-import { extractShapes } from "../src/shacl";
-
-const prefixes = `
-@prefix js: .
-@prefix fno: .
-@prefix fnom: .
-@prefix xsd: .
-@prefix : .
-@prefix sh: .
-@prefix rdfs: .
-@prefix dc: .
-@prefix rdfs: .
-`;
-
-const shapes = `
-${prefixes}
-[] a sh:NodeShape;
-  sh:targetClass js:3DPoint;
-  sh:property [
-    sh:datatype xsd:integer;
-    sh:path js:z;
-    sh:name "z";
-    sh:maxCount 1;
-  ].
-
-js:3DPoint rdfs:subClassOf js:Point.
-
-[] a sh:NodeShape;
-  sh:targetClass js:Point;
-  sh:property [
-    sh:datatype xsd:integer;
-    sh:path js:x;
-    sh:name "x";
-    sh:maxCount 1;
-  ], [
-    sh:datatype xsd:integer;
-    sh:path js:y;
-    sh:name "y";
-    sh:maxCount 1;
-  ].
-
-js:JsProcessorShape a sh:NodeShape;
-  sh:targetClass js:JsProcess;
-  sh:property [
-    sh:datatype xsd:string;
-    sh:path :required;
-    sh:name "required";
-    sh:maxCount 1;
-    sh:minCount 1;
-  ], [
-    sh:datatype xsd:string;
-    sh:path :multiple;
-    sh:name "multiple";
-  ], [
-    sh:datatype xsd:string;
-    sh:path :at_least;
-    sh:name "atLeast";
-    sh:minCount 1;
-  ], [
-    sh:path :point;
-    sh:class js:Point;
-    sh:name "point";
-    sh:maxCount 1;
-    sh:minCount 1;
-  ].
-`;
-
-function parseQuads(inp: string): Quad[] {
-    return new Parser().parse(inp);
-}
-
-describe("Shapes test", () => {
-    test("Parse shapes", () => {
-        const quads = parseQuads(shapes);
-        const output = extractShapes(quads);
-        expect(output.shapes.length).toBe(3);
-    });
-
-    test("Parse objects", () => {
-        const data = `
-${prefixes}
- a js:JsProcess;
-  :required "true";
-  :multiple "one!";
-  :at_least "two!";
-  :point [
-    a js:Point;
-    js:x 5;
-    js:y 42;
-  ].
-`;
-
-        const output = extractShapes(parseQuads(shapes));
-        const quads = parseQuads(data);
-        const quad = quads.find((x) => x.predicate.equals(RDF.terms.type))!;
-
-        const object = output.lenses[quad.object.value].execute({
-            id: quad.subject,
-            quads,
-        });
-        expect(object.required).toBe("true");
-        expect(object.multiple).toEqual(["one!"]);
-        expect(object.atLeast).toEqual(["two!"]);
-        expect(object.point.x).toBe(5);
-        expect(object.point.y).toBe(42);
-    });
-
-    test("Invalid objects", () => {
-        const data = `
-${prefixes}
- a js:JsProcess;
-  # :required "true";
-  :multiple "one!";
-  :at_least "two!";
-  :point [
-    a js:Point;
-    js:x 5;
-    js:y 42;
-  ].
-`;
-        const output = extractShapes(parseQuads(shapes));
-
-        const quads = parseQuads(data);
-        const quad = quads.find((x) => x.predicate.equals(RDF.terms.type))!;
-
-        expect(() =>
-            output.lenses[quad.object.value].execute({
-                id: quad.subject,
-                quads,
-            }),
-        ).toThrow();
-    });
-
-    test("Parse subclassed objects", () => {
-        const data = `
-${prefixes}
- a js:JsProcess;
-  :required "true";
-  :multiple "one!";
-  :at_least "two!";
-  :point [
-    a js:3DPoint;
-    js:x 5;
-    js:y 42;
-    js:z 64;
-  ].
-`;
-        const output = extractShapes(parseQuads(shapes));
-
-        const quads = parseQuads(data);
-        const quad = quads.find((x) => x.predicate.equals(RDF.terms.type))!;
-
-        const object = output.lenses[quad.object.value].execute({
-            id: quad.subject,
-            quads,
-        });
-        expect(object.point.z).toBe(64);
-        expect(object.point.x).toBe(5);
-        expect(object.point.y).toBe(42);
-    });
-
-    test("Parse objects without type", () => {
-        const data = `
-${prefixes}
- a js:JsProcess;
-  :required "true";
-  :multiple "one!";
-  :at_least "two!";
-  :point [
-    js:x 5;
-    js:y 42;
-  ].
-`;
-        const output = extractShapes(parseQuads(shapes));
-
-        const quads = parseQuads(data);
-        const quad = quads.find((x) => x.predicate.equals(RDF.terms.type))!;
-        const object = output.lenses[quad.object.value].execute({
-            id: quad.subject,
-            quads,
-        });
-
-        expect(object.point.x).toBe(5);
-        expect(object.point.y).toBe(42);
-    });
-
-    test("Parse fake subclassed objects fail", () => {
-        const data = `
-${prefixes}
- a js:JsProcess;
-  :required "true";
-  :multiple "one!";
-  :at_least "two!";
-  :point [
-    a js:JsProcess;
-    :required "true";
-    :multiple "one!";
-    :at_least "two!";
-    :point [
-      js:x 5;
-      js:y 42;
-    ];
-  ].
-`;
-        const output = extractShapes(parseQuads(shapes));
-
-        const quads = parseQuads(data);
-        const quad = quads.find((x) => x.predicate.equals(RDF.terms.type))!;
-
-        expect(() =>
-            output.lenses[quad.object.value].execute({
-                id: quad.subject,
-                quads,
-            }),
-        ).toThrow();
-    });
-
-    test("Empty list", () => {
-        const data = `
-${prefixes}
- a js:JsProcess;
-  :required "true";
-  :at_least "two!";
-  :point [
-    js:x 5;
-    js:y 42;
-  ].
-`;
-        const output = extractShapes(parseQuads(shapes));
-
-        const quads = parseQuads(data);
-        const quad = quads.find((x) => x.predicate.equals(RDF.terms.type))!;
-        const object = output.lenses[quad.object.value].execute({
-            id: quad.subject,
-            quads,
-        });
-        expect(object.multiple).toEqual([]);
-    });
-
-    test("Empty list fails", () => {
-        const data = `
-${prefixes}
- a js:JsProcess;
-  :required "true";
-  :point [
-    js:x 5;
-    js:y 42;
-  ].
-`;
-        const output = extractShapes(parseQuads(shapes));
-
-        const quads = parseQuads(data);
-        const quad = quads.find((x) => x.predicate.equals(RDF.terms.type))!;
-        expect(() =>
-            output.lenses[quad.object.value].execute({
-                id: quad.subject,
-                quads,
-            }),
-        ).toThrow();
-    });
-
-    test("Inverse path", () => {
-        const shapes = `
-${prefixes}
-[] a sh:NodeShape;
-  sh:targetClass js:Point;
-  sh:property [
-    sh:datatype xsd:string;
-    sh:path [ sh:inversePath js:x ];
-    sh:name "x";
-    sh:maxCount 1;
-  ], [
-    sh:datatype xsd:string;
-    sh:path [ sh:inversePath ( js:x js:y ) ];
-    sh:name "y";
-    sh:maxCount 1;
-  ].
-`;
-        const data = `
-${prefixes}
- a js:Point.
-
- js:x .
- js:x [ js:y ].
-`;
-        const output = extractShapes(parseQuads(shapes));
-
-        const quads = parseQuads(data);
-        const quad = quads.find((x) => x.predicate.equals(RDF.terms.type))!;
-        const obj = output.lenses[quad.object.value].execute({
-            id: quad.subject,
-            quads,
-        });
-        expect(obj.x).toBe("x");
-        expect(obj.y).toBe("y");
-    });
-});
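Net effect for processor authors: channel arguments are now nested one level deeper, under `config`. A type sketch of the argument shape exercised by the assertions in test/processors.test.ts (the name `ChannelArg` is illustrative):

```ts
import { Term } from "@rdfjs/types";

// Before this PR: arg.channel.id — after: arg.config.channel.id
type ChannelArg = {
    ty: Term; // e.g. js:JsReaderChannel or js:JsWriterChannel
    config: {
        channel: { id: Term };
    };
};
```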