diff --git a/.eslintignore b/.eslintignore new file mode 100644 index 0000000..3063f07 --- /dev/null +++ b/.eslintignore @@ -0,0 +1,2 @@ +lib +node_modules diff --git a/.eslintrc.json b/.eslintrc.json new file mode 100644 index 0000000..ac0d2f8 --- /dev/null +++ b/.eslintrc.json @@ -0,0 +1,24 @@ +{ + "env": { + "browser": true, + "es2021": true + }, + "extends": [ + "eslint:recommended", + "plugin:@typescript-eslint/recommended", + "prettier" + ], + "parser": "@typescript-eslint/parser", + "parserOptions": { + "ecmaVersion": "latest", + "sourceType": "module" + }, + "plugins": ["@typescript-eslint"], + "rules": { + "indent": ["error", 4], + "linebreak-style": ["error", "unix"], + "quotes": ["error", "double"], + "semi": ["error", "always"], + "@typescript-eslint/no-unused-vars": "warn" + } +} diff --git a/.github/dependabot.yml b/.github/dependabot.yml new file mode 100644 index 0000000..9b580b8 --- /dev/null +++ b/.github/dependabot.yml @@ -0,0 +1,6 @@ +version: 2 +updates: + - package-ecosystem: "npm" + directory: "/" + schedule: + interval: "daily" diff --git a/.github/workflows/build-test.yml b/.github/workflows/build-test.yml index 9a35336..90b9a6b 100644 --- a/.github/workflows/build-test.yml +++ b/.github/workflows/build-test.yml @@ -19,4 +19,5 @@ jobs: bun-version: latest - run: bun i # Install dependencies - - run: bun test # Run tests \ No newline at end of file + - run: bun test # Run tests + diff --git a/.github/workflows/npm-publish-github-packages.yml b/.github/workflows/npm-publish-github-packages.yml new file mode 100644 index 0000000..1042f67 --- /dev/null +++ b/.github/workflows/npm-publish-github-packages.yml @@ -0,0 +1,35 @@ +name: Node.js Package + +on: + workflow_dispatch: + release: + types: [created] + +jobs: + build: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v3 + - uses: actions/setup-node@v3 + with: + node-version: 20 + - run: npm ci + - run: npm run build + - run: npm test + + publish: + needs: build + runs-on: ubuntu-latest + permissions: + contents: read + packages: write + steps: + - uses: actions/checkout@v3 + - uses: actions/setup-node@v3 + with: + node-version: 20 + registry-url: https://npm.pkg.github.com/ + - run: npm ci + - run: npm publish + env: + NODE_AUTH_TOKEN: ${{secrets.GITHUB_TOKEN}} diff --git a/.gitignore b/.gitignore index 5c3ddef..15c871e 100644 --- a/.gitignore +++ b/.gitignore @@ -1,4 +1,180 @@ -node_modules + +bin/bundle.mjs +# Outputs lib + +# Based on https://raw.githubusercontent.com/github/gitignore/main/Node.gitignore + +# Logs + +logs +*.log +npm-debug.log* +yarn-debug.log* +yarn-error.log* +lerna-debug.log* +.pnpm-debug.log* + +# Caches + +.cache + +# Diagnostic reports (https://nodejs.org/api/report.html) + +report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json + +# Runtime data + +pids +*.pid +*.seed +*.pid.lock + +# Directory for instrumented libs generated by jscoverage/JSCover + +lib-cov + +# Coverage directory used by tools like istanbul + +coverage +*.lcov + +# nyc test coverage + +.nyc_output + +# Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files) + +.grunt + +# Bower dependency directory (https://bower.io/) + +bower_components + +# node-waf configuration + +.lock-wscript + +# Compiled binary addons (https://nodejs.org/api/addons.html) + +build/Release + +# Dependency directories + +node_modules/ +jspm_packages/ + +# Snowpack dependency directory (https://snowpack.dev/) + +web_modules/ + +# TypeScript cache + +*.tsbuildinfo + +# Optional npm cache directory + +.npm + +# Optional
eslint cache + +.eslintcache + +# Optional stylelint cache + +.stylelintcache + +# Microbundle cache + +.rpt2_cache/ +.rts2_cache_cjs/ +.rts2_cache_es/ +.rts2_cache_umd/ + +# Optional REPL history + +.node_repl_history + +# Output of 'npm pack' + +*.tgz + +# Yarn Integrity file + +.yarn-integrity + +# dotenv environment variable files + +.env +.env.development.local +.env.test.local +.env.production.local +.env.local + +# parcel-bundler cache (https://parceljs.org/) + +.parcel-cache + +# Next.js build output + +.next +out + +# Nuxt.js build / generate output + +.nuxt dist -bin/bundle.mjs + +# Gatsby files + +# Comment in the public line in if your project uses Gatsby and not Next.js + +# https://nextjs.org/blog/next-9-1#public-directory-support + +# public + +# vuepress build output + +.vuepress/dist + +# vuepress v2.x temp and cache directory + +.temp + +# Docusaurus cache and generated files + +.docusaurus + +# Serverless directories + +.serverless/ + +# FuseBox cache + +.fusebox/ + +# DynamoDB Local files + +.dynamodb/ + +# TernJS port file + +.tern-port + +# Stores VSCode versions used for testing VSCode extensions + +.vscode-test + +# yarn v2 + +.yarn/cache +.yarn/unplugged +.yarn/build-state.yml +.yarn/install-state.gz +.pnp.* + +# IntelliJ based IDEs +.idea + +# Finder (MacOS) folder config +.DS_Store diff --git a/.husky/pre-commit b/.husky/pre-commit new file mode 100644 index 0000000..2312dc5 --- /dev/null +++ b/.husky/pre-commit @@ -0,0 +1 @@ +npx lint-staged diff --git a/.lintstagedrc.json b/.lintstagedrc.json new file mode 100644 index 0000000..5192d1e --- /dev/null +++ b/.lintstagedrc.json @@ -0,0 +1,3 @@ +{ + "*.ts": ["eslint --fix", "prettier --write"] +} diff --git a/.npmignore b/.npmignore new file mode 100644 index 0000000..349c45f --- /dev/null +++ b/.npmignore @@ -0,0 +1,2 @@ +* +!lib diff --git a/.prettierignore b/.prettierignore new file mode 100644 index 0000000..3063f07 --- /dev/null +++ b/.prettierignore @@ -0,0 +1,2 @@ +lib +node_modules diff --git a/.prettierrc.json b/.prettierrc.json new file mode 100644 index 0000000..2259cff --- /dev/null +++ b/.prettierrc.json @@ -0,0 +1,6 @@ +{ + "trailingComma": "all", + "tabWidth": 4, + "semi": true, + "singleQuote": false +} diff --git a/LICENSE b/LICENSE new file mode 100644 index 0000000..85fbb03 --- /dev/null +++ b/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2024 Jens Pots + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
diff --git a/package-lock.json b/package-lock.json index ba693d8..217f07d 100644 --- a/package-lock.json +++ b/package-lock.json @@ -16,7 +16,7 @@ "debug": "^4.3.4", "kafkajs": "^2.2.4", "n3": "^1.17.1", - "rdf-lens": "^1.2.6", + "rdf-lens": "^1.2.8", "stream-to-array": "^2.3.0", "ws": "^8.14.2" }, @@ -32,8 +32,21 @@ "@types/n3": "^1.16.3", "@types/node": "^18.11.15", "@types/ws": "^8.5.8", + "@typescript-eslint/eslint-plugin": "^7.5.0", + "@typescript-eslint/parser": "^7.4.0", + "@vitest/coverage-v8": "^1.4.0", + "dotenv": "^16.4.5", + "eslint": "^8.57.0", + "eslint-config-prettier": "^9.1.0", + "husky": "^9.0.11", + "lint-staged": "^15.2.2", + "prettier": "^3.2.5", "rollup": "^4.12.0", - "typescript": "^5.2.2" + "ts-node": "^10.9.2", + "tsc-alias": "^1.8.8", + "typescript": "^5.4.3", + "vite-tsconfig-paths": "^4.3.2", + "vitest": "^1.4.0" } }, "node_modules/@ampproject/remapping": { diff --git a/package.json b/package.json index 84b6be0..1660910 100644 --- a/package.json +++ b/package.json @@ -19,10 +19,11 @@ "js-runner": "bin/bundle.mjs" }, "scripts": { - "build": "tsc && rollup ./dist/index.js --file ./dist/index.cjs --format cjs && bun build --external debug ./bin/js-runner.js --outfile bin/bundle.mjs --target node && npm run build:recompose", + "build": "tsc && tsc-alias && rollup ./dist/index.js --file ./dist/index.cjs --format cjs && bun build --external debug ./bin/js-runner.js --outfile bin/bundle.mjs --target node && npm run build:recompose", "build:recompose": "sed -z 's/var __require = (id) => {\\n return import.meta.require(id);\\n};/import Module from \"node:module\";\\nconst __require = Module.createRequire(import.meta.url);/' -i bin/bundle.mjs", "watch": "tsc -w", - "test": "bun test" + "test": "vitest run --coverage --coverage.include src", + "prepare": "husky" }, "keywords": [], "author": "", @@ -35,7 +36,7 @@ "debug": "^4.3.4", "kafkajs": "^2.2.4", "n3": "^1.17.1", - "rdf-lens": "^1.2.6", + "rdf-lens": "^1.2.8", "stream-to-array": "^2.3.0", "ws": "^8.14.2" }, @@ -49,6 +50,19 @@ "@types/node": "^18.11.15", "@types/ws": "^8.5.8", "rollup": "^4.12.0", - "typescript": "^5.2.2" + "@typescript-eslint/eslint-plugin": "^7.5.0", + "@typescript-eslint/parser": "^7.4.0", + "@vitest/coverage-v8": "^1.4.0", + "dotenv": "^16.4.5", + "eslint": "^8.57.0", + "eslint-config-prettier": "^9.1.0", + "husky": "^9.0.11", + "lint-staged": "^15.2.2", + "prettier": "^3.2.5", + "ts-node": "^10.9.2", + "tsc-alias": "^1.8.8", + "typescript": "^5.4.3", + "vite-tsconfig-paths": "^4.3.2", + "vitest": "^1.4.0" } } diff --git a/processors.ttl b/processors.ttl new file mode 100644 index 0000000..38c01d6 --- /dev/null +++ b/processors.ttl @@ -0,0 +1,39 @@ +@prefix js: <https://w3id.org/conn/js#> . +@prefix fno: <https://w3id.org/function/ontology#> . +@prefix fnom: <https://w3id.org/function/vocabulary/mapping#> . +@prefix xsd: <http://www.w3.org/2001/XMLSchema#> . +@prefix : <https://w3id.org/conn#> . +@prefix sh: <http://www.w3.org/ns/shacl#> . + +js:Log a js:JsProcess; + js:file <./lib/index.js>; + js:function "log"; + js:location <./>; + js:mapping [ + a fno:Mapping; + fno:parameterMapping [ + a fnom:PositionParameterMapping; + fnom:functionParameter "incoming"; + fnom:implementationParameterPosition "0"^^xsd:int; + ], [ + a fnom:PositionParameterMapping; + fnom:functionParameter "outgoing"; + fnom:implementationParameterPosition "1"^^xsd:int; + ]; + ]. + +[ ] a sh:NodeShape; + sh:targetClass js:Log; + sh:property [ + sh:class :ReaderChannel; + sh:path js:incoming; + sh:name "incoming"; + sh:maxCount 1; + sh:minCount 1; + ], [ + sh:class :WriterChannel; + sh:path js:outgoing; + sh:name "outgoing"; + sh:maxCount 1; + sh:minCount 1; + ]. 
diff --git a/test/configuration.test.ts b/test/configuration.test.ts index d8b2934..105733b 100644 --- a/test/configuration.test.ts +++ b/test/configuration.test.ts @@ -1,4 +1,4 @@ -import { describe, expect, test } from "@jest/globals"; +import { describe, expect, test } from "vitest"; import { Quad } from "@rdfjs/types"; import { RDF } from "@treecg/types"; import { readFileSync } from "fs"; @@ -6,76 +6,83 @@ import { DataFactory, Parser } from "n3"; import { extractShapes } from "rdf-lens"; function parseQuads(inp: string): Quad[] { - return new Parser().parse(inp); + return new Parser().parse(inp); } function parseConfig() { - const file = readFileSync("./ontology.ttl", { encoding: "utf8" }); - const quads = parseQuads(file); - return extractShapes(quads); + const file = readFileSync("./ontology.ttl", { encoding: "utf8" }); + const quads = parseQuads(file); + return extractShapes(quads); } const JsProcessor = DataFactory.namedNode("https://w3id.org/conn/js#JsProcess"); describe("Input test", () => { - test("Parse configuration", () => { - const output = parseConfig(); - expect(output.shapes.length).toBe(8); - expect(output.lenses[JsProcessor.value]).toBeDefined(); - }); - - test("Parse processor config", () => { - const config = parseConfig(); - const processorFile = readFileSync("./processor/send.ttl", { - encoding: "utf8", + test("Parse configuration", () => { + const output = parseConfig(); + expect(output.shapes.length).toBe(8); + expect(output.lenses[JsProcessor.value]).toBeDefined(); }); - const quads = parseQuads(processorFile); - const quad = quads.find( - (x) => x.predicate.equals(RDF.terms.type) && x.object.equals(JsProcessor), - )!; - const object = config.lenses[quad.object.value].execute({ - id: quad.subject, - quads, - }); + test("Parse processor config", () => { + const config = parseConfig(); + const processorFile = readFileSync("./processor/send.ttl", { + encoding: "utf8", + }); + const quads = parseQuads(processorFile); - expect(object).toBeDefined(); - }); + const quad = quads.find( + (x) => + x.predicate.equals(RDF.terms.type) && + x.object.equals(JsProcessor), + )!; + const object = config.lenses[quad.object.value].execute({ + id: quad.subject, + quads, + }); - test("parse js-runner pipeline", () => { - const parse = (location: string) => - parseQuads(readFileSync(location, { encoding: "utf8" })); - const files = [ - "./ontology.ttl", - "./processor/send.ttl", - "./processor/resc.ttl", - "./input.ttl", - ]; - const quads = files.flatMap(parse); - const config = extractShapes(quads); + expect(object).toBeDefined(); + }); - const subjects = quads - .filter( - (x) => - x.predicate.equals(RDF.terms.type) && x.object.equals(JsProcessor), - ) - .map((x) => x.subject); - const processorLens = config.lenses[JsProcessor.value]; - const processors = subjects.map((id) => - processorLens.execute({ id, quads: quads }), - ); + test("parse js-runner pipeline", () => { + const parse = (location: string) => + parseQuads(readFileSync(location, { encoding: "utf8" })); + const files = [ + "./ontology.ttl", + "./processor/send.ttl", + "./processor/resc.ttl", + "./input.ttl", + ]; + const quads = files.flatMap(parse); + const config = extractShapes(quads); - const found: any[] = []; - for (let proc of processors) { - const subjects = quads - .filter( - (x) => x.predicate.equals(RDF.terms.type) && x.object.equals(proc.ty), - ) - .map((x) => x.subject); - const processorLens = config.lenses[proc.ty.value]; + const subjects = quads + .filter( + (x) => + x.predicate.equals(RDF.terms.type) 
&& + x.object.equals(JsProcessor), + ) + .map((x) => x.subject); + const processorLens = config.lenses[JsProcessor.value]; + const processors = subjects.map((id) => + processorLens.execute({ id, quads: quads }), + ); - found.push( - ...subjects.map((id) => processorLens.execute({ id, quads: quads })), - ); - } - }); + const found: unknown[] = []; + for (const proc of processors) { + const subjects = quads + .filter( + (x) => + x.predicate.equals(RDF.terms.type) && + x.object.equals(proc.ty), + ) + .map((x) => x.subject); + const processorLens = config.lenses[proc.ty.value]; + + found.push( + ...subjects.map((id) => + processorLens.execute({ id, quads: quads }), + ), + ); + } + }); }); diff --git a/test/connectors/file.test.ts b/test/connectors/file.test.ts index 062b07b..93fe95e 100644 --- a/test/connectors/file.test.ts +++ b/test/connectors/file.test.ts @@ -1,58 +1,58 @@ -import { describe, expect, test } from "@jest/globals"; +import { describe, expect, test } from "vitest"; import { writeFile } from "fs/promises"; import { FileReaderConfig, FileWriterConfig } from "../../src/connectors/file"; import * as conn from "../../src/connectors"; import { namedNode } from "../../src/util"; describe("File Channel", () => { - test("Reader - Writer", async () => { - const config: FileReaderConfig = { - path: "/tmp/test.txt", - onReplace: true, - encoding: "utf-8", - }; - const writerConfig: FileWriterConfig = { - path: "/tmp/test.txt", - onReplace: true, - encoding: "utf-8", - }; - - await writeFile("/tmp/test.txt", ""); - - const factory = new conn.ChannelFactory(); - const items: string[] = []; - - const reader = factory.createReader({ - config, - id: namedNode("reader"), - ty: conn.Conn.FileReaderChannel, + test("Reader - Writer", async () => { + const config: FileReaderConfig = { + path: "/tmp/test.txt", + onReplace: true, + encoding: "utf-8", + }; + const writerConfig: FileWriterConfig = { + path: "/tmp/test.txt", + onReplace: true, + encoding: "utf-8", + }; + + await writeFile("/tmp/test.txt", ""); + + const factory = new conn.ChannelFactory(); + const items: string[] = []; + + const reader = factory.createReader({ + config, + id: namedNode("reader"), + ty: conn.Conn.FileReaderChannel, + }); + expect(reader).toBeInstanceOf(conn.SimpleStream); + + reader.data((x) => { + items.push(x); + }); + + const writer = factory.createWriter({ + config: writerConfig, + id: namedNode("writer"), + ty: conn.Conn.FileWriterChannel, + }); + await factory.init(); + await writer.push("Number 1 " + Math.random()); + + await sleep(300); + expect(items.length).toBe(1); + expect(items[0].startsWith("Number 1")).toBeTruthy(); + + await writer.push("Number 2"); + + await sleep(300); + expect(items.length).toBe(2); + expect(items[1]).toBe("Number 2"); }); - expect(reader).toBeInstanceOf(conn.SimpleStream); - - reader.data((x) => { - items.push(x); - }); - - const writer = factory.createWriter({ - config: writerConfig, - id: namedNode("writer"), - ty: conn.Conn.FileWriterChannel, - }); - await factory.init(); - await writer.push("Number 1 " + Math.random()); - - await sleep(300); - expect(items.length).toBe(1); - expect(items[0].startsWith("Number 1")).toBeTruthy(); - - await writer.push("Number 2"); - - await sleep(300); - expect(items.length).toBe(2); - expect(items[1]).toBe("Number 2"); - }); }); function sleep(x: number): Promise<void> { - return new Promise((resolve) => setTimeout(resolve, x)); + return new Promise((resolve) => setTimeout(resolve, x)); } diff --git a/test/connectors/http.test.ts 
b/test/connectors/http.test.ts index bebace0..fa4c864 100644 --- a/test/connectors/http.test.ts +++ b/test/connectors/http.test.ts @@ -1,173 +1,173 @@ -import { describe, expect, test } from "@jest/globals"; +import { describe, expect, test } from "vitest"; import * as conn from "../../src/connectors"; import { HttpReaderConfig, HttpWriterConfig } from "../../src/connectors/http"; import { namedNode } from "../../src/util"; describe("connector-http", () => { - test("Should write -> HTTP -> read (string)", async () => { - const readerConfig: HttpReaderConfig = { - endpoint: "localhost", - port: 8080, - binary: false, - }; - const writerConfig: HttpWriterConfig = { - endpoint: "http://localhost:8080", - method: "POST", - }; - - const factory = new conn.ChannelFactory(); - const reader = factory.createReader({ - config: readerConfig, - id: namedNode("reader"), - ty: conn.Conn.HttpReaderChannel, - }); - const writer = factory.createWriter({ - config: writerConfig, - id: namedNode("writer"), - ty: conn.Conn.HttpWriterChannel, - }); - - reader.data((data) => { - items.push(data); - }); - - await factory.init(); - - const items: unknown[] = []; - - await writer.push("test1"); - await sleep(200); - await writer.push("test2"); - await sleep(200); - - expect(items).toEqual(["test1", "test2"]); - - await Promise.all([reader.end(), writer.end()]); - }); - - test("Should write -> HTTP -> read (Buffer)", async () => { - const readerConfig: HttpReaderConfig = { - endpoint: "localhost", - port: 8081, - binary: true, - waitHandled: false, - }; - const writerConfig: HttpWriterConfig = { - endpoint: "http://localhost:8081", - method: "POST", - }; - - const factory = new conn.ChannelFactory(); - const reader = factory.createReader({ - config: readerConfig, - id: namedNode("reader"), - ty: conn.Conn.HttpReaderChannel, - }); - const writer = factory.createWriter({ - config: writerConfig, - id: namedNode("writer"), - ty: conn.Conn.HttpWriterChannel, - }); - - reader.data((data) => { - expect(Buffer.isBuffer(data)).toBeTruthy(); - items.push(data.toString()); + test("Should write -> HTTP -> read (string)", async () => { + const readerConfig: HttpReaderConfig = { + endpoint: "localhost", + port: 8080, + binary: false, + }; + const writerConfig: HttpWriterConfig = { + endpoint: "http://localhost:8080", + method: "POST", + }; + + const factory = new conn.ChannelFactory(); + const reader = factory.createReader({ + config: readerConfig, + id: namedNode("reader"), + ty: conn.Conn.HttpReaderChannel, + }); + const writer = factory.createWriter({ + config: writerConfig, + id: namedNode("writer"), + ty: conn.Conn.HttpWriterChannel, + }); + + reader.data((data) => { + items.push(data); + }); + + await factory.init(); + + const items: unknown[] = []; + + await writer.push("test1"); + await sleep(200); + await writer.push("test2"); + await sleep(200); + + expect(items).toEqual(["test1", "test2"]); + + await Promise.all([reader.end(), writer.end()]); }); - await factory.init(); - - const items: unknown[] = []; - - await writer.push(Buffer.from("test1", "utf8")); - await sleep(200); - await writer.push(Buffer.from("test2", "utf8")); - await sleep(200); - - expect(items).toEqual(["test1", "test2"]); - - await Promise.all([reader.end(), writer.end()]); - }); - - test("Should write -> HTTP -> read (Buffer) and await response", async () => { - const readerConfig: HttpReaderConfig = { - endpoint: "localhost", - port: 8082, - binary: true, - waitHandled: true, - }; - const writerConfig: HttpWriterConfig = { - endpoint: 
"http://localhost:8082", - method: "POST", - }; - - const factory = new conn.ChannelFactory(); - const reader = factory.createReader({ - config: readerConfig, - id: namedNode("reader"), - ty: conn.Conn.HttpReaderChannel, - }); - const writer = factory.createWriter({ - config: writerConfig, - id: namedNode("writer"), - ty: conn.Conn.HttpWriterChannel, + test("Should write -> HTTP -> read (Buffer)", async () => { + const readerConfig: HttpReaderConfig = { + endpoint: "localhost", + port: 8081, + binary: true, + waitHandled: false, + }; + const writerConfig: HttpWriterConfig = { + endpoint: "http://localhost:8081", + method: "POST", + }; + + const factory = new conn.ChannelFactory(); + const reader = factory.createReader({ + config: readerConfig, + id: namedNode("reader"), + ty: conn.Conn.HttpReaderChannel, + }); + const writer = factory.createWriter({ + config: writerConfig, + id: namedNode("writer"), + ty: conn.Conn.HttpWriterChannel, + }); + + reader.data((data) => { + expect(Buffer.isBuffer(data)).toBeTruthy(); + items.push(data.toString()); + }); + + await factory.init(); + + const items: unknown[] = []; + + await writer.push(Buffer.from("test1", "utf8")); + await sleep(200); + await writer.push(Buffer.from("test2", "utf8")); + await sleep(200); + + expect(items).toEqual(["test1", "test2"]); + + await Promise.all([reader.end(), writer.end()]); }); - reader.data(async (data) => { - expect(Buffer.isBuffer(data)).toBeTruthy(); - items.push(data.toString()); - await sleep(1500); + test("Should write -> HTTP -> read (Buffer) and await response", async () => { + const readerConfig: HttpReaderConfig = { + endpoint: "localhost", + port: 8082, + binary: true, + waitHandled: true, + }; + const writerConfig: HttpWriterConfig = { + endpoint: "http://localhost:8082", + method: "POST", + }; + + const factory = new conn.ChannelFactory(); + const reader = factory.createReader({ + config: readerConfig, + id: namedNode("reader"), + ty: conn.Conn.HttpReaderChannel, + }); + const writer = factory.createWriter({ + config: writerConfig, + id: namedNode("writer"), + ty: conn.Conn.HttpWriterChannel, + }); + + reader.data(async (data) => { + expect(Buffer.isBuffer(data)).toBeTruthy(); + items.push(data.toString()); + await sleep(1500); + }); + + await factory.init(); + + const items: unknown[] = []; + + const start = new Date().getTime(); + await writer.push(Buffer.from("test1", "utf8")); + const end = new Date().getTime(); + await sleep(200); + + expect(end - start > 1000).toBeTruthy(); + expect(items).toEqual(["test1"]); + + await Promise.all([reader.end(), writer.end()]); }); - await factory.init(); - - const items: unknown[] = []; - - const start = new Date().getTime(); - await writer.push(Buffer.from("test1", "utf8")); - const end = new Date().getTime(); - await sleep(200); - - expect(end - start > 1000).toBeTruthy(); - expect(items).toEqual(["test1"]); + test("http channel uses correct response code", async () => { + const readerConfig: HttpReaderConfig = { + endpoint: "localhost", + port: 8083, + binary: false, + responseCode: 202, + }; - await Promise.all([reader.end(), writer.end()]); - }); + const factory = new conn.ChannelFactory(); + const reader = factory.createReader({ + config: readerConfig, + id: namedNode("reader"), + ty: conn.Conn.HttpReaderChannel, + }); - test("http channel uses correct response code", async () => { - const readerConfig: HttpReaderConfig = { - endpoint: "localhost", - port: 8083, - binary: false, - responseCode: 202, - }; + reader.data((data) => { + items.push(data); + }); - 
const factory = new conn.ChannelFactory(); - const reader = factory.createReader({ - config: readerConfig, - id: namedNode("reader"), - ty: conn.Conn.HttpReaderChannel, - }); + const factory = new conn.ChannelFactory(); + const reader = factory.createReader({ + config: readerConfig, + id: namedNode("reader"), + ty: conn.Conn.HttpReaderChannel, + }); - reader.data((data) => { - items.push(data); - }); + reader.data((data) => { + items.push(data); + }); - await factory.init(); + await factory.init(); - const items: unknown[] = []; + const items: unknown[] = []; - const resp = await fetch("http://localhost:8083", { - body: "test1", - method: "PUT", + const resp = await fetch("http://localhost:8083", { + body: "test1", + method: "PUT", + }); - expect(items).toEqual(["test1"]); - expect(resp.status).toEqual(202); - - await Promise.all([reader.end()]); - }); + expect(items).toEqual(["test1"]); + expect(resp.status).toEqual(202); + + await Promise.all([reader.end()]); + }); }); function sleep(x: number): Promise<void> { - return new Promise((resolve) => setTimeout(resolve, x)); + return new Promise((resolve) => setTimeout(resolve, x)); } diff --git a/test/connectors/kafka.test.ts b/test/connectors/kafka.test.ts deleted file mode 100644 index 0ffdd02..0000000 --- a/test/connectors/kafka.test.ts +++ /dev/null @@ -1 +0,0 @@ -// TODO \ No newline at end of file diff --git a/test/connectors/ws.test.ts b/test/connectors/ws.test.ts index 2dab162..07bbbe9 100644 --- a/test/connectors/ws.test.ts +++ b/test/connectors/ws.test.ts @@ -1,47 +1,47 @@ -import { describe, expect, test } from "@jest/globals"; +import { describe, expect, test } from "vitest"; import * as conn from "../../src/connectors"; import { WsReaderConfig, WsWriterConfig } from "../../src/connectors/ws"; import { namedNode } from "../../src/util"; describe("connector-ws", () => { - test("Should write -> WebSocket -> read", async () => { - const readerConfig: WsReaderConfig = { - host: "0.0.0.0", - port: 8123, - }; - - const writerConfig: WsWriterConfig = { - url: "ws://127.0.0.1:8123", - }; - - const factory = new conn.ChannelFactory(); - const reader = factory.createReader({ - config: readerConfig, - id: namedNode("reader"), - ty: conn.Conn.WsReaderChannel, + test("Should write -> WebSocket -> read", async () => { + const readerConfig: WsReaderConfig = { + host: "0.0.0.0", + port: 8123, + }; + + const writerConfig: WsWriterConfig = { + url: "ws://127.0.0.1:8123", + }; + + const factory = new conn.ChannelFactory(); + const reader = factory.createReader({ + config: readerConfig, + id: namedNode("reader"), + ty: conn.Conn.WsReaderChannel, + }); + const writer = factory.createWriter({ + config: writerConfig, + id: namedNode("writer"), + ty: conn.Conn.WsWriterChannel, + }); + const items: unknown[] = []; + reader.data((x) => { + items.push(x); + }); + + await factory.init(); + + await writer.push("test1"); + await writer.push("test2"); + await sleep(200); + + expect(items).toEqual(["test1", "test2"]); + + await Promise.all([writer.end(), reader.end()]); }); - const writer = factory.createWriter({ - config: writerConfig, - id: namedNode("writer"), - ty: conn.Conn.WsWriterChannel, - }); - const items: unknown[] = []; - reader.data((x) => { - items.push(x); - }); - - await factory.init(); - - await writer.push("test1"); - await writer.push("test2"); - await sleep(200); - - expect(items).toEqual(["test1", "test2"]); - - await Promise.all([writer.end(), reader.end()]); - }); }); function sleep(x: number): Promise<void> { - return new Promise((resolve) => setTimeout(resolve, x)); + return new Promise((resolve) => setTimeout(resolve, x)); } diff --git a/test/models.test.ts b/test/models.test.ts index 20d1cf0..06c8027 100644 --- a/test/models.test.ts +++ b/test/models.test.ts @@ -1,4 +1,4 @@ 
-import { describe, expect, test } from "@jest/globals"; +import { describe, expect, test } from "vitest"; const prefixes = ` @prefix js: <https://w3id.org/conn/js#> . @@ -10,8 +10,8 @@ const prefixes = ` `; describe("Processor Lens", () => { - test("Parse full config", () => { - const turtle = ` + test("Parse full config", () => { + const turtle = ` ${prefixes} js:Echo a js:JsProcess; @@ -43,19 +43,19 @@ js:Echo a js:JsProcess; sh:name "Output Channel" ]. `; - // const quads = new Parser().parse(turtle); - // - // const lens = subjects().then(unique()).asMulti().thenSome(ProcessorLens) - // const out = lens.execute(quads); - // - // expect(out.length).toBe(1); - // expect(out[0].id).toEqual("https://w3id.org/conn/js#Echo"); - // expect(out[0].mappings.length).toBe(2); - // expect(out[0].shape).toBeInstanceOf(BasicLens); - }); + // const quads = new Parser().parse(turtle); + // + // const lens = subjects().then(unique()).asMulti().thenSome(ProcessorLens) + // const out = lens.execute(quads); + // + // expect(out.length).toBe(1); + // expect(out[0].id).toEqual("https://w3id.org/conn/js#Echo"); + // expect(out[0].mappings.length).toBe(2); + // expect(out[0].shape).toBeInstanceOf(BasicLens); + }); - test("2 + 2 = 4", () => { - const four = 2 + 2; - expect(four).toBe(4); - }); + test("2 + 2 = 4", () => { + const four = 2 + 2; + expect(four).toBe(4); + }); }); diff --git a/test/processors.test.ts b/test/processors.test.ts index edcba59..0aa89cb 100644 --- a/test/processors.test.ts +++ b/test/processors.test.ts @@ -1,4 +1,4 @@ -import { describe, expect, test } from "@jest/globals"; +import { describe, expect, test } from "vitest"; import { extractProcessors, extractSteps, Source } from "../src/index"; const prefixes = ` @prefix js: <https://w3id.org/conn/js#> . @prefix fno: <https://w3id.org/function/ontology#> . @prefix fnom: <https://w3id.org/function/vocabulary/mapping#> . @prefix xsd: <http://www.w3.org/2001/XMLSchema#> . @prefix : <https://w3id.org/conn#> . @prefix sh: <http://www.w3.org/ns/shacl#> . @prefix owl: <http://www.w3.org/2002/07/owl#> . `; const JS = "https://w3id.org/conn/js#"; describe("test existing processors", () => { - test("resc.ttl", async () => { - const value = `${prefixes} + test("resc.ttl", async () => { + const value = `${prefixes} <> owl:imports <./ontology.ttl>, <./processor/resc.ttl>. [ ] a :Channel; @@ -24,36 +24,36 @@ describe("test existing processors", () => { [ ] a js:Resc; js:rescReader . 
`; - const baseIRI = process.cwd() + "/config.ttl"; - - const source: Source = { - value, - baseIRI, - type: "memory", - }; - - const { - processors, - quads, - shapes: config, - } = await extractProcessors(source); - - const proc = processors.find((x) => x.ty.value === JS + "Resc"); - expect(proc).toBeDefined(); - - const argss = extractSteps(proc!, quads, config); - expect(argss.length).toBe(1); - expect(argss[0].length).toBe(1); - - const [[arg]] = argss; - expect(arg).toBeInstanceOf(Object); - expect(arg.config.channel).toBeDefined(); - expect(arg.config.channel.id).toBeDefined(); - expect(arg.ty).toBeDefined(); - }); - - test("send.ttl", async () => { - const value = `${prefixes} + const baseIRI = process.cwd() + "/config.ttl"; + + const source: Source = { + value, + baseIRI, + type: "memory", + }; + + const { + processors, + quads, + shapes: config, + } = await extractProcessors(source); + + const proc = processors.find((x) => x.ty.value === JS + "Resc"); + expect(proc).toBeDefined(); + + const argss = extractSteps(proc!, quads, config); + expect(argss.length).toBe(1); + expect(argss[0].length).toBe(1); + + const [[arg]] = argss; + expect(arg).toBeInstanceOf(Object); + expect(arg.config.channel).toBeDefined(); + expect(arg.config.channel.id).toBeDefined(); + expect(arg.ty).toBeDefined(); + }); + + test("send.ttl", async () => { + const value = `${prefixes} <> owl:imports <./ontology.ttl>, <./processor/send.ttl> . [ ] a :Channel; @@ -65,37 +65,37 @@ describe("test existing processors", () => { js:msg "Hello world!"; js:sendWriter . `; - const baseIRI = process.cwd() + "/config.ttl"; - - const source: Source = { - value, - baseIRI, - type: "memory", - }; - - const { - processors, - quads, - shapes: config, - } = await extractProcessors(source); - - const proc = processors.find((x) => x.ty.value === JS + "Send"); - expect(proc).toBeDefined(); - - const argss = extractSteps(proc!, quads, config); - expect(argss.length).toBe(1); - expect(argss[0].length).toBe(2); - - const [[msg, writer]] = argss; - expect(msg).toBe("Hello world!"); - expect(writer).toBeInstanceOf(Object); - expect(writer.config.channel).toBeDefined(); - expect(writer.config.channel.id).toBeDefined(); - expect(writer.ty).toBeDefined(); - }); - - describe("send.ttl from env", async () => { - const value = `${prefixes} + const baseIRI = process.cwd() + "/config.ttl"; + + const source: Source = { + value, + baseIRI, + type: "memory", + }; + + const { + processors, + quads, + shapes: config, + } = await extractProcessors(source); + + const proc = processors.find((x) => x.ty.value === JS + "Send"); + expect(proc).toBeDefined(); + + const argss = extractSteps(proc!, quads, config); + expect(argss.length).toBe(1); + expect(argss[0].length).toBe(2); + + const [[msg, writer]] = argss; + expect(msg).toBe("Hello world!"); + expect(writer).toBeInstanceOf(Object); + expect(writer.config.channel).toBeDefined(); + expect(writer.config.channel.id).toBeDefined(); + expect(writer.ty).toBeDefined(); + }); + + describe("send.ttl from env", async () => { + const value = `${prefixes} <> owl:imports <./ontology.ttl>, <./processor/send.ttl> . [ ] a :Channel; @@ -111,56 +111,56 @@ describe("test existing processors", () => { ]; js:sendWriter . 
`; - const baseIRI = process.cwd() + "/config.ttl"; - - const source: Source = { - value, - baseIRI, - type: "memory", - }; - - const { - processors, - quads, - shapes: config, - } = await extractProcessors(source); - - test("Env default value", () => { - const proc = processors.find((x) => x.ty.value === JS + "Send"); - expect(proc).toBeDefined(); - - const argss = extractSteps(proc!, quads, config); - expect(argss.length).toBe(1); - expect(argss[0].length).toBe(2); - - const [[msg, writer]] = argss; - expect(msg).toBe("FromEnv"); - expect(writer).toBeInstanceOf(Object); - expect(writer.config.channel).toBeDefined(); - expect(writer.config.channel.id).toBeDefined(); - expect(writer.ty).toBeDefined(); + const baseIRI = process.cwd() + "/config.ttl"; + + const source: Source = { + value, + baseIRI, + type: "memory", + }; + + const { + processors, + quads, + shapes: config, + } = await extractProcessors(source); + + test("Env default value", () => { + const proc = processors.find((x) => x.ty.value === JS + "Send"); + expect(proc).toBeDefined(); + + const argss = extractSteps(proc!, quads, config); + expect(argss.length).toBe(1); + expect(argss[0].length).toBe(2); + + const [[msg, writer]] = argss; + expect(msg).toBe("FromEnv"); + expect(writer).toBeInstanceOf(Object); + expect(writer.config.channel).toBeDefined(); + expect(writer.config.channel.id).toBeDefined(); + expect(writer.ty).toBeDefined(); + }); + + test("Env value", () => { + process.env["msg"] = "FROM ENV"; + const proc = processors.find((x) => x.ty.value === JS + "Send"); + expect(proc).toBeDefined(); + + const argss = extractSteps(proc!, quads, config); + expect(argss.length).toBe(1); + expect(argss[0].length).toBe(2); + + const [[msg, writer]] = argss; + expect(msg).toBe("FROM ENV"); + expect(writer).toBeInstanceOf(Object); + expect(writer.config.channel).toBeDefined(); + expect(writer.config.channel.id).toBeDefined(); + expect(writer.ty).toBeDefined(); + }); }); - test("Env value", () => { - process.env["msg"] = "FROM ENV"; - const proc = processors.find((x) => x.ty.value === JS + "Send"); - expect(proc).toBeDefined(); - - const argss = extractSteps(proc!, quads, config); - expect(argss.length).toBe(1); - expect(argss[0].length).toBe(2); - - const [[msg, writer]] = argss; - expect(msg).toBe("FROM ENV"); - expect(writer).toBeInstanceOf(Object); - expect(writer.config.channel).toBeDefined(); - expect(writer.config.channel.id).toBeDefined(); - expect(writer.ty).toBeDefined(); - }); - }); - - test("echo.ttl", async () => { - const value = `${prefixes} + test("echo.ttl", async () => { + const value = `${prefixes} <> owl:imports <./ontology.ttl>, <./processor/echo.ttl> . [ ] a :Channel; @@ -174,36 +174,36 @@ describe("test existing processors", () => { js:input ; js:output . 
`; - const baseIRI = process.cwd() + "/config.ttl"; - - const source: Source = { - value, - baseIRI, - type: "memory", - }; - - const { - processors, - quads, - shapes: config, - } = await extractProcessors(source); - - const proc = processors.find((x) => x.ty.value === JS + "Echo"); - expect(proc).toBeDefined(); - const argss = extractSteps(proc!, quads, config); - expect(argss.length).toBe(1); - expect(argss[0].length).toBe(2); - - const [[reader, writer]] = argss; - - expect(reader).toBeInstanceOf(Object); - expect(reader.config.channel).toBeDefined(); - expect(reader.config.channel.id).toBeDefined(); - expect(reader.ty).toBeDefined(); - - expect(writer).toBeInstanceOf(Object); - expect(writer.config.channel).toBeDefined(); - expect(writer.config.channel.id).toBeDefined(); - expect(writer.ty).toBeDefined(); - }); + const baseIRI = process.cwd() + "/config.ttl"; + + const source: Source = { + value, + baseIRI, + type: "memory", + }; + + const { + processors, + quads, + shapes: config, + } = await extractProcessors(source); + + const proc = processors.find((x) => x.ty.value === JS + "Echo"); + expect(proc).toBeDefined(); + const argss = extractSteps(proc!, quads, config); + expect(argss.length).toBe(1); + expect(argss[0].length).toBe(2); + + const [[reader, writer]] = argss; + + expect(reader).toBeInstanceOf(Object); + expect(reader.config.channel).toBeDefined(); + expect(reader.config.channel.id).toBeDefined(); + expect(reader.ty).toBeDefined(); + + expect(writer).toBeInstanceOf(Object); + expect(writer.config.channel).toBeDefined(); + expect(writer.config.channel.id).toBeDefined(); + expect(writer.ty).toBeDefined(); + }); }); diff --git a/tsconfig.json b/tsconfig.json index 3921413..4010bc1 100644 --- a/tsconfig.json +++ b/tsconfig.json @@ -1,21 +1,38 @@ { "compilerOptions": { - // Generate d.ts files "declaration": true, + "outDir": "dist/", + "lib": [ + "ES2022" + ], + "target": "ES2022", + "module": "ES2022", "moduleResolution": "node", - "target": "esnext", /* Set the JavaScript language version for emitted JavaScript and include compatible library declarations. */ - /* Modules */ - "module": "esnext", /* Specify what module code is generated. */ - "outDir": "./dist", /* Specify a file that bundles all outputs into one JavaScript file. If 'declaration' is true, also designates a file that bundles all .d.ts output. */ - "rootDir": "./src", /* Specify an output folder for all emitted files. */ - "sourceMap": true, - "esModuleInterop": true, /* Emit additional JavaScript to ease support for importing CommonJS modules. This enables 'allowSyntheticDefaultImports' for type compatibility. */ - "forceConsistentCasingInFileNames": true, /* Ensure that casing is correct in imports. */ - // "emitDeclarationOnly": true, - "strict": true /* Enable all strict type-checking options. 
*/ + "preserveConstEnums": true, + "downlevelIteration": true, + "skipLibCheck": true, + "strict": true, + "strictFunctionTypes": false, + "strictPropertyInitialization": false, + "experimentalDecorators": true, + "allowSyntheticDefaultImports": true, + "incremental": true, + "esModuleInterop": true, + "removeComments": true }, + "ts-node": { + "esm": true, + "experimentalSpecifierResolution": "node" + }, + "tsc-alias": { + "resolveFullPaths": true, + "verbose": false + }, + "include": [ + "src/**/*" + ], "exclude": [ - "test/**/*.ts", + "node_modules/**/*", "dist/**/*" ] } diff --git a/vite.config.ts b/vite.config.ts new file mode 100644 index 0000000..7d5ca2f --- /dev/null +++ b/vite.config.ts @@ -0,0 +1,16 @@ +import { defineConfig } from "vitest/config"; +import tsconfigPaths from "vite-tsconfig-paths"; + +export default defineConfig({ + plugins: [tsconfigPaths()], + test: { + deps: { + optimizer: { + ssr: { + enabled: true, + include: ["@ajuvercr/js-runner"], + }, + }, + }, + }, +});