mirror of
https://github.com/kmitresse/Compo-Service-Log-Project.git
synced 2026-05-14 01:21:48 +00:00
feat: Use container for the API service
This commit is contained in:
@@ -0,0 +1,29 @@
|
||||
import express from "express";
|
||||
import router from "./routes/randomize";
|
||||
import { createServer } from "node:http";
|
||||
import { bodyToSchema, xmlBodyParser } from "./middlewares";
|
||||
|
||||
export default class Server {
|
||||
private readonly app: express.Application;
|
||||
private static PORT: number =
|
||||
process.env.PORT
|
||||
? parseInt(process.env.PORT)
|
||||
: 3000;
|
||||
|
||||
constructor() {
|
||||
this.app = express();
|
||||
this.app.use(
|
||||
express.json(),
|
||||
xmlBodyParser,
|
||||
bodyToSchema,
|
||||
router,
|
||||
);
|
||||
}
|
||||
|
||||
public start() {
|
||||
const server = createServer(this.app);
|
||||
server.listen(Server.PORT, () => {
|
||||
console.info(`Server is running on http://localhost:${Server.PORT}`);
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,17 @@
|
||||
import "reflect-metadata";
|
||||
import { DataSource } from "typeorm";
|
||||
import { Log } from "./entity/Log";
|
||||
|
||||
// Single shared TypeORM datasource for the MariaDB instance that stores
// request logs (see entity/Log). Connection settings come from the
// environment with local-development fallbacks.
export const TypeOrmDataSource = new DataSource({
  type: "mariadb",
  host: process.env.MARIADB_HOST || "localhost",
  port: process.env.MARIADB_PORT ? parseInt(process.env.MARIADB_PORT) : 3306,
  username: process.env.MARIADB_USER || "root",
  password: process.env.MARIADB_PASSWORD || "root",
  database: process.env.MARIADB_DATABASE || "database",
  // NOTE(review): automatic schema sync on every start is convenient for
  // development but destructive in production — confirm before deploying.
  synchronize: true,
  logging: false,
  entities: [Log],
  subscribers: [],
  migrations: [],
});
|
||||
@@ -0,0 +1,34 @@
|
||||
import { Column, Entity, PrimaryGeneratedColumn } from "typeorm";
|
||||
|
||||
// TypeORM entity recording one handled HTTP request: the route, the HTTP
// verb, the serialized request body and the serialized output.
@Entity()
export class Log {
  // Auto-incremented surrogate key.
  @PrimaryGeneratedColumn()
  id: number = 0;

  // Creation time of the entry (defaults to "now" at construction).
  @Column("timestamp")
  timestamp: Date = new Date();

  // Request path that was handled.
  @Column("text")
  url: string;

  // HTTP verb of the logged request.
  @Column("text")
  method: "GET" | "POST" | "PUT" | "DELETE" = "GET";

  // JSON-serialized request body.
  @Column("blob")
  body: string = "";

  // JSON-serialized response payload (data or error).
  @Column("blob")
  output: string = "";

  constructor(
    url: string,
    method: "GET" | "POST" | "PUT" | "DELETE",
    body: string,
    output: string,
  ) {
    this.url = url;
    this.method = method;
    this.body = body;
    this.output = output;
  }
}
|
||||
@@ -0,0 +1,13 @@
|
||||
import dotenv from "dotenv";
|
||||
import Server from "./Server";
|
||||
import { DatasetCollection } from "./services/dataset";
|
||||
import { TypeOrmDataSource } from "./TypeOrmDataSource";
|
||||
|
||||
dotenv.config();
|
||||
|
||||
TypeOrmDataSource.initialize().then(() => {
|
||||
new Server().start();
|
||||
DatasetCollection.loadAll()
|
||||
.then(() => console.log("All datasets are loaded"))
|
||||
.catch(console.error);
|
||||
});
|
||||
@@ -0,0 +1,16 @@
|
||||
import { NextFunction, Request, Response } from "express";
|
||||
import { Definitions } from "../services/dmn/interfaces";
|
||||
import { DMN } from "../services/dmn/DMN";
|
||||
|
||||
export default async function (
|
||||
req: Request,
|
||||
_: Response,
|
||||
next: NextFunction,
|
||||
) {
|
||||
if (req.is("application/xml")) {
|
||||
const dmn: Definitions = await DMN.parse(req.body);
|
||||
|
||||
req.body = DMN.getSchema(dmn);
|
||||
next();
|
||||
} else next();
|
||||
}
|
||||
@@ -0,0 +1,2 @@
|
||||
export { default as xmlBodyParser } from "./xmlBodyParser";
|
||||
export { default as bodyToSchema } from "./bodyToSchema";
|
||||
@@ -0,0 +1,13 @@
|
||||
import { NextFunction, Request, Response } from "express";
|
||||
|
||||
export default function (req: Request, _: Response, next: NextFunction) {
|
||||
if (req.is("application/xml")) {
|
||||
let data = "";
|
||||
req.setEncoding("utf8");
|
||||
req.on("data", (chunk: any) => (data += chunk));
|
||||
req.on("end", () => {
|
||||
req.body = data;
|
||||
next();
|
||||
});
|
||||
} else next();
|
||||
}
|
||||
@@ -0,0 +1,71 @@
|
||||
import { Router, Request, Response } from "express";
|
||||
import { DatasetCollection } from "../services/dataset";
|
||||
import axios from "axios";
|
||||
import { Log } from "../entity/Log";
|
||||
import { TypeOrmDataSource } from "../TypeOrmDataSource";
|
||||
|
||||
const router = Router();
|
||||
|
||||
router.all("/", (_: Request, res: Response) => {
|
||||
res.status(200).json({
|
||||
status: "OK",
|
||||
routes: ["/randomize", "/randomize/:id"],
|
||||
});
|
||||
});
|
||||
|
||||
router.post("/randomize", async (req: Request, res: Response) => {
|
||||
const size: number = req.query.size ? parseInt(req.query.size as string) : 10;
|
||||
if (size < 1) return res.status(400).json({ status: "INVALID_SIZE" });
|
||||
let output: { status: string; data?: any[]; error?: any; };
|
||||
|
||||
await Promise
|
||||
.all(
|
||||
DatasetCollection.datasets.map((dataset) =>
|
||||
axios.post(dataset.endpoint, req.body, { params: { size } })
|
||||
.then((res) => res.data.data),
|
||||
),
|
||||
)
|
||||
.then((r) => r.flat()
|
||||
.sort(() => Math.random() - 0.5)
|
||||
.slice(0, size))
|
||||
.then((data) => {
|
||||
output = { status: "RANDOMIZED", data };
|
||||
res.status(200).json(output);
|
||||
})
|
||||
.catch(async (e) => {
|
||||
output = { status: "ERROR", error: e.message };
|
||||
const log: Log = new Log(
|
||||
req.url,
|
||||
req.method as any,
|
||||
JSON.stringify(req.body),
|
||||
JSON.stringify(output),
|
||||
);
|
||||
await TypeOrmDataSource.manager.save(log);
|
||||
|
||||
return res.status(500).json(output);
|
||||
})
|
||||
.finally(() => {
|
||||
const log: Log = new Log(
|
||||
req.url,
|
||||
req.method as any,
|
||||
JSON.stringify(req.body),
|
||||
JSON.stringify(output),
|
||||
);
|
||||
TypeOrmDataSource.manager.save(log);
|
||||
});
|
||||
});
|
||||
|
||||
router.post("/randomize/:id", async (req: Request, res: Response) => {
|
||||
const { id } = req.params;
|
||||
const size: number = req.query.size ? parseInt(req.query.size as string) : 10;
|
||||
|
||||
const dataset = DatasetCollection.datasets.find(
|
||||
(dataset) => dataset.id === id,
|
||||
);
|
||||
if (!dataset) return res.status(404).json({ status: "NOT_FOUND" });
|
||||
|
||||
const data = await dataset.get(size, req.body);
|
||||
return res.status(200).json({ status: "RANDOMIZED", data });
|
||||
});
|
||||
|
||||
export default router;
|
||||
@@ -0,0 +1,22 @@
|
||||
import { MongoClient, Db } from "mongodb";
|
||||
|
||||
const uri: string = `mongodb://${process.env.MONGODB_HOST}:${process.env.MONGODB_PORT}`;
|
||||
const dbName: string = process.env.MONGODB_DATABASE || "database";
|
||||
|
||||
let db: Db;
|
||||
|
||||
export const getDatabaseConnexion = async (): Promise<Db> => {
|
||||
if (!db) {
|
||||
const client = new MongoClient(uri, {
|
||||
auth: {
|
||||
username: process.env.MONGODB_USER || "user",
|
||||
password: process.env.MONGODB_PASSWORD || "password",
|
||||
},
|
||||
connectTimeoutMS: 60000,
|
||||
socketTimeoutMS: 60000,
|
||||
});
|
||||
await client.connect();
|
||||
db = client.db(dbName);
|
||||
}
|
||||
return db;
|
||||
};
|
||||
@@ -0,0 +1,35 @@
|
||||
import { Readable } from "node:stream";
|
||||
import axios from "axios";
|
||||
import * as fs from "node:fs";
|
||||
import { WriteStream } from "node:fs";
|
||||
|
||||
class FileService {
|
||||
/**
|
||||
* Get the compressed file stream from a given url
|
||||
* @param url - The url of the file
|
||||
* @return Promise<Readable> - The compressed file stream
|
||||
*/
|
||||
public static async getFileStream(url: string): Promise<Readable> {
|
||||
return axios({ method: "GET", url, responseType: "stream" }).then(
|
||||
(response) => response.data,
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a write stream to a file
|
||||
* @param path - The path of the file
|
||||
*/
|
||||
public static createWriteStream(path: string): WriteStream {
|
||||
return fs.createWriteStream(path);
|
||||
}
|
||||
|
||||
/**
|
||||
* Delete a file from the file system
|
||||
* @param cachePath - The path of the file to delete
|
||||
*/
|
||||
public static deleteFile(cachePath: string): void {
|
||||
fs.unlinkSync(cachePath);
|
||||
}
|
||||
}
|
||||
|
||||
export default FileService;
|
||||
@@ -0,0 +1,5 @@
|
||||
import { Duplex } from "node:stream";
|
||||
|
||||
/**
 * Common contract for archive extractors: given per-dataset options (the
 * entry name to pull out of the archive), return a duplex stream that
 * decompresses the bytes piped through it.
 */
export default interface Extractor {
  extract(options: { file: string }): Duplex;
}
|
||||
@@ -0,0 +1,23 @@
|
||||
import Extractor from "./Extractor";
|
||||
import ZipExtractor from "./ZipExtractor";
|
||||
import GzipExtractor from "./GzipExtractor";
|
||||
import NoneExtractor from "./NoneExtractor";
|
||||
|
||||
export enum ExtractorType {
|
||||
ZIP,
|
||||
GZIP,
|
||||
NONE,
|
||||
}
|
||||
|
||||
export default class ExtractorFactory {
|
||||
static getExtractor(extractorType: ExtractorType): Extractor {
|
||||
switch (extractorType) {
|
||||
case ExtractorType.ZIP:
|
||||
return ZipExtractor.instance;
|
||||
case ExtractorType.GZIP:
|
||||
return GzipExtractor.instance;
|
||||
default:
|
||||
return NoneExtractor.instance;
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,11 @@
|
||||
import Extractor from "./Extractor";
|
||||
import { Duplex } from "node:stream";
|
||||
import { createGunzip } from "node:zlib";
|
||||
|
||||
export default class GzipExtractor implements Extractor {
|
||||
public static instance = new GzipExtractor();
|
||||
|
||||
extract(_: any): Duplex {
|
||||
return createGunzip();
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,15 @@
|
||||
import Extractor from "./Extractor";
|
||||
import { Duplex, Transform } from "node:stream";
|
||||
import { createGunzip } from "node:zlib";
|
||||
|
||||
export default class NoneExtractor implements Extractor {
|
||||
public static instance = new NoneExtractor();
|
||||
|
||||
extract(_: any): Duplex {
|
||||
return new Transform({
|
||||
transform(chunk, _, callback) {
|
||||
callback(null, chunk);
|
||||
},
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,13 @@
|
||||
import { Duplex } from "node:stream";
|
||||
import Extractor from "./Extractor";
|
||||
import { ParseOne } from "unzipper";
|
||||
|
||||
export default class ZipExtractor implements Extractor {
|
||||
public static instance: ZipExtractor = new ZipExtractor();
|
||||
|
||||
public extract(options: { file: string }): Duplex {
|
||||
return ParseOne(new RegExp(options.file), {
|
||||
forceStream: true,
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,35 @@
|
||||
import { Transform } from "node:stream";
|
||||
|
||||
type NudgerData = {
|
||||
code: string; // "3260014791012",
|
||||
brand: string; // "ALSATEK",
|
||||
model: string; // "TL33171",
|
||||
name: string; // "alsatek lg g3 coque protection aluminium rouge bumper tl33171",
|
||||
last_updated: string; // "1562430134146",
|
||||
gs1_country: string; // "FR",
|
||||
offers_count: string; // "0",
|
||||
min_price: string; // "",
|
||||
min_price_compensation: string; // "",
|
||||
currency: string; // "",
|
||||
categories: string; // "ACCESSOIRES>COQUE SMARTPHONE",
|
||||
url: string; // ""
|
||||
};
|
||||
|
||||
export default class NudgerDataTransformer extends Transform {
|
||||
constructor() {
|
||||
super({ objectMode: true });
|
||||
}
|
||||
|
||||
_transform(chunk: NudgerData, encoding: string, callback: () => void) {
|
||||
const { code, gs1_country } = chunk;
|
||||
|
||||
if (code && gs1_country) {
|
||||
this.push({
|
||||
"Barcode (EAN 13)": code,
|
||||
Country: gs1_country,
|
||||
});
|
||||
}
|
||||
|
||||
callback();
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,25 @@
|
||||
import { Transform } from "node:stream";
|
||||
|
||||
type OpenfoodfactsData = {
|
||||
code: string;
|
||||
countries_en: string;
|
||||
};
|
||||
|
||||
export default class OpenfoodfactsDataTransformer extends Transform {
|
||||
constructor() {
|
||||
super({ objectMode: true });
|
||||
}
|
||||
|
||||
_transform(chunk: OpenfoodfactsData, encoding: string, callback: () => void) {
|
||||
const { code, countries_en } = chunk;
|
||||
|
||||
if (code && countries_en) {
|
||||
this.push({
|
||||
"Barcode (EAN 13)": code,
|
||||
Country: countries_en,
|
||||
});
|
||||
}
|
||||
|
||||
callback();
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,27 @@
|
||||
import { Transform } from "node:stream";
|
||||
|
||||
type WorldCitiesData = {
|
||||
name: string;
|
||||
country: string;
|
||||
subcountry: string;
|
||||
geonameid: string;
|
||||
};
|
||||
|
||||
export default class WorldCitiesDataTransformer extends Transform {
|
||||
constructor() {
|
||||
super({ objectMode: true });
|
||||
}
|
||||
|
||||
_transform(chunk: WorldCitiesData, encoding: string, callback: () => void) {
|
||||
const { geonameid, country } = chunk;
|
||||
|
||||
if (geonameid && country) {
|
||||
this.push({
|
||||
"Geoname ID": geonameid,
|
||||
Country: country,
|
||||
});
|
||||
}
|
||||
|
||||
callback();
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,150 @@
|
||||
import { pipeline, Transform, Writable } from "node:stream";
|
||||
import { promisify } from "node:util";
|
||||
|
||||
import FileService from "../FileService";
|
||||
|
||||
import { ParserFactory, ParserType } from "../parser";
|
||||
import ExtractorFactory, {
|
||||
ExtractorType,
|
||||
} from "../archive_extractor/ExtractorFactory";
|
||||
import Extractor from "../archive_extractor/Extractor";
|
||||
import Parser from "../parser/Parser";
|
||||
import { getDatabaseConnexion } from "../DataLake";
|
||||
import { validate } from "jsonschema";
|
||||
|
||||
/**
 * Represents a dataset that can be loaded and queried
 */
export default class Dataset {
  // Unique dataset identifier; also used as the MongoDB collection name.
  readonly id: string;
  // Remote location the raw dataset is downloaded from.
  readonly uri: string;
  // Local API endpoint serving randomized records of this dataset.
  readonly endpoint: string;

  // Extraction/parsing strategy; defaults to "no archive" + CSV and is
  // overridden through the fluent setters below.
  private extractor: Extractor = ExtractorFactory.getExtractor(
    ExtractorType.NONE,
  );
  private parser: Parser = ParserFactory.getParser(ParserType.CSV);
  private extractorOptions: any;
  private parserOptions: any;
  // Maps raw parsed rows to the normalized documents stored in MongoDB;
  // must be set before load() is called.
  private dataTransformer?: Transform;

  /**
   * Create a new dataset instance
   * @param id - The unique identifier of the dataset
   * @param uri - The URL the raw dataset is downloaded from
   * @param endpoint - The local randomize endpoint for this dataset
   */
  constructor({
    id,
    uri,
    endpoint,
  }: {
    id: string;
    uri: string;
    endpoint: string;
  }) {
    this.id = id;
    this.uri = uri;
    this.endpoint = endpoint;
  }

  // Fluent configuration: choose the archive extractor and its options.
  setExtractor(type: ExtractorType, options: any): this {
    this.extractor = ExtractorFactory.getExtractor(type);
    this.extractorOptions = options;
    return this;
  }

  // Fluent configuration: choose the parser and its options.
  setParser(type: ParserType, options: any): this {
    this.parser = ParserFactory.getParser(type);
    this.parserOptions = options;
    return this;
  }

  // Fluent configuration: set the row transformer (mandatory for load()).
  setDataTransformer(dataTransformer: Transform): this {
    this.dataTransformer = dataTransformer;
    return this;
  }

  /**
   * Load the dataset by downloading, extracting, parsing and saving it in cache
   * @return Promise<void> - A promise that resolves when the dataset is loaded
   * @throws {Error} - If the dataset cannot be loaded
   */
  public async load(): Promise<void> {
    if (!this.dataTransformer) {
      throw new Error("Data transformer is not set");
    }

    // Skip the download entirely when the collection is already populated.
    const db = await getDatabaseConnexion();
    const collection = db.collection(this.id);
    const count = await collection.countDocuments();
    if (count > 0) {
      console.log(`Dataset ${this.id} already loaded`);
      return;
    }

    const pipelineAsync = promisify(pipeline);

    console.log(`Download: ${this.uri}`);

    // Captured for use inside the Writable below, where `this` differs.
    const datasetId = this.id;

    // Rows are buffered and inserted in batches of 1000 to limit the
    // number of MongoDB round-trips.
    const batch = 1000;
    const buffer: any[] = [];

    // download -> decompress -> parse -> normalize -> batched Mongo insert
    await pipelineAsync(
      await FileService.getFileStream(this.uri),
      this.extractor.extract(this.extractorOptions),
      this.parser.parse(this.parserOptions),
      this.dataTransformer,
      new Writable({
        objectMode: true,
        async write(chunk, _, callback) {
          buffer.push(chunk);
          if (buffer.length < batch) return callback();

          const db = await getDatabaseConnexion();
          db.collection(datasetId)
            .insertMany(buffer.splice(0, batch))
            .then(() => callback())
            .catch((error) => callback(error));
        },
      }),
    )
      .then(async () => {
        // Flush whatever remains of the final partial batch.
        if (buffer.length > 0) {
          await db.collection(datasetId).insertMany(buffer);
        }
      })
      .then(() => console.log(`Dataset ${this.id} loaded`))
      .catch((error) =>
        console.error(`Error loading dataset ${this.id}: ${error.message}`),
      );
  }

  /**
   * Sample `length` random documents from the dataset, projected and
   * validated against the supplied JSON schema. Invalid documents are
   * dropped, so fewer than `length` items may be returned.
   */
  async get(length: number, schema: any) {
    const db = await getDatabaseConnexion();

    // Convert JSON schema to projection
    const projection = { _id: 0 };
    if (schema?.properties) {
      for (const field in schema.properties) {
        // @ts-ignore
        projection[field] = 1;
      }
    }

    const datas = await db
      .collection(this.id)
      .aggregate([{ $project: projection }, { $sample: { size: length } }])
      .limit(length)
      .toArray();

    return datas
      .map((data) => {
        const res = validate(data, schema);
        if (!res.valid) return null;
        return data;
      })
      .filter((data) => data !== null);
  }
}
|
||||
@@ -0,0 +1,45 @@
|
||||
import { Dataset } from "./index";
|
||||
import { ParserType } from "../parser";
|
||||
import WorldCitiesDataTransformer from "../data_transformer/WorldCitiesDataTransformer";
|
||||
import { ExtractorType } from "../archive_extractor/ExtractorFactory";
|
||||
import NudgerDataTransformer from "../data_transformer/NudgerDataTransformer";
|
||||
import OpenfoodfactsDataTransformer from "../data_transformer/OpenfoodfactsDataTransformer";
|
||||
|
||||
// Registry of every dataset served by the API. Each entry wires together
// the remote source, the local randomize endpoint, the archive extractor,
// the parser and the row transformer used to normalize the data.
class DatasetCollection {
  public static datasets: Dataset[] = [
    // Nudger GTIN open data — zipped CSV with one entry.
    new Dataset({
      id: "nudger",
      uri: "https://files.opendatarchives.fr/data.cquest.org/open4goods/gtin-open-data.zip",
      endpoint: `http://localhost:${process.env.PORT || 3000}/randomize/nudger`,
    })
      .setExtractor(ExtractorType.ZIP, {
        file: "open4goods-full-gtin-dataset.csv",
      })
      .setParser(ParserType.CSV, {})
      .setDataTransformer(new NudgerDataTransformer()),
    // OpenFoodFacts products — gzip-compressed, tab-separated CSV.
    new Dataset({
      id: "openfoodfacts",
      uri: "https://static.openfoodfacts.org/data/en.openfoodfacts.org.products.csv.gz",
      endpoint: `http://localhost:${process.env.PORT || 3000}/randomize/openfoodfacts`,
    })
      .setExtractor(ExtractorType.GZIP, {
        file: "en.openfoodfacts.org.products.csv",
      })
      .setParser(ParserType.CSV, { delimiter: "\t", quote: null })
      .setDataTransformer(new OpenfoodfactsDataTransformer()),
    // World cities — plain (uncompressed) CSV.
    new Dataset({
      id: "world-cities",
      uri: "https://raw.githubusercontent.com/datasets/world-cities/refs/heads/main/data/world-cities.csv",
      endpoint: `http://localhost:${process.env.PORT || 3000}/randomize/world-cities`,
    })
      .setExtractor(ExtractorType.NONE, {})
      .setParser(ParserType.CSV, {})
      .setDataTransformer(new WorldCitiesDataTransformer()),
  ];

  // Kick off loading of every dataset in parallel.
  public static loadAll(): Promise<void[]> {
    return Promise.all(this.datasets.map((dataset) => dataset.load()));
  }
}

export default DatasetCollection;
|
||||
@@ -0,0 +1,2 @@
|
||||
export { default as Dataset } from "./Dataset";
|
||||
export { default as DatasetCollection } from "./DatasetCollection";
|
||||
@@ -0,0 +1,68 @@
|
||||
// @ts-ignore
|
||||
import DmnModdle from "dmn-moddle";
|
||||
import {
|
||||
Decision,
|
||||
Is_DMN_Decision,
|
||||
Is_DMN_DecisionTable,
|
||||
Name_of_DMN_InputClause,
|
||||
Name_of_DMN_OutputClause,
|
||||
Definitions,
|
||||
InputClause,
|
||||
OutputClause,
|
||||
} from "./interfaces";
|
||||
|
||||
export class DMN {
|
||||
static async parse(xml: string): Promise<Definitions> {
|
||||
const { rootElement, warnings } = await new DmnModdle().fromXML(xml);
|
||||
if (warnings.length !== 0)
|
||||
console.warn(warnings.map((warning: any) => warning.message).join(" * "));
|
||||
return rootElement as Definitions;
|
||||
}
|
||||
|
||||
public static getSchema(dmnDefinitions: Definitions) {
|
||||
const { inputs, outputs } = this.getInputOutput(dmnDefinitions);
|
||||
const properties = this.getProperties(inputs || [], outputs || []);
|
||||
|
||||
return {
|
||||
type: "object",
|
||||
properties,
|
||||
required: Object.keys(properties),
|
||||
};
|
||||
}
|
||||
|
||||
private static getInputOutput(dmnDefinitions: Definitions) {
|
||||
const decisions: Decision[] = dmnDefinitions.drgElement.filter((element) =>
|
||||
Is_DMN_Decision(element),
|
||||
);
|
||||
const { input: inputs, output: outputs } = decisions
|
||||
.map((decision) => decision.decisionLogic)
|
||||
.filter((decisionLogic) => Is_DMN_DecisionTable(decisionLogic))[0];
|
||||
|
||||
return { inputs, outputs };
|
||||
}
|
||||
|
||||
private static getProperties(inputs: InputClause[], outputs: OutputClause[]) {
|
||||
let properties = {};
|
||||
|
||||
inputs.forEach((input) => {
|
||||
const name = Name_of_DMN_InputClause(input) as string;
|
||||
const type = input.typeRef || "string";
|
||||
// @ts-ignore
|
||||
properties[name] = {
|
||||
type,
|
||||
};
|
||||
});
|
||||
|
||||
outputs.forEach((output) => {
|
||||
const name = Name_of_DMN_OutputClause(output) as string;
|
||||
const type = output.typeRef || "string";
|
||||
|
||||
// @ts-ignore
|
||||
properties[name] = {
|
||||
type,
|
||||
};
|
||||
});
|
||||
|
||||
return properties;
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,30 @@
|
||||
import { ModdleElement } from "../interfaces/ModdleElement";
|
||||
import { Name_of_ModdleElement } from "../interfaces/enums";
|
||||
|
||||
// Application error type for DMN handling. The message catalogue below
// contains <strong> markup, which suggests messages are rendered as HTML
// somewhere downstream — NOTE(review): confirm the consumer escapes or
// expects HTML.
export class DmnError extends Error {
  static readonly Inconsistent_DMN_diagram =
    "<strong>Inconsistent DMN diagram</strong>";
  static readonly Invalid_data_format = "<strong>Invalid data format</strong>";
  static readonly Invalid_drop_mode = "<strong>Invalid drop mode</strong>";
  static readonly Invalid_JSON = "<strong>Invalid JSON</strong>";
  static readonly No_business_logic = "<strong>No business logic</strong>";
  static readonly No_possible_randomization =
    "<strong>No possible randomization</strong>";
  static readonly No_possible_visualization =
    "<strong>No possible visualization</strong>";
  static readonly Not_trained = "<strong>Not trained</strong>";
  static readonly Separator = " ⤳ ";
  static readonly TensorFlow_js = "<strong>TensorFlow.js</strong>";
  static readonly Undefined_DMN_type = "<strong>Undefined DMN type</strong>";

  // Builds a message of the form "<element name> ⤳ message1 ⤳ message2 …",
  // keeping the offending moddle element attached to the error.
  constructor(
    readonly me: ModdleElement,
    ...messages: Array<string>
  ) {
    super(
      Name_of_ModdleElement(me) +
        DmnError.Separator +
        messages.join(DmnError.Separator),
    );
  }
}
|
||||
@@ -0,0 +1,19 @@
|
||||
import { ModdleElement } from "./ModdleElement";
|
||||
import { ModdleElementReference } from "./ModdleElementReference";
|
||||
|
||||
// Tag literal used as the $type discriminant of authority requirements.
const _DMN_AuthorityRequirement: "dmn:AuthorityRequirement" =
  "dmn:AuthorityRequirement";

// Requirement edge pointing a decision at a knowledge source or at
// another decision acting as an authority.
interface AuthorityRequirement extends ModdleElement {
  $type: typeof _DMN_AuthorityRequirement;
  requiredAuthority?: ModdleElementReference;
  requiredDecision?: ModdleElementReference;
}

// class AuthorityRequirement extends ModdleElement {
//   readonly $type: string = "dmn:AuthorityRequirement";
//   requiredAuthority?: ModdleElementReference;
//   requiredDecision?: ModdleElementReference;
// }

export { AuthorityRequirement, _DMN_AuthorityRequirement };
|
||||
@@ -0,0 +1,18 @@
|
||||
import { ModdleElement } from "./ModdleElement";
|
||||
|
||||
// Tag literal used as the $type discriminant of business knowledge models.
const _DMN_BusinessKnowledgeModel: "dmn:BusinessKnowledgeModel" =
  "dmn:BusinessKnowledgeModel";

// DRG element holding reusable business logic; no fields beyond the base
// moddle element.
interface DMN_BusinessKnowledgeModel extends ModdleElement {
  $type: typeof _DMN_BusinessKnowledgeModel;
}

// class BusinessKnowledgeModel extends ModdleElement {
//   readonly $type: string = "dmn:BusinessKnowledgeModel";
// }

export {
  // BusinessKnowledgeModel,
  DMN_BusinessKnowledgeModel,
  _DMN_BusinessKnowledgeModel,
};
|
||||
@@ -0,0 +1,28 @@
|
||||
import { ModdleElement } from "./ModdleElement";
|
||||
import { ContextEntry } from "./ContextEntry";
|
||||
import { DMN_type_reference_ } from "./enums";
|
||||
|
||||
const _DMN_Context: "dmn:Context" = "dmn:Context";
|
||||
|
||||
interface Context extends ModdleElement {
|
||||
$type: typeof _DMN_Context;
|
||||
contextEntry: Array<ContextEntry>;
|
||||
typeRef: DMN_type_reference_;
|
||||
}
|
||||
|
||||
function Is_DMN_Context(me: ModdleElement): me is Context {
|
||||
return (
|
||||
"$type" in me &&
|
||||
me.$type === _DMN_Context &&
|
||||
"contextEntry" in me &&
|
||||
"typeRef" in me
|
||||
);
|
||||
}
|
||||
|
||||
// class Context extends ModdleElement {
|
||||
// readonly $type: string = "dmn:Context";
|
||||
// contextEntry: ContextEntry[] = [];
|
||||
// typeRef: DMN_type_reference_ ;
|
||||
// }
|
||||
|
||||
export { Context, _DMN_Context, Is_DMN_Context };
|
||||
@@ -0,0 +1,19 @@
|
||||
import { ModdleElement } from "./ModdleElement";
|
||||
import { LiteralExpression } from "./LiteralExpression";
|
||||
import { InformationItem } from "./InformationItem";
|
||||
|
||||
// Tag literal used as the $type discriminant of context entries.
const _DMN_ContextEntry: "dmn:ContextEntry" = "dmn:ContextEntry";

// One entry of a boxed context: the variable being defined and the
// literal expression producing its value.
interface ContextEntry extends ModdleElement {
  $type: typeof _DMN_ContextEntry;
  value: LiteralExpression;
  variable: InformationItem;
}

// class ContextEntry extends ModdleElement {
//   readonly $type: string = "dmn:ContextEntry";
//   value: LiteralExpression = new LiteralExpression();
//   variable: InformationItem;
// }

export { ContextEntry, _DMN_ContextEntry };
|
||||
@@ -0,0 +1,27 @@
|
||||
import { Drop_mode, Status_mode } from "./enums";
|
||||
|
||||
// Envelope exchanged with clients: an action tag plus a payload of plain
// objects.
interface Data {
  action: Drop_mode | Status_mode;
  data: Array<Object>;
}

// class Data {
//   action: Drop_mode | Status_mode;
//   data: object[];
// }

// Runtime validator for untyped input: the action must match (after
// upper-casing) a known Drop_mode or Status_mode value, and the payload
// must be an array.
function Is_Data(data: Data): boolean {
  return (
    "action" in data &&
    (Object.values(Drop_mode).includes(
      (data.action as string).toUpperCase() as Drop_mode,
    ) ||
      Object.values(Status_mode).includes(
        (data.action as string).toUpperCase() as Status_mode,
      )) &&
    "data" in data &&
    Array.isArray(data.data)
  );
}

export { Data, Is_Data };
|
||||
@@ -0,0 +1,34 @@
|
||||
import { ModdleElement } from "./ModdleElement";
|
||||
import { AuthorityRequirement } from "./AuthorityRequirement";
|
||||
import { Context } from "./Context";
|
||||
import { DecisionTable } from "./DecisionTable";
|
||||
import { Definitions } from "./Definitions";
|
||||
import { LiteralExpression } from "./LiteralExpression";
|
||||
import { InformationRequirement } from "./InformationRequirement";
|
||||
import { KnowledgeRequirement } from "./KnowledgeRequirement";
|
||||
import { InformationItem } from "./InformationItem";
|
||||
|
||||
const _DMN_Decision: "dmn:Decision" = "dmn:Decision";
|
||||
|
||||
interface Decision extends ModdleElement {
|
||||
$parent: Definitions;
|
||||
$type: typeof _DMN_Decision;
|
||||
decisionLogic: Context | DecisionTable | LiteralExpression;
|
||||
allowedAnswers?: string;
|
||||
authorityRequirement?: Array<AuthorityRequirement>; // Knowledge source(s)
|
||||
description?: string;
|
||||
informationRequirement?: Array<InformationRequirement>; // Input data
|
||||
knowledgeRequirement?: Array<KnowledgeRequirement>; // Knowledge model(s)
|
||||
question?: string;
|
||||
variable?: InformationItem;
|
||||
}
|
||||
|
||||
function Is_DMN_Decision(me: ModdleElement): me is Decision {
|
||||
return "$type" in me && me.$type === _DMN_Decision && "decisionLogic" in me;
|
||||
}
|
||||
|
||||
function Name_of_DMN_Decision(decision: Decision): string {
|
||||
return "name" in decision ? decision.name! : decision.id;
|
||||
}
|
||||
|
||||
export { Decision, _DMN_Decision, Is_DMN_Decision, Name_of_DMN_Decision };
|
||||
@@ -0,0 +1,14 @@
|
||||
import { ModdleElement } from "./ModdleElement";
|
||||
import { UnaryTests } from "./UnaryTests";
|
||||
import { LiteralExpression } from "./LiteralExpression";
|
||||
|
||||
// Tag literal used as the $type discriminant of decision rules.
const _DMN_DecisionRule: "dmn:DecisionRule" = "dmn:DecisionRule";

// One row of a decision table: input conditions and output expressions.
interface DecisionRule extends ModdleElement {
  $type: typeof _DMN_DecisionRule;
  description: string;
  inputEntry: Array<UnaryTests>;
  outputEntry: Array<LiteralExpression>;
}

export { DecisionRule, _DMN_DecisionRule };
|
||||
@@ -0,0 +1,32 @@
|
||||
import { ModdleElement } from "./ModdleElement";
|
||||
import { Decision } from "./Decision";
|
||||
import { DecisionRule } from "./DecisionRule";
|
||||
import { RuleAnnotationClause } from "./RuleAnnotationClause";
|
||||
import { Hit_policy } from "./enums";
|
||||
import { InputClause } from "./InputClause";
|
||||
import { OutputClause } from "./OutputClause";
|
||||
|
||||
const _DMN_DecisionTable: "dmn:DecisionTable" = "dmn:DecisionTable";
|
||||
|
||||
interface DecisionTable extends ModdleElement {
|
||||
$parent: Decision; // Overriding...
|
||||
$type: typeof _DMN_DecisionTable;
|
||||
annotation?: Array<RuleAnnotationClause>;
|
||||
hitPolicy?: Hit_policy;
|
||||
input?: Array<InputClause>;
|
||||
output?: Array<OutputClause>;
|
||||
outputLabel?: string;
|
||||
rule?: Array<DecisionRule>;
|
||||
}
|
||||
|
||||
function Is_DMN_DecisionTable(me: ModdleElement): me is DecisionTable {
|
||||
return (
|
||||
"$type" in me &&
|
||||
me.$type === _DMN_DecisionTable &&
|
||||
"input" in me &&
|
||||
"output" in me &&
|
||||
"rule" in me
|
||||
);
|
||||
}
|
||||
|
||||
export { DecisionTable, _DMN_DecisionTable, Is_DMN_DecisionTable };
|
||||
@@ -0,0 +1,45 @@
|
||||
import { ModdleElement } from "./ModdleElement";
|
||||
import { DMN_BusinessKnowledgeModel } from "./BusinessKnoledgeModel";
|
||||
import { Decision } from "./Decision";
|
||||
import { InputData } from "./InputData";
|
||||
import { DMN_KnowledgeSource } from "./KnowledgSource";
|
||||
import { ItemDefinition } from "./ItemDefinition";
|
||||
import { DMN_type_reference_, Trace } from "./enums";
|
||||
|
||||
const _DMN_Definitions: "dmn:Definitions" = "dmn:Definitions";
|
||||
|
||||
interface Definitions extends ModdleElement {
|
||||
$type: typeof _DMN_Definitions;
|
||||
drgElement: Array<
|
||||
DMN_BusinessKnowledgeModel | Decision | InputData | DMN_KnowledgeSource
|
||||
>;
|
||||
itemDefinition: ItemDefinition[];
|
||||
}
|
||||
|
||||
function _Get_type_reference_from_DMN_Definitions(
|
||||
me: Definitions,
|
||||
type_name?: string,
|
||||
): DMN_type_reference_ | undefined {
|
||||
if (Trace)
|
||||
console.assert(
|
||||
Is_DMN_Definitions(me),
|
||||
"'_Get_type_reference_from_DMN_Definitions' >> 'Is_DMN_Definitions(me)', untrue",
|
||||
);
|
||||
|
||||
if (type_name === undefined) return undefined;
|
||||
const index = me.itemDefinition.findIndex(
|
||||
(item: ItemDefinition) => item.name === type_name,
|
||||
);
|
||||
return index !== -1 ? me.itemDefinition[index].typeRef : undefined;
|
||||
}
|
||||
|
||||
function Is_DMN_Definitions(me: ModdleElement): me is Definitions {
|
||||
return "$type" in me && me.$type === _DMN_Definitions && "drgElement" in me;
|
||||
}
|
||||
|
||||
export {
|
||||
_DMN_Definitions,
|
||||
Definitions,
|
||||
_Get_type_reference_from_DMN_Definitions,
|
||||
Is_DMN_Definitions,
|
||||
};
|
||||
@@ -0,0 +1,11 @@
|
||||
import { ModdleElement } from "./ModdleElement";
|
||||
import { DMN_type_reference_ } from "./enums";
|
||||
|
||||
const _DMN_InformationItem: "dmn:InformationItem" = "dmn:InformationItem";

// Typed variable attached to a DMN element (e.g. the variable an InputData
// exposes); `typeRef` names the DMN type of its value.
interface InformationItem extends ModdleElement {
  $type: typeof _DMN_InformationItem;
  typeRef: DMN_type_reference_;
}

export { InformationItem, _DMN_InformationItem };
|
||||
@@ -0,0 +1,27 @@
|
||||
import { ModdleElement } from "./ModdleElement";
|
||||
import { ModdleElementReference } from "./ModdleElementReference";
|
||||
|
||||
const _DMN_InformationRequirement: "dmn:InformationRequirement" =
|
||||
"dmn:InformationRequirement";
|
||||
|
||||
interface InformationRequirement extends ModdleElement {
|
||||
$type: typeof _DMN_InformationRequirement;
|
||||
requiredDecision?: ModdleElementReference;
|
||||
requiredInput?: ModdleElementReference;
|
||||
}
|
||||
|
||||
function Is_DMN_InformationRequirement(
|
||||
me: ModdleElement,
|
||||
): me is InformationRequirement {
|
||||
return (
|
||||
"$type" in me &&
|
||||
me.$type === _DMN_InformationRequirement &&
|
||||
"requiredInput" in me
|
||||
);
|
||||
}
|
||||
|
||||
export {
|
||||
InformationRequirement,
|
||||
_DMN_InformationRequirement,
|
||||
Is_DMN_InformationRequirement,
|
||||
};
|
||||
@@ -0,0 +1,137 @@
|
||||
import { ModdleElement } from "./ModdleElement";
|
||||
import { DmnError } from "../error/DmnError";
|
||||
import { DecisionTable } from "./DecisionTable";
|
||||
import {
|
||||
_Get_type_reference_from_DMN_Definitions,
|
||||
Definitions,
|
||||
} from "./Definitions";
|
||||
import { Decision } from "./Decision";
|
||||
import { LiteralExpression } from "./LiteralExpression";
|
||||
import { UnaryTests } from "./UnaryTests";
|
||||
import {
|
||||
_Extract_enumeration_values,
|
||||
DMN_type_reference_,
|
||||
Is_DMN_type_reference_,
|
||||
} from "./enums";
|
||||
|
||||
const _DMN_InputClause: "dmn:InputClause" = "dmn:InputClause";

// One input column of a decision table.
interface InputClause extends ModdleElement {
  $parent: DecisionTable; // Overriding...
  $type: typeof _DMN_InputClause;
  inputExpression?: LiteralExpression; // expression evaluated for this column
  inputValues?: UnaryTests; // admissible values (source of enumerations)
  label?: string; // human-readable column header
  typeRef?: DMN_type_reference_;
  width?: number; // presumably a diagram layout hint — not used by logic here
}
|
||||
|
||||
function Get_enumeration_from_DMN_InputClause(
|
||||
me: InputClause,
|
||||
): Array<any> | never {
|
||||
// if (Trace)
|
||||
// console.assert(_Is_DMN_InputClause_enumeration_(me), "Get_enumeration_from_DMN_InputClause >> '_Is_DMN_InputClause_enumeration_(me)', untrue");
|
||||
let type_reference =
|
||||
"inputExpression" in me ? me.inputExpression!.typeRef : undefined;
|
||||
if (Is_DMN_type_reference_(type_reference) === false) {
|
||||
type_reference = _Get_type_reference_from_DMN_Definitions(
|
||||
me.$parent.$parent.$parent as Definitions,
|
||||
type_reference,
|
||||
);
|
||||
if (type_reference === undefined)
|
||||
throw new DmnError(
|
||||
me,
|
||||
DmnError.Undefined_DMN_type,
|
||||
Name_of_DMN_InputClause(me),
|
||||
);
|
||||
}
|
||||
if (type_reference === DMN_type_reference_.BOOLEAN) return [false, true];
|
||||
else if (type_reference === DMN_type_reference_.STRING) {
|
||||
const extraction = _Extract_enumeration_values(me.inputValues!.text);
|
||||
if (extraction === null)
|
||||
throw new DmnError(
|
||||
me,
|
||||
DmnError.Undefined_DMN_type,
|
||||
Name_of_DMN_InputClause(me),
|
||||
);
|
||||
return extraction;
|
||||
} else {
|
||||
const extraction = _Extract_enumeration_values(
|
||||
me.inputValues!.text,
|
||||
type_reference,
|
||||
);
|
||||
if (extraction === null)
|
||||
throw new DmnError(
|
||||
me,
|
||||
DmnError.Undefined_DMN_type,
|
||||
Name_of_DMN_InputClause(me),
|
||||
);
|
||||
return extraction;
|
||||
}
|
||||
}
|
||||
|
||||
function _Is_DMN_InputClause_enumeration_(me: InputClause): boolean {
|
||||
return (
|
||||
"inputExpression" in me &&
|
||||
"typeRef" in me.inputExpression! &&
|
||||
("inputValues" in me ||
|
||||
me.inputExpression.typeRef === DMN_type_reference_.BOOLEAN)
|
||||
);
|
||||
}
|
||||
|
||||
function Name_of_DMN_InputClause(me: InputClause): string {
|
||||
return "label" in me
|
||||
? me.label!
|
||||
: "name" in me
|
||||
? me.name!
|
||||
: "inputExpression" in me && "name" in me.inputExpression!
|
||||
? me.inputExpression.name!
|
||||
: me.id;
|
||||
}
|
||||
|
||||
/**
 * Determine the DMN type of an input column.
 *
 * Resolution order: enumeration (when the column has enumerable values),
 * then the clause's own `typeRef`, then the input expression's `typeRef`.
 * Custom type names are resolved against the model's item definitions and
 * fall back to STRING when unknown.
 *
 * @throws DmnError when no type can be determined at all.
 */
function Type_of_DMN_InputClause(
  me: InputClause,
  decision: Decision,
): DMN_type_reference_ | never {
  if (_Is_DMN_InputClause_enumeration_(me))
    return DMN_type_reference_.ENUMERATION;
  else if ("typeRef" in me) {
    if (Is_DMN_type_reference_(me.typeRef!)) return me.typeRef;
    else {
      // Non-primitive: look the name up in the definitions root.
      const base_type = _Get_type_reference_from_DMN_Definitions(
        decision.$parent,
        me.typeRef! as string,
      );
      return base_type && Is_DMN_type_reference_(base_type)
        ? base_type
        : DMN_type_reference_.STRING;
    }
  } else {
    // No clause-level typeRef: fall back to the input expression's type.
    if ("inputExpression" in me && "typeRef" in me.inputExpression!) {
      if (Is_DMN_type_reference_(me.inputExpression.typeRef))
        return me.inputExpression.typeRef;
      else {
        const base_type = _Get_type_reference_from_DMN_Definitions(
          decision.$parent,
          me.inputExpression!.typeRef as string,
        );
        return base_type && Is_DMN_type_reference_(base_type)
          ? base_type
          : DMN_type_reference_.STRING;
      }
    }
  }
  // Reached only when the clause has neither a typeRef nor a typed
  // input expression.
  throw new DmnError(
    me,
    DmnError.Undefined_DMN_type,
    Name_of_DMN_InputClause(me),
  );
}
|
||||
|
||||
export {
|
||||
InputClause,
|
||||
_DMN_InputClause,
|
||||
Get_enumeration_from_DMN_InputClause,
|
||||
Name_of_DMN_InputClause,
|
||||
Type_of_DMN_InputClause,
|
||||
};
|
||||
@@ -0,0 +1,16 @@
|
||||
import { ModdleElement } from "./ModdleElement";
|
||||
import { InformationItem } from "./InformationItem";
|
||||
|
||||
const _DMN_InputData: "dmn:InputData" = "dmn:InputData";
|
||||
|
||||
interface InputData extends ModdleElement {
|
||||
$type: typeof _DMN_InputData;
|
||||
name: string;
|
||||
variable?: InformationItem;
|
||||
}
|
||||
|
||||
function Is_DMN_InputData(me: ModdleElement): me is InputData {
|
||||
return "$type" in me && me.$type === _DMN_InputData;
|
||||
}
|
||||
|
||||
export { InputData, _DMN_InputData, Is_DMN_InputData };
|
||||
@@ -0,0 +1,15 @@
|
||||
import { UnaryTests } from "./UnaryTests";
|
||||
import { DMN_type_reference_ } from "./enums";
|
||||
|
||||
const _DMN_ItemDefinition: "dmn:ItemDefinition" = "dmn:ItemDefinition";

// Declaration of a (possibly composite) custom data type. Note: unlike the
// other DMN shapes in this package, this is a plain interface, not a
// ModdleElement subtype.
interface ItemDefinition {
  $type: typeof _DMN_ItemDefinition;
  allowedValues?: UnaryTests; // constraint on admissible values
  itemComponent?: Array<ItemDefinition>; // components of a composite type
  label: string;
  name: string; // the name other elements reference via their typeRef
  typeRef?: DMN_type_reference_;
}

export { ItemDefinition, _DMN_ItemDefinition };
|
||||
@@ -0,0 +1,11 @@
|
||||
import { ModdleElement } from "./ModdleElement";
|
||||
import { AuthorityRequirement } from "./AuthorityRequirement";
|
||||
|
||||
const _DMN_KnowledgeSource: "dmn:KnowledgeSource" = "dmn:KnowledgeSource";

// Authority (document, regulation, person, ...) backing a decision or
// business knowledge model.
interface DMN_KnowledgeSource extends ModdleElement {
  $type: typeof _DMN_KnowledgeSource;
  authorityRequirement?: Array<AuthorityRequirement>;
}

export { DMN_KnowledgeSource, _DMN_KnowledgeSource };
|
||||
@@ -0,0 +1,12 @@
|
||||
import { ModdleElement } from "./ModdleElement";
|
||||
import { ModdleElementReference } from "./ModdleElementReference";
|
||||
|
||||
const _DMN_KnowledgeRequirement: "dmn:KnowledgeRequirement" =
  "dmn:KnowledgeRequirement";

// Dependency of a decision on a business knowledge model, referenced by href.
interface KnowledgeRequirement extends ModdleElement {
  $type: typeof _DMN_KnowledgeRequirement;
  requiredKnowledge: ModdleElementReference;
}

export { KnowledgeRequirement, _DMN_KnowledgeRequirement };
|
||||
@@ -0,0 +1,21 @@
|
||||
import { ModdleElement } from "./ModdleElement";
|
||||
import { DMN_type_reference_ } from "./enums";
|
||||
|
||||
const _DMN_LiteralExpression: "dmn:LiteralExpression" = "dmn:LiteralExpression";
|
||||
|
||||
interface LiteralExpression extends ModdleElement {
|
||||
$type: typeof _DMN_LiteralExpression;
|
||||
text: string;
|
||||
typeRef: DMN_type_reference_;
|
||||
}
|
||||
|
||||
function Is_DMN_LiteralExpression(me: ModdleElement): me is LiteralExpression {
|
||||
return (
|
||||
"$type" in me &&
|
||||
me.$type === _DMN_LiteralExpression &&
|
||||
"text" in me &&
|
||||
"typeRef" in me
|
||||
);
|
||||
}
|
||||
|
||||
export { LiteralExpression, _DMN_LiteralExpression, Is_DMN_LiteralExpression };
|
||||
@@ -0,0 +1,29 @@
|
||||
// type ModdleElement = {
|
||||
// id: string;
|
||||
// $type: string;
|
||||
// name?: string;
|
||||
// $parent?: ModdleElement;
|
||||
// };
|
||||
|
||||
class ModdleElement {
|
||||
readonly $type: string = "dmn:ModdleElement";
|
||||
|
||||
id: string = "";
|
||||
name?: string;
|
||||
$parent?: ModdleElement;
|
||||
|
||||
getName(): string {
|
||||
if (!this.name) return `${this.$type}${this.id}`;
|
||||
return this.name;
|
||||
}
|
||||
}
|
||||
|
||||
export { ModdleElement };
|
||||
|
||||
// export interface ModdleElement {
|
||||
// // $attrs: Object; // Unused...
|
||||
// id: string;
|
||||
// name?: string;
|
||||
// $parent: ModdleElement | undefined;
|
||||
// $type: string;
|
||||
// }
|
||||
@@ -0,0 +1,10 @@
|
||||
import { ModdleElement } from "./ModdleElement";
|
||||
|
||||
const _DMN_DMNElementReference: "dmn:DMNElementReference" =
  "dmn:DMNElementReference";
// Anchor-style reference to another DMN element by id.
interface ModdleElementReference extends ModdleElement {
  $type: typeof _DMN_DMNElementReference;
  href: string; // Example: "#temperature_id"
}

export { ModdleElementReference, _DMN_DMNElementReference };
|
||||
@@ -0,0 +1,119 @@
|
||||
import { ModdleElement } from "./ModdleElement";
|
||||
|
||||
import { DecisionTable } from "./DecisionTable";
|
||||
import { Decision } from "./Decision";
|
||||
import { DmnError } from "../error/DmnError";
|
||||
import {
|
||||
_Get_type_reference_from_DMN_Definitions,
|
||||
Definitions,
|
||||
} from "./Definitions";
|
||||
import { UnaryTests } from "./UnaryTests";
|
||||
import {
|
||||
_Extract_enumeration_values,
|
||||
DMN_type_reference_,
|
||||
Is_DMN_type_reference_,
|
||||
} from "./enums";
|
||||
|
||||
const _DMN_OutputClause: "dmn:OutputClause" = "dmn:OutputClause";

// One output column of a decision table.
interface OutputClause extends ModdleElement {
  $parent: DecisionTable; // Overriding...
  $type: typeof _DMN_OutputClause;
  label?: string; // human-readable column header
  outputValues?: UnaryTests; // admissible output values (enumeration source)
  typeRef?: DMN_type_reference_;
}
|
||||
|
||||
function Get_enumeration_from_DMN_OutputClause(
|
||||
me: OutputClause,
|
||||
): any[] | never {
|
||||
// if (Trace)
|
||||
// console.assert(_Is_DMN_OutputClause_enumeration_(me), "Get_enumeration_from_DMN_OutputClause >> '_Is_DMN_OutputClause_enumeration_(me)', untrue");
|
||||
let type_reference = me.typeRef;
|
||||
if (Is_DMN_type_reference_(type_reference) === false) {
|
||||
type_reference = _Get_type_reference_from_DMN_Definitions(
|
||||
me.$parent.$parent.$parent as Definitions,
|
||||
type_reference,
|
||||
);
|
||||
if (type_reference === undefined)
|
||||
throw new DmnError(
|
||||
me,
|
||||
DmnError.Undefined_DMN_type,
|
||||
Name_of_DMN_OutputClause(me),
|
||||
);
|
||||
}
|
||||
if (type_reference === DMN_type_reference_.BOOLEAN) return [false, true];
|
||||
else if (type_reference === DMN_type_reference_.STRING) {
|
||||
const extraction = _Extract_enumeration_values(me.outputValues!.text);
|
||||
if (extraction === null)
|
||||
throw new DmnError(
|
||||
me,
|
||||
DmnError.Undefined_DMN_type,
|
||||
Name_of_DMN_OutputClause(me),
|
||||
);
|
||||
return extraction;
|
||||
} else {
|
||||
const extraction = _Extract_enumeration_values(
|
||||
me.outputValues!.text,
|
||||
type_reference,
|
||||
);
|
||||
if (extraction === null)
|
||||
throw new DmnError(
|
||||
me,
|
||||
DmnError.Undefined_DMN_type,
|
||||
Name_of_DMN_OutputClause(me),
|
||||
);
|
||||
return extraction;
|
||||
}
|
||||
}
|
||||
|
||||
function _Is_DMN_OutputClause_enumeration_(me: OutputClause): boolean {
|
||||
// 'typeRef' may be missing even though 'outputValues' is present -> "enumeration" anyway...
|
||||
return /*'typeRef' in me &&*/ "outputValues" in me;
|
||||
}
|
||||
|
||||
function Name_of_DMN_OutputClause(me: OutputClause): string {
|
||||
return "label" in me
|
||||
? me.label!
|
||||
: "name" in me
|
||||
? me.name!
|
||||
: "outputLabel" in me.$parent
|
||||
? me.$parent.outputLabel!
|
||||
: "name" in me.$parent.$parent
|
||||
? me.$parent.$parent.name!
|
||||
: me.id;
|
||||
}
|
||||
|
||||
/**
 * Determine the DMN type of an output column.
 *
 * With `primitive_type` false (the default), a column with explicit output
 * values is reported as ENUMERATION; otherwise the clause's `typeRef` is
 * used, resolving custom names via the model's item definitions with a
 * STRING fallback. Unlike the InputClause variant, there is no expression
 * to fall back to.
 *
 * @param primitive_type when true, skip the enumeration shortcut and
 *        report the underlying primitive type.
 * @throws DmnError when the clause has no `typeRef` at all.
 */
function Type_of_DMN_OutputClause(
  me: OutputClause,
  decision: Decision,
  primitive_type = false,
): DMN_type_reference_ | never {
  if (!primitive_type && _Is_DMN_OutputClause_enumeration_(me))
    return DMN_type_reference_.ENUMERATION;
  else if ("typeRef" in me)
    if (Is_DMN_type_reference_(me.typeRef!)) return me.typeRef;
    else {
      // Non-primitive: look the name up in the definitions root.
      const base_type = _Get_type_reference_from_DMN_Definitions(
        decision.$parent,
        me.typeRef! as string,
      );
      return base_type && Is_DMN_type_reference_(base_type)
        ? base_type
        : DMN_type_reference_.STRING;
    }
  // Reached only when 'typeRef' is absent (and the enumeration shortcut
  // did not apply).
  throw new DmnError(
    me,
    DmnError.Undefined_DMN_type,
    Name_of_DMN_OutputClause(me),
  );
}
|
||||
|
||||
export {
|
||||
OutputClause,
|
||||
_DMN_OutputClause,
|
||||
Get_enumeration_from_DMN_OutputClause,
|
||||
_Is_DMN_OutputClause_enumeration_,
|
||||
Name_of_DMN_OutputClause,
|
||||
Type_of_DMN_OutputClause,
|
||||
};
|
||||
@@ -0,0 +1,10 @@
|
||||
import { ModdleElement } from "./ModdleElement";
|
||||
|
||||
const _DMN_RuleAnnotationClause: "dmn:RuleAnnotationClause" =
|
||||
"dmn:RuleAnnotationClause";
|
||||
|
||||
interface RuleAnnotationClause extends ModdleElement {
|
||||
$type: typeof _DMN_RuleAnnotationClause;
|
||||
}
|
||||
|
||||
export { RuleAnnotationClause, _DMN_RuleAnnotationClause };
|
||||
@@ -0,0 +1,14 @@
|
||||
import { ModdleElement } from "./ModdleElement";
|
||||
|
||||
const _DMN_UnaryTests: "dmn:UnaryTests" = "dmn:UnaryTests";
|
||||
|
||||
interface UnaryTests extends ModdleElement {
|
||||
$type: typeof _DMN_UnaryTests;
|
||||
text: string;
|
||||
}
|
||||
|
||||
function Is_DMN_UnaryTests(me: ModdleElement): me is UnaryTests {
|
||||
return "$type" in me && me.$type === _DMN_UnaryTests && "text" in me;
|
||||
}
|
||||
|
||||
export { UnaryTests, _DMN_UnaryTests, Is_DMN_UnaryTests };
|
||||
@@ -0,0 +1,124 @@
|
||||
import { ModdleElement } from "./ModdleElement";
|
||||
|
||||
// Sentinel string — presumably a tool-specific marker; verify at call sites.
const _DMiNer_ = "_DMiNer_";
// FEEL numeric range literal, e.g. "[1..10]" or "(0..5]" (bracket or paren
// at either end selects an inclusive/exclusive bound).
const FEEL_range = /^[[(\]]\d{1,}\.\.\d{1,}[[)\]]$/;
const Trace = true; // 'false' in production mode...
|
||||
|
||||
// NOTE(review): the three mode enums below are consumed outside this file;
// the meanings suggested by their member names should be confirmed there.
enum Drop_mode {
  FEEL = "FEEL",
  PREDICT = "PREDICT",
  TRAIN = "TRAIN",
}

enum State_mode {
  MENU = "MENU",
  RANDOMIZE = "RANDOMIZE",
}

enum Status_mode {
  FELT = "FELT",
  PREDICTED = "PREDICTED",
  RANDOMIZED = "RANDOMIZED",
}

// Decision-table hit policies defined by the DMN 1.3 specification.
enum Hit_policy { // DMN 1.3
  ANY = "ANY",
  COLLECT = "COLLECT",
  FIRST = "FIRST",
  OUTPUT_ORDER = "OUTPUT ORDER",
  PRIORITY = "PRIORITY",
  RULE_ORDER = "RULE ORDER",
  UNIQUE = "UNIQUE",
}
|
||||
|
||||
function Name_of_ModdleElement(me: ModdleElement): string {
|
||||
return "name" in me ? me.name! : me.$type + me.id;
|
||||
}
|
||||
|
||||
// https://docs.camunda.org/manual/7.18/user-guide/dmn-engine/data-types/#supported-data-types
|
||||
enum DMN_type_reference_ {
|
||||
BOOLEAN = "boolean",
|
||||
DATE = "date",
|
||||
DOUBLE = "double",
|
||||
ENUMERATION = "enum",
|
||||
INTEGER = "integer",
|
||||
LONG = "long",
|
||||
NUMBER = "number",
|
||||
STRING = "string",
|
||||
}
|
||||
|
||||
function Is_DMN_type_reference_(
|
||||
type_reference: string | undefined,
|
||||
): type_reference is DMN_type_reference_ {
|
||||
if (type_reference === undefined) return false;
|
||||
return Object.values(DMN_type_reference_).includes(
|
||||
type_reference.toLowerCase() as DMN_type_reference_,
|
||||
);
|
||||
}
|
||||
|
||||
// Runtime value corresponding to a DMN type reference.
type DMN_type_reference = boolean | Date | number | string;

// One data row: raw values and/or 0/1 vectors — presumably one-hot encoded
// columns for training; confirm against the TensorFlow consumer.
type TensorFlow_datum = Array<Array<0 | 1> | DMN_type_reference>;
type TensorFlow_data = Array<TensorFlow_datum>;

const _DMN_DecisionService: "dmn:DecisionService" = "dmn:DecisionService";

const _DMN_Invocation: "dmn:Invocation" = "dmn:Invocation"; // Alternative to decision table and literal expression
|
||||
|
||||
// Overload: no type reference — extract double-quoted string values.
function _Extract_enumeration_values(enumeration: string): string[] | null;
// Overload: numeric type reference — extract/expand numeric values.
function _Extract_enumeration_values(
  enumeration: string,
  type_reference: DMN_type_reference_,
): number[] | null;
/**
 * Extract the concrete values of a FEEL enumeration or range literal.
 *
 * @param enumeration FEEL text: either a range like "[1..10]", a
 *        comma-separated list of numbers, or quoted strings.
 * @param type_reference numeric DMN type driving int vs float parsing;
 *        omit for string enumerations.
 * @returns the extracted values, or null when nothing could be parsed.
 */
function _Extract_enumeration_values(
  enumeration: string,
  type_reference?: DMN_type_reference_,
): string[] | number[] | null {
  if (type_reference) {
    if (FEEL_range.test(enumeration)) {
      // Range literal: expand to every integer between the two bounds.
      const values = enumeration.match(/\d+/g)!.map((value) => parseInt(value));
      // '(' or ']' at the start marks an exclusive lower bound.
      const start = /^[\(\]]/.test(enumeration)
        ? Math.min(...values) + 1
        : Math.min(...values);
      // ')' or '[' at the end marks an exclusive upper bound.
      const end = /[\)\[]$/.test(enumeration)
        ? Math.max(...values) - 1
        : Math.max(...values);
      values.length = 0; // reuse the array for the expanded range
      for (let i = start; i <= end; i++) values.push(i);
      return values;
    } else if (
      type_reference === DMN_type_reference_.INTEGER ||
      type_reference === DMN_type_reference_.LONG
    )
      return enumeration.split(",").map((value) => parseInt(value));
    else if (
      type_reference === DMN_type_reference_.DOUBLE ||
      type_reference === DMN_type_reference_.NUMBER
    )
      return enumeration.split(",").map((value) => parseFloat(value));
    // Unsupported numeric type reference.
    return null;
  }
  // No type reference: collect double-quoted words ("a", "b c", ...),
  // stripping the surrounding quotes.
  const values = enumeration.match(/"\w+( \w+)*"/g);
  return values === null
    ? values
    : values.map((value) => value.replace(/^"/g, "").replace(/"$/g, ""));
}
|
||||
|
||||
export {
|
||||
_DMiNer_,
|
||||
FEEL_range,
|
||||
Trace,
|
||||
Drop_mode,
|
||||
State_mode,
|
||||
Status_mode,
|
||||
Hit_policy,
|
||||
Name_of_ModdleElement,
|
||||
DMN_type_reference_,
|
||||
Is_DMN_type_reference_,
|
||||
DMN_type_reference,
|
||||
TensorFlow_datum,
|
||||
TensorFlow_data,
|
||||
_Extract_enumeration_values,
|
||||
};
|
||||
@@ -0,0 +1,23 @@
|
||||
// Barrel re-exporting every DMN model module. Note: "BusinessKnoledgeModel"
// and "KnowledgSource" reproduce the actual (misspelled) file names.
export * from "./AuthorityRequirement";
export * from "./BusinessKnoledgeModel";
export * from "./Context";
export * from "./ContextEntry";
export * from "./Data";
export * from "./Decision";
export * from "./DecisionRule";
export * from "./DecisionTable";
export * from "./Definitions";
export * from "./enums";
export * from "./InformationItem";
export * from "./InformationRequirement";
export * from "./InputClause";
export * from "./InputData";
export * from "./ItemDefinition";
export * from "./KnowledgeRequirement";
export * from "./KnowledgSource";
export * from "./LiteralExpression";
export * from "./ModdleElement";
export * from "./ModdleElementReference";
export * from "./OutputClause";
export * from "./RuleAnnotationClause";
export * from "./UnaryTests";
|
||||
@@ -0,0 +1,18 @@
|
||||
import { Parser } from "./index";
|
||||
import { Duplex } from "node:stream";
|
||||
import * as csv from "fast-csv";
|
||||
|
||||
class CsvParser implements Parser {
|
||||
public static instance: CsvParser = new CsvParser();
|
||||
|
||||
public parse(options: any): Duplex {
|
||||
return csv.parse({
|
||||
headers: true,
|
||||
objectMode: true,
|
||||
trim: true,
|
||||
...options,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
export default CsvParser;
|
||||
@@ -0,0 +1,10 @@
|
||||
import { Duplex } from "node:stream";
|
||||
|
||||
// Contract shared by all dataset parsers (see ParserFactory).
interface Parser {
  /**
   * Parse the content of the stream into JSON objects
   * @param options parser-specific configuration (merged over defaults)
   * @returns a Duplex stream: raw data in, parsed objects out
   */
  parse(options: any): Duplex;
}

export default Parser;
|
||||
@@ -0,0 +1,16 @@
|
||||
import { Parser, CsvParser } from "./index";
|
||||
import { ParserType } from "./index";
|
||||
|
||||
class ParserFactory {
|
||||
/**
|
||||
* Get the parser corresponding to the dataset type
|
||||
* @param fileType The type of the dataset
|
||||
* @returns The parser corresponding to the dataset type
|
||||
*/
|
||||
static getParser(fileType: ParserType): Parser {
|
||||
if (fileType === ParserType.CSV) return CsvParser.instance;
|
||||
throw new Error("Unsupported file type");
|
||||
}
|
||||
}
|
||||
|
||||
export default ParserFactory;
|
||||
@@ -0,0 +1,5 @@
|
||||
// Supported dataset formats; each member's value is the file extension.
enum ParserType {
  CSV = ".csv",
}

export default ParserType;
|
||||
@@ -0,0 +1,6 @@
|
||||
// Barrel for the parser package: type enum, factory, contract, implementations.
export { default as ParserType } from "./ParserType";

export { default as ParserFactory } from "./ParserFactory";
export { default as Parser } from "./Parser";

export { default as CsvParser } from "./CsvParser";
|
||||
Vendored
+1
@@ -0,0 +1 @@
|
||||
declare module "src/types/JSONStream";
|
||||
Vendored
+1
@@ -0,0 +1 @@
|
||||
declare module "src/types/dmn-moddle";
|
||||
Reference in New Issue
Block a user