// Integration tests for the mongo-backed API runtime HTTP server.
import test from "node:test";
|
|
import assert from "node:assert/strict";
|
|
import { mkdtemp, mkdir, writeFile } from "node:fs/promises";
|
|
import os from "node:os";
|
|
import path from "node:path";
|
|
|
|
import { MongoMemoryServer } from "mongodb-memory-server";
|
|
import { MongoClient } from "mongodb";
|
|
|
|
import { createApiRuntime, type ApiRuntimeConfig } from "../src/runtime/server.ts";
|
|
|
|
async function readJson<T>(response: Response): Promise<T> {
|
|
const payload = (await response.json()) as T;
|
|
if (!response.ok) {
|
|
throw new Error(`request failed with status ${response.status}: ${JSON.stringify(payload)}`);
|
|
}
|
|
return payload;
|
|
}
|
|
|
|
async function startRuntimeServer(config: ApiRuntimeConfig) {
|
|
const runtime = await createApiRuntime(config);
|
|
const server = await new Promise<import("node:http").Server>((resolve) => {
|
|
const listening = runtime.app.listen(0, config.host, () => resolve(listening));
|
|
});
|
|
const address = server.address();
|
|
if (!address || typeof address === "string") {
|
|
throw new Error("failed to start runtime server");
|
|
}
|
|
|
|
return {
|
|
baseUrl: `http://${config.host}:${address.port}`,
|
|
close: async () => {
|
|
await new Promise<void>((resolve, reject) => {
|
|
server.close((error) => {
|
|
if (error) {
|
|
reject(error);
|
|
return;
|
|
}
|
|
resolve();
|
|
});
|
|
});
|
|
await runtime.client.close();
|
|
},
|
|
};
|
|
}
|
|
|
|
test("mongo-backed runtime reuses bootstrapped workspace and project across restarts", async (t) => {
|
|
const mongod = await MongoMemoryServer.create({
|
|
instance: {
|
|
ip: "127.0.0.1",
|
|
port: 27117,
|
|
},
|
|
});
|
|
t.after(async () => {
|
|
await mongod.stop();
|
|
});
|
|
|
|
const config: ApiRuntimeConfig = {
|
|
host: "127.0.0.1",
|
|
port: 0,
|
|
mongoUri: mongod.getUri(),
|
|
database: "emboflow-runtime-reuse",
|
|
corsOrigin: "http://127.0.0.1:3000",
|
|
};
|
|
|
|
const first = await startRuntimeServer(config);
|
|
|
|
const bootstrap = await readJson<{
|
|
workspace: { _id: string; name: string };
|
|
project: { _id: string; name: string };
|
|
}>(
|
|
await fetch(`${first.baseUrl}/api/dev/bootstrap`, {
|
|
method: "POST",
|
|
headers: { "content-type": "application/json" },
|
|
body: JSON.stringify({ userId: "runtime-user", projectName: "Runtime Demo" }),
|
|
}),
|
|
);
|
|
|
|
await first.close();
|
|
|
|
const second = await startRuntimeServer(config);
|
|
t.after(async () => {
|
|
await second.close();
|
|
});
|
|
|
|
const workspaces = await readJson<Array<{ _id: string }>>(
|
|
await fetch(`${second.baseUrl}/api/workspaces?ownerId=runtime-user`),
|
|
);
|
|
const projects = await readJson<Array<{ _id: string }>>(
|
|
await fetch(`${second.baseUrl}/api/projects?workspaceId=${bootstrap.workspace._id}`),
|
|
);
|
|
|
|
assert.equal(workspaces.length, 1);
|
|
assert.equal(projects.length, 1);
|
|
assert.equal(workspaces[0]?._id, bootstrap.workspace._id);
|
|
assert.equal(projects[0]?._id, bootstrap.project._id);
|
|
});
|
|
|
|
test("mongo-backed runtime provisions a default workflow template for newly created projects", async (t) => {
|
|
const mongod = await MongoMemoryServer.create({
|
|
instance: {
|
|
ip: "127.0.0.1",
|
|
port: 27217,
|
|
},
|
|
});
|
|
t.after(async () => {
|
|
await mongod.stop();
|
|
});
|
|
|
|
const server = await startRuntimeServer({
|
|
host: "127.0.0.1",
|
|
port: 0,
|
|
mongoUri: mongod.getUri(),
|
|
database: "emboflow-runtime-project-template",
|
|
corsOrigin: "http://127.0.0.1:3000",
|
|
});
|
|
t.after(async () => {
|
|
await server.close();
|
|
});
|
|
|
|
const bootstrap = await readJson<{
|
|
workspace: { _id: string };
|
|
}>(
|
|
await fetch(`${server.baseUrl}/api/dev/bootstrap`, {
|
|
method: "POST",
|
|
headers: { "content-type": "application/json" },
|
|
body: JSON.stringify({ userId: "project-template-user", projectName: "Seed Project" }),
|
|
}),
|
|
);
|
|
|
|
const project = await readJson<{ _id: string }>(
|
|
await fetch(`${server.baseUrl}/api/projects`, {
|
|
method: "POST",
|
|
headers: { "content-type": "application/json" },
|
|
body: JSON.stringify({
|
|
workspaceId: bootstrap.workspace._id,
|
|
name: "Second Project",
|
|
description: "Project created after bootstrap",
|
|
createdBy: "project-template-user",
|
|
}),
|
|
}),
|
|
);
|
|
|
|
const templates = await readJson<Array<{ slug: string; projectId?: string }>>(
|
|
await fetch(
|
|
`${server.baseUrl}/api/workflow-templates?workspaceId=${encodeURIComponent(bootstrap.workspace._id)}&projectId=${encodeURIComponent(project._id)}`,
|
|
),
|
|
);
|
|
|
|
assert.equal(
|
|
templates.some((template) => template.projectId === project._id),
|
|
true,
|
|
);
|
|
assert.equal(
|
|
templates.some((template) => template.slug === "delivery-normalization-template"),
|
|
true,
|
|
);
|
|
});
|
|
|
|
// End-to-end happy path: register a local folder asset, probe it, create a
// three-node workflow + version, queue a run, and verify the persisted
// run/task documents exposed over HTTP.
test("mongo-backed runtime persists probed assets and workflow runs through the HTTP API", async (t) => {
  // Build a folder layout the probe endpoint recognises as a delivery
  // package (meta/intrinsics/video_meta JSON plus a clip directory).
  const sourceDir = await mkdtemp(path.join(os.tmpdir(), "emboflow-runtime-"));
  await mkdir(path.join(sourceDir, "DJI_001"));
  await writeFile(path.join(sourceDir, "meta.json"), "{}");
  await writeFile(path.join(sourceDir, "intrinsics.json"), "{}");
  await writeFile(path.join(sourceDir, "video_meta.json"), "{}");
  await writeFile(path.join(sourceDir, "DJI_001", "DJI_001.mp4"), "");

  const mongod = await MongoMemoryServer.create({
    instance: {
      ip: "127.0.0.1",
      port: 27118,
    },
  });
  t.after(async () => {
    await mongod.stop();
  });

  const server = await startRuntimeServer({
    host: "127.0.0.1",
    port: 0,
    mongoUri: mongod.getUri(),
    database: "emboflow-runtime-flow",
    corsOrigin: "http://127.0.0.1:3000",
  });
  t.after(async () => {
    await server.close();
  });

  const bootstrap = await readJson<{
    workspace: { _id: string };
    project: { _id: string };
  }>(
    await fetch(`${server.baseUrl}/api/dev/bootstrap`, {
      method: "POST",
      headers: { "content-type": "application/json" },
      body: JSON.stringify({ userId: "flow-user", projectName: "Flow Project" }),
    }),
  );

  // Register the temp folder as an asset in the bootstrapped project.
  const asset = await readJson<{ _id: string }>(
    await fetch(`${server.baseUrl}/api/assets/register`, {
      method: "POST",
      headers: { "content-type": "application/json" },
      body: JSON.stringify({
        workspaceId: bootstrap.workspace._id,
        projectId: bootstrap.project._id,
        sourcePath: sourceDir,
      }),
    }),
  );

  // Probe scans the registered folder and reports format candidates plus
  // suggested follow-up nodes; the asset list should then reflect "probed".
  const probe = await readJson<{ detectedFormatCandidates: string[]; recommendedNextNodes: string[] }>(
    await fetch(`${server.baseUrl}/api/assets/${asset._id}/probe`, { method: "POST" }),
  );
  const assets = await readJson<Array<{ _id: string; status: string; detectedFormats: string[] }>>(
    await fetch(
      `${server.baseUrl}/api/assets?projectId=${encodeURIComponent(bootstrap.project._id)}`,
    ),
  );

  const workflow = await readJson<{ _id: string }>(
    await fetch(`${server.baseUrl}/api/workflows`, {
      method: "POST",
      headers: { "content-type": "application/json" },
      body: JSON.stringify({
        workspaceId: bootstrap.workspace._id,
        projectId: bootstrap.project._id,
        name: "Delivery Flow",
      }),
    }),
  );

  // First version: linear source -> inspect -> export graph.
  const version = await readJson<{ _id: string; versionNumber: number }>(
    await fetch(`${server.baseUrl}/api/workflows/${workflow._id}/versions`, {
      method: "POST",
      headers: { "content-type": "application/json" },
      body: JSON.stringify({
        visualGraph: { viewport: { x: 0, y: 0, zoom: 1 } },
        logicGraph: {
          nodes: [
            { id: "source-asset", type: "source" },
            { id: "validate-structure", type: "inspect" },
            { id: "export-delivery-package", type: "export" },
          ],
          edges: [
            { from: "source-asset", to: "validate-structure" },
            { from: "validate-structure", to: "export-delivery-package" },
          ],
        },
        runtimeGraph: { selectedPreset: "delivery-normalization" },
        pluginRefs: ["builtin:delivery-nodes"],
      }),
    }),
  );

  // Queue a run bound to the probed asset, then read back its tasks.
  const run = await readJson<{ _id: string; status: string }>(
    await fetch(`${server.baseUrl}/api/runs`, {
      method: "POST",
      headers: { "content-type": "application/json" },
      body: JSON.stringify({
        workflowDefinitionId: workflow._id,
        workflowVersionId: version._id,
        assetIds: [asset._id],
      }),
    }),
  );
  const tasks = await readJson<
    Array<{
      nodeId: string;
      status: string;
      executorType: string;
      upstreamNodeIds: string[];
      assetIds: string[];
    }>
  >(
    await fetch(`${server.baseUrl}/api/runs/${run._id}/tasks`),
  );

  // Probe recognised the delivery-package layout and the asset was updated.
  assert.deepEqual(probe.detectedFormatCandidates.includes("delivery_package"), true);
  assert.deepEqual(probe.recommendedNextNodes.includes("validate_structure"), true);
  assert.equal(assets[0]?._id, asset._id);
  assert.equal(assets[0]?.status, "probed");
  assert.deepEqual(assets[0]?.detectedFormats.includes("delivery_package"), true);
  assert.equal(version.versionNumber, 1);
  assert.equal(run.status, "queued");
  assert.deepEqual((run as { assetIds?: string[] }).assetIds, [asset._id]);
  // One task per graph node; the root task is queued immediately while
  // downstream tasks stay pending until their upstream node completes.
  assert.equal(tasks.length, 3);
  assert.equal(tasks[0]?.nodeId, "source-asset");
  assert.equal(tasks[0]?.executorType, "docker");
  assert.deepEqual(tasks[0]?.assetIds, [asset._id]);
  assert.deepEqual(tasks[0]?.upstreamNodeIds, []);
  assert.equal(tasks[0]?.status, "queued");
  assert.deepEqual(tasks[1]?.assetIds, [asset._id]);
  assert.deepEqual(tasks[1]?.upstreamNodeIds, ["source-asset"]);
  assert.equal(tasks[1]?.status, "pending");
});
|
|
|
|
// Verifies that per-node executor settings (docker image, inline python code
// hook, http target) declared in a workflow version's runtimeGraph are
// snapshotted onto the run document and copied onto each task.
test("mongo-backed runtime snapshots per-node executor config into runs and tasks", async (t) => {
  // Delivery-package folder layout recognised by the probe endpoint.
  const sourceDir = await mkdtemp(path.join(os.tmpdir(), "emboflow-runtime-snapshot-"));
  await mkdir(path.join(sourceDir, "DJI_001"));
  await writeFile(path.join(sourceDir, "meta.json"), "{}");
  await writeFile(path.join(sourceDir, "intrinsics.json"), "{}");
  await writeFile(path.join(sourceDir, "video_meta.json"), "{}");
  await writeFile(path.join(sourceDir, "DJI_001", "DJI_001.mp4"), "");

  const mongod = await MongoMemoryServer.create({
    instance: {
      ip: "127.0.0.1",
      port: 27124,
    },
  });
  t.after(async () => {
    await mongod.stop();
  });

  const server = await startRuntimeServer({
    host: "127.0.0.1",
    port: 0,
    mongoUri: mongod.getUri(),
    database: "emboflow-runtime-snapshots",
    corsOrigin: "http://127.0.0.1:3000",
  });
  t.after(async () => {
    await server.close();
  });

  const bootstrap = await readJson<{
    workspace: { _id: string };
    project: { _id: string };
  }>(
    await fetch(`${server.baseUrl}/api/dev/bootstrap`, {
      method: "POST",
      headers: { "content-type": "application/json" },
      body: JSON.stringify({ userId: "snapshot-user", projectName: "Snapshot Project" }),
    }),
  );

  const asset = await readJson<{ _id: string }>(
    await fetch(`${server.baseUrl}/api/assets/register`, {
      method: "POST",
      headers: { "content-type": "application/json" },
      body: JSON.stringify({
        workspaceId: bootstrap.workspace._id,
        projectId: bootstrap.project._id,
        sourcePath: sourceDir,
      }),
    }),
  );
  // Probe result itself is not asserted here; the call just moves the asset
  // into a runnable state.
  await readJson(await fetch(`${server.baseUrl}/api/assets/${asset._id}/probe`, { method: "POST" }));

  const workflow = await readJson<{ _id: string }>(
    await fetch(`${server.baseUrl}/api/workflows`, {
      method: "POST",
      headers: { "content-type": "application/json" },
      body: JSON.stringify({
        workspaceId: bootstrap.workspace._id,
        projectId: bootstrap.project._id,
        name: "Snapshot Flow",
      }),
    }),
  );

  // Version with a distinct executor per node: docker, inline python hook,
  // and an http executor respectively.
  const version = await readJson<{ _id: string }>(
    await fetch(`${server.baseUrl}/api/workflows/${workflow._id}/versions`, {
      method: "POST",
      headers: { "content-type": "application/json" },
      body: JSON.stringify({
        visualGraph: { viewport: { x: 0, y: 0, zoom: 1 } },
        logicGraph: {
          nodes: [
            { id: "source-asset", type: "source" },
            { id: "validate-structure", type: "inspect" },
            { id: "export-delivery-package", type: "export" },
          ],
          edges: [
            { from: "source-asset", to: "validate-structure" },
            { from: "validate-structure", to: "export-delivery-package" },
          ],
        },
        runtimeGraph: {
          selectedPreset: "delivery-normalization",
          nodeConfigs: {
            "source-asset": {
              executorType: "docker",
              executorConfig: {
                image: "python:3.11",
                command: ["python", "-V"],
              },
            },
            "validate-structure": {
              executorType: "python",
              codeHookSpec: {
                language: "python",
                entrypoint: "process",
                source: [
                  "def process(task, context):",
                  " return {'nodeId': task['nodeId'], 'hooked': True}",
                ].join("\n"),
              },
            },
            "export-delivery-package": {
              executorType: "http",
              executorConfig: {
                url: "http://127.0.0.1:3010/mock-executor",
                method: "POST",
              },
            },
          },
        },
        pluginRefs: ["builtin:delivery-nodes"],
      }),
    }),
  );

  const createdRun = await readJson<{ _id: string }>(
    await fetch(`${server.baseUrl}/api/runs`, {
      method: "POST",
      headers: { "content-type": "application/json" },
      body: JSON.stringify({
        workflowDefinitionId: workflow._id,
        workflowVersionId: version._id,
        assetIds: [asset._id],
      }),
    }),
  );

  // Re-fetch the run to inspect the persisted runtimeSnapshot copy of the
  // version's runtimeGraph.
  const run = await readJson<{
    _id: string;
    runtimeSnapshot?: {
      selectedPreset?: string;
      nodeConfigs?: Record<
        string,
        {
          executorType?: string;
          executorConfig?: { image?: string; url?: string };
          codeHookSpec?: { source?: string };
        }
      >;
    };
  }>(await fetch(`${server.baseUrl}/api/runs/${createdRun._id}`));
  const tasks = await readJson<
    Array<{
      nodeId: string;
      executorType: string;
      executorConfig?: { image?: string; url?: string };
      codeHookSpec?: { source?: string };
    }>
  >(await fetch(`${server.baseUrl}/api/runs/${createdRun._id}/tasks`));

  const sourceTask = tasks.find((task) => task.nodeId === "source-asset");
  const validateTask = tasks.find((task) => task.nodeId === "validate-structure");
  const exportTask = tasks.find((task) => task.nodeId === "export-delivery-package");

  // Run-level snapshot mirrors the version's per-node config.
  assert.equal(run.runtimeSnapshot?.selectedPreset, "delivery-normalization");
  assert.equal(run.runtimeSnapshot?.nodeConfigs?.["source-asset"]?.executorType, "docker");
  assert.equal(
    run.runtimeSnapshot?.nodeConfigs?.["source-asset"]?.executorConfig?.image,
    "python:3.11",
  );
  assert.match(
    run.runtimeSnapshot?.nodeConfigs?.["validate-structure"]?.codeHookSpec?.source ?? "",
    /def process/,
  );
  // Each task carries its own node's executor settings.
  assert.equal(sourceTask?.executorType, "docker");
  assert.equal(sourceTask?.executorConfig?.image, "python:3.11");
  assert.equal(validateTask?.executorType, "python");
  assert.match(validateTask?.codeHookSpec?.source ?? "", /hooked/);
  assert.equal(exportTask?.executorType, "http");
  assert.equal(exportTask?.executorConfig?.url, "http://127.0.0.1:3010/mock-executor");
});
|
|
|
|
test("mongo-backed runtime rejects workflow runs without bound assets", async (t) => {
|
|
const mongod = await MongoMemoryServer.create({
|
|
instance: {
|
|
ip: "127.0.0.1",
|
|
port: 27119,
|
|
},
|
|
});
|
|
t.after(async () => {
|
|
await mongod.stop();
|
|
});
|
|
|
|
const server = await startRuntimeServer({
|
|
host: "127.0.0.1",
|
|
port: 0,
|
|
mongoUri: mongod.getUri(),
|
|
database: "emboflow-runtime-run-inputs",
|
|
corsOrigin: "http://127.0.0.1:3000",
|
|
});
|
|
t.after(async () => {
|
|
await server.close();
|
|
});
|
|
|
|
const bootstrap = await readJson<{
|
|
workspace: { _id: string };
|
|
project: { _id: string };
|
|
}>(
|
|
await fetch(`${server.baseUrl}/api/dev/bootstrap`, {
|
|
method: "POST",
|
|
headers: { "content-type": "application/json" },
|
|
body: JSON.stringify({ userId: "run-input-user", projectName: "Run Inputs" }),
|
|
}),
|
|
);
|
|
|
|
const workflow = await readJson<{ _id: string }>(
|
|
await fetch(`${server.baseUrl}/api/workflows`, {
|
|
method: "POST",
|
|
headers: { "content-type": "application/json" },
|
|
body: JSON.stringify({
|
|
workspaceId: bootstrap.workspace._id,
|
|
projectId: bootstrap.project._id,
|
|
name: "Run Input Workflow",
|
|
}),
|
|
}),
|
|
);
|
|
|
|
const version = await readJson<{ _id: string }>(
|
|
await fetch(`${server.baseUrl}/api/workflows/${workflow._id}/versions`, {
|
|
method: "POST",
|
|
headers: { "content-type": "application/json" },
|
|
body: JSON.stringify({
|
|
visualGraph: { viewport: { x: 0, y: 0, zoom: 1 } },
|
|
logicGraph: {
|
|
nodes: [{ id: "source-asset", type: "source" }],
|
|
edges: [],
|
|
},
|
|
runtimeGraph: { selectedPreset: "delivery-normalization" },
|
|
pluginRefs: ["builtin:delivery-nodes"],
|
|
}),
|
|
}),
|
|
);
|
|
|
|
const response = await fetch(`${server.baseUrl}/api/runs`, {
|
|
method: "POST",
|
|
headers: { "content-type": "application/json" },
|
|
body: JSON.stringify({
|
|
workflowDefinitionId: workflow._id,
|
|
workflowVersionId: version._id,
|
|
}),
|
|
});
|
|
|
|
assert.equal(response.status, 400);
|
|
assert.match(await response.text(), /assetIds/i);
|
|
});
|
|
|
|
// Verifies GET /api/runs?projectId=… returns queued runs enriched with the
// owning workflow's name and bound asset ids.
test("mongo-backed runtime lists recent runs for a project", async (t) => {
  // NOTE(review): unlike the other delivery-package fixtures, this one has
  // no DJI_001.mp4 file — presumably probing still succeeds on the JSON
  // files alone; confirm against the probe implementation.
  const sourceDir = await mkdtemp(path.join(os.tmpdir(), "emboflow-runtime-runs-"));
  await mkdir(path.join(sourceDir, "DJI_001"));
  await writeFile(path.join(sourceDir, "meta.json"), "{}");
  await writeFile(path.join(sourceDir, "intrinsics.json"), "{}");
  await writeFile(path.join(sourceDir, "video_meta.json"), "{}");

  const mongod = await MongoMemoryServer.create({
    instance: {
      ip: "127.0.0.1",
      port: 27120,
    },
  });
  t.after(async () => {
    await mongod.stop();
  });

  const server = await startRuntimeServer({
    host: "127.0.0.1",
    port: 0,
    mongoUri: mongod.getUri(),
    database: "emboflow-runtime-run-list",
    corsOrigin: "http://127.0.0.1:3000",
  });
  t.after(async () => {
    await server.close();
  });

  const bootstrap = await readJson<{
    workspace: { _id: string };
    project: { _id: string };
  }>(
    await fetch(`${server.baseUrl}/api/dev/bootstrap`, {
      method: "POST",
      headers: { "content-type": "application/json" },
      body: JSON.stringify({ userId: "run-list-user", projectName: "Run List Project" }),
    }),
  );

  const asset = await readJson<{ _id: string }>(
    await fetch(`${server.baseUrl}/api/assets/register`, {
      method: "POST",
      headers: { "content-type": "application/json" },
      body: JSON.stringify({
        workspaceId: bootstrap.workspace._id,
        projectId: bootstrap.project._id,
        sourcePath: sourceDir,
      }),
    }),
  );
  // Probe to make the asset runnable; result not asserted here.
  await readJson(await fetch(`${server.baseUrl}/api/assets/${asset._id}/probe`, { method: "POST" }));

  const workflow = await readJson<{ _id: string; name: string }>(
    await fetch(`${server.baseUrl}/api/workflows`, {
      method: "POST",
      headers: { "content-type": "application/json" },
      body: JSON.stringify({
        workspaceId: bootstrap.workspace._id,
        projectId: bootstrap.project._id,
        name: "Recent Runs Flow",
      }),
    }),
  );

  const version = await readJson<{ _id: string }>(
    await fetch(`${server.baseUrl}/api/workflows/${workflow._id}/versions`, {
      method: "POST",
      headers: { "content-type": "application/json" },
      body: JSON.stringify({
        visualGraph: { viewport: { x: 0, y: 0, zoom: 1 } },
        logicGraph: {
          nodes: [{ id: "source-asset", type: "source" }],
          edges: [],
        },
        runtimeGraph: { selectedPreset: "delivery-normalization" },
        pluginRefs: ["builtin:delivery-nodes"],
      }),
    }),
  );

  // Queue one run; the listing endpoint below should surface exactly it.
  await readJson(
    await fetch(`${server.baseUrl}/api/runs`, {
      method: "POST",
      headers: { "content-type": "application/json" },
      body: JSON.stringify({
        workflowDefinitionId: workflow._id,
        workflowVersionId: version._id,
        assetIds: [asset._id],
      }),
    }),
  );

  const runs = await readJson<
    Array<{
      _id: string;
      projectId: string;
      workflowDefinitionId: string;
      workflowName: string;
      assetIds: string[];
      status: string;
    }>
  >(
    await fetch(
      `${server.baseUrl}/api/runs?projectId=${encodeURIComponent(bootstrap.project._id)}`,
    ),
  );

  assert.equal(runs.length, 1);
  assert.equal(runs[0]?.projectId, bootstrap.project._id);
  assert.equal(runs[0]?.workflowDefinitionId, workflow._id);
  // workflowName is denormalised from the workflow document onto the run.
  assert.equal(runs[0]?.workflowName, "Recent Runs Flow");
  assert.deepEqual(runs[0]?.assetIds, [asset._id]);
  assert.equal(runs[0]?.status, "queued");
});
|
|
|
|
// Simulates a completed run by writing execution results (timings, logs,
// summaries) directly into Mongo, then checks the HTTP API surfaces them
// verbatim on run detail and task listing endpoints.
test("mongo-backed runtime exposes persisted task execution summaries and logs", async (t) => {
  const mongod = await MongoMemoryServer.create({
    instance: {
      ip: "127.0.0.1",
      port: 27121,
    },
  });
  t.after(async () => {
    await mongod.stop();
  });

  // Database name is shared between the runtime server and the direct
  // MongoClient used for document surgery below.
  const database = "emboflow-runtime-task-summaries";
  const server = await startRuntimeServer({
    host: "127.0.0.1",
    port: 0,
    mongoUri: mongod.getUri(),
    database,
    corsOrigin: "http://127.0.0.1:3000",
  });
  t.after(async () => {
    await server.close();
  });

  const bootstrap = await readJson<{
    workspace: { _id: string };
    project: { _id: string };
  }>(
    await fetch(`${server.baseUrl}/api/dev/bootstrap`, {
      method: "POST",
      headers: { "content-type": "application/json" },
      body: JSON.stringify({ userId: "task-summary-user", projectName: "Task Summary Project" }),
    }),
  );

  const workflow = await readJson<{ _id: string }>(
    await fetch(`${server.baseUrl}/api/workflows`, {
      method: "POST",
      headers: { "content-type": "application/json" },
      body: JSON.stringify({
        workspaceId: bootstrap.workspace._id,
        projectId: bootstrap.project._id,
        name: "Task Summary Flow",
      }),
    }),
  );

  const version = await readJson<{ _id: string }>(
    await fetch(`${server.baseUrl}/api/workflows/${workflow._id}/versions`, {
      method: "POST",
      headers: { "content-type": "application/json" },
      body: JSON.stringify({
        visualGraph: { viewport: { x: 0, y: 0, zoom: 1 } },
        logicGraph: {
          nodes: [{ id: "source-asset", type: "source" }],
          edges: [],
        },
        runtimeGraph: { selectedPreset: "delivery-normalization" },
        pluginRefs: ["builtin:delivery-nodes"],
      }),
    }),
  );

  // Insert an already-probed asset document directly (bypassing the
  // register/probe endpoints) so the run can be created without a real
  // source folder on disk.
  const assetId = "asset-task-summary";
  const client = new MongoClient(mongod.getUri());
  await client.connect();
  t.after(async () => {
    await client.close();
  });
  const db = client.db(database);
  await db.collection("assets").insertOne({
    _id: assetId,
    workspaceId: bootstrap.workspace._id,
    projectId: bootstrap.project._id,
    type: "folder",
    sourceType: "local_path",
    displayName: "Summary Asset",
    status: "probed",
    storageRef: {},
    topLevelPaths: ["DJI_001"],
    detectedFormats: ["delivery_package"],
    fileCount: 1,
    summary: {},
    createdBy: "task-summary-user",
    createdAt: new Date().toISOString(),
    updatedAt: new Date().toISOString(),
  });

  const run = await readJson<{ _id: string }>(
    await fetch(`${server.baseUrl}/api/runs`, {
      method: "POST",
      headers: { "content-type": "application/json" },
      body: JSON.stringify({
        workflowDefinitionId: workflow._id,
        workflowVersionId: version._id,
        assetIds: [assetId],
      }),
    }),
  );

  // Grab the run's first task straight from the collection so we can mark
  // it finished with synthetic execution results.
  const [task] = await db
    .collection("run_tasks")
    .find({ workflowRunId: run._id })
    .sort({ createdAt: 1 })
    .toArray();

  // Pretend a worker executed the task: timings, log/stdout/stderr lines,
  // a structured summary, and a result preview.
  await db.collection("run_tasks").updateOne(
    { _id: task?._id },
    {
      $set: {
        status: "success",
        startedAt: "2026-03-27T10:00:00.000Z",
        finishedAt: "2026-03-27T10:00:02.500Z",
        durationMs: 2500,
        logLines: ["Task claimed by worker", "Executor completed successfully"],
        stdoutLines: ["python executor processed source-asset"],
        stderrLines: [],
        summary: {
          outcome: "success",
          executorType: "python",
          assetCount: 1,
          artifactIds: ["artifact-1"],
          stdoutLineCount: 1,
          stderrLineCount: 0,
        },
        lastResultPreview: {
          taskId: task?._id,
          executor: "python",
        },
      },
    },
  );
  // Mark the run itself finished with matching aggregate counters.
  await db.collection("workflow_runs").updateOne(
    { _id: run._id },
    {
      $set: {
        status: "success",
        startedAt: "2026-03-27T10:00:00.000Z",
        finishedAt: "2026-03-27T10:00:02.500Z",
        durationMs: 2500,
        summary: {
          totalTaskCount: 1,
          completedTaskCount: 1,
          artifactCount: 1,
          stdoutLineCount: 1,
          stderrLineCount: 0,
          failedTaskIds: [],
          taskCounts: {
            pending: 0,
            queued: 0,
            running: 0,
            success: 1,
            failed: 0,
          },
        },
      },
    },
  );

  // Read everything back over HTTP and compare with what was written.
  const runDetail = await readJson<{
    _id: string;
    status: string;
    startedAt?: string;
    finishedAt?: string;
    durationMs?: number;
    summary?: {
      totalTaskCount?: number;
      artifactCount?: number;
      stdoutLineCount?: number;
      stderrLineCount?: number;
      taskCounts?: {
        success?: number;
      };
    };
  }>(await fetch(`${server.baseUrl}/api/runs/${run._id}`));

  const tasks = await readJson<
    Array<{
      _id: string;
      status: string;
      startedAt?: string;
      finishedAt?: string;
      durationMs?: number;
      logLines?: string[];
      stdoutLines?: string[];
      stderrLines?: string[];
      summary?: {
        outcome?: string;
        executorType?: string;
        assetCount?: number;
        artifactIds?: string[];
        stdoutLineCount?: number;
        stderrLineCount?: number;
      };
      lastResultPreview?: {
        taskId?: string;
        executor?: string;
      };
    }>
  >(await fetch(`${server.baseUrl}/api/runs/${run._id}/tasks`));

  assert.equal(runDetail.status, "success");
  assert.equal(runDetail.durationMs, 2500);
  assert.equal(runDetail.summary?.totalTaskCount, 1);
  assert.equal(runDetail.summary?.artifactCount, 1);
  assert.equal(runDetail.summary?.stdoutLineCount, 1);
  assert.equal(runDetail.summary?.stderrLineCount, 0);
  assert.equal(runDetail.summary?.taskCounts?.success, 1);
  assert.equal(tasks.length, 1);
  assert.equal(tasks[0]?._id, task?._id);
  assert.equal(tasks[0]?.status, "success");
  assert.equal(tasks[0]?.durationMs, 2500);
  assert.deepEqual(tasks[0]?.logLines, [
    "Task claimed by worker",
    "Executor completed successfully",
  ]);
  assert.deepEqual(tasks[0]?.stdoutLines, ["python executor processed source-asset"]);
  assert.deepEqual(tasks[0]?.stderrLines, []);
  assert.equal(tasks[0]?.summary?.outcome, "success");
  assert.equal(tasks[0]?.summary?.executorType, "python");
  assert.equal(tasks[0]?.summary?.assetCount, 1);
  assert.deepEqual(tasks[0]?.summary?.artifactIds, ["artifact-1"]);
  assert.equal(tasks[0]?.summary?.stdoutLineCount, 1);
  assert.equal(tasks[0]?.summary?.stderrLineCount, 0);
  assert.deepEqual(tasks[0]?.lastResultPreview, {
    taskId: task?._id,
    executor: "python",
  });
});
|
|
|
|
// Broad coverage of the dataset/template surface: storage connections
// (default + cloud), dataset creation with an implicit first version,
// workflow-template CRUD, and instantiating a workflow from a template.
test("mongo-backed runtime supports storage connections, datasets, workflow templates, and workflow creation from templates", async (t) => {
  // Delivery-package folder layout recognised by the probe endpoint.
  const sourceDir = await mkdtemp(path.join(os.tmpdir(), "emboflow-runtime-datasets-"));
  await mkdir(path.join(sourceDir, "DJI_001"));
  await writeFile(path.join(sourceDir, "meta.json"), "{}");
  await writeFile(path.join(sourceDir, "intrinsics.json"), "{}");
  await writeFile(path.join(sourceDir, "video_meta.json"), "{}");
  await writeFile(path.join(sourceDir, "DJI_001", "DJI_001.mp4"), "");

  const mongod = await MongoMemoryServer.create({
    instance: {
      ip: "127.0.0.1",
      port: 27125,
    },
  });
  t.after(async () => {
    await mongod.stop();
  });

  const server = await startRuntimeServer({
    host: "127.0.0.1",
    port: 0,
    mongoUri: mongod.getUri(),
    database: "emboflow-runtime-datasets-templates",
    corsOrigin: "http://127.0.0.1:3000",
  });
  t.after(async () => {
    await server.close();
  });

  const bootstrap = await readJson<{
    workspace: { _id: string };
    project: { _id: string };
  }>(
    await fetch(`${server.baseUrl}/api/dev/bootstrap`, {
      method: "POST",
      headers: { "content-type": "application/json" },
      body: JSON.stringify({ userId: "dataset-user", projectName: "Dataset Project" }),
    }),
  );

  // Workspace should come with pre-provisioned connection(s); asserted
  // below to start with a "local" provider.
  const connections = await readJson<Array<{ _id: string; provider: string; name: string }>>(
    await fetch(
      `${server.baseUrl}/api/storage-connections?workspaceId=${encodeURIComponent(bootstrap.workspace._id)}`,
    ),
  );

  // Add a cloud (OSS) connection the dataset will be stored on.
  const cloudConnection = await readJson<{ _id: string; provider: string; bucket: string }>(
    await fetch(`${server.baseUrl}/api/storage-connections`, {
      method: "POST",
      headers: { "content-type": "application/json" },
      body: JSON.stringify({
        workspaceId: bootstrap.workspace._id,
        name: "Project OSS",
        provider: "oss",
        bucket: "emboflow-datasets",
        endpoint: "oss-cn-hangzhou.aliyuncs.com",
        basePath: "datasets/project-a",
      }),
    }),
  );

  const asset = await readJson<{ _id: string; displayName: string }>(
    await fetch(`${server.baseUrl}/api/assets/register`, {
      method: "POST",
      headers: { "content-type": "application/json" },
      body: JSON.stringify({
        workspaceId: bootstrap.workspace._id,
        projectId: bootstrap.project._id,
        sourcePath: sourceDir,
      }),
    }),
  );
  // Probe to make the asset usable as a dataset source; result not asserted.
  await readJson(await fetch(`${server.baseUrl}/api/assets/${asset._id}/probe`, { method: "POST" }));

  // Create a dataset from the probed asset on the cloud connection; the
  // first version is expected to be created implicitly (asserted below).
  const dataset = await readJson<{
    _id: string;
    latestVersionNumber: number;
    storageConnectionId: string;
  }>(
    await fetch(`${server.baseUrl}/api/datasets`, {
      method: "POST",
      headers: { "content-type": "application/json" },
      body: JSON.stringify({
        workspaceId: bootstrap.workspace._id,
        projectId: bootstrap.project._id,
        name: "Delivery Dataset",
        description: "Dataset derived from the probed delivery asset",
        sourceAssetIds: [asset._id],
        storageConnectionId: cloudConnection._id,
        storagePath: "delivery/dataset-v1",
        createdBy: "dataset-user",
      }),
    }),
  );

  const datasets = await readJson<Array<{ _id: string; latestVersionNumber: number }>>(
    await fetch(`${server.baseUrl}/api/datasets?projectId=${encodeURIComponent(bootstrap.project._id)}`),
  );
  const datasetVersions = await readJson<Array<{ datasetId: string; versionNumber: number }>>(
    await fetch(`${server.baseUrl}/api/datasets/${dataset._id}/versions`),
  );

  // Author a reusable template carrying visual, logic, and runtime graphs.
  const template = await readJson<{ _id: string; name: string }>(
    await fetch(`${server.baseUrl}/api/workflow-templates`, {
      method: "POST",
      headers: { "content-type": "application/json" },
      body: JSON.stringify({
        workspaceId: bootstrap.workspace._id,
        projectId: bootstrap.project._id,
        name: "Delivery Review Template",
        description: "Template with inspect and export nodes",
        visualGraph: {
          viewport: { x: 0, y: 0, zoom: 1 },
          nodePositions: {
            "source-asset": { x: 120, y: 120 },
            "validate-structure": { x: 460, y: 220 },
            "export-delivery-package": { x: 820, y: 340 },
          },
        },
        logicGraph: {
          nodes: [
            { id: "source-asset", type: "source" },
            { id: "validate-structure", type: "inspect" },
            { id: "export-delivery-package", type: "export" },
          ],
          edges: [
            { from: "source-asset", to: "validate-structure" },
            { from: "validate-structure", to: "export-delivery-package" },
          ],
        },
        runtimeGraph: {
          selectedPreset: "delivery-template",
          nodeConfigs: {
            "validate-structure": {
              executorType: "python",
            },
          },
        },
        pluginRefs: ["builtin:delivery-nodes"],
        createdBy: "dataset-user",
      }),
    }),
  );

  const templates = await readJson<Array<{ _id: string; name: string }>>(
    await fetch(
      `${server.baseUrl}/api/workflow-templates?workspaceId=${encodeURIComponent(bootstrap.workspace._id)}&projectId=${encodeURIComponent(bootstrap.project._id)}`,
    ),
  );

  // Instantiate a workflow from the template; the template's graphs should
  // become the workflow's version 1.
  const workflowFromTemplate = await readJson<{ _id: string; name: string; latestVersionNumber: number }>(
    await fetch(`${server.baseUrl}/api/workflow-templates/${template._id}/workflows`, {
      method: "POST",
      headers: { "content-type": "application/json" },
      body: JSON.stringify({
        workspaceId: bootstrap.workspace._id,
        projectId: bootstrap.project._id,
        name: "Delivery Review Flow",
        createdBy: "dataset-user",
      }),
    }),
  );

  const workflowVersions = await readJson<Array<{ versionNumber: number; runtimeGraph?: { selectedPreset?: string } }>>(
    await fetch(`${server.baseUrl}/api/workflows/${workflowFromTemplate._id}/versions`),
  );

  assert.equal(connections[0]?.provider, "local");
  assert.equal(cloudConnection.provider, "oss");
  assert.equal(cloudConnection.bucket, "emboflow-datasets");
  assert.equal(dataset.storageConnectionId, cloudConnection._id);
  // Dataset creation implicitly produced version 1.
  assert.equal(dataset.latestVersionNumber, 1);
  assert.equal(datasets.length, 1);
  assert.equal(datasets[0]?._id, dataset._id);
  assert.equal(datasetVersions.length, 1);
  assert.equal(datasetVersions[0]?.datasetId, dataset._id);
  assert.equal(datasetVersions[0]?.versionNumber, 1);
  assert.equal(template.name, "Delivery Review Template");
  assert.equal(templates.some((item) => item._id === template._id), true);
  // Workflow instantiated from the template carries the template's preset.
  assert.equal(workflowFromTemplate.latestVersionNumber, 1);
  assert.equal(workflowVersions.length, 1);
  assert.equal(workflowVersions[0]?.versionNumber, 1);
  assert.equal(workflowVersions[0]?.runtimeGraph?.selectedPreset, "delivery-template");
});
|
|
|
|
test("mongo-backed runtime can cancel a run, retry a run snapshot, and retry a failed task", async (t) => {
|
|
const mongod = await MongoMemoryServer.create({
|
|
instance: {
|
|
ip: "127.0.0.1",
|
|
port: 27122,
|
|
},
|
|
});
|
|
t.after(async () => {
|
|
await mongod.stop();
|
|
});
|
|
|
|
const server = await startRuntimeServer({
|
|
host: "127.0.0.1",
|
|
port: 0,
|
|
mongoUri: mongod.getUri(),
|
|
database: "emboflow-runtime-run-controls",
|
|
corsOrigin: "http://127.0.0.1:3000",
|
|
});
|
|
t.after(async () => {
|
|
await server.close();
|
|
});
|
|
|
|
const bootstrap = await readJson<{
|
|
workspace: { _id: string };
|
|
project: { _id: string };
|
|
}>(
|
|
await fetch(`${server.baseUrl}/api/dev/bootstrap`, {
|
|
method: "POST",
|
|
headers: { "content-type": "application/json" },
|
|
body: JSON.stringify({ userId: "run-controls-user", projectName: "Run Controls Project" }),
|
|
}),
|
|
);
|
|
|
|
const sourceDir = await mkdtemp(path.join(os.tmpdir(), "emboflow-runtime-controls-"));
|
|
await mkdir(path.join(sourceDir, "DJI_001"));
|
|
await writeFile(path.join(sourceDir, "meta.json"), "{}");
|
|
await writeFile(path.join(sourceDir, "intrinsics.json"), "{}");
|
|
await writeFile(path.join(sourceDir, "video_meta.json"), "{}");
|
|
|
|
const asset = await readJson<{ _id: string }>(
|
|
await fetch(`${server.baseUrl}/api/assets/register`, {
|
|
method: "POST",
|
|
headers: { "content-type": "application/json" },
|
|
body: JSON.stringify({
|
|
workspaceId: bootstrap.workspace._id,
|
|
projectId: bootstrap.project._id,
|
|
sourcePath: sourceDir,
|
|
}),
|
|
}),
|
|
);
|
|
await readJson(await fetch(`${server.baseUrl}/api/assets/${asset._id}/probe`, { method: "POST" }));
|
|
|
|
const workflow = await readJson<{ _id: string }>(
|
|
await fetch(`${server.baseUrl}/api/workflows`, {
|
|
method: "POST",
|
|
headers: { "content-type": "application/json" },
|
|
body: JSON.stringify({
|
|
workspaceId: bootstrap.workspace._id,
|
|
projectId: bootstrap.project._id,
|
|
name: "Run Control Flow",
|
|
}),
|
|
}),
|
|
);
|
|
|
|
const version = await readJson<{ _id: string }>(
|
|
await fetch(`${server.baseUrl}/api/workflows/${workflow._id}/versions`, {
|
|
method: "POST",
|
|
headers: { "content-type": "application/json" },
|
|
body: JSON.stringify({
|
|
visualGraph: { viewport: { x: 0, y: 0, zoom: 1 } },
|
|
logicGraph: {
|
|
nodes: [
|
|
{ id: "source-asset", type: "source" },
|
|
{ id: "validate-structure", type: "inspect" },
|
|
{ id: "export-delivery-package", type: "export" },
|
|
],
|
|
edges: [
|
|
{ from: "source-asset", to: "validate-structure" },
|
|
{ from: "validate-structure", to: "export-delivery-package" },
|
|
],
|
|
},
|
|
runtimeGraph: { selectedPreset: "delivery-normalization" },
|
|
pluginRefs: ["builtin:delivery-nodes"],
|
|
}),
|
|
}),
|
|
);
|
|
|
|
const run = await readJson<{ _id: string }>(
|
|
await fetch(`${server.baseUrl}/api/runs`, {
|
|
method: "POST",
|
|
headers: { "content-type": "application/json" },
|
|
body: JSON.stringify({
|
|
workflowDefinitionId: workflow._id,
|
|
workflowVersionId: version._id,
|
|
assetIds: [asset._id],
|
|
}),
|
|
}),
|
|
);
|
|
|
|
const cancelledRun = await readJson<{
|
|
_id: string;
|
|
status: string;
|
|
summary?: { taskCounts?: { cancelled?: number } };
|
|
}>(
|
|
await fetch(`${server.baseUrl}/api/runs/${run._id}/cancel`, { method: "POST" }),
|
|
);
|
|
const cancelledTasks = await readJson<Array<{ status: string }>>(
|
|
await fetch(`${server.baseUrl}/api/runs/${run._id}/tasks`),
|
|
);
|
|
|
|
assert.equal(cancelledRun.status, "cancelled");
|
|
assert.equal(cancelledRun.summary?.taskCounts?.cancelled, 3);
|
|
assert.deepEqual(
|
|
cancelledTasks.map((task) => task.status),
|
|
["cancelled", "cancelled", "cancelled"],
|
|
);
|
|
|
|
const retriedRun = await readJson<{
|
|
_id: string;
|
|
workflowVersionId: string;
|
|
assetIds: string[];
|
|
status: string;
|
|
}>(
|
|
await fetch(`${server.baseUrl}/api/runs/${run._id}/retry`, {
|
|
method: "POST",
|
|
headers: { "content-type": "application/json" },
|
|
body: JSON.stringify({ triggeredBy: "run-controls-user" }),
|
|
}),
|
|
);
|
|
|
|
assert.notEqual(retriedRun._id, run._id);
|
|
assert.equal(retriedRun.workflowVersionId, version._id);
|
|
assert.deepEqual(retriedRun.assetIds, [asset._id]);
|
|
assert.equal(retriedRun.status, "queued");
|
|
|
|
const client = new MongoClient(mongod.getUri());
|
|
await client.connect();
|
|
t.after(async () => {
|
|
await client.close();
|
|
});
|
|
const db = client.db("emboflow-runtime-run-controls");
|
|
|
|
const retriedRunTasks = await db
|
|
.collection("run_tasks")
|
|
.find({ workflowRunId: retriedRun._id })
|
|
.sort({ createdAt: 1 })
|
|
.toArray();
|
|
const sourceTask = retriedRunTasks.find((task) => task.nodeId === "source-asset");
|
|
const failedTask = retriedRunTasks.find((task) => task.nodeId === "validate-structure");
|
|
const downstreamTask = retriedRunTasks.find((task) => task.nodeId === "export-delivery-package");
|
|
await db.collection("run_tasks").updateOne(
|
|
{ _id: sourceTask?._id },
|
|
{
|
|
$set: {
|
|
status: "success",
|
|
summary: {
|
|
outcome: "success",
|
|
executorType: "python",
|
|
assetCount: 1,
|
|
artifactIds: [],
|
|
stdoutLineCount: 1,
|
|
stderrLineCount: 0,
|
|
},
|
|
stdoutLines: ["source completed"],
|
|
logLines: ["Task claimed by worker", "stdout: source completed", "Executor completed successfully"],
|
|
},
|
|
},
|
|
);
|
|
await db.collection("run_tasks").updateOne(
|
|
{ _id: failedTask?._id },
|
|
{
|
|
$set: {
|
|
status: "failed",
|
|
errorMessage: "validation failed",
|
|
summary: {
|
|
outcome: "failed",
|
|
executorType: "python",
|
|
assetCount: 1,
|
|
artifactIds: [],
|
|
stdoutLineCount: 0,
|
|
stderrLineCount: 1,
|
|
errorMessage: "validation failed",
|
|
},
|
|
stderrLines: ["validation failed"],
|
|
logLines: ["Task claimed by worker", "stderr: validation failed", "Execution failed: validation failed"],
|
|
},
|
|
},
|
|
);
|
|
await db.collection("run_tasks").updateOne(
|
|
{ _id: downstreamTask?._id },
|
|
{
|
|
$set: {
|
|
status: "cancelled",
|
|
logLines: ["Task cancelled after upstream failure"],
|
|
},
|
|
},
|
|
);
|
|
await db.collection("workflow_runs").updateOne(
|
|
{ _id: retriedRun._id },
|
|
{
|
|
$set: {
|
|
status: "failed",
|
|
summary: {
|
|
totalTaskCount: 3,
|
|
completedTaskCount: 2,
|
|
artifactCount: 0,
|
|
stdoutLineCount: 0,
|
|
stderrLineCount: 1,
|
|
failedTaskIds: [failedTask?._id],
|
|
taskCounts: {
|
|
pending: 0,
|
|
queued: 0,
|
|
running: 0,
|
|
success: 1,
|
|
failed: 1,
|
|
cancelled: 1,
|
|
},
|
|
},
|
|
},
|
|
},
|
|
);
|
|
|
|
const retriedTaskRun = await readJson<{
|
|
_id: string;
|
|
status: string;
|
|
summary?: { taskCounts?: { queued?: number; pending?: number } };
|
|
}>(
|
|
await fetch(`${server.baseUrl}/api/runs/${retriedRun._id}/tasks/${failedTask?._id}/retry`, {
|
|
method: "POST",
|
|
}),
|
|
);
|
|
const refreshedTasks = await readJson<
|
|
Array<{
|
|
_id: string;
|
|
status: string;
|
|
attempt: number;
|
|
errorMessage?: string;
|
|
logLines?: string[];
|
|
stderrLines?: string[];
|
|
}>
|
|
>(await fetch(`${server.baseUrl}/api/runs/${retriedRun._id}/tasks`));
|
|
|
|
const refreshedFailedTask = refreshedTasks.find((task) => task._id === failedTask?._id);
|
|
const refreshedDownstreamTask = refreshedTasks.find((task) => task._id === downstreamTask?._id);
|
|
|
|
assert.equal(retriedTaskRun.status, "queued");
|
|
assert.equal(retriedTaskRun.summary?.taskCounts?.queued, 1);
|
|
assert.equal(retriedTaskRun.summary?.taskCounts?.pending, 1);
|
|
assert.equal(refreshedFailedTask?.status, "queued");
|
|
assert.equal(refreshedFailedTask?.attempt, 2);
|
|
assert.equal(refreshedFailedTask?.errorMessage, undefined);
|
|
assert.deepEqual(refreshedFailedTask?.stderrLines, []);
|
|
assert.match(refreshedFailedTask?.logLines?.[0] ?? "", /retry/i);
|
|
assert.equal(refreshedDownstreamTask?.status, "pending");
|
|
});
|
|
|
|
test("mongo-backed runtime manages custom docker nodes and exposes them as project node definitions", async (t) => {
|
|
const mongod = await MongoMemoryServer.create({
|
|
instance: {
|
|
ip: "127.0.0.1",
|
|
port: 27131,
|
|
},
|
|
});
|
|
t.after(async () => {
|
|
await mongod.stop();
|
|
});
|
|
|
|
const server = await startRuntimeServer({
|
|
host: "127.0.0.1",
|
|
port: 0,
|
|
mongoUri: mongod.getUri(),
|
|
database: "emboflow-runtime-custom-nodes",
|
|
corsOrigin: "http://127.0.0.1:3000",
|
|
});
|
|
t.after(async () => {
|
|
await server.close();
|
|
});
|
|
|
|
const bootstrap = await readJson<{
|
|
workspace: { _id: string };
|
|
project: { _id: string };
|
|
}>(
|
|
await fetch(`${server.baseUrl}/api/dev/bootstrap`, {
|
|
method: "POST",
|
|
headers: { "content-type": "application/json" },
|
|
body: JSON.stringify({ userId: "custom-node-user", projectName: "Custom Node Project" }),
|
|
}),
|
|
);
|
|
|
|
const imageNode = await readJson<{ _id: string; definitionId: string }>(
|
|
await fetch(`${server.baseUrl}/api/custom-nodes`, {
|
|
method: "POST",
|
|
headers: { "content-type": "application/json" },
|
|
body: JSON.stringify({
|
|
workspaceId: bootstrap.workspace._id,
|
|
projectId: bootstrap.project._id,
|
|
name: "Merge Labels",
|
|
description: "Combine label reports from upstream nodes",
|
|
category: "Utility",
|
|
source: {
|
|
kind: "image",
|
|
image: "python:3.11-alpine",
|
|
command: ["python3", "-c", "print('custom image node')"],
|
|
},
|
|
contract: {
|
|
inputMode: "multi_asset_set",
|
|
outputMode: "asset_set_with_report",
|
|
artifactType: "json",
|
|
},
|
|
createdBy: "custom-node-user",
|
|
}),
|
|
}),
|
|
);
|
|
|
|
const dockerfileNode = await readJson<{ _id: string; definitionId: string }>(
|
|
await fetch(`${server.baseUrl}/api/custom-nodes`, {
|
|
method: "POST",
|
|
headers: { "content-type": "application/json" },
|
|
body: JSON.stringify({
|
|
workspaceId: bootstrap.workspace._id,
|
|
projectId: bootstrap.project._id,
|
|
name: "Dockerfile Union",
|
|
description: "Union assets with a self-contained Dockerfile",
|
|
category: "Utility",
|
|
source: {
|
|
kind: "dockerfile",
|
|
imageTag: "emboflow-test/dockerfile-union:latest",
|
|
dockerfileContent: [
|
|
"FROM python:3.11-alpine",
|
|
"CMD [\"python3\", \"-c\", \"print('dockerfile custom node')\"]",
|
|
].join("\n"),
|
|
},
|
|
contract: {
|
|
inputMode: "multi_asset_set",
|
|
outputMode: "asset_set",
|
|
artifactType: "json",
|
|
},
|
|
createdBy: "custom-node-user",
|
|
}),
|
|
}),
|
|
);
|
|
|
|
const nodeDefinitions = await readJson<
|
|
Array<{
|
|
id: string;
|
|
defaultExecutorType?: string;
|
|
defaultExecutorConfig?: Record<string, unknown>;
|
|
allowsMultipleIncoming?: boolean;
|
|
}>
|
|
>(
|
|
await fetch(
|
|
`${server.baseUrl}/api/node-definitions?projectId=${encodeURIComponent(bootstrap.project._id)}`,
|
|
),
|
|
);
|
|
|
|
const imageDefinition = nodeDefinitions.find((definition) => definition.id === imageNode.definitionId);
|
|
const dockerfileDefinition = nodeDefinitions.find((definition) => definition.id === dockerfileNode.definitionId);
|
|
|
|
assert.equal(imageDefinition?.defaultExecutorType, "docker");
|
|
assert.equal(imageDefinition?.allowsMultipleIncoming, true);
|
|
assert.equal(imageDefinition?.defaultExecutorConfig?.image, "python:3.11-alpine");
|
|
assert.equal(
|
|
(imageDefinition?.defaultExecutorConfig?.contract as { outputMode?: string } | undefined)?.outputMode,
|
|
"asset_set_with_report",
|
|
);
|
|
assert.equal(dockerfileDefinition?.defaultExecutorType, "docker");
|
|
assert.equal(
|
|
typeof dockerfileDefinition?.defaultExecutorConfig?.dockerfileContent,
|
|
"string",
|
|
);
|
|
assert.equal(
|
|
(dockerfileDefinition?.defaultExecutorConfig?.contract as { outputMode?: string } | undefined)?.outputMode,
|
|
"asset_set",
|
|
);
|
|
});
|
|
|
|
test("mongo-backed runtime rejects invalid custom node definitions with a 400 error", async (t) => {
|
|
const { MongoMemoryServer } = await import("mongodb-memory-server");
|
|
const mongod = await MongoMemoryServer.create();
|
|
t.after(async () => {
|
|
await mongod.stop();
|
|
});
|
|
|
|
const server = await startRuntimeServer({
|
|
host: "127.0.0.1",
|
|
port: 0,
|
|
mongoUri: mongod.getUri(),
|
|
database: "emboflow-runtime-custom-node-validation",
|
|
corsOrigin: "http://127.0.0.1:3000",
|
|
});
|
|
t.after(async () => {
|
|
await server.close();
|
|
});
|
|
|
|
const bootstrap = await readJson<{
|
|
workspace: { _id: string };
|
|
project: { _id: string };
|
|
}>(
|
|
await fetch(`${server.baseUrl}/api/dev/bootstrap`, {
|
|
method: "POST",
|
|
headers: { "content-type": "application/json" },
|
|
body: JSON.stringify({ userId: "custom-node-validation-user", projectName: "Validation Project" }),
|
|
}),
|
|
);
|
|
|
|
const response = await fetch(`${server.baseUrl}/api/custom-nodes`, {
|
|
method: "POST",
|
|
headers: { "content-type": "application/json" },
|
|
body: JSON.stringify({
|
|
workspaceId: bootstrap.workspace._id,
|
|
projectId: bootstrap.project._id,
|
|
name: "Broken Source",
|
|
category: "Source",
|
|
source: {
|
|
kind: "dockerfile",
|
|
dockerfileContent: "CMD [\"python3\"]",
|
|
},
|
|
contract: {
|
|
inputMode: "multi_asset_set",
|
|
outputMode: "report",
|
|
artifactType: "json",
|
|
},
|
|
createdBy: "custom-node-validation-user",
|
|
}),
|
|
});
|
|
|
|
assert.equal(response.status, 400);
|
|
const payload = (await response.json()) as { message: string };
|
|
assert.equal(payload.message, "source category custom nodes cannot declare multi_asset_set input");
|
|
});
|
|
|
|
test("mongo-backed runtime preflights workflow runs before creation and blocks invalid executor config", async (t) => {
|
|
const sourceDir = await mkdtemp(path.join(os.tmpdir(), "emboflow-runtime-preflight-"));
|
|
await mkdir(path.join(sourceDir, "DJI_001"));
|
|
await writeFile(path.join(sourceDir, "meta.json"), "{}");
|
|
await writeFile(path.join(sourceDir, "intrinsics.json"), "{}");
|
|
await writeFile(path.join(sourceDir, "video_meta.json"), "{}");
|
|
await writeFile(path.join(sourceDir, "DJI_001", "DJI_001.mp4"), "");
|
|
|
|
const mongod = await MongoMemoryServer.create();
|
|
t.after(async () => {
|
|
await mongod.stop();
|
|
});
|
|
|
|
const server = await startRuntimeServer({
|
|
host: "127.0.0.1",
|
|
port: 0,
|
|
mongoUri: mongod.getUri(),
|
|
database: "emboflow-runtime-preflight",
|
|
corsOrigin: "http://127.0.0.1:3000",
|
|
});
|
|
t.after(async () => {
|
|
await server.close();
|
|
});
|
|
|
|
const bootstrap = await readJson<{
|
|
workspace: { _id: string };
|
|
project: { _id: string };
|
|
}>(
|
|
await fetch(`${server.baseUrl}/api/dev/bootstrap`, {
|
|
method: "POST",
|
|
headers: { "content-type": "application/json" },
|
|
body: JSON.stringify({ userId: "preflight-user", projectName: "Preflight Project" }),
|
|
}),
|
|
);
|
|
|
|
const asset = await readJson<{ _id: string }>(
|
|
await fetch(`${server.baseUrl}/api/assets/register`, {
|
|
method: "POST",
|
|
headers: { "content-type": "application/json" },
|
|
body: JSON.stringify({
|
|
workspaceId: bootstrap.workspace._id,
|
|
projectId: bootstrap.project._id,
|
|
sourcePath: sourceDir,
|
|
}),
|
|
}),
|
|
);
|
|
await readJson(await fetch(`${server.baseUrl}/api/assets/${asset._id}/probe`, { method: "POST" }));
|
|
|
|
const workflow = await readJson<{ _id: string }>(
|
|
await fetch(`${server.baseUrl}/api/workflows`, {
|
|
method: "POST",
|
|
headers: { "content-type": "application/json" },
|
|
body: JSON.stringify({
|
|
workspaceId: bootstrap.workspace._id,
|
|
projectId: bootstrap.project._id,
|
|
name: "Preflight Flow",
|
|
}),
|
|
}),
|
|
);
|
|
|
|
const version = await readJson<{ _id: string }>(
|
|
await fetch(`${server.baseUrl}/api/workflows/${workflow._id}/versions`, {
|
|
method: "POST",
|
|
headers: { "content-type": "application/json" },
|
|
body: JSON.stringify({
|
|
visualGraph: { viewport: { x: 0, y: 0, zoom: 1 } },
|
|
logicGraph: {
|
|
nodes: [
|
|
{ id: "source-asset", type: "source" },
|
|
{ id: "validate-structure", type: "inspect" },
|
|
{ id: "export-delivery-package", type: "export" },
|
|
],
|
|
edges: [
|
|
{ from: "source-asset", to: "validate-structure" },
|
|
{ from: "validate-structure", to: "export-delivery-package" },
|
|
],
|
|
},
|
|
runtimeGraph: {
|
|
nodeConfigs: {
|
|
"validate-structure": {
|
|
executorType: "http",
|
|
executorConfig: {
|
|
method: "POST",
|
|
},
|
|
},
|
|
},
|
|
},
|
|
pluginRefs: ["builtin:delivery-nodes"],
|
|
}),
|
|
}),
|
|
);
|
|
|
|
const preflight = await readJson<{
|
|
ok: boolean;
|
|
issues: Array<{ code: string; message: string; nodeId?: string; severity: string }>;
|
|
summary: { errorCount: number; warningCount: number };
|
|
}>(
|
|
await fetch(`${server.baseUrl}/api/runs/preflight`, {
|
|
method: "POST",
|
|
headers: { "content-type": "application/json" },
|
|
body: JSON.stringify({
|
|
workflowDefinitionId: workflow._id,
|
|
workflowVersionId: version._id,
|
|
assetIds: [asset._id],
|
|
}),
|
|
}),
|
|
);
|
|
|
|
assert.equal(preflight.ok, false);
|
|
assert.equal(preflight.summary.errorCount, 1);
|
|
assert.equal(preflight.issues[0]?.code, "http_executor_missing_url");
|
|
assert.equal(preflight.issues[0]?.nodeId, "validate-structure");
|
|
|
|
const runResponse = await fetch(`${server.baseUrl}/api/runs`, {
|
|
method: "POST",
|
|
headers: { "content-type": "application/json" },
|
|
body: JSON.stringify({
|
|
workflowDefinitionId: workflow._id,
|
|
workflowVersionId: version._id,
|
|
assetIds: [asset._id],
|
|
}),
|
|
});
|
|
|
|
assert.equal(runResponse.status, 400);
|
|
const runPayload = (await runResponse.json()) as { message: string };
|
|
assert.equal(runPayload.message, "node validate-structure uses the http executor without a url");
|
|
});
|